├── assets ├── bunny.png ├── toy_cat.png └── woman.png ├── CHANGELOG.md ├── .gitignore ├── deny.toml ├── src ├── lib.rs ├── conjugate_gradient.rs ├── poisson_vector_field.rs ├── hgrid.rs ├── poisson.rs ├── poisson_layer.rs ├── polynomial.rs └── marching_cubes.rs ├── .github └── workflows │ └── release.yml ├── Cargo.toml ├── LICENSE-MIT ├── README.md ├── examples └── reconstruction_demo.rs └── LICENSE-APACHE /assets/bunny.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ForesightMiningSoftwareCorporation/PoissonReconstruction/HEAD/assets/bunny.png -------------------------------------------------------------------------------- /assets/toy_cat.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ForesightMiningSoftwareCorporation/PoissonReconstruction/HEAD/assets/toy_cat.png -------------------------------------------------------------------------------- /assets/woman.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ForesightMiningSoftwareCorporation/PoissonReconstruction/HEAD/assets/woman.png -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## v0.4.0 4 | 5 | - Update parry dependency to 0.21. 6 | 7 | ## v0.3.1 8 | 9 | - Fix the extraction of a mesh from the poisson reconstruction. 10 | - Add `PoissonReconstruction::reconstruct_trimesh` and `::reconstruct_mesh_buffers` for extracting 11 | a triangle mesh with properly wired-up topology. 12 | 13 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | /target/ 4 | 5 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 6 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 7 | Cargo.lock 8 | 9 | # These are backup files generated by rustfmt 10 | **/*.rs.bk 11 | 12 | .idea -------------------------------------------------------------------------------- /deny.toml: -------------------------------------------------------------------------------- 1 | [licenses] 2 | allow-osi-fsf-free = "neither" 3 | copyleft = "warn" 4 | default = "warn" 5 | unlicensed = "allow" 6 | allow = [ 7 | "Apache-2.0", 8 | "Apache-2.0 WITH LLVM-exception", 9 | "Artistic-2.0", 10 | "BSD-2-Clause", 11 | "BSD-3-Clause", 12 | "BSL-1.0", 13 | "CC0-1.0", 14 | "ISC", 15 | "MIT", 16 | "MIT-0", 17 | "0BSD", 18 | "Unicode-DFS-2016", 19 | "Unlicense", 20 | "Zlib", 21 | ] 22 | [licenses.private] 23 | ignore = true 24 | 25 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | /*! 2 | Rust implementation of the [Screened poisson reconstruction](https://www.cs.jhu.edu/~misha/MyPapers/ToG13.pdf) 3 | by Kazhdan and Hoppe. 4 | */ 5 | 6 | #![allow(clippy::type_complexity, clippy::too_many_arguments)] 7 | #![warn(missing_docs)] 8 | 9 | /// Floating-point type used by this library. 
10 | pub type Real = f64; 11 | 12 | extern crate nalgebra as na; 13 | extern crate parry3d_f64 as parry; 14 | 15 | pub use self::poisson::PoissonReconstruction; 16 | 17 | mod conjugate_gradient; 18 | mod hgrid; 19 | pub mod marching_cubes; 20 | mod poisson; 21 | mod poisson_layer; 22 | mod poisson_vector_field; 23 | mod polynomial; 24 | -------------------------------------------------------------------------------- /src/conjugate_gradient.rs: -------------------------------------------------------------------------------- 1 | use crate::Real; 2 | use na::DVector; 3 | use nalgebra_sparse::CscMatrix; 4 | 5 | pub fn solve_conjugate_gradient(a: &CscMatrix, b: &mut DVector, niters: usize) { 6 | let mut r = &*b - a * &*b; 7 | let mut p = r.clone(); 8 | let mut prev_rr = r.dot(&r); 9 | 10 | for _ in 0..niters { 11 | let ap = a * &p; // TODO: avoid the allocation. 12 | let alpha = r.dot(&r) / p.dot(&ap); 13 | b.axpy(alpha, &p, 1.0); 14 | r.axpy(-alpha, &ap, 1.0); 15 | let rr = r.dot(&r); 16 | let beta = rr / prev_rr; 17 | prev_rr = rr; 18 | p.axpy(1.0, &r, beta); 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release Pipeline 2 | 3 | on: 4 | pull_request: 5 | push: 6 | branches: 7 | - main 8 | workflow_dispatch: 9 | inputs: 10 | publish: 11 | type: boolean 12 | required: false 13 | description: Trigger with publish 14 | 15 | jobs: 16 | publish: 17 | uses: ForesightMiningSoftwareCorporation/github/.github/workflows/rust-build.yml@v1 18 | with: 19 | skip-test: ${{ github.event_name == 'push' && 'true' || (github.event_name == 'workflow_dispatch' && inputs.publish) }} 20 | publish: ${{ (github.event_name == 'push' || (github.event_name == 'workflow_dispatch' && inputs.publish)) && 'true' || 'false' }} 21 | publish_public_registry: true 22 | secrets: inherit 23 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "poisson_reconstruction" 3 | repository = "https://github.com/ForesightMiningSoftwareCorporation/PoissonReconstruction" 4 | version = "0.4.0" 5 | license = "MIT OR Apache-2.0" 6 | description = "Screened Poisson Reconstruction algorithm in Rust" 7 | authors = ["Sébastien Crozet "] 8 | readme = "README.md" 9 | keywords = ["surface", "reconstruction", "poisson", "implicit"] 10 | edition = "2021" 11 | 12 | [features] 13 | serde-serialize = [ "nalgebra/serde-serialize", "serde" ] 14 | 15 | [dependencies] 16 | serde = { version = "1", features = ["derive"], optional = true } 17 | rayon = "1" 18 | nalgebra = "0.33" 19 | nalgebra-sparse = "0.10" 20 | parry3d-f64 = "0.21" 21 | itertools = "0.14" 22 | fnv = "1" 23 | 24 | [dev-dependencies] 25 | bevy = "0.15" 26 | bevy_panorbit_camera = "0.22" 27 | ply-rs = "0.1" -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the 
Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in all 11 | copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 19 | SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![.github/workflows/release.yml](https://github.com/ForesightMiningSoftwareCorporation/PoissonReconstruction/actions/workflows/release.yml/badge.svg)](https://github.com/ForesightMiningSoftwareCorporation/PoissonReconstruction/actions/workflows/release.yml) ![crates.io](https://img.shields.io/crates/v/poisson_reconstruction.svg) 2 | # PoissonReconstruction 3 | This is a Rust implementation of the Screened Poisson Reconstruction surface reconstruction algorithm. This 4 | implementation comes courtesy of Foresight Mining Software Corporation who sponsor its creation and maintenance. 5 | 6 | ## Reference papers 7 | - [Poisson Surface Reconstruction](https://hhoppe.com/poissonrecon.pdf) by Kazhdan, Bolitho, and Hoppe. 8 | - [Screened Poisson Surface Reconstruction](https://www.cs.jhu.edu/~misha/MyPapers/ToG13.pdf) by Kazhdan and Hoppe. 9 | 10 | ## Features 11 | Given a set of points and normals, the `PoissonReconstruction` will generate an implicit function from which the 12 | reconstructed surface can be extracted. 13 | 14 | ```rust 15 | let poisson = PoissonReconstruction::from_points_and_normals( 16 | &points, &normals, 0.0, 4, 5, 10 17 | ); 18 | let mesh = poisson.reconstruct_mesh_buffers(); 19 | ``` 20 | 21 | Since the `PoissonReconstruction` represents an implicit function, it can be evaluated at arbitrary space locations with 22 | `PoissonReconstruction::eval`. For convenience, a very basic isosurface extraction based on marching-cubes is provided 23 | with `PoissonReconstruction::reconstruct_mesh_buffers()` and `PoissonReconstruction::reconstruct_trimesh()`. 24 | 25 | ## Limitations 26 | Some speed optimizations described in the Kazhdan et al. paper are not implemented yet. Namely, it currently doesn’t implement the 27 | hierarchical clustering of points optimization nor the conforming version of the cascadic solver. 28 | 29 | ## License 30 | 31 | PoissonReconstruction is free and open source! All code in this repository is dual-licensed under either: 32 | 33 | * MIT License ([LICENSE-MIT](LICENSE-MIT) or [http://opensource.org/licenses/MIT](http://opensource.org/licenses/MIT)) 34 | * Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0)) 35 | 36 | at your option. This means you can select the license you prefer! This dual-licensing approach is the de-facto standard 37 | in the Rust ecosystem and there are very good reasons to include both. 38 | 39 | Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as 40 | defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions.
41 | 42 | ## Sponsors 43 | The creation and maintenance of PoissonReconstruction is sponsored by Foresight Mining Software Corporation. 44 | 45 | Foresight Mining Software Corporation 46 | 47 | ## Samples 48 | 49 | ![Poisson woman](assets/woman.png) 50 | ![Poisson bunny](assets/bunny.png) 51 | ![Poisson cat](assets/toy_cat.png) 52 | -------------------------------------------------------------------------------- /examples/reconstruction_demo.rs: -------------------------------------------------------------------------------- 1 | use bevy::asset::RenderAssetUsages; 2 | use bevy::pbr::wireframe::{Wireframe, WireframePlugin}; 3 | use bevy::prelude::*; 4 | use bevy::render::mesh::{Indices, PrimitiveTopology}; 5 | use bevy_panorbit_camera::{PanOrbitCamera, PanOrbitCameraPlugin}; 6 | use nalgebra::{Point3, Vector3}; 7 | use ply_rs::{parser, ply}; 8 | use poisson_reconstruction::marching_cubes::MeshBuffers; 9 | use poisson_reconstruction::{PoissonReconstruction, Real}; 10 | use std::io::BufRead; 11 | use std::path::Path; 12 | use std::str::FromStr; 13 | 14 | fn main() { 15 | App::new() 16 | .add_plugins((DefaultPlugins, PanOrbitCameraPlugin)) 17 | .add_plugins(WireframePlugin) 18 | .add_systems(Startup, setup_camera_and_light) 19 | .add_systems(Startup, setup_scene) 20 | .run(); 21 | } 22 | 23 | fn setup_scene( 24 | mut commands: Commands, 25 | mut meshes: ResMut>, 26 | mut materials: ResMut>, 27 | ) { 28 | let point_cloud = parse_file("./assets/xiaojiejie2_pcd.ply", true); 29 | let surface = reconstruct_surface(&point_cloud); 30 | spawn_mesh(&mut commands, &mut meshes, &mut materials, surface); 31 | dbg!("Done"); 32 | } 33 | 34 | fn setup_camera_and_light(mut commands: Commands) { 35 | commands.spawn(PointLightBundle { 36 | point_light: PointLight { 37 | intensity: 900000.0, 38 | range: 1000., 39 | ..default() 40 | }, 41 | transform: Transform::from_xyz(-100.0, 50.0, -100.0), 42 | ..default() 43 | }); 44 | commands.spawn(( 45 | Camera3dBundle { 46 | transform: Transform::from_xyz(-100.0, 25.0, -100.0) 47 | .looking_at(Vec3::new(0., 0., 0.), Vec3::Y), 48 | ..default() 49 | }, 50 | PanOrbitCamera::default(), 51 | )); 52 | } 53 | 54 | fn spawn_mesh( 55 | commands: &mut Commands, 56 | meshes: &mut Assets, 57 | materials: &mut Assets, 58 | points: MeshBuffers, 59 | ) { 60 | // Create the bevy mesh. 
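// Editor's note (added comments): `Real` is `f64`, but bevy stores mesh attributes as `f32`,
// so each reconstructed vertex is downcast below. The index buffer stays `u32` triples,
// matching the `PrimitiveTopology::TriangleList` used when the mesh is created.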
61 | let vertices: Vec<_> = points 62 | .vertices() 63 | .iter() 64 | .map(|pt| [pt.x as f32, pt.y as f32, pt.z as f32]) 65 | .collect(); 66 | let mut mesh = Mesh::new( 67 | PrimitiveTopology::TriangleList, 68 | RenderAssetUsages::default(), 69 | ); 70 | mesh.insert_attribute(Mesh::ATTRIBUTE_POSITION, vertices); 71 | mesh.insert_indices(Indices::U32(points.indices().to_vec())); 72 | 73 | commands 74 | .spawn(PbrBundle { 75 | mesh: Mesh3d(meshes.add(mesh)), 76 | material: MeshMaterial3d(materials.add(Color::srgb(0.0, 1.0, 0.0))), 77 | transform: Transform::from_rotation(Quat::from_rotation_x(180.0f32.to_radians())), 78 | ..default() 79 | }) 80 | .insert(Wireframe); 81 | } 82 | 83 | #[derive(Default)] 84 | struct VertexWithNormal { 85 | pos: Point3, 86 | normal: Vector3, 87 | } 88 | 89 | impl ply::PropertyAccess for VertexWithNormal { 90 | fn new() -> Self { 91 | Self::default() 92 | } 93 | 94 | fn set_property(&mut self, key: String, property: ply::Property) { 95 | match (key.as_ref(), property) { 96 | ("x", ply::Property::Float(v)) => self.pos.x = v as Real, 97 | ("y", ply::Property::Float(v)) => self.pos.y = v as Real, 98 | ("z", ply::Property::Float(v)) => self.pos.z = v as Real, 99 | ("nx", ply::Property::Float(v)) => self.normal.x = v as Real, 100 | ("ny", ply::Property::Float(v)) => self.normal.y = v as Real, 101 | ("nz", ply::Property::Float(v)) => self.normal.z = v as Real, 102 | _ => {} 103 | } 104 | } 105 | } 106 | 107 | fn parse_file(path: impl AsRef, ply: bool) -> Vec { 108 | let f = std::fs::File::open(path).unwrap(); 109 | let mut f = std::io::BufReader::new(f); 110 | 111 | if ply { 112 | let vertex_parser = parser::Parser::::new(); 113 | let header = vertex_parser.read_header(&mut f).unwrap(); 114 | 115 | // Depending on the header, read the data into our structs.. 
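// Editor's note (added comments): the PLY header lists the elements present in the file;
// only the "vertex" element is parsed below (its x/y/z and nx/ny/nz properties are mapped
// by `VertexWithNormal::set_property`), and every other element is skipped.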
116 | let mut vertex_list = Vec::new(); 117 | for (_ignore_key, element) in &header.elements { 118 | // we could also just parse them in sequence, but the file format might change 119 | match element.name.as_ref() { 120 | "vertex" => { 121 | vertex_list = vertex_parser 122 | .read_payload_for_element(&mut f, &element, &header) 123 | .unwrap(); 124 | } 125 | _ => {} 126 | } 127 | } 128 | vertex_list 129 | } else { 130 | let mut result = vec![]; 131 | for line in f.lines() { 132 | if let Ok(line) = line { 133 | let values: Vec<_> = line 134 | .split_whitespace() 135 | .map(|elt| f64::from_str(elt).unwrap()) 136 | .collect(); 137 | result.push(VertexWithNormal { 138 | pos: Point3::new(values[0], values[1], values[2]), 139 | normal: Vector3::new(values[3], values[4], values[5]), 140 | }); 141 | } 142 | } 143 | result 144 | } 145 | } 146 | 147 | fn reconstruct_surface(vertices: &[VertexWithNormal]) -> MeshBuffers { 148 | let points: Vec<_> = vertices.iter().map(|v| v.pos).collect(); 149 | let normals: Vec<_> = vertices.iter().map(|v| v.normal).collect(); 150 | 151 | dbg!("Running poisson."); 152 | let poisson = PoissonReconstruction::from_points_and_normals(&points, &normals, 0.0, 6, 6, 10); 153 | dbg!("Extracting vertices."); 154 | poisson.reconstruct_mesh_buffers() 155 | } 156 | -------------------------------------------------------------------------------- /src/poisson_vector_field.rs: -------------------------------------------------------------------------------- 1 | use crate::poisson_layer::PoissonLayer; 2 | use crate::polynomial::TriQuadraticBspline; 3 | use crate::{poisson, Real}; 4 | use itertools::multizip; 5 | use na::{vector, DVector, Point3, Vector3}; 6 | use parry::bounding_volume::Aabb; 7 | use rayon::prelude::*; 8 | 9 | const CORNERS: [Vector3; 8] = [ 10 | vector![0, 0, 0], 11 | vector![1, 0, 0], 12 | vector![1, 1, 0], 13 | vector![0, 1, 0], 14 | vector![0, 0, 1], 15 | vector![1, 0, 1], 16 | vector![1, 1, 1], 17 | vector![0, 1, 1], 18 | ]; 19 | 20 | fn trilinear_coefficients(bcoords: Vector3) -> [Real; 8] { 21 | let map = |vals: Vector3| { 22 | vals.zip_map(&bcoords, |v, b| if v == 0 { 1.0 - b } else { b }) 23 | .product() 24 | }; 25 | 26 | [ 27 | map(CORNERS[0]), 28 | map(CORNERS[1]), 29 | map(CORNERS[2]), 30 | map(CORNERS[3]), 31 | map(CORNERS[4]), 32 | map(CORNERS[5]), 33 | map(CORNERS[6]), 34 | map(CORNERS[7]), 35 | ] 36 | } 37 | 38 | pub struct PoissonVectorField { 39 | pub(crate) densities: Vec, 40 | layers_normals: Vec>>, 41 | } 42 | 43 | impl PoissonVectorField { 44 | pub fn new( 45 | layers: &[PoissonLayer], 46 | points: &[Point3], 47 | normals: &[Vector3], 48 | density_estimation_depth: usize, 49 | ) -> Self { 50 | // Compute sample densities. 51 | let mut densities = vec![1.0; points.len()]; 52 | let density_layer = &layers[density_estimation_depth]; 53 | let mut splat_values = vec![0.0; density_layer.ordered_nodes.len()]; 54 | 55 | for pt in points { 56 | let half_width = Vector3::repeat(density_layer.cell_width() / 2.0); 57 | // Subtract half-width so the ref_node is the bottom-left node of the trilinear interpolation. 58 | let ref_node = density_layer.grid.key(&(pt - half_width)); 59 | 60 | // Barycentric coordinates of the points for trilinear interpolation. 
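// Editor's note (added comments): `ref_node` is the node at the lower corner of the
// interpolation cell (hence the half-width shift above). With barycentric coordinates
// (bx, by, bz) in [0, 1], `trilinear_coefficients` returns the usual trilinear weights, e.g.
//   corner (0, 0, 0): (1 - bx) * (1 - by) * (1 - bz)
//   corner (1, 1, 1): bx * by * bz
// so each sample's unit splat is distributed over the 8 surrounding grid nodes.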
61 | let cell_origin = density_layer.grid.cell_center(&ref_node); 62 | let bcoords = (pt - cell_origin) / density_layer.cell_width(); 63 | let coeffs = trilinear_coefficients(bcoords); 64 | 65 | for (corner_shift, coeff) in CORNERS.iter().zip(coeffs.iter()) { 66 | let node = ref_node + corner_shift; 67 | let id = density_layer.grid_node_idx[&node]; 68 | splat_values[id] += *coeff; 69 | } 70 | } 71 | 72 | for (pt, weight) in points.iter().zip(densities.iter_mut()) { 73 | *weight = poisson::eval_triquadratic( 74 | pt, 75 | &density_layer.grid, 76 | &density_layer.grid_node_idx, 77 | &splat_values, 78 | ); 79 | } 80 | 81 | let avg_density = densities.iter().copied().sum::() / (points.len() as Real); 82 | let samples_depths: Vec<_> = densities 83 | .iter() 84 | .map(|d| { 85 | (((layers.len() - 1) as Real + (*d / avg_density).log(4.0)) 86 | .round() 87 | .max(0.0) as usize) 88 | .min(layers.len() - 1) 89 | }) 90 | .collect(); 91 | 92 | let mut layers_normals = vec![]; 93 | 94 | for (layer_id, layer) in layers.iter().enumerate() { 95 | // Splat the normals into the grid. 96 | let mut grid_normals = vec![Vector3::zeros(); layer.ordered_nodes.len()]; 97 | 98 | for (pt, n, w, depth) in multizip((points, normals, &densities, &samples_depths)) { 99 | if *depth == layer_id { 100 | let half_width = Vector3::repeat(layer.grid.cell_width() / 2.0); 101 | let ref_node = layer.grid.key(&(pt - half_width)); 102 | 103 | // Barycentric coordinates of the points for trilinear interpolation. 104 | let cell_origin = layer.grid.cell_center(&ref_node); 105 | let bcoords = (pt - cell_origin) / layer.grid.cell_width(); 106 | let coeffs = trilinear_coefficients(bcoords); 107 | 108 | for (corner_shift, coeff) in CORNERS.iter().zip(coeffs.iter()) { 109 | let node = ref_node + corner_shift; 110 | let id = layer.grid_node_idx[&node]; 111 | grid_normals[id] += *n * *coeff / *w; 112 | } 113 | } 114 | } 115 | 116 | layers_normals.push(grid_normals); 117 | } 118 | 119 | Self { 120 | densities, 121 | layers_normals, 122 | } 123 | } 124 | 125 | pub fn build_rhs( 126 | &self, 127 | layers: &[PoissonLayer], 128 | curr_layer_id: usize, 129 | rhs: &mut DVector, 130 | ) { 131 | let curr_layer = &layers[curr_layer_id]; 132 | 133 | rhs.as_mut_slice() 134 | .par_iter_mut() 135 | .enumerate() 136 | .for_each(|(rhs_id, rhs)| { 137 | let curr_node = curr_layer.ordered_nodes[rhs_id]; 138 | let curr_node_center = curr_layer.grid.cell_center(&curr_node); 139 | 140 | for (other_layer_id, other_layer) in layers.iter().enumerate() { 141 | let aabb = Aabb::from_half_extents( 142 | curr_node_center, 143 | Vector3::repeat( 144 | curr_layer.cell_width() * 1.5 + other_layer.cell_width() * 1.5, 145 | ), 146 | ); 147 | 148 | for (other_node, _) in other_layer 149 | .grid 150 | .cells_intersecting_aabb(&aabb.mins, &aabb.maxs) 151 | { 152 | let other_node_id = other_layer.grid_node_idx[&other_node]; 153 | let normal = self.layers_normals[other_layer_id][other_node_id]; 154 | 155 | if normal != Vector3::zeros() { 156 | let other_node_center = other_layer.grid.cell_center(&other_node); 157 | let poly1 = TriQuadraticBspline::new( 158 | other_node_center, 159 | other_layer.cell_width(), 160 | ); 161 | let poly2 = 162 | TriQuadraticBspline::new(curr_node_center, curr_layer.cell_width()); 163 | let coeff = poly1.grad_grad(poly2, false, true); 164 | *rhs += normal.dot(&coeff); 165 | } 166 | } 167 | } 168 | }); 169 | } 170 | 171 | pub fn area_approximation(&self) -> Real { 172 | self.densities.iter().map(|d| 1.0 / *d).sum() 173 | } 174 | } 175 | 
-------------------------------------------------------------------------------- /src/hgrid.rs: -------------------------------------------------------------------------------- 1 | // This is a Hierarchical Grid taken from Salva. 2 | // We should probably move this to Parry. 3 | #![allow(dead_code)] 4 | 5 | use fnv::FnvHasher; 6 | 7 | use std::collections::HashMap; 8 | use std::hash::BuildHasher; 9 | 10 | use crate::Real; 11 | use na::{Point3, Vector3}; 12 | 13 | #[derive(Copy, Clone, Debug)] 14 | pub struct DeterministicState; 15 | 16 | impl Default for DeterministicState { 17 | fn default() -> Self { 18 | DeterministicState 19 | } 20 | } 21 | 22 | impl BuildHasher for DeterministicState { 23 | type Hasher = FnvHasher; 24 | 25 | fn build_hasher(&self) -> FnvHasher { 26 | FnvHasher::with_key(1820) 27 | } 28 | } 29 | 30 | /// A grid based on spacial hashing. 31 | #[derive(PartialEq, Debug, Clone)] 32 | #[cfg_attr( 33 | feature = "serde-serialize", 34 | derive(serde::Serialize, serde::Deserialize) 35 | )] 36 | pub struct HGrid { 37 | cells: HashMap, Vec, DeterministicState>, 38 | origin: Point3, 39 | cell_width: Real, 40 | } 41 | 42 | impl HGrid { 43 | /// Initialize a grid where each cell has the width `cell_width`. 44 | pub fn new(origin: Point3, cell_width: Real) -> Self { 45 | Self { 46 | cells: HashMap::with_hasher(DeterministicState), 47 | origin, 48 | cell_width, 49 | } 50 | } 51 | 52 | /// The width of a cell of this spacial grid. 53 | pub fn cell_width(&self) -> Real { 54 | self.cell_width 55 | } 56 | 57 | /// The origin of this grid. 58 | pub fn origin(&self) -> &Point3 { 59 | &self.origin 60 | } 61 | 62 | fn unquantify(value: i64, cell_width: Real) -> Real { 63 | value as Real * cell_width + cell_width / 2.0 64 | } 65 | 66 | fn quantify(value: Real, cell_width: Real) -> i64 { 67 | na::try_convert::((value / cell_width).floor()).unwrap() as i64 68 | } 69 | 70 | fn quantify_ceil(value: Real, cell_width: Real) -> i64 { 71 | na::try_convert::((value / cell_width).ceil()).unwrap() as i64 72 | } 73 | 74 | /// Computes the logical grid cell containing `point`. 75 | pub fn key(&self, point: &Point3) -> Point3 { 76 | Point3::from((point - self.origin).map(|e| Self::quantify(e, self.cell_width))) 77 | } 78 | 79 | /// Removes all elements from this grid. 80 | pub fn clear(&mut self) { 81 | self.cells.clear(); 82 | } 83 | 84 | /// Inserts the given `element` into the cell containing the given `point`. 85 | pub fn insert(&mut self, point: &Point3, element: T) { 86 | let key = self.key(point); 87 | self.cells.entry(key).or_insert_with(Vec::new).push(element) 88 | } 89 | 90 | /// Returns the element attached to the cell containing the given `point`. 91 | /// 92 | /// Returns `None` if the cell is empty. 93 | pub fn cell_containing_point(&self, point: &Point3) -> Option<&Vec> { 94 | let key = self.key(point); 95 | self.cells.get(&key) 96 | } 97 | 98 | /// An iterator through all the non-empty cells of this grid. 99 | /// 100 | /// The returned tuple include the cell indentifier, and the elements attached to this cell. 101 | pub fn cells(&self) -> impl Iterator, &Vec)> { 102 | self.cells.iter() 103 | } 104 | 105 | /// The underlying hash map of this spacial grid. 106 | pub fn inner_table(&self) -> &HashMap, Vec, DeterministicState> { 107 | &self.cells 108 | } 109 | 110 | /// Get the content of the logical cell identified by `key`. 
111 | pub fn cell(&self, key: &Point3) -> Option<&Vec> { 112 | self.cells.get(key) 113 | } 114 | 115 | pub fn cell_center(&self, cell: &Point3) -> Point3 { 116 | self.origin + cell.coords.map(|x| Self::unquantify(x, self.cell_width)) 117 | } 118 | 119 | /// An iterator through all the neighbors of the given cell. 120 | /// 121 | /// The given cell itself will be yielded by this iterator too. 122 | pub fn neighbor_cells( 123 | &self, 124 | cell: &Point3, 125 | radius: Real, 126 | ) -> impl Iterator, &Vec)> { 127 | let cells = &self.cells; 128 | let quantified_radius = Self::quantify_ceil(radius, self.cell_width); 129 | 130 | CellRangeIterator::with_center(*cell, quantified_radius) 131 | .filter_map(move |cell| cells.get(&cell).map(|c| (cell, c))) 132 | } 133 | 134 | /// An iterator through all the neighbors of the given cell, including empty cells. 135 | /// 136 | /// The given cell itself will be yielded by this iterator too. 137 | pub fn maybe_neighbor_cells( 138 | &self, 139 | cell: &Point3, 140 | radius: Real, 141 | ) -> impl Iterator, Option<&Vec>)> { 142 | let cells = &self.cells; 143 | let quantified_radius = Self::quantify_ceil(radius, self.cell_width); 144 | CellRangeIterator::with_center(*cell, quantified_radius) 145 | .map(move |cell| (cell, cells.get(&cell))) 146 | } 147 | 148 | /// An iterator through all the cells intersecting the given Aabb. 149 | pub fn cells_intersecting_aabb( 150 | &self, 151 | mins: &Point3, 152 | maxs: &Point3, 153 | ) -> impl Iterator, &Vec)> { 154 | let cells = &self.cells; 155 | let start = self.key(mins); 156 | let end = self.key(maxs); 157 | 158 | CellRangeIterator::new(start, end) 159 | .filter_map(move |cell| cells.get(&cell).map(|c| (cell, c))) 160 | } 161 | 162 | /// An iterator through all the cells intersecting the given Aabb, including empty cells. 
163 | pub fn maybe_cells_intersecting_aabb( 164 | &self, 165 | mins: &Point3, 166 | maxs: &Point3, 167 | ) -> impl Iterator, Option<&Vec>)> { 168 | let cells = &self.cells; 169 | let start = self.key(mins); 170 | let end = self.key(maxs); 171 | 172 | CellRangeIterator::new(start, end).map(move |cell| (cell, cells.get(&cell))) 173 | } 174 | } 175 | 176 | struct CellRangeIterator { 177 | start: Point3, 178 | end: Point3, 179 | curr: Point3, 180 | done: bool, 181 | } 182 | 183 | impl CellRangeIterator { 184 | fn new(start: Point3, end: Point3) -> Self { 185 | Self { 186 | start, 187 | end, 188 | curr: start, 189 | done: false, 190 | } 191 | } 192 | 193 | fn with_center(center: Point3, radius: i64) -> Self { 194 | let start = center - Vector3::repeat(radius); 195 | Self { 196 | start, 197 | end: center + Vector3::repeat(radius), 198 | curr: start, 199 | done: false, 200 | } 201 | } 202 | } 203 | 204 | impl Iterator for CellRangeIterator { 205 | type Item = Point3; 206 | 207 | fn next(&mut self) -> Option { 208 | if self.done { 209 | return None; 210 | } 211 | 212 | if self.curr == self.end { 213 | self.done = true; 214 | Some(self.curr) 215 | } else { 216 | let result = self.curr; 217 | 218 | for i in 0..3 { 219 | self.curr[i] += 1; 220 | 221 | if self.curr[i] > self.end[i] { 222 | self.curr[i] = self.start[i]; 223 | } else { 224 | break; 225 | } 226 | } 227 | 228 | Some(result) 229 | } 230 | } 231 | } 232 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | -------------------------------------------------------------------------------- /src/poisson.rs: -------------------------------------------------------------------------------- 1 | use crate::hgrid::HGrid; 2 | use crate::marching_cubes::{march_cube_idx, MeshBuffers}; 3 | use crate::poisson_layer::PoissonLayer; 4 | use crate::poisson_vector_field::PoissonVectorField; 5 | use crate::polynomial::{eval_bspline, eval_bspline_diff}; 6 | use crate::Real; 7 | use na::{vector, Point3, Vector3}; 8 | use parry::bounding_volume::{Aabb, BoundingVolume}; 9 | use parry::partitioning::IndexedData; 10 | use parry::shape::{TriMesh, TriMeshFlags}; 11 | use std::collections::HashMap; 12 | use std::ops::{AddAssign, Mul}; 13 | 14 | /// An implicit surface reconstructed with the Screened Poisson reconstruction algorithm. 15 | #[derive(Clone)] 16 | pub struct PoissonReconstruction { 17 | layers: Vec, 18 | isovalue: Real, 19 | } 20 | 21 | #[derive(Copy, Clone, PartialEq, Eq)] 22 | pub struct CellWithId { 23 | pub cell: Point3, 24 | pub id: usize, 25 | } 26 | 27 | impl IndexedData for CellWithId { 28 | fn default() -> Self { 29 | Self { 30 | cell: Point3::default(), 31 | id: 0, 32 | } 33 | } 34 | 35 | fn index(&self) -> usize { 36 | self.id 37 | } 38 | } 39 | 40 | impl PoissonReconstruction { 41 | /// Reconstruct a surface using the Screened Poisson reconstruction algorithm, 42 | /// given a set of sample points and normals at these points. 43 | /// 44 | /// # Parameters 45 | /// - `points`: the sample points. 46 | /// - `normals`: the normals at the sample points. Must have the same length as `points`. 47 | /// - `screening`: the screening coefficient. Larger values increase the fitting of the 48 | /// reconstructed surface relative to the sample point’s positions. 
Setting this to `0.0` 49 | /// disables screening (but reduces computation times). 50 | /// - `density_estimation_depth`: the depth on the multigrid solver where point density estimation 51 | /// is calculated. The estimation kernel radius will be equal to the maximum extent of the 52 | /// input point’s AABB, divided by `2.pow(max_depth)`. Smaller value of this parameter results 53 | /// in more robustness wrt. occasional holes and sampling irregularities, but reduces the 54 | /// detail accuracies. 55 | /// - `max_depth`: the max depth of the multigrid solver. Larger values result in higher accuracy 56 | /// (which requires higher sampling densities, or a `density_estimation_depth` set to a smaller 57 | /// value). Higher values increases computation times. 58 | /// - `max_relaxation_iters`: the maximum number of iterations for the internal 59 | /// conjugate-gradient solver. Values around `10` should be enough for most cases. 60 | pub fn from_points_and_normals( 61 | points: &[Point3], 62 | normals: &[Vector3], 63 | screening: Real, 64 | density_estimation_depth: usize, 65 | max_depth: usize, 66 | max_relaxation_iters: usize, 67 | ) -> Self { 68 | assert_eq!( 69 | points.len(), 70 | normals.len(), 71 | "Exactly one normal per point must be provided." 72 | ); 73 | assert!(density_estimation_depth <= max_depth); 74 | let mut root_aabb = Aabb::from_points(points); 75 | let max_extent = root_aabb.extents().max(); 76 | let leaf_cell_width = max_extent / (2.0 as Real).powi(max_depth as i32); 77 | root_aabb.loosen(leaf_cell_width); 78 | let grid_origin = root_aabb.mins; 79 | 80 | let mut layers = vec![]; 81 | layers.push(PoissonLayer::from_points( 82 | points, 83 | grid_origin, 84 | leaf_cell_width, 85 | )); 86 | 87 | for i in 0..max_depth { 88 | let layer = PoissonLayer::from_next_layer(points, &layers[i]); 89 | layers.push(layer); 90 | } 91 | 92 | // Reverse so the coarser layers go first. 93 | layers.reverse(); 94 | 95 | let vector_field = 96 | PoissonVectorField::new(&layers, points, normals, density_estimation_depth); 97 | 98 | for i in 0..layers.len() { 99 | let result = PoissonLayer::solve( 100 | &layers, 101 | i, 102 | &vector_field, 103 | points, 104 | normals, 105 | screening, 106 | max_relaxation_iters, 107 | ); 108 | layers[i].node_weights = result; 109 | } 110 | 111 | let mut total_weight = 0.0; 112 | let mut result = Self { 113 | layers, 114 | isovalue: 0.0, 115 | }; 116 | let mut isovalue = 0.0; 117 | 118 | for (pt, w) in points.iter().zip(vector_field.densities.iter()) { 119 | isovalue += result.eval(pt) / *w; 120 | total_weight += 1.0 / *w; 121 | } 122 | 123 | result.isovalue = isovalue / total_weight; 124 | result 125 | } 126 | 127 | /// The domain where the surface’s implicit function is defined. 128 | pub fn aabb(&self) -> &Aabb { 129 | self.layers.last().unwrap().cells_qbvh.root_aabb() 130 | } 131 | 132 | /// Does the given AABB intersect any of the smallest grid cells of the reconstruction? 133 | pub fn leaf_cells_intersect_aabb(&self, aabb: &Aabb) -> bool { 134 | let mut intersections = vec![]; 135 | self.layers 136 | .last() 137 | .unwrap() 138 | .cells_qbvh 139 | .intersect_aabb(aabb, &mut intersections); 140 | !intersections.is_empty() 141 | } 142 | 143 | /// Evaluates the value of the implicit function at the given 3D point. 144 | /// 145 | /// In order to get a meaningful value, the point must be located inside of [`Self::aabb`]. 
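///
/// A minimal usage sketch (here `points` and `normals` stand for an oriented point cloud,
/// and the depth/iteration arguments are illustrative, not prescribed values):
///
/// ```ignore
/// use nalgebra::Point3;
/// use poisson_reconstruction::PoissonReconstruction;
///
/// let poisson = PoissonReconstruction::from_points_and_normals(
///     &points, &normals, 0.0, 4, 5, 10,
/// );
/// // Values close to zero indicate points near the reconstructed isosurface.
/// let value = poisson.eval(&Point3::new(0.0, 0.0, 0.0));
/// ```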
146 | pub fn eval(&self, pt: &Point3) -> Real { 147 | let mut result = 0.0; 148 | 149 | for layer in &self.layers { 150 | result += layer.eval_triquadratic(pt); 151 | } 152 | 153 | result - self.isovalue 154 | } 155 | 156 | /// Evaluates the value of the implicit function’s gradient at the given 3D point. 157 | /// 158 | /// In order to get a meaningful value, the point must be located inside of [`Self::aabb`]. 159 | pub fn eval_gradient(&self, pt: &Point3) -> Vector3 { 160 | let mut result = Vector3::zeros(); 161 | 162 | for layer in &self.layers { 163 | result += layer.eval_triquadratic_gradient(pt); 164 | } 165 | 166 | result 167 | } 168 | 169 | /// Reconstructs a mesh from this implicit function using a simple marching-cubes, extracting 170 | /// the isosurface at 0. 171 | #[deprecated = "use `reconstruct_mesh_buffers` or `reconstruct_trimesh` instead"] 172 | pub fn reconstruct_mesh(&self) -> Vec> { 173 | self.reconstruct_mesh_buffers().result_as_triangle_soup() 174 | } 175 | 176 | /// Reconstructs a `TriMesh` from this implicit function using a simple marching-cubes, extracting 177 | /// the isosurface at 0. 178 | pub fn reconstruct_trimesh(&self, flags: TriMeshFlags) -> Option { 179 | self.reconstruct_mesh_buffers().result(flags) 180 | } 181 | 182 | /// Reconstructs a mesh from this implicit function using a simple marching-cubes, extracting 183 | /// the isosurface at 0. 184 | pub fn reconstruct_mesh_buffers(&self) -> MeshBuffers { 185 | let mut result = MeshBuffers::default(); 186 | let mut visited = HashMap::new(); 187 | 188 | if let Some(last_layer) = self.layers.last() { 189 | // Check all the existing leaves. 190 | let mut eval_cell = |key: Point3, visited: &mut HashMap, bool>| { 191 | let cell_center = last_layer.grid.cell_center(&key); 192 | let cell_width = Vector3::repeat(last_layer.grid.cell_width() / 2.0); 193 | let aabb = Aabb::from_half_extents(cell_center, cell_width); 194 | let mut vertex_values = [0.0; 8]; 195 | 196 | for (pt, val) in aabb.vertices().iter().zip(vertex_values.iter_mut()) { 197 | *val = self.eval(pt); 198 | } 199 | 200 | let len_before = result.indices().len(); 201 | march_cube_idx( 202 | &aabb, 203 | &vertex_values, 204 | key.cast::().into(), 205 | 0.0, 206 | &mut result, 207 | ); 208 | let has_sign_change = result.indices().len() != len_before; 209 | visited.insert(key, has_sign_change); 210 | has_sign_change 211 | }; 212 | 213 | for cell in last_layer.cells_qbvh.raw_proxies() { 214 | // let aabb = last_layer.cells_qbvh.node_aabb(cell.node).unwrap(); 215 | eval_cell(cell.data.cell, &mut visited); 216 | } 217 | 218 | // Checking only the leaves isn’t enough, isosurfaces might escape leaves through levels 219 | // at a coarser level. So we also check adjacent leaves that experienced a sign change. 220 | // PERF: instead of traversing ALL the adjacent leaves, only traverse the ones adjacent 221 | // to an edge that actually experienced a sign change. 222 | // PERF: don’t re-evaluate vertices that were already evaluated. 
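// Editor's note (added comments): the stack below is seeded with every visited cell whose
// marching-cube pass detected a sign change, then the loop flood-fills through each such
// cell's 3x3x3 neighborhood, continuing only from neighbors that also show a sign change.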
223 | let mut stack: Vec<_> = visited 224 | .iter() 225 | .filter(|(_key, sign_change)| **sign_change) 226 | .map(|e| *e.0) 227 | .collect(); 228 | 229 | while let Some(cell) = stack.pop() { 230 | for i in -1..=1 { 231 | for j in -1..=1 { 232 | for k in -1..=1 { 233 | let new_cell = cell + Vector3::new(i, j, k); 234 | 235 | if !visited.contains_key(&new_cell) { 236 | let has_sign_change = eval_cell(new_cell, &mut visited); 237 | if has_sign_change { 238 | stack.push(new_cell); 239 | } 240 | } 241 | } 242 | } 243 | } 244 | } 245 | } 246 | 247 | result 248 | } 249 | } 250 | 251 | pub fn eval_triquadratic + AddAssign + Copy + Default>( 252 | pt: &Point3, 253 | grid: &HGrid, 254 | grid_node_idx: &HashMap, usize>, 255 | node_weights: &[T], 256 | ) -> T { 257 | let cell_width = grid.cell_width(); 258 | let ref_cell = grid.key(pt); 259 | let mut result = T::default(); 260 | 261 | for i in -1..=1 { 262 | for j in -1..=1 { 263 | for k in -1..=1 { 264 | let curr_cell = ref_cell + vector![i, j, k]; 265 | 266 | if let Some(node_id) = grid_node_idx.get(&curr_cell) { 267 | let spline_origin = grid.cell_center(&curr_cell); 268 | let valx = eval_bspline(pt.x, spline_origin.x, cell_width); 269 | let valy = eval_bspline(pt.y, spline_origin.y, cell_width); 270 | let valz = eval_bspline(pt.z, spline_origin.z, cell_width); 271 | result += node_weights[*node_id] * valx * valy * valz; 272 | } 273 | } 274 | } 275 | } 276 | 277 | result 278 | } 279 | 280 | pub fn eval_triquadratic_gradient( 281 | pt: &Point3, 282 | grid: &HGrid, 283 | grid_node_idx: &HashMap, usize>, 284 | node_weights: &[Real], 285 | ) -> Vector3 { 286 | let cell_width = grid.cell_width(); 287 | let ref_cell = grid.key(pt); 288 | let mut result = Vector3::default(); 289 | 290 | for i in -1..=1 { 291 | for j in -1..=1 { 292 | for k in -1..=1 { 293 | let curr_cell = ref_cell + vector![i, j, k]; 294 | 295 | if let Some(node_id) = grid_node_idx.get(&curr_cell) { 296 | let spline_origin = grid.cell_center(&curr_cell); 297 | 298 | let valx = eval_bspline(pt.x, spline_origin.x, cell_width); 299 | let valy = eval_bspline(pt.y, spline_origin.y, cell_width); 300 | let valz = eval_bspline(pt.z, spline_origin.z, cell_width); 301 | 302 | let diffx = eval_bspline_diff(pt.x, spline_origin.x, cell_width); 303 | let diffy = eval_bspline_diff(pt.y, spline_origin.y, cell_width); 304 | let diffz = eval_bspline_diff(pt.z, spline_origin.z, cell_width); 305 | 306 | result += Vector3::new( 307 | diffx * valy * valz, 308 | valx * diffy * valz, 309 | valx * valy * diffz, 310 | ) * node_weights[*node_id]; 311 | } 312 | } 313 | } 314 | } 315 | 316 | result 317 | } 318 | -------------------------------------------------------------------------------- /src/poisson_layer.rs: -------------------------------------------------------------------------------- 1 | use crate::conjugate_gradient::solve_conjugate_gradient; 2 | use crate::hgrid::HGrid; 3 | use crate::poisson_vector_field::PoissonVectorField; 4 | use crate::polynomial::TriQuadraticBspline; 5 | use crate::{ 6 | poisson::{self, CellWithId}, 7 | polynomial, Real, 8 | }; 9 | use na::{vector, DVector, Point3, Vector3}; 10 | use nalgebra_sparse::{CooMatrix, CscMatrix}; 11 | use parry::bounding_volume::Aabb; 12 | use parry::partitioning::Qbvh; 13 | use rayon::prelude::*; 14 | use std::collections::HashMap; 15 | 16 | #[derive(Clone)] 17 | pub struct PoissonLayer { 18 | pub grid: HGrid, 19 | pub cells_qbvh: Qbvh, 20 | pub grid_node_idx: HashMap, usize>, 21 | pub ordered_nodes: Vec>, 22 | pub node_weights: DVector, 23 | } 24 | 25 
| impl PoissonLayer { 26 | pub fn cell_width(&self) -> Real { 27 | self.grid.cell_width() 28 | } 29 | } 30 | 31 | impl PoissonLayer { 32 | pub fn from_points( 33 | points: &[Point3], 34 | grid_origin: Point3, 35 | cell_width: Real, 36 | ) -> Self { 37 | let mut grid = HGrid::new(grid_origin, cell_width); 38 | let mut grid_node_idx = HashMap::new(); 39 | let mut ordered_nodes = vec![]; 40 | 41 | // for pt in points { 42 | // let ref_node = grid.key(pt); 43 | // 44 | // for corner_shift in CORNERS.iter() { 45 | // let node = ref_node + corner_shift; 46 | // let _ = grid_node_idx.entry(node).or_insert_with(|| { 47 | // let center = grid.cell_center(&node); 48 | // grid.insert(¢er, 0); 49 | // ordered_nodes.push(node); 50 | // ordered_nodes.len() - 1 51 | // }); 52 | // } 53 | // } 54 | 55 | // TODO: do we still need this when using the multigrid solver? 56 | for (pid, pt) in points.iter().enumerate() { 57 | let ref_node = grid.key(pt); 58 | let ref_center = grid.cell_center(&ref_node); 59 | grid.insert(&ref_center, pid); 60 | 61 | for i in -2..=2 { 62 | for j in -2..=2 { 63 | for k in -2..=2 { 64 | let node = ref_node + vector![i, j, k]; 65 | let center = grid.cell_center(&node); 66 | let _ = grid_node_idx.entry(node).or_insert_with(|| { 67 | grid.insert(¢er, usize::MAX); 68 | ordered_nodes.push(node); 69 | ordered_nodes.len() - 1 70 | }); 71 | } 72 | } 73 | } 74 | } 75 | 76 | Self::from_populated_grid(grid, grid_node_idx, ordered_nodes) 77 | } 78 | 79 | pub fn from_next_layer(points: &[Point3], layer: &Self) -> Self { 80 | let cell_width = layer.cell_width() * 2.0; 81 | let mut grid = HGrid::new(*layer.grid.origin(), cell_width); 82 | let mut grid_node_idx = HashMap::new(); 83 | let mut ordered_nodes = vec![]; 84 | 85 | // Add nodes to the new grid to form a comforming "octree". 
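// Editor's note (added comments): for every node of the finer layer, this makes sure a
// 4x4x4 block of surrounding coarser nodes exists, offset by one cell toward the octant of
// the coarse cell containing the fine node (hence the asymmetric -2..=1 / -1..=2 ranges
// below), so the coarser layer stays conforming around every refined region.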
86 | for sub_node_key in &layer.ordered_nodes { 87 | let pt = layer.grid.cell_center(sub_node_key); 88 | let my_key = grid.key(&pt); 89 | let my_center = grid.cell_center(&my_key); 90 | let quadrant = pt - my_center; 91 | 92 | let range = |x| { 93 | if x < 0.0 { 94 | -2..=1 95 | } else { 96 | -1..=2 97 | } 98 | }; 99 | 100 | for i in range(quadrant.x) { 101 | for j in range(quadrant.y) { 102 | for k in range(quadrant.z) { 103 | let adj_key = my_key + vector![i, j, k]; 104 | 105 | let _ = grid_node_idx.entry(adj_key).or_insert_with(|| { 106 | let adj_center = grid.cell_center(&adj_key); 107 | grid.insert(&adj_center, usize::MAX); 108 | ordered_nodes.push(adj_key); 109 | ordered_nodes.len() - 1 110 | }); 111 | } 112 | } 113 | } 114 | } 115 | 116 | for (pid, pt) in points.iter().enumerate() { 117 | let ref_node = grid.key(pt); 118 | let ref_center = grid.cell_center(&ref_node); 119 | grid.insert(&ref_center, pid); 120 | } 121 | 122 | Self::from_populated_grid(grid, grid_node_idx, ordered_nodes) 123 | } 124 | 125 | fn from_populated_grid( 126 | grid: HGrid, 127 | grid_node_idx: HashMap, usize>, 128 | ordered_nodes: Vec>, 129 | ) -> Self { 130 | let cell_width = grid.cell_width(); 131 | let mut cells_qbvh = Qbvh::new(); 132 | cells_qbvh.clear_and_rebuild( 133 | ordered_nodes.iter().map(|key| { 134 | let center = grid.cell_center(key); 135 | let id = grid_node_idx[key]; 136 | let half_width = Vector3::repeat(cell_width / 2.0); 137 | ( 138 | CellWithId { cell: *key, id }, 139 | Aabb::from_half_extents(center, half_width), 140 | ) 141 | }), 142 | 0.0, 143 | ); 144 | 145 | let node_weights = DVector::zeros(grid_node_idx.len()); 146 | 147 | Self { 148 | grid, 149 | cells_qbvh, 150 | ordered_nodes, 151 | grid_node_idx, 152 | node_weights, 153 | } 154 | } 155 | 156 | pub(crate) fn solve( 157 | layers: &[Self], 158 | curr_layer: usize, 159 | vector_field: &PoissonVectorField, 160 | points: &[Point3], 161 | normals: &[Vector3], 162 | screening: Real, 163 | niters: usize, 164 | ) -> DVector { 165 | let my_layer = &layers[curr_layer]; 166 | let cell_width = my_layer.cell_width(); 167 | assert_eq!(points.len(), normals.len()); 168 | let convolution = polynomial::compute_quadratic_bspline_convolution_coeffs(cell_width); 169 | let num_nodes = my_layer.ordered_nodes.len(); 170 | 171 | // Compute the gradient matrix. 172 | let mut grad_matrix = CooMatrix::new(num_nodes, num_nodes); 173 | let screen_factor = 174 | (2.0 as Real).powi(curr_layer as i32) * screening * vector_field.area_approximation() 175 | / (points.len() as Real); 176 | 177 | for (nid, node) in my_layer.ordered_nodes.iter().enumerate() { 178 | let center1 = my_layer.grid.cell_center(node); 179 | 180 | for i in -2..=2 { 181 | for j in -2..=2 { 182 | for k in -2..=2 { 183 | let other_node = node + vector![i, j, k]; 184 | let center2 = my_layer.grid.cell_center(&other_node); 185 | 186 | if let Some(other_nid) = my_layer.grid_node_idx.get(&other_node) { 187 | let ii = (i + 2) as usize; 188 | let jj = (j + 2) as usize; 189 | let kk = (k + 2) as usize; 190 | 191 | let mut laplacian = convolution.laplacian[ii][jj][kk]; 192 | 193 | if screening != 0.0 { 194 | for si in -1..=1 { 195 | for sj in -1..=1 { 196 | for sk in -1..=1 { 197 | let adj = node + vector![si, sj, sk]; 198 | 199 | if let Some(pt_ids) = my_layer.grid.cell(&adj) { 200 | for pid in pt_ids { 201 | // Use get to ignore the sentinel. 
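// Editor's note (added comment): `from_points` inserts `usize::MAX` into cells that only
// exist to materialize grid nodes; `points.get` returns `None` for that sentinel, so it is
// skipped here.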
202 | if let Some(pt) = points.get(*pid) { 203 | let poly1 = TriQuadraticBspline::new( 204 | center1, cell_width, 205 | ); 206 | let poly2 = TriQuadraticBspline::new( 207 | center2, cell_width, 208 | ); 209 | laplacian += screen_factor 210 | * poly1.eval(*pt) 211 | * poly2.eval(*pt); 212 | } 213 | } 214 | } 215 | } 216 | } 217 | } 218 | } 219 | 220 | grad_matrix.push(nid, *other_nid, laplacian); 221 | } 222 | } 223 | } 224 | } 225 | } 226 | 227 | // Build rhs 228 | let mut rhs = DVector::zeros(my_layer.ordered_nodes.len()); 229 | vector_field.build_rhs(layers, curr_layer, &mut rhs); 230 | 231 | // Subtract the results from the coarser layers. 232 | rhs.as_mut_slice() 233 | .par_iter_mut() 234 | .enumerate() 235 | .for_each(|(rhs_id, rhs)| { 236 | let node_key = my_layer.ordered_nodes[rhs_id]; 237 | let node_center = my_layer.grid.cell_center(&node_key); 238 | let poly1 = TriQuadraticBspline::new(node_center, my_layer.cell_width()); 239 | 240 | for coarser_layer in &layers[0..curr_layer] { 241 | let aabb = Aabb::from_half_extents( 242 | node_center, 243 | Vector3::repeat( 244 | my_layer.cell_width() * 1.5 + coarser_layer.cell_width() * 1.5, 245 | ), 246 | ); 247 | 248 | for (coarser_node_key, _) in coarser_layer 249 | .grid 250 | .cells_intersecting_aabb(&aabb.mins, &aabb.maxs) 251 | { 252 | let coarser_node_center = coarser_layer.grid.cell_center(&coarser_node_key); 253 | let poly2 = TriQuadraticBspline::new( 254 | coarser_node_center, 255 | coarser_layer.cell_width(), 256 | ); 257 | let mut coeff = poly1.grad_grad(poly2, true, true).sum(); 258 | let coarser_rhs_id = coarser_layer.grid_node_idx[&coarser_node_key]; 259 | 260 | if screening != 0.0 { 261 | for si in -1..=1 { 262 | for sj in -1..=1 { 263 | for sk in -1..=1 { 264 | let adj = node_key + vector![si, sj, sk]; 265 | 266 | if let Some(pt_ids) = my_layer.grid.cell(&adj) { 267 | for pid in pt_ids { 268 | // Use get to ignore the sentinel. 269 | if let Some(pt) = points.get(*pid) { 270 | coeff += screen_factor 271 | * poly1.eval(*pt) 272 | * poly2.eval(*pt); 273 | } 274 | } 275 | } 276 | } 277 | } 278 | } 279 | } 280 | 281 | *rhs -= coarser_layer.node_weights[coarser_rhs_id] * coeff; 282 | } 283 | } 284 | }); 285 | 286 | // Solve the sparse system. 
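// Editor's note (added comments): the COO matrix assembled above is converted to CSC and
// handed to the crate's `solve_conjugate_gradient`, which overwrites `rhs` in place with
// the solution (see `conjugate_gradient.rs`); the commented-out lines below show a direct
// Cholesky alternative.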
287 | let lhs = CscMatrix::from(&grad_matrix); 288 | solve_conjugate_gradient(&lhs, &mut rhs, niters); 289 | // let chol = CscCholesky::factor(&lhs).unwrap(); 290 | // chol.solve_mut(&mut rhs); 291 | 292 | rhs 293 | } 294 | 295 | pub fn eval_triquadratic(&self, pt: &Point3) -> Real { 296 | poisson::eval_triquadratic( 297 | pt, 298 | &self.grid, 299 | &self.grid_node_idx, 300 | self.node_weights.as_slice(), 301 | ) 302 | } 303 | 304 | pub fn eval_triquadratic_gradient(&self, pt: &Point3) -> Vector3 { 305 | poisson::eval_triquadratic_gradient( 306 | pt, 307 | &self.grid, 308 | &self.grid_node_idx, 309 | self.node_weights.as_slice(), 310 | ) 311 | } 312 | } 313 | -------------------------------------------------------------------------------- /src/polynomial.rs: -------------------------------------------------------------------------------- 1 | use crate::Real; 2 | use na::{Point3, Vector3}; 3 | use std::ops::{Add, Div, Mul, Neg}; 4 | 5 | #[derive(Copy, Clone, PartialEq, Debug)] 6 | pub struct TriQuadraticBspline { 7 | center: Point3, 8 | width: Real, 9 | } 10 | 11 | impl TriQuadraticBspline { 12 | pub fn new(center: Point3, width: Real) -> Self { 13 | Self { center, width } 14 | } 15 | 16 | pub fn eval(&self, pt: Point3) -> Real { 17 | let mut result = 1.0; 18 | 19 | for i in 0..3 { 20 | result *= eval_bspline(pt[i], self.center[i], self.width) 21 | } 22 | 23 | result 24 | } 25 | 26 | pub fn grad_grad(self, rhs: Self, diff1: bool, diff2: bool) -> Vector3 { 27 | let dcenter = rhs.center - self.center; 28 | let poly1 = bspline::<6>(0.0, self.width); 29 | let poly_diff1 = if diff1 { 30 | [ 31 | poly1[0].derivative(), 32 | poly1[1].derivative(), 33 | poly1[2].derivative(), 34 | ] 35 | } else { 36 | poly1 37 | }; 38 | 39 | let mut result_int = [0.0; 3]; 40 | let mut result_diff_diff_int = [0.0; 3]; 41 | 42 | for dim in 0..3 { 43 | if dcenter[dim].abs() >= (self.width + rhs.width) * 1.5 { 44 | return Vector3::zeros(); // The splines don’t overlap along this dimension. 45 | } 46 | 47 | // We have to check the splines domain pieces to multiply together 48 | // the correct polynomials. 49 | let sub1 = [ 50 | -1.5 * self.width, 51 | -0.5 * self.width, 52 | 0.5 * self.width, 53 | 1.5 * self.width, 54 | ]; 55 | let sub2 = [ 56 | dcenter[dim] - 1.5 * rhs.width, 57 | dcenter[dim] - 0.5 * rhs.width, 58 | dcenter[dim] + 0.5 * rhs.width, 59 | dcenter[dim] + 1.5 * rhs.width, 60 | ]; 61 | 62 | let poly2 = bspline::<6>(dcenter[dim], rhs.width); 63 | let poly_diff2 = if diff2 { 64 | [ 65 | poly2[0].derivative(), 66 | poly2[1].derivative(), 67 | poly2[2].derivative(), 68 | ] 69 | } else { 70 | poly2 71 | }; 72 | 73 | // Compute the 9 potential interval intersections. 
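// Each 1-D quadratic B-spline is piecewise polynomial over three consecutive
// intervals (`sub1` for `self`, `sub2` for `rhs`), so the product of two
// splines is integrated exactly by summing, over every non-empty overlap of a
// `sub1` piece with a `sub2` piece, the primitive of the corresponding
// polynomial product evaluated at the overlap's endpoints.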
74 | for i in 0..3 { 75 | for j in 0..3 { 76 | let start = sub1[i].max(sub2[j]); 77 | let end = sub1[i + 1].min(sub2[j + 1]); 78 | if end > start { 79 | let primitive = (poly1[i] * poly2[j]).primitive(); 80 | result_int[dim] += primitive.eval(end) - primitive.eval(start); 81 | 82 | let primitive_diff_diff = (poly_diff1[i] * poly_diff2[j]).primitive(); 83 | result_diff_diff_int[dim] += 84 | primitive_diff_diff.eval(end) - primitive_diff_diff.eval(start); 85 | } 86 | } 87 | } 88 | } 89 | 90 | Vector3::new( 91 | result_diff_diff_int[0] * result_int[1] * result_int[2], 92 | result_int[0] * result_diff_diff_int[1] * result_int[2], 93 | result_int[0] * result_int[1] * result_diff_diff_int[2], 94 | ) 95 | } 96 | } 97 | 98 | #[derive(Copy, Clone, Debug, PartialEq)] 99 | pub struct PoissonQuadraticBsplineCoeffs { 100 | pub laplacian: [[[Real; 5]; 5]; 5], 101 | pub normal_div: [[[[Real; 5]; 5]; 5]; 3], 102 | } 103 | 104 | impl Default for PoissonQuadraticBsplineCoeffs { 105 | fn default() -> Self { 106 | Self { 107 | laplacian: [[[0.0; 5]; 5]; 5], 108 | normal_div: [[[[0.0; 5]; 5]; 5]; 3], 109 | } 110 | } 111 | } 112 | 113 | fn bspline03() -> [Polynomial; 3] { 114 | [ 115 | Polynomial::::quadratic(0.0, 0.0, 0.5), // x in [0, 1) 116 | Polynomial::::quadratic(-1.5, 3.0, -1.0), // x in [1, 2) 117 | Polynomial::::quadratic(4.5, -3.0, 0.5), // x in [2, 3) 118 | ] 119 | } 120 | 121 | fn bspline(origin: Real, width: Real) -> [Polynomial; 3] { 122 | let b = bspline03::(); 123 | 124 | [ 125 | b[0].scale_shift(origin - 1.5 * width, width) / width, 126 | b[1].scale_shift(origin - 1.5 * width, width) / width, 127 | b[2].scale_shift(origin - 1.5 * width, width) / width, 128 | ] 129 | } 130 | 131 | pub fn eval_bspline(x: Real, origin: Real, width: Real) -> Real { 132 | // Bring the value between [0, 3) 133 | let val = (x - origin) / width + 1.5; 134 | let [b0, b1, b2] = bspline03::<3>(); 135 | 136 | if val < 0.0 { 137 | 0.0 138 | } else if val < 1.0 { 139 | b0.eval(val) / width 140 | } else if val < 2.0 { 141 | b1.eval(val) / width 142 | } else if val < 3.0 { 143 | b2.eval(val) / width 144 | } else { 145 | 0.0 146 | } 147 | } 148 | 149 | pub fn eval_bspline_diff(x: Real, origin: Real, width: Real) -> Real { 150 | // Bring the value between [0, 3) 151 | let val = (x - origin) / width + 1.5; 152 | let [b0, b1, b2] = bspline03::<3>(); 153 | 154 | if val < 0.0 { 155 | 0.0 156 | } else if val < 1.0 { 157 | b0.derivative().eval(val) / width 158 | } else if val < 2.0 { 159 | b1.derivative().eval(val) / width 160 | } else if val < 3.0 { 161 | b2.derivative().eval(val) / width 162 | } else { 163 | 0.0 164 | } 165 | } 166 | 167 | pub fn compute_quadratic_bspline_convolution_coeffs(width: Real) -> PoissonQuadraticBsplineCoeffs { 168 | let [mut b0, mut b1, mut b2] = bspline03::<6>(); 169 | 170 | // Center and normalize each section of the b-spline. 
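// Each piece is re-expressed over the common interval [0, width), so products
// of pieces coming from splines whose centers differ by -2..=2 cells can all be
// integrated over that single interval; the five entries of each `integrals*`
// array below are those per-offset 1-D integrals.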
171 | b0 = b0.scale_shift(0.0, width) / width; // x in [0, w) 172 | b1 = b1.scale_shift(-width, width) / width; // x in [0, w) 173 | b2 = b2.scale_shift(-2.0 * width, width) / width; // x in [0, w) 174 | let bdiff0 = b0.derivative(); 175 | let bdiff1 = b1.derivative(); 176 | let bdiff2 = b2.derivative(); 177 | 178 | let primitives = [ 179 | (b2 * b0).primitive(), 180 | (b1 * b0).primitive() + (b2 * b1).primitive(), 181 | (b0 * b0).primitive() + (b1 * b1).primitive() + (b2 * b2).primitive(), 182 | (b1 * b0).primitive() + (b2 * b1).primitive(), 183 | (b2 * b0).primitive(), 184 | ]; 185 | let integrals = [ 186 | primitives[0].eval(width) - primitives[0].eval(0.0), 187 | primitives[1].eval(width) - primitives[1].eval(0.0), 188 | primitives[2].eval(width) - primitives[2].eval(0.0), 189 | primitives[3].eval(width) - primitives[3].eval(0.0), 190 | primitives[4].eval(width) - primitives[4].eval(0.0), 191 | ]; 192 | 193 | let primitives_diff = [ 194 | (bdiff2 * b0).primitive(), 195 | (bdiff1 * b0).primitive() + (bdiff2 * b1).primitive(), 196 | (b0 * bdiff0).primitive() + (b1 * bdiff1).primitive() + (b2 * bdiff2).primitive(), 197 | (b1 * bdiff0).primitive() + (b2 * bdiff1).primitive(), 198 | (b2 * bdiff0).primitive(), 199 | ]; 200 | let integrals_diff = [ 201 | primitives_diff[0].eval(width) - primitives_diff[0].eval(0.0), 202 | primitives_diff[1].eval(width) - primitives_diff[1].eval(0.0), 203 | primitives_diff[2].eval(width) - primitives_diff[2].eval(0.0), 204 | primitives_diff[3].eval(width) - primitives_diff[3].eval(0.0), 205 | primitives_diff[4].eval(width) - primitives_diff[4].eval(0.0), 206 | ]; 207 | 208 | let primitives_diff_diff = [ 209 | (bdiff2 * bdiff0).primitive(), 210 | ((bdiff1 * bdiff0).primitive() + (bdiff2 * bdiff1).primitive()), 211 | ((bdiff0 * bdiff0).primitive() 212 | + (bdiff1 * bdiff1).primitive() 213 | + (bdiff2 * bdiff2).primitive()), 214 | ((bdiff1 * bdiff0).primitive() + (bdiff2 * bdiff1).primitive()), 215 | ((bdiff2 * bdiff0).primitive()), 216 | ]; 217 | let integrals_diff_diff = [ 218 | primitives_diff_diff[0].eval(width) - primitives_diff_diff[0].eval(0.0), 219 | primitives_diff_diff[1].eval(width) - primitives_diff_diff[1].eval(0.0), 220 | primitives_diff_diff[2].eval(width) - primitives_diff_diff[2].eval(0.0), 221 | primitives_diff_diff[3].eval(width) - primitives_diff_diff[3].eval(0.0), 222 | primitives_diff_diff[4].eval(width) - primitives_diff_diff[4].eval(0.0), 223 | ]; 224 | 225 | let mut result = PoissonQuadraticBsplineCoeffs::default(); 226 | 227 | for i in -2i32..=2 { 228 | for j in -2i32..=2 { 229 | for k in -2i32..=2 { 230 | let ia = (i + 2) as usize; 231 | let ja = (j + 2) as usize; 232 | let ka = (k + 2) as usize; 233 | 234 | result.laplacian[ia][ja][ka] = 235 | integrals_diff_diff[ia] * integrals[ja] * integrals[ka] 236 | + integrals[ia] * integrals_diff_diff[ja] * integrals[ka] 237 | + integrals[ia] * integrals[ja] * integrals_diff_diff[ka]; 238 | result.normal_div[0][ia][ja][ka] = 239 | integrals_diff[ia] * integrals[ja] * integrals[ka]; 240 | result.normal_div[1][ia][ja][ka] = 241 | integrals[ia] * integrals_diff[ja] * integrals[ka]; 242 | result.normal_div[2][ia][ja][ka] = 243 | integrals[ia] * integrals[ja] * integrals_diff[ka]; 244 | } 245 | } 246 | } 247 | 248 | result 249 | } 250 | 251 | #[derive(Copy, Clone, PartialEq, Debug)] 252 | pub struct Polynomial { 253 | pub coeffs: [Real; N], 254 | } 255 | 256 | impl Default for Polynomial { 257 | fn default() -> Self { 258 | Self { coeffs: [0.0; N] } 259 | } 260 | } 261 | 262 | impl Polynomial { 263 | 
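// Evaluates the polynomial at `x` with Horner's rule; `coeffs[i]` is the
// coefficient of `x^i`.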
pub fn eval(&self, x: Real) -> Real { 264 | let mut result = self.coeffs[N - 1]; 265 | 266 | for i in (0..N - 1).rev() { 267 | result = result * x + self.coeffs[i]; 268 | } 269 | 270 | result 271 | } 272 | 273 | #[must_use] 274 | pub fn quadratic(cst: Real, x: Real, xx: Real) -> Self { 275 | let mut coeffs = [0.0; N]; 276 | coeffs[0] = cst; 277 | coeffs[1] = x; 278 | coeffs[2] = xx; 279 | Self { coeffs } 280 | } 281 | 282 | #[must_use] 283 | pub fn derivative(mut self) -> Self { 284 | for i in 0..N - 1 { 285 | self.coeffs[i] = self.coeffs[i + 1] * (i as Real + 1.0); 286 | } 287 | self.coeffs[N - 1] = 0.0; 288 | self 289 | } 290 | 291 | #[must_use] 292 | pub fn primitive(mut self) -> Self { 293 | assert_eq!( 294 | self.coeffs[N - 1], 295 | 0.0, 296 | "Integration coefficient overflow. Increase the polynomial degree." 297 | ); 298 | for i in (1..N).rev() { 299 | self.coeffs[i] = self.coeffs[i - 1] / (i as Real); 300 | } 301 | self.coeffs[0] = 0.0; 302 | self 303 | } 304 | 305 | // For a polynomial up to degree 2, this computes the polynomial 306 | // representation of P(X) = P((x - center) / width) 307 | #[must_use] 308 | pub fn scale_shift(self, center: Real, width: Real) -> Self { 309 | for k in 3..N { 310 | assert_eq!( 311 | self.coeffs[k], 0.0, 312 | "Only implemented for polynomials with degrees up to 2." 313 | ); 314 | } 315 | 316 | let a = self.coeffs[0]; 317 | let b = self.coeffs[1]; 318 | let c = self.coeffs[2]; 319 | let w = width; 320 | let ww = w * w; 321 | 322 | let mut result = Self::default(); 323 | result.coeffs[0] = a - center * b / w + c * center * center / ww; 324 | result.coeffs[1] = b / w - 2.0 * c * center / ww; 325 | result.coeffs[2] = c / ww; 326 | result 327 | } 328 | } 329 | 330 | impl Neg for Polynomial { 331 | type Output = Self; 332 | fn neg(mut self) -> Self { 333 | for i in 0..N { 334 | self.coeffs[i] = -self.coeffs[i]; 335 | } 336 | self 337 | } 338 | } 339 | 340 | impl Div for Polynomial { 341 | type Output = Self; 342 | fn div(mut self, rhs: Real) -> Self { 343 | for i in 0..N { 344 | self.coeffs[i] /= rhs; 345 | } 346 | self 347 | } 348 | } 349 | 350 | impl Mul> for Polynomial { 351 | type Output = Self; 352 | fn mul(self, rhs: Self) -> Self { 353 | let mut result = Self::default(); 354 | for i in 0..N { 355 | for j in 0..N { 356 | let val = self.coeffs[i] * rhs.coeffs[j]; 357 | if j + i >= N { 358 | assert_eq!( 359 | val, 0.0, 360 | "The result of the product must have a degree smaller than N" 361 | ); 362 | } else { 363 | result.coeffs[j + i] += self.coeffs[i] * rhs.coeffs[j]; 364 | } 365 | } 366 | } 367 | result 368 | } 369 | } 370 | 371 | impl Add> for Polynomial { 372 | type Output = Self; 373 | fn add(mut self, rhs: Self) -> Self { 374 | for i in 0..N { 375 | self.coeffs[i] += rhs.coeffs[i]; 376 | } 377 | self 378 | } 379 | } 380 | 381 | #[cfg(test)] 382 | mod test { 383 | use crate::polynomial::Polynomial; 384 | 385 | #[test] 386 | fn poly_eval() { 387 | let poly = Polynomial { 388 | coeffs: [1.0, 2.0, 3.0, 4.0, 5.0], 389 | }; 390 | assert_eq!( 391 | poly.eval(2.0), 392 | 1.0 + 2.0 * 2.0 + 3.0 * 4.0 + 4.0 * 8.0 + 5.0 * 16.0 393 | ); 394 | } 395 | 396 | #[test] 397 | fn poly_add() { 398 | let poly1 = Polynomial { 399 | coeffs: [1.0, 2.0, 3.0, 4.0, 5.0], 400 | }; 401 | let poly2 = Polynomial { 402 | coeffs: [10.0, 20.0, 30.0, 40.0, 50.0], 403 | }; 404 | let expected = Polynomial { 405 | coeffs: [11.0, 22.0, 33.0, 44.0, 55.0], 406 | }; 407 | assert_eq!(poly1 + poly2, expected); 408 | } 409 | 410 | #[test] 411 | fn poly_mul() { 412 | let poly1 = 
Polynomial { 413 | coeffs: [1.0, 2.0, 3.0, 0.0, 0.0], 414 | }; 415 | let poly2 = Polynomial { 416 | coeffs: [10.0, 20.0, 30.0, 0.0, 0.0], 417 | }; 418 | let expected = Polynomial { 419 | coeffs: [10.0, 40.0, 100.0, 120.0, 90.0], 420 | }; 421 | assert_eq!(poly1 * poly2, expected); 422 | } 423 | 424 | #[test] 425 | fn poly_diff() { 426 | let poly = Polynomial { 427 | coeffs: [1.0, 2.0, 3.0, 4.0, 5.0], 428 | }; 429 | let expected = Polynomial { 430 | coeffs: [2.0, 6.0, 12.0, 20.0, 0.0], 431 | }; 432 | assert_eq!(poly.derivative(), expected); 433 | } 434 | 435 | #[test] 436 | fn poly_primitive() { 437 | let poly = Polynomial { 438 | coeffs: [1.0, 2.0, 3.0, 4.0, 0.0], 439 | }; 440 | let expected = Polynomial { 441 | coeffs: [0.0, 1.0, 1.0, 1.0, 1.0], 442 | }; 443 | assert_eq!(poly.primitive(), expected); 444 | assert_eq!(poly.primitive().derivative(), poly); 445 | } 446 | 447 | #[test] 448 | fn scale_shift() { 449 | let shift = 0.5; 450 | let width = 2.5; 451 | let poly = Polynomial { 452 | coeffs: [10.0, 20.0, 30.0, 0.0, 0.0], 453 | }; 454 | let poly_scale_shifted = poly.scale_shift(shift, width); 455 | assert_eq!( 456 | poly.eval((11.0 - shift) / width), 457 | poly_scale_shifted.eval(11.0) 458 | ); 459 | assert_eq!(poly.eval(0.0), poly_scale_shifted.eval(shift)); 460 | assert!((poly.eval(-shift / width) - poly_scale_shifted.eval(0.0)).abs() < 1.0e-8); 461 | } 462 | } 463 | -------------------------------------------------------------------------------- /src/marching_cubes.rs: -------------------------------------------------------------------------------- 1 | //! Basic marching-cubes implementation. 2 | 3 | use crate::Real; 4 | use na::Point3; 5 | use parry::bounding_volume::Aabb; 6 | use parry::shape::{TriMesh, TriMeshFlags}; 7 | use parry::utils::SortedPair; 8 | use std::collections::HashMap; 9 | 10 | type MarchingCubesCellKey = [i32; 3]; 11 | 12 | /// Represents an index and vertex buffer of a mesh for incremental construction. 13 | #[derive(Default)] 14 | pub struct MeshBuffers { 15 | vertices: Vec>, 16 | indices: Vec, 17 | edge_to_index: HashMap, u32>, 18 | } 19 | 20 | impl MeshBuffers { 21 | /// The mesh’s index buffer. 22 | pub fn indices(&self) -> &[u32] { 23 | &self.indices 24 | } 25 | 26 | /// The mesh’s vertex buffer. 27 | pub fn vertices(&self) -> &[Point3] { 28 | &self.vertices 29 | } 30 | 31 | /// Return the results as a soup of triangle, with duplicated vertices. 32 | pub fn result_as_triangle_soup(&self) -> Vec> { 33 | self.indices 34 | .iter() 35 | .map(|i| self.vertices[*i as usize]) 36 | .collect() 37 | } 38 | 39 | /// Constructs a `TriMesh` from this buffer. 40 | /// 41 | /// The result is `None` if the index buffer of `self` is empty. 
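///
/// A minimal sketch of the empty case described above (uses `Default` to build
/// an empty buffer):
///
/// ```ignore
/// use poisson_reconstruction::marching_cubes::MeshBuffers;
/// use parry3d_f64::shape::TriMeshFlags;
///
/// let buffers = MeshBuffers::default();
/// assert!(buffers.result(TriMeshFlags::empty()).is_none());
/// ```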
42 | pub fn result(&self, flags: TriMeshFlags) -> Option { 43 | let idx: Vec<_> = self 44 | .indices 45 | .chunks_exact(3) 46 | .map(|i| [i[0], i[1], i[2]]) 47 | .collect(); 48 | TriMesh::with_flags(self.vertices.clone(), idx, flags).ok() 49 | } 50 | } 51 | 52 | /* The cube vertex and edge indices for base rotation: 53 | * 54 | * v7------e6------v6 55 | * / | /| 56 | * e11 | e10| 57 | * / e7 / | 58 | * / | / e5 59 | * v3------e2-------v2 | 60 | * | | | | 61 | * | v4------e4---|---v5 62 | * e3 / e1 / 63 | * | e8 | e9 64 | * | / | / y z 65 | * |/ |/ |/ 66 | * v0------e0-------v1 O--x 67 | * 68 | * (Same schematic but with right-handed coordinates:) 69 | * 70 | * v3------e2------v2 71 | * / | /| 72 | * e11 | e10| 73 | * / e3 / | 74 | * / | / e1 75 | * v7------e6-------v6 | 76 | * | | | | 77 | * | v0------e0---|---v1 78 | * e7 / e5 / 79 | * | e8 | e9 80 | * | / | / y 81 | * |/ |/ | 82 | * v4------e4-------v5 O--x 83 | * / 84 | * z 85 | */ 86 | 87 | // The triangle table gives us the mapping from index to actual 88 | // triangles to return for this configuration 89 | // v0 assumed at 0.0, 0.0, 0.0 & v6 at 1.0, 1.0, 1.0 90 | 91 | /// Calculates the triangles associated to this cube based on its vertex values and the desired 92 | /// isovalues to extract. 93 | /// 94 | /// The vertex values must be given in the following order: 95 | /// v7--------------v6 96 | /// / | /| 97 | /// / | / | 98 | /// / | / | 99 | /// / | / | 100 | /// v3---------------v2 | 101 | /// | | | | 102 | /// | v4-----------|---v5 103 | /// | / | / 104 | /// | / | / 105 | /// | / | / y z 106 | /// |/ |/ |/ 107 | /// v0---------------v1 O--x 108 | /// 109 | /// # Parameters 110 | /// - `mins`: the cube’s corner with the smallest coordinates. 111 | /// - `maxs`: the cube’s corner with the biggest coordinates. 112 | /// - `vertex_values`: the value associated to each cube’s vertex (see the graphic above above 113 | /// the requested value order. 114 | /// - `iso_value`: the isovalue to extract. 115 | /// - `out_triangles`: new triangles will be output to this buffer. 116 | pub fn march_cube( 117 | mins: &Point3, 118 | maxs: &Point3, 119 | vertex_values: &[Real; 8], 120 | iso_value: Real, 121 | out_triangles: &mut Vec>, 122 | ) { 123 | // Compute the index for MC_TRI_TABLE 124 | let mut index = 0; 125 | 126 | for (v, value) in vertex_values.iter().enumerate() { 127 | if *value <= iso_value { 128 | index |= 1 << v; 129 | } 130 | } 131 | 132 | for t in MC_TRI_TABLE[index].iter().take_while(|t| **t >= 0) { 133 | let v_idx = *t as usize; 134 | let v0 = EDGE_VERTICES[v_idx][0]; 135 | let v1 = EDGE_VERTICES[v_idx][1]; 136 | 137 | // The normalized_vert will have components 138 | // in 0..1. 139 | let normalized_vert = lerp_vertices( 140 | &INDEX_TO_VERTEX[v0 as usize], 141 | &INDEX_TO_VERTEX[v1 as usize], 142 | vertex_values[v0 as usize], 143 | vertex_values[v1 as usize], 144 | iso_value, 145 | ); 146 | 147 | // Convert the normalized_vert into an Aabb vert. 148 | let vert = mins + (maxs - mins).component_mul(&normalized_vert.coords); 149 | out_triangles.push(vert); 150 | } 151 | } 152 | 153 | // The triangle table gives us the mapping from index to actual 154 | // triangles to return for this configuration 155 | // v0 assumed at 0.0, 0.0, 0.0 & v6 at 1.0, 1.0, 1.0 156 | pub(crate) fn march_cube_idx( 157 | aabb: &Aabb, 158 | corner_values: &[f64; 8], 159 | // Grid coordinates of v0. 
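// These grid coordinates are combined with the local corner offsets to build
// grid-global edge keys (`SortedPair`), so a vertex shared by adjacent cells is
// emitted only once through `edge_to_index`.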
160 | first_corner_cell_key: [i32; 3], 161 | iso_value: f64, 162 | out: &mut MeshBuffers, 163 | ) { 164 | // Compute the index for MC_TRI_TABLE 165 | let mut index = 0; 166 | let old_indices_len = out.indices.len(); 167 | 168 | for (v, value) in corner_values.iter().enumerate() { 169 | if *value < iso_value { 170 | index |= 1 << v; 171 | } 172 | } 173 | 174 | for t in MC_TRI_TABLE[index].iter().take_while(|t| **t >= 0) { 175 | let v_idx = *t as usize; 176 | let [v0, v1] = EDGE_VERTICES[v_idx]; 177 | 178 | let local_corner_0 = INDEX_TO_VERTEX[v0 as usize]; 179 | let local_corner_1 = INDEX_TO_VERTEX[v1 as usize]; 180 | 181 | let eid0 = [ 182 | first_corner_cell_key[0] + local_corner_0[0] as i32, 183 | first_corner_cell_key[1] + local_corner_0[1] as i32, 184 | first_corner_cell_key[2] + local_corner_0[2] as i32, 185 | ]; 186 | let eid1 = [ 187 | first_corner_cell_key[0] + local_corner_1[0] as i32, 188 | first_corner_cell_key[1] + local_corner_1[1] as i32, 189 | first_corner_cell_key[2] + local_corner_1[2] as i32, 190 | ]; 191 | 192 | let edge_key = SortedPair::new(eid0, eid1); 193 | let vid = *out.edge_to_index.entry(edge_key).or_insert_with(|| { 194 | // The normalized_vert will have components 195 | // in 0..1. 196 | let normalized_vert = lerp_vertices( 197 | &INDEX_TO_VERTEX[v0 as usize], 198 | &INDEX_TO_VERTEX[v1 as usize], 199 | corner_values[v0 as usize], 200 | corner_values[v1 as usize], 201 | iso_value, 202 | ); 203 | 204 | // Convert the normalized_vert into an Aabb vert. 205 | let vertex = aabb.mins + aabb.extents().component_mul(&normalized_vert.coords); 206 | out.vertices.push(vertex); 207 | (out.vertices.len() - 1) as u32 208 | }); 209 | 210 | out.indices.push(vid); 211 | } 212 | 213 | out.indices[old_indices_len..].reverse(); 214 | } 215 | 216 | /// Interpolates linearly between two weighted integer points. 217 | /// 218 | /// # Parameters 219 | /// - `va`: the first integer endpoint. 220 | /// - `vb`: the second integer endpoint. 221 | /// - `fa`: the weight associated to `va`. 222 | /// - `fb`: the weight associated to `vb`. 223 | /// - `isoval`: the interpolation parameter in weight space. 
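///
/// Returns `va + t * (vb - va)` with `t = (isoval - fa) / (fb - fa)`; `t` falls
/// back to `0` when `fa` and `fb` are nearly equal, to avoid a division by zero.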
224 | fn lerp_vertices(va: &[u8; 3], vb: &[u8; 3], fa: Real, fb: Real, isoval: Real) -> Point3 { 225 | let t = if (fa - fb).abs() < 0.0001 { 226 | 0.0 227 | } else { 228 | (isoval - fa) / (fb - fa) 229 | }; 230 | 231 | Point3::new( 232 | va[0] as Real + (vb[0] as Real - va[0] as Real) * t, 233 | va[1] as Real + (vb[1] as Real - va[1] as Real) * t, 234 | va[2] as Real + (vb[2] as Real - va[2] as Real) * t, 235 | ) 236 | } 237 | 238 | static MC_TRI_TABLE: [[i8; 16]; 256] = [ 239 | [-1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 240 | [0, 8, 3, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 241 | [1, 9, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 242 | [8, 1, 9, 8, 3, 1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 243 | [2, 10, 1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 244 | [0, 8, 3, 1, 2, 10, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 245 | [9, 2, 10, 9, 0, 2, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 246 | [3, 2, 10, 3, 10, 8, 8, 10, 9, -1, 0, 0, 0, 0, 0, 0], 247 | [2, 3, 11, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 248 | [11, 0, 8, 11, 2, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 249 | [1, 9, 0, 2, 3, 11, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 250 | [2, 1, 9, 2, 9, 11, 11, 9, 8, -1, 0, 0, 0, 0, 0, 0], 251 | [3, 10, 1, 3, 11, 10, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 252 | [1, 0, 8, 1, 8, 10, 10, 8, 11, -1, 0, 0, 0, 0, 0, 0], 253 | [0, 3, 11, 0, 11, 9, 9, 11, 10, -1, 0, 0, 0, 0, 0, 0], 254 | [11, 10, 9, 11, 9, 8, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 255 | [4, 7, 8, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 256 | [4, 3, 0, 4, 7, 3, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 257 | [4, 7, 8, 9, 0, 1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 258 | [9, 4, 7, 9, 7, 1, 1, 7, 3, -1, 0, 0, 0, 0, 0, 0], 259 | [4, 7, 8, 1, 2, 10, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 260 | [4, 3, 0, 4, 7, 3, 2, 10, 1, -1, 0, 0, 0, 0, 0, 0], 261 | [2, 9, 0, 2, 10, 9, 4, 7, 8, -1, 0, 0, 0, 0, 0, 0], 262 | [3, 2, 7, 7, 9, 4, 7, 2, 9, 9, 2, 10, -1, 0, 0, 0], 263 | [8, 4, 7, 3, 11, 2, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 264 | [7, 11, 2, 7, 2, 4, 4, 2, 0, -1, 0, 0, 0, 0, 0, 0], 265 | [2, 3, 11, 1, 9, 0, 8, 4, 7, -1, 0, 0, 0, 0, 0, 0], 266 | [2, 1, 9, 2, 9, 4, 2, 4, 11, 11, 4, 7, -1, 0, 0, 0], 267 | [10, 3, 11, 10, 1, 3, 8, 4, 7, -1, 0, 0, 0, 0, 0, 0], 268 | [4, 7, 0, 0, 10, 1, 7, 10, 0, 7, 11, 10, -1, 0, 0, 0], 269 | [8, 4, 7, 0, 3, 11, 0, 11, 9, 9, 11, 10, -1, 0, 0, 0], 270 | [7, 9, 4, 7, 11, 9, 9, 11, 10, -1, 0, 0, 0, 0, 0, 0], 271 | [4, 9, 5, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 272 | [8, 3, 0, 4, 9, 5, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 273 | [0, 5, 4, 0, 1, 5, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 274 | [4, 8, 3, 4, 3, 5, 5, 3, 1, -1, 0, 0, 0, 0, 0, 0], 275 | [1, 2, 10, 9, 5, 4, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 276 | [4, 9, 5, 8, 3, 0, 1, 2, 10, -1, 0, 0, 0, 0, 0, 0], 277 | [10, 5, 4, 10, 4, 2, 2, 4, 0, -1, 0, 0, 0, 0, 0, 0], 278 | [4, 8, 3, 4, 3, 2, 4, 2, 5, 5, 2, 10, -1, 0, 0, 0], 279 | [2, 3, 11, 5, 4, 9, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 280 | [11, 0, 8, 11, 2, 0, 9, 5, 4, -1, 0, 0, 0, 0, 0, 0], 281 | [5, 0, 1, 5, 4, 0, 3, 11, 2, -1, 0, 0, 0, 0, 0, 0], 282 | [11, 2, 8, 8, 5, 4, 2, 5, 8, 2, 1, 5, -1, 0, 0, 0], 283 | [3, 10, 1, 3, 11, 10, 5, 4, 9, -1, 0, 0, 0, 0, 0, 0], 284 | [9, 5, 4, 1, 0, 8, 1, 8, 10, 10, 8, 11, -1, 0, 0, 0], 285 | [10, 5, 11, 11, 0, 3, 11, 5, 0, 0, 5, 4, -1, 0, 0, 0], 286 | [4, 10, 5, 4, 8, 10, 10, 8, 11, -1, 0, 0, 0, 0, 0, 0], 287 | [7, 9, 5, 7, 8, 9, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 288 | [0, 9, 5, 0, 5, 3, 3, 5, 7, -1, 0, 0, 0, 0, 0, 0], 289 | [8, 0, 1, 8, 1, 7, 7, 1, 5, -1, 0, 0, 0, 0, 0, 0], 290 | [3, 1, 5, 3, 5, 7, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 291 | [7, 9, 5, 7, 8, 9, 1, 2, 10, -1, 
0, 0, 0, 0, 0, 0], 292 | [1, 2, 10, 0, 9, 5, 0, 5, 3, 3, 5, 7, -1, 0, 0, 0], 293 | [7, 8, 5, 5, 2, 10, 8, 2, 5, 8, 0, 2, -1, 0, 0, 0], 294 | [10, 3, 2, 10, 5, 3, 3, 5, 7, -1, 0, 0, 0, 0, 0, 0], 295 | [9, 7, 8, 9, 5, 7, 11, 2, 3, -1, 0, 0, 0, 0, 0, 0], 296 | [0, 9, 2, 2, 7, 11, 2, 9, 7, 7, 9, 5, -1, 0, 0, 0], 297 | [3, 11, 2, 8, 0, 1, 8, 1, 7, 7, 1, 5, -1, 0, 0, 0], 298 | [2, 7, 11, 2, 1, 7, 7, 1, 5, -1, 0, 0, 0, 0, 0, 0], 299 | [11, 1, 3, 11, 10, 1, 7, 8, 9, 7, 9, 5, -1, 0, 0, 0], 300 | [11, 10, 1, 11, 1, 7, 7, 1, 0, 7, 0, 9, 7, 9, 5, -1], 301 | [5, 7, 8, 5, 8, 10, 10, 8, 0, 10, 0, 3, 10, 3, 11, -1], 302 | [11, 10, 5, 11, 5, 7, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 303 | [10, 6, 5, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 304 | [0, 8, 3, 10, 6, 5, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 305 | [9, 0, 1, 5, 10, 6, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 306 | [8, 1, 9, 8, 3, 1, 10, 6, 5, -1, 0, 0, 0, 0, 0, 0], 307 | [6, 1, 2, 6, 5, 1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 308 | [6, 1, 2, 6, 5, 1, 0, 8, 3, -1, 0, 0, 0, 0, 0, 0], 309 | [5, 9, 0, 5, 0, 6, 6, 0, 2, -1, 0, 0, 0, 0, 0, 0], 310 | [6, 5, 2, 2, 8, 3, 5, 8, 2, 5, 9, 8, -1, 0, 0, 0], 311 | [2, 3, 11, 10, 6, 5, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 312 | [0, 11, 2, 0, 8, 11, 6, 5, 10, -1, 0, 0, 0, 0, 0, 0], 313 | [0, 1, 9, 3, 11, 2, 10, 6, 5, -1, 0, 0, 0, 0, 0, 0], 314 | [10, 6, 5, 2, 1, 9, 2, 9, 11, 11, 9, 8, -1, 0, 0, 0], 315 | [11, 6, 5, 11, 5, 3, 3, 5, 1, -1, 0, 0, 0, 0, 0, 0], 316 | [11, 6, 8, 8, 1, 0, 8, 6, 1, 1, 6, 5, -1, 0, 0, 0], 317 | [0, 3, 11, 0, 11, 6, 0, 6, 9, 9, 6, 5, -1, 0, 0, 0], 318 | [5, 11, 6, 5, 9, 11, 11, 9, 8, -1, 0, 0, 0, 0, 0, 0], 319 | [7, 8, 4, 6, 5, 10, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 320 | [3, 4, 7, 3, 0, 4, 5, 10, 6, -1, 0, 0, 0, 0, 0, 0], 321 | [6, 5, 10, 7, 8, 4, 9, 0, 1, -1, 0, 0, 0, 0, 0, 0], 322 | [5, 10, 6, 9, 4, 7, 9, 7, 1, 1, 7, 3, -1, 0, 0, 0], 323 | [1, 6, 5, 1, 2, 6, 7, 8, 4, -1, 0, 0, 0, 0, 0, 0], 324 | [7, 0, 4, 7, 3, 0, 6, 5, 1, 6, 1, 2, -1, 0, 0, 0], 325 | [4, 7, 8, 5, 9, 0, 5, 0, 6, 6, 0, 2, -1, 0, 0, 0], 326 | [2, 6, 5, 2, 5, 3, 3, 5, 9, 3, 9, 4, 3, 4, 7, -1], 327 | [4, 7, 8, 5, 10, 6, 11, 2, 3, -1, 0, 0, 0, 0, 0, 0], 328 | [6, 5, 10, 7, 11, 2, 7, 2, 4, 4, 2, 0, -1, 0, 0, 0], 329 | [4, 7, 8, 9, 0, 1, 6, 5, 10, 3, 11, 2, -1, 0, 0, 0], 330 | [6, 5, 10, 11, 4, 7, 11, 2, 4, 4, 2, 9, 9, 2, 1, -1], 331 | [7, 8, 4, 11, 6, 5, 11, 5, 3, 3, 5, 1, -1, 0, 0, 0], 332 | [0, 4, 7, 0, 7, 1, 1, 7, 11, 1, 11, 6, 1, 6, 5, -1], 333 | [4, 7, 8, 9, 6, 5, 9, 0, 6, 6, 0, 11, 11, 0, 3, -1], 334 | [7, 11, 4, 11, 9, 4, 11, 5, 9, 11, 6, 5, -1, 0, 0, 0], 335 | [10, 4, 9, 10, 6, 4, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 336 | [10, 4, 9, 10, 6, 4, 8, 3, 0, -1, 0, 0, 0, 0, 0, 0], 337 | [1, 10, 6, 1, 6, 0, 0, 6, 4, -1, 0, 0, 0, 0, 0, 0], 338 | [4, 8, 6, 6, 1, 10, 6, 8, 1, 1, 8, 3, -1, 0, 0, 0], 339 | [9, 1, 2, 9, 2, 4, 4, 2, 6, -1, 0, 0, 0, 0, 0, 0], 340 | [0, 8, 3, 9, 1, 2, 9, 2, 4, 4, 2, 6, -1, 0, 0, 0], 341 | [0, 2, 6, 0, 6, 4, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 342 | [3, 4, 8, 3, 2, 4, 4, 2, 6, -1, 0, 0, 0, 0, 0, 0], 343 | [4, 10, 6, 4, 9, 10, 2, 3, 11, -1, 0, 0, 0, 0, 0, 0], 344 | [8, 2, 0, 8, 11, 2, 4, 9, 10, 4, 10, 6, -1, 0, 0, 0], 345 | [2, 3, 11, 1, 10, 6, 1, 6, 0, 0, 6, 4, -1, 0, 0, 0], 346 | [8, 11, 2, 8, 2, 4, 4, 2, 1, 4, 1, 10, 4, 10, 6, -1], 347 | [3, 11, 1, 1, 4, 9, 11, 4, 1, 11, 6, 4, -1, 0, 0, 0], 348 | [6, 4, 9, 6, 9, 11, 11, 9, 1, 11, 1, 0, 11, 0, 8, -1], 349 | [11, 0, 3, 11, 6, 0, 0, 6, 4, -1, 0, 0, 0, 0, 0, 0], 350 | [8, 11, 6, 8, 6, 4, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 351 | [6, 7, 8, 6, 8, 10, 10, 8, 9, -1, 0, 0, 0, 0, 0, 0], 352 | [3, 0, 7, 7, 
10, 6, 0, 10, 7, 0, 9, 10, -1, 0, 0, 0], 353 | [1, 10, 6, 1, 6, 7, 1, 7, 0, 0, 7, 8, -1, 0, 0, 0], 354 | [6, 1, 10, 6, 7, 1, 1, 7, 3, -1, 0, 0, 0, 0, 0, 0], 355 | [9, 1, 8, 8, 6, 7, 8, 1, 6, 6, 1, 2, -1, 0, 0, 0], 356 | [7, 3, 0, 7, 0, 6, 6, 0, 9, 6, 9, 1, 6, 1, 2, -1], 357 | [8, 6, 7, 8, 0, 6, 6, 0, 2, -1, 0, 0, 0, 0, 0, 0], 358 | [2, 6, 7, 2, 7, 3, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 359 | [11, 2, 3, 6, 7, 8, 6, 8, 10, 10, 8, 9, -1, 0, 0, 0], 360 | [9, 10, 6, 9, 6, 0, 0, 6, 7, 0, 7, 11, 0, 11, 2, -1], 361 | [3, 11, 2, 0, 7, 8, 0, 1, 7, 7, 1, 6, 6, 1, 10, -1], 362 | [6, 7, 10, 7, 1, 10, 7, 2, 1, 7, 11, 2, -1, 0, 0, 0], 363 | [1, 3, 11, 1, 11, 9, 9, 11, 6, 9, 6, 7, 9, 7, 8, -1], 364 | [6, 7, 11, 9, 1, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 365 | [8, 0, 7, 0, 6, 7, 0, 11, 6, 0, 3, 11, -1, 0, 0, 0], 366 | [6, 7, 11, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 367 | [6, 11, 7, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 368 | [3, 0, 8, 11, 7, 6, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 369 | [6, 11, 7, 9, 0, 1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 370 | [1, 8, 3, 1, 9, 8, 7, 6, 11, -1, 0, 0, 0, 0, 0, 0], 371 | [11, 7, 6, 2, 10, 1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 372 | [1, 2, 10, 0, 8, 3, 11, 7, 6, -1, 0, 0, 0, 0, 0, 0], 373 | [9, 2, 10, 9, 0, 2, 11, 7, 6, -1, 0, 0, 0, 0, 0, 0], 374 | [11, 7, 6, 3, 2, 10, 3, 10, 8, 8, 10, 9, -1, 0, 0, 0], 375 | [2, 7, 6, 2, 3, 7, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 376 | [8, 7, 6, 8, 6, 0, 0, 6, 2, -1, 0, 0, 0, 0, 0, 0], 377 | [7, 2, 3, 7, 6, 2, 1, 9, 0, -1, 0, 0, 0, 0, 0, 0], 378 | [8, 7, 9, 9, 2, 1, 9, 7, 2, 2, 7, 6, -1, 0, 0, 0], 379 | [6, 10, 1, 6, 1, 7, 7, 1, 3, -1, 0, 0, 0, 0, 0, 0], 380 | [6, 10, 1, 6, 1, 0, 6, 0, 7, 7, 0, 8, -1, 0, 0, 0], 381 | [7, 6, 3, 3, 9, 0, 6, 9, 3, 6, 10, 9, -1, 0, 0, 0], 382 | [6, 8, 7, 6, 10, 8, 8, 10, 9, -1, 0, 0, 0, 0, 0, 0], 383 | [8, 6, 11, 8, 4, 6, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 384 | [11, 3, 0, 11, 0, 6, 6, 0, 4, -1, 0, 0, 0, 0, 0, 0], 385 | [6, 8, 4, 6, 11, 8, 0, 1, 9, -1, 0, 0, 0, 0, 0, 0], 386 | [1, 9, 3, 3, 6, 11, 9, 6, 3, 9, 4, 6, -1, 0, 0, 0], 387 | [8, 6, 11, 8, 4, 6, 10, 1, 2, -1, 0, 0, 0, 0, 0, 0], 388 | [2, 10, 1, 11, 3, 0, 11, 0, 6, 6, 0, 4, -1, 0, 0, 0], 389 | [11, 4, 6, 11, 8, 4, 2, 10, 9, 2, 9, 0, -1, 0, 0, 0], 390 | [4, 6, 11, 4, 11, 9, 9, 11, 3, 9, 3, 2, 9, 2, 10, -1], 391 | [3, 8, 4, 3, 4, 2, 2, 4, 6, -1, 0, 0, 0, 0, 0, 0], 392 | [2, 0, 4, 2, 4, 6, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 393 | [0, 1, 9, 3, 8, 4, 3, 4, 2, 2, 4, 6, -1, 0, 0, 0], 394 | [9, 2, 1, 9, 4, 2, 2, 4, 6, -1, 0, 0, 0, 0, 0, 0], 395 | [6, 10, 4, 4, 3, 8, 4, 10, 3, 3, 10, 1, -1, 0, 0, 0], 396 | [1, 6, 10, 1, 0, 6, 6, 0, 4, -1, 0, 0, 0, 0, 0, 0], 397 | [10, 9, 0, 10, 0, 6, 6, 0, 3, 6, 3, 8, 6, 8, 4, -1], 398 | [10, 9, 4, 10, 4, 6, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 399 | [6, 11, 7, 5, 4, 9, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 400 | [0, 8, 3, 9, 5, 4, 7, 6, 11, -1, 0, 0, 0, 0, 0, 0], 401 | [0, 5, 4, 0, 1, 5, 6, 11, 7, -1, 0, 0, 0, 0, 0, 0], 402 | [7, 6, 11, 4, 8, 3, 4, 3, 5, 5, 3, 1, -1, 0, 0, 0], 403 | [2, 10, 1, 11, 7, 6, 5, 4, 9, -1, 0, 0, 0, 0, 0, 0], 404 | [0, 8, 3, 1, 2, 10, 4, 9, 5, 11, 7, 6, -1, 0, 0, 0], 405 | [6, 11, 7, 10, 5, 4, 10, 4, 2, 2, 4, 0, -1, 0, 0, 0], 406 | [6, 11, 7, 5, 2, 10, 5, 4, 2, 2, 4, 3, 3, 4, 8, -1], 407 | [2, 7, 6, 2, 3, 7, 4, 9, 5, -1, 0, 0, 0, 0, 0, 0], 408 | [4, 9, 5, 8, 7, 6, 8, 6, 0, 0, 6, 2, -1, 0, 0, 0], 409 | [3, 6, 2, 3, 7, 6, 0, 1, 5, 0, 5, 4, -1, 0, 0, 0], 410 | [1, 5, 4, 1, 4, 2, 2, 4, 8, 2, 8, 7, 2, 7, 6, -1], 411 | [5, 4, 9, 6, 10, 1, 6, 1, 7, 7, 1, 3, -1, 0, 0, 0], 412 | [4, 9, 5, 7, 0, 8, 7, 6, 0, 0, 6, 1, 1, 6, 10, -1], 413 | [3, 7, 
6, 3, 6, 0, 0, 6, 10, 0, 10, 5, 0, 5, 4, -1], 414 | [4, 8, 5, 8, 10, 5, 8, 6, 10, 8, 7, 6, -1, 0, 0, 0], 415 | [5, 6, 11, 5, 11, 9, 9, 11, 8, -1, 0, 0, 0, 0, 0, 0], 416 | [0, 9, 5, 0, 5, 6, 0, 6, 3, 3, 6, 11, -1, 0, 0, 0], 417 | [8, 0, 11, 11, 5, 6, 11, 0, 5, 5, 0, 1, -1, 0, 0, 0], 418 | [11, 5, 6, 11, 3, 5, 5, 3, 1, -1, 0, 0, 0, 0, 0, 0], 419 | [10, 1, 2, 5, 6, 11, 5, 11, 9, 9, 11, 8, -1, 0, 0, 0], 420 | [2, 10, 1, 3, 6, 11, 3, 0, 6, 6, 0, 5, 5, 0, 9, -1], 421 | [0, 2, 10, 0, 10, 8, 8, 10, 5, 8, 5, 6, 8, 6, 11, -1], 422 | [11, 3, 6, 3, 5, 6, 3, 10, 5, 3, 2, 10, -1, 0, 0, 0], 423 | [2, 3, 6, 6, 9, 5, 3, 9, 6, 3, 8, 9, -1, 0, 0, 0], 424 | [5, 0, 9, 5, 6, 0, 0, 6, 2, -1, 0, 0, 0, 0, 0, 0], 425 | [6, 2, 3, 6, 3, 5, 5, 3, 8, 5, 8, 0, 5, 0, 1, -1], 426 | [6, 2, 1, 6, 1, 5, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 427 | [8, 9, 5, 8, 5, 3, 3, 5, 6, 3, 6, 10, 3, 10, 1, -1], 428 | [1, 0, 10, 0, 6, 10, 0, 5, 6, 0, 9, 5, -1, 0, 0, 0], 429 | [0, 3, 8, 10, 5, 6, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 430 | [10, 5, 6, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 431 | [11, 5, 10, 11, 7, 5, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 432 | [5, 11, 7, 5, 10, 11, 3, 0, 8, -1, 0, 0, 0, 0, 0, 0], 433 | [11, 5, 10, 11, 7, 5, 9, 0, 1, -1, 0, 0, 0, 0, 0, 0], 434 | [9, 3, 1, 9, 8, 3, 5, 10, 11, 5, 11, 7, -1, 0, 0, 0], 435 | [2, 11, 7, 2, 7, 1, 1, 7, 5, -1, 0, 0, 0, 0, 0, 0], 436 | [3, 0, 8, 2, 11, 7, 2, 7, 1, 1, 7, 5, -1, 0, 0, 0], 437 | [2, 11, 0, 0, 5, 9, 0, 11, 5, 5, 11, 7, -1, 0, 0, 0], 438 | [9, 8, 3, 9, 3, 5, 5, 3, 2, 5, 2, 11, 5, 11, 7, -1], 439 | [10, 2, 3, 10, 3, 5, 5, 3, 7, -1, 0, 0, 0, 0, 0, 0], 440 | [5, 10, 7, 7, 0, 8, 10, 0, 7, 10, 2, 0, -1, 0, 0, 0], 441 | [1, 9, 0, 10, 2, 3, 10, 3, 5, 5, 3, 7, -1, 0, 0, 0], 442 | [7, 5, 10, 7, 10, 8, 8, 10, 2, 8, 2, 1, 8, 1, 9, -1], 443 | [7, 5, 1, 7, 1, 3, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 444 | [8, 1, 0, 8, 7, 1, 1, 7, 5, -1, 0, 0, 0, 0, 0, 0], 445 | [0, 5, 9, 0, 3, 5, 5, 3, 7, -1, 0, 0, 0, 0, 0, 0], 446 | [7, 5, 9, 7, 9, 8, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 447 | [4, 5, 10, 4, 10, 8, 8, 10, 11, -1, 0, 0, 0, 0, 0, 0], 448 | [11, 3, 10, 10, 4, 5, 10, 3, 4, 4, 3, 0, -1, 0, 0, 0], 449 | [9, 0, 1, 4, 5, 10, 4, 10, 8, 8, 10, 11, -1, 0, 0, 0], 450 | [3, 1, 9, 3, 9, 11, 11, 9, 4, 11, 4, 5, 11, 5, 10, -1], 451 | [8, 4, 11, 11, 1, 2, 4, 1, 11, 4, 5, 1, -1, 0, 0, 0], 452 | [5, 1, 2, 5, 2, 4, 4, 2, 11, 4, 11, 3, 4, 3, 0, -1], 453 | [11, 8, 4, 11, 4, 2, 2, 4, 5, 2, 5, 9, 2, 9, 0, -1], 454 | [2, 11, 3, 5, 9, 4, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 455 | [4, 5, 10, 4, 10, 2, 4, 2, 8, 8, 2, 3, -1, 0, 0, 0], 456 | [10, 4, 5, 10, 2, 4, 4, 2, 0, -1, 0, 0, 0, 0, 0, 0], 457 | [0, 1, 9, 8, 2, 3, 8, 4, 2, 2, 4, 10, 10, 4, 5, -1], 458 | [10, 2, 5, 2, 4, 5, 2, 9, 4, 2, 1, 9, -1, 0, 0, 0], 459 | [4, 3, 8, 4, 5, 3, 3, 5, 1, -1, 0, 0, 0, 0, 0, 0], 460 | [0, 4, 5, 0, 5, 1, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 461 | [0, 3, 9, 3, 5, 9, 3, 4, 5, 3, 8, 4, -1, 0, 0, 0], 462 | [4, 5, 9, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 463 | [7, 4, 9, 7, 9, 11, 11, 9, 10, -1, 0, 0, 0, 0, 0, 0], 464 | [8, 3, 0, 7, 4, 9, 7, 9, 11, 11, 9, 10, -1, 0, 0, 0], 465 | [0, 1, 4, 4, 11, 7, 1, 11, 4, 1, 10, 11, -1, 0, 0, 0], 466 | [10, 11, 7, 10, 7, 1, 1, 7, 4, 1, 4, 8, 1, 8, 3, -1], 467 | [2, 11, 7, 2, 7, 4, 2, 4, 1, 1, 4, 9, -1, 0, 0, 0], 468 | [0, 8, 3, 1, 4, 9, 1, 2, 4, 4, 2, 7, 7, 2, 11, -1], 469 | [7, 2, 11, 7, 4, 2, 2, 4, 0, -1, 0, 0, 0, 0, 0, 0], 470 | [7, 4, 11, 4, 2, 11, 4, 3, 2, 4, 8, 3, -1, 0, 0, 0], 471 | [7, 4, 3, 3, 10, 2, 3, 4, 10, 10, 4, 9, -1, 0, 0, 0], 472 | [2, 0, 8, 2, 8, 10, 10, 8, 7, 10, 7, 4, 10, 4, 9, -1], 473 | [4, 0, 1, 4, 1, 7, 7, 
1, 10, 7, 10, 2, 7, 2, 3, -1], 474 | [4, 8, 7, 1, 10, 2, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 475 | [9, 7, 4, 9, 1, 7, 7, 1, 3, -1, 0, 0, 0, 0, 0, 0], 476 | [8, 7, 0, 7, 1, 0, 7, 9, 1, 7, 4, 9, -1, 0, 0, 0], 477 | [4, 0, 3, 4, 3, 7, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 478 | [4, 8, 7, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 479 | [8, 9, 10, 8, 10, 11, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 480 | [0, 11, 3, 0, 9, 11, 11, 9, 10, -1, 0, 0, 0, 0, 0, 0], 481 | [1, 8, 0, 1, 10, 8, 8, 10, 11, -1, 0, 0, 0, 0, 0, 0], 482 | [3, 1, 10, 3, 10, 11, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 483 | [2, 9, 1, 2, 11, 9, 9, 11, 8, -1, 0, 0, 0, 0, 0, 0], 484 | [0, 9, 3, 9, 11, 3, 9, 2, 11, 9, 1, 2, -1, 0, 0, 0], 485 | [11, 8, 0, 11, 0, 2, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 486 | [2, 11, 3, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 487 | [3, 10, 2, 3, 8, 10, 10, 8, 9, -1, 0, 0, 0, 0, 0, 0], 488 | [9, 10, 2, 9, 2, 0, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 489 | [3, 8, 2, 8, 10, 2, 8, 1, 10, 8, 0, 1, -1, 0, 0, 0], 490 | [2, 1, 10, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 491 | [8, 9, 1, 8, 1, 3, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0], 492 | [1, 0, 9, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 493 | [0, 3, 8, -1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 494 | [-1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], 495 | ]; 496 | static INDEX_TO_VERTEX: [[u8; 3]; 8] = [ 497 | [0, 0, 0], 498 | [1, 0, 0], 499 | [1, 1, 0], 500 | [0, 1, 0], 501 | [0, 0, 1], 502 | [1, 0, 1], 503 | [1, 1, 1], 504 | [0, 1, 1], 505 | ]; 506 | 507 | static EDGE_VERTICES: [[u32; 2]; 12] = [ 508 | [0, 1], 509 | [1, 2], 510 | [2, 3], 511 | [3, 0], 512 | [4, 5], 513 | [6, 5], 514 | [6, 7], 515 | [7, 4], 516 | [0, 4], 517 | [1, 5], 518 | [2, 6], 519 | [3, 7], 520 | ]; 521 | --------------------------------------------------------------------------------