├── .gitignore ├── Cargo.toml ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── examples ├── copc_http.rs ├── copc_to_xyz.rs └── print_points.rs └── src ├── compressor.rs ├── copc.rs ├── decompressor.rs ├── error.rs ├── lib.rs ├── reader.rs └── writer.rs /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | /Cargo.lock 3 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "copc-rs" 3 | version = "0.5.0" 4 | authors = ["Pirmin Kalberer ", "Øyvind Hjermstad <@yvind>"] 5 | edition = "2021" 6 | 7 | description = "Cloud Optimized Point Cloud (COPC) reader and writer." 8 | homepage = "https://github.com/pka/copc-rs" 9 | repository = "https://github.com/pka/copc-rs" 10 | readme = "README.md" 11 | license = "MIT/Apache-2.0" 12 | keywords = ["lidar", "pointcloud", "copc", "las", "geo"] 13 | categories = ["science::geo", "rendering::data-formats"] 14 | 15 | [dependencies] 16 | byteorder = "1.4.3" 17 | fastrand = "2.3.0" 18 | las-crs = "0.1.1" 19 | las = { version = "0.9.2", features = ["laz"] } 20 | laz = "0.9.2" 21 | log = "0.4.25" 22 | thiserror = "2.0.6" 23 | crs-definitions = "0.3.0" 24 | 25 | [dev-dependencies] 26 | env_logger = "0.11.8" 27 | http-range-client = { version = "0.9.0", default-features = false, features = [ 28 | "ureq-sync", 29 | ] } 30 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | Copyright (c) 2025 Pirmin Kalberer, Øyvind Hjermstad 2 | 3 | Permission is hereby granted, free of charge, to any 4 | person obtaining a copy of this software and associated 5 | documentation files (the "Software"), to deal in the 6 | Software without restriction, including without 7 | limitation the rights to use, copy, modify, merge, 8 | publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software 10 | is furnished to do so, subject to the following 11 | conditions: 12 | 13 | The above copyright notice and this permission notice 14 | shall be included in all copies or substantial portions 15 | of the Software. 16 | 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 18 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 19 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 20 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT 21 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 22 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 23 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 24 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 25 | DEALINGS IN THE SOFTWARE. 26 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # copc-rs 2 | 3 | [![crates.io version](https://img.shields.io/crates/v/copc-rs.svg)](https://crates.io/crates/copc-rs) 4 | [![docs.rs docs](https://docs.rs/copc-rs/badge.svg)](https://docs.rs/copc-rs) 5 | 6 | copc-rs is a Rust library for reading and writing Cloud Optimized Point Cloud ([COPC](https://copc.io/)) data. 7 | It utilizes the las and laz crates heavily and tries to offer a similar API to las.
8 | 9 | ## Usage examples 10 | 11 | ### Reader 12 | 13 | ```rust 14 | let mut copc_reader = CopcReader::from_path("autzen-classified.copc.laz")?; 15 | for point in copc_reader.points(LodSelection::Level(0), BoundsSelection::All)?.take(5) { 16 | println!("Point coordinates: ({}, {}, {})", point.x, point.y, point.z); 17 | } 18 | ``` 19 | 20 | Full example with bounds selection: 21 | ```rust 22 | use copc_rs::{Bounds, BoundsSelection, CopcReader, LodSelection, Vector}; 23 | 24 | fn main() { 25 | let mut copc_reader = CopcReader::from_path("./lidar.copc.laz").unwrap(); 26 | 27 | let bounds = Bounds { 28 | min: Vector { 29 | x: 698_100., 30 | y: 6_508_100., 31 | z: 0., 32 | }, 33 | max: Vector { 34 | x: 698_230., 35 | y: 6_508_189., 36 | z: 2_000., 37 | }, 38 | }; 39 | 40 | for point in copc_reader 41 | .points(LodSelection::Resolution(1.), BoundsSelection::Within(bounds)) 42 | .unwrap() 43 | { 44 | // do something with the points 45 | } 46 | } 47 | ``` 48 | 49 | Run an example: 50 | ``` 51 | cargo run --example copc_http 52 | ``` 53 | 54 | ### Writer [[*]](#writing-is-still-a-wip) 55 | 56 | ```rust 57 | use copc_rs::CopcWriter; 58 | use las::Reader; 59 | 60 | fn main() { 61 | let mut las_reader = Reader::from_path("./lidar.las").unwrap(); 62 | 63 | let header = las_reader.header().clone(); 64 | let num_points = header.number_of_points() as i32; 65 | let points = las_reader.points().filter_map(las::Result::ok); 66 | 67 | let mut copc_writer = CopcWriter::from_path("./lidar.copc.laz", header, -1, -1).unwrap(); 68 | 69 | copc_writer.write(points, num_points).unwrap(); 70 | 71 | println!("{:#?}", copc_writer.copc_info()); 72 | } 73 | ``` 74 | 75 | ## Writing is still a WIP 76 | 77 | Writing of the octree structure seems to work, so spatial queries in full resolution on copc-rs written files should be good. 78 | BUT the octree levels do not yet contain a point distribution similar to that of the whole cloud, so results from resolution queries on copc-rs written files are wrong. 79 | This means the written files will look bad in viewers. 80 | 81 | 82 | I will look into it when I find time; for now I only need full resolution spatial queries in my current project anyway. 83 | 84 | -yvind 85 | 86 | ## Credits 87 | 88 | This library depends heavily on the work of Thomas Montaigu (@tmontaigu) and Pete Gadomski (@gadomski), the authors of the laz and las crates. 89 | -------------------------------------------------------------------------------- /examples/copc_http.rs: -------------------------------------------------------------------------------- 1 | use copc_rs::{BoundsSelection, CopcReader, LodSelection}; 2 | use http_range_client::UreqHttpReader as HttpReader; 3 | 4 | fn main() -> copc_rs::Result<()> { 5 | env_logger::init(); 6 | let mut http_reader = 7 | HttpReader::new("https://s3.amazonaws.com/hobu-lidar/autzen-classified.copc.laz"); 8 | // http_reader.set_min_req_size(1_048_576); // 1MB - 3 requests, 3'145'728 B 9 | // http_reader.set_min_req_size(524288); // 512KB - 4 requests, 2'097'152 B 10 | http_reader.set_min_req_size(262144); // 256KB - 5 requests, 1'310'720 B 11 | 12 | let mut copc_reader = CopcReader::new(http_reader)?; 13 | 14 | let mut max_z: f64 = 0.0; 15 | for point in copc_reader.points(LodSelection::Level(0), BoundsSelection::All)?
{ 16 | max_z = max_z.max(point.z); 17 | } 18 | println!("Max Z Level 0: {max_z}"); 19 | 20 | Ok(()) 21 | } 22 | -------------------------------------------------------------------------------- /examples/copc_to_xyz.rs: -------------------------------------------------------------------------------- 1 | use copc_rs::{BoundsSelection, CopcReader, LodSelection}; 2 | use std::env; 3 | use std::fs::File; 4 | use std::io::{BufWriter, Write}; 5 | use std::path::Path; 6 | 7 | fn main() -> copc_rs::Result<()> { 8 | let lazfn = env::args().nth(1).expect("COPC file required"); 9 | 10 | let mut copc_reader = CopcReader::from_path(&lazfn)?; 11 | 12 | let dest = Path::new(&lazfn).with_extension("xyz"); 13 | println!("Writing {:?}", &dest); 14 | let mut file = BufWriter::new(File::create(dest)?); 15 | 16 | for point in copc_reader.points(LodSelection::Level(0), BoundsSelection::All)? { 17 | writeln!(&mut file, "{} {} {}", point.x, point.y, point.z)?; 18 | } 19 | 20 | Ok(()) 21 | } 22 | -------------------------------------------------------------------------------- /examples/print_points.rs: -------------------------------------------------------------------------------- 1 | use copc_rs::{BoundsSelection, CopcReader, LodSelection}; 2 | use std::env; 3 | 4 | fn main() -> copc_rs::Result<()> { 5 | let lazfn = env::args().nth(1).expect("COPC file required"); 6 | 7 | let mut copc_reader = CopcReader::from_path(&lazfn)?; 8 | for (i, point) in copc_reader 9 | .points(LodSelection::Level(0), BoundsSelection::All)? 10 | .enumerate() 11 | .take(5) 12 | { 13 | if i == 0 { 14 | dbg!(&point); 15 | } 16 | println!("Point coordinates: ({}, {}, {})", point.x, point.y, point.z); 17 | } 18 | 19 | Ok(()) 20 | } 21 | -------------------------------------------------------------------------------- /src/compressor.rs: -------------------------------------------------------------------------------- 1 | use byteorder::{LittleEndian, WriteBytesExt}; 2 | use laz::laszip::{ChunkTable, ChunkTableEntry, LazVlr}; 3 | use laz::record::{LayeredPointRecordCompressor, RecordCompressor}; 4 | 5 | use std::io::{Seek, SeekFrom, Write}; 6 | 7 | pub(crate) struct CopcCompressor<'a, W: Write + Seek + 'a> { 8 | vlr: LazVlr, 9 | record_compressor: LayeredPointRecordCompressor<'a, W>, 10 | /// Position where the compressor started 11 | start_pos: u64, 12 | /// Position where the current chunk started 13 | chunk_start_pos: u64, 14 | /// Entry for the chunk we are currently compressing 15 | current_chunk_entry: ChunkTableEntry, 16 | /// Table of chunks written so far 17 | chunk_table: ChunkTable, 18 | } 19 | 20 | impl<'a, W: Write + Seek + 'a> CopcCompressor<'a, W> { 21 | /// Creates a compressor using the provided vlr.
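/// /// On creation, 8 bytes are reserved at the current stream position for the i64 offset /// to the chunk table; `done` seeks back and fills this offset in once all chunks are written.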
22 | pub(crate) fn new(write: W, vlr: LazVlr) -> crate::Result<Self> { 23 | let mut record_compressor = LayeredPointRecordCompressor::new(write); 24 | record_compressor.set_fields_from(vlr.items())?; 25 | let stream = record_compressor.get_mut(); 26 | 27 | let start_pos = stream.stream_position()?; 28 | // reserve 8 bytes for the offset to the chunk table 29 | stream.write_i64::<LittleEndian>(-1)?; 30 | 31 | Ok(Self { 32 | vlr, 33 | record_compressor, 34 | chunk_start_pos: start_pos + 8, // size of the written i64 35 | start_pos, 36 | chunk_table: ChunkTable::default(), 37 | current_chunk_entry: ChunkTableEntry::default(), 38 | }) 39 | } 40 | 41 | /// Compress a chunk 42 | pub(crate) fn compress_chunk<Chunk: AsRef<[u8]>>( 43 | &mut self, 44 | chunk: Chunk, 45 | ) -> std::io::Result<(ChunkTableEntry, u64)> { 46 | for point in chunk.as_ref().chunks_exact(self.vlr.items_size() as usize) { 47 | self.record_compressor.compress_next(point)?; 48 | self.current_chunk_entry.point_count += 1; 49 | } 50 | 51 | // finish the chunk 52 | self.record_compressor.done()?; 53 | self.record_compressor.reset(); 54 | self.record_compressor 55 | .set_fields_from(self.vlr.items()) 56 | .unwrap(); 57 | 58 | // update the chunk table 59 | let current_pos = self.record_compressor.get_mut().stream_position()?; 60 | self.current_chunk_entry.byte_count = current_pos - self.chunk_start_pos; 61 | self.chunk_table.push(self.current_chunk_entry); 62 | 63 | // store chunk entry and chunk start pos for returning 64 | let old_chunk_start_pos = self.chunk_start_pos; 65 | let written_chunk_entry = self.current_chunk_entry; 66 | 67 | // reset the chunk 68 | self.chunk_start_pos = current_pos; 69 | self.current_chunk_entry = ChunkTableEntry::default(); 70 | 71 | Ok((written_chunk_entry, old_chunk_start_pos)) 72 | } 73 | 74 | /// Must be called when you have compressed all your points. 75 | pub(crate) fn done(&mut self) -> std::io::Result<()> { 76 | self.record_compressor.done()?; 77 | 78 | // update the offset to the chunk table 79 | let stream = self.record_compressor.get_mut(); 80 | let start_of_chunk_table_pos = stream.stream_position()?; 81 | stream.seek(SeekFrom::Start(self.start_pos))?; 82 | stream.write_i64::<LittleEndian>(start_of_chunk_table_pos as i64)?; 83 | stream.seek(SeekFrom::Start(start_of_chunk_table_pos))?; 84 | 85 | self.chunk_table.write_to(stream, &self.vlr) 86 | } 87 | 88 | pub(crate) fn get_mut(&mut self) -> &mut W { 89 | self.record_compressor.get_mut() 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /src/copc.rs: -------------------------------------------------------------------------------- 1 | //! COPC VLR. 2 | 3 | use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt}; 4 | use las::{Bounds, Vector, Vlr}; 5 | use std::hash::Hash; 6 | use std::io::{Cursor, Read, Write}; 7 | 8 | /// COPC Info VLR data. 9 | #[derive(Clone, Debug, Default)] 10 | pub struct CopcInfo { 11 | /// Actual (unscaled) coordinates of center of octree 12 | pub center: Vector<f64>, 13 | /// Perpendicular distance from the center to any side of the root node. 14 | pub halfsize: f64, 15 | /// Space between points at the root node.
16 | /// This value is halved at each octree level 17 | pub spacing: f64, 18 | /// File offset to the first hierarchy page 19 | pub root_hier_offset: u64, 20 | /// Size of the first hierarchy page in bytes 21 | pub root_hier_size: u64, 22 | /// Minimum of GPSTime 23 | pub gpstime_minimum: f64, 24 | /// Maximum of GPSTime 25 | pub gpstime_maximum: f64, 26 | // Must be 0 27 | //_reserved: [u64; 11], 28 | } 29 | 30 | impl CopcInfo { 31 | /// Reads COPC VLR data from a `Read`. 32 | pub(crate) fn read_from<R: Read>(mut read: R) -> crate::Result<Self> { 33 | Ok(CopcInfo { 34 | center: Vector { 35 | x: read.read_f64::<LittleEndian>()?, 36 | y: read.read_f64::<LittleEndian>()?, 37 | z: read.read_f64::<LittleEndian>()?, 38 | }, 39 | halfsize: read.read_f64::<LittleEndian>()?, 40 | spacing: read.read_f64::<LittleEndian>()?, 41 | root_hier_offset: read.read_u64::<LittleEndian>()?, 42 | root_hier_size: read.read_u64::<LittleEndian>()?, 43 | gpstime_minimum: read.read_f64::<LittleEndian>()?, 44 | gpstime_maximum: read.read_f64::<LittleEndian>()?, 45 | //_reserved: [0; 11], 46 | }) 47 | } 48 | 49 | /// Convert COPC VLR data to a Vlr, size of the VLR data is 160 bytes + header 50 | pub(crate) fn into_vlr(self) -> crate::Result<Vlr> { 51 | let mut buffer = Cursor::new([0_u8; 160]); 52 | 53 | buffer.write_f64::<LittleEndian>(self.center.x)?; 54 | buffer.write_f64::<LittleEndian>(self.center.y)?; 55 | buffer.write_f64::<LittleEndian>(self.center.z)?; 56 | buffer.write_f64::<LittleEndian>(self.halfsize)?; 57 | buffer.write_f64::<LittleEndian>(self.spacing)?; 58 | buffer.write_u64::<LittleEndian>(self.root_hier_offset)?; 59 | buffer.write_u64::<LittleEndian>(self.root_hier_size)?; 60 | buffer.write_f64::<LittleEndian>(self.gpstime_minimum)?; 61 | buffer.write_f64::<LittleEndian>(self.gpstime_maximum)?; 62 | 63 | Ok(Vlr { 64 | user_id: "copc".to_string(), 65 | record_id: 1, 66 | description: "COPC info VLR".to_string(), 67 | data: Vec::from(buffer.into_inner()), 68 | }) 69 | } 70 | } 71 | 72 | /// EPT hierarchy key 73 | #[derive(Hash, PartialEq, Eq, Clone, Debug)] 74 | pub struct VoxelKey { 75 | /// Level 76 | /// 77 | /// A value < 0 indicates an invalid VoxelKey 78 | pub level: i32, 79 | /// x 80 | pub x: i32, 81 | /// y 82 | pub y: i32, 83 | /// z 84 | pub z: i32, 85 | } 86 | 87 | impl Default for VoxelKey { 88 | fn default() -> Self { 89 | VoxelKey { 90 | level: -1, 91 | x: 0, 92 | y: 0, 93 | z: 0, 94 | } 95 | } 96 | } 97 | 98 | impl VoxelKey { 99 | /// Reads VoxelKey from a `Read`. 100 | pub(crate) fn read_from<R: Read>(read: &mut R) -> crate::Result<Self> { 101 | Ok(VoxelKey { 102 | level: read.read_i32::<LittleEndian>()?, 103 | x: read.read_i32::<LittleEndian>()?, 104 | y: read.read_i32::<LittleEndian>()?, 105 | z: read.read_i32::<LittleEndian>()?, 106 | }) 107 | } 108 | 109 | /// Writes VoxelKey to a `Write`.
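/// /// The key is serialized as four little-endian i32 values (level, x, y, z), 16 bytes in total.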
110 | pub(crate) fn write_to<W: Write>(self, write: &mut W) -> crate::Result<()> { 111 | write.write_i32::<LittleEndian>(self.level)?; 112 | write.write_i32::<LittleEndian>(self.x)?; 113 | write.write_i32::<LittleEndian>(self.y)?; 114 | write.write_i32::<LittleEndian>(self.z)?; 115 | 116 | Ok(()) 117 | } 118 | 119 | pub(crate) fn child(&self, dir: i32) -> VoxelKey { 120 | VoxelKey { 121 | level: self.level + 1, 122 | x: (self.x << 1) | (dir & 0x1), 123 | y: (self.y << 1) | ((dir >> 1) & 0x1), 124 | z: (self.z << 1) | ((dir >> 2) & 0x1), 125 | } 126 | } 127 | pub(crate) fn children(&self) -> Vec<VoxelKey> { 128 | (0..8).map(|i| self.child(i)).collect() 129 | } 130 | pub(crate) fn bounds(&self, root_bounds: &Bounds) -> Bounds { 131 | // In an octree every cell is a cube 132 | let side_size = 133 | (root_bounds.max.x - root_bounds.min.x) / 2_u32.pow(self.level as u32) as f64; 134 | 135 | Bounds { 136 | min: Vector { 137 | x: root_bounds.min.x + self.x as f64 * side_size, 138 | y: root_bounds.min.y + self.y as f64 * side_size, 139 | z: root_bounds.min.z + self.z as f64 * side_size, 140 | }, 141 | max: Vector { 142 | x: root_bounds.min.x + (self.x + 1) as f64 * side_size, 143 | y: root_bounds.min.y + (self.y + 1) as f64 * side_size, 144 | z: root_bounds.min.z + (self.z + 1) as f64 * side_size, 145 | }, 146 | } 147 | } 148 | } 149 | 150 | /// Hierarchy entry 151 | /// 152 | /// An entry corresponds to a single key/value pair in an EPT hierarchy, but contains additional information to allow direct access and decoding of the corresponding point data. 153 | #[derive(Clone, Default, Debug)] 154 | pub struct Entry { 155 | /// EPT key of the data to which this entry corresponds 156 | pub key: VoxelKey, 157 | 158 | /// Absolute offset to the data chunk if the pointCount > 0. 159 | /// Absolute offset to a child hierarchy page if the pointCount is -1. 160 | /// 0 if the pointCount is 0. 161 | pub offset: u64, 162 | 163 | /// Size of the data chunk in bytes (compressed size) if the pointCount > 0. 164 | /// Size of the hierarchy page if the pointCount is -1. 165 | /// 0 if the pointCount is 0. 166 | pub byte_size: i32, 167 | 168 | /// If > 0, represents the number of points in the data chunk. 169 | /// If -1, indicates the information for this octree node is found in another hierarchy page. 170 | /// If 0, no point data exists for this key, though may exist for child entries. 171 | pub point_count: i32, 172 | } 173 | 174 | impl Entry { 175 | /// Reads hierarchy entry from a `Read`. 176 | pub(crate) fn read_from<R: Read>(read: &mut R) -> crate::Result<Self> { 177 | Ok(Entry { 178 | key: VoxelKey::read_from(read)?, 179 | offset: read.read_u64::<LittleEndian>()?, 180 | byte_size: read.read_i32::<LittleEndian>()?, 181 | point_count: read.read_i32::<LittleEndian>()?, 182 | }) 183 | } 184 | 185 | /// Writes a hierarchy entry to a `Write` 186 | pub(crate) fn write_to<W: Write>(self, write: &mut W) -> crate::Result<()> { 187 | self.key.write_to(write)?; 188 | write.write_u64::<LittleEndian>(self.offset)?; 189 | write.write_i32::<LittleEndian>(self.byte_size)?; 190 | write.write_i32::<LittleEndian>(self.point_count)?; 191 | 192 | Ok(()) 193 | } 194 | } 195 | 196 | /// Hierarchy page 197 | /// 198 | /// COPC stores hierarchy information to allow a reader to locate points that are in a particular octree node. 199 | /// The hierarchy may be arranged in a tree of pages, but shall always consist of at least one hierarchy page. 200 | #[derive(Clone, Debug)] 201 | pub struct HierarchyPage { 202 | /// Hierarchy page entries 203 | pub entries: Vec<Entry>, 204 | } 205 | 206 | impl HierarchyPage { 207 | /// Reads hierarchy page from a `Read`.
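/// /// Each entry occupies 32 bytes (a 16 byte [VoxelKey] plus offset, byte size and point count), /// so a page of `page_size` bytes holds `page_size / 32` entries.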
208 | pub(crate) fn read_from<R: Read>(mut read: R, page_size: u64) -> crate::Result<Self> { 209 | let num_entries = page_size as usize / 32; 210 | let mut entries = Vec::with_capacity(num_entries); 211 | for _ in 0..num_entries { 212 | let entry = Entry::read_from(&mut read)?; 213 | entries.push(entry); 214 | } 215 | Ok(HierarchyPage { entries }) 216 | } 217 | 218 | /// Writes a hierarchy page to a `Write` 219 | /// 220 | /// This implementation of the COPC writer writes all EPT entries to a single page 221 | pub(crate) fn into_evlr(self) -> crate::Result<Vlr> { 222 | // page size in bytes is the number of entries times 32 bytes per entry 223 | let mut buffer = Cursor::new(vec![0_u8; self.entries.len() * 32]); 224 | 225 | for e in self.entries { 226 | e.write_to(&mut buffer)?; 227 | } 228 | 229 | Ok(Vlr { 230 | user_id: "copc".to_string(), 231 | record_id: 1000, 232 | description: "EPT Hierarchy".to_string(), 233 | data: buffer.into_inner(), 234 | }) 235 | } 236 | 237 | /// The size in bytes of the data in the EVLR 238 | pub fn byte_size(&self) -> u64 { 239 | // each entry is 32 bytes 240 | (self.entries.len() * 32) as u64 241 | } 242 | } 243 | 244 | /// Our 'custom' type to build an octree from COPC hierarchy page 245 | #[derive(Clone, Debug)] 246 | pub(crate) struct OctreeNode { 247 | /// Hierarchy entry 248 | pub entry: Entry, 249 | /// The bounds this node represents, in the file's coordinates 250 | pub bounds: Bounds, 251 | /// Children of this node, since it's an octree, there 252 | /// are at most 8 children 253 | pub children: Vec<OctreeNode>, 254 | } 255 | 256 | impl OctreeNode { 257 | pub fn new() -> Self { 258 | OctreeNode { 259 | entry: Entry::default(), 260 | bounds: Bounds { 261 | min: Vector::default(), 262 | max: Vector::default(), 263 | }, 264 | children: Vec::with_capacity(8), 265 | } 266 | } 267 | 268 | pub fn is_full(&self, max_size: i32) -> bool { 269 | self.entry.point_count >= max_size 270 | } 271 | } 272 | -------------------------------------------------------------------------------- /src/decompressor.rs: -------------------------------------------------------------------------------- 1 | use laz::laszip::LazVlr; 2 | use laz::record::{LayeredPointRecordDecompressor, RecordDecompressor}; 3 | use std::io::{Read, Seek, SeekFrom}; 4 | 5 | /// LasZip decompressor.
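/// /// Stripped-down variant of `laz::LasZipDecompressor`: no chunk table is read, since the /// chunk offsets are already known from the COPC EPT hierarchy and are seeked to directly /// via `source_seek`.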
6 | pub(crate) struct CopcDecompressor<'a, R: Read + Seek> { 7 | start: u64, 8 | vlr: &'a LazVlr, 9 | record_decompressor: LayeredPointRecordDecompressor<'a, R>, 10 | } 11 | 12 | // Stripped down variant of laz::LasZipDecompressor 13 | // without ChunkTable reading as enough info is stored in the COPC evlr 14 | impl<'a, R: Read + Seek> CopcDecompressor<'a, R> { 15 | /// Creates a new instance from a data source of compressed points 16 | /// and the LazVlr describing the compressed data 17 | pub(crate) fn new(mut source: R, vlr: &'a LazVlr) -> laz::Result<Self> { 18 | // the read was seeked to the beginning of the las file in the read stream before calling new 19 | let start = source.stream_position()?; 20 | let mut record_decompressor = LayeredPointRecordDecompressor::new(source); 21 | 22 | // an early fail-check to avoid a potential panic when PointIter.next() unwraps a call to source_seek 23 | record_decompressor.set_fields_from(vlr.items())?; 24 | 25 | Ok(Self { 26 | start, 27 | vlr, 28 | record_decompressor, 29 | }) 30 | } 31 | 32 | #[inline] 33 | pub(crate) fn source_seek(&mut self, offset: u64) -> laz::Result<()> { 34 | self.record_decompressor 35 | .get_mut() 36 | .seek(SeekFrom::Start(offset + self.start))?; 37 | 38 | self.record_decompressor.reset(); 39 | self.record_decompressor.set_fields_from(self.vlr.items()) 40 | } 41 | 42 | /// Decompress the next point and write the uncompressed data to the out buffer. 43 | /// 44 | /// - The buffer should have at least enough bytes to store the decompressed data 45 | /// - The data is written in the buffer exactly as it would have been in a LAS File 46 | /// in Little Endian order. 47 | #[inline] 48 | pub(crate) fn decompress_one(&mut self, out: &mut [u8]) -> laz::Result<()> { 49 | self.record_decompressor 50 | .decompress_next(out) 51 | .map_err(laz::errors::LasZipError::IoError) 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | use thiserror::Error; 2 | 3 | /// crate specific Result type 4 | pub type Result<T> = std::result::Result<T, Error>; 5 | 6 | /// crate specific Error enum 7 | #[derive(Error, Debug)] 8 | pub enum Error { 9 | /// When trying to add points to a writer that has already been closed 10 | #[error("This writer has already been closed")] 11 | ClosedWriter, 12 | 13 | /// When trying to close an empty copc file 14 | #[error("There are no points added to this file")] 15 | EmptyCopcFile, 16 | 17 | /// [las::Error] 18 | #[error(transparent)] 19 | LasError(#[from] las::Error), 20 | 21 | /// [laz::LasZipError] 22 | #[error(transparent)] 23 | LasZipError(#[from] laz::LasZipError), 24 | 25 | /// The input file-path does not end in .copc.laz 26 | #[error("The extension of the file to write does not match .copc.laz")] 27 | WrongCopcExtension, 28 | 29 | /// The requested resolution is either negative or not normal 30 | #[error("The requested resolution is not possible: {}", .0)] 31 | InvalidResolution(f64), 32 | 33 | /// [std::io::Error] 34 | #[error(transparent)] 35 | Io(#[from] std::io::Error), 36 | 37 | /// The Copc Info vlr was not found, octree can not be built 38 | #[error("The source to be read does not contain a COPC info vlr")] 39 | CopcInfoVlrNotFound, 40 | 41 | /// The Ept hierarchy evlr was not found, octree can not be built 42 | #[error("The source to be read does not contain an EPT hierarchy vlr")] 43 | EptHierarchyVlrNotFound, 44 | 45 | /// The laszip vlr was not found, the points cannot be decompressed.
46 | #[error("laszip vlr not found")] 47 | LasZipVlrNotFound, 48 | 49 | /// The provided iterator for writing points to copc did not contain any points 50 | #[error("The provided iterator for writing points to copc did not contain any points")] 51 | EmptyIterator, 52 | 53 | /// Should not be possible 54 | #[error("The point could not be added to any node in the octree")] 55 | PointNotAddedToAnyNode, 56 | 57 | /// If the bounds in the passed-in header are invalid 58 | #[error("the bounds in the passed-in header are not normal: {:?}", .0)] 59 | InvalidBounds(las::Bounds), 60 | 61 | /// If a point fails to be added to the copc 62 | #[error(transparent)] 63 | InvalidPoint(crate::PointAddError), 64 | 65 | /// If a copc writer is created with invalid max or min node size bounds 66 | #[error("the set min or max number of points per node is invalid")] 67 | InvalidNodeSize, 68 | 69 | /// [las_crs::CrsError] 70 | #[error(transparent)] 71 | InvalidCrs(#[from] las_crs::CrsError), 72 | 73 | /// Unsupported EPSG code 74 | #[error("the found EPSG code is not defined in the crs-definitions library")] 75 | InvalidEPSGCode(u16), 76 | } 77 | 78 | /// crate specific Error enum related to adding points to the writer 79 | #[derive(Error, Debug)] 80 | pub enum PointAddError { 81 | /// A point in the iterator passed to [write] did not 82 | /// match the format specified by the `header` passed to [new] 83 | /// 84 | /// [new]: crate::writer::CopcWriter::new 85 | /// [write]: crate::writer::CopcWriter::write 86 | #[error("The point attributes of a point in the iterator don't match the header: {:?}", .0)] 87 | PointAttributesDoNotMatch(las::point::Format), 88 | 89 | /// A point in the iterator passed to [write] was not 90 | /// inside the bounds of the header passed to [new] 91 | /// 92 | /// [new]: crate::writer::CopcWriter::new 93 | /// [write]: crate::writer::CopcWriter::write 94 | #[error("A point in the iterator was not inside the bounds of the header")] 95 | PointNotInBounds, 96 | } 97 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | //! Library for reading and writing Cloud Optimized Point Cloud ([COPC](https://copc.io/)) data. 2 | 3 | const MIN_NODE_SIZE_DEFAULT: i32 = 256; 4 | const MAX_NODE_SIZE_DEFAULT: i32 = 16384; 5 | const VERSION: &str = env!("CARGO_PKG_VERSION"); 6 | 7 | mod compressor; 8 | mod copc; 9 | mod decompressor; 10 | mod error; 11 | mod reader; 12 | mod writer; 13 | 14 | pub use error::*; 15 | pub use las::{Bounds, Vector}; 16 | pub use reader::*; 17 | pub use writer::*; 18 | -------------------------------------------------------------------------------- /src/reader.rs: -------------------------------------------------------------------------------- 1 | //! COPC file reader.
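//! //! A minimal sketch of the read path (mirrors the README example; assumes a local //! `autzen-classified.copc.laz` file is available): //! //! ```no_run //! use copc_rs::{BoundsSelection, CopcReader, LodSelection}; //! //! # fn main() -> copc_rs::Result<()> { //! let mut copc_reader = CopcReader::from_path("autzen-classified.copc.laz")?; //! // Level(0) yields only the coarsest LOD; LodSelection::All would yield every level. //! for point in copc_reader.points(LodSelection::Level(0), BoundsSelection::All)? { //!     println!("({}, {}, {})", point.x, point.y, point.z); //! } //! # Ok(()) //! # } //! ```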
2 | 3 | use crate::copc::{CopcInfo, Entry, HierarchyPage, OctreeNode, VoxelKey}; 4 | use crate::decompressor::CopcDecompressor; 5 | use las::raw; 6 | use las::{Bounds, Builder, Header, Transform, Vector, Vlr}; 7 | use laz::LazVlr; 8 | use std::cmp::Ordering; 9 | use std::collections::HashMap; 10 | use std::fs::File; 11 | use std::io::{BufReader, Cursor, Read, Seek, SeekFrom}; 12 | use std::path::Path; 13 | 14 | /// COPC file reader 15 | pub struct CopcReader<R: Read + Seek> { 16 | // the start position of the data of interest in the read, most often 0 17 | start: u64, 18 | // the read- and seekable data source, seeked to the beginning of the copc file data 19 | read: R, 20 | header: Header, 21 | copc_info: CopcInfo, 22 | laz_vlr: LazVlr, 23 | /// Entries of loaded hierarchy pages 24 | hierarchy_entries: HashMap<VoxelKey, Entry>, 25 | } 26 | 27 | impl CopcReader<BufReader<File>> { 28 | /// Read a COPC file from a path, wraps the file in a BufReader for you 29 | pub fn from_path<P: AsRef<Path>>(path: P) -> crate::Result<Self> { 30 | File::open(path) 31 | .map_err(crate::Error::from) 32 | .and_then(|file| CopcReader::new(BufReader::new(file))) 33 | } 34 | } 35 | 36 | impl<R: Read + Seek> CopcReader<R> { 37 | /// Setup by reading LAS header and LasZip VLRs 38 | pub fn new(mut read: R) -> crate::Result<Self> { 39 | // to be able to read a copc file not starting at the beginning of the read stream 40 | let start = read.stream_position()?; 41 | 42 | let raw_header = raw::Header::read_from(&mut read)?; 43 | 44 | // store useful parts of the raw header before it's consumed by the builder 45 | let mut position = raw_header.header_size as u64; 46 | let number_of_variable_length_records = raw_header.number_of_variable_length_records; 47 | let offset_to_point_data = raw_header.offset_to_point_data as u64; 48 | let evlr = raw_header.evlr; 49 | 50 | // start building a header from a raw header 51 | let mut builder = Builder::new(raw_header)?; 52 | 53 | // add the vlrs to the builder 54 | for _ in 0..number_of_variable_length_records { 55 | let vlr = raw::Vlr::read_from(&mut read, false).map(Vlr::new)?; 56 | position += vlr.len(false) as u64; 57 | builder.vlrs.push(vlr); 58 | } 59 | 60 | // adjust read pointer position and add the padding if it exists 61 | // (position is relative to the start of the copc data, like offset_to_point_data) 62 | match position.cmp(&offset_to_point_data) { 63 | Ordering::Less => { 64 | let _ = read 65 | .by_ref() 66 | .take(offset_to_point_data - position) 67 | .read_to_end(&mut builder.vlr_padding)?; 68 | } 69 | Ordering::Equal => {} // pass 70 | Ordering::Greater => Err(las::Error::OffsetToPointDataTooSmall( 71 | offset_to_point_data as u32, 72 | ))?, 73 | } 74 | 75 | // add the evlrs to the builder 76 | if let Some(evlr) = evlr { 77 | let _ = read.seek(SeekFrom::Start(evlr.start_of_first_evlr + start))?; 78 | for _ in 0..evlr.number_of_evlrs { 79 | builder 80 | .evlrs 81 | .push(raw::Vlr::read_from(&mut read, true).map(Vlr::new)?); 82 | } 83 | } 84 | 85 | // build the header 86 | let header = builder.into_header()?; 87 | 88 | // check and store the relevant (e)vlrs 89 | let mut copc_info = None; 90 | let mut laszip_vlr = None; 91 | let mut ept_hierarchy = None; 92 | 93 | for vlr in header.all_vlrs() { 94 | match (vlr.user_id.to_lowercase().as_str(), vlr.record_id) { 95 | ("copc", 1) => { 96 | copc_info = Some(CopcInfo::read_from(vlr.data.as_slice())?); 97 | } 98 | ("copc", 1000) => { 99 | ept_hierarchy = Some(vlr); 100 | } 101 | ("laszip encoded", 22204) => { 102 | laszip_vlr = Some(LazVlr::read_from(vlr.data.as_slice())?); 103 | } 104 | _ => (), 105 | } 106 | } 107 | 108 | let copc_info = copc_info.ok_or(crate::Error::CopcInfoVlrNotFound)?;
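// The root hierarchy page may reference child pages: an entry whose point_count is -1 // points at another hierarchy page rather than at a point-data chunk. // The loop below walks all pages and flattens their entries into a single map.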
109 | // store all ept-hierarchy entries in a hashmap 110 | let hierarchy_entries = match ept_hierarchy { 111 | None => return Err(crate::Error::EptHierarchyVlrNotFound), 112 | Some(vlr) => { 113 | let mut hierarchy_entries = HashMap::new(); 114 | 115 | let mut read_vlr = Cursor::new(vlr.data.as_slice()); 116 | 117 | // read the root hierarchy page 118 | let mut page = 119 | HierarchyPage::read_from(&mut read_vlr, copc_info.root_hier_size)?.entries; 120 | 121 | while let Some(entry) = page.pop() { 122 | if entry.point_count == -1 { 123 | // read a new hierarchy page 124 | read.seek(SeekFrom::Start(entry.offset - copc_info.root_hier_offset))?; 125 | page.extend( 126 | HierarchyPage::read_from(&mut read, entry.byte_size as u64)?.entries, 127 | ); 128 | } else { 129 | hierarchy_entries.insert(entry.key.clone(), entry); 130 | } 131 | } 132 | hierarchy_entries 133 | } 134 | }; 135 | 136 | // set the read pointer to the start of the compressed data block 137 | let _ = read.seek(SeekFrom::Start(offset_to_point_data + start))?; 138 | Ok(CopcReader { 139 | start, 140 | read, 141 | header, 142 | copc_info, 143 | laz_vlr: laszip_vlr.ok_or(crate::Error::LasZipVlrNotFound)?, 144 | hierarchy_entries, 145 | }) 146 | } 147 | 148 | /// LAS header 149 | pub fn header(&self) -> &Header { 150 | &self.header 151 | } 152 | 153 | /// COPC info VLR content 154 | pub fn copc_info(&self) -> &CopcInfo { 155 | &self.copc_info 156 | } 157 | 158 | pub fn num_entries(&self) -> usize { 159 | self.hierarchy_entries.len() 160 | } 161 | 162 | /// Loads the nodes of the COPC octree that 163 | /// satisfy the parameters `query_bounds` and `level_range`. 164 | /// 165 | /// It returns the nodes of the matching 'sub-octree' 166 | fn load_octree_for_query( 167 | &mut self, 168 | level_range: LodSelection, 169 | query_bounds: &BoundsSelection, 170 | ) -> crate::Result<Vec<OctreeNode>> { 171 | let (level_min, level_max) = match level_range { 172 | LodSelection::All => (0, i32::MAX), 173 | LodSelection::Resolution(resolution) => { 174 | if !resolution.is_normal() || !resolution.is_sign_positive() { 175 | return Err(crate::Error::InvalidResolution(resolution)); 176 | } 177 | ( 178 | 0, 179 | 1.max((self.copc_info.spacing / resolution).log2().ceil() as i32 + 1), 180 | ) 181 | } 182 | LodSelection::Level(level) => (level, level + 1), 183 | LodSelection::LevelMinMax(min, max) => (min, max), 184 | }; 185 | 186 | let root_bounds = Bounds { 187 | min: Vector { 188 | x: self.copc_info.center.x - self.copc_info.halfsize, 189 | y: self.copc_info.center.y - self.copc_info.halfsize, 190 | z: self.copc_info.center.z - self.copc_info.halfsize, 191 | }, 192 | max: Vector { 193 | x: self.copc_info.center.x + self.copc_info.halfsize, 194 | y: self.copc_info.center.y + self.copc_info.halfsize, 195 | z: self.copc_info.center.z + self.copc_info.halfsize, 196 | }, 197 | }; 198 | 199 | let mut root_node = OctreeNode::new(); 200 | root_node.entry.key.level = 0; 201 | 202 | let mut satisfying_nodes = Vec::new(); 203 | let mut node_stack = vec![root_node]; 204 | 205 | while let Some(mut current_node) = node_stack.pop() { 206 | // bottom of tree of interest reached 207 | if current_node.entry.key.level >= level_max { 208 | continue; 209 | } 210 | 211 | let entry = match self.hierarchy_entries.get(&current_node.entry.key) { 212 | None => continue, // no entries for this node 213 | Some(e) => e, 214 | }; 215 | 216 | current_node.bounds = current_node.entry.key.bounds(&root_bounds); 217 | if let BoundsSelection::Within(bounds) = query_bounds { 218 | // this octree node does not
overlap with the bounds of interest 219 | if !bounds_intersect(&current_node.bounds, bounds) { 220 | continue; 221 | } 222 | } 223 | 224 | // the entry exists and intersects with our interests 225 | // push its children to the node stack 226 | for child_key in current_node.entry.key.children() { 227 | let mut child_node = OctreeNode::new(); 228 | child_node.entry.key = child_key; 229 | current_node.children.push(child_node.clone()); 230 | node_stack.push(child_node); 231 | } 232 | 233 | // this node has points and belongs to the LOD of interest 234 | if entry.point_count > 0 235 | && (level_min..level_max).contains(&current_node.entry.key.level) 236 | { 237 | current_node.entry = entry.clone(); 238 | satisfying_nodes.push(current_node); 239 | } 240 | } 241 | 242 | // Sort nodes by descending offsets for sequential reading 243 | satisfying_nodes.sort_by(|a, b| b.entry.offset.cmp(&a.entry.offset)); 244 | 245 | Ok(satisfying_nodes) 246 | } 247 | 248 | /// Point iterator for selected level and bounds 249 | pub fn points( 250 | &mut self, 251 | levels: LodSelection, 252 | bounds: BoundsSelection, 253 | ) -> crate::Result<PointIter<R>> { 254 | let nodes = self.load_octree_for_query(levels, &bounds)?; 255 | let total_points_left = nodes.iter().map(|n| n.entry.point_count as usize).sum(); 256 | 257 | let transforms = *self.header().transforms(); 258 | 259 | // Reverse transform to unscaled values 260 | let raw_bounds = match bounds { 261 | BoundsSelection::All => None, 262 | BoundsSelection::Within(bounds) => Some(RawBounds { 263 | min: Vector { 264 | x: transforms.x.inverse(bounds.min.x)?, 265 | y: transforms.y.inverse(bounds.min.y)?, 266 | z: transforms.z.inverse(bounds.min.z)?, 267 | }, 268 | max: Vector { 269 | x: transforms.x.inverse(bounds.max.x)?, 270 | y: transforms.y.inverse(bounds.max.y)?, 271 | z: transforms.z.inverse(bounds.max.z)?, 272 | }, 273 | }), 274 | }; 275 | 276 | self.read.seek(SeekFrom::Start(self.start))?; 277 | let decompressor = CopcDecompressor::new(&mut self.read, &self.laz_vlr)?; 278 | let point = vec![ 279 | 0u8; 280 | (self.header.point_format().len() + self.header.point_format().extra_bytes) 281 | as usize 282 | ]; 283 | 284 | Ok(PointIter { 285 | nodes, 286 | bounds: raw_bounds, 287 | point_format: *self.header.point_format(), 288 | transforms, 289 | decompressor, 290 | point_buffer: point, 291 | node_points_left: 0, 292 | total_points_left, 293 | }) 294 | } 295 | } 296 | 297 | struct RawBounds { 298 | min: Vector<i32>, 299 | max: Vector<i32>, 300 | } 301 | 302 | impl RawBounds { 303 | #[inline] 304 | fn contains_point(&self, p: &las::raw::Point) -> bool { 305 | !(p.x < self.min.x 306 | || p.y < self.min.y 307 | || p.z < self.min.z 308 | || p.x > self.max.x 309 | || p.y > self.max.y 310 | || p.z > self.max.z) 311 | } 312 | } 313 | 314 | #[inline] 315 | fn bounds_intersect(a: &Bounds, b: &Bounds) -> bool { 316 | !(a.max.x < b.min.x 317 | || a.max.y < b.min.y 318 | || a.max.z < b.min.z 319 | || a.min.x > b.max.x 320 | || a.min.y > b.max.y 321 | || a.min.z > b.max.z) 322 | } 323 | 324 | /// Limits the octree levels to be queried in order to have 325 | /// a point cloud with the requested resolution. 326 | /// 327 | /// resolution: Limits the octree levels to be queried in order 328 | /// to have a point cloud with the requested resolution. 329 | /// 330 | /// - The unit is the one of the data. 331 | /// - If absent, the resulting cloud will be at the 332 | /// full resolution offered by the COPC source 333 | /// 334 | /// level: The level of detail (LOD). 335 | 336 | /// If absent, all LODs are going to be considered 337 | pub enum LodSelection { 338 | /// Full resolution (all LODs) 339 | All, 340 | /// Requested minimal resolution of the point cloud, 341 | /// given as space between points, 342 | /// based on the spacing given in the COPC info VLR, 343 | /// defined as root-node side length / number of points in the root node. 344 | /// When traversing the octree levels, the spacing at level i is copc_spacing * 2^-i. 345 | /// 346 | /// Tl;dr: higher value -> fewer points per cube unit 347 | Resolution(f64), 348 | /// Only points that are of the requested LOD will be returned. 349 | Level(i32), 350 | /// Points for which the LOD is within the range will be returned. 351 | LevelMinMax(i32, i32), 352 | } 353 | 354 | /// Select points within bounds 355 | pub enum BoundsSelection { 356 | /// No bounds filter. 357 | All, 358 | /// Select points within bounds. 359 | Within(Bounds), 360 | } 361 | 362 | /// LasZip point iterator 363 | pub struct PointIter<'a, R: Read + Seek> { 364 | nodes: Vec<OctreeNode>, 365 | bounds: Option<RawBounds>, 366 | point_format: las::point::Format, 367 | transforms: Vector<Transform>, 368 | decompressor: CopcDecompressor<'a, &'a mut R>, 369 | point_buffer: Vec<u8>, 370 | node_points_left: usize, 371 | total_points_left: usize, 372 | } 373 | 374 | impl<R: Read + Seek> Iterator for PointIter<'_, R> { 375 | type Item = las::point::Point; 376 | 377 | fn next(&mut self) -> Option<Self::Item> { 378 | if self.total_points_left == 0 { 379 | return None; 380 | } 381 | let mut in_bounds; 382 | loop { 383 | while self.node_points_left == 0 { 384 | // get the next node with points 385 | if let Some(node) = self.nodes.pop() { 386 | self.decompressor.source_seek(node.entry.offset).unwrap(); 387 | self.node_points_left = node.entry.point_count as usize; 388 | } else { 389 | return None; 390 | } 391 | } 392 | self.decompressor 393 | .decompress_one(self.point_buffer.as_mut_slice()) 394 | .unwrap(); 395 | let raw_point = 396 | las::raw::Point::read_from(self.point_buffer.as_slice(), &self.point_format) 397 | .unwrap(); 398 | self.node_points_left -= 1; 399 | self.total_points_left -= 1; 400 | in_bounds = if let Some(bounds) = &self.bounds { 401 | bounds.contains_point(&raw_point) 402 | } else { 403 | true 404 | }; 405 | 406 | if in_bounds { 407 | return Some(las::point::Point::new(raw_point, &self.transforms)); 408 | } 409 | } 410 | } 411 | 412 | fn size_hint(&self) -> (usize, Option<usize>) { 413 | (self.total_points_left, Some(self.total_points_left)) 414 | } 415 | } 416 | -------------------------------------------------------------------------------- /src/writer.rs: -------------------------------------------------------------------------------- 1 | //! COPC file writer.
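//! //! A minimal sketch of the write path (mirrors the README example; assumes a local //! `lidar.las` input, with -1/-1 selecting the default node sizes): //! //! ```no_run //! use copc_rs::CopcWriter; //! use las::Reader; //! //! # fn main() -> copc_rs::Result<()> { //! let mut las_reader = Reader::from_path("./lidar.las")?; //! let header = las_reader.header().clone(); //! let num_points = header.number_of_points() as i32; //! let points = las_reader.points().filter_map(las::Result::ok); //! //! let mut copc_writer = CopcWriter::from_path("./lidar.copc.laz", header, -1, -1)?; //! copc_writer.write(points, num_points)?; //! # Ok(()) //! # } //! ```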
2 | 3 | use crate::compressor::CopcCompressor; 4 | use crate::copc::{CopcInfo, Entry, HierarchyPage, OctreeNode, VoxelKey}; 5 | 6 | use las::{Builder, Header}; 7 | 8 | use std::collections::HashMap; 9 | use std::fs::File; 10 | use std::io::{BufWriter, Cursor, Seek, SeekFrom, Write}; 11 | use std::path::Path; 12 | 13 | // enum for point data record format upgrades 14 | enum UpgradePdrf { 15 | From1to6, // upgrades (1=>6) 16 | From3to7, // upgrades (3=>7) 17 | NoUpgrade, // 6, 7 and 8 18 | } 19 | 20 | impl UpgradePdrf { 21 | fn log_string(&self) -> &str { 22 | match self { 23 | UpgradePdrf::From1to6 => "Upgrading LAS PDRF from 1 to 6", 24 | UpgradePdrf::From3to7 => "Upgrading LAS PDRF from 3 to 7", 25 | UpgradePdrf::NoUpgrade => "COPC supports the given PDRF", 26 | } 27 | } 28 | } 29 | 30 | /// COPC file writer 31 | pub struct CopcWriter<'a, W: 'a + Write + Seek> { 32 | is_closed: bool, 33 | start: u64, 34 | // point writer 35 | compressor: CopcCompressor<'a, W>, 36 | header: Header, 37 | // a page of the written entries 38 | hierarchy: HierarchyPage, 39 | min_node_size: i32, 40 | max_node_size: i32, 41 | copc_info: CopcInfo, 42 | // root node in octree, access point for the tree 43 | root_node: OctreeNode, 44 | // a hashmap to store chunks that are not full yet 45 | open_chunks: HashMap<VoxelKey, Cursor<Vec<u8>>>, 46 | } 47 | 48 | impl CopcWriter<'_, BufWriter<File>> { 49 | /// Creates a new COPC-writer for a path, 50 | /// creates a file at that path and wraps it in a BufWriter for you 51 | /// and passes it along to [new] 52 | /// 53 | /// see [new] for usage 54 | /// 55 | /// [new]: Self::new 56 | pub fn from_path<P: AsRef<Path>>( 57 | path: P, 58 | header: Header, 59 | min_size: i32, 60 | max_size: i32, 61 | ) -> crate::Result<Self> { 62 | let copc_ext = Path::new(match path.as_ref().file_stem() { 63 | Some(copc) => copc, 64 | None => return Err(crate::Error::WrongCopcExtension), 65 | }) 66 | .extension(); 67 | 68 | match (copc_ext, path.as_ref().extension()) { 69 | (Some(copc), Some(laz)) => match (&copc.to_str(), &laz.to_str()) { 70 | (Some(copc_str), Some(laz_str)) => { 71 | if &copc_str.to_lowercase() != "copc" || &laz_str.to_lowercase() != "laz" { 72 | return Err(crate::Error::WrongCopcExtension); 73 | } 74 | } 75 | _ => return Err(crate::Error::WrongCopcExtension), 76 | }, 77 | _ => return Err(crate::Error::WrongCopcExtension), 78 | } 79 | 80 | File::create(path) 81 | .map_err(crate::Error::from) 82 | .and_then(|file| CopcWriter::new(BufWriter::new(file), header, min_size, max_size)) 83 | } 84 | } 85 | 86 | /// public API 87 | impl<W: Write + Seek> CopcWriter<'_, W> { 88 | /// Creates a COPC file writer for the write- and seekable `write`, 89 | /// configured with the provided [las::Header]. 90 | /// It is recommended to use [from_path] for writing to a file. 91 | /// 92 | /// The `bounds` field in the `header` is used as the bounds for the octree; 93 | /// the bounds are checked for being normal. 94 | /// 95 | /// `max_size` is the maximal number of [las::Point]s an octree node can hold; 96 | /// any max_size < 1 sets the max_size to [crate::MAX_NODE_SIZE_DEFAULT]; 97 | /// this is a soft limit. 98 | /// 99 | /// `min_size` is the minimal number of [las::Point]s an octree node can hold; 100 | /// any min_size < 1 sets the min_size to [crate::MIN_NODE_SIZE_DEFAULT]; 101 | /// this is a hard limit. 102 | /// 103 | /// `min_size` greater or equal to `max_size` after checking values < 1 104 | /// results in a [crate::Error::InvalidNodeSize] error. 105 | /// 106 | /// 107 | /// This writer is strictly following the LAS 1.4 spec and the COPC spec, 108 | /// which means that
any provided header not compatible with those will lead 109 | /// to an `Err`. 110 | /// That being said, LAS 1.2 headers and PDRFs 1 and 3 are accepted and upgraded to 111 | /// their matching LAS 1.4 versions. 112 | /// GeoTiff CRS VLRs are parsed and written to WKT CRS VLRs. 113 | /// A CRS VLR is __MANDATORY__ and without one an error is returned. 114 | /// 115 | /// [from_path]: Self::from_path 116 | pub fn new(mut write: W, header: Header, min_size: i32, max_size: i32) -> crate::Result<Self> { 117 | let start = write.stream_position()?; 118 | 119 | let min_node_size = if min_size < 1 { 120 | crate::MIN_NODE_SIZE_DEFAULT 121 | } else { 122 | min_size 123 | }; 124 | 125 | let max_node_size = if max_size < 1 { 126 | crate::MAX_NODE_SIZE_DEFAULT 127 | } else { 128 | max_size 129 | }; 130 | 131 | if min_node_size >= max_node_size { 132 | return Err(crate::Error::InvalidNodeSize); 133 | } 134 | 135 | if header.version() != las::Version::new(1, 4) { 136 | log::log!(log::Level::Info, "Old Las version. Upgrading"); 137 | } 138 | 139 | let mut has_wkt_vlr = false; 140 | 141 | // store the vlrs contained in the header for forwarding 142 | let mut forward_vlrs = Vec::with_capacity(header.vlrs().len()); 143 | for vlr in header.vlrs() { 144 | match (vlr.user_id.to_lowercase().as_str(), vlr.record_id) { 145 | ("lasf_projection", 2112) => { 146 | has_wkt_vlr = true; 147 | forward_vlrs.push(vlr.clone()); 148 | } 149 | // not forwarding these vlrs 150 | ("lasf_projection", 34735..=34737) => (), // geo-tiff crs 151 | ("copc", 1 | 1000) => (), 152 | ("laszip encoded", 22204) => (), 153 | ("lasf_spec", 100..355 | 65535) => (), // wave form packet descriptors 154 | // forwarding all other vlrs 155 | _ => forward_vlrs.push(vlr.clone()), 156 | } 157 | } 158 | 159 | // store the evlrs contained in the header for forwarding 160 | let mut forward_evlrs = Vec::with_capacity(header.evlrs().len()); 161 | for evlr in header.evlrs() { 162 | match (evlr.user_id.to_lowercase().as_str(), evlr.record_id) { 163 | ("lasf_projection", 2112) => { 164 | has_wkt_vlr = true; 165 | forward_evlrs.push(evlr.clone()); 166 | } 167 | // not forwarding these vlrs 168 | ("lasf_projection", 34735..=34737) => (), // geo-tiff crs 169 | ("copc", 1 | 1000) => (), // 1 should never be an evlr 170 | ("laszip encoded", 22204) => (), // should never be an evlr 171 | ("lasf_spec", 100..355 | 65535) => (), // waveform data packets 172 | // forwarding all other evlrs 173 | _ => forward_evlrs.push(evlr.clone()), 174 | } 175 | } 176 | 177 | // las version 1.4 says pdrf 6-10 must have a wkt crs 178 | // copc is only valid for las 1.4 and says only pdrf 6-8 is supported 179 | // 180 | // which means that any geotiff crs must be converted to a wkt crs 181 | // 182 | // could just use header.has_wkt_vlr(), but so many las files are wrongly written 183 | // so I don't trust it 184 | // 185 | // ignores any vertical crs that might be stored in geotiff 186 | if !has_wkt_vlr { 187 | let epsg = las_crs::parse_las_crs(&header)?; 188 | let wkt_data = match crs_definitions::from_code(epsg.horizontal) { 189 | Some(wkt) => wkt, 190 | None => return Err(crate::Error::InvalidEPSGCode(epsg.horizontal)), 191 | } 192 | .wkt 193 | .as_bytes() 194 | .to_owned(); 195 | 196 | let mut user_id = [0; 16]; 197 | for (i, c) in "LASF_Projection".as_bytes().iter().enumerate() { 198 | user_id[i] = *c; 199 | } 200 | 201 | let crs_vlr = las::raw::Vlr { 202 | reserved: 0, 203 | user_id, 204 | record_id: 2112, 205 | record_length_after_header: las::raw::vlr::RecordLength::Vlr(wkt_data.len() as u16), 206 |
        // check that the bounds are normal
        let bounds = header.bounds();
        if !(bounds.max.x - bounds.min.x).is_normal()
            || !(bounds.max.y - bounds.min.y).is_normal()
            || !(bounds.max.z - bounds.min.z).is_normal()
        {
            return Err(crate::Error::InvalidBounds(bounds));
        }

        let mut raw_head = header.into_raw()?;

        // mask off the two leftmost bits of the pdrf, which flag compression
        let pdrf = raw_head.point_data_record_format & 0b00111111;
        let upgrade_pdrf = match pdrf {
            1 => {
                let upgrade = UpgradePdrf::From1to6;
                log::log!(log::Level::Info, "{}", upgrade.log_string());
                upgrade
            }
            3 => {
                let upgrade = UpgradePdrf::From3to7;
                log::log!(log::Level::Info, "{}", upgrade.log_string());
                upgrade
            }
            6..=8 => UpgradePdrf::NoUpgrade,
            // pdrfs 0, 2, 4, 5 and >= 9 are not supported by COPC and cannot be upgraded
            0 | 2 | 4..=5 | 9.. => {
                return Err(las::Error::InvalidPointFormat(las::point::Format::new(
                    raw_head.point_data_record_format,
                )?))?;
            }
        };

        // adjust and clear some fields
        raw_head.version = las::Version::new(1, 4);
        raw_head.point_data_record_format += match upgrade_pdrf {
            UpgradePdrf::NoUpgrade => 0,
            UpgradePdrf::From1to6 => 5,
            UpgradePdrf::From3to7 => 4,
        };
        raw_head.point_data_record_format |= 0b11000000; // make sure the compression bits are set
        raw_head.point_data_record_length += match upgrade_pdrf {
            UpgradePdrf::NoUpgrade => 0,
            _ => 2, // the upgraded point records are two bytes longer
        };
        raw_head.global_encoding |= 0b10000; // make sure the wkt crs bit is set
        raw_head.number_of_point_records = 0;
        raw_head.number_of_points_by_return = [0; 5];
        raw_head.large_file = None;
        raw_head.evlr = None;
        raw_head.padding = vec![];

        let mut software_buffer = [0_u8; 32];
        for (i, byte) in format!("COPC-rs v{}", crate::VERSION).bytes().enumerate() {
            software_buffer[i] = byte;
        }
        raw_head.generating_software = software_buffer;

        // start building a real header from the raw header
        let mut builder = Builder::new(raw_head)?;
        // add a blank COPC-vlr as the first vlr
        builder.vlrs.push(CopcInfo::default().into_vlr()?);

        // create the laz vlr
        let point_format = builder.point_format;
        let mut laz_items = laz::laszip::LazItemRecordBuilder::new();
        laz_items.add_item(laz::LazItemType::Point14);
        if point_format.has_color {
            if point_format.has_nir {
                laz_items.add_item(laz::LazItemType::RGBNIR14);
            } else {
                laz_items.add_item(laz::LazItemType::RGB14);
            }
        }
        if point_format.extra_bytes > 0 {
            laz_items.add_item(laz::LazItemType::Byte14(point_format.extra_bytes));
        }

        let laz_vlr = laz::LazVlrBuilder::new(laz_items.build())
            .with_variable_chunk_size()
            .build();
        let mut cursor = Cursor::new(Vec::<u8>::new());
        laz_vlr.write_to(&mut cursor)?;
        let laz_vlr = las::Vlr {
            user_id: laz::LazVlr::USER_ID.to_owned(),
            record_id: laz::LazVlr::RECORD_ID,
            description: laz::LazVlr::DESCRIPTION.to_owned(),
            data: cursor.into_inner(),
        };
        builder.vlrs.push(laz_vlr);
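        // Sketch of the item list the builder above produces for an (assumed)
        // PDRF 7 cloud with two extra bytes per point:
        //
        //     let mut items = laz::laszip::LazItemRecordBuilder::new();
        //     items.add_item(laz::LazItemType::Point14);
        //     items.add_item(laz::LazItemType::RGB14);
        //     items.add_item(laz::LazItemType::Byte14(2));
        //     // a PDRF 8 cloud would use RGBNIR14 instead of RGB14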
        // add the forwarded vlrs
        builder.vlrs.extend(forward_vlrs);
        builder.evlrs.extend(forward_evlrs);
        // the EPT-hierarchy evlr is not yet added

        let header = builder.into_header()?;

        // write the header and vlrs
        // this is just to reserve the space
        header.write_to(&mut write)?;

        // fit a cube around the bounds, centered on the bounds
        let center_point = las::Vector {
            x: (bounds.min.x + bounds.max.x) / 2.,
            y: (bounds.min.y + bounds.max.y) / 2.,
            z: (bounds.min.z + bounds.max.z) / 2.,
        };
        let halfsize = (center_point.x - bounds.min.x)
            .max((center_point.y - bounds.min.y).max(center_point.z - bounds.min.z));

        let mut root_node = OctreeNode::new();

        root_node.bounds = las::Bounds {
            min: las::Vector {
                x: center_point.x - halfsize,
                y: center_point.y - halfsize,
                z: center_point.z - halfsize,
            },
            max: las::Vector {
                x: center_point.x + halfsize,
                y: center_point.y + halfsize,
                z: center_point.z + halfsize,
            },
        };
        root_node.entry.key.level = 0;
        root_node.entry.offset = write.stream_position()?;

        let copc_info = CopcInfo {
            center: center_point,
            halfsize,
            spacing: 0.,
            root_hier_offset: 0,
            root_hier_size: 0,
            gpstime_minimum: f64::MAX,
            gpstime_maximum: f64::MIN,
        };

        Ok(CopcWriter {
            is_closed: false,
            start,
            compressor: CopcCompressor::new(write, header.laz_vlr()?)?,
            header,
            hierarchy: HierarchyPage { entries: vec![] },
            min_node_size,
            max_node_size,
            copc_info,
            root_node,
            open_chunks: HashMap::default(),
        })
    }
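    // Worked example of the cube fit in `new` (illustrative numbers): bounds
    // (0, 0, 0)..(10, 10, 4) give center (5, 5, 2) and halfsize
    // max(5, 5, 2) = 5, so the root node covers the cube (0, 0, -3)..(10, 10, 7)
    // and every cell stays cubic when subdivided.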
    /// Writes anything that implements [IntoIterator]
    /// over [las::Point] to the COPC [Write].
    /// Only one iterator can be written, so a call to [Self::write] closes the writer.
    ///
    /// `num_points` is the number of points in the iterator;
    /// it is used for stochastically filling the nodes.
    /// If `num_points` is < 1 a greedy filling strategy is used;
    /// this should only be used if the passed iterator is randomly ordered,
    /// which most of the time is not the case.
    /// If `num_points` is not equal to the actual number of points in the
    /// iterator, all points will still be written, but the point distribution
    /// in a node will not be representative of the entire distribution over that node,
    /// i.e. only full-resolution queries will look right, which means the point cloud
    /// will look weird in any viewer which utilizes the COPC information.
    ///
    /// Returns an `Err`([crate::Error::ClosedWriter]) if the writer has already been closed.
    ///
    /// If a point is outside the copc `bounds` or does not match the
    /// [las::point::Format] of the writer's header, an `Err` is returned;
    /// [crate::PointAddError::PointAttributesDoNotMatch] takes precedence over
    /// [crate::PointAddError::PointNotInBounds].
    /// All points which both match the point format and are inside the bounds
    /// are written regardless.
    ///
    /// Lastly [Self::close] is called. If closing fails a [crate::Error] is returned and
    /// the state of the [Write] is undefined.
    ///
    /// If all points match the format, are inside the bounds and [Self::close] is successful, `Ok(())` is returned.
    pub fn write<D: IntoIterator<Item = las::Point>>(
        &mut self,
        data: D,
        num_points: i32,
    ) -> crate::Result<()> {
        if self.is_closed {
            return Err(crate::Error::ClosedWriter);
        }

        let result = if num_points < self.max_node_size + self.min_node_size {
            // greedy filling strategy
            self.write_greedy(data)
        } else {
            // stochastic filling strategy
            self.write_stochastic(data, num_points as usize)
        };

        self.close()?;
        result
    }

    /// Whether this writer is closed or not
    pub fn is_closed(&self) -> bool {
        self.is_closed
    }

    /// Maximal number of points a node can hold (soft limit)
    pub fn max_node_size(&self) -> i32 {
        self.max_node_size
    }

    /// Minimal number of points a node can hold (hard limit)
    pub fn min_node_size(&self) -> i32 {
        self.min_node_size
    }

    /// This writer's header; some fields are updated when the writer is closed
    pub fn header(&self) -> &Header {
        &self.header
    }

    /// This writer's EPT hierarchy
    pub fn hierarchy_entries(&self) -> &HierarchyPage {
        &self.hierarchy
    }

    /// This writer's COPC info
    pub fn copc_info(&self) -> &CopcInfo {
        &self.copc_info
    }
}

/// private functions
impl<W: Write + Seek> CopcWriter<'_, W> {
    /// Greedy strategy for writing points
    fn write_greedy<D: IntoIterator<Item = las::Point>>(&mut self, data: D) -> crate::Result<()> {
        let mut invalid_points = Ok(());

        for p in data.into_iter() {
            if !p.matches(self.header.point_format()) {
                invalid_points = Err(crate::Error::InvalidPoint(
                    crate::PointAddError::PointAttributesDoNotMatch(*self.header.point_format()),
                ));
                continue;
            }
            if !bounds_contains_point(&self.root_node.bounds, &p) {
                if invalid_points.is_ok() {
                    invalid_points = Err(crate::Error::InvalidPoint(
                        crate::PointAddError::PointNotInBounds,
                    ));
                }
                continue;
            }

            self.add_point_greedy(p)?;
        }
        invalid_points
    }
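    // Worked example of the level estimate in `write_stochastic` below
    // (illustrative numbers): with num_points = 1_048_576 and max_node_size = 4096,
    //
    //     l = ceil((log2(3 * 1_048_576 / 4096 + 1) - 2) / 2)
    //       = ceil((log2(769) - 2) / 2)
    //       ≈ ceil((9.59 - 2) / 2)
    //       = 4, the depth down to which candidate nodes are created.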
    /// Stochastic strategy for writing points
    fn write_stochastic<D: IntoIterator<Item = las::Point>>(
        &mut self,
        data: D,
        num_points: usize,
    ) -> crate::Result<()> {
        let mut invalid_points = Ok(());

        // the number of expected levels in the copc hierarchy,
        // assuming that the lidar scan covers a much bigger horizontal span than vertical,
        // effectively dividing every level into 4 instead of 8
        // (removing this assumption would lead to a division by 3 instead of 2, and thus fewer expected levels)
        //
        // each level then holds 4^i * max_points_per_node points
        //
        // solve for l:
        // num_points / sum_i=0^l 4^i = max_points_per_node
        //
        // sum_i=0^l 4^i = 1/3 (4^(l+1) - 1)
        // =>
        // l = (log_2(3 * num_points / max_points_per_node + 1) - 2) / 2
        let expected_levels = ((((3 * num_points) as f64 / self.max_node_size as f64 + 1.)
            .log2()
            - 2.)
            / 2.)
            .ceil() as usize;

        for (i, p) in data.into_iter().enumerate() {
            if !p.matches(self.header.point_format()) {
                invalid_points = Err(crate::Error::InvalidPoint(
                    crate::PointAddError::PointAttributesDoNotMatch(*self.header.point_format()),
                ));
                continue;
            }
            if !bounds_contains_point(&self.root_node.bounds, &p) {
                if invalid_points.is_ok() {
                    invalid_points = Err(crate::Error::InvalidPoint(
                        crate::PointAddError::PointNotInBounds,
                    ));
                }
                continue;
            }

            // if the given num_points was smaller than the actual number of points
            // and we have passed that number, revert to the greedy strategy
            if num_points <= i {
                self.add_point_greedy(p)?;
            } else {
                self.add_point_stochastic(p, expected_levels)?;
            }
        }
        invalid_points
    }

    /// Close is called after the last point is written
    fn close(&mut self) -> crate::Result<()> {
        if self.is_closed {
            return Err(crate::Error::ClosedWriter);
        }
        if self.header.number_of_points() < 1 {
            return Err(crate::Error::EmptyCopcFile);
        }

        // write the unclosed chunks, order does not matter
        for (key, chunk) in self.open_chunks.drain() {
            let inner = chunk.into_inner();
            if inner.is_empty() {
                continue;
            }
            let (chunk_table_entry, chunk_offset) = self.compressor.compress_chunk(inner)?;
            self.hierarchy.entries.push(Entry {
                key,
                offset: chunk_offset,
                byte_size: chunk_table_entry.byte_count as i32,
                point_count: chunk_table_entry.point_count as i32,
            })
        }

        self.compressor.done()?;

        let start_of_first_evlr = self.compressor.get_mut().stream_position()?;

        let raw_evlrs: Vec<las::Result<las::raw::Vlr>> = self
            .header
            .evlrs()
            .iter()
            .map(|evlr| evlr.clone().into_raw(true))
            .collect();

        // write the copc hierarchy evlr first
        self.hierarchy
            .clone()
            .into_evlr()?
            .into_raw(true)?
            .write_to(self.compressor.get_mut())?;
        // write the rest of the evlrs
        for raw_evlr in raw_evlrs {
            raw_evlr?.write_to(self.compressor.get_mut())?;
        }

        // rewrite the header with the evlr info filled in
        self.compressor
            .get_mut()
            .seek(SeekFrom::Start(self.start))?;
        self.header.clone().into_raw().and_then(|mut raw_header| {
            if let Some(e) = raw_header.evlr.as_mut() {
                e.start_of_first_evlr = start_of_first_evlr;
                e.number_of_evlrs += 1;
            } else {
                raw_header.evlr = Some(las::raw::header::Evlr {
                    start_of_first_evlr,
                    number_of_evlrs: 1,
                });
            }
            raw_header.write_to(self.compressor.get_mut())
        })?;
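        // Layout sketch with assumed offsets: if the chunk table ends at byte
        // 10_000, then start_of_first_evlr = 10_000, the 60-byte EVLR header of
        // the hierarchy evlr spans bytes 10_000..10_060, and its payload (the
        // root hierarchy page) starts at 10_060, which is the value stored in
        // root_hier_offset below.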
        // update the copc info vlr and write it
        self.copc_info.spacing =
            2. * self.copc_info.halfsize / (self.root_node.entry.point_count as f64);
        self.copc_info.root_hier_offset = start_of_first_evlr + 60; // the evlr header is 60 bytes
        self.copc_info.root_hier_size = self.hierarchy.byte_size();

        self.copc_info
            .clone()
            .into_vlr()?
            .into_raw(false)?
            .write_to(self.compressor.get_mut())?;

        self.compressor
            .get_mut()
            .seek(SeekFrom::Start(self.start))?;

        self.is_closed = true;
        Ok(())
    }

    // find the first non-full octree node that contains the point
    // and add the point to it; if the node now is full,
    // add the node to the hierarchy page and write its chunk to the file
    fn add_point_greedy(&mut self, point: las::Point) -> crate::Result<()> {
        self.header.add_point(&point);

        // pdrfs 6-8 always carry gps time, so the unwraps are safe
        if point.gps_time.unwrap() < self.copc_info.gpstime_minimum {
            self.copc_info.gpstime_minimum = point.gps_time.unwrap();
        }
        if point.gps_time.unwrap() > self.copc_info.gpstime_maximum {
            self.copc_info.gpstime_maximum = point.gps_time.unwrap();
        }

        let mut node_key = None;
        let mut write_chunk = false;

        let root_bounds = self.root_node.bounds;

        // starting from the root, walk through the octree
        // to find the correct node to add the point to
        let mut nodes_to_check = vec![&mut self.root_node];
        while let Some(node) = nodes_to_check.pop() {
            if !bounds_contains_point(&node.bounds, &point) {
                // the point does not belong to this subtree
                continue;
            }
            if node.is_full(self.max_node_size) {
                // the point belongs to this subtree, but the node is full,
                // so the node's children need to be pushed onto the stack
                if node.children.is_empty() {
                    // the node does not have any children yet, so add them
                    let child_keys = node.entry.key.children();
                    for key in child_keys {
                        let child_bounds = key.bounds(&root_bounds);
                        node.children.push(OctreeNode {
                            entry: Entry {
                                key,
                                offset: 0,
                                byte_size: 0,
                                point_count: 0,
                            },
                            bounds: child_bounds,
                            children: Vec::with_capacity(8),
                        })
                    }
                }
                // push the children onto the stack
                for child in node.children.iter_mut() {
                    nodes_to_check.push(child);
                }
            } else {
                // we've found the first non-full node that contains the point
                node_key = Some(node.entry.key.clone());
                node.entry.point_count += 1;

                // check if the node is full now
                write_chunk = node.is_full(self.max_node_size);
                break;
            }
        }
        let Some(node_key) = node_key else {
            return Err(crate::Error::PointNotAddedToAnyNode);
        };

        let raw_point = point.into_raw(self.header.transforms())?;

        // append the point to the node's open chunk, creating the chunk if needed
        let buffer = self
            .open_chunks
            .entry(node_key.clone())
            .or_insert_with(|| Cursor::new(vec![]));
        raw_point.write_to(buffer, self.header.point_format())?;

        if write_chunk {
            let chunk = self.open_chunks.remove(&node_key).unwrap();
            let (chunk_table_entry, chunk_offset) =
                self.compressor.compress_chunk(chunk.into_inner())?;
            self.hierarchy.entries.push(Entry {
                key: node_key,
                offset: chunk_offset,
                byte_size: chunk_table_entry.byte_count as i32,
                point_count: chunk_table_entry.point_count as i32,
            });
        }
        Ok(())
    }
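    // Walk-through of the greedy strategy above (hypothetical max_node_size = 2):
    // the first two points land in the root; the second fills it, so the root
    // chunk is compressed and its Entry recorded. The third point finds the
    // root full, creates its eight level-1 children and lands in the child
    // whose cube contains it.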
    fn add_point_stochastic(
        &mut self,
        point: las::Point,
        expected_levels: usize,
    ) -> crate::Result<()> {
        // strategy: find the deepest node that contains this point,
        // choose at (weighted) random this node or one of its parents,
        // add the point to that node
        // and write full nodes to the file

        let root_bounds = self.root_node.bounds;

        let mut node_candidates = vec![];

        // starting from the root, walk through the octree
        let mut nodes_to_check = vec![&mut self.root_node];
        while let Some(node) = nodes_to_check.pop() {
            if !bounds_contains_point(&node.bounds, &point) {
                // the point does not belong to this subtree
                continue;
            }

            if node.children.is_empty() && node.entry.key.level < expected_levels as i32 {
                let child_keys = node.entry.key.children();
                for key in child_keys {
                    let child_bounds = key.bounds(&root_bounds);
                    node.children.push(OctreeNode {
                        entry: Entry {
                            key,
                            offset: 0,
                            byte_size: 0,
                            point_count: 0,
                        },
                        bounds: child_bounds,
                        children: Vec::with_capacity(8),
                    })
                }
            }
            if !node.is_full(self.max_node_size) {
                node_candidates.push(&mut node.entry);
            }
            // push the children onto the stack
            for child in node.children.iter_mut() {
                nodes_to_check.push(child);
            }
        }

        if node_candidates.is_empty() {
            // a new level is needed, revert to the greedy approach
            return self.add_point_greedy(point);
        }

        // weighted by the inverse of the area the nodes cover (should volume be used?)
        let chosen_index = get_random_weighted_index(&node_candidates);

        let chosen_entry = &mut node_candidates[chosen_index];

        chosen_entry.point_count += 1;

        let write_chunk = chosen_entry.point_count > self.max_node_size;

        let node_key = chosen_entry.key.clone();

        self.header.add_point(&point);

        // pdrfs 6-8 always carry gps time, so the unwraps are safe
        if point.gps_time.unwrap() < self.copc_info.gpstime_minimum {
            self.copc_info.gpstime_minimum = point.gps_time.unwrap();
        }
        if point.gps_time.unwrap() > self.copc_info.gpstime_maximum {
            self.copc_info.gpstime_maximum = point.gps_time.unwrap();
        }

        let raw_point = point.into_raw(self.header.transforms())?;

        // append the point to the node's open chunk, creating the chunk if needed
        let buffer = self
            .open_chunks
            .entry(node_key.clone())
            .or_insert_with(|| Cursor::new(vec![]));
        raw_point.write_to(buffer, self.header.point_format())?;

        if write_chunk {
            let chunk = self.open_chunks.remove(&node_key).unwrap();
            let (chunk_table_entry, chunk_offset) =
                self.compressor.compress_chunk(chunk.into_inner())?;
            self.hierarchy.entries.push(Entry {
                key: node_key,
                offset: chunk_offset,
                byte_size: chunk_table_entry.byte_count as i32,
                point_count: chunk_table_entry.point_count as i32,
            });
        }
        Ok(())
    }
}
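// Worked example for the weighting below (illustrative levels): candidate
// entries at levels [0, 1, 2] cover relative areas [1, 0.25, 0.0625]; the
// inverse areas [1, 4, 16] sum to 21, so the normalized weights become
// [1/21, 4/21, 16/21] and deeper (smaller) nodes are chosen more often.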
fn get_random_weighted_index(entries: &[&mut Entry]) -> usize {
    // calculate the weights
    let levels: Vec<i32> = entries.iter().map(|e| e.key.level).collect();
    let zero_level = levels[0];

    // for each level down, the side lengths are halved, i.e. the area is quartered
    let areas: Vec<f64> = levels
        .iter()
        .map(|l| (0.25_f64).powi(l - zero_level))
        .collect();
    // total inverse area
    let inv_sum = areas.iter().fold(0., |acc, a| acc + 1. / a);

    let weights: Vec<f64> = areas.iter().map(|a| (1. / a) / inv_sum).collect();

    // sample an index from the cumulative distribution of the weights
    let random = fastrand::f64();
    let mut chosen_index = weights.len() - 1;

    let mut cumulative = 0.;
    for (i, weight) in weights.iter().enumerate() {
        cumulative += weight;
        if random < cumulative {
            chosen_index = i;
            break;
        }
    }
    chosen_index
}

impl<'a, W: 'a + Write + Seek> Drop for CopcWriter<'a, W> {
    fn drop(&mut self) {
        if !self.is_closed {
            // this can only happen if the writer is created but no points are written,
            // or if something goes wrong while writing
            self.close()
                .expect("Error when closing the writer on drop. No points written.");
        }
    }
}

#[inline]
fn bounds_contains_point(b: &las::Bounds, p: &las::Point) -> bool {
    !(b.max.x < p.x
        || b.max.y < p.y
        || b.max.z < p.z
        || b.min.x > p.x
        || b.min.y > p.y
        || b.min.z > p.z)
}
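
// A minimal sanity check for the helper above (added sketch, not part of the
// original test suite; assumes las::Point implements Default).
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn bounds_contains_point_is_inclusive() {
        let bounds = las::Bounds {
            min: las::Vector {
                x: 0.,
                y: 0.,
                z: 0.,
            },
            max: las::Vector {
                x: 1.,
                y: 1.,
                z: 1.,
            },
        };

        // a point exactly on the max corner still counts as inside
        let mut point = las::Point::default();
        point.x = 1.;
        point.y = 1.;
        point.z = 1.;
        assert!(bounds_contains_point(&bounds, &point));

        // nudging one coordinate outside flips the result
        point.x = 1.5;
        assert!(!bounds_contains_point(&bounds, &point));
    }
}
--------------------------------------------------------------------------------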