├── .github └── workflows │ └── ci.yaml ├── .gitignore ├── .vscode └── tasks.json ├── CHANGELOG.md ├── Cargo.toml ├── LICENSE ├── README.md ├── clippy.toml ├── src ├── bitpack.rs ├── blob.rs ├── bounds.rs ├── bs_read.rs ├── bs_write.rs ├── crc32.rs ├── cv_section.rs ├── date_time.rs ├── e57_reader.rs ├── e57_writer.rs ├── error.rs ├── extension.rs ├── header.rs ├── image_writer.rs ├── images.rs ├── lib.rs ├── limits.rs ├── packet.rs ├── paged_reader.rs ├── paged_writer.rs ├── pc_reader_raw.rs ├── pc_reader_simple.rs ├── pc_writer.rs ├── point.rs ├── pointcloud.rs ├── queue_reader.rs ├── record.rs ├── root.rs ├── transform.rs └── xml.rs ├── testdata ├── bunnyDouble.e57 ├── bunnyFloat.e57 ├── bunnyInt19.e57 ├── bunnyInt21.e57 ├── bunnyInt24.e57 ├── bunnyInt32.e57 ├── castle.jpg ├── corrupt_crc.e57 ├── cpp_generator │ ├── .gitignore │ ├── CMakeLists.txt │ ├── README.md │ ├── build_and_run.ps1 │ ├── main.cpp │ └── vcpkg.json ├── empty.e57 ├── empty_pc.e57 ├── float_intensity_without_min_max.e57 ├── integer_intensity.e57 ├── las2e57_no_images_tag.e57 ├── no_ext_namespace.e57 ├── original_guids.e57 ├── read_error.e57 ├── scaled_integer_intensity.e57 ├── square.png ├── tinyCartesianFloatRgb.e57 ├── tiny_pc_and_images.e57 ├── tiny_pc_with_extension.e57 └── tiny_spherical.e57 ├── tests ├── extensions.rs ├── reader_tests.rs └── writer_tests.rs └── tools ├── e57-check-crc ├── Cargo.toml └── src │ └── main.rs ├── e57-extract-scan-info ├── Cargo.toml └── src │ └── main.rs ├── e57-extract-xml ├── Cargo.toml └── src │ └── main.rs ├── e57-from-xyz ├── Cargo.toml └── src │ └── main.rs ├── e57-to-image ├── Cargo.toml └── src │ └── main.rs ├── e57-to-laz ├── Cargo.toml └── src │ └── main.rs ├── e57-to-pano ├── Cargo.toml └── src │ └── main.rs ├── e57-to-xyz ├── Cargo.toml └── src │ └── main.rs └── e57-unpack ├── Cargo.toml └── src └── main.rs /.github/workflows/ci.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | on: [push, 
pull_request, workflow_dispatch] 3 | jobs: 4 | linux_x86_64: 5 | name: Linux x86-64 6 | runs-on: ubuntu-latest 7 | steps: 8 | - name: Checkout 9 | uses: actions/checkout@v4 10 | - name: Update Rust 11 | run: rustup toolchain install stable --profile minimal --no-self-update 12 | - name: Enable Rust Caching 13 | uses: Swatinem/rust-cache@v2 14 | - name: Release Build 15 | run: cargo build --release --all 16 | - name: Execute Tests 17 | run: cargo test --release --all 18 | - name: Run Clippy 19 | run: cargo clippy --release --all --all-targets --all-features --locked -- -D warnings 20 | - name: Check Formatting 21 | run: cargo fmt --all -- --check 22 | - name: Check Docs 23 | run: RUSTDOCFLAGS="-Dwarnings" cargo doc --package e57 24 | linux_aarch64: 25 | name: Linux AArch64 26 | runs-on: ubuntu-latest 27 | steps: 28 | - name: Checkout 29 | uses: actions/checkout@v4 30 | - name: Update Rust 31 | run: rustup toolchain install stable --profile minimal --no-self-update 32 | - name: Install Cargo Binary Install 33 | run: curl -L --proto '=https' --tlsv1.2 -sSf https://raw.githubusercontent.com/cargo-bins/cargo-binstall/main/install-from-binstall-release.sh | bash 34 | - name: Install Cargo Cross 35 | run: cargo binstall cross --no-confirm 36 | - name: Enable Rust Caching 37 | uses: Swatinem/rust-cache@v2 38 | - name: Release Build 39 | run: cross build --release --all --target aarch64-unknown-linux-gnu 40 | - name: Execute Tests 41 | run: cross test --release --all --target aarch64-unknown-linux-gnu 42 | windows_x86_64: 43 | name: Windows x86-64 44 | runs-on: windows-latest 45 | steps: 46 | - name: Checkout 47 | uses: actions/checkout@v4 48 | - name: Update Rust 49 | run: rustup toolchain install stable --profile minimal --no-self-update 50 | - name: Enable Rust Caching 51 | uses: Swatinem/rust-cache@v2 52 | - name: Release Build 53 | run: cargo build --release --all 54 | - name: Execute Tests 55 | run: cargo test --release --all 56 | mac_x86_64: 57 | name: MacOS x86-64 
58 | runs-on: macos-latest 59 | steps: 60 | - name: Checkout 61 | uses: actions/checkout@v4 62 | - name: Update Rust 63 | run: rustup toolchain install stable --profile minimal --no-self-update 64 | - name: Install x64 target 65 | run: rustup target add x86_64-apple-darwin 66 | - name: Enable Rust Caching 67 | uses: Swatinem/rust-cache@v2 68 | - name: Release Build 69 | run: cargo build --release --all --target x86_64-apple-darwin 70 | - name: Execute Tests 71 | run: cargo test --release --all --target x86_64-apple-darwin 72 | mac_aarch64: 73 | name: MacOS AArch64 74 | runs-on: macos-latest 75 | steps: 76 | - name: Checkout 77 | uses: actions/checkout@v4 78 | - name: Update Rust 79 | run: rustup toolchain install stable --profile minimal --no-self-update 80 | - name: Enable Rust Caching 81 | uses: Swatinem/rust-cache@v2 82 | - name: Release Build 83 | run: cargo build --release --all --target aarch64-apple-darwin 84 | - name: Execute Tests 85 | run: cargo test --release --all --target aarch64-apple-darwin 86 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | /Cargo.lock 3 | /todo.txt 4 | *.pdf 5 | *.profraw 6 | /cpp 7 | -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0.0", 3 | "tasks": [ 4 | { 5 | "label": "Build", 6 | "command": "cargo", 7 | "args": [ 8 | "build", 9 | "--all" 10 | ], 11 | "problemMatcher": [ 12 | "$rustc" 13 | ] 14 | }, 15 | { 16 | "label": "Format", 17 | "command": "cargo", 18 | "args": [ 19 | "fmt", 20 | "--all" 21 | ], 22 | "problemMatcher": [ 23 | "$rustc" 24 | ] 25 | }, 26 | { 27 | "label": "Clippy", 28 | "command": "cargo", 29 | "args": [ 30 | "clippy", 31 | "--all", 32 | "--locked", 33 | "--all-targets", 34 | "--all-features" 35 | ], 36 | 
"problemMatcher": [ 37 | "$rustc" 38 | ] 39 | }, 40 | { 41 | "label": "Docs", 42 | "command": "cargo", 43 | "args": [ 44 | "doc", 45 | "--all" 46 | ], 47 | "problemMatcher": [ 48 | "$rustc" 49 | ] 50 | }, 51 | { 52 | "label": "Test", 53 | "command": "cargo", 54 | "args": [ 55 | "test", 56 | "--all" 57 | ], 58 | "problemMatcher": [ 59 | "$rustc" 60 | ] 61 | }, 62 | { 63 | "label": "Full Check", 64 | "dependsOrder": "sequence", 65 | "dependsOn": [ 66 | "Format", 67 | "Clippy", 68 | "Docs", 69 | "Build", 70 | "Test" 71 | ], 72 | "problemMatcher": [] 73 | }, 74 | { 75 | "label": "Clean Coverage Data", 76 | "type": "shell", 77 | "command": "rm", 78 | "args": [ 79 | "*.profraw" 80 | ], 81 | "problemMatcher": [] 82 | }, 83 | { 84 | "label": "Clean Coverage Report", 85 | "type": "shell", 86 | "command": "rm", 87 | "args": [ 88 | "target/coverage_*", 89 | "-R" 90 | ], 91 | "problemMatcher": [] 92 | }, 93 | { 94 | "label": "Measure Coverage", 95 | "command": "cargo", 96 | "args": [ 97 | "test", 98 | "--all" 99 | ], 100 | "options": { 101 | "env": { 102 | "CARGO_INCREMENTAL": "0", 103 | "RUSTFLAGS": "-Cinstrument-coverage", 104 | "LLVM_PROFILE_FILE": "%m.profraw" 105 | } 106 | }, 107 | "problemMatcher": [ 108 | "$rustc" 109 | ] 110 | }, 111 | { 112 | "label": "Export Coverage", 113 | "command": "grcov", 114 | "args": [ 115 | ".", 116 | "--binary-path", 117 | "target/debug", 118 | "--source-dir", 119 | "src", 120 | "--output-type", 121 | "html", 122 | "--output-path", 123 | "target/coverage_html" 124 | ], 125 | "problemMatcher": [] 126 | }, 127 | { 128 | "label": "Show Coverage", 129 | "type": "shell", 130 | "command": "target/coverage_html/index.html", 131 | "problemMatcher": [] 132 | }, 133 | { 134 | "label": "Code Coverage", 135 | "dependsOrder": "sequence", 136 | "dependsOn": [ 137 | "Clean Coverage Data", 138 | "Clean Coverage Report", 139 | "Measure Coverage", 140 | "Export Coverage", 141 | "Show Coverage" 142 | ], 143 | "problemMatcher": [] 144 | } 145 | ] 146 | } 
-------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | All notable changes to this project will be documented in this file. 4 | 5 | ## [0.11.10] - 2025-03-26 6 | 7 | - Fixed simple reader bug for case where floating point records did not have the optional minimum and maximum values 8 | - Avoid writing invalid empty or partial limits for intensity and color (all fields are required according to the spec) 9 | 10 | ## [0.11.9] - 2025-02-17 11 | 12 | - Implemented fallback for missing pixel sizes in spherical images 13 | 14 | ## [0.11.8] - 2025-02-16 15 | 16 | - Allow reading of unknown custom point attributes without extension namespace. 17 | - Added example application to convert structured scans to spherical 360 degree panorama PNG images. 18 | - Added example application to convert structured scans to planar PNG images. 19 | - Renamed some of the example applications to have consistent names starting with `e57-*`. 20 | - Added Linux ARM 64bit CI builds and tests. 21 | - Added MacOS CI builds and tests. 22 | - Some minor documentation improvements. 23 | 24 | ## [0.11.7] - 2024-10-22 25 | 26 | - Fixed two bugs that prevented intensity limits from being read correctly. 27 | - Made color and intensity normalization optional (still enabled by default). 28 | - Better intensity and color normalization using the proper limits of the point cloud. 29 | The library was using the type ranges before, which is now only a fallback. 30 | - Allow setting intensity and color limits manually when writing point clouds. 31 | - Improved documentation related to color and intensity topics. 32 | 33 | ## [0.11.6] - 2024-10-07 34 | 35 | - Fixed reader bug that occured for some cases when a point cloud contained values where min=max. 
36 | See issue #12 for more details: https://github.com/cry-inc/e57/issues/12 37 | - Added some convenience helpers for getting Cartesian bounds. 38 | - Some very minor documentation improvements. 39 | 40 | ## [0.11.5] - 2024-08-27 41 | 42 | - Fixed critical writer bug that occurred when the compressed vector 43 | section header and the data offset were not in the same page. 44 | In such cases the data offset was off by 4 bytes 45 | (which is the size of the CRC32 checksum). 46 | - Fixed potential alignment bug in writer when doing early outs while writing points to disk. 47 | - Updated example code to use the latest version of the LAS crate. 48 | 49 | ## [0.11.4] - 2024-08-17 50 | 51 | - Fixed critical writer bug that could produce invalid E57 files. 52 | This occurred when point attribute sizes were not full bytes, 53 | for example intensity integer values between 0 and 2047. 54 | In other cases the file was valid, but contained wrong values for such attributes. 55 | - Fixed minor corner case in reader when a data packet did not contain a full point. 56 | 57 | ## ~~[0.11.3]~~ - 2024-08-17 58 | 59 | - **This version was yanked from crates.io and is no longer available!** 60 | - Broken fix for critical writer bug that could produce corrupt E57 files. 61 | 62 | ## [0.11.2] - 2024-06-02 63 | 64 | - Updated `roxmltree` dependency to version 0.20. 65 | 66 | ## [0.11.1] - 2024-04-22 67 | 68 | - Fixed typo in `intensityMaximum` XML tag when writing E57 files. 69 | - Added missing support for non-integer color and intensity limits when writing E57 files. 70 | 71 | ## [0.11.0] - 2024-04-11 72 | 73 | - Breaking Change: Fixed typo in public API to register E57 extensions. 74 | - Fixed reading E57 files without data3D or images2D tags in XML section. 75 | - Added public API to write and read arbitrary blobs for E57 extensions. 76 | - Added public API to allow XML customization for E57 extensions. 77 | - Extended documentation and example code for E57 extensions. 
78 | - New flag for e57-unpack tool to extract only images and skip point data (thx Graham!) 79 | 80 | ## [0.10.5] - 2024-03-18 81 | 82 | - Fixed handling of integer values when min and max values are equal. 83 | - Very minor documentation improvements. 84 | - Enabled and fixed additional Clippy lints. 85 | - Deleted some unused code from paged reader. 86 | 87 | ## [0.10.4] - 2024-02-22 88 | 89 | - Smaller performance improvements for reading E57 files. 90 | - Fixed paged writer boundary crossing errors (thx nil-vr!) 91 | - Fixed alignment issues after writing image blob sections (thx nil-vr!) 92 | 93 | ## [0.10.3] - 2023-12-06 94 | 95 | - Updated `roxmltree` dependency to 0.19, which removes the indirect dependency on `xmlparser`. 96 | - Fixed handling of integers and scaled integer values without explicit min and max values. 97 | - Fixed handling of big integer and scaled integer values (avoid i64 overflows). 98 | - Allow bigger integer ranges in the simple iterator for invalid state values. 99 | - Make simple iterator more robust against weird color and intensity values. 100 | It will now use zero values as a fallback in case a value cannot be mapped to a unit float. 101 | 102 | ## [0.10.2] - 2023-11-08 103 | 104 | - Fixed bug when converting Cartesian to spherical coordinates. 105 | The code used `atan2(x, y)` instead of `atan2(y, x)` which flipped the data horizontally. 106 | This problem was not detected since the unit tests were too simple. 107 | They have now been extended to capture this issue. 108 | 109 | ## [0.10.1] - 2023-11-03 110 | 111 | - Added missing support for original GUIDs member of point clouds. 112 | The breaking API changes for this feature were already part of the last release. 113 | - Allow access to the E57 library version field when reading E57 files. 114 | 115 | ## [0.10.0] - 2023-10-13 116 | 117 | - Breaking Change: Made GUIDs for point clouds and images optional. 118 | This required changes in the corresponding public structs. 
119 | The spec says the GUID for both is required, but the C++ implementation allows omitting it. 120 | Some software (e.g. Matterport) is generating files without them, 121 | so we need to make it optional to stay compatible and read these files. 122 | When creating E57 files, the library still enforces setting the GUIDs. 123 | - Breaking Change: Prepared structs for missing original GUIDs. 124 | This feature was missing and was prepared now to avoid more breaking changes later. 125 | It's not yet implemented and can be added later as a non-breaking change. 126 | 127 | ## [0.9.1] - 2023-09-11 128 | 129 | - Fixed major bug that prevented adding images to E57 files. 130 | Some required property structs were accidentally private. 131 | - Added some C++ utility code to generate test example files using the libE57format library. 132 | - Restructured and extended integration tests to cover more cases. 133 | 134 | ## [0.9.0] - 2023-08-30 135 | 136 | - Breaking Fix: Added missing implementation for offset in scaled integers. 137 | This required changes in the basic enum for record data types. 138 | - Additional performance improvements when reading E57 files. 139 | - Added validation for XML namespaces and attributes when writing E57 files with extensions. 140 | - Added support for optional faster external CRC32 crate. 141 | - Implemented optional size_hint() for reading point cloud iterators. 142 | - Reworked image extraction tool to become a generic E57 unpack tool. 143 | - Very minor improvements to the XYZ-to-E57 tool. 
144 | 145 | ## [0.8.0] - 2023-08-22 146 | 147 | - Breaking: Reworked simple iterator to make it easier to use 148 | - Breaking: Removed simple iterator option to skip invalid points 149 | - Speed up reading E57 files by ~30% 150 | - Added convenience helper functions for point cloud struct 151 | to easily check if it has certain point attributes 152 | - Added simple iterator option to convert Cartesian to spherical coordinates 153 | - Added new E57-to-LAZ example tool 154 | - Faster E57-to-XYZ tool (uses now ryu for float-to-string conversion) 155 | - Added this CHANGELOG.md file 156 | 157 | ## [0.7.0] - 2023-08-16 158 | 159 | - Breaking: Extended RecordName enum and made it non_exhaustive 160 | - Added missing support for point attribute extensions 161 | - Optimized simple iterator to be ~30% faster 162 | 163 | ## [0.6.0] - 2023-08-12 164 | 165 | - Breaking: Renamed some image structs and enums 166 | - Breaking: Renamed point cloud iterator interface 167 | - Breaking: Simplified Point struct and removed options 168 | - Breaking: Removed Point constructor from raw values 169 | - Added missing feature to add/write images in E57 files 170 | - Added new simple point cloud iterator with some useful options 171 | to apply pose, skip invalid points, convert spherical to Cartesian 172 | coordinates and convert intensity to color. 
173 | - E57 to XYZ tool now respects and includes poses 174 | - E57 to XYZ tool now reads all point clouds of the input file 175 | 176 | ## [0.5.1] - 2023-07-10 177 | 178 | - Fix: Allow empty translation and rotation for poses 179 | 180 | ## [0.5.0] - 2023-05-07 181 | 182 | - Breaking: Refactored some Record related prototype types 183 | - Breaking: Removed simple XYZ RGB writing interface 184 | - Added generic E57 point cloud writing for arbitrary point attributes 185 | - Set optional XML root element metadata when writing 186 | - Set optional point cloud metadata when writing 187 | 188 | ## [0.4.0] - 2023-03-26 189 | 190 | - Breaking: Renamed E57 struct to E57Reader 191 | - Added basic E57 writing support for XYZ RGB point clouds 192 | 193 | ## [0.3.1] - 2023-03-18 194 | 195 | - Added extract images example tool 196 | - Minor documentation improvements 197 | 198 | ## [0.3.0] - 2023-03-18 199 | 200 | - Breaking: Fixed some typos in coordinate struct names 201 | - Breaking: Changed CRC validation interface 202 | - Breaking: Changed XML extraction interface 203 | - Added functionality to read images from E57 files 204 | - Use buffered reader for faster E57 file reading 205 | - Added XML-extractor as example code 206 | - Added E57-to-XYZ converter as example code 207 | - Added CRC-validator as example code 208 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "e57" 3 | version = "0.11.10" 4 | edition = "2021" 5 | readme = "README.md" 6 | repository = "https://github.com/cry-inc/e57" 7 | documentation = "https://docs.rs/e57" 8 | license = "MIT" 9 | keywords = ["e57", "lidar", "pointclouds", "laserscanning", "geospatial"] 10 | categories = ["parser-implementations", "science::geo"] 11 | description = "A pure Rust library for reading and writing E57 files with point clouds and related image data." 
12 | include = [ 13 | "/src/**/*.rs", 14 | "/Cargo.toml", 15 | "/LICENSE", 16 | "/README.md", 17 | "/CHANGELOG.md", 18 | ] 19 | 20 | [features] 21 | crc32c = ["dep:crc32c"] 22 | 23 | [dependencies] 24 | roxmltree = "0.20" 25 | crc32c = { version = "0.6", optional = true } 26 | 27 | [workspace] 28 | members = [ 29 | "tools/e57-check-crc", 30 | "tools/e57-extract-scan-info", 31 | "tools/e57-extract-xml", 32 | "tools/e57-from-xyz", 33 | "tools/e57-to-image", 34 | "tools/e57-to-laz", 35 | "tools/e57-to-pano", 36 | "tools/e57-to-xyz", 37 | "tools/e57-unpack", 38 | ] 39 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2023 cry-inc 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in all 11 | copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 19 | SOFTWARE. 
20 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # E57 Rust Library 2 | [![Build Status](https://github.com/cry-inc/e57/workflows/CI/badge.svg)](https://github.com/cry-inc/e57/actions) 3 | [![Crates.io](https://img.shields.io/crates/v/e57.svg)](https://crates.io/crates/e57) 4 | [![Documentation](https://docs.rs/e57/badge.svg)](https://docs.rs/e57) 5 | [![No Unsafe](https://img.shields.io/badge/unsafe-forbidden-brightgreen.svg)](https://doc.rust-lang.org/nomicon/meet-safe-and-unsafe.html) 6 | [![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT) 7 | [![Dependencies](https://deps.rs/repo/github/cry-inc/e57/status.svg)](https://deps.rs/repo/github/cry-inc/e57) 8 | 9 | A pure Rust library for reading and writing E57 files. No unsafe code, no bloaty dependencies. 10 | 11 | Check out the [tools folder](tools/) for some code examples that show how to use the library. 12 | 13 | ## E57 File Format 14 | The E57 file format is used for storing point clouds and related image data. 15 | It's a neutral file format not tied to any specific vendor or device type and therefore often used 16 | as an exchange format between different applications and organizations. 17 | Typically it's used for data generated by terrestrial and handheld laser scanners. 18 | It can also handle data generated by airborne laser scanners, 19 | but for that use case the LAS/LAZ file format is more commonly used. 20 | 21 | ## Changelog 22 | Read the [CHANGELOG.md](CHANGELOG.md) file for a list of all released versions and their corresponding changes. 23 | 24 | ## Known Limitations 25 | * Does not support point grouping 26 | * Does ignore index packets 27 | * Does not support point attributes of type string 28 | 29 | ## Please report incompatible files! 
30 | If you found an E57 file that can be read with other software but produces an error with this crate, 31 | please let me know and create an issue on GitHub. 32 | The same applies for E57 files that were created by this library and are not correctly read by this or any other software. 33 | I want this library to work with as many files and applications as possible! 34 | 35 | Ideally, you can provide a link to the file itself. If that is not possible, 36 | please include the full error message and the name of the software that produced the file. 37 | If possible, please also include the XML section of the file. 38 | 39 | ## E57 Tools 40 | The [tools folder](tools/) contains a set of small applications to work with E57 files. 41 | They are intended as example code that explains how to use the library, 42 | but also as a set of useful tools when working with E57 files: 43 | 44 | * `e57-check-crc` to validate all the CRC32 checksums in an E57 file. This helps to detect corrupt files upfront before evaluating the actual content of the file. 45 | * `e57-extract-scan-info` to extract some metadata for all scans/point clouds into a CSV file. 46 | * `e57-extract-xml` to quickly extract the XML document that contains all the metadata of the E57 file. 47 | * `e57-from-xyz` to generate an E57 file from an unstructured XYZ ASCII point cloud file. 48 | * `e57-to-image` to export the 2D row/column grids of structured point clouds inside E57 files to planar PNG images. 49 | * `e57-to-laz` to convert an E57 into a compressed and unstructured LAZ file. 50 | * `e57-to-pano` to project structured point clouds inside E57 files to spherical 360 degree panorama PNG images. 51 | * `e57-to-xyz` to convert an E57 into an unstructured XYZ ASCII file. 52 | * `e57-unpack` to "unpack" the content of an E57 file to a folder on disk to be able to investigate and evaluate the contents of the file more easily. 53 | 54 | ### How to compile the tools 55 | 1. 
Clone this repository with git or download and extract a ZIP from GitHub 56 | 2. Install the Rust toolchain (see https://rustup.rs/) 57 | 3. Open a terminal in the folder with this README file 58 | 4. Run the command `cargo build --release --all` 59 | 5. Find all the compiled tool binaries in the folder `target/release/` 60 | 61 | ## Motivation 62 | The E57 file format is well established for exchanging data produced by terrestrial lasers scanners. 63 | However, there are not many implementations that can read and write this file format. 64 | Most applications use the original C++ reference implementation (see http://www.libe57.org/) 65 | or the well maintained fork from Andy Maloney (see https://github.com/asmaloney/libE57Format). 66 | 67 | I thought it would be nice to have a pure Rust solution without any unsafe code. 68 | In my opinion Rust is an excellent choice for parsers of untrusted data, 69 | especially if you plan to use the code in something like a cloud backend. 70 | 71 | If you want to handle E57 files inside a Rust project this crate will also avoid 72 | all the issues that come with integrating C++ code. 73 | 74 | ## Code Coverage 75 | The Visual Studio Code tasks included in this repository contain some tasks for code coverage measurement. 76 | To be able to run them, you need to install `grcov` with the command `cargo install grcov` and the 77 | LLVM tools by running `rustup component add llvm-tools`. 
78 | -------------------------------------------------------------------------------- /clippy.toml: -------------------------------------------------------------------------------- 1 | # Allow some things in tests 2 | allow-unwrap-in-tests = true 3 | allow-panic-in-tests = true 4 | allow-expect-in-tests = true 5 | -------------------------------------------------------------------------------- /src/bitpack.rs: -------------------------------------------------------------------------------- 1 | use crate::bs_read::ByteStreamReadBuffer; 2 | use crate::RecordValue; 3 | use crate::Result; 4 | use std::collections::VecDeque; 5 | 6 | pub struct BitPack; 7 | 8 | impl BitPack { 9 | pub fn unpack_doubles( 10 | stream: &mut ByteStreamReadBuffer, 11 | output: &mut VecDeque, 12 | ) -> Result<()> { 13 | while let Some(data) = stream.extract(64) { 14 | let bytes = data.to_le_bytes(); 15 | let value = f64::from_le_bytes(bytes); 16 | output.push_back(RecordValue::Double(value)); 17 | } 18 | Ok(()) 19 | } 20 | 21 | pub fn unpack_singles( 22 | stream: &mut ByteStreamReadBuffer, 23 | output: &mut VecDeque, 24 | ) -> Result<()> { 25 | while let Some(data) = stream.extract(32) { 26 | let bytes = (data as u32).to_le_bytes(); 27 | let value = f32::from_le_bytes(bytes); 28 | output.push_back(RecordValue::Single(value)); 29 | } 30 | Ok(()) 31 | } 32 | 33 | pub fn unpack_ints( 34 | stream: &mut ByteStreamReadBuffer, 35 | min: i64, 36 | max: i64, 37 | output: &mut VecDeque, 38 | ) -> Result<()> { 39 | let range = max as i128 - min as i128; 40 | let bits = range.ilog2() as usize + 1; 41 | let mask = ((1_u128 << bits) - 1) as u64; 42 | while let Some(uint) = stream.extract(bits) { 43 | let int = (uint & mask) as i128 + min as i128; 44 | output.push_back(RecordValue::Integer(int as i64)); 45 | } 46 | Ok(()) 47 | } 48 | 49 | pub fn unpack_scaled_ints( 50 | stream: &mut ByteStreamReadBuffer, 51 | min: i64, 52 | max: i64, 53 | output: &mut VecDeque, 54 | ) -> Result<()> { 55 | let range = max as 
i128 - min as i128; 56 | let bits = range.ilog2() as usize + 1; 57 | let mask = ((1_u128 << bits) - 1) as u64; 58 | while let Some(uint) = stream.extract(bits) { 59 | let int = (uint & mask) as i128 + min as i128; 60 | output.push_back(RecordValue::ScaledInteger(int as i64)); 61 | } 62 | Ok(()) 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /src/blob.rs: -------------------------------------------------------------------------------- 1 | use crate::error::{Converter, WRONG_OFFSET}; 2 | use crate::paged_reader::PagedReader; 3 | use crate::paged_writer::PagedWriter; 4 | use crate::{Error, Result}; 5 | use roxmltree::Node; 6 | use std::io::{copy, Read, Seek, Write}; 7 | 8 | /// Describes a binary data blob stored inside an E57 file. 9 | #[derive(Clone, Debug)] 10 | #[non_exhaustive] 11 | pub struct Blob { 12 | /// Physical file offset of the binary blob section in the E57 file. 13 | pub offset: u64, 14 | /// The logical size of the associated binary blob in bytes. 15 | pub length: u64, 16 | } 17 | 18 | impl Blob { 19 | /// Creates a blob instance manually from offset and length. 20 | /// WARNING: This constructor is NOT required for standard E57 functionality! 21 | /// In normal cases, like when reading images, the library will provide all Blob instances. 22 | /// This is only needed for use cases with E57 extensions to read custom binary data. 23 | /// In this case the offset and length values must be extracted manually from the XML data. 24 | pub fn new(offset: u64, length: u64) -> Self { 25 | Blob { offset, length } 26 | } 27 | 28 | pub(crate) fn from_node(node: &Node) -> Result { 29 | if Some("Blob") != node.attribute("type") { 30 | Error::invalid("The supplided tag is not a blob")? 
31 | } 32 | 33 | let offset = node 34 | .attribute("fileOffset") 35 | .invalid_err("Failed to find 'fileOffset' attribute in blob tag")?; 36 | let offset = offset 37 | .parse::() 38 | .invalid_err("Unable to parse offset as u64")?; 39 | 40 | let length = node 41 | .attribute("length") 42 | .invalid_err("Failed to find 'length' attribute in blob tag")?; 43 | let length = length 44 | .parse::() 45 | .invalid_err("Unable to parse length as u64")?; 46 | 47 | Ok(Self { offset, length }) 48 | } 49 | 50 | pub(crate) fn from_parent_node(tag_name: &str, parent_node: &Node) -> Result> { 51 | if let Some(node) = &parent_node.children().find(|n| n.has_tag_name(tag_name)) { 52 | Ok(Some(Self::from_node(node)?)) 53 | } else { 54 | Ok(None) 55 | } 56 | } 57 | 58 | pub(crate) fn xml_string(&self, tag_name: &str) -> String { 59 | format!( 60 | "<{tag_name} type=\"Blob\" fileOffset=\"{}\" length=\"{}\"/>\n", 61 | self.offset, self.length 62 | ) 63 | } 64 | 65 | pub(crate) fn read( 66 | &self, 67 | reader: &mut PagedReader, 68 | writer: &mut dyn Write, 69 | ) -> Result { 70 | reader 71 | .seek_physical(self.offset) 72 | .read_err("Failed to seek to start offset of blob")?; 73 | let header = BlobSectionHeader::from_reader(reader)?; 74 | if self.length > header.section_length + 16 { 75 | Error::invalid("Blob XML length and blob section header mismatch")? 
76 | } 77 | 78 | let mut limited = reader.take(self.length); 79 | copy(&mut limited, writer).read_err("Failed to read binary blob data") 80 | } 81 | 82 | pub(crate) fn write( 83 | writer: &mut PagedWriter, 84 | reader: &mut dyn Read, 85 | ) -> Result { 86 | // Write temporary section header with invalid zero length 87 | let start_offset = writer.physical_position()?; 88 | let mut section_header = BlobSectionHeader { section_length: 0 }; 89 | section_header.to_writer(writer)?; 90 | 91 | // Write blob data 92 | let length = std::io::copy(reader, writer).write_err("Failed to write blob data")?; 93 | 94 | // Update blob section header with actual lenght 95 | let end_offset = writer.physical_position()?; 96 | section_header.section_length = length; 97 | writer.physical_seek(start_offset)?; 98 | section_header.to_writer(writer)?; 99 | writer.physical_seek(end_offset)?; 100 | 101 | writer 102 | .align() 103 | .write_err("Failed to align writer on next 4-byte offset after writing blob section")?; 104 | 105 | Ok(Self { 106 | offset: start_offset, 107 | length, 108 | }) 109 | } 110 | } 111 | 112 | struct BlobSectionHeader { 113 | section_length: u64, 114 | } 115 | 116 | impl BlobSectionHeader { 117 | fn from_array(buffer: &[u8; 16]) -> Result { 118 | let section_id = buffer[0]; 119 | if section_id != 0 { 120 | Error::invalid("Section ID of the blob section header is not 0")? 
121 | } 122 | Ok(Self { 123 | section_length: u64::from_le_bytes( 124 | buffer[8..16].try_into().internal_err(WRONG_OFFSET)?, 125 | ), 126 | }) 127 | } 128 | 129 | fn from_reader(reader: &mut PagedReader) -> Result { 130 | let mut buffer = [0_u8; 16]; 131 | reader 132 | .read_exact(&mut buffer) 133 | .read_err("Failed to read compressed vector section header")?; 134 | BlobSectionHeader::from_array(&buffer) 135 | } 136 | 137 | fn to_writer(&self, writer: &mut PagedWriter) -> Result<()> { 138 | let mut bytes: [u8; 16] = [0; 16]; 139 | let length_bytes = u64::to_le_bytes(self.section_length); 140 | bytes[8..16].copy_from_slice(&length_bytes); 141 | writer 142 | .write_all(&bytes) 143 | .write_err("Failed to write blob section header") 144 | } 145 | } 146 | -------------------------------------------------------------------------------- /src/bounds.rs: -------------------------------------------------------------------------------- 1 | use crate::xml; 2 | use crate::Result; 3 | use roxmltree::Node; 4 | 5 | /// Optional minimum and maximum values for Cartesian X, Y and Z coordinates. 6 | /// 7 | /// Represents an axis-aligned bounding box of Cartesian coordinates. 
8 | #[derive(Clone, Debug, Default)] 9 | pub struct CartesianBounds { 10 | pub x_min: Option, 11 | pub x_max: Option, 12 | pub y_min: Option, 13 | pub y_max: Option, 14 | pub z_min: Option, 15 | pub z_max: Option, 16 | } 17 | 18 | impl CartesianBounds { 19 | pub(crate) fn from_node(node: &Node) -> Result { 20 | Ok(Self { 21 | x_min: xml::opt_f64(node, "xMinimum")?, 22 | x_max: xml::opt_f64(node, "xMaximum")?, 23 | y_min: xml::opt_f64(node, "yMinimum")?, 24 | y_max: xml::opt_f64(node, "yMaximum")?, 25 | z_min: xml::opt_f64(node, "zMinimum")?, 26 | z_max: xml::opt_f64(node, "zMaximum")?, 27 | }) 28 | } 29 | 30 | pub(crate) fn xml_string(&self) -> String { 31 | let mut xml = String::from("\n"); 32 | if let Some(min) = self.x_min { 33 | xml += &xml::gen_float("xMinimum", min); 34 | } 35 | if let Some(max) = self.x_max { 36 | xml += &xml::gen_float("xMaximum", max); 37 | } 38 | if let Some(min) = self.y_min { 39 | xml += &xml::gen_float("yMinimum", min); 40 | } 41 | if let Some(max) = self.y_max { 42 | xml += &xml::gen_float("yMaximum", max); 43 | } 44 | if let Some(min) = self.z_min { 45 | xml += &xml::gen_float("zMinimum", min); 46 | } 47 | if let Some(max) = self.z_max { 48 | xml += &xml::gen_float("zMaximum", max); 49 | } 50 | xml += "\n"; 51 | xml 52 | } 53 | } 54 | 55 | /// Optional minimum and maximum values for spherical coordinates. 
56 | #[derive(Clone, Debug, Default)] 57 | pub struct SphericalBounds { 58 | pub range_min: Option, 59 | pub range_max: Option, 60 | pub elevation_min: Option, 61 | pub elevation_max: Option, 62 | pub azimuth_start: Option, 63 | pub azimuth_end: Option, 64 | } 65 | 66 | impl SphericalBounds { 67 | pub(crate) fn from_node(node: &Node) -> Result { 68 | Ok(Self { 69 | range_min: xml::opt_f64(node, "rangeMinimum")?, 70 | range_max: xml::opt_f64(node, "rangeMaximum")?, 71 | elevation_min: xml::opt_f64(node, "elevationMinimum")?, 72 | elevation_max: xml::opt_f64(node, "elevationMaximum")?, 73 | azimuth_start: xml::opt_f64(node, "azimuthStart")?, 74 | azimuth_end: xml::opt_f64(node, "azimuthEnd")?, 75 | }) 76 | } 77 | 78 | pub(crate) fn xml_string(&self) -> String { 79 | let mut xml = String::from("\n"); 80 | if let Some(min) = self.azimuth_start { 81 | xml += &xml::gen_float("azimuthStart", min); 82 | } 83 | if let Some(max) = self.azimuth_end { 84 | xml += &xml::gen_float("azimuthEnd", max); 85 | } 86 | if let Some(min) = self.elevation_min { 87 | xml += &xml::gen_float("elevationMinimum", min); 88 | } 89 | if let Some(max) = self.elevation_max { 90 | xml += &xml::gen_float("elevationMaximum", max); 91 | } 92 | if let Some(min) = self.range_min { 93 | xml += &xml::gen_float("rangeMinimum", min); 94 | } 95 | if let Some(max) = self.range_max { 96 | xml += &xml::gen_float("rangeMaximum", max); 97 | } 98 | xml += "\n"; 99 | xml 100 | } 101 | 102 | /// Converts the spherical bounds into Cartesian bounds. 103 | /// The result will be bigger than the actual Cartesian bounds, since it is not possible 104 | /// to calculate the exact Cartesian bounds without iterating over all points. 105 | /// Will return `None` if the spherical range is not defined. 
106 | pub fn to_cartesian(&self) -> Option { 107 | self.range_max.map(|range| CartesianBounds { 108 | x_min: Some(-range), 109 | x_max: Some(range), 110 | y_min: Some(-range), 111 | y_max: Some(range), 112 | z_min: Some(-range), 113 | z_max: Some(range), 114 | }) 115 | } 116 | } 117 | 118 | /// Optional minimum and maximum values for the row, column and return indices. 119 | #[derive(Clone, Debug, Default)] 120 | pub struct IndexBounds { 121 | pub row_min: Option, 122 | pub row_max: Option, 123 | pub column_min: Option, 124 | pub column_max: Option, 125 | pub return_min: Option, 126 | pub return_max: Option, 127 | } 128 | 129 | impl IndexBounds { 130 | pub(crate) fn from_node(node: &Node) -> Result { 131 | Ok(Self { 132 | row_min: xml::opt_int(node, "rowMinimum")?, 133 | row_max: xml::opt_int(node, "rowMaximum")?, 134 | column_min: xml::opt_int(node, "columnMinimum")?, 135 | column_max: xml::opt_int(node, "columnMaximum")?, 136 | return_min: xml::opt_int(node, "returnMinimum")?, 137 | return_max: xml::opt_int(node, "returnMaximum")?, 138 | }) 139 | } 140 | 141 | pub(crate) fn xml_string(&self) -> String { 142 | let mut xml = String::from("\n"); 143 | if let Some(min) = self.row_min { 144 | xml += &xml::gen_int("rowMinimum", min); 145 | } 146 | if let Some(max) = self.row_max { 147 | xml += &xml::gen_int("rowMaximum", max); 148 | } 149 | if let Some(min) = self.column_min { 150 | xml += &xml::gen_int("columnMinimum", min); 151 | } 152 | if let Some(max) = self.column_max { 153 | xml += &xml::gen_int("columnMaximum", max); 154 | } 155 | if let Some(min) = self.return_min { 156 | xml += &xml::gen_int("returnMinimum", min); 157 | } 158 | if let Some(max) = self.return_max { 159 | xml += &xml::gen_int("returnMaximum", max); 160 | } 161 | xml += "\n"; 162 | xml 163 | } 164 | } 165 | -------------------------------------------------------------------------------- /src/bs_read.rs: -------------------------------------------------------------------------------- 1 | 
/// Bit-granular read buffer used to decode packed E57 byte streams.
#[derive(Clone)]
pub struct ByteStreamReadBuffer {
    buffer: Vec<u8>,
    // Scratch buffer reused by append() to avoid reallocating on every call.
    tmp: Vec<u8>,
    // Read position in bits, relative to the start of `buffer`.
    offset: usize,
}

impl ByteStreamReadBuffer {
    pub fn new() -> Self {
        Self {
            buffer: Vec::new(),
            tmp: Vec::new(),
            offset: 0,
        }
    }

    /// Append a fresh slice of bytes to the end of the stream.
    /// Fully consumed bytes at the front are dropped to keep the buffer small.
    pub fn append(&mut self, data: &[u8]) {
        let consumed_bytes = self.offset / 8;
        let remaining_bytes = self.buffer.len() - consumed_bytes;
        self.offset -= consumed_bytes * 8;
        self.tmp.reserve(remaining_bytes + data.len());
        self.tmp.extend_from_slice(&self.buffer[consumed_bytes..]);
        self.tmp.extend_from_slice(data);
        self.buffer.clear();
        std::mem::swap(&mut self.buffer, &mut self.tmp);
    }

    /// Extract 64 bits or less from the byte stream and return them as u64.
    /// The returned u64 might contain more than the requested number of bits.
    /// Please make sure to ignore/mask the additional bits!
    /// Returns None if the request cannot be satisfied.
    pub fn extract(&mut self, bits: usize) -> Option<u64> {
        if self.available() < bits {
            return None;
        }

        let start_offset = self.offset / 8;
        let end_offset = (self.offset + bits).div_ceil(8); // Integer division with rounding up
        let offset = self.offset % 8;

        // Up to 9 bytes can be touched (64 bits + up to 7 bits of misalignment),
        // so a 16 byte buffer interpreted as u128 is always big enough.
        let mut data = [0; 16];
        let data_len = end_offset - start_offset;
        let dst = &mut data[..data_len];
        let src = &self.buffer[start_offset..end_offset];
        dst.copy_from_slice(src);

        self.offset += bits;
        let data = u128::from_le_bytes(data) >> offset;
        Some(data as u64)
    }

    /// Returns the number of available bits in the stream.
    pub fn available(&self) -> usize {
        (self.buffer.len() * 8) - self.offset
    }
}

#[cfg(test)]
mod bs_read_tests {
    use super::*;

    #[test]
    fn empty() {
        let mut bs = ByteStreamReadBuffer::new();
        assert_eq!(bs.available(), 0);
        let result = bs.extract(0).unwrap();
        assert_eq!(result, 0);
        assert_eq!(bs.available(), 0);
        assert!(bs.extract(1).is_none());
    }

    #[test]
    fn append_and_extract_bits() {
        let mut bs = ByteStreamReadBuffer::new();
        bs.append(&[255]);

        assert_eq!(bs.available(), 8);
        let result = bs.extract(2).unwrap();
        assert_eq!(result, 255);

        assert_eq!(bs.available(), 6);
        let result = bs.extract(6).unwrap();
        assert_eq!(result, 63);

        assert_eq!(bs.available(), 0);
        assert!(bs.extract(1).is_none());
    }

    #[test]
    fn append_and_extract_bytes() {
        let mut bs = ByteStreamReadBuffer::new();
        bs.append(&[23, 42, 13]);
        bs.extract(2).unwrap();

        assert_eq!(bs.available(), 22);
        let result = bs.extract(22).unwrap();
        assert_eq!(result, 215685);
    }

    #[test]
    fn remove_consumed_when_appending() {
        let mut bs = ByteStreamReadBuffer::new();
        bs.append(&[1, 2, 3, 4, 5]);
        bs.extract(4 * 8 + 2).unwrap();

        // We append one byte and the buffer should become smaller
        // because all fully consumed bytes are removed.
        bs.append(&[6]);
        assert!(bs.buffer.len() == 2);

        // Offsets are updated correctly and appended
        // data can be extracted as expected.
        let result = bs.extract(14).unwrap();
        assert_eq!(result, 385);
    }
}

// ======================= src/bs_write.rs =======================

/// Bit-granular write buffer used to encode packed E57 byte streams.
#[derive(Clone)]
pub struct ByteStreamWriteBuffer {
    buffer: Vec<u8>,
    // Number of bits already used in the last byte of `buffer` (0 = byte-aligned).
    last_byte_bit: usize,
}

impl ByteStreamWriteBuffer {
    pub fn new() -> Self {
        Self {
            buffer: Vec::new(),
            last_byte_bit: 0,
        }
    }

    /// Appends whole bytes; falls back to bitwise insertion when unaligned.
    pub fn add_bytes(&mut self, data: &[u8]) {
        if self.last_byte_bit == 0 {
            self.buffer.extend_from_slice(data);
        } else {
            self.add_bits(data, data.len() * 8)
        }
    }

    /// Appends the lowest `bits` bits of `data` (LSB-first within each byte).
    pub fn add_bits(&mut self, data: &[u8], bits: usize) {
        if self.last_byte_bit == 0 {
            let to_append = bits.div_ceil(8); // Integer division with rounding up
            self.buffer.extend_from_slice(&data[..to_append]);
            self.last_byte_bit = bits % 8;
        } else {
            // Unaligned case: copy bit by bit into the partially filled tail byte.
            let start_byte = self.buffer.len() - 1;
            let start_bit = self.last_byte_bit;
            for b in 0..bits {
                let source_byte = b / 8;
                let source_mask = 1 << (b % 8);
                let source_bit = (data[source_byte] & source_mask) != 0;
                let target_mask = if source_bit {
                    1 << self.last_byte_bit
                } else {
                    0
                };
                let target_byte = start_byte + ((start_bit + b) / 8);
                if target_byte >= self.buffer.len() {
                    self.buffer.push(0);
                }
                self.buffer[target_byte] |= target_mask;
                self.last_byte_bit = (self.last_byte_bit + 1) % 8;
            }
        }
    }

    /// Removes and returns all completely filled bytes.
    pub fn get_full_bytes(&mut self) -> Vec<u8> {
        let to_take = self.full_bytes();
        self.buffer.drain(..to_take).collect()
    }

    /// Removes and returns all bytes, including a trailing partial byte.
    pub fn get_all_bytes(&mut self) -> Vec<u8> {
        self.last_byte_bit = 0;
        self.buffer.drain(..).collect()
    }

    /// Number of completely filled bytes in the buffer.
    pub fn full_bytes(&self) -> usize {
        let len = self.buffer.len();
        if self.last_byte_bit != 0 {
            len - 1
        } else {
            len
        }
    }

    /// Number of bytes in the buffer, including a trailing partial byte.
    pub fn all_bytes(&self) -> usize {
        self.buffer.len()
    }
}

#[cfg(test)]
mod bs_write_tests {
    use super::*;

    #[test]
    fn empty() {
        let mut buffer = ByteStreamWriteBuffer::new();
        assert_eq!(buffer.full_bytes(), 0);
        assert_eq!(buffer.all_bytes(), 0);

        let full = buffer.get_full_bytes();
        assert_eq!(full.len(), 0);

        let all = buffer.get_all_bytes();
        assert_eq!(all.len(), 0);
    }

    #[test]
    fn add_bytes() {
        let mut buffer = ByteStreamWriteBuffer::new();
        buffer.add_bytes(&[1, 2, 3, 4]);
        assert_eq!(buffer.full_bytes(), 4);
        assert_eq!(buffer.all_bytes(), 4);

        let full = buffer.get_full_bytes();
        assert_eq!(full.len(), 4);
        assert_eq!(full, [1, 2, 3, 4]);

        assert_eq!(buffer.full_bytes(), 0);
        assert_eq!(buffer.all_bytes(), 0);
    }

    #[test]
    fn add_bits_after_full_bytes() {
        let mut buffer = ByteStreamWriteBuffer::new();
        buffer.add_bytes(&[1, 2, 3, 4]);
        buffer.add_bits(&[0b00001111], 4);

        assert_eq!(buffer.full_bytes(), 4);
        assert_eq!(buffer.all_bytes(), 5);

        let full = buffer.get_full_bytes();
        assert_eq!(full.len(), 4);
        assert_eq!(full, [1, 2, 3, 4]);
        assert_eq!(buffer.full_bytes(), 0);
        assert_eq!(buffer.all_bytes(), 1);

        let all = buffer.get_all_bytes();
        assert_eq!(all.len(), 1);
        assert_eq!(all, [0b00001111]);

        assert_eq!(buffer.full_bytes(), 0);
        assert_eq!(buffer.all_bytes(), 0);
    }

    #[test]
    fn add_two_times_four_bits() {
        let mut buffer = ByteStreamWriteBuffer::new();
        buffer.add_bits(&[0b00001111], 4);
        buffer.add_bits(&[0b00001111], 4);

        assert_eq!(buffer.full_bytes(), 1);
        assert_eq!(buffer.all_bytes(), 1);

        let all = buffer.get_all_bytes();
        assert_eq!(all, [0b11111111]);
    }

    #[test]
    fn add_mixed_bits_and_bytes() {
        let mut buffer = ByteStreamWriteBuffer::new();
        buffer.add_bits(&[0b101], 3);
        buffer.add_bytes(&[0b10000001]);
        buffer.add_bits(&[0b100001], 6);

        assert_eq!(buffer.full_bytes(), 2);
        assert_eq!(buffer.all_bytes(), 3);

        let all = buffer.get_all_bytes();
        assert_eq!(all, [0b00001101, 0b00001100, 0b00000001]);
    }
}

// ======================= src/crc32.rs =======================

// Simple CRC 32 ISCSI/Castagnoli implementation.
// This code is based on the SW fallback of a crc32c implementation
// (original link lost during formatting; presumably the crc32c Rust crate).
3 | pub struct Crc32 { 4 | table: [u32; 256], 5 | } 6 | 7 | impl Crc32 { 8 | pub fn new() -> Self { 9 | let mut table = [0_u32; 256]; 10 | for i in 0..256 { 11 | let mut val = i; 12 | for _ in 0..8 { 13 | if val % 2 == 0 { 14 | val /= 2; 15 | } else { 16 | val /= 2; 17 | val ^= 0x82_F6_3B_78; 18 | } 19 | } 20 | table[i as usize] = val; 21 | } 22 | Self { table } 23 | } 24 | 25 | pub fn calculate(&mut self, data: &[u8]) -> u32 { 26 | !data.iter().fold(!0, |sum, &next| { 27 | let index = (sum ^ next as u32) as u8; 28 | self.table[index as usize] ^ (sum >> 8) 29 | }) 30 | } 31 | } 32 | 33 | #[cfg(test)] 34 | mod tests { 35 | use super::*; 36 | 37 | #[test] 38 | fn empty() { 39 | let data = [0_u8; 0]; 40 | let mut crc = Crc32::new(); 41 | let sum = crc.calculate(&data); 42 | assert_eq!(sum, 0); 43 | } 44 | 45 | #[test] 46 | fn single_u64() { 47 | let data = [123_u8; 8]; 48 | let mut crc = Crc32::new(); 49 | let sum = crc.calculate(&data); 50 | assert_eq!(sum, 3786498929); 51 | } 52 | 53 | #[test] 54 | fn full_page() { 55 | let mut data = [0_u8; 1024]; 56 | for i in 0..data.len() { 57 | data[i] = (i % 256) as u8; 58 | } 59 | let mut crc = Crc32::new(); 60 | let sum = crc.calculate(&data); 61 | assert_eq!(sum, 752840335); 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /src/cv_section.rs: -------------------------------------------------------------------------------- 1 | use crate::error::Converter; 2 | use crate::error::WRONG_OFFSET; 3 | use crate::Error; 4 | use crate::Result; 5 | use std::io::Read; 6 | use std::io::Write; 7 | 8 | #[derive(Debug)] 9 | pub struct CompressedVectorSectionHeader { 10 | section_id: u8, 11 | pub section_length: u64, 12 | pub data_offset: u64, 13 | pub index_offset: u64, 14 | } 15 | 16 | impl CompressedVectorSectionHeader { 17 | pub const SIZE: u64 = 32; 18 | 19 | pub fn read(reader: &mut dyn Read) -> Result { 20 | let mut buffer = [0_u8; Self::SIZE as usize]; 21 | reader 22 | 
.read_exact(&mut buffer) 23 | .read_err("Failed to read compressed vector section header")?; 24 | 25 | let header = Self { 26 | section_id: buffer[0], 27 | section_length: u64::from_le_bytes( 28 | buffer[8..16].try_into().internal_err(WRONG_OFFSET)?, 29 | ), 30 | data_offset: u64::from_le_bytes(buffer[16..24].try_into().internal_err(WRONG_OFFSET)?), 31 | index_offset: u64::from_le_bytes(buffer[24..32].try_into().internal_err(WRONG_OFFSET)?), 32 | }; 33 | 34 | if header.section_id != 1 { 35 | Error::invalid("Section ID of the compressed vector section header is not 1")? 36 | } 37 | if header.section_length % 4 != 0 { 38 | Error::invalid("Section length is not aligned and a multiple of four")? 39 | } 40 | 41 | Ok(header) 42 | } 43 | 44 | pub fn write(&self, writer: &mut dyn Write) -> Result<()> { 45 | let mut buffer = [0_u8; Self::SIZE as usize]; 46 | buffer[0] = self.section_id; 47 | buffer[8..16].copy_from_slice(&self.section_length.to_le_bytes()); 48 | buffer[16..24].copy_from_slice(&self.data_offset.to_le_bytes()); 49 | buffer[24..32].copy_from_slice(&self.index_offset.to_le_bytes()); 50 | writer 51 | .write_all(&buffer) 52 | .write_err("Failed to write compressed vector section header") 53 | } 54 | } 55 | 56 | impl Default for CompressedVectorSectionHeader { 57 | fn default() -> Self { 58 | Self { 59 | section_id: 1, 60 | section_length: 0, 61 | data_offset: 0, 62 | index_offset: 0, 63 | } 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /src/date_time.rs: -------------------------------------------------------------------------------- 1 | use crate::error::Converter; 2 | use crate::Result; 3 | use roxmltree::Node; 4 | 5 | /// Represents a specific date and time used in E57 files. 6 | #[derive(Clone, Debug)] 7 | pub struct DateTime { 8 | /// Number of seconds since GPS start epoch (00:00 UTC on January 6, 1980). 
9 | pub gps_time: f64, 10 | /// True if the a global navigation satellite system device (such as GPS or GLONASS) was used to record the time. 11 | pub atomic_reference: bool, 12 | } 13 | 14 | impl DateTime { 15 | pub(crate) fn from_node(node: &Node) -> Result> { 16 | let gps_time_text = node 17 | .children() 18 | .find(|n| n.has_tag_name("dateTimeValue") && n.attribute("type") == Some("Float")) 19 | .invalid_err("Unable to find XML tag 'dateTimeValue' with type 'Float'")? 20 | .text(); 21 | let gps_time = if let Some(text) = gps_time_text { 22 | text.parse::() 23 | .invalid_err("Failed to parse inner text of XML tag 'dateTimeValue' as double")? 24 | } else { 25 | return Ok(None); 26 | }; 27 | 28 | let atomic_reference_node = node.children().find(|n| { 29 | n.has_tag_name("isAtomicClockReferenced") && n.attribute("type") == Some("Integer") 30 | }); 31 | let atomic_reference = if let Some(node) = atomic_reference_node { 32 | node.text().unwrap_or("0").trim() == "1" 33 | } else { 34 | return Ok(None); 35 | }; 36 | 37 | Ok(Some(Self { 38 | gps_time, 39 | atomic_reference, 40 | })) 41 | } 42 | 43 | pub(crate) fn xml_string(&self, tag_name: &str) -> String { 44 | let mut xml = String::new(); 45 | xml += &format!("<{tag_name} type=\"Structure\">\n"); 46 | xml += &format!( 47 | "{}\n", 48 | self.gps_time 49 | ); 50 | xml += &format!( 51 | "{}\n", 52 | if self.atomic_reference { "1" } else { "0" } 53 | ); 54 | xml += &format!("\n"); 55 | xml 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /src/e57_reader.rs: -------------------------------------------------------------------------------- 1 | use crate::error::Converter; 2 | use crate::paged_reader::PagedReader; 3 | use crate::root::root_from_document; 4 | use crate::root::Root; 5 | use crate::Blob; 6 | use crate::DateTime; 7 | use crate::Error; 8 | use crate::Extension; 9 | use crate::Header; 10 | use crate::Image; 11 | use crate::PointCloud; 12 | use 
crate::PointCloudReaderRaw; 13 | use crate::PointCloudReaderSimple; 14 | use crate::Result; 15 | use roxmltree::Document; 16 | use std::fs::File; 17 | use std::io::BufReader; 18 | use std::io::Read; 19 | use std::io::Seek; 20 | use std::io::Write; 21 | use std::path::Path; 22 | 23 | const MAX_XML_SIZE: usize = 1024 * 1024 * 10; 24 | 25 | /// Main interface for reading E57 files. 26 | pub struct E57Reader { 27 | reader: PagedReader, 28 | header: Header, 29 | xml: String, 30 | root: Root, 31 | pointclouds: Vec, 32 | images: Vec, 33 | extensions: Vec, 34 | } 35 | 36 | impl E57Reader { 37 | /// Creates a new E57 instance for from a reader. 38 | pub fn new(mut reader: T) -> Result { 39 | // Read, parse and validate E57 header 40 | let header = Header::read(&mut reader)?; 41 | 42 | // Set up paged reader for the CRC page layer 43 | let mut reader = PagedReader::new(reader, header.page_size) 44 | .read_err("Failed creating paged CRC reader")?; 45 | 46 | // Read and parse XML data 47 | let xml_raw = Self::extract_xml( 48 | &mut reader, 49 | header.phys_xml_offset, 50 | header.xml_length as usize, 51 | )?; 52 | let xml = String::from_utf8(xml_raw).read_err("Failed to parse XML as UTF8")?; 53 | let document = Document::parse(&xml).invalid_err("Failed to parse XML data")?; 54 | let root = root_from_document(&document)?; 55 | let pointclouds = PointCloud::vec_from_document(&document)?; 56 | let images = Image::vec_from_document(&document)?; 57 | let extensions = Extension::vec_from_document(&document); 58 | 59 | Ok(Self { 60 | reader, 61 | header, 62 | xml, 63 | root, 64 | pointclouds, 65 | images, 66 | extensions, 67 | }) 68 | } 69 | 70 | /// Returns the contents of E57 binary file header structure. 71 | pub fn header(&self) -> Header { 72 | self.header.clone() 73 | } 74 | 75 | /// Returns the XML section of the E57 file. 76 | pub fn xml(&self) -> &str { 77 | &self.xml 78 | } 79 | 80 | /// Returns format name stored in the XML section. 
81 | pub fn format_name(&self) -> &str { 82 | &self.root.format 83 | } 84 | 85 | /// Returns GUID stored in the XML section. 86 | pub fn guid(&self) -> &str { 87 | &self.root.guid 88 | } 89 | 90 | /// Returns the library version string of the root XML section. 91 | pub fn library_version(&self) -> Option<&str> { 92 | self.root.library_version.as_deref() 93 | } 94 | 95 | /// Returns a list of all extensions defined in this file. 96 | pub fn extensions(&self) -> Vec { 97 | self.extensions.clone() 98 | } 99 | 100 | /// Returns a list of all point cloud descriptors in the file. 101 | pub fn pointclouds(&self) -> Vec { 102 | self.pointclouds.clone() 103 | } 104 | 105 | /// Returns an iterator for reading point cloud data. 106 | /// The data provided by this interface is already normalized for convenience. 107 | /// There is also a raw iterator for advanced use-cases that require direct access. 108 | pub fn pointcloud_simple(&mut self, pc: &PointCloud) -> Result> { 109 | PointCloudReaderSimple::new(pc, &mut self.reader) 110 | } 111 | 112 | /// Returns an iterator for reading raw low level point cloud data. 113 | /// This provides access to the original values stored in the E57 file. 114 | /// This interface is only recommended for advanced use-cases. 115 | /// In most scenarios the simple iterator is the better choice. 116 | pub fn pointcloud_raw(&mut self, pc: &PointCloud) -> Result> { 117 | PointCloudReaderRaw::new(pc, &mut self.reader) 118 | } 119 | 120 | /// Returns a list of all image descriptors in the file. 121 | pub fn images(&self) -> Vec { 122 | self.images.clone() 123 | } 124 | 125 | /// Reads the content of a blob and copies it into the supplied writer. 126 | /// Returns the number of written bytes. 127 | pub fn blob(&mut self, blob: &Blob, writer: &mut dyn Write) -> Result { 128 | blob.read(&mut self.reader, writer) 129 | } 130 | 131 | /// Returns the optional creation date and time of the file. 
132 | pub fn creation(&self) -> Option { 133 | self.root.creation.clone() 134 | } 135 | 136 | /// Returns the optional coordinate system metadata of the file. 137 | /// 138 | /// This should contain a Coordinate Reference System that is specified by 139 | /// a string in a well-known text format for a spatial reference system, 140 | /// as defined by the Coordinate Transformation Service specification 141 | /// developed by the Open Geospatial Consortium. 142 | /// See also: 143 | pub fn coordinate_metadata(&self) -> Option<&str> { 144 | self.root.coordinate_metadata.as_deref() 145 | } 146 | 147 | /// Iterate over an reader to check an E57 file for CRC errors. 148 | /// 149 | /// This standalone function does only the minimal parsing required 150 | /// to get the E57 page size and without any other checks or validation. 151 | /// After that it will CRC-validate the whole file. 152 | /// It will not read or check any other file header and XML data! 153 | /// This method returns the page size of the E57 file. 154 | pub fn validate_crc(mut reader: T) -> Result { 155 | let page_size = Self::get_u64(&mut reader, 40, "page size")?; 156 | let mut paged_reader = 157 | PagedReader::new(reader, page_size).read_err("Failed creating paged CRC reader")?; 158 | let mut buffer = vec![0_u8; page_size as usize]; 159 | let mut page = 0; 160 | while paged_reader 161 | .read(&mut buffer) 162 | .read_err(format!("Failed to validate CRC for page {page}"))? 163 | != 0 164 | { 165 | page += 1; 166 | } 167 | Ok(page_size) 168 | } 169 | 170 | /// Returns the raw unparsed binary XML data of the E57 file as bytes. 171 | /// 172 | /// This standalone function does only the minimal parsing required 173 | /// to get the XML section without any other checks or any other 174 | /// validation than basic CRC ckecking for the XML section itself. 
175 | pub fn raw_xml(mut reader: T) -> Result> { 176 | let page_size = Self::get_u64(&mut reader, 40, "page size")?; 177 | let xml_offset = Self::get_u64(&mut reader, 24, "XML offset")?; 178 | let xml_length = Self::get_u64(&mut reader, 32, "XML length")?; 179 | 180 | // Create paged CRC reader 181 | let mut paged_reader = 182 | PagedReader::new(reader, page_size).read_err("Failed creating paged CRC reader")?; 183 | 184 | // Read XML data 185 | Self::extract_xml(&mut paged_reader, xml_offset, xml_length as usize) 186 | } 187 | 188 | fn get_u64(reader: &mut T, offset: u64, name: &str) -> Result { 189 | reader 190 | .seek(std::io::SeekFrom::Start(offset)) 191 | .read_err(format!("Cannot seek to {name} offset"))?; 192 | let mut buf = [0_u8; 8]; 193 | reader 194 | .read_exact(&mut buf) 195 | .read_err(format!("Cannot read {name} bytes"))?; 196 | Ok(u64::from_le_bytes(buf)) 197 | } 198 | 199 | fn extract_xml(reader: &mut PagedReader, offset: u64, length: usize) -> Result> { 200 | if length > MAX_XML_SIZE { 201 | Error::not_implemented(format!( 202 | "XML sections larger than {MAX_XML_SIZE} bytes are not supported" 203 | ))? 204 | } 205 | reader 206 | .seek_physical(offset) 207 | .read_err("Cannot seek to XML offset")?; 208 | let mut xml = vec![0_u8; length]; 209 | reader 210 | .read_exact(&mut xml) 211 | .read_err("Failed to read XML data")?; 212 | Ok(xml) 213 | } 214 | } 215 | 216 | impl E57Reader> { 217 | /// Creates an E57 instance from a Path. 
218 | pub fn from_file(path: impl AsRef) -> Result { 219 | let file = File::open(path).read_err("Unable to open file")?; 220 | let reader = BufReader::new(file); 221 | Self::new(reader) 222 | } 223 | } 224 | -------------------------------------------------------------------------------- /src/e57_writer.rs: -------------------------------------------------------------------------------- 1 | use crate::error::Converter; 2 | use crate::paged_writer::PagedWriter; 3 | use crate::pc_writer::PointCloudWriter; 4 | use crate::root::{serialize_root, Root}; 5 | use crate::{ 6 | Blob, DateTime, Error, Extension, Header, Image, ImageWriter, PointCloud, Record, Result, 7 | }; 8 | use std::fs::{File, OpenOptions}; 9 | use std::io::{Read, Seek, Write}; 10 | use std::path::Path; 11 | 12 | /// Main interface for creating and writing E57 files. 13 | pub struct E57Writer { 14 | pub(crate) writer: PagedWriter, 15 | pub(crate) pointclouds: Vec, 16 | extensions: Vec, 17 | images: Vec, 18 | root: Root, 19 | } 20 | 21 | impl E57Writer { 22 | /// Creates a new E57 generator from a writer that must also implement Read and Seek. 23 | /// 24 | /// `File::create()` will not work as input because it only opens the file for writing. 25 | /// Most typical use cases should prefer `E57Writer::from_file()` over this constructor. 
26 | pub fn new(writer: T, guid: &str) -> Result { 27 | // Set up paged writer abstraction for CRC 28 | let mut writer = PagedWriter::new(writer)?; 29 | 30 | // Write placeholder header that will be replaced later 31 | let header = Header::default(); 32 | header.write(&mut writer)?; 33 | 34 | let version = env!("CARGO_PKG_VERSION"); 35 | let library_version = Some(format!( 36 | "Rust E57 Library v{version} github.com/cry-inc/e57" 37 | )); 38 | let root = Root { 39 | guid: guid.to_owned(), 40 | library_version, 41 | ..Default::default() 42 | }; 43 | 44 | Ok(Self { 45 | writer, 46 | pointclouds: Vec::new(), 47 | images: Vec::new(), 48 | extensions: Vec::new(), 49 | root, 50 | }) 51 | } 52 | 53 | /// Set optional coordinate metadata string (empty by default). 54 | pub fn set_coordinate_metadata(&mut self, value: Option) { 55 | self.root.coordinate_metadata = value; 56 | } 57 | 58 | /// Set optional creation date time (empty by default). 59 | pub fn set_creation(&mut self, value: Option) { 60 | self.root.creation = value; 61 | } 62 | 63 | /// Creates a new writer for adding a new point cloud to the E57 file. 64 | pub fn add_pointcloud( 65 | &mut self, 66 | guid: &str, 67 | prototype: Vec, 68 | ) -> Result> { 69 | Extension::validate_prototype(&prototype, &self.extensions)?; 70 | PointCloudWriter::new(&mut self.writer, &mut self.pointclouds, guid, prototype) 71 | } 72 | 73 | /// Adds a new binary data section to the E57 file. 74 | /// This feature is only required for custom data and extensions! 75 | pub fn add_blob(&mut self, reader: &mut dyn Read) -> Result { 76 | Blob::write(&mut self.writer, reader) 77 | } 78 | 79 | /// Creates a new image writer for adding an image to the E57 file. 80 | pub fn add_image(&mut self, guid: &str) -> Result> { 81 | ImageWriter::new(&mut self.writer, &mut self.images, guid) 82 | } 83 | 84 | /// Registers a new E57 extension used by this file. 
85 | pub fn register_extension(&mut self, extension: Extension) -> Result<()> { 86 | Extension::validate_name(&extension.namespace)?; 87 | if self 88 | .extensions 89 | .iter() 90 | .any(|e| e.namespace == extension.namespace) 91 | { 92 | let ns = &extension.namespace; 93 | Error::invalid(format!( 94 | "An extension using the namespace {ns} is already registered" 95 | ))? 96 | } else { 97 | self.extensions.push(extension); 98 | Ok(()) 99 | } 100 | } 101 | 102 | /// Needs to be called after adding all point clouds and images. 103 | /// 104 | /// This will generate and write the XML metadata to finalize and complete the E57 file. 105 | /// Without calling this method before dropping the E57 file will be incomplete and invalid! 106 | pub fn finalize(&mut self) -> Result<()> { 107 | self.finalize_customized_xml(Ok) 108 | } 109 | 110 | /// Same as `finalize()` but with additional XML transformation step. 111 | /// 112 | /// Allows customizing the XML data before its written into the E57 file. 113 | /// This is required for adding E57 extension data to the XML. 114 | /// The transformer receives an XML string and must return an XML string. 115 | /// The client is responsible for parsing, modifying and serializing th XML again in a non-destructive way. 116 | /// The E57 library will not validate the XML string before writing it into the E57 file! 117 | /// If the transformer fails, the finalization is aborted and any error is forwarded. 
118 | pub fn finalize_customized_xml( 119 | &mut self, 120 | transformer: impl Fn(String) -> Result, 121 | ) -> Result<()> { 122 | let xml = serialize_root( 123 | &self.root, 124 | &self.pointclouds, 125 | &self.images, 126 | &self.extensions, 127 | )?; 128 | let xml = transformer(xml)?; 129 | let xml_bytes = xml.as_bytes(); 130 | let xml_length = xml_bytes.len(); 131 | let xml_offset = self.writer.physical_position()?; 132 | self.writer 133 | .write_all(xml_bytes) 134 | .write_err("Failed to write XML data")?; 135 | let phys_length = self.writer.physical_size()?; 136 | 137 | // Add missing values in header at start of the the file 138 | let header = Header { 139 | phys_xml_offset: xml_offset, 140 | xml_length: xml_length as u64, 141 | phys_length, 142 | ..Default::default() 143 | }; 144 | self.writer.physical_seek(0)?; 145 | header.write(&mut self.writer)?; 146 | self.writer 147 | .flush() 148 | .write_err("Failed to flush writer at the end") 149 | } 150 | } 151 | 152 | impl E57Writer { 153 | /// Creates an E57 writer instance from a Path. 154 | pub fn from_file(path: impl AsRef, guid: &str) -> Result { 155 | let file = OpenOptions::new() 156 | .create(true) 157 | .write(true) 158 | .read(true) 159 | .truncate(true) 160 | .open(path) 161 | .read_err("Unable to create file for writing, reading and seeking")?; 162 | Self::new(file, guid) 163 | } 164 | } 165 | -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | use std::convert::Infallible; 2 | use std::error::Error as StdError; 3 | use std::fmt::Result as FmtResult; 4 | use std::fmt::{Display, Formatter}; 5 | use std::result::Result as StdResult; 6 | 7 | /// To be used as error message when extracting stuff from arrays that should never fail 8 | pub const WRONG_OFFSET: &str = "Wrong buffer offset detected"; 9 | 10 | /// Possible errors that can occur while working with E57 files. 
#[derive(Debug)]
#[non_exhaustive]
pub enum Error {
    /// The E57 you are reading or creating is invalid and does not conform with the E57 format specification.
    Invalid {
        desc: String,
        source: Option<Box<dyn StdError + Send + Sync>>,
    },

    /// Something went wrong while reading data from an E57 file.
    /// Typically this is caused by an IO error outside the library or because of an incomplete file.
    Read {
        desc: String,
        source: Option<Box<dyn StdError + Send + Sync>>,
    },

    /// Something went wrong while writing data to an E57 file.
    /// Typically this is caused by an IO error outside the library.
    Write {
        desc: String,
        source: Option<Box<dyn StdError + Send + Sync>>,
    },

    /// Some feature or aspect of E57 that is not yet implemented by this library.
    /// Feel free to create feature request issues here: <https://github.com/cry-inc/e57/issues>
    NotImplemented { desc: String },

    /// An unexpected internal issue occurred.
    /// Most likely this is a logic bug inside the library.
    /// Please file an issue on GitHub, if possible.
    Internal {
        desc: String,
        source: Option<Box<dyn StdError + Send + Sync>>,
    },
}

impl Error {
    /// Creates a new Invalid error.
    pub fn invalid<T, C>(desc: C) -> Result<T>
    where
        C: Display + Send + Sync + 'static,
    {
        Err(Error::Invalid {
            desc: desc.to_string(),
            source: None,
        })
    }

    /// Creates a new `NotImplemented` error.
    pub fn not_implemented<T, C>(desc: C) -> Result<T>
    where
        C: Display + Send + Sync + 'static,
    {
        Err(Error::NotImplemented {
            desc: desc.to_string(),
        })
    }

    /// Creates a new Internal error.
    pub fn internal<T, C>(desc: C) -> Result<T>
    where
        C: Display + Send + Sync + 'static,
    {
        Err(Error::Internal {
            desc: desc.to_string(),
            source: None,
        })
    }
}

impl Display for Error {
    fn fmt(&self, f: &mut Formatter) -> FmtResult {
        match self {
            Error::Invalid { desc, .. } => write!(f, "Invalid E57 content: {desc}"),
            Error::Read { desc, .. } => write!(f, "Failed to read E57: {desc}"),
            Error::Internal { desc, .. } => write!(f, "Internal error: {desc}"),
            Error::NotImplemented { desc } => write!(f, "Not implemented: {desc}"),
            Error::Write { desc, .. } => write!(f, "Failed to write E57: {desc}"),
        }
    }
}

impl StdError for Error {
    fn source(&self) -> Option<&(dyn StdError + 'static)> {
        // All variants with a source field share the same extraction logic,
        // so a single or-pattern arm replaces four identical ones.
        match self {
            Error::Invalid { source, .. }
            | Error::Read { source, .. }
            | Error::Internal { source, .. }
            | Error::Write { source, .. } => source
                .as_ref()
                .map(|s| s.as_ref() as &(dyn StdError + 'static)),
            Error::NotImplemented { .. } => None,
        }
    }
}

/// Custom result type hardwired to use the Error type of this crate.
pub type Result<T> = StdResult<T, Error>;

/// Helper trait for types that can be converted into an Error.
pub trait Converter<T> {
    fn read_err<C>(self, context: C) -> Result<T>
    where
        C: Display + Send + Sync + 'static;

    fn invalid_err<C>(self, context: C) -> Result<T>
    where
        C: Display + Send + Sync + 'static;

    fn internal_err<C>(self, context: C) -> Result<T>
    where
        C: Display + Send + Sync + 'static;

    fn write_err<C>(self, context: C) -> Result<T>
    where
        C: Display + Send + Sync + 'static;
}

// Create a library Error from std Error instances.
136 | impl Converter for StdResult 137 | where 138 | E: StdError + Send + Sync + 'static, 139 | { 140 | fn read_err(self, desc: C) -> Result 141 | where 142 | C: Display + Send + Sync + 'static, 143 | { 144 | match self { 145 | Ok(ok) => Ok(ok), 146 | Err(error) => Err(Error::Read { 147 | desc: desc.to_string(), 148 | source: Some(Box::new(error)), 149 | }), 150 | } 151 | } 152 | 153 | fn invalid_err(self, desc: C) -> Result 154 | where 155 | C: Display + Send + Sync + 'static, 156 | { 157 | match self { 158 | Ok(ok) => Ok(ok), 159 | Err(error) => Err(Error::Invalid { 160 | desc: desc.to_string(), 161 | source: Some(Box::new(error)), 162 | }), 163 | } 164 | } 165 | 166 | fn internal_err(self, desc: C) -> Result 167 | where 168 | C: Display + Send + Sync + 'static, 169 | { 170 | match self { 171 | Ok(ok) => Ok(ok), 172 | Err(error) => Err(Error::Internal { 173 | desc: desc.to_string(), 174 | source: Some(Box::new(error)), 175 | }), 176 | } 177 | } 178 | 179 | fn write_err(self, desc: C) -> Result 180 | where 181 | C: Display + Send + Sync + 'static, 182 | { 183 | match self { 184 | Ok(ok) => Ok(ok), 185 | Err(error) => Err(Error::Write { 186 | desc: desc.to_string(), 187 | source: Some(Box::new(error)), 188 | }), 189 | } 190 | } 191 | } 192 | 193 | /// Create an library Error from Option instances. 
194 | impl Converter for Option { 195 | fn read_err(self, desc: C) -> Result 196 | where 197 | C: Display + Send + Sync + 'static, 198 | { 199 | match self { 200 | Some(ok) => Ok(ok), 201 | None => Err(Error::Read { 202 | desc: desc.to_string(), 203 | source: None, 204 | }), 205 | } 206 | } 207 | 208 | fn invalid_err(self, desc: C) -> Result 209 | where 210 | C: Display + Send + Sync + 'static, 211 | { 212 | match self { 213 | Some(ok) => Ok(ok), 214 | None => Err(Error::Invalid { 215 | desc: desc.to_string(), 216 | source: None, 217 | }), 218 | } 219 | } 220 | 221 | fn internal_err(self, desc: C) -> Result 222 | where 223 | C: Display + Send + Sync + 'static, 224 | { 225 | match self { 226 | Some(ok) => Ok(ok), 227 | None => Err(Error::Internal { 228 | desc: desc.to_string(), 229 | source: None, 230 | }), 231 | } 232 | } 233 | 234 | fn write_err(self, desc: C) -> Result 235 | where 236 | C: Display + Send + Sync + 'static, 237 | { 238 | match self { 239 | Some(ok) => Ok(ok), 240 | None => Err(Error::Write { 241 | desc: desc.to_string(), 242 | source: None, 243 | }), 244 | } 245 | } 246 | } 247 | -------------------------------------------------------------------------------- /src/extension.rs: -------------------------------------------------------------------------------- 1 | use crate::{Error, Record, RecordName, Result}; 2 | use roxmltree::Document; 3 | 4 | /// Describes an E57 extension by name and URL. 5 | /// 6 | /// The E57 specification includes an mechanism for extensions. 7 | /// Each extension has its own namespace in the XML section of the E57 file. 8 | /// Such extensions can for example specify custom point attributes or 9 | /// add additional metadata and custom binary blobs. 10 | /// 11 | /// Every E57 parser must be able to ignore any unknown extensions. 12 | /// Some extensions are officially documented, 13 | /// others are proprietary and have no public documentation. 
14 | /// 15 | /// Since full extension support involves all kinds of XML operations, 16 | /// it can greatly increase the API of any E57 library. 17 | /// This library is using a more pragmatic approach and requires you to bring your own XML library. 18 | /// This allows the API of this library to stay small, focused and lightweight. 19 | /// 20 | /// Extension features directly supported by this library are: 21 | /// * reading and defining of XML namespaces for extensions 22 | /// * reading and writing additional custom point attributes 23 | /// * reading and writing of binary blobs 24 | /// 25 | /// Extensions that require specific XML parsing are possible. 26 | /// You need to load your E57 file and then call `E57Reader::xml()` method to get the full original XML string. 27 | /// This will return an UTF8 string that can be feed into an XML parser. 28 | /// This library is using `roxmltree` for lightweight XML parsing. 29 | /// 30 | /// Extensions that require XML manipulation when writing E57 files are also possible. 31 | /// You need to first finishing writing all point clouds, images and binary blobs. 32 | /// Then when you are ready to call `E57Writer::finalize()` to write the XML section and close the file, 33 | /// you need to call `E57Writer::finalize_customized_xml()` instead. 34 | /// This allows you to supply a transformer that will receive the generated XML string 35 | /// and can manipulate it before its written into the file. 36 | /// Your code is responsible for parsing, modifying and serializing th XML again in a non-destructive way! 37 | /// 38 | /// # Example Code 39 | /// You can find a 40 | /// complete example for reading and writing E57 files with extensions in the automated tests of the library. 41 | #[derive(Clone, Debug)] 42 | pub struct Extension { 43 | /// XML namespace name. 44 | pub namespace: String, 45 | /// XML namespace URL. 
46 | pub url: String, 47 | } 48 | 49 | impl Extension { 50 | /// Intialize and return a new Extension structure with the given values. 51 | pub fn new(namespace: &str, url: &str) -> Self { 52 | Self { 53 | namespace: namespace.to_owned(), 54 | url: url.to_owned(), 55 | } 56 | } 57 | 58 | pub(crate) fn vec_from_document(document: &Document) -> Vec { 59 | let mut extensions = Vec::new(); 60 | for item in document.root_element().namespaces() { 61 | if let Some(name) = item.name() { 62 | extensions.push(Extension { 63 | namespace: name.to_string(), 64 | url: item.uri().to_string(), 65 | }); 66 | } 67 | } 68 | extensions 69 | } 70 | 71 | pub(crate) fn validate_prototype(prototype: &[Record], extensions: &[Extension]) -> Result<()> { 72 | for record in prototype { 73 | if let RecordName::Unknown { namespace, name } = &record.name { 74 | Self::validate_name(namespace)?; 75 | Self::validate_name(name)?; 76 | if !extensions.iter().any(|e| &e.namespace == namespace) { 77 | Error::invalid(format!( 78 | "Cannot find extension namespace {namespace} used by attribute {name}, please register extension first" 79 | ))? 80 | } 81 | } 82 | } 83 | Ok(()) 84 | } 85 | 86 | pub(crate) fn validate_name(name: &str) -> Result<()> { 87 | if name.is_empty() { 88 | Error::invalid("Strings used as XML namespaces or attributes must not be empty")? 89 | } 90 | if name.to_lowercase().starts_with("xml") { 91 | Error::invalid(format!( 92 | "Strings used as XML namespaces or attributes must not start with 'XML': {name}" 93 | ))? 94 | } 95 | let valid_chars = name 96 | .chars() 97 | .all(|c| c.is_ascii_alphanumeric() || (c == '_') || (c == '-')); 98 | if !valid_chars { 99 | Error::invalid( 100 | format!("Strings used as XML namespaces or attributes should consist only of a-z, A-Z, 0-9, dashes and underscores: '{name}'"), 101 | )? 
102 | } 103 | Ok(()) 104 | } 105 | } 106 | 107 | #[cfg(test)] 108 | mod tests { 109 | use super::*; 110 | 111 | #[test] 112 | fn validate_name() { 113 | assert!(Extension::validate_name("abcz").is_ok()); 114 | assert!(Extension::validate_name("ABCZ").is_ok()); 115 | assert!(Extension::validate_name("0129").is_ok()); 116 | assert!(Extension::validate_name("-_-").is_ok()); 117 | assert!(Extension::validate_name("aBC-DEf-Z_09").is_ok()); 118 | 119 | assert!(Extension::validate_name("xmlabc").is_err()); 120 | assert!(Extension::validate_name("XMLabc").is_err()); 121 | assert!(Extension::validate_name("axml").is_ok()); 122 | 123 | assert!(Extension::validate_name("abc.").is_err()); 124 | assert!(Extension::validate_name("äüöß").is_err()); 125 | 126 | assert!(Extension::validate_name("").is_err()); 127 | } 128 | } 129 | -------------------------------------------------------------------------------- /src/header.rs: -------------------------------------------------------------------------------- 1 | use crate::error::Converter; 2 | use crate::error::WRONG_OFFSET; 3 | use crate::Error; 4 | use crate::Result; 5 | use std::io::Read; 6 | use std::io::Write; 7 | 8 | const SIGNATURE: &[u8; 8] = b"ASTM-E57"; 9 | const MAJOR_VERSION: u32 = 1; 10 | const MINOR_VERSION: u32 = 0; 11 | const PAGE_SIZE: u64 = 1024; 12 | 13 | /// Represents the file structure from the start of an E57 file. 14 | #[derive(Clone, Debug)] 15 | #[non_exhaustive] 16 | pub struct Header { 17 | /// File header signature that must be always "ASTM-E57". 18 | pub signature: [u8; 8], 19 | 20 | /// Major version number of the E57 format of the file. 21 | pub major: u32, 22 | 23 | /// Minor version number of the E57 format of the file. 24 | pub minor: u32, 25 | 26 | /// Physical length of the E57 file on disk or in memory. 27 | pub phys_length: u64, 28 | 29 | /// Physical offset of the XML data inside the XML file. 30 | pub phys_xml_offset: u64, 31 | 32 | /// Logical (without CRC bytes) length of the XML data. 
33 | pub xml_length: u64, 34 | 35 | /// Page size of the E57 file. 36 | pub page_size: u64, 37 | } 38 | 39 | impl Header { 40 | /// Reads an E57 file header structure. 41 | pub fn read(reader: &mut dyn Read) -> Result { 42 | let mut data = [0_u8; 48]; 43 | reader 44 | .read_exact(&mut data) 45 | .read_err("Failed to read E57 file header")?; 46 | 47 | let header = Header { 48 | signature: data[0..8].try_into().internal_err(WRONG_OFFSET)?, 49 | major: u32::from_le_bytes(data[8..12].try_into().internal_err(WRONG_OFFSET)?), 50 | minor: u32::from_le_bytes(data[12..16].try_into().internal_err(WRONG_OFFSET)?), 51 | phys_length: u64::from_le_bytes(data[16..24].try_into().internal_err(WRONG_OFFSET)?), 52 | phys_xml_offset: u64::from_le_bytes( 53 | data[24..32].try_into().internal_err(WRONG_OFFSET)?, 54 | ), 55 | xml_length: u64::from_le_bytes(data[32..40].try_into().internal_err(WRONG_OFFSET)?), 56 | page_size: u64::from_le_bytes(data[40..48].try_into().internal_err(WRONG_OFFSET)?), 57 | }; 58 | 59 | if &header.signature != SIGNATURE { 60 | Error::invalid("Found unsupported signature in header")? 61 | } 62 | if header.major != MAJOR_VERSION { 63 | Error::invalid("Found unsupported major version in header")? 64 | } 65 | if header.minor != MINOR_VERSION { 66 | Error::invalid("Found unsupported minor version in header")? 67 | } 68 | if header.page_size != PAGE_SIZE { 69 | Error::invalid("Found unsupported page size in header")? 
70 | } 71 | 72 | Ok(header) 73 | } 74 | 75 | pub fn write(&self, writer: &mut dyn Write) -> Result<()> { 76 | writer 77 | .write_all(&self.signature) 78 | .write_err("Failed to write file header signature")?; 79 | writer 80 | .write_all(&self.major.to_le_bytes()) 81 | .write_err("Failed to write file header major version")?; 82 | writer 83 | .write_all(&self.minor.to_le_bytes()) 84 | .write_err("Failed to write file header minor version")?; 85 | writer 86 | .write_all(&self.phys_length.to_le_bytes()) 87 | .write_err("Failed to write file length in file header")?; 88 | writer 89 | .write_all(&self.phys_xml_offset.to_le_bytes()) 90 | .write_err("Failed to write XML offset in file header")?; 91 | writer 92 | .write_all(&self.xml_length.to_le_bytes()) 93 | .write_err("Failed to write XML length in file header")?; 94 | writer 95 | .write_all(&self.page_size.to_le_bytes()) 96 | .write_err("Failed to write page size in file header")?; 97 | Ok(()) 98 | } 99 | } 100 | 101 | impl Default for Header { 102 | fn default() -> Self { 103 | Self { 104 | signature: *SIGNATURE, 105 | major: MAJOR_VERSION, 106 | minor: MINOR_VERSION, 107 | phys_length: 0, 108 | phys_xml_offset: 0, 109 | xml_length: 0, 110 | page_size: PAGE_SIZE, 111 | } 112 | } 113 | } 114 | -------------------------------------------------------------------------------- /src/image_writer.rs: -------------------------------------------------------------------------------- 1 | use crate::paged_writer::PagedWriter; 2 | use crate::Blob; 3 | use crate::CylindricalImage; 4 | use crate::CylindricalImageProperties; 5 | use crate::DateTime; 6 | use crate::Error; 7 | use crate::Image; 8 | use crate::ImageBlob; 9 | use crate::ImageFormat; 10 | use crate::PinholeImage; 11 | use crate::PinholeImageProperties; 12 | use crate::Projection; 13 | use crate::Result; 14 | use crate::SphericalImage; 15 | use crate::SphericalImageProperties; 16 | use crate::Transform; 17 | use crate::VisualReferenceImage; 18 | use 
crate::VisualReferenceImageProperties; 19 | use std::io::{Read, Seek, Write}; 20 | 21 | /// Defines a new image and writes it into an E57 file. 22 | pub struct ImageWriter<'a, T: Read + Write + Seek> { 23 | writer: &'a mut PagedWriter, 24 | images: &'a mut Vec, 25 | image: Image, 26 | } 27 | 28 | impl<'a, T: Read + Write + Seek> ImageWriter<'a, T> { 29 | pub(crate) fn new( 30 | writer: &'a mut PagedWriter, 31 | images: &'a mut Vec, 32 | guid: &str, 33 | ) -> Result { 34 | Ok(Self { 35 | writer, 36 | images, 37 | image: Image { 38 | guid: Some(guid.to_owned()), 39 | visual_reference: None, 40 | projection: None, 41 | transform: None, 42 | pointcloud_guid: None, 43 | name: None, 44 | description: None, 45 | acquisition: None, 46 | sensor_vendor: None, 47 | sensor_model: None, 48 | sensor_serial: None, 49 | }, 50 | }) 51 | } 52 | 53 | /// Set optional user-defined name for the image. 54 | /// Not set by default. 55 | pub fn set_name(&mut self, value: &str) { 56 | self.image.name = Some(value.to_owned()); 57 | } 58 | 59 | /// Set optional user-defined description for the image. 60 | /// Not set by default. 61 | pub fn set_description(&mut self, value: &str) { 62 | self.image.description = Some(value.to_owned()); 63 | } 64 | 65 | /// Set optional GUID of the point cloud that is connected to this image. 66 | /// Not set by default. 67 | pub fn set_pointcloud_guid(&mut self, value: &str) { 68 | self.image.pointcloud_guid = Some(value.to_owned()); 69 | } 70 | 71 | /// Set optional transformation to convert data from the local 72 | /// image coordinates to the file-level coordinate system. 73 | /// By default this is not set, meaning the image has no transformation. 74 | pub fn set_transform(&mut self, value: Transform) { 75 | self.image.transform = Some(value); 76 | } 77 | 78 | /// Set optional start date and time when the images was captured. 79 | /// Not set by default. 
80 | pub fn set_acquisition(&mut self, value: DateTime) { 81 | self.image.acquisition = Some(value); 82 | } 83 | 84 | /// Set optional name of the manufacturer for the sensor used to capture the image. 85 | /// Not set by default. 86 | pub fn set_sensor_vendor(&mut self, value: &str) { 87 | self.image.sensor_vendor = Some(value.to_owned()); 88 | } 89 | 90 | /// Set optional model name of the sensor used for capturing the image. 91 | /// Not set by default. 92 | pub fn set_sensor_model(&mut self, value: &str) { 93 | self.image.sensor_model = Some(value.to_owned()); 94 | } 95 | 96 | /// Set optional serial number of the sensor used for capturing the image. 97 | /// Not set by default. 98 | pub fn set_sensor_serial(&mut self, value: &str) { 99 | self.image.sensor_serial = Some(value.to_owned()); 100 | } 101 | 102 | /// Adds an optional visual reference image, also known as preview image. 103 | /// See also `VisualReferenceImageProperties` struct for more details. 104 | /// The optional PNG mask image can be used to indicate valid/invalid 105 | /// pixels in the image, for example if the image is not rectangular. 106 | /// The mask must have the same size as the actual image. 107 | /// Non-zero-valued pixels mark valid pixel locations and 108 | /// zero-valued pixels mark invalid pixels. 109 | pub fn add_visual_reference( 110 | &mut self, 111 | format: ImageFormat, 112 | image: &mut dyn Read, 113 | properties: VisualReferenceImageProperties, 114 | mask: Option<&mut dyn Read>, 115 | ) -> Result<()> { 116 | let data = Blob::write(self.writer, image)?; 117 | let blob = ImageBlob { data, format }; 118 | let mask = if let Some(mask_data) = mask { 119 | Some(Blob::write(self.writer, mask_data)?) 120 | } else { 121 | None 122 | }; 123 | self.image.visual_reference = Some(VisualReferenceImage { 124 | properties, 125 | mask, 126 | blob, 127 | }); 128 | Ok(()) 129 | } 130 | 131 | /// Adds pinhole image data. 
132 | /// Width and height must match the actual binary PNG or JPEG image. 133 | /// See also `PinholeImageProperties` struct for more details. 134 | /// The optional PNG mask image can be used to indicate valid/invalid 135 | /// pixels in the image, for example if the image is not rectangular. 136 | /// The mask must have the same size as the actual image. 137 | /// Non-zero-valued pixels mark valid pixel locations and 138 | /// zero-valued pixels mark invalid pixels. 139 | pub fn add_pinhole( 140 | &mut self, 141 | format: ImageFormat, 142 | image: &mut dyn Read, 143 | properties: PinholeImageProperties, 144 | mask: Option<&mut dyn Read>, 145 | ) -> Result<()> { 146 | if self.image.projection.is_some() { 147 | Error::invalid("A projected image is already set")? 148 | } 149 | let data = Blob::write(self.writer, image)?; 150 | let blob = ImageBlob { data, format }; 151 | let mask = if let Some(mask_data) = mask { 152 | Some(Blob::write(self.writer, mask_data)?) 153 | } else { 154 | None 155 | }; 156 | let rep = PinholeImage { 157 | blob, 158 | mask, 159 | properties, 160 | }; 161 | self.image.projection = Some(Projection::Pinhole(rep)); 162 | Ok(()) 163 | } 164 | 165 | /// Adds spherical image data. 166 | /// See also `SphericalImageProperties` struct for more details. 167 | /// The optional PNG mask image can be used to indicate valid/invalid 168 | /// pixels in the image, for example if the image is not rectangular. 169 | /// The mask must have the same size as the actual image. 170 | /// Non-zero-valued pixels mark valid pixel locations and 171 | /// zero-valued pixels mark invalid pixels. 172 | pub fn add_spherical( 173 | &mut self, 174 | format: ImageFormat, 175 | image: &mut dyn Read, 176 | properties: SphericalImageProperties, 177 | mask: Option<&mut dyn Read>, 178 | ) -> Result<()> { 179 | if self.image.projection.is_some() { 180 | Error::invalid("A projected image is already set")? 
181 | } 182 | let data = Blob::write(self.writer, image)?; 183 | let blob = ImageBlob { data, format }; 184 | let mask = if let Some(mask_data) = mask { 185 | Some(Blob::write(self.writer, mask_data)?) 186 | } else { 187 | None 188 | }; 189 | let rep = SphericalImage { 190 | blob, 191 | mask, 192 | properties, 193 | }; 194 | self.image.projection = Some(Projection::Spherical(rep)); 195 | Ok(()) 196 | } 197 | 198 | /// Adds cylindrical image data. 199 | /// See also `CylindricalImageProperties` struct for more details. 200 | /// The optional PNG mask image can be used to indicate valid/invalid 201 | /// pixels in the image, for example if the image is not rectangular. 202 | /// The mask must have the same size as the actual image. 203 | /// Non-zero-valued pixels mark valid pixel locations and 204 | /// zero-valued pixels mark invalid pixels. 205 | pub fn add_cylindrical( 206 | &mut self, 207 | format: ImageFormat, 208 | image_data: &mut dyn Read, 209 | properties: CylindricalImageProperties, 210 | mask_data: Option<&mut dyn Read>, 211 | ) -> Result<()> { 212 | if self.image.projection.is_some() { 213 | Error::invalid("A projected image is already set")? 214 | } 215 | let data = Blob::write(self.writer, image_data)?; 216 | let blob = ImageBlob { data, format }; 217 | let mask = if let Some(mask_data) = mask_data { 218 | Some(Blob::write(self.writer, mask_data)?) 219 | } else { 220 | None 221 | }; 222 | let rep = CylindricalImage { 223 | blob, 224 | mask, 225 | properties, 226 | }; 227 | self.image.projection = Some(Projection::Cylindrical(rep)); 228 | Ok(()) 229 | } 230 | 231 | /// Must be called after image is complete to finishing adding the new image. 232 | /// Binary image and mask data is directly written into the E57 file earlier, 233 | /// but the XML metadata will be only added to the E57 if you call finalize. 
234 | /// Skipping the finalize call after you added image or mask data means 235 | /// that the data will be part of the E57 file but is never referenced by 236 | /// its XML header section. 237 | pub fn finalize(&mut self) -> Result<()> { 238 | if self.image.visual_reference.is_none() && self.image.projection.is_none() { 239 | Error::invalid("Image must have a visual reference or a projection")? 240 | } 241 | 242 | // Add metadata for XML generation later, when the file is completed. 243 | self.images.push(self.image.clone()); 244 | 245 | Ok(()) 246 | } 247 | } 248 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | //! A pure Rust library for reading and writing E57 files without any unsafe code. 2 | //! 3 | //! Some example code can be found [here](https://github.com/cry-inc/e57/tree/master/tools) in the GitHub repository. 4 | //! 5 | //! ### Extensions 6 | //! This library supports reading and writing [extensions](Extension) as defined in the E57 specification. 7 | //! 8 | //! ### Optional Crate Features 9 | //! There is an optional feature called `crc32c`. 10 | //! If enabled, it will include an [external CRC crate](https://crates.io/crates/crc32c) as additional dependency. 11 | //! This crate provides a faster CRC implementation with HW support. 12 | //! It can speed up reading and writing of larger E57 files. 13 | //! The feature is **disabled by default** to keep the number dependencies as small as possible. 
14 | 15 | #![forbid(unsafe_code)] 16 | #![deny( 17 | clippy::unwrap_used, 18 | clippy::expect_used, 19 | clippy::panic, 20 | clippy::large_stack_arrays, 21 | clippy::large_types_passed_by_value, 22 | clippy::doc_markdown, 23 | clippy::cognitive_complexity 24 | )] 25 | 26 | mod bitpack; 27 | mod blob; 28 | mod bounds; 29 | mod bs_read; 30 | mod bs_write; 31 | mod cv_section; 32 | mod date_time; 33 | mod e57_reader; 34 | mod e57_writer; 35 | mod error; 36 | mod extension; 37 | mod header; 38 | mod image_writer; 39 | mod images; 40 | mod limits; 41 | mod packet; 42 | mod paged_reader; 43 | mod paged_writer; 44 | mod pc_reader_raw; 45 | mod pc_reader_simple; 46 | mod pc_writer; 47 | mod point; 48 | mod pointcloud; 49 | mod queue_reader; 50 | mod record; 51 | mod root; 52 | mod transform; 53 | mod xml; 54 | 55 | #[cfg(not(feature = "crc32c"))] 56 | mod crc32; 57 | 58 | // Public types 59 | pub use self::blob::Blob; 60 | pub use self::bounds::CartesianBounds; 61 | pub use self::bounds::IndexBounds; 62 | pub use self::bounds::SphericalBounds; 63 | pub use self::date_time::DateTime; 64 | pub use self::e57_reader::E57Reader; 65 | pub use self::e57_writer::E57Writer; 66 | pub use self::error::Error; 67 | pub use self::error::Result; 68 | pub use self::extension::Extension; 69 | pub use self::header::Header; 70 | pub use self::image_writer::ImageWriter; 71 | pub use self::images::CylindricalImage; 72 | pub use self::images::CylindricalImageProperties; 73 | pub use self::images::Image; 74 | pub use self::images::ImageBlob; 75 | pub use self::images::ImageFormat; 76 | pub use self::images::PinholeImage; 77 | pub use self::images::PinholeImageProperties; 78 | pub use self::images::Projection; 79 | pub use self::images::SphericalImage; 80 | pub use self::images::SphericalImageProperties; 81 | pub use self::images::VisualReferenceImage; 82 | pub use self::images::VisualReferenceImageProperties; 83 | pub use self::limits::ColorLimits; 84 | pub use self::limits::IntensityLimits; 85 
| pub use self::pc_reader_raw::PointCloudReaderRaw; 86 | pub use self::pc_reader_simple::PointCloudReaderSimple; 87 | pub use self::pc_writer::PointCloudWriter; 88 | pub use self::point::CartesianCoordinate; 89 | pub use self::point::Color; 90 | pub use self::point::Point; 91 | pub use self::point::SphericalCoordinate; 92 | pub use self::pointcloud::PointCloud; 93 | pub use self::record::Record; 94 | pub use self::record::RecordDataType; 95 | pub use self::record::RecordName; 96 | pub use self::record::RecordValue; 97 | pub use self::transform::Quaternion; 98 | pub use self::transform::Transform; 99 | pub use self::transform::Translation; 100 | 101 | /// Storage container for low level point data. 102 | pub type RawValues = Vec; 103 | -------------------------------------------------------------------------------- /src/limits.rs: -------------------------------------------------------------------------------- 1 | use crate::error::Converter; 2 | use crate::Error; 3 | use crate::RecordDataType; 4 | use crate::RecordValue; 5 | use crate::Result; 6 | use roxmltree::Node; 7 | 8 | fn extract_limit(bounds: &Node, tag_name: &str) -> Result> { 9 | if let Some(tag) = bounds.descendants().find(|n| n.has_tag_name(tag_name)) { 10 | let type_str = tag 11 | .attribute("type") 12 | .invalid_err(format!("Cannot find type attribute of limit '{tag_name}'"))?; 13 | let value_str = tag.text().unwrap_or("0"); 14 | Ok(match type_str { 15 | "Integer" => Some(RecordValue::Integer( 16 | value_str 17 | .parse::() 18 | .invalid_err("Cannot parse integer limit value")?, 19 | )), 20 | "ScaledInteger" => Some(RecordValue::ScaledInteger( 21 | value_str 22 | .parse::() 23 | .invalid_err("Cannot parse scaled integer limit value")?, 24 | )), 25 | "Float" => { 26 | let single = tag.attribute("precision").unwrap_or("double") == "single"; 27 | if single { 28 | Some(RecordValue::Single( 29 | value_str 30 | .parse::() 31 | .invalid_err("Cannot parse single limit value")?, 32 | )) 33 | } else { 34 | 
Some(RecordValue::Double( 35 | value_str 36 | .parse::() 37 | .invalid_err("Cannot parse double limit value")?, 38 | )) 39 | } 40 | } 41 | _ => Error::not_implemented(format!( 42 | "Found unsupported limit of type '{type_str}' for '{tag_name}'" 43 | ))?, 44 | }) 45 | } else { 46 | Ok(None) 47 | } 48 | } 49 | 50 | /// Optional minimum and maximum values for intensity. 51 | #[derive(Clone, Debug)] 52 | pub struct IntensityLimits { 53 | pub intensity_min: Option, 54 | pub intensity_max: Option, 55 | } 56 | 57 | impl IntensityLimits { 58 | pub(crate) fn from_node(node: &Node) -> Result { 59 | let intensity_min = extract_limit(node, "intensityMinimum")?; 60 | let intensity_max = extract_limit(node, "intensityMaximum")?; 61 | Ok(Self { 62 | intensity_min, 63 | intensity_max, 64 | }) 65 | } 66 | 67 | pub(crate) fn from_record_type(data_type: &RecordDataType) -> Self { 68 | let (intensity_min, intensity_max) = data_type.limits(); 69 | Self { 70 | intensity_min, 71 | intensity_max, 72 | } 73 | } 74 | 75 | pub(crate) fn xml_string(&self) -> String { 76 | let mut xml = String::from("\n"); 77 | if let Some(min) = &self.intensity_min { 78 | xml += &record_value_to_xml("intensityMinimum", min); 79 | } 80 | if let Some(max) = &self.intensity_max { 81 | xml += &record_value_to_xml("intensityMaximum", max); 82 | } 83 | xml += "\n"; 84 | xml 85 | } 86 | } 87 | 88 | /// Optional minimum and maximum values for the colors red, green and blue. 
89 | #[derive(Clone, Debug)] 90 | pub struct ColorLimits { 91 | pub red_min: Option, 92 | pub red_max: Option, 93 | pub green_min: Option, 94 | pub green_max: Option, 95 | pub blue_min: Option, 96 | pub blue_max: Option, 97 | } 98 | 99 | impl ColorLimits { 100 | pub(crate) fn from_node(node: &Node) -> Result { 101 | let red_min = extract_limit(node, "colorRedMinimum")?; 102 | let red_max = extract_limit(node, "colorRedMaximum")?; 103 | let green_min = extract_limit(node, "colorGreenMinimum")?; 104 | let green_max = extract_limit(node, "colorGreenMaximum")?; 105 | let blue_min = extract_limit(node, "colorBlueMinimum")?; 106 | let blue_max = extract_limit(node, "colorBlueMaximum")?; 107 | Ok(Self { 108 | red_min, 109 | red_max, 110 | green_min, 111 | green_max, 112 | blue_min, 113 | blue_max, 114 | }) 115 | } 116 | 117 | pub(crate) fn from_record_types( 118 | red: &RecordDataType, 119 | green: &RecordDataType, 120 | blue: &RecordDataType, 121 | ) -> Self { 122 | let (red_min, red_max) = red.limits(); 123 | let (green_min, green_max) = green.limits(); 124 | let (blue_min, blue_max) = blue.limits(); 125 | Self { 126 | red_min, 127 | red_max, 128 | green_min, 129 | green_max, 130 | blue_min, 131 | blue_max, 132 | } 133 | } 134 | 135 | pub(crate) fn xml_string(&self) -> String { 136 | let mut xml = String::from("\n"); 137 | if let Some(min) = &self.red_min { 138 | xml += &record_value_to_xml("colorRedMinimum", min); 139 | } 140 | if let Some(max) = &self.red_max { 141 | xml += &record_value_to_xml("colorRedMaximum", max); 142 | } 143 | if let Some(min) = &self.green_min { 144 | xml += &record_value_to_xml("colorGreenMinimum", min); 145 | } 146 | if let Some(max) = &self.green_max { 147 | xml += &record_value_to_xml("colorGreenMaximum", max); 148 | } 149 | if let Some(min) = &self.blue_min { 150 | xml += &record_value_to_xml("colorBlueMinimum", min); 151 | } 152 | if let Some(max) = &self.blue_max { 153 | xml += &record_value_to_xml("colorBlueMaximum", max); 154 | } 155 | 
xml += "\n"; 156 | xml 157 | } 158 | } 159 | 160 | /// Converts a record value to a XML limit tag with the correct type 161 | fn record_value_to_xml(tag_name: &str, value: &RecordValue) -> String { 162 | match value { 163 | RecordValue::Integer(value) => { 164 | format!("<{tag_name} type=\"Integer\">{value}\n") 165 | } 166 | RecordValue::ScaledInteger(value) => { 167 | format!("<{tag_name} type=\"ScaledInteger\">{value}\n") 168 | } 169 | RecordValue::Single(value) => { 170 | format!("<{tag_name} type=\"Float\" precision=\"single\">{value}\n") 171 | } 172 | RecordValue::Double(value) => format!("<{tag_name} type=\"Float\">{value}\n"), 173 | } 174 | } 175 | -------------------------------------------------------------------------------- /src/packet.rs: -------------------------------------------------------------------------------- 1 | use crate::error::{Converter, WRONG_OFFSET}; 2 | use crate::{Error, Result}; 3 | use std::io::{Read, Write}; 4 | 5 | pub enum PacketHeader { 6 | Index(IndexPacketHeader), 7 | Data(DataPacketHeader), 8 | Ignored(IgnoredPacketHeader), 9 | } 10 | 11 | impl PacketHeader { 12 | pub fn read(reader: &mut dyn Read) -> Result { 13 | // Read only first byte of header to indetify packet type 14 | let mut buffer = [0_u8; 1]; 15 | reader 16 | .read_exact(&mut buffer) 17 | .read_err("Failed to read packet type ID")?; 18 | 19 | if buffer[0] == IndexPacketHeader::ID { 20 | Ok(PacketHeader::Index(IndexPacketHeader::read(reader)?)) 21 | } else if buffer[0] == DataPacketHeader::ID { 22 | Ok(PacketHeader::Data(DataPacketHeader::read(reader)?)) 23 | } else if buffer[0] == IgnoredPacketHeader::ID { 24 | Ok(PacketHeader::Ignored(IgnoredPacketHeader::read(reader)?)) 25 | } else { 26 | Error::invalid("Found unknown packet ID when trying to read packet header")? 
27 | } 28 | } 29 | } 30 | 31 | pub struct IndexPacketHeader { 32 | pub packet_length: u64, 33 | } 34 | 35 | impl IndexPacketHeader { 36 | pub const ID: u8 = 0; 37 | 38 | pub fn read(reader: &mut dyn Read) -> Result { 39 | let mut buffer = [0_u8; 15]; 40 | reader 41 | .read_exact(&mut buffer) 42 | .read_err("Failed to read index packet header")?; 43 | 44 | // Check reserved values in second and last eight bytes of header 45 | if buffer[0] != 0 { 46 | Error::invalid("The reserved bytes inside an index packet must be zero")? 47 | } 48 | for value in buffer.iter().skip(7) { 49 | if *value != 0 { 50 | Error::invalid("The reserved bytes inside an index packet must be zero")? 51 | } 52 | } 53 | 54 | // Parse values 55 | let packet_length = 56 | u16::from_le_bytes(buffer[1..3].try_into().internal_err(WRONG_OFFSET)?) as u64 + 1; 57 | 58 | // Currently unused header fields 59 | let _entry_count = u16::from_le_bytes(buffer[3..5].try_into().internal_err(WRONG_OFFSET)?); 60 | let _index_level = buffer[5]; 61 | 62 | // Validate length 63 | if packet_length % 4 != 0 { 64 | Error::invalid("Index packet length is not aligned and a multiple of four")? 65 | } 66 | 67 | Ok(Self { packet_length }) 68 | } 69 | } 70 | 71 | pub struct DataPacketHeader { 72 | pub comp_restart_flag: bool, 73 | pub packet_length: u64, 74 | pub bytestream_count: u16, 75 | } 76 | 77 | impl DataPacketHeader { 78 | pub const ID: u8 = 1; 79 | 80 | pub const SIZE: usize = 6; 81 | 82 | pub fn read(reader: &mut dyn Read) -> Result { 83 | let mut buffer = [0_u8; 5]; 84 | reader 85 | .read_exact(&mut buffer) 86 | .read_err("Failed to read data packet header")?; 87 | 88 | // Parse values 89 | let comp_restart_flag = buffer[0] & 1 != 0; 90 | let packet_length = 91 | u16::from_le_bytes(buffer[1..3].try_into().internal_err(WRONG_OFFSET)?) 
as u64 + 1; 92 | let bytestream_count = 93 | u16::from_le_bytes(buffer[3..5].try_into().internal_err(WRONG_OFFSET)?); 94 | 95 | // Validate values 96 | if packet_length % 4 != 0 { 97 | Error::invalid("Data packet length is not aligned and a multiple of four")? 98 | } 99 | if bytestream_count == 0 { 100 | Error::invalid("A byte stream count of 0 is not allowed")? 101 | } 102 | 103 | Ok(Self { 104 | comp_restart_flag, 105 | packet_length, 106 | bytestream_count, 107 | }) 108 | } 109 | 110 | pub fn write(&self, writer: &mut dyn Write) -> Result<()> { 111 | let mut buffer = [0_u8; Self::SIZE]; 112 | buffer[0] = 1; 113 | let flags = if self.comp_restart_flag { 1_u8 } else { 0_u8 }; 114 | buffer[1] = flags; 115 | let length = (self.packet_length - 1) as u16; 116 | buffer[2..4].copy_from_slice(&length.to_le_bytes()); 117 | buffer[4..6].copy_from_slice(&self.bytestream_count.to_le_bytes()); 118 | writer 119 | .write_all(&buffer) 120 | .write_err("Failed to write data packet header") 121 | } 122 | } 123 | 124 | pub struct IgnoredPacketHeader { 125 | pub packet_length: u64, 126 | } 127 | 128 | impl IgnoredPacketHeader { 129 | pub const ID: u8 = 2; 130 | 131 | pub fn read(reader: &mut dyn Read) -> Result { 132 | // Read Ignored Packet 133 | let mut buffer = [0_u8; 3]; 134 | reader 135 | .read_exact(&mut buffer) 136 | .read_err("Failed to read ignore packet header")?; 137 | 138 | // Check reserved value 139 | if buffer[0] != 0 { 140 | Error::invalid("The first byte inside ignored packets is reserved and must be zero")? 141 | } 142 | 143 | // Parse length 144 | let packet_length = 145 | u16::from_le_bytes(buffer[1..3].try_into().internal_err(WRONG_OFFSET)?) as u64 + 1; 146 | 147 | // Validate length 148 | if packet_length % 4 != 0 { 149 | Error::invalid("Ignored packet length is not aligned and a multiple of four")? 
150 | } 151 | 152 | Ok(Self { packet_length }) 153 | } 154 | } 155 | -------------------------------------------------------------------------------- /src/paged_reader.rs: -------------------------------------------------------------------------------- 1 | use std::io::{Error, ErrorKind, Read, Result, Seek, SeekFrom}; 2 | 3 | #[cfg(not(feature = "crc32c"))] 4 | use crate::crc32::Crc32; 5 | 6 | const CHECKSUM_SIZE: u64 = 4; 7 | const ALIGNMENT_SIZE: u64 = 4; 8 | const MAX_PAGE_SIZE: u64 = 1024 * 1024; 9 | 10 | pub struct PagedReader { 11 | page_size: u64, 12 | phy_file_size: u64, 13 | log_file_size: u64, 14 | pages: u64, 15 | reader: T, 16 | offset: u64, 17 | page_num: Option, 18 | page_buffer: Vec, 19 | 20 | #[cfg(not(feature = "crc32c"))] 21 | crc: Crc32, 22 | } 23 | 24 | impl PagedReader { 25 | /// Create and initialize a paged reader that abstracts the E57 CRC scheme 26 | pub fn new(mut reader: T, page_size: u64) -> Result { 27 | if page_size > MAX_PAGE_SIZE { 28 | Err(Error::new( 29 | ErrorKind::InvalidInput, 30 | format!("Page size {page_size} is bigger than the allowed maximum page size of {MAX_PAGE_SIZE} bytes"), 31 | ))?; 32 | } 33 | if page_size <= CHECKSUM_SIZE { 34 | Err(Error::new( 35 | ErrorKind::InvalidInput, 36 | format!("Page size {page_size} needs to be bigger than checksum ({CHECKSUM_SIZE} bytes)"), 37 | ))?; 38 | } 39 | 40 | let phy_file_size = reader.seek(SeekFrom::End(0))?; 41 | if phy_file_size == 0 { 42 | let msg = "A file size of zero is not allowed"; 43 | Err(Error::new(ErrorKind::InvalidData, msg))?; 44 | } 45 | if phy_file_size % page_size != 0 { 46 | Err(Error::new( 47 | ErrorKind::InvalidData, 48 | format!("File size {phy_file_size} is not a multiple of the page size {page_size}"), 49 | ))?; 50 | } 51 | 52 | let pages = phy_file_size / page_size; 53 | 54 | Ok(Self { 55 | reader, 56 | page_size, 57 | pages, 58 | phy_file_size, 59 | log_file_size: pages * (page_size - CHECKSUM_SIZE), 60 | page_buffer: vec![0_u8; page_size as usize], 61 | 
page_num: None, 62 | offset: 0, 63 | 64 | #[cfg(not(feature = "crc32c"))] 65 | crc: Crc32::new(), 66 | }) 67 | } 68 | 69 | /// Seeking to a physical file address as offset relative to the start of the file. 70 | /// Will return the new logical offset inside the file or an error. 71 | pub fn seek_physical(&mut self, offset: u64) -> Result { 72 | if offset >= self.phy_file_size { 73 | Err(Error::new( 74 | ErrorKind::InvalidInput, 75 | format!("Offset {offset} is behind end of file"), 76 | ))?; 77 | } 78 | 79 | let pages_before = offset / self.page_size; 80 | self.offset = offset - pages_before * CHECKSUM_SIZE; 81 | Ok(self.offset) 82 | } 83 | 84 | fn read_page(&mut self, page: u64) -> Result<()> { 85 | if page >= self.pages { 86 | let max = self.pages - 1; 87 | Err(Error::new( 88 | ErrorKind::InvalidInput, 89 | format!("Page {page} does not exist, only page numbers 0..{max} are valid"), 90 | ))?; 91 | } 92 | let offset = page * self.page_size; 93 | self.reader.seek(SeekFrom::Start(offset))?; 94 | self.reader.read_exact(&mut self.page_buffer)?; 95 | let data_size = self.page_size - CHECKSUM_SIZE; 96 | let expected_checksum = &self.page_buffer[data_size as usize..]; 97 | 98 | // Simple & slower default included SW implementation 99 | #[cfg(not(feature = "crc32c"))] 100 | let crc = self.crc.calculate(&self.page_buffer[0..data_size as usize]); 101 | 102 | // Optional faster external crate with HW support 103 | #[cfg(feature = "crc32c")] 104 | let crc = crc32c::crc32c(&self.page_buffer[0..data_size as usize]); 105 | 106 | // The standard says all binary values are stored as little endian, 107 | // but for some reason E57 files contain the checksum in big endian order. 
108 | // Probably the reference implementation used a weird CRC library and 109 | // now everybody has to swap bytes as well because it was not noticed back then :) 110 | let calculated_checksum = crc.to_be_bytes(); 111 | 112 | if expected_checksum != calculated_checksum { 113 | self.page_num = None; 114 | return Err(Error::new( 115 | ErrorKind::InvalidData, 116 | format!("Detected invalid checksum (expected: {expected_checksum:?}, actual: {calculated_checksum:?}) for page {page}") 117 | )); 118 | } 119 | 120 | self.page_num = Some(page); 121 | Ok(()) 122 | } 123 | 124 | /// Do some skipping to next 4-byte-aligned offset, if needed. 125 | pub fn align(&mut self) -> Result<()> { 126 | let off_alignment = self.offset % 4; 127 | if off_alignment != 0 { 128 | let skip = ALIGNMENT_SIZE - off_alignment; 129 | if self.offset + skip > self.log_file_size { 130 | Err(Error::new( 131 | ErrorKind::InvalidInput, 132 | "Tried to seek behind end of the file", 133 | ))? 134 | } 135 | self.offset += skip; 136 | } 137 | Ok(()) 138 | } 139 | } 140 | 141 | impl Read for PagedReader { 142 | fn read(&mut self, buf: &mut [u8]) -> Result { 143 | let page = self.offset / (self.page_size - CHECKSUM_SIZE); 144 | if page >= self.pages { 145 | return Ok(0); 146 | } 147 | if self.page_num != Some(page) { 148 | self.read_page(page)?; 149 | } 150 | let page_offset = self.offset % (self.page_size - CHECKSUM_SIZE); 151 | let page_readable = self.page_size - CHECKSUM_SIZE - page_offset; 152 | let read_size = usize::min(buf.len(), page_readable as usize); 153 | buf[..read_size].copy_from_slice( 154 | &self.page_buffer[page_offset as usize..page_offset as usize + read_size], 155 | ); 156 | self.offset += read_size as u64; 157 | Ok(read_size) 158 | } 159 | } 160 | 161 | #[cfg(test)] 162 | mod tests { 163 | use super::*; 164 | use std::fs::File; 165 | use std::io::Cursor; 166 | 167 | const PAGE_SIZE: u64 = 1024; 168 | 169 | #[test] 170 | fn read_full_valid_file() { 171 | let file_size = 743424_u64; 172 
| let pages = file_size / PAGE_SIZE; 173 | let logical_file_size = file_size - pages * CHECKSUM_SIZE; 174 | let file = File::open("testdata/bunnyDouble.e57").unwrap(); 175 | let mut reader = PagedReader::new(file, PAGE_SIZE).unwrap(); 176 | 177 | let mut buf = Vec::new(); 178 | reader.read_to_end(&mut buf).unwrap(); 179 | assert_eq!(buf.len(), logical_file_size as usize); 180 | } 181 | 182 | #[test] 183 | fn size_not_multiple_of_page() { 184 | let file = File::open("testdata/bunnyDouble.e57").unwrap(); 185 | assert!(PagedReader::new(file, PAGE_SIZE - 1).is_err()); 186 | } 187 | 188 | #[test] 189 | fn page_size_too_small() { 190 | let file = File::open("testdata/bunnyDouble.e57").unwrap(); 191 | assert!(PagedReader::new(file, CHECKSUM_SIZE).is_err()); 192 | } 193 | 194 | #[test] 195 | fn zero_pages() { 196 | let file = Vec::::new(); 197 | let cursor = Cursor::new(file); 198 | assert!(PagedReader::new(cursor, PAGE_SIZE).is_err()); 199 | } 200 | 201 | #[test] 202 | fn corrupt_page() { 203 | let data = vec![0_u8; 128]; 204 | let cursor = Cursor::new(data); 205 | let mut reader = PagedReader::new(cursor, 128).unwrap(); 206 | 207 | let mut buf = Vec::new(); 208 | assert!(reader.read_to_end(&mut buf).is_err()); 209 | assert_eq!(buf.len(), 0); 210 | } 211 | 212 | #[test] 213 | fn physical_seek() { 214 | let file = File::open("testdata/bunnyDouble.e57").unwrap(); 215 | let mut reader = PagedReader::new(file, PAGE_SIZE).unwrap(); 216 | 217 | let xml_physical_offset = 740736; 218 | let expected_logical_offset = 737844; 219 | 220 | let logical_offset = reader.seek_physical(xml_physical_offset).unwrap(); 221 | assert_eq!(logical_offset, expected_logical_offset); 222 | 223 | let mut buffer = [0_u8; 5]; 224 | reader.read_exact(&mut buffer).unwrap(); 225 | assert_eq!(String::from_utf8(buffer.to_vec()).unwrap(), " { 10 | queue_reader: QueueReader<'a, T>, 11 | prototype_len: usize, 12 | records: u64, 13 | read: u64, 14 | } 15 | 16 | impl<'a, T: Read + Seek> PointCloudReaderRaw<'a, 
T> { 17 | pub(crate) fn new(pc: &PointCloud, reader: &'a mut PagedReader) -> Result { 18 | let queue_reader = QueueReader::new(pc, reader)?; 19 | let prototype_len = pc.prototype.len(); 20 | let records = pc.records; 21 | Ok(Self { 22 | queue_reader, 23 | prototype_len, 24 | records, 25 | read: 0, 26 | }) 27 | } 28 | } 29 | 30 | impl Iterator for PointCloudReaderRaw<'_, T> { 31 | /// Each iterator item is a result for an extracted point. 32 | type Item = Result; 33 | 34 | /// Returns the next available point or None if the end was reached. 35 | fn next(&mut self) -> Option { 36 | // Already read all points? 37 | if self.read >= self.records { 38 | return None; 39 | } 40 | 41 | // Refill property queues if required 42 | // (in some corner cases more than one advance is required) 43 | while self.queue_reader.available() < 1 { 44 | if let Err(err) = self.queue_reader.advance() { 45 | return Some(Err(err)); 46 | } 47 | } 48 | 49 | // Extract next point 50 | let mut point = RawValues::with_capacity(self.prototype_len); 51 | match self.queue_reader.pop_point(&mut point) { 52 | Ok(()) => { 53 | self.read += 1; 54 | Some(Ok(point)) 55 | } 56 | Err(err) => Some(Err(err)), 57 | } 58 | } 59 | 60 | fn size_hint(&self) -> (usize, Option) { 61 | let overall = self.records; 62 | let remaining = overall - self.read; 63 | (remaining as usize, Some(remaining as usize)) 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /src/point.rs: -------------------------------------------------------------------------------- 1 | /// Structure for Cartesian coordinates with an X, Y and Z value. 2 | #[derive(Clone, Debug, PartialEq)] 3 | pub enum CartesianCoordinate { 4 | /// The Cartesian coordinate is fully valid. 5 | Valid { x: f64, y: f64, z: f64 }, 6 | /// The Cartesian coordinate only contains a direction vector. 7 | /// Be careful, the vector might not be normalized! 
/// Structure for Cartesian coordinates with an X, Y and Z value.
#[derive(Clone, Debug, PartialEq)]
pub enum CartesianCoordinate {
    /// The Cartesian coordinate is fully valid.
    Valid { x: f64, y: f64, z: f64 },
    /// The Cartesian coordinate only contains a direction vector.
    /// Be careful, the vector might not be normalized!
    Direction { x: f64, y: f64, z: f64 },
    /// The Cartesian coordinate is fully invalid and has no meaning or the point cloud had no cartesian coordinates in general.
    Invalid,
}

/// Spherical coordinates with range, azimuth and elevation.
#[derive(Clone, Debug, PartialEq)]
pub enum SphericalCoordinate {
    /// The spherical coordinate is fully valid.
    Valid {
        range: f64,
        azimuth: f64,
        elevation: f64,
    },
    /// The spherical coordinate only defines direction and has no valid range.
    Direction { azimuth: f64, elevation: f64 },
    /// The spherical coordinate is fully invalid and has no meaning or the point cloud had no spherical coordinates in general.
    Invalid,
}

/// Simple RGB point colors.
///
/// When reading, the colors are by default normalized to values between 0 and 1.
/// The normalization is done using the color limits of the point cloud being read.
/// If there are no color limits, the min and max values of the color record types are used as fallback.
/// See also [`PointCloud::color_limits`](crate::PointCloud::color_limits) and
/// [`PointCloudReaderSimple::normalize_color`](crate::PointCloudReaderSimple::normalize_color).
#[derive(Clone, Debug, PartialEq)]
pub struct Color {
    pub red: f32,
    pub green: f32,
    pub blue: f32,
}

/// Represents a high level point with its different attributes.
#[derive(Clone, Debug)]
pub struct Point {
    /// Cartesian coordinates.
    /// Might be always invalid if the point cloud does only contain spherical coordinates and the automatic conversion from spherical to Cartesian is disabled.
    /// See also [`PointCloudReaderSimple::spherical_to_cartesian`](crate::PointCloudReaderSimple::spherical_to_cartesian)
    /// and [`PointCloudReaderSimple::cartesian_to_spherical`](crate::PointCloudReaderSimple::cartesian_to_spherical).
    pub cartesian: CartesianCoordinate,

    /// Spherical coordinates.
    /// Might be always invalid if the point cloud does only contain Cartesian coordinates.
    /// By default spherical coordinates are converted to Cartesian coordinates.
    /// See also [`PointCloudReaderSimple::spherical_to_cartesian`](crate::PointCloudReaderSimple::spherical_to_cartesian)
    /// and [`PointCloudReaderSimple::cartesian_to_spherical`](crate::PointCloudReaderSimple::cartesian_to_spherical).
    pub spherical: SphericalCoordinate,

    /// RGB point colors.
    /// None means the whole point cloud has no colors or the color of this individual point is invalid.
    /// Please check the point cloud properties to understand whether the point cloud in general has color or not.
    /// See also [`PointCloud::has_color`](crate::PointCloud::has_color) and [Color].
    pub color: Option<Color>,

    /// Floating point intensity value.
    /// When reading, the intensity is by default normalized to values between 0 and 1.
    /// The normalization is done using the intensity limits of the point cloud being read.
    /// If there are no intensity limits, the min and max values of the intensity record type are used as fallback.
    /// None means the whole point cloud has no intensity or the intensity of this individual point is invalid.
    /// Please check the point cloud properties to understand whether the point cloud in general has intensity or not.
    /// See also [`PointCloud::has_intensity`](crate::PointCloud::has_intensity) and
    /// [`PointCloud::intensity_limits`](crate::PointCloud::intensity_limits) and
    /// [`PointCloudReaderSimple::normalize_intensity`](crate::PointCloudReaderSimple::normalize_intensity)
    pub intensity: Option<f32>,

    /// Row index (Y-axis) to describe point data in a 2D image-like grid.
    /// Default value for point clouds without row index will be -1.
    /// Since this cannot be invalid for individual points, its not an option.
    /// Please check the point cloud properties to understand if the points
    /// have a row index or not.
    /// See also [`PointCloud::has_row_column`](crate::PointCloud::has_row_column).
    pub row: i64,

    /// Column index (X-axis) to describe point data in a 2D image-like grid.
    /// Default value for point clouds without column index will be -1.
    /// Since this cannot be invalid for individual points, its not an option.
    /// Please check the point cloud properties to understand if the points
    /// have a column index or not.
    /// See also [`PointCloud::has_row_column`](crate::PointCloud::has_row_column).
    pub column: i64,
}
77 | /// Since this cannot be invalid for individual points, its not an option. 78 | /// Please check the point cloud properties to understand if the points 79 | /// have a row index or not. 80 | /// See also [`PointCloud::has_row_column`](crate::PointCloud::has_row_column). 81 | pub row: i64, 82 | 83 | /// Column index (X-axis) to describe point data in a 2D image-like grid. 84 | /// Default value for point clouds without column index will be -1. 85 | /// Since this cannot be invalid for individual points, its not an option. 86 | /// Please check the point cloud properties to understand if the points 87 | /// have a column index or not. 88 | /// See also [`PointCloud::has_row_column`](crate::PointCloud::has_row_column). 89 | pub column: i64, 90 | } 91 | -------------------------------------------------------------------------------- /src/queue_reader.rs: -------------------------------------------------------------------------------- 1 | use crate::bitpack::BitPack; 2 | use crate::bs_read::ByteStreamReadBuffer; 3 | use crate::cv_section::CompressedVectorSectionHeader; 4 | use crate::error::Converter; 5 | use crate::packet::PacketHeader; 6 | use crate::paged_reader::PagedReader; 7 | use crate::Error; 8 | use crate::PointCloud; 9 | use crate::RawValues; 10 | use crate::RecordDataType; 11 | use crate::RecordValue; 12 | use crate::Result; 13 | use std::collections::VecDeque; 14 | use std::io::{Read, Seek}; 15 | 16 | /// Read compressed vector sections into queues of raw values. 
17 | pub struct QueueReader<'a, T: Read + Seek> { 18 | pc: PointCloud, 19 | reader: &'a mut PagedReader, 20 | buffer: Vec, 21 | buffer_sizes: Vec, 22 | byte_streams: Vec, 23 | queues: Vec>, 24 | } 25 | 26 | impl<'a, T: Read + Seek> QueueReader<'a, T> { 27 | pub fn new(pc: &PointCloud, reader: &'a mut PagedReader) -> Result { 28 | reader 29 | .seek_physical(pc.file_offset) 30 | .read_err("Cannot seek to compressed vector header")?; 31 | let section_header = CompressedVectorSectionHeader::read(reader)?; 32 | reader 33 | .seek_physical(section_header.data_offset) 34 | .read_err("Cannot seek to packet header")?; 35 | 36 | Ok(Self { 37 | pc: pc.clone(), 38 | reader, 39 | buffer: Vec::new(), 40 | buffer_sizes: vec![0; pc.prototype.len()], 41 | byte_streams: vec![ByteStreamReadBuffer::new(); pc.prototype.len()], 42 | queues: vec![VecDeque::new(); pc.prototype.len()], 43 | }) 44 | } 45 | 46 | /// Returns the number of complete and available points across all queues. 47 | pub fn available(&self) -> usize { 48 | if self.queues.is_empty() { 49 | return 0; 50 | } 51 | 52 | let mut av = usize::MAX; 53 | for q in &self.queues { 54 | let len = q.len(); 55 | if len < av { 56 | av = len; 57 | } 58 | } 59 | av 60 | } 61 | 62 | /// Return values for the next point by popping one value from each queue. 63 | /// Use an existing vector with enough capacity to avoid frequent reallocations! 64 | pub fn pop_point(&mut self, output: &mut RawValues) -> Result<()> { 65 | output.clear(); 66 | for i in 0..self.pc.prototype.len() { 67 | let value = self.queues[i] 68 | .pop_front() 69 | .internal_err("Failed to pop value for next point")?; 70 | output.push(value); 71 | } 72 | Ok(()) 73 | } 74 | 75 | /// Reads the next packet from the compressed vector and decodes it into the queues. 
76 | pub fn advance(&mut self) -> Result<()> { 77 | let packet_header = PacketHeader::read(self.reader)?; 78 | match packet_header { 79 | PacketHeader::Index(header) => { 80 | // Just skip over index packets 81 | let mut buffer = vec![0; header.packet_length as usize]; 82 | self.reader 83 | .read_exact(&mut buffer) 84 | .read_err("Failed to read data of index packet")? 85 | } 86 | PacketHeader::Ignored(header) => { 87 | // Just skip over ignored packets 88 | let mut buffer = vec![0; header.packet_length as usize]; 89 | self.reader 90 | .read_exact(&mut buffer) 91 | .read_err("Failed to read data of ignored packet")? 92 | } 93 | PacketHeader::Data(header) => { 94 | if header.bytestream_count as usize != self.byte_streams.len() { 95 | Error::invalid("Bytestream count does not match prototype size")? 96 | } 97 | 98 | // Read byte stream sizes 99 | for i in 0..self.buffer_sizes.len() { 100 | let mut buf = [0_u8; 2]; 101 | self.reader 102 | .read_exact(&mut buf) 103 | .read_err("Failed to read data packet buffer sizes")?; 104 | let len = u16::from_le_bytes(buf) as usize; 105 | self.buffer_sizes[i] = len; 106 | } 107 | 108 | // Read byte streams into memory 109 | for (i, bs) in self.buffer_sizes.iter().enumerate() { 110 | self.buffer.resize(*bs, 0_u8); 111 | self.reader 112 | .read_exact(&mut self.buffer) 113 | .read_err("Failed to read data packet buffers")?; 114 | self.byte_streams[i].append(&self.buffer); 115 | } 116 | 117 | // Find smallest number of expected items in any queue after stream unpacking. 118 | // This is required for the corner case when the bit size of an record 119 | // is zero and we don't know how many items to "unpack" from an empty buffer. 120 | // This happens for example with integer values where min=max, because all values are equal. 
121 | let mut min_queue_size = usize::MAX; 122 | for (i, bs) in self.byte_streams.iter().enumerate() { 123 | let bit_size = self.pc.prototype[i].data_type.bit_size(); 124 | // We can only check records with a non-zero bit size 125 | if bit_size != 0 { 126 | let bs_items = bs.available() / bit_size; 127 | let queue_items = self.queues[i].len(); 128 | let items = bs_items + queue_items; 129 | if items < min_queue_size { 130 | min_queue_size = items; 131 | } 132 | } 133 | } 134 | 135 | self.parse_byte_streams(min_queue_size)?; 136 | } 137 | }; 138 | 139 | self.reader 140 | .align() 141 | .read_err("Failed to align reader on next 4-byte offset after reading packet") 142 | } 143 | 144 | /// Extracts raw values from byte streams into queues. 145 | fn parse_byte_streams(&mut self, min_queue_size: usize) -> Result<()> { 146 | for (i, r) in self.pc.prototype.iter().enumerate() { 147 | match r.data_type { 148 | RecordDataType::Single { .. } => { 149 | BitPack::unpack_singles(&mut self.byte_streams[i], &mut self.queues[i])? 150 | } 151 | RecordDataType::Double { .. } => { 152 | BitPack::unpack_doubles(&mut self.byte_streams[i], &mut self.queues[i])? 153 | } 154 | RecordDataType::ScaledInteger { min, max, .. } => { 155 | if r.data_type.bit_size() == 0 { 156 | // If the bit size of an record is zero, we don't know how many items to unpack. 157 | // Thats because they are not really unpacked, but instead generated with a predefined value. 158 | // Since this can only happen when min=max we know that min is the expected value. 159 | // We use the supplied minimal size to ensure that we create enough items 160 | // to fill the queue enough to not be the limiting queue. 161 | while self.queues[i].len() < min_queue_size { 162 | self.queues[i].push_back(RecordValue::ScaledInteger(min)); 163 | } 164 | } else { 165 | BitPack::unpack_scaled_ints( 166 | &mut self.byte_streams[i], 167 | min, 168 | max, 169 | &mut self.queues[i], 170 | )? 
171 | } 172 | } 173 | RecordDataType::Integer { min, max } => { 174 | if r.data_type.bit_size() == 0 { 175 | // See comment above for scaled integers! 176 | while self.queues[i].len() < min_queue_size { 177 | self.queues[i].push_back(RecordValue::Integer(min)); 178 | } 179 | } else { 180 | BitPack::unpack_ints( 181 | &mut self.byte_streams[i], 182 | min, 183 | max, 184 | &mut self.queues[i], 185 | )? 186 | } 187 | } 188 | }; 189 | } 190 | 191 | Ok(()) 192 | } 193 | } 194 | -------------------------------------------------------------------------------- /src/root.rs: -------------------------------------------------------------------------------- 1 | use crate::error::Converter; 2 | use crate::Extension; 3 | use crate::{xml, DateTime, Error, Image, PointCloud, Result}; 4 | use roxmltree::Document; 5 | 6 | /// E57 XML Root structure with information shared by all elements in the file. 7 | #[derive(Debug)] 8 | #[non_exhaustive] 9 | pub struct Root { 10 | pub format: String, 11 | pub guid: String, 12 | pub major_version: i64, 13 | pub minor_version: i64, 14 | pub library_version: Option, 15 | pub creation: Option, 16 | pub coordinate_metadata: Option, 17 | } 18 | 19 | impl Default for Root { 20 | fn default() -> Self { 21 | Self { 22 | format: String::from("ASTM E57 3D Imaging Data File"), 23 | guid: String::new(), 24 | major_version: 1, 25 | minor_version: 0, 26 | creation: None, 27 | coordinate_metadata: None, 28 | library_version: None, 29 | } 30 | } 31 | } 32 | 33 | pub fn root_from_document(document: &Document) -> Result { 34 | let root = document 35 | .descendants() 36 | .find(|n| n.has_tag_name("e57Root")) 37 | .invalid_err("Unable to find e57Root tag in XML document")?; 38 | 39 | // Required fields 40 | let format = xml::req_string(&root, "formatName")?; 41 | let guid = xml::req_string(&root, "guid")?; 42 | let major_version = xml::req_int(&root, "versionMajor")?; 43 | let minor_version = xml::req_int(&root, "versionMajor")?; 44 | 45 | // Optional fields 46 | 
let creation = xml::opt_date_time(&root, "creationDateTime")?; 47 | let coordinate_metadata = xml::opt_string(&root, "coordinateMetadata")?; 48 | let library_version = xml::opt_string(&root, "e57LibraryVersion")?; 49 | 50 | Ok(Root { 51 | format, 52 | guid, 53 | creation, 54 | major_version, 55 | minor_version, 56 | coordinate_metadata, 57 | library_version, 58 | }) 59 | } 60 | 61 | pub fn serialize_root( 62 | root: &Root, 63 | pointclouds: &[PointCloud], 64 | images: &[Image], 65 | extensions: &[Extension], 66 | ) -> Result { 67 | let mut xml = String::new(); 68 | xml += "\n"; 69 | xml += "\n"; 74 | xml += "\n"; 75 | if root.guid.is_empty() { 76 | Error::invalid("Empty file GUID is not allowed")? 77 | } 78 | xml += &format!("\n", root.guid); 79 | xml += &format!( 80 | "{}\n", 81 | root.major_version 82 | ); 83 | xml += &format!( 84 | "{}\n", 85 | root.minor_version 86 | ); 87 | if let Some(cm) = &root.coordinate_metadata { 88 | xml += 89 | &format!("\n"); 90 | } 91 | if let Some(lv) = &root.library_version { 92 | xml += 93 | &format!("\n"); 94 | } 95 | if let Some(dt) = &root.creation { 96 | xml += &dt.xml_string("creationDateTime"); 97 | } 98 | xml += "\n"; 99 | for pc in pointclouds { 100 | xml += &pc.xml_string()?; 101 | } 102 | xml += "\n"; 103 | xml += "\n"; 104 | for img in images { 105 | xml += &img.xml_string(); 106 | } 107 | xml += "\n"; 108 | xml += "\n"; 109 | Ok(xml) 110 | } 111 | -------------------------------------------------------------------------------- /src/transform.rs: -------------------------------------------------------------------------------- 1 | use crate::xml; 2 | use crate::Result; 3 | use roxmltree::Node; 4 | 5 | /// Describes the rotation of a point cloud. 6 | #[derive(Clone, Debug)] 7 | pub struct Quaternion { 8 | /// The scalar part of the quaternion. Shall be nonnegative. 9 | pub w: f64, 10 | /// The i coefficient of the quaternion. 11 | pub x: f64, 12 | /// The j coefficient of the quaternion. 
13 | pub y: f64, 14 | /// The k coefficient of the quaternion. 15 | pub z: f64, 16 | } 17 | 18 | impl Quaternion { 19 | pub(crate) fn from_node(node: &Node) -> Result { 20 | let w = xml::req_f64(node, "w")?; 21 | let x = xml::req_f64(node, "x")?; 22 | let y = xml::req_f64(node, "y")?; 23 | let z = xml::req_f64(node, "z")?; 24 | Ok(Self { w, x, y, z }) 25 | } 26 | } 27 | 28 | impl Default for Quaternion { 29 | fn default() -> Self { 30 | Self { 31 | w: 1.0, 32 | x: 0.0, 33 | y: 0.0, 34 | z: 0.0, 35 | } 36 | } 37 | } 38 | 39 | /// Describes the translation of a point cloud. 40 | #[derive(Clone, Debug)] 41 | pub struct Translation { 42 | /// The X coordinate of the translation in meters. 43 | pub x: f64, 44 | /// The Y coordinate of the translation in meters. 45 | pub y: f64, 46 | /// The Z coordinate of the translation in meters. 47 | pub z: f64, 48 | } 49 | 50 | impl Translation { 51 | pub(crate) fn from_node(node: &Node) -> Result { 52 | let x = xml::req_f64(node, "x")?; 53 | let y = xml::req_f64(node, "y")?; 54 | let z = xml::req_f64(node, "z")?; 55 | Ok(Self { x, y, z }) 56 | } 57 | } 58 | 59 | impl Default for Translation { 60 | fn default() -> Self { 61 | Self { 62 | x: 0.0, 63 | y: 0.0, 64 | z: 0.0, 65 | } 66 | } 67 | } 68 | 69 | /// Describes a transformation of a point cloud with a rotation and translation component. 70 | #[derive(Clone, Debug, Default)] 71 | pub struct Transform { 72 | /// A unit quaternion representing the rotation of the transform. 73 | pub rotation: Quaternion, 74 | /// The translation of the transform. 
75 | pub translation: Translation, 76 | } 77 | 78 | impl Transform { 79 | pub(crate) fn from_node(node: &Node) -> Result { 80 | let translation = match node.children().find(|n| n.has_tag_name("translation")) { 81 | Some(node) => Translation::from_node(&node)?, 82 | None => Translation::default(), 83 | }; 84 | let rotation = match node.children().find(|n| n.has_tag_name("rotation")) { 85 | Some(node) => Quaternion::from_node(&node)?, 86 | None => Quaternion::default(), 87 | }; 88 | Ok(Self { 89 | rotation, 90 | translation, 91 | }) 92 | } 93 | 94 | pub(crate) fn xml_string(&self, tag_name: &str) -> String { 95 | let w = xml::gen_float("w", self.rotation.w); 96 | let x = xml::gen_float("x", self.rotation.x); 97 | let y = xml::gen_float("y", self.rotation.y); 98 | let z = xml::gen_float("z", self.rotation.z); 99 | let quat = format!("\n{w}{x}{y}{z}\n"); 100 | 101 | let x = xml::gen_float("x", self.translation.x); 102 | let y = xml::gen_float("y", self.translation.y); 103 | let z = xml::gen_float("z", self.translation.z); 104 | let trans = format!("\n{x}{y}{z}\n"); 105 | 106 | format!("<{tag_name} type=\"Structure\">\n{quat}{trans}\n") 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /src/xml.rs: -------------------------------------------------------------------------------- 1 | use crate::error::Converter; 2 | use crate::{DateTime, Error, Result, Transform}; 3 | use roxmltree::Node; 4 | use std::fmt::Display; 5 | use std::str::FromStr; 6 | 7 | pub fn opt_string(parent_node: &Node, tag_name: &str) -> Result> { 8 | if let Some(tag) = parent_node.children().find(|n| n.has_tag_name(tag_name)) { 9 | let expected_type = "String"; 10 | if let Some(found_type) = tag.attribute("type") { 11 | if found_type != expected_type { 12 | Error::invalid(format!( 13 | "Found XML tag '{tag_name}' with type '{found_type}' instead of '{expected_type}'" 14 | ))? 
15 | } 16 | } else { 17 | Error::invalid(format!("XML tag '{tag_name}' has no 'type' attribute"))? 18 | } 19 | let text = tag.text().unwrap_or(""); 20 | Ok(Some(text.to_string())) 21 | } else { 22 | Ok(None) 23 | } 24 | } 25 | 26 | pub fn req_string(parent_node: &Node, tag_name: &str) -> Result { 27 | let str = opt_string(parent_node, tag_name)?; 28 | str.invalid_err(format!("XML tag '{tag_name}' was not found")) 29 | } 30 | 31 | fn opt_num( 32 | parent_node: &Node, 33 | tag_name: &str, 34 | expected_type: &str, 35 | ) -> Result> { 36 | if let Some(tag) = parent_node.children().find(|n| n.has_tag_name(tag_name)) { 37 | if let Some(found_type) = tag.attribute("type") { 38 | if found_type != expected_type { 39 | Error::invalid(format!( 40 | "Found XML tag '{tag_name}' with type '{found_type}' instead of '{expected_type}'" 41 | ))? 42 | } 43 | } else { 44 | Error::invalid(format!("XML tag '{tag_name}' has no 'type' attribute"))? 45 | } 46 | let text = tag.text().unwrap_or("0"); 47 | if let Ok(parsed) = text.parse::() { 48 | Ok(Some(parsed)) 49 | } else { 50 | Error::invalid(format!( 51 | "Cannot parse value '{text}' of XML tag '{tag_name}' as '{expected_type}'" 52 | ))? 
53 | } 54 | } else { 55 | Ok(None) 56 | } 57 | } 58 | 59 | pub fn opt_f64(parent_node: &Node, tag_name: &str) -> Result> { 60 | opt_num(parent_node, tag_name, "Float") 61 | } 62 | 63 | pub fn req_f64(parent_node: &Node, tag_name: &str) -> Result { 64 | let double = opt_num(parent_node, tag_name, "Float")?; 65 | double.invalid_err(format!("XML tag '{tag_name}' was not found")) 66 | } 67 | 68 | pub fn opt_int(parent_node: &Node, tag_name: &str) -> Result> { 69 | opt_num(parent_node, tag_name, "Integer") 70 | } 71 | 72 | pub fn req_int(parent_node: &Node, tag_name: &str) -> Result { 73 | let integer = opt_num(parent_node, tag_name, "Integer")?; 74 | integer.invalid_err(format!("XML tag '{tag_name}' was not found")) 75 | } 76 | 77 | pub fn opt_date_time(parent_node: &Node, tag_name: &str) -> Result> { 78 | if let Some(tag) = parent_node.children().find(|n| n.has_tag_name(tag_name)) { 79 | let expected_type = "Structure"; 80 | if let Some(found_type) = tag.attribute("type") { 81 | if found_type != expected_type { 82 | Error::invalid(format!( 83 | "Found XML tag '{tag_name}' with type '{found_type}' instead of '{expected_type}'" 84 | ))? 85 | } 86 | } else { 87 | Error::invalid(format!("XML tag '{tag_name}' has no 'type' attribute"))? 
88 | } 89 | DateTime::from_node(&tag) 90 | } else { 91 | Ok(None) 92 | } 93 | } 94 | 95 | pub fn opt_transform(parent_node: &Node, tag_name: &str) -> Result> { 96 | let node = parent_node.children().find(|n| n.has_tag_name(tag_name)); 97 | if let Some(node) = node { 98 | Ok(Some(Transform::from_node(&node)?)) 99 | } else { 100 | Ok(None) 101 | } 102 | } 103 | 104 | pub fn gen_string(tag_name: &str, value: &T) -> String { 105 | format!("<{tag_name} type=\"String\">\n") 106 | } 107 | 108 | pub fn gen_float(tag_name: &str, value: T) -> String { 109 | format!("<{tag_name} type=\"Float\">{value}\n") 110 | } 111 | 112 | pub fn gen_int(tag_name: &str, value: T) -> String { 113 | format!("<{tag_name} type=\"Integer\">{value}\n") 114 | } 115 | -------------------------------------------------------------------------------- /testdata/bunnyDouble.e57: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/e57/64adfe538c34afd6f2429e11672172e8c6b6712c/testdata/bunnyDouble.e57 -------------------------------------------------------------------------------- /testdata/bunnyFloat.e57: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/e57/64adfe538c34afd6f2429e11672172e8c6b6712c/testdata/bunnyFloat.e57 -------------------------------------------------------------------------------- /testdata/bunnyInt19.e57: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/e57/64adfe538c34afd6f2429e11672172e8c6b6712c/testdata/bunnyInt19.e57 -------------------------------------------------------------------------------- /testdata/bunnyInt21.e57: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/e57/64adfe538c34afd6f2429e11672172e8c6b6712c/testdata/bunnyInt21.e57 
-------------------------------------------------------------------------------- /testdata/bunnyInt24.e57: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/e57/64adfe538c34afd6f2429e11672172e8c6b6712c/testdata/bunnyInt24.e57 -------------------------------------------------------------------------------- /testdata/bunnyInt32.e57: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/e57/64adfe538c34afd6f2429e11672172e8c6b6712c/testdata/bunnyInt32.e57 -------------------------------------------------------------------------------- /testdata/castle.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/e57/64adfe538c34afd6f2429e11672172e8c6b6712c/testdata/castle.jpg -------------------------------------------------------------------------------- /testdata/corrupt_crc.e57: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/e57/64adfe538c34afd6f2429e11672172e8c6b6712c/testdata/corrupt_crc.e57 -------------------------------------------------------------------------------- /testdata/cpp_generator/.gitignore: -------------------------------------------------------------------------------- 1 | /vcpkg 2 | /build 3 | -------------------------------------------------------------------------------- /testdata/cpp_generator/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.27) 2 | project(e57-testdata-generator) 3 | find_package(E57Format CONFIG REQUIRED) 4 | add_executable(main main.cpp) 5 | target_link_libraries(main PRIVATE E57Format) 6 | -------------------------------------------------------------------------------- /testdata/cpp_generator/README.md: 
-------------------------------------------------------------------------------- 1 | This folder contains some C++ code that uses the libE57Format library. 2 | The code was used to generate test data for the Rust integration tests. 3 | Its using CMake for building and VCPKG for resolving all dependencies. 4 | A simple Powershell build script for Windows is also included. 5 | -------------------------------------------------------------------------------- /testdata/cpp_generator/build_and_run.ps1: -------------------------------------------------------------------------------- 1 | if (Test-Path build) { Remove-Item -Recurse -Force build } 2 | if (Test-Path vcpkg) { Remove-Item -Recurse -Force vcpkg } 3 | git clone https://github.com/Microsoft/vcpkg.git 4 | ./vcpkg/bootstrap-vcpkg.bat -disableMetrics 5 | mkdir build | out-null 6 | cmake -B ./build -S . -DCMAKE_TOOLCHAIN_FILE="./vcpkg/scripts/buildsystems/vcpkg.cmake" 7 | cmake --build ./build --config Debug 8 | ./build/Debug/main.exe 9 | -------------------------------------------------------------------------------- /testdata/cpp_generator/main.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | 4 | #include 5 | #include 6 | 7 | void empty() { 8 | e57::Writer writer("empty.e57"); 9 | if (!writer.Close()) throw std::string("Failed to close empty.e57"); 10 | } 11 | 12 | void tiny_pc() { 13 | e57::Writer writer("tiny_pc.e57"); 14 | e57::DateTime time; 15 | time.isAtomicClockReferenced = 1; 16 | time.dateTimeValue = 1.23; 17 | e57::Data3D header; 18 | header.guid = "guid"; 19 | header.name = "name"; 20 | header.description = "desc"; 21 | header.sensorFirmwareVersion = "fw"; 22 | header.sensorHardwareVersion = "hw"; 23 | header.sensorSoftwareVersion = "sw"; 24 | header.sensorModel = "model"; 25 | header.sensorVendor = "vendor"; 26 | header.sensorSerialNumber = "serial"; 27 | header.relativeHumidity = 99; 28 | header.temperature = 20; 29 | 
header.acquisitionStart = time; 30 | header.acquisitionEnd = time; 31 | header.pointCount = 1; 32 | header.pointFields.cartesianXField = true; 33 | header.pointFields.cartesianYField = true; 34 | header.pointFields.cartesianZField = true; 35 | e57::Data3DPointsFloat buffers(header); 36 | buffers.cartesianX[0] = 1; 37 | buffers.cartesianY[0] = 2; 38 | buffers.cartesianZ[0] = 3; 39 | writer.WriteData3DData(header, buffers); 40 | if (!writer.Close()) throw std::string("Failed to close tiny_pc.e57"); 41 | } 42 | 43 | void tiny_pc_with_extension() { 44 | e57::Writer writer("tiny_pc_with_extension.e57"); 45 | e57::Data3D header; 46 | header.pointCount = 1; 47 | header.pointFields.cartesianXField = true; 48 | header.pointFields.cartesianYField = true; 49 | header.pointFields.cartesianZField = true; 50 | header.pointFields.pointRangeNodeType = e57::NumericalNodeType::Double; 51 | header.pointFields.normalXField = true; 52 | header.pointFields.normalYField = true; 53 | header.pointFields.normalZField = true; 54 | e57::Data3DPointsDouble buffers(header); 55 | buffers.cartesianX[0] = 1; 56 | buffers.cartesianY[0] = 2; 57 | buffers.cartesianZ[0] = 3; 58 | buffers.normalX[0] = 1; 59 | buffers.normalY[0] = 0; 60 | buffers.normalZ[0] = 0; 61 | writer.WriteData3DData(header, buffers); 62 | if (!writer.Close()) throw std::string("Failed to close tiny_pc_with_extension.e57"); 63 | } 64 | 65 | void empty_pc() { 66 | e57::Writer writer("empty_pc.e57"); 67 | e57::Data3D header; 68 | header.pointCount = 0; 69 | header.pointFields.cartesianXField = true; 70 | header.pointFields.cartesianYField = true; 71 | header.pointFields.cartesianZField = true; 72 | e57::Data3DPointsFloat buffers; 73 | buffers.cartesianX = new float[0]; 74 | buffers.cartesianY = new float[0]; 75 | buffers.cartesianZ = new float[0]; 76 | writer.WriteData3DData(header, buffers); 77 | if (!writer.Close()) throw std::string("Failed to close empty_pc.e57"); 78 | } 79 | 80 | void tiny_pc_and_images() { 81 | e57::Writer 
writer("tiny_pc_and_images.e57"); 82 | 83 | e57::Data3D header; 84 | header.pointCount = 2; 85 | header.pointFields.cartesianXField = true; 86 | header.pointFields.cartesianYField = true; 87 | header.pointFields.cartesianZField = true; 88 | e57::Data3DPointsFloat buffers(header); 89 | buffers.cartesianX[0] = 0; 90 | buffers.cartesianY[0] = 0; 91 | buffers.cartesianZ[0] = 0; 92 | buffers.cartesianX[1] = 1; 93 | buffers.cartesianY[1] = 1; 94 | buffers.cartesianZ[1] = 1; 95 | writer.WriteData3DData(header, buffers); 96 | 97 | std::vector jpegData; 98 | { 99 | std::ifstream ifs("../castle.jpg", std::ios::in | std::ios::binary); 100 | if (!ifs) throw std::string("Cannot open JPEG file"); 101 | ifs.seekg(0, ifs.end); 102 | auto length = ifs.tellg(); 103 | ifs.seekg(0, ifs.beg); 104 | jpegData.resize(length); 105 | ifs.read((char*)jpegData.data(), length); 106 | } 107 | 108 | std::vector pngData; 109 | { 110 | std::ifstream ifs("../square.png", std::ios::in | std::ios::binary); 111 | if (!ifs) throw std::string("Cannot open PNG file"); 112 | ifs.seekg(0, ifs.end); 113 | auto length = ifs.tellg(); 114 | ifs.seekg(0, ifs.beg); 115 | pngData.resize(length); 116 | ifs.read((char*)pngData.data(), length); 117 | } 118 | 119 | e57::Image2D visImg; 120 | visImg.name = "visual"; 121 | visImg.visualReferenceRepresentation.imageHeight = 100; 122 | visImg.visualReferenceRepresentation.imageWidth = 100; 123 | visImg.visualReferenceRepresentation.jpegImageSize = jpegData.size(); 124 | writer.WriteImage2DData(visImg, e57::Image2DType::ImageJPEG, e57::Image2DProjection::ProjectionVisual, 0, jpegData.data(), jpegData.size()); 125 | 126 | e57::Image2D sphImg; 127 | sphImg.name = "spherical"; 128 | sphImg.sensorModel = "sensor"; 129 | sphImg.sensorSerialNumber = "serial"; 130 | sphImg.sensorVendor = "vendor"; 131 | sphImg.associatedData3DGuid = header.guid; 132 | sphImg.description = "desc"; 133 | sphImg.pose.rotation.x = 1; 134 | sphImg.pose.rotation.y = 0; 135 | sphImg.pose.rotation.z = 
0; 136 | sphImg.pose.rotation.w = 0.5; 137 | sphImg.pose.translation.x = 1; 138 | sphImg.pose.translation.y = 2; 139 | sphImg.pose.translation.z = 3; 140 | sphImg.sphericalRepresentation.imageHeight = 100; 141 | sphImg.sphericalRepresentation.imageWidth = 100; 142 | sphImg.sphericalRepresentation.pixelHeight = 0.0314; 143 | sphImg.sphericalRepresentation.pixelWidth = 0.0314; 144 | sphImg.sphericalRepresentation.pngImageSize = pngData.size(); 145 | writer.WriteImage2DData(sphImg, e57::Image2DType::ImagePNG, e57::Image2DProjection::ProjectionSpherical, 0, pngData.data(), pngData.size()); 146 | 147 | e57::Image2D pinImg; 148 | pinImg.name = "pinhole"; 149 | pinImg.pinholeRepresentation.imageHeight = 100; 150 | pinImg.pinholeRepresentation.imageWidth = 100; 151 | pinImg.pinholeRepresentation.pixelHeight = 0.033; 152 | pinImg.pinholeRepresentation.pixelWidth = 0.044; 153 | pinImg.pinholeRepresentation.focalLength = 123; 154 | pinImg.pinholeRepresentation.principalPointX = 23; 155 | pinImg.pinholeRepresentation.principalPointY = 42; 156 | pinImg.pinholeRepresentation.pngImageSize = pngData.size(); 157 | writer.WriteImage2DData(pinImg, e57::Image2DType::ImageJPEG, e57::Image2DProjection::ProjectionPinhole, 0, jpegData.data(), jpegData.size()); 158 | 159 | e57::Image2D cylImg; 160 | cylImg.name = "cylindrical"; 161 | cylImg.cylindricalRepresentation.imageHeight = 100; 162 | cylImg.cylindricalRepresentation.imageWidth = 100; 163 | cylImg.cylindricalRepresentation.pixelHeight = 0.033; 164 | cylImg.cylindricalRepresentation.pixelWidth = 0.044; 165 | cylImg.cylindricalRepresentation.principalPointY = 42; 166 | cylImg.cylindricalRepresentation.radius = 666; 167 | cylImg.cylindricalRepresentation.pngImageSize = pngData.size(); 168 | writer.WriteImage2DData(cylImg, e57::Image2DType::ImageJPEG, e57::Image2DProjection::ProjectionCylindrical, 0, jpegData.data(), jpegData.size()); 169 | 170 | if (!writer.Close()) throw std::string("Failed to close tiny_pc_and_images.e57"); 171 | } 
172 | 173 | void tiny_spherical() { 174 | e57::Writer writer("tiny_spherical.e57"); 175 | e57::Data3D header; 176 | header.pointCount = 360; 177 | header.pointFields.sphericalAzimuthField = true; 178 | header.pointFields.sphericalElevationField = true; 179 | header.pointFields.sphericalRangeField = true; 180 | header.pointFields.sphericalInvalidStateField = true; 181 | e57::Data3DPointsDouble buffers(header); 182 | for (auto i = 0; i < header.pointCount; i++) { 183 | buffers.sphericalAzimuth[i] = i * (3.14 / 360.0); 184 | buffers.sphericalElevation[i] = i * (3.14 / 360.0); 185 | buffers.sphericalRange[i] = 1.0; 186 | buffers.sphericalInvalidState[i] = i % 2 ? 1 : 0; 187 | } 188 | writer.WriteData3DData(header, buffers); 189 | if (!writer.Close()) throw std::string("Failed to close tiny_spherical.e57"); 190 | } 191 | 192 | void original_guids() { 193 | e57::Writer writer("original_guids.e57"); 194 | e57::Data3D header; 195 | header.pointCount = 1; 196 | header.pointFields.cartesianXField = true; 197 | header.pointFields.cartesianYField = true; 198 | header.pointFields.cartesianZField = true; 199 | header.originalGuids.push_back("guid1"); 200 | header.originalGuids.push_back("guid2"); 201 | header.originalGuids.push_back("guid3"); 202 | e57::Data3DPointsFloat buffers(header); 203 | buffers.cartesianX[0] = 1; 204 | buffers.cartesianY[0] = 2; 205 | buffers.cartesianZ[0] = 3; 206 | writer.WriteData3DData(header, buffers); 207 | } 208 | 209 | void integer_intensity() { 210 | e57::Writer writer("integer_intensity.e57"); 211 | e57::Data3D header; 212 | header.pointCount = 2; 213 | header.pointFields.cartesianInvalidStateField = true; 214 | header.pointFields.cartesianXField = true; 215 | header.pointFields.cartesianYField = true; 216 | header.pointFields.cartesianZField = true; 217 | header.pointFields.intensityField = true; 218 | header.pointFields.intensityNodeType = e57::NumericalNodeType::Integer; 219 | e57::Data3DPointsDouble buffers(header); 220 | 
buffers.cartesianInvalidState[0] = 0; 221 | buffers.cartesianX[0] = 1.1; 222 | buffers.cartesianY[0] = 2.2; 223 | buffers.cartesianZ[0] = 3.3; 224 | buffers.intensity[0] = -66; 225 | buffers.cartesianInvalidState[1] = 0; 226 | buffers.cartesianX[1] = 4.4; 227 | buffers.cartesianY[1] = 5.5; 228 | buffers.cartesianZ[1] = 6.6; 229 | buffers.intensity[1] = 66; 230 | writer.WriteData3DData(header, buffers); 231 | if (!writer.Close()) throw std::string("Failed to close integer_intensity.e57"); 232 | } 233 | 234 | void scaled_integer_intensity() { 235 | e57::Writer writer("scaled_integer_intensity.e57"); 236 | e57::Data3D header; 237 | header.pointCount = 2; 238 | header.pointFields.cartesianInvalidStateField = true; 239 | header.pointFields.cartesianXField = true; 240 | header.pointFields.cartesianYField = true; 241 | header.pointFields.cartesianZField = true; 242 | header.pointFields.intensityField = true; 243 | header.pointFields.intensityNodeType = e57::NumericalNodeType::ScaledInteger; 244 | header.pointFields.intensityScale = 0.1; 245 | e57::Data3DPointsDouble buffers(header); 246 | buffers.cartesianInvalidState[0] = 0; 247 | buffers.cartesianX[0] = 1.1; 248 | buffers.cartesianY[0] = 2.2; 249 | buffers.cartesianZ[0] = 3.3; 250 | buffers.intensity[0] = -66.6; 251 | buffers.cartesianInvalidState[1] = 0; 252 | buffers.cartesianX[1] = 4.4; 253 | buffers.cartesianY[1] = 5.5; 254 | buffers.cartesianZ[1] = 6.6; 255 | buffers.intensity[1] = 66.6; 256 | writer.WriteData3DData(header, buffers); 257 | if (!writer.Close()) throw std::string("Failed to close scaled_integer_intensity.e57"); 258 | } 259 | 260 | // Read error reproduction case for 261 | // https://github.com/cry-inc/e57/issues/12 262 | void read_error() { 263 | e57::WriterOptions options; 264 | e57::Writer writer("read_error.e57", options); 265 | e57::Data3D header; 266 | header.pointCount = 2651; 267 | header.pointFields.colorRedField = true; 268 | header.pointFields.colorGreenField = true; 269 | 
header.pointFields.colorBlueField = true; 270 | header.pointFields.sphericalAzimuthField = true; 271 | header.pointFields.sphericalElevationField = true; 272 | header.pointFields.sphericalRangeField = true; 273 | header.pointFields.sphericalInvalidStateField = true; 274 | header.pointFields.intensityField = true; 275 | header.pointFields.rowIndexField = true; 276 | header.pointFields.columnIndexField = true; 277 | header.colorLimits.colorRedMaximum = 255; 278 | header.colorLimits.colorGreenMaximum = 255; 279 | header.colorLimits.colorBlueMaximum = 255; 280 | header.intensityLimits.intensityMinimum = 0; 281 | header.intensityLimits.intensityMaximum = 0; 282 | header.pointFields.pointRangeNodeType = e57::NumericalNodeType::Float; 283 | header.pointFields.angleNodeType = e57::NumericalNodeType::Float; 284 | header.pointFields.intensityNodeType = e57::NumericalNodeType::Integer; 285 | header.pointFields.columnIndexMaximum = 5742 - 1; 286 | header.pointFields.rowIndexMaximum = 8534 - 1; 287 | e57::Data3DPointsFloat buffer(header); 288 | for (size_t i = 0; i < header.pointCount; i++) { 289 | buffer.sphericalAzimuth[i] = 0; 290 | buffer.sphericalElevation[i] = 0; 291 | buffer.sphericalRange[i] = 0; 292 | buffer.sphericalInvalidState[i] = 0; 293 | buffer.colorRed[i] = 0; 294 | buffer.colorGreen[i] = 0; 295 | buffer.colorBlue[i] = 0; 296 | buffer.intensity[i] = 0; 297 | buffer.rowIndex[i] = 0; 298 | buffer.columnIndex[i] = 0; 299 | } 300 | writer.WriteData3DData(header, buffer); 301 | writer.Close(); 302 | } 303 | 304 | int main() { 305 | empty(); 306 | tiny_pc(); 307 | tiny_pc_with_extension(); 308 | empty_pc(); 309 | tiny_pc_and_images(); 310 | tiny_spherical(); 311 | original_guids(); 312 | integer_intensity(); 313 | scaled_integer_intensity(); 314 | read_error(); 315 | 316 | std::cout << "Finished!\n"; 317 | } 318 | -------------------------------------------------------------------------------- /testdata/cpp_generator/vcpkg.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://raw.githubusercontent.com/microsoft/vcpkg-tool/main/docs/vcpkg.schema.json", 3 | "name": "e57-testdata-generator", 4 | "version": "0.1.0", 5 | "dependencies": [ 6 | "libe57format" 7 | ] 8 | } 9 | -------------------------------------------------------------------------------- /testdata/empty.e57: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/e57/64adfe538c34afd6f2429e11672172e8c6b6712c/testdata/empty.e57 -------------------------------------------------------------------------------- /testdata/empty_pc.e57: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/e57/64adfe538c34afd6f2429e11672172e8c6b6712c/testdata/empty_pc.e57 -------------------------------------------------------------------------------- /testdata/float_intensity_without_min_max.e57: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/e57/64adfe538c34afd6f2429e11672172e8c6b6712c/testdata/float_intensity_without_min_max.e57 -------------------------------------------------------------------------------- /testdata/integer_intensity.e57: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/e57/64adfe538c34afd6f2429e11672172e8c6b6712c/testdata/integer_intensity.e57 -------------------------------------------------------------------------------- /testdata/las2e57_no_images_tag.e57: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/e57/64adfe538c34afd6f2429e11672172e8c6b6712c/testdata/las2e57_no_images_tag.e57 -------------------------------------------------------------------------------- /testdata/no_ext_namespace.e57: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/e57/64adfe538c34afd6f2429e11672172e8c6b6712c/testdata/no_ext_namespace.e57 -------------------------------------------------------------------------------- /testdata/original_guids.e57: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/e57/64adfe538c34afd6f2429e11672172e8c6b6712c/testdata/original_guids.e57 -------------------------------------------------------------------------------- /testdata/read_error.e57: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/e57/64adfe538c34afd6f2429e11672172e8c6b6712c/testdata/read_error.e57 -------------------------------------------------------------------------------- /testdata/scaled_integer_intensity.e57: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/e57/64adfe538c34afd6f2429e11672172e8c6b6712c/testdata/scaled_integer_intensity.e57 -------------------------------------------------------------------------------- /testdata/square.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/e57/64adfe538c34afd6f2429e11672172e8c6b6712c/testdata/square.png -------------------------------------------------------------------------------- /testdata/tinyCartesianFloatRgb.e57: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/e57/64adfe538c34afd6f2429e11672172e8c6b6712c/testdata/tinyCartesianFloatRgb.e57 -------------------------------------------------------------------------------- /testdata/tiny_pc_and_images.e57: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/cry-inc/e57/64adfe538c34afd6f2429e11672172e8c6b6712c/testdata/tiny_pc_and_images.e57 -------------------------------------------------------------------------------- /testdata/tiny_pc_with_extension.e57: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/e57/64adfe538c34afd6f2429e11672172e8c6b6712c/testdata/tiny_pc_with_extension.e57 -------------------------------------------------------------------------------- /testdata/tiny_spherical.e57: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/cry-inc/e57/64adfe538c34afd6f2429e11672172e8c6b6712c/testdata/tiny_spherical.e57 -------------------------------------------------------------------------------- /tests/extensions.rs: -------------------------------------------------------------------------------- 1 | use e57::{Blob, E57Reader, E57Writer, Error, Extension, RawValues}; 2 | use e57::{Record, RecordDataType, RecordName, RecordValue}; 3 | use roxmltree::Document; 4 | use std::fs::remove_file; 5 | use std::io::Cursor; 6 | use std::path::Path; 7 | 8 | // This test demonstrates how to write and read E57 files with custom extensions. 9 | // Please check the documentation at for additional information! 
10 | #[test] 11 | fn extensions_example() { 12 | let path = Path::new("extension_example.e57"); 13 | 14 | { 15 | // Write an E57 file with extension 16 | let mut writer = E57Writer::from_file(path, "file_guid").unwrap(); 17 | 18 | // Define extension 19 | let ext = Extension { 20 | namespace: "myext".to_owned(), 21 | url: "https://www.mycorp.com/myext".to_owned(), 22 | }; 23 | 24 | // Register extension 25 | writer.register_extension(ext.clone()).unwrap(); 26 | 27 | // Define point cloud prototype with XYZ and custom classification attribute 28 | let prototype = vec![ 29 | Record::CARTESIAN_X_F32, 30 | Record::CARTESIAN_Y_F32, 31 | Record::CARTESIAN_Z_F32, 32 | Record { 33 | name: RecordName::Unknown { 34 | namespace: ext.namespace.clone(), 35 | name: String::from("classification"), 36 | }, 37 | data_type: RecordDataType::Integer { min: 0, max: 10 }, 38 | }, 39 | ]; 40 | 41 | // Add example point cloud with extension attribute 42 | let mut pc_writer = writer.add_pointcloud("pc_guid", prototype).unwrap(); 43 | pc_writer 44 | .add_point(vec![ 45 | RecordValue::Single(1.0), 46 | RecordValue::Single(2.0), 47 | RecordValue::Single(3.0), 48 | RecordValue::Integer(9), 49 | ]) 50 | .unwrap(); 51 | pc_writer.finalize().unwrap(); 52 | 53 | // Add additional binary data to the E57 file 54 | let data: Vec = vec![1, 3, 3, 7]; 55 | let mut cursor = Cursor::new(data); 56 | let blob = writer.add_blob(&mut cursor).unwrap(); 57 | 58 | // Prepare custom XML tag for blob data 59 | let blob_xml = format!( 60 | "", 61 | blob.offset, blob.length 62 | ); 63 | 64 | // Finalize file and inject additional XML tag using transformer closure 65 | let transformer = |xml: String| { 66 | let old = ""; 67 | let new = format!("{}\n", blob_xml); 68 | Ok(xml.replace(old, &new)) 69 | }; 70 | writer.finalize_customized_xml(transformer).unwrap(); 71 | } 72 | 73 | { 74 | // Open E57 file with extenstion for reading 75 | let mut e57 = E57Reader::from_file(path).unwrap(); 76 | 77 | // Check extensions 
registered as XML namespaces 78 | let extensions = e57.extensions(); 79 | assert_eq!(extensions.len(), 1); 80 | let ext = extensions.first().unwrap(); 81 | assert_eq!(ext.namespace, "myext"); 82 | assert_eq!(ext.url, "https://www.mycorp.com/myext"); 83 | 84 | // Get point cloud and check for custom attribute 85 | let pointclouds = e57.pointclouds(); 86 | assert_eq!(pointclouds.len(), 1); 87 | let pointcloud = pointclouds.first().unwrap(); 88 | let custom_record = &pointcloud.prototype[3]; 89 | assert_eq!( 90 | custom_record.name, 91 | RecordName::Unknown { 92 | namespace: String::from("myext"), 93 | name: String::from("classification") 94 | } 95 | ); 96 | 97 | // Read point data and check custom attribute value 98 | let points = e57 99 | .pointcloud_raw(pointcloud) 100 | .unwrap() 101 | .collect::, Error>>() 102 | .unwrap(); 103 | assert_eq!(points.len(), 1); 104 | let point = points.first().unwrap(); 105 | assert_eq!(point[3], RecordValue::Integer(9)); 106 | 107 | // Get custom binary blob metadata from XML using roxmltree 108 | let xml = e57.xml(); 109 | let document = Document::parse(xml).unwrap(); 110 | let blob = document 111 | .descendants() 112 | .find(|node| node.has_tag_name("myblob")) 113 | .unwrap(); 114 | let offset = blob.attribute("offset").unwrap().parse::().unwrap(); 115 | let length = blob.attribute("length").unwrap().parse::().unwrap(); 116 | 117 | // Read blob data from E57 file 118 | let blob = Blob::new(offset, length); 119 | let mut data = Vec::new(); 120 | e57.blob(&blob, &mut data).unwrap(); 121 | assert_eq!(data.len(), 4); 122 | } 123 | 124 | remove_file(path).unwrap(); 125 | } 126 | -------------------------------------------------------------------------------- /tools/e57-check-crc/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "e57-check-crc" 3 | version = "0.1.0" 4 | edition = "2021" 5 | publish = false 6 | 7 | [dependencies] 8 | e57 = { path = "../../" } 9 | anyhow 
= "1" 10 | -------------------------------------------------------------------------------- /tools/e57-check-crc/src/main.rs: -------------------------------------------------------------------------------- 1 | /* 2 | * Small example application that will validate all CRC checksums of E57 files. 3 | * If the argument is a file path, it will check a single file. 4 | * If the argument is a directory, will check recurisvely all E57 files in that directory. 5 | */ 6 | 7 | use anyhow::{bail, ensure, Context, Result}; 8 | use e57::E57Reader; 9 | use std::fs::File; 10 | use std::io::BufReader; 11 | use std::path::Path; 12 | 13 | fn main() -> Result<()> { 14 | let args: Vec = std::env::args().collect(); 15 | ensure!( 16 | args.len() >= 2, 17 | "Usage:\n e57-check-crc \n e57-check-crc " 18 | ); 19 | 20 | let path_str = &args[1]; 21 | let path = Path::new(path_str); 22 | ensure!(path.exists(), "The path '{path_str}' does not exist"); 23 | 24 | let all_ok = if path.is_dir() { 25 | let files = list_e57_files(path).context("Failed to list E57 files")?; 26 | println!("Found {} files, starting validation...", files.len()); 27 | check_files(&files) 28 | } else if path.is_file() { 29 | check_file(path_str) 30 | } else { 31 | bail!("The path '{path_str}' does not point to a directory or a file"); 32 | }; 33 | 34 | if !all_ok { 35 | bail!("Some of the checked files are not okay") 36 | } 37 | 38 | println!("All files are okay!"); 39 | Ok(()) 40 | } 41 | 42 | fn list_e57_files(path: &Path) -> Result> { 43 | let mut res = Vec::new(); 44 | for entry in path.read_dir().expect("Failed to read directory").flatten() { 45 | let path = entry.path(); 46 | if path.is_file() { 47 | if let Some(ext) = path.extension() { 48 | let ext = ext 49 | .to_str() 50 | .context("Failed to extract file extension as string")? 51 | .to_ascii_lowercase(); 52 | if ext == "e57" { 53 | res.push( 54 | path.to_str() 55 | .context("Failed to convert path to string")? 
56 | .to_string(), 57 | ); 58 | } 59 | } 60 | } else if path.is_dir() { 61 | let mut files = list_e57_files(&path)?; 62 | res.append(&mut files); 63 | } 64 | } 65 | Ok(res) 66 | } 67 | 68 | fn check_files(files: &[String]) -> bool { 69 | files.iter().all(|f| check_file(f)) 70 | } 71 | 72 | fn check_file(file_str: &str) -> bool { 73 | match File::open(file_str) { 74 | Ok(file) => match E57Reader::validate_crc(BufReader::new(file)) { 75 | Ok(_) => { 76 | println!("Validated file '{file_str}' successfully"); 77 | true 78 | } 79 | Err(err) => { 80 | eprintln!("Failed to validate file '{file_str}': {err:#}"); 81 | false 82 | } 83 | }, 84 | Err(err) => { 85 | eprintln!("Failed to validate file '{file_str}': Failed to open file: {err:#}"); 86 | false 87 | } 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /tools/e57-extract-scan-info/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "e57-extract-scan-info" 3 | version = "0.1.0" 4 | edition = "2021" 5 | publish = false 6 | 7 | [dependencies] 8 | e57 = { path = "../../" } 9 | anyhow = "1" 10 | -------------------------------------------------------------------------------- /tools/e57-extract-scan-info/src/main.rs: -------------------------------------------------------------------------------- 1 | /* 2 | * Small example application that extracts some metadata for 3 | * all scans/point clouds in the E57 file into a CSV file. 4 | * 5 | * The CSV will contain the following properties of each point cloud: 6 | * - GUID 7 | * - Name of the point cloud 8 | * - Number of points 9 | * - Position (translation part of the transform) as X,Y,Z 10 | * - Rotation quaternion of the transform as X,Y,Z,W 11 | * 12 | * There will be one line per point cloud and each value 13 | * is separated by an semicolon and ends with an Unix line break. 14 | * 15 | * The output file will be named like the input file plus `.csv` extension. 
16 | */ 17 | 18 | use anyhow::{ensure, Context, Result}; 19 | use e57::{E57Reader, Quaternion, Translation}; 20 | 21 | fn main() -> Result<()> { 22 | let args: Vec = std::env::args().collect(); 23 | ensure!( 24 | args.len() >= 2, 25 | "Usage: e57-extract-scan-info " 26 | ); 27 | 28 | let infile = &args[1]; 29 | let outfile = format!("{infile}.csv"); 30 | 31 | let reader = E57Reader::from_file(infile).context("Failed to open E57 file")?; 32 | let mut csv_data = String::new(); 33 | for pc in reader.pointclouds() { 34 | let guid = pc.guid.unwrap_or_default(); 35 | let name = pc.name.unwrap_or_default(); 36 | let points = pc.records; 37 | let transform = pc.transform.unwrap_or_default(); 38 | let Translation { x, y, z } = transform.translation; 39 | let position = format!("{x},{y},{z}"); 40 | let Quaternion { w, x, y, z } = transform.rotation; 41 | let rotation = format!("{x},{y},{z},{w}"); 42 | let line = format!("{guid};{name};{points};{position};{rotation}\n"); 43 | csv_data.push_str(&line); 44 | } 45 | 46 | std::fs::write(outfile, csv_data).context("Failed to write outputr CSV file") 47 | } 48 | -------------------------------------------------------------------------------- /tools/e57-extract-xml/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "e57-extract-xml" 3 | version = "0.1.0" 4 | edition = "2021" 5 | publish = false 6 | 7 | [dependencies] 8 | e57 = { path = "../../" } 9 | anyhow = "1" 10 | -------------------------------------------------------------------------------- /tools/e57-extract-xml/src/main.rs: -------------------------------------------------------------------------------- 1 | /* 2 | * Small example application that will dump the XML section of any E57 to stdout. 
3 | */ 4 | 5 | use anyhow::{ensure, Context, Result}; 6 | use e57::E57Reader; 7 | use std::fs::File; 8 | use std::io::{stdout, BufReader, Write}; 9 | 10 | fn main() -> Result<()> { 11 | let args: Vec = std::env::args().collect(); 12 | ensure!(args.len() >= 2, "Usage: e57-extract-xml "); 13 | 14 | let file = File::open(&args[1]).context("Failed to open E57 file")?; 15 | let reader = BufReader::new(file); 16 | let xml = E57Reader::raw_xml(reader).context("Failed to extract XML data")?; 17 | 18 | stdout() 19 | .write_all(&xml) 20 | .context("Failed to write XML data to stdout") 21 | } 22 | -------------------------------------------------------------------------------- /tools/e57-from-xyz/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "e57-from-xyz" 3 | version = "0.1.0" 4 | edition = "2021" 5 | publish = false 6 | 7 | [dependencies] 8 | e57 = { path = "../../" } 9 | anyhow = "1" 10 | uuid = { version = "1", features = ["v4"] } 11 | -------------------------------------------------------------------------------- /tools/e57-from-xyz/src/main.rs: -------------------------------------------------------------------------------- 1 | /* 2 | * Small example that can convert point clouds from XYZ into E57 files. 3 | * 4 | * The output file name will be the input file name plus ".e57". 5 | * The values in the input file need to be separated by spaces. 6 | * The first three values in each line must be X, Y and Z (as floating point values) 7 | * and last three values must be integers between 0 and 255 for red, green and blue. 8 | * Any additional columns will be ignored. 9 | * 10 | * X, Y and Z will be stored as Cartesian coordinates with 32 floarting points bits for each component. 11 | * RGB colors will be stored with 8 integer bits for each component. 
12 | */ 13 | 14 | use anyhow::{ensure, Context, Result}; 15 | use e57::{E57Writer, Record, RecordValue}; 16 | use std::env::args; 17 | use std::fs::File; 18 | use std::io::{BufRead, BufReader}; 19 | use uuid::Uuid; 20 | 21 | fn main() -> Result<()> { 22 | let args: Vec = args().collect(); 23 | ensure!(args.len() >= 2, "Usage: e57-from-xyz "); 24 | 25 | let in_file = args[1].clone(); 26 | let out_file = in_file.clone() + ".e57"; 27 | 28 | let file = File::open(in_file).context("Failed to open XYZ file")?; 29 | let mut reader = BufReader::new(file); 30 | 31 | let file_guid = Uuid::new_v4().to_string(); 32 | let mut e57_writer = E57Writer::from_file(out_file, &file_guid) 33 | .context("Unable to open E57 output file for writing")?; 34 | 35 | let pc_guid = Uuid::new_v4().to_string(); 36 | let prototype = vec![ 37 | Record::CARTESIAN_X_F32, 38 | Record::CARTESIAN_Y_F32, 39 | Record::CARTESIAN_Z_F32, 40 | Record::COLOR_RED_U8, 41 | Record::COLOR_GREEN_U8, 42 | Record::COLOR_BLUE_U8, 43 | ]; 44 | let mut pc_writer = e57_writer 45 | .add_pointcloud(&pc_guid, prototype) 46 | .context("Failed to create point cloud writer")?; 47 | 48 | let mut line = String::new(); 49 | while reader 50 | .read_line(&mut line) 51 | .context("Failed to read line from XYZ file")? 
52 | > 0 53 | { 54 | let parts: Vec<&str> = line.trim().split(' ').collect(); 55 | if parts.len() >= 6 { 56 | // Parse XYZ ASCII data 57 | let x: f32 = parts[0].parse().context("Failed to parse X value")?; 58 | let y: f32 = parts[1].parse().context("Failed to parse Y value")?; 59 | let z: f32 = parts[2].parse().context("Failed to parse Z value")?; 60 | let r: u8 = parts[3].parse().context("Failed to parse red value")?; 61 | let g: u8 = parts[4].parse().context("Failed to parse green value")?; 62 | let b: u8 = parts[5].parse().context("Failed to parse blue value")?; 63 | 64 | // Create E57 point for inserting 65 | let point = vec![ 66 | RecordValue::Single(x), 67 | RecordValue::Single(y), 68 | RecordValue::Single(z), 69 | RecordValue::Integer(r as i64), 70 | RecordValue::Integer(g as i64), 71 | RecordValue::Integer(b as i64), 72 | ]; 73 | pc_writer 74 | .add_point(point) 75 | .context("Failed to add E57 point")?; 76 | } 77 | line.clear(); 78 | } 79 | 80 | pc_writer 81 | .finalize() 82 | .context("Failed to finalize point cloud in E57 file")?; 83 | 84 | e57_writer 85 | .finalize() 86 | .context("Failed to finalize E57 file")?; 87 | 88 | Ok(()) 89 | } 90 | -------------------------------------------------------------------------------- /tools/e57-to-image/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "e57-to-image" 3 | version = "0.1.0" 4 | edition = "2021" 5 | publish = false 6 | 7 | [dependencies] 8 | e57 = { path = "../../" } 9 | anyhow = "1" 10 | png = "0.17" 11 | -------------------------------------------------------------------------------- /tools/e57-to-image/src/main.rs: -------------------------------------------------------------------------------- 1 | /* 2 | * Small example application that converts structured scans in E57 files to planar PNG RGBA images. 3 | * This works only for structured scans with row and column indices. 
4 | * By default the point color will be used, the intensity will be used as fallback. 5 | * Areas without color and intensity will stay transparent and black. 6 | * 7 | * Important hint: 8 | * To get the existing PNG or JPEG images stored in E57 files use the `e57-unpack` tool instead. 9 | * 10 | * The output files will be named like the input file and placed in the same folder. 11 | * They will have an additional number suffix and the extension PNG. 12 | * 13 | * Please note that the output picture *must not* be treated as 360 degree panorama image. 14 | * It just visualizes the 2D row/column grid of the scans as is. 15 | * Use the tool `e57-to-pano` if you need valid 360 degree panorama images! 16 | */ 17 | 18 | use anyhow::{ensure, Context, Result}; 19 | use e57::E57Reader; 20 | use png::Encoder; 21 | use std::{env::args, fs::File, io::BufWriter, path::Path}; 22 | 23 | fn main() -> Result<()> { 24 | // Check command line arguments and show usage 25 | let args: Vec = args().collect(); 26 | ensure!(args.len() >= 2, "Usage: e57-to-image "); 27 | 28 | // Prepare input file path 29 | let in_path = Path::new(&args[1]); 30 | 31 | // Open E57 input file for reading 32 | let mut file = E57Reader::from_file(in_path).context("Failed to open E57 file")?; 33 | 34 | // Loop over all point clouds in the E57 file 35 | let pointclouds = file.pointclouds(); 36 | for (index, pointcloud) in pointclouds.iter().enumerate() { 37 | if !pointcloud.has_row_column() { 38 | println!("Point cloud #{index} has no row/column indices, skipping..."); 39 | continue; 40 | } 41 | 42 | if !pointcloud.has_color() && !pointcloud.has_intensity() { 43 | println!("Point cloud #{index} has no color and no intensity, skipping..."); 44 | continue; 45 | } 46 | 47 | if pointcloud.records < 1 { 48 | println!("Point cloud #{index} is empty, skipping..."); 49 | continue; 50 | } 51 | 52 | // First loop over all points to determine image size 53 | let mut row_min = i64::MAX; 54 | let mut row_max = i64::MIN; 55 | 
let mut col_min = i64::MAX; 56 | let mut col_max = i64::MIN; 57 | let iter = file 58 | .pointcloud_simple(pointcloud) 59 | .context("Unable to get simple point cloud iterator")?; 60 | for p in iter { 61 | let p = p.context("Unable to read next point")?; 62 | if p.row < row_min { 63 | row_min = p.row; 64 | } 65 | if p.row > row_max { 66 | row_max = p.row; 67 | } 68 | if p.column < col_min { 69 | col_min = p.column; 70 | } 71 | if p.column > col_max { 72 | col_max = p.column; 73 | } 74 | } 75 | 76 | // Determine image size 77 | let width = col_max - col_min; 78 | println!("Point cloud #{index} image width: {width}"); 79 | ensure!(width >= 0, "Column values have empty or negative width"); 80 | let width = (width + 1) as usize; 81 | 82 | let height = row_max - row_min; 83 | println!("Point cloud #{index} image height: {height}"); 84 | ensure!(height >= 0, "Row values have empty or negative height"); 85 | let height = (height + 1) as usize; 86 | 87 | // Allocate memory for output image RGBA buffer 88 | // Default color for all pixels is black and transparent! 89 | let mut buffer = vec![0_u8; width * height * 4]; 90 | 91 | // Second loop over all points to draw the image 92 | let iter = file 93 | .pointcloud_simple(pointcloud) 94 | .context("Unable to get simple point cloud iterator")?; 95 | for p in iter { 96 | let p = p.context("Unable to read next point")?; 97 | 98 | // Since there is a intensity to color fallback 99 | // we only need to ask for color here! 100 | let rgb = if let Some(color) = p.color { 101 | [ 102 | (color.red * 255.0) as u8, 103 | (color.green * 255.0) as u8, 104 | (color.blue * 255.0) as u8, 105 | ] 106 | } else { 107 | // Individual points might have no color and intensity. 108 | // Leave them at the default color! 
109 | continue; 110 | }; 111 | 112 | let x = (p.column - col_min) as usize; 113 | let y = (p.row - row_min) as usize; 114 | let offset = y * width * 4 + x * 4; 115 | 116 | buffer[offset] = rgb[0]; 117 | buffer[offset + 1] = rgb[1]; 118 | buffer[offset + 2] = rgb[2]; 119 | buffer[offset + 3] = 255; // Set alpha to opaque 120 | } 121 | 122 | // Prepare output file name 123 | let out_path = args[1].clone() + &format!(".{index}.png"); 124 | 125 | // Write PNG file 126 | let out_file = File::create(&out_path).context("Unable to open output file")?; 127 | let writer = BufWriter::new(out_file); 128 | let mut encoder = Encoder::new(writer, width as u32, height as u32); 129 | encoder.set_color(png::ColorType::Rgba); 130 | encoder.set_depth(png::BitDepth::Eight); 131 | let mut writer = encoder 132 | .write_header() 133 | .context("Failed to write PNG header")?; 134 | writer 135 | .write_image_data(&buffer) 136 | .context("Failed to write PNG data")?; 137 | 138 | println!("Exported image for point cloud #{index} to {out_path}"); 139 | } 140 | 141 | Ok(()) 142 | } 143 | -------------------------------------------------------------------------------- /tools/e57-to-laz/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "e57-to-laz" 3 | version = "0.1.0" 4 | edition = "2021" 5 | publish = false 6 | 7 | [dependencies] 8 | e57 = { path = "../../" } 9 | las = { version = "0.9", features = ["laz"] } 10 | anyhow = "1" 11 | -------------------------------------------------------------------------------- /tools/e57-to-laz/src/main.rs: -------------------------------------------------------------------------------- 1 | /* 2 | * Small example application that can convert all point clouds 3 | * from any E57 file into a single merged LAZ file. 4 | * 5 | * The output file name will be the input file name plus ".laz". 6 | * 7 | * Spherical coordinates are converted automatically to Cartesian coordinates. 
8 | * Invalid and incomplete coordinates will be skipped. 9 | */ 10 | 11 | use anyhow::{ensure, Context, Result}; 12 | use e57::{CartesianCoordinate, E57Reader}; 13 | use las::{Builder, Color, Point, Version, Writer}; 14 | use std::env::args; 15 | 16 | fn main() -> Result<()> { 17 | // Check command line arguments and show usage 18 | let args: Vec = args().collect(); 19 | ensure!(args.len() >= 2, "Usage: e57-to-laz "); 20 | 21 | // Prepare input and output file paths 22 | let in_file = args[1].clone(); 23 | let out_file = in_file.clone() + ".laz"; 24 | 25 | // Open E57 input file for reading 26 | let mut file = E57Reader::from_file(in_file).context("Failed to open E57 file")?; 27 | 28 | // Check if any of the input point clouds has color 29 | let has_color = file.pointclouds().iter().any(|pc| pc.has_color()); 30 | 31 | // Build LAZ header 32 | let mut builder = Builder::from(Version::default()); 33 | builder.point_format.has_color = has_color; 34 | builder.point_format.is_compressed = true; 35 | let header = builder 36 | .into_header() 37 | .context("Failed to build LAZ header")?; 38 | 39 | // Prepare writing to output file 40 | let mut writer = 41 | Writer::from_path(out_file, header).context("Failed to open new LAZ file for writing")?; 42 | 43 | // Loop over all point clouds in the E57 file 44 | let pointclouds = file.pointclouds(); 45 | for pointcloud in pointclouds { 46 | let mut iter = file 47 | .pointcloud_simple(&pointcloud) 48 | .context("Unable to get point cloud iterator")?; 49 | 50 | // Set point iterator options 51 | iter.spherical_to_cartesian(true); 52 | iter.cartesian_to_spherical(false); 53 | iter.intensity_to_color(false); 54 | iter.apply_pose(true); 55 | 56 | // Iterate over all points in point cloud 57 | for p in iter { 58 | let p = p.context("Unable to read next point")?; 59 | let mut point = Point::default(); 60 | if let CartesianCoordinate::Valid { x, y, z } = p.cartesian { 61 | point.x = x; 62 | point.y = y; 63 | point.z = z; 64 | } else { 65 
| continue; 66 | } 67 | if let Some(color) = p.color { 68 | point.color = Some(Color { 69 | red: (color.red * u16::MAX as f32) as u16, 70 | green: (color.green * u16::MAX as f32) as u16, 71 | blue: (color.blue * u16::MAX as f32) as u16, 72 | }) 73 | } 74 | if let Some(intensity) = p.intensity { 75 | point.intensity = (intensity * u16::MAX as f32) as u16; 76 | } 77 | writer 78 | .write_point(point) 79 | .context("Failed to write LAZ point")?; 80 | } 81 | } 82 | 83 | Ok(()) 84 | } 85 | -------------------------------------------------------------------------------- /tools/e57-to-pano/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "e57-to-pano" 3 | version = "0.1.0" 4 | edition = "2021" 5 | publish = false 6 | 7 | [dependencies] 8 | e57 = { path = "../../" } 9 | anyhow = "1" 10 | png = "0.17" 11 | -------------------------------------------------------------------------------- /tools/e57-to-pano/src/main.rs: -------------------------------------------------------------------------------- 1 | /* 2 | * Example application that projects structured scans in E57 files to 360 degree spherical panorama PNG RGBA images. 3 | * By default the point color will be used, the intensity will be used as fallback. 4 | * Areas without color and intensity will stay transparent and black. 5 | * The origins of the scans will be the center of projection for the generated panorama images. 6 | * Horizontally the image will cover 360 degress and vertically 180 degrees. 7 | * 8 | * Important hint: 9 | * To get the existing PNG or JPEG spherical images stored in E57 files use the `e57-unpack` tool instead. 10 | * 11 | * The output files will be named like the input file and placed in the same folder. 12 | * They will have an additional number suffix and the extension PNG. 13 | * 14 | * You are just interested in the 2D row/column grid of the structured scan? 15 | * Use the `e57-to-image` tool instead! 
16 | */ 17 | 18 | use anyhow::{ensure, Context, Result}; 19 | use e57::{E57Reader, SphericalCoordinate}; 20 | use png::Encoder; 21 | use std::{ 22 | env::args, 23 | f64::consts::{FRAC_PI_2, PI}, 24 | fs::File, 25 | io::BufWriter, 26 | path::Path, 27 | }; 28 | 29 | fn main() -> Result<()> { 30 | // Check command line arguments and show usage 31 | let args: Vec = args().collect(); 32 | ensure!( 33 | args.len() >= 2, 34 | "Usage: e57-to-pano [optional_image_width] [optional_image_height]" 35 | ); 36 | 37 | // Prepare input file path 38 | let in_path = Path::new(&args[1]); 39 | 40 | // Check optional width and height 41 | let width = if args.len() >= 3 { 42 | let width = args[2].parse::().context("Failed to parse width")?; 43 | ensure!(width > 0); 44 | Some(width) 45 | } else { 46 | None 47 | }; 48 | let height = if args.len() >= 4 { 49 | let height = args[3].parse::().context("Failed to parse height")?; 50 | ensure!(height > 0); 51 | Some(height) 52 | } else { 53 | None 54 | }; 55 | 56 | // Open E57 input file for reading 57 | let mut file = E57Reader::from_file(in_path).context("Failed to open E57 file")?; 58 | 59 | // Loop over all point clouds in the E57 file 60 | let pointclouds = file.pointclouds(); 61 | for (index, pointcloud) in pointclouds.iter().enumerate() { 62 | if !pointcloud.has_color() && !pointcloud.has_intensity() { 63 | println!("Point cloud #{index} has no color and no intensity, skipping..."); 64 | continue; 65 | } 66 | 67 | if pointcloud.records < 1 { 68 | println!("Point cloud #{index} is empty, skipping..."); 69 | continue; 70 | } 71 | 72 | if !pointcloud.has_row_column() && !pointcloud.has_spherical() { 73 | println!("Warning: Point cloud #{index} has no row/column indices and no spherical coordinates, it might be unstructured!"); 74 | } 75 | 76 | // Determine width and height of image 77 | let calc_height = (((pointcloud.records as f32) * 2.0).sqrt() / 2.0) as usize; 78 | let width = width.unwrap_or(calc_height * 2); 79 | let height = 
height.unwrap_or(calc_height); 80 | println!("Point cloud #{index} image size: {width}x{height}"); 81 | 82 | // Allocate memory for output image RGBA buffer 83 | // Default color for all pixels is black and transparent! 84 | let mut buffer = vec![0_u8; width * height * 4]; 85 | 86 | // Loop over all points to project the points into the panorama 87 | let mut iter = file 88 | .pointcloud_simple(pointcloud) 89 | .context("Unable to get simple point cloud iterator")?; 90 | iter.cartesian_to_spherical(true); // We need spherical coordinates for the projection! 91 | for p in iter { 92 | let p = p.context("Unable to read next point")?; 93 | 94 | // Since there is a intensity to color fallback 95 | // we only need to ask for color here! 96 | let rgb = if let Some(color) = p.color { 97 | [ 98 | (color.red * 255.0) as u8, 99 | (color.green * 255.0) as u8, 100 | (color.blue * 255.0) as u8, 101 | ] 102 | } else { 103 | // Individual points might have no color and intensity. 104 | // Leave them at the default color! 105 | continue; 106 | }; 107 | 108 | // Get angles from spherical coordinates 109 | let (mut az, mut el) = match p.spherical { 110 | SphericalCoordinate::Valid { 111 | azimuth, elevation, .. 
112 | } => (azimuth, elevation), 113 | SphericalCoordinate::Direction { azimuth, elevation } => (azimuth, elevation), 114 | SphericalCoordinate::Invalid => continue, // Nothing to project 115 | }; 116 | 117 | // Make sure the angles are in the expected range 118 | const TWO_PI: f64 = PI * 2.0; 119 | while az <= -PI { 120 | az += TWO_PI; 121 | } 122 | while az > PI { 123 | az -= TWO_PI; 124 | } 125 | while el <= -FRAC_PI_2 { 126 | el += PI; 127 | } 128 | while el > FRAC_PI_2 { 129 | el -= PI; 130 | } 131 | 132 | // Get X and Y coordinates in panorama image from angles 133 | let az_normalized = (az + PI) / TWO_PI; 134 | let x = (az_normalized * width as f64).clamp(0.0, (width - 1) as f64) as usize; 135 | let el_normalized = (el + FRAC_PI_2) / PI; 136 | let y = (el_normalized * height as f64).clamp(0.0, (height - 1) as f64) as usize; 137 | let x = width - x - 1; // Prevent image from being horizontally mirrored 138 | let y = height - y - 1; // Prevent image from being upside down 139 | 140 | // Set pixel color 141 | let offset = y * width * 4 + x * 4; 142 | buffer[offset] = rgb[0]; 143 | buffer[offset + 1] = rgb[1]; 144 | buffer[offset + 2] = rgb[2]; 145 | buffer[offset + 3] = 255; // Set alpha to opaque 146 | } 147 | 148 | // Prepare output file name 149 | let out_path = args[1].clone() + &format!(".{index}.png"); 150 | 151 | // Write PNG file 152 | let out_file = File::create(&out_path).context("Unable to open output file")?; 153 | let writer = BufWriter::new(out_file); 154 | let mut encoder = Encoder::new(writer, width as u32, height as u32); 155 | encoder.set_color(png::ColorType::Rgba); 156 | encoder.set_depth(png::BitDepth::Eight); 157 | let mut writer = encoder 158 | .write_header() 159 | .context("Failed to write PNG header")?; 160 | writer 161 | .write_image_data(&buffer) 162 | .context("Failed to write PNG data")?; 163 | 164 | println!("Exported panorama for point cloud #{index} to {out_path}"); 165 | } 166 | 167 | Ok(()) 168 | } 169 | 
-------------------------------------------------------------------------------- /tools/e57-to-xyz/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "e57-to-xyz" 3 | version = "0.1.0" 4 | edition = "2021" 5 | publish = false 6 | 7 | [dependencies] 8 | e57 = { path = "../../", features = ["crc32c"] } 9 | anyhow = "1" 10 | ryu = "1" 11 | -------------------------------------------------------------------------------- /tools/e57-to-xyz/src/main.rs: -------------------------------------------------------------------------------- 1 | /* 2 | * Small example application that can convert all point clouds 3 | * from any E57 file into a single merged XYZ ASCII file. 4 | * 5 | * The output file name will be the input file name plus ".xyz". 6 | * The values in the output file will be separated by a space as separator. 7 | * 8 | * Spherical coordinates are converted automatically to Cartesian coordinates. 9 | * Invalid and incomplete coordinates will be skipped. 10 | * If there is no RGB color, it will use the intensity as grayscale RGB values. 11 | * If there is no color and no intensity, it will only write X, Y and Z values. 
12 | */ 13 | 14 | use anyhow::{ensure, Context, Result}; 15 | use e57::{CartesianCoordinate, E57Reader}; 16 | use std::env::args; 17 | use std::fs::File; 18 | use std::io::{BufWriter, Write}; 19 | 20 | fn main() -> Result<()> { 21 | // Check command line arguments and show usage 22 | let args: Vec = args().collect(); 23 | ensure!(args.len() >= 2, "Usage: e57-to-xyz "); 24 | 25 | // Prepare input and output file paths 26 | let in_file = args[1].clone(); 27 | let out_file = in_file.clone() + ".xyz"; 28 | 29 | // Open E57 input file for reading 30 | let mut file = E57Reader::from_file(in_file).context("Failed to open E57 file")?; 31 | 32 | // Prepare buffered writing into output file 33 | let writer = File::create(out_file).context("Unable to open output file for writing")?; 34 | let mut writer = BufWriter::new(writer); 35 | 36 | // Prepare fast floating point to ASCII generation. 37 | // The std implementation is a bit slower compared to the specialized ryu crate. 38 | let mut buffer = ryu::Buffer::new(); 39 | 40 | // Loop over all point clouds in the E57 file 41 | let pointclouds = file.pointclouds(); 42 | for pointcloud in pointclouds { 43 | let mut iter = file 44 | .pointcloud_simple(&pointcloud) 45 | .context("Unable to get point cloud iterator")?; 46 | 47 | // Set point iterator options 48 | iter.spherical_to_cartesian(true); 49 | iter.cartesian_to_spherical(false); 50 | iter.intensity_to_color(true); 51 | iter.apply_pose(true); 52 | 53 | // Iterate over all points in point cloud 54 | for p in iter { 55 | let p = p.context("Unable to read next point")?; 56 | 57 | // Write XYZ data to output file 58 | if let CartesianCoordinate::Valid { x, y, z } = p.cartesian { 59 | let space = " ".as_bytes(); 60 | let xyz_err = "Failed to write XYZ coordinates"; 61 | 62 | let str = buffer.format(x); 63 | writer.write_all(str.as_bytes()).context(xyz_err)?; 64 | writer.write_all(space).context(xyz_err)?; 65 | 66 | let str = buffer.format(y); 67 | 
writer.write_all(str.as_bytes()).context(xyz_err)?; 68 | writer.write_all(space).context(xyz_err)?; 69 | 70 | let str = buffer.format(z); 71 | writer.write_all(str.as_bytes()).context(xyz_err)?; 72 | } else { 73 | continue; 74 | } 75 | 76 | // If available, write RGB color or intensity color values 77 | if let Some(color) = p.color { 78 | writer 79 | .write_fmt(format_args!( 80 | " {} {} {}", 81 | (color.red * 255.) as u8, 82 | (color.green * 255.) as u8, 83 | (color.blue * 255.) as u8 84 | )) 85 | .context("Failed to write RGB color")?; 86 | } 87 | 88 | // Write new line before next point 89 | writer 90 | .write_all("\n".as_bytes()) 91 | .context("Failed to write newline")?; 92 | } 93 | } 94 | 95 | Ok(()) 96 | } 97 | -------------------------------------------------------------------------------- /tools/e57-unpack/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "e57-unpack" 3 | version = "0.1.0" 4 | edition = "2021" 5 | publish = false 6 | 7 | [dependencies] 8 | e57 = { path = "../../" } 9 | anyhow = "1" 10 | -------------------------------------------------------------------------------- /tools/e57-unpack/src/main.rs: -------------------------------------------------------------------------------- 1 | /* 2 | * Small example application that will try to "unpack" an E57 file. 3 | * 4 | * It will create an XML file for the full original metadata, 5 | * a CSV file with the raw values for all point clouds, 6 | * all images will be extracted as individual files and 7 | * the parsed pieces of metadata will be stored as text files. 8 | * 9 | * The CSV files will use a semicolon as separator and Unix line endings. 10 | * The first line of the CSV file contains the names and types of the columns. 11 | * 12 | * The unpacked results will be saved into an folder with the suffix "_unpacked" 13 | * in the same folder as the original file. 
14 | * 15 | * If --no-points is passed after the file path then the points 16 | * will not be extracted, greatly reducing the time to extract the other files 17 | */ 18 | 19 | use anyhow::{ensure, Context, Result}; 20 | use e57::{DateTime, E57Reader, Extension, Header, Projection, RecordValue}; 21 | use std::fs::{create_dir_all, write, File}; 22 | use std::io::{BufWriter, Write}; 23 | use std::path::Path; 24 | 25 | #[derive(Debug)] 26 | pub struct E57Metadata { 27 | pub header: Header, 28 | pub format_name: String, 29 | pub guid: String, 30 | pub extensions: Vec, 31 | pub creation: Option, 32 | pub coordinate_metadata: Option, 33 | } 34 | 35 | fn main() -> Result<()> { 36 | let args: Vec = std::env::args().collect(); 37 | ensure!(args.len() >= 2, "Usage: extract-images "); 38 | let extract_points = !args.contains(&"--no-points".to_string()); 39 | 40 | // Prepare input file and output folder 41 | let input_file = &args[1]; 42 | let output_folder = input_file.to_owned() + "_unpacked"; 43 | let output_folder = Path::new(&output_folder); 44 | create_dir_all(output_folder).context("Failed to create output folder")?; 45 | 46 | // Open E57 file 47 | let mut e57 = E57Reader::from_file(input_file).context("Failed to open E57 file")?; 48 | 49 | // Extract XML section 50 | let xml = e57.xml(); 51 | let xml_file = output_folder.join("metadata.xml"); 52 | write(xml_file, xml).context("Failed to write XML metadata")?; 53 | println!("Finished extracting XML data"); 54 | 55 | // Extract parsed metadata 56 | let metadata = E57Metadata { 57 | header: e57.header(), 58 | format_name: e57.format_name().to_owned(), 59 | guid: e57.guid().to_owned(), 60 | extensions: e57.extensions(), 61 | creation: e57.creation(), 62 | coordinate_metadata: e57.coordinate_metadata().map(|cm| cm.to_owned()), 63 | }; 64 | let metadata_file = output_folder.join("metadata.txt"); 65 | let desc = format!("{metadata:#?}"); 66 | write(metadata_file, desc).context("Failed to write parsed metadata of E57")?; 67 | 
println!("Finished writing parsed metadata"); 68 | 69 | // Extract images 70 | let images = e57.images(); 71 | let image_count = images.len(); 72 | println!("Found {image_count} image(s)"); 73 | for (index, img) in images.iter().enumerate() { 74 | println!("Starting to extract data for image #{index}..."); 75 | 76 | // Extract metadata and write to txt file 77 | let img_metadata_file = output_folder.join(format!("image_{index}.txt")); 78 | let desc = format!("{img:#?}"); 79 | write(img_metadata_file, &desc).context("Failed to write metadata of image")?; 80 | println!(" Exported image metadata"); 81 | 82 | // Extract preview image, if available 83 | if let Some(preview) = &img.visual_reference { 84 | let ext = format!("{:?}", preview.blob.format).to_lowercase(); 85 | let file_name = format!("image_{index}_preview.{ext}"); 86 | let file_path = output_folder.join(file_name); 87 | let file = 88 | File::create(file_path).context("Failed to open preview image file for writing")?; 89 | let mut writer = BufWriter::new(file); 90 | let size = e57 91 | .blob(&preview.blob.data, &mut writer) 92 | .context("Failed to write preview image blob")?; 93 | println!(" Exported preview image with {size} bytes"); 94 | 95 | // Extract preview mask, if available 96 | if let Some(blob) = &preview.mask { 97 | let file_name = format!("image_{index}_preview_mask.png"); 98 | let file_path = output_folder.join(file_name); 99 | let file = File::create(file_path) 100 | .context("Failed to open preview mask image file for writing")?; 101 | let mut writer = BufWriter::new(file); 102 | let size = e57 103 | .blob(blob, &mut writer) 104 | .context("Failed to write preview mask image blob")?; 105 | println!(" Exported preview image mask with {size} bytes"); 106 | } 107 | } 108 | 109 | // Extract projectable image, if available 110 | if let Some(rep) = &img.projection { 111 | let (blob, mask, type_name) = match rep { 112 | Projection::Pinhole(rep) => (&rep.blob, &rep.mask, "pinhole"), 113 | 
Projection::Spherical(rep) => (&rep.blob, &rep.mask, "spherical"), 114 | Projection::Cylindrical(rep) => (&rep.blob, &rep.mask, "cylindrical"), 115 | }; 116 | let ext = format!("{:?}", blob.format).to_lowercase(); 117 | let file_name = format!("image_{index}_{type_name}.{ext}"); 118 | let file_path = output_folder.join(file_name); 119 | let file = File::create(file_path).context("Failed to open image file for writing")?; 120 | let mut writer = BufWriter::new(file); 121 | let size = e57 122 | .blob(&blob.data, &mut writer) 123 | .context("Failed to write image blob")?; 124 | println!("Exported {type_name} image with {size} bytes"); 125 | 126 | // Extract mask, if available 127 | if let Some(blob) = mask { 128 | let file_name = format!("image_{index}_{type_name}_mask.png"); 129 | let file_path = output_folder.join(file_name); 130 | let file = File::create(file_path) 131 | .context("Failed to open mask image file for writing")?; 132 | let mut writer = BufWriter::new(file); 133 | let size = e57 134 | .blob(blob, &mut writer) 135 | .context("Failed to write mask image blob")?; 136 | println!(" Exported image mask with {size} bytes"); 137 | } 138 | } 139 | } 140 | 141 | // Extract point clouds 142 | if extract_points { 143 | let pointclouds = e57.pointclouds(); 144 | let pc_count = pointclouds.len(); 145 | println!("Found {pc_count} point cloud(s)"); 146 | for (index, pc) in pointclouds.iter().enumerate() { 147 | println!("Starting to extract data for point cloud #{index}..."); 148 | 149 | // Extract metadata and write to txt file 150 | let pc_metadata_file = output_folder.join(format!("pc_{index}.txt")); 151 | let desc = format!("{pc:#?}"); 152 | write(pc_metadata_file, &desc).context("Failed to write metadata of point cloud")?; 153 | println!(" Exported point cloud metadata"); 154 | 155 | // Create CSV header 156 | let file_name = format!("pc_{index}.csv"); 157 | let file_path = output_folder.join(file_name); 158 | let file = 159 | 
File::create(file_path).context("Failed to open point cloud file for writing")?; 160 | let mut writer = BufWriter::new(file); 161 | let headers: Vec = pc 162 | .prototype 163 | .iter() 164 | .map(|r| format!("{:?} {:?}", r.name, r.data_type)) 165 | .collect(); 166 | let mut header = headers.join(";"); 167 | header += "\n"; 168 | writer 169 | .write_all(header.as_bytes()) 170 | .context("Failed to write CSV header")?; 171 | 172 | // Write CSV data 173 | let iter = e57 174 | .pointcloud_raw(pc) 175 | .context("Failed to open point cloud iterator")?; 176 | for p in iter { 177 | let p = p.context("Failed to extract raw point")?; 178 | let values: Vec = p 179 | .iter() 180 | .map(|r| match &r { 181 | RecordValue::Single(s) => s.to_string(), 182 | RecordValue::Double(d) => d.to_string(), 183 | RecordValue::ScaledInteger(si) => si.to_string(), 184 | RecordValue::Integer(i) => i.to_string(), 185 | }) 186 | .collect(); 187 | let line = values.join(";") + "\n"; 188 | writer 189 | .write_all(line.as_bytes()) 190 | .context("Failed to write CSV point")?; 191 | } 192 | println!(" Exported point cloud data to CSV file"); 193 | } 194 | } 195 | 196 | Ok(()) 197 | } 198 | --------------------------------------------------------------------------------