├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── feature_request.md │ └── other.md ├── pull_request_template.md └── workflows │ └── build.yml ├── .gitignore ├── .rustfmt.toml ├── CHANGELOG.md ├── Cargo.toml ├── LICENSE ├── README.md ├── README.tpl ├── data └── test_tex.ktx2 ├── deny.toml ├── examples └── load.rs ├── release.toml ├── renovate.json └── src ├── enums.rs ├── error.rs └── lib.rs /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug Report 3 | about: ktx2 malfunctioning? Please provide a detailed bug report. 4 | title: '' 5 | labels: bug 6 | assignees: '' 7 | --- 8 | 9 | 10 | 11 | ## Description 12 | 13 | 14 | ## Repro steps 15 | 16 | 17 | ## Extra Materials 18 | 19 | 20 | ## System Information 21 | 22 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature Request 3 | about: Suggest a way ktx2 could be better. 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | --- 8 | 9 | ## Description 10 | 11 | 12 | ## Proposed Solution 13 | 14 | 15 | ## Alternatives 16 | 17 | 18 | ## Additional context 19 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/other.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Other 3 | about: Strange things you want to tell us 4 | title: '' 5 | labels: question 6 | assignees: '' 7 | --- 8 | 9 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | ## Checklist 4 | 5 | - [ ] `cargo clippy` reports no issues 6 | - [ ] `cargo doc` reports no issues 7 | - [ ] [`cargo deny`](https://github.com/EmbarkStudios/cargo-deny/) issues have been fixed or added to `deny.toml` 8 | - [ ] `cargo test` shows all tests passing 9 | - [ ] human-readable change descriptions added to the changelog under the "Unreleased" heading. 10 | - [ ] If the change does not affect the user (or is a process change), preface the change with "Internal:" 11 | - [ ] Add credit to yourself for each change: `Added new functionality @githubname`. 
12 | 13 | ## Description 14 | 15 | ## Related Issues 16 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: Build 2 | 3 | on: 4 | push: 5 | branches: 6 | - trunk 7 | - ci/** 8 | pull_request: 9 | 10 | env: 11 | RUSTFLAGS: -D warnings 12 | RUSTDOCFLAGS: -D warnings 13 | CI_RUST_VERSION: "1.85" 14 | MSRV: "1.56" 15 | 16 | jobs: 17 | build: 18 | timeout-minutes: 10 19 | 20 | strategy: 21 | matrix: 22 | include: 23 | # wasm stable 24 | - name: "stable wasm" 25 | os: "ubuntu-latest" 26 | target: "wasm32-unknown-unknown" 27 | 28 | # native stable 29 | - name: "stable linux" 30 | os: "ubuntu-latest" 31 | target: "x86_64-unknown-linux-gnu" 32 | 33 | - name: "stable mac" 34 | os: "macos-latest" 35 | target: "x86_64-apple-darwin" 36 | 37 | - name: "stable windows" 38 | os: "windows-latest" 39 | target: "x86_64-pc-windows-msvc" 40 | 41 | fail-fast: false 42 | 43 | runs-on: ${{ matrix.os }} 44 | name: ${{ matrix.name }} 45 | 46 | steps: 47 | - name: checkout repo 48 | uses: actions/checkout@v4 49 | 50 | - name: install rust 51 | shell: bash 52 | run: | 53 | set -e 54 | 55 | rustup toolchain install ${{ env.CI_RUST_VERSION }} --no-self-update --profile=minimal --component clippy 56 | rustup override set ${{ env.CI_RUST_VERSION }} 57 | rustup target add ${{ matrix.target }} 58 | cargo -V 59 | 60 | - name: clippy 61 | shell: bash 62 | run: | 63 | set -e 64 | 65 | cargo clippy --target ${{ matrix.target }} 66 | cargo clippy --target ${{ matrix.target }} --no-default-features 67 | 68 | - name: test 69 | shell: bash 70 | if: matrix.name != 'stable wasm' 71 | run: | 72 | set -e 73 | 74 | cargo test --target ${{ matrix.target }} 75 | 76 | - name: doc 77 | shell: bash 78 | run: | 79 | set -e 80 | 81 | cargo doc --no-deps --target ${{ matrix.target }} 82 | 83 | msrv: 84 | runs-on: ubuntu-latest 85 | steps: 86 | - name: checkout repo 87 | uses: actions/checkout@v4 88 | 89 | - name: install rust 90 | run: | 91 | set -e 92 | 93 | rustup toolchain install ${{ env.MSRV }} --no-self-update --profile=minimal 94 | rustup override set ${{ env.MSRV }} 95 | cargo -V 96 | 97 | - name: check msrv 98 | run: | 99 | set -e 100 | 101 | cargo check 102 | cargo check --no-default-features 103 | 104 | cargo-fmt: 105 | runs-on: ubuntu-latest 106 | steps: 107 | - name: checkout repo 108 | uses: actions/checkout@v4 109 | 110 | - name: install rust 111 | run: | 112 | set -e 113 | 114 | rustup toolchain install ${{ env.CI_RUST_VERSION }} --no-self-update --profile=minimal --component rustfmt 115 | rustup override set ${{ env.CI_RUST_VERSION }} 116 | cargo -V 117 | 118 | - name: check format 119 | run: | 120 | set -e 121 | 122 | cargo fmt -- --check 123 | 124 | cargo-deny: 125 | runs-on: ubuntu-latest 126 | steps: 127 | - name: checkout repo 128 | uses: actions/checkout@v4 129 | 130 | - name: check denies 131 | uses: EmbarkStudios/cargo-deny-action@v2 132 | with: 133 | log-level: warn 134 | command: check 135 | arguments: --all-features 136 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /.idea 2 | /.vs 3 | /target 4 | Cargo.lock 5 | -------------------------------------------------------------------------------- /.rustfmt.toml: -------------------------------------------------------------------------------- 1 | max_width = 120 2 | use_field_init_shorthand 
= true 3 | use_try_shorthand = true 4 | edition = "2018" 5 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | All notable changes to this project will be documented in this file. 4 | 5 | The format is loosely based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 6 | and this project adheres to cargo's version of [Semantic Versioning](https://doc.rust-lang.org/cargo/reference/semver.html). 7 | 8 | Per Keep a Changelog there are 6 main categories of changes: 9 | - Added 10 | - Changed 11 | - Deprecated 12 | - Removed 13 | - Fixed 14 | - Security 15 | 16 | #### Table of Contents 17 | 18 | - [Unreleased](#unreleased) 19 | - [v0.4.0](#v040) 20 | - [v0.3.0](#v030) 21 | - [Diffs](#diffs) 22 | 23 | ## Unreleased 24 | 25 | ## v0.4.0 26 | 27 | Released 2025-03-24 28 | 29 | - Added a `key_value_data` function to the reader that returns an iterator over key-value pairs (by @expenses). 30 | - `Reader::levels` now returns an iterator over `Level` structs, which contain the bytes of the level as well as the uncompressed length (by @expenses). 31 | - Added `Header::from_bytes`, `Header::as_bytes`, `LevelIndex::from_bytes` and `LevelIndex::as_bytes` (by @expenses). 32 | - Made the following fields public (by @expenses): 33 | - `Header::LENGTH` 34 | - `Header::index` 35 | - `LevelIndex::LENGTH` 36 | - `LevelIndex::byte_offset` 37 | - `LevelIndex::byte_length` 38 | - `LevelIndex::uncompressed_byte_length` 39 | - `Level::data` 40 | - `Level::uncompressed_byte_length` 41 | - Moved header data in `BasicDataFormatDescriptor` into `BasicDataFormatDescriptorHeader`. 42 | - Added `ASTC_n_SFLOAT_BLOCK` variants to `Format`. 43 | - Renamed the Data Format Descriptor types to all start with `Dfd` (by @cwfitzgerald): 44 | - `Reader::data_format_descriptors` -> `Reader::dfd_blocks` 45 | - `DataFormatDescriptor` -> `DfdBlock` 46 | - `DataFormatDescriptorHeader` -> `DfdBlockHeader` 47 | - `BasicDataFormatDescriptor` -> `DfdBlockBasic` 48 | - `BasicDataFormatDescriptorHeader` -> `DfdBlockHeaderBasic` 49 | 50 | ## v0.3.0 51 | 52 | Released 2022-02-03 53 | 54 | Initial release under new ownership. 55 | - Added support for Data Format Descriptor parsing (Rob Swain [@superdump](https://github.com/superdump)) 56 | 57 | ### Changed 58 | - Cleaned up a significant portion of the crate.
59 | 60 | ## Diffs 61 | 62 | - [Unreleased](https://github.com/BVE-Reborn/ktx2/compare/v0.4.0...HEAD) 63 | - [v0.4.0](https://github.com/BVE-Reborn/ktx2/compare/v0.3.0...v0.4.0) 64 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = [] 3 | 4 | [package] 5 | name = "ktx2" 6 | version = "0.4.0" 7 | authors = [ 8 | "Benjamin Saunders ", 9 | "Connor Fitzgerald ", 10 | "f3kilo ", 11 | ] 12 | edition = "2021" 13 | description = "Parser for the ktx2 texture container format" 14 | readme = "README.md" 15 | repository = "https://github.com/BVE-Reborn/ktx2" 16 | license = "Apache-2.0" 17 | keywords = [] 18 | categories = [] 19 | exclude = ["data/*"] 20 | rust-version = "1.56" 21 | 22 | [features] 23 | default = ["std"] 24 | std = [] 25 | 26 | [dependencies] 27 | bitflags = "2" 28 | 29 | [package.metadata.release] 30 | pre-release-hook = ["cargo", "readme", "-o", "README.md", "-t", "README.tpl"] 31 | [[package.metadata.release.pre-release-replacements]] 32 | file = "CHANGELOG.md" 33 | search = "\\[Unreleased\\]\\(#unreleased\\)" 34 | replace = "[Unreleased](#unreleased)\n- [v{{version}}](#v{{version}})" 35 | [[package.metadata.release.pre-release-replacements]] 36 | file = "CHANGELOG.md" 37 | search = "\\[v([0-9]+)\\.([0-9]+)\\.([0-9]+)\\]\\(#v[0-9\\.]+\\)" 38 | replace = "[v$1.$2.$3](#v$1$2$3)" 39 | [[package.metadata.release.pre-release-replacements]] 40 | file = "CHANGELOG.md" 41 | search = "## Unreleased" 42 | replace = "## Unreleased\n\n## v{{version}}\n\nReleased {{date}}" 43 | [[package.metadata.release.pre-release-replacements]] 44 | file = "CHANGELOG.md" 45 | search = "\\[Unreleased\\]\\(https://github.com/BVE-Reborn/ktx2/compare/v([a-z0-9.-]+)\\.\\.\\.HEAD\\)" 46 | replace = "[Unreleased](https://github.com/BVE-Reborn/ktx2/compare/v{{version}}...HEAD)\n- [v{{version}}](https://github.com/BVE-Reborn/ktx2/compare/v$1...v{{version}})" 47 | # allow first increment 48 | min = 0 49 | [[package.metadata.release.pre-release-replacements]] 50 | file = "CHANGELOG.md" 51 | search = "" 52 | replace = "- [Unreleased](https://github.com/BVE-Reborn/ktx2/compare/v{{version}}...HEAD)" 53 | # allow non-first increment 54 | min = 0 55 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 
22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2021 The BVE-Reborn Developers 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ktx2 2 | 3 | ![GitHub Actions Workflow Status](https://img.shields.io/github/actions/workflow/status/BVE-Reborn/ktx2/build.yml?branch=trunk) 4 | [![Crates.io](https://img.shields.io/crates/v/ktx2)](https://crates.io/crates/ktx2) 5 | [![Documentation](https://docs.rs/ktx2/badge.svg)](https://docs.rs/ktx2) 6 | ![License](https://img.shields.io/crates/l/ktx2) 7 | 8 | Parser for the [ktx2](https://github.khronos.org/KTX-Specification/ktxspec.v2.html) texture container format. 9 | 10 | ### Features 11 | - [x] Async reading 12 | - [x] Parsing 13 | - [x] Validating 14 | - [x] [Data format description](https://github.khronos.org/KTX-Specification/ktxspec.v2.html#_data_format_descriptor) 15 | - [x] [Key/value data](https://github.khronos.org/KTX-Specification/ktxspec.v2.html#_keyvalue_data) 16 | 17 | ### Example 18 | ```rust 19 | // Create an instance of the reader. This validates the header. 20 | let mut reader = ktx2::Reader::new(file).expect("Can't create reader"); 21 | 22 | // Get general texture information. 23 | let header = reader.header(); 24 | 25 | // Collect the mipmap levels. 26 | let levels = reader.levels().collect::<Vec<_>>(); 27 | ``` 28 | 29 | ### MSRV 30 | 31 | The minimum supported Rust version is 1.56. MSRV bumps are treated as breaking changes. 32 | 33 | License: Apache-2.0 34 | -------------------------------------------------------------------------------- /README.tpl: -------------------------------------------------------------------------------- 1 | # ktx2 2 | 3 | ![GitHub Actions Workflow Status](https://img.shields.io/github/actions/workflow/status/BVE-Reborn/ktx2/build.yml?branch=trunk) 4 | [![Crates.io](https://img.shields.io/crates/v/ktx2)](https://crates.io/crates/ktx2) 5 | [![Documentation](https://docs.rs/ktx2/badge.svg)](https://docs.rs/ktx2) 6 | ![License](https://img.shields.io/crates/l/ktx2) 7 | 8 | {{readme}} 9 | 10 | License: {{license}} 11 | -------------------------------------------------------------------------------- /data/test_tex.ktx2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/BVE-Reborn/ktx2/e8c222ae90dbc49e6aa07aa3e965dde3ed35f89b/data/test_tex.ktx2 -------------------------------------------------------------------------------- /deny.toml: -------------------------------------------------------------------------------- 1 | [licenses] 2 | allow = ["MIT", "Apache-2.0"] 3 | 4 | [bans] 5 | multiple-versions = "deny" 6 | skip = [] 7 | 8 | [advisories] 9 | ignore = [] 10 | 11 | [sources] 12 | unknown-registry = "deny" 13 | unknown-git = "allow" 14 | -------------------------------------------------------------------------------- /examples/load.rs: -------------------------------------------------------------------------------- 1 | use ktx2::{Format, Header, Reader}; 2 | 3 | fn main() { 4 | let file = include_bytes!("../data/test_tex.ktx2"); 5 | let reader = Reader::new(file).expect("Can't create reader"); 6 | let header = reader.header(); 7 | println!("Header: {:#?}", header); 8 | assert_head(header); 9 | 10 | let key_value_pairs = reader.key_value_data().collect::<Vec<_>>(); 11 | assert_eq!(key_value_pairs.len(), 2); 12 | 13 | for (k, v) in key_value_pairs { 14 | println!("Key '{}': {}", k, String::from_utf8_lossy(v)); 15 | } 16 | 17 | let levels =
reader.levels().map(|level| level.data).collect::<Vec<_>>(); 18 | assert_eq!(levels.len(), header.level_count.max(1) as usize); 19 | 20 | let data = reader.data(); 21 | println!("Data len: {:?}", data.len()); 22 | test_data(&levels); 23 | } 24 | 25 | fn test_data(info: &[&[u8]]) { 26 | for (i, region) in info.iter().enumerate() { 27 | println!("Bytes for level {:?}: {:?}", i, &region[..4]); 28 | } 29 | } 30 | 31 | fn assert_head(header: Header) { 32 | assert_eq!(header.format, Some(Format::R8G8B8A8_UINT)); 33 | assert_eq!(header.type_size, 1); 34 | assert_eq!(header.pixel_width, 1024); 35 | assert_eq!(header.pixel_height, 512); 36 | assert_eq!(header.pixel_depth, 0); 37 | assert_eq!(header.layer_count, 0); 38 | assert_eq!(header.face_count, 1); 39 | assert_eq!(header.level_count, 11); 40 | assert_eq!(header.supercompression_scheme, None); 41 | } 42 | -------------------------------------------------------------------------------- /release.toml: -------------------------------------------------------------------------------- 1 | consolidate-commits = true 2 | sign-commit = true 3 | sign-tag = true 4 | shared-version = true 5 | allow-branch = ["trunk"] 6 | pre-release-commit-message = "Version {{version}}" 7 | -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json", 3 | "extends": ["config:recommended", "schedule:weekly"], 4 | "dependencyDashboard": true, 5 | "prConcurrentLimit": 20, 6 | "prHourlyLimit": 200, 7 | "labels": ["dependencies"], 8 | "packageRules": [ 9 | { 10 | "matchUpdateTypes": ["patch"], 11 | "matchCurrentVersion": "<1.0.0", 12 | "groupName": "Minor Updates", 13 | "description": "Patch updates to 0.x.y crates are treated as compatible by cargo" 14 | }, 15 | { 16 | "matchUpdateTypes": ["minor", "patch"], 17 | "matchCurrentVersion": ">=1.0.0", 18 | "groupName": "Minor Updates", 19 | "description": "Minor and patch updates to x.y.z crates are treated as compatible by cargo" 20 | } 21 | ] 22 | } 23 | -------------------------------------------------------------------------------- /src/enums.rs: -------------------------------------------------------------------------------- 1 | use core::{ 2 | fmt, 3 | num::{NonZeroU32, NonZeroU8}, 4 | }; 5 | 6 | macro_rules! pseudo_enum { 7 | ($(#[$attr:meta])* $container:ident($prim:ident) $name:ident { $($case:ident = $value:literal,)* }) => { 8 | $(#[$attr])* 9 | #[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] 10 | pub struct $name($container); 11 | 12 | #[allow(non_upper_case_globals)] 13 | impl $name { 14 | pub fn new(x: $prim) -> Option<Self> { 15 | Some(Self($container::new(x)?)) 16 | } 17 | 18 | pub fn value(&self) -> $prim { 19 | self.0.get() 20 | } 21 | 22 | $( 23 | pub const $case: Self = Self(unsafe { $container::new_unchecked($value) }); 24 | )* 25 | } 26 | 27 | impl fmt::Debug for $name { 28 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 29 | let name = match self.0.get() { 30 | $($value => Some(stringify!($case)),)* 31 | _ => None, 32 | }; 33 | match name { 34 | Some(name) => f.pad(name), 35 | None => write!(f, concat!(stringify!($name), "({})"), self.0.get()), 36 | } 37 | } 38 | } 39 | }; 40 | } 41 | 42 | pseudo_enum!
{ 43 | /// Known texture formats 44 | NonZeroU32(u32) Format { 45 | R4G4_UNORM_PACK8 = 1, 46 | R4G4B4A4_UNORM_PACK16 = 2, 47 | B4G4R4A4_UNORM_PACK16 = 3, 48 | R5G6B5_UNORM_PACK16 = 4, 49 | B5G6R5_UNORM_PACK16 = 5, 50 | R5G5B5A1_UNORM_PACK16 = 6, 51 | B5G5R5A1_UNORM_PACK16 = 7, 52 | A1R5G5B5_UNORM_PACK16 = 8, 53 | R8_UNORM = 9, 54 | R8_SNORM = 10, 55 | R8_UINT = 13, 56 | R8_SINT = 14, 57 | R8_SRGB = 15, 58 | R8G8_UNORM = 16, 59 | R8G8_SNORM = 17, 60 | R8G8_UINT = 20, 61 | R8G8_SINT = 21, 62 | R8G8_SRGB = 22, 63 | R8G8B8_UNORM = 23, 64 | R8G8B8_SNORM = 24, 65 | R8G8B8_UINT = 27, 66 | R8G8B8_SINT = 28, 67 | R8G8B8_SRGB = 29, 68 | B8G8R8_UNORM = 30, 69 | B8G8R8_SNORM = 31, 70 | B8G8R8_UINT = 34, 71 | B8G8R8_SINT = 35, 72 | B8G8R8_SRGB = 36, 73 | R8G8B8A8_UNORM = 37, 74 | R8G8B8A8_SNORM = 38, 75 | R8G8B8A8_UINT = 41, 76 | R8G8B8A8_SINT = 42, 77 | R8G8B8A8_SRGB = 43, 78 | B8G8R8A8_UNORM = 44, 79 | B8G8R8A8_SNORM = 45, 80 | B8G8R8A8_UINT = 48, 81 | B8G8R8A8_SINT = 49, 82 | B8G8R8A8_SRGB = 50, 83 | A2R10G10B10_UNORM_PACK32 = 58, 84 | A2R10G10B10_SNORM_PACK32 = 59, 85 | A2R10G10B10_UINT_PACK32 = 62, 86 | A2R10G10B10_SINT_PACK32 = 63, 87 | A2B10G10R10_UNORM_PACK32 = 64, 88 | A2B10G10R10_SNORM_PACK32 = 65, 89 | A2B10G10R10_UINT_PACK32 = 68, 90 | A2B10G10R10_SINT_PACK32 = 69, 91 | R16_UNORM = 70, 92 | R16_SNORM = 71, 93 | R16_UINT = 74, 94 | R16_SINT = 75, 95 | R16_SFLOAT = 76, 96 | R16G16_UNORM = 77, 97 | R16G16_SNORM = 78, 98 | R16G16_UINT = 81, 99 | R16G16_SINT = 82, 100 | R16G16_SFLOAT = 83, 101 | R16G16B16_UNORM = 84, 102 | R16G16B16_SNORM = 85, 103 | R16G16B16_UINT = 88, 104 | R16G16B16_SINT = 89, 105 | R16G16B16_SFLOAT = 90, 106 | R16G16B16A16_UNORM = 91, 107 | R16G16B16A16_SNORM = 92, 108 | R16G16B16A16_UINT = 95, 109 | R16G16B16A16_SINT = 96, 110 | R16G16B16A16_SFLOAT = 97, 111 | R32_UINT = 98, 112 | R32_SINT = 99, 113 | R32_SFLOAT = 100, 114 | R32G32_UINT = 101, 115 | R32G32_SINT = 102, 116 | R32G32_SFLOAT = 103, 117 | R32G32B32_UINT = 104, 118 | R32G32B32_SINT = 105, 119 | R32G32B32_SFLOAT = 106, 120 | R32G32B32A32_UINT = 107, 121 | R32G32B32A32_SINT = 108, 122 | R32G32B32A32_SFLOAT = 109, 123 | R64_UINT = 110, 124 | R64_SINT = 111, 125 | R64_SFLOAT = 112, 126 | R64G64_UINT = 113, 127 | R64G64_SINT = 114, 128 | R64G64_SFLOAT = 115, 129 | R64G64B64_UINT = 116, 130 | R64G64B64_SINT = 117, 131 | R64G64B64_SFLOAT = 118, 132 | R64G64B64A64_UINT = 119, 133 | R64G64B64A64_SINT = 120, 134 | R64G64B64A64_SFLOAT = 121, 135 | B10G11R11_UFLOAT_PACK32 = 122, 136 | E5B9G9R9_UFLOAT_PACK32 = 123, 137 | D16_UNORM = 124, 138 | X8_D24_UNORM_PACK32 = 125, 139 | D32_SFLOAT = 126, 140 | S8_UINT = 127, 141 | D16_UNORM_S8_UINT = 128, 142 | D24_UNORM_S8_UINT = 129, 143 | D32_SFLOAT_S8_UINT = 130, 144 | BC1_RGB_UNORM_BLOCK = 131, 145 | BC1_RGB_SRGB_BLOCK = 132, 146 | BC1_RGBA_UNORM_BLOCK = 133, 147 | BC1_RGBA_SRGB_BLOCK = 134, 148 | BC2_UNORM_BLOCK = 135, 149 | BC2_SRGB_BLOCK = 136, 150 | BC3_UNORM_BLOCK = 137, 151 | BC3_SRGB_BLOCK = 138, 152 | BC4_UNORM_BLOCK = 139, 153 | BC4_SNORM_BLOCK = 140, 154 | BC5_UNORM_BLOCK = 141, 155 | BC5_SNORM_BLOCK = 142, 156 | BC6H_UFLOAT_BLOCK = 143, 157 | BC6H_SFLOAT_BLOCK = 144, 158 | BC7_UNORM_BLOCK = 145, 159 | BC7_SRGB_BLOCK = 146, 160 | ETC2_R8G8B8_UNORM_BLOCK = 147, 161 | ETC2_R8G8B8_SRGB_BLOCK = 148, 162 | ETC2_R8G8B8A1_UNORM_BLOCK = 149, 163 | ETC2_R8G8B8A1_SRGB_BLOCK = 150, 164 | ETC2_R8G8B8A8_UNORM_BLOCK = 151, 165 | ETC2_R8G8B8A8_SRGB_BLOCK = 152, 166 | EAC_R11_UNORM_BLOCK = 153, 167 | EAC_R11_SNORM_BLOCK = 154, 168 | EAC_R11G11_UNORM_BLOCK = 155, 169 | 
EAC_R11G11_SNORM_BLOCK = 156, 170 | ASTC_4x4_UNORM_BLOCK = 157, 171 | ASTC_4x4_SRGB_BLOCK = 158, 172 | ASTC_5x4_UNORM_BLOCK = 159, 173 | ASTC_5x4_SRGB_BLOCK = 160, 174 | ASTC_5x5_UNORM_BLOCK = 161, 175 | ASTC_5x5_SRGB_BLOCK = 162, 176 | ASTC_6x5_UNORM_BLOCK = 163, 177 | ASTC_6x5_SRGB_BLOCK = 164, 178 | ASTC_6x6_UNORM_BLOCK = 165, 179 | ASTC_6x6_SRGB_BLOCK = 166, 180 | ASTC_8x5_UNORM_BLOCK = 167, 181 | ASTC_8x5_SRGB_BLOCK = 168, 182 | ASTC_8x6_UNORM_BLOCK = 169, 183 | ASTC_8x6_SRGB_BLOCK = 170, 184 | ASTC_8x8_UNORM_BLOCK = 171, 185 | ASTC_8x8_SRGB_BLOCK = 172, 186 | ASTC_10x5_UNORM_BLOCK = 173, 187 | ASTC_10x5_SRGB_BLOCK = 174, 188 | ASTC_10x6_UNORM_BLOCK = 175, 189 | ASTC_10x6_SRGB_BLOCK = 176, 190 | ASTC_10x8_UNORM_BLOCK = 177, 191 | ASTC_10x8_SRGB_BLOCK = 178, 192 | ASTC_10x10_UNORM_BLOCK = 179, 193 | ASTC_10x10_SRGB_BLOCK = 180, 194 | ASTC_12x10_UNORM_BLOCK = 181, 195 | ASTC_12x10_SRGB_BLOCK = 182, 196 | ASTC_12x12_UNORM_BLOCK = 183, 197 | ASTC_12x12_SRGB_BLOCK = 184, 198 | ASTC_4x4_SFLOAT_BLOCK = 1000066000, 199 | ASTC_5x4_SFLOAT_BLOCK = 1000066001, 200 | ASTC_5x5_SFLOAT_BLOCK = 1000066002, 201 | ASTC_6x5_SFLOAT_BLOCK = 1000066003, 202 | ASTC_6x6_SFLOAT_BLOCK = 1000066004, 203 | ASTC_8x5_SFLOAT_BLOCK = 1000066005, 204 | ASTC_8x6_SFLOAT_BLOCK = 1000066006, 205 | ASTC_8x8_SFLOAT_BLOCK = 1000066007, 206 | ASTC_10x5_SFLOAT_BLOCK = 1000066008, 207 | ASTC_10x6_SFLOAT_BLOCK = 1000066009, 208 | ASTC_10x8_SFLOAT_BLOCK = 1000066010, 209 | ASTC_10x10_SFLOAT_BLOCK = 1000066011, 210 | ASTC_12x10_SFLOAT_BLOCK = 1000066012, 211 | ASTC_12x12_SFLOAT_BLOCK = 1000066013, 212 | } 213 | } 214 | 215 | pseudo_enum! { 216 | /// Known supercompression schemes 217 | NonZeroU32(u32) SupercompressionScheme { 218 | BasisLZ = 1, 219 | Zstandard = 2, 220 | ZLIB = 3, 221 | } 222 | } 223 | 224 | pseudo_enum! { 225 | NonZeroU8(u8) ColorModel { 226 | RGBSDA = 1, 227 | YUVSDA = 2, 228 | YIQSDA = 3, 229 | LabSDA = 4, 230 | CMYKA = 5, 231 | XYZW = 6, 232 | HSVAAng = 7, 233 | HSLAAng = 8, 234 | HSVAHex = 9, 235 | HSLAHex = 10, 236 | YCgCoA = 11, 237 | YcCbcCrc = 12, 238 | ICtCp = 13, 239 | CIEXYZ = 14, 240 | CIEXYY = 15, 241 | BC1A = 128, 242 | BC2 = 129, 243 | BC3 = 130, 244 | BC4 = 131, 245 | BC5 = 132, 246 | BC6H = 133, 247 | BC7 = 134, 248 | ETC1 = 160, 249 | ETC2 = 161, 250 | ASTC = 162, 251 | ETC1S = 163, 252 | PVRTC = 164, 253 | PVRTC2 = 165, 254 | UASTC = 166, 255 | } 256 | } 257 | 258 | pseudo_enum! { 259 | NonZeroU8(u8) ColorPrimaries { 260 | BT709 = 1, 261 | BT601EBU = 2, 262 | BT601SMPTE = 3, 263 | BT2020 = 4, 264 | CIEXYZ = 5, 265 | ACES = 6, 266 | ACESCC = 7, 267 | NTSC1953 = 8, 268 | PAL525 = 9, 269 | DISPLAYP3 = 10, 270 | AdobeRGB = 11, 271 | } 272 | } 273 | 274 | pseudo_enum! { 275 | NonZeroU8(u8) TransferFunction { 276 | Linear = 1, 277 | SRGB = 2, 278 | ITU = 3, 279 | NTSC = 4, 280 | SLOG = 5, 281 | SLOG2 = 6, 282 | BT1886 = 7, 283 | HLGOETF = 8, 284 | HLGEOTF = 9, 285 | PQEOTF = 10, 286 | PQOETF = 11, 287 | DCIP3 = 12, 288 | PALOETF = 13, 289 | PAL625EOTF = 14, 290 | ST240 = 15, 291 | ACESCC = 16, 292 | ACESCCT = 17, 293 | AdobeRGB = 18, 294 | } 295 | } 296 | -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | use core::fmt; 2 | #[cfg(feature = "std")] 3 | use std::error::Error; 4 | 5 | /// Error, that happened when data doesn't satisfy expected parameters. 
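///
/// A minimal sketch of matching on the individual variants when opening a file
/// (the 4-byte buffer here is just a stand-in for real input):
///
/// ```rust
/// let bytes: &[u8] = &[0u8; 4]; // far too short to be a KTX2 file
/// match ktx2::Reader::new(bytes) {
///     Ok(reader) => println!("levels: {}", reader.levels().len()),
///     Err(ktx2::ParseError::BadMagic) => eprintln!("not a KTX2 file"),
///     Err(ktx2::ParseError::UnexpectedEnd) => eprintln!("file is truncated"),
///     Err(err) => eprintln!("invalid KTX2 file: {}", err),
/// }
/// ```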
6 | #[derive(Debug)] 7 | #[non_exhaustive] 8 | pub enum ParseError { 9 | /// Unexpected magic numbers 10 | BadMagic, 11 | /// Zero pixel width 12 | ZeroWidth, 13 | /// Zero face count 14 | ZeroFaceCount, 15 | /// Data Format Descriptor had an invalid sample bit length. 16 | InvalidSampleBitLength, 17 | /// Unexpected end of buffer 18 | UnexpectedEnd, 19 | } 20 | 21 | #[cfg(feature = "std")] 22 | impl Error for ParseError {} 23 | 24 | impl fmt::Display for ParseError { 25 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 26 | match &self { 27 | ParseError::BadMagic => f.pad("unexpected magic numbers"), 28 | ParseError::ZeroWidth => f.pad("zero pixel width"), 29 | ParseError::ZeroFaceCount => f.pad("zero face count"), 30 | ParseError::InvalidSampleBitLength => f.pad("invalid sample bit length"), 31 | ParseError::UnexpectedEnd => f.pad("unexpected end of buffer"), 32 | } 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | //! Parser for the [ktx2](https://github.khronos.org/KTX-Specification/ktxspec.v2.html) texture container format. 2 | //! 3 | //! ## Features 4 | //! - [x] Async reading 5 | //! - [x] Parsing 6 | //! - [x] Validating 7 | //! - [x] [Data format description](https://github.khronos.org/KTX-Specification/ktxspec.v2.html#_data_format_descriptor) 8 | //! - [x] [Key/value data](https://github.khronos.org/KTX-Specification/ktxspec.v2.html#_keyvalue_data) 9 | //! 10 | //! ## Example 11 | //! ```rust 12 | //! // Create an instance of the reader. This validates the header. 13 | //! # let file = include_bytes!("../data/test_tex.ktx2"); 14 | //! let mut reader = ktx2::Reader::new(file).expect("Can't create reader"); 15 | //! 16 | //! // Get general texture information. 17 | //! let header = reader.header(); 18 | //! 19 | //! // Collect the mipmap levels. 20 | //! let levels = reader.levels().collect::<Vec<_>>(); 21 | //! # let _ = (header, levels); 22 | //! ``` 23 | //! 24 | //! ## MSRV 25 | //! 26 | //! The minimum supported Rust version is 1.56. MSRV bumps are treated as breaking changes.
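//!
//! ## Inspecting metadata
//!
//! A minimal sketch (using the same bundled test texture) of walking the key/value data and the
//! data format descriptor blocks exposed by the reader:
//!
//! ```rust
//! # let file = include_bytes!("../data/test_tex.ktx2");
//! let reader = ktx2::Reader::new(file).expect("Can't create reader");
//!
//! // Key/value metadata stored in the file.
//! for (key, value) in reader.key_value_data() {
//!     println!("{}: {} bytes", key, value.len());
//! }
//!
//! // The basic data format descriptor block describes the color model, primaries,
//! // transfer function and texel block layout.
//! for block in reader.dfd_blocks() {
//!     if block.header == ktx2::DfdHeader::BASIC {
//!         let basic = ktx2::DfdBlockBasic::parse(block.data).expect("Can't parse DFD block");
//!         println!("color model: {:?}", basic.header.color_model);
//!     }
//! }
//! ```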
27 | 28 | #![no_std] 29 | 30 | #[cfg(feature = "std")] 31 | extern crate std; 32 | 33 | mod enums; 34 | mod error; 35 | 36 | pub use crate::{ 37 | enums::{ColorModel, ColorPrimaries, Format, SupercompressionScheme, TransferFunction}, 38 | error::ParseError, 39 | }; 40 | 41 | use core::{convert::TryInto, num::NonZeroU8}; 42 | 43 | /// Decodes KTX2 texture data 44 | pub struct Reader<Data: AsRef<[u8]>> { 45 | input: Data, 46 | header: Header, 47 | } 48 | 49 | impl<Data: AsRef<[u8]>> Reader<Data> { 50 | /// Decode KTX2 data from `input` 51 | pub fn new(input: Data) -> Result<Self, ParseError> { 52 | if input.as_ref().len() < Header::LENGTH { 53 | return Err(ParseError::UnexpectedEnd); 54 | } 55 | let header_data = input.as_ref()[0..Header::LENGTH].try_into().unwrap(); 56 | let header = Header::from_bytes(header_data)?; 57 | 58 | // Check DFD bounds 59 | let dfd_start = header 60 | .index 61 | .dfd_byte_offset 62 | .checked_add(4) 63 | .ok_or(ParseError::UnexpectedEnd)?; 64 | let dfd_end = header 65 | .index 66 | .dfd_byte_offset 67 | .checked_add(header.index.dfd_byte_length) 68 | .ok_or(ParseError::UnexpectedEnd)?; 69 | if dfd_end < dfd_start || dfd_end as usize >= input.as_ref().len() { 70 | return Err(ParseError::UnexpectedEnd); 71 | } 72 | 73 | // Check SGD bounds 74 | if header 75 | .index 76 | .sgd_byte_offset 77 | .checked_add(header.index.sgd_byte_length) 78 | .ok_or(ParseError::UnexpectedEnd)? 79 | >= input.as_ref().len() as u64 80 | { 81 | return Err(ParseError::UnexpectedEnd); 82 | } 83 | 84 | // Check KVD bounds 85 | if header 86 | .index 87 | .kvd_byte_offset 88 | .checked_add(header.index.kvd_byte_length) 89 | .ok_or(ParseError::UnexpectedEnd)? as usize 90 | >= input.as_ref().len() 91 | { 92 | return Err(ParseError::UnexpectedEnd); 93 | } 94 | 95 | let result = Self { input, header }; 96 | let index = result.level_index()?; // Check index integrity 97 | 98 | // Check level data bounds 99 | for level in index { 100 | if level 101 | .byte_offset 102 | .checked_add(level.byte_length) 103 | .ok_or(ParseError::UnexpectedEnd)?
104 | > result.input.as_ref().len() as u64 105 | { 106 | return Err(ParseError::UnexpectedEnd); 107 | } 108 | } 109 | 110 | Ok(result) 111 | } 112 | 113 | fn level_index(&self) -> ParseResult<impl ExactSizeIterator<Item = LevelIndex> + '_> { 114 | let level_count = self.header().level_count.max(1) as usize; 115 | 116 | let level_index_end_byte = Header::LENGTH 117 | .checked_add( 118 | level_count 119 | .checked_mul(LevelIndex::LENGTH) 120 | .ok_or(ParseError::UnexpectedEnd)?, 121 | ) 122 | .ok_or(ParseError::UnexpectedEnd)?; 123 | let level_index_bytes = self 124 | .input 125 | .as_ref() 126 | .get(Header::LENGTH..level_index_end_byte) 127 | .ok_or(ParseError::UnexpectedEnd)?; 128 | Ok(level_index_bytes.chunks_exact(LevelIndex::LENGTH).map(|data| { 129 | let level_data = data.try_into().unwrap(); 130 | LevelIndex::from_bytes(&level_data) 131 | })) 132 | } 133 | 134 | /// Access underlying raw bytes 135 | pub fn data(&self) -> &[u8] { 136 | self.input.as_ref() 137 | } 138 | 139 | /// Container-level metadata 140 | pub fn header(&self) -> Header { 141 | self.header 142 | } 143 | 144 | /// Iterator over the texture's mip levels 145 | pub fn levels(&self) -> impl ExactSizeIterator<Item = Level<'_>> + '_ { 146 | self.level_index().unwrap().map(move |level| Level { 147 | // Bounds-checking previously performed in `new` 148 | data: &self.input.as_ref()[level.byte_offset as usize..(level.byte_offset + level.byte_length) as usize], 149 | uncompressed_byte_length: level.uncompressed_byte_length, 150 | }) 151 | } 152 | 153 | pub fn supercompression_global_data(&self) -> &[u8] { 154 | let header = self.header(); 155 | let start = header.index.sgd_byte_offset as usize; 156 | // Bounds-checking previously performed in `new` 157 | let end = (header.index.sgd_byte_offset + header.index.sgd_byte_length) as usize; 158 | &self.input.as_ref()[start..end] 159 | } 160 | 161 | pub fn dfd_blocks(&self) -> impl Iterator<Item = DfdBlock<'_>> { 162 | let header = self.header(); 163 | let start = header.index.dfd_byte_offset as usize; 164 | // Bounds-checking previously performed in `new` 165 | let end = (header.index.dfd_byte_offset + header.index.dfd_byte_length) as usize; 166 | DfdBlockIterator { 167 | // start + 4 to skip the data format descriptors total length 168 | data: &self.input.as_ref()[start + 4..end], 169 | } 170 | } 171 | 172 | /// Iterator over the key-value pairs 173 | pub fn key_value_data(&self) -> KeyValueDataIterator { 174 | let header = self.header(); 175 | 176 | let start = header.index.kvd_byte_offset as usize; 177 | // Bounds-checking previously performed in `new` 178 | let end = (header.index.kvd_byte_offset + header.index.kvd_byte_length) as usize; 179 | 180 | KeyValueDataIterator::new(&self.input.as_ref()[start..end]) 181 | } 182 | } 183 | 184 | struct DfdBlockIterator<'data> { 185 | data: &'data [u8], 186 | } 187 | 188 | impl<'data> Iterator for DfdBlockIterator<'data> { 189 | type Item = DfdBlock<'data>; 190 | 191 | fn next(&mut self) -> Option<Self::Item> { 192 | if self.data.len() < DfdHeader::LENGTH { 193 | return None; 194 | } 195 | DfdHeader::parse(&self.data[..DfdHeader::LENGTH]).map_or(None, |(header, descriptor_block_size)| { 196 | if descriptor_block_size == 0 || self.data.len() < descriptor_block_size { 197 | return None; 198 | } 199 | let data = &self.data[DfdHeader::LENGTH..descriptor_block_size]; 200 | self.data = &self.data[descriptor_block_size..]; 201 | Some(DfdBlock { header, data }) 202 | }) 203 | } 204 | } 205 | 206 | /// An iterator that parses the key-value pairs in the KTX2 file.
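///
/// Each entry in the key/value section is a little-endian `u32` byte length followed by a
/// NUL-terminated key and its value, padded to 4-byte alignment; entries with a missing NUL
/// terminator or a non-UTF-8 key are skipped.
///
/// A minimal sketch of driving the iterator over a hand-built buffer, mirroring the crate's
/// own test data:
///
/// ```rust
/// // Length prefix (7 bytes of payload), then "xyz" NUL "123" and one byte of padding.
/// let bytes = [&7_u32.to_le_bytes()[..], b"xyz\0123 "].concat();
///
/// let mut pairs = ktx2::KeyValueDataIterator::new(&bytes);
/// assert_eq!(pairs.next(), Some(("xyz", &b"123"[..])));
/// assert_eq!(pairs.next(), None);
/// ```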
207 | pub struct KeyValueDataIterator<'data> { 208 | data: &'data [u8], 209 | } 210 | 211 | impl<'data> KeyValueDataIterator<'data> { 212 | /// Create a new iterator from the key-value data section of the KTX2 file. 213 | /// 214 | /// From the start of the file, this is a slice between [`Index::kvd_byte_offset`] 215 | /// and [`Index::kvd_byte_offset`] + [`Index::kvd_byte_length`]. 216 | pub fn new(data: &'data [u8]) -> Self { 217 | Self { data } 218 | } 219 | } 220 | 221 | impl<'data> Iterator for KeyValueDataIterator<'data> { 222 | type Item = (&'data str, &'data [u8]); 223 | 224 | fn next(&mut self) -> Option<Self::Item> { 225 | let mut offset = 0; 226 | 227 | loop { 228 | let length = bytes_to_u32(self.data, &mut offset).ok()?; 229 | 230 | let start_offset = offset; 231 | 232 | offset = offset.checked_add(length as usize)?; 233 | 234 | let end_offset = offset; 235 | 236 | // Ensure that we're 4-byte aligned 237 | if offset % 4 != 0 { 238 | offset += 4 - (offset % 4); 239 | } 240 | 241 | let key_and_value = match self.data.get(start_offset..end_offset) { 242 | Some(key_and_value) => key_and_value, 243 | None => continue, 244 | }; 245 | 246 | // The key is terminated with a NUL character. 247 | let key_end_index = match key_and_value.iter().position(|&c| c == b'\0') { 248 | Some(index) => index, 249 | None => continue, 250 | }; 251 | 252 | let key = &key_and_value[..key_end_index]; 253 | let value = &key_and_value[key_end_index + 1..]; 254 | 255 | let key = match core::str::from_utf8(key) { 256 | Ok(key) => key, 257 | Err(_) => continue, 258 | }; 259 | 260 | self.data = self.data.get(offset..).unwrap_or_default(); 261 | 262 | return Some((key, value)); 263 | } 264 | } 265 | } 266 | 267 | /// Identifier expected at the start of input texture data. 268 | const KTX2_MAGIC: [u8; 12] = [0xAB, 0x4B, 0x54, 0x58, 0x20, 0x32, 0x30, 0xBB, 0x0D, 0x0A, 0x1A, 0x0A]; 269 | 270 | /// Result of a parsing operation. 271 | type ParseResult<T> = Result<T, ParseError>; 272 | 273 | /// Container-level metadata 274 | #[derive(Copy, Clone, Eq, PartialEq, Debug)] 275 | pub struct Header { 276 | pub format: Option<Format>, 277 | pub type_size: u32, 278 | pub pixel_width: u32, 279 | pub pixel_height: u32, 280 | pub pixel_depth: u32, 281 | pub layer_count: u32, 282 | pub face_count: u32, 283 | pub level_count: u32, 284 | pub supercompression_scheme: Option<SupercompressionScheme>, 285 | pub index: Index, 286 | } 287 | 288 | /// An index giving the byte offsets from the start of the file and byte sizes of the various sections of the KTX2 file.
289 | #[derive(Copy, Clone, Eq, PartialEq, Debug)] 290 | pub struct Index { 291 | pub dfd_byte_offset: u32, 292 | pub dfd_byte_length: u32, 293 | pub kvd_byte_offset: u32, 294 | pub kvd_byte_length: u32, 295 | pub sgd_byte_offset: u64, 296 | pub sgd_byte_length: u64, 297 | } 298 | 299 | impl Header { 300 | pub const LENGTH: usize = 80; 301 | 302 | pub fn from_bytes(data: &[u8; Self::LENGTH]) -> ParseResult<Self> { 303 | if !data.starts_with(&KTX2_MAGIC) { 304 | return Err(ParseError::BadMagic); 305 | } 306 | 307 | let header = Self { 308 | format: Format::new(u32::from_le_bytes(data[12..16].try_into().unwrap())), 309 | type_size: u32::from_le_bytes(data[16..20].try_into().unwrap()), 310 | pixel_width: u32::from_le_bytes(data[20..24].try_into().unwrap()), 311 | pixel_height: u32::from_le_bytes(data[24..28].try_into().unwrap()), 312 | pixel_depth: u32::from_le_bytes(data[28..32].try_into().unwrap()), 313 | layer_count: u32::from_le_bytes(data[32..36].try_into().unwrap()), 314 | face_count: u32::from_le_bytes(data[36..40].try_into().unwrap()), 315 | level_count: u32::from_le_bytes(data[40..44].try_into().unwrap()), 316 | supercompression_scheme: SupercompressionScheme::new(u32::from_le_bytes(data[44..48].try_into().unwrap())), 317 | index: Index { 318 | dfd_byte_offset: u32::from_le_bytes(data[48..52].try_into().unwrap()), 319 | dfd_byte_length: u32::from_le_bytes(data[52..56].try_into().unwrap()), 320 | kvd_byte_offset: u32::from_le_bytes(data[56..60].try_into().unwrap()), 321 | kvd_byte_length: u32::from_le_bytes(data[60..64].try_into().unwrap()), 322 | sgd_byte_offset: u64::from_le_bytes(data[64..72].try_into().unwrap()), 323 | sgd_byte_length: u64::from_le_bytes(data[72..80].try_into().unwrap()), 324 | }, 325 | }; 326 | 327 | if header.pixel_width == 0 { 328 | return Err(ParseError::ZeroWidth); 329 | } 330 | if header.face_count == 0 { 331 | return Err(ParseError::ZeroFaceCount); 332 | } 333 | 334 | Ok(header) 335 | } 336 | 337 | pub fn as_bytes(&self) -> [u8; Self::LENGTH] { 338 | let mut bytes = [0; Self::LENGTH]; 339 | 340 | let format = self.format.map(|format| format.value()).unwrap_or(0); 341 | let supercompression_scheme = self.supercompression_scheme.map(|scheme| scheme.value()).unwrap_or(0); 342 | 343 | bytes[0..12].copy_from_slice(&KTX2_MAGIC); 344 | bytes[12..16].copy_from_slice(&format.to_le_bytes()[..]); 345 | bytes[16..20].copy_from_slice(&self.type_size.to_le_bytes()[..]); 346 | bytes[20..24].copy_from_slice(&self.pixel_width.to_le_bytes()[..]); 347 | bytes[24..28].copy_from_slice(&self.pixel_height.to_le_bytes()[..]); 348 | bytes[28..32].copy_from_slice(&self.pixel_depth.to_le_bytes()[..]); 349 | bytes[32..36].copy_from_slice(&self.layer_count.to_le_bytes()[..]); 350 | bytes[36..40].copy_from_slice(&self.face_count.to_le_bytes()[..]); 351 | bytes[40..44].copy_from_slice(&self.level_count.to_le_bytes()[..]); 352 | bytes[44..48].copy_from_slice(&supercompression_scheme.to_le_bytes()[..]); 353 | bytes[48..52].copy_from_slice(&self.index.dfd_byte_offset.to_le_bytes()[..]); 354 | bytes[52..56].copy_from_slice(&self.index.dfd_byte_length.to_le_bytes()[..]); 355 | bytes[56..60].copy_from_slice(&self.index.kvd_byte_offset.to_le_bytes()[..]); 356 | bytes[60..64].copy_from_slice(&self.index.kvd_byte_length.to_le_bytes()[..]); 357 | bytes[64..72].copy_from_slice(&self.index.sgd_byte_offset.to_le_bytes()[..]); 358 | bytes[72..80].copy_from_slice(&self.index.sgd_byte_length.to_le_bytes()[..]); 359 | 360 | bytes 361 | } 362 | } 363 | 364 | pub struct Level<'a> { 365 | pub data: &'a [u8], 366
| pub uncompressed_byte_length: u64, 367 | } 368 | 369 | #[derive(Debug, Eq, PartialEq, Copy, Clone)] 370 | pub struct LevelIndex { 371 | pub byte_offset: u64, 372 | pub byte_length: u64, 373 | pub uncompressed_byte_length: u64, 374 | } 375 | 376 | impl LevelIndex { 377 | pub const LENGTH: usize = 24; 378 | 379 | pub fn from_bytes(data: &[u8; Self::LENGTH]) -> Self { 380 | Self { 381 | byte_offset: u64::from_le_bytes(data[0..8].try_into().unwrap()), 382 | byte_length: u64::from_le_bytes(data[8..16].try_into().unwrap()), 383 | uncompressed_byte_length: u64::from_le_bytes(data[16..24].try_into().unwrap()), 384 | } 385 | } 386 | 387 | pub fn as_bytes(&self) -> [u8; Self::LENGTH] { 388 | let mut bytes = [0; Self::LENGTH]; 389 | 390 | bytes[0..8].copy_from_slice(&self.byte_offset.to_le_bytes()[..]); 391 | bytes[8..16].copy_from_slice(&self.byte_length.to_le_bytes()[..]); 392 | bytes[16..24].copy_from_slice(&self.uncompressed_byte_length.to_le_bytes()[..]); 393 | 394 | bytes 395 | } 396 | } 397 | 398 | bitflags::bitflags! { 399 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] 400 | #[repr(transparent)] 401 | pub struct ChannelTypeQualifiers: u8 { 402 | const LINEAR = (1 << 0); 403 | const EXPONENT = (1 << 1); 404 | const SIGNED = (1 << 2); 405 | const FLOAT = (1 << 3); 406 | } 407 | } 408 | 409 | bitflags::bitflags! { 410 | #[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] 411 | #[repr(transparent)] 412 | pub struct DataFormatFlags: u8 { 413 | const STRAIGHT_ALPHA = 0; 414 | const ALPHA_PREMULTIPLIED = (1 << 0); 415 | } 416 | } 417 | 418 | #[derive(Debug, PartialEq, Eq)] 419 | pub struct DfdHeader { 420 | pub vendor_id: u32, //: 17; 421 | pub descriptor_type: u32, //: 15; 422 | pub version_number: u16, //: 16; 423 | } 424 | 425 | impl DfdHeader { 426 | pub const LENGTH: usize = 8; 427 | 428 | pub const BASIC: Self = Self { 429 | vendor_id: 0, 430 | descriptor_type: 0, 431 | version_number: 2, 432 | }; 433 | 434 | pub fn as_bytes(&self, descriptor_block_size: u16) -> [u8; Self::LENGTH] { 435 | let mut output = [0u8; Self::LENGTH]; 436 | 437 | let first_word = (self.vendor_id & ((1 << 17) - 1)) | (self.descriptor_type << 17); 438 | output[0..4].copy_from_slice(&first_word.to_le_bytes()); 439 | output[4..6].copy_from_slice(&self.version_number.to_le_bytes()); 440 | output[6..8].copy_from_slice(&descriptor_block_size.to_le_bytes()); 441 | 442 | output 443 | } 444 | 445 | fn parse(bytes: &[u8]) -> Result<(Self, usize), ParseError> { 446 | let mut offset = 0; 447 | 448 | let v = bytes_to_u32(bytes, &mut offset)?; 449 | let vendor_id = shift_and_mask_lower(0, 17, v); 450 | let descriptor_type = shift_and_mask_lower(17, 15, v); 451 | 452 | let version_number = read_u16(bytes, &mut offset)?; 453 | let descriptor_block_size = read_u16(bytes, &mut offset)?; 454 | 455 | Ok(( 456 | Self { 457 | vendor_id, 458 | descriptor_type, 459 | version_number, 460 | }, 461 | descriptor_block_size as usize, 462 | )) 463 | } 464 | } 465 | 466 | pub struct DfdBlock<'data> { 467 | pub header: DfdHeader, 468 | pub data: &'data [u8], 469 | } 470 | 471 | #[derive(Debug, Copy, Clone, PartialEq, Eq)] 472 | pub struct DfdBlockHeaderBasic { 473 | /// None means Unspecified 474 | pub color_model: Option<ColorModel>, //: 8; 475 | /// None means Unspecified 476 | pub color_primaries: Option<ColorPrimaries>, //: 8; 477 | /// None means Unspecified 478 | pub transfer_function: Option<TransferFunction>, //: 8; 479 | pub flags: DataFormatFlags, //: 8; 480 | pub texel_block_dimensions: [NonZeroU8; 4], //: 8 x 4; 481 | pub bytes_planes: [u8; 8], //: 8 x 8; 482 | } 483 | 484 | impl
DfdBlockHeaderBasic { 485 | pub const LENGTH: usize = 16; 486 | 487 | pub fn as_bytes(&self) -> [u8; Self::LENGTH] { 488 | let mut bytes = [0u8; Self::LENGTH]; 489 | 490 | let color_model = self.color_model.map(|c| c.value()).unwrap_or(0); 491 | let color_primaries = self.color_primaries.map(|c| c.value()).unwrap_or(0); 492 | let transfer_function = self.transfer_function.map(|t| t.value()).unwrap_or(0); 493 | 494 | let texel_block_dimensions = self.texel_block_dimensions.map(|dim| dim.get() - 1); 495 | 496 | bytes[0] = color_model; 497 | bytes[1] = color_primaries; 498 | bytes[2] = transfer_function; 499 | bytes[3] = self.flags.bits(); 500 | bytes[4..8].copy_from_slice(&texel_block_dimensions); 501 | bytes[8..16].copy_from_slice(&self.bytes_planes); 502 | 503 | bytes 504 | } 505 | 506 | pub fn from_bytes(bytes: &[u8; Self::LENGTH]) -> Result<Self, ParseError> { 507 | let mut offset = 0; 508 | 509 | let [model, primaries, transfer, flags] = read_bytes(bytes, &mut offset)?; 510 | let texel_block_dimensions = read_bytes(bytes, &mut offset)?.map(|dim| NonZeroU8::new(dim + 1).unwrap()); 511 | let bytes_planes = read_bytes(bytes, &mut offset)?; 512 | 513 | Ok(Self { 514 | color_model: ColorModel::new(model), 515 | color_primaries: ColorPrimaries::new(primaries), 516 | transfer_function: TransferFunction::new(transfer), 517 | flags: DataFormatFlags::from_bits_truncate(flags), 518 | texel_block_dimensions, 519 | bytes_planes, 520 | }) 521 | } 522 | } 523 | 524 | pub struct DfdBlockBasic<'data> { 525 | pub header: DfdBlockHeaderBasic, 526 | sample_information: &'data [u8], 527 | } 528 | 529 | impl<'data> DfdBlockBasic<'data> { 530 | pub fn parse(bytes: &'data [u8]) -> Result<Self, ParseError> { 531 | let header_data = bytes 532 | .get(0..DfdBlockHeaderBasic::LENGTH) 533 | .ok_or(ParseError::UnexpectedEnd)?
534 | .try_into() 535 | .unwrap(); 536 | let header = DfdBlockHeaderBasic::from_bytes(header_data)?; 537 | 538 | Ok(Self { 539 | header, 540 | sample_information: &bytes[DfdBlockHeaderBasic::LENGTH..], 541 | }) 542 | } 543 | 544 | pub fn sample_information(&self) -> impl Iterator<Item = SampleInformation> + 'data { 545 | SampleInformationIterator { 546 | data: self.sample_information, 547 | } 548 | } 549 | } 550 | 551 | struct SampleInformationIterator<'data> { 552 | data: &'data [u8], 553 | } 554 | 555 | impl Iterator for SampleInformationIterator<'_> { 556 | type Item = SampleInformation; 557 | 558 | fn next(&mut self) -> Option<Self::Item> { 559 | let bytes = self.data.get(0..SampleInformation::LENGTH)?.try_into().unwrap(); 560 | SampleInformation::from_bytes(&bytes).map_or(None, |sample_information| { 561 | self.data = &self.data[SampleInformation::LENGTH..]; 562 | Some(sample_information) 563 | }) 564 | } 565 | } 566 | 567 | #[derive(Debug, Copy, Clone, PartialEq, Eq)] 568 | pub struct SampleInformation { 569 | pub bit_offset: u16, //: 16; 570 | pub bit_length: NonZeroU8, //: 8; 571 | pub channel_type: u8, //: 4; 572 | pub channel_type_qualifiers: ChannelTypeQualifiers, //: 4; 573 | pub sample_positions: [u8; 4], //: 8 x 4; 574 | pub lower: u32, //: 32; 575 | pub upper: u32, //: 32; 576 | } 577 | 578 | impl SampleInformation { 579 | pub const LENGTH: usize = 16; 580 | 581 | pub fn as_bytes(&self) -> [u8; Self::LENGTH] { 582 | let mut bytes = [0u8; Self::LENGTH]; 583 | 584 | let channel_info = self.channel_type | (self.channel_type_qualifiers.bits() << 4); 585 | 586 | bytes[0..2].copy_from_slice(&self.bit_offset.to_le_bytes()); 587 | bytes[2] = self.bit_length.get() - 1; 588 | bytes[3] = channel_info; 589 | bytes[4..8].copy_from_slice(&self.sample_positions); 590 | bytes[8..12].copy_from_slice(&self.lower.to_le_bytes()); 591 | bytes[12..16].copy_from_slice(&self.upper.to_le_bytes()); 592 | 593 | bytes 594 | } 595 | 596 | pub fn from_bytes(bytes: &[u8; Self::LENGTH]) -> Result<Self, ParseError> { 597 | let mut offset = 0; 598 | 599 | let v = bytes_to_u32(bytes, &mut offset)?; 600 | let bit_offset = shift_and_mask_lower(0, 16, v) as u16; 601 | let bit_length = (shift_and_mask_lower(16, 8, v) as u8) 602 | .checked_add(1) 603 | .and_then(NonZeroU8::new) 604 | .ok_or(ParseError::InvalidSampleBitLength)?; 605 | let channel_type = shift_and_mask_lower(24, 4, v) as u8; 606 | let channel_type_qualifiers = ChannelTypeQualifiers::from_bits_truncate(shift_and_mask_lower(28, 4, v) as u8); 607 | 608 | let sample_positions = read_bytes(bytes, &mut offset)?; 609 | let lower = bytes_to_u32(bytes, &mut offset)?; 610 | let upper = bytes_to_u32(bytes, &mut offset)?; 611 | 612 | Ok(Self { 613 | bit_offset, 614 | bit_length, 615 | channel_type, 616 | channel_type_qualifiers, 617 | sample_positions, 618 | lower, 619 | upper, 620 | }) 621 | } 622 | } 623 | 624 | fn read_bytes<const N: usize>(bytes: &[u8], offset: &mut usize) -> Result<[u8; N], ParseError> { 625 | let v = bytes 626 | .get(*offset..*offset + N) 627 | .ok_or(ParseError::UnexpectedEnd)? 628 | .try_into() 629 | .unwrap(); 630 | *offset += N; 631 | Ok(v) 632 | } 633 | 634 | fn read_u16(bytes: &[u8], offset: &mut usize) -> Result<u16, ParseError> { 635 | let v = u16::from_le_bytes(read_bytes(bytes, offset)?); 636 | Ok(v) 637 | } 638 | 639 | fn bytes_to_u32(bytes: &[u8], offset: &mut usize) -> Result<u32, ParseError> { 640 | let v = u32::from_le_bytes( 641 | bytes 642 | .get(*offset..*offset + 4) 643 | .ok_or(ParseError::UnexpectedEnd)?
644 | .try_into() 645 | .unwrap(), 646 | ); 647 | *offset += 4; 648 | Ok(v) 649 | } 650 | 651 | fn shift_and_mask_lower(shift: u32, mask: u32, value: u32) -> u32 { 652 | (value >> shift) & ((1 << mask) - 1) 653 | } 654 | 655 | #[cfg(test)] 656 | mod test { 657 | use super::*; 658 | 659 | fn to_nonzero<const N: usize>(input: [u8; N]) -> [NonZeroU8; N] { 660 | input.map(|n| NonZeroU8::new(n).unwrap()) 661 | } 662 | 663 | #[test] 664 | fn basic_dfd_header_roundtrip() { 665 | let header = DfdBlockHeaderBasic { 666 | color_model: Some(ColorModel::LabSDA), 667 | color_primaries: Some(ColorPrimaries::ACES), 668 | transfer_function: Some(TransferFunction::ITU), 669 | flags: DataFormatFlags::STRAIGHT_ALPHA, 670 | texel_block_dimensions: to_nonzero([1, 2, 3, 4]), 671 | bytes_planes: [5, 6, 7, 8, 9, 10, 11, 12], 672 | }; 673 | 674 | let bytes = header.as_bytes(); 675 | let decoded = DfdBlockHeaderBasic::from_bytes(&bytes).unwrap(); 676 | assert_eq!(header, decoded); 677 | } 678 | 679 | #[test] 680 | fn sample_information_roundtrip() { 681 | let info = SampleInformation { 682 | bit_offset: 234, 683 | bit_length: NonZeroU8::new(123).unwrap(), 684 | channel_type: 2, 685 | channel_type_qualifiers: ChannelTypeQualifiers::LINEAR, 686 | sample_positions: [1, 2, 3, 4], 687 | lower: 1234, 688 | upper: 4567, 689 | }; 690 | 691 | let bytes = info.as_bytes(); 692 | let decoded = SampleInformation::from_bytes(&bytes).unwrap(); 693 | 694 | assert_eq!(info, decoded); 695 | } 696 | 697 | #[test] 698 | fn sample_info_invalid_bit_length() { 699 | let bytes = &[ 700 | 0u8, 0, // bit_offset 701 | 255, // bit_length 702 | 1, // channel_type | channel_type_qualifiers 703 | 0, 0, 0, 0, // sample_positions 704 | 0, 0, 0, 0, // lower 705 | 255, 255, 255, 255, // upper 706 | ]; 707 | 708 | assert!(matches!( 709 | SampleInformation::from_bytes(bytes), 710 | Err(ParseError::InvalidSampleBitLength) 711 | )); 712 | } 713 | 714 | #[test] 715 | #[allow(clippy::octal_escapes)] 716 | fn test_malformed_key_value_data_handling() { 717 | let data = [ 718 | &0_u32.to_le_bytes()[..], 719 | // Regular key-value pair 720 | &7_u32.to_le_bytes()[..], 721 | b"xyz\0123 ", 722 | // Malformed key-value pair with missing NUL byte 723 | &11_u32.to_le_bytes()[..], 724 | b"abcdefghi!! ", 725 | // Regular key-value pair again 726 | &7_u32.to_le_bytes()[..], 727 | b"abc\0987", 728 | &1000_u32.to_le_bytes()[..], 729 | &[1; 1000], 730 | &u32::MAX.to_le_bytes()[..], 731 | ]; 732 | 733 | let mut iterator = KeyValueDataIterator { data: &data.concat() }; 734 | 735 | assert_eq!(iterator.next(), Some(("xyz", &b"123"[..]))); 736 | assert_eq!(iterator.next(), Some(("abc", &b"987"[..]))); 737 | assert_eq!(iterator.next(), None); 738 | } 739 | } 740 | --------------------------------------------------------------------------------