├── parser
│   ├── src
│   │   ├── error.rs
│   │   ├── guid
│   │   │   ├── mod.rs
│   │   │   ├── blob.rs
│   │   │   └── descriptor.rs
│   │   ├── data
│   │   │   ├── mod.rs
│   │   │   ├── impact
│   │   │   │   ├── bytecode
│   │   │   │   │   ├── mod.rs
│   │   │   │   │   ├── cursor.rs
│   │   │   │   │   ├── error.rs
│   │   │   │   │   ├── token.rs
│   │   │   │   │   ├── tokenizer.rs
│   │   │   │   │   ├── text.rs
│   │   │   │   │   ├── data.rs
│   │   │   │   │   ├── parser.rs
│   │   │   │   │   └── assembler.rs
│   │   │   │   ├── mod.rs
│   │   │   │   ├── descriptor.rs
│   │   │   │   └── nodes.rs
│   │   │   ├── localization.rs
│   │   │   ├── audio.rs
│   │   │   ├── hash_types.rs
│   │   │   └── image.rs
│   │   ├── container
│   │   │   ├── mod.rs
│   │   │   ├── error.rs
│   │   │   ├── reader.rs
│   │   │   ├── static_map.rs
│   │   │   ├── writer.rs
│   │   │   ├── header.rs
│   │   │   └── file.rs
│   │   ├── reflection
│   │   │   ├── mod.rs
│   │   │   ├── util.rs
│   │   │   ├── error.rs
│   │   │   ├── mapping.rs
│   │   │   ├── types.rs
│   │   │   ├── collection.rs
│   │   │   ├── pe_file.rs
│   │   │   └── parser.rs
│   │   └── lib.rs
│   └── Cargo.toml
├── shared
│   ├── src
│   │   ├── lib.rs
│   │   ├── hash.rs
│   │   └── io.rs
│   └── Cargo.toml
├── .gitignore
├── .editorconfig
├── cli
│   ├── Cargo.toml
│   └── src
│       ├── logging.rs
│       └── cli.rs
├── Cargo.toml
├── .github
│   └── workflows
│       └── build_and_test.yml
└── README.md

/parser/src/error.rs:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/shared/src/lib.rs:
--------------------------------------------------------------------------------
pub mod io;
pub mod hash;
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
/target
.idea/
.vscode/
*.iml
.local/
--------------------------------------------------------------------------------
/parser/src/guid/mod.rs:
--------------------------------------------------------------------------------
mod blob;
mod descriptor;

pub use blob::*;
pub use descriptor::*;
--------------------------------------------------------------------------------
/shared/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "shared"
version.workspace = true
edition.workspace = true

[dependencies]
crc64fast = "1.1.0"
--------------------------------------------------------------------------------
/parser/src/data/mod.rs:
--------------------------------------------------------------------------------
pub mod audio;
pub mod image;
pub mod localization;
pub mod impact;

mod hash_types;

pub use hash_types::*;
--------------------------------------------------------------------------------
/parser/src/container/mod.rs:
--------------------------------------------------------------------------------
mod file;
mod header;
mod static_map;
mod error;
mod reader;
mod writer;

pub use file::*;
pub use static_map::*;
pub use error::*;
pub use reader::*;
pub use writer::*;
--------------------------------------------------------------------------------
/parser/src/reflection/mod.rs:
--------------------------------------------------------------------------------
mod error;
mod util;
mod pe_file;
mod parser;
mod types;
mod collection;
mod serialization;
mod mapping;

pub use types::*;
pub use error::*;
pub use collection::TypeCollection;
pub use mapping::*;
--------------------------------------------------------------------------------
/parser/src/data/impact/bytecode/mod.rs:
--------------------------------------------------------------------------------
use super::{EventStream, ImpactCommand, ImpactNode, ImpactProgram, ImpactVariable};

mod tokenizer;
mod parser;
mod error;
mod cursor;
mod token;
mod assembler;
mod data;
mod text;

pub use assembler::*;
pub use error::*;
pub use data::*;
--------------------------------------------------------------------------------
/parser/src/lib.rs:
--------------------------------------------------------------------------------
use container::StaticHash;

pub mod container;
pub mod guid;
pub mod error;
pub mod reflection;
pub mod data;

/// Represents an FNV-1a 32-bit hash.
pub type Hash32 = u32;

/// Represents a CRC64 hash.
pub type Hash64 = u64;

impl StaticHash for Hash32 {
    fn static_hash(&self) -> u32 {
        *self
    }
}
--------------------------------------------------------------------------------
/parser/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "parser"
version.workspace = true
edition.workspace = true

[dependencies]
shared = { path = "../shared" }

anyhow.workspace = true
thiserror.workspace = true
serde.workspace = true
serde_json.workspace = true
bitflags.workspace = true

half = "2.3.1"
hound = "3.5.1"
image = "0.24.8"
texture2ddecoder = "0.1.1"
bcdec_rs = "0.2.0"
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
root = true

[*]
charset = utf-8
end_of_line = lf
trim_trailing_whitespace = true
insert_final_newline = true

[*.{rs,toml}]
indent_style = space
indent_size = 4

[*.{json,yaml,yml}]
indent_style = space
indent_size = 2

[*.md]
trim_trailing_whitespace = false

[*.html]
indent_style = space
indent_size = 2

[*.{css,scss,sass}]
indent_style = tab

[*.{js,jsx,ts,tsx}]
indent_style = tab
--------------------------------------------------------------------------------
/cli/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "cli"
version.workspace = true
edition.workspace = true

[dependencies]
parser = { path = "../parser" }
shared = { path = "../shared" }

anyhow.workspace = true
serde_json.workspace = true
serde.workspace = true

clap = "4.5.0"
clap_derive = "4.5.0"
walkdir = "2.5.0"
indicatif = "0.17.11"
colored = "3.0.0"
image = "0.25.5"
crossbeam-channel = "0.5.15"

[[bin]]
name = "kfc-parser"
path = "src/main.rs"
--------------------------------------------------------------------------------
/Cargo.toml:
--------------------------------------------------------------------------------
[workspace]
resolver = "2"
members = [
    "cli",
    "parser",
    "shared"
]

[workspace.package]
edition = "2021"
version = "0.1.0"
authors = ["Brabb3l"]
license = "GPL-3.0-or-later"

[workspace.dependencies]
anyhow = { version = "1.0.95", features = ["backtrace"] }
thiserror = "2.0.11"
"1.0.217", features = ["derive"] } 20 | serde_json = "1.0.138" 21 | bitflags = { version = "2.8.0", features = ["serde"] } 22 | -------------------------------------------------------------------------------- /parser/src/data/localization.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | use super::HashKey32; 4 | 5 | #[derive(Debug, Serialize, Deserialize)] 6 | #[serde(rename_all = "camelCase")] 7 | pub struct LocaTagCollectionResourceData { 8 | pub tags: Vec, 9 | } 10 | 11 | #[derive(Debug, Serialize, Deserialize)] 12 | #[serde(rename_all = "camelCase")] 13 | pub struct LocaTagResource { 14 | pub id: HashKey32, 15 | pub text: String, 16 | pub arguments: Vec, 17 | pub generic_arguments: u32, 18 | } 19 | 20 | #[derive(Debug, Serialize, Deserialize)] 21 | #[serde(rename_all = "camelCase")] 22 | pub struct LocaTagArgument { 23 | pub id: u32, 24 | pub r#type: LocaArgumentType, 25 | } 26 | 27 | #[derive(Debug, Serialize, Deserialize)] 28 | pub enum LocaArgumentType { 29 | Generic, 30 | Input, 31 | Config, 32 | Balancing, 33 | } 34 | -------------------------------------------------------------------------------- /parser/src/data/audio.rs: -------------------------------------------------------------------------------- 1 | use std::io::{Read, Seek, Write}; 2 | 3 | use hound::{SampleFormat, WavSpec, WavWriter}; 4 | 5 | use shared::io::ReadExt; 6 | 7 | pub fn deserialize_audio( 8 | mut reader: R, 9 | writer: W, 10 | channels: u16, 11 | sample_rate: u32, 12 | frame_count: u32 13 | ) -> anyhow::Result<()> { 14 | let spec = WavSpec { 15 | channels, 16 | sample_rate, 17 | bits_per_sample: 16, 18 | sample_format: SampleFormat::Int 19 | }; 20 | 21 | let writer = std::io::BufWriter::new(writer); 22 | let mut wav_writer = WavWriter::new(writer, spec)?; 23 | 24 | let sample_count = frame_count * channels as u32; 25 | 26 | for _ in 0..sample_count { 27 | let sample = reader.read_i16()?; 28 | wav_writer.write_sample(sample)?; 29 | } 30 | 31 | wav_writer.finalize()?; 32 | 33 | Ok(()) 34 | } 35 | -------------------------------------------------------------------------------- /parser/src/reflection/util.rs: -------------------------------------------------------------------------------- 1 | use super::{TypeCollection, TypeInfo}; 2 | 3 | #[inline] 4 | pub fn prefix_pattern( 5 | pattern: [u8; N], 6 | value: u8 7 | ) -> [u8; M] { 8 | let mut new_pattern = [0; M]; 9 | new_pattern[0] = value; 10 | new_pattern[1..M].copy_from_slice(&pattern); 11 | new_pattern 12 | } 13 | 14 | impl TypeCollection { 15 | 16 | pub(super) fn get_inner_type(&self, type_entry: &TypeInfo) -> &TypeInfo { 17 | type_entry.inner_type.as_ref() 18 | .and_then(|t| self.get_type(*t)) 19 | .map(|t| self.resolve_typedef(t)) 20 | .expect("invalid inner type") 21 | } 22 | 23 | pub(super) fn get_inner_type_opt(&self, type_entry: &TypeInfo) -> Option<&TypeInfo> { 24 | type_entry.inner_type.as_ref() 25 | .and_then(|t| self.get_type(*t)) 26 | .map(|t| self.resolve_typedef(t)) 27 | } 28 | 29 | } 30 | -------------------------------------------------------------------------------- /parser/src/data/hash_types.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | use crate::{guid::BlobGuid, Hash32}; 4 | 5 | #[derive(Debug, Clone, Serialize, Deserialize)] 6 | #[serde(rename_all = "camelCase")] 7 | pub struct HashKey32 { 8 | pub value: Hash32, 9 | } 10 | 11 | impl From for HashKey32 { 12 | fn 
/parser/src/data/hash_types.rs:
--------------------------------------------------------------------------------
use serde::{Deserialize, Serialize};

use crate::{guid::BlobGuid, Hash32};

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct HashKey32 {
    pub value: Hash32,
}

impl From<u32> for HashKey32 {
    fn from(value: u32) -> Self {
        Self { value }
    }
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ContentHash {
    pub hash0: u32,
    pub hash1: u32,
    pub hash2: u32,
    pub size: u32,
}

impl ContentHash {

    pub fn as_blob_guid(&self) -> BlobGuid {
        BlobGuid::from_parts(self.hash0, self.hash1, self.hash2, self.size)
    }

    pub fn into_blob_guid(self) -> BlobGuid {
        BlobGuid::from_parts(self.hash0, self.hash1, self.hash2, self.size)
    }

}
--------------------------------------------------------------------------------
/parser/src/container/error.rs:
--------------------------------------------------------------------------------
use thiserror::Error;

#[derive(Debug, Error)]
pub enum KFCWriteError {
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),

    #[error("Size too large: {0}")]
    SizeTooLarge(u64),
}

#[derive(Debug, Error)]
pub enum KFCReadError {
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),
    #[error("StaticMap error: {0}")]
    StaticMap(#[from] StaticMapError),

    #[error("Invalid magic number: {0:X}")]
    InvalidMagic(u32),
}

#[derive(Debug, Error)]
pub enum StaticMapError {
    #[error("Keys and values must have the same length: {0} != {1}")]
    LengthMismatch(usize, usize),
    #[error("Bucket reference count does not match key count: {0} != {1}")]
    BucketCountMismatch(usize, usize),
    #[error("Bucket size must be a power of 2: {0}")]
    InvalidBucketSize(usize),
}
--------------------------------------------------------------------------------
/.github/workflows/build_and_test.yml:
--------------------------------------------------------------------------------
name: Rust Build and Test

on:
  push:

env:
  CARGO_TERM_COLOR: always

jobs:
  build_and_test:
    runs-on: windows-latest
    strategy:
      matrix:
        toolchain:
          - stable
    steps:
      - uses: actions/checkout@v4
      - run: rustup update ${{ matrix.toolchain }} && rustup default ${{ matrix.toolchain }}
      - run: cargo build --verbose
      - run: cargo build --release --verbose
      - run: cargo test --verbose
      - name: Debug
        uses: actions/upload-artifact@v4
        with:
          name: 'kfc-parser-debug'
          path: target/debug/kfc-parser.exe
          retention-days: 90
          if-no-files-found: error
      - name: Release
        uses: actions/upload-artifact@v4
        with:
          name: 'kfc-parser'
          path: target/release/kfc-parser.exe
          retention-days: 90
          if-no-files-found: error
--------------------------------------------------------------------------------
/parser/src/data/impact/mod.rs:
--------------------------------------------------------------------------------
use std::collections::HashMap;
use shared::hash::fnv_const;

use crate::reflection::{TypeCollection, TypeFlags, TypeInfo};

mod nodes;
mod descriptor;

pub mod graph;
pub mod bytecode;

pub use nodes::*;
pub use descriptor::*;

const IMPACT_NODE_HASH: u32 = fnv_const("keen::impact_nodes::ImpactNode");

impl TypeCollection {
    pub(super) fn get_impact_node_types(&self) -> HashMap<u32, &TypeInfo> {
        let mut nodes = HashMap::new();

        for node in self.iter() {
            if node.flags.contains(TypeFlags::HAS_DS) {
                continue;
            }

            let inheritance_chain = self.get_inheritance_chain(node);

            for child_node in inheritance_chain {
                if child_node.qualified_hash == IMPACT_NODE_HASH {
                    nodes.insert(node.name_hash, node);
                    break;
                }
            }
        }

        nodes
    }
}
--------------------------------------------------------------------------------
/shared/src/hash.rs:
--------------------------------------------------------------------------------

pub fn fnv<T: AsRef<[u8]>>(input: T) -> u32 {
    let mut hash = 0x811c9dc5_u32;

    for byte in input.as_ref().iter() {
        hash ^= *byte as u32;
        hash = hash.wrapping_mul(0x1000193);
    }

    hash
}

pub fn fnv_with_seed<T: AsRef<[u8]>>(input: T, seed: u32) -> u32 {
    let mut hash = seed;

    for byte in input.as_ref().iter() {
        hash ^= *byte as u32;
        hash = hash.wrapping_mul(0x1000193);
    }

    hash
}

pub const fn fnv_const(input: &str) -> u32 {
    fnv_const_iter(input.as_bytes(), 0, 0x811c9dc5)
}

const fn fnv_const_iter(input: &[u8], index: usize, hash: u32) -> u32 {
    if index == input.len() {
        hash
    } else {
        fnv_const_iter(input, index + 1, (hash ^ input[index] as u32).wrapping_mul(0x1000193))
    }
}

pub fn crc64<T: AsRef<[u8]>>(input: T) -> u64 {
    let mut output = crc64fast::Digest::new();
    output.write(input.as_ref());
    output.sum64()
}
--------------------------------------------------------------------------------
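A quick sanity check, not part of the crate, that the runtime and `const` FNV-1a implementations above agree:

```rust
use shared::hash::{fnv, fnv_const};

fn main() {
    // fnv_const is evaluable at compile time; fnv is its runtime twin.
    const HASH: u32 = fnv_const("keen::impact_nodes::ImpactNode");
    assert_eq!(HASH, fnv("keen::impact_nodes::ImpactNode"));

    // FNV-1a of empty input is the offset basis itself.
    assert_eq!(fnv(b""), 0x811c9dc5);
}
```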
/parser/src/data/impact/descriptor.rs:
--------------------------------------------------------------------------------
use serde::{Deserialize, Serialize};
use crate::data::hash_types::HashKey32;
use crate::guid::BlobGuid;

pub type ProgramId = HashKey32;
pub type ImpactCommand = u32;

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum EventStream {
    OnCollision,
    OnHit,
    OnParry,
    OnFootDown,
}


#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ImpactVariable {
    pub name: HashKey32, // fnv hash of dbg_name
    pub config_id: HashKey32,
    pub r#type: HashKey32, // fnv(name) of the type
    pub size: u16,
    pub offset_in_bytes: u16,
    pub dbg_name: String,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ImpactProgram {
    pub id: ProgramId, // fnv hash of the program_guid
    pub program_guid: BlobGuid, // BlobGuid part of the descriptor guid
    pub stack_size: u16,
    pub used_streams: Vec<EventStream>,
    pub code: Vec<ImpactCommand>,
    pub code_shutdown: Vec<ImpactCommand>,
    pub data_layout: Vec<ImpactVariable>,
    pub data: Vec<u8>,
}
--------------------------------------------------------------------------------
/parser/src/data/impact/bytecode/cursor.rs:
--------------------------------------------------------------------------------
pub struct Cursor<'a> {
    index: usize,
    content: &'a str,
    chars: std::str::Chars<'a>,
}

#[allow(dead_code)]
impl<'a> Cursor<'a> {
    pub fn new(content: &'a str) -> Self {
        Self {
            index: 0,
            content,
            chars: content.chars(),
        }
    }

    #[inline]
    pub fn index(&self) -> usize {
        self.index
    }

    #[inline]
    pub fn as_str(&self) -> &'a str {
        self.chars.as_str()
    }

    #[inline]
    pub fn content(&self) -> &'a str {
        self.content
    }

    pub fn is_eof(&self) -> bool {
        self.chars.as_str().is_empty()
    }

    pub fn first(&self) -> char {
        self.chars.clone().next().unwrap_or('\0')
    }

    pub fn next(&mut self) -> Option<char> {
        self.chars.next()
    }

    pub fn skip(&mut self) {
        self.next();
    }

    pub fn skip_n(&mut self, n: usize) {
        for _ in 0..n {
            self.next();
        }
    }

    pub fn slice(&mut self) -> &'a str {
        let start = self.index;
        let end = self.content.len() - self.chars.as_str().len();

        self.index = end;
        &self.content[start..end]
    }

    pub fn peek_slice(&self) -> &'a str {
        let start = self.index;
        let end = self.content.len() - self.chars.as_str().len();

        &self.content[start..end]
    }

}
--------------------------------------------------------------------------------
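An illustration of the cursor's slice bookkeeping (hypothetical input; `cursor` is a private module, so this only works from inside `bytecode`): `next`/`skip` consume characters, `slice` cuts out everything consumed since the last cut, and `peek_slice` does the same without advancing the cut point.

```rust
fn main() {
    let mut cursor = Cursor::new("iconst 42");

    // Consume the mnemonic, then cut it out as one slice.
    while cursor.first().is_ascii_alphabetic() {
        cursor.skip();
    }
    assert_eq!(cursor.slice(), "iconst");

    cursor.skip();                          // the space
    cursor.skip_n(2);                       // "42"
    assert_eq!(cursor.peek_slice(), " 42"); // peeked, cut point unchanged
    assert_eq!(cursor.slice(), " 42");      // now cut for real
    assert!(cursor.is_eof());
}
```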
/cli/src/logging.rs:
--------------------------------------------------------------------------------
use std::sync::atomic::AtomicBool;

static LOGGING: AtomicBool = AtomicBool::new(true);

#[inline]
pub fn set_logging(enabled: bool) {
    LOGGING.store(enabled, std::sync::atomic::Ordering::SeqCst);
}

#[inline]
pub fn is_logging_enabled() -> bool {
    LOGGING.load(std::sync::atomic::Ordering::SeqCst)
}

macro_rules! info {
    ($message:expr) => {
        if $crate::logging::is_logging_enabled() {
            println!("{} {}", colored::Colorize::bold(colored::Colorize::blue("info:")), $message)
        }
    };
    ($message:expr, $($arg:tt)*) => {
        if $crate::logging::is_logging_enabled() {
            println!("{} {}", colored::Colorize::bold(colored::Colorize::blue("info:")), format!($message, $($arg)*))
        }
    };
}

macro_rules! warning {
    ($message:expr) => {
        if $crate::logging::is_logging_enabled() {
            println!("{} {}", colored::Colorize::bold(colored::Colorize::yellow("warning:")), $message)
        }
    };
    ($message:expr, $($arg:tt)*) => {
        if $crate::logging::is_logging_enabled() {
            println!("{} {}", colored::Colorize::bold(colored::Colorize::yellow("warning:")), format!($message, $($arg)*))
        }
    };
}

macro_rules! error {
    ($message:expr) => {
        if $crate::logging::is_logging_enabled() {
            println!("{} {}", colored::Colorize::bold(colored::Colorize::red("error:")), $message)
        }
    };
    ($message:expr, $($arg:tt)*) => {
        if $crate::logging::is_logging_enabled() {
            println!("{} {}", colored::Colorize::bold(colored::Colorize::red("error:")), format!($message, $($arg)*))
        }
    };
}

pub(crate) use info;
pub(crate) use warning as warn; // use alias to resolve ambiguity with builtin attribute
pub(crate) use error;
--------------------------------------------------------------------------------
/parser/src/data/impact/bytecode/error.rs:
--------------------------------------------------------------------------------
use super::token::Span;

#[derive(Debug)]
pub struct ParseError {
    pub span: Span,
    pub kind: ParseErrorKind,
}

impl std::error::Error for ParseError {}

impl std::fmt::Display for ParseError {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "{} - {}", self.span, self.kind)
    }
}

#[derive(Debug)]
pub enum ParseErrorKind {
    DuplicateLabel {
        label: String,
    },
    UnknownLabel {
        label: String,
    },
    Expected {
        expected: String,
        found: String,
    },
    NumberParseError {
        content: String,
        error: std::num::ParseIntError,
    },
    UnknownType {
        type_name: String,
    },
    UnknownData {
        name: String,
    },
}

impl std::fmt::Display for ParseErrorKind {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self {
            ParseErrorKind::DuplicateLabel { label } => {
                write!(f, "Duplicate label `{}`", label)
            }
            ParseErrorKind::UnknownLabel { label } => {
                write!(f, "Unknown label `{}`", label)
            }
            ParseErrorKind::Expected { expected, found } => {
                write!(f, "Expected {}, found {}", expected, found)
            }
            ParseErrorKind::NumberParseError { content, error } => {
                write!(f, "Failed to parse number `{}`: {}", content, error)
            }
            ParseErrorKind::UnknownType { type_name } => {
                write!(f, "Unknown type `{}`", type_name)
            }
            ParseErrorKind::UnknownData { name } => {
                write!(f, "Unknown data `{}`", name)
            }
        }
    }
}
--------------------------------------------------------------------------------
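How these errors render, as a sketch (the `Span` and `Position` types come from token.rs further down):

```rust
fn main() {
    let err = ParseError {
        span: Span::new(Position::new(3, 5, 40), Position::new(3, 12, 47)),
        kind: ParseErrorKind::UnknownLabel { label: "label_9".to_string() },
    };

    // ParseError's Display prints "span - kind"; Span prints "start - end".
    assert_eq!(err.to_string(), "3:5 - 3:12 - Unknown label `label_9`");
}
```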
/README.md:
--------------------------------------------------------------------------------
# KFC Parser (WIP)

Parser for unpacking and repacking Enshrouded game files.

## Features

- Unpacking and repacking (1:1) of all types of descriptor files.
- Extracting reflection data from the Enshrouded executable (Windows x64 only).
- Disassembling and assembling of impact programs.

## Usage

### Unpacking and Repacking

To unpack game files, use the `unpack` command.

```sh
kfc-parser.exe unpack -g <GAME_DIRECTORY> -o <OUTPUT_DIRECTORY> [OPTIONS]
```

To repack unpacked files, use the `repack` command.

It will repack all `.json` files in the input directory which have a
qualified guid name (e.g. `82706b40-61b1-4b8f-8b23-dcec6971bda1_9398e747_0.json`).
The hash between the two underscores (`9398e747` in this case) is used to determine the file type.

```sh
kfc-parser.exe repack -g <GAME_DIRECTORY> -i <INPUT_DIRECTORY> [OPTIONS]
```

### Restoring Original Game Files

To restore the original game files, use the `restore` command.

```sh
kfc-parser.exe restore -g <GAME_DIRECTORY>
```

### Impact CLI

The `impact` subcommand can be used to convert an impact program into
a more manageable format and vice versa.

The `disassemble` command converts an impact program into a `.impact` and a `.shutdown.impact`
file, which contain the program's bytecode in text form, and a `.data.json` file, which
contains the program's data such as variables.

```sh
kfc-parser.exe impact disassemble -i <INPUT_FILE>
```

To convert the disassembled files back into an impact program, use the `assemble` command.

The `input-file-name` should be the shared name of the disassembled files as follows:
- `<input-file-name>.impact`
- `<input-file-name>.shutdown.impact`
- `<input-file-name>.data.json`

```sh
kfc-parser.exe impact assemble -i <INPUT_FILE_NAME> [OPTIONS]
```

### Extracting Reflection Data

To extract reflection data from the Enshrouded executable, use the `extract-types` command.

**Note:** This is automatically executed when unpacking or repacking files.

```sh
kfc-parser.exe extract-types -g <GAME_DIRECTORY> [OPTIONS]
```

## TODO

- Implement unpacking/repacking of blob files such as images, sounds, etc.
--------------------------------------------------------------------------------
/parser/src/reflection/error.rs:
--------------------------------------------------------------------------------
use thiserror::Error;

#[derive(Debug, Error)]
pub enum PEParseError {
    #[error("IO error: {0}")]
    IO(#[from] std::io::Error),

    #[error("Invalid DOS signature")]
    InvalidDosSignature,
    #[error("Invalid NT signature")]
    InvalidNTSignature,
    #[error("Unsupported PE type")]
    UnsupportedPEType,
    #[error("Malformed section name")]
    MalformedSectionName,
}

#[derive(Debug, Error)]
pub enum ReflectionParseError {
    #[error("IO error: {0}")]
    IO(#[from] std::io::Error),
    #[error("PE parse error: {0}")]
    PEParse(#[from] PEParseError),

    #[error("Missing .data section")]
    MissingDataSection,
    #[error("Missing .rdata section")]
    MissingRDataSection,
    #[error("Malformed pattern")]
    MalformedPattern,
}

#[derive(Debug, Error)]
pub enum TypeParseError {
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),
    #[error("JSON error: {0}")]
    Json(#[from] serde_json::Error),
}

#[derive(Debug, Error)]
pub enum ReadError {
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),
    #[error("FromUtf8 error: {0}")]
    FromUtf8(#[from] std::string::FromUtf8Error),

    #[error("Unknown type: {0}")]
    UnknownType(u32),
    #[error("Root is not an object")]
    RootNotObject,

    #[error("Invalid type hash: {0}")]
    InvalidTypeHash(u32),
}

#[derive(Debug, Error)]
pub enum WriteError {
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),

    #[error("Root is not an object")]
    RootNotObject,
    #[error("Missing root type specifier")]
    MissingRootType,
    #[error("Missing root guid specifier")]
    MissingRootGuid,

    #[error("Incompatible type: got {got}, expected {expected}")]
    IncompatibleType {
        got: String,
        expected: String
    },
    #[error("Invalid enum value: got {got}, expected one of {expected:?}")]
    InvalidEnumValue {
        got: String,
        expected: Vec<String>
    },
    #[error("Missing field: {0}")]
    MissingField(String),
    #[error("Missing field type annotation")]
    MissingFieldType,
    #[error("Missing field value annotation")]
    MissingFieldValue,
    #[error("Invalid type: {0}")]
    InvalidType(String),
    #[error("Malformed blob GUID: {0}")]
    MalformedBlobGuid(String),
    #[error("Malformed descriptor GUID: {0}")]
    MalformedDescriptorGuid(String),
}
--------------------------------------------------------------------------------
/parser/src/data/impact/bytecode/token.rs:
--------------------------------------------------------------------------------
use std::fmt::Display;

#[derive(Debug)]
pub struct Token<'a> {
    pub kind: TokenKind,
    pub content: &'a str,
    pub span: Span,
}

impl<'a> Token<'a> {
    pub fn new(
        kind: TokenKind,
        content: &'a str,
        span: Span
    ) -> Self {
        Self {
            kind,
            content,
            span,
        }
    }
}

#[derive(Debug, Clone)]
pub struct Span {
    pub start: Position,
    pub end: Position,
}

impl Span {
    pub fn new(start: Position, end: Position) -> Self {
        Self {
            start,
            end,
        }
    }
}

impl Display for Span {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "{} - {}", self.start, self.end)
    }
}

#[derive(Debug, Clone)]
pub struct Position {
    pub line: usize,
    pub column: usize,
    pub index: usize,
}

impl Position {
    pub fn new(line: usize, column: usize, index: usize) -> Self {
        Self {
            line,
            column,
            index,
        }
    }
}

impl Display for Position {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "{}:{}", self.line, self.column)
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TokenKind {
    Comment,
    Whitespace,
    Newline,

    Identifier,
    Number,

    Eof,
    Unknown
}

impl std::fmt::Display for TokenKind {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self {
            TokenKind::Comment => write!(f, "comment"),
            TokenKind::Whitespace => write!(f, "whitespace"),
            TokenKind::Newline => write!(f, "newline"),
            TokenKind::Identifier => write!(f, "identifier"),
            TokenKind::Number => write!(f, "number"),
            TokenKind::Eof => write!(f, "EOF"),
            TokenKind::Unknown => write!(f, "unknown"),
        }
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum KeywordKind {
    Invalid,
    IAdd,
    ISub,
    IMul,
    IDiv,
    Ilt,
    Ieq,
    Ileq,
    Br,
    Brt,
    Brf,
    IConst,
    IConst0,
    IConst1,
    Inc,
    Dec,
    Copy,
    Dup,
    Call,
    ECall,
    Ret,
    Load,
    GLoad,
    Store,
    GStore,
    LTime,
    TimeFF,
    Pop,
    Rvm,
    DSelf,
    Halt,
    Unknown,
}
--------------------------------------------------------------------------------
/parser/src/data/impact/bytecode/tokenizer.rs:
--------------------------------------------------------------------------------
use super::token::*;
use super::cursor::Cursor;

pub struct Tokenizer<'a> {
    cursor: Cursor<'a>,
    line: usize,
    column: usize,
}

impl<'a> Tokenizer<'a> {
    pub fn new(content: &'a str) -> Self {
        Self {
            cursor: Cursor::new(content),
            line: 1,
            column: 1,
        }
    }

    pub fn advance(&mut self) -> Token<'a> {
        let start = Position::new(self.line, self.column, self.cursor.index());
        let char = match self.next() {
            Some(c) => c,
            None => return Token::new(TokenKind::Eof, "", Span::new(start.clone(), start)),
        };

        let kind = match char {
            '#' => {
                self.skip_while(|c| c != '\n' && c != '\r');
                TokenKind::Comment
            }
            ' ' | '\t' => {
                self.skip_while(|c| c == ' ' || c == '\t');
                TokenKind::Whitespace
            }
            '\r' => {
                self.optional('\n');
                self.next_line();
                TokenKind::Newline
            }
            '\n' => {
                self.next_line();
                TokenKind::Newline
            }

            '0'..='9' => {
                self.skip_while(|c| c.is_numeric());

                let next = self.first();

                if next.is_ascii_alphanumeric() || next == '_' || next == ':' {
                    self.skip();
                    self.skip_while(|c| c.is_ascii_alphanumeric() || c == '_' || c == ':');
                    TokenKind::Identifier
                } else {
                    TokenKind::Number
                }
            }

            'a'..='z' | 'A'..='Z' | '_' => {
                self.skip_while(|c| c.is_ascii_alphanumeric() || c == '_' || c == ':');
                TokenKind::Identifier
            }

            _ => TokenKind::Unknown,
        };

        let content = self.cursor.slice();
        let end = Position::new(self.line, self.column, self.cursor.index());

        Token::new(kind, content, Span::new(start, end))
    }

    #[inline]
    fn first(&self) -> char {
        self.cursor.first()
    }

    #[inline]
    fn next(&mut self) -> Option<char> {
        self.column += 1;
        self.cursor.next()
    }

    #[inline]
    fn skip(&mut self) {
        self.column += 1;
        self.cursor.skip();
    }

    #[inline]
    fn skip_while<F>(&mut self, mut predicate: F)
    where
        F: FnMut(char) -> bool
    {
        while predicate(self.cursor.first()) && !self.cursor.is_eof() {
            self.skip();
        }
    }

    #[inline]
    fn next_line(&mut self) {
        self.line += 1;
        self.column = 1;
    }

    #[inline]
    fn optional(&mut self, c: char) -> bool {
        if self.first() == c {
            self.skip();
            true
        } else {
            false
        }
    }

}
--------------------------------------------------------------------------------
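A minimal driver loop (illustrative; `tokenizer` is private to `bytecode`) pumping the tokenizer until EOF:

```rust
fn main() {
    let mut tokenizer = Tokenizer::new("iconst 42 # push a constant\n");

    loop {
        let token = tokenizer.advance();
        if token.kind == TokenKind::Eof {
            break;
        }
        // e.g. prints: identifier "iconst" @ 1:1 - 1:7
        println!("{} {:?} @ {}", token.kind, token.content, token.span);
    }
}
```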
/parser/src/guid/blob.rs:
--------------------------------------------------------------------------------
use std::fmt::{Debug, Display};
use std::io::{Read, Write};
use std::num::ParseIntError;
use std::str::FromStr;

use serde::Deserialize;
use shared::hash::{crc64, fnv};

use crate::container::StaticHash;
use crate::{Hash32, Hash64};

use super::DescriptorGuid;

#[derive(Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Default)]
pub struct BlobGuid {
    pub data: [u8; 16],
}

impl BlobGuid {

    pub const NONE: BlobGuid = BlobGuid {
        data: [0; 16],
    };

    pub fn from_bytes(data: [u8; 16]) -> Self {
        Self {
            data
        }
    }

    pub fn from_parts(
        hash0: u32,
        hash1: u32,
        hash2: u32,
        size: u32,
    ) -> Self {
        let mut data = [0; 16];

        data[0..4].copy_from_slice(&size.to_le_bytes());
        data[4..8].copy_from_slice(&hash0.to_le_bytes());
        data[8..12].copy_from_slice(&hash1.to_le_bytes());
        data[12..16].copy_from_slice(&hash2.to_le_bytes());

        Self {
            data
        }
    }

    pub fn hash32(&self) -> Hash32 {
        fnv(self.data)
    }

    pub fn hash64(&self) -> Hash64 {
        crc64(self.to_string())
    }

    pub fn size(&self) -> u32 {
        let data: [u8; 4] = self.data[0..4].try_into().unwrap();
        u32::from_le_bytes(data)
    }

    pub fn is_none(&self) -> bool {
        self.data == [0; 16]
    }

    pub fn as_descriptor_guid(&self, type_hash: u32, part_number: u32) -> DescriptorGuid {
        DescriptorGuid {
            data: self.data,
            type_hash,
            part_number,
        }
    }

}

impl StaticHash for BlobGuid {
    fn static_hash(&self) -> u32 {
        u32::from_le_bytes([
            self.data[4],
            self.data[5],
            self.data[6],
            self.data[7],
        ])
    }
}

impl FromStr for BlobGuid {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if s.len() != 32 {
            return Err(format!("Invalid length: got {}, expected 32", s.len()));
        }

        let mut data = [0; 16];

        for i in 0..16 {
            data[15 - i] = u8::from_str_radix(&s[(i * 2)..(i * 2 + 2)], 16)
                .map_err(|e: ParseIntError| format!("Failed to parse byte: {}", e))?;
        }

        Ok(BlobGuid {
            data,
        })
    }
}

impl Debug for BlobGuid {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "\"{}\" ({})", self, self.hash32())
    }
}

impl Display for BlobGuid {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        for i in (0..16).rev() {
            write!(f, "{:0>2x}", self.data[i])?;
        }

        Ok(())
    }
}

impl<'de> Deserialize<'de> for BlobGuid {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        BlobGuid::from_str(&s).map_err(serde::de::Error::custom)
    }
}

impl serde::Serialize for BlobGuid {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        self.to_string().serialize(serializer)
    }
}

impl BlobGuid {

    pub fn read<R: Read>(reader: &mut R) -> std::io::Result<Self> {
        let mut data = [0; 16];
        reader.read_exact(&mut data)?;

        Ok(Self {
            data
        })
    }

    pub fn write<W: Write>(&self, writer: &mut W) -> std::io::Result<()> {
        writer.write_all(&self.data)
    }

}
--------------------------------------------------------------------------------
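A round trip through the string form (made-up value): the 32 hex digits are parsed back-to-front into the 16 data bytes, so the last two digits land in `data[0]`, the low byte of the embedded size.

```rust
use parser::guid::BlobGuid;

fn main() {
    let s = "000000000000000000000000000000ff";
    let guid: BlobGuid = s.parse().unwrap();

    assert_eq!(guid.size(), 0xff);   // size lives in data[0..4], little-endian
    assert_eq!(guid.to_string(), s); // Display re-emits the same 32 hex digits
    assert!(!guid.is_none());
}
```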
/parser/src/container/reader.rs:
--------------------------------------------------------------------------------
use std::{fs::File, io::{BufReader, Read, Seek, SeekFrom}, path::{Path, PathBuf}};

use serde_json::Value as JsonValue;

use crate::{guid::{BlobGuid, DescriptorGuid}, reflection::{ReadError, TypeCollection}};

use super::KFCFile;

pub struct KFCReader<'a, 'b> {
    pub path: PathBuf,
    pub file: &'a KFCFile,
    pub type_collection: &'b TypeCollection,

    reader: BufReader<File>,
    dat_readers: Vec<Option<BufReader<File>>>,
}

impl<'a, 'b> KFCReader<'a, 'b> {

    pub fn new(
        path: &Path,
        file: &'a KFCFile,
        type_collection: &'b TypeCollection,
    ) -> std::io::Result<Self> {
        Ok(Self {
            path: path.to_path_buf(),
            file,
            type_collection,
            reader: BufReader::new(File::open(path)?),
            dat_readers: Vec::new(),
        })
    }

    pub fn read_descriptor(
        &mut self,
        guid: &DescriptorGuid
    ) -> Result<Option<JsonValue>, ReadError> {
        let data = match self.read_descriptor_bytes(guid)? {
            Some(data) => data,
            None => return Ok(None),
        };

        Ok(Some(self.type_collection.deserialize_descriptor(guid, &data)?))
    }

    pub fn read_descriptor_into(
        &mut self,
        guid: &DescriptorGuid,
        buf: &mut Vec<u8>
    ) -> Result<Option<JsonValue>, ReadError> {
        if !self.read_descriptor_bytes_into(guid, buf)? {
            return Ok(None);
        }

        Ok(Some(self.type_collection.deserialize_descriptor(guid, buf)?))
    }

    pub fn read_descriptor_bytes(
        &mut self,
        guid: &DescriptorGuid
    ) -> std::io::Result<Option<Vec<u8>>> {
        let mut data = Vec::new();

        if !self.read_descriptor_bytes_into(guid, &mut data)? {
            return Ok(None);
        }

        Ok(Some(data))
    }

    pub fn read_descriptor_bytes_into(
        &mut self,
        guid: &DescriptorGuid,
        dst: &mut Vec<u8>
    ) -> std::io::Result<bool> {
        let link = match self.file.get_descriptor_link(guid) {
            Some(link) => link,
            None => return Ok(false),
        };

        let offset = self.file.data_offset() + link.offset;
        dst.resize(link.size as usize, 0);
        self.reader.seek(SeekFrom::Start(offset))?;
        self.reader.read_exact(dst)?;

        Ok(true)
    }

    pub fn read_blob(&mut self, guid: &BlobGuid) -> std::io::Result<Option<Vec<u8>>> {
        let mut data = Vec::new();

        if !self.read_blob_into(guid, &mut data)? {
            return Ok(None);
        }

        Ok(Some(data))
    }

    pub fn read_blob_into(
        &mut self,
        guid: &BlobGuid,
        dst: &mut Vec<u8>
    ) -> std::io::Result<bool> {
        let link = match self.file.get_blob_link(guid) {
            Some(link) => link,
            None => return Ok(false),
        };

        let offset = link.offset;
        dst.resize(guid.size() as usize, 0);

        let dat_reader = self.get_dat_reader(link.dat_index)?;

        dat_reader.seek(SeekFrom::Start(offset))?;
        dat_reader.read_exact(dst)?;

        Ok(true)
    }

    fn get_dat_reader(&mut self, index: usize) -> std::io::Result<&mut BufReader<File>> {
        if index >= self.dat_readers.len() {
            self.dat_readers.resize_with(index + 1, || None);
        }

        if self.dat_readers[index].is_none() {
            // Format: FILE_NAME_{INDEX}.dat where INDEX is 3 digits with leading zeros
            let path = self.path.with_file_name(format!("{}_{:03}.dat", self.path.file_stem().unwrap().to_string_lossy(), index));
            self.dat_readers[index] = Some(BufReader::new(File::open(path)?));
        }

        Ok(self.dat_readers[index].as_mut().unwrap())
    }

}
--------------------------------------------------------------------------------
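The dat-file naming rule from `get_dat_reader`, distilled (hypothetical paths):

```rust
use std::path::Path;

fn main() {
    // For an archive at enshrouded.kfc, blob index 3 resolves to the sibling
    // file {file_stem}_{index:03}.dat.
    let path = Path::new("C:/game/enshrouded.kfc");
    let dat = path.with_file_name(format!(
        "{}_{:03}.dat",
        path.file_stem().unwrap().to_string_lossy(),
        3
    ));
    assert_eq!(dat, Path::new("C:/game/enshrouded_003.dat"));
}
```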
/cli/src/cli.rs:
--------------------------------------------------------------------------------
use std::path::PathBuf;
use clap_derive::{Parser, Subcommand};

#[derive(Parser)]
#[command(version)]
pub(crate) struct Cli {
    #[command(subcommand)]
    pub commands: Commands,

    /// How many threads to use for exporting
    #[arg(short, long, default_value = "8")]
    pub threads: u8,
}

#[derive(Subcommand)]
pub enum Commands {
    /// Unpack enshrouded files
    Unpack {
        /// Game directory (should contain enshrouded.kfc and enshrouded._XXX.dat files)
        #[arg(short, long)]
        game_directory: PathBuf,

        /// File name override (defaults to `enshrouded` and `enshrouded_server`)
        #[arg(long)]
        file_name: Option<String>,

        /// Output directory
        #[arg(short, long, required_unless_present = "stdout", conflicts_with = "stdout")]
        output: Option<PathBuf>,

        /// Comma-separated filter by type (prefixed with t) or guid
        #[arg(short, long, default_value = "*")]
        filter: String,

        /// Write unpacked content to stdout (newline-separated)
        #[arg(short, long, required_unless_present = "output", conflicts_with = "output")]
        stdout: bool,
    },

    /// Repack enshrouded files (will backup the origin .kfc to .kfc.bak)
    Repack {
        /// Game directory (should contain enshrouded.kfc and enshrouded._XXX.dat files)
        #[arg(short, long)]
        game_directory: PathBuf,

        /// File name override (defaults to `enshrouded` and `enshrouded_server`)
        #[arg(long)]
        file_name: Option<String>,

        /// Input directory containing unpacked files
        #[arg(short, long, required_unless_present = "stdin", conflicts_with = "stdin")]
        input: Option<PathBuf>,

        /// Read unpacked content from stdin (newline-separated)
        #[arg(short, long, required_unless_present = "input", conflicts_with = "input")]
        stdin: bool,
    },

    /// Extract type information from enshrouded files
    ExtractTypes {
        /// Game directory (should contain enshrouded.exe)
        #[arg(short, long)]
        game_directory: PathBuf,

        /// File name override (defaults to `enshrouded` and `enshrouded_server`)
        #[arg(long)]
        file_name: Option<String>,
    },

    /// Restore the original enshrouded files
    Restore {
        /// Game directory (should contain enshrouded.kfc.bak)
        #[arg(short, long)]
        game_directory: PathBuf,

        /// File name override (defaults to `enshrouded` and `enshrouded_server`)
        #[arg(long)]
        file_name: Option<String>,
    },

    /// CLI for impact files
    #[command(subcommand)]
    Impact(CommandImpact),
}

#[derive(Subcommand)]
pub enum CommandImpact {
    /// Creates a descriptor file from a disassembled impact program
    Assemble {
        /// The shared name of the impact program files
        /// The files should be named as follows:
        /// - `file_name.impact`
        /// - `file_name.shutdown.impact`
        /// - `file_name.data.json`
        #[arg(short, long, verbatim_doc_comment)]
        input: PathBuf,

        /// An optional file name for the new impact program descriptor (will fallback to file_name.json)
        #[arg(short, long)]
        output: Option<PathBuf>,

        /// An optional guid to use for the new impact program descriptor
        #[arg(short, long)]
        guid: Option<String>,
    },

    /// Disassembles an impact program from a descriptor file into a more human-readable format
    Disassemble {
        /// The impact program descriptor file
        #[arg(short, long)]
        input: PathBuf,

        /// The output directory for the disassembled impact program (will fallback to input's directory)
        #[arg(short, long)]
        output: Option<PathBuf>,
    },

    /// Extracts all nodes from the reflection data
    ExtractNodes,
}
--------------------------------------------------------------------------------
/parser/src/data/impact/bytecode/text.rs:
--------------------------------------------------------------------------------
use std::collections::{HashMap, HashSet};
use std::io::Write;

use crate::Hash32;

use super::parser::Parser;
use super::{ImpactNode, ImpactProgram, ImpactAssembler, ImpactOps, ImpactProgramData, ParseError};

impl ImpactAssembler<'_> {
    pub fn parse_text(&self, data: &ImpactProgramData, code: &str) -> Result<Vec<ImpactOps>, ParseError> {
        Parser::new(&self.type_collection.get_impact_nodes(), data, code).parse()
    }

    // TODO: Replace unwraps with proper error handling
    pub fn write_text<W: Write>(
        &self,
        out: &mut W,
        program: &ImpactProgram,
        code: &[ImpactOps]
    ) -> std::io::Result<()> {
        let mut labels = code.iter()
            .filter_map(|instruction| {
                match instruction {
                    ImpactOps::BR(address) => Some(*address),
                    ImpactOps::BRT(address) => Some(*address),
                    ImpactOps::BRF(address) => Some(*address),
                    _ => None,
                }
            })
            .collect::<HashSet<_>>()
            .into_iter()
            .collect::<Vec<_>>();

        labels.sort();

        let labels = labels.into_iter()
            .enumerate()
            .map(|(i, address)| (address, i))
            .collect::<HashMap<_, _>>();

        let mut pc = 0;

        for instruction in code {
            if let Some(label) = labels.get(&pc) {
                writeln!(out, "label_{}:", label)?;
            }

            pc += instruction.size() as u32;

            write!(out, "{}", instruction.name())?;

            match instruction {
                ImpactOps::BR(address) => {
                    let label = labels.get(address).unwrap();
                    write!(out, " label_{}", label)?;
                },
                ImpactOps::BRT(address) => {
                    let label = labels.get(address).unwrap();
                    write!(out, " label_{}", label)?;
                },
                ImpactOps::BRF(address) => {
                    let label = labels.get(address).unwrap();
                    write!(out, " label_{}", label)?;
                },
                ImpactOps::IConst(index) |
                ImpactOps::Load(index) |
                ImpactOps::GLoad(index) |
                ImpactOps::Store(index) |
                ImpactOps::GStore(index) => {
                    let index = *index as usize & 0xFFFF;
                    let layout = &program.data_layout[index];

                    write!(out, " {}", layout.dbg_name)?;
                }
                ImpactOps::Call(hash) |
                ImpactOps::ECall(hash) => {
                    if let Some(node) = self.get_call_type(*hash) {
                        write!(out, " {} # (", node.name)?;

                        let mut count = 0;

                        for input in &node.inputs {
                            if input.is_execution() { continue; }
                            if count > 0 { write!(out, ", ")? }
                            write!(out, "in {}: {}", input.name, input.r#type.name)?;
                            count += 1;
                        }

                        for config in &node.configs {
                            if count > 0 { write!(out, ", ")? }
                            write!(out, "cfg {}: {}", config.name, config.r#type.name)?;
                            count += 1;
                        }

                        for output in &node.outputs {
                            if output.is_execution() { continue; }
                            if count > 0 { write!(out, ", ")? }
                            write!(out, "out {}: {}", output.name, output.r#type.name)?;
                            count += 1;
                        }

                        write!(out, ")")?;
                    } else {
                        println!("Missing type info for hash: {:08X}", hash);
                        write!(out, " {} # missing type info", hash)?;
                    }
                },
                ImpactOps::Unknown(value) => write!(out, "{:08X}", value)?,
                _ => {},
            }

            writeln!(out)?;
        }

        Ok(())
    }

    fn get_call_type(&self, index: Hash32) -> Option<&ImpactNode> {
        self.nodes.get(&index)
    }

}
--------------------------------------------------------------------------------
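The label scheme in `write_text`, boiled down to a standalone sketch: branch targets are deduplicated, sorted by address, and numbered in ascending order, so `label_0` is always the lowest target address.

```rust
use std::collections::{HashMap, HashSet};

fn main() {
    // Branch target addresses as they appear in the code stream.
    let targets = [24u32, 8, 24, 16];

    let mut sorted: Vec<u32> = targets.iter().copied()
        .collect::<HashSet<_>>() // dedupe: {8, 16, 24}
        .into_iter()
        .collect();
    sorted.sort();

    // address -> label index, in ascending address order
    let labels: HashMap<u32, usize> = sorted.into_iter()
        .enumerate()
        .map(|(i, address)| (address, i))
        .collect();

    assert_eq!(labels[&8], 0);  // label_0
    assert_eq!(labels[&16], 1); // label_1
    assert_eq!(labels[&24], 2); // label_2
}
```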
/parser/src/reflection/mapping.rs:
--------------------------------------------------------------------------------
use std::collections::HashMap;

use shared::hash::fnv_const;

use crate::{container::KFCReader, data::{localization::{LocaTagCollectionResourceData, LocaTagResource}, ContentHash}, guid::DescriptorGuid};

use super::{PrimitiveType, ReadError, TypeCollection};

const LOCA_TAG_COLLECTION_RESOURCE: u32 = fnv_const("keen::LocaTagCollectionResource");
const LOCA_TAG_COLLECTION_RESOURCE_DATA: u32 = fnv_const("keen::LocaTagCollectionResourceData");

const NAME_LOCA_TAG: u32 = fnv_const("keen::NameLocaTag"); // typedef
const LOCA_TAG_REFERENCE: u32 = fnv_const("keen::LocaTagReference");
const LOCA_TAG_ID: u32 = fnv_const("keen::LocaTagId");

pub struct DescriptorNameMapper<'a> {
    type_collection: &'a TypeCollection,
    loca_tags: HashMap<u32, LocaTagResource>,
    guid_only: bool,
}

impl<'a> DescriptorNameMapper<'a> {

    pub fn new(type_collection: &'a TypeCollection) -> Self {
        Self {
            type_collection,
            loca_tags: HashMap::new(),
            guid_only: false,
        }
    }

    pub fn set_guid_only(&mut self, guid_only: bool) {
        self.guid_only = guid_only;
    }

    pub fn read_data(&mut self, reader: &mut KFCReader) -> Result<(), ReadError> {
        let root_type_info = reader.file.get_descriptor_guids_by_type_hash(LOCA_TAG_COLLECTION_RESOURCE);
        let guid = root_type_info.first().unwrap();
        let descriptor = reader.read_descriptor(guid)?.unwrap();
        let keenglish_data_hash = descriptor.as_object().unwrap()["keenglishDataHash"].clone();
        let keenglish_data_hash = serde_json::from_value::<ContentHash>(keenglish_data_hash).unwrap();
        let keenglish_data = reader.read_blob(&keenglish_data_hash.as_blob_guid())?.unwrap();

        let loca_type_info = self.type_collection.get_type_by_qualified_hash(LOCA_TAG_COLLECTION_RESOURCE_DATA).unwrap();
        let loca_data_json = self.type_collection.deserialize(loca_type_info, &keenglish_data)?;
        let loca_data = serde_json::from_value::<LocaTagCollectionResourceData>(loca_data_json).unwrap();

        self.loca_tags = loca_data.tags.into_iter().map(|tag| (tag.id.value, tag)).collect();

        Ok(())
    }

    pub fn get_name(
        &self,
        guid: &DescriptorGuid,
        value: &serde_json::Value
    ) -> String {
        if self.guid_only {
            return guid.to_qualified_string();
        }

        // TODO: Give singletons a proper name

        let type_info = self.type_collection.get_type_by_qualified_hash(guid.type_hash);

        let loca_field = type_info
            .and_then(|type_info| {
                type_info.struct_fields.iter()
                    .filter(|field| {
                        let type_info = self.type_collection.get_type(field.r#type).unwrap();

                        type_info.qualified_hash == LOCA_TAG_REFERENCE ||
                        type_info.qualified_hash == NAME_LOCA_TAG
                    })
                    .find(|field| {
                        field.name == "name" || field.name == "debugName" || field.name == "dbgName"
                    })
                    .and_then(|field| value.get(&field.name))
                    .and_then(|loca_tag| loca_tag.as_str())
                    .and_then(|loca_tag| DescriptorGuid::from_str(loca_tag, 0, 0))
                    .and_then(|loca_tag| self.loca_tags.get(&loca_tag.hash32()))
            });

        if let Some(loca_field) = loca_field {
            return loca_field.text.to_ascii_lowercase()
                .replace(" ", "_")
                .replace(|c: char| !c.is_ascii_alphanumeric() && c != '_', "");
        }

        let loca_field = type_info
            .and_then(|type_info| {
                type_info.struct_fields.iter()
                    .filter(|field| {
                        let type_info = self.type_collection.get_type(field.r#type).unwrap();

                        type_info.qualified_hash == LOCA_TAG_ID
                    })
                    .find(|field| {
                        field.name == "name" || field.name == "debugName" || field.name == "dbgName"
                    })
                    .and_then(|field| value.get(&field.name))
                    .and_then(|loca_tag| loca_tag.as_u64())
                    .and_then(|loca_tag| self.loca_tags.get(&(loca_tag as u32)))
            });

        if let Some(loca_field) = loca_field {
            return loca_field.text.to_ascii_lowercase()
                .replace(" ", "_")
                .replace(|c: char| !c.is_ascii_alphanumeric() && c != '_', "");
        }

        let debug_name = type_info
            .and_then(|type_info| {
                type_info.struct_fields.iter()
                    .filter(|field|
                        self.type_collection.resolve_typedef(
                            self.type_collection.get_type(field.r#type).unwrap()
                        ).primitive_type == PrimitiveType::BlobString
                    )
                    .find(|field| {
                        field.name == "name" || field.name == "debugName" || field.name == "dbgName"
                    })
                    .and_then(|field| value.get(&field.name))
                    .and_then(|name| name.as_str())
            });

        if let Some(name) = debug_name {
            name.to_string()
        } else {
            guid.to_qualified_string()
        }
    }

}
--------------------------------------------------------------------------------
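What the name sanitisation above does to a tag text (illustrative value):

```rust
fn main() {
    let text = "Poison Arrow (Tier 2)";

    let name = text.to_ascii_lowercase()
        .replace(" ", "_")
        .replace(|c: char| !c.is_ascii_alphanumeric() && c != '_', "");

    assert_eq!(name, "poison_arrow_tier_2");
}
```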
/parser/src/container/static_map.rs:
--------------------------------------------------------------------------------
// TODO: Implement better Debug trait representation for StaticMap and StaticMapBuilder

use std::{cmp::{Eq, Ord}, collections::HashMap, hash::Hash, io::{Read, Write}};
use shared::io::{ReadExt, WriteExt};

use super::StaticMapError;

pub trait StaticHash {
    fn static_hash(&self) -> u32;
}

#[derive(Debug, Clone, Default)]
pub struct StaticMap<K, V> {
    keys: Vec<K>,
    values: Vec<V>,
    buckets: Vec<StaticMapBucket>,
}

impl<K: StaticHash + Eq, V> StaticMap<K, V> {

    pub fn from_parts(
        keys: Vec<K>,
        values: Vec<V>,
        buckets: Vec<StaticMapBucket>,
    ) -> Result<Self, StaticMapError> {
        let bucket_ref_count = buckets.iter().map(|b| b.count).sum();

        if keys.len() != values.len() {
            return Err(StaticMapError::LengthMismatch(keys.len(), values.len()));
        }

        if keys.len() != bucket_ref_count {
            return Err(StaticMapError::BucketCountMismatch(keys.len(), bucket_ref_count));
        }

        if !buckets.is_empty() && buckets.len().count_ones() != 1 {
            return Err(StaticMapError::InvalidBucketSize(buckets.len()));
        }

        Ok(Self {
            keys,
            values,
            buckets,
        })
    }

    pub fn get(&self, key: &K) -> Option<&V> {
        let hash = key.static_hash();
        let bucket_index = hash as usize % self.buckets.len();
        let bucket = &self.buckets[bucket_index];

        for i in bucket.index..bucket.index + bucket.count {
            if self.keys[i] == *key {
                return Some(&self.values[i]);
            }
        }

        None
    }

    pub fn contains_key(&self, key: &K) -> bool {
        self.get(key).is_some()
    }

    pub fn len(&self) -> usize {
        self.keys.len()
    }

    pub fn is_empty(&self) -> bool {
        self.keys.is_empty()
    }

    pub fn iter(&self) -> impl Iterator<Item = (&K, &V)> {
        self.keys.iter().zip(self.values.iter())
    }

    pub fn keys(&self) -> &[K] {
        &self.keys
    }

    pub fn values(&self) -> &[V] {
        &self.values
    }

    pub fn buckets(&self) -> &[StaticMapBucket] {
        &self.buckets
    }

}

impl<K: Eq + Hash, V> StaticMap<K, V> {

    pub fn into_builder(self) -> StaticMapBuilder<K, V> {
        StaticMapBuilder {
            entries: self.keys.into_iter().zip(self.values).collect()
        }
    }

}

impl<K: Eq + Hash + Clone, V: Clone> StaticMap<K, V> {

    pub fn as_builder(&self) -> StaticMapBuilder<K, V> {
        StaticMapBuilder {
            entries: self.keys.iter().cloned().zip(self.values.iter().cloned()).collect(),
        }
    }

}

#[derive(Debug, Clone, Default)]
pub struct StaticMapBucket {
    index: usize,
    count: usize,
}

impl StaticMapBucket {

    pub fn read<R: Read>(reader: &mut R) -> std::io::Result<Self> {
        let index = reader.read_u32()? as usize;
        let count = reader.read_u32()? as usize;

        Ok(Self { index, count })
    }

    pub fn write<W: Write>(&self, writer: &mut W) -> std::io::Result<()> {
        writer.write_u32(self.index as u32)?;
        writer.write_u32(self.count as u32)?;

        Ok(())
    }

}

#[derive(Debug, Clone, Default)]
pub struct StaticMapBuilder<K, V> {
    entries: HashMap<K, V>,
}

impl<K: StaticHash + Eq + Hash, V> StaticMapBuilder<K, V> {

    pub fn insert(&mut self, key: K, value: V) {
        self.entries.insert(key, value);
    }

    pub fn get(&self, key: &K) -> Option<&V> {
        self.entries.get(key)
    }

    pub fn contains_key(&self, key: &K) -> bool {
        self.entries.contains_key(key)
    }

    pub fn len(&self) -> usize {
        self.entries.len()
    }

    pub fn is_empty(&self) -> bool {
        self.entries.is_empty()
    }

    pub fn iter(&self) -> impl Iterator<Item = (&K, &V)> {
        self.entries.iter()
    }

    pub fn build(self) -> StaticMap<K, V> {
        let bucket_size = self.entries.len().next_power_of_two();
        let mut buckets = vec![StaticMapBucket::default(); bucket_size];

        let mut entries = self.entries.into_iter().collect::<Vec<_>>();
        entries.sort_by_key(|(k, _)| k.static_hash() % bucket_size as u32);

        let mut bucket_index = 0;
        let mut entry_index = 0;

        while bucket_index < bucket_size {
            let bucket = &mut buckets[bucket_index];
            bucket.index = entry_index;

            let mut count = 0;

            while entry_index < entries.len() && entries[entry_index].0.static_hash() as usize % bucket_size == bucket_index {
                entry_index += 1;
                count += 1;
            }

            bucket.count = count;
            bucket_index += 1;
        }

        let (keys, values) = entries.into_iter().unzip();

        StaticMap::from_parts(keys, values, buckets)
            .expect("Failed to build StaticMap") // should never fail
    }

}

impl<K: StaticHash + Eq + Hash, V> From<HashMap<K, V>> for StaticMap<K, V> {

    fn from(map: HashMap<K, V>) -> Self {
        StaticMapBuilder {
            entries: map,
        }.build()
    }

}
--------------------------------------------------------------------------------
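Builder round trip (sketch; relies on the `StaticHash` impl for `Hash32`/`u32` in parser/src/lib.rs):

```rust
use parser::container::StaticMapBuilder;

fn main() {
    let mut builder = StaticMapBuilder::default();
    builder.insert(0xdeadbeef_u32, "a");
    builder.insert(0x0badf00d_u32, "b");

    // build() sizes the bucket array to the next power of two (here 2)
    // and groups the entries by static_hash() % bucket_count.
    let map = builder.build();

    assert_eq!(map.len(), 2);
    assert_eq!(map.get(&0xdeadbeef_u32), Some(&"a"));
    assert!(map.contains_key(&0x0badf00d_u32));
}
```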
/shared/src/io.rs:
--------------------------------------------------------------------------------
 1 | use std::io::{Read, Seek, Write};
 2 | 
 3 | pub trait ReadExt: Read {
 4 |     fn read_u8(&mut self) -> std::io::Result<u8> {
 5 |         let mut buf = [0; 1];
 6 |         self.read_exact(&mut buf)?;
 7 |         Ok(buf[0])
 8 |     }
 9 | 
10 |     fn read_u16(&mut self) -> std::io::Result<u16> {
11 |         let mut buf = [0; 2];
12 |         self.read_exact(&mut buf)?;
13 |         Ok(u16::from_le_bytes(buf))
14 |     }
15 | 
16 |     fn read_u32(&mut self) -> std::io::Result<u32> {
17 |         let mut buf = [0; 4];
18 |         self.read_exact(&mut buf)?;
19 |         Ok(u32::from_le_bytes(buf))
20 |     }
21 | 
22 |     fn read_u64(&mut self) -> std::io::Result<u64> {
23 |         let mut buf = [0; 8];
24 |         self.read_exact(&mut buf)?;
25 |         Ok(u64::from_le_bytes(buf))
26 |     }
27 | 
28 |     fn read_i8(&mut self) -> std::io::Result<i8> {
29 |         let mut buf = [0; 1];
30 |         self.read_exact(&mut buf)?;
31 |         Ok(buf[0] as i8)
32 |     }
33 | 
34 |     fn read_i16(&mut self) -> std::io::Result<i16> {
35 |         let mut buf = [0; 2];
36 |         self.read_exact(&mut buf)?;
37 |         Ok(i16::from_le_bytes(buf))
38 |     }
39 | 
40 |     fn read_i32(&mut self) -> std::io::Result<i32> {
41 |         let mut buf = [0; 4];
42 |         self.read_exact(&mut buf)?;
43 |         Ok(i32::from_le_bytes(buf))
44 |     }
45 | 
46 |     fn read_i64(&mut self) -> std::io::Result<i64> {
47 |         let mut buf = [0; 8];
48 |         self.read_exact(&mut buf)?;
49 |         Ok(i64::from_le_bytes(buf))
50 |     }
51 | 
52 |     fn read_f32(&mut self) -> std::io::Result<f32> {
53 |         let mut buf = [0; 4];
54 |         self.read_exact(&mut buf)?;
55 |         Ok(f32::from_le_bytes(buf))
56 |     }
57 | 
58 |     fn read_f64(&mut self) -> std::io::Result<f64> {
59 |         let mut buf = [0; 8];
60 |         self.read_exact(&mut buf)?;
61 |         Ok(f64::from_le_bytes(buf))
62 |     }
63 | 
64 |     fn read_string(&mut self, len: usize) -> std::io::Result<String> {
65 |         let mut buf = vec![0; len];
66 |         self.read_exact(&mut buf)?;
67 |         String::from_utf8(buf)
68 |             .map_err(|_| std::io::Error::new(std::io::ErrorKind::InvalidData, "invalid utf-8"))
69 |     }
70 | 
71 |     fn read_exact_n(&mut self, len: usize, buf: &mut Vec<u8>) -> std::io::Result<()> {
72 |         let mut chunk = self.take(len as u64);
73 |         // surface truncated input as an error instead of panicking via assert_eq!
74 |         if chunk.read_to_end(buf)? != len { return Err(std::io::Error::new(std::io::ErrorKind::UnexpectedEof, "unexpected eof")); }
75 |         Ok(())
76 |     }
77 |     fn padding(&mut self, n: usize) -> std::io::Result<()> {
78 |         let mut buf = vec![0; n];
79 |         self.read_exact(&mut buf)?;
80 | 
81 |         for byte in buf {
82 |             if byte != 0 {
83 |                 return Err(std::io::Error::new(std::io::ErrorKind::InvalidData, "padding is not 0"));
84 |             }
85 |         }
86 | 
87 |         Ok(())
88 |     }
89 | }
90 | 
91 | impl<T: Read> ReadExt for T {}
92 | 
93 | pub trait ReadSeekExt: Read + Seek + Sized {
94 |     fn read_u32_offset(&mut self) -> std::io::Result<u64> {
95 |         let pos = self.stream_position()?;
96 |         let offset = self.read_u32()? as u64;
97 | 
98 |         Ok(pos + offset)
99 |     }
100 | 
101 |     fn align(&mut self, alignment: usize) -> std::io::Result<usize> {
102 |         let pos = self.stream_position()?
as usize;
103 |         let padding = (alignment - (pos % alignment)) % alignment;
104 |         self.padding(padding)?;
105 |         Ok(padding)
106 |     }
107 | }
108 | 
109 | impl<T: Read + Seek> ReadSeekExt for T {}
110 | 
111 | pub trait WriteExt: Write {
112 |     fn write_u8(&mut self, n: u8) -> std::io::Result<()> {
113 |         self.write_all(&n.to_le_bytes())
114 |     }
115 | 
116 |     fn write_u16(&mut self, n: u16) -> std::io::Result<()> {
117 |         self.write_all(&n.to_le_bytes())
118 |     }
119 | 
120 |     fn write_u32(&mut self, n: u32) -> std::io::Result<()> {
121 |         self.write_all(&n.to_le_bytes())
122 |     }
123 | 
124 |     fn write_u64(&mut self, n: u64) -> std::io::Result<()> {
125 |         self.write_all(&n.to_le_bytes())
126 |     }
127 | 
128 |     fn write_i8(&mut self, n: i8) -> std::io::Result<()> {
129 |         self.write_all(&n.to_le_bytes())
130 |     }
131 | 
132 |     fn write_i16(&mut self, n: i16) -> std::io::Result<()> {
133 |         self.write_all(&n.to_le_bytes())
134 |     }
135 | 
136 |     fn write_i32(&mut self, n: i32) -> std::io::Result<()> {
137 |         self.write_all(&n.to_le_bytes())
138 |     }
139 | 
140 |     fn write_i64(&mut self, n: i64) -> std::io::Result<()> {
141 |         self.write_all(&n.to_le_bytes())
142 |     }
143 | 
144 |     fn write_f32(&mut self, n: f32) -> std::io::Result<()> {
145 |         self.write_all(&n.to_le_bytes())
146 |     }
147 | 
148 |     fn write_f64(&mut self, n: f64) -> std::io::Result<()> {
149 |         self.write_all(&n.to_le_bytes())
150 |     }
151 | 
152 |     fn write_string(&mut self, s: &str, len: usize) -> std::io::Result<()> {
153 |         let s = s.as_bytes();
154 |         if s.len() > len { return Err(std::io::Error::new(std::io::ErrorKind::InvalidInput, "string longer than fixed-length field")); }
155 |         self.write_all(s)?;
156 |         self.write_all(&vec![0; len - s.len()]) // zero-pad to the fixed width instead of panicking on the old `&s[..len]` slice
157 |     }
158 |     fn padding(&mut self, n: u64) -> std::io::Result<()> {
159 |         self.write_all(&vec![0; n as usize])
160 |     }
161 | }
162 | 
163 | impl<T: Write> WriteExt for T {}
164 | 
165 | pub trait WriteSeekExt: Write + Seek + Sized {
166 |     fn write_offset(&mut self, offset: u64) -> std::io::Result<()> {
167 |         if offset == 0 {
168 |             self.write_u32(0)?;
169 |             return Ok(());
170 |         }
171 | 
172 |         let pos = self.stream_position()?;
173 |         let relative_offset = offset - pos; // assumes the target lies at or ahead of the current position
174 | 
175 |         self.write_u32(relative_offset as u32)?;
176 | 
177 |         Ok(())
178 |     }
179 | 
180 |     fn align(&mut self, alignment: usize) -> std::io::Result<usize> {
181 |         let pos = self.stream_position()? as usize;
182 |         let padding = (alignment - (pos % alignment)) % alignment;
183 |         self.write_all(&vec![0; padding])?;
184 |         Ok(padding)
185 |     }
186 | 
187 |     fn align_with(&mut self, alignment: usize, value: u8) -> std::io::Result<usize> {
188 |         let pos = self.stream_position()?
as usize; 189 | let padding = (alignment - (pos % alignment)) % alignment; 190 | self.write_all(&vec![value; padding])?; 191 | Ok(padding) 192 | } 193 | } 194 | 195 | impl WriteSeekExt for T {} 196 | -------------------------------------------------------------------------------- /parser/src/reflection/types.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | use std::hash::Hash; 3 | 4 | use crate::Hash32; 5 | 6 | #[derive(Debug, Clone, Serialize, Deserialize)] 7 | #[serde(rename_all = "camelCase")] 8 | pub struct TypeInfo { 9 | pub name: String, 10 | pub impact_name: String, 11 | pub qualified_name: String, 12 | #[serde(default, skip_serializing_if = "Vec::is_empty")] 13 | pub namespace: Vec, 14 | #[serde(default, skip_serializing_if = "Option::is_none")] 15 | pub inner_type: Option, 16 | pub size: u32, 17 | pub alignment: u16, 18 | pub element_alignment: u16, 19 | pub field_count: u32, 20 | pub primitive_type: PrimitiveType, 21 | pub flags: TypeFlags, 22 | pub name_hash: Hash32, // pre-computed 23 | pub impact_hash: Hash32, // pre-computed 24 | pub qualified_hash: Hash32, 25 | pub internal_hash: Hash32, 26 | #[serde(default, skip_serializing_if = "Vec::is_empty")] 27 | pub struct_fields: Vec, 28 | #[serde(default, skip_serializing_if = "Vec::is_empty")] 29 | pub enum_fields: Vec, 30 | #[serde(default, skip_serializing_if = "Option::is_none")] 31 | pub default_value: Option, 32 | #[serde(default, skip_serializing_if = "Vec::is_empty")] 33 | pub attributes: Vec, 34 | } 35 | 36 | #[derive(Debug, Clone, Serialize, Deserialize)] 37 | #[serde(rename_all = "camelCase")] 38 | pub struct StructFieldInfo { 39 | pub name: String, 40 | pub r#type: usize, 41 | pub data_offset: u64, 42 | #[serde(default, skip_serializing_if = "Vec::is_empty")] 43 | pub attributes: Vec, 44 | } 45 | 46 | #[derive(Debug, Clone, Serialize, Deserialize)] 47 | #[serde(rename_all = "camelCase")] 48 | pub struct Attribute { 49 | pub name: String, 50 | #[serde(default, skip_serializing_if = "Vec::is_empty")] 51 | pub namespace: Vec, 52 | #[serde(default, skip_serializing_if = "Option::is_none")] 53 | pub r#type: Option, 54 | #[serde(default, skip_serializing_if = "String::is_empty")] 55 | pub value: String, 56 | } 57 | 58 | #[derive(Debug, Clone, Serialize, Deserialize)] 59 | #[serde(rename_all = "camelCase")] 60 | pub struct EnumFieldInfo { 61 | pub name: String, 62 | pub value: u64, 63 | } 64 | 65 | bitflags::bitflags! 
{ 66 | #[derive(Debug, Clone, Copy, Default, Serialize, Deserialize)] 67 | pub struct TypeFlags: u8 { 68 | const NONE = 0x00; 69 | const HAS_DS = 0x01; 70 | const HAS_BLOB_ARRAY = 0x02; 71 | const HAS_BLOB_STRING = 0x04; 72 | const HAS_BLOB_OPTIONAL = 0x08; 73 | const HAS_BLOB_VARIANT = 0x10; 74 | const IS_GPU_UNIFORM = 0x20; 75 | const IS_GPU_STORAGE = 0x40; 76 | const IS_GPU_CONSTANT = 0x80; 77 | } 78 | 79 | // TODO: Serialize, Deserialize to array instead of pipe separated string 80 | } 81 | 82 | #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] 83 | #[derive(Serialize, Deserialize)] 84 | #[serde(rename_all = "SCREAMING_SNAKE_CASE")] 85 | #[repr(u8)] 86 | pub enum PrimitiveType { 87 | None, 88 | Bool, 89 | #[serde(rename = "UINT8")] 90 | UInt8, 91 | #[serde(rename = "SINT8")] 92 | SInt8, 93 | #[serde(rename = "UINT16")] 94 | UInt16, 95 | #[serde(rename = "SINT16")] 96 | SInt16, 97 | #[serde(rename = "UINT32")] 98 | UInt32, 99 | #[serde(rename = "SINT32")] 100 | SInt32, 101 | #[serde(rename = "UINT64")] 102 | UInt64, 103 | #[serde(rename = "SINT64")] 104 | SInt64, 105 | Float32, 106 | Float64, 107 | Enum, 108 | Bitmask8, 109 | Bitmask16, 110 | Bitmask32, 111 | Bitmask64, 112 | Typedef, 113 | Struct, 114 | StaticArray, 115 | DsArray, 116 | DsString, 117 | DsOptional, 118 | DsVariant, 119 | BlobArray, 120 | BlobString, 121 | BlobOptional, 122 | BlobVariant, 123 | ObjectReference, 124 | Guid, 125 | } 126 | 127 | impl PrimitiveType { 128 | pub fn from_u8(value: u8) -> Self { 129 | match value { 130 | 0x0 => Self::None, 131 | 0x1 => Self::Bool, 132 | 0x2 => Self::UInt8, 133 | 0x3 => Self::SInt8, 134 | 0x4 => Self::UInt16, 135 | 0x5 => Self::SInt16, 136 | 0x6 => Self::UInt32, 137 | 0x7 => Self::SInt32, 138 | 0x8 => Self::UInt64, 139 | 0x9 => Self::SInt64, 140 | 0xA => Self::Float32, 141 | 0xB => Self::Float64, 142 | 0xC => Self::Enum, 143 | 0xD => Self::Bitmask8, 144 | 0xE => Self::Bitmask16, 145 | 0xF => Self::Bitmask32, 146 | 0x10 => Self::Bitmask64, 147 | 0x11 => Self::Typedef, 148 | 0x12 => Self::Struct, 149 | 0x13 => Self::StaticArray, 150 | 0x14 => Self::DsArray, 151 | 0x15 => Self::DsString, 152 | 0x16 => Self::DsOptional, 153 | 0x17 => Self::DsVariant, 154 | 0x18 => Self::BlobArray, 155 | 0x19 => Self::BlobString, 156 | 0x1A => Self::BlobOptional, 157 | 0x1B => Self::BlobVariant, 158 | 0x1C => Self::ObjectReference, 159 | 0x1D => Self::Guid, 160 | _ => panic!("Invalid PrimitiveType: 0x{:X}", value), 161 | } 162 | } 163 | 164 | pub fn to_u8(&self) -> u8 { 165 | match self { 166 | Self::None => 0x0, 167 | Self::Bool => 0x1, 168 | Self::UInt8 => 0x2, 169 | Self::SInt8 => 0x3, 170 | Self::UInt16 => 0x4, 171 | Self::SInt16 => 0x5, 172 | Self::UInt32 => 0x6, 173 | Self::SInt32 => 0x7, 174 | Self::UInt64 => 0x8, 175 | Self::SInt64 => 0x9, 176 | Self::Float32 => 0xA, 177 | Self::Float64 => 0xB, 178 | Self::Enum => 0xC, 179 | Self::Bitmask8 => 0xD, 180 | Self::Bitmask16 => 0xE, 181 | Self::Bitmask32 => 0xF, 182 | Self::Bitmask64 => 0x10, 183 | Self::Typedef => 0x11, 184 | Self::Struct => 0x12, 185 | Self::StaticArray => 0x13, 186 | Self::DsArray => 0x14, 187 | Self::DsString => 0x15, 188 | Self::DsOptional => 0x16, 189 | Self::DsVariant => 0x17, 190 | Self::BlobArray => 0x18, 191 | Self::BlobString => 0x19, 192 | Self::BlobOptional => 0x1A, 193 | Self::BlobVariant => 0x1B, 194 | Self::ObjectReference => 0x1C, 195 | Self::Guid => 0x1D, 196 | } 197 | } 198 | } 199 | -------------------------------------------------------------------------------- 
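The two conversion tables in PrimitiveType above are meant to be exact inverses of each other. A small sanity sketch (illustrative, using the `pub use types::*` re-export from reflection/mod.rs):

use parser::reflection::{PrimitiveType, TypeFlags};

fn primitive_type_demo() {
    // from_u8/to_u8 round-trip; note that from_u8 panics on anything above
    // 0x1D, so untrusted bytes should be validated before calling it.
    let p = PrimitiveType::from_u8(0x19);
    assert_eq!(p, PrimitiveType::BlobString);
    assert_eq!(p.to_u8(), 0x19);

    // TypeFlags is a bitflags type, so individual flags combine bitwise.
    let flags = TypeFlags::HAS_DS | TypeFlags::HAS_BLOB_STRING;
    assert!(flags.contains(TypeFlags::HAS_DS));
    assert!(!flags.contains(TypeFlags::IS_GPU_UNIFORM));
}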
/parser/src/reflection/collection.rs:
--------------------------------------------------------------------------------
 1 | use serde::{Deserialize, Serialize};
 2 | use shared::hash::fnv;
 3 | use std::collections::HashMap;
 4 | use std::fmt::Debug;
 5 | use std::fs::File;
 6 | use std::io::{BufReader, BufWriter};
 7 | use std::path::Path;
 8 | use std::sync::Arc;
 9 | 
10 | use super::types::*;
11 | use super::{parser, ReflectionParseError, TypeParseError};
12 | 
13 | #[derive(Debug, Default)]
14 | pub struct TypeCollection {
15 |     pub version: String,
16 |     types: Vec<Arc<TypeInfo>>,
17 |     types_by_qualified_hash: HashMap<u32, Arc<TypeInfo>>,
18 |     types_by_impact_hash: HashMap<u32, Arc<TypeInfo>>,
19 | }
20 | 
21 | #[derive(Debug, Serialize, Deserialize)]
22 | struct TypeCollectionJson<T> {
23 |     version: String,
24 |     types: T,
25 | }
26 | 
27 | impl TypeCollection {
28 | 
29 |     pub fn load_from_path(&mut self, path: impl AsRef<Path>) -> Result<usize, TypeParseError> {
30 |         let file = File::open(path)?;
31 |         let reader = BufReader::new(file);
32 |         let json = serde_json::from_reader::<_, TypeCollectionJson<Vec<TypeInfo>>>(reader)
33 |             .map_err(TypeParseError::from)?;
34 | 
35 |         self.version = json.version;
36 | 
37 |         Ok(self.extend(json.types))
38 |     }
39 | 
40 |     pub fn load_from_executable(
41 |         &mut self,
42 |         path: impl AsRef<Path>,
43 |         deserialize_default_values: bool
44 |     ) -> Result<usize, ReflectionParseError> {
45 |         parser::extract_reflection_data(path, deserialize_default_values)
46 |             .map(|types| self.extend(types))
47 |     }
48 | 
49 |     pub fn dump_to_path(
50 |         &self,
51 |         path: impl AsRef<Path>,
52 |         pretty: bool
53 |     ) -> Result<(), TypeParseError> {
54 |         let file = File::create(path)?;
55 |         let writer = BufWriter::new(file);
56 |         let types = self.types.iter()
57 |             .map(|node| node.as_ref())
58 |             .collect::<Vec<_>>();
59 | 
60 |         let json = TypeCollectionJson {
61 |             version: self.version.clone(),
62 |             types,
63 |         };
64 | 
65 |         if pretty {
66 |             serde_json::to_writer_pretty(writer, &json)?;
67 |         } else {
68 |             serde_json::to_writer(writer, &json)?;
69 |         }
70 | 
71 |         Ok(())
72 |     }
73 | 
74 |     pub fn get_type(
75 |         &self,
76 |         index: usize,
77 |     ) -> Option<&TypeInfo> {
78 |         self.types.get(index)
79 |             .map(|node| node.as_ref())
80 |     }
81 | 
82 |     pub fn get_type_by_qualified_hash(
83 |         &self,
84 |         hash: u32
85 |     ) -> Option<&TypeInfo> {
86 |         self.types_by_qualified_hash.get(&hash)
87 |             .map(|node| node.as_ref())
88 |     }
89 | 
90 |     pub fn get_type_by_impact_hash(
91 |         &self,
92 |         hash: u32
93 |     ) -> Option<&TypeInfo> {
94 |         self.types_by_impact_hash.get(&hash)
95 |             .map(|node| node.as_ref())
96 |     }
97 | 
98 |     pub fn get_type_by_qualified_name(
99 |         &self,
100 |         name: &str
101 |     ) -> Option<&TypeInfo> {
102 |         self.get_type_by_qualified_hash(fnv(name.as_bytes()))
103 |     }
104 | 
105 |     pub fn get_type_by_impact_name(
106 |         &self,
107 |         name: &str
108 |     ) -> Option<&TypeInfo> {
109 |         self.get_type_by_impact_hash(fnv(name.as_bytes()))
110 |     }
111 | 
112 |     pub fn get_inheritance_chain<'a>(&'a self, node: &'a TypeInfo) -> Vec<&'a TypeInfo> {
113 |         let mut chain = Vec::new();
114 |         let mut current = node;
115 | 
116 |         loop {
117 |             chain.push(current);
118 | 
119 |             if let Some(parent) = &current.inner_type {
120 |                 if let Some(parent) = self.get_type(*parent) {
121 |                     current = parent;
122 |                 } else {
123 |                     break;
124 |                 }
125 |             } else {
126 |                 break;
127 |             }
128 |         }
129 | 
130 |         chain
131 |     }
132 | 
133 |     pub fn clear(&mut self) {
134 |         self.types.clear();
135 |         self.types_by_qualified_hash.clear();
136 |         self.types_by_impact_hash.clear();
137 |     }
138 | 
139 |     pub fn extend(&mut self, types: Vec<TypeInfo>) -> usize {
140 |         let len = types.len();
141 | 
142 |         // TODO: Handle duplicates properly
143
| 144 | for entry in types { 145 | let value = Arc::new(entry); 146 | 147 | if !value.flags.contains(TypeFlags::HAS_DS) { 148 | if self.types_by_impact_hash.contains_key(&value.impact_hash) { 149 | panic!("Duplicate impact hash: {:#010X}", value.impact_hash); 150 | } 151 | 152 | self.types_by_impact_hash.insert(value.impact_hash, value.clone()); 153 | } 154 | 155 | if self.types_by_qualified_hash.contains_key(&value.qualified_hash) { 156 | panic!("Duplicate qualified hash: {:#010X}", value.qualified_hash); 157 | } 158 | 159 | self.types_by_qualified_hash.insert(value.qualified_hash, value.clone()); 160 | self.types.push(value); 161 | } 162 | 163 | len 164 | } 165 | 166 | pub fn len(&self) -> usize { 167 | self.types.len() 168 | } 169 | 170 | pub fn is_empty(&self) -> bool { 171 | self.types.is_empty() 172 | } 173 | 174 | pub fn iter(&self) -> impl Iterator { 175 | self.types.iter() 176 | .map(|node| node.as_ref()) 177 | } 178 | 179 | pub fn iter_arc(&self) -> impl Iterator> { 180 | self.types.iter() 181 | } 182 | 183 | /// Consumes the collection and returns the inner types. 184 | /// 185 | /// # Errors 186 | /// If there are still strong references to the types in the collection, 187 | /// it will return an error with the unchanged collection. 188 | /// 189 | /// # Panics 190 | /// It may panic if another thread creates a new strong 191 | /// reference to a type while this method is running. 192 | #[allow(clippy::result_large_err)] 193 | pub fn into_inner(self) -> Result, TypeCollection> { 194 | for node in &self.types { 195 | if Arc::strong_count(node) > 3 { 196 | return Err(self); 197 | } 198 | } 199 | 200 | drop(self.types_by_impact_hash); 201 | drop(self.types_by_qualified_hash); 202 | 203 | // panics if another thread creates a new strong reference 204 | let result = self.types.into_iter() 205 | .map(|node| Arc::try_unwrap(node).unwrap()) 206 | .collect::>(); 207 | 208 | Ok(result) 209 | } 210 | } 211 | -------------------------------------------------------------------------------- /parser/src/data/impact/bytecode/data.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | use shared::hash::fnv; 3 | use std::cmp::max; 4 | use std::collections::HashMap; 5 | 6 | use crate::data::hash_types::HashKey32; 7 | use crate::guid::BlobGuid; 8 | use crate::reflection::TypeCollection; 9 | 10 | use super::{EventStream, ImpactProgram}; 11 | use super::{ImpactCommand, ImpactVariable}; 12 | 13 | #[derive(Debug, Serialize, Deserialize)] 14 | #[serde(rename_all = "camelCase")] 15 | pub struct ImpactProgramData { 16 | pub data: Vec, 17 | pub used_streams: Vec, 18 | } 19 | 20 | #[derive(Debug, Serialize, Deserialize)] 21 | #[serde(rename_all = "camelCase")] 22 | pub struct ImpactProgramDataEntry { 23 | pub name: String, 24 | pub r#type: String, 25 | pub config_id: u32, 26 | pub data: serde_json::Value, 27 | #[serde(skip_serializing_if = "Option::is_none")] 28 | pub parent: Option, 29 | } 30 | 31 | #[derive(Debug, Serialize, Deserialize)] 32 | #[serde(rename_all = "camelCase")] 33 | pub struct ImpactProgramDataMapping { 34 | pub parent_name: String, 35 | pub name: String, 36 | pub field_name: String, 37 | } 38 | 39 | // TODO: Proper error handling 40 | 41 | impl ImpactProgramData { 42 | pub fn from_program( 43 | type_collection: &TypeCollection, 44 | program: &ImpactProgram, 45 | ) -> anyhow::Result { 46 | let mut data = Vec::new(); 47 | let mut mapping_offsets = HashMap::new(); 48 | 49 | for (i, layout) in 
program.data_layout.iter().enumerate() { 50 | let type_info = type_collection 51 | .get_type_by_impact_hash(layout.r#type.value) 52 | .ok_or_else(|| anyhow::anyhow!("Type not found: {}", layout.r#type.value))?; 53 | 54 | let start = layout.offset_in_bytes as usize; 55 | let end = start + layout.size as usize; 56 | 57 | if let Some(i) = mapping_offsets.get(&start) { 58 | let entry: &ImpactProgramDataEntry = &data[*i]; 59 | let parent_layout: &ImpactVariable = &program.data_layout[*i]; 60 | let field_offset = (layout.offset_in_bytes - parent_layout.offset_in_bytes) as u64; 61 | let parent_type = type_collection 62 | .get_type_by_qualified_name(&entry.r#type) 63 | .unwrap(); // already checked 64 | let field_name = parent_type.struct_fields.iter() 65 | .find(|x| x.data_offset == field_offset) 66 | .map(|x| x.name.clone()) 67 | .ok_or_else(|| anyhow::anyhow!("Field not found: {}", layout.dbg_name))?; 68 | 69 | data.push(ImpactProgramDataEntry { 70 | name: layout.dbg_name.clone(), 71 | r#type: type_info.qualified_name.clone(), 72 | config_id: layout.config_id.value, 73 | data: serde_json::Value::Null, 74 | parent: Some(ImpactProgramDataMapping { 75 | parent_name: entry.name.clone(), 76 | name: layout.dbg_name.clone(), 77 | field_name 78 | }), 79 | }); 80 | } else { 81 | let json = type_collection.deserialize( 82 | type_info, 83 | &program.data[start..end] 84 | )?; 85 | 86 | data.push(ImpactProgramDataEntry { 87 | name: layout.dbg_name.clone(), 88 | r#type: type_info.qualified_name.clone(), 89 | config_id: layout.config_id.value, 90 | data: json, 91 | parent: None, 92 | }); 93 | } 94 | 95 | for field in &type_info.struct_fields { 96 | mapping_offsets.insert(start + field.data_offset as usize, i); 97 | } 98 | } 99 | 100 | Ok(ImpactProgramData { 101 | data, 102 | used_streams: program.used_streams.clone(), 103 | }) 104 | } 105 | 106 | pub fn into_program( 107 | self, 108 | type_collection: &TypeCollection, 109 | guid: BlobGuid, 110 | code: Vec, 111 | code_shutdown: Vec, 112 | ) -> anyhow::Result { 113 | let id = HashKey32::from(guid.hash32()); 114 | let program_guid = guid; 115 | let stack_size = 256; 116 | let used_streams = self.used_streams; 117 | 118 | let mut buf = Vec::new(); 119 | let mut data = Vec::new(); 120 | let mut data_layout = Vec::::new(); 121 | 122 | for entry in self.data.into_iter() { 123 | let dbg_name = entry.name; 124 | let config_id = HashKey32::from(entry.config_id); 125 | let name = HashKey32::from(fnv(dbg_name.as_bytes())); 126 | 127 | let type_info = type_collection 128 | .get_type_by_qualified_name(&entry.r#type) 129 | .ok_or_else(|| anyhow::anyhow!("Type not found: {}", entry.r#type))?; 130 | let r#type = HashKey32::from(type_info.impact_hash); 131 | 132 | if let Some(mapping) = entry.parent { 133 | let parent_hash = fnv(mapping.parent_name.as_bytes()); 134 | let parent_entry = data_layout.iter() 135 | .find(|x| x.name.value == parent_hash) 136 | .ok_or_else(|| anyhow::anyhow!("Parent not found: {}", mapping.parent_name))?; 137 | let parent_type = type_collection 138 | .get_type_by_impact_hash(parent_entry.r#type.value) 139 | .unwrap(); // already checked 140 | let field = parent_type.struct_fields.iter() 141 | .find(|x| x.name == mapping.field_name) 142 | .ok_or_else(|| anyhow::anyhow!("Field not found: {}", mapping.field_name))?; 143 | 144 | data_layout.push(ImpactVariable { 145 | name, 146 | config_id, 147 | r#type, 148 | size: type_info.size as u16, 149 | offset_in_bytes: parent_entry.offset_in_bytes + field.data_offset as u16, 150 | dbg_name, 151 | }); 152 | } 
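// Note: the branch above emits no bytes of its own. An entry with a `parent`
// mapping is an alias for a field inside an already written variable, so only
// its layout record is pushed; the `else` branch below handles standalone
// entries, which get their own aligned slot in `data`.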
else {
153 |                 if !entry.data.is_null() {
154 |                     buf.clear();
155 |                     type_collection.serialize_into(type_info, &entry.data, &mut buf)?;
156 |                 } else {
157 |                     buf.clear();
158 |                     buf.resize(type_info.size as usize, 0);
159 |                 }
160 | 
161 |                 let alignment = max(buf.len(), 16);
162 |                 let padding = (alignment - (data.len() % alignment)) % alignment;
163 |                 data.resize(data.len() + padding, 0);
164 | 
165 |                 let offset_in_bytes = data.len() as u16;
166 |                 let size = buf.len() as u16;
167 | 
168 |                 data.extend(&buf);
169 |                 data_layout.push(ImpactVariable {
170 |                     name,
171 |                     config_id,
172 |                     r#type,
173 |                     size,
174 |                     offset_in_bytes,
175 |                     dbg_name,
176 |                 });
177 |             }
178 |         }
179 | 
180 |         Ok(ImpactProgram {
181 |             id,
182 |             program_guid,
183 |             stack_size,
184 |             used_streams,
185 |             code,
186 |             code_shutdown,
187 |             data_layout,
188 |             data,
189 |         })
190 |     }
191 | 
192 | }
--------------------------------------------------------------------------------
/parser/src/reflection/pe_file.rs:
--------------------------------------------------------------------------------
 1 | use std::io::{Cursor, Read, Seek, SeekFrom};
 2 | use std::path::Path;
 3 | 
 4 | use shared::io::ReadExt;
 5 | 
 6 | use super::PEParseError;
 7 | 
 8 | pub const DOS_SIGNATURE: u16 = 0x5A4D;
 9 | pub const NT_SIGNATURE: u32 = 0x00004550;
10 | 
11 | pub const PE32_MAGIC: u16 = 0x010B;
12 | pub const PE32_PLUS_MAGIC: u16 = 0x020B;
13 | 
14 | #[derive(Debug)]
15 | struct Section {
16 |     name: String,
17 |     // virtual_size: u32,
18 |     virtual_address: u32,
19 |     size_of_raw_data: u32,
20 |     pointer_to_raw_data: u32,
21 | }
22 | 
23 | pub struct PEFile {
24 |     data: Vec<u8>,
25 |     sections: Vec<Section>
, 26 | image_base: u64, 27 | } 28 | 29 | impl PEFile { 30 | pub fn load_from_file(path: impl AsRef) -> Result { 31 | // load things in memory since we need to read through the file multiple times 32 | let data = std::fs::read(path)?; 33 | let mut reader = Cursor::new(&data); 34 | 35 | reader.seek(SeekFrom::Start(0))?; 36 | 37 | let dos_signature = reader.read_u16()?; 38 | if dos_signature != DOS_SIGNATURE { 39 | return Err(PEParseError::InvalidDosSignature); 40 | } 41 | 42 | reader.seek(SeekFrom::Start(60))?; // jump to e_lfanew 43 | let pe_offset = reader.read_u32()?; 44 | 45 | // coff header 46 | reader.seek(SeekFrom::Start(pe_offset as u64))?; 47 | let pe_signature = reader.read_u32()?; 48 | 49 | if pe_signature != NT_SIGNATURE { 50 | return Err(PEParseError::InvalidNTSignature); 51 | } 52 | 53 | let _machine = reader.read_u16()?; 54 | let number_of_sections = reader.read_u16()?; 55 | let _time_date_stamp = reader.read_u32()?; 56 | let _pointer_to_symbol_table = reader.read_u32()?; 57 | let _number_of_symbols = reader.read_u32()?; 58 | let size_of_optional_header = reader.read_u16()?; 59 | let _characteristics = reader.read_u16()?; 60 | 61 | // optional header 62 | let opt_header_start = reader.stream_position()?; 63 | let magic = reader.read_u16()?; 64 | let image_base = match magic { 65 | PE32_MAGIC => { 66 | panic!("PE32 not supported"); 67 | // reader.seek(SeekFrom::Current(26))?; // skip to image base 68 | // reader.read_u32()? as u64 69 | } 70 | PE32_PLUS_MAGIC => { 71 | reader.seek(SeekFrom::Current(22))?; // skip to image base 72 | reader.read_u64()? 73 | } 74 | _ => return Err(PEParseError::UnsupportedPEType), 75 | }; 76 | 77 | reader.seek(SeekFrom::Start(opt_header_start + size_of_optional_header as u64))?; // skip optional header 78 | 79 | // section headers 80 | let mut sections = Vec::new(); 81 | 82 | for _ in 0..number_of_sections { 83 | let mut name = [0; 8]; 84 | reader.read_exact(&mut name)?; 85 | let index_of_nul = name.iter() 86 | .position(|&c| c == 0) 87 | .ok_or(PEParseError::MalformedSectionName)?; 88 | let name = String::from_utf8(name[..index_of_nul].to_vec()) 89 | .map_err(|_| PEParseError::MalformedSectionName)?; 90 | 91 | let _virtual_size = reader.read_u32()?; 92 | let virtual_address = reader.read_u32()?; 93 | let size_of_raw_data = reader.read_u32()?; 94 | let pointer_to_raw_data = reader.read_u32()?; 95 | let _pointer_to_relocations = reader.read_u32()?; 96 | let _pointer_to_line_numbers = reader.read_u32()?; 97 | let _number_of_relocations = reader.read_u16()?; 98 | let _number_of_line_numbers = reader.read_u16()?; 99 | let _characteristics = reader.read_u32()?; 100 | 101 | sections.push(Section { 102 | name, 103 | // virtual_size, 104 | virtual_address, 105 | size_of_raw_data, 106 | pointer_to_raw_data, 107 | }); 108 | } 109 | 110 | Ok(PEFile { 111 | data, 112 | sections, 113 | image_base, 114 | }) 115 | } 116 | 117 | pub fn va_to_fo(&self, va: u64) -> Option { 118 | if va < self.image_base { 119 | return None; 120 | } 121 | 122 | let va = (va - self.image_base) as u32; 123 | 124 | for section in &self.sections { 125 | let start = section.virtual_address; 126 | let end = start + section.size_of_raw_data; 127 | 128 | if va >= start && va < end { 129 | return Some((section.pointer_to_raw_data + (va - start)) as u64); 130 | } 131 | } 132 | 133 | None 134 | } 135 | 136 | pub fn fo_to_va(&self, offset: u64) -> Option { 137 | let offset = offset as u32; 138 | 139 | for section in &self.sections { 140 | let start = section.pointer_to_raw_data; 141 | let end = 
start + section.size_of_raw_data; 142 | 143 | if offset >= start && offset < end { 144 | return Some((section.virtual_address + (offset - start)) as u64 + self.image_base); 145 | } 146 | } 147 | 148 | None 149 | } 150 | 151 | pub fn find( 152 | &self, 153 | from_offset: u64, 154 | needle: [u8; N], 155 | alignment: usize, 156 | ) -> Option { 157 | let haystack = &self.data[from_offset as usize..]; 158 | let mut ptr = 0; 159 | 160 | while ptr < haystack.len() - N { 161 | if haystack[ptr..].starts_with(&needle) { 162 | return Some(ptr as u64 + from_offset); 163 | } 164 | 165 | ptr += alignment; 166 | } 167 | 168 | // try again without alignment 169 | if alignment > 1 { 170 | return self.find(from_offset, needle, 1); 171 | } 172 | 173 | None 174 | } 175 | 176 | pub fn offset_to_section(&self, name: &str) -> Option { 177 | self.sections.iter() 178 | .find(|s| s.name == name) 179 | .map(|s| s.pointer_to_raw_data as u64) 180 | } 181 | 182 | pub fn find_pointer_to_0va( 183 | &self, 184 | from_offset: u64, 185 | va: u64, 186 | ) -> Option { 187 | let needle = super::util::prefix_pattern::<8, 9>( 188 | va.to_le_bytes(), 189 | 0x00 190 | ); 191 | 192 | self.find(from_offset - 1, needle, 8) 193 | .map(|offset| offset + 1) 194 | } 195 | 196 | pub fn get_cursor_at(&self, offset: u64) -> std::io::Result> { 197 | self.data.get(offset as usize..) 198 | .map(Cursor::new) 199 | .ok_or(std::io::Error::new(std::io::ErrorKind::Other, "Out of bounds")) 200 | } 201 | 202 | } 203 | 204 | pub trait ReadPEExt { 205 | fn read_pointee<'a>(&mut self, file: &'a PEFile) -> std::io::Result>; 206 | fn read_pointee_opt<'a>(&mut self, file: &'a PEFile) -> std::io::Result>>; 207 | } 208 | 209 | impl ReadPEExt for R { 210 | 211 | fn read_pointee<'a>(&mut self, file: &'a PEFile) -> std::io::Result> { 212 | file.va_to_fo(self.read_u64()?) 
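// translate the pointer's virtual address into a file offset; `None` here
// (a target outside every section) becomes the io::Error below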
213 | .and_then(|offset| file.data.get(offset as usize..)) 214 | .map(Cursor::new) 215 | .ok_or(std::io::Error::new(std::io::ErrorKind::Other, "Could not read pointee")) 216 | } 217 | 218 | fn read_pointee_opt<'a>(&mut self, file: &'a PEFile) -> std::io::Result>> { 219 | let va = self.read_u64()?; 220 | 221 | if va == 0 { 222 | return Ok(None); 223 | } 224 | 225 | file.va_to_fo(va) 226 | .and_then(|offset| file.data.get(offset as usize..)) 227 | .map(Cursor::new) 228 | .map(Some) 229 | .ok_or(std::io::Error::new(std::io::ErrorKind::Other, "Could not read pointee")) 230 | } 231 | 232 | } 233 | -------------------------------------------------------------------------------- /parser/src/guid/descriptor.rs: -------------------------------------------------------------------------------- 1 | use std::fmt::{Debug, Display}; 2 | use std::io::{Read, Write}; 3 | use std::str::FromStr; 4 | 5 | use serde::Deserialize; 6 | use shared::hash::fnv_with_seed; 7 | use shared::io::{ReadExt, WriteExt}; 8 | 9 | use crate::container::StaticHash; 10 | use crate::Hash32; 11 | 12 | use super::BlobGuid; 13 | 14 | #[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Default)] 15 | pub struct DescriptorGuid { 16 | pub data: [u8; 16], 17 | pub type_hash: Hash32, 18 | pub part_number: u32, 19 | // pub reserved: u64, 20 | } 21 | 22 | impl DescriptorGuid { 23 | 24 | pub const NONE: DescriptorGuid = DescriptorGuid { 25 | data: [0; 16], 26 | type_hash: 0, 27 | part_number: 0, 28 | }; 29 | 30 | pub fn from_bytes(data: [u8; 16], type_hash: Hash32, part_number: u32) -> Self { 31 | Self { 32 | data, 33 | type_hash, 34 | part_number, 35 | } 36 | } 37 | 38 | /// Create a new DescriptorGuid from a string with following format: 39 | /// `XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX` 40 | /// where `X` is a hexadecimal digit. 41 | /// 42 | /// If the string is not in the correct format, `None` is returned. 43 | pub fn from_str(s: &str, type_hash: Hash32, part_number: u32) -> Option { 44 | if s.len() != 36 { 45 | return None; 46 | } 47 | 48 | if !is_hex_slice(&s[0..8]) || 49 | !is_separator(s[8..].chars().next().unwrap()) || 50 | !is_hex_slice(&s[9..13]) || 51 | !is_separator(s[13..].chars().next().unwrap()) || 52 | !is_hex_slice(&s[14..18]) || 53 | !is_separator(s[18..].chars().next().unwrap()) || 54 | !is_hex_slice(&s[19..23]) || 55 | !is_separator(s[23..].chars().next().unwrap()) || 56 | !is_hex_slice(&s[24..36]) 57 | { 58 | return None; 59 | } 60 | 61 | let mut data = [0; 16]; 62 | 63 | data[0..4].copy_from_slice(&u32::from_str_radix(&s[0..8], 16).unwrap().to_le_bytes()); 64 | data[4..6].copy_from_slice(&u16::from_str_radix(&s[9..13], 16).unwrap().to_le_bytes()); 65 | data[6..8].copy_from_slice(&u16::from_str_radix(&s[14..18], 16).unwrap().to_le_bytes()); 66 | data[8..10].copy_from_slice(&u16::from_str_radix(&s[19..23], 16).unwrap().to_be_bytes()); 67 | data[10..14].copy_from_slice(&u32::from_str_radix(&s[24..32], 16).unwrap().to_be_bytes()); 68 | data[14..16].copy_from_slice(&u16::from_str_radix(&s[32..36], 16).unwrap().to_be_bytes()); 69 | 70 | Some(Self { 71 | data, 72 | type_hash, 73 | part_number, 74 | }) 75 | } 76 | 77 | /// Create a new DescriptorGuid from a string with following format: 78 | /// `XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX_XXXXXXXX_N` 79 | /// where `X` is a hexadecimal digit. 80 | /// 81 | /// If the string is not in the correct format, `None` is returned. 
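///
/// A usage sketch (it mirrors the round-trip test at the bottom of this file):
///
/// ```
/// use parser::guid::DescriptorGuid;
///
/// let guid = DescriptorGuid::from_qualified_str(
///     "40e6ba42-a397-5790-a5c9-a4151fffe1c5_647628d6_420"
/// ).unwrap();
/// assert_eq!(guid.type_hash, 0x647628d6);
/// assert_eq!(guid.part_number, 420);
/// ```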
82 | pub fn from_qualified_str(s: &str) -> Option { 83 | if s.len() < 47 { 84 | return None; 85 | } 86 | 87 | if !is_section_separator(s[36..].chars().next().unwrap()) || 88 | !is_hex_slice(&s[37..45]) || 89 | !is_section_separator(s[45..].chars().next().unwrap()) 90 | { 91 | return None; 92 | } 93 | 94 | let type_hash = u32::from_str_radix(&s[37..45], 16).ok()?; 95 | let part_number = u32::from_str(&s[46..]).ok()?; 96 | 97 | Self::from_str(&s[0..36], type_hash, part_number) 98 | } 99 | 100 | /// Convert the DescriptorGuid to a string with following format: 101 | /// `XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX` 102 | /// where `X` is a hexadecimal digit. 103 | /// 104 | /// Type hash and part number are not included in the string. 105 | #[allow(clippy::inherent_to_string_shadow_display)] // this is intentional 106 | pub fn to_string(&self) -> String { 107 | let part_0 = u32::from_le_bytes([self.data[0], self.data[1], self.data[2], self.data[3]]); 108 | let part_1 = u16::from_le_bytes([self.data[4], self.data[5]]); 109 | let part_2 = u16::from_le_bytes([self.data[6], self.data[7]]); 110 | let part_3 = u16::from_be_bytes([self.data[8], self.data[9]]); 111 | let part_4 = u32::from_be_bytes([self.data[10], self.data[11], self.data[12], self.data[13]]); 112 | let part_5 = u16::from_be_bytes([self.data[14], self.data[15]]); 113 | 114 | format!("{:0>8x}-{:0>4x}-{:0>4x}-{:0>4x}-{:0>8x}{:0>4x}", part_0, part_1, part_2, part_3, part_4, part_5) 115 | } 116 | 117 | /// Convert the DescriptorGuid to a qualified string with following format: 118 | /// `XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX_XXXXXXXX_N` 119 | /// where `X` is a hexadecimal digit and `N` is a 32-bit decimal integer. 120 | pub fn to_qualified_string(&self) -> String { 121 | format!("{}_{:0>8x}_{}", self.to_string(), self.type_hash, self.part_number) 122 | } 123 | 124 | pub fn hash32(&self) -> Hash32 { 125 | self.as_blob_guid().hash32() 126 | } 127 | 128 | pub fn is_none(&self) -> bool { 129 | self.data == [0; 16] 130 | } 131 | 132 | pub fn as_blob_guid(&self) -> BlobGuid { 133 | BlobGuid { 134 | data: self.data, 135 | } 136 | } 137 | 138 | pub fn with_type_hash(&self, type_hash: Hash32) -> Self { 139 | Self { 140 | data: self.data, 141 | type_hash, 142 | part_number: self.part_number, 143 | } 144 | } 145 | 146 | pub fn with_part_number(&self, part_number: u32) -> Self { 147 | Self { 148 | data: self.data, 149 | type_hash: self.type_hash, 150 | part_number, 151 | } 152 | } 153 | 154 | } 155 | 156 | impl StaticHash for DescriptorGuid { 157 | fn static_hash(&self) -> u32 { 158 | let seed = u32::from_le_bytes(self.data[0..4].try_into().unwrap()); 159 | let mut rest = [0u8; 8]; 160 | rest[0..4].copy_from_slice(self.type_hash.to_le_bytes().as_ref()); 161 | rest[4..8].copy_from_slice(self.part_number.to_le_bytes().as_ref()); 162 | 163 | fnv_with_seed(rest, seed) 164 | } 165 | } 166 | 167 | impl Display for DescriptorGuid { 168 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 169 | write!(f, "{}", self.to_string()) 170 | } 171 | } 172 | 173 | impl DescriptorGuid { 174 | 175 | pub fn read(reader: &mut R) -> std::io::Result { 176 | let mut data = [0; 16]; 177 | reader.read_exact(&mut data)?; 178 | 179 | let type_hash = reader.read_u32()?; 180 | let part_number = reader.read_u32()?; 181 | reader.padding(8)?; 182 | 183 | Ok(Self { 184 | data, 185 | type_hash, 186 | part_number, 187 | }) 188 | } 189 | 190 | pub fn write(&self, writer: &mut W) -> std::io::Result<()> { 191 | writer.write_all(&self.data)?; 192 | 
writer.write_u32(self.type_hash)?; 193 | writer.write_u32(self.part_number)?; 194 | writer.padding(8)?; 195 | 196 | Ok(()) 197 | } 198 | 199 | } 200 | 201 | impl<'de> Deserialize<'de> for DescriptorGuid { 202 | fn deserialize(deserializer: D) -> Result 203 | where 204 | D: serde::Deserializer<'de>, 205 | { 206 | let s = String::deserialize(deserializer)?; 207 | DescriptorGuid::from_qualified_str(&s) 208 | .ok_or_else(|| serde::de::Error::custom("invalid DescriptorGuid")) 209 | } 210 | } 211 | 212 | impl serde::Serialize for DescriptorGuid { 213 | fn serialize(&self, serializer: S) -> Result 214 | where 215 | S: serde::Serializer, 216 | { 217 | self.to_qualified_string().serialize(serializer) 218 | } 219 | } 220 | 221 | #[inline] 222 | fn is_hex_slice(s: &str) -> bool { 223 | s.chars().all(|c| c.is_ascii_hexdigit()) 224 | } 225 | 226 | #[inline] 227 | fn is_separator(c: char) -> bool { 228 | c == '-' 229 | } 230 | 231 | #[inline] 232 | fn is_section_separator(c: char) -> bool { 233 | c == '_' 234 | } 235 | 236 | #[cfg(test)] 237 | mod test { 238 | #[test] 239 | fn test_guid_qualified_string() { 240 | const GUID: &str = "40e6ba42-a397-5790-a5c9-a4151fffe1c5_647628d6_420"; 241 | 242 | let guid = super::DescriptorGuid::from_qualified_str(GUID).unwrap(); 243 | 244 | assert_eq!(guid.to_qualified_string(), GUID); 245 | } 246 | 247 | #[test] 248 | fn test_guid_string() { 249 | const GUID: &str = "40e6ba42-a397-5790-a5c9-a4151fffe1c5"; 250 | 251 | let guid = super::DescriptorGuid::from_str(GUID, 0, 0).unwrap(); 252 | 253 | assert_eq!(guid.to_string(), GUID); 254 | } 255 | } 256 | -------------------------------------------------------------------------------- /parser/src/container/writer.rs: -------------------------------------------------------------------------------- 1 | use std::{fs::File, io::{BufWriter, Cursor, Read, Seek, SeekFrom, Write}, path::{Path, PathBuf}}; 2 | 3 | use shared::io::{WriteExt, WriteSeekExt}; 4 | use serde_json::Value as JsonValue; 5 | 6 | use crate::{guid::{BlobGuid, DescriptorGuid}, reflection::{TypeCollection, WriteError}}; 7 | 8 | use super::{header::{BlobLink, DatInfo, DescriptorLink}, KFCFile, KFCReadError, KFCWriteError, StaticMapBuilder}; 9 | 10 | pub struct KFCWriter<'a, 'b> { 11 | path: PathBuf, 12 | reference_file: &'a KFCFile, 13 | type_collection: &'b TypeCollection, 14 | 15 | descriptors: StaticMapBuilder, 16 | blobs: StaticMapBuilder, 17 | 18 | old_header_space: u64, 19 | default_data_size: u64, 20 | default_data_size_unaligned: u64, 21 | 22 | data_writer: Cursor>, 23 | file: File, 24 | 25 | dat_infos: Vec, 26 | dat_writer: Option>, 27 | } 28 | 29 | impl<'a, 'b> KFCWriter<'a, 'b> { 30 | 31 | pub fn new( 32 | path: &Path, 33 | reference_file: &'a KFCFile, 34 | type_collection: &'b TypeCollection, 35 | ) -> Result { 36 | let current_file = KFCFile::from_path(path, true)?; 37 | let header_space = current_file.data_offset(); 38 | let default_data_size = reference_file.data_size() + (16 - (reference_file.data_size() % 16)) % 16; 39 | let default_data_size_unaligned = reference_file.data_size(); 40 | 41 | drop(current_file); 42 | 43 | let file = File::options().write(true).read(true).open(path)?; 44 | 45 | Ok(Self { 46 | path: path.to_path_buf(), 47 | reference_file, 48 | type_collection, 49 | 50 | descriptors: reference_file.get_descriptor_map().as_builder(), 51 | blobs: reference_file.get_blob_map().as_builder(), 52 | 53 | old_header_space: header_space, 54 | default_data_size, 55 | default_data_size_unaligned, 56 | 57 | data_writer: Cursor::new(Vec::new()), 
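// in-memory staging buffer for new descriptor data; `finalize` splices it in
// after the reused default-data region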
58 | file, 59 | 60 | dat_infos: reference_file.get_dat_infos().to_vec(), 61 | dat_writer: None, 62 | }) 63 | } 64 | 65 | pub fn write_descriptor( 66 | &mut self, 67 | value: &JsonValue 68 | ) -> Result<(), WriteError> { 69 | let (guid, data) = self.type_collection.serialize_descriptor(value)?; 70 | 71 | Ok(self.write_descriptor_bytes(&guid, &data)?) 72 | } 73 | 74 | pub fn write_descriptor_with_guid( 75 | &mut self, 76 | guid: &DescriptorGuid, 77 | value: &JsonValue 78 | ) -> Result<(), WriteError> { 79 | let (_, data) = self.type_collection.serialize_descriptor(value)?; 80 | 81 | Ok(self.write_descriptor_bytes(guid, &data)?) 82 | } 83 | 84 | pub fn write_descriptor_bytes( 85 | &mut self, 86 | guid: &DescriptorGuid, 87 | bytes: &[u8] 88 | ) -> std::io::Result<()> { 89 | let offset = self.data_writer.stream_position()? + self.default_data_size; 90 | self.descriptors.insert(guid.clone(), DescriptorLink { 91 | offset, 92 | size: bytes.len() as u64 93 | }); 94 | 95 | self.data_writer.write_all(bytes)?; 96 | self.data_writer.align(16)?; 97 | 98 | Ok(()) 99 | } 100 | 101 | pub fn write_blob( 102 | &mut self, 103 | guid: &BlobGuid, 104 | data: &[u8], 105 | ) -> std::io::Result<()> { 106 | if guid.size() != data.len() as u32 { 107 | return Err(std::io::Error::new(std::io::ErrorKind::InvalidData, "Blob size mismatch")); 108 | } 109 | 110 | let (dat_writer, dat_index) = self.get_dat_writer()?; 111 | let offset = dat_writer.stream_position()?; 112 | 113 | dat_writer.write_all(data)?; 114 | dat_writer.align(4096)?; 115 | 116 | self.blobs.insert(guid.clone(), BlobLink::new(offset, 0, dat_index)); 117 | 118 | Ok(()) 119 | } 120 | 121 | pub fn finalize(mut self) -> Result<(), KFCWriteError> { 122 | self.finalize_dat_info()?; 123 | 124 | let size = self.data_writer.stream_position()? 
+ self.default_data_size;
125 |         let data = self.data_writer.into_inner();
126 | 
127 |         // `self.dat_infos` was already seeded from the reference file in `new`,
128 |         let dat_infos = self.dat_infos; // so re-merging the reference infos here would duplicate every entry
129 | 
130 |         // header construction
131 | 
132 |         let mut header_writer = BufWriter::new(Cursor::new(Vec::new()));
133 |         let mut file = KFCFile::default();
134 | 
135 |         file.set_game_version(self.reference_file.game_version().to_string());
136 |         file.set_descriptors(self.descriptors.build(), self.type_collection);
137 |         file.set_blobs(self.blobs.build());
138 |         file.set_dat_infos(dat_infos); // was: the reference infos again, which silently dropped any freshly written dat file
139 |         file.set_data_location(0, size);
140 | 
141 |         file.write(&mut header_writer)?;
142 | 
143 |         let header_size = header_writer.stream_position()?;
144 |         let mut available_header_space = self.old_header_space;
145 |         let mut padding = 0;
146 | 
147 |         while available_header_space < header_size {
148 |             // add 64KiB padding to reduce consecutive default data movement
149 |             padding += 0x10000;
150 |             available_header_space += 0x10000;
151 |         }
152 | 
153 |         file.set_data_location(available_header_space, size);
154 | 
155 |         // write data
156 | 
157 |         if padding > 0 {
158 |             Self::copy_within_file(&mut self.file, self.old_header_space, self.default_data_size_unaligned, available_header_space)?;
159 |         }
160 | 
161 |         let mut file_writer = BufWriter::new(self.file);
162 | 
163 |         file_writer.seek(SeekFrom::Start(0))?;
164 |         file.write_info(&mut file_writer)?;
165 |         file_writer.padding(available_header_space - header_size)?;
166 | 
167 |         file_writer.seek(SeekFrom::Current(self.default_data_size as i64))?;
168 |         file_writer.write_all(&data)?;
169 | 
170 |         Ok(())
171 |     }
172 | 
173 |     fn copy_within_file(
174 |         file: &mut File,
175 |         src: u64,
176 |         len: u64,
177 |         dst: u64,
178 |     ) -> std::io::Result<()> {
179 |         if src == dst {
180 |             return Ok(());
181 |         }
182 | 
183 |         if src < dst {
184 |             const BUFFER_SIZE: u64 = 8192;
185 |             let mut buf = vec![0u8; BUFFER_SIZE as usize];
186 |             let mut remaining = len;
187 | 
188 |             file.seek(SeekFrom::End(0))?;
189 | 
190 |             while remaining > 0 {
191 |                 let chunk_len = BUFFER_SIZE.min(remaining);
192 |                 let src_off = src + remaining - chunk_len;
193 |                 let dst_off = dst + remaining - chunk_len;
194 |                 let chunk = &mut buf[..chunk_len as usize];
195 | 
196 |                 file.seek(SeekFrom::Start(src_off))?;
197 |                 file.read_exact(chunk)?;
198 | 
199 |                 file.seek(SeekFrom::Start(dst_off))?;
200 |                 file.write_all(chunk)?;
201 | 
202 |                 remaining -= chunk_len;
203 |             }
204 |         } else {
205 |             const BUFFER_SIZE: u64 = 8192;
206 |             let mut buf = vec![0u8; BUFFER_SIZE as usize];
207 |             let mut remaining = len;
208 | 
209 |             while remaining > 0 {
210 |                 let chunk_len = BUFFER_SIZE.min(remaining);
211 |                 let src_off = src + len - remaining;
212 |                 let dst_off = dst + len - remaining;
213 |                 let chunk = &mut buf[..chunk_len as usize];
214 | 
215 |                 file.seek(SeekFrom::Start(src_off))?;
216 |                 file.read_exact(chunk)?;
217 | 
218 |                 file.seek(SeekFrom::Start(dst_off))?;
219 |                 file.write_all(chunk)?;
220 | 
221 |                 remaining -= chunk_len;
222 |             }
223 |         }
224 | 
225 |         Ok(())
226 |     }
227 | 
228 |     // TODO: Support splitting data into multiple dat files
229 |     fn get_dat_writer(&mut self) -> std::io::Result<(&mut BufWriter<File>, usize)> {
230 |         let index = 0;
231 |         // FIXME: this assumes the reference archive has no pre-existing .dat files;
232 |         // otherwise no writer is ever created and the unwrap below panics
233 |         if index >= self.dat_infos.len() {
234 |             self.finalize_dat_info()?;
235 | 
236 |             self.dat_infos.push(DatInfo::default());
237 |             // Format: FILE_NAME_{INDEX}.dat where INDEX is 3 digits with leading zeros
238 |             let path = self.path.with_file_name(format!("{}_{:03}.dat", self.path.file_stem().and_then(|s| s.to_str()).unwrap_or("data"), index)); // with_extension would have produced "NAME._000.dat", not the documented format
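// File::create truncates any stale .dat left over from a previous run; the
// writer stays open until `finalize_dat_info` stamps its final size below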
239 | self.dat_writer = Some(BufWriter::new(File::create(path)?)); 240 | } 241 | 242 | self.dat_infos[index].count += 1; 243 | 244 | Ok((self.dat_writer.as_mut().unwrap(), index)) 245 | } 246 | 247 | fn finalize_dat_info(&mut self) -> std::io::Result<()> { 248 | if let Some(writer) = self.dat_writer.as_mut() { 249 | writer.flush()?; 250 | self.dat_infos.last_mut().unwrap().size = writer.stream_position()?; 251 | self.dat_writer = None; 252 | } 253 | 254 | Ok(()) 255 | } 256 | 257 | } 258 | -------------------------------------------------------------------------------- /parser/src/data/impact/nodes.rs: -------------------------------------------------------------------------------- 1 | use shared::hash::{fnv, fnv_const}; 2 | use std::collections::HashMap; 3 | 4 | use crate::{reflection::{TypeCollection, TypeInfo}, Hash32}; 5 | 6 | pub const IMPACT_NODE_EXECUTION: &str = "$ImpactNodeExecution"; 7 | pub const IMPACT_NODE_EXECUTION_HASH: u32 = fnv_const(IMPACT_NODE_EXECUTION); 8 | 9 | pub const IMPACT_NODE_EXECUTION_BRANCH: &str = "keen::impact_nodes::ImpactNodeExecutionBranch"; 10 | pub const IMPACT_NODE_EXECUTION_BRANCH_HASH: u32 = fnv_const(IMPACT_NODE_EXECUTION_BRANCH); 11 | 12 | #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] 13 | #[serde(rename_all = "camelCase")] 14 | pub struct ImpactNode<'a> { 15 | pub name: &'a str, 16 | pub hash: Hash32, 17 | pub r#type: ImpactNodeTypeRef<'a>, 18 | pub super_types: Vec>, 19 | pub inputs: Vec>, 20 | pub outputs: Vec>, 21 | pub configs: Vec>, 22 | pub values: Vec>, 23 | } 24 | 25 | #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] 26 | #[serde(rename_all = "camelCase")] 27 | pub struct ImpactNodeTypeRef<'a> { 28 | pub name: &'a str, 29 | pub hash: Hash32, 30 | } 31 | 32 | #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] 33 | #[serde(rename_all = "camelCase")] 34 | pub struct ImpactNodePin<'a> { 35 | pub name: &'a str, 36 | pub r#type: ImpactNodeTypeRef<'a>, 37 | #[serde(default, skip_serializing_if = "is_false")] 38 | pub is_mandatory: bool, 39 | } 40 | 41 | impl ImpactNodePin<'_> { 42 | pub fn is_execution(&self) -> bool { 43 | self.r#type.hash == IMPACT_NODE_EXECUTION_HASH || 44 | self.r#type.hash == IMPACT_NODE_EXECUTION_BRANCH_HASH 45 | } 46 | } 47 | 48 | fn is_false(b: impl std::borrow::Borrow) -> bool { 49 | !b.borrow() 50 | } 51 | 52 | const IMPACT_INPUT: &str = "impact_node_input"; 53 | const IMPACT_OUTPUT: &str = "impact_node_output"; 54 | const IMPACT_CONFIG: &str = "impact_config"; 55 | const IMPACT_VALUE: &str = "impact_node_value"; 56 | const IMPACT_MANDATORY: &str = "impact_mandatory_connection"; 57 | const IMPACT_NODE_SHUTDOWN: &str = "impact_node_shutdown"; 58 | 59 | impl TypeCollection { 60 | pub fn get_impact_nodes(&self) -> HashMap { 61 | let node_types = self.get_impact_node_types(); 62 | let mut nodes = HashMap::with_capacity(node_types.len()); 63 | 64 | for node in node_types.values() { 65 | self.create_node(node, &mut nodes, None); 66 | } 67 | 68 | nodes 69 | } 70 | 71 | fn create_node<'a: 'b, 'b: 'c, 'c>( 72 | &'a self, 73 | node: &'a TypeInfo, 74 | nodes: &'c mut HashMap>, 75 | shutdown_name: Option<&'a str>, 76 | ) -> &'c ImpactNode<'b> { 77 | if nodes.contains_key(&node.name_hash) { 78 | return &nodes[&node.name_hash]; 79 | } 80 | 81 | if shutdown_name.is_none() { 82 | if let Some(shutdown_name) = node.attributes.iter() 83 | .find(|attr| attr.name == IMPACT_NODE_SHUTDOWN) 84 | .map(|attr| attr.value.as_str()) 85 | { 86 | self.create_node(node, nodes, Some(shutdown_name)); 87 | } 88 | } 
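// A node carrying the `impact_node_shutdown` attribute is therefore registered
// twice: once under its own name and once under the attribute's shutdown name,
// so either spelling resolves to the same pin layout.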
89 | 90 | let mut super_types = Vec::new(); 91 | let mut inner = &node.inner_type; 92 | while let Some(ty) = inner { 93 | let info = self.get_type(*ty) 94 | .expect("invalid inner type"); 95 | 96 | super_types.push(ImpactNodeTypeRef { 97 | name: &info.qualified_name, 98 | hash: info.qualified_hash, 99 | }); 100 | 101 | inner = if let Some(ty) = self.get_type(*ty) { 102 | &ty.inner_type 103 | } else { 104 | break; 105 | }; 106 | } 107 | 108 | let mut inputs = Vec::new(); 109 | let mut outputs = Vec::new(); 110 | let mut configs = Vec::new(); 111 | let mut values = Vec::new(); 112 | 113 | if let Some(super_type) = super_types.first() 114 | .map(|ty| ty.hash) 115 | .and_then(|hash| self.get_type_by_qualified_hash(hash)) 116 | { 117 | let super_node = self.create_node(super_type, nodes, None); 118 | 119 | inputs.extend(super_node.inputs.iter().cloned()); 120 | outputs.extend(super_node.outputs.iter().cloned()); 121 | configs.extend(super_node.configs.iter().cloned()); 122 | values.extend(super_node.values.iter().cloned()); 123 | } 124 | 125 | if node.qualified_hash == 2802340932 { 126 | inputs.push(ImpactNodePin { 127 | name: "", 128 | r#type: ImpactNodeTypeRef { 129 | name: IMPACT_NODE_EXECUTION, 130 | hash: IMPACT_NODE_EXECUTION_HASH, 131 | }, 132 | is_mandatory: false, 133 | }); 134 | outputs.push(ImpactNodePin { 135 | name: "", 136 | r#type: ImpactNodeTypeRef { 137 | name: IMPACT_NODE_EXECUTION, 138 | hash: IMPACT_NODE_EXECUTION_HASH, 139 | }, 140 | is_mandatory: false, 141 | }); 142 | } 143 | 144 | inputs.extend( 145 | node.struct_fields.iter() 146 | .filter(|field| field.attributes.iter() 147 | .any(|attr| attr.name == IMPACT_INPUT)) 148 | .map(|field| { 149 | let ty = self.get_type(field.r#type) 150 | .expect("invalid type"); 151 | 152 | ImpactNodePin { 153 | name: &field.name, 154 | r#type: ImpactNodeTypeRef { 155 | name: &ty.qualified_name, 156 | hash: ty.qualified_hash, 157 | }, 158 | is_mandatory: field.attributes.iter() 159 | .any(|attr| attr.name == IMPACT_MANDATORY) 160 | } 161 | }) 162 | ); 163 | 164 | outputs.extend( 165 | node.struct_fields.iter() 166 | .filter(|field| 167 | field.attributes.iter() 168 | .any(|attr| attr.name == IMPACT_OUTPUT) || 169 | self.get_type(field.r#type).unwrap().qualified_hash == IMPACT_NODE_EXECUTION_BRANCH_HASH 170 | ) 171 | .map(|field| { 172 | let ty = self.get_type(field.r#type) 173 | .expect("invalid type"); 174 | 175 | ImpactNodePin { 176 | name: &field.name, 177 | r#type: ImpactNodeTypeRef { 178 | name: &ty.qualified_name, 179 | hash: ty.qualified_hash, 180 | }, 181 | is_mandatory: field.attributes.iter() 182 | .any(|attr| attr.name == IMPACT_MANDATORY) 183 | } 184 | }) 185 | ); 186 | 187 | configs.extend( 188 | node.struct_fields.iter() 189 | .filter(|field| field.attributes.iter() 190 | .any(|attr| attr.name == IMPACT_CONFIG)) 191 | .map(|field| { 192 | let ty = self.get_type(field.r#type) 193 | .expect("invalid type"); 194 | 195 | ImpactNodePin { 196 | name: &field.name, 197 | r#type: ImpactNodeTypeRef { 198 | name: &ty.qualified_name, 199 | hash: ty.qualified_hash, 200 | }, 201 | is_mandatory: false 202 | } 203 | }) 204 | ); 205 | 206 | values.extend( 207 | node.struct_fields.iter() 208 | .filter(|field| field.attributes.iter() 209 | .any(|attr| attr.name == IMPACT_VALUE)) 210 | .map(|field| { 211 | let ty = self.get_type(field.r#type) 212 | .expect("invalid type"); 213 | 214 | ImpactNodePin { 215 | name: &field.name, 216 | r#type: ImpactNodeTypeRef { 217 | name: &ty.qualified_name, 218 | hash: ty.qualified_hash, 219 | }, 220 | 
is_mandatory: false 221 | } 222 | }) 223 | ); 224 | 225 | let node = ImpactNode { 226 | name: shutdown_name.unwrap_or(&node.name), 227 | hash: fnv(shutdown_name.unwrap_or(&node.name)), 228 | r#type: ImpactNodeTypeRef { 229 | name: &node.qualified_name, 230 | hash: node.qualified_hash, 231 | }, 232 | super_types, 233 | inputs, 234 | outputs, 235 | configs, 236 | values, 237 | }; 238 | 239 | nodes.entry(node.hash) 240 | .or_insert_with(|| node) 241 | } 242 | 243 | } 244 | -------------------------------------------------------------------------------- /parser/src/container/header.rs: -------------------------------------------------------------------------------- 1 | use std::io::{Read, Seek, Write}; 2 | use shared::io::{ReadExt, ReadSeekExt, WriteExt, WriteSeekExt}; 3 | 4 | use crate::Hash32; 5 | 6 | use super::{KFCReadError, KFCWriteError}; 7 | 8 | const KFC_DIR_MAGIC: u32 = 0x3243464B; // KFC2 9 | 10 | /// # Layout 11 | /// ```c 12 | /// struct KFCHeader { 13 | /// u32 magic; // KFC_DIR_MAGIC 14 | /// u32 size; 15 | /// u32 unk0; // 12 16 | /// u8 padding[4]; 17 | /// 18 | /// KFCLocation version; 19 | /// KFCLocation dat_infos; 20 | /// 21 | /// KFCLocation unused0; 22 | /// KFCLocation unused1; 23 | /// 24 | /// KFCLocation descriptor_locations; 25 | /// KFCLocation descriptor_indices; 26 | /// 27 | /// KFCLocation blob_buckets; 28 | /// KFCLocation blob_guids; 29 | /// KFCLocation blob_links; 30 | /// 31 | /// KFCLocation descriptor_buckets; 32 | /// KFCLocation descriptor_guids; 33 | /// KFCLocation descriptor_links; 34 | /// 35 | /// KFCLocation group_buckets; 36 | /// KFCLocation group_hashes; 37 | /// KFCLocation group_infos; 38 | /// }; 39 | /// ``` 40 | #[derive(Debug, Clone, Default)] 41 | pub struct KFCHeader { 42 | pub size: u64, 43 | // pub unk0: u32, 44 | 45 | pub version: KFCLocation, 46 | pub dat_infos: KFCLocation, 47 | 48 | pub unused0: KFCLocation, 49 | pub unused1: KFCLocation, 50 | 51 | pub descriptor_locations: KFCLocation, 52 | pub descriptor_indices: KFCLocation, 53 | 54 | pub blob_buckets: KFCLocation, 55 | pub blob_guids: KFCLocation, 56 | pub blob_links: KFCLocation, 57 | 58 | pub descriptor_buckets: KFCLocation, 59 | pub descriptor_guids: KFCLocation, 60 | pub descriptor_links: KFCLocation, 61 | 62 | pub group_buckets: KFCLocation, 63 | pub group_hashes: KFCLocation, 64 | pub group_infos: KFCLocation, 65 | } 66 | 67 | impl KFCHeader { 68 | 69 | pub fn read(reader: &mut R) -> Result { 70 | let magic = reader.read_u32()?; 71 | 72 | if magic != KFC_DIR_MAGIC { 73 | return Err(KFCReadError::InvalidMagic(magic)); 74 | } 75 | 76 | let size = reader.read_u32()? 
as u64; 77 | let _unk0 = reader.read_u32()?; 78 | 79 | reader.padding(4)?; 80 | 81 | let version = KFCLocation::read(reader)?; 82 | let dat_infos = KFCLocation::read(reader)?; 83 | 84 | let unused0 = KFCLocation::read(reader)?; 85 | let unused1 = KFCLocation::read(reader)?; 86 | 87 | let descriptor_locations = KFCLocation::read(reader)?; 88 | let descriptor_indices = KFCLocation::read(reader)?; 89 | 90 | let blob_buckets = KFCLocation::read(reader)?; 91 | let blob_guids = KFCLocation::read(reader)?; 92 | let blob_links = KFCLocation::read(reader)?; 93 | 94 | let descriptor_buckets = KFCLocation::read(reader)?; 95 | let descriptor_guids = KFCLocation::read(reader)?; 96 | let descriptor_links = KFCLocation::read(reader)?; 97 | 98 | let group_buckets = KFCLocation::read(reader)?; 99 | let group_hashes = KFCLocation::read(reader)?; 100 | let group_infos = KFCLocation::read(reader)?; 101 | 102 | Ok(Self { 103 | size, 104 | 105 | version, 106 | dat_infos, 107 | 108 | unused0, 109 | unused1, 110 | 111 | descriptor_locations, 112 | descriptor_indices, 113 | 114 | blob_buckets, 115 | blob_guids, 116 | blob_links, 117 | 118 | descriptor_buckets, 119 | descriptor_guids, 120 | descriptor_links, 121 | 122 | group_buckets, 123 | group_hashes, 124 | group_infos, 125 | }) 126 | } 127 | 128 | pub fn write(&self, writer: &mut W) -> Result<(), KFCWriteError> { 129 | if self.size > u32::MAX as u64 { 130 | return Err(KFCWriteError::SizeTooLarge(self.size)); 131 | } 132 | 133 | writer.write_u32(KFC_DIR_MAGIC)?; 134 | writer.write_u32(self.size as u32)?; 135 | writer.write_u32(12)?; 136 | writer.padding(4)?; 137 | 138 | self.version.write(writer)?; 139 | self.dat_infos.write(writer)?; 140 | 141 | self.unused0.write(writer)?; 142 | self.unused1.write(writer)?; 143 | 144 | self.descriptor_locations.write(writer)?; 145 | self.descriptor_indices.write(writer)?; 146 | 147 | self.blob_buckets.write(writer)?; 148 | self.blob_guids.write(writer)?; 149 | self.blob_links.write(writer)?; 150 | 151 | self.descriptor_buckets.write(writer)?; 152 | self.descriptor_guids.write(writer)?; 153 | self.descriptor_links.write(writer)?; 154 | 155 | self.group_buckets.write(writer)?; 156 | self.group_hashes.write(writer)?; 157 | self.group_infos.write(writer)?; 158 | 159 | Ok(()) 160 | } 161 | 162 | } 163 | 164 | /// # Layout 165 | /// ```c 166 | /// struct KFCLocation { 167 | /// u32 offset; 168 | /// u32 count; 169 | /// }; 170 | /// ``` 171 | #[derive(Debug, Clone, Default)] 172 | pub struct KFCLocation { 173 | pub offset: u64, 174 | pub count: usize, 175 | } 176 | 177 | impl KFCLocation { 178 | 179 | #[inline] 180 | pub fn new(offset: u64, count: usize) -> Self { 181 | Self { 182 | offset, 183 | count, 184 | } 185 | } 186 | 187 | pub fn read(reader: &mut R) -> Result { 188 | let offset = reader.read_u32_offset()?; 189 | let count = reader.read_u32()? 
164 | /// # Layout
165 | /// ```c
166 | /// struct KFCLocation {
167 | ///     u32 offset;
168 | ///     u32 count;
169 | /// };
170 | /// ```
171 | #[derive(Debug, Clone, Default)]
172 | pub struct KFCLocation {
173 |     pub offset: u64,
174 |     pub count: usize,
175 | }
176 | 
177 | impl KFCLocation {
178 | 
179 |     #[inline]
180 |     pub fn new(offset: u64, count: usize) -> Self {
181 |         Self {
182 |             offset,
183 |             count,
184 |         }
185 |     }
186 | 
187 |     pub fn read<R: Read + Seek>(reader: &mut R) -> Result<Self, KFCReadError> {
188 |         let offset = reader.read_u32_offset()?;
189 |         let count = reader.read_u32()? as usize;
190 | 
191 |         Ok(Self {
192 |             offset,
193 |             count,
194 |         })
195 |     }
196 | 
197 |     pub fn write<W: Write + Seek>(&self, writer: &mut W) -> Result<(), KFCWriteError> {
198 |         writer.write_offset(self.offset)?;
199 |         writer.write_u32(self.count as u32)?;
200 | 
201 |         Ok(())
202 |     }
203 | 
204 | }
205 | 
206 | /// # Layout
207 | /// ```c
208 | /// struct DatInfo {
209 | ///     u64 size;
210 | ///     u32 count;
211 | ///     u8 padding[4];
212 | /// };
213 | /// ```
214 | #[derive(Debug, Clone, Default)]
215 | pub struct DatInfo {
216 |     pub size: u64,
217 |     pub count: usize,
218 | }
219 | 
220 | impl DatInfo {
221 | 
222 |     pub fn read<R: Read + Seek>(reader: &mut R) -> Result<Self, KFCReadError> {
223 |         let size = reader.read_u64()?;
224 |         let count = reader.read_u32()? as usize;
225 |         reader.padding(4)?;
226 | 
227 |         Ok(Self {
228 |             size,
229 |             count,
230 |         })
231 |     }
232 | 
233 |     pub fn write<W: Write + Seek>(&self, writer: &mut W) -> Result<(), KFCWriteError> {
234 |         writer.write_u64(self.size)?;
235 |         writer.write_u32(self.count as u32)?;
236 |         writer.padding(4)?;
237 | 
238 |         Ok(())
239 |     }
240 | 
241 | }
242 | 
243 | /// # Layout
244 | /// ```c
245 | /// struct DescriptorLocation {
246 | ///     u32 offset;
247 | ///     u32 size;
248 | ///     u32 count;
249 | /// };
250 | /// ```
251 | #[derive(Debug, Clone, Default)]
252 | pub struct DescriptorLocation {
253 |     pub offset: u64,
254 |     pub size: u64,
255 |     pub count: usize,
256 | }
257 | 
258 | impl DescriptorLocation {
259 | 
260 |     pub fn read<R: Read + Seek>(reader: &mut R) -> Result<Self, KFCReadError> {
261 |         let offset = reader.read_u32()? as u64;
262 |         let size = reader.read_u32()? as u64;
263 |         let count = reader.read_u32()? as usize;
264 | 
265 |         Ok(Self {
266 |             offset,
267 |             size,
268 |             count,
269 |         })
270 |     }
271 | 
272 |     pub fn write<W: Write + Seek>(&self, writer: &mut W) -> Result<(), KFCWriteError> {
273 |         writer.write_u32(self.offset as u32)?;
274 |         writer.write_u32(self.size as u32)?;
275 |         writer.write_u32(self.count as u32)?;
276 | 
277 |         Ok(())
278 |     }
279 | 
280 | }
281 | 
282 | /// # Layout
283 | /// ```c
284 | /// struct BlobLink {
285 | ///     u32 offset;
286 | ///     u16 flags;
287 | ///     u16 dat_index;
288 | ///     u8 padding[8];
289 | /// };
290 | /// ```
291 | #[derive(Debug, Clone, Default)]
292 | pub struct BlobLink {
293 |     pub offset: u64,
294 |     pub flags: u16,
295 |     pub dat_index: usize,
296 | }
297 | 
298 | impl BlobLink {
299 | 
300 |     #[inline]
301 |     pub fn new(offset: u64, flags: u16, dat_index: usize) -> Self {
302 |         Self {
303 |             offset,
304 |             flags,
305 |             dat_index,
306 |         }
307 |     }
308 | 
309 |     pub fn read<R: Read + Seek>(reader: &mut R) -> Result<Self, KFCReadError> {
310 |         let offset = reader.read_u32()? as u64;
311 |         let flags = reader.read_u16()?;
312 |         let dat_index = reader.read_u16()? as usize;
313 |         reader.padding(8)?;
314 | 
315 |         Ok(Self {
316 |             offset,
317 |             flags,
318 |             dat_index,
319 |         })
320 |     }
321 | 
322 |     pub fn write<W: Write + Seek>(&self, writer: &mut W) -> Result<(), KFCWriteError> {
323 |         writer.write_u32(self.offset as u32)?;
324 |         writer.write_u16(self.flags)?;
325 |         writer.write_u16(self.dat_index as u16)?;
326 |         writer.padding(8)?;
327 | 
328 |         Ok(())
329 |     }
330 | 
331 | }
332 | 
333 | /// # Layout
334 | /// ```c
335 | /// struct DescriptorLink {
336 | ///     u32 offset;
337 | ///     u32 size;
338 | /// };
339 | /// ```
340 | #[derive(Debug, Clone, Default)]
341 | pub struct DescriptorLink {
342 |     pub offset: u64,
343 |     pub size: u64,
344 | }
345 | 
346 | impl DescriptorLink {
347 | 
348 |     #[inline]
349 |     pub fn new(offset: u64, size: u64) -> Self {
350 |         Self {
351 |             offset,
352 |             size,
353 |         }
354 |     }
355 | 
356 |     pub fn read<R: Read + Seek>(reader: &mut R) -> Result<Self, KFCReadError> {
357 |         let offset = reader.read_u32()? as u64;
358 |         let size = reader.read_u32()? as u64;
359 | 
360 |         Ok(Self {
361 |             offset,
362 |             size,
363 |         })
364 |     }
365 | 
366 |     pub fn write<W: Write + Seek>(&self, writer: &mut W) -> Result<(), KFCWriteError> {
367 |         writer.write_u32(self.offset as u32)?;
368 |         writer.write_u32(self.size as u32)?;
369 | 
370 |         Ok(())
371 |     }
372 | 
373 | }
374 | 
375 | /// # Layout
376 | /// ```c
377 | /// struct GroupInfo {
378 | ///     u32 internal_hash;
379 | ///     u32 index;
380 | ///     u32 count;
381 | /// };
382 | /// ```
383 | #[derive(Debug, Clone, Default)]
384 | pub struct GroupInfo {
385 |     pub internal_hash: Hash32,
386 |     pub index: usize,
387 |     pub count: usize,
388 | }
389 | 
390 | impl GroupInfo {
391 | 
392 |     pub fn read<R: Read + Seek>(reader: &mut R) -> Result<Self, KFCReadError> {
393 |         let internal_hash = reader.read_u32()?;
394 |         let index = reader.read_u32()? as usize;
395 |         let count = reader.read_u32()? as usize;
396 | 
397 |         Ok(Self {
398 |             internal_hash,
399 |             index,
400 |             count,
401 |         })
402 |     }
403 | 
404 |     pub fn write<W: Write + Seek>(&self, writer: &mut W) -> Result<(), KFCWriteError> {
405 |         writer.write_u32(self.internal_hash)?;
406 |         writer.write_u32(self.index as u32)?;
407 |         writer.write_u32(self.count as u32)?;
408 | 
409 |         Ok(())
410 |     }
411 | 
412 | }
413 | 
--------------------------------------------------------------------------------
/parser/src/data/impact/bytecode/parser.rs:
--------------------------------------------------------------------------------
1 | use shared::hash::fnv;
2 | use std::cell::{Ref, RefCell};
3 | use std::collections::HashMap;
4 | 
5 | use super::token::*;
6 | use super::tokenizer::Tokenizer;
7 | use super::{ImpactNode, ImpactOps, ImpactProgramData, ParseError, ParseErrorKind};
8 | 
9 | pub struct Parser<'a, 'b, 'c> {
10 |     nodes: &'b HashMap<u32, ImpactNode<'b>>,
11 |     impact_data: &'c ImpactProgramData,
12 |     tokenizer: RefCell<Tokenizer<'a>>,
13 |     next: RefCell<Token<'a>>,
14 | }
15 | 
16 | impl<'a, 'b, 'c> Parser<'a, 'b, 'c> {
17 |     pub fn new(
18 |         nodes: &'b HashMap<u32, ImpactNode<'b>>,
19 |         impact_data: &'c ImpactProgramData,
20 |         content: &'a str
21 |     ) -> Self {
22 |         let mut tokenizer = Tokenizer::new(content);
23 |         let next = tokenizer.advance();
24 | 
25 |         Self {
26 |             nodes,
27 |             impact_data,
28 |             tokenizer: tokenizer.into(),
29 |             next: next.into(),
30 |         }
31 |     }
32 | 
33 |     pub fn parse(&'a self) -> Result<Vec<ImpactOps>, ParseError> {
34 |         let mut labels = HashMap::new();
35 |         let mut label_mappings = HashMap::new();
36 |         let mut instructions = Vec::new();
37 |         let mut pc = 0;
38 | 
39 |         while self.peek().kind != TokenKind::Eof {
40 |             let token = self.expect(TokenKind::Identifier)?;
41 |             let identifier = token.content;
42 | 
43 |             if let Some(stripped) = identifier.strip_suffix(':') {
44 |                 if !labels.contains_key(stripped) {
45 |                     labels.insert(stripped, pc);
46 |                 } else {
47 |                     return Err(ParseError {
48 |                         span: token.span,
49 |                         kind: ParseErrorKind::DuplicateLabel {
50 |                             label: stripped.to_string(),
51 |                         },
52 |                     });
53 |                 }
54 |             } else if let Some(keyword) = Self::match_keyword(identifier) {
55 |                 let instruction = self.parse_instruction(keyword, &mut label_mappings)?;
56 |                 pc += instruction.size();
57 |                 instructions.push(instruction);
58 |             } else {
59 |                 return Err(ParseError {
60 |                     span: token.span,
61 |                     kind: ParseErrorKind::Expected {
62 |                         expected: "instruction or label".to_string(),
63 |                         found: identifier.to_string(),
64 |                     },
65 |                 });
66 |             }
67 |         }
68 | 
69 |         for instruction in instructions.iter_mut() {
70 |             match instruction {
71 |                 ImpactOps::BR(id) |
72 |                 ImpactOps::BRT(id) |
73 |                 ImpactOps::BRF(id) => {
74 |                     let name = label_mappings.get(&(*id as usize)).expect("unreachable");
75 | 
76 |                     if let Some(address) = labels.get(name.content) {
77 |                         *id = *address as u32;
78 |                     } else {
79 |                         return Err(ParseError {
80 |                             span: name.span.clone(),
81 |                             kind: ParseErrorKind::UnknownLabel {
82 |                                 label: name.content.to_string(),
83 |                             },
84 |                         });
85 |                     }
86 |                 },
87 |                 _ => {},
88 |             }
89 |         }
90 | 
91 |         Ok(instructions)
92 |     }
93 | 
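    // Illustrative note (not part of the original source): because label
    // resolution happens in the second loop above, a branch may reference a
    // label that is only defined further down. A source like
    //
    //     iconst0
    //     brf done
    //     inc
    //     done:
    //     halt
    //
    // first records BRF with a placeholder id, then rewrites that id to the
    // program counter recorded for `done`.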
94 |     fn parse_instruction(
95 |         &'a self,
96 |         keyword: KeywordKind,
97 |         label_mappings: &mut HashMap<usize, Token<'a>>,
98 |     ) -> Result<ImpactOps, ParseError> {
99 |         let op = match keyword {
100 |             KeywordKind::Invalid => ImpactOps::Invalid,
101 |             KeywordKind::IAdd => ImpactOps::IAdd,
102 |             KeywordKind::ISub => ImpactOps::ISub,
103 |             KeywordKind::IMul => ImpactOps::IMul,
104 |             KeywordKind::IDiv => ImpactOps::IDiv,
105 |             KeywordKind::Ilt => ImpactOps::ILT,
106 |             KeywordKind::Ieq => ImpactOps::IEQ,
107 |             KeywordKind::Ileq => ImpactOps::ILEQ,
108 |             KeywordKind::IConst0 => ImpactOps::IConst0,
109 |             KeywordKind::IConst1 => ImpactOps::IConst1,
110 |             KeywordKind::Inc => ImpactOps::Inc,
111 |             KeywordKind::Dec => ImpactOps::Dec,
112 |             KeywordKind::Copy => ImpactOps::Copy,
113 |             KeywordKind::Dup => ImpactOps::Dup,
114 |             KeywordKind::Ret => ImpactOps::Ret,
115 |             KeywordKind::LTime => ImpactOps::LTime,
116 |             KeywordKind::TimeFF => ImpactOps::TimeFF,
117 |             KeywordKind::Pop => ImpactOps::Pop,
118 |             KeywordKind::Rvm => ImpactOps::RVM,
119 |             KeywordKind::DSelf => ImpactOps::DSelf,
120 |             KeywordKind::Halt => ImpactOps::Halt,
121 | 
122 |             KeywordKind::Br => {
123 |                 let label = self.expect(TokenKind::Identifier)?;
124 |                 let id = label_mappings.len();
125 |                 label_mappings.insert(id, label);
126 |                 ImpactOps::BR(id as u32)
127 |             },
128 |             KeywordKind::Brt => {
129 |                 let label = self.expect(TokenKind::Identifier)?;
130 |                 let id = label_mappings.len();
131 |                 label_mappings.insert(id, label);
132 |                 ImpactOps::BRT(id as u32)
133 |             },
134 |             KeywordKind::Brf => {
135 |                 let label = self.expect(TokenKind::Identifier)?;
136 |                 let id = label_mappings.len();
137 |                 label_mappings.insert(id, label);
138 |                 ImpactOps::BRF(id as u32)
139 |             },
140 | 
141 |             KeywordKind::IConst => ImpactOps::IConst(self.parse_data()?),
142 |             KeywordKind::Load => ImpactOps::Load(self.parse_data()?),
143 |             KeywordKind::GLoad => ImpactOps::GLoad(self.parse_data()?),
144 |             KeywordKind::Store => ImpactOps::Store(self.parse_data()?),
145 |             KeywordKind::GStore => ImpactOps::GStore(self.parse_data()?),
146 | 
147 |             KeywordKind::Call => ImpactOps::Call(self.parse_call_type()?),
148 |             KeywordKind::ECall => ImpactOps::ECall(self.parse_call_type()?),
149 | 
150 |             KeywordKind::Unknown => ImpactOps::Unknown(self.parse_number()?),
151 |         };
152 | 
153 |         Ok(op)
154 |     }
155 | 
156 |     fn parse_call_type(&self) -> Result<u32, ParseError> {
157 |         let kind = { self.peek().kind };
158 | 
159 |         match kind {
160 |             TokenKind::Number => self.parse_number(),
161 |             TokenKind::Identifier => {
162 |                 let name = self.expect(TokenKind::Identifier)?;
163 |                 let hash = fnv(name.content.as_bytes());
164 | 
165 |                 if !self.nodes.contains_key(&hash) {
166 |                     return Err(ParseError {
167 |                         span: name.span,
168 |                         kind: ParseErrorKind::UnknownType {
169 |                             type_name: name.content.to_string(),
170 |                         },
171 |                     });
172 |                 }
173 | 
174 |                 Ok(hash)
175 |             }
176 |             _ => Err(ParseError {
177 |                 span: self.peek().span.clone(),
178 |                 kind: ParseErrorKind::Expected {
179 |                     expected: "function hash or function name".to_string(),
180 |                     found: self.peek().kind.to_string(),
181 |                 },
182 |             }),
183 |         }
184 |     }
185 | 
186 |     fn parse_number(&self) -> Result<u32, ParseError> {
187 |         let token = self.expect(TokenKind::Number)?;
188 | 
189 |         token.content.parse().map_err(|error| ParseError {
190 |             span: token.span,
191 |             kind: ParseErrorKind::NumberParseError {
192 |                 content: token.content.to_string(),
193 |                 error,
194 |             },
195 |         })
196 |     }
197 | 
198 |     fn parse_data(&self) -> Result<u32, ParseError> {
199 |         let token = self.expect(TokenKind::Identifier)?;
200 |         let data = self.impact_data.data.iter()
201 |             .enumerate()
202 |             .find(|(_, data)| data.name == token.content)
203 |             .map(|(index, _)| index as u32 | 0xFFFF_0000)
204 |             .ok_or_else(|| ParseError {
205 |                 span: token.span,
206 |                 kind: ParseErrorKind::UnknownData {
207 |                     name: token.content.to_string(),
208 |                 },
209 |             })?;
210 | 
211 |         Ok(data)
212 |     }
213 | 
214 |     fn match_keyword(identifier: &str) -> Option<KeywordKind> {
215 |         let kind = match identifier {
216 |             "invalid" => KeywordKind::Invalid,
217 |             "iadd" => KeywordKind::IAdd,
218 |             "isub" => KeywordKind::ISub,
219 |             "imul" => KeywordKind::IMul,
220 |             "idiv" => KeywordKind::IDiv,
221 |             "ilt" => KeywordKind::Ilt,
222 |             "ieq" => KeywordKind::Ieq,
223 |             "ileq" => KeywordKind::Ileq,
224 |             "br" => KeywordKind::Br,
225 |             "brt" => KeywordKind::Brt,
226 |             "brf" => KeywordKind::Brf,
227 |             "iconst" => KeywordKind::IConst,
228 |             "iconst0" => KeywordKind::IConst0,
229 |             "iconst1" => KeywordKind::IConst1,
230 |             "inc" => KeywordKind::Inc,
231 |             "dec" => KeywordKind::Dec,
232 |             "copy" => KeywordKind::Copy,
233 |             "dup" => KeywordKind::Dup,
234 |             "call" => KeywordKind::Call,
235 |             "ecall" => KeywordKind::ECall,
236 |             "ret" => KeywordKind::Ret,
237 |             "load" => KeywordKind::Load,
238 |             "gload" => KeywordKind::GLoad,
239 |             "store" => KeywordKind::Store,
240 |             "gstore" => KeywordKind::GStore,
241 |             "ltime" => KeywordKind::LTime,
242 |             "timeff" => KeywordKind::TimeFF,
243 |             "pop" => KeywordKind::Pop,
244 |             "rvm" => KeywordKind::Rvm,
245 |             "dself" => KeywordKind::DSelf,
246 |             "halt" => KeywordKind::Halt,
247 |             "unknown" => KeywordKind::Unknown,
248 |             _ => return None,
249 |         };
250 | 
251 |         Some(kind)
252 |     }
253 | 
254 |     fn peek(&self) -> Ref<'_, Token<'a>> {
255 |         self.skip_whitespace();
256 |         self.peek0()
257 |     }
258 | 
259 |     fn next(&self) -> Token {
260 |         self.skip_whitespace();
261 |         self.next0()
262 |     }
263 | 
264 |     fn expect(&self, kind: TokenKind) -> Result<Token, ParseError> {
265 |         let token = self.next();
266 | 
267 |         if token.kind != kind {
268 |             return Err(ParseError {
269 |                 span: token.span,
270 |                 kind: ParseErrorKind::Expected {
271 |                     expected: kind.to_string(),
272 |                     found: token.kind.to_string(),
273 |                 },
274 |             });
275 |         }
276 | 
277 |         Ok(token)
278 |     }
279 | 
280 |     fn is_whitespace(&self, token: &Token) -> bool {
281 |         matches!(
282 |             token.kind,
283 |             TokenKind::Comment |
284 |             TokenKind::Whitespace |
285 |             TokenKind::Newline
286 |         )
287 |     }
288 | 
289 |     fn skip_whitespace(&self) {
290 |         while self.is_whitespace(&self.peek0()) {
291 |             self.next0();
292 |         }
293 |     }
294 | 
295 |     fn peek0(&self) -> Ref<'_, Token<'a>> {
296 |         self.next.borrow()
297 |     }
298 | 
299 |     fn next0(&self) -> Token {
300 |         let token = self.tokenizer.borrow_mut().advance();
301 |         self.next.replace(token)
302 |     }
303 | 
304 | }
305 | 
--------------------------------------------------------------------------------
/parser/src/data/impact/bytecode/assembler.rs:
--------------------------------------------------------------------------------
1 | use std::collections::HashMap;
2 | 
3 | use crate::{reflection::TypeCollection, Hash32};
4 | 
5 | use super::{ImpactNode, ImpactCommand};
6 | 
7 | #[derive(Debug)]
8 | pub struct ImpactAssembler<'a> {
9 |     pub(super) type_collection: &'a TypeCollection,
10 |     pub(super) nodes: HashMap<Hash32, ImpactNode<'a>>,
11 | }
12 | 
13 | impl<'a> ImpactAssembler<'a> {
14 |     pub fn new(type_collection: &'a TypeCollection) -> Self {
15 |         let nodes = type_collection.get_impact_nodes();
16 | 
17 |         Self {
18 |             type_collection,
19 |             nodes,
20 |         }
21 |     }
22 | 
23 |     pub fn disassemble(code: &[ImpactCommand]) -> Vec<ImpactOps> {
24 |         let mut instructions = Vec::new();
25 |         let mut ptr = 0;
26 | 
27 |         while ptr < code.len() {
28 |             let value = code[ptr];
29 |             ptr += 1;
30 | 
31 |             let value = match value {
32 |                 0x00 => ImpactOps::Invalid,
33 |                 0x01 => ImpactOps::IAdd,
34 |                 0x02 => ImpactOps::ISub,
35 |                 0x03 => ImpactOps::IMul,
36 |                 0x04 => ImpactOps::IDiv,
37 |                 0x05 => ImpactOps::ILT,
38 |                 0x06 => ImpactOps::IEQ,
39 |                 0x07 => ImpactOps::ILEQ,
40 |                 0x08 => ImpactOps::BR({
41 |                     let address = code[ptr];
42 |                     ptr += 1;
43 |                     address
44 |                 }),
45 |                 0x09 => ImpactOps::BRT({
46 |                     let address = code[ptr];
47 |                     ptr += 1;
48 |                     address
49 |                 }),
50 |                 0x0A => ImpactOps::BRF({
51 |                     let address = code[ptr];
52 |                     ptr += 1;
53 |                     address
54 |                 }),
55 |                 0x0B => ImpactOps::IConst({
56 |                     let value = code[ptr];
57 |                     assert_eq!(value & 0xFFFF_0000, 0xFFFF_0000);
58 |                     ptr += 1;
59 |                     value
60 |                 }),
61 |                 0x0C => ImpactOps::IConst0,
62 |                 0x0D => ImpactOps::IConst1,
63 |                 0x0E => ImpactOps::Inc,
64 |                 0x0F => ImpactOps::Dec,
65 |                 0x10 => ImpactOps::Copy,
66 |                 0x11 => ImpactOps::Dup,
67 |                 0x12 => ImpactOps::Call({
68 |                     let index = code[ptr];
69 |                     ptr += 1;
70 |                     index
71 |                 }),
72 |                 0x13 => ImpactOps::ECall({
73 |                     let index = code[ptr];
74 |                     ptr += 1;
75 |                     index
76 |                 }),
77 |                 0x14 => ImpactOps::Ret,
78 |                 0x15 => ImpactOps::Load({
79 |                     let index = code[ptr];
80 |                     ptr += 1;
81 |                     index
82 |                 }),
83 |                 0x16 => ImpactOps::GLoad({
84 |                     let index = code[ptr];
85 |                     ptr += 1;
86 |                     index
87 |                 }),
88 |                 0x17 => ImpactOps::Store({
89 |                     let index = code[ptr];
90 |                     ptr += 1;
91 |                     index
92 |                 }),
93 |                 0x18 => ImpactOps::GStore({
94 |                     let index = code[ptr];
95 |                     ptr += 1;
96 |                     index
97 |                 }),
98 |                 0x19 => ImpactOps::LTime,
99 |                 0x1A => ImpactOps::TimeFF,
100 |                 0x1B => ImpactOps::Pop,
101 |                 0x1C => ImpactOps::RVM,
102 |                 0x1D => ImpactOps::DSelf,
103 |                 0x1E => ImpactOps::Halt,
104 |                 _ => ImpactOps::Unknown(value),
105 |             };
106 | 
107 |             instructions.push(value);
108 |         }
109 | 
110 |         instructions
111 |     }
112 | 
113 |     pub fn assemble(code: &[ImpactOps]) -> Vec<ImpactCommand> {
114 |         let mut instructions = Vec::new();
115 | 
116 |         for instruction in code {
117 |             match instruction {
118 |                 ImpactOps::Invalid => instructions.push(0x00),
119 |                 ImpactOps::IAdd => instructions.push(0x01),
120 |                 ImpactOps::ISub => instructions.push(0x02),
121 |                 ImpactOps::IMul => instructions.push(0x03),
122 |                 ImpactOps::IDiv => instructions.push(0x04),
123 |                 ImpactOps::ILT => instructions.push(0x05),
124 |                 ImpactOps::IEQ => instructions.push(0x06),
125 |                 ImpactOps::ILEQ => instructions.push(0x07),
126 |                 ImpactOps::BR(address) => {
127 |                     instructions.push(0x08);
128 |                     instructions.push(*address);
129 |                 },
130 |                 ImpactOps::BRT(address) => {
131 |                     instructions.push(0x09);
132 |                     instructions.push(*address);
133 |                 },
134 |                 ImpactOps::BRF(address) => {
135 |                     instructions.push(0x0A);
136 |                     instructions.push(*address);
137 |                 },
138 |                 ImpactOps::IConst(value) => {
139 |                     instructions.push(0x0B);
140 |                     instructions.push(*value);
141 |                 },
142 |                 ImpactOps::IConst0 => instructions.push(0x0C),
143 |                 ImpactOps::IConst1 => instructions.push(0x0D),
144 |                 ImpactOps::Inc => instructions.push(0x0E),
145 |                 ImpactOps::Dec => instructions.push(0x0F),
146 |                 ImpactOps::Copy => instructions.push(0x10),
147 |                 ImpactOps::Dup => instructions.push(0x11),
148 |                 ImpactOps::Call(index) => {
149 |                     instructions.push(0x12);
150 |                     instructions.push(*index);
151 |                 },
152 |                 ImpactOps::ECall(index) => {
153 |                     instructions.push(0x13);
154 |                     instructions.push(*index);
155 |                 },
156 |                 ImpactOps::Ret => instructions.push(0x14),
157 |                 ImpactOps::Load(index) => {
158 |                     instructions.push(0x15);
159 |                     instructions.push(*index);
160 |                 },
161 |                 ImpactOps::GLoad(index) => {
162 |                     instructions.push(0x16);
163 |                     instructions.push(*index);
164 |                 },
165 |                 ImpactOps::Store(index) => {
166 |                     instructions.push(0x17);
167 |                     instructions.push(*index);
168 |                 },
169 |                 ImpactOps::GStore(index) => {
170 |                     instructions.push(0x18);
171 |                     instructions.push(*index);
172 |                 },
173 |                 ImpactOps::LTime => instructions.push(0x19),
174 |                 ImpactOps::TimeFF => instructions.push(0x1A),
175 |                 ImpactOps::Pop => instructions.push(0x1B),
176 |                 ImpactOps::RVM => instructions.push(0x1C),
177 |                 ImpactOps::DSelf => instructions.push(0x1D),
178 |                 ImpactOps::Halt => instructions.push(0x1E),
179 |                 ImpactOps::Unknown(value) => instructions.push(*value),
180 |             }
181 |         }
182 | 
183 |         instructions
184 |     }
185 | }
186 | 
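// Illustrative sketch (not part of the original source): `assemble` and
// `disassemble` are inverses for well-formed input. This assumes
// `ImpactCommand` is a plain integer word, which the literal matches in
// `disassemble` imply.
#[cfg(test)]
mod roundtrip_example {
    use super::*;

    #[test]
    fn assemble_then_disassemble() {
        let ops = vec![ImpactOps::IConst0, ImpactOps::BRT(4), ImpactOps::Halt];
        let code = ImpactAssembler::assemble(&ops);
        // One word per opcode plus one word for the BRT operand.
        assert_eq!(code.len(), 4);
        assert_eq!(ImpactAssembler::disassemble(&code), ops);
    }
}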
187 | #[derive(Debug, Clone, PartialEq, Eq)]
188 | #[repr(u8)]
189 | pub enum ImpactOps {
190 |     Invalid = 0x00, // unused
191 |     IAdd = 0x01,
192 |     ISub = 0x02, // unused
193 |     IMul = 0x03, // unused
194 |     IDiv = 0x04, // unused
195 |     ILT = 0x05,
196 |     IEQ = 0x06, // unused
197 |     ILEQ = 0x07,
198 |     BR(u32) = 0x08,
199 |     BRT(u32) = 0x09,
200 |     BRF(u32) = 0x0A,
201 |     IConst(u32) = 0x0B, // load data from data_layout table
202 |     IConst0 = 0x0C,
203 |     IConst1 = 0x0D, // unused
204 |     Inc = 0x0E,
205 |     Dec = 0x0F, // unused
206 |     Copy = 0x10, // unused
207 |     Dup = 0x11,
208 |     Call(u32) = 0x12, // unused
209 |     // Call pops each type of argument from the top of the stack until all arguments are consumed.
210 |     // First come the input arguments, then the config arguments, and finally the output arguments.
211 |     // e.g.:
212 |     // MyFunction(
213 |     //     cfg flag: bool,
214 |     //     cfg value: float,
215 |     //     in input1: float,
216 |     //     in input2: float,
217 |     //     out output1: float,
218 |     //     out output2: float
219 |     // )
220 |     // requires the stack to be in the following order:
221 |     // - input2
222 |     // - input1
223 |     // - value
224 |     // - flag
225 |     // - output2
226 |     // - output1
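    // Illustrative note (not part of the original source): reading that list
    // as top-of-stack first, a call site pushes the operands in the reverse
    // order before the call, e.g. with hypothetical data-layout names:
    //
    //     iconst output1_slot
    //     iconst output2_slot
    //     iconst flag_slot
    //     iconst value_slot
    //     iconst input1_slot
    //     iconst input2_slot
    //     ecall MyFunction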
227 |     ECall(u32) = 0x13,
228 |     Ret = 0x14, // unused
229 |     Load(u32) = 0x15, // unused
230 |     GLoad(u32) = 0x16,
231 |     Store(u32) = 0x17, // unused
232 |     GStore(u32) = 0x18,
233 |     LTime = 0x19, // pushes some time on the stack
234 |     TimeFF = 0x1A, // time from float
235 |     Pop = 0x1B,
236 |     RVM = 0x1C, // yield
237 |     DSelf = 0x1D, // destroy self
238 |     Halt = 0x1E,
239 | 
240 |     Unknown(u32) = 0xFF,
241 | }
242 | 
243 | impl ImpactOps {
244 |     pub fn name(&self) -> &'static str {
245 |         match self {
246 |             ImpactOps::Invalid => "invalid",
247 |             ImpactOps::IAdd => "iadd",
248 |             ImpactOps::ISub => "isub",
249 |             ImpactOps::IMul => "imul",
250 |             ImpactOps::IDiv => "idiv",
251 |             ImpactOps::ILT => "ilt",
252 |             ImpactOps::IEQ => "ieq",
253 |             ImpactOps::ILEQ => "ileq",
254 |             ImpactOps::BR(_) => "br",
255 |             ImpactOps::BRT(_) => "brt",
256 |             ImpactOps::BRF(_) => "brf",
257 |             ImpactOps::IConst(_) => "iconst",
258 |             ImpactOps::IConst0 => "iconst0",
259 |             ImpactOps::IConst1 => "iconst1",
260 |             ImpactOps::Inc => "inc",
261 |             ImpactOps::Dec => "dec",
262 |             ImpactOps::Copy => "copy",
263 |             ImpactOps::Dup => "dup",
264 |             ImpactOps::Call(_) => "call",
265 |             ImpactOps::ECall(_) => "ecall",
266 |             ImpactOps::Ret => "ret",
267 |             ImpactOps::Load(_) => "load",
268 |             ImpactOps::GLoad(_) => "gload",
269 |             ImpactOps::Store(_) => "store",
270 |             ImpactOps::GStore(_) => "gstore",
271 |             ImpactOps::LTime => "ltime",
272 |             ImpactOps::TimeFF => "timeff",
273 |             ImpactOps::Pop => "pop",
274 |             ImpactOps::RVM => "rvm",
275 |             ImpactOps::DSelf => "dself",
276 |             ImpactOps::Halt => "halt",
277 |             ImpactOps::Unknown(_) => "unknown",
278 |         }
279 |     }
280 | 
281 |     pub fn size(&self) -> usize {
282 |         match self {
283 |             ImpactOps::Invalid => 1,
284 |             ImpactOps::IAdd => 1,
285 |             ImpactOps::ISub => 1,
286 |             ImpactOps::IMul => 1,
287 |             ImpactOps::IDiv => 1,
288 |             ImpactOps::ILT => 1,
289 |             ImpactOps::IEQ => 1,
290 |             ImpactOps::ILEQ => 1,
291 |             ImpactOps::BR(_) => 2,
292 |             ImpactOps::BRT(_) => 2,
293 |             ImpactOps::BRF(_) => 2,
294 |             ImpactOps::IConst(_) => 2,
295 |             ImpactOps::IConst0 => 1,
296 |             ImpactOps::IConst1 => 1,
297 |             ImpactOps::Inc => 1,
298 |             ImpactOps::Dec => 1,
299 |             ImpactOps::Copy => 1,
300 |             ImpactOps::Dup => 1,
301 |             ImpactOps::Call(_) => 2,
302 |             ImpactOps::ECall(_) => 2,
303 |             ImpactOps::Ret => 1,
304 |             ImpactOps::Load(_) => 2,
305 |             ImpactOps::GLoad(_) => 2,
306 |             ImpactOps::Store(_) => 2,
307 |             ImpactOps::GStore(_) => 2,
308 |             ImpactOps::LTime => 1,
309 |             ImpactOps::TimeFF => 1,
310 |             ImpactOps::Pop => 1,
311 |             ImpactOps::RVM => 1,
312 |             ImpactOps::DSelf => 1,
313 |             ImpactOps::Halt => 1,
314 |             ImpactOps::Unknown(_) => 1,
315 |         }
316 |     }
317 | }
318 | 
--------------------------------------------------------------------------------
/parser/src/reflection/parser.rs:
--------------------------------------------------------------------------------
1 | use std::collections::HashMap;
2 | use std::io::{Cursor, Read, Seek, SeekFrom};
3 | use std::path::Path;
4 | 
5 | use shared::hash::fnv;
6 | use shared::io::ReadExt;
7 | 
8 | use super::pe_file::{PEFile, ReadPEExt};
9 | use super::types::*;
10 | use super::{ReflectionParseError, TypeCollection};
11 | 
12 | pub fn extract_reflection_data(
13 |     exe_file: impl AsRef<Path>,
14 |     deserialize_default_values: bool,
15 | ) -> Result<Vec<TypeInfo>, ReflectionParseError> {
16 |     let pe_file = PEFile::load_from_file(exe_file)?;
17 |     let data_section_offset = pe_file.offset_to_section(".data")
18 |         .ok_or(ReflectionParseError::MissingDataSection)?;
19 |     let rdata_section_offset = pe_file.offset_to_section(".rdata")
20 |         .ok_or(ReflectionParseError::MissingRDataSection)?;
21 | 
22 |     let offset_to_blob_string_literal = pe_file.find(
23 |         rdata_section_offset - 1,
24 |         [0x00, 0x42, 0x6C, 0x6F, 0x62, 0x53, 0x74, 0x72, 0x69, 0x6E, 0x67, 0x00],
25 |         8
26 |     )
27 |         .and_then(|offset| pe_file.fo_to_va(offset + 1))
28 |         .ok_or(ReflectionParseError::MalformedPattern)?;
29 | 
30 |     let offset = pe_file.find_pointer_to_0va(rdata_section_offset, offset_to_blob_string_literal)
31 |         .and_then(|offset| pe_file.fo_to_va(offset))
32 |         .ok_or(ReflectionParseError::MalformedPattern)?;
33 | 
34 |     let offset = pe_file.find_pointer_to_0va(data_section_offset, offset)
35 |         .and_then(|offset| pe_file.fo_to_va(offset))
36 |         .ok_or(ReflectionParseError::MalformedPattern)?;
37 | 
38 |     let offset = pe_file.find_pointer_to_0va(rdata_section_offset, offset)
39 |         .ok_or(ReflectionParseError::MalformedPattern)?;
40 | 
41 |     let mut cursor = pe_file.get_cursor_at(offset)?;
42 |     let mut table_cursor = cursor.read_pointee(&pe_file)?;
43 |     let start_position = table_cursor.stream_position()?;
44 |     let table_count = cursor.read_u64()?;
45 | 
46 |     let mut reference_table = HashMap::new();
47 |     let mut reference_table_cursor = table_cursor.clone();
48 | 
49 |     for _ in 0..table_count {
50 |         let offset = reference_table_cursor.read_u64()?;
51 |         reference_table.insert(offset, reference_table.len());
52 |     }
53 | 
54 |     let mut table = Vec::with_capacity(table_count as usize);
55 | 
56 |     for _ in 0..table_count {
57 |         let mut type_cursor = table_cursor.read_pointee(&pe_file)?;
58 |         let ty = read_type(&mut type_cursor, &pe_file, &reference_table, false)?;
59 | 
60 |         table.push(ty);
61 |     }
62 | 
63 |     if deserialize_default_values {
64 |         table_cursor.seek(SeekFrom::Start(start_position))?;
65 | 
66 |         let mut type_collection = TypeCollection::default();
67 |         let mut values = Vec::with_capacity(table_count as usize);
68 | 
69 |         type_collection.extend(table);
70 | 
71 |         for _ in 0..table_count {
72 |             let mut type_cursor = table_cursor.read_pointee(&pe_file)?;
73 |             let value = read_default_value(
74 |                 &mut type_cursor,
75 |                 &pe_file,
76 |                 &type_collection,
77 |             )?;
78 | 
79 |             values.push(value);
80 |         }
81 | 
82 |         table = type_collection.into_inner().unwrap();
83 | 
84 |         for (ty, value) in table.iter_mut().zip(values) {
85 |             ty.default_value = value;
86 |         }
87 |     }
88 | 
89 |     Ok(table)
90 | }
91 | 
92 | /// # Layout
93 | ///
94 | /// ```c
95 | /// struct TypeInfo {
96 | ///     char* name_ptr;
97 | ///     u64 name_len;
98 | ///     char* impact_name_ptr;
99 | ///     u64 impact_name_len;
100 | ///     char* qualified_name_ptr;
101 | ///     u64 qualified_name_len;
102 | ///     Namespace* namespace;
103 | ///     TypeInfo* inner_type;
104 | ///     u32 size;
105 | ///     u16 alignment;
106 | ///     u16 element_alignment;
107 | ///     u32 field_count;
108 | ///     u8 primitive_type;
109 | ///     TypeFlags flags;
110 | ///     u8 padding[2];
111 | ///     u32 qualified_hash; // @0x50
112 | ///     u32 internal_hash;
113 | ///     StructFieldInfo* struct_fields[field_count]; // ptr to array of field_count StructFieldInfos
114 | ///     EnumFieldInfo* enum_fields[field_count]; // ptr to array of field_count EnumFieldInfos
115 | ///     TypeInfo** variant_type; // contains as inner_type
116 | ///     u8* default_value_ptr; // @0x70
117 | ///     u64 default_value_len;
118 | ///     Attribute* attributes_ptr;
119 | ///     u64 attributes_count;
120 | /// }
121 | ///
122 | /// enum TypeFlags : u8 {
123 | ///     None = 0x00,
124 | ///     HasDS = 0x01,
125 | ///     HasBlobArray = 0x02,
126 | ///     HasBlobString = 0x04,
127 | ///     HasBlobOptional = 0x08,
128 | ///     HasBlobVariant = 0x10,
129 | ///     Unknown_0x20 = 0x20, // shader/gpu related?
130 | ///     Unknown_0x40 = 0x40, // shader/gpu related?
131 | ///     Unknown_0x80 = 0x80, // shader/gpu related?
132 | /// }
133 | /// ```
134 | fn read_type(
135 |     cursor: &mut Cursor<&[u8]>,
136 |     pe_file: &PEFile,
137 |     reference_table: &HashMap<u64, usize>,
138 |     skip_fields: bool
139 | ) -> std::io::Result<TypeInfo> {
140 |     let name = cursor.read_pointee(pe_file)?
141 |         .read_string(cursor.read_u64()? as usize)?;
142 |     let impact_name = cursor.read_pointee(pe_file)?
143 |         .read_string(cursor.read_u64()? as usize)?;
144 |     let qualified_name = cursor.read_pointee(pe_file)?
145 |         .read_string(cursor.read_u64()? as usize)?;
146 | 
147 |     let namespace_cursor = cursor.read_pointee(pe_file)?;
148 |     let namespace = read_namespace(namespace_cursor, pe_file)?;
149 | 
150 |     let inner_type = read_type_ref(cursor, reference_table)?;
151 | 
152 |     let size = cursor.read_u32()?;
153 |     let alignment = cursor.read_u16()?;
154 |     let element_alignment = cursor.read_u16()?;
155 |     let field_count = cursor.read_u32()?;
156 |     let primitive_type = PrimitiveType::from_u8(cursor.read_u8()?);
157 |     let flags = TypeFlags::from_bits_truncate(cursor.read_u8()?);
158 |     cursor.padding(2)?;
159 |     let qualified_hash = cursor.read_u32()?;
160 |     let internal_hash = cursor.read_u32()?;
161 | 
162 |     let (struct_fields, enum_fields) = if !skip_fields {
163 |         let struct_fields = cursor.read_pointee_opt(pe_file)?
164 |             .map(|mut cursor| {
165 |                 read_struct_fields(&mut cursor, field_count as u64, pe_file, reference_table)
166 |             })
167 |             .transpose()?
168 |             .unwrap_or_else(Vec::new);
169 | 
170 |         let enum_fields = cursor.read_pointee_opt(pe_file)?
171 |             .map(|mut cursor| {
172 |                 read_enum_fields(&mut cursor, field_count as u64, pe_file)
173 |             })
174 |             .transpose()?
175 |             .unwrap_or_else(Vec::new);
176 | 
177 |         (struct_fields, enum_fields)
178 |     } else {
179 |         cursor.seek_relative(16)?;
180 |         (Vec::new(), Vec::new())
181 |     };
182 | 
183 |     cursor.seek_relative(8)?; // skip variant_type
184 |     cursor.seek_relative(8)?; // skip default_value_ptr
185 |     cursor.seek_relative(8)?; // skip default_value_len
186 | 
187 |     let attributes_cursor = cursor.read_pointee_opt(pe_file)?;
188 |     let attributes_count = cursor.read_u64()?;
189 |     let attributes = attributes_cursor.map(|mut cursor| {
190 |         read_attributes(&mut cursor, attributes_count, pe_file, reference_table)
191 |     })
192 |         .transpose()?
193 |         .unwrap_or_else(Vec::new);
194 | 
195 |     Ok(TypeInfo {
196 |         name_hash: fnv(&name),
197 |         impact_hash: fnv(&impact_name),
198 | 
199 |         name,
200 |         impact_name,
201 |         qualified_name,
202 |         namespace,
203 |         inner_type,
204 |         size,
205 |         alignment,
206 |         element_alignment,
207 |         field_count,
208 |         primitive_type,
209 |         flags,
210 |         qualified_hash,
211 |         internal_hash,
212 |         struct_fields,
213 |         enum_fields,
214 |         default_value: None,
215 |         attributes,
216 |     })
217 | }
218 | 
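// Note (added for clarity): the relative seeks in `read_default_value` below
// follow the TypeInfo layout documented above. `qualified_hash` sits at +0x50,
// reading it advances the cursor to +0x54, and the remaining
// 0x20 - 0x04 = 0x1C bytes skip `internal_hash` (4), the two field-table
// pointers (8 + 8) and `variant_type` (8) to land exactly on
// `default_value_ptr` at +0x70.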
219 | fn read_default_value(
220 |     cursor: &mut Cursor<&[u8]>,
221 |     pe_file: &PEFile,
222 |     type_collection: &TypeCollection,
223 | ) -> std::io::Result<Option<serde_json::Value>> {
224 |     cursor.seek_relative(0x50)?; // skip to qualified_hash
225 |     let qualified_hash = cursor.read_u32()?;
226 | 
227 |     cursor.seek_relative(0x20 - 0x04)?; // skip to default_value_ptr
228 | 
229 |     let default_value_cursor = cursor.read_pointee_opt(pe_file)?;
230 |     let default_value_len = cursor.read_u64()?;
231 | 
232 |     if let Some(mut cursor) = default_value_cursor {
233 |         let type_info = type_collection.get_type_by_qualified_hash(qualified_hash)
234 |             .ok_or_else(|| std::io::Error::new(
235 |                 std::io::ErrorKind::InvalidData,
236 |                 format!("Failed to find type by hash: {}", qualified_hash)
237 |             ))?;
238 | 
239 |         let mut value = vec![0; default_value_len as usize];
240 |         cursor.read_exact(&mut value)?;
241 | 
242 |         let value = type_collection.deserialize(type_info, &value)
243 |             .map_err(|e| std::io::Error::new(
244 |                 std::io::ErrorKind::InvalidData,
245 |                 format!("Failed to deserialize default value: {}", e)
246 |             ))?;
247 | 
248 |         Ok(Some(value))
249 |     } else {
250 |         Ok(None)
251 |     }
252 | }
253 | 
254 | fn read_struct_fields(
255 |     cursor: &mut Cursor<&[u8]>,
256 |     count: u64,
257 |     pe_file: &PEFile,
258 |     reference_table: &HashMap<u64, usize>,
259 | ) -> std::io::Result<Vec<StructFieldInfo>> {
260 |     let mut fields = Vec::with_capacity(count as usize);
261 | 
262 |     for _ in 0..count {
263 |         fields.push(read_struct_field(cursor, pe_file, reference_table)?);
264 |     }
265 | 
266 |     Ok(fields)
267 | }
268 | 
269 | /// # Layout
270 | ///
271 | /// ```c
272 | /// struct StructFieldInfo {
273 | ///     char* name_ptr;
274 | ///     u64 name_len;
275 | ///     TypeInfo* type;
276 | ///     u64 data_offset;
277 | ///     Attribute* attributes;
278 | ///     u64 attributes_count;
279 | /// }
280 | /// ```
281 | fn read_struct_field(
282 |     cursor: &mut Cursor<&[u8]>,
283 |     pe_file: &PEFile,
284 |     reference_table: &HashMap<u64, usize>,
285 | ) -> std::io::Result<StructFieldInfo> {
286 |     let name = cursor.read_pointee(pe_file)?
287 |         .read_string(cursor.read_u64()? as usize)?;
288 |     let type_info = read_type_ref(cursor, reference_table)?
289 |         .ok_or_else(|| std::io::Error::new(std::io::ErrorKind::InvalidData, "TypeRef is None"))?;
290 |     let data_offset = cursor.read_u64()?;
291 |     let attributes_cursor = cursor.read_pointee_opt(pe_file)?;
292 |     let attribute_count = cursor.read_u64()?;
293 | 
294 |     let attributes = attributes_cursor.map(|mut cursor| {
295 |         read_attributes(&mut cursor, attribute_count, pe_file, reference_table)
296 |     })
297 |         .transpose()?
298 |         .unwrap_or_else(Vec::new);
299 | 
300 |     Ok(StructFieldInfo {
301 |         name,
302 |         r#type: type_info,
303 |         data_offset,
304 |         attributes,
305 |     })
306 | }
307 | 
308 | fn read_enum_fields(
309 |     cursor: &mut Cursor<&[u8]>,
310 |     count: u64,
311 |     pe_file: &PEFile,
312 | ) -> std::io::Result<Vec<EnumFieldInfo>> {
313 |     let mut fields = Vec::with_capacity(count as usize);
314 | 
315 |     for _ in 0..count {
316 |         fields.push(read_enum_field(cursor, pe_file)?);
317 |     }
318 | 
319 |     Ok(fields)
320 | }
321 | 
322 | /// # Layout
323 | ///
324 | /// ```c
325 | /// struct EnumFieldInfo {
326 | ///     char* name_ptr;
327 | ///     u64 name_len;
328 | ///     u64 value;
329 | ///     u8 padding[16];
330 | /// }
331 | /// ```
332 | fn read_enum_field(
333 |     cursor: &mut Cursor<&[u8]>,
334 |     pe_file: &PEFile,
335 | ) -> std::io::Result<EnumFieldInfo> {
336 |     let name = cursor.read_pointee(pe_file)?
337 |         .read_string(cursor.read_u64()? as usize)?;
338 |     let value = cursor.read_u64()?;
339 | 
340 |     cursor.padding(16)?;
341 | 
342 |     Ok(EnumFieldInfo {
343 |         name,
344 |         value,
345 |     })
346 | }
347 | 
348 | fn read_attributes(
349 |     cursor: &mut Cursor<&[u8]>,
350 |     count: u64,
351 |     pe_file: &PEFile,
352 |     reference_table: &HashMap<u64, usize>,
353 | ) -> std::io::Result<Vec<Attribute>> {
354 |     let mut attributes = Vec::with_capacity(count as usize);
355 | 
356 |     for _ in 0..count {
357 |         attributes.push(read_attribute(cursor, pe_file, reference_table)?);
358 |     }
359 | 
360 |     Ok(attributes)
361 | }
362 | 
363 | /// # Layout
364 | ///
365 | /// ```c
366 | /// struct Attribute {
367 | ///     AttributeInfo* info;
368 | ///     char* value_ptr;
369 | ///     u64 value_len;
370 | /// }
371 | ///
372 | /// struct AttributeInfo {
373 | ///     Namespace* namespace;
374 | ///     char* name_ptr;
375 | ///     u64 name_len;
376 | ///     TypeInfo* type;
377 | /// }
378 | /// ```
379 | fn read_attribute(
380 |     cursor: &mut Cursor<&[u8]>,
381 |     pe_file: &PEFile,
382 |     reference_table: &HashMap<u64, usize>,
383 | ) -> std::io::Result<Attribute> {
384 |     let mut data_cursor = cursor.read_pointee(pe_file)?;
385 |     let value = cursor.read_pointee(pe_file)?
386 |         .read_string(cursor.read_u64()? as usize)?;
387 | 
388 |     // data
389 |     let namespace = read_namespace(data_cursor.read_pointee(pe_file)?, pe_file)?;
390 |     let name = data_cursor.read_pointee(pe_file)?
391 |         .read_string(data_cursor.read_u64()? as usize)?;
392 |     let r#type = read_type_ref(&mut data_cursor, reference_table)?;
393 | 
394 |     Ok(Attribute {
395 |         name,
396 |         namespace,
397 |         r#type,
398 |         value,
399 |     })
400 | }
401 | 
402 | /// # Layout
403 | ///
404 | /// ```c
405 | /// struct Namespace {
406 | ///     char* name_ptr;
407 | ///     u64 name_len;
408 | ///     Namespace* parent;
409 | /// };
410 | /// ```
411 | fn read_namespace(
412 |     cursor: Cursor<&[u8]>,
413 |     pe_file: &PEFile,
414 | ) -> std::io::Result<Vec<String>> {
415 |     let mut namespaces = Vec::new();
416 |     let mut parent_cursor = Some(cursor);
417 | 
418 |     while let Some(mut cursor) = parent_cursor {
419 |         let name = cursor.read_pointee(pe_file)?
420 |             .read_string(cursor.read_u64()? as usize)?;
421 | 
422 |         namespaces.push(name);
423 |         parent_cursor = cursor.read_pointee_opt(pe_file)?;
424 |     }
425 | 
426 |     namespaces.reverse();
427 | 
428 |     Ok(namespaces)
429 | }
430 | 
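// Illustrative note (not part of the original source): namespaces are stored
// leaf-first as a chain of parent pointers, so a type living in a
// hypothetical `keen::resource` namespace is collected as
// ["resource", "keen"] and reversed by `read_namespace` to
// ["keen", "resource"] before being returned.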
431 | fn read_type_ref(
432 |     cursor: &mut Cursor<&[u8]>,
433 |     reference_table: &HashMap<u64, usize>,
434 | ) -> std::io::Result<Option<usize>> {
435 |     let offset = cursor.read_u64()?;
436 | 
437 |     if offset == 0 {
438 |         return Ok(None);
439 |     }
440 | 
441 |     Ok(reference_table.get(&offset).copied())
442 | }
443 | 
--------------------------------------------------------------------------------
/parser/src/container/file.rs:
--------------------------------------------------------------------------------
1 | use std::{collections::{HashMap, HashSet}, fs::File, io::{BufReader, Read, Seek, SeekFrom, Write}, path::Path};
2 | use shared::io::{ReadExt, WriteExt, WriteSeekExt};
3 | 
4 | use crate::{guid::{BlobGuid, DescriptorGuid}, reflection::TypeCollection, Hash32};
5 | use super::{header::*, KFCReadError, KFCWriteError, StaticMap, StaticMapBucket};
6 | 
7 | #[derive(Debug, Clone)]
8 | pub struct KFCFile {
9 |     version: String,
10 | 
11 |     dat_infos: Vec<DatInfo>,
12 |     descriptor_locations: Vec<DescriptorLocation>,
13 | 
14 |     blobs: StaticMap<BlobGuid, BlobLink>,
15 |     descriptors: StaticMap<DescriptorGuid, DescriptorLink>,
16 | 
17 |     descriptor_indices: Vec<u32>,
18 |     groups: StaticMap<Hash32, GroupInfo>,
19 | }
20 | 
21 | impl Default for KFCFile {
22 | 
23 |     fn default() -> Self {
24 |         Self {
25 |             descriptor_locations: vec![DescriptorLocation::default()],
26 | 
27 |             version: String::default(),
28 |             dat_infos: Vec::default(),
29 | 
30 |             blobs: StaticMap::default(),
31 |             descriptors: StaticMap::default(),
32 | 
33 |             descriptor_indices: Vec::default(),
34 |             groups: StaticMap::default(),
35 |         }
36 |     }
37 | 
38 | }
39 | 
40 | impl KFCFile {
41 | 
42 |     pub fn from_path(
43 |         path: &Path,
44 |         skip_entries: bool,
45 |     ) -> Result<Self, KFCReadError> {
46 |         let file = File::open(path)?;
47 |         let mut reader = BufReader::new(file);
48 |         Self::read(&mut reader, skip_entries)
49 |     }
50 | 
51 |     pub fn from_reader<R: Read + Seek>(
52 |         reader: &mut R,
53 |         skip_entries: bool,
54 |     ) -> Result<Self, KFCReadError> {
55 |         Self::read(reader, skip_entries)
56 |     }
57 | 
58 |     pub fn get_version_tag(path: &Path) -> Result<String, KFCReadError> {
59 |         let file = File::open(path)?;
60 |         let mut reader = BufReader::new(file);
61 |         let header = KFCHeader::read(&mut reader)?;
62 |         reader.seek(SeekFrom::Start(header.version.offset))?;
63 |         let version = reader.read_string(header.version.count)?;
64 |         Ok(version)
65 |     }
66 | 
67 |     pub fn get_descriptor_guids(&self) -> &[DescriptorGuid] {
68 |         self.descriptors.keys()
69 |     }
70 | 
71 |     pub fn get_descriptor_link(&self, guid: &DescriptorGuid) -> Option<&DescriptorLink> {
72 |         self.descriptors.get(guid)
73 |     }
74 | 
75 |     pub fn get_descriptor_iter(&self) -> impl Iterator<Item = (&DescriptorGuid, &DescriptorLink)> {
76 |         self.descriptors.iter()
77 |     }
78 | 
79 |     pub fn get_descriptor_map(&self) -> &StaticMap<DescriptorGuid, DescriptorLink> {
80 |         &self.descriptors
81 |     }
82 | 
83 |     pub fn contains_descriptor(&self, guid: &DescriptorGuid) -> bool {
84 |         self.descriptors.contains_key(guid)
85 |     }
86 | 
87 |     pub fn get_descriptor_guids_by_type_hash(&self, type_hash: Hash32) -> Vec<DescriptorGuid> {
88 |         self.groups.get(&type_hash).map(|info| {
89 |             let start = info.index;
90 |             let end = start + info.count;
91 |             &self.descriptor_indices[start..end]
92 |         }).unwrap_or_default()
93 |             .iter()
94 |             .map(|&index| &self.descriptors.keys()[index as usize])
95 |             .cloned()
96 |             .collect()
97 |     }
98 | 
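    // Illustrative sketch (not part of the original source): how the getters
    // above combine in practice. The type hash is a placeholder; a real
    // qualified hash would come from a TypeCollection lookup.
    #[allow(dead_code)]
    fn _example_descriptor_lookup(path: &Path) -> Result<(), KFCReadError> {
        let file = KFCFile::from_path(path, false)?;
        let type_hash: Hash32 = 0x1234_5678; // placeholder qualified type hash
        for guid in file.get_descriptor_guids_by_type_hash(type_hash) {
            // Each link holds the (offset, size) of one descriptor inside
            // the container's data section.
            let _link = file.get_descriptor_link(&guid);
        }
        Ok(())
    }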
99 |     pub fn get_blob_guids(&self) -> &[BlobGuid] {
100 |         self.blobs.keys()
101 |     }
102 | 
103 |     pub fn get_blob_link(&self, guid: &BlobGuid) -> Option<&BlobLink> {
104 |         self.blobs.get(guid)
105 |     }
106 | 
107 |     pub fn get_blob_iter(&self) -> impl Iterator<Item = (&BlobGuid, &BlobLink)> {
108 |         self.blobs.iter()
109 |     }
110 | 
111 |     pub fn get_blob_map(&self) -> &StaticMap<BlobGuid, BlobLink> {
112 |         &self.blobs
113 |     }
114 | 
115 |     pub fn contains_blob(&self, guid: &BlobGuid) -> bool {
116 |         self.blobs.contains_key(guid)
117 |     }
118 | 
119 |     pub fn game_version(&self) -> &str {
120 |         &self.version
121 |     }
122 | 
123 |     pub fn data_offset(&self) -> u64 {
124 |         self.descriptor_locations[0].offset
125 |     }
126 | 
127 |     pub fn data_size(&self) -> u64 {
128 |         self.descriptor_locations[0].size
129 |     }
130 | 
131 |     pub fn get_dat_infos(&self) -> &[DatInfo] {
132 |         &self.dat_infos
133 |     }
134 | 
135 |     // mutators
136 | 
137 |     pub fn set_descriptors(
138 |         &mut self,
139 |         descriptors: StaticMap<DescriptorGuid, DescriptorLink>,
140 |         type_collection: &TypeCollection,
141 |     ) {
142 |         self.descriptors = descriptors;
143 |         self.rebuild_groups(type_collection);
144 |     }
145 | 
146 |     pub fn set_blobs(&mut self, blobs: StaticMap<BlobGuid, BlobLink>) {
147 |         self.blobs = blobs;
148 |     }
149 | 
150 |     pub fn set_dat_infos(&mut self, dat_infos: Vec<DatInfo>) {
151 |         self.dat_infos = dat_infos;
152 |     }
153 | 
154 |     pub fn set_game_version(&mut self, version: String) {
155 |         self.version = version;
156 |     }
157 | 
158 |     pub fn set_data_location(&mut self, offset: u64, size: u64) {
159 |         self.descriptor_locations[0].offset = offset;
160 |         self.descriptor_locations[0].size = size;
161 |         self.descriptor_locations[0].count = self.descriptors.len();
162 |     }
163 | 
164 |     fn rebuild_groups(&mut self, type_collection: &TypeCollection) {
165 |         let mut type_hashes = self.descriptors.keys()
166 |             .iter()
167 |             .map(|guid| guid.type_hash)
168 |             .collect::<HashSet<_>>()
169 |             .into_iter()
170 |             .map(|hash| (hash, GroupInfo {
171 |                 // TODO: Remove unwrap
172 |                 internal_hash: type_collection.get_type_by_qualified_hash(hash).unwrap().internal_hash,
173 |                 ..Default::default()
174 |             }))
175 |             .collect::<HashMap<_, _>>();
176 | 
177 |         let mut indices = Vec::with_capacity(self.descriptors.len());
178 | 
179 |         for (hash, info) in type_hashes.iter_mut() {
180 |             info.index = indices.len();
181 | 
182 |             let hash = *hash;
183 |             let mut count = 0;
184 | 
185 |             for (i, (guid, _)) in self.descriptors.iter().enumerate() {
186 |                 if guid.type_hash == hash {
187 |                     indices.push(i as u32);
188 |                     count += 1;
189 |                 }
190 |             }
191 | 
192 |             info.count = count;
193 |         }
194 | 
195 |         self.descriptor_indices = indices;
196 |         self.groups = type_hashes.into_iter().collect::<Vec<_>>().into();
197 |     }
198 | 
199 | }
200 | 
201 | impl KFCFile {
202 | 
203 |     fn read<R: Read + Seek>(reader: &mut R, skip_entries: bool) -> Result<Self, KFCReadError> {
204 |         let header = KFCHeader::read(reader)?;
205 | 
206 |         // version
207 |         reader.seek(SeekFrom::Start(header.version.offset))?;
208 |         let version = reader.read_string(header.version.count)?;
209 | 
210 |         // dat infos
211 |         reader.seek(SeekFrom::Start(header.dat_infos.offset))?;
212 |         let dat_infos = (0..header.dat_infos.count)
213 |             .map(|_| DatInfo::read(reader))
214 |             .collect::<Result<Vec<_>, _>>()?;
215 | 
216 |         // descriptor locations
217 |         reader.seek(SeekFrom::Start(header.descriptor_locations.offset))?;
218 |         let descriptor_locations = (0..header.descriptor_locations.count)
219 |             .map(|_| DescriptorLocation::read(reader))
220 |             .collect::<Result<Vec<_>, _>>()?;
221 | 
222 |         if !skip_entries {
223 |             // group indices
224 |             reader.seek(SeekFrom::Start(header.descriptor_indices.offset))?;
225 |             let descriptor_indices = (0..header.descriptor_indices.count)
226 |                 .map(|_| reader.read_u32())
227 |                 .collect::<Result<Vec<_>, _>>()?;
228 | 
229 |             // blob static map
230 | 
231 |             reader.seek(SeekFrom::Start(header.blob_buckets.offset))?;
232 |             let blob_buckets = (0..header.blob_buckets.count)
233 |                 .map(|_| StaticMapBucket::read(reader))
234 |                 .collect::<Result<Vec<_>, _>>()?;
235 | 
236 |             reader.seek(SeekFrom::Start(header.blob_guids.offset))?;
237 |             let blob_guids = (0..header.blob_guids.count)
238 |                 .map(|_| BlobGuid::read(reader))
239 |                 .collect::<Result<Vec<_>, _>>()?;
240 | 
241 |             reader.seek(SeekFrom::Start(header.blob_links.offset))?;
242 |             let blob_links = (0..header.blob_links.count)
243 |                 .map(|_| BlobLink::read(reader))
244 |                 .collect::<Result<Vec<_>, _>>()?;
245 | 
246 |             // descriptor static map
247 | 
248 |             reader.seek(SeekFrom::Start(header.descriptor_buckets.offset))?;
249 |             let descriptor_buckets = (0..header.descriptor_buckets.count)
250 |                 .map(|_| StaticMapBucket::read(reader))
251 |                 .collect::<Result<Vec<_>, _>>()?;
252 | 
253 |             reader.seek(SeekFrom::Start(header.descriptor_guids.offset))?;
254 |             let descriptor_guids = (0..header.descriptor_guids.count)
255 |                 .map(|_| DescriptorGuid::read(reader))
256 |                 .collect::<Result<Vec<_>, _>>()?;
257 | 
258 |             reader.seek(SeekFrom::Start(header.descriptor_links.offset))?;
259 |             let descriptor_links = (0..header.descriptor_links.count)
260 |                 .map(|_| DescriptorLink::read(reader))
261 |                 .collect::<Result<Vec<_>, _>>()?;
262 | 
263 |             // group static map
264 | 
265 |             reader.seek(SeekFrom::Start(header.group_buckets.offset))?;
266 |             let group_buckets = (0..header.group_buckets.count)
267 |                 .map(|_| StaticMapBucket::read(reader))
268 |                 .collect::<Result<Vec<_>, _>>()?;
269 | 
270 |             reader.seek(SeekFrom::Start(header.group_hashes.offset))?;
271 |             let group_guids = (0..header.group_hashes.count)
272 |                 .map(|_| reader.read_u32())
273 |                 .collect::<Result<Vec<_>, _>>()?;
274 | 
275 |             reader.seek(SeekFrom::Start(header.group_infos.offset))?;
276 |             let group_links = (0..header.group_infos.count)
277 |                 .map(|_| GroupInfo::read(reader))
278 |                 .collect::<Result<Vec<_>, _>>()?;
279 | 
280 |             Ok(Self {
281 |                 version,
282 |                 dat_infos,
283 | 
284 |                 descriptor_locations,
285 |                 descriptor_indices,
286 | 
287 |                 blobs: StaticMap::from_parts(
288 |                     blob_guids,
289 |                     blob_links,
290 |                     blob_buckets,
291 |                 )?,
292 |                 descriptors: StaticMap::from_parts(
293 |                     descriptor_guids,
294 |                     descriptor_links,
295 |                     descriptor_buckets,
296 |                 )?,
297 |                 groups: StaticMap::from_parts(
298 |                     group_guids,
299 |                     group_links,
300 |                     group_buckets,
301 |                 )?,
302 |             })
303 |         } else {
304 |             Ok(Self {
305 |                 version,
306 |                 dat_infos: Vec::new(),
307 |                 descriptor_locations,
308 |                 descriptor_indices: Vec::new(),
309 |                 blobs: StaticMap::default(),
310 |                 descriptors: StaticMap::default(),
311 |                 groups: StaticMap::default(),
312 |             })
313 |         }
314 |     }
315 | 
316 |     pub(super) fn write_info<W: Write + Seek>(&self, writer: &mut W) -> Result<(), KFCWriteError> {
317 |         // OPTIMIZE: Only write the necessary parts of the file
318 |         self.write(writer)
319 |     }
320 | 
321 |     pub(super) fn write<W: Write + Seek>(&self, writer: &mut W) -> Result<(), KFCWriteError> {
322 |         KFCHeader::default().write(writer)?;
323 | 
324 |         // version
325 |         let version_offset = writer.stream_position()?;
326 |         writer.write_string(&self.version, self.version.len())?;
327 |         writer.align(8)?;
328 | 
329 |         // dat infos
330 |         let dat_infos_offset = writer.stream_position()?;
331 |         for dat_info in &self.dat_infos {
332 |             dat_info.write(writer)?;
333 |         }
334 |         writer.align(8)?;
335 | 
336 |         // descriptor locations
337 |         let descriptor_locations_offset = writer.stream_position()?;
338 |         DescriptorLocation::default().write(writer)?;
339 | 
340 |         // group indices
341 |         let descriptor_indices_offset = writer.stream_position()?;
342 |         for descriptor_index in &self.descriptor_indices {
343 |             writer.write_u32(*descriptor_index)?;
344 |         }
345 | 
346 |         // blob static map
347 | 
348 |         let blob_buckets_offset = writer.stream_position()?;
349 |         for blob_bucket in self.blobs.buckets() {
350 |             blob_bucket.write(writer)?;
351 |         }
352 | 
353 |         let blob_guids_offset = writer.stream_position()?;
354 |         for blob_guid in self.blobs.keys() {
355 |             blob_guid.write(writer)?;
356 |         }
357 |         writer.align(8)?;
358 | 
359 |         let blob_links_offset = writer.stream_position()?;
360 |         for blob_link in self.blobs.values() {
361 |             blob_link.write(writer)?;
362 |         }
363 | 
364 |         // descriptor static map
365 | 
366 |         let descriptor_buckets_offset = writer.stream_position()?;
367 |         for descriptor_bucket in self.descriptors.buckets() {
368 |             descriptor_bucket.write(writer)?;
369 |         }
370 | 
371 |         let descriptor_guids_offset = writer.stream_position()?;
372 |         for descriptor_guid in self.descriptors.keys() {
373 |             descriptor_guid.write(writer)?;
374 |         }
375 |         writer.align(8)?;
376 | 
377 |         let descriptor_links_offset = writer.stream_position()?;
378 |         for descriptor_link in self.descriptors.values() {
379 |             descriptor_link.write(writer)?;
380 |         }
381 | 
382 |         // group static map
383 | 
384 |         let group_buckets_offset = writer.stream_position()?;
385 |         for group_bucket in self.groups.buckets() {
386 |             group_bucket.write(writer)?;
387 |         }
388 | 
389 |         let group_hashes_offset = writer.stream_position()?;
390 |         for group_hash in self.groups.keys() {
391 |             writer.write_u32(*group_hash)?;
392 |         }
393 | 
394 |         let group_infos_offset = writer.stream_position()?;
395 |         for group_info in self.groups.values() {
396 |             group_info.write(writer)?;
397 |         }
398 | 
399 |         let size = writer.stream_position()?;
400 | 
401 |         // DescriptorLocation
402 |         writer.seek(SeekFrom::Start(descriptor_locations_offset))?;
403 |         for descriptor_location in &self.descriptor_locations {
404 |             descriptor_location.write(writer)?;
405 |         }
406 | 
407 |         // KFCHeader
408 |         let header = KFCHeader {
409 |             size,
410 | 
411 |             version: KFCLocation::new(version_offset, self.version.len()),
412 |             dat_infos: KFCLocation::new(dat_infos_offset, self.dat_infos.len()),
413 | 
414 |             descriptor_locations: KFCLocation::new(descriptor_locations_offset, self.descriptor_locations.len()),
415 |             descriptor_indices: KFCLocation::new(descriptor_indices_offset, self.descriptor_indices.len()),
416 | 
417 |             blob_buckets: KFCLocation::new(blob_buckets_offset, self.blobs.buckets().len()),
418 |             blob_guids: KFCLocation::new(blob_guids_offset, self.blobs.len()),
419 |             blob_links: KFCLocation::new(blob_links_offset, self.blobs.len()),
420 | 
421 |             descriptor_buckets: KFCLocation::new(descriptor_buckets_offset, self.descriptors.buckets().len()),
422 |             descriptor_guids: KFCLocation::new(descriptor_guids_offset, self.descriptors.len()),
423 |             descriptor_links: KFCLocation::new(descriptor_links_offset, self.descriptors.len()),
424 | 
425 |             group_buckets: KFCLocation::new(group_buckets_offset, self.groups.buckets().len()),
426 |             group_hashes: KFCLocation::new(group_hashes_offset, self.groups.len()),
427 |             group_infos: KFCLocation::new(group_infos_offset, self.groups.len()),
428 | 
429 |             ..Default::default()
430 |         };
431 | 
432 |         writer.seek(SeekFrom::Start(0))?;
433 |         header.write(writer)?;
434 | 
435 |         writer.seek(SeekFrom::Start(size))?;
436 | 
437 |         Ok(())
438 |     }
439 | 
440 | }
441 | 
--------------------------------------------------------------------------------
/parser/src/data/image.rs:
--------------------------------------------------------------------------------
1 | use bcdec_rs::bc6h_float;
2 | use half::f16;
3 | use serde::{Deserialize, Serialize};
4 | use texture2ddecoder::{decode_bc1, decode_bc3, decode_bc4, decode_bc5, decode_bc7};
5 | 
6 | #[allow(non_camel_case_types)]
7 | #[derive(Debug, Copy, Clone, PartialEq, Eq, Serialize, Deserialize)]
8 | pub enum PixelFormat {
9 |     None,
10 |     R4G4_unorm_pack8,
11 |     R4G4B4A4_unorm_pack16,
12 |     B4G4R4A4_unorm_pack16,
13 |     R5G6B5_unorm_pack16,
14 |     B5G6R5_unorm_pack16,
15 |     R5G5B5A1_unorm_pack16,
16 |     B5G5R5A1_unorm_pack16,
17 |     A1R5G5B5_unorm_pack16,
18 |     R8_unorm,
19 |     R8_snorm,
20 |     R8_uscaled,
21 |     R8_sscaled,
22 |     R8_uint,
23 |     R8_sint,
24 |     R8_srgb,
25 |     R8G8_unorm,
26 |     R8G8_snorm,
27 |     R8G8_uscaled,
28 |     R8G8_sscaled,
29 |     R8G8_uint,
30 |     R8G8_sint,
31 |     R8G8_srgb,
32 |     R8G8B8_unorm,
33 |     R8G8B8_snorm,
34 |     R8G8B8_uscaled,
35 |     R8G8B8_sscaled,
36 |     R8G8B8_uint,
37 |     R8G8B8_sint,
38 |     R8G8B8_srgb,
39 |     B8G8R8_unorm,
40 |     B8G8R8_snorm,
41 |     B8G8R8_uscaled,
42 |     B8G8R8_sscaled,
43 |     B8G8R8_uint,
44 |     B8G8R8_sint,
45 |     B8G8R8_srgb,
46 |     R8G8B8A8_unorm,
47 |     R8G8B8A8_snorm,
48 |     R8G8B8A8_uscaled,
49 |     R8G8B8A8_sscaled,
50 |     R8G8B8A8_uint,
51 |     R8G8B8A8_sint,
52 |     R8G8B8A8_srgb,
53 |     B8G8R8A8_unorm,
54 |     B8G8R8A8_snorm,
55 |     B8G8R8A8_uscaled,
56 |     B8G8R8A8_sscaled,
57 |     B8G8R8A8_uint,
58 |     B8G8R8A8_sint,
59 |     B8G8R8A8_srgb,
60 |     A8B8G8R8_unorm_pack32,
61 |     A8B8G8R8_snorm_pack32,
62 |     A8B8G8R8_uscaled_pack32,
63 |     A8B8G8R8_sscaled_pack32,
64 |     A8B8G8R8_uint_pack32,
65 |     A8B8G8R8_sint_pack32,
66 |     A8B8G8R8_srgb_pack32,
67 |     A2R10G10B10_unorm_pack32,
68 |     A2R10G10B10_snorm_pack32,
69 |     A2R10G10B10_uscaled_pack32,
70 |     A2R10G10B10_sscaled_pack32,
71 |     A2R10G10B10_uint_pack32,
72 |     A2R10G10B10_sint_pack32,
73 |     A2B10G10R10_unorm_pack32,
74 |     A2B10G10R10_snorm_pack32,
75 |     A2B10G10R10_uscaled_pack32,
76 |     A2B10G10R10_sscaled_pack32,
77 |     A2B10G10R10_uint_pack32,
78 |     A2B10G10R10_sint_pack32,
79 |     R16_unorm,
80 |     R16_snorm,
81 |     R16_uscaled,
82 |     R16_sscaled,
83 |     R16_uint,
84 |     R16_sint,
85 |     R16_sfloat,
86 |     R16G16_unorm,
87 |     R16G16_snorm,
88 |     R16G16_uscaled,
89 |     R16G16_sscaled,
90 |     R16G16_uint,
91 |     R16G16_sint,
92 |     R16G16_sfloat,
93 |     R16G16B16_unorm,
94 |     R16G16B16_snorm,
95 |     R16G16B16_uscaled,
96 |     R16G16B16_sscaled,
97 |     R16G16B16_uint,
98 |     R16G16B16_sint,
99 |     R16G16B16_sfloat,
100 |     R16G16B16A16_unorm,
101 |     R16G16B16A16_snorm,
102 |     R16G16B16A16_uscaled,
103 |     R16G16B16A16_sscaled,
104 |     R16G16B16A16_uint,
105 |     R16G16B16A16_sint,
106 |     R16G16B16A16_sfloat,
107 |     R32_uint,
108 |     R32_sint,
109 |     R32_sfloat,
110 |     R32G32_uint,
111 |     R32G32_sint,
112 |     R32G32_sfloat,
113 |     R32G32B32_uint,
114 |     R32G32B32_sint,
115 |     R32G32B32_sfloat,
116 |     R32G32B32A32_uint,
117 |     R32G32B32A32_sint,
118 |     R32G32B32A32_sfloat,
119 |     R64_uint,
120 |     R64_sint,
121 |     R64_sfloat,
122 |     R64G64_uint,
123 |     R64G64_sint,
124 |     R64G64_sfloat,
125 |     R64G64B64_uint,
126 |     R64G64B64_sint,
127 |     R64G64B64_sfloat,
128 |     R64G64B64A64_uint,
129 |     R64G64B64A64_sint,
130 |     R64G64B64A64_sfloat,
131 |     B10G11R11_ufloat_pack32,
132 |     E5B9G9R9_ufloat_pack32,
133 |     D16_unorm,
134 |     X8_D24_unorm_pack32,
135 |     D32_sfloat,
136 |     S8_uint,
137 |     D16_unorm_S8_uint,
138 |     D24_unorm_S8_uint,
139 |     D32_sfloat_S8_uint,
140 |     BC1_RGB_unorm_block,
141 |     BC1_RGB_srgb_block,
142 |     BC1_RGBA_unorm_block,
143 |     BC1_RGBA_srgb_block,
144 |     BC2_unorm_block,
145 |     BC2_srgb_block,
146 |     BC3_unorm_block,
147 |     BC3_srgb_block,
148 |     BC4_unorm_block,
149 |     BC4_snorm_block,
150 |     BC5_unorm_block,
151 |     BC5_snorm_block,
152 |     BC6H_ufloat_block,
153 |     BC6H_sfloat_block,
154 |     BC7_unorm_block,
155 |     BC7_srgb_block,
156 | }
157 | 
158 | macro_rules! map_component {
159 |     (U8, $value:expr, $index:expr) => { $value[$index] };
160 |     (S8, $value:expr, $index:expr) => { ($value[$index] & 0x7F) | (!$value[$index] & 0x80) };
161 |     (U16, $value:expr, $index:expr) => { map_component!(U8, $value, $index + 1) };
162 |     (S16, $value:expr, $index:expr) => { map_component!(S8, $value, $index + 1) };
163 |     (U32, $value:expr, $index:expr) => { map_component!(U8, $value, $index + 3) };
164 |     (S32, $value:expr, $index:expr) => { map_component!(S8, $value, $index + 3) };
165 |     (U64, $value:expr, $index:expr) => { map_component!(U8, $value, $index + 7) };
166 |     (S64, $value:expr, $index:expr) => { map_component!(S8, $value, $index + 7) };
167 |     (F16, $value:expr, $index:expr) => {
168 |         (f16::from_le_bytes($value[$index..$index + 2].try_into()?).to_f32()) * 255.0
169 |     };
170 |     (F32, $value:expr, $index:expr) => {
171 |         {
172 |             let data = u32::from_le_bytes($value[$index..$index + 4].try_into()?);
173 |             f32::from_bits(data) * 255.0 // scale a [0, 1] float into the 0..=255 byte range, as in the F16 arm
174 |         }
175 |     };
176 |     (F64, $value:expr, $index:expr) => {
177 |         {
178 |             let data = u64::from_le_bytes($value[$index..$index + 8].try_into()?);
179 |             f64::from_bits(data) * 255.0 // scale a [0, 1] float into the 0..=255 byte range, as in the F16 arm
180 |         }
181 |     };
182 | }
183 | 
184 | macro_rules! convert_pixel {
185 |     (R, $ty:ident, $pixel:ident, $size:expr) => {
186 |         map_component!($ty, $pixel, 0) as u32
187 |     };
188 |     (RG, $ty:ident, $pixel:ident, $size:expr) => {
189 |         {
190 |             let step = $size / 2;
191 |             let r = map_component!($ty, $pixel, step * 0) as u32;
192 |             let g = map_component!($ty, $pixel, step * 1) as u32;
193 | 
194 |             g << 8 | r
195 |         }
196 |     };
197 |     (RGB, $ty:ident, $pixel:ident, $size:expr) => {
198 |         {
199 |             let step = $size / 3;
200 |             let r = map_component!($ty, $pixel, step * 0) as u32;
201 |             let g = map_component!($ty, $pixel, step * 1) as u32;
202 |             let b = map_component!($ty, $pixel, step * 2) as u32;
203 | 
204 |             b << 16 | g << 8 | r
205 |         }
206 |     };
207 |     (BGR, $ty:ident, $pixel:ident, $size:expr) => {
208 |         {
209 |             let step = $size / 3;
210 |             let r = map_component!($ty, $pixel, step * 2) as u32;
211 |             let g = map_component!($ty, $pixel, step * 1) as u32;
212 |             let b = map_component!($ty, $pixel, step * 0) as u32;
213 | 
214 |             b << 16 | g << 8 | r
215 |         }
216 |     };
217 |     (RGBA, $ty:ident, $pixel:ident, $size:expr) => {
218 |         {
219 |             let step = $size / 4;
220 |             let r = map_component!($ty, $pixel, step * 0) as u32;
221 |             let g = map_component!($ty, $pixel, step * 1) as u32;
222 |             let b = map_component!($ty, $pixel, step * 2) as u32;
223 |             let a = map_component!($ty, $pixel, step * 3) as u32;
224 | 
225 |             a << 24 | b << 16 | g << 8 | r
226 |         }
227 |     };
228 |     (BGRA, $ty:ident, $pixel:ident, $size:expr) => {
229 |         {
230 |             let step = $size / 4;
231 |             let r = map_component!($ty, $pixel, step * 2) as u32;
232 |             let g = map_component!($ty, $pixel, step * 1) as u32;
233 |             let b = map_component!($ty, $pixel, step * 0) as u32;
234 |             let a = map_component!($ty, $pixel, step * 3) as u32;
235 | 
236 |             a << 24 | b << 16 | g << 8 | r
237 |         }
238 |     };
239 |     (ABGR, $ty:ident, $pixel:ident, $size:expr) => {
240 |         {
241 |             let step = $size / 4;
242 |             let r = map_component!($ty, $pixel, step * 3) as u32;
243 |             let g = map_component!($ty, $pixel, step * 2) as u32;
244 |             let b = map_component!($ty, $pixel, step * 1) as u32;
245 |             let a = map_component!($ty, $pixel, step * 0) as u32;
246 | 
247 |             a << 24 | b << 16 | g << 8 | r
248 |         }
249 |     };
250 | }
251 | 
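// Worked example (added for clarity): `convert_pixel!(RGBA, U8, pixel, 4)`
// reads one byte per channel (step = 4 / 4 = 1) and packs them as 0xAABBGGRR,
// so the bytes [0x11, 0x22, 0x33, 0x44] become the u32 0x44332211.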
252 | macro_rules! convert_image {
253 |     ($reader:expr, $width:expr, $height:expr, $pixels:expr, $format:ident, $ty:ident, $size:expr) => {
254 |         for (i, pixel) in (&$reader[..$width * $height * $size]).chunks($size).enumerate() {
255 |             $pixels[i] = convert_pixel!($format, $ty, pixel, $size);
256 |         }
257 |     };
258 | }
259 | 
260 | pub fn deserialize_image(
261 |     format: PixelFormat,
262 |     width: usize,
263 |     height: usize,
264 |     reader: &[u8],
265 | ) -> anyhow::Result<Vec<u32>> {
266 |     let mut pixels = vec![0; width * height];
267 | 
268 |     match format {
269 |         PixelFormat::R8_unorm |
270 |         PixelFormat::R8_uscaled |
271 |         PixelFormat::R8_uint |
272 |         PixelFormat::R8_srgb => convert_image!(reader, width, height, pixels, R, U8, 1),
273 |         PixelFormat::R8_snorm |
274 |         PixelFormat::R8_sscaled |
275 |         PixelFormat::R8_sint => convert_image!(reader, width, height, pixels, R, S8, 1),
276 | 
277 |         PixelFormat::R8G8_unorm |
278 |         PixelFormat::R8G8_uscaled |
279 |         PixelFormat::R8G8_uint |
280 |         PixelFormat::R8G8_srgb => convert_image!(reader, width, height, pixels, RG, U8, 2),
281 |         PixelFormat::R8G8_snorm |
282 |         PixelFormat::R8G8_sscaled |
283 |         PixelFormat::R8G8_sint => convert_image!(reader, width, height, pixels, RG, S8, 2),
284 | 
285 |         PixelFormat::R8G8B8_unorm |
286 |         PixelFormat::R8G8B8_uscaled |
287 |         PixelFormat::R8G8B8_uint |
288 |         PixelFormat::R8G8B8_srgb => convert_image!(reader, width, height, pixels, RGB, U8, 3),
289 |         PixelFormat::R8G8B8_snorm |
290 |         PixelFormat::R8G8B8_sscaled |
291 |         PixelFormat::R8G8B8_sint => convert_image!(reader, width, height, pixels, RGB, S8, 3),
292 | 
293 |         PixelFormat::B8G8R8_unorm |
294 |         PixelFormat::B8G8R8_uscaled |
295 |         PixelFormat::B8G8R8_uint |
296 |         PixelFormat::B8G8R8_srgb => convert_image!(reader, width, height, pixels, BGR, U8, 3),
297 |         PixelFormat::B8G8R8_snorm |
298 |         PixelFormat::B8G8R8_sscaled |
299 |         PixelFormat::B8G8R8_sint => convert_image!(reader, width, height, pixels, BGR, S8, 3),
300 | 
301 |         PixelFormat::R8G8B8A8_unorm |
302 |         PixelFormat::R8G8B8A8_uscaled |
303 |         PixelFormat::R8G8B8A8_uint |
304 |         PixelFormat::R8G8B8A8_srgb => convert_image!(reader, width, height, pixels, RGBA, U8, 4),
305 |         PixelFormat::R8G8B8A8_snorm |
306 |         PixelFormat::R8G8B8A8_sscaled |
307 |         PixelFormat::R8G8B8A8_sint => convert_image!(reader, width, height, pixels, RGBA, S8, 4),
308 | 
309 |         PixelFormat::B8G8R8A8_unorm |
310 |         PixelFormat::B8G8R8A8_uscaled |
311 |         PixelFormat::B8G8R8A8_uint |
312 |         PixelFormat::B8G8R8A8_srgb => convert_image!(reader, width, height, pixels, BGRA, U8, 4),
313 |         PixelFormat::B8G8R8A8_snorm |
314 |         PixelFormat::B8G8R8A8_sscaled |
315 |         PixelFormat::B8G8R8A8_sint => convert_image!(reader, width, height, pixels, BGRA, S8, 4),
316 | 
317 |         PixelFormat::A8B8G8R8_unorm_pack32 |
318 |         PixelFormat::A8B8G8R8_uscaled_pack32 |
319 |         PixelFormat::A8B8G8R8_uint_pack32 |
320 |         PixelFormat::A8B8G8R8_srgb_pack32 => convert_image!(reader, width, height, pixels, ABGR, U8, 4),
321 |         PixelFormat::A8B8G8R8_snorm_pack32 |
322 |         PixelFormat::A8B8G8R8_sscaled_pack32 |
323 |         PixelFormat::A8B8G8R8_sint_pack32 => convert_image!(reader, width, height, pixels, ABGR, S8, 4),
324 | 
325 |         // PixelFormat::A2R10G10B10_unorm_pack32 |
326 |         // PixelFormat::A2R10G10B10_snorm_pack32 |
327 |         // PixelFormat::A2R10G10B10_uscaled_pack32 |
328 |         // PixelFormat::A2R10G10B10_sscaled_pack32 |
329 |         // PixelFormat::A2R10G10B10_uint_pack32 |
330 |         // PixelFormat::A2R10G10B10_sint_pack32 => {
331 |         //     let reader = &reader[..width * height * 4];
332 |         //     for (i, pixel) in reader.chunks(4).enumerate() {

pub fn deserialize_image(
    format: PixelFormat,
    width: usize,
    height: usize,
    reader: &[u8],
) -> anyhow::Result<Vec<u32>> {
    let mut pixels = vec![0; width * height];

    match format {
        PixelFormat::R8_unorm |
        PixelFormat::R8_uscaled |
        PixelFormat::R8_uint |
        PixelFormat::R8_srgb => convert_image!(reader, width, height, pixels, R, U8, 1),
        PixelFormat::R8_snorm |
        PixelFormat::R8_sscaled |
        PixelFormat::R8_sint => convert_image!(reader, width, height, pixels, R, S8, 1),

        PixelFormat::R8G8_unorm |
        PixelFormat::R8G8_uscaled |
        PixelFormat::R8G8_uint |
        PixelFormat::R8G8_srgb => convert_image!(reader, width, height, pixels, RG, U8, 2),
        PixelFormat::R8G8_snorm |
        PixelFormat::R8G8_sscaled |
        PixelFormat::R8G8_sint => convert_image!(reader, width, height, pixels, RG, S8, 2),

        PixelFormat::R8G8B8_unorm |
        PixelFormat::R8G8B8_uscaled |
        PixelFormat::R8G8B8_uint |
        PixelFormat::R8G8B8_srgb => convert_image!(reader, width, height, pixels, RGB, U8, 3),
        PixelFormat::R8G8B8_snorm |
        PixelFormat::R8G8B8_sscaled |
        PixelFormat::R8G8B8_sint => convert_image!(reader, width, height, pixels, RGB, S8, 3),

        PixelFormat::B8G8R8_unorm |
        PixelFormat::B8G8R8_uscaled |
        PixelFormat::B8G8R8_uint |
        PixelFormat::B8G8R8_srgb => convert_image!(reader, width, height, pixels, BGR, U8, 3),
        PixelFormat::B8G8R8_snorm |
        PixelFormat::B8G8R8_sscaled |
        PixelFormat::B8G8R8_sint => convert_image!(reader, width, height, pixels, BGR, S8, 3),

        PixelFormat::R8G8B8A8_unorm |
        PixelFormat::R8G8B8A8_uscaled |
        PixelFormat::R8G8B8A8_uint |
        PixelFormat::R8G8B8A8_srgb => convert_image!(reader, width, height, pixels, RGBA, U8, 4),
        PixelFormat::R8G8B8A8_snorm |
        PixelFormat::R8G8B8A8_sscaled |
        PixelFormat::R8G8B8A8_sint => convert_image!(reader, width, height, pixels, RGBA, S8, 4),

        PixelFormat::B8G8R8A8_unorm |
        PixelFormat::B8G8R8A8_uscaled |
        PixelFormat::B8G8R8A8_uint |
        PixelFormat::B8G8R8A8_srgb => convert_image!(reader, width, height, pixels, BGRA, U8, 4),
        PixelFormat::B8G8R8A8_snorm |
        PixelFormat::B8G8R8A8_sscaled |
        PixelFormat::B8G8R8A8_sint => convert_image!(reader, width, height, pixels, BGRA, S8, 4),

        PixelFormat::A8B8G8R8_unorm_pack32 |
        PixelFormat::A8B8G8R8_uscaled_pack32 |
        PixelFormat::A8B8G8R8_uint_pack32 |
        PixelFormat::A8B8G8R8_srgb_pack32 => convert_image!(reader, width, height, pixels, ABGR, U8, 4),
        PixelFormat::A8B8G8R8_snorm_pack32 |
        PixelFormat::A8B8G8R8_sscaled_pack32 |
        PixelFormat::A8B8G8R8_sint_pack32 => convert_image!(reader, width, height, pixels, ABGR, S8, 4),

        // `_pack32` layouts store the named channels from most to least
        // significant bits of a little-endian u32, so A2R10G10B10 is
        // A in 30..=31, R in 20..=29, G in 10..=19 and B in 0..=9.
        // PixelFormat::A2R10G10B10_unorm_pack32 |
        // PixelFormat::A2R10G10B10_snorm_pack32 |
        // PixelFormat::A2R10G10B10_uscaled_pack32 |
        // PixelFormat::A2R10G10B10_sscaled_pack32 |
        // PixelFormat::A2R10G10B10_uint_pack32 |
        // PixelFormat::A2R10G10B10_sint_pack32 => {
        //     let reader = &reader[..width * height * 4];
        //     for (i, pixel) in reader.chunks(4).enumerate() {
        //         let data = u32::from_le_bytes(pixel.try_into()?);
        //
        //         let a = (data >> 30) & 0x3;
        //         let r = (data >> 20) & 0x3FF;
        //         let g = (data >> 10) & 0x3FF;
        //         let b = data & 0x3FF;
        //
        //         let a = (a * 0xFF) / 0x3;
        //         let r = (r * 0xFF) / 0x3FF;
        //         let g = (g * 0xFF) / 0x3FF;
        //         let b = (b * 0xFF) / 0x3FF;
        //
        //         pixels[i] = a << 24 | b << 16 | g << 8 | r;
        //     }
        // }

        // PixelFormat::A2B10G10R10_unorm_pack32 |
        // PixelFormat::A2B10G10R10_snorm_pack32 |
        // PixelFormat::A2B10G10R10_uscaled_pack32 |
        // PixelFormat::A2B10G10R10_sscaled_pack32 |
        // PixelFormat::A2B10G10R10_uint_pack32 |
        // PixelFormat::A2B10G10R10_sint_pack32 => {
        //     let reader = &reader[..width * height * 4];
        //     for (i, pixel) in reader.chunks(4).enumerate() {
        //         let data = u32::from_le_bytes(pixel.try_into()?);
        //
        //         let a = (data >> 30) & 0x3;
        //         let b = (data >> 20) & 0x3FF;
        //         let g = (data >> 10) & 0x3FF;
        //         let r = data & 0x3FF;
        //
        //         let a = (a * 0xFF) / 0x3;
        //         let b = (b * 0xFF) / 0x3FF;
        //         let g = (g * 0xFF) / 0x3FF;
        //         let r = (r * 0xFF) / 0x3FF;
        //
        //         pixels[i] = a << 24 | b << 16 | g << 8 | r;
        //     }
        // }

        PixelFormat::R16_unorm |
        PixelFormat::R16_uscaled |
        PixelFormat::R16_uint => convert_image!(reader, width, height, pixels, R, U16, 2),
        PixelFormat::R16_snorm |
        PixelFormat::R16_sscaled |
        PixelFormat::R16_sint => convert_image!(reader, width, height, pixels, R, S16, 2),
        PixelFormat::R16_sfloat => convert_image!(reader, width, height, pixels, R, F16, 2),

        PixelFormat::R16G16_unorm |
        PixelFormat::R16G16_uscaled |
        PixelFormat::R16G16_uint => convert_image!(reader, width, height, pixels, RG, U16, 4),
        PixelFormat::R16G16_snorm |
        PixelFormat::R16G16_sscaled |
        PixelFormat::R16G16_sint => convert_image!(reader, width, height, pixels, RG, S16, 4),
        PixelFormat::R16G16_sfloat => convert_image!(reader, width, height, pixels, RG, F16, 4),

        PixelFormat::R16G16B16_unorm |
        PixelFormat::R16G16B16_uscaled |
        PixelFormat::R16G16B16_uint => convert_image!(reader, width, height, pixels, RGB, U16, 6),
        PixelFormat::R16G16B16_snorm |
        PixelFormat::R16G16B16_sscaled |
        PixelFormat::R16G16B16_sint => convert_image!(reader, width, height, pixels, RGB, S16, 6),
        PixelFormat::R16G16B16_sfloat => convert_image!(reader, width, height, pixels, RGB, F16, 6),

        PixelFormat::R16G16B16A16_unorm |
        PixelFormat::R16G16B16A16_uscaled |
        PixelFormat::R16G16B16A16_uint => convert_image!(reader, width, height, pixels, RGBA, U16, 8),
        PixelFormat::R16G16B16A16_snorm |
        PixelFormat::R16G16B16A16_sscaled |
        PixelFormat::R16G16B16A16_sint => convert_image!(reader, width, height, pixels, RGBA, S16, 8),
        PixelFormat::R16G16B16A16_sfloat => convert_image!(reader, width, height, pixels, RGBA, F16, 8),

        // The commented 32/64-bit integer paths below keep only the most
        // significant byte of each channel, trading precision for the u32
        // output layout.
        // PixelFormat::R32_uint |
        // PixelFormat::R32_sint => {
        //     let reader = &reader[..width * height * 4];
        //     for (i, pixel) in reader.chunks(4).enumerate() {
        //         let data = u32::from_le_bytes(pixel.try_into()?) >> 24;
        //         pixels[i] = data;
        //     }
        // }
        //
        // PixelFormat::R32G32_uint |
        // PixelFormat::R32G32_sint => {
        //     let reader = &reader[..width * height * 8];
        //     for (i, pixel) in reader.chunks(8).enumerate() {
        //         let r = u32::from_le_bytes(pixel[0..4].try_into()?) >> 24;
        //         let g = u32::from_le_bytes(pixel[4..8].try_into()?) >> 24;
        //
        //         pixels[i] = g << 8 | r;
        //     }
        // }
        //
        // PixelFormat::R32G32B32_uint |
        // PixelFormat::R32G32B32_sint => {
        //     let reader = &reader[..width * height * 12];
        //     for (i, pixel) in reader.chunks(12).enumerate() {
        //         let r = u32::from_le_bytes(pixel[0..4].try_into()?) >> 24;
        //         let g = u32::from_le_bytes(pixel[4..8].try_into()?) >> 24;
        //         let b = u32::from_le_bytes(pixel[8..12].try_into()?) >> 24;
        //
        //         pixels[i] = b << 16 | g << 8 | r;
        //     }
        // }
        //
        // PixelFormat::R32G32B32A32_uint |
        // PixelFormat::R32G32B32A32_sint => {
        //     let reader = &reader[..width * height * 16];
        //     for (i, pixel) in reader.chunks(16).enumerate() {
        //         let r = u32::from_le_bytes(pixel[0..4].try_into()?) >> 24;
        //         let g = u32::from_le_bytes(pixel[4..8].try_into()?) >> 24;
        //         let b = u32::from_le_bytes(pixel[8..12].try_into()?) >> 24;
        //         let a = u32::from_le_bytes(pixel[12..16].try_into()?) >> 24;
        //
        //         pixels[i] = a << 24 | b << 16 | g << 8 | r;
        //     }
        // }
        //
        // PixelFormat::R64_uint |
        // PixelFormat::R64_sint => {
        //     let reader = &reader[..width * height * 8];
        //     for (i, pixel) in reader.chunks(8).enumerate() {
        //         let data = u64::from_le_bytes(pixel.try_into()?) >> 56;
        //         pixels[i] = data as u32;
        //     }
        // }
        //
        // PixelFormat::R64G64_uint |
        // PixelFormat::R64G64_sint => {
        //     let reader = &reader[..width * height * 16];
        //     for (i, pixel) in reader.chunks(16).enumerate() {
        //         let r = u64::from_le_bytes(pixel[0..8].try_into()?) >> 56;
        //         let g = u64::from_le_bytes(pixel[8..16].try_into()?) >> 56;
        //
        //         pixels[i] = (g << 8 | r) as u32;
        //     }
        // }
        //
        // PixelFormat::R64G64B64_uint |
        // PixelFormat::R64G64B64_sint => {
        //     let reader = &reader[..width * height * 24];
        //     for (i, pixel) in reader.chunks(24).enumerate() {
        //         let r = u64::from_le_bytes(pixel[0..8].try_into()?) >> 56;
        //         let g = u64::from_le_bytes(pixel[8..16].try_into()?) >> 56;
        //         let b = u64::from_le_bytes(pixel[16..24].try_into()?) >> 56;
        //
        //         pixels[i] = (b << 16 | g << 8 | r) as u32;
        //     }
        // }
        //
        // PixelFormat::R64G64B64A64_uint |
        // PixelFormat::R64G64B64A64_sint => {
        //     let reader = &reader[..width * height * 32];
        //     for (i, pixel) in reader.chunks(32).enumerate() {
        //         let r = u64::from_le_bytes(pixel[0..8].try_into()?) >> 56;
        //         let g = u64::from_le_bytes(pixel[8..16].try_into()?) >> 56;
        //         let b = u64::from_le_bytes(pixel[16..24].try_into()?) >> 56;
        //         let a = u64::from_le_bytes(pixel[24..32].try_into()?) >> 56;
        //
        //         pixels[i] = (a << 24 | b << 16 | g << 8 | r) as u32;
        //     }
        // }

        // TODO: sfloat variants of R32, R32G32, R32G32B32, R32G32B32A32,
        //       R64, R64G64, R64G64B64 and R64G64B64A64 (the R16 family is
        //       already handled above via F16).

        PixelFormat::BC1_RGB_unorm_block |
        PixelFormat::BC1_RGB_srgb_block |
        PixelFormat::BC1_RGBA_unorm_block |
        PixelFormat::BC1_RGBA_srgb_block =>
            decode_bc1(reader, width, height, &mut pixels).map_err(|e| anyhow::anyhow!(e))?,
        PixelFormat::BC2_unorm_block |
        PixelFormat::BC2_srgb_block =>
            return Err(anyhow::anyhow!("BC2 blocks are not supported")),
        PixelFormat::BC3_unorm_block |
        PixelFormat::BC3_srgb_block =>
            decode_bc3(reader, width, height, &mut pixels).map_err(|e| anyhow::anyhow!(e))?,
        PixelFormat::BC4_unorm_block |
        PixelFormat::BC4_snorm_block =>
            decode_bc4(reader, width, height, &mut pixels).map_err(|e| anyhow::anyhow!(e))?,
        PixelFormat::BC5_unorm_block |
        PixelFormat::BC5_snorm_block =>
            decode_bc5(reader, width, height, &mut pixels).map_err(|e| anyhow::anyhow!(e))?,
        PixelFormat::BC6H_ufloat_block =>
            decode_bc6h(reader, width, height, &mut pixels, false).map_err(|e| anyhow::anyhow!(e))?,
        PixelFormat::BC6H_sfloat_block =>
            decode_bc6h(reader, width, height, &mut pixels, true).map_err(|e| anyhow::anyhow!(e))?,
        PixelFormat::BC7_unorm_block |
        PixelFormat::BC7_srgb_block =>
            decode_bc7(reader, width, height, &mut pixels).map_err(|e| anyhow::anyhow!(e))?,
        _ => return Err(anyhow::anyhow!("Unsupported format: {:?}", format))
    };

    // `_bgr` is currently unused: it is kept for the commented-out export loop
    // below, which would swap R/B for pixels produced by the BC block decoders.
    let _bgr = matches!(
        format,
        PixelFormat::BC1_RGB_unorm_block |
        PixelFormat::BC1_RGB_srgb_block |
        PixelFormat::BC1_RGBA_unorm_block |
        PixelFormat::BC1_RGBA_srgb_block |
        PixelFormat::BC2_unorm_block |
        PixelFormat::BC2_srgb_block |
        PixelFormat::BC3_unorm_block |
        PixelFormat::BC3_srgb_block |
        PixelFormat::BC4_unorm_block |
        PixelFormat::BC4_snorm_block |
        PixelFormat::BC5_unorm_block |
        PixelFormat::BC5_snorm_block |
        PixelFormat::BC6H_ufloat_block |
        PixelFormat::BC6H_sfloat_block |
        PixelFormat::BC7_unorm_block |
        PixelFormat::BC7_srgb_block
    );

    // let mut x = 0;
    // let mut y = 0;

    // for pixel in pixels {
    //     let r = (pixel & 0xFF) as u8;
    //     let g = ((pixel >> 8) & 0xFF) as u8;
    //     let b = ((pixel >> 16) & 0xFF) as u8;
    //     let a = ((pixel >> 24) & 0xFF) as u8;

    //     if bgr {
    //         image.put_pixel(x, y, image::Rgba([b, g, r, a]));
    //     } else {
    //         image.put_pixel(x, y, image::Rgba([r, g, b, a]));
    //     }

    //     x += 1;

    //     if x == width as u32 {
    //         x = 0;
    //         y += 1;
    //     }
    // }

    Ok(pixels)
}
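
// Hedged usage sketch mirroring the commented-out export loop above: turn the
// packed 0xAABBGGRR pixels from `deserialize_image` into an `image::RgbaImage`
// and write it to disk. `save_as_png` is a hypothetical helper, not part of
// this crate's API, and it ignores the R/B swap that BC-decoded pixels may need.
#[allow(dead_code)]
fn save_as_png(
    format: PixelFormat,
    width: usize,
    height: usize,
    data: &[u8],
    path: &str,
) -> anyhow::Result<()> {
    let pixels = deserialize_image(format, width, height, data)?;
    let mut img = image::RgbaImage::new(width as u32, height as u32);

    for (i, &p) in pixels.iter().enumerate() {
        let x = (i % width) as u32;
        let y = (i / width) as u32;
        img.put_pixel(x, y, image::Rgba([
            (p & 0xFF) as u8,         // R lives in the low byte
            ((p >> 8) & 0xFF) as u8,  // G
            ((p >> 16) & 0xFF) as u8, // B
            ((p >> 24) & 0xFF) as u8, // A
        ]));
    }

    img.save(path)?;
    Ok(())
}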

fn decode_bc6h(
    data: &[u8],
    width: usize,
    height: usize,
    image: &mut [u32],
    signed: bool,
) -> Result<(), &'static str> {
    const BLOCK_WIDTH: usize = 4;
    const BLOCK_HEIGHT: usize = 4;
    const BLOCK_SIZE: usize = BLOCK_WIDTH * BLOCK_HEIGHT;
    let num_blocks_x: usize = (width + BLOCK_WIDTH - 1) / BLOCK_WIDTH;
    let num_blocks_y: usize = (height + BLOCK_HEIGHT - 1) / BLOCK_HEIGHT;

    if data.len() < num_blocks_x * num_blocks_y * 16 {
        return Err("Not enough data to decode image!");
    }

    if image.len() < width * height {
        return Err("Image buffer is too small!");
    }

    let mut block_buffer = [0f32; BLOCK_SIZE * 3];
    let mut data_offset = 0;

    for by in 0..num_blocks_y {
        for bx in 0..num_blocks_x {
            // Each BC6H block is 16 bytes and decodes to a 4x4 tile of RGB
            // floats; the destination pitch is 4 texels * 3 floats per row.
            bc6h_float(&data[data_offset..], &mut block_buffer, 4 * 3, signed);
            copy_block_buffer(
                bx,
                by,
                width,
                height,
                &block_buffer,
                image,
            );
            data_offset += 16;
        }
    }
    Ok(())
}
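
// Hedged sketch: the compressed size a BC-encoded mip level is expected to
// occupy, mirroring the `num_blocks_x * num_blocks_y * 16` bound checked in
// `decode_bc6h` above. `bytes_per_block` is 8 for BC1/BC4 and 16 for
// BC2/BC3/BC5/BC6H/BC7; the function name is hypothetical.
#[allow(dead_code)]
fn bc_compressed_len(width: usize, height: usize, bytes_per_block: usize) -> usize {
    // BC formats encode fixed 4x4 texel blocks, so dimensions round up.
    let blocks_x = (width + 3) / 4;
    let blocks_y = (height + 3) / 4;
    blocks_x * blocks_y * bytes_per_block
}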

#[inline]
fn pack_rgb(rgb: &[f32]) -> u32 {
    // BC6H produces HDR values that may exceed 1.0, so clamp before quantizing;
    // alpha is forced opaque to match the 0xAABBGGRR layout used elsewhere.
    let r = (rgb[0].clamp(0.0, 1.0) * 255.0) as u32;
    let g = (rgb[1].clamp(0.0, 1.0) * 255.0) as u32;
    let b = (rgb[2].clamp(0.0, 1.0) * 255.0) as u32;

    0xFF << 24 | b << 16 | g << 8 | r
}
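
// Optional alternative, shown as a sketch only: clamping in `pack_rgb` crushes
// BC6H highlights, and a Reinhard-style curve (x / (1 + x)) is one common way
// to compress HDR values into LDR instead. This helper is hypothetical and not
// called anywhere in this file.
#[allow(dead_code)]
#[inline]
fn pack_rgb_reinhard(rgb: &[f32]) -> u32 {
    let tonemap = |v: f32| {
        let v = v.max(0.0);
        ((v / (1.0 + v)) * 255.0) as u32
    };

    0xFF << 24 | tonemap(rgb[2]) << 16 | tonemap(rgb[1]) << 8 | tonemap(rgb[0])
}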

#[inline]
fn copy_block_buffer(
    bx: usize,
    by: usize,
    w: usize,
    h: usize,
    buffer: &[f32],
    image: &mut [u32],
) {
    let x: usize = 4 * bx;
    let copy_width: usize = if 4 * (bx + 1) > w { w - 4 * bx } else { 4 };

    let y_0 = by * 4;
    let copy_height: usize = if 4 * (by + 1) > h { h - y_0 } else { 4 };

    for (row, y) in (y_0..y_0 + copy_height).enumerate() {
        let image_offset = y * w + x;
        // The decoded block is always 4 texels wide (pitch = 4 * 3 floats), so
        // advance by the full row pitch even when the image clips the block.
        let mut buffer_offset = row * 4 * 3;

        for x in 0..copy_width {
            image[image_offset + x] = pack_rgb(&buffer[buffer_offset..buffer_offset + 3]);
            buffer_offset += 3;
        }
    }
}
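
// Hypothetical sanity check (not part of the original source): verifies that
// partial-width edge blocks advance through the 4-texel-wide block buffer by
// its full row pitch rather than by the number of texels actually copied.
#[cfg(test)]
mod copy_block_buffer_tests {
    use super::copy_block_buffer;

    #[test]
    fn partial_blocks_use_full_row_pitch() {
        // One 4x4 block of RGB floats (pitch = 12 floats per row); mark the
        // red channel of the first texel in the second row.
        let mut buffer = [0f32; 4 * 4 * 3];
        buffer[12] = 1.0;

        // A 3x3 image clips the block to 3 texels per row.
        let mut image = vec![0u32; 3 * 3];
        copy_block_buffer(0, 0, 3, 3, &buffer, &mut image);

        assert_eq!(image[3], 0xFF0000FF); // row 1, column 0: opaque red
        assert_eq!(image[0], 0xFF000000); // row 0, column 0: opaque black
    }
}
--------------------------------------------------------------------------------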