├── .gitignore
├── rust-toolchain.toml
├── release.toml
├── repak
│   ├── tests
│   │   ├── pack
│   │   │   └── root
│   │   │       ├── test.png
│   │   │       ├── test.txt
│   │   │       ├── directory
│   │   │       │   └── nested.txt
│   │   │       └── zeros.bin
│   │   ├── packs
│   │   │   ├── pack_v11.pak
│   │   │   ├── pack_v5.pak
│   │   │   ├── pack_v7.pak
│   │   │   ├── pack_v8a.pak
│   │   │   ├── pack_v8b.pak
│   │   │   ├── pack_v9.pak
│   │   │   ├── pack_v5_encrypt.pak
│   │   │   ├── pack_v7_encrypt.pak
│   │   │   ├── pack_v9_encrypt.pak
│   │   │   ├── pack_v11_compress.pak
│   │   │   ├── pack_v11_encrypt.pak
│   │   │   ├── pack_v5_compress.pak
│   │   │   ├── pack_v7_compress.pak
│   │   │   ├── pack_v8a_compress.pak
│   │   │   ├── pack_v8a_encrypt.pak
│   │   │   ├── pack_v8b_compress.pak
│   │   │   ├── pack_v8b_encrypt.pak
│   │   │   ├── pack_v9_compress.pak
│   │   │   ├── pack_v5_encryptindex.pak
│   │   │   ├── pack_v7_encryptindex.pak
│   │   │   ├── pack_v9_encryptindex.pak
│   │   │   ├── pack_v11_encryptindex.pak
│   │   │   ├── pack_v8a_encryptindex.pak
│   │   │   ├── pack_v8b_encryptindex.pak
│   │   │   ├── pack_v11_compress_encrypt.pak
│   │   │   ├── pack_v5_compress_encrypt.pak
│   │   │   ├── pack_v7_compress_encrypt.pak
│   │   │   ├── pack_v8a_compress_encrypt.pak
│   │   │   ├── pack_v8b_compress_encrypt.pak
│   │   │   ├── pack_v9_compress_encrypt.pak
│   │   │   ├── pack_v5_encrypt_encryptindex.pak
│   │   │   ├── pack_v7_encrypt_encryptindex.pak
│   │   │   ├── pack_v9_encrypt_encryptindex.pak
│   │   │   ├── pack_v11_compress_encryptindex.pak
│   │   │   ├── pack_v11_encrypt_encryptindex.pak
│   │   │   ├── pack_v5_compress_encryptindex.pak
│   │   │   ├── pack_v7_compress_encryptindex.pak
│   │   │   ├── pack_v8a_compress_encryptindex.pak
│   │   │   ├── pack_v8a_encrypt_encryptindex.pak
│   │   │   ├── pack_v8b_compress_encryptindex.pak
│   │   │   ├── pack_v8b_encrypt_encryptindex.pak
│   │   │   ├── pack_v9_compress_encryptindex.pak
│   │   │   ├── pack_v11_compress_encrypt_encryptindex.pak
│   │   │   ├── pack_v5_compress_encrypt_encryptindex.pak
│   │   │   ├── pack_v7_compress_encrypt_encryptindex.pak
│   │   │   ├── pack_v8a_compress_encrypt_encryptindex.pak
│   │   │   ├── pack_v8b_compress_encrypt_encryptindex.pak
│   │   │   └── pack_v9_compress_encrypt_encryptindex.pak
│   │   ├── config.example.sh
│   │   ├── crypto.json
│   │   ├── generate.sh
│   │   └── test.rs
│   ├── Cargo.toml
│   └── src
│       ├── reader.rs
│       ├── writer.rs
│       ├── error.rs
│       ├── ext.rs
│       ├── lib.rs
│       ├── footer.rs
│       ├── data.rs
│       ├── entry.rs
│       └── pak.rs
├── oodle_loader
│   ├── Cargo.toml
│   └── src
│       └── lib.rs
├── repak_cli
│   ├── tests
│   │   ├── docs.rs
│   │   └── cli.rs
│   ├── Cargo.toml
│   ├── wix
│   │   └── main.wxs
│   └── src
│       └── main.rs
├── Cargo.toml
├── LICENSE-MIT
├── README.md
├── LICENSE-APACHE
└── .github
    └── workflows
        └── release.yml

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
*target/
/repak/tests/config.sh

--------------------------------------------------------------------------------
/rust-toolchain.toml:
--------------------------------------------------------------------------------
[toolchain]
channel = "stable"

--------------------------------------------------------------------------------
/release.toml:
--------------------------------------------------------------------------------
allow-branch = ["master"]
publish = false
tag-prefix = ""

--------------------------------------------------------------------------------
/repak/tests/pack/root/test.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/pack/root/test.png

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v11.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v11.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v5.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v5.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v7.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v7.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v8a.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v8a.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v8b.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v8b.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v9.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v9.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v5_encrypt.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v5_encrypt.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v7_encrypt.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v7_encrypt.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v9_encrypt.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v9_encrypt.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v11_compress.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v11_compress.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v11_encrypt.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v11_encrypt.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v5_compress.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v5_compress.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v7_compress.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v7_compress.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v8a_compress.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v8a_compress.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v8a_encrypt.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v8a_encrypt.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v8b_compress.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v8b_compress.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v8b_encrypt.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v8b_encrypt.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v9_compress.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v9_compress.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v5_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v5_encryptindex.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v7_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v7_encryptindex.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v9_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v9_encryptindex.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v11_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v11_encryptindex.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v8a_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v8a_encryptindex.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v8b_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v8b_encryptindex.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v11_compress_encrypt.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v11_compress_encrypt.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v5_compress_encrypt.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v5_compress_encrypt.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v7_compress_encrypt.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v7_compress_encrypt.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v8a_compress_encrypt.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v8a_compress_encrypt.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v8b_compress_encrypt.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v8b_compress_encrypt.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v9_compress_encrypt.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v9_compress_encrypt.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v5_encrypt_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v5_encrypt_encryptindex.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v7_encrypt_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v7_encrypt_encryptindex.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v9_encrypt_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v9_encrypt_encryptindex.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v11_compress_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v11_compress_encryptindex.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v11_encrypt_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v11_encrypt_encryptindex.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v5_compress_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v5_compress_encryptindex.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v7_compress_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v7_compress_encryptindex.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v8a_compress_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v8a_compress_encryptindex.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v8a_encrypt_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v8a_encrypt_encryptindex.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v8b_compress_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v8b_compress_encryptindex.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v8b_encrypt_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v8b_encrypt_encryptindex.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v9_compress_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v9_compress_encryptindex.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v11_compress_encrypt_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v11_compress_encrypt_encryptindex.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v5_compress_encrypt_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v5_compress_encrypt_encryptindex.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v7_compress_encrypt_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v7_compress_encrypt_encryptindex.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v8a_compress_encrypt_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v8a_compress_encrypt_encryptindex.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v8b_compress_encrypt_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v8b_compress_encrypt_encryptindex.pak

--------------------------------------------------------------------------------
/repak/tests/packs/pack_v9_compress_encrypt_encryptindex.pak:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xavo95/repak/HEAD/repak/tests/packs/pack_v9_compress_encrypt_encryptindex.pak

--------------------------------------------------------------------------------
/oodle_loader/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "oodle_loader"
repository.workspace = true
authors.workspace = true
license.workspace = true
version.workspace = true
edition.workspace = true

[dependencies]
libloading = "0.8"
ureq = "2.12"
hex = { workspace = true }
sha2 = "0.10.8"
thiserror = "2.0.11"

--------------------------------------------------------------------------------
/repak/tests/config.example.sh:
--------------------------------------------------------------------------------
UNREAL_4_20=ue/4.20/Engine/Binaries/Linux/UnrealPak
UNREAL_4_21=ue/4.21/Engine/Binaries/Linux/UnrealPak
UNREAL_4_22=ue/4.22/Engine/Binaries/Linux/UnrealPak
UNREAL_4_23=ue/4.23/Engine/Binaries/Linux/UnrealPak
UNREAL_4_25=ue/4.25/Engine/Binaries/Linux/UnrealPak
UNREAL_4_27=ue/4.27/Engine/Binaries/Linux/UnrealPak

--------------------------------------------------------------------------------
/repak/tests/pack/root/test.txt:
--------------------------------------------------------------------------------
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.

--------------------------------------------------------------------------------
/repak/tests/pack/root/directory/nested.txt:
--------------------------------------------------------------------------------
Proin urna leo, placerat non tristique sed, commodo sit amet enim. Nam aliquet metus et turpis semper tempus. Aliquam vitae dolor aliquam, elementum augue non, molestie nisi. Maecenas aliquet sagittis elit, id elementum magna dictum sed. Vivamus nulla nulla, aliquet et magna ut, tempus ultrices diam. Donec posuere fringilla feugiat. Etiam imperdiet neque nec mollis ornare. Fusce mollis neque risus, ac molestie ligula sagittis vel. Nam tempus et ante eget egestas. Curabitur porta placerat nisi ut vehicula. Nunc suscipit lacinia leo nec tincidunt. Phasellus blandit arcu non pulvinar mollis.
--------------------------------------------------------------------------------
/repak_cli/tests/docs.rs:
--------------------------------------------------------------------------------
fn workspace_dir() -> std::path::PathBuf {
    let output = std::process::Command::new(env!("CARGO"))
        .arg("locate-project")
        .arg("--workspace")
        .arg("--message-format=plain")
        .output()
        .unwrap()
        .stdout;
    let cargo_path = std::path::Path::new(std::str::from_utf8(&output).unwrap().trim());
    cargo_path.parent().unwrap().to_path_buf()
}

#[test]
fn test_readme_help() {
    use assert_cmd::prelude::*;
    use std::process::Command;

    let err = Command::cargo_bin("repak").unwrap().unwrap_err();
    let help = std::str::from_utf8(&err.as_output().unwrap().stderr).unwrap();

    let readme = std::fs::read_to_string(workspace_dir().join("README.md")).unwrap();

    assert!(readme.contains(&format!("```console\n$ repak --help\n{help}```")));
}

--------------------------------------------------------------------------------
/repak/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "repak"
repository.workspace = true
authors.workspace = true
license.workspace = true
version.workspace = true
edition.workspace = true
keywords.workspace = true

[features]
default = ["compression", "encryption"]
compression = ["dep:flate2", "dep:zstd", "dep:lz4_flex"]
oodle = ["dep:oodle_loader", "compression"]
encryption = ["dep:aes"]
wuthering-waves = []
wuthering-waves-2_4 = []

[dependencies]
byteorder = "1.5"
aes = { workspace = true, optional = true }
flate2 = { version = "1.0", optional = true }
zstd = { version = "0.13", optional = true }
lz4_flex = { version = "0.11.3", optional = true }
oodle_loader = { path = "../oodle_loader", optional = true }
thiserror = "2.0"
sha1 = { workspace = true }
strum = { workspace = true }
hex.workspace = true

[dev-dependencies]
base64 = { workspace = true }
paste = "1.0.15"

--------------------------------------------------------------------------------
/Cargo.toml:
--------------------------------------------------------------------------------
[workspace]
resolver = "2"
members = ["repak", "repak_cli"]

[workspace.package]
repository = "https://github.com/trumank/repak"
authors = ["spuds", "trumank"]
license = "MIT OR Apache-2.0"
version = "0.2.2"
edition = "2021"
keywords = ["unreal-engine", "pak", "unrealpak"]

[workspace.dependencies]
aes = "^0.8.3"
base64 = "0.22.1"
strum = { version = "0.26", features = ["derive"] }
sha1 = "0.10"
hex = "0.4"

# Config for 'cargo dist'
[workspace.metadata.dist]
# The preferred cargo-dist version to use in CI (Cargo.toml SemVer syntax)
cargo-dist-version = "0.7.1"
# CI backends to support
ci = ["github"]
# The installers to generate for each app
installers = ["shell", "powershell", "msi"]
# Target platforms to build apps for (Rust target-triple syntax)
targets = ["x86_64-unknown-linux-gnu", "x86_64-pc-windows-msvc"]
# Publish jobs to run in CI
pr-run-mode = "plan"

# The profile that 'cargo dist' will build with
[profile.dist]
inherits = "release"
lto = "thin"
--------------------------------------------------------------------------------
/LICENSE-MIT:
--------------------------------------------------------------------------------
MIT License

Copyright 2024 Truman Kilen, spuds

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

--------------------------------------------------------------------------------
/repak_cli/Cargo.toml:
--------------------------------------------------------------------------------
[package]
name = "repak_cli"
description = "Utility for building/extracting/inspecting Unreal Engine .pak files"
repository.workspace = true
authors.workspace = true
license.workspace = true
version.workspace = true
edition.workspace = true

[package.metadata.wix]
upgrade-guid = "723E95D8-E87B-46BC-AD3B-C128583967CF"
path-guid = "F58A5112-1B44-49DA-8772-4E68F01E4AB7"
license = false
eula = false

[[bin]]
name = "repak"
path = "src/main.rs"

[features]
default = ["oodle"]
oodle = ["repak/oodle"]

[dependencies]
repak = { path = "../repak" }
aes = { workspace = true }
base64 = { workspace = true }
clap = { version = "4.5.26", features = ["derive"] }
hex = { workspace = true }
indicatif = { version = "0.17.9", features = ["rayon"] }
path-clean = "1.0.1"
path-slash = "0.2.1"
rayon = "1.10.0"
sha2 = "0.10.8"
strum = { workspace = true }
itertools = "0.14.0"
glob = "0.3.2"

[dev-dependencies]
assert_cmd = "2.0.16"
dir-diff = "0.3.3"
indoc = "2.0.5"
tempfile = "3.15.0"

--------------------------------------------------------------------------------
/repak/tests/crypto.json:
--------------------------------------------------------------------------------
{
    "$types":{
        "UnrealBuildTool.EncryptionAndSigning+CryptoSettings, UnrealBuildTool, Version=4.0.0.0, Culture=neutral, PublicKeyToken=null":"1",
        "UnrealBuildTool.EncryptionAndSigning+EncryptionKey, UnrealBuildTool, Version=4.0.0.0, Culture=neutral, PublicKeyToken=null":"2",
        "UnrealBuildTool.EncryptionAndSigning+SigningKeyPair, UnrealBuildTool, Version=4.0.0.0, Culture=neutral, PublicKeyToken=null":"3",
        "UnrealBuildTool.EncryptionAndSigning+SigningKey, UnrealBuildTool, Version=4.0.0.0, Culture=neutral, PublicKeyToken=null":"4"
    },
    "$type":"1",
    "EncryptionKey":{
        "$type":"2",
        "Name":"key",
        "Guid":"00000000000000000000000000000000",
        "Key":"lNJbw660IOC+kU7cnVQ1oeqrXyhk4J6UAZrCBbcnp94="
    },
    "SigningKey": null,
    "bEnablePakSigning":true,
    "bEnablePakIndexEncryption":true,
    "bEnablePakIniEncryption":true,
    "bEnablePakUAssetEncryption":true,
    "bEnablePakFullAssetEncryption":false,
    "bDataCryptoRequired":true,
    "PakEncryptionRequired":true,
    "PakSigningRequired":true,
    "SecondaryEncryptionKeys":[

    ]
}

--------------------------------------------------------------------------------
/repak/tests/generate.sh:
--------------------------------------------------------------------------------
#!/bin/bash

set -a
. config.sh
set +a

unreal-version-for() {
    case "$1" in
        5)
            echo -n "$UNREAL_4_20"
            ;;
        7)
            echo -n "$UNREAL_4_21"
            ;;
        8a)
            echo -n "$UNREAL_4_22"
            ;;
        8b)
            echo -n "$UNREAL_4_23"
            ;;
        9)
            echo -n "$UNREAL_4_25"
            ;;
        11)
            echo -n "$UNREAL_4_27"
            ;;
    esac
}

generate() {
    rm -r packs && mkdir packs
    _version=(5 7 8a 8b 9 11)
    _compress=("" "-compress")
    _encrypt=("" "-encrypt")
    _encryptindex=("" "-encryptindex")
    echo "\"$(realpath "pack/*")\" \"../mount/point/\"" > input.txt
    for version in "${_version[@]}"; do
        for compress in "${_compress[@]}"; do
            for encrypt in "${_encrypt[@]}"; do
                for encryptindex in "${_encryptindex[@]}"; do
                    name="$version$compress$encrypt$encryptindex"
                    "$(unreal-version-for "$version")" "$(realpath "packs/pack_v${name//-/_}.pak")" -Create="$(realpath input.txt)" -cryptokeys="$(realpath crypto.json)" ${compress:+"$compress"} ${encrypt:+"$encrypt"} ${encryptindex:+"$encryptindex"} &
                done
            done
        done
    done
    wait
    rm input.txt
}

if [ $# -eq 0 ]; then
    generate
else
    "$@"
fi

--------------------------------------------------------------------------------
/repak/src/reader.rs:
--------------------------------------------------------------------------------
use std::io;

use byteorder::{LE, ReadBytesExt};

use crate::Version;

pub(crate) fn flag_reader<R: io::Read>(reader: &mut R,
                                       version: super::Version) -> Result<u32, super::Error> {
    let bits = reader.read_u32::<LE>()?;
    #[cfg(not(feature = "wuthering-waves"))]
    { Ok(bits) }
    #[cfg(feature = "wuthering-waves")]
    if version == Version::V12 {
        reader.read_u8()?;
        Ok(
            (bits >> 16) & 0x3f |
                (bits & 0xFFFF) << 6 |
                (bits & (1 << 28)) >> 6 |
                (bits & 0x0FC00000) << 1 |
                bits & 0xE0000000
        )
    } else {
        Ok(bits)
    }
}

pub(crate) fn offset_reader<R: io::Read>(reader: &mut R,
                                         version: super::Version,
                                         bits: u32) -> Result<(u64, u64), super::Error> {
    let offset = read_safe(reader, bits, 31)?;
    let uncompressed = read_safe(reader, bits, 30)?;
    #[cfg(not(feature = "wuthering-waves"))]
    { Ok((offset, uncompressed)) }
    #[cfg(feature = "wuthering-waves")]
    if version == Version::V12 {
        Ok((uncompressed, offset))
    } else {
        Ok((offset, uncompressed))
    }
}

#[inline(always)]
pub(crate) fn read_safe<R: io::Read>(reader: &mut R,
                                     bits: u32,
                                     bit: u32) -> Result<u64, super::Error> {
    Ok(if (bits & (1 << bit)) != 0 {
        reader.read_u32::<LE>()? as u64
    } else {
        reader.read_u64::<LE>()?
    })
}

--------------------------------------------------------------------------------
/repak/tests/pack/root/zeros.bin:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/repak/src/writer.rs:
--------------------------------------------------------------------------------
use std::io;

use byteorder::{LE, WriteBytesExt};

use crate::Version;

pub(crate) fn flag_writer<W: io::Write>(writer: &mut W,
                                        version: super::Version,
                                        flags: u32) -> Result<(), super::Error> {
    #[cfg(not(feature = "wuthering-waves"))]
    writer.write_u32::<LE>(flags)?;
    #[cfg(feature = "wuthering-waves")]
    if version == Version::V12 {
        let tmp =
            ((flags & 0x3f) << 16) |
            ((flags >> 6) & 0xFFFF) |
            ((flags << 6) & (1 << 28)) | // (flags & (1 << 22)) << 6
            ((flags >> 1) & 0x0FC00000) | // (flags & 0x1F800000) >> 1
            flags & 0xE0000000;
        writer.write_u32::<LE>(tmp)?;
        writer.write_u8(0)?;
    } else {
        writer.write_u32::<LE>(flags)?;
    }
    Ok(())
}

pub(crate) fn offset_writer<W: io::Write>(writer: &mut W,
                                          version: super::Version,
                                          offset: u64,
                                          offset_safe_32: bool,
                                          uncompressed: u64,
                                          uncompressed_safe_32: bool) -> Result<(), super::Error> {
    #[cfg(not(feature = "wuthering-waves"))]
    {
        write_safe(writer, offset_safe_32, offset)?;
        write_safe(writer, uncompressed_safe_32, uncompressed)?;
    }
    #[cfg(feature = "wuthering-waves")]
    if version == Version::V12 {
        write_safe(writer, uncompressed_safe_32, uncompressed)?;
        write_safe(writer, offset_safe_32, offset)?;
    } else {
        write_safe(writer, offset_safe_32, offset)?;
        write_safe(writer, uncompressed_safe_32, uncompressed)?;
    }
    Ok(())
}

#[inline(always)]
pub(crate) fn write_safe<W: io::Write>(writer: &mut W, safe: bool, value: u64) -> Result<(), super::Error> {
    if safe {
        writer.write_u32::<LE>(value as u32)?
    } else {
        writer.write_u64::<LE>(value)?
    }
    Ok(())
}
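// Illustrative round-trip check (a sketch, not part of the original source):
// the V12 shuffle above is a pure bit permutation, so `flag_writer` followed
// by `flag_reader` must reproduce the original flags for any 32-bit value.
#[cfg(all(test, feature = "wuthering-waves"))]
mod flag_round_trip_example {
    use std::io::Cursor;

    #[test]
    fn v12_flags_survive_write_then_read() {
        for flags in [0u32, 0x3f, 0x1234_5678, 0xFFFF_FFFF] {
            let mut buf = Cursor::new(Vec::new());
            // write shuffles the bits and appends the extra V12 byte
            super::flag_writer(&mut buf, crate::Version::V12, flags).unwrap();
            buf.set_position(0);
            // read applies the inverse permutation and consumes that byte
            let read = crate::reader::flag_reader(&mut buf, crate::Version::V12).unwrap();
            assert_eq!(read, flags);
        }
    }
}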
--------------------------------------------------------------------------------
/repak/src/error.rs:
--------------------------------------------------------------------------------
use crate::Compression;

#[derive(thiserror::Error)]
pub enum Error {
    // dependency errors
    #[error("enum conversion: {0}")]
    Strum(#[from] strum::ParseError),

    #[error("expect 256 bit AES key as base64 or hex string")]
    Aes,

    // feature errors
    #[error("enable the compression feature to read compressed paks")]
    Compression,

    #[error("enable the encryption feature to read encrypted paks")]
    Encryption,

    #[error("enable the oodle feature to read Oodle compressed paks")]
    Oodle,

    // std errors
    #[error("io error: {0}")]
    Io(#[from] std::io::Error),

    #[error("fmt error: {0}")]
    Fmt(#[from] std::fmt::Error),

    #[error("utf8 conversion: {0}")]
    Utf8(#[from] std::string::FromUtf8Error),

    #[error("utf16 conversion: {0}")]
    Utf16(#[from] std::string::FromUtf16Error),

    #[error("bufwriter dereference: {0}")]
    IntoInner(#[from] std::io::IntoInnerError<std::io::BufWriter<Vec<u8>>>),

    // crate errors
    #[error("got {0}, which is not a boolean")]
    Bool(u8),

    #[error("found magic of {:#x} instead of {:#x}", .0, super::MAGIC)]
    Magic(u32),

    #[cfg(feature = "oodle")]
    #[error("Oodle loader error: {0}")]
    OodleFailed(#[from] oodle_loader::Error),

    #[error("No entry found at {0}")]
    MissingEntry(String),

    #[error("Prefix \"{prefix}\" does not match path \"{path}\"")]
    PrefixMismatch { prefix: String, path: String },

    #[error("Attempted to write to \"{0}\" which is outside of output directory")]
    WriteOutsideOutput(String),

    #[error("Output directory is not empty: \"{0}\"")]
    OutputNotEmpty(String),

    #[error("Input is not a directory: \"{0}\"")]
    InputNotADirectory(String),

    #[error("{0} decompression failed")]
    DecompressionFailed(Compression),

    #[error("used version {used} but pak is version {version}")]
    Version {
        used: super::VersionMajor,
        version: super::VersionMajor,
    },

    #[error("pak is encrypted but no key was provided")]
    Encrypted,

    #[error("error with OsString")]
    OsString(std::ffi::OsString),

    #[error("{0}version unsupported or is encrypted (possibly missing --aes-key?)")]
    UnsupportedOrEncrypted(String),

    #[error("{0}")]
    Other(String),
}

impl std::fmt::Debug for Error {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(self, f)
    }
}
--------------------------------------------------------------------------------
/repak/src/ext.rs:
--------------------------------------------------------------------------------
use byteorder::{ReadBytesExt, WriteBytesExt, LE};

pub trait BoolExt<T, E, F: Fn() -> Result<T, E>> {
    fn then_try(&self, f: F) -> Result<Option<T>, E>;
}

impl<T, E, F: Fn() -> Result<T, E>> BoolExt<T, E, F> for bool {
    fn then_try(&self, f: F) -> Result<Option<T>, E> {
        self.then(f).transpose()
    }
}

pub trait ReadExt {
    fn read_bool(&mut self) -> Result<bool, super::Error>;
    fn read_guid(&mut self) -> Result<[u8; 20], super::Error>;
    fn read_array<T>(
        &mut self,
        func: impl FnMut(&mut Self) -> Result<T, super::Error>,
    ) -> Result<Vec<T>, super::Error>;
    fn read_array_len<T>(
        &mut self,
        len: usize,
        func: impl FnMut(&mut Self) -> Result<T, super::Error>,
    ) -> Result<Vec<T>, super::Error>;
    fn read_string(&mut self) -> Result<String, super::Error>;
    fn read_len(&mut self, len: usize) -> Result<Vec<u8>, super::Error>;
}

pub trait WriteExt {
    fn write_bool(&mut self, value: bool) -> Result<(), super::Error>;
    fn write_string(&mut self, value: &str) -> Result<(), super::Error>;
}

impl<R: std::io::Read> ReadExt for R {
    fn read_bool(&mut self) -> Result<bool, super::Error> {
        match self.read_u8()? {
            1 => Ok(true),
            0 => Ok(false),
            err => Err(super::Error::Bool(err)),
        }
    }

    fn read_guid(&mut self) -> Result<[u8; 20], super::Error> {
        let mut guid = [0; 20];
        self.read_exact(&mut guid)?;
        Ok(guid)
    }

    fn read_array<T>(
        &mut self,
        func: impl FnMut(&mut Self) -> Result<T, super::Error>,
    ) -> Result<Vec<T>, super::Error> {
        let len = self.read_u32::<LE>()? as usize;
        self.read_array_len(len, func)
    }

    fn read_array_len<T>(
        &mut self,
        len: usize,
        mut func: impl FnMut(&mut Self) -> Result<T, super::Error>,
    ) -> Result<Vec<T>, super::Error> {
        let mut buf = Vec::with_capacity(len);
        for _ in 0..buf.capacity() {
            buf.push(func(self)?);
        }
        Ok(buf)
    }

    fn read_string(&mut self) -> Result<String, super::Error> {
        let len = self.read_i32::<LE>()?;
        if len < 0 {
            let chars = self.read_array_len((-len) as usize, |r| Ok(r.read_u16::<LE>()?))?;
            let length = chars.iter().position(|&c| c == 0).unwrap_or(chars.len());
            Ok(String::from_utf16(&chars[..length]).unwrap())
        } else {
            let mut chars = vec![0; len as usize];
            self.read_exact(&mut chars)?;
            let length = chars.iter().position(|&c| c == 0).unwrap_or(chars.len());
            Ok(String::from_utf8_lossy(&chars[..length]).into_owned())
        }
    }

    fn read_len(&mut self, len: usize) -> Result<Vec<u8>, super::Error> {
        let mut buf = vec![0; len];
        self.read_exact(&mut buf)?;
        Ok(buf)
    }
}

impl<W: std::io::Write> WriteExt for W {
    fn write_bool(&mut self, value: bool) -> Result<(), super::Error> {
        self.write_u8(match value {
            true => 1,
            false => 0,
        })?;
        Ok(())
    }
    fn write_string(&mut self, value: &str) -> Result<(), super::Error> {
        if value.is_empty() || value.is_ascii() {
            self.write_u32::<LE>(value.as_bytes().len() as u32 + 1)?;
            self.write_all(value.as_bytes())?;
            self.write_u8(0)?;
        } else {
            let chars: Vec<u16> = value.encode_utf16().collect();
            self.write_i32::<LE>(-(chars.len() as i32 + 1))?;
            for c in chars {
                self.write_u16::<LE>(c)?;
            }
            self.write_u16::<LE>(0)?;
        }
        Ok(())
    }
}
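// Illustrative round-trip (a sketch, not part of the original source): UE's
// FString convention writes a positive length prefix for null-terminated
// bytes and a negative one for null-terminated UTF-16 code units;
// `write_string` picks the encoding and `read_string` must undo either.
#[cfg(test)]
mod string_round_trip_example {
    use super::{ReadExt, WriteExt};
    use std::io::Cursor;

    #[test]
    fn ascii_and_utf16_strings_round_trip() {
        for s in ["plain ascii", "non-ascii: héllo"] {
            let mut buf = Cursor::new(Vec::new());
            buf.write_string(s).unwrap();
            buf.set_position(0);
            assert_eq!(buf.read_string().unwrap(), s);
        }
    }
}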
--------------------------------------------------------------------------------
/repak/src/lib.rs:
--------------------------------------------------------------------------------
#![allow(dead_code)]
mod data;
mod entry;
mod error;
mod ext;
mod footer;
mod pak;
mod reader;
mod writer;

pub use {data::PartialEntry, error::*, pak::*};

pub const MAGIC: u32 = 0x5A6F12E1;

#[derive(
    Clone,
    Copy,
    PartialEq,
    Eq,
    PartialOrd,
    Debug,
    strum::Display,
    strum::FromRepr,
    strum::EnumIter,
    strum::EnumString,
    strum::VariantNames,
)]
pub enum Version {
    V0,
    V1,
    V2,
    V3,
    V4,
    V5,
    V6,
    V7,
    V8A,
    V8B,
    V9,
    V10,
    V11,
    #[cfg(feature = "wuthering-waves")]
    V12,
}

#[repr(u32)]
#[derive(
    Clone, Copy, PartialEq, Eq, PartialOrd, Debug, strum::Display, strum::FromRepr, strum::EnumIter,
)]
/// Version actually written to the pak file
pub enum VersionMajor {
    Unknown,               // v0 unknown (mostly just for padding)
    Initial,               // v1 initial specification
    NoTimestamps,          // v2 timestamps removed
    CompressionEncryption, // v3 compression and encryption support
    IndexEncryption,       // v4 index encryption support
    RelativeChunkOffsets,  // v5 offsets are relative to header
    DeleteRecords,         // v6 record deletion support
    EncryptionKeyGuid,     // v7 include key GUID
    FNameBasedCompression, // v8 compression names included
    FrozenIndex,           // v9 frozen index byte included
    PathHashIndex,         // v10
    Fnv64BugFix,           // v11
    #[cfg(feature = "wuthering-waves")]
    WuwaCustom, // v12
}

// strum shouldn't need to be installed by users
impl Version {
    pub fn iter() -> std::iter::Rev<VersionIter> {
        <Version as strum::IntoEnumIterator>::iter().rev()
    }

    pub fn size(self) -> i64 {
        // (magic + version): u32 + (offset + size): u64 + hash: [u8; 20]
        let mut size = 4 + 4 + 8 + 8 + 20;
        if self.version_major() >= VersionMajor::EncryptionKeyGuid {
            // encryption uuid: u128
            size += 16;
        }
        if self.version_major() >= VersionMajor::IndexEncryption {
            // encrypted: bool
            size += 1;
        }
        if self.version_major() == VersionMajor::FrozenIndex {
            // frozen index: bool
            size += 1;
        }
        if self >= Version::V8A {
            // compression names: [[u8; 32]; 4]
            size += 32 * 4;
        }
        if self >= Version::V8B {
            // additional compression name
            size += 32;
        }
        size
    }

    /// Losslessly convert full version into major version
    pub fn version_major(&self) -> VersionMajor {
        match self {
            Version::V0 => VersionMajor::Unknown,
            Version::V1 => VersionMajor::Initial,
            Version::V2 => VersionMajor::NoTimestamps,
            Version::V3 => VersionMajor::CompressionEncryption,
            Version::V4 => VersionMajor::IndexEncryption,
            Version::V5 => VersionMajor::RelativeChunkOffsets,
            Version::V6 => VersionMajor::DeleteRecords,
            Version::V7 => VersionMajor::EncryptionKeyGuid,
            Version::V8A => VersionMajor::FNameBasedCompression,
            Version::V8B => VersionMajor::FNameBasedCompression,
            Version::V9 => VersionMajor::FrozenIndex,
            Version::V10 => VersionMajor::PathHashIndex,
            Version::V11 => VersionMajor::Fnv64BugFix,
            #[cfg(feature = "wuthering-waves")]
            Version::V12 => VersionMajor::WuwaCustom,
        }
    }
}

#[derive(
    Clone, Copy, PartialEq, Eq, Debug, strum::Display, strum::EnumString, strum::VariantNames,
)]
pub enum Compression {
    Zlib,
    Gzip,
    Oodle,
    Zstd,
    LZ4,
}

#[allow(clippy::large_enum_variant)]
#[derive(Debug, Default)]
pub(crate) enum Key {
    #[cfg(feature = "encryption")]
    Some(aes::Aes256),
    #[default]
    None,
}

#[cfg(feature = "encryption")]
impl From<aes::Aes256> for Key {
    fn from(value: aes::Aes256) -> Self {
        Self::Some(value)
    }
}
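// Quick sanity check of the size arithmetic above (a sketch, not part of the
// original source): V11 pays the 44-byte base footer plus the encryption key
// GUID (16), the encrypted flag (1), and five 32-byte compression names.
#[cfg(test)]
mod footer_size_example {
    #[test]
    fn v11_footer_size_adds_up() {
        assert_eq!(crate::Version::V11.size(), 44 + 16 + 1 + 5 * 32);
    }
}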
--------------------------------------------------------------------------------
/repak/src/footer.rs:
--------------------------------------------------------------------------------
use crate::{
    ext::{BoolExt, WriteExt},
    Hash,
};

use super::{ext::ReadExt, Compression, Version, VersionMajor};
use byteorder::{ReadBytesExt, WriteBytesExt, LE};
use std::str::FromStr;

#[derive(Debug)]
pub struct Footer {
    pub encryption_uuid: Option<u128>,
    pub encrypted: bool,
    pub magic: u32,
    pub version: Version,
    pub version_major: VersionMajor,
    pub index_offset: u64,
    pub index_size: u64,
    pub hash: Hash,
    pub frozen: bool,
    pub compression: Vec<Option<Compression>>,
}

impl Footer {
    pub fn read<R: std::io::Read>(reader: &mut R, version: Version) -> Result<Self, super::Error> {
        let encryption_uuid = (version.version_major() >= VersionMajor::EncryptionKeyGuid)
            .then_try(|| reader.read_u128::<LE>())?;
        let encrypted =
            version.version_major() >= VersionMajor::IndexEncryption && reader.read_bool()?;
        let magic = reader.read_u32::<LE>()?;
        let version_major =
            VersionMajor::from_repr(reader.read_u32::<LE>()?).unwrap_or(version.version_major());
        let index_offset = reader.read_u64::<LE>()?;
        let index_size = reader.read_u64::<LE>()?;
        let hash = Hash(reader.read_guid()?);
        let frozen = version.version_major() == VersionMajor::FrozenIndex && reader.read_bool()?;
        let compression = {
            let mut compression = Vec::with_capacity(match version {
                ver if ver < Version::V8A => 0,
                ver if ver < Version::V8B => 4,
                _ => 5,
            });
            for _ in 0..compression.capacity() {
                compression.push(
                    Compression::from_str(
                        &reader
                            .read_len(32)?
                            .iter()
                            // drop the null padding and convert to char
                            .filter_map(|&ch| (ch != 0).then_some(ch as char))
                            .collect::<String>(),
                    )
                    .ok(),
                )
            }
            if version.version_major() < VersionMajor::FNameBasedCompression {
                compression.push(Some(Compression::Zlib));
                compression.push(Some(Compression::Gzip));
                compression.push(Some(Compression::Oodle));
            }
            compression
        };
        if super::MAGIC != magic {
            return Err(super::Error::Magic(magic));
        }
        if version.version_major() != version_major {
            return Err(super::Error::Version {
                used: version.version_major(),
                version: version_major,
            });
        }
        Ok(Self {
            encryption_uuid,
            encrypted,
            magic,
            version,
            version_major,
            index_offset,
            index_size,
            hash,
            frozen,
            compression,
        })
    }

    pub fn write<W: std::io::Write>(&self, writer: &mut W) -> Result<(), super::Error> {
        if self.version_major >= VersionMajor::EncryptionKeyGuid {
            writer.write_u128::<LE>(0)?;
        }
        if self.version_major >= VersionMajor::IndexEncryption {
            writer.write_bool(self.encrypted)?;
        }
        writer.write_u32::<LE>(self.magic)?;
        writer.write_u32::<LE>(self.version_major as u32)?;
        writer.write_u64::<LE>(self.index_offset)?;
        writer.write_u64::<LE>(self.index_size)?;
        writer.write_all(&self.hash.0)?;
        if self.version_major == VersionMajor::FrozenIndex {
            writer.write_bool(self.frozen)?;
        }
        let algo_size = match self.version {
            ver if ver < Version::V8A => 0,
            ver if ver < Version::V8B => 4,
            _ => 5,
        };
        // TODO: handle if compression.len() > algo_size
        for i in 0..algo_size {
            let mut name = [0; 32];
            if let Some(algo) = self.compression.get(i).cloned().flatten() {
                for (i, b) in algo.to_string().as_bytes().iter().enumerate() {
                    name[i] = *b;
                }
            }
            writer.write_all(&name)?;
        }
        Ok(())
    }
}

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# repak

Library and CLI tool for working with Unreal Engine .pak files.

- Supports reading and writing a wide range of versions
- Easy-to-use API that still provides low-level control (see the sketch below):
  - Only the index is parsed up front; file data is read on request
  - Can rewrite the index in place to perform append or delete operations without rewriting the entire pak

`repak` CLI

- Sane handling of mount points: defaults to `../../../` but can be configured via flag
- 2x faster unpacking than `UnrealPak`, with speedups of up to 30x observed (on Linux, unpacking to a ramdisk)
- Unpacking is guarded against malicious paks that attempt to write to parent directories
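To make the library bullets concrete, reading a pak looks roughly like the following. This is a minimal sketch: the names `PakBuilder::reader`, `PakReader::files`, and `PakReader::get` follow the 0.2 API as this editor understands it, so check the crate docs for the exact signatures.

```rust
use std::{fs::File, io::BufReader};

fn main() -> Result<(), repak::Error> {
    let mut reader = BufReader::new(File::open("example.pak")?);
    // only the index is parsed here
    let pak = repak::PakBuilder::new().reader(&mut reader)?;
    for path in pak.files() {
        // file contents are read lazily, one entry at a time
        let data = pak.get(&path, &mut reader)?;
        println!("{path}: {} bytes", data.len());
    }
    Ok(())
}
```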
## cli
```console
$ repak --help
Usage: repak [OPTIONS] <COMMAND>

Commands:
  info       Print .pak info
  list       List .pak files
  hash-list  List .pak files and the SHA256 of their contents. Useful for finding differences between paks
  unpack     Unpack .pak file
  pack       Pack directory into .pak file
  get        Reads a single file to stdout
  help       Print this message or the help of the given subcommand(s)

Options:
  -a, --aes-key <AES_KEY>  256 bit AES encryption key as base64 or hex string if the pak is encrypted
  -h, --help               Print help
  -V, --version            Print version
```

### packing
```console
$ find mod
mod
mod/assets
mod/assets/AssetA.uasset
mod/assets/AssetA.uexp

$ repak pack -v mod
packing assets/AssetA.uasset
packing assets/AssetA.uexp
Packed 4 files to mod.pak

$ repak list mod.pak
assets/AssetA.uasset
assets/AssetA.uexp
```

### unpacking
```console
$ repak --aes-key 0x12345678 unpack MyEncryptedGame.pak
Unpacked 12345 files to MyEncryptedGame from MyEncryptedGame.pak
```

## compatibility

| UE Version   | Version | Version Feature       | Read               | Write                  |
|--------------|---------|-----------------------|--------------------|------------------------|
|              | 1       | Initial               | :grey_question:    | :grey_question:        |
| 4.0-4.2      | 2       | NoTimestamps          | :heavy_check_mark: | :heavy_check_mark:     |
| 4.3-4.15     | 3       | CompressionEncryption | :heavy_check_mark: | :heavy_check_mark:     |
| 4.16-4.19    | 4       | IndexEncryption       | :heavy_check_mark: | :heavy_check_mark:     |
| 4.20         | 5       | RelativeChunkOffsets  | :heavy_check_mark: | :heavy_check_mark:     |
|              | 6       | DeleteRecords         | :grey_question:    | :grey_question:        |
| 4.21         | 7       | EncryptionKeyGuid     | :heavy_check_mark: | :heavy_check_mark:     |
| 4.22         | 8A      | FNameBasedCompression | :heavy_check_mark: | :heavy_check_mark:     |
| 4.23-4.24    | 8B      | FNameBasedCompression | :heavy_check_mark: | :heavy_check_mark:     |
| 4.25         | 9       | FrozenIndex           | :heavy_check_mark: | :heavy_check_mark:[^1] |
|              | 10      | PathHashIndex         | :grey_question:    | :grey_question:        |
| 4.26-5.3[^2] | 11      | Fnv64BugFix           | :heavy_check_mark: | :heavy_check_mark:     |

| Feature         | Read               | Write           |
|-----------------|--------------------|-----------------|
| Compression     | :heavy_check_mark: | :wavy_dash:[^3] |
| Encrypted Index | :heavy_check_mark: | :x:             |
| Encrypted Data  | :heavy_check_mark: | :x:             |


[^1]: Except for paks compressed using the frozen index, which has significant
    complexity and only existed for UE 4.25 anyway.
[^2]: As of writing. Later versions are likely supported but untested.
[^3]: Zlib, Gzip, and Zstd are supported. Not all compression algorithms are
    available in all games.

Supports reading encrypted (index and/or data) and compressed paks. Writing
supports Zlib, Gzip, and Zstd compression (see the table above) but not
encryption yet.

## notes

### determinism

As far as I can tell, the index is not necessarily written deterministically by `UnrealPak`. `repak` uses `BTreeMap` in place of `HashMap` to write the index deterministically, and it *happens* to rewrite the test paks in the same order, but that is unlikely to keep holding on larger pak files.
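The `BTreeMap` point is easy to demonstrate in isolation: iteration order is always key order, independent of insertion order, so an index held in one serializes identically across runs (plain-Rust illustration, not repak code):

```rust
use std::collections::BTreeMap;

fn main() {
    let mut index = BTreeMap::new();
    // deliberately scrambled insertion order
    for path in ["b/asset.uexp", "c/data.bin", "a/asset.uasset"] {
        index.insert(path, 0u64);
    }
    // keys always come back sorted, so writes are deterministic
    let order: Vec<_> = index.keys().copied().collect();
    assert_eq!(order, ["a/asset.uasset", "b/asset.uexp", "c/data.bin"]);
}
```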
### full directory index

`UnrealPak` includes a directory entry in the full directory index for all parent directories back to the pak root for a given file path, regardless of whether those directories contain any files or just other directories. `repak` only includes directories that contain files. So far no functional differences have been observed as a result.

## acknowledgements
- [unpak](https://github.com/bananaturtlesandwich/unpak): original crate featuring read-only pak operations
- [rust-u4pak](https://github.com/panzi/rust-u4pak)'s README detailing the pak file layout
- [jieyouxu](https://github.com/jieyouxu) for the serialization implementation of the significantly more complex V11 index

--------------------------------------------------------------------------------
/repak_cli/tests/cli.rs:
--------------------------------------------------------------------------------
use assert_cmd::prelude::*;
use indoc::{formatdoc, indoc};
use std::process::Command;

const PAK: &str = "../repak/tests/packs/pack_v11.pak";

#[test]
fn test_cli_info() {
    let assert = Command::cargo_bin("repak")
        .unwrap()
        .arg("info")
        .arg(PAK)
        .assert();
    assert.success().stdout(indoc! {"
        mount point: ../mount/point/root/
        version: V11
        version major: Fnv64BugFix
        encrypted index: false
        encrytion guid: Some(00000000000000000000000000000000)
        path hash seed: Some(205C5A7D)
        4 file entries
    "});
}

#[test]
fn test_cli_list() {
    let assert = Command::cargo_bin("repak")
        .unwrap()
        .arg("list")
        .arg("-s")
        .arg("")
        .arg(PAK)
        .assert();
    assert.success().stdout(indoc! {r#"
        ../mount/point/root/directory/nested.txt
        ../mount/point/root/test.png
        ../mount/point/root/test.txt
        ../mount/point/root/zeros.bin
    "#});

    let assert = Command::cargo_bin("repak")
        .unwrap()
        .arg("list")
        .arg("-s")
        .arg("../mount")
        .arg(PAK)
        .assert();
    assert.success().stdout(indoc! {r#"
        point/root/directory/nested.txt
        point/root/test.png
        point/root/test.txt
        point/root/zeros.bin
    "#});

    let assert = Command::cargo_bin("repak")
        .unwrap()
        .arg("list")
        .arg("-s")
        .arg("../mount/root/asdf")
        .arg(PAK)
        .assert();
    assert.failure().stderr(indoc! {r#"
        Error: Prefix "../mount/root/asdf" does not match path "../mount/point/root/directory/nested.txt"
    "#});
}

#[test]
fn test_cli_get() {
    let assert = Command::cargo_bin("repak")
        .unwrap()
        .arg("get")
        .arg("-s")
        .arg("../mount/point")
        .arg(PAK)
        .arg("root/test.txt")
        .assert();
    assert.success().stdout(indoc! {r#"
        Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
    "#});

    let assert = Command::cargo_bin("repak")
        .unwrap()
        .arg("get")
        .arg("-s")
        .arg("../mount/point")
        .arg(PAK)
        .arg("root/doesnotexist.txt")
        .assert();
    assert.failure().stderr(indoc! {r#"
        Error: No entry found at doesnotexist.txt
    "#});
}

#[test]
fn test_cli_pack() {
    let dir = tempfile::tempdir().unwrap();

    let out_pak = dir.path().join("output.pak");
    let out_dir = dir.path().join("output");

    let assert = Command::cargo_bin("repak")
        .unwrap()
        .arg("pack")
        .arg("../repak/tests/pack/")
        .arg("-m")
        .arg("../mount/point/root")
        .arg("--version")
        .arg("V11")
        .arg(&out_pak)
        .assert();
    assert.success().stdout(formatdoc! {r#"
        Packed 4 files to {}
    "#, out_pak.to_string_lossy()});

    // TODO test packing to non-empty file

    let assert = Command::cargo_bin("repak")
        .unwrap()
        .arg("unpack")
        .arg(&out_pak)
        .arg("-s")
        .arg("../mount/point/root")
        .assert();
    assert.success().stdout(formatdoc! {r#"
        Unpacked 4 files to {} from {}
    "#, out_dir.to_string_lossy(), out_pak.to_string_lossy()});
    assert!(!dir_diff::is_different("../repak/tests/pack/", out_dir).unwrap());
}

#[test]
fn test_cli_unpack() {
    let dir = tempfile::tempdir().unwrap();

    let assert = Command::cargo_bin("repak")
        .unwrap()
        .arg("unpack")
        .arg(PAK)
        .arg("-s")
        .arg("../mount")
        .arg("-o")
        .arg(dir.path())
        .assert();
    assert.success().stdout(formatdoc! {r#"
        Unpacked 4 files to {} from ../repak/tests/packs/pack_v11.pak
    "#, &dir.path().to_string_lossy()});
    assert!(!dir_diff::is_different("../repak/tests/pack/", dir.path().join("point")).unwrap());

    // TODO test unpacking to non-empty directory
}

#[test]
fn test_cli_unpack_include() {
    let dir = tempfile::tempdir().unwrap();

    let assert = Command::cargo_bin("repak")
        .unwrap()
        .arg("unpack")
        .arg(PAK)
        .arg("-s")
        .arg("../mount")
        .arg("-i")
        .arg("point/**/*.txt")
        .arg("-o")
        .arg(dir.path())
        .assert();
    assert.success().stdout(formatdoc! {r#"
        Unpacked 2 files to {} from ../repak/tests/packs/pack_v11.pak
    "#, &dir.path().to_string_lossy()});
}

#[test]
fn test_cli_hashlist() {
    let assert = Command::cargo_bin("repak")
        .unwrap()
        .arg("hash-list")
        .arg(PAK)
        .arg("-s")
        .arg("../mount")
        .assert();
    assert.success().stdout(formatdoc! {r#"
        246c88de650fb20d63abaeb7c1bd8556d0ea260bf4579beafe0b2597e00270a5 point/root/directory/nested.txt
        d7d3e1c21a5b98621add61a4244a413abf5ad6413b0d25ba09bfd5536c75e3b1 point/root/test.png
        56293a80e0394d252e995f2debccea8223e4b5b2b150bee212729b3b39ac4d46 point/root/test.txt
        e5a00aa9991ac8a5ee3109844d84a55583bd20572ad3ffcd42792f3c36b183ad point/root/zeros.bin
    "#});
}
{r#" 181 | 246c88de650fb20d63abaeb7c1bd8556d0ea260bf4579beafe0b2597e00270a5 point/root/directory/nested.txt 182 | d7d3e1c21a5b98621add61a4244a413abf5ad6413b0d25ba09bfd5536c75e3b1 point/root/test.png 183 | 56293a80e0394d252e995f2debccea8223e4b5b2b150bee212729b3b39ac4d46 point/root/test.txt 184 | e5a00aa9991ac8a5ee3109844d84a55583bd20572ad3ffcd42792f3c36b183ad point/root/zeros.bin 185 | "#}); 186 | } 187 | -------------------------------------------------------------------------------- /repak/src/data.rs: -------------------------------------------------------------------------------- 1 | use std::io::Write; 2 | 3 | use crate::{ 4 | entry::{Block, Entry}, 5 | Compression, Error, Hash, Version, VersionMajor, 6 | }; 7 | 8 | type Result = std::result::Result; 9 | 10 | pub struct PartialEntry> { 11 | compression: Option, 12 | compressed_size: u64, 13 | uncompressed_size: u64, 14 | compression_block_size: u32, 15 | data: PartialEntryData, 16 | hash: Hash, 17 | } 18 | pub(crate) struct PartialBlock { 19 | uncompressed_size: usize, 20 | data: Vec, 21 | } 22 | pub(crate) enum PartialEntryData { 23 | Slice(D), 24 | Blocks(Vec), 25 | } 26 | 27 | #[cfg(feature = "compression")] 28 | fn get_compression_slot( 29 | version: Version, 30 | compression_slots: &mut Vec>, 31 | compression: Compression, 32 | ) -> Result { 33 | let slot = compression_slots 34 | .iter() 35 | .enumerate() 36 | .find(|(_, s)| **s == Some(compression)); 37 | Ok(if let Some((i, _)) = slot { 38 | // existing found 39 | i 40 | } else { 41 | if version.version_major() < VersionMajor::FNameBasedCompression { 42 | return Err(Error::Other(format!( 43 | "cannot use {compression:?} prior to FNameBasedCompression (pak version 8)" 44 | ))); 45 | } 46 | 47 | // find empty slot 48 | if let Some((i, empty_slot)) = compression_slots 49 | .iter_mut() 50 | .enumerate() 51 | .find(|(_, s)| s.is_none()) 52 | { 53 | // empty found, set it to used compression type 54 | *empty_slot = Some(compression); 55 | i 56 | } else { 57 | // no empty slot found, add a new one 58 | compression_slots.push(Some(compression)); 59 | compression_slots.len() - 1 60 | } 61 | } as u32) 62 | } 63 | 64 | impl> PartialEntry { 65 | pub(crate) fn build_entry( 66 | &self, 67 | version: Version, 68 | #[allow(unused)] compression_slots: &mut Vec>, 69 | file_offset: u64, 70 | ) -> Result { 71 | #[cfg(feature = "compression")] 72 | let compression_slot = self 73 | .compression 74 | .map(|c| get_compression_slot(version, compression_slots, c)) 75 | .transpose()?; 76 | #[cfg(not(feature = "compression"))] 77 | let compression_slot = None; 78 | 79 | let blocks = match &self.data { 80 | PartialEntryData::Slice(_) => None, 81 | PartialEntryData::Blocks(blocks) => { 82 | let entry_size = 83 | Entry::get_serialized_size(version, compression_slot, blocks.len() as u32); 84 | 85 | let mut offset = entry_size; 86 | if version.version_major() < VersionMajor::RelativeChunkOffsets { 87 | offset += file_offset; 88 | }; 89 | 90 | Some( 91 | blocks 92 | .iter() 93 | .map(|block| { 94 | let start = offset; 95 | offset += block.data.len() as u64; 96 | let end = offset; 97 | Block { start, end } 98 | }) 99 | .collect(), 100 | ) 101 | } 102 | }; 103 | 104 | Ok(Entry { 105 | offset: file_offset, 106 | compressed: self.compressed_size, 107 | uncompressed: self.uncompressed_size, 108 | compression_slot, 109 | timestamp: None, 110 | hash: Some(self.hash), 111 | blocks, 112 | flags: 0, 113 | compression_block_size: self.compression_block_size, 114 | }) 115 | } 116 | pub(crate) fn write_data(&self, stream: 
116 | pub(crate) fn write_data<S: Write>(&self, stream: &mut S) -> Result<()> {
117 | match &self.data {
118 | PartialEntryData::Slice(data) => {
119 | stream.write_all(data.as_ref())?;
120 | }
121 | PartialEntryData::Blocks(blocks) => {
122 | for block in blocks {
123 | stream.write_all(&block.data)?;
124 | }
125 | }
126 | }
127 | Ok(())
128 | }
129 | }
130 | 
131 | pub(crate) fn build_partial_entry<D>(
132 | allowed_compression: &[Compression],
133 | data: D,
134 | ) -> Result<PartialEntry<D>>
135 | where
136 | D: AsRef<[u8]>,
137 | {
138 | // TODO hash needs to be post-compression/encryption
139 | use sha1::{Digest, Sha1};
140 | let mut hasher = Sha1::new();
141 | 
142 | // TODO possibly select best compression based on some criteria instead of picking first
143 | let compression = allowed_compression.first().cloned();
144 | let uncompressed_size = data.as_ref().len() as u64;
145 | let compression_block_size;
146 | 
147 | let (data, compressed_size) = match compression {
148 | #[cfg(not(feature = "compression"))]
149 | Some(_) => {
150 | unreachable!("should not be able to reach this point without compression feature")
151 | }
152 | #[cfg(feature = "compression")]
153 | Some(compression) => {
154 | // https://github.com/EpicGames/UnrealEngine/commit/3aad0ff7976be1073005dca2c1282af548b45d89
155 | // Block size must fit into flags field or it may cause unreadable paks for earlier Unreal Engine versions
156 | compression_block_size = 0x3e << 11; // max possible block size
157 | let mut compressed_size = 0;
158 | let mut blocks = vec![];
159 | for chunk in data.as_ref().chunks(compression_block_size as usize) {
160 | let data = compress(compression, chunk)?;
161 | compressed_size += data.len() as u64;
162 | hasher.update(&data);
163 | blocks.push(PartialBlock {
164 | uncompressed_size: chunk.len(),
165 | data,
166 | })
167 | }
168 | 
169 | (PartialEntryData::Blocks(blocks), compressed_size)
170 | }
171 | None => {
172 | compression_block_size = 0;
173 | hasher.update(data.as_ref());
174 | (PartialEntryData::Slice(data), uncompressed_size)
175 | }
176 | };
177 | 
178 | Ok(PartialEntry {
179 | compression,
180 | compressed_size,
181 | uncompressed_size,
182 | compression_block_size,
183 | data,
184 | hash: Hash(hasher.finalize().into()),
185 | })
186 | }
187 | 
188 | #[cfg(feature = "compression")]
189 | fn compress(compression: Compression, data: &[u8]) -> Result<Vec<u8>> {
190 | use std::io::Write;
191 | 
192 | let compressed = match compression {
193 | Compression::Zlib => {
194 | let mut compress =
195 | flate2::write::ZlibEncoder::new(Vec::new(), flate2::Compression::fast());
196 | compress.write_all(data.as_ref())?;
197 | compress.finish()?
198 | }
199 | Compression::Gzip => {
200 | let mut compress =
201 | flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::fast());
202 | compress.write_all(data.as_ref())?;
203 | compress.finish()?
204 | }
205 | Compression::Zstd => zstd::stream::encode_all(data, 0)?,
206 | Compression::LZ4 => lz4_flex::block::compress(data),
207 | Compression::Oodle => {
208 | #[cfg(not(feature = "oodle"))]
209 | return Err(super::Error::Oodle);
210 | #[cfg(feature = "oodle")]
211 | {
212 | oodle_loader::oodle().unwrap().compress(
213 | data.as_ref(),
214 | oodle_loader::Compressor::Mermaid,
215 | oodle_loader::CompressionLevel::Normal,
216 | )?
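// Two details of this module worth spelling out: build_partial_entry caps
// compression_block_size at 0x3e << 11 = 126976 bytes because the encoded
// index entry stores (block_size >> 11) in a 6-bit field where 0x3f is an
// escape meaning "an explicit u32 follows" (see entry.rs), and this Oodle
// branch currently hardcodes the Mermaid compressor at Normal level rather
// than exposing a choice to callers.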
217 | } 218 | } 219 | }; 220 | 221 | Ok(compressed) 222 | } 223 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 
175 | 
176 | END OF TERMS AND CONDITIONS
177 | 
-------------------------------------------------------------------------------- /repak/tests/test.rs: --------------------------------------------------------------------------------
1 | #![cfg(feature = "default")]
2 | use byteorder::{ReadBytesExt, WriteBytesExt};
3 | use paste::paste;
4 | use std::io::{self, Cursor, Read, Seek, SeekFrom};
5 | 
6 | /// A reader that tracks how many times bytes in the inner reader have been read. Useful to check read coverage.
7 | #[derive(Debug)]
8 | pub struct ReadCounter<T> {
9 | inner: T,
10 | reads: io::Cursor<Vec<u8>>,
11 | }
12 | 
13 | impl<T> ReadCounter<T> {
14 | pub fn new(inner: T) -> Self {
15 | ReadCounter {
16 | inner,
17 | reads: Cursor::new(vec![]),
18 | }
19 | }
20 | pub fn new_size(inner: T, size: usize) -> Self {
21 | ReadCounter {
22 | inner,
23 | reads: Cursor::new(vec![0; size]),
24 | }
25 | }
26 | pub fn into_reads(self) -> Vec<u8> {
27 | self.reads.into_inner()
28 | }
29 | }
30 | 
31 | impl<T> Seek for ReadCounter<T>
32 | where
33 | T: Seek,
34 | {
35 | fn seek(&mut self, style: SeekFrom) -> io::Result<u64> {
36 | self.reads.seek(style).unwrap();
37 | self.inner.seek(style)
38 | }
39 | }
40 | 
41 | impl<T> Read for ReadCounter<T>
42 | where
43 | T: Read,
44 | {
45 | fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
46 | let read = self.inner.read(buf);
47 | if let Ok(read) = read {
48 | for _ in 0..read {
49 | let save = self.reads.position();
50 | let r = match self.reads.read_u8() {
51 | Ok(r) => {
52 | self.reads.seek(SeekFrom::Current(-1)).unwrap();
53 | Ok(r)
54 | }
55 | Err(ref e) if e.kind() == io::ErrorKind::UnexpectedEof => {
56 | // since rust 1.80 read_exact will move cursor position to end of internal
57 | // buffer so we have to reset it
58 | // ref https://github.com/rust-lang-ci/rust/commit/67b37f5054e4508694b7bd0b766e27f64cbd2d7f
59 | self.reads.seek(SeekFrom::Start(save)).unwrap();
60 | Ok(0)
61 | }
62 | Err(e) => Err(e),
63 | }
64 | .unwrap();
65 | self.reads.write_u8(r + 1).unwrap();
66 | }
67 | }
68 | read
69 | }
70 | }
71 | 
72 | mod test {
73 | #[test]
74 | fn test_read_counter() {
75 | use byteorder::{ReadBytesExt, LE};
76 | use std::io::{Cursor, Seek, SeekFrom};
77 | 
78 | let source = Cursor::new(vec![1, 2, 3, 4, 5, 6, 7, 8, 9]);
79 | let mut proxy = super::ReadCounter::new(source);
80 | 
81 | proxy.seek(SeekFrom::Start(3)).unwrap();
82 | proxy.read_u8().unwrap();
83 | proxy.seek(SeekFrom::Current(-1)).unwrap();
84 | proxy.read_u8().unwrap();
85 | proxy.read_u16::<LE>().unwrap();
86 | 
87 | assert_eq!(proxy.reads.into_inner(), vec![0, 0, 0, 2, 1, 1]);
88 | }
89 | }
90 | 
91 | static AES_KEY: &str = "lNJbw660IOC+kU7cnVQ1oeqrXyhk4J6UAZrCBbcnp94=";
92 | 
93 | fn test_read(version: repak::Version, _file_name: &str, bytes: &[u8]) {
94 | use aes::cipher::KeyInit;
95 | use base64::{engine::general_purpose, Engine as _};
96 | let key = general_purpose::STANDARD
97 | .decode(AES_KEY)
98 | .as_ref()
99 | .map_err(|_| repak::Error::Aes)
100 | .and_then(|bytes| aes::Aes256::new_from_slice(bytes).map_err(|_| repak::Error::Aes))
101 | .unwrap();
102 | 
103 | let mut inner_reader = std::io::Cursor::new(bytes);
104 | let len = inner_reader.seek(SeekFrom::End(0)).unwrap();
105 | let mut reader = ReadCounter::new_size(inner_reader, len as usize);
106 | 
107 | let pak = repak::PakBuilder::new()
108 | .key(key)
109 | .reader(&mut reader)
110 | .unwrap();
111 | 
112 | assert_eq!(pak.mount_point(), "../mount/point/root/");
113 | assert_eq!(pak.version(), version);
114 | use std::collections::HashSet;
115 | let files: HashSet<String> =
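// ReadCounter (defined above) counts every byte read; once all files are
// extracted and compared, the loop at the end of test_read asserts each byte
// of the pak was read at least once.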
HashSet::from_iter(pak.files()); 116 | assert_eq!( 117 | files, 118 | HashSet::from_iter( 119 | vec!["test.txt", "test.png", "zeros.bin", "directory/nested.txt"] 120 | .into_iter() 121 | .map(String::from) 122 | ) 123 | ); 124 | 125 | for file in files { 126 | let mut buf = vec![]; 127 | let mut writer = std::io::Cursor::new(&mut buf); 128 | pak.read_file(&file, &mut reader, &mut writer).unwrap(); 129 | match file.as_str() { 130 | "test.txt" => assert_eq!( 131 | buf, 132 | include_bytes!("pack/root/test.txt"), 133 | "test.txt incorrect contents" 134 | ), 135 | "test.png" => assert_eq!( 136 | buf, 137 | include_bytes!("pack/root/test.png"), 138 | "test.png incorrect contents" 139 | ), 140 | "zeros.bin" => assert_eq!( 141 | buf, 142 | include_bytes!("pack/root/zeros.bin"), 143 | "zeros.bin incorrect contents" 144 | ), 145 | "directory/nested.txt" => assert_eq!( 146 | buf, 147 | include_bytes!("pack/root/directory/nested.txt"), 148 | "nested.txt incorrect contents" 149 | ), 150 | name => panic!("unrecognized file {}", name), 151 | } 152 | } 153 | 154 | for r in reader.into_reads() { 155 | // sanity check. a pak file can be constructed with a lot of dead space 156 | // which wouldn't have to be read, but so far all bytes in paks generated 157 | // by UnrealPak are meaningful 158 | assert!(r > 0, "every byte has been read at least once"); 159 | } 160 | } 161 | 162 | fn test_write(_version: repak::Version, _file_name: &str, bytes: &[u8]) { 163 | use aes::cipher::KeyInit; 164 | use base64::{engine::general_purpose, Engine as _}; 165 | let key = general_purpose::STANDARD 166 | .decode(AES_KEY) 167 | .as_ref() 168 | .map_err(|_| repak::Error::Aes) 169 | .and_then(|bytes| aes::Aes256::new_from_slice(bytes).map_err(|_| repak::Error::Aes)) 170 | .unwrap(); 171 | 172 | let mut reader = std::io::Cursor::new(bytes); 173 | let pak_reader = repak::PakBuilder::new() 174 | .key(key) 175 | .reader(&mut reader) 176 | .unwrap(); 177 | 178 | let writer = Cursor::new(vec![]); 179 | let mut pak_writer = repak::PakBuilder::new().writer( 180 | writer, 181 | pak_reader.version(), 182 | pak_reader.mount_point().to_owned(), 183 | Some(0x205C5A7D), 184 | ); 185 | 186 | for path in pak_reader.files() { 187 | let data = pak_reader.get(&path, &mut reader).unwrap(); 188 | pak_writer.write_file(&path, false, data).unwrap(); 189 | } 190 | 191 | assert!(pak_writer.write_index().unwrap().into_inner() == reader.into_inner()); 192 | } 193 | 194 | fn test_rewrite_index(_version: repak::Version, _file_name: &str, bytes: &[u8]) { 195 | use aes::cipher::KeyInit; 196 | use base64::{engine::general_purpose, Engine as _}; 197 | let key = general_purpose::STANDARD 198 | .decode(AES_KEY) 199 | .as_ref() 200 | .map_err(|_| repak::Error::Aes) 201 | .and_then(|bytes| aes::Aes256::new_from_slice(bytes).map_err(|_| repak::Error::Aes)) 202 | .unwrap(); 203 | 204 | let mut buf = std::io::Cursor::new(bytes.to_vec()); 205 | let pak_reader = repak::PakBuilder::new().key(key).reader(&mut buf).unwrap(); 206 | 207 | let rewrite = pak_reader 208 | .into_pakwriter(buf) 209 | .unwrap() 210 | .write_index() 211 | .unwrap() 212 | .into_inner(); 213 | 214 | assert!(bytes == rewrite); 215 | } 216 | 217 | macro_rules! matrix_test { 218 | ( $name:literal, ($($version:literal $exp_version:expr),* $(,)?), $compress:tt, $encrypt:tt, $encryptindex:tt, $body:tt ) => { 219 | $( matrix_test_compress!($name, $version, $exp_version, $compress, $encrypt, $encryptindex, $body); )* 220 | }; 221 | } 222 | 223 | macro_rules! 
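// These nested macros expand a (version x compress x encrypt x encryptindex)
// matrix into individual #[test] functions; e.g. the "read" invocation further
// below generates test_read_version_v5_compress_encrypt_encryptindex, which calls
// test_read with include_bytes!(concat!("packs/", "pack_v5_compress_encrypt_encryptindex.pak")).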
matrix_test_compress {
224 | ( $name:literal, $version:literal, $exp_version:expr, ($($compress:literal),* $(,)?), $encrypt:tt, $encryptindex:tt, $body:tt ) => {
225 | $( matrix_test_encrypt!($name, $version, $exp_version, $compress, $encrypt, $encryptindex, $body); )*
226 | };
227 | }
228 | 
229 | macro_rules! matrix_test_encrypt {
230 | ( $name:literal, $version:literal, $exp_version:expr, $compress:literal, ($($encrypt:literal),* $(,)?), $encryptindex:tt, $body:tt ) => {
231 | $( matrix_test_encryptindex!($name, $version, $exp_version, $compress, $encrypt, $encryptindex, $body); )*
232 | };
233 | }
234 | 
235 | macro_rules! matrix_test_encryptindex {
236 | ( $name:literal, $version:literal, $exp_version:expr, $compress:literal, $encrypt:literal, ($($encryptindex:literal),* $(,)?), $body:tt ) => {
237 | $( matrix_test_body!($name, $version, $exp_version, $compress, $encrypt, $encryptindex, $body); )*
238 | };
239 | }
240 | 
241 | macro_rules! matrix_test_body {
242 | ( $name:literal, $version:literal, $exp_version:expr, $compress:literal, $encrypt:literal, $encryptindex:literal, $body:expr ) => {
243 | paste! {
244 | #[test]
245 | fn [< test_ $name _version_ $version $compress $encrypt $encryptindex >]() {
246 | $body(
247 | $exp_version,
248 | concat!("pack_", $version, $compress, $encrypt, $encryptindex, ".pak"),
249 | include_bytes!(concat!("packs/pack_", $version, $compress, $encrypt, $encryptindex, ".pak")));
250 | }
251 | }
252 | };
253 | }
254 | 
255 | matrix_test!(
256 | "read",
257 | (
258 | "v5" repak::Version::V5,
259 | "v7" repak::Version::V7,
260 | "v8a" repak::Version::V8A,
261 | "v8b" repak::Version::V8B,
262 | "v9" repak::Version::V9,
263 | "v11" repak::Version::V11,
264 | ),
265 | ("", "_compress"),
266 | ("", "_encrypt"),
267 | ("", "_encryptindex"),
268 | test_read
269 | );
270 | 
271 | matrix_test!(
272 | "write",
273 | (
274 | "v5" repak::Version::V5,
275 | "v7" repak::Version::V7,
276 | "v8a" repak::Version::V8A,
277 | "v8b" repak::Version::V8B,
278 | "v9" repak::Version::V9,
279 | "v11" repak::Version::V11,
280 | ),
281 | ("", /*"_compress"*/),
282 | ("", /*"_encrypt"*/),
283 | ("", /*"_encryptindex"*/),
284 | test_write
285 | );
286 | 
287 | matrix_test!(
288 | "rewrite_index",
289 | (
290 | "v5" repak::Version::V5,
291 | "v7" repak::Version::V7,
292 | "v8a" repak::Version::V8A,
293 | "v8b" repak::Version::V8B,
294 | "v9" repak::Version::V9,
295 | "v11" repak::Version::V11,
296 | ),
297 | ("", "_compress"),
298 | ("", "_encrypt"),
299 | ("", /*"_encryptindex"*/),
300 | test_rewrite_index
301 | );
302 | 
-------------------------------------------------------------------------------- /repak_cli/wix/main.wxs: --------------------------------------------------------------------------------
(WiX installer XML: the markup was lost when this dump was generated and is not recoverable here.)
-------------------------------------------------------------------------------- /oodle_loader/src/lib.rs: --------------------------------------------------------------------------------
1 | use std::{io::Read, sync::OnceLock};
2 | 
3 | type Result<T, E = Error> = std::result::Result<T, E>;
4 | 
5 | pub use
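// This crate fetches the platform-specific Oodle shared library at runtime
// (pinned by version and SHA-256 below), loads it via libloading, and wraps the
// raw entry points. Sketch of intended use, based only on the API in this file
// (the caller code is illustrative, not part of the repo):
//
//     let oodle = oodle_loader::oodle()?;
//     let packed = oodle.compress(b"bytes", Compressor::Mermaid, CompressionLevel::Normal)?;
//     let mut raw = vec![0u8; 5];
//     oodle.decompress(&packed, &mut raw); // returns the decoded byte count; 0 signals failure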
oodle_lz::{CompressionLevel, Compressor}; 6 | 7 | mod oodle_lz { 8 | #[derive(Debug, Clone, Copy)] 9 | #[repr(i32)] 10 | pub enum Compressor { 11 | /// None = memcpy, pass through uncompressed bytes 12 | None = 3, 13 | 14 | /// Fast decompression and high compression ratios, amazing! 15 | Kraken = 8, 16 | /// Leviathan = Kraken's big brother with higher compression, slightly slower decompression. 17 | Leviathan = 13, 18 | /// Mermaid is between Kraken & Selkie - crazy fast, still decent compression. 19 | Mermaid = 9, 20 | /// Selkie is a super-fast relative of Mermaid. For maximum decode speed. 21 | Selkie = 11, 22 | /// Hydra, the many-headed beast = Leviathan, Kraken, Mermaid, or Selkie (see $OodleLZ_About_Hydra) 23 | Hydra = 12, 24 | } 25 | 26 | #[derive(Debug, Clone, Copy)] 27 | #[repr(i32)] 28 | pub enum CompressionLevel { 29 | /// don't compress, just copy raw bytes 30 | None = 0, 31 | /// super fast mode, lower compression ratio 32 | SuperFast = 1, 33 | /// fastest LZ mode with still decent compression ratio 34 | VeryFast = 2, 35 | /// fast - good for daily use 36 | Fast = 3, 37 | /// standard medium speed LZ mode 38 | Normal = 4, 39 | 40 | /// optimal parse level 1 (faster optimal encoder) 41 | Optimal1 = 5, 42 | /// optimal parse level 2 (recommended baseline optimal encoder) 43 | Optimal2 = 6, 44 | /// optimal parse level 3 (slower optimal encoder) 45 | Optimal3 = 7, 46 | /// optimal parse level 4 (very slow optimal encoder) 47 | Optimal4 = 8, 48 | /// optimal parse level 5 (don't care about encode speed, maximum compression) 49 | Optimal5 = 9, 50 | 51 | /// faster than SuperFast, less compression 52 | HyperFast1 = -1, 53 | /// faster than HyperFast1, less compression 54 | HyperFast2 = -2, 55 | /// faster than HyperFast2, less compression 56 | HyperFast3 = -3, 57 | /// fastest, less compression 58 | HyperFast4 = -4, 59 | } 60 | 61 | pub type Compress = unsafe extern "system" fn( 62 | compressor: Compressor, 63 | rawBuf: *const u8, 64 | rawLen: usize, 65 | compBuf: *mut u8, 66 | level: CompressionLevel, 67 | pOptions: *const (), 68 | dictionaryBase: *const (), 69 | lrm: *const (), 70 | scratchMem: *mut u8, 71 | scratchSize: usize, 72 | ) -> isize; 73 | 74 | pub type Decompress = unsafe extern "system" fn( 75 | compBuf: *const u8, 76 | compBufSize: usize, 77 | rawBuf: *mut u8, 78 | rawLen: usize, 79 | fuzzSafe: u32, 80 | checkCRC: u32, 81 | verbosity: u32, 82 | decBufBase: u64, 83 | decBufSize: usize, 84 | fpCallback: u64, 85 | callbackUserData: u64, 86 | decoderMemory: *mut u8, 87 | decoderMemorySize: usize, 88 | threadPhase: u32, 89 | ) -> isize; 90 | 91 | pub type GetCompressedBufferSizeNeeded = 92 | unsafe extern "system" fn(compressor: Compressor, rawSize: usize) -> usize; 93 | 94 | pub type SetPrintf = unsafe extern "system" fn(printf: *const ()); 95 | } 96 | 97 | static OODLE_VERSION: &str = "2.9.10"; 98 | static OODLE_BASE_URL: &str = "https://github.com/WorkingRobot/OodleUE/raw/refs/heads/main/Engine/Source/Programs/Shared/EpicGames.Oodle/Sdk/"; 99 | 100 | struct OodlePlatform { 101 | path: &'static str, 102 | name: &'static str, 103 | hash: &'static str, 104 | } 105 | 106 | #[cfg(target_os = "linux")] 107 | static OODLE_PLATFORM: OodlePlatform = OodlePlatform { 108 | path: "linux/lib", 109 | name: "liboo2corelinux64.so.9", 110 | hash: "ed7e98f70be1254a80644efd3ae442ff61f854a2fe9debb0b978b95289884e9c", 111 | }; 112 | 113 | #[cfg(target_os = "macos")] 114 | static OODLE_PLATFORM: OodlePlatform = OodlePlatform { 115 | path: "mac/lib", 116 | name: 
"liboo2coremac64.2.9.10.dylib", 117 | hash: "b09af35f6b84a61e2b6488495c7927e1cef789b969128fa1c845e51a475ec501", 118 | }; 119 | 120 | #[cfg(windows)] 121 | static OODLE_PLATFORM: OodlePlatform = OodlePlatform { 122 | path: "win/redist", 123 | name: "oo2core_9_win64.dll", 124 | hash: "6f5d41a7892ea6b2db420f2458dad2f84a63901c9a93ce9497337b16c195f457", 125 | }; 126 | 127 | fn url() -> String { 128 | format!( 129 | "{OODLE_BASE_URL}/{}/{}/{}", 130 | OODLE_VERSION, OODLE_PLATFORM.path, OODLE_PLATFORM.name 131 | ) 132 | } 133 | 134 | #[derive(thiserror::Error, Debug)] 135 | pub enum Error { 136 | #[error("Oodle lib hash mismatch expected: {expected} got {found}")] 137 | HashMismatch { expected: String, found: String }, 138 | #[error("Oodle compression failed")] 139 | CompressionFailed, 140 | #[error("Oodle initialization failed previously")] 141 | InitializationFailed, 142 | #[error("IO error {0:?}")] 143 | Io(#[from] std::io::Error), 144 | #[error("ureq error {0:?}")] 145 | Ureq(Box), 146 | #[error("Oodle libloading error {0:?}")] 147 | LibLoading(#[from] libloading::Error), 148 | } 149 | impl From for Error { 150 | fn from(value: ureq::Error) -> Self { 151 | Self::Ureq(value.into()) 152 | } 153 | } 154 | 155 | fn check_hash(buffer: &[u8]) -> Result<()> { 156 | use sha2::{Digest, Sha256}; 157 | 158 | let mut hasher = Sha256::new(); 159 | hasher.update(buffer); 160 | let hash = hex::encode(hasher.finalize()); 161 | if hash != OODLE_PLATFORM.hash { 162 | return Err(Error::HashMismatch { 163 | expected: OODLE_PLATFORM.hash.into(), 164 | found: hash, 165 | }); 166 | } 167 | 168 | Ok(()) 169 | } 170 | 171 | fn fetch_oodle() -> Result { 172 | let oodle_path = std::env::current_exe()?.with_file_name(OODLE_PLATFORM.name); 173 | if !oodle_path.exists() { 174 | let mut buffer = vec![]; 175 | ureq::get(&url()) 176 | .call()? 
177 | .into_reader()
178 | .read_to_end(&mut buffer)?;
179 | check_hash(&buffer)?;
180 | std::fs::write(&oodle_path, buffer)?;
181 | }
182 | // don't check existing file to allow user to substitute other versions
183 | // check_hash(&std::fs::read(&oodle_path)?)?;
184 | Ok(oodle_path)
185 | }
186 | 
187 | pub struct Oodle {
188 | _library: libloading::Library,
189 | compress: oodle_lz::Compress,
190 | decompress: oodle_lz::Decompress,
191 | get_compressed_buffer_size_needed: oodle_lz::GetCompressedBufferSizeNeeded,
192 | set_printf: oodle_lz::SetPrintf,
193 | }
194 | impl Oodle {
195 | fn new(lib: libloading::Library) -> Result<Self> {
196 | unsafe {
197 | let res = Oodle {
198 | compress: *lib.get(b"OodleLZ_Compress")?,
199 | decompress: *lib.get(b"OodleLZ_Decompress")?,
200 | get_compressed_buffer_size_needed: *lib
201 | .get(b"OodleLZ_GetCompressedBufferSizeNeeded")?,
202 | set_printf: *lib.get(b"OodleCore_Plugins_SetPrintf")?,
203 | _library: lib,
204 | };
205 | (res.set_printf)(std::ptr::null()); // silence oodle logging
206 | Ok(res)
207 | }
208 | }
209 | pub fn compress(
210 | &self,
211 | input: &[u8],
212 | compressor: Compressor,
213 | compression_level: CompressionLevel,
214 | ) -> Result<Vec<u8>> {
215 | unsafe {
216 | let buffer_size = self.get_compressed_buffer_size_needed(compressor, input.len());
217 | let mut buffer = vec![0; buffer_size];
218 | 
219 | let len = (self.compress)(
220 | compressor,
221 | input.as_ptr(),
222 | input.len(),
223 | buffer.as_mut_ptr(),
224 | compression_level,
225 | std::ptr::null(),
226 | std::ptr::null(),
227 | std::ptr::null(),
228 | std::ptr::null_mut(),
229 | 0,
230 | );
231 | 
232 | if len == -1 {
233 | return Err(Error::CompressionFailed);
234 | }
235 | buffer.truncate(len as usize);
236 | 
237 | Ok(buffer)
238 | }
239 | }
240 | pub fn decompress(&self, input: &[u8], output: &mut [u8]) -> isize {
241 | unsafe {
242 | (self.decompress)(
243 | input.as_ptr(),
244 | input.len(),
245 | output.as_mut_ptr(),
246 | output.len(),
247 | 1,
248 | 1,
249 | 0,
250 | 0,
251 | 0,
252 | 0,
253 | 0,
254 | std::ptr::null_mut(),
255 | 0,
256 | 3,
257 | )
258 | }
259 | }
260 | fn get_compressed_buffer_size_needed(
261 | &self,
262 | compressor: oodle_lz::Compressor,
263 | raw_buffer: usize,
264 | ) -> usize {
265 | unsafe { (self.get_compressed_buffer_size_needed)(compressor, raw_buffer) }
266 | }
267 | }
268 | 
269 | static OODLE: OnceLock<Option<Oodle>> = OnceLock::new();
270 | 
271 | fn load_oodle() -> Result<Oodle> {
272 | let path = fetch_oodle()?;
273 | unsafe {
274 | let library = libloading::Library::new(path)?;
275 | Oodle::new(library)
276 | }
277 | }
278 | 
279 | pub fn oodle() -> Result<&'static Oodle> {
280 | let mut result = None;
281 | let oodle = OODLE.get_or_init(|| match load_oodle() {
282 | Err(err) => {
283 | result = Some(Err(err));
284 | None
285 | }
286 | Ok(oodle) => Some(oodle),
287 | });
288 | match (result, oodle) {
289 | // oodle initialized so return
290 | (_, Some(oodle)) => Ok(oodle),
291 | // error during initialization
292 | (Some(result), _) => result?,
293 | // no error because initialization was tried and failed before
294 | _ => Err(Error::InitializationFailed),
295 | }
296 | }
297 | 
298 | #[cfg(test)]
299 | mod test {
300 | use super::*;
301 | 
302 | #[test]
303 | fn test_oodle() {
304 | let oodle = oodle().unwrap();
305 | 
306 | let data = b"In tools and when compressing large inputs in one call, consider using
307 | $OodleXLZ_Compress_AsyncAndWait (in the Oodle2 Ext lib) instead to get parallelism.
Alternatively, 308 | chop the data into small fixed size chunks (we recommend at least 256KiB, i.e. 262144 bytes) and 309 | call compress on each of them, which decreases compression ratio but makes for trivial parallel 310 | compression and decompression."; 311 | 312 | let buffer = oodle 313 | .compress(data, Compressor::Mermaid, CompressionLevel::Optimal5) 314 | .unwrap(); 315 | 316 | dbg!((data.len(), buffer.len())); 317 | 318 | let mut uncomp = vec![0; data.len()]; 319 | oodle.decompress(&buffer, &mut uncomp); 320 | 321 | assert_eq!(data[..], uncomp[..]); 322 | } 323 | } 324 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | # Copyright 2022-2023, axodotdev 2 | # SPDX-License-Identifier: MIT or Apache-2.0 3 | # 4 | # CI that: 5 | # 6 | # * checks for a Git Tag that looks like a release 7 | # * builds artifacts with cargo-dist (archives, installers, hashes) 8 | # * uploads those artifacts to temporary workflow zip 9 | # * on success, uploads the artifacts to a Github Release 10 | # 11 | # Note that the Github Release will be created with a generated 12 | # title/body based on your changelogs. 13 | 14 | name: Release 15 | 16 | permissions: 17 | contents: write 18 | 19 | # This task will run whenever you push a git tag that looks like a version 20 | # like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc. 21 | # Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where 22 | # PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION 23 | # must be a Cargo-style SemVer Version (must have at least major.minor.patch). 24 | # 25 | # If PACKAGE_NAME is specified, then the announcement will be for that 26 | # package (erroring out if it doesn't have the given version or isn't cargo-dist-able). 27 | # 28 | # If PACKAGE_NAME isn't specified, then the announcement will be for all 29 | # (cargo-dist-able) packages in the workspace with that version (this mode is 30 | # intended for workspaces with only one dist-able package, or with all dist-able 31 | # packages versioned/released in lockstep). 32 | # 33 | # If you push multiple tags at once, separate instances of this workflow will 34 | # spin up, creating an independent announcement for each one. However Github 35 | # will hard limit this to 3 tags per commit, as it will assume more tags is a 36 | # mistake. 37 | # 38 | # If there's a prerelease-style suffix to the version, then the release(s) 39 | # will be marked as a prerelease. 40 | on: 41 | push: 42 | tags: 43 | - '**[0-9]+.[0-9]+.[0-9]+*' 44 | pull_request: 45 | 46 | jobs: 47 | # Run 'cargo dist plan' (or host) to determine what tasks we need to do 48 | plan: 49 | runs-on: ubuntu-latest 50 | outputs: 51 | val: ${{ steps.plan.outputs.manifest }} 52 | tag: ${{ !github.event.pull_request && github.ref_name || '' }} 53 | tag-flag: ${{ !github.event.pull_request && format('--tag={0}', github.ref_name) || '' }} 54 | publishing: ${{ !github.event.pull_request }} 55 | env: 56 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 57 | steps: 58 | - uses: actions/checkout@v4 59 | with: 60 | submodules: recursive 61 | - name: Install cargo-dist 62 | # we specify bash to get pipefail; it guards against the `curl` command 63 | # failing. 
otherwise `sh` won't catch that `curl` returned non-0 64 | shell: bash 65 | run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.7.1/cargo-dist-installer.sh | sh" 66 | # sure would be cool if github gave us proper conditionals... 67 | # so here's a doubly-nested ternary-via-truthiness to try to provide the best possible 68 | # functionality based on whether this is a pull_request, and whether it's from a fork. 69 | # (PRs run on the *source* but secrets are usually on the *target* -- that's *good* 70 | # but also really annoying to build CI around when it needs secrets to work right.) 71 | - id: plan 72 | run: | 73 | cargo dist ${{ !github.event.pull_request && format('host --steps=create --tag={0}', github.ref_name) || (github.event.pull_request.head.repo.fork && 'plan' || 'host --steps=check') }} --output-format=json > dist-manifest.json 74 | echo "cargo dist ran successfully" 75 | cat dist-manifest.json 76 | echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT" 77 | - name: "Upload dist-manifest.json" 78 | uses: actions/upload-artifact@v3 79 | with: 80 | name: artifacts 81 | path: dist-manifest.json 82 | 83 | # Build and packages all the platform-specific things 84 | build-local-artifacts: 85 | name: build-local-artifacts (${{ join(matrix.targets, ', ') }}) 86 | # Let the initial task tell us to not run (currently very blunt) 87 | needs: 88 | - plan 89 | if: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix.include != null && (needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload') }} 90 | strategy: 91 | fail-fast: false 92 | # Target platforms/runners are computed by cargo-dist in create-release. 93 | # Each member of the matrix has the following arguments: 94 | # 95 | # - runner: the github runner 96 | # - dist-args: cli flags to pass to cargo dist 97 | # - install-dist: expression to run to install cargo-dist on the runner 98 | # 99 | # Typically there will be: 100 | # - 1 "global" task that builds universal installers 101 | # - N "local" tasks that build each platform's binaries and platform-specific installers 102 | matrix: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix }} 103 | runs-on: ${{ matrix.runner }} 104 | env: 105 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 106 | BUILD_MANIFEST_NAME: target/distrib/${{ join(matrix.targets, '-') }}-dist-manifest.json 107 | steps: 108 | - uses: actions/checkout@v4 109 | with: 110 | submodules: recursive 111 | - uses: swatinem/rust-cache@v2 112 | - name: Install cargo-dist 113 | run: ${{ matrix.install_dist }} 114 | # Get the dist-manifest 115 | - name: Fetch local artifacts 116 | uses: actions/download-artifact@v3 117 | with: 118 | name: artifacts 119 | path: target/distrib/ 120 | - name: Install dependencies 121 | run: | 122 | ${{ matrix.packages_install }} 123 | - name: Build artifacts 124 | run: | 125 | # Actually do builds and make zips and whatnot 126 | cargo dist build ${{ needs.plan.outputs.tag-flag }} --print=linkage --output-format=json ${{ matrix.dist_args }} > dist-manifest.json 127 | echo "cargo dist ran successfully" 128 | - id: cargo-dist 129 | name: Post-build 130 | # We force bash here just because github makes it really hard to get values up 131 | # to "real" actions without writing to env-vars, and writing to env-vars has 132 | # inconsistent syntax between shell and powershell. 
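      # (The run block below emits a multiline step output using GitHub's
      # heredoc-style $GITHUB_OUTPUT syntax:
      #   echo "paths<<EOF" >> "$GITHUB_OUTPUT"
      #   ...one artifact path per line...
      #   echo "EOF" >> "$GITHUB_OUTPUT"
      # which is what the jq pipeline feeds into.)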
133 | shell: bash
134 | run: |
135 | # Parse out what we just built and upload it to scratch storage
136 | echo "paths<<EOF" >> "$GITHUB_OUTPUT"
137 | jq --raw-output ".artifacts[]?.path | select( . != null )" dist-manifest.json >> "$GITHUB_OUTPUT"
138 | echo "EOF" >> "$GITHUB_OUTPUT"
139 | 
140 | cp dist-manifest.json "$BUILD_MANIFEST_NAME"
141 | - name: "Upload artifacts"
142 | uses: actions/upload-artifact@v3
143 | with:
144 | name: artifacts
145 | path: |
146 | ${{ steps.cargo-dist.outputs.paths }}
147 | ${{ env.BUILD_MANIFEST_NAME }}
148 | 
149 | # Build and package all the platform-agnostic(ish) things
150 | build-global-artifacts:
151 | needs:
152 | - plan
153 | - build-local-artifacts
154 | runs-on: "ubuntu-20.04"
155 | env:
156 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
157 | BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
158 | steps:
159 | - uses: actions/checkout@v4
160 | with:
161 | submodules: recursive
162 | - name: Install cargo-dist
163 | run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.7.1/cargo-dist-installer.sh | sh"
164 | # Get all the local artifacts for the global tasks to use (for e.g. checksums)
165 | - name: Fetch local artifacts
166 | uses: actions/download-artifact@v3
167 | with:
168 | name: artifacts
169 | path: target/distrib/
170 | - id: cargo-dist
171 | shell: bash
172 | run: |
173 | cargo dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
174 | echo "cargo dist ran successfully"
175 | 
176 | # Parse out what we just built and upload it to scratch storage
177 | echo "paths<<EOF" >> "$GITHUB_OUTPUT"
178 | jq --raw-output ".artifacts[]?.path | select( . != null )" dist-manifest.json >> "$GITHUB_OUTPUT"
179 | echo "EOF" >> "$GITHUB_OUTPUT"
180 | 
181 | cp dist-manifest.json "$BUILD_MANIFEST_NAME"
182 | - name: "Upload artifacts"
183 | uses: actions/upload-artifact@v3
184 | with:
185 | name: artifacts
186 | path: |
187 | ${{ steps.cargo-dist.outputs.paths }}
188 | ${{ env.BUILD_MANIFEST_NAME }}
189 | # Determines if we should publish/announce
190 | host:
191 | needs:
192 | - plan
193 | - build-local-artifacts
194 | - build-global-artifacts
195 | # Only run if we're "publishing", and only if local and global didn't fail (skipped is fine)
196 | if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.build-local-artifacts.result == 'skipped' || needs.build-local-artifacts.result == 'success') }}
197 | env:
198 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
199 | runs-on: "ubuntu-20.04"
200 | outputs:
201 | val: ${{ steps.host.outputs.manifest }}
202 | steps:
203 | - uses: actions/checkout@v4
204 | with:
205 | submodules: recursive
206 | - name: Install cargo-dist
207 | run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.7.1/cargo-dist-installer.sh | sh"
208 | # Fetch artifacts from scratch-storage
209 | - name: Fetch artifacts
210 | uses: actions/download-artifact@v3
211 | with:
212 | name: artifacts
213 | path: target/distrib/
214 | # This is a harmless no-op for Github Releases, hosting for that happens in "announce"
215 | - id: host
216 | shell: bash
217 | run: |
218 | cargo dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
219 | echo "artifacts uploaded and released successfully"
220 | cat dist-manifest.json
221 | echo
"manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT" 222 | - name: "Upload dist-manifest.json" 223 | uses: actions/upload-artifact@v3 224 | with: 225 | name: artifacts 226 | path: dist-manifest.json 227 | 228 | # Create a Github Release while uploading all files to it 229 | announce: 230 | needs: 231 | - plan 232 | - host 233 | # use "always() && ..." to allow us to wait for all publish jobs while 234 | # still allowing individual publish jobs to skip themselves (for prereleases). 235 | # "host" however must run to completion, no skipping allowed! 236 | if: ${{ always() && needs.host.result == 'success' }} 237 | runs-on: "ubuntu-20.04" 238 | env: 239 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 240 | steps: 241 | - uses: actions/checkout@v4 242 | with: 243 | submodules: recursive 244 | - name: "Download Github Artifacts" 245 | uses: actions/download-artifact@v3 246 | with: 247 | name: artifacts 248 | path: artifacts 249 | - name: Cleanup 250 | run: | 251 | # Remove the granular manifests 252 | rm -f artifacts/*-dist-manifest.json 253 | - name: Create Github Release 254 | uses: ncipollo/release-action@v1 255 | with: 256 | tag: ${{ needs.plan.outputs.tag }} 257 | name: ${{ fromJson(needs.host.outputs.val).announcement_title }} 258 | body: ${{ fromJson(needs.host.outputs.val).announcement_github_body }} 259 | prerelease: ${{ fromJson(needs.host.outputs.val).announcement_is_prerelease }} 260 | artifacts: "artifacts/*" 261 | -------------------------------------------------------------------------------- /repak/src/entry.rs: -------------------------------------------------------------------------------- 1 | use std::io; 2 | 3 | use byteorder::{LE, ReadBytesExt, WriteBytesExt}; 4 | 5 | use crate::{data::build_partial_entry, reader, writer, Error, Hash}; 6 | 7 | use super::{Compression, ext::BoolExt, ext::ReadExt, Version, VersionMajor}; 8 | 9 | #[derive(Debug, PartialEq, Clone, Copy)] 10 | pub(crate) enum EntryLocation { 11 | Data, 12 | Index, 13 | } 14 | 15 | #[derive(Debug, Default, Clone)] 16 | pub(crate) struct Block { 17 | pub start: u64, 18 | pub end: u64, 19 | } 20 | 21 | impl Block { 22 | pub fn read(reader: &mut R) -> Result { 23 | Ok(Self { 24 | start: reader.read_u64::()?, 25 | end: reader.read_u64::()?, 26 | }) 27 | } 28 | 29 | pub fn write(&self, writer: &mut W) -> Result<(), super::Error> { 30 | writer.write_u64::(self.start)?; 31 | writer.write_u64::(self.end)?; 32 | Ok(()) 33 | } 34 | } 35 | 36 | fn align(offset: u64) -> u64 { 37 | // add alignment (aes block size: 16) then zero out alignment bits 38 | (offset + 15) & !15 39 | } 40 | 41 | fn compression_index_size(version: Version) -> CompressionIndexSize { 42 | match version { 43 | Version::V8A => CompressionIndexSize::U8, 44 | _ => CompressionIndexSize::U32, 45 | } 46 | } 47 | 48 | enum CompressionIndexSize { 49 | U8, 50 | U32, 51 | } 52 | 53 | #[derive(Debug)] 54 | pub(crate) struct Entry { 55 | pub offset: u64, 56 | pub compressed: u64, 57 | pub uncompressed: u64, 58 | pub compression_slot: Option, 59 | pub timestamp: Option, 60 | pub hash: Option, 61 | pub blocks: Option>, 62 | pub flags: u8, 63 | pub compression_block_size: u32, 64 | } 65 | 66 | impl Entry { 67 | pub fn is_encrypted(&self) -> bool { 68 | 0 != (self.flags & 1) 69 | } 70 | pub fn is_deleted(&self) -> bool { 71 | 0 != (self.flags >> 1) & 1 72 | } 73 | pub fn is_partial_encrypted(&self) -> bool { 74 | 0 != (self.flags >> 3) & 1 75 | } 76 | pub fn get_serialized_size( 77 | version: super::Version, 78 | compression: Option, 79 | block_count: u32, 80 | ) -> 
76 | pub fn get_serialized_size(
77 | version: super::Version,
78 | compression: Option<u32>,
79 | block_count: u32,
80 | ) -> u64 {
81 | let mut size = 0;
82 | size += 8; // offset
83 | size += 8; // compressed
84 | size += 8; // uncompressed
85 | size += match compression_index_size(version) {
86 | CompressionIndexSize::U8 => 1, // 8 bit compression
87 | CompressionIndexSize::U32 => 4, // 32 bit compression
88 | };
89 | size += match version.version_major() == VersionMajor::Initial {
90 | true => 8, // timestamp
91 | false => 0,
92 | };
93 | size += 20; // hash
94 | size += match compression {
95 | Some(_) => 4 + (8 + 8) * block_count as u64, // blocks
96 | None => 0,
97 | };
98 | size += 1; // encrypted
99 | size += match version.version_major() >= VersionMajor::CompressionEncryption {
100 | true => 4, // blocks uncompressed
101 | false => 0,
102 | };
103 | size
104 | }
105 | 
106 | pub(crate) fn write_file<W: io::Write + io::Seek>(
107 | writer: &mut W,
108 | version: Version,
109 | compression_slots: &mut Vec<Option<Compression>>,
110 | allowed_compression: &[Compression],
111 | data: &[u8],
112 | ) -> Result<Entry, super::Error> {
113 | let partial_entry = build_partial_entry(allowed_compression, data)?;
114 | let stream_position = writer.stream_position()?;
115 | let entry = partial_entry.build_entry(version, compression_slots, stream_position)?;
116 | entry.write(writer, version, crate::entry::EntryLocation::Data)?;
117 | partial_entry.write_data(writer)?;
118 | Ok(entry)
119 | }
120 | 
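// Compression slots are serialized 1-based so that 0 can mean "uncompressed":
// read() below maps on-disk value n to slot n - 1, and write() stores slot n
// as n + 1.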
121 | pub fn read<R: io::Read>(
122 | reader: &mut R,
123 | version: super::Version,
124 | ) -> Result<Self, super::Error> {
125 | let ver = version.version_major();
126 | let offset = reader.read_u64::<LE>()?;
127 | let compressed = reader.read_u64::<LE>()?;
128 | let uncompressed = reader.read_u64::<LE>()?;
129 | let compression = match match compression_index_size(version) {
130 | CompressionIndexSize::U8 => reader.read_u8()? as u32,
131 | CompressionIndexSize::U32 => reader.read_u32::<LE>()?,
132 | } {
133 | 0 => None,
134 | n => Some(n - 1),
135 | };
136 | let timestamp = (ver == VersionMajor::Initial).then_try(|| reader.read_u64::<LE>())?;
137 | let hash = Some(Hash(reader.read_guid()?));
138 | let blocks = (ver >= VersionMajor::CompressionEncryption && compression.is_some())
139 | .then_try(|| reader.read_array(Block::read))?;
140 | let flags = (ver >= VersionMajor::CompressionEncryption)
141 | .then_try(|| reader.read_u8())?
142 | .unwrap_or(0);
143 | let compression_block_size = (ver >= VersionMajor::CompressionEncryption)
144 | .then_try(|| reader.read_u32::<LE>())?
145 | .unwrap_or(0);
146 | Ok(Self {
147 | offset,
148 | compressed,
149 | uncompressed,
150 | compression_slot: compression,
151 | timestamp,
152 | hash,
153 | blocks,
154 | flags,
155 | compression_block_size,
156 | })
157 | }
158 | 
159 | pub fn write<W: io::Write>(
160 | &self,
161 | writer: &mut W,
162 | version: super::Version,
163 | location: EntryLocation,
164 | ) -> Result<(), super::Error> {
165 | writer.write_u64::<LE>(match location {
166 | EntryLocation::Data => 0,
167 | EntryLocation::Index => self.offset,
168 | })?;
169 | writer.write_u64::<LE>(self.compressed)?;
170 | writer.write_u64::<LE>(self.uncompressed)?;
171 | let compression = self.compression_slot.map_or(0, |n| n + 1);
172 | match compression_index_size(version) {
173 | CompressionIndexSize::U8 => writer.write_u8(compression.try_into().unwrap())?,
174 | CompressionIndexSize::U32 => writer.write_u32::<LE>(compression)?,
175 | }
176 | 
177 | if version.version_major() == VersionMajor::Initial {
178 | writer.write_u64::<LE>(self.timestamp.unwrap_or_default())?;
179 | }
180 | if let Some(hash) = self.hash {
181 | writer.write_all(&hash.0)?;
182 | } else {
183 | panic!("hash missing");
184 | }
185 | if version.version_major() >= VersionMajor::CompressionEncryption {
186 | if let Some(blocks) = &self.blocks {
187 | writer.write_u32::<LE>(blocks.len() as u32)?;
188 | for block in blocks {
189 | block.write(writer)?;
190 | }
191 | }
192 | writer.write_u8(self.flags)?;
193 | writer.write_u32::<LE>(self.compression_block_size)?;
194 | }
195 | 
196 | Ok(())
197 | }
198 | 
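// Encoded (index) entries pack their metadata into a single u32 of flags,
// mirrored by read_encoded/write_encoded below:
//   bits 0-5   compression_block_size >> 11 (0x3f = explicit u32 follows)
//   bits 6-21  compression block count
//   bit 22     encrypted
//   bits 23-28 compression slot + 1 (0 = uncompressed)
//   bits 29-31 whether compressed size, uncompressed size, and offset each
//              fit in 32 bits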
199 | pub fn read_encoded<R: io::Read>(
200 | reader: &mut R,
201 | version: super::Version,
202 | ) -> Result<Self, super::Error> {
203 | let bits = reader::flag_reader(reader, version)?;
204 | 
205 | let compression = match (bits >> 23) & 0x3f {
206 | 0 => None,
207 | n => Some(n - 1),
208 | };
209 | 
210 | let encrypted = (bits & (1 << 22)) != 0;
211 | let compression_block_count: u32 = (bits >> 6) & 0xffff;
212 | let mut compression_block_size = bits & 0x3f;
213 | 
214 | if compression_block_size == 0x3f {
215 | compression_block_size = reader.read_u32::<LE>()?;
216 | } else {
217 | compression_block_size <<= 11;
218 | }
219 | 
220 | let (offset, uncompressed) = reader::offset_reader(reader, version, bits)?;
221 | 
222 | let compressed = match compression {
223 | None => uncompressed,
224 | _ => reader::read_safe(reader, bits, 29)?,
225 | };
226 | 
227 | let offset_base = Entry::get_serialized_size(version, compression, compression_block_count);
228 | 
229 | let blocks = if compression_block_count == 1 && !encrypted {
230 | Some(vec![Block {
231 | start: offset_base,
232 | end: offset_base + compressed,
233 | }])
234 | } else if compression_block_count > 0 {
235 | let mut index = offset_base;
236 | Some(
237 | (0..compression_block_count)
238 | .map(|_| {
239 | let mut block_size = reader.read_u32::<LE>()? as u64;
240 | let block = Block {
241 | start: index,
242 | end: index + block_size,
243 | };
244 | if encrypted {
245 | block_size = align(block_size);
246 | }
247 | index += block_size;
248 | Ok(block)
249 | })
250 | .collect::<Result<Vec<_>, super::Error>>()?,
251 | )
252 | } else {
253 | None
254 | };
255 | 
256 | Ok(Entry {
257 | offset,
258 | compressed,
259 | uncompressed,
260 | timestamp: None,
261 | compression_slot: compression,
262 | hash: None,
263 | blocks,
264 | flags: encrypted as u8,
265 | compression_block_size,
266 | })
267 | }
268 | 
269 | pub fn write_encoded<W: io::Write>(&self, writer: &mut W, version: Version) -> Result<(), super::Error> {
270 | let mut compression_block_size = (self.compression_block_size >> 11) & 0x3f;
271 | if (compression_block_size << 11) != self.compression_block_size {
272 | compression_block_size = 0x3f;
273 | }
274 | let compression_blocks_count = if self.compression_slot.is_some() {
275 | self.blocks.as_ref().unwrap().len() as u32
276 | } else {
277 | 0
278 | };
279 | let is_size_32_bit_safe = self.compressed <= u32::MAX as u64;
280 | let is_uncompressed_size_32_bit_safe = self.uncompressed <= u32::MAX as u64;
281 | let is_offset_32_bit_safe = self.offset <= u32::MAX as u64;
282 | 
283 | assert!(
284 | compression_blocks_count < 0x10_000,
285 | "compression blocks count fits in 16 bits"
286 | );
287 | 
288 | let flags = (compression_block_size)
289 | | (compression_blocks_count << 6)
290 | | ((self.is_encrypted() as u32) << 22)
291 | | (self.compression_slot.map_or(0, |n| n + 1) << 23)
292 | | ((is_size_32_bit_safe as u32) << 29)
293 | | ((is_uncompressed_size_32_bit_safe as u32) << 30)
294 | | ((is_offset_32_bit_safe as u32) << 31);
295 | 
296 | writer::flag_writer(writer, version, flags)?;
297 | 
298 | if compression_block_size == 0x3f {
299 | writer.write_u32::<LE>(self.compression_block_size)?;
300 | }
301 | 
302 | writer::offset_writer(
303 | writer,
304 | version,
305 | self.offset,
306 | is_offset_32_bit_safe,
307 | self.uncompressed,
308 | is_uncompressed_size_32_bit_safe,
309 | )?;
310 | 
311 | if self.compression_slot.is_some() {
312 | writer::write_safe(writer, is_size_32_bit_safe, self.compressed)?;
313 | 
314 | assert!(self.blocks.is_some());
315 | let blocks = self.blocks.as_ref().unwrap();
316 | if blocks.len() > 1 || self.is_encrypted() {
317 | for b in blocks {
318 | let block_size = b.end - b.start;
319 | writer.write_u32::<LE>(block_size.try_into().unwrap())?;
320 | }
321 | }
322 | }
323 | 
324 | Ok(())
325 | }
326 | 
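// read_file below seeks to the entry, re-reads the header serialized in front
// of the data, decrypts whole 16-byte AES blocks when flagged, then inflates
// each compression block (or copies the raw slice) into the output writer.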
327 | pub fn read_file<R: io::Read + io::Seek, W: io::Write>( 328 | &self, 329 | reader: &mut R, 330 | version: Version, 331 | compression: &[Option<Compression>], 332 | #[allow(unused)] key: &super::Key, 333 | buf: &mut W, 334 | ) -> Result<(), super::Error> { 335 | reader.seek(io::SeekFrom::Start(self.offset))?; 336 | let entry_read = Entry::read(reader, version)?; 337 | #[cfg(any(feature = "compression", feature = "oodle"))] 338 | let data_offset = reader.stream_position()?; 339 | #[allow(unused_mut)] 340 | let mut data = reader.read_len(match self.is_encrypted() { 341 | true => align(self.compressed), 342 | false => self.compressed, 343 | } as usize)?; 344 | if self.is_encrypted() { 345 | #[cfg(not(feature = "encryption"))] 346 | return Err(super::Error::Encryption); 347 | #[cfg(feature = "encryption")] 348 | { 349 | let super::Key::Some(key) = key else { 350 | return Err(super::Error::Encrypted); 351 | }; 352 | use aes::cipher::BlockDecrypt; 353 | 354 | #[cfg(not(feature = "wuthering-waves-2_4"))] 355 | let data_len = data.len(); 356 | #[cfg(feature = "wuthering-waves-2_4")] 357 | let data_len = if entry_read.is_partial_encrypted() { 358 | data.len().min(2048) 359 | } else { 360 | data.len() 361 | }; 362 | 363 | for block in data[..data_len].chunks_mut(16) { 364 | key.decrypt_block(aes::Block::from_mut_slice(block)) 365 | } 366 | data.truncate(self.compressed as usize); 367 | } 368 | } 369 | 370 | #[cfg(feature = "compression")] 371 | let ranges = { 372 | let offset = |index: u64| -> usize { 373 | (match version.version_major() >= VersionMajor::RelativeChunkOffsets { 374 | true => index - (data_offset - self.offset), 375 | false => index - data_offset, 376 | }) as usize 377 | }; 378 | 379 | match &self.blocks { 380 | Some(blocks) => blocks 381 | .iter() 382 | .map(|block| offset(block.start)..offset(block.end)) 383 | .collect::<Vec<_>>(), 384 | #[allow(clippy::single_range_in_vec_init)] 385 | None => vec![0..data.len()], 386 | } 387 | }; 388 | 389 | #[cfg(feature = "compression")] 390 | macro_rules! decompress { 391 | ($decompressor: ty) => { 392 | for range in ranges { 393 | io::copy(&mut <$decompressor>::new(&data[range]), buf)?; 394 | } 395 | }; 396 | } 397 | 398 | match self.compression_slot.and_then(|c| compression[c as usize]) { 399 | None => buf.write_all(&data)?, 400 | #[cfg(not(feature = "compression"))] 401 | _ => return Err(super::Error::Compression), 402 | #[cfg(feature = "compression")] 403 | Some(comp) => { 404 | let chunk_size = if ranges.len() == 1 { 405 | self.uncompressed as usize 406 | } else { 407 | self.compression_block_size as usize 408 | }; 409 | 410 | match comp { 411 | Compression::Zlib => decompress!(flate2::read::ZlibDecoder<&[u8]>), 412 | Compression::Gzip => decompress!(flate2::read::GzDecoder<&[u8]>), 413 | Compression::Zstd => { 414 | for range in ranges { 415 | io::copy(&mut zstd::stream::read::Decoder::new(&data[range])?, buf)?; 416 | } 417 | } 418 | Compression::LZ4 => { 419 | let mut decompressed = vec![0; self.uncompressed as usize]; 420 | for (decomp_chunk, comp_range) in 421 | decompressed.chunks_mut(chunk_size).zip(ranges) 422 | { 423 | lz4_flex::block::decompress_into(&data[comp_range], decomp_chunk) 424 | .map_err(|_| Error::DecompressionFailed(Compression::LZ4))?; 425 | } 426 | buf.write_all(&decompressed)?; 427 | } 428 | #[cfg(feature = "oodle")] 429 | Compression::Oodle => { 430 | let mut decompressed = vec![0; self.uncompressed as usize]; 431 | for (decomp_chunk, comp_range) in 432 | decompressed.chunks_mut(chunk_size).zip(ranges) 433 | { 434 | let out = 435 | oodle_loader::oodle()?.decompress(&data[comp_range], decomp_chunk); 436 | if out == 0 { 437 | return Err(Error::DecompressionFailed(Compression::Oodle)); 438 | } 439 | } 440 | buf.write_all(&decompressed)?; 441 | } 442 | #[cfg(not(feature = "oodle"))] 443 | Compression::Oodle => return Err(super::Error::Oodle), 444 | } 445 | } 446 | } 447 | buf.flush()?; 448 | Ok(()) 449 | } 450 | } 451 | 452 | mod test { 453 | #[test] 454 | fn test_entry() { 455 | let data = vec![ 456 | 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x54, 0x02, 0x00, 0x00, 0x00, 0x00, 457 | 0x00, 0x00, 0x54, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 458 | 0x00, 0xDD, 0x94, 0xFD, 0xC3, 0x5F, 0xF5, 0x91, 0xA9, 0x9A, 0x5E, 0x14, 0xDC, 0x9B, 459 | 0xD3, 0x58, 0x89, 0x78, 0xA6, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 460 | ]; 461 | let mut out = vec![]; 462 | let entry = super::Entry::read(&mut std::io::Cursor::new(data.clone()), super::Version::V5) 463 | .unwrap(); 464 | entry 465 | .write(&mut out, super::Version::V5, super::EntryLocation::Data) 466 | .unwrap(); 467 | assert_eq!(&data, &out); 468 | } 469 | } 470 |
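// Editor's note (sketch, not in the original source): for versions >=
// RelativeChunkOffsets the stored compression-block offsets are relative to
// the entry record, while older versions store absolute file offsets; the
// `offset` closure in read_file above normalizes both into indices of the
// in-memory `data` buffer. E.g. with entry.offset = 0x100 and data_offset =
// 0x135 (payload begins 0x35 bytes after the entry header), a relative
// block.start of 0x35 maps to data[0x35 - (0x135 - 0x100)] = data[0];
// stored absolutely as 0x135 it maps to data[0x135 - 0x135] = data[0].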
-------------------------------------------------------------------------------- /repak_cli/src/main.rs: -------------------------------------------------------------------------------- 1 | use std::collections::BTreeMap; 2 | use std::fs::{self, File}; 3 | use std::io::{self, BufReader, BufWriter}; 4 | use std::path::{Path, PathBuf}; 5 | 6 | use clap::builder::TypedValueParser; 7 | use clap::{Parser, Subcommand}; 8 | use path_clean::PathClean; 9 | use path_slash::PathExt; 10 | use rayon::prelude::*; 11 | use strum::VariantNames; 12 | 13 | #[derive(Parser, Debug)] 14 | struct ActionInfo { 15 | /// Input .pak path 16 | #[arg(index = 1)] 17 | input: String, 18 | } 19 | 20 | #[derive(Parser, Debug)] 21 | struct ActionList { 22 | /// Input .pak path 23 | #[arg(index = 1)] 24 | input: String, 25 | 26 | /// Prefix to strip from entry path 27 | #[arg(short, long, default_value = "../../../")] 28 | strip_prefix: String, 29 | } 30 | 31 | #[derive(Parser, Debug)] 32 | struct ActionHashList { 33 | /// Input .pak path 34 | #[arg(index = 1)] 35 | input: String, 36 | 37 | /// Prefix to strip from entry path 38 | #[arg(short, long, default_value = "../../../")] 39 | strip_prefix: String, 40 | } 41 |
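// Editor's note: the "../../../" strip-prefix default used by these commands
// mirrors the mount point Unreal typically writes into paks, so stripped
// entry paths begin at the project root (e.g. "Game/Content/...").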
42 | #[derive(Parser, Debug)] 43 | struct ActionUnpack { 44 | /// Input .pak path 45 | #[arg(index = 1)] 46 | input: Vec<String>, 47 | 48 | /// Output directory. Defaults to next to input pak 49 | #[arg(short, long)] 50 | output: Option<String>, 51 | 52 | /// Prefix to strip from entry path 53 | #[arg(short, long, default_value = "../../../")] 54 | strip_prefix: String, 55 | 56 | /// Verbose 57 | #[arg(short, long, default_value = "false")] 58 | verbose: bool, 59 | 60 | /// Hides normal output such as progress bar and completion status 61 | #[arg(short, long, default_value = "false")] 62 | quiet: bool, 63 | 64 | /// Force overwrite existing files/directories. 65 | #[arg(short, long, default_value = "false")] 66 | force: bool, 67 | 68 | /// Files or directories to include. Can be specified multiple times. If not specified, everything is extracted. 69 | #[arg(action = clap::ArgAction::Append, short, long)] 70 | include: Vec<glob::Pattern>, 71 | } 72 | 73 | #[derive(Parser, Debug)] 74 | struct ActionPack { 75 | /// Input directory 76 | #[arg(index = 1)] 77 | input: String, 78 | 79 | /// Output directory. Defaults to next to input dir 80 | #[arg(index = 2)] 81 | output: Option<String>, 82 | 83 | /// Mount point 84 | #[arg(short, long, default_value = "../../../")] 85 | mount_point: String, 86 | 87 | /// Version 88 | #[arg( 89 | long, 90 | default_value_t = repak::Version::V8B, 91 | value_parser = clap::builder::PossibleValuesParser::new(repak::Version::VARIANTS).map(|s| s.parse::<repak::Version>().unwrap()) 92 | )] 93 | version: repak::Version, 94 | 95 | /// Compression 96 | #[arg( 97 | long, 98 | value_parser = clap::builder::PossibleValuesParser::new(repak::Compression::VARIANTS).map(|s| s.parse::<repak::Compression>().unwrap()) 99 | )] 100 | compression: Option<repak::Compression>, 101 | 102 | /// Path hash seed for >= V10 103 | #[arg(short, long, default_value = "0")] 104 | path_hash_seed: u64, 105 | 106 | /// Verbose 107 | #[arg(short, long, default_value = "false")] 108 | verbose: bool, 109 | 110 | /// Hides normal output such as progress bar and completion status 111 | #[arg(short, long, default_value = "false")] 112 | quiet: bool, 113 | } 114 | 115 | #[derive(Parser, Debug)] 116 | struct ActionGet { 117 | /// Input .pak path 118 | #[arg(index = 1)] 119 | input: String, 120 | 121 | /// Path to file to read to stdout 122 | #[arg(index = 2)] 123 | file: String, 124 | 125 | /// Prefix to strip from entry path 126 | #[arg(short, long, default_value = "../../../")] 127 | strip_prefix: String, 128 | } 129 | 130 | #[derive(Subcommand, Debug)] 131 | enum Action { 132 | /// Print .pak info 133 | Info(ActionInfo), 134 | /// List .pak files 135 | List(ActionList), 136 | /// List .pak files and the SHA256 of their contents. Useful for finding differences between paks 137 | HashList(ActionHashList), 138 | /// Unpack .pak file 139 | Unpack(ActionUnpack), 140 | /// Pack directory into .pak file 141 | Pack(ActionPack), 142 | /// Reads a single file to stdout 143 | Get(ActionGet), 144 | } 145 | 146 | #[derive(Parser, Debug)] 147 | #[command(author, version)] 148 | struct Args { 149 | /// 256 bit AES encryption key as base64 or hex string if the pak is encrypted 150 | #[arg(short, long)] 151 | aes_key: Option<AesKey>, 152 | 153 | #[command(subcommand)] 154 | action: Action, 155 | } 156 | 157 | #[derive(Debug, Clone)] 158 | struct AesKey(aes::Aes256); 159 | impl std::str::FromStr for AesKey { 160 | type Err = repak::Error; 161 | fn from_str(s: &str) -> Result<Self, Self::Err> { 162 | use aes::cipher::KeyInit; 163 | use base64::{engine::general_purpose, Engine as _}; 164 | let try_parse = |bytes: Vec<_>| aes::Aes256::new_from_slice(&bytes).ok().map(AesKey); 165 | hex::decode(s.strip_prefix("0x").unwrap_or(s)) 166 | .ok() 167 | .and_then(try_parse) 168 | .or_else(|| { 169 | general_purpose::STANDARD_NO_PAD 170 | .decode(s.trim_end_matches('=')) 171 | .ok() 172 | .and_then(try_parse) 173 | }) 174 | .ok_or(repak::Error::Aes) 175 | } 176 | } 177 |
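// Editor's sketch (hypothetical key material): FromStr above accepts the same
// 256-bit key as hex, optionally "0x"-prefixed, or as base64 with or without
// "=" padding (trailing "=" is trimmed and the NO_PAD engine retried), e.g.:
//
//   repak --aes-key 0x0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF info Game.pak
//   repak --aes-key ASNFZ4mrze8BI0VniavN7wEjRWeJq83vASNFZ4mrze8= info Game.pak
//
// Both spellings decode to the same 32 bytes.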
178 | fn main() -> Result<(), repak::Error> { 179 | let args = Args::parse(); 180 | let aes_key = args.aes_key.map(|k| k.0); 181 | 182 | match args.action { 183 | Action::Info(action) => info(aes_key, action), 184 | Action::List(action) => list(aes_key, action), 185 | Action::HashList(action) => hash_list(aes_key, action), 186 | Action::Unpack(action) => unpack(aes_key, action), 187 | Action::Pack(action) => pack(action), 188 | Action::Get(action) => get(aes_key, action), 189 | } 190 | } 191 | 192 | fn info(aes_key: Option<aes::Aes256>, action: ActionInfo) -> Result<(), repak::Error> { 193 | let mut builder = repak::PakBuilder::new(); 194 | if let Some(aes_key) = aes_key { 195 | builder = builder.key(aes_key); 196 | } 197 | let pak = builder.reader(&mut BufReader::new(File::open(action.input)?))?; 198 | println!("mount point: {}", pak.mount_point()); 199 | println!("version: {}", pak.version()); 200 | println!("version major: {}", pak.version().version_major()); 201 | println!("encrypted index: {}", pak.encrypted_index()); 202 | println!("encryption guid: {:032X?}", pak.encryption_guid()); 203 | println!("path hash seed: {:08X?}", pak.path_hash_seed()); 204 | println!("{} file entries", pak.files().len()); 205 | Ok(()) 206 | } 207 | 208 | fn list(aes_key: Option<aes::Aes256>, action: ActionList) -> Result<(), repak::Error> { 209 | let mut builder = repak::PakBuilder::new(); 210 | if let Some(aes_key) = aes_key { 211 | builder = builder.key(aes_key); 212 | } 213 | let pak = builder.reader(&mut BufReader::new(File::open(action.input)?))?; 214 | 215 | let mount_point = PathBuf::from(pak.mount_point()); 216 | let prefix = Path::new(&action.strip_prefix); 217 | 218 | let full_paths = pak 219 | .files() 220 | .into_iter() 221 | .map(|f| mount_point.join(f)) 222 | .collect::<Vec<_>>(); 223 | let stripped = full_paths 224 | .iter() 225 | .map(|f| { 226 | f.strip_prefix(prefix) 227 | .map_err(|_| repak::Error::PrefixMismatch { 228 | path: f.to_string_lossy().to_string(), 229 | prefix: prefix.to_string_lossy().to_string(), 230 | }) 231 | }) 232 | .collect::<Result<Vec<_>, _>>()?; 233 | 234 | for f in stripped { 235 | println!("{}", f.to_slash_lossy()); 236 | } 237 | 238 | Ok(()) 239 | } 240 | 241 | fn hash_list(aes_key: Option<aes::Aes256>, action: ActionHashList) -> Result<(), repak::Error> { 242 | let mut builder = repak::PakBuilder::new(); 243 | if let Some(aes_key) = aes_key { 244 | builder = builder.key(aes_key); 245 | } 246 | let pak = builder.reader(&mut BufReader::new(File::open(&action.input)?))?; 247 | 248 | let mount_point = PathBuf::from(pak.mount_point()); 249 | let prefix = Path::new(&action.strip_prefix); 250 | 251 | let full_paths = pak 252 | .files() 253 | .into_iter() 254 | .map(|f| (mount_point.join(&f), f)) 255 | .collect::<Vec<_>>(); 256 | let stripped = full_paths 257 | .iter() 258 | .map(|(full_path, _path)| { 259 | full_path 260 | .strip_prefix(prefix) 261 | .map_err(|_| repak::Error::PrefixMismatch { 262 | path: full_path.to_string_lossy().to_string(), 263 | prefix: prefix.to_string_lossy().to_string(), 264 | }) 265 | }) 266 | .collect::<Result<Vec<_>, _>>()?; 267 | 268 | let hashes: std::sync::Arc<std::sync::Mutex<BTreeMap<String, Vec<u8>>>> = 269 | Default::default(); 270 | full_paths.par_iter().zip(stripped).try_for_each_init( 271 | || (hashes.clone(), File::open(&action.input)), 272 | |(hashes, file), ((_full_path, path), stripped)| -> Result<(), repak::Error> { 273 | use sha2::Digest; 274 | 275 | let mut hasher = sha2::Sha256::new(); 276 | pak.read_file( 277 | path, 278 | &mut BufReader::new(file.as_ref().unwrap()), 279 | &mut hasher, 280 | )?; 281 | let hash = hasher.finalize(); 282 | hashes 283 | .lock() 284 | .unwrap() 285 | .insert(stripped.to_slash_lossy(), hash.to_vec()); 286 | Ok(()) 287 | }, 288 | )?; 289 | 290 | for (file, hash) in hashes.lock().unwrap().iter() { 291 | println!("{} {}", hex::encode(hash), file); 292 | } 293 | 294 | Ok(()) 295 | } 296 | 297 | const STYLE: &str = "[{elapsed_precise}] [{wide_bar}] {pos}/{len} ({eta})"; 298 | 299 | #[derive(Clone)] 300 | enum Output { 301 | Progress(indicatif::ProgressBar), 302 | Stdout, 303 | } 304 | impl Output { 305 | pub fn println<I: AsRef<str>>(&self, msg: I) { 306 | match self { 307 | Output::Progress(progress) => progress.println(msg), 308 | Output::Stdout => println!("{}", msg.as_ref()), 309 | } 310 | } 311 | } 312 |
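// Editor's note: log lines are routed through ProgressBar::println instead of
// a bare println! so indicatif can erase and redraw the bar around each
// message; verbose output therefore does not tear the progress display.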
313 | fn unpack(aes_key: Option<aes::Aes256>, action: ActionUnpack) -> Result<(), repak::Error> { 314 | for input in &action.input { 315 | let mut builder = repak::PakBuilder::new(); 316 | if let Some(aes_key) = aes_key.clone() { 317 | builder = builder.key(aes_key); 318 | } 319 | let pak = builder.reader(&mut BufReader::new(File::open(input)?))?; 320 | let output = action 321 | .output 322 | .as_ref() 323 | .map(PathBuf::from) 324 | .unwrap_or_else(|| Path::new(input).with_extension("")); 325 | match fs::create_dir(&output) { 326 | Ok(_) => Ok(()), 327 | Err(ref e) 328 | if action.output.is_some() && e.kind() == std::io::ErrorKind::AlreadyExists => 329 | { 330 | Ok(()) 331 | } 332 | Err(e) => Err(e), 333 | }?; 334 | if action.output.is_none() && !action.force && output.read_dir()?.next().is_some() { 335 | return Err(repak::Error::OutputNotEmpty( 336 | output.to_string_lossy().to_string(), 337 | )); 338 | } 339 | let mount_point = PathBuf::from(pak.mount_point()); 340 | let prefix = Path::new(&action.strip_prefix); 341 | 342 | struct UnpackEntry { 343 | entry_path: String, 344 | out_path: PathBuf, 345 | out_dir: PathBuf, 346 | } 347 | 348 | let entries = pak 349 | .files() 350 | .into_iter() 351 | .map(|entry_path| { 352 | let full_path = mount_point.join(&entry_path); 353 | if !action.include.is_empty() { 354 | if let Ok(stripped) = full_path.strip_prefix(prefix) { 355 | let options = glob::MatchOptions { 356 | case_sensitive: true, 357 | require_literal_separator: true, 358 | require_literal_leading_dot: false, 359 | }; 360 | if !action.include.iter().any(|i| { 361 | // check full file path 362 | i.matches_path_with(stripped, options) 363 | // check ancestor directories 364 | || stripped.ancestors().skip(1).any(|a| { 365 | i.matches_path_with(a, options) 366 | // hack to check ancestor directories with trailing slash 367 | || i.matches_path_with(&a.join(""), options) 368 | }) 369 | }) { 370 | return Ok(None); 371 | } 372 | } else { 373 | return Ok(None); 374 | } 375 | } 376 | let out_path = output 377 | .join(full_path.strip_prefix(prefix).map_err(|_| { 378 | repak::Error::PrefixMismatch { 379 | path: full_path.to_string_lossy().to_string(), 380 | prefix: prefix.to_string_lossy().to_string(), 381 | } 382 | })?) 383 | .clean(); 384 |
385 | if !out_path.starts_with(&output) { 386 | return Err(repak::Error::WriteOutsideOutput( 387 | out_path.to_string_lossy().to_string(), 388 | )); 389 | } 390 | 391 | let out_dir = out_path.parent().expect("will be a file").to_path_buf(); 392 | 393 | Ok(Some(UnpackEntry { 394 | entry_path, 395 | out_path, 396 | out_dir, 397 | })) 398 | }) 399 | .filter_map(|e| e.transpose()) 400 | .collect::<Result<Vec<_>, repak::Error>>()?; 401 | 402 | let progress = (!action.quiet).then(|| { 403 | indicatif::ProgressBar::new(entries.len() as u64) 404 | .with_style(indicatif::ProgressStyle::with_template(STYLE).unwrap()) 405 | }); 406 | let log = match &progress { 407 | Some(progress) => Output::Progress(progress.clone()), 408 | None => Output::Stdout, 409 | }; 410 | 411 | entries.par_iter().try_for_each_init( 412 | || (progress.clone(), File::open(input)), 413 | |(progress, file), entry| -> Result<(), repak::Error> { 414 | if action.verbose { 415 | log.println(format!("unpacking {}", entry.entry_path)); 416 | } 417 | fs::create_dir_all(&entry.out_dir)?; 418 | pak.read_file( 419 | &entry.entry_path, 420 | &mut BufReader::new( 421 | file.as_ref() 422 | .map_err(|e| repak::Error::Other(format!("error reading pak: {e}")))?, 423 | ), 424 | &mut fs::File::create(&entry.out_path)?, 425 | )?; 426 | if let Some(progress) = progress { 427 | progress.inc(1); 428 | } 429 | Ok(()) 430 | }, 431 | )?; 432 | if let Some(progress) = progress { 433 | progress.finish(); 434 | } 435 | 436 | if !action.quiet { 437 | println!( 438 | "Unpacked {} files to {} from {}", 439 | entries.len(), 440 | output.display(), 441 | input 442 | ); 443 | } 444 | } 445 | 446 | Ok(()) 447 | } 448 |
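// Editor's sketch (hypothetical paths): the --include patterns in unpack
// above match either the full stripped path or any ancestor directory, with
// or without a trailing slash, so each of these should extract
// Content/Maps/Arena.umap:
//
//   repak unpack Game.pak -i 'Content/Maps/Arena.umap'
//   repak unpack Game.pak -i 'Content/Maps'
//   repak unpack Game.pak -i 'Content/*/'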
449 | fn pack(args: ActionPack) -> Result<(), repak::Error> { 450 | let output = args.output.map(PathBuf::from).unwrap_or_else(|| { 451 | // NOTE: don't use `with_extension` here because it will replace e.g. the `.1` in 452 | // `test_v1.1`. 453 | PathBuf::from(format!("{}.pak", args.input)) 454 | }); 455 | 456 | fn collect_files(paths: &mut Vec<PathBuf>, dir: &Path) -> io::Result<()> { 457 | for entry in fs::read_dir(dir)? { 458 | let entry = entry?; 459 | let path = entry.path(); 460 | if path.is_dir() { 461 | collect_files(paths, &path)?; 462 | } else { 463 | paths.push(entry.path()); 464 | } 465 | } 466 | Ok(()) 467 | } 468 | let input_path = Path::new(&args.input); 469 | if !input_path.is_dir() { 470 | return Err(repak::Error::InputNotADirectory( 471 | input_path.to_string_lossy().to_string(), 472 | )); 473 | } 474 | let mut paths = vec![]; 475 | collect_files(&mut paths, input_path)?; 476 | paths.sort(); 477 | 478 | let mut pak = repak::PakBuilder::new() 479 | .compression(args.compression.iter().cloned()) 480 | .writer( 481 | BufWriter::new(File::create(&output)?), 482 | args.version, 483 | args.mount_point, 484 | Some(args.path_hash_seed), 485 | ); 486 | 487 | use indicatif::ProgressIterator; 488 | 489 | let iter = paths.iter(); 490 | let (log, iter) = if !args.quiet { 491 | let iter = 492 | iter.progress_with_style(indicatif::ProgressStyle::with_template(STYLE).unwrap()); 493 | ( 494 | Output::Progress(iter.progress.clone()), 495 | itertools::Either::Left(iter), 496 | ) 497 | } else { 498 | (Output::Stdout, itertools::Either::Right(iter)) 499 | }; 500 | let log = log.clone(); 501 | 502 | let mut result = None; 503 | let result_ref = &mut result; 504 | rayon::in_place_scope(|scope| -> Result<(), repak::Error> { 505 | let (tx, rx) = std::sync::mpsc::sync_channel(0); 506 | let entry_builder = pak.entry_builder(); 507 | 508 | scope.spawn(move |_| { 509 | *result_ref = Some( 510 | iter.par_bridge() 511 | .try_for_each(|p| -> Result<(), repak::Error> { 512 | let rel = &p 513 | .strip_prefix(input_path) 514 | .expect("file not in input directory") 515 | .to_slash() 516 | .expect("failed to convert to slash path"); 517 | if args.verbose { 518 | log.println(format!("packing {}", &rel)); 519 | } 520 | let entry = entry_builder.build_entry(true, std::fs::read(p)?)?; 521 | 522 | tx.send((rel.to_string(), entry)).unwrap(); 523 | Ok(()) 524 | }), 525 | ); 526 | }); 527 | 528 | for (path, entry) in rx { 529 | pak.write_entry(path, entry)?; 530 | } 531 | Ok(()) 532 | })?; 533 | result.unwrap()?; 534 | 535 | pak.write_index()?; 536 | 537 | if !args.quiet { 538 | println!("Packed {} files to {}", paths.len(), output.display()); 539 | } 540 | 541 | Ok(()) 542 | } 543 | 544 | fn get(aes_key: Option<aes::Aes256>, args: ActionGet) -> Result<(), repak::Error> { 545 | let mut reader = BufReader::new(File::open(&args.input)?); 546 | let mut builder = repak::PakBuilder::new(); 547 | if let Some(aes_key) = aes_key { 548 | builder = builder.key(aes_key); 549 | } 550 | let pak = builder.reader(&mut reader)?; 551 | let mount_point = PathBuf::from(pak.mount_point()); 552 | let prefix = Path::new(&args.strip_prefix); 553 | 554 | let full_path = prefix.join(args.file); 555 | let file = full_path 556 | .strip_prefix(&mount_point) 557 | .map_err(|_| repak::Error::PrefixMismatch { 558 | path: full_path.to_string_lossy().to_string(), 559 | prefix: mount_point.to_string_lossy().to_string(), 560 | })?; 561 | 562 | use std::io::Write; 563 | std::io::stdout().write_all(&pak.get(&file.to_slash_lossy(), &mut reader)?)?; 564 | Ok(()) 565 | } 566 | -------------------------------------------------------------------------------- /repak/src/pak.rs: -------------------------------------------------------------------------------- 1 | use crate::data::build_partial_entry; 2 | use crate::entry::Entry; 3 | use crate::{Compression, Error, PartialEntry}; 4 | 5 | use super::ext::{ReadExt, WriteExt}; 6 | use super::{Version, VersionMajor}; 7 | use byteorder::{ReadBytesExt, WriteBytesExt, LE};
8 | use std::collections::BTreeMap; 9 | use std::io::{self, Read, Seek, Write}; 10 | 11 | #[derive(Default, Clone, Copy)] 12 | pub(crate) struct Hash(pub(crate) [u8; 20]); 13 | impl std::fmt::Debug for Hash { 14 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 15 | write!(f, "Hash({})", hex::encode(self.0)) 16 | } 17 | } 18 | 19 | #[derive(Debug)] 20 | pub struct PakBuilder { 21 | key: super::Key, 22 | allowed_compression: Vec<Compression>, 23 | } 24 | 25 | impl Default for PakBuilder { 26 | fn default() -> Self { 27 | Self::new() 28 | } 29 | } 30 | 31 | impl PakBuilder { 32 | pub fn new() -> Self { 33 | Self { 34 | key: Default::default(), 35 | allowed_compression: Default::default(), 36 | } 37 | } 38 | #[cfg(feature = "encryption")] 39 | pub fn key(mut self, key: aes::Aes256) -> Self { 40 | self.key = super::Key::Some(key); 41 | self 42 | } 43 | #[cfg(feature = "compression")] 44 | pub fn compression(mut self, compression: impl IntoIterator<Item = Compression>) -> Self { 45 | self.allowed_compression = compression.into_iter().collect(); 46 | self 47 | } 48 | pub fn reader<R: Read + Seek>(self, reader: &mut R) -> Result<PakReader, super::Error> { 49 | PakReader::new_any_inner(reader, self.key) 50 | } 51 | pub fn reader_with_version<R: Read + Seek>( 52 | self, 53 | reader: &mut R, 54 | version: super::Version, 55 | ) -> Result<PakReader, super::Error> { 56 | PakReader::new_inner(reader, version, self.key) 57 | } 58 | pub fn writer<W: Write + Seek>( 59 | self, 60 | writer: W, 61 | version: super::Version, 62 | mount_point: String, 63 | path_hash_seed: Option<u64>, 64 | ) -> PakWriter<W> { 65 | PakWriter::new_inner( 66 | writer, 67 | self.key, 68 | version, 69 | mount_point, 70 | path_hash_seed, 71 | self.allowed_compression, 72 | ) 73 | } 74 | } 75 | 76 | #[derive(Debug)] 77 | pub struct PakReader { 78 | pak: Pak, 79 | key: super::Key, 80 | } 81 | 82 | #[derive(Debug)] 83 | pub struct PakWriter<W: Write + Seek> { 84 | pak: Pak, 85 | writer: W, 86 | key: super::Key, 87 | allowed_compression: Vec<Compression>, 88 | } 89 | 90 | #[derive(Debug)] 91 | pub(crate) struct Pak { 92 | version: Version, 93 | mount_point: String, 94 | index_offset: Option<u64>, 95 | index: Index, 96 | encrypted_index: bool, 97 | encryption_guid: Option<u128>, 98 | compression: Vec<Option<Compression>>, 99 | } 100 | 101 | impl Pak { 102 | fn new(version: Version, mount_point: String, path_hash_seed: Option<u64>) -> Self { 103 | Pak { 104 | version, 105 | mount_point, 106 | index_offset: None, 107 | index: Index::new(path_hash_seed), 108 | encrypted_index: false, 109 | encryption_guid: None, 110 | compression: (if version.version_major() < VersionMajor::FNameBasedCompression { 111 | vec![ 112 | Some(Compression::Zlib), 113 | Some(Compression::Gzip), 114 | Some(Compression::Oodle), 115 | ] 116 | } else { 117 | vec![] 118 | }), 119 | } 120 | } 121 | } 122 | 123 | #[derive(Debug, Default)] 124 | pub(crate) struct Index { 125 | path_hash_seed: Option<u64>, 126 | entries: BTreeMap<String, super::entry::Entry>, 127 | } 128 | 129 | impl Index { 130 | fn new(path_hash_seed: Option<u64>) -> Self { 131 | Index { 132 | path_hash_seed, 133 | ..Index::default() 134 | } 135 | } 136 | 137 | fn entries(&self) -> &BTreeMap<String, super::entry::Entry> { 138 | &self.entries 139 | } 140 | 141 | fn into_entries(self) -> BTreeMap<String, super::entry::Entry> { 142 | self.entries 143 | } 144 | 145 | fn add_entry(&mut self, path: String, entry: super::entry::Entry) { 146 | self.entries.insert(path, entry); 147 | } 148 | } 149 |
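// Editor's note: pak encryption is AES-256 in ECB mode: every 16-byte block
// is decrypted independently with no IV or chaining, which is why decrypt
// below simply runs BlockDecrypt over each 16-byte chunk.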
150 | #[cfg(feature = "encryption")] 151 | fn decrypt(key: &super::Key, bytes: &mut [u8]) -> Result<(), super::Error> { 152 | if let super::Key::Some(key) = key { 153 | use aes::cipher::BlockDecrypt; 154 | for chunk in bytes.chunks_mut(16) { 155 | key.decrypt_block(aes::Block::from_mut_slice(chunk)) 156 | } 157 | Ok(()) 158 | } else { 159 | Err(super::Error::Encrypted) 160 | } 161 | } 162 | 163 | impl PakReader { 164 | fn new_any_inner<R: Read + Seek>( 165 | reader: &mut R, 166 | key: super::Key, 167 | ) -> Result<Self, super::Error> { 168 | use std::fmt::Write; 169 | let mut log = "\n".to_owned(); 170 | 171 | for ver in Version::iter() { 172 | match Pak::read(&mut *reader, ver, &key) { 173 | Ok(pak) => return Ok(Self { pak, key }), 174 | Err(err) => writeln!(log, "trying version {} failed: {}", ver, err)?, 175 | } 176 | } 177 | Err(super::Error::UnsupportedOrEncrypted(log)) 178 | } 179 | 180 | fn new_inner<R: Read + Seek>( 181 | reader: &mut R, 182 | version: super::Version, 183 | key: super::Key, 184 | ) -> Result<Self, super::Error> { 185 | Pak::read(reader, version, &key).map(|pak| Self { pak, key }) 186 | } 187 | 188 | pub fn version(&self) -> super::Version { 189 | self.pak.version 190 | } 191 | 192 | pub fn mount_point(&self) -> &str { 193 | &self.pak.mount_point 194 | } 195 | 196 | pub fn encrypted_index(&self) -> bool { 197 | self.pak.encrypted_index 198 | } 199 | 200 | pub fn encryption_guid(&self) -> Option<u128> { 201 | self.pak.encryption_guid 202 | } 203 | 204 | pub fn path_hash_seed(&self) -> Option<u64> { 205 | self.pak.index.path_hash_seed 206 | } 207 | 208 | pub fn get<R: Read + Seek>(&self, path: &str, reader: &mut R) -> Result<Vec<u8>, super::Error> { 209 | let mut data = Vec::new(); 210 | self.read_file(path, reader, &mut data)?; 211 | Ok(data) 212 | } 213 | 214 | pub fn read_file<R: Read + Seek, W: Write>( 215 | &self, 216 | path: &str, 217 | reader: &mut R, 218 | writer: &mut W, 219 | ) -> Result<(), super::Error> { 220 | match self.pak.index.entries().get(path) { 221 | Some(entry) => entry.read_file( 222 | reader, 223 | self.pak.version, 224 | &self.pak.compression, 225 | &self.key, 226 | writer, 227 | ), 228 | None => Err(super::Error::MissingEntry(path.to_owned())), 229 | } 230 | } 231 | 232 | pub fn files(&self) -> Vec<String> { 233 | self.pak.index.entries().keys().cloned().collect() 234 | } 235 | 236 | pub fn into_pakwriter<W: Write + Seek>( 237 | self, 238 | mut writer: W, 239 | ) -> Result<PakWriter<W>, super::Error> { 240 | writer.seek(io::SeekFrom::Start(self.pak.index_offset.unwrap()))?; 241 | Ok(PakWriter { 242 | allowed_compression: self.pak.compression.iter().filter_map(|c| *c).collect(), 243 | pak: self.pak, 244 | key: self.key, 245 | writer, 246 | }) 247 | } 248 | } 249 |
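// Editor's sketch (hypothetical paths, mirrors the reader API above):
//
//   use std::{fs::File, io::BufReader};
//
//   fn dump_one() -> Result<(), repak::Error> {
//       let mut reader = BufReader::new(File::open("Game.pak")?);
//       let pak = repak::PakBuilder::new().reader(&mut reader)?;
//       let bytes = pak.get("Game/Content/foo.uasset", &mut reader)?;
//       println!("{} bytes", bytes.len());
//       Ok(())
//   }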
250 | impl<W: Write + Seek> PakWriter<W> { 251 | fn new_inner( 252 | writer: W, 253 | key: super::Key, 254 | version: Version, 255 | mount_point: String, 256 | path_hash_seed: Option<u64>, 257 | allowed_compression: Vec<Compression>, 258 | ) -> Self { 259 | PakWriter { 260 | pak: Pak::new(version, mount_point, path_hash_seed), 261 | writer, 262 | key, 263 | allowed_compression, 264 | } 265 | } 266 | 267 | pub fn into_writer(self) -> W { 268 | self.writer 269 | } 270 | 271 | pub fn write_file( 272 | &mut self, 273 | path: &str, 274 | allow_compress: bool, 275 | data: impl AsRef<[u8]>, 276 | ) -> Result<(), super::Error> { 277 | self.pak.index.add_entry( 278 | path.to_string(), 279 | Entry::write_file( 280 | &mut self.writer, 281 | self.pak.version, 282 | &mut self.pak.compression, 283 | if allow_compress { 284 | &self.allowed_compression 285 | } else { 286 | &[] 287 | }, 288 | data.as_ref(), 289 | )?, 290 | ); 291 | 292 | Ok(()) 293 | } 294 | 295 | pub fn entry_builder(&self) -> EntryBuilder { 296 | EntryBuilder { 297 | allowed_compression: self.allowed_compression.clone(), 298 | } 299 | } 300 | 301 | pub fn write_entry<D: AsRef<[u8]>>( 302 | &mut self, 303 | path: String, 304 | partial_entry: PartialEntry<D>, 305 | ) -> Result<(), Error> { 306 | let stream_position = self.writer.stream_position()?; 307 | 308 | let entry = partial_entry.build_entry( 309 | self.pak.version, 310 | &mut self.pak.compression, 311 | stream_position, 312 | )?; 313 | 314 | entry.write( 315 | &mut self.writer, 316 | self.pak.version, 317 | crate::entry::EntryLocation::Data, 318 | )?; 319 | 320 | self.pak.index.add_entry(path, entry); 321 | partial_entry.write_data(&mut self.writer)?; 322 | 323 | Ok(()) 324 | } 325 | pub fn write_index(mut self) -> Result<W, super::Error> { 326 | self.pak.write(&mut self.writer, &self.key)?; 327 | Ok(self.writer) 328 | } 329 | } 330 | 331 | struct Data<'d>(Box<dyn AsRef<[u8]> + Send + Sync + 'd>); 332 | impl AsRef<[u8]> for Data<'_> { 333 | fn as_ref(&self) -> &[u8] { 334 | self.0.as_ref().as_ref() 335 | } 336 | } 337 | 338 | #[derive(Clone)] 339 | pub struct EntryBuilder { 340 | allowed_compression: Vec<Compression>, 341 | } 342 | impl EntryBuilder { 343 | /// Builds an entry in memory (compressed if requested) which must be written out later 344 | pub fn build_entry<D: AsRef<[u8]> + Send + Sync>( 345 | &self, 346 | compress: bool, 347 | data: D, 348 | ) -> Result<PartialEntry<D>, Error> { 349 | let compression = compress 350 | .then_some(self.allowed_compression.as_slice()) 351 | .unwrap_or_default(); 352 | build_partial_entry(compression, data) 353 | } 354 | } 355 | 356 | impl Pak { 357 | fn read<R: Read + Seek>( 358 | reader: &mut R, 359 | version: super::Version, 360 | #[allow(unused)] key: &super::Key, 361 | ) -> Result<Self, super::Error> { 362 | // read footer to get index, encryption & compression info 363 | reader.seek(io::SeekFrom::End(-version.size()))?; 364 | let footer = super::footer::Footer::read(reader, version)?; 365 | // read index to get all the entry info 366 | reader.seek(io::SeekFrom::Start(footer.index_offset))?; 367 | #[allow(unused_mut)] 368 | let mut index = reader.read_len(footer.index_size as usize)?; 369 | 370 | // decrypt index if needed 371 | if footer.encrypted { 372 | #[cfg(not(feature = "encryption"))] 373 | return Err(super::Error::Encryption); 374 | #[cfg(feature = "encryption")] 375 | decrypt(key, &mut index)?; 376 | } 377 | 378 | let mut index = io::Cursor::new(index); 379 | let mount_point = index.read_string()?; 380 | let len = index.read_u32::<LE>()? as usize; 381 | 382 | let index = if version.version_major() >= VersionMajor::PathHashIndex { 383 | let path_hash_seed = index.read_u64::<LE>()?; 384 | 385 | // Left in for potential desire to verify path index hashes. 386 | let _path_hash_index = if index.read_u32::<LE>()? != 0 { 387 | let path_hash_index_offset = index.read_u64::<LE>()?; 388 | let path_hash_index_size = index.read_u64::<LE>()?; 389 | let _path_hash_index_hash = index.read_len(20)?; 390 | 391 | reader.seek(io::SeekFrom::Start(path_hash_index_offset))?; 392 | let mut path_hash_index_buf = reader.read_len(path_hash_index_size as usize)?; 393 | // TODO verify hash 394 | 395 | if footer.encrypted { 396 | #[cfg(not(feature = "encryption"))] 397 | return Err(super::Error::Encryption); 398 | #[cfg(feature = "encryption")] 399 | decrypt(key, &mut path_hash_index_buf)?; 400 | } 401 | 402 | let mut path_hash_index = vec![]; 403 | let mut phi_reader = io::Cursor::new(&mut path_hash_index_buf); 404 | for _ in 0..phi_reader.read_u32::<LE>()? { 405 | let hash = phi_reader.read_u64::<LE>()?; 406 | let encoded_entry_offset = phi_reader.read_u32::<LE>()?; 407 | path_hash_index.push((hash, encoded_entry_offset)); 408 | } 409 | 410 | Some(path_hash_index) 411 | } else { 412 | None 413 | }; 414 | 415 | // Left in for potential desire to verify full directory index hashes. 416 | let full_directory_index = if index.read_u32::<LE>()? != 0 {
417 | let full_directory_index_offset = index.read_u64::<LE>()?; 418 | let full_directory_index_size = index.read_u64::<LE>()?; 419 | let _full_directory_index_hash = index.read_len(20)?; 420 | 421 | reader.seek(io::SeekFrom::Start(full_directory_index_offset))?; 422 | #[allow(unused_mut)] 423 | let mut full_directory_index = 424 | reader.read_len(full_directory_index_size as usize)?; 425 | // TODO verify hash 426 | 427 | if footer.encrypted { 428 | #[cfg(not(feature = "encryption"))] 429 | return Err(super::Error::Encryption); 430 | #[cfg(feature = "encryption")] 431 | decrypt(key, &mut full_directory_index)?; 432 | } 433 | let mut fdi = io::Cursor::new(full_directory_index); 434 | 435 | let dir_count = fdi.read_u32::<LE>()? as usize; 436 | let mut directories = BTreeMap::new(); 437 | for _ in 0..dir_count { 438 | let dir_name = fdi.read_string()?; 439 | let file_count = fdi.read_u32::<LE>()? as usize; 440 | let mut files = BTreeMap::new(); 441 | for _ in 0..file_count { 442 | let file_name = fdi.read_string()?; 443 | files.insert(file_name, fdi.read_u32::<LE>()?); 444 | } 445 | directories.insert(dir_name, files); 446 | } 447 | Some(directories) 448 | } else { 449 | None 450 | }; 451 | let size = index.read_u32::<LE>()? as usize; 452 | let encoded_entries = index.read_len(size)?; 453 | 454 | let mut entries_by_path = BTreeMap::new(); 455 | if let Some(fdi) = &full_directory_index { 456 | let mut encoded_entries = io::Cursor::new(&encoded_entries); 457 | for (dir_name, dir) in fdi { 458 | for (file_name, encoded_offset) in dir { 459 | if *encoded_offset == 0x80000000 { 460 | println!("{file_name:?} has invalid offset: 0x{encoded_offset:08x}"); 461 | continue; 462 | } 463 | encoded_entries.seek(io::SeekFrom::Start(*encoded_offset as u64))?; 464 | let entry = 465 | super::entry::Entry::read_encoded(&mut encoded_entries, version)?; 466 | let path = format!( 467 | "{}{}", 468 | dir_name.strip_prefix('/').unwrap_or(dir_name), 469 | file_name 470 | ); 471 | entries_by_path.insert(path, entry); 472 | } 473 | } 474 | } 475 | 476 | assert_eq!(index.read_u32::<LE>()?, 0, "remaining index bytes are 0"); // TODO possibly remaining unencoded entries?
477 | 478 | Index { 479 | path_hash_seed: Some(path_hash_seed), 480 | entries: entries_by_path, 481 | } 482 | } else { 483 | let mut entries = BTreeMap::new(); 484 | for _ in 0..len { 485 | entries.insert( 486 | index.read_string()?, 487 | super::entry::Entry::read(&mut index, version)?, 488 | ); 489 | } 490 | Index { 491 | path_hash_seed: None, 492 | entries, 493 | } 494 | }; 495 | 496 | Ok(Pak { 497 | version, 498 | mount_point, 499 | index_offset: Some(footer.index_offset), 500 | index, 501 | encrypted_index: footer.encrypted, 502 | encryption_guid: footer.encryption_uuid, 503 | compression: footer.compression, 504 | }) 505 | } 506 | 507 | fn write<W: Write + Seek>( 508 | &self, 509 | writer: &mut W, 510 | _key: &super::Key, 511 | ) -> Result<(), super::Error> { 512 | let index_offset = writer.stream_position()?; 513 | 514 | let mut index_buf = vec![]; 515 | let mut index_writer = io::Cursor::new(&mut index_buf); 516 | index_writer.write_string(&self.mount_point)?; 517 | 518 | let secondary_index = if self.version < super::Version::V10 { 519 | let record_count = self.index.entries.len() as u32; 520 | index_writer.write_u32::<LE>(record_count)?; 521 | for (path, entry) in &self.index.entries { 522 | index_writer.write_string(path)?; 523 | entry.write( 524 | &mut index_writer, 525 | self.version, 526 | super::entry::EntryLocation::Index, 527 | )?; 528 | } 529 | None 530 | } else { 531 | let record_count = self.index.entries.len() as u32; 532 | let path_hash_seed = self.index.path_hash_seed.unwrap_or_default(); 533 | index_writer.write_u32::<LE>(record_count)?; 534 | index_writer.write_u64::<LE>(path_hash_seed)?; 535 | 536 | let (encoded_entries, offsets) = { 537 | let mut offsets = Vec::with_capacity(self.index.entries.len()); 538 | let mut encoded_entries = io::Cursor::new(vec![]); 539 | for entry in self.index.entries.values() { 540 | offsets.push(encoded_entries.get_ref().len() as u32); 541 | entry.write_encoded(&mut encoded_entries, self.version)?; 542 | } 543 | (encoded_entries.into_inner(), offsets) 544 | }; 545 | 546 | // The index is organized sequentially as: 547 | // - Index Header, which contains: 548 | //   - Mount Point (u32 len + string w/ terminating byte) 549 | //   - Entry Count (u32) 550 | //   - Path Hash Seed (u64) 551 | //   - Has Path Hash Index (u32); if true, then: 552 | //     - Path Hash Index Offset (u64) 553 | //     - Path Hash Index Size (u64) 554 | //     - Path Hash Index Hash ([u8; 20]) 555 | //   - Has Full Directory Index (u32); if true, then: 556 | //     - Full Directory Index Offset (u64) 557 | //     - Full Directory Index Size (u64) 558 | //     - Full Directory Index Hash ([u8; 20]) 559 | //   - Encoded Index Records Size 560 | //   - (Unused) File Count 561 | // - Path Hash Index 562 | // - Full Directory Index 563 | // - Encoded Index Records; each encoded index record is 0xC bytes: 564 | //   - Flags (u32) 565 | //   - Offset (u32) 566 | //   - Size (u32)
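// Editor's worked example (hypothetical numbers): for the usual "../../../"
// mount point (9 chars plus NUL = 10 bytes) and 100 bytes of encoded entries,
// the header below is 4 + 10 + 8 + 4 + 4 + (8 + 8 + 20) + 4 + (8 + 8 + 20)
// + 4 + 100 + 4 = 214 bytes, so the path hash index begins at
// index_offset + 214.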
567 | let bytes_before_phi = { 568 | let mut size = 0; 569 | size += 4; // mount point len 570 | size += self.mount_point.len() as u64 + 1; // mount point string w/ NUL byte 571 | size += 8; // path hash seed 572 | size += 4; // record count 573 | size += 4; // has path hash index (since we're generating, always true) 574 | size += 8 + 8 + 20; // path hash index offset, size and hash 575 | size += 4; // has full directory index (since we're generating, always true) 576 | size += 8 + 8 + 20; // full directory index offset, size and hash 577 | size += 4; // encoded entry size 578 | size += encoded_entries.len() as u64; 579 | size += 4; // unused file count 580 | size 581 | }; 582 | 583 | let path_hash_index_offset = index_offset + bytes_before_phi; 584 | 585 | let mut phi_buf = vec![]; 586 | let mut phi_writer = io::Cursor::new(&mut phi_buf); 587 | generate_path_hash_index( 588 | &mut phi_writer, 589 | path_hash_seed, 590 | &self.index.entries, 591 | &offsets, 592 | )?; 593 | 594 | let full_directory_index_offset = path_hash_index_offset + phi_buf.len() as u64; 595 | 596 | let mut fdi_buf = vec![]; 597 | let mut fdi_writer = io::Cursor::new(&mut fdi_buf); 598 | generate_full_directory_index(&mut fdi_writer, &self.index.entries, &offsets)?; 599 | 600 | index_writer.write_u32::<LE>(1)?; // we have path hash index 601 | index_writer.write_u64::<LE>(path_hash_index_offset)?; 602 | index_writer.write_u64::<LE>(phi_buf.len() as u64)?; // path hash index size 603 | index_writer.write_all(&hash(&phi_buf).0)?; 604 | 605 | index_writer.write_u32::<LE>(1)?; // we have full directory index 606 | index_writer.write_u64::<LE>(full_directory_index_offset)?; 607 | index_writer.write_u64::<LE>(fdi_buf.len() as u64)?; // full directory index size 608 | index_writer.write_all(&hash(&fdi_buf).0)?; 609 | 610 | index_writer.write_u32::<LE>(encoded_entries.len() as u32)?; 611 | index_writer.write_all(&encoded_entries)?; 612 | 613 | index_writer.write_u32::<LE>(0)?; 614 | 615 | Some((phi_buf, fdi_buf)) 616 | }; 617 | 618 | let index_hash = hash(&index_buf); 619 | 620 | writer.write_all(&index_buf)?; 621 | 622 | if let Some((phi_buf, fdi_buf)) = secondary_index { 623 | writer.write_all(&phi_buf[..])?; 624 | writer.write_all(&fdi_buf[..])?; 625 | } 626 | 627 | let footer = super::footer::Footer { 628 | encryption_uuid: None, 629 | encrypted: false, 630 | magic: super::MAGIC, 631 | version: self.version, 632 | version_major: self.version.version_major(), 633 | index_offset, 634 | index_size: index_buf.len() as u64, 635 | hash: index_hash, 636 | frozen: false, 637 | compression: self.compression.clone(), // TODO: avoid this clone 638 | }; 639 | 640 | footer.write(writer)?; 641 | 642 | Ok(()) 643 | } 644 | } 645 | 646 | fn hash(data: &[u8]) -> Hash { 647 | use sha1::{Digest, Sha1}; 648 | let mut hasher = Sha1::new(); 649 | hasher.update(data); 650 | Hash(hasher.finalize().into()) 651 | } 652 | 653 | fn generate_path_hash_index<W: Write>( 654 | writer: &mut W, 655 | path_hash_seed: u64, 656 | entries: &BTreeMap<String, super::entry::Entry>, 657 | offsets: &Vec<u32>, 658 | ) -> Result<(), super::Error> { 659 | writer.write_u32::<LE>(entries.len() as u32)?; 660 | for (path, offset) in entries.keys().zip(offsets) { 661 | let path_hash = fnv64_path(path, path_hash_seed); 662 | writer.write_u64::<LE>(path_hash)?; 663 | writer.write_u32::<LE>(*offset)?; 664 | } 665 | 666 | writer.write_u32::<LE>(0)?; 667 | 668 | Ok(()) 669 | } 670 | 671 | fn fnv64<I>(data: I, offset: u64) -> u64 672 | where 673 | I: IntoIterator<Item = u8>, 674 | { 675 | const OFFSET: u64 = 0xcbf29ce484222325; 676 | const PRIME: u64 = 0x00000100000001b3; 677 | let mut hash = OFFSET.wrapping_add(offset); 678 | for b in data.into_iter() { 679 | hash ^= b as u64; 680 | hash = hash.wrapping_mul(PRIME); 681 | } 682 | hash 683 | } 684 | 685 | fn fnv64_path(path: &str, offset: u64) -> u64 { 686 | let lower = path.to_lowercase(); 687 | let data = lower.encode_utf16().flat_map(u16::to_le_bytes); 688 | fnv64(data, offset) 689 | } 690 |
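// Editor's note: fnv64_path hashes the lowercased path as UTF-16LE bytes with
// an FNV-1a-style loop whose offset basis is shifted by the pak's seed, so
// hashes are case-insensitive: fnv64_path("A/B", s) == fnv64_path("a/b", s).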
691 | fn split_path_child(path: &str) -> Option<(&str, &str)> { 692 | if path == "/" || path.is_empty() { 693 | None 694 | } else { 695 | let path = path.strip_suffix('/').unwrap_or(path); 696 | let i = path.rfind('/').map(|i| i + 1); 697 | match i { 698 | Some(i) => Some(path.split_at(i)), 699 | None => Some(("/", path)), 700 | } 701 | } 702 | } 703 | 704 | fn generate_full_directory_index<W: Write>( 705 | writer: &mut W, 706 | entries: &BTreeMap<String, super::entry::Entry>, 707 | offsets: &Vec<u32>, 708 | ) -> Result<(), super::Error> { 709 | let mut fdi: BTreeMap<&str, BTreeMap<&str, u32>> = Default::default(); 710 | for (path, offset) in entries.keys().zip(offsets) { 711 | let mut p = path.as_str(); 712 | while let Some((parent, _)) = split_path_child(p) { 713 | p = parent; 714 | fdi.entry(p).or_default(); 715 | } 716 | 717 | let (directory, filename) = split_path_child(path).expect("non-root path"); 718 | 719 | fdi.entry(directory).or_default().insert(filename, *offset); 720 | } 721 | 722 | writer.write_u32::<LE>(fdi.len() as u32)?; 723 | for (directory, files) in &fdi { 724 | writer.write_string(directory)?; 725 | writer.write_u32::<LE>(files.len() as u32)?; 726 | for (filename, offset) in files { 727 | writer.write_string(filename)?; 728 | writer.write_u32::<LE>(*offset)?; 729 | } 730 | } 731 | 732 | Ok(()) 733 | } 734 | 735 | #[cfg(feature = "encryption")] 736 | fn encrypt(key: aes::Aes256, bytes: &mut [u8]) { 737 | use aes::cipher::BlockEncrypt; 738 | for chunk in bytes.chunks_mut(16) { 739 | key.encrypt_block(aes::Block::from_mut_slice(chunk)) 740 | } 741 | } 742 | 743 | #[cfg(test)] 744 | mod test { 745 | use super::*; 746 | 747 | #[test] 748 | fn test_split_path_child() { 749 | assert_eq!( 750 | split_path_child("a/really/long/path"), 751 | Some(("a/really/long/", "path")) 752 | ); 753 | assert_eq!( 754 | split_path_child("a/really/long/"), 755 | Some(("a/really/", "long")) 756 | ); 757 | assert_eq!(split_path_child("a"), Some(("/", "a"))); 758 | assert_eq!(split_path_child("a//b"), Some(("a//", "b"))); 759 | assert_eq!(split_path_child("a//"), Some(("a/", ""))); 760 | assert_eq!(split_path_child("/"), None); 761 | assert_eq!(split_path_child(""), None); 762 | } 763 | } 764 | --------------------------------------------------------------------------------