├── .github
└── workflows
│ └── build.yml
├── .gitignore
├── .gitmodules
├── .vscode
├── launch.json
├── settings.json
└── tasks.json
├── ArkUnpacker.ico
├── Build.py
├── CHANGELOG.md
├── LICENSE
├── Main.py
├── README.md
├── Test.py
├── docs
├── ArkModelsRepoKit.md
├── ArkVoiceRepoKit.md
├── AssetsGuide.md
├── ConfigFile.md
├── Essentials.md
├── ForDevelopers.md
└── TextAssetsDecoding.md
├── poetry.lock
├── pyproject.toml
├── src
├── CollectModels.py
├── CollectVoice.py
├── CombineRGBwithA.py
├── DecodeTextAsset.py
├── ModelsDataDist.py
├── ResolveAB.py
├── ResolveSpine.py
├── VoiceDataDist.py
├── __init__.py
├── lz4ak
│ ├── Block.py
│ └── __init__.py
└── utils
│ ├── ArgParser.py
│ ├── AtlasFile.py
│ ├── Config.py
│ ├── GlobalMethods.py
│ ├── Logger.py
│ ├── Profiler.py
│ ├── SaverUtils.py
│ ├── TaskUtils.py
│ └── __init__.py
└── test
└── res
├── client-2.2
├── activity-[uc]act1mainss.ab
├── activity-commonassets.ab
├── arts-charportraits-pack1.ab
├── arts-dynchars-char_2014_nian_2.ab
├── arts-effects-[pack]map.ab
├── arts-loadingillusts_1.ab
├── arts-rglktopic.ab
├── arts-ui-common.ab
├── audio-sound_beta_2-enemy-e_imp1.ab
├── audio-sound_beta_2-general_1.ab
├── audio-sound_beta_2-voice-char_002_amiya.ab
├── avg-characters-avg_003_kalts_1.ab
├── battle-prefabs-[uc]skills.ab
├── battle-prefabs-effects-amiya.ab
├── battle-prefabs-enemies-enemy_40.ab
├── chararts-char_002_amiya.ab
├── charpack-char_002_amiya.ab
├── gamedata-levels-enemydata.ab
├── i18n-string_map.ab
├── npcpack-npc_001_doctor.ab
├── refs-rglktp_rogue_1.ab
├── retro-permanent_sidestory_1_grani_and_the_treasure_of_knights.ab
├── scenes-activities-a001-level_a001_01-level_a001_01-lightingdata.ab
├── scenes-activities-a001-level_a001_01-level_a001_01.ab
├── skinpack-char_002_amiya.ab
├── spritepack-ui_camp_logo_h2_0.ab
└── ui-[uc]battlefinish.ab
├── client-2.4
├── arts-dynchars-char_2014_nian_nian#4.ab
├── chararts-char_377_gdglow.ab
└── chararts-char_388_mint.ab
└── client-2.5
├── anon-3d8cc04a4457d205f1c975252a7e71cf.bin
├── chararts-char_1026_gvial2.ab
└── chararts-char_4179_monstr.ab
/.github/workflows/build.yml:
--------------------------------------------------------------------------------
# CI workflow: build Windows distributables across all supported Python versions.
name: Build

on:
  push:
    branches:
      - "v4.x"

env:
  # Force UTF-8 console I/O so non-ASCII log text prints correctly on Windows.
  PYTHONIOENCODING: "UTF-8"

jobs:
  build:
    strategy:
      # Keep building the other matrix entries even if one Python version fails.
      fail-fast: false
      matrix:
        python-version: ["3.9.13", "3.10.11", "3.11.9", "3.12.9"]
        os: ["windows"]

    runs-on: ${{ matrix.os }}-latest

    env:
      # Human-readable matrix id, reused for the cache key and artifact name.
      matrix-name: ${{ matrix.os }}-python${{ matrix.python-version }}

    steps:
      - name: Checkout repo
        uses: actions/checkout@v4
        with:
          # src/fbs (FlatBuffers schemas) is a git submodule and must be fetched.
          submodules: recursive

      - name: Setup Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}

      - name: Install Poetry
        uses: abatilo/actions-poetry@v4
        with:
          poetry-version: 2.1.2

      - name: Define venv location
        run: |
          poetry config virtualenvs.create true --local
          poetry config virtualenvs.in-project true --local

      - name: Define venv caching
        # v4 required: the cache service backing actions/cache@v3 and earlier
        # has been deprecated; v4 also matches the other v4-era actions here.
        uses: actions/cache@v4
        with:
          path: ./.venv
          key: venv-${{ env.matrix-name }}-${{ hashFiles('poetry.lock') }}

      - name: Install dependencies
        run: |
          poetry install

      - name: Build distributable
        run: |
          poetry run python Build.py

      - name: Upload artifacts
        uses: actions/upload-artifact@v4
        with:
          name: ArkUnpacker-${{ env.matrix-name }}-${{ github.ref_name }}-${{ github.sha }}
          path: build/dist/*.*
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Exclude
2 | /*/
3 | __pycache__/
4 | test/cmb
5 | test/dta
6 | test/fbo
7 | test/spi
8 | test/upk
9 |
10 | *.exe
11 | *.json
12 | *.log
13 | *.csv
14 |
15 | # Include
16 | !.github/
17 | !.vscode/
18 | !docs/
19 | !src/
20 | !test/
21 |
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "src/fbs"]
2 | path = src/fbs
3 | url = https://github.com/isHarryh/Ark-FBS-Py.git
4 | branch = dist
5 |
--------------------------------------------------------------------------------
/.vscode/launch.json:
--------------------------------------------------------------------------------
1 | {
2 | // 使用 IntelliSense 了解相关属性。
3 | // 悬停以查看现有属性的描述。
4 | // 欲了解更多信息,请访问: https://go.microsoft.com/fwlink/?linkid=830387
5 | "version": "0.2.0",
6 | "configurations": [
7 | {
8 | "name": "Python: ArkUnpacker",
9 | "type": "debugpy",
10 | "request": "launch",
11 | "program": "Main.py",
12 | "console": "integratedTerminal",
13 | "justMyCode": true
14 | },
15 | {
16 | "name": "Python: Current File",
17 | "type": "debugpy",
18 | "request": "launch",
19 | "program": "${file}",
20 | "console": "integratedTerminal",
21 | "justMyCode": true
22 | },
23 | {
24 | "name": "Build: ArkUnpacker",
25 | "type": "debugpy",
26 | "request": "launch",
27 | "program": "Build.py",
28 | "console": "integratedTerminal",
29 | "justMyCode": true
30 | }
31 | ]
32 | }
33 |
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "python.analysis.typeCheckingMode": "standard",
3 | "cSpell.language": "en,en-US,en-GB",
4 | "cSpell.words": [
5 | "abfile",
6 | "analy",
7 | "archspec",
8 | "arkmodels",
9 | "arknights",
10 | "arkunpacker",
11 | "arkvoice",
12 | "audioclips",
13 | "bson",
14 | "chararts",
15 | "charpack",
16 | "curdestdir",
17 | "destdir",
18 | "destdirs",
19 | "dynchars",
20 | "dynillust",
21 | "dynillusts",
22 | "enemydata",
23 | "ensurepath",
24 | "flatbuffers",
25 | "flist",
26 | "fmod",
27 | "fmoderror",
28 | "fromarray",
29 | "gamedata",
30 | "getchannel",
31 | "hypergryph",
32 | "monobehaviors",
33 | "ndarray",
34 | "numpy",
35 | "pathid",
36 | "pipx",
37 | "putalpha",
38 | "pycryptodome",
39 | "pyproject",
40 | "quickaccess",
41 | "rootdir",
42 | "rtype",
43 | "skel",
44 | "skinpack",
45 | "srcdir",
46 | "srcdirs",
47 | "subdestdir",
48 | "subthread",
49 | "surrogateescape",
50 | "textasset",
51 | "textassets",
52 | "typetree",
53 | "unpad",
54 | "upkdir"
55 | ],
56 | "editor.defaultFormatter": "ms-python.black-formatter",
57 | "editor.formatOnSave": true
58 | }
59 |
--------------------------------------------------------------------------------
/.vscode/tasks.json:
--------------------------------------------------------------------------------
1 | {
2 | "version": "2.0.0",
3 | "tasks": [
4 | {
5 | "label": "Project Setup",
6 | "type": "shell",
7 | "command": "poetry install",
8 | "group": {
9 | "kind": "build",
10 | "isDefault": true
11 | },
12 | "icon": {
13 | "id": "project"
14 | },
15 | "runOptions": {
16 | "runOn": "folderOpen",
17 | "instanceLimit": 1
18 | }
19 | },
20 | {
21 | "label": "Build Dist",
22 | "type": "shell",
23 | "command": "python Build.py",
24 | "group": {
25 | "kind": "build",
26 | "isDefault": true
27 | },
28 | "icon": {
29 | "id": "project"
30 | },
31 | "runOptions": {
32 | "instanceLimit": 1
33 | }
34 | },
35 | {
36 | "label": "Test",
37 | "type": "shell",
38 | "command": "python Test.py",
39 | "group": {
40 | "kind": "build",
41 | "isDefault": true
42 | },
43 | "icon": {
44 | "id": "project"
45 | },
46 | "runOptions": {
47 | "instanceLimit": 1
48 | }
49 | }
50 | ]
51 | }
52 |
--------------------------------------------------------------------------------
/ArkUnpacker.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/ArkUnpacker.ico
--------------------------------------------------------------------------------
/Build.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2022-2025, Harry Huang
3 | # @ BSD 3-Clause License
4 | import os
5 | import os.path as osp
6 | import toml
7 |
8 |
def __get_venv_dir():
    """Queries the Poetry-managed virtualenv directory.

    Runs ``poetry env info`` and scans its output for a ``Path: ...`` line
    that points to an existing directory.

    :returns: The absolute path of the virtualenv directory.
    :raises Exception: If Poetry fails to run or no valid path is found.
    """
    import re, subprocess

    rst = subprocess.run(["poetry", "env", "info"], capture_output=True)

    if rst.returncode == 0:
        # Compile once, outside the loop, instead of re-compiling per line.
        pattern = re.compile(r"Path:\s+(.+)")
        for l in rst.stdout.splitlines():
            match = pattern.search(str(l, encoding="UTF-8"))
            if match:
                path = match.group(1).strip()
                # Accept the first "Path" entry that is an existing directory.
                if osp.isdir(path):
                    return path
        print("× Failed to parse poetry output to query venv dir.")
    else:
        print(
            f"× Failed to run poetry to query venv dir. Returned code: {rst.returncode}"
        )
        print(f"- StdErr: {rst.stderr}")
        print(f"- StdOut: {rst.stdout}")
        print("- Please check the compatibility of poetry version.")
        print("- Please check the poetry status and the venv info.")
    raise Exception("venv dir not found or poetry config failed")
31 |
32 |
def __get_proj_info():
    """Reads the PEP 621 ``[project]`` table from ``pyproject.toml``.

    :returns: A dict with keys ``name``, ``version``, ``description``,
        ``author`` and ``license``.
    :raises KeyError: If a required field is missing.
    :raises Exception: If the file cannot be read or parsed.
    """
    try:
        config = toml.load("pyproject.toml")
        config = config["project"]
        return {
            "name": config["name"],
            "version": config["version"],
            "description": config["description"],
            "author": config["authors"][0]["name"],
            "license": config["license"],
        }
    except KeyError as arg:
        # Use the same "×" marker as the other error messages in this script.
        print(f"× Required field missing, {arg}")
        # Bare raise re-raises the active exception idiomatically.
        raise
    except Exception:
        print("× Failed to parse poetry project info.")
        raise
50 |
51 |
def __get_build_def(proj_dir, venv_dir):
    """Reads the ``[tool.build]`` table from pyproject.toml, expanding the
    ``$project$`` and ``$venv$`` placeholders in every field value.

    :param proj_dir: Project root directory path.
    :param venv_dir: Virtualenv directory path.
    :returns: A dict of build definition fields with placeholders expanded.
    :raises Exception: If the table cannot be read or parsed.
    """
    try:
        fields = toml.load("pyproject.toml")["tool"]["build"]
        resolved = {}
        for key, value in fields.items():
            value = value.replace("$project$", proj_dir)
            resolved[key] = value.replace("$venv$", venv_dir)
        return resolved
    except Exception as arg:
        print("× Failed to parse build definition fields.")
        raise arg
62 |
63 |
def __main():
    """Entry point: gathers project metadata, prints a summary, and builds."""
    proj_dir = osp.dirname(osp.abspath(__file__))
    venv_dir = __get_venv_dir()
    proj_info = __get_proj_info()
    build_def = __get_build_def(proj_dir, venv_dir)
    # Compact one-line project summary: name|version|author|license.
    summary = "|".join(
        (
            proj_info["name"],
            proj_info["version"],
            proj_info["author"],
            proj_info["license"],
        )
    )
    print(f"Project: {summary}")
    print(f"Root: {proj_dir}")
    print(f"Venv: {venv_dir}")
    print("")
    __build(proj_info, proj_dir, build_def)
    exit(0)
77 |
78 |
def __exec(cmd):
    """Runs a shell command, echoing the outcome; exits the process on failure.

    :param cmd: The command line string to execute via ``os.system``.
    """
    rst = os.system(cmd)
    # Guard clause: abort the whole build on any non-zero status.
    if rst != 0:
        print(f"\n[Error] <- {cmd}")
        print(f"× Execution failed! Returned code: {rst}")
        exit(1)
    print(f"\n[Done] <- {cmd}")
87 |
88 |
def __build(proj_info, proj_dir, build_def):
    """Builds the distributable executable with PyInstaller.

    Recreates the build directory, writes a Windows VERSIONINFO resource file
    from the project metadata, then invokes PyInstaller through Poetry.

    :param proj_info: Project metadata dict (name, version, author, ...).
    :param proj_dir: Project root directory path.
    :param build_def: Build definition fields from ``[tool.build]``.
    """
    import time, shutil

    t1 = time.time()

    # Start from a clean build directory; fail loudly if removal is blocked.
    print(f"Removing build dir...")
    os.chdir(proj_dir)
    build_dir = build_def["build-dir"]
    if osp.exists(build_dir):
        shutil.rmtree(build_dir, ignore_errors=False)

    print(f"Creating build dir...")
    os.mkdir(build_dir)
    # All subsequent relative paths (version file, dist output) live here.
    os.chdir(build_dir)

    # Write a PyInstaller VERSIONINFO resource so the produced EXE carries the
    # project's version/author metadata in its Windows file properties.
    # Dots in the version are turned into commas for the (a,b,c,d) tuples.
    print(f"Creating version file...")
    version_file = "version.txt"
    with open(version_file, "w", encoding="UTF-8") as f:
        # spell-checker: disable
        f.write(
            f"""# UTF-8
VSVersionInfo(
ffi=FixedFileInfo(
filevers=({proj_info['version'].replace('.',',')},0),
prodvers=({proj_info['version'].replace('.',',')},0),
mask=0x3f,
flags=0x0,
OS=0x4,
fileType=0x1,
subtype=0x0,
date=(0,0)
),
kids=[
StringFileInfo([
StringTable(
u'040904B0',
[StringStruct(u'CompanyName', u'{proj_info['author']}'),
StringStruct(u'FileDescription', u'{proj_info['description']}'),
StringStruct(u'FileVersion', u'{proj_info['version']}'),
StringStruct(u'LegalCopyright', u'©{proj_info['author']} @{proj_info['license']} License'),
StringStruct(u'ProductName', u'{proj_info['name']}'),
StringStruct(u'ProductVersion', u'{proj_info['version']}')])
])
])
"""
    ) # End f.write
    # spell-checker: enable

    # Assemble the PyInstaller command line; -F builds a one-file executable.
    # The "add-binary", "add-data" and "hidden-import" fields are
    # "|"-separated lists; empty segments are skipped.
    print("Running pyinstaller...")
    cmd_pyinstaller = f"poetry run pyinstaller -F"
    cmd_pyinstaller += f" --name \"{proj_info['name']}-v{proj_info['version']}\""
    cmd_pyinstaller += f" --version-file {version_file}"
    cmd_pyinstaller += (
        f" --icon \"{build_def['icon']}\"" if "icon" in build_def.keys() else ""
    )
    if "add-binary" in build_def.keys():
        for i in build_def["add-binary"].split("|"):
            cmd_pyinstaller += f' --add-binary "{i}"' if i else ""
    if "add-data" in build_def.keys():
        for i in build_def["add-data"].split("|"):
            cmd_pyinstaller += f' --add-data "{i}"' if i else ""
    if "hidden-import" in build_def.keys():
        for i in build_def["hidden-import"].split("|"):
            cmd_pyinstaller += f' --hidden-import "{i}"' if i else ""
    cmd_pyinstaller += (
        f" --log-level {build_def['log-level']}"
        if "log-level" in build_def.keys()
        else ""
    )
    cmd_pyinstaller += f" \"{build_def['entry']}\""
    __exec(cmd_pyinstaller)

    print(f"√ Build finished in {round(time.time() - t1, 1)}s!")
    print(f"- Dist files see: {osp.join(build_dir, 'dist')}")
163 |
164 |
165 | if __name__ == "__main__":
166 | __main()
167 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | 更新日志 / CHANGELOG
2 | ==========
3 | This project only supports Chinese docs. If you are an English user, feel free to contact us.
4 |
5 | | **表示** | ★ | ☆ | ~~Text~~ |
6 | | :------: | :------: | :------: | :----------: |
7 | | **含义** | 重大变更 | 主要变更 | 已弃用的特性 |
8 |
9 | -----
10 |
11 | ## v4.1
12 | #### 新增
13 | 1. ★ 新增了单独的 Spine 模型导出模式。
14 | 2. ★ 新增了可以导出 Mesh 对象的 3D 模型为 OBJ 文件的功能。
15 |
16 | #### 修复
17 | 1. ☆ 修复了无法解析游戏版本 2.5.60 及以上的部分 Spine 模型的问题。
18 | 2. ☆ 修复了“ArkModels提取与分拣工具”不支持解包游戏版本 2.5.60 及以上的敌方小人模型的问题。
19 |
20 | #### 优化
21 | 1. 优化了日志系统的性能表现。
22 | 2. 优化了在交互式命令行界面中输入文件路径的用户体验。
23 |
24 | #### 补丁
25 | 1. (v4.1.1) 修复了无法正确解包个别干员的战斗模型的问题,详情参见 [#26](https://github.com/isHarryh/Ark-Unpacker/issues/26)。
26 | 2. (v4.1.1) 修复了无法正确解包个别干员的基建模型的问题。
27 |
28 |
29 | ## v4.0
30 | #### 新增
31 | 1. ★ 新增了针对游戏版本 2.5.04 及以上的支持,引入了特殊的解压算法,详情参见 [#20](https://github.com/isHarryh/Ark-Unpacker/issues/20)。
32 | 2. ☆ 新增了在 Spine 模型的 PNG 图片尺寸与 ATLAS 文件中记录的尺寸不匹配时,会自动缩放 PNG 图片到规定的尺寸的特性。
33 | 3. ☆ 新增了更为详细的任务信息显示(例如运行状态和累计耗时)。
34 |
35 | #### 优化
36 | 1. 移除了命令行界面的黑色背景。
37 | 2. 优化了日志系统的性能表现。
38 | 3. 优化了各类代码逻辑和类型注解。
39 |
40 | #### 依赖
41 | 1. ☆ 将 UnityPy 升级到了 1.22.1。
42 | 2. ☆ 将 ArkFBSPy 库升级到游戏版本 2.5.04。
43 |
44 | #### 构建
45 | 1. ★ 弃用了 Python 3.8,目前构建版的 Python 版本采用的是 3.12,大幅提升了性能。
46 | 2. ★ 弃用了 Poetry 1.8,目前 Poetry 版本采用的是 2.1。
47 | 3. ☆ 更新了自动化构建的工作流脚本。
48 | 4. 其他次级依赖项的版本更新。
49 |
50 | #### 补丁
51 | 1. (v4.0.1) 修复了解码 FlatBuffers 数据文件时,浮点数的序列化不精确的问题,详情参见 [#23](https://github.com/isHarryh/Ark-Unpacker/issues/23)。
52 | 2. (v4.0.1) 修复了文本资源解码的任务进度显示不正确的问题。
53 | 3. (v4.0.2) 修复了解码 FlatBuffers 数据文件时,无法使用 UTF-8 解码的字符串会引发错误的问题。
54 | 4. (v4.0.2) 将 ArkFBSPy 库升级到游戏版本 2.5.60。注意,旧的游戏版本的某些 FlatBuffers 数据解码有可能不受支持。
55 |
56 | -----
57 |
58 | ## v3.6
59 | #### 新增
60 | 1. ★ 新增了针对非预乘 Alpha (PMA) 的 Spine 模型纹理强制启用 PMA 的特性。
61 |
62 | #### 修复
63 | 1. \([#19](https://github.com/isHarryh/Ark-Unpacker/issues/19)\) 修复了在打包版本中无法正确调用 `exit` 的问题。
64 |
65 | #### 优化
66 | 1. ☆ 减少了 Windows 打包文件的大小,这得益于 [PyInstaller #8799](https://github.com/pyinstaller/pyinstaller/pull/8799) 的修复。
67 |
68 | #### 依赖
69 | 1. ☆ 将 UnityPy 升级到了 1.20.21。
70 | 2. ☆ 将 PyInstaller 升级到了 6.12.0。
71 | 3. ☆ 将 ArkFBSPy 库升级到游戏版本 2.4.61。
72 |
73 | #### 补丁
74 | 1. (v3.6.1) 将 UnityPy 升级到了 1.22.0,以及其他依赖项的更新。
75 | 2. (v3.6.1) 增加了在载入文件资源时的异常捕获。
76 |
77 |
78 | ## v3.5
79 | #### 新增
80 | 1. ★ 支持新功能“ArkVoice提取与分拣工具”,详情参见[附加文档](docs/ArkVoiceRepoKit.md)。
81 |
82 | #### 优化
83 | 1. ★ 优化了提取音频文件时的效率,这得益于 [UnityPy #291](https://github.com/K0lb3/UnityPy/pull/291) 对于多线程性能的改进。
84 | 2. 采用了标准级类型检查,优化了部分代码样式。
85 |
86 | #### 依赖
87 | 1. ☆ 将 UnityPy 升级到了 1.20.19。
88 | 2. ☆ 将 PIL 升级到了 9.5.0。
89 | 3. ☆ 将 ArkFBSPy 库升级到游戏版本 2.4.41。
90 |
91 |
92 | ## v3.4
93 | #### 新增
94 | 1. ☆ 新增了“ArkModels提取与分拣工具”对无扩展名(非二进制)骨骼文件的支持。
95 |
96 | #### 优化
97 | 1. ★ 重构了任务时间记录的代码逻辑,优化了任务剩余时间的估算准确度。
98 | 2. ☆ 重构了 Spine 模型解析的代码逻辑。
99 | 3. ★ 采用了基本级类型检查,优化了不恰当的类型注解。
100 |
101 | #### 依赖
102 | 1. ☆ 将 ArkFBSPy 库升级到游戏版本 2.4.21 (2)。
103 | 2. 将 PyInstaller 升级到了 6.10.0。
104 | 3. 其他次级依赖项的版本更新。
105 |
106 | #### 补丁
107 | 1. (v3.4.1) 适应了游戏版本 2.4.21 出现的部分 Spine 模型的字段结构变更。
108 |
109 |
110 | ## v3.3
111 | #### 新增
112 | 1. ★ 新增了对匿名 BIN 文件的解包支持。
113 |
114 | #### 优化
115 | 1. ☆ 重构了“ArkModels提取与分拣工具”中对游戏数据的获取逻辑,因为游戏数据自游戏版本 2.4.01 起已采用匿名文件存储。
116 |
117 | #### 依赖
118 | 1. ☆ 将 ArkFBSPy 库升级到游戏版本 2.4.01。
119 | 2. 将 UnityPy 升级到了 1.10.18,将 archspec 添加到了依赖项中。
120 | 3. 将 pycryptodome 升级到了 3.21。
121 | 4. 其他次级依赖项的版本更新。
122 |
123 | #### 补丁
124 | 1. ☆ (v3.3.1) 将 ArkFBSPy 库升级到游戏版本 2.4.21。
125 |
126 |
127 | ## v3.2
128 | #### 新增
129 | 1. ★ 新增了对使用 AES-CBC 加密的文件进行解码的功能,该功能已经与 FlatBuffers 解码功能合并成为“解码文本资源”功能。
130 |
131 | #### 修复
132 | 1. 修复了对早期版本 AB 文件进行解包时可能出现的某种 `AttributeError` 错误。
133 |
134 | #### 优化
135 | 1. ☆ 优化了各类代码逻辑,修复了各类代码样式问题。
136 | 2. 优化了日志系统的性能表现。
137 |
138 | #### 依赖
139 | 1. ★ 引入了 PyLint 作为代码检查系统。
140 | 2. ☆ 将 ArkFBSPy 库升级到游戏版本 2.3.21。
141 | 3. 将 bson 和 pycryptodome 添加到了依赖项中。
142 |
143 | #### 补丁
144 | 1. ☆ (v3.2.1) 将 ArkFBSPy 库升级到游戏版本 2.3.81。
145 |
146 |
147 | ## v3.1
148 | #### 新增
149 | 1. ★ 新增了可以通过命令行的方式直接启动程序并执行任务的功能。
150 |
151 | #### 修复
152 | 1. ☆ 修复了解包音频文件时有概率因 `Fmoderror` 导致失败的问题。
153 |
154 | #### 优化
155 | 1. 不再在任务完成后加入延迟等待。
156 |
157 | #### 依赖
158 | 1. 将 UnityPy 升级到了 1.10.14。
159 |
160 |
161 | ## v3.0
162 | #### 新增
163 | 1. ★ 新增了基于动态调度 Worker 实现的多线程文件写入的功能,提升了保存文件的速度。
164 | 2. ☆ 新增了支持解包单个 AB 文件的功能。
165 | 3. 新增了在部分输入操作中可以输入符号 `*` 来取消任务的功能。
166 | 4. ☆ 新增了单独解包 Spine 模型的资源类型导出选项。
167 | 5. ★ 新增了对使用 FlatBuffers 编码的二进制数据文件进行解码的功能。
168 | 6. ☆ 新增了“ArkModels提取与分拣工具”使用本地 `gamedata` 进行“生成数据集”的特性。
169 | 7. 新增了“ArkModels提取与分拣工具”内的“一键执行”功能。
170 |
171 | #### 修复
172 | 1. ★ 修复了特定情况下写入文件时有概率发生由线程竞争导致文件缺失的问题。
173 | 2. 修复了由 `os.system` 函数导致的潜在的跨平台兼容性问题。
174 |
175 | #### 优化
176 | 1. ☆ 优化了各类代码逻辑,修复了各类代码样式问题。
177 | 2. 优化了文件重名的后缀命名格式,现在采用 `$` 后缀,而不是 `#` 后缀。
178 | 3. 优化了交互式命令行的文本提示。
179 | 4. 优化了路径解析、文件筛选和文件列表读取的逻辑。
180 | 5. ☆ 重构了“生成数据集”和“合并图片”的代码逻辑,现在采用了面向对象的模式。
181 | 6. 优化了配置文件性能等级与多线程数量的映射关系。
182 | 7. ★ 优化了任务进度的计算逻辑和进度条的显示,优化了任务剩余时间的估算准确度。
183 | 8. ☆ 重构了“ArkModels提取与分拣工具”中“分拣模型”的代码逻辑。
184 | 9. ☆ 优化了对运行时异常的处理。
185 |
186 | #### 构建
187 | 1. ★ 新增了自动化的构建脚本,优化了构建可分发文件的步骤。
188 | 2. ★ 实现了针对 Windows 系统的可执行文件的直接打包,不再采用虚拟化打包。
189 | 3. ☆ 新增了 GitHub Actions 的持续集成,实现了构建工作流。
190 | 4. ★ 新增了自动化的单元测试脚本,新增了测试用资源文件。
191 |
192 | #### 依赖
193 | 1. ★ 引入了 Poetry 作为依赖管理系统。
194 | 2. 将 PyInstaller 升级到了 6.8.0。
195 | 3. ☆ 作为 Git Submodule 引入了 ArkFBSPy 模块。
196 | 4. 将 flatbuffers 和 numpy 添加到了依赖项中。
197 | 5. ☆ 将 requests 从依赖项中移除,取消了对 ArknightsGameData 仓库的调用。
198 |
199 | -----
200 |
201 | ## v2.7
202 | #### 新增
203 | 1. ★ 新增了根据设备CPU核心数来自动选择最大多线程数量的功能,不再需要手动设置此参数。
204 |
205 | #### 优化
206 | 1. 优化了代码自文档的样式,并进行了部分自文档的国际化。
207 | 2. 优化了软件包的导入逻辑。
208 | 3. 优化了代码内各类样式问题和拼写错误。
209 | 4. ☆ 优化了资源类的数据导出和保存逻辑。
210 | 5. ☆ 优化了配置文件的读写逻辑和鲁棒性。
211 |
212 |
213 | ## v2.6
214 | #### 新增
215 | 1. ★ 新增了“ArkModels提取与分拣工具”对于动态立绘的提取和分拣支持。
216 |
217 | #### 优化
218 | 1. ☆ 优化了用户界面输出的打印逻辑,减少了不必要的性能消耗。
219 |
220 |
221 | ## v2.5
222 | #### 优化
223 | 1. ★ 优化了多线程分配逻辑,文件写入系统已支持多线程,提升了大型ab文件的解包速度。
224 | 2. ☆ 优化了程序的目录结构,分离了部分程序脚本。
225 | 3. ~~优化了默认配置,包括但不限于将默认的多线程数上调至16。~~
226 | 4. 调整了读取文件列表的代码逻辑。
227 |
228 | #### 构建
229 | 1. 公开了构建所用的脚本、附加库文件和步骤说明,参见[build目录](build)。
230 |
231 |
232 | ## v2.4
233 | #### 修复
234 | 1. ★ 修复了无法正确导出拥有多纹理图的Spine模型(例如岁相、部分干员动态立绘)的图片文件的问题。
235 |
236 | #### 优化
237 | 1. ☆ 重构了命令行界面的打印方式,现在采用Unicode控制符进行更美观高效的打印操作。
238 | 2. 优化了删除目录树的方式,提升了删除的速度。
239 | 3. 优化了“ArkModels提取与分拣工具”的分拣逻辑,提升了分拣的速度。
240 | 4. 优化了解包时的多线程分配逻辑,提升了解包的速度。
241 |
242 |
243 | ## v2.3
244 | #### 新增
245 | 1. ★ 新增了日志系统。程序运行时会在工作目录保存日志文件,以便后续的检查和故障排除等操作。
246 | 2. ★ 新增了配置文件。程序启动时会在工作目录读取配置文件,用户可以通过编辑该文件来调整部分设置参数。
247 |
248 | #### 修复
249 | 1. ☆ 修复了战斗小人与基建小人重名等特定情况下,无法正确分类导出Spine模型文件的问题。现在基建小人会单独导出到 `Building` 子目录中。
250 |
251 | #### 优化
252 | 1. 调整了读取文件列表的代码逻辑。
253 |
254 |
255 | ## v2.2
256 | #### 新增
257 | 1. ★ 新增了“ArkModels提取与分拣工具”对于敌方模型的提取与分拣支持。
258 |
259 | #### 修复
260 | 1. ☆ ([#4](https://github.com/isHarryh/Ark-Unpacker/issues/4)) 修复了战斗小人图片正背面图片区分不准确的问题。
261 |
262 |
263 | ## v2.1
264 | #### 新增
265 | 1. ★ 支持新功能“ArkModels提取与分拣工具”,详情参见[附加文档](docs/ArkModelsRepoKit.md)。
266 |
267 |
268 | ## v2.0
269 | #### 新增
270 | 1. ★ 支持且默认以多线程模式运行资源解包和图片合并,极大地提升了运行速度。
271 | 2. ~~支持在自定义模式下可以自定义多线程数的功能。~~
272 |
273 | #### 优化
274 | 1. 优化了运行时的回显表现和打印速度。
275 | 2. 优化了图片匹配的精确度,调整了算法的参数。
276 | 3. 不再支持流式显示运行时的详细信息,现在都只以简洁模式(进度条模式)回显信息。
277 |
278 | #### 修复
279 | 1. ☆ 修复了特定情况下保存目的地异常的问题:现在任务目标相对路径含有 `..` 时,也能被正确地保存。
280 |
281 | #### 文档
282 | 1. 整理了说明文档,建立了[docs目录](docs)用于存储附加说明文档。
283 |
284 | #### 测试数据
285 | 该版本在作者的电脑上测试结果如下:
286 | > 电脑配置
287 | > -- Windows 10 (64位)
288 | > -- CPU 2.00GHz, RAM 8.00GB
289 | > 明日方舟版本
290 | > -- Android v1.9.01
291 | > 测试内容
292 | > -- “一键执行”整个Android文件夹
293 | > -- 线程模式:8线程
294 | > -- 源文件夹:5.19GB(6.8K个文件)
295 | > 测试结果
296 | > -- 用时:1.5h
297 | > -- 资源解包文件夹:13.5GB(61K个文件)
298 | > -- 图片合并文件夹:2.04GB(5.8K个文件)
299 |
300 | -----
301 |
302 | ## v1.2
303 | #### 新增
304 | 1. ★ 支持将不同ab文件中解包出的文件分别放到不同的子文件夹中,以便分类保存。
305 | 2. ☆ 支持显示任务的预计剩余时间。
306 |
307 | #### 优化
308 | 1. 移除了目录创建时的回显。
309 |
310 | #### 测试数据
311 | 该版本在作者的电脑上测试结果如下:
312 | > 电脑配置
313 | > -- Windows 10 (64位)
314 | > -- CPU 2.00GHz, RAM 8.00GB
315 | > 明日方舟版本
316 | > -- Android v1.8.81
317 | > 测试内容
318 | > -- “一键执行”整个Android文件夹
319 | > -- 源文件夹:5.06GB(6.6K个文件)
320 | > 测试结果
321 | > -- 用时:3.0h
322 | > -- 资源解包文件夹:20.1GB(70K个文件)
323 | > -- 图片合并文件夹:2.03GB(5.2K个文件)
324 |
325 |
326 | ## v1.1
327 | #### 新增
328 | 1. 新增了程序运行时的图标和窗口标题。
329 |
330 | #### 优化
331 | 1. ☆ 优化了各种代码逻辑,例如字符串的拼接和文件列表的解析逻辑。
332 | 2. ☆ 优化了文件的保存逻辑。当文件夹内存在文件名相似、内容完全相同的文件时,不会进行保存操作。
333 | 3. ☆ 优化了图片合并的算法:现在使用 `PIL.Image` 的内置函数 `putalpha` 来合并RGB通道图和A通道图,极大地提升了图片合并的效率。
334 | 4. 优化了图片合并任务的信息回显。
335 |
336 | #### 修复
337 | 1. ☆ 修复了干员战斗小人相关资源由于重名而不能正确地全部导出的问题。
338 | 2. 修复了图片合并时,不能按目录分别保存的问题。
339 | 3. ☆ 修复了干员战斗小人相关图片无法被正确地匹配和合并的问题。
340 | 4. ☆ 修复了重名的文件保存冲突的问题:~~现在会自动添加 `_#` 后缀来区分重名的文件~~,且不再需要用户配置是否执行覆盖。
341 | 5. 修复了在试图保存大小为0的图片时抛出意外错误的问题,现在这些无效图片会被直接跳过。
342 | 6. ☆ 修复了不能准确匹配RGB通道图和A通道图的问题。修复后不排除还有极个别图片匹配错误。
343 |
344 | #### 构建
345 | 1. 修复了可执行文件中潜在的 `fmod.dll` 缺失的问题。
346 | 2. ☆ 采用了文件虚拟化技术打包可执行文件。
347 |
348 | #### 测试数据
349 | 该版本在作者的电脑上测试结果如下:
350 | > 电脑配置
351 | > -- Windows 10 (64位)
352 | > -- CPU 2.00GHz, RAM 8.00GB
353 | > 明日方舟版本
354 | > -- Android v1.8.01
355 | > 测试内容
356 | > -- “一键执行”整个Android文件夹
357 | > -- 源文件夹:4.74GB(5.9K个文件)
358 | > 测试结果
359 | > -- 用时:2.5h
360 | > -- 资源解包文件夹:18.4GB(57K个文件)
361 | > -- 图片合并文件夹:1.90GB(5.4K个文件)
362 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2022, Harry Huang
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | 1. Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | 2. Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | 3. Neither the name of the copyright holder nor the names of its
17 | contributors may be used to endorse or promote products derived from
18 | this software without specific prior written permission.
19 |
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
Ark-Unpacker
7 |

8 |
9 | Arknights Assets Unpacker | 明日方舟游戏资源批量解包器
10 | v4.1
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 | This project only supports Chinese docs. If you are an English user, feel free to contact us.
21 |
22 |
23 |
24 | ## 介绍 Intro
25 |
26 | ### 终止更新预告
27 |
28 | 本项目正在逐步升级和迁移到 [ArkStudio](https://github.com/isHarryh/Ark-Studio),敬请关注。
29 | ArkStudio 是正在开发阶段的,功能更加强大的《明日方舟》游戏资源集成式管理平台,开发完成后,当前项目将由于生命周期结束而终止更新,感谢各位一直以来的支持和喜爱。
30 |
31 | ### 实现的功能
32 |
33 | 1. 批量解包《明日方舟》Unity AssetBundle(AB) 文件中的游戏资源对象。
34 | 1. 支持游戏版本 v2.5.04 及以上的特殊解压算法;
35 | 2. 解包时可对基建小人、动态立绘和战斗小人的 Spine 模型文件进行区分;
36 | 3. 解包时可以将文件按源 AB 文件的名称分目录存放。
37 | 2. 批量合并 RGB 通道图和 Alpha 通道图。
38 | 3. 批量解码 FlatBuffers 数据文件。
39 | 4. 既提供交互式命令行界面进行操作,也支持直接通过命令行运行。
40 |
41 | ### 支持的类型
42 |
43 | | Unity类型 | 描述 | 导出格式 |
44 | | :-------- | :------------- | :------- |
45 | | Sprite | 精灵图 | PNG 图片 |
46 | | Texture2D | 二维纹理图 | PNG 图片 |
47 | | AudioClip | 音频剪辑 | WAV 音频 |
48 | | TextAsset | 文本或字节数据 | 未指定 |
49 | | Mesh | 3D 模型 | OBJ 文件 |
50 |
51 | ### 相关文档
52 |
53 | - 更新日志 > [点击查看](./CHANGELOG.md)
54 | - 配置文件指引 > [点击查看](docs/ConfigFile.md)
55 | - 游戏资源查找指南 > [点击查看](docs/AssetsGuide.md)
56 |
57 | ## 使用方法 Usage
58 |
59 | ### 1.资源准备
60 |
61 | 无论您是想要使用我们的发行版本还是源代码来解包明日方舟的游戏资源,您都需要先获取到明日方舟的资源文件。明日方舟是基于 Unity 开发的游戏,它的游戏资源会全部打包到一种 **AssetBundle文件**(后缀名 `.ab`,下简称“AB文件”,但少数情况下后缀名是 `.bin`)中。
62 |
63 | 下面将以 **Android 安卓系统** 为例讲述**如何获取到明日方舟的 AB 文件**。明日方舟的游戏资源有 **2 个部分**:
64 |
65 | - 一部分是通过**安装包**(`.apk`)提供的,从明日方舟[**官网**](https://ak.hypergryph.com)将其下载到本地后,使用压缩文件查看工具打开(后缀名改成 `.zip` 后打开),然后把里面的 `assets/AB/Android` 文件夹解压出来;
66 | - 另一部分是通过**热更新**提供的,首先确保您的安卓手机上的明日方舟更新到了最新版本,然后(推荐使用 USB 数据线)将手机存储的 `Android/data/com.hypergryph.arknights/files/Bundles` 文件夹(一般情况下是这个路径)复制到电脑上(重命名为 `Android(2)`)。至此,我们的目录结构大致如下:
67 |
68 | > **你的目录**
69 | > ├─Android
70 | > └─Android(2)
71 |
72 | 最后,将 `Android(2)` 文件夹里的内容复制到 `Android` 中,并**覆盖**同名文件,就能得到完整的游戏资源。在这之后,您就可以使用我们的程序来解包其中的游戏资源了。
73 |
74 | 当然,您也可以将 `Android` 里的部分文件夹复制出来进行处理,以解包特定的资源。为了便于您找到特定资源的 AB 文件位置,我们整理并列出了各个子目录储存的资源的内容,浏览[此文档](docs/AssetsGuide.md)以查看详情。
75 |
76 | ### 2.下载 ArkUnpacker
77 |
78 | 为了方便一般用户使用,我们推出了适用于 Windows 64位操作系统(暂不支持其他操作系统)的可执行文件。
79 |
80 | 请进入 Releases 页面下载 exe 文件 `ArkUnpacker-vx.x.x_x64.exe`:[前往下载](https://github.com/isHarryh/Ark-Unpacker/releases)
81 |
82 | ### 3.必备知识
83 |
84 | 在正式地使用本程序前,您最好对以下内容有初步了解:
85 |
86 | - [RGB 通道图和 A 通道图](docs/Essentials.md#rgb通道图和a通道图)
87 | - [Spine 动画小人](docs/Essentials.md#spine动画小人)
88 |
89 | ### 4.示例用法
90 |
91 | 一个最简单的用法是,将需要解包的文件夹(可以是多个)放到程序文件所在的目录中。至此,我们的目录结构大致如下:
92 |
93 | > **你的目录**
94 | > ├─Android (解包整个目录需要很久)
95 | > ├─charpack (可以选择解包部分文件夹)
96 | > └─ArkUnpacker.exe
97 |
98 | 然后直接运行程序,弹出交互式命令行界面如下,依据其提示操作即可:
99 |
100 | ```
101 | 欢迎使用ArkUnpacker
102 | ====================
103 | 模式选择:
104 | 1: 一键执行
105 | 2: 自定义资源解包
106 | 3: 自定义图片合并
107 | 4: 自定义文本资源解码
108 | 5: 自定义Spine模型导出
109 | 6: ArkModels提取与分拣工具
110 | 7: ArkVoice提取与分拣工具
111 | 0: 退出
112 | 输入序号后按回车即可,如果您不清楚以上功能的含义,强烈建议您先阅读使用手册(README)
113 | ```
114 |
115 | 此外,运行程序后,工作目录会生成配置文件 `ArkUnpackerConfig.json` 与日志文件 `ArkUnpackerLogs.log`。有关配置文件的字段说明,请参阅[此文档](docs/ConfigFile.md)了解详情。
116 |
117 | ### 5.各模式的功能概述
118 |
119 | #### 一键执行
120 |
121 | 直接解包程序所在目录中的文件,并执行图片合并。解包出的文件默认导出到 `Unpacked_xxxx` 文件夹,合并完成的图片默认导出到 `Combined_xxxx` 文件夹。
122 |
123 | #### 自定义资源解包
124 |
125 | 解包指定路径的游戏资源,并导出到指定目录。可以选择需要解包哪些资源类型。
126 |
127 | #### 自定义图片合并
128 |
129 | 合并指定路径中的 RGB 图和 Alpha 图,并将合并的结果导出到指定目录。
130 |
131 | #### 自定义文本资源解码
132 |
133 | 解码指定目录中的数据文件,并将解码结果导出到指定目录。具体的原理,请参阅[此文档](docs/TextAssetsDecoding.md)了解详情。
134 |
135 | #### 自定义Spine模型导出
136 |
137 | 解包指定路径中的游戏资源,并将其中包含的 Spine 动画小人模型导出到指定目录。
138 |
139 | #### ArkModels 提取与分拣工具
140 |
141 | 此为定制功能,请参阅[此文档](docs/ArkModelsRepoKit.md)了解详情。
142 |
143 | #### ArkVoice 提取与分拣工具
144 |
145 | 此为定制功能,请参阅[此文档](docs/ArkVoiceRepoKit.md)了解详情。
146 |
147 | ### 6.命令行用法
148 |
149 | 除了上述示例用法展示的**交互式**命令行界面外,程序还支持**直接**通过命令行来运行,以便熟悉命令行调用的用户使用。相关参数如下:
150 |
151 | ```
152 | usage: ArkUnpacker [-h] [-v] [-m {ab,cb,fb,sp}] [-i INPUT] [-o OUTPUT] [-d] [--image] [--text] [--audio] [--spine] [--mesh] [-g] [-l {0,1,2,3,4}]
153 |
154 | Arknights Assets Unpacker. Use no argument to run to enter the interactive CLI mode.
155 |
156 | options:
157 | -h, --help show this help message and exit
158 | -v, --version show a version message and exit
159 | -m {ab,cb,fb,sp}, --mode {ab,cb,fb,sp}
160 | working mode, ab=resolve-ab, cb=combine-image, fb=decode-flatbuffers, sp=resolve-spine
161 | -i INPUT, --input INPUT
162 | source file or directory path
163 | -o OUTPUT, --output OUTPUT
164 | destination directory path
165 | -d, -delete delete the existed destination directory first
166 | --image in resolve ab mode: export image files
167 | --text in resolve ab mode: export text or binary files
168 | --audio in resolve ab mode: export audio files
169 | --spine in resolve ab mode: export spine asset files
170 | --mesh in resolve ab mode: export mesh resources
171 | -g, --group in resolve ab mode: group files into separate directories named by their source ab file
172 | -l {0,1,2,3,4}, --logging-level {0,1,2,3,4}
173 | logging level, 0=none, 1=error, 2=warn, 3=info, 4=debug
174 | ```
175 |
176 | 运行 `ArkUnpacker -h` 命令可以显示此帮助信息。如果您使用的命令不带任何 `mode` 参数,那么程序会以交互式命令行界面的模式启动。
177 |
178 | ## 注意事项 Notice
179 |
180 | 1. 使用一键执行模式时,不会解包直接位于程序所在目录中的 AB 文件,只会解包子文件夹里的 AB 文件。
181 | 2. 程序会根据设备 CPU 核心数自动调整多线程策略,并且对设备性能(尤其是 CPU 和硬盘性能)有一定要求,配置过低的电脑在运行时可能会缓慢。
182 | 3. Windows 命令行基本常识:
183 | 1. 快捷键 `Ctrl+C` 用于强行终止程序,若想复制文本,请用鼠标选取文本后再按此快捷键。
184 | 2. 左键单击小黑窗会进入“文本选取”模式,此时主程序会暂停执行。
185 | 3. 右键单击小黑窗可以粘贴文本,也可用于退出“文本选取”模式。
186 | 4. 拖放文件到小黑窗可以快速粘贴其路径。
187 | 4. 如果您希望了解更多细节或参与贡献,请[查看开发者指引](docs/ForDevelopers.md)。
188 |
189 | ## 许可证 Licensing
190 |
191 | 本项目基于 **BSD-3 开源协议**。任何人都可以自由地使用和修改项目内的源代码,前提是要在源代码或版权声明中保留作者说明和原有协议,且不可以使用本项目名称或作者名称进行宣传推广。
192 |
193 | -----
194 |
195 |
201 |
--------------------------------------------------------------------------------
/Test.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2022-2023, Harry Huang
3 | # @ BSD 3-Clause License
4 | import os, sys, json, shutil
5 | import subprocess
6 | from src.utils.Profiler import CodeProfiler
7 | from src.utils.GlobalMethods import print, stacktrace
8 |
9 |
def __assert_file_count(path, expected_count):
    """Asserts that a directory tree contains exactly the expected number of files.

    :param path: Root directory to scan recursively.
    :param expected_count: Expected total number of files.
    :raises AssertionError: If the directory is missing or the count differs.
    """
    if not os.path.isdir(path):
        raise AssertionError(f"Directory {path} not found")
    # Count every file in the whole tree, directory by directory.
    count = sum(len(files) for _, _, files in os.walk(path))
    if count != expected_count:
        raise AssertionError(
            f"Expected {expected_count} files but got {count} files in {path}"
        )
20 |
21 |
def __run_cli(args: list):
    """Launches Main.py as a child process with the given CLI arguments.

    :param args: Extra command line arguments forwarded to Main.py.
    :returns: A ``(stdout, stderr, returncode)`` tuple; the output strings
        are empty because the child inherits this process's streams.
    """
    proc = subprocess.run(
        [sys.executable, "Main.py", *args],
        stdout=sys.stdout,
        stderr=sys.stderr,
        encoding="utf-8",
        errors="replace",
    )
    return "", "", proc.returncode
28 |
29 |
def test():
    """Runs the end-to-end CLI test suite.

    Exercises the four CLI modes (ab=unpack, cb=combine, fb=decode,
    sp=spine-export) against the bundled resources in ``test/res`` and
    asserts the expected number of output files for each mode. Repeats
    ``sys.argv[1]`` times (default: one round).
    """
    for i in range(int(sys.argv[1]) if len(sys.argv) > 1 else 1):
        try:
            print(f"[#{i}] Preparing...", c=6)
            # Output directories for each mode; wiped before every round.
            DIR_UPK = "test/upk"
            DIR_CMB = "test/cmb"
            DIR_DTA = "test/dta"
            DIR_SPI = "test/spi"
            shutil.rmtree(DIR_UPK, ignore_errors=True)
            shutil.rmtree(DIR_CMB, ignore_errors=True)
            shutil.rmtree(DIR_DTA, ignore_errors=True)
            shutil.rmtree(DIR_SPI, ignore_errors=True)

            print(f"[#{i}] Testing...", c=6)
            # Unit 1: unpack AB files (images, text, audio, mesh), grouped
            # into per-source directories (-g).
            with CodeProfiler("unit_1"):
                out, err, code = __run_cli(
                    [
                        "-m",
                        "ab",
                        "-i",
                        "test/res",
                        "-o",
                        DIR_UPK,
                        "--image",
                        "--text",
                        "--audio",
                        "--mesh",
                        "-g",
                    ]
                )
            if code != 0:
                print(out)
                print(err)
                raise AssertionError(f"ArkUnpacker ab mode failed, code={code}")
            # Unit 2: combine RGB/Alpha images from the unpacked output.
            with CodeProfiler("unit_2"):
                out, err, code = __run_cli(
                    [
                        "-m",
                        "cb",
                        "-i",
                        DIR_UPK,
                        "-o",
                        DIR_CMB,
                    ]
                )
            if code != 0:
                print(out)
                print(err)
                raise AssertionError(f"ArkUnpacker cb mode failed, code={code}")
            # Unit 3: decode FlatBuffers/text assets from the unpacked output.
            with CodeProfiler("unit_3"):
                out, err, code = __run_cli(
                    [
                        "-m",
                        "fb",
                        "-i",
                        DIR_UPK,
                        "-o",
                        DIR_DTA,
                    ]
                )
            if code != 0:
                print(out)
                print(err)
                raise AssertionError(f"ArkUnpacker fb mode failed, code={code}")
            # Unit 4: export Spine models straight from the raw resources.
            with CodeProfiler("unit_4"):
                out, err, code = __run_cli(
                    [
                        "-m",
                        "sp",
                        "-i",
                        "test/res",
                        "-o",
                        DIR_SPI,
                    ]
                )
            if code != 0:
                print(out)
                print(err)
                raise AssertionError(f"ArkUnpacker sp mode failed, code={code}")

            # Compare output file counts against known-good totals for the
            # bundled test resources.
            print(f"[#{i}] Analysing...", c=6)
            __assert_file_count(DIR_UPK, 1470)
            __assert_file_count(DIR_CMB, 157)
            __assert_file_count(DIR_DTA, 2)
            __assert_file_count(DIR_SPI, 208)

            print(f"[#{i}] Test success!", c=2)
        except BaseException as arg:
            # NOTE(review): `arg` is unused, and the timing report below is
            # only written when a round FAILS — confirm whether it should
            # also run after a successful round (e.g. via a finally block).
            print(f"[#{i}] Test failed because an error occurred!", c=1)
            print(stacktrace(), c=3)
            json.dump(
                {
                    "average": CodeProfiler.get_avg_time_all(),
                    "total": CodeProfiler.get_total_time_all(),
                },
                open("test/time_consumption.json", "w", encoding="UTF-8"),
                indent=4,
            )
128 |
129 |
# Entry point: run the full test suite when this script is executed directly.
if __name__ == "__main__":
    test()
132 |
--------------------------------------------------------------------------------
/docs/ArkModelsRepoKit.md:
--------------------------------------------------------------------------------
1 | ArkUnpacker附加说明文档
2 | # 关于ArkModels提取与分拣工具
3 |
4 | ## 功能概述
5 |
6 | [ArkModels](https://github.com/isHarryh/Ark-Models)是作者建立的明日方舟 Spine 模型仓库,收录了游戏中的一部分 Spine 模型。“ArkModels提取与分拣工具” 用于辅助该仓库进行更新,其子功能包括:
7 |
8 | - 模型提取:自动从游戏原始资源文件中,提取出 ArkModels 仓库可能需要的文件。
9 | - 文件分拣:对提取出的文件进行进一步筛选,按照 ArkModels 仓库的相关格式要求进行命名与分类。
10 | - 生成数据集:从 FlatBuffers 编码的资源文件中解码出角色数据,然后将 ArkModels 仓库中的每个模型的信息写入一个 JSON 数据集中。
11 |
12 | ## 使用方法
13 |
14 | 1. 按照 README 文档中的说明,进行[资源准备](../README.md#1资源准备)。具体需要哪些资源文件夹,请参考程序内的提示所述的资源文件夹名。请将准备的资源文件夹直接放在**程序所在目录**里面。
15 | 2. 打开程序,进入 “ArkModels提取与分拣工具” 界面,按照提示完成整个流程。之后,程序所在目录就会生成 ArkModels 仓库需要的文件(和文件夹)了。
16 | 3. 克隆ArkModels仓库,使用刚刚生成的文件(和文件夹)替换掉原来的内容,并进行 Commit-Push 等操作更新仓库,即可完成模型的更新。
17 |
--------------------------------------------------------------------------------
/docs/ArkVoiceRepoKit.md:
--------------------------------------------------------------------------------
1 | ArkUnpacker附加说明文档
2 | # 关于ArkVoice提取与分拣工具
3 |
4 | ## 功能概述
5 |
6 | [ArkVoice](https://github.com/isHarryh/Ark-Voice)是作者建立的明日方舟语音仓库,收录了游戏中的各个语种的干员语音文件,采用 OGG 编码格式、分片合并法存储。“ArkVoice提取与分拣工具” 用于辅助该仓库进行更新,其子功能包括:
7 |
8 | - 语音提取:自动从游戏原始资源文件中,提取语音为 WAV 文件。
9 | - 分拣与合并:对提取出的 WAV 文件进行分片合并,并另存为压缩后的 OGG 文件。
10 | - 生成数据集:将 ArkVoice 仓库中的语音的详细信息写入一个 JSON 数据集中。
11 |
12 | ## 使用方法
13 |
14 | 1. 按照 README 文档中的说明,进行[资源准备](../README.md#1资源准备)。具体需要哪些资源文件夹,请参考程序内的提示所述的资源文件夹名。请将准备的资源文件夹直接放在**程序所在目录**里面。
15 | 2. 打开程序,进入 “ArkVoice提取与分拣工具” 界面,按照提示完成整个流程。之后,程序所在目录就会生成 ArkVoice 仓库需要的文件(和文件夹)了。
16 | 3. 克隆ArkVoice仓库,使用刚刚生成的文件(和文件夹)替换掉原来的内容,并进行 Commit-Push 等操作更新仓库,即可完成语音库的更新。
17 |
--------------------------------------------------------------------------------
/docs/AssetsGuide.md:
--------------------------------------------------------------------------------
1 | ArkUnpacker附加说明文档
2 | # 游戏资源查找指南
3 |
4 | > **注意:**
5 | > - 请在合理范围内使用本程序。根据 BSD3 许可证,对于非法使用本程序解包出来的资源造成的侵权行为,作者不承担相应责任。
6 | > - 以下内容是基于 **Android `v2.4.01`** 的《明日方舟》编制的,不同版本客户端的特定资源的具体位置可能存在较大差异。
7 | > - 此文档内的所有内容均为作者独立整理,仅供参考,如需转载请注明出处。
8 |
9 |
10 |
11 | ## 资源导览
12 |
13 | 各个子目录储存的资源的内容:
14 |
15 | **Android**
16 | ├─[activity](#activity) / 活动
17 | ├─anon / 匿名数据
18 | ├─[arts](#arts) / 美术图片
19 | ├─[audio](#audio) / 音频
20 | ├─[avg](#avg) / 剧情图
21 | ├─[battle](#battle) / 战斗相关
22 | ├─[building](#building) / 基建
23 | ├─chararts / 干员(立绘和基建小人)
24 | ├─charpack / 干员(战斗小人)
25 | ├─climbtowerseasons / 保全派驻
26 | ├─config / 配置
27 | ├─crisisv2longterm / 新版危机合约
28 | ├─cutin / 角色插入
29 | ├─graphics / 图形渲染
30 | ├─npcpack / NPC
31 | ├─[prefabs](#perfabs) / 预设文件
32 | ├─raw / 未打包文件(例如视频)
33 | ├─refs / 集成战略
34 | ├─retro / 常驻支线与别传活动
35 | ├─scenes / 关卡
36 | ├─skinpack / 干员皮肤
37 | ├─[spritepack](#spritepack) / 图标
38 | └─[ui](#ui) / 用户界面
39 |
40 | > **重要变更:**
41 | > 1. 在明日方舟`v1.8.01`及之前版本中,干员默认皮肤的立绘、基建小人和战斗小人全都位于`charpack`中。而在之后的版本中,立绘和基建小人被转移到了`chararts`中存放。
42 | > 2. 在明日方舟`v2.3.81`及之前版本中,游戏数据文件位于`gamedata`目录,热更新开屏素材位于`hotupdate`目录,国际化数据位于`i18n`目录。而在之后的版本中,这些数据都被转移到了`anon`中作为匿名文件存放,并且后缀名被更改为`.bin`。
43 |
44 | > **提示:**
45 | > 解包只是解包AB文件。这意味着像`raw/video/`里的部分非AB文件不会放到解包后文件夹中,所以请在原始文件中查找它们。
46 |
47 |
48 | ## 常用资源定位
49 |
50 | 资源条目较多,可使用浏览器或编辑器的查找功能(如`Ctrl+F`快捷键)来检索关键词。
51 |
52 | [返回顶部](#资源导览)
53 |
54 | ### Activity
55 |
56 | - `activity/[uc]act{xxx}.ab` 某个活动的界面相关资源
57 | - `activity/commonassets.ab` 活动物资的通用图标
58 |
59 | ### Arts
60 |
61 | - `arts/building/` 基建相关图标(基建技能图标等)
62 | - `arts/charportraits/` 干员半身像
63 | - `arts/dynchars/` 动态立绘资源
64 | - `arts/guidebookpages/` 新手指引相关
65 | - `arts/maps/` 地图地形材质
66 | - `arts/shop/` 采购中心相关
67 | - `arts/ui/` 各类UI插图
68 | - `arts/clue_hub.ab` 线索图标
69 | - `arts/elite_hub.ab` 精英化图标
70 | - `arts/potential_hub.ab` 潜能图标
71 | - `arts/profession_hub.ab` 职业图
72 | - `arts/rarity_hub.ab` 稀有度星级图标
73 | - `arts/specialized_hub.ab` 技能专精图标
74 |
75 | ### Audio
76 |
77 | - `audio/sound_beta_2/enemy/` 敌人战斗音效
78 | - `audio/sound_beta_2/music/` 游戏音乐
79 | - `audio/sound_beta_2/player/` 干员战斗音效
80 | - `audio/sound_beta_2/voice/` 语音包(日文)
81 | - `audio/sound_beta_2/voice_{xxx}/` 语音包(其他语言)
82 | - `audio/sound_beta_2/ambience.ab` 环境氛围音效
- `audio/sound_beta_2/avg_{xxx}.ab` 剧情音效
84 | - `audio/sound_beta_2/battle.ab` 其他战斗音效
85 | - `audio/sound_beta_2/vox.ab` 人声音效
86 |
87 | > **提示:**
88 | > 解包出的语音包内容取决于您的原始游戏文件。这意味着如果您在游戏里没有下载某个语言的语音包,则不会解包出此语言包。
89 |
90 | ### Avg
91 |
92 | - `avg/bg/` 剧情背景图
93 | - `avg/characters/` 剧情人物图
94 | - `avg/effects/` 剧情特效
95 | - `avg/imgs/` 剧情插图
96 | - `avg/items/` 剧情道具图
97 |
98 | ### Battle
99 |
100 | - `battle/prefabs/effects/` 战斗特效
101 | - `battle/prefabs/enemies/` 敌方Spine
102 | - `battle/prefabs/[uc]tokens.ab` 战斗道具和部分召唤物Spine
103 |
104 | ### Building
105 |
106 | - `building/blueprint/` 基建UI
107 | - `building/diy/` 装扮模式素材与家具素材
108 | - `building/ui/[uc]diy.ab` 基建房间装扮模式UI
109 | - `building/vault/[uc]arts.ab` 基建功能室Sprite
110 |
111 | ### Perfabs
112 |
113 | - `prefabs/shop/shopkeeper/` 可露希尔
114 | - `prefabs/gacha/` 干员寻访相关
115 |
116 | ### Spritepack
117 |
118 | 活动
119 | - `spritepack/act_achieve_{xxx}.ab` 活动相关插图
120 | - `spritepack/ui_charm_icon_list.ab` 活动:多索雷斯假日标志物
121 |
122 | 剿灭作战
123 | - `spritepack/ui_campaign_stage_icon.ab` 剿灭作战:关卡背景图
124 | - `spritepack/ui_campaign_world_map_piece.ab` 剿灭作战:地图碎片图
125 | - `spritepack/ui_campaign_zone_icon.ab` 剿灭作战:地区图标
126 |
127 | 剧情回顾
128 | - `spritepack/story_review_mini_activity.ab` 剧情回顾:故事集封面
129 | - `spritepack/story_review_mini_char.ab` 剧情回顾:子故事封面
130 |
131 | 干员模组
132 | - `spritepack/ui_equip_big_img_hub.ab` 干员模组:模组大图
133 | - `spritepack/ui_equip_type_direction_hub.ab` 干员模组:模组类型图标
134 | - `spritepack/ui_equip_type_hub.ab` 干员模组:模组类型图标
135 |
136 | 预览
137 | - `spritepack/ui_handbook_battle_preview.ab` 预览:关卡加载中背景图
138 | - `spritepack/ui_homebackground_preview.ab` 预览:首页背景图
139 |
140 | 宣传图
141 | - `spritepack/ui_home_act_banner_gacha.ab` 宣传图:新寻访开放
142 | - `spritepack/ui_home_act_banner_shop.ab` 宣传图:可露希尔推荐
143 | - `spritepack/ui_home_act_banner_zone.ab` 宣传图:新章节开放
144 |
145 | 仓库
146 | - `spritepack/ui_item_icons.ab` 仓库:常规物品图标
147 | - `spritepack/ui_item_icons_acticon.ab` 仓库:活动物品图标?
148 | - `spritepack/ui_item_icons_apsupply.ab` 仓库:理智道具图标
149 | - `spritepack/ui_item_icons_classpotential.ab` 仓库:中坚潜能信物图标
150 | - `spritepack/ui_item_icons_potential.ab` 仓库:潜能信物图标
151 |
152 | 蚀刻章
153 | - `spritepack/ui_medal_banner_list.ab` 蚀刻章:套组横幅
154 | - `spritepack/ui_medal_diy_frame_bkg.ab` 蚀刻章:套组卡槽背景
155 | - `spritepack/ui_medal_icons.ab` 蚀刻章:蚀刻章图标
156 |
157 | 头像
158 | - `spritepack/icon_enemies.ab` 敌人头像
159 | - `spritepack/ui_char_avatar.ab` 干员头像
160 | - `spritepack/ui_player_avatar_list.ab` 玩家头像
161 |
162 | 图标
163 | - `spritepack/building_ui_buff_skills.ab` 基建技能图标
164 | - `spritepack/character_sort_type_icon.ab` 干员筛选要素图标
165 | - `spritepack/skill_icons.ab` 技能图标
166 | - `spritepack/ui_camp_logo.ab` 阵营图标
167 | - `spritepack/ui_sub_profession_icon_hub.ab` 职业分支图标
168 | - `spritepack/ui_team_icon.ab` 阵营图标
169 |
170 | 其他
171 | - `spritepack/building_diy_theme.ab` 家具套装预览
172 | - `spritepack/chapter_title.ab` 主线章节文字标题图
173 | - `spritepack/ui_brand_image_hub.ab` 时装品牌
174 | - `spritepack/ui_gp_shop_dyn.ab` 采购中心组合包相关
175 | - `spritepack/ui_kv_img.ab` 时装展示大图
176 | - `spritepack/ui_main_mission_bg.ab` 主线任务背景图
177 | - `spritepack/ui_stage_retro_title.ab` 复刻后活动封面
178 | - `spritepack/ui_start_battle_button.ab` 开始行动按钮图
179 | - `spritepack/ui_zone_home_theme.ab` 终端封面图
180 |
181 | > **提示:**
182 | > 为简洁起见,上方列出的Spritepack中的AB文件名不是完整的,通常其后面会有一些分类号比如`_h1`、`_0`等。
183 |
184 | ### Ui
185 |
186 | - `ui/activity/` 各种活动
187 | - `ui/bossrush/` 引航者行动
188 | - `ui/campaign/` 剿灭作战
189 | - `ui/characterinfo/` 干员信息页面相关
190 | - `ui/friendassist/` 好友助战页面相关
191 | - `ui/gacha/` 干员寻访相关
192 | - `ui/handbook/` 干员档案相关
193 | - `ui/legion/` 保全派驻
194 | - `ui/pages/` 各种页面的UI
195 | - `ui/rglktopic/` 集成战略各主题UI
196 | - `ui/sandboxv2/` 生息演算相关
197 | - `ui/squadassist/` 好友助战编队相关
198 | - `ui/stage/enemyhandbook/` 敌人档案相关
199 | - `ui/timelydrop/` 限时掉落
200 | - `ui/[uc]charsortfilter.ab` 干员筛选相关
201 | - `ui/[uc]climbtower.ab` 保全派驻关卡页相关
202 | - `ui/[uc]squad.ab` 编队页面相关
203 | - `ui/operation/return.ab` 玩家回归活动
204 | - `ui/recruit/states/recruit_ten_result_state.ab` 十连寻访
205 | - `ui/skin_groups.ab` 时装品牌
206 | - `ui/zonemap_{x}.ab` 主线关卡页背景
207 | - `ui/zonemap_camp{x}.ab` 剿灭作战关卡页背景
208 |
209 | [返回顶部](#资源导览)
210 |
--------------------------------------------------------------------------------
/docs/ConfigFile.md:
--------------------------------------------------------------------------------
1 | ArkUnpacker附加说明文档
2 | # 配置文件指引
3 |
4 | ## 概述
5 |
6 | 在运行程序后,工作目录会生成 ArkUnpacker 的配置文件,以 JSON 格式存储部分设置参数。文件名为 `ArkUnpackerConfig.json`。
7 |
8 | > **注意:**
9 | > 1. 不同版本的 ArkUnpacker 配置文件可能不兼容。若因此导致程序故障,请删除原来的配置文件。
10 | > 2. 修改或删除配置文件后,必须重新启动 ArkUnpacker 才能应用更改。
11 |
12 | ## 字段说明
13 |
14 | 配置文件内容示例如下,文件编码为 `UTF-8`:
15 | ```json
16 | {
17 | "log_file": "ArkUnpackerLogs.log",
18 | "log_level": 3,
19 | "performance_level": 2
20 | }
21 | ```
22 | - `log_file`:字符串。日志文件的存储路径,设为 `""` 以禁用日志文件写入。
23 | - `log_level`:整数。日志等级,`0=None` `1=Error` `2=Warn` `3=Info` `4=Debug`。
24 | - `performance_level`:整数。性能等级,数值越大则线程和性能消耗越多,`0=Minimal` `1=Low` `2=Standard` `3=High`。
25 |
26 | > **提示:**
27 | > 由于 Python 离谱的多线程实现,性能等级越高并不一定会使得效率越高。通常来说 Standard 是比较推荐的。
28 |
--------------------------------------------------------------------------------
/docs/Essentials.md:
--------------------------------------------------------------------------------
1 | ArkUnpacker附加说明文档
2 | # 必备知识
3 |
4 | ## RGB通道图和A通道图
5 |
6 | 明日方舟的大部分图片(例如角色立绘、小人图片等)从 AB 文件中提取出来后,并不是单独的一张图片,而是两张图片:一张有颜色的图(称为 RGB 通道图),一张只有黑白灰的图(称为 A 通道图,文件名通常有"alpha"字样)。
7 |
8 | A 通道图中,完全白色的部分表示这里是不透明的,完全黑色的部分表示这里是透明的。欲获得完整的既有颜色又能显示透明度的图片,需要进行图片合并。
9 |
10 | 本程序的功能之一就是自动识别 A 通道图,然后自动找到对应的 RGB 通道图,将其合并为完整的图片并保存。
11 |
12 | ## Spine动画小人
13 |
14 | 明日方舟的小人角色是使用 [Spine 动画技术](http://esotericsoftware.com)实现的,Spine 版本是 3.8。
15 |
16 | 明日方舟里一套完整的 Spine 动画模型通常包含 3 种文件:包含了各个零散素材的 **png 图片**、标注各个素材在图片中的位置的 **atlas 文件**、存储骨骼动画的 **skel 文件**。特别地,少数模型的 png 图片可能不止有 1 张,少数模型的 skel 文件可能是 JSON 格式。
17 |
18 | 需要特别注意的是,干员战斗小人具有正面和背面之分,但是在 AB 文件中,它们的文件名完全一样,有时候甚至基建小人也会与战斗小人重名。因此,使用常规的解包功能有时无法正确地导出 Spine 动画模型。
19 |
20 | 使用专门的 Spine 模型导出模式,就可以准确地区分它们的类型归属,并在解包时将其放入单独的文件夹,例如:
21 |
22 | - `BattleFront` 战斗小人正面
23 | - `BattleBack` 战斗小人背面
24 | - `Building` 干员基建小人
25 | - `DynIllust` 动态立绘
26 |
--------------------------------------------------------------------------------
/docs/ForDevelopers.md:
--------------------------------------------------------------------------------
1 | ArkUnpacker附加说明文档
2 | # 开发者指引
3 |
4 | 此文档描述了运行源程序或参与项目开发的准备工作和具体步骤。
5 |
6 | ## 依赖
7 |
8 | 1. **Python:** 本项目需要 **Python 3.9~3.12** 运行环境,您可[前往下载](https://www.python.org/downloads)。
9 | 2. **IDE:** 建议使用的集成开发环境(IDE)是 **VS Code**,您可[前往下载](https://code.visualstudio.com)。
10 | > 建议使用的 VS Code 插件:
11 | > - [Code Spell Checker](https://marketplace.visualstudio.com/items?itemName=streetsidesoftware.code-spell-checker)
12 | > - [Python](https://marketplace.visualstudio.com/items?itemName=ms-python.python)
13 | 3. **Poetry:** 本项目使用 **Poetry 2** 进行依赖项的自动化管理,您可[查看教程](https://python-poetry.org/docs)。所有依赖项将被安装在 Poetry 的虚拟环境中。
14 | > Poetry 快速安装方法:
15 | > 1. 命令行运行 `pip install pipx` 安装 pipx 这一命令行程序管理工具;
16 | > 2. 命令行运行 `pipx install poetry` 安装 Poetry,并在命令行运行 `pipx ensurepath` 以完善 PATH 配置;
17 | > 3. 命令行运行 `poetry -v` 以检查是否安装成功(这可能需要在新的命令行窗口中运行)。
18 | 4. **子模块:** 本项目使用 [ArkFBSPy](https://github.com/isHarryh/Ark-FBS-Py) 模块来实现 FlatBuffers 的数据解码,该模块是以 Git Submodule 的形式存储在仓库中的。
19 |
20 | ## 项目初始化
21 |
22 | 1. 使用 Git 克隆仓库到本地,然后使用 IDE 打开项目文件夹。
23 | > 由于仓库使用了 Git Submodule,因此:
24 | > - 在初始化仓库时,您需要运行 `git submodule update --init --recursive` 来初始化子模块。
25 | > - 当子模块的远程仓库有更新时,您需要运行 `git submodule update --remote --recursive` 来更新本地的子模块。
26 | 2. 在 VS Code 中启动 `Project Setup` 终端任务(通常情况下,每次打开项目都会自动运行这一任务);或者命令行运行 `poetry install`。这将激活 Poetry 并在虚拟环境中安装依赖项。本项目的主要依赖项可在 `pyproject.toml` 文件中查看。
27 | 3. 选择 Python 解释器为 Poetry 虚拟环境中的解释器(命令行运行 `poetry env info` 即可查看解释器路径)。
28 | 4. 在 VS Code 中启动 `Python: ArkUnpacker` 运行,即可开始调试主程序。
29 |
30 | ## 测试与构建
31 |
1. **测试:** 在 VS Code 中启动 `Test` 终端任务;或者直接运行 `Test.py` 脚本。这将使用仓库自带的测试用的游戏资源文件(位于 `test/res` 目录中)进行模拟解包。测试完成后会生成运行用时的记录文件 `test/time_consumption.json`。
33 | 2. **构建:** 在 VS Code 中启动 `Build Dist` 终端任务;或者直接运行 `Build.py` 脚本。这将使用 PyInstaller 在项目文件夹的 `build/dist` 目录中自动生成可分发的文件。
34 |
--------------------------------------------------------------------------------
/docs/TextAssetsDecoding.md:
--------------------------------------------------------------------------------
1 | ArkUnpacker附加说明文档
2 | # 文本资源解码
3 |
4 | ## 概述
5 |
6 | 解包《明日方舟》AB 文件所导出的 TextAsset 类型的文件,主要有三种格式:
7 |
8 | 1. 纯文本文件,例如某些剧情文本;
9 | 2. 以 FlatBuffers 格式存储的文件;
3. 用 AES 加密的文件。
11 |
12 | 其中,FlatBuffers 文件和 AES 加密文件都需要经过一定的解码步骤才能查看,直接打开是乱码的。为了解码,需要使用 ArkUnpacker 提供的“解码文本资源”功能。
13 |
在“解码文本资源”的功能中,您无需自行区分乱码的文件到底是 FlatBuffers 文件还是 AES 加密的文件,因为 ArkUnpacker 会自动进行识别。
15 |
16 | 若您对解码这些数据的具体步骤感兴趣,并且您了解数据存储和密码学的基本常识,您可以继续阅读下面的原理介绍。
17 |
18 | ## FlatBuffers 文件的解码原理
19 |
20 | FlatBuffers 是一种用于高效地存储数据的格式,您可前往 [FlatBuffers 官网](https://flatbuffers.dev)查看介绍。
21 |
22 | 为了将二进制的 FlatBuffers 文件转化为 JSON 等可读格式,除了需要原始二进制文件外,我们还需要 FlatBuffers Schema (FBS) 这一架构文件。
23 |
24 | FBS 相当于一种抽象模板,它定义了哪些数据以怎样的类型被存储。如果没有 FBS,那么我们只能从原始二进制文件中读取到非结构性的孤立的值。
25 |
26 | 获得 FBS 的有效途径是对游戏内部数据结构进行解析。
27 |
28 | ArkUnpacker 所使用的 FBS 根本上来自于 [OpenArknightsFBS](https://github.com/MooncellWiki/OpenArknightsFBS)。为了便于解析,程序中使用的 FBS 并不是原始的 FBS,而是编译到 Python 的 FBS 的类,即 [ArkFBSPy](https://github.com/isHarryh/Ark-FBS-Py)。
29 |
30 | ### 解码步骤
31 |
32 | 1. 通过原始二进制文件的名称,判断该文件采用的是哪个 FBS;
33 | 2. 使用该 FBS 对文件进行解码,得到结构性的数据;
34 | 3. 将解码后的数据保存为 JSON 格式。
35 |
36 | ## AES 加密文件的解码原理
37 |
38 | AES 是一种常见的对称加密算法。
39 |
40 | 解码前,首先需要获得一个起到密钥作用的 32 字节的 CHAT_MASK。所有 AES 加密文件使用的 CHAT_MASK 都相同,但《明日方舟》各个大版本所使用的 CHAT_MASK 很可能会发生变更。
41 |
42 | ### 解码步骤
43 |
44 | 1. 对于较新版本《明日方舟》的二进制加密文件,其文件头部通常有 128 字节的 RSA 签名,在解密时需要跳过;
45 | 2. 通过某种途径取得 CHAT_MASK,CHAT_MASK 的首 16 字节是 AES 密钥;
46 | 3. 跳过签名块后,将首 16 字节数据与 CHAT_MASK 的后 16 字节进行异或,可以得到初始向量(IV);
47 | 4. 利用上述步骤所得的 AES 密钥和 IV,对后面的数据进行 AES-CBC 解密;
48 | 5. 解密后的数据可能是 JSON 文件,也可能是 BSON 文件(需要转换为 JSON 文件)。
49 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "ArkUnpacker"
3 | version = "4.1.1"
4 | description = "Arknights Assets Unpacker"
5 | authors = [
6 | {name = "Harry Huang", email = "harryhuang2652@qq.com"}
7 | ]
8 | license = "BSD-3-Clause"
9 | readme = "README.md"
10 | requires-python = ">=3.9,<3.13"
11 | dependencies = [
12 | "bson (~=0.5)",
13 | "flatbuffers (~=25.2)",
14 | "numpy (~=1.26)",
15 | "Pillow (~=9.5)",
16 | "pycryptodome (~=3.22)",
17 | "pydub (~=0.25)",
18 | "UnityPy (~=1.22)"
19 | ]
20 |
21 | [tool.poetry]
22 | package-mode = false
23 |
24 | [[tool.poetry.source]]
25 | name = "PyPI-Tsinghua"
26 | url = "https://pypi.tuna.tsinghua.edu.cn/simple"
27 | priority = "primary"
28 |
29 | [tool.poetry.group.dev.dependencies]
30 | toml = "0.10.2"
31 | pyinstaller = "6.12.0"
32 |
33 | [tool.build]
34 | build-dir = "$project$\\build"
35 | entry = "$project$\\Main.py"
36 | icon = "$project$\\ArkUnpacker.ico"
37 | add-binary = "$venv$\\Lib\\site-packages\\UnityPy\\lib:UnityPy\\lib|$venv$\\Lib\\site-packages\\UnityPy\\resources\\uncompressed.tpk:UnityPy\\resources"
38 | add-data = "$project$\\src\\fbs\\CN:src\\fbs\\CN|$venv$\\Lib\\site-packages\\archspec\\json:archspec\\json"
39 | hidden-import = "flatbuffers|UnityPy.resources"
40 | log-level = "WARN"
41 |
42 | [build-system]
43 | requires = ["poetry-core>=2.0"]
44 | build-backend = "poetry.core.masonry.api"
45 |
--------------------------------------------------------------------------------
/src/CollectModels.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2022-2025, Harry Huang
3 | # @ BSD 3-Clause License
4 | from typing import Callable, Sequence
5 |
6 | import os.path as osp
7 | import re
8 | import shutil
9 |
10 | from .utils.GlobalMethods import print, rmdir, get_dirlist
11 | from .utils.Logger import Logger
12 | from .utils.SaverUtils import SafeSaver
13 | from .utils.TaskUtils import (
14 | ThreadCtrl,
15 | Counter,
16 | UICtrl,
17 | TaskReporter,
18 | TaskReporterTracker,
19 | )
20 |
21 |
def collect_models(
    upkdir: str,
    destdir: str,
    do_del: bool,
    on_finished: Callable,
    on_collected: Callable,
):
    """Collects the Spine models found in one unpacked directory,
    copying each matched model dir into the destination directory
    under a standardized name.

    :param upkdir: Unpacked directory containing one sub dir per model type;
    :param destdir: Destination directory receiving the renamed model dirs;
    :param do_del: Whether to delete `upkdir` afterwards
        (skipped if any error occurred);
    :param on_finished: Callback invoked once at the end (may be falsy);
    :param on_collected: Callback invoked per collected model (may be falsy);
    :rtype: None;
    """
    error_occurred = False
    for model_type_dir in get_dirlist(upkdir, max_depth=1):
        model_type: str = osp.basename(model_type_dir)  # Sub dir of one model type
        for model_dir in get_dirlist(model_type_dir, max_depth=1):
            model: str = osp.basename(model_dir)  # Sub dir of one determined model
            if not model.islower():
                # To solve model typo caused by Arknights side
                model = model.lower()
                Logger.info(f'CollectModels: "{model_dir}" may have a typo name')
            try:
                # Derive the standardized name from the model type and name.
                # Bug fix: match the full pattern once instead of a prefix
                # test followed by an unchecked `re.match(...).group(...)`,
                # which raised AttributeError for names that had the right
                # prefix but a non-matching tail.
                newname = None
                if model_type.startswith("Building"):
                    matched = re.match(
                        r"(build_)?char_(\d+_[0-9a-zA-Z]+(_[0-9a-zA-Z#]+)?)", model
                    )
                    if matched:
                        newname = matched.group(2)
                elif model_type.startswith("Battle"):
                    matched = re.match(r"enemy_(\d+_[0-9a-zA-Z]+(_\d+)?)", model)
                    if matched:
                        newname = matched.group(1)
                elif model_type.startswith("DynIllust"):
                    matched = re.match(
                        r"dyn_illust_char_(\d+_[0-9a-zA-Z]+(_[0-9a-zA-Z#]+)?)", model
                    )
                    if matched:
                        newname = "dyn_illust_" + matched.group(1)
                if newname:
                    # Copy to the destination, then remove the source model dir
                    dest = osp.join(destdir, newname)
                    Logger.debug(f'CollectModels: "{model_dir}" -> "{dest}"')
                    shutil.copytree(model_dir, dest, dirs_exist_ok=True)
                    rmdir(model_dir)
                    if on_collected:
                        on_collected()
                # Dirs matching no rule are intentionally left untouched
            except Exception as arg:
                error_occurred = True
                Logger.error(
                    f'CollectModels: Error occurred while handling "{model_dir}": Exception{type(arg)} {arg}'
                )
    if do_del and not error_occurred:
        rmdir(upkdir)
    if on_finished:
        on_finished()
70 |
71 |
72 | ########## Main-主程序 ##########
def main(srcdirs: Sequence[str], destdirs: Sequence[str]):
    """Collects the Spine models from the source directories to the destination directories accordingly.
    The structure of the source directory is shown below.

    ```
    ├─source_dir
    │ ├─unpacked_dir
    │ │ ├─model_type_dir
    │ │ │ ├─model_dir
    │ │ │ │ ├─files
    ```

    :param srcdirs: Source directories list;
    :param destdirs: Destination directories list;
    :rtype: None;
    """
    print("\n正在解析目录...", s=1)
    Logger.info("CollectModels: Reading directories...")
    # Each source dir must be paired with exactly one destination dir
    if len(srcdirs) != len(destdirs):
        Logger.error("CollectModels: Arguments error")
        print("参数错误", c=3)
        return

    # Build the task list: one task per unpacked dir found in each source dir
    flist = []  # [(upkdir, destdir), ...]
    for srcdir, destdir in zip(srcdirs, destdirs):
        print(f"\t正在读取目录 {srcdir}")
        for upkdir in get_dirlist(srcdir, max_depth=1):
            flist.append((upkdir, destdir))

    thread_ctrl = ThreadCtrl()  # Limits the number of concurrent workers
    collected = Counter()  # Shared counter of successfully collected models
    ui = UICtrl()
    tr_finished = TaskReporter(1, len(flist))  # One completion report per task
    tracker = TaskReporterTracker(tr_finished)

    ui.reset()
    ui.loop_start()
    for upkdir, destdir in flist:
        # Refresh the status display for the dir being dispatched
        ui.request(
            [
                "正在分拣模型...",
                tracker.to_progress_bar_str(),
                f"当前搜索:\t{osp.basename(upkdir)}",
                f"累计分拣:\t{collected.now()}",
                f"预计剩余时间:\t{tracker.to_eta_str()}",
                f"累计消耗时间:\t{tracker.to_rt_str()}",
                f"运行状态统计:\t{Logger.to_ew_stats_str()}",
            ]
        )
        # Dispatch one worker per unpacked dir; do_del=True so the worker
        # removes the source dir after a fully successful collection
        thread_ctrl.run_subthread(
            collect_models,
            (upkdir, destdir, True, tr_finished.report, collected.update),
            name=f"CmThread:{id(upkdir)}",
        )

    # NOTE(review): loop_stop is invoked both here and after the wait loop,
    # while ui.request/refresh continue in between — presumably UICtrl
    # tolerates this sequence; confirm against UICtrl's contract.
    ui.reset()
    ui.loop_stop()
    # Wait until all workers exited, pending saves are flushed,
    # and every task has reported completion
    while (
        thread_ctrl.count_subthread()
        or not SafeSaver.get_instance().completed()
        or tracker.get_progress() < 1
    ):
        ui.request(
            [
                "正在分拣模型...",
                tracker.to_progress_bar_str(),
                f"累计分拣:\t{collected.now()}",
                f"预计剩余时间:\t{tracker.to_eta_str()}",
                f"累计消耗时间:\t{tracker.to_rt_str()}",
                f"运行状态统计:\t{Logger.to_ew_stats_str()}",
            ]
        )
        ui.refresh(post_delay=0.1)

    ui.loop_stop()
    ui.reset()
    print("\n分拣模型结束!", s=1)
    print(f"  累计分拣 {collected.now()} 套模型")
    print(f"  此项用时 {round(tracker.get_rt(), 1)} 秒")
154 |
--------------------------------------------------------------------------------
/src/CollectVoice.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2022-2025, Harry Huang
3 | # @ BSD 3-Clause License
4 | from typing import Callable
5 |
6 | import json
7 | import os.path as osp
8 | import threading
9 | from io import BytesIO
10 | from pydub import AudioSegment
11 |
12 | from .utils.GlobalMethods import print, rmdir, get_dirlist, get_filelist
13 | from .utils.Logger import Logger
14 | from .utils.SaverUtils import SafeSaver
15 | from .utils.TaskUtils import (
16 | ThreadCtrl,
17 | Counter,
18 | UICtrl,
19 | TaskReporter,
20 | TaskReporterTracker,
21 | )
22 |
23 | _INTERNAL_LOCK = threading.Lock()
24 |
25 |
class FixedFloat(float):
    """A float subclass that remembers the exact source string and echoes it
    back from repr(), keeping a stable textual form for serialization."""

    def __init__(self, f_str: str):
        # float.__new__ has already parsed the numeric value from f_str;
        # here we only keep the original text for repr()
        self._text = f_str

    def __repr__(self) -> str:
        return self._text

    def __float__(self) -> float:
        # Return self (already a float instance) so the fixed repr survives
        return self
35 |
36 |
def collect_voice(
    upkdir: str,
    destdir: str,
    do_del: bool,
    force_std_name: bool,
    info_merged: dict,
    on_finished: Callable,
    on_collected: Callable,
):
    """Merges the WAV voice clips unpacked into `upkdir` into one OGG file
    saved in `destdir`, recording per-clip metadata into `info_merged`.

    :param upkdir: Unpacked directory containing the WAV clips of one character;
    :param destdir: Destination directory for the merged OGG file;
    :param do_del: Whether to delete `upkdir` after processing;
    :param force_std_name: If `True`, truncates the output name to the first
        three underscore-separated segments of the dir name;
    :param info_merged: Shared dict receiving this character's metadata
        (guarded by the module-level lock); `None` to skip recording;
    :param on_finished: Callback invoked once at the end (may be falsy);
    :param on_collected: Callback invoked after the merged file is saved (may be falsy);
    :rtype: None;
    """
    global _INTERNAL_LOCK
    ori_name = osp.basename(upkdir)
    std_name = "_".join(ori_name.split("_")[:3]) if force_std_name else ori_name
    voice_merged: AudioSegment = AudioSegment.empty()
    duration_merged = 0.0  # Running total duration in seconds
    clips = []  # [{"name": ..., "start": ...}, ...]
    # For each audio file unpacked (sorted for a deterministic clip order)
    for file in sorted(get_filelist(upkdir, max_depth=1)):
        name, ext = osp.splitext(osp.basename(file))
        # Ensure the audio file is supported
        if not ext.lower() == ".wav":
            Logger.warn(f'CollectVoice: Unexpected file type "{ext}"')
            continue
        if not name.startswith("CN_"):
            Logger.info(f'CollectVoice: Unsupported voice type at "{file}"')
            continue
        # Merge this audio file
        voice_clip: AudioSegment = AudioSegment.from_file(file)
        duration_clip = voice_clip.frame_count() / voice_clip.frame_rate
        clips.append(
            {
                "name": name,
                "start": FixedFloat(f"{duration_merged:6f}"),  # Start time (second)
            }
        )
        voice_merged += voice_clip
        duration_merged += duration_clip
        # Logger.debug(f"CollectVoice: Merged \"{name}\" from \"{osp.basename(upkdir)}\"")

    if clips:
        # Save the final audio file
        Logger.debug(
            f'CollectVoice: Completed collection at "{ori_name}", {len(clips)} clips merged'
        )
        voice_io = BytesIO()
        # NOTE(review): reading right after export assumes pydub leaves the
        # stream rewound to position 0 — confirm against pydub's export()
        voice_merged.export(voice_io, format="ogg", parameters=["-q:a", str(3)])
        voice_bytes = voice_io.read()
        SafeSaver.save_bytes(voice_bytes, destdir, std_name, ".ogg")
        # Post processing
        if on_collected:
            on_collected()
        with _INTERNAL_LOCK:
            # Serialize writes: info_merged is shared across worker threads
            if info_merged is not None:
                info_merged[std_name] = {
                    "size": len(voice_bytes),
                    "duration": FixedFloat(f"{duration_merged:6f}"),
                    "clips": clips,
                }
    else:
        Logger.warn(f'CollectVoice: Collection not performed at "{ori_name}"')

    if do_del:
        rmdir(upkdir)
    if on_finished:
        on_finished()
101 |
102 |
103 | ########## Main-主程序 ##########
def main(srcdir: str, destdir: str, force_std_name: bool):
    """Collects the voice files from the source directory to the destination directory.
    The structure of the source directory is shown below.

    ```
    ├─source_dir
    │ ├─unpacked_dir
    │ │ ├─files (typically .wav)
    ```

    :param srcdir: Source directory;
    :param destdir: Destination directory;
    :param force_std_name: Forces the keys to use standard character name;
    :rtype: None;
    """
    print("\n正在解析目录...", s=1)
    Logger.info("CollectVoice: Reading directories...")

    # Build the task list: one task per unpacked dir,
    # keeping only character voice dirs ("char_" prefix)
    flist = []  # [(upkdir, destdir), ...]
    print(f"\t正在读取目录 {srcdir}")
    for upkdir in get_dirlist(srcdir, max_depth=1):
        flist.append((upkdir, destdir))
    flist = list(filter(lambda x: osp.basename(x[0]).startswith("char_"), flist))
    info_merged = {}  # Shared metadata dict filled by the worker threads

    thread_ctrl = ThreadCtrl()  # Limits the number of concurrent workers
    collected = Counter()  # Shared counter of successfully collected voice sets
    ui = UICtrl()
    tr_finished = TaskReporter(1, len(flist))  # One completion report per task
    tracker = TaskReporterTracker(tr_finished)

    ui.reset()
    ui.loop_start()
    for upkdir, destdir in flist:
        # Refresh the status display for the dir being dispatched
        ui.request(
            [
                "正在分拣语音...",
                tracker.to_progress_bar_str(),
                f"当前搜索:\t{osp.basename(upkdir)}",
                f"累计分拣:\t{collected.now()}",
                f"预计剩余时间:\t{tracker.to_eta_str()}",
                f"累计消耗时间:\t{tracker.to_rt_str()}",
                f"运行状态统计:\t{Logger.to_ew_stats_str()}",
            ]
        )
        # Dispatch one worker per unpacked dir;
        # do_del=False keeps the source WAV dirs in place
        thread_ctrl.run_subthread(
            collect_voice,
            (
                upkdir,
                destdir,
                False,
                force_std_name,
                info_merged,
                tr_finished.report,
                collected.update,
            ),
            name=f"CvThread:{id(upkdir)}",
        )

    # NOTE(review): loop_stop is invoked both here and after the wait loop,
    # while ui.request/refresh continue in between — presumably UICtrl
    # tolerates this sequence; confirm against UICtrl's contract.
    ui.reset()
    ui.loop_stop()
    # Wait until all workers exited, pending saves are flushed,
    # and every task has reported completion
    while (
        thread_ctrl.count_subthread()
        or not SafeSaver.get_instance().completed()
        or tracker.get_progress() < 1
    ):
        ui.request(
            [
                "正在分拣语音...",
                tracker.to_progress_bar_str(),
                f"累计分拣:\t{collected.now()}",
                f"预计剩余时间:\t{tracker.to_eta_str()}",
                f"累计消耗时间:\t{tracker.to_rt_str()}",
                f"运行状态统计:\t{Logger.to_ew_stats_str()}",
            ]
        )
        ui.refresh(post_delay=0.1)

    # Persist the merged metadata collected by the workers
    # NOTE(review): the file handle is not closed explicitly here
    if len(info_merged):
        json.dump(
            info_merged,
            open(osp.join(destdir, "voice_data_part.json"), "w", encoding="UTF-8"),
            indent=4,
            ensure_ascii=False,
        )
        Logger.info("CollectVoice: Saved voice data")

    ui.loop_stop()
    ui.reset()
    print("\n分拣语音结束!", s=1)
    print(f"  累计分拣 {collected.now()} 套语音")
    print(f"  此项用时 {round(tracker.get_rt(), 1)} 秒")
198 |
--------------------------------------------------------------------------------
/src/CombineRGBwithA.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2022-2025, Harry Huang
3 | # @ BSD 3-Clause License
4 | from typing import Callable, List, Optional, Union
5 |
6 | import numpy as np
7 | import os
8 | import os.path as osp
9 | import re
10 |
11 | from PIL import Image
12 |
13 | from .utils.GlobalMethods import print, rmdir, get_filelist, is_image_file
14 | from .utils.Logger import Logger
15 | from .utils.SaverUtils import SafeSaver
16 | from .utils.TaskUtils import ThreadCtrl, UICtrl, TaskReporter, TaskReporterTracker
17 |
18 |
class NoRGBImageMatchedError(FileNotFoundError):
    """Raised when no RGB counterpart can be found for a given alpha image."""

    def __init__(self, *details):
        super().__init__(*details)
22 |
23 |
class AlphaRGBCombiner:
    """Combines a separate alpha-channel image with its RGB counterpart
    to produce a complete RGBA image."""

    def __init__(self, alpha: Union[str, Image.Image]):
        """Initializes the combiner.

        :param alpha: Instance of the alpha image or its file path;
        """
        self.img_alpha = image_open(alpha, "RGBA")

    def combine_with(
        self,
        rgb: Union[str, Image.Image],
        resize: Optional[tuple] = None,
        remove_bleeding: bool = True,
    ):
        """Merges the RGB image and the Alpha image in an efficient way.

        :param rgb: Instance of RGB image or its file path;
        :param resize: Resize the final image to the given size, `None` for disabled;
        :param remove_bleeding: Whether to remove the color bleeding;
        :returns: A new image instance;
        :rtype: Image;
        """
        img_rgb: Image.Image = image_open(rgb, "RGBA")
        img_alpha: Image.Image = self.img_alpha.convert("L")
        if resize:
            img_rgb = image_resize(img_rgb, resize)
            img_alpha = image_resize(img_alpha, resize)
        else:
            # No explicit size given: conform the alpha mask to the RGB image
            img_alpha = image_resize(img_alpha, img_rgb.size)
        img_rgb.putalpha(img_alpha)
        if remove_bleeding:
            img_rgb = AlphaRGBCombiner.remove_bleeding(img_rgb)
        return img_rgb

    @staticmethod
    def remove_bleeding(rgba: Union[str, Image.Image], min_alpha: int = 0):
        """Removes the color bleeding in the given RGBA image
        by setting the RGB value of the transparent pixel to (0, 0, 0).

        :param rgba: Instance of RGBA image or its file path;
        :param min_alpha: The minimal alpha value to determine transparency;
        :returns: A new image instance;
        :rtype: Image;
        """
        img_rgba: Image.Image = image_open(rgba, "RGBA")
        img_black = Image.new("RGBA", img_rgba.size)
        img_alpha = img_rgba.getchannel("A")
        # Mask is opaque (255) exactly where the source is transparent,
        # so pasting the blank image zeroes out those RGB values
        img_mask = img_alpha.point(lambda x: 0 if x > min_alpha else 255)
        img_rgba.paste(img_black, img_mask)
        return img_rgba

    @staticmethod
    def apply_premultiplied_alpha(
        rgba: Union[str, Image.Image], resize: Optional[tuple] = None
    ):
        """Multiplies the RGB channels with the alpha channel.
        Useful when handling non-PMA Spine textures.

        :param rgba: Instance of RGBA image or its file path;
        :param resize: Resize the final image to the given size, `None` for disabled;
        :returns: A new image instance;
        :rtype: Image;
        """
        img_rgba: Image.Image = image_open(rgba, "RGBA")
        if resize:
            # Resize the RGB and A channels separately, then recombine
            img_rgb = img_rgba.convert("RGB")
            img_alpha = img_rgba.getchannel("A")
            img_rgb = image_resize(img_rgb, resize)
            img_alpha = image_resize(img_alpha, resize)
            img_rgb.putalpha(img_alpha)
            # Bug fix: carry the resized image into the PMA step below.
            # Previously the original (unresized) image was re-read here, so
            # `resize` had no effect on the output; the old branch also built
            # the intermediate images from float32 arrays, which
            # Image.fromarray cannot handle for the "RGB"/"L" modes.
            img_rgba = img_rgb
        # Apply PMA: scale RGB by A/255 in float space, then clip to uint8
        data = np.array(img_rgba, dtype=np.float32)
        data[:, :, :3] *= data[:, :, 3:] / 255.0
        data_int = np.clip(data, 0, 255).astype(np.uint8)
        return Image.fromarray(data_int, "RGBA")
97 |
98 |
class AlphaRGBSearcher:
    """Locates the RGB counterpart of an Alpha-channel image file,
    first by file-name pattern matching, then by pixel-level similarity
    when multiple candidates exist."""

    # Alpha file-name patterns, tried in order; group(1) captures the real
    # (base) name and the optional group(2) a "$n" duplicate suffix.
    PATTERNS = [
        re.compile(r"(.+)\[alpha\](\$[0-9]+)?"),
        re.compile(r"(.+)_alpha(\$[0-9]+)?"),
        re.compile(r"(.+)alpha(\$[0-9]+)?"),
        re.compile(r"(.+)a(\$[0-9]+)?"),
    ]

    def __init__(self, fp_alpha: str):
        # Path to the Alpha image this searcher works on
        self.fp_alpha = fp_alpha

    def get_real_name(self):
        """Returns the base name with the alpha suffix stripped,
        or `None` if the file name matches no known alpha pattern."""
        return AlphaRGBSearcher.calc_real_name(self.fp_alpha)

    def search_rgb(self):
        """Searches the alpha image's directory for its matching RGB image.

        :returns: Path to the matched RGB image;
        :raises ValueError: If the alpha file name or path is not valid;
        :raises NoRGBImageMatchedError: If no candidate could be matched;
        """
        real = self.get_real_name()
        if not real:
            raise ValueError("Not a recognized alpha image name")
        if not is_image_file(self.fp_alpha):
            raise ValueError("Not a image file path")
        ext = osp.splitext(self.fp_alpha)[1]
        dirname = osp.dirname(self.fp_alpha)
        flist = os.listdir(dirname)
        flist = list(filter(is_image_file, flist))
        # Candidates: exact "<real><ext>" or any "<real>…$n" variant.
        # NOTE(review): the alpha file itself can pass this filter when its
        # own name starts with `real` and contains "$"; presumably the
        # similarity ranking below demotes it — confirm.
        flist = list(
            filter(
                lambda x: x == real + ext or (x.startswith(real) and "$" in x), flist
            )
        )
        flist = [osp.join(dirname, x) for x in flist]

        if len(flist) == 0:
            Logger.info(
                f'CombineRGBwithA: No RGB-image could be matched to "{self.fp_alpha}"'
            )
            raise NoRGBImageMatchedError(self.fp_alpha)
        elif len(flist) == 1:
            Logger.debug(
                f'CombineRGBwithA: "{flist[0]}" matched "{self.fp_alpha}" exclusively'
            )
            return flist[0]
        else:
            # Multiple candidates: rank them by pixel similarity
            best, similarity = self.choose_most_similar_rgb(flist)
            if best:
                Logger.info(
                    f'CombineRGBwithA: "{best}" matched "{self.fp_alpha}" among {len(flist)} candidates, confidentiality {similarity}'
                )
                return best
            else:
                raise NoRGBImageMatchedError(self.fp_alpha)

    def choose_most_similar_rgb(self, candidates: List[str]):
        """Returns the candidate path most similar to the alpha image and
        its similarity value; `(None, -1)` for an empty candidate list."""
        best_candidate = None
        best_similarity = -1
        for i in candidates:
            similarity = AlphaRGBSearcher.calc_similarity(i, self.fp_alpha)
            if similarity > best_similarity:
                best_candidate = i
                best_similarity = similarity
        return best_candidate, best_similarity

    @staticmethod
    def calc_real_name(fp_alpha: str):
        """Strips the alpha suffix from the file's base name using `PATTERNS`;
        implicitly returns `None` when no pattern matches."""
        basename, _ = osp.splitext(osp.basename(fp_alpha))
        for p in AlphaRGBSearcher.PATTERNS:
            m = p.fullmatch(basename)
            if m:
                return m.group(1)

    @staticmethod
    def calc_similarity(
        rgb: Union[str, Image.Image],
        alpha: Union[str, Image.Image],
        mode: str = "L",
        precision: int = 150,
    ):
        """Compares the similarity between the RGB image and the Alpha image.

        :param rgb: Instance of RGB image or its file path;
        :param alpha: Instance of Alpha image or its file path;
        :param mode: Image mode during comparing, `L` for default;
        :param precision: Precision of the judgement, higher for more precise, `150` for default;
        :returns: Similarity value in `[0, 255]`, higher for more similar;
        :rtype: int;
        """
        img_rgb = image_open(rgb, mode)
        img_alpha = image_open(alpha, mode)
        precision = 150 if precision <= 0 else precision
        # Resize the two images
        img_rgb = image_resize(img_rgb, (precision, precision))
        img_alpha = image_resize(img_alpha, (precision, precision))
        # Load pixels into arrays
        px_rgb = img_rgb.load()
        px_a = img_alpha.load()
        assert px_rgb is not None and px_a is not None
        # Calculate differences of every pixel as a mean squared error
        # (pure-white RGB pixels are treated as 0 — presumably to penalize
        # blank padding areas; TODO confirm intent)
        diff = []
        for y in range(precision):
            for x in range(precision):
                diff.append(
                    (((px_rgb[x, y] if px_rgb[x, y] < 255 else 0) - px_a[x, y]) ** 2)
                    / 256.0
                )
        # Return the similarity
        diff_mean = round(sum(diff) / len(diff))
        return 0 if diff_mean >= 255 else (255 if diff_mean <= 0 else 255 - diff_mean)
205 |
206 |
def image_open(fp_or_img: Union[str, Image.Image], mode: str):
    """Returns an image converted to the given mode, opening it from disk
    first when a file path is supplied instead of an Image instance."""
    source = (
        fp_or_img if isinstance(fp_or_img, Image.Image) else Image.open(fp_or_img)
    )
    converted = source.convert(mode)
    assert isinstance(converted, Image.Image)
    return converted
215 |
216 |
def image_resize(img: Image.Image, size: tuple):
    """Resizes the image to the given (width, height) with bilinear
    resampling; returns it unchanged when it already has that size."""
    if len(img.size) == 2 and len(size) == 2:
        if (img.size[0], img.size[1]) != (size[0], size[1]):
            img = img.resize(size, resample=Image.BILINEAR)
    assert isinstance(img, Image.Image)
    return img
223 |
224 |
def image_resolve(
    fp: str,
    destdir: str,
    on_processed: Optional[Callable],
    on_file_queued: Optional[Callable],
    on_file_saved: Optional[Callable],
):
    """Finds an RGB image to combine with the given Alpha image then saves the combined image into the given directory.

    :param fp: Path to the Alpha image;
    :param destdir: Destination directory;
    :param on_processed: Callback `f()` for finished, `None` for ignore;
    :param on_file_queued: Callback `f()` invoked when a file was queued, `None` for ignore;
    :param on_file_saved: Callback `f(file_path_or_none_for_not_saved)`, `None` for ignore;
    :rtype: None;
    """
    try:
        combiner = AlphaRGBCombiner(fp)
        searcher = AlphaRGBSearcher(fp)
        matched_rgb = searcher.search_rgb()
        combined = combiner.combine_with(matched_rgb)
        real_name = searcher.get_real_name()
        if real_name:
            SafeSaver.save_image(
                combined,
                destdir,
                real_name,
                on_queued=on_file_queued,
                on_saved=on_file_saved,
            )
    except NoRGBImageMatchedError:
        # No matching RGB image found: silently skip this alpha file
        pass
    except BaseException as arg:
        # Error feedback (broad catch kept intentionally: this runs in
        # worker threads and must never propagate)
        Logger.error(
            f'CombineRGBwithA: Error occurred while processing alpha image "{fp}": Exception{type(arg)} {arg}'
        )
    if on_processed:
        on_processed()
264 |
265 |
266 | ########## Main-主程序 ##########
def main(rootdir: str, destdir: str, do_del: bool = False):
    """Combines the RGB images and the Alpha images in the given directory automatically according to their file names,
    then saves the combined images into another given directory.

    :param rootdir: Source directory;
    :param destdir: Destination directory;
    :param do_del: Whether to delete the existed destination directory first, `False` for default;
    :rtype: None;
    """
    print("\n正在解析路径...", s=1)
    Logger.info("CombineRGBwithA: Retrieving file paths...")
    rootdir = osp.normpath(osp.realpath(rootdir))
    destdir = osp.normpath(osp.realpath(destdir))
    # Collect candidates: image files whose names match an alpha pattern
    flist = get_filelist(rootdir)
    flist = list(filter(is_image_file, flist))
    flist = list(
        filter(lambda x: AlphaRGBSearcher.calc_real_name(x) is not None, flist)
    )

    if do_del:
        print("\n正在清理...", s=1)
        rmdir(destdir)  # Use with caution: deletes everything in the destination directory beforehand
    SafeSaver.get_instance().reset_counter()
    thread_ctrl = ThreadCtrl()
    ui = UICtrl()
    # Progress reporters: processing (weight 2) and file saving (weight 1)
    tr_processed = TaskReporter(2, len(flist))
    tr_file_saving = TaskReporter(1)
    tracker = TaskReporterTracker(tr_processed, tr_file_saving)

    ui.reset()
    ui.loop_start()
    for i in flist:
        # Process every file recursively (i is the file path)
        ui.request(
            [
                "正在批量合并图片...",
                tracker.to_progress_bar_str(),
                f"当前目录:\t{osp.basename(osp.dirname(i))}",
                f"当前文件:\t{osp.basename(i)}",
                f"累计搜索:\t{tr_processed.to_progress_str()}",
                f"累计导出:\t{tr_file_saving.to_progress_str()}",
                f"预计剩余时间:\t{tracker.to_eta_str()}",
                f"累计消耗时间:\t{tracker.to_rt_str()}",
                f"运行状态统计:\t{Logger.to_ew_stats_str()}",
            ]
        )
        ###
        # Map the source subdirectory into the destination tree
        subdestdir = osp.dirname(i).strip(osp.sep).replace(rootdir, "").strip(osp.sep)
        thread_ctrl.run_subthread(
            image_resolve,
            (
                i,
                osp.join(destdir, subdestdir),
                tr_processed.report,
                tr_file_saving.update_demand,
                tr_file_saving.report,
            ),
            name=f"CBThread:{id(i)}",
        )

    ui.reset()
    ui.loop_stop()
    # Wait until all worker threads and pending file saves have finished
    while (
        thread_ctrl.count_subthread()
        or not SafeSaver.get_instance().completed()
        or tracker.get_progress() < 1
    ):
        ui.request(
            [
                "正在批量合并图片...",
                tracker.to_progress_bar_str(),
                f"累计搜索:\t{tr_processed.to_progress_str()}",
                f"累计导出:\t{tr_file_saving.to_progress_str()}",
                f"预计剩余时间:\t{tracker.to_eta_str()}",
                f"累计消耗时间:\t{tracker.to_rt_str()}",
                f"运行状态统计:\t{Logger.to_ew_stats_str()}",
            ]
        )
        ui.refresh(post_delay=0.1)

    ui.reset()
    print("\n批量合并图片结束!", s=1)
    print(f"  累计导出 {tr_file_saving.get_done()} 张照片")
    print(f"  此项用时 {round(tracker.get_rt(), 1)} 秒")
351 |
--------------------------------------------------------------------------------
/src/DecodeTextAsset.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2022-2025, Harry Huang
3 | # @ BSD 3-Clause License
4 | from typing import Callable, Optional, Union
5 |
6 | import json
7 | import math
8 | import os.path as osp
9 | from collections import defaultdict
10 |
11 | import bson
12 | import numpy as np
13 | from Crypto.Cipher import AES
14 | from Crypto.Util.Padding import unpad
15 |
16 | from .utils.GlobalMethods import (
17 | print,
18 | rmdir,
19 | get_filelist,
20 | is_ab_file,
21 | is_known_asset_file,
22 | is_binary_file,
23 | get_modules_from_package_name,
24 | )
25 | from .utils.Logger import Logger
26 | from .utils.SaverUtils import SafeSaver
27 | from .utils.TaskUtils import ThreadCtrl, UICtrl, TaskReporter, TaskReporterTracker
28 |
29 |
class CompatibleFloat(float):
    """Float subclass that normalizes its value to a float32-compatible
    textual representation when the extra float64 digits are insignificant."""

    def __new__(cls, value: float):
        # Render the value at single (7) and double (16) significant-digit
        # precision; when both agree, the double-precision form is used.
        f32 = CompatibleFloat.truncate(value, 7)
        f64 = CompatibleFloat.truncate(value, 16)
        return super().__new__(cls, f64 if f32 == f64 else f32)

    @staticmethod
    def truncate(value: float, precision: int) -> str:
        """Formats `value` with at most `precision` significant decimal
        digits, trimming trailing fractional zeros but keeping one digit
        after the decimal point.

        :param value: The value to format;
        :param precision: Number of significant digits to keep;
        :returns: The formatted decimal string;
        """
        if value == 0.0:
            return "0.0"
        # Count digits left of the decimal point to budget the right side
        l_digits = int(math.floor(math.log10(value if value > 0.0 else -value))) + 1
        r_digits = precision - l_digits if l_digits < precision else 0
        formatted = f"{value:.{r_digits}f}"
        # Fix: only strip trailing zeros when a fractional part exists.
        # The previous unconditional rstrip("0") corrupted integer-form
        # output, e.g. truncate(20000000.0, 7) became "2".
        if "." in formatted:
            formatted = formatted.rstrip("0")
            if formatted.endswith("."):
                formatted += "0"
        return formatted
44 |
45 |
class ArkFBOLibrary:
    """Helper class for Arknights **FlatBuffers Objects** (FBO) decoding,
    which provides access to the Arknights FlatBuffers Schema (FBS).

    Conventionally, Arknights FBO can be converted to JSON format, assuming that FBS are known.
    Note that the FBS may be incompatible between different Arknights servers.

    *Special thanks to OpenArknightsFBS (https://github.com/MooncellWiki/OpenArknightsFBS).*
    """

    # Generated FBS modules for the CN server
    CN = get_modules_from_package_name("src.fbs.CN")
    _AUTO_GUESS_ROOT_TYPE = None

    @staticmethod
    def guess_root_type(path: str):
        """Returns the type class of the most possible FBS root type of the given file.

        :param path: The file path;
        :returns: The root type or `None` indicates that the file may not be a FlatBuffer Object file;
        :rtype: Type or None;
        """
        file_name = osp.basename(path)
        # Pick the first schema module whose short name occurs in the file name
        matched = next(
            (
                module
                for module in ArkFBOLibrary.CN
                if module.__name__.split(".")[-1] in file_name
            ),
            None,
        )
        return getattr(matched, "ROOT_TYPE", None) if matched is not None else None

    @staticmethod
    def decode(path: str, root_type: Optional[type] = _AUTO_GUESS_ROOT_TYPE):
        """Decodes the given FlatBuffers binary file.

        :param path: The file path;
        :param root_type: The root type of the FBO;
        :returns: The decoded object;
        :rtype: JSON serializable object;
        """
        if not root_type:
            root_type = ArkFBOLibrary.guess_root_type(path)
        if not root_type:
            Logger.error(f'DecodeTextAsset: Failed to guess root type of "{path}"')
            Logger.error(f"DecodeTextAsset: CN lib data = {ArkFBOLibrary.CN}")
            raise AssertionError("Failed to guess root type")
        with open(path, "rb") as f:
            # Drop the 128-byte header (RSA signature) before parsing
            payload = bytearray(f.read())[128:]
        decoded = FBOHandler(payload, root_type).to_json_dict()
        Logger.debug(
            f'DecodeTextAsset: FBS decoded "{path}" with type {root_type.__name__}'
        )
        return decoded
97 |
98 |
99 | # spell-checker: disable
100 |
101 |
class ArkAESLibrary:
    """Helper class for Arknights **AES-CBC encrypted files** decoding,
    which provides methods and keys for decryption.

    Conventionally, Arknights AES-CBC encrypted files are originally JSON or BSON format.
    Note that the secret key (chat_mask) may be incompatible between different major version of Arknights.

    *Special thanks to ashlen (https://github.com/thesadru).*
    """

    MASK_V2 = b"UITpAi82pHAWwnzqHRMCwPonJLIB3WCl"

    # spell-checker: enable

    @staticmethod
    def aes_cbc_decrypt_bytes(data: bytes, mask: bytes, has_rsa: bool = True):
        """Decrypts the given AES-CBC encrypted data using the specified mask.

        :param data: The data to decrypt;
        :param mask: The 32-bytes secret mask whose first 16-bytes are incomplete key
        and last 16-bytes are initialization vector (IV);
        :param has_rsa: Whether the data has a 128-bytes RSA signature in the very beginning;
        :returns: The decrypted data;
        :rtype: bytes;
        """
        if not isinstance(data, bytes) or len(data) < 16:
            raise ValueError(
                "The data argument should be a bytes object longer than 16 bytes"
            )
        if not isinstance(mask, bytes) or len(mask) != 32:
            raise ValueError("The mask argument should be a 32-byte-long bytes object")
        # Trim the signature
        if has_rsa:
            data = data[128:]
        # Calculate the key and IV (the first ciphertext block XOR-ed with
        # the second half of the mask)
        key = mask[:16]
        iv = bytearray(d ^ m for d, m in zip(data[:16], mask[16:]))
        # Decrypt the data
        aes = AES.new(key, AES.MODE_CBC, iv)
        return unpad(aes.decrypt(data[16:]), AES.block_size)

    @staticmethod
    def decode(path: str, mask: bytes = MASK_V2):
        """Decodes the given AES-CBC encrypted file using the given mask.
        If the decrypted data is not JSON, it will be recognized as BSON and be converted to JSON.

        :param path: The file path;
        :param mask: The 32-bytes secret mask;
        :returns: The decoded object;
        :rtype: JSON serializable object;
        """
        with open(path, "rb") as f:
            data = f.read()
        decrypted = ArkAESLibrary.aes_cbc_decrypt_bytes(data, mask)
        try:
            dic = json.loads(decrypted)
            Logger.debug(f'DecodeTextAsset: AES decoded JSON document "{path}"')
        except (UnicodeError, json.JSONDecodeError):
            # Fix: BSON payloads that happen to decode as valid UTF-8 raise
            # json.JSONDecodeError (not UnicodeError) from json.loads; the
            # previous `except UnicodeError` let them crash instead of
            # falling through to the BSON branch.
            dic = bson.loads(decrypted)
            Logger.debug(f'DecodeTextAsset: AES decoded BSON document "{path}"')
        return dic
163 |
164 |
class FBOHandler:
    """Handler for FlatBuffers Objects, implementing conversion to Python dict type."""

    # Types serialized into the result without conversion
    SERIALIZE_AS_IS = Union[bool, int, str, list, tuple, dict, None]
    # Byte-like types decoded to str before serialization
    SERIALIZE_AS_STR = Union[bytes, bytearray, memoryview]
    SERIALIZE_ENCODING = "UTF-8"

    def __init__(self, data: bytearray, root_type: type):
        # root_type is a flatc-generated class exposing `GetRootAs`
        self._root = root_type.GetRootAs(data, 0)

    @staticmethod
    def _to_literal(obj: object):
        """Converts one FlatBuffers field value to a JSON-serializable
        literal, recursing into `_to_json_dict` for nested table objects."""
        if isinstance(obj, float):
            # Normalize floats to avoid float32/float64 representation noise
            return CompatibleFloat(obj)
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        if isinstance(obj, FBOHandler.SERIALIZE_AS_IS):
            return obj
        if isinstance(obj, FBOHandler.SERIALIZE_AS_STR):
            # surrogateescape keeps undecodable bytes round-trippable
            return str(
                obj, encoding=FBOHandler.SERIALIZE_ENCODING, errors="surrogateescape"
            )
        return FBOHandler._to_json_dict(obj)

    @staticmethod
    def _to_json_dict(obj: object):
        """Converts a generated table object (or key-value item) to a
        JSON-serializable dict by reflecting over its accessor methods.

        Objects exposing `Key`/`Value` accessors become a single-entry dict;
        other tables become a dict of field name to converted value.
        """
        if obj is None:
            return None

        data = {}

        # flatc-style accessors: X(), XLength(), XIsNone()
        f_obj_key = getattr(obj, "Key", None)
        f_obj_value = getattr(obj, "Value", None)
        f_obj_value_len = getattr(obj, "ValueLength", None)

        if f_obj_key and f_obj_value:
            # As key-value item:
            assert isinstance(f_obj_key, Callable) and isinstance(f_obj_value, Callable)
            if f_obj_value_len:
                # Value is array
                assert isinstance(f_obj_value_len, Callable)
                data[FBOHandler._to_literal(f_obj_key())] = [
                    FBOHandler._to_literal(f_obj_value(i))
                    for i in range(f_obj_value_len())
                ]
            else:
                # Value is single
                data[FBOHandler._to_literal(f_obj_key())] = FBOHandler._to_literal(
                    f_obj_value()
                )
        else:
            # As table object:
            # Collect field names; each entry holds the accessor triple
            # [IsNone accessor, Length accessor, value accessor]
            field_name_map = defaultdict(lambda: [None, None, None])
            for field_name in dir(obj):
                if field_name in ("Init", "Clear"):
                    continue
                elif field_name.startswith(("_", "GetRootAs")):
                    continue
                elif field_name != "IsNone" and field_name.endswith("IsNone"):
                    # "IsNone" suffix is 6 chars long
                    field_name_map[field_name[:-6]][0] = getattr(obj, field_name, None)
                elif field_name != "Length" and field_name.endswith("Length"):
                    # "Length" suffix is 6 chars long
                    field_name_map[field_name[:-6]][1] = getattr(obj, field_name, None)
                else:
                    field_name_map[field_name][2] = getattr(obj, field_name, None)

            # Collect field values
            for field_name, (
                f_field_is_none,
                f_field_len,
                f_field,
            ) in field_name_map.items():
                if isinstance(f_field, Callable):
                    value = None
                    if isinstance(f_field_is_none, Callable) and f_field_is_none():
                        # Value is explicit null
                        continue
                    elif isinstance(f_field_len, Callable):
                        # Value is table or array
                        field_len = f_field_len()
                        if field_len:
                            if "Key" in dir(f_field(0)):
                                # Value is table: merge the key-value items
                                value = {}
                                for i in range(field_len):
                                    item = FBOHandler._to_json_dict(f_field(i))
                                    assert isinstance(item, dict)
                                    value.update(item)
                            else:
                                # Value is array
                                value = [
                                    FBOHandler._to_literal(f_field(i))
                                    for i in range(field_len)
                                ]
                        else:
                            # TODO handle empty table
                            pass
                    else:
                        # Value is common literal
                        value = FBOHandler._to_literal(f_field())
                    # Add this field to the object data
                    data[field_name] = value

        # Return the whole object data
        return data

    def to_json_dict(self):
        """Converts the wrapped root object to a JSON-serializable dict."""
        return FBOHandler._to_json_dict(self._root)
273 |
274 |
def text_asset_resolve(
    fp: str,
    destdir: str,
    on_processed: Optional[Callable],
    on_file_queued: Optional[Callable],
    on_file_saved: Optional[Callable],
):
    """Decodes the given Arknights TextAsset file that is either FBO stored format or AES encrypted format,
    otherwise does nothing.

    :param fp: Path to the file;
    :param destdir: Destination directory;
    :param on_processed: Callback `f()` for finished, `None` for ignore;
    :param on_file_queued: Callback `f()` invoked when a file was queued, `None` for ignore;
    :param on_file_saved: Callback `f(file_path_or_none_for_not_saved)`, `None` for ignore;
    :rtype: None;
    """
    try:
        if osp.isfile(fp) and is_binary_file(fp):
            # FBO takes priority; anything unrecognized is treated as AES
            root_type = ArkFBOLibrary.guess_root_type(fp)
            decoded = (
                ArkFBOLibrary.decode(fp, root_type)
                if root_type
                else ArkAESLibrary.decode(fp)
            )
            if decoded:
                payload = bytes(
                    json.dumps(decoded, ensure_ascii=False, indent=2), encoding="UTF-8"
                )
                SafeSaver.save_bytes(
                    payload,
                    destdir,
                    osp.basename(fp),
                    ".json",
                    on_file_queued,
                    on_file_saved,
                )
        else:
            Logger.debug(f'DecodeTextAsset: "{fp}" not binary file')
    except Exception as arg:
        Logger.error(
            f'DecodeTextAsset: Failed to handle "{fp}": Exception{type(arg)} {arg}'
        )
    if on_processed:
        on_processed()
316 |
317 |
318 | ########## Main-主程序 ##########
def main(rootdir: str, destdir: str, do_del: bool = False):
    """Decodes the possible Arknights TextAsset files in the specified directory
    then saves the decoded data into another given directory.

    :param rootdir: Source directory;
    :param destdir: Destination directory;
    :param do_del: Whether to delete the existed destination directory first, `False` for default;
    :rtype: None;
    """
    print("\n正在解析路径...", s=1)
    Logger.info("DecodeTextAsset: Retrieving file paths...")
    rootdir = osp.normpath(osp.realpath(rootdir))
    destdir = osp.normpath(osp.realpath(destdir))
    # Candidates: files that are neither known asset files nor AB bundles
    flist = get_filelist(rootdir)
    flist = list(filter(lambda x: not is_known_asset_file(x), flist))
    flist = list(filter(lambda x: not is_ab_file(x), flist))

    if do_del:
        print("\n正在清理...", s=1)
        rmdir(destdir)
    SafeSaver.get_instance().reset_counter()
    thread_ctrl = ThreadCtrl()
    ui = UICtrl()
    # Progress reporters: processing (weight 2) and file saving (weight 1)
    tr_processed = TaskReporter(2, len(flist))
    tr_file_saving = TaskReporter(1)
    tracker = TaskReporterTracker(tr_processed, tr_file_saving)

    ui.reset()
    ui.loop_start()
    for i in flist:
        # Dispatch one decoding task per file (i is the file path)
        ui.request(
            [
                "正在批量解码文本资源...",
                tracker.to_progress_bar_str(),
                f"当前目录:\t{osp.basename(osp.dirname(i))}",
                f"当前搜索:\t{osp.basename(i)}",
                f"累计搜索:\t{tr_processed.to_progress_str()}",
                f"累计解码:\t{tr_file_saving.to_progress_str()}",
                f"预计剩余时间:\t{tracker.to_eta_str()}",
                f"累计消耗时间:\t{tracker.to_rt_str()}",
                f"运行状态统计:\t{Logger.to_ew_stats_str()}",
            ]
        )
        ###
        # Map the source subdirectory into the destination tree
        subdestdir = osp.dirname(i).strip(osp.sep).replace(rootdir, "").strip(osp.sep)
        thread_ctrl.run_subthread(
            text_asset_resolve,
            (
                i,
                osp.join(destdir, subdestdir),
                tr_processed.report,
                tr_file_saving.update_demand,
                tr_file_saving.report,
            ),
            name=f"RFThread:{id(i)}",
        )

    ui.reset()
    ui.loop_stop()
    # Wait until all worker threads and pending file saves have finished
    while (
        thread_ctrl.count_subthread()
        or not SafeSaver.get_instance().completed()
        or tracker.get_progress() < 1
    ):
        ui.request(
            [
                "正在批量解码文本资源...",
                tracker.to_progress_bar_str(),
                f"累计搜索:\t{tr_processed.to_progress_str()}",
                f"累计解码:\t{tr_file_saving.to_progress_str()}",
                f"预计剩余时间:\t{tracker.to_eta_str()}",
                f"累计消耗时间:\t{tracker.to_rt_str()}",
                f"运行状态统计:\t{Logger.to_ew_stats_str()}",
            ]
        )
        ui.refresh(post_delay=0.1)

    ui.reset()
    print("\n批量解码文本资源结束!", s=1)
    print(f"  累计搜索 {tr_processed.get_done()} 个文件")
    print(f"  累计解码 {tr_file_saving.get_done()} 个文件")
    print(f"  此项用时 {round(tracker.get_rt(), 1)} 秒")
401 |
--------------------------------------------------------------------------------
/src/ModelsDataDist.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2022-2025, Harry Huang
3 | # @ BSD 3-Clause License
4 | from typing import Any
5 |
6 | import json
7 | import os
8 | import os.path as osp
9 | import re
10 | from datetime import datetime
11 |
12 | from .DecodeTextAsset import ArkFBOLibrary
13 | from .utils.GlobalMethods import color, print, get_dirlist, get_filelist
14 | from .utils.Logger import Logger
15 |
16 |
class ModelsDist:
    """Builds the ArkPets models distribution metadata (written to
    `models_data.json`) by combining decoded Arknights game data with the
    local Spine model directories."""

    SORT_TAGS_L10N = {
        # tag -> translation
        "DynIllust": "动态立绘",
        "Operator": "干员",
        "Skinned": "时装",
        "Special": "异格",
        "Enemy": "敌人",
        "EnemyNormal": "普通敌人",
        "EnemyElite": "精英敌人",
        "EnemyBoss": "领袖敌人",
        "Rarity_1": "一星",
        "Rarity_2": "二星",
        "Rarity_3": "三星",
        "Rarity_4": "四星",
        "Rarity_5": "五星",
        "Rarity_6": "六星",
    }
    # ArkPets version this dataset targets
    ARK_PETS_COMPATIBILITY = [2, 2, 0]
    SERVER_REGION = "zh_CN"
    MODELS_DIR = {
        # type -> dirname
        "Operator": "models",
        "Enemy": "models_enemies",
        "DynIllust": "models_illust",
    }
    MODELS_FILE_EXT = {
        # extension type -> extension alternatives
        ".atlas": [".atlas"],
        ".png": [".png"],
        ".skel": [".skel", ""],
    }
    GAMEDATA_DIR = "anon"
    TEMP_DIR = "temp/am_upk_mdd"

    def __init__(self):
        # Dataset skeleton; "data" maps asset keys to item dicts
        self.data = {
            "storageDirectory": ModelsDist.MODELS_DIR,
            "sortTags": ModelsDist.SORT_TAGS_L10N,
            "gameDataVersionDescription": f"Producer: ArkUnpacker 3\nDate: {datetime.now().date()}\n",
            "gameDataServerRegion": ModelsDist.SERVER_REGION,
            "data": {},
            "arkPetsCompatibility": ModelsDist.ARK_PETS_COMPATIBILITY,
        }

    def get_gamedata(self, alias: tuple):
        """Decodes and returns the first data file in `TEMP_DIR` whose base
        name starts with any of the given aliases.

        :param alias: Tuple of acceptable file-name prefixes;
        :raises ValueError: If the matched file decodes to `None`;
        :raises FileNotFoundError: If no file matches any alias;
        """
        for i in get_filelist(ModelsDist.TEMP_DIR):
            if any(osp.basename(i).startswith(a) for a in alias):
                rst = ArkFBOLibrary.decode(i)
                if rst is None:
                    raise ValueError("Decoded data is none")
                return rst
        raise FileNotFoundError(f"Failed to find data file with the name: {alias}")

    def get_item_data(
        self,
        asset_id: Any,
        type: Any,
        style: Any,
        sort_tags: list,
        name: Any,
        appellation: Any,
        sg_id: Any,
        sg_name: Any,
    ):
        """Returns one dataset item dict in the ArkPets model schema."""
        return {
            "assetId": asset_id,
            "type": type,
            "style": style,
            "name": name,
            "appellation": appellation,
            "skinGroupId": sg_id,
            "skinGroupName": sg_name,
            "sortTags": sort_tags,
        }

    def get_operator_sort_tags(self, item: dict):
        """Derives the sort tags of an operator entry (special flag and
        rarity)."""
        rst = ["Operator"]
        if item.get("IsSpChar", False):
            rst.append("Special")
        try:
            if item.get("Rarity", None) is not None:
                # Rarity is shifted by one to map onto the Rarity_1..Rarity_6 tags
                rarity = f"Rarity_{int(item['Rarity']) + 1}"
                if rarity in self.data["sortTags"]:
                    rst.append(rarity)
        except BaseException:
            Logger.warn("ModelsDataDist: Failed to recognize rarity tag.")
        return rst

    def get_enemy_sort_tags(self, item: dict):
        """Derives the sort tags of an enemy entry from its level type
        (0 normal, 1 elite, 2 boss)."""
        rst = ["Enemy"]
        additional = {0: "EnemyNormal", 1: "EnemyElite", 2: "EnemyBoss"}.get(
            item["LevelType"]["MValue"], None
        )
        return rst + [additional] if additional else rst

    def update_operator_data(self):
        """Adds all obtainable operators (from character_table) to the
        dataset."""
        Logger.info("ModelsDataDist: Decoding operator data.")
        print("解析干员信息...")
        raw: "dict[str,dict]" = self.get_gamedata(("character_table",))
        Logger.info("ModelsDataDist: Parsing operator data.")
        collected = {}
        for k, v in raw["Characters"].items():
            if k.startswith("char_") and not v.get("IsNotObtainable", None):
                # Strip the "char_" prefix to form the dataset key
                key_char = k.lower()[5:]
                collected[key_char] = self.get_item_data(
                    f"build_char_{key_char}",
                    "Operator",
                    "BuildingDefault",
                    self.get_operator_sort_tags(v),
                    v["Name"],
                    v["Appellation"],
                    "DEFAULT",
                    "默认服装",
                )
        self.data["data"].update(collected)
        Logger.info(f"ModelsDataDist: Found {len(collected)} operators.")
        print(f"\t找到 {len(collected)} 位干员", c=2)

    def update_skin_data(self):
        """Adds operator skins (from skin_table) to the dataset; each skin
        inherits name/appellation from its base operator entry."""
        Logger.info("ModelsDataDist: Decoding skin data.")
        print("解析干员皮肤信息...")
        raw: "dict[str,dict]" = self.get_gamedata(("skin_table",))
        Logger.info("ModelsDataDist: Parsing skin data.")
        collected = {}
        for k, v in raw["CharSkins"].items():
            if v.get("BuildingId", None):
                key_char = v["CharId"][5:].lower()
                if key_char in self.data["data"]:
                    origin = self.data["data"][key_char]
                    key_skin = v["BuildingId"][5:].lower()
                    if key_skin not in self.data["data"]:
                        sort_tags = origin["sortTags"] + ["Skinned"]
                        collected[key_skin] = self.get_item_data(
                            f"build_char_{key_skin}",
                            "Operator",
                            "BuildingSkin",
                            sort_tags,
                            origin["name"],
                            origin["appellation"],
                            v["DisplaySkin"]["SkinGroupId"],
                            v["DisplaySkin"]["SkinGroupName"],
                        )
                    else:
                        Logger.info(
                            f'ModelsDataDist: The skin-key of the skin "{k}" collided with an existed one.'
                        )
                else:
                    Logger.warn(
                        f'ModelsDataDist: The operator-key of the skin "{k}" not found.'
                    )
                    print(f"\t皮肤 {k} 找不到对应的干员Key", c=3)
        self.data["data"].update(collected)
        Logger.info(f"ModelsDataDist: Found {len(collected)} skins.")
        print(f"\t找到 {len(collected)} 件干员皮肤", c=2)

    def update_enemy_data(self):
        """Adds all enemies (from enemydata / enemy_database) to the
        dataset."""
        Logger.info("ModelsDataDist: Decoding enemy data.")
        print("解析敌方单位信息...")
        raw: "dict[str,list]" = self.get_gamedata(("enemydata", "enemy_database"))
        Logger.info("ModelsDataDist: Parsing enemy data.")
        collected = {}
        if not isinstance(raw["Enemies"], dict):
            raise TypeError("Value key 'Enemies' is not a dict")
        for k, v in raw["Enemies"].items():
            if k.startswith("enemy_"):
                # Strip the "enemy_" prefix to form the dataset key
                key_enemy = k.lower()[6:]
                tags = self.get_enemy_sort_tags(v[0]["EnemyData"])
                collected[key_enemy] = self.get_item_data(
                    f"enemy_{key_enemy}",
                    "Enemy",
                    None,
                    tags,
                    v[0]["EnemyData"]["Name"]["MValue"],
                    None,
                    tags[-1],
                    self.data["sortTags"][tags[-1]],
                )
        self.data["data"].update(collected)
        Logger.info(f"ModelsDataDist: Found {len(collected)} enemies.")
        print(f"\t找到 {len(collected)} 个敌方单位", c=2)

    def update_dynillust_data(self):
        """Adds dynamic illustrations found on disk to the dataset; each one
        inherits its metadata from the matching operator entry."""
        Logger.info("ModelsDataDist: Parsing dynillust data.")
        print("分析动态立绘信息...")
        collected = {}
        if osp.isdir(self.data["storageDirectory"]["DynIllust"]):
            for i in get_dirlist(
                self.data["storageDirectory"]["DynIllust"], max_depth=1
            ):
                # (i is the directory of one dyn illust)
                base = osp.basename(i)
                if base.startswith("dyn_"):
                    key = base.lower()
                    key_char = re.findall(r"[0-9]+.+", key)
                    if len(key_char) > 0:
                        key_char = key_char[0]  # Key of the operator this dyn illust belongs to
                        if key_char in self.data["data"]:
                            origin = self.data["data"][key_char]
                            sort_tags = origin["sortTags"] + ["DynIllust"]
                            collected[key] = self.get_item_data(
                                key,
                                "DynIllust",
                                None,
                                sort_tags,
                                origin["name"],
                                origin["appellation"],
                                origin["skinGroupId"],
                                origin["skinGroupName"],
                            )
                        else:
                            Logger.warn(
                                f'ModelsDataDist: The operator-key of the dyn illust "{key}" not found.'
                            )
                            print(f"\t动态立绘 {key} 找不到对应的干员Key", c=3)
                    else:
                        Logger.warn(
                            f'ModelsDataDist: The operator-key of the dyn illust "{key}" could not pass the regular expression check.'
                        )
                        print(f"\t动态立绘 {key} 未成功通过正则匹配", c=3)
        else:
            Logger.warn("ModelsDataDist: The directory of dyn illust not found.")
            print("\t动态立绘根文件夹未找到", c=3)
        self.data["data"].update(collected)
        Logger.info(f"ModelsDataDist: Found {len(collected)} dynillusts.")
        print(f"\t找到 {len(collected)} 套动态立绘", c=2)

    def verify_models(self):
        """Checks that every dataset item has its model files on disk and
        records the discovered files into the item's `assetList`."""
        Logger.info("ModelsDataDist: Validating models files.")
        print("校验模型文件...")
        cur_done = 0
        cur_fail = 0
        total = len(self.data["data"])
        for k, v in self.data["data"].items():
            # (k is the key; it should equal the model folder name)
            fail_flag = False
            asset_list = {}
            if v["type"] in self.data["storageDirectory"]:
                # The type has an entry in the storage-directory preset
                d = osp.join(self.data["storageDirectory"][v["type"]], k)
                asset_list_pending = {}
                if osp.isdir(d):
                    # The expected directory exists
                    file_list = os.listdir(d)
                    for ext_type, ext_alt in ModelsDist.MODELS_FILE_EXT.items():
                        # Require at least one of the alternatives in each ext_alt group
                        ext_verified = False
                        for ext in ext_alt:
                            # (ext is a file extension)
                            asset_list_specified = list(
                                filter(lambda x: osp.splitext(x)[1] == ext, file_list)
                            )
                            if len(asset_list_specified) > 0:
                                # Files with the extension ext exist
                                if len(asset_list_specified) == 1:
                                    asset_list_pending[ext_type] = asset_list_specified[
                                        0
                                    ]
                                else:
                                    Logger.debug(
                                        f'ModelsDataDist: The {ext_type} asset of "{k}" is multiple, see in "{d}".'
                                    )
                                    asset_list_specified.sort()
                                    asset_list_pending[ext_type] = asset_list_specified
                                ext_verified = True
                                break  # Stop iterating over ext
                        # No file required by this ext_alt group exists
                        if not ext_verified:
                            Logger.info(
                                f'ModelsDataDist: The {ext_type} asset of "{k}" not found, see in "{d}".'
                            )
                            print(
                                f"[{color(3)}{k}{color(7)}] {v['name']}({v['type']}):{color(1)}{ext_type} 文件缺失{color(7)}"
                            )
                            fail_flag = True
                            break  # Stop iterating over ext_alt
                    if not fail_flag:
                        asset_list = asset_list_pending
                    else:
                        cur_fail += 1
                else:
                    Logger.info(
                        f'ModelsDataDist: The model directory of "{k}" not found, expected path "{d}".'
                    )
                    print(
                        f"[{color(3)}{k}{color(7)}] {v['name']}({v['type']}):模型不存在"
                    )
                    cur_fail += 1
            else:
                Logger.info(
                    f"ModelsDataDist: The model asset of \"{k}\" is the type of \"{v['type']}\" which is not declared in the prefab."
                )
                print(
                    f"[{color(3)}{k}{color(7)}] {v['name']}({v['type']}):未在脚本预设中找到其类型的存储目录"
                )
                cur_fail += 1
            self.data["data"][k]["assetList"] = asset_list
            cur_done += 1
            if cur_done % 100 == 0:
                print(
                    f"\t已处理完成 {color(2)}{round(cur_done / total * 100)}%{color(7)}"
                )
        Logger.info(
            f"ModelsDataDist: Verify models completed, {cur_done - cur_fail} success, {cur_fail} failure."
        )
        print(
            f"\n\t校验完成:{color(2)}成功{cur_done - cur_fail}{color(7)},失败{cur_fail}"
        )

    def export_json(self):
        """Writes the collected dataset to `models_data.json` in the CWD."""
        Logger.info("ModelsDataDist: Writing to json.")
        with open("models_data.json", "w", encoding="UTF-8") as f:
            json.dump(self.data, f, ensure_ascii=False, indent=4)
        Logger.info("ModelsDataDist: Succeeded in writing to json.")
331 |
332 |
333 | ########## Main-主程序 ##########
def main():
    """Entry point: collects, verifies, and exports the models dataset."""
    md = ModelsDist()
    # Run the pipeline stages in their required order.
    for stage in (
        md.update_operator_data,
        md.update_skin_data,
        md.update_enemy_data,
        md.update_dynillust_data,
        md.verify_models,
        md.export_json,
    ):
        stage()
342 |
--------------------------------------------------------------------------------
/src/ResolveAB.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2022-2025, Harry Huang
3 | # @ BSD 3-Clause License
4 | from typing import Callable, List, Optional, Sequence, TypeVar, Union
5 |
6 | import os.path as osp
7 |
8 | import UnityPy
9 | import UnityPy.classes as uc
10 | from UnityPy.enums.BundleFile import CompressionFlags
11 | from UnityPy.files.File import File
12 | from UnityPy.helpers import CompressionHelper
13 | from UnityPy.streams.EndianBinaryReader import EndianBinaryReader
14 |
15 | from .lz4ak.Block import decompress_lz4ak
16 | from .utils.GlobalMethods import print, rmdir, get_filelist, is_ab_file
17 | from .utils.Logger import Logger
18 | from .utils.SaverUtils import SafeSaver
19 | from .utils.TaskUtils import ThreadCtrl, UICtrl, TaskReporter, TaskReporterTracker
20 |
21 | # New compression algorithm introduced in Arknights v2.5.04+
22 | CompressionHelper.DECOMPRESSION_MAP[CompressionFlags.LZHAM] = decompress_lz4ak
23 |
24 |
25 | _T = TypeVar("_T", bound=uc.Object)
26 |
27 |
class Resource:
    """The class representing a collection of the objects in an UnityPy Environment."""

    def __init__(self, env: UnityPy.Environment):
        """Initializes with the given UnityPy Environment instance.

        Reads every object in the environment once and sorts them into
        per-type buckets for later extraction.

        :param env: The Environment instance from `UnityPy.load()`;
        :rtype: None;
        """
        # Derive the resource name from the container when it is a File;
        # a raw EndianBinaryReader input carries no name of its own.
        if isinstance(env.file, File):
            self.name: str = env.file.name
        elif isinstance(env.file, EndianBinaryReader):
            self.name: str = ""
        else:
            raise TypeError(
                f"Unknown type of UnityPy Environment file: {type(env.file).__name__}"
            )
        self.env: UnityPy.Environment = env
        self.length: int = len(env.objects)
        ###
        self.sprites: List[uc.Sprite] = []
        self.texture2ds: List[uc.Texture2D] = []
        self.textassets: List[uc.TextAsset] = []
        self.audioclips: List[uc.AudioClip] = []
        self.materials: List[uc.Material] = []
        self.monobehaviors: List[uc.MonoBehaviour] = []
        self.meshes: List[uc.Mesh] = []
        ###
        # NOTE: the isinstance chain is order-sensitive — each object goes into
        # the first bucket that matches.
        for i in [o.read() for o in env.objects]:
            if isinstance(i, uc.Sprite):
                self.sprites.append(i)
            elif isinstance(i, uc.Texture2D):
                self.texture2ds.append(i)
            # Exclude MonoScript objects, which would otherwise match TextAsset.
            elif isinstance(i, uc.TextAsset) and not isinstance(i, uc.MonoScript):
                self.textassets.append(i)
            elif isinstance(i, uc.AudioClip):
                self.audioclips.append(i)
            elif isinstance(i, uc.Material):
                self.materials.append(i)
            elif isinstance(i, uc.MonoBehaviour):
                self.monobehaviors.append(i)
            elif isinstance(i, uc.Mesh):
                self.meshes.append(i)
            elif isinstance(i, uc.AssetBundle):
                # Prefer the AssetBundle's internal name over the container name.
                if getattr(i, "m_Name", None):
                    if self.name != osp.basename(i.m_Name):
                        Logger.debug(
                            f'ResolveAB: Resource "{self.name}" internally named "{i.m_Name}"'
                        )
                    self.name = osp.basename(i.m_Name)

    def get_object_by_pathid(
        self, pathid: Union[int, dict], search_in: Sequence[_T]
    ) -> Optional[_T]:
        """Gets the object with the given PathID.

        :param pathid: PathID in int or a dict containing `m_PathID` field;
        :param search_in: Searching range;
        :returns: The object, `None` for not found;
        """
        _key = "m_PathID"
        if isinstance(pathid, dict):
            if _key in pathid:
                _pathid = int(pathid[_key])
            else:
                # The reference dict carries no PathID field.
                return None
        else:
            _pathid = int(pathid)
        # Linear scan: match against each candidate's object-reader path_id.
        for i in search_in:
            if i.object_reader is not None and i.object_reader.path_id == _pathid:
                return i
        return None
100 |
101 |
def ab_resolve(
    abfile: str,
    destdir: str,
    do_img: bool,
    do_txt: bool,
    do_aud: bool,
    do_mesh: bool,
    on_processed: Optional[Callable] = None,
    on_file_queued: Optional[Callable] = None,
    on_file_saved: Optional[Callable] = None,
):
    """Extracts an AB file.

    :param abfile: Path to the AB file;
    :param destdir: Destination directory;
    :param do_img: Whether to extract images;
    :param do_txt: Whether to extract text scripts;
    :param do_aud: Whether to extract audios;
    :param do_mesh: Whether to extract mesh;
    :param on_processed: Callback `f()` for finished, `None` for ignore;
    :param on_file_queued: Callback `f()` invoked when a file was queued, `None` for ignore;
    :param on_file_saved: Callback `f(file_path_or_none_for_not_saved)`, `None` for ignore;
    :rtype: None;
    """
    # Imported lazily to avoid a circular import between ResolveAB and ResolveSpine.
    from .ResolveSpine import SpineAsset

    if osp.isfile(abfile):
        try:
            res = Resource(UnityPy.load(abfile))

            Logger.debug(f'ResolveAB: "{res.name}" has {res.length} objects.')
            if res.length >= 10000:
                Logger.info(
                    f'ResolveAB: Too many objects in file "{res.name}", unpacking it may take a long time.'
                )
            elif res.length == 0:
                Logger.info(f'ResolveAB: No object in file "{res.name}".')

            # Prefix Spine-related assets first so their exported names are namespaced.
            for spine in SpineAsset.from_resource(res):
                spine.add_prefix()

            # Category switches paired with the object buckets they control,
            # in the same export order as before.
            categories = (
                (do_img, res.sprites),
                (do_img, res.texture2ds),
                (do_txt, res.textassets),
                (do_aud, res.audioclips),
                (do_mesh, res.meshes),
            )
            for enabled, objects in categories:
                if enabled:
                    SafeSaver.save_objects(
                        objects, destdir, on_file_queued, on_file_saved
                    )
        except BaseException as exc:
            # Error feedback
            Logger.error(
                f'ResolveAB: Error occurred while unpacking file "{abfile}": Exception{type(exc)} {exc}'
            )
    if on_processed:
        on_processed()
169 |
170 |
171 | ########## Main-主程序 ##########
def main(
    src: str,
    destdir: str,
    do_del: bool = False,
    do_img: bool = True,
    do_txt: bool = True,
    do_aud: bool = True,
    do_mesh: bool = False,
    separate: bool = True,
):
    """Extract all the AB files from the given directory or extract a given AB file.

    :param src: Source directory or file;
    :param destdir: Destination directory;
    :param do_del: Whether to delete the existing files in the destination directory, `False` for default;
    :param do_img: Whether to extract images;
    :param do_txt: Whether to extract text scripts;
    :param do_aud: Whether to extract audios;
    :param do_mesh: Whether to extract mesh;
    :param separate: Whether to sort the extracted files by their source AB file path.
    :rtype: None;
    """
    print("\n正在解析路径...", s=1)
    Logger.info("ResolveAB: Retrieving file paths...")
    src = osp.normpath(osp.realpath(src))
    destdir = osp.normpath(osp.realpath(destdir))
    # Collect the AB files to process: the single file itself, or a recursive listing.
    flist = [src] if osp.isfile(src) else get_filelist(src)
    flist = list(filter(is_ab_file, flist))
    if do_del:
        print("\n正在清理...", s=1)
        rmdir(destdir)  # Danger zone
    SafeSaver.get_instance().reset_counter()
    thread_ctrl = ThreadCtrl()
    ui = UICtrl()
    # Two progress stages: AB files processed, then extracted files saved.
    tr_processed = TaskReporter(50, len(flist))
    tr_file_saving = TaskReporter(1)
    tracker = TaskReporterTracker(tr_processed, tr_file_saving)

    ui.reset()
    ui.loop_start()
    for i in flist:
        # (i stands for a file's path)
        ui.request(
            [
                "正在批量解包...",
                tracker.to_progress_bar_str(),
                f"当前目录:\t{osp.basename(osp.dirname(i))}",
                f"当前文件:\t{osp.basename(i)}",
                f"累计解包:\t{tr_processed.to_progress_str()}",
                f"累计导出:\t{tr_file_saving.to_progress_str()}",
                f"预计剩余时间:\t{tracker.to_eta_str()}",
                f"累计消耗时间:\t{tracker.to_rt_str()}",
                f"运行状态统计:\t{Logger.to_ew_stats_str()}",
            ]
        )
        ###
        # NOTE(review): `.replace(src, "")` removes every occurrence of src in
        # the path and `.strip(osp.sep)` strips separators from both ends —
        # assumes src only appears as the path prefix; confirm for edge cases.
        subdestdir = osp.dirname(i).strip(osp.sep).replace(src, "").strip(osp.sep)
        curdestdir = (
            destdir
            if osp.samefile(i, src)
            else (
                osp.join(destdir, subdestdir, osp.splitext(osp.basename(i))[0])
                if separate
                else osp.join(destdir, subdestdir)
            )
        )
        # Each AB file is unpacked on its own worker thread.
        thread_ctrl.run_subthread(
            ab_resolve,
            (
                i,
                curdestdir,
                do_img,
                do_txt,
                do_aud,
                do_mesh,
                tr_processed.report,
                tr_file_saving.update_demand,
                tr_file_saving.report,
            ),
            name=f"RsThread:{id(i)}",
        )

    ui.reset()
    ui.loop_stop()
    # Wait until all worker threads and pending file saves have finished.
    while (
        thread_ctrl.count_subthread()
        or not SafeSaver.get_instance().completed()
        or tracker.get_progress() < 1
    ):
        ui.request(
            [
                "正在批量解包...",
                tracker.to_progress_bar_str(),
                f"累计解包:\t{tr_processed.to_progress_str()}",
                f"累计导出:\t{tr_file_saving.to_progress_str()}",
                f"预计剩余时间:\t{tracker.to_eta_str()}",
                f"累计消耗时间:\t{tracker.to_rt_str()}",
                f"运行状态统计:\t{Logger.to_ew_stats_str()}",
            ]
        )
        ui.refresh(post_delay=0.1)

    ui.reset()
    print("\n批量解包结束!", s=1)
    print(f"  累计解包 {tr_processed.get_done()} 个文件")
    print(f"  累计导出 {tr_file_saving.get_done()} 个文件")
    print(f"  此项用时 {round(tracker.get_rt(), 1)} 秒")
279 |
--------------------------------------------------------------------------------
/src/ResolveSpine.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2022-2025, Harry Huang
3 | # @ BSD 3-Clause License
4 | from enum import StrEnum
5 | from typing import Any, Callable, List, Optional, Sequence, Tuple, TYPE_CHECKING, Union
6 |
7 | import os.path as osp
8 |
9 | import UnityPy
10 | import UnityPy.classes as uc
11 |
12 | from .CombineRGBwithA import AlphaRGBCombiner, image_resize
13 | from .utils.AtlasFile import AtlasFile
14 | from .utils.GlobalMethods import print, rmdir, get_filelist, is_ab_file, stacktrace
15 | from .utils.Logger import Logger
16 | from .utils.SaverUtils import SafeSaver
17 | from .utils.TaskUtils import ThreadCtrl, UICtrl, TaskReporter, TaskReporterTracker
18 |
19 | if TYPE_CHECKING:
20 | from .ResolveAB import Resource
21 |
22 |
class TreeReader:
    """Context manager that exposes the serialized typetree of a Unity object.

    Entering the context returns the typetree dict, or raises
    `AttributeError` when the object has no readable typetree.
    """

    def __init__(self, obj: Optional[uc.Object]):
        # Prefer the underlying object reader when the object wraps one.
        self.obj = getattr(obj, "object_reader", obj)

    def __enter__(self):
        if self.obj is None:
            raise AttributeError("Given object or object reader is none")
        reader = getattr(self.obj, "read_typetree", None)
        if callable(reader):
            result = reader()
            if isinstance(result, dict):
                return result
        raise AttributeError("Given object has no serialized type tree")

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Never suppress exceptions raised inside the with-block.
        return False
39 |
40 |
class SpineType(StrEnum):
    """Enumerates the recognized categories of Spine skeleton assets."""

    UNKNOWN = "Unknown"
    BUILDING = "Building"
    BATTLE_FRONT = "BattleFront"
    BATTLE_BACK = "BattleBack"
    DYN_ILLUST = "DynIllust"

    @staticmethod
    def guess(skel: uc.TextAsset, atlas: uc.TextAsset):
        """Guesses the Spine type of the given skel/atlas pair.

        Tries, in order: the asset name, the skel content, then the atlas
        content. Returns `SpineType.UNKNOWN` when either asset is missing.

        :param skel: The skel TextAsset;
        :param atlas: The atlas TextAsset;
        :returns: The guessed SpineType;
        """
        if not skel or not atlas:
            return SpineType.UNKNOWN

        # Base the lookup name on the skel, falling back to the atlas.
        name = skel.m_Name.replace(".skel", "")
        if not name:
            name = atlas.m_Name.replace(".atlas", "")

        t = SpineType._guess_from_name(name)
        if t != SpineType.UNKNOWN:
            return t

        t = SpineType._guess_from_skel(skel)
        if t != SpineType.UNKNOWN:
            Logger.info(f'ResolveSpine: Guessed Spine type "{t}" for "{name}" via skel')
            return t

        # Last resort: the atlas heuristic never returns UNKNOWN.
        t = SpineType._guess_from_atlas(atlas)
        Logger.info(f'ResolveSpine: Guessed Spine type "{t}" for "{name}" via atlas')
        return t

    @staticmethod
    def _guess_from_name(name: str) -> "SpineType":
        """Guesses the type from conventional asset-name prefixes."""
        t = name.lower()
        if t.startswith("dyn_"):
            return SpineType.DYN_ILLUST
        elif t.startswith("enemy_"):
            return SpineType.BATTLE_FRONT
        elif t.startswith("build_"):
            return SpineType.BUILDING
        else:
            return SpineType.UNKNOWN

    @staticmethod
    def _guess_from_skel(skel: uc.TextAsset) -> "SpineType":
        """Guesses the type from the skel script content.

        Content containing both "default" and "relax" (presumably animation
        names — TODO confirm) is treated as a building model.
        """
        t = skel.m_Script.lower()
        if "default" in t and "relax" in t:
            return SpineType.BUILDING
        else:
            return SpineType.UNKNOWN

    @staticmethod
    def _guess_from_atlas(atlas: uc.TextAsset) -> "SpineType":
        """Guesses front/back battle type by counting name prefixes in the atlas.

        # assumes "f_"/"c_" lines indicate front-facing and "b_" back-facing
        # regions — TODO confirm against game assets.
        """
        t = atlas.m_Script.lower()
        if t.count("\nf_") + t.count("\nc_") >= t.count("\nb_"):
            return SpineType.BATTLE_FRONT
        else:
            return SpineType.BATTLE_BACK
97 |
98 |
class SpineAsset:
    """Bundles one Spine skeleton: skel + atlas + (RGB, Alpha) texture pairs."""

    def __init__(
        self,
        skel: uc.TextAsset,
        atlas: uc.TextAsset,
        tex_list: Sequence[Tuple[uc.Texture2D, uc.Texture2D]],
        type: SpineType,
    ):
        """Initializes with the given Spine components.

        :param skel: The skel TextAsset;
        :param atlas: The atlas TextAsset;
        :param tex_list: Pairs of (RGB texture, Alpha texture);
        :param type: The category of this Spine asset;
        :rtype: None;
        """
        self.skel = skel
        self.atlas = atlas
        self.tex_list = tex_list
        self.type = type

    def add_prefix(self):
        """Adds a prefix to the names of the Spine assets to avoid conflicts.

        :rtype: None;
        """

        # Idempotent: skips objects that already carry the prefix.
        def _add_prefix(obj: Union[uc.TextAsset, uc.Texture2D], pre: str):
            if obj and not obj.m_Name.startswith(pre):
                obj.m_Name = pre + obj.m_Name

        # Prefix pattern: "<type>/<atlas base name>/".
        prefix = (
            f"{self.type.value}/{osp.splitext(osp.basename(self.atlas.m_Name))[0]}/"
        )
        _add_prefix(self.skel, prefix)
        _add_prefix(self.atlas, prefix)
        for i in self.tex_list:
            for j in i:
                _add_prefix(j, prefix)

    def save_spine(
        self,
        destdir: str,
        on_queued: Optional[Callable],
        on_saved: Optional[Callable],
    ):
        """Saves the Spine assets to the destination directory.

        :param destdir: Destination directory;
        :param on_queued: Callback `f(file_path)` invoked when a file was queued, `None` for ignore;
        :param on_saved: Callback `f(file_path_or_none_for_not_saved)` invoked when a file was saved, `None` for ignore;
        :rtype: None;
        """
        Logger.debug(
            f'ResolveSpine: Exporting Spine "{self.skel.m_Name}" + "{self.atlas.m_Name}" + {len(self.tex_list)} textures with type "{self.type}"'
        )
        atlas = AtlasFile.loads(self.atlas.m_Script)
        for i in self.tex_list:
            # (i is an (RGB, Alpha) texture pair)
            if i[0]:
                rgb = i[0].image
                if i[1]:
                    Logger.debug(
                        f'ResolveSpine: Spine asset "{i[0].m_Name}" found with Alpha texture.'
                    )
                    # Merge the separate Alpha texture into the RGB image.
                    rgba = AlphaRGBCombiner(i[1].image).combine_with(rgb)
                else:
                    Logger.debug(
                        f'ResolveSpine: Spine asset "{i[0].m_Name}" found with NO Alpha texture.'
                    )
                    rgba = AlphaRGBCombiner.apply_premultiplied_alpha(rgb)
                # Resize to the page size declared in the atlas, when a page
                # with a matching base file name is found.
                for p in atlas["pages"]:
                    n1 = osp.basename(osp.splitext(p["filename"])[0]).lower()
                    n2 = osp.basename(osp.splitext(i[0].m_Name)[0]).lower()
                    if n1 == n2:
                        rgba = image_resize(rgba, p["size"])
                        break
                SafeSaver.save_image(
                    rgba,
                    destdir,
                    i[0].m_Name,
                    on_queued=on_queued,
                    on_saved=on_saved,
                )
            else:
                Logger.warn("ResolveSpine: Spine asset RGB texture missing.")
        for i in (self.atlas, self.skel):
            SafeSaver.save_object(i, destdir, i.m_Name, on_queued, on_saved)

    @classmethod
    def from_resource(cls, res: "Resource") -> List["SpineAsset"]:
        """Gets Spine assets from the given resource.

        Walks the MonoBehaviour typetrees to pair each SkeletonData with its
        atlas, materials, and textures, then classifies the Spine type.

        :param res: The Resource instance to extract Spine assets from;
        :returns: A list of SpineAsset instances;
        """
        spines = []
        found_front_and_back = list(cls._find_front_and_back_skel_data(res))
        try:
            # sd = SkeletonData
            for mono_sd, tree_sd in cls._find_typetree_by_keys(
                res.monobehaviors, ["atlasAssets", "skeletonJSON"]
            ):
                skel = res.get_object_by_pathid(tree_sd["skeletonJSON"], res.textassets)
                mono_ad = res.get_object_by_pathid(
                    tree_sd["atlasAssets"][0], res.monobehaviors
                )
                # ad = AtlasData
                with TreeReader(mono_ad) as tree_ad:
                    atlas = res.get_object_by_pathid(
                        tree_ad["atlasFile"], res.textassets
                    )
                    tex_list = []
                    for mat in (
                        res.get_object_by_pathid(i, res.materials)
                        for i in tree_ad["materials"]
                    ):
                        # mat = MaterialData
                        tex_rgb, tex_alpha = None, None
                        with TreeReader(mat) as tree_mat:
                            # Pick the RGB/Alpha textures out of the material's
                            # saved texture environment properties.
                            tex_envs = tree_mat["m_SavedProperties"]["m_TexEnvs"]
                            for tex in tex_envs:
                                if tex[0] == "_MainTex":
                                    tex_rgb = res.get_object_by_pathid(
                                        tex[1]["m_Texture"], res.texture2ds
                                    )
                                elif tex[0] == "_AlphaTex":
                                    tex_alpha = res.get_object_by_pathid(
                                        tex[1]["m_Texture"], res.texture2ds
                                    )
                            tex_list.append((tex_rgb, tex_alpha))

                    if not skel or not atlas or not tex_list:
                        raise ValueError("Incomplete Spine asset")

                    # Identity (not equality) check against the front/back
                    # SkeletonData found via CharacterAnimator, else fall back
                    # to heuristic guessing.
                    if any(mono_sd is f for f, _ in found_front_and_back):
                        sp_type = SpineType.BATTLE_FRONT
                    elif any(mono_sd is b for _, b in found_front_and_back):
                        sp_type = SpineType.BATTLE_BACK
                    else:
                        sp_type = SpineType.guess(skel, atlas)

                    spine = cls(skel, atlas, tex_list, sp_type)
                    spines.append(spine)
        except Exception:
            Logger.warn(
                f'ResolveSpine: Failed to handle skeletons in resource "{res.name}": {stacktrace()}'
            )
        return spines

    @classmethod
    def _find_front_and_back_skel_data(cls, res: "Resource"):
        """Yields (front, back) SkeletonData MonoBehaviour pairs.

        Locates CharacterAnimator typetrees and follows their `_front`/`_back`
        skeleton references down to the SkeletonData assets.
        """
        try:
            # ca = CharacterAnimator
            for _, tree_ca in cls._find_typetree_by_keys(
                res.monobehaviors, ["_animations", "_front", "_back"]
            ):
                mono_sa_front = res.get_object_by_pathid(
                    tree_ca["_front"]["skeleton"], res.monobehaviors
                )
                mono_sa_back = res.get_object_by_pathid(
                    tree_ca["_back"]["skeleton"], res.monobehaviors
                )
                # sa = SkeletonAnimation
                if mono_sa_front and mono_sa_back:
                    with TreeReader(mono_sa_front) as tree_sa:
                        mono_sd_front = res.get_object_by_pathid(
                            tree_sa["skeletonDataAsset"], res.monobehaviors
                        )
                    with TreeReader(mono_sa_back) as tree_sa:
                        mono_sd_back = res.get_object_by_pathid(
                            tree_sa["skeletonDataAsset"], res.monobehaviors
                        )
                    # sd = SkeletonData
                    if mono_sd_front and mono_sd_back:
                        yield mono_sd_front, mono_sd_back
        except Exception:
            Logger.warn(
                f'ResolveSpine: Failed to find front-and-back skeleton data in resource "{res.name}": {stacktrace()}'
            )

    @classmethod
    def _find_typetree_by_keys(
        cls, objs: Sequence[uc.Object], contains_keys: Sequence[str]
    ):
        """Yields `(object, typetree)` pairs whose typetree contains all given keys."""
        for obj in objs:
            with TreeReader(obj) as tree:
                if not all(k in tree for k in contains_keys):
                    continue
                yield obj, tree
280 |
281 |
def spine_resolve(
    abfile: str,
    destdir: str,
    on_processed: Optional[Callable] = None,
    on_file_queued: Optional[Callable] = None,
    on_file_saved: Optional[Callable] = None,
):
    """Extracts Spine assets from an AB file.

    :param abfile: Path to the AB file;
    :param destdir: Destination directory;
    :param on_processed: Callback `f()` for finished, `None` for ignore;
    :param on_file_queued: Callback `f()` invoked when a file was queued, `None` for ignore;
    :param on_file_saved: Callback `f(file_path_or_none_for_not_saved)`, `None` for ignore;
    :rtype: None;
    """
    # Imported lazily to avoid a circular import between ResolveSpine and ResolveAB.
    from .ResolveAB import Resource

    if osp.isfile(abfile):
        try:
            resource = Resource(UnityPy.load(abfile))
            spine_assets = SpineAsset.from_resource(resource)
            if len(spine_assets) >= 10:
                Logger.info(
                    f'ResolveSpine: "{resource.name}" has {len(spine_assets)} spines, unpacking it may take a long time.'
                )
            for asset in spine_assets:
                asset.add_prefix()
                asset.save_spine(destdir, on_file_queued, on_file_saved)
        except BaseException as exc:
            Logger.error(
                f'ResolveSpine: Error occurred while unpacking file "{abfile}": Exception{type(exc)} {exc}'
            )
    if on_processed:
        on_processed()
320 |
321 |
def main(
    src: str,
    destdir: str,
    do_del: bool = False,
    separate: bool = True,
):
    """Extracts all Spine assets from the given directory or a given AB file.

    :param src: Source directory or file;
    :param destdir: Destination directory;
    :param do_del: Whether to delete the existing files in the destination directory, `False` for default;
    :param separate: Whether to sort the extracted files by their source AB file path;
    :rtype: None;
    """
    print("\n正在解析路径...", s=1)
    Logger.info("ResolveSpine: Retrieving file paths...")
    src = osp.normpath(osp.realpath(src))
    destdir = osp.normpath(osp.realpath(destdir))
    # Collect the AB files to process: the single file itself, or a recursive listing.
    flist = [src] if osp.isfile(src) else get_filelist(src)
    flist = list(filter(is_ab_file, flist))
    if do_del:
        print("\n正在清理...", s=1)
        rmdir(destdir)
    SafeSaver.get_instance().reset_counter()
    thread_ctrl = ThreadCtrl()
    ui = UICtrl()
    # Two progress stages: AB files processed, then extracted files saved.
    tr_processed = TaskReporter(50, len(flist))
    tr_file_saving = TaskReporter(1)
    tracker = TaskReporterTracker(tr_processed, tr_file_saving)
    ui.reset()
    ui.loop_start()
    for i in flist:
        # (i stands for a file's path)
        ui.request(
            [
                "正在批量导出Spine模型...",
                tracker.to_progress_bar_str(),
                f"当前目录:\t{osp.basename(osp.dirname(i))}",
                f"当前文件:\t{osp.basename(i)}",
                f"累计解包:\t{tr_processed.to_progress_str()}",
                f"累计导出:\t{tr_file_saving.to_progress_str()}",
                f"预计剩余时间:\t{tracker.to_eta_str()}",
                f"累计消耗时间:\t{tracker.to_rt_str()}",
                f"运行状态统计:\t{Logger.to_ew_stats_str()}",
            ]
        )
        # NOTE(review): same path math as ResolveAB.main — `.replace(src, "")`
        # assumes src only appears as the path prefix; confirm for edge cases.
        subdestdir = osp.dirname(i).strip(osp.sep).replace(src, "").strip(osp.sep)
        curdestdir = (
            destdir
            if osp.samefile(i, src)
            else (
                osp.join(destdir, subdestdir, osp.splitext(osp.basename(i))[0])
                if separate
                else osp.join(destdir, subdestdir)
            )
        )
        # Each AB file is unpacked on its own worker thread.
        thread_ctrl.run_subthread(
            spine_resolve,
            (
                i,
                curdestdir,
                tr_processed.report,
                tr_file_saving.update_demand,
                tr_file_saving.report,
            ),
            name=f"SpineThread:{id(i)}",
        )
    ui.reset()
    ui.loop_stop()
    # Wait until all worker threads and pending file saves have finished.
    while (
        thread_ctrl.count_subthread()
        or not SafeSaver.get_instance().completed()
        or tracker.get_progress() < 1
    ):
        ui.request(
            [
                "正在批量导出Spine模型...",
                tracker.to_progress_bar_str(),
                f"累计解包:\t{tr_processed.to_progress_str()}",
                f"累计导出:\t{tr_file_saving.to_progress_str()}",
                f"预计剩余时间:\t{tracker.to_eta_str()}",
                f"累计消耗时间:\t{tracker.to_rt_str()}",
                f"运行状态统计:\t{Logger.to_ew_stats_str()}",
            ]
        )
        ui.refresh(post_delay=0.1)
    ui.reset()
    print("\nSpine模型批量导出结束!", s=1)
    print(f"  累计解包 {tr_processed.get_done()} 个文件")
    print(f"  累计导出 {tr_file_saving.get_done()} 个文件")
    print(f"  此项用时 {round(tracker.get_rt(), 1)} 秒")
412 |
--------------------------------------------------------------------------------
/src/VoiceDataDist.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2022-2025, Harry Huang
3 | # @ BSD 3-Clause License
4 | import json
5 | import os.path as osp
6 | from datetime import datetime
7 |
8 | from .utils.GlobalMethods import print
9 | from .utils.Logger import Logger
10 |
11 |
class VoiceDist:
    """Aggregates per-language voice sub-datasets into one `voice_data.json`.

    Reads a `voice_data_part.json` from each language variation directory,
    merges the entries per character, and exports the combined dataset.
    """

    L10N = {
        # lang_id -> dict (id -> translation)
        "zh-CN": {
            "JP": "日语",
            "CN": "普通话",
            "KR": "韩语",
            "EN": "英语",
            "CUSTOM": "个性化",
            "001": "任命助理",
            "002": "交谈1",
            "003": "交谈2",
            "004": "交谈3",
            "005": "晋升后交谈1",
            "006": "晋升后交谈2",
            "007": "信赖提升后交谈1",
            "008": "信赖提升后交谈2",
            "009": "信赖提升后交谈3",
            "010": "闲置",
            "011": "干员报到",
            "012": "观看作战记录",
            "013": "精英化晋升1",
            "014": "精英化晋升2",
            "017": "编入队伍",
            "018": "任命队长",
            "019": "行动出发",
            "020": "行动开始",
            "021": "选中干员1",
            "022": "选中干员2",
            "023": "部署1",
            "024": "部署2",
            "025": "作战中1",
            "026": "作战中2",
            "027": "作战中3",
            "028": "作战中4",
            "029": "完成高难行动",
            "030": "3星行动结束",
            "031": "非3星行动结束",
            "032": "行动失败",
            "033": "进驻设施",
            "034": "戳一下",
            "036": "信赖触摸",
            "037": "标题",
            "038": "新年祝福",
            "042": "问候",
            "043": "生日祝福",
            "044": "周年庆典",
        },
        "zh-TW": {
            "JP": "日語",
            "CN": "普通話",
            "KR": "韓語",
            "EN": "英語",
            "CUSTOM": "個性化",
            "001": "任命助理",
            "002": "交談1",
            "003": "交談2",
            "004": "交談3",
            "005": "晉升後交談1",
            "006": "晉升後交談2",
            "007": "信賴提升後交談1",
            "008": "信賴提升後交談2",
            "009": "信賴提升後交談3",
            "010": "閒置",
            "011": "幹員報到",
            "012": "觀看作戰紀錄",
            "013": "精英化晉升1",
            "014": "精英化晉升2",
            "017": "編入隊伍",
            "018": "任命隊長",
            "019": "行動出發",
            "020": "行動開始",
            "021": "選中幹員1",
            "022": "選中幹員2",
            "023": "部署1",
            "024": "部署2",
            "025": "作戰中1",
            "026": "作戰中2",
            "027": "作戰中3",
            "028": "作戰中4",
            "029": "完成高難度行動",
            "030": "3星行動結束",
            "031": "非3星行動結束",
            "032": "行動失敗",
            "033": "進駐設施",
            "034": "戳一下",
            "036": "信賴觸摸",
            "037": "標題",
            "038": "新年祝福",
            "042": "問候",
            "043": "生日祝福",
            "044": "週年慶典",
        },
        "jp-JP": {
            "JP": "日本語",
            "CN": "標準中国語",
            "KR": "韓国語",
            "EN": "英語",
            "CUSTOM": "カスタマイズ",
            "001": "アシスタントを任命",
            "002": "会話1",
            "003": "会話2",
            "004": "会話3",
            "005": "昇進後の会話1",
            "006": "昇進後の会話2",
            "007": "信頼度アップ後の会話1",
            "008": "信頼度アップ後の会話2",
            "009": "信頼度アップ後の会話3",
            "010": "待機中",
            "011": "オペレーター報告",
            "012": "作戦記録の確認",
            "013": "エリート昇進1",
            "014": "エリート昇進2",
            "017": "チームに編成",
            "018": "隊長を任命",
            "019": "作戦出発",
            "020": "作戦開始",
            "021": "オペレーター選択1",
            "022": "オペレーター選択2",
            "023": "配置1",
            "024": "配置2",
            "025": "作戦中1",
            "026": "作戦中2",
            "027": "作戦中3",
            "028": "作戦中4",
            "029": "高難易度作戦完了",
            "030": "3星評価作戦終了",
            "031": "非3星評価作戦終了",
            "032": "作戦失敗",
            "033": "施設に入駐",
            "034": "タップ1回",
            "036": "信頼タッチ",
            "037": "タイトル",
            "038": "新年のご挨拶",
            "042": "挨拶",
            "043": "誕生日祝い",
            "044": "周年記念",
        },
        "ko-KR": {
            "JP": "일본어",
            "CN": "표준 중국어",
            "KR": "한국어",
            "EN": "영어",
            "CUSTOM": "개인화",
            "001": "보조 임명",
            "002": "대화1",
            "003": "대화2",
            "004": "대화3",
            "005": "승진 후 대화1",
            "006": "승진 후 대화2",
            "007": "신뢰도 상승 후 대화1",
            "008": "신뢰도 상승 후 대화2",
            "009": "신뢰도 상승 후 대화3",
            "010": "대기 중",
            "011": "오퍼레이터 보고",
            "012": "작전 기록 확인",
            "013": "엘리트 승진1",
            "014": "엘리트 승진2",
            "017": "팀에 배치",
            "018": "대장 임명",
            "019": "작전 출발",
            "020": "작전 시작",
            "021": "오퍼레이터 선택1",
            "022": "오퍼레이터 선택2",
            "023": "배치1",
            "024": "배치2",
            "025": "작전 중1",
            "026": "작전 중2",
            "027": "작전 중3",
            "028": "작전 중4",
            "029": "고난이도 작전 완료",
            "030": "3성 작전 종료",
            "031": "비 3성 작전 종료",
            "032": "작전 실패",
            "033": "시설에 입주",
            "034": "한 번 눌러요",
            "036": "신뢰 터치",
            "037": "타이틀",
            "038": "새해 인사",
            "042": "인사",
            "043": "생일 축하",
            "044": "기념일",
        },
        "en-US": {
            "JP": "Japanese",
            "CN": "Mandarin",
            "KR": "Korean",
            "EN": "English",
            "CUSTOM": "Custom",
            "001": "Appoint Assistant",
            "002": "Talk 1",
            "003": "Talk 2",
            "004": "Talk 3",
            "005": "Post-Promotion Talk 1",
            "006": "Post-Promotion Talk 2",
            "007": "Trust Increase Talk 1",
            "008": "Trust Increase Talk 2",
            "009": "Trust Increase Talk 3",
            "010": "Idle",
            "011": "Operator Reporting",
            "012": "View Mission Records",
            "013": "Elite Promotion 1",
            "014": "Elite Promotion 2",
            "017": "Deploy to Team",
            "018": "Appoint Leader",
            "019": "Mission Departure",
            "020": "Mission Start",
            "021": "Selected Operator 1",
            "022": "Selected Operator 2",
            "023": "Deploy 1",
            "024": "Deploy 2",
            "025": "In Mission 1",
            "026": "In Mission 2",
            "027": "In Mission 3",
            "028": "In Mission 4",
            "029": "Hard Mission End",
            "030": "3-Star Mission End",
            "031": "Non-3-Star Mission End",
            "032": "Mission Failure",
            "033": "Enter Facility",
            "034": "Tap Once",
            "036": "Trust Touch",
            "037": "Title",
            "038": "New Year Greeting",
            "042": "Greeting",
            "043": "Birthday Wishes",
            "044": "Anniversary Celebration",
        },
    }
    # Version triple consumed by ArkPets — semantics defined downstream.
    ARK_PETS_COMPATIBILITY = [4, 0, 0]
    SERVER_REGION = "zh_CN"
    FORMAT = ".ogg"
    VARIATIONS_DIR = {
        # variation_id -> dirname
        "JP": "voice",
        "CN": "voice_cn",
        "KR": "voice_kr",
        "EN": "voice_en",
        "CUSTOM": "voice_custom",
    }
    TYPES = {
        # type -> regex
        "common": r"^CN_\d\d\d$",
        "effected": r"^FX_\d\d\d(_\d)?$",
    }
    DATA_PART_FILE = "voice_data_part.json"

    def __init__(self):
        """Initializes the aggregated dataset skeleton with static metadata."""
        self.data = {
            "localizations": VoiceDist.L10N,
            "storageDirectory": VoiceDist.VARIATIONS_DIR,
            "gameDataVersionDescription": f"Producer: ArkUnpacker 3\nDate: {datetime.now().date()}\n",
            "gameDataServerRegion": VoiceDist.SERVER_REGION,
            "data": {},
            "audioTypes": VoiceDist.TYPES,
            "audioFormat": VoiceDist.FORMAT,
            "arkPetsCompatibility": VoiceDist.ARK_PETS_COMPATIBILITY,
        }

    def retrieve(self):
        """Reads each language variation's sub-dataset and merges it into `self.data`.

        Missing directories or sub-dataset files are logged and reported but do
        not abort the merge: the resulting dataset may simply be incomplete.

        :rtype: None;
        """
        Logger.info("VoiceDataDist: Starting retrieve voice data.")
        print("读取各语种的子数据集...")
        failed = False
        for var, var_dir in VoiceDist.VARIATIONS_DIR.items():
            cnt = 0
            if not osp.isdir(var_dir):
                Logger.error(f"VoiceDataDist: Dir {var_dir} not found.")
                print(f"\t未找到语种文件夹 {var_dir}", c=3)
                failed = True
            else:
                data_part_file = osp.join(var_dir, VoiceDist.DATA_PART_FILE)
                if not osp.isfile(data_part_file):
                    Logger.error(
                        f"VoiceDataDist: Data part file {data_part_file} not found."
                    )
                    print(f"\t未找到子数据集文件 {data_part_file}", c=3)
                    failed = True
                else:
                    # Fix: close the file deterministically (previously the
                    # handle from a bare open() was never closed).
                    with open(data_part_file, "r", encoding="UTF-8") as f:
                        data_part: dict = json.load(f)
                    for cha, lst in data_part.items():
                        cnt += 1
                        if cha not in self.data["data"]:
                            self.data["data"][cha] = {"variations": {}}
                        self.data["data"][cha]["variations"][var] = lst
            Logger.info(
                f"VoiceDataDist: Variation {var} includes {cnt} voice file."
            )
            print(f"\t语种 {var} 包含 {cnt} 套语音文件", c=2)
        if failed:
            print("读取子数据集时发生警告,因此总数据集可能不完整", c=1)
        else:
            print("读取子数据集完毕", c=2)

    def sort(self):
        """Sorts the per-character data mapping by character key.

        :rtype: None;
        """
        self.data["data"] = dict(sorted(self.data["data"].items()))

    def export_json(self):
        """Writes the aggregated dataset to `voice_data.json` (compact JSON).

        :rtype: None;
        """
        Logger.info("VoiceDataDist: Writing to json.")
        with open("voice_data.json", "w", encoding="UTF-8") as f:
            json.dump(
                self.data, f, ensure_ascii=False, indent=None, separators=(",", ":")
            )
        Logger.info("VoiceDataDist: Succeeded in writing to json.")
        print("\n已写入总数据集文件", c=2)
318 |
319 |
320 | ########## Main-主程序 ##########
def main():
    # Build the complete voice dataset: gather the per-language partial
    # datasets, sort the character entries, then write `voice_data.json`.
    vd = VoiceDist()
    vd.retrieve()
    vd.sort()
    vd.export_json()
326 |
--------------------------------------------------------------------------------
/src/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2022-2025, Harry Huang
3 | # @ BSD 3-Clause License
4 |
--------------------------------------------------------------------------------
/src/lz4ak/Block.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2022-2025, Harry Huang
3 | # @ BSD 3-Clause License
4 | from typing import Union
5 |
6 | import lz4
7 | import lz4.block
8 | from ..utils.Profiler import CodeProfiler
9 |
10 | ByteString = Union[bytes, bytearray, memoryview]
11 |
12 |
13 | def _read_extra_length(data: ByteString, cur_pos: int, max_pos: int) -> tuple[int, int]:
14 | l = 0
15 | while cur_pos < max_pos:
16 | b = data[cur_pos]
17 | l += b
18 | cur_pos += 1
19 | if b != 0xFF:
20 | break
21 | return l, cur_pos
22 |
23 |
def decompress_lz4ak(compressed_data: ByteString, uncompressed_size: int) -> bytes:
    """Decompresses the given data block using LZ4AK algorithm.

    LZ4AK is a lightly obfuscated LZ4 block stream: the two nibbles of each
    sequence token are swapped, and the 2-byte match offset is stored with
    its bytes swapped relative to standard LZ4. This function rewrites the
    stream in place back into standard LZ4 form, then delegates the actual
    decompression to ``lz4.block.decompress``.

    *Special thanks to Kengxxiao (https://github.com/Kengxxiao).*

    *Algorithm adapted from MooncellWiki:UnityPy (https://github.com/MooncellWiki/UnityPy)*

    :param compressed_data: The raw compressed data bytes;
    :param uncompressed_size: The size of the uncompressed data;
    :returns: The decompressed data bytes;
    :rtype: bytes;
    """
    with CodeProfiler("lz4ak_fix"):
        ip = 0  # read cursor into the compressed stream
        op = 0  # running count of bytes the sequences would produce
        fixed_compressed_data = bytearray(compressed_data)
        compressed_size = len(compressed_data)

        while ip < compressed_size:
            # Sequence token: restore the standard (literal<<4 | match)
            # nibble order from LZ4AK's swapped layout.
            literal_length = fixed_compressed_data[ip] & 0xF
            match_length = (fixed_compressed_data[ip] >> 4) & 0xF
            fixed_compressed_data[ip] = (literal_length << 4) | match_length
            ip += 1

            # Literals: nibble value 0xF means an extended length follows.
            if literal_length == 0xF:
                l, ip = _read_extra_length(fixed_compressed_data, ip, compressed_size)
                literal_length += l
            ip += literal_length
            op += literal_length
            if op >= uncompressed_size:
                break  # End of block

            # Match copy: swap the 2-byte offset back to LZ4's byte order.
            offset = (fixed_compressed_data[ip] << 8) | fixed_compressed_data[ip + 1]
            fixed_compressed_data[ip] = offset & 0xFF
            fixed_compressed_data[ip + 1] = (offset >> 8) & 0xFF
            ip += 2
            if match_length == 0xF:
                l, ip = _read_extra_length(fixed_compressed_data, ip, compressed_size)
                match_length += l
            match_length += 4  # Min match
            op += match_length

    return lz4.block.decompress(fixed_compressed_data, uncompressed_size)
70 |
--------------------------------------------------------------------------------
/src/lz4ak/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2022-2025, Harry Huang
3 | # @ BSD 3-Clause License
4 | from . import Block as Block
5 |
--------------------------------------------------------------------------------
/src/utils/ArgParser.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2022-2025, Harry Huang
3 | # @ BSD 3-Clause License
4 | import argparse
5 | import os
6 |
7 |
class ArgParserFailure(Exception):
    """Raised by `_ArkUnpackerArgParser.error` instead of exiting the
    process when CLI argument parsing fails; carries the error message.

    The former explicit ``__init__`` only forwarded to ``super().__init__``
    and was removed as redundant; construction behavior is unchanged.
    """
11 |
12 |
13 | class _ArkUnpackerArgParser(argparse.ArgumentParser):
14 | def __init__(self, prog: str, description: str, epilog: str):
15 | super().__init__(prog=prog, description=description, epilog=epilog)
16 |
17 | def error(self, message: str):
18 | raise ArgParserFailure(message)
19 |
20 | def validate_input_output_arg(self, args, allow_file_input: bool = False):
21 | if not getattr(args, "input", None):
22 | self.error("input should be defined in this mode")
23 | if not getattr(args, "output", None):
24 | self.error("output should be defined in this mode")
25 | if not allow_file_input and os.path.isfile(args.input):
26 | self.error("input should be a directory, not file")
27 | if not os.path.isdir(args.input) and not (
28 | allow_file_input and os.path.isfile(args.input)
29 | ):
30 | self.error(
31 | f"input should be a {'file or ' if allow_file_input else ''}directory that exists"
32 | )
33 |
34 | def validate_logging_level_arg(self, args):
35 | if getattr(args, "logging_level", None) is None:
36 | return
37 | if args.logging_level not in range(5):
38 | self.error("invalid logging level")
39 |
40 | @staticmethod
41 | def instantiate():
42 | parser = _ArkUnpackerArgParser(
43 | prog="ArkUnpacker",
44 | description="Arknights Assets Unpacker. Use no argument to run to enter the interactive CLI mode.",
45 | epilog="GitHub: https://github.com/isHarryh/Ark-Unpacker",
46 | )
47 | parser.add_argument(
48 | "-v",
49 | "--version",
50 | action="store_true",
51 | help="show a version message and exit",
52 | )
53 | parser.add_argument(
54 | "-m",
55 | "--mode",
56 | choices=["ab", "cb", "fb", "sp"],
57 | help="working mode, ab=resolve-ab, cb=combine-image, fb=decode-flatbuffers, sp=resolve-spine",
58 | )
59 | parser.add_argument("-i", "--input", help="source file or directory path")
60 | parser.add_argument("-o", "--output", help="destination directory path")
61 | parser.add_argument(
62 | "-d",
63 | "-delete",
64 | action="store_true",
65 | help="delete the existed destination directory first",
66 | )
67 | parser.add_argument(
68 | "--image",
69 | action="store_true",
70 | help="in resolve ab mode: export image files",
71 | )
72 | parser.add_argument(
73 | "--text",
74 | action="store_true",
75 | help="in resolve ab mode: export text or binary files",
76 | )
77 | parser.add_argument(
78 | "--audio",
79 | action="store_true",
80 | help="in resolve ab mode: export audio files",
81 | )
82 | parser.add_argument(
83 | "--spine",
84 | action="store_true",
85 | help="in resolve ab mode: export spine asset files",
86 | )
87 | parser.add_argument(
88 | "--mesh",
89 | action="store_true",
90 | help="in resolve ab mode: export mesh resources",
91 | )
92 | parser.add_argument(
93 | "-g",
94 | "--group",
95 | action="store_true",
96 | help="in resolve ab mode: group files into separate directories named by their source ab file",
97 | )
98 | parser.add_argument(
99 | "-l",
100 | "--logging-level",
101 | choices=range(5),
102 | type=int,
103 | help="logging level, 0=none, 1=error, 2=warn, 3=info, 4=debug",
104 | )
105 | return parser
106 |
107 |
# Module-level singleton parser used by the CLI entry point.
INSTANCE = _ArkUnpackerArgParser.instantiate()
109 |
--------------------------------------------------------------------------------
/src/utils/AtlasFile.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2022-2025, Harry Huang
3 | # @ BSD 3-Clause License
4 | from typing import Union
5 |
6 | import re
7 | from io import TextIOBase
8 |
9 |
class AtlasFile:
    """Parser for Spine/libgdx `.atlas` texture-atlas files.

    An atlas consists of one or more "pages" separated by blank lines.
    Each page starts with its image filename line, followed by unindented
    ``key: value`` page attributes, followed by regions: an unindented
    region-name line, then indented ``key: value`` region attributes.
    """

    # Converters for page-level attribute values, keyed by attribute name;
    # attributes without an entry are kept as raw strings.
    PAGE_PARSERS = {
        "size": lambda v: tuple(map(int, re.split(r"\s*,\s*", v))),
        "filter": lambda v: tuple(map(str.strip, re.split(r"\s*,\s*", v))),
        "format": lambda v: v,
        "repeat": lambda v: v,
    }
    # Converters for region-level attribute values.
    REGION_PARSERS = {
        "rotate": lambda v: v.lower() == "true",
        "xy": lambda v: tuple(map(int, re.split(r"\s*,\s*", v))),
        "size": lambda v: tuple(map(int, re.split(r"\s*,\s*", v))),
        "orig": lambda v: tuple(map(int, re.split(r"\s*,\s*", v))),
        "offset": lambda v: tuple(map(int, re.split(r"\s*,\s*", v))),
        "index": lambda v: int(v),
    }

    @classmethod
    def loads(cls, text: str):
        """Parses atlas text into ``{"pages": [...]}``, where each page dict
        holds ``filename``, its parsed page attributes, and a ``regions``
        list of ``{"name": ..., **attributes}`` dicts.

        :param text: The full text of an atlas file;
        :returns: The parsed structure;
        :rtype: dict;
        """
        pages = []
        lines = [line.rstrip("\n") for line in text.splitlines()]
        page_blocks = []

        # Split the input into page blocks at blank lines.
        current_page = []
        for line in lines:
            stripped = line.strip()
            if not stripped:
                if current_page:
                    page_blocks.append(current_page)
                    current_page = []
            else:
                current_page.append(line)
        if current_page:
            page_blocks.append(current_page)

        for block in page_blocks:
            if not block:
                continue
            page_info = {"regions": []}
            # The first line of every page block is the texture filename.
            page_info["filename"] = block[0].strip()
            remaining_lines = block[1:]
            page_attrs = {}
            current_line = 0
            # Page attributes: unindented lines that contain a colon.
            while current_line < len(remaining_lines):
                line = remaining_lines[current_line]
                if not line.startswith((" ", "\t")) and ":" in line:
                    key, value = cls._parse_line(line)
                    parsed_value = cls.PAGE_PARSERS.get(key, lambda v: v)(value)
                    page_attrs[key] = parsed_value
                    current_line += 1
                else:
                    break
            # Regions: unindented name line, then indented attribute lines.
            regions = []
            regions_lines = remaining_lines[current_line:]
            i = 0
            while i < len(regions_lines):
                line = regions_lines[i]
                if not line.startswith((" ", "\t")):
                    region_name = line.strip()
                    region_attrs = {}
                    i += 1
                    while i < len(regions_lines) and regions_lines[i].startswith(
                        (" ", "\t")
                    ):
                        attr_line = regions_lines[i].strip()
                        key, value = cls._parse_line(attr_line)
                        parsed_value = cls.REGION_PARSERS.get(key, lambda v: v)(value)
                        region_attrs[key] = parsed_value
                        i += 1
                    regions.append({"name": region_name, **region_attrs})
                else:
                    # Indented line with no preceding region name: skip it.
                    i += 1
            page_info.update(page_attrs)
            page_info["regions"] = regions
            pages.append(page_info)
        return {"pages": pages}

    @classmethod
    def load(cls, file: Union[str, TextIOBase]):
        """Parses an atlas from a file path or an open text stream.

        :param file: A path string or a readable `TextIOBase`;
        :returns: The parsed structure, see `loads`;
        :rtype: dict;
        :raises TypeError: If `file` is neither a str nor a TextIOBase;
        """
        if isinstance(file, str):
            with open(file, "r") as f:
                text = f.read()
        elif isinstance(file, TextIOBase):
            text = file.read()
        else:
            raise TypeError("Expected str or TextIOBase")
        return cls.loads(text)

    @classmethod
    def dumps(cls, data: dict):
        """Serializes atlas data back to text. Not implemented yet."""
        raise NotImplementedError()

    @classmethod
    def dump(cls, data: dict, file: Union[str, TextIOBase]):
        """Serializes atlas data to a path or stream (delegates to `dumps`,
        so this currently raises NotImplementedError as well).

        :raises TypeError: If `file` is neither a str nor a TextIOBase;
        """
        text = cls.dumps(data)
        if isinstance(file, str):
            with open(file, "w") as f:
                f.write(text)
        elif isinstance(file, TextIOBase):
            file.write(text)
        else:
            raise TypeError("Expected str or TextIOBase")

    @staticmethod
    def _parse_line(line):
        # Splits "key: value" on the first colon, trimming whitespace
        # around both the colon and the whole line.
        line = line.strip()
        key, value = re.split(r"\s*:\s*", line, 1)
        return key, value

    @staticmethod
    def _serialize_value(key, value):
        # Inverse of the value parsers above; presumably support for
        # `dumps` — not referenced elsewhere in the visible code.
        if isinstance(value, tuple):
            return ",".join(map(str, value))
        elif isinstance(value, bool):
            return "true" if value else "false"
        elif key == "index" and isinstance(value, int):
            return str(value)
        else:
            return str(value)
128 |
129 |
if __name__ == "__main__":
    # Ad-hoc smoke test: parses a two-page sample atlas covering both the
    # "key: value" and the no-space "key:value" attribute forms, plus
    # special characters in region names, then prints the parsed result.
    data = """
example.png
size: 2048,2048
format: RGBA8888
filter: Linear,Linear
repeat: none
Example_Region_1
    rotate: false
    xy: 1900, 1800
    size: 65, 60
    orig: 65, 60
    offset: 0, 0
    index: -1
Example_Region_2
    rotate: true
    xy: 250, 450
    size: 100, 25
    orig: 100, 25
    offset: 0, 0
    index: 1

example_2.png
size: 1024,1024
format: RGBA8888
filter: Linear,Linear
repeat: none
Example_Region_Bad
    rotate:false
    xy:0,200
    size:230,125
    orig:230,125
    offset:0,0
    index:-1
Example_Region_/\\!@#$%
    rotate: false
    xy: 500, 450
    size: 140, 120
    orig: 140, 120
    offset: 0, 0
    index: -1

"""
    loaded = AtlasFile.loads(data)
    print(loaded)
175 |
--------------------------------------------------------------------------------
/src/utils/Config.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2022-2025, Harry Huang
3 | # @ BSD 3-Clause License
4 | import json
5 | import os
6 | import os.path as osp
7 |
8 | from .Logger import Logger
9 |
10 |
def get_cpu_count():
    """Returns the number of CPUs reported by the OS, always at least 1."""
    detected = os.cpu_count()
    if detected is None or detected <= 0:
        return 1
    return detected
14 |
15 |
class PerformanceLevel:
    """Enumeration class for performance level."""

    MINIMAL = 0
    LOW = 1
    STANDARD = 2
    HIGH = 3

    # Thread limits derived once from the detected CPU count.
    __CPU_COUNT = get_cpu_count()
    __LIMITS = {
        MINIMAL: 1,
        LOW: max(2, __CPU_COUNT // 2),
        STANDARD: max(4, __CPU_COUNT),
        HIGH: max(8, __CPU_COUNT * 2),
    }

    @staticmethod
    def get_thread_limit(performance_level: int):
        """Gets the maximum thread count according to the given performance level.

        Unknown levels fall back to the STANDARD mapping.
        """
        fallback = PerformanceLevel.__LIMITS[PerformanceLevel.STANDARD]
        return PerformanceLevel.__LIMITS.get(performance_level, fallback)
38 |
39 |
class Config:
    """Configuration class for ArkUnpacker (lazy singleton)."""

    __instance = None
    __config_path = "ArkUnpackerConfig.json"
    __file_encoding = "UTF-8"
    __default_config = {
        "log_file": "ArkUnpackerLogs.log",
        "log_level": Logger.LV_INFO,
        "performance_level": PerformanceLevel.STANDARD,
    }

    def __init__(self):
        """Not recommended to use. Please use the static methods."""
        self._config = {}

    def _get(self, key: str):
        # BUGFIX: use `.get` so unknown keys yield None, matching the
        # documented contract of `Config.get` (the old subscription raised
        # KeyError instead).
        return self._config.get(key)

    def _apply_to_logger(self):
        # Pushes the current log file path and level into the Logger.
        Logger.set_instance(self.get("log_file"), self.get("log_level"))
        Logger.set_level(self.get("log_level"))

    def _read_config(self):
        """Loads the config file, falling back to defaults on any failure.

        Fields with a missing or wrongly-typed value are individually
        reset to their defaults. The Logger is (re)configured in every
        branch, and the resulting config is written back to disk.
        """
        if osp.isfile(Config.__config_path):
            try:
                # BUGFIX: context manager closes the handle deterministically
                # (the old `json.load(open(...))` leaked it to the GC).
                with open(
                    Config.__config_path, "r", encoding=Config.__file_encoding
                ) as f:
                    loaded_config = json.load(f)
                if isinstance(loaded_config, dict):
                    for k, default_val in Config.__default_config.items():
                        self._config[k] = (
                            loaded_config[k]
                            if isinstance(loaded_config.get(k, None), type(default_val))
                            else default_val
                        )
                else:
                    # Route malformed roots through the fallback path below
                    # (previously this surfaced as a KeyError during `get`).
                    raise ValueError("config root is not a JSON object")
                self._apply_to_logger()
                Logger.info("Config: Applied config.")
            except Exception as arg:
                # BUGFIX: copy the defaults — assigning the class-level dict
                # directly would let later mutations corrupt the shared
                # default config.
                self._config = dict(Config.__default_config)
                self._apply_to_logger()
                Logger.error(
                    f"Config: Failed to parse config, now using default config, cause: {arg}"
                )
        else:
            self._config = dict(Config.__default_config)
            self._apply_to_logger()
            Logger.info("Config: Applied default config.")
        self._save_config()

    def _save_config(self):
        """Serializes the current config to disk; errors are only logged."""
        try:
            with open(
                self.__config_path, "w", encoding=Config.__file_encoding
            ) as f:
                json.dump(self._config, f, indent=4, ensure_ascii=False)
            Logger.info("Config: Saved config.")
        except Exception as arg:
            Logger.error(f"Config: Failed to save config, cause: {arg}")

    @staticmethod
    def _get_instance():
        # Lazily creates the singleton; first access also reads the file.
        if not Config.__instance:
            Config.__instance = Config()
            Config.__instance._read_config()
        return Config.__instance

    @staticmethod
    def get(key):
        """Gets the specified config field.

        :param key: The JSON key to the field;
        :returns: The value of the field, `None` if the key doesn't exist;
        :rtype: Any;
        """
        return Config._get_instance()._get(key)

    @staticmethod
    def read_config():
        """Reads the config from file, aka. deserialize the config.
        The default config will be used if the config file doesn't exist or an error occurs.
        The logging level of `Logger` class will be updated according to the config.
        """
        return Config._get_instance()._read_config()

    @staticmethod
    def save_config():
        """Saves the config to file, aka. serialize the config."""
        return Config._get_instance()._save_config()
131 |
--------------------------------------------------------------------------------
/src/utils/GlobalMethods.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2022-2025, Harry Huang
3 | # @ BSD 3-Clause License
4 | import builtins
5 | import importlib
6 | import os
7 | import os.path as osp
8 | import pkgutil
9 | import shutil
10 | import traceback
11 | import types
12 |
13 |
14 | ##### ↓ CLI related ↓ #####
15 |
16 | os.system("")
17 |
18 |
def color(c: int = 7, s: int = 0):
    """Gets a color controller string in interactive CLI.

    :param c: The color [0=black,1=red,2=green,3=yellow,4=blue,5=purple,6=cyan,7=white];
    :param s: The style [0=default,1=bold,4=underlined,5=flashing,7=inverted,8=none];
    :returns: The color controller string.
    :rtype: str;
    """
    return "\033[" + str(s) + ";3" + str(c) + "m"


def input(text: str = "", c: int = 7, s: int = 0, y: int = 0):
    """Enhanced version of inputting in interactive CLI.

    :param text: The text to display;
    :param c: The color [0=black,1=red,2=green,3=yellow,4=blue,5=purple,6=cyan,7=white];
    :param s: The style [0=default,1=bold,4=underlined,5=flashing,7=inverted,8=none];
    :param y: The y-position of the line to print or overwrite [0=append];
    :returns: The inputted text;
    :rtype: str;
    """
    # When a row was requested, clear it and jump there first;
    # "\033[?25h" re-shows the cursor for typing.
    goto = f"\033[K\033[{y};1H" if y > 0 else ""
    return builtins.input(f"{goto}{color(c, s)}{text}\033[?25h")


def print(obj: object = "", c: int = 7, s: int = 0, y: int = 0):
    """Enhanced version of printing in interactive CLI.

    :param obj: The object to print;
    :param c: The color [0=black,1=red,2=green,3=yellow,4=blue,5=purple,6=cyan,7=white];
    :param s: The style [0=default,1=bold,4=underlined,5=flashing,7=inverted,8=none];
    :param y: The y-position of the line to print or overwrite [0=append];
    :rtype: None;
    """
    # "\033[?25l" hides the cursor while drawing; jump to row `y` if asked.
    goto = f"\033[K\033[{y};{1}H" if y > 0 else ""
    builtins.print(f"\033[?25l{goto}{color(c, s)}{obj}")
55 |
56 |
def clear(use_ansi: bool = False):
    """Clears the CLI output.

    :param use_ansi: If True, emit the ANSI erase-display sequence instead
        of spawning a shell ``cls``/``clear`` command;
    :rtype: None;
    """
    if use_ansi:
        builtins.print("\033[2J")
        return
    command = "cls" if os.name == "nt" else "clear"
    os.system(command)
66 |
67 |
def title(text: str):
    """Sets the CLI window title. Windows only; a no-op elsewhere.

    :param text: The text of the title;
    :rtype: None;
    """
    if os.name != "nt":
        return
    os.system(f"title {text}")
76 |
77 |
def stacktrace():
    """Returns the formatted traceback of the current exception as a string."""
    formatted = traceback.format_exc()
    return formatted
80 |
81 |
82 | ##### ↓ IO related ↓ #####
83 |
84 |
def mkdir(path: str):
    """Creates a directory (and missing parents), ignoring all errors.

    :param path: Path to the directory to be created;
    :rtype: None;
    """
    try:
        # BUGFIX: the old code used `strip("/\\")`, which also removed
        # LEADING separators and silently turned absolute paths into
        # paths relative to the CWD. Only trailing separators are trimmed.
        path = path.strip().rstrip("/\\")
        os.makedirs(path, exist_ok=True)
    except BaseException:
        pass
96 |
97 |
def rmdir(path: str):
    """Recursively deletes a directory tree, ignoring any errors
    (including the directory not existing)."""
    shutil.rmtree(path, ignore_errors=True)
101 |
102 |
def get_dir_size(path: str):
    """Gets the size of the given directory.

    :param path: Path to the directory;
    :returns: Size in bytes;
    :rtype: int;
    """
    # Re-check isfile to skip entries that vanished since listing.
    return sum(osp.getsize(f) for f in get_filelist(path) if osp.isfile(f))
116 |
117 |
def get_filelist(path: str, max_depth=0):
    """Gets a list containing all the files in the given dir and its sub dirs.
    Note that if `max_depth` is unlimited, `os.walk` (the most efficient
    way) is used instead of recursive `os.listdir` calls.

    :param path: Path to the specified parent dir;
    :param max_depth: Max searching depth, `0` for unlimited;
    :returns: A list of file paths;
    :rtype: list[str];
    """
    lst = []
    max_depth = int(max_depth)
    if max_depth <= 0:
        for root, _, files in os.walk(path):
            for file in files:
                lst.append(osp.join(root, file))
    else:
        # BUGFIX: the old depth-limited branch returned every top-level
        # entry (directories included) and never recursed. It now returns
        # files only and honors `max_depth` levels of recursion.
        for name in os.listdir(path):
            full = osp.join(path, name)
            if osp.isfile(full):
                lst.append(full)
            elif osp.isdir(full) and max_depth > 1:
                lst.extend(get_filelist(full, max_depth - 1))
    return lst
138 |
139 |
def get_dirlist(path: str, max_depth=0):
    """Gets a list containing all the sub dirs in the given dir.
    Note that if `max_depth` is unlimited, `os.walk` (the most efficient
    way) is used instead of recursive `os.listdir` calls.

    :param path: Path to the specified parent dir;
    :param max_depth: Max searching depth, `0` for unlimited;
    :returns: A list of directory paths;
    :rtype: list[str];
    """
    lst = []
    max_depth = int(max_depth)
    if max_depth <= 0:
        for root, dirs, _ in os.walk(path):
            for i in dirs:
                lst.append(osp.join(root, i))
    else:
        for name in os.listdir(path):
            full = osp.join(path, name)
            if osp.isdir(full):
                lst.append(full)
                # BUGFIX: the recursion previously called get_filelist(),
                # polluting the result with file paths; recurse with
                # get_dirlist() instead.
                if max_depth != 1:
                    lst.extend(get_dirlist(full, max_depth - 1))
    return lst
164 |
165 |
# Lower-case extensions recognized as raster images.
_EXT_IMAGE = (".png", ".jpg", ".jpeg", ".bmp", ".gif", ".tiff")


def is_image_file(path: str):
    """Returns `True` if the given file is an image judging from its path.

    :param path: Path;
    :returns: `True` if the file is an image;
    :rtype: bool;
    """
    # str.endswith accepts a tuple of suffixes directly.
    return path.lower().endswith(_EXT_IMAGE)
177 |
178 |
# Lower-case extensions of non-image asset types we already recognize.
_EXT_KNOWN = (
    ".atlas",
    ".skel",
    ".wav",
    ".mp3",
    ".m4a",
    ".mp4",
    ".avi",
    ".mov",
    ".mkv",
    ".flv",
)
# Lower-case extensions of Unity asset-bundle containers.
_EXT_AB = (".ab", ".bin")


def is_known_asset_file(path: str):
    """Returns `True` if the given file is a known asset type judging from
    its name. Images, audios, videos and Spine assets all count.

    :param path: Path;
    :returns: `True` if the file is a known asset type;
    :rtype: bool;
    """
    if is_image_file(path):
        return True
    return path.lower().endswith(_EXT_KNOWN)


def is_ab_file(path: str):
    """Returns `True` if the given file is an asset bundle judging from its name.

    :param path: Path;
    :returns: `True` if the file is an asset bundle;
    :rtype: bool;
    """
    return path.lower().endswith(_EXT_AB)
213 |
214 |
def is_binary_file(path: str, guess_encoding: str = "UTF-8"):
    """Returns `True` if the given file is a binary file rather than text file.

    The file is considered text when its whole content decodes with
    `guess_encoding`; non-Unicode errors (e.g. a missing file) propagate.

    :param path: Path;
    :param guess_encoding: The specified charset to test the file;
    :returns: `True` if the file is a binary file;
    :rtype: bool;
    """
    try:
        with open(path, encoding=guess_encoding) as f:
            f.read()
    except UnicodeError:
        return True
    return False
229 |
230 |
231 | ##### ↓ Dynamic import related ↓ #####
232 |
233 |
def get_modules_from_package(package: types.ModuleType):
    """Imports and returns every non-package module found inside `package`."""
    prefix = package.__name__ + "."
    modules = []
    for _, name, is_pkg in pkgutil.walk_packages(package.__path__, prefix):
        if not is_pkg:
            modules.append(importlib.import_module(name))
    return modules


def get_modules_from_package_name(package_name: str):
    """Imports the package named `package_name` and returns its non-package
    submodules, see `get_modules_from_package`."""
    return get_modules_from_package(importlib.import_module(package_name))
--------------------------------------------------------------------------------
/src/utils/Logger.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2022-2025, Harry Huang
3 | # @ BSD 3-Clause License
4 | import queue
5 | import threading
6 | from datetime import datetime
7 |
8 | from .GlobalMethods import color
9 |
10 |
class Logger:
    """Logger class for ArkUnpacker"""

    # strftime pattern used for every log line's timestamp.
    __time_format = "%Y-%m-%d %H:%M:%S"
    __file_encoding = "UTF-8"
    # Singleton, created by set_instance()/set_instance_override().
    __instance = None

    # Logging levels: a message is recorded when its level is <= the
    # instance's current level.
    LV_NONE = 0
    LV_ERROR = 1
    LV_WARN = 2
    LV_INFO = 3
    LV_DEBUG = 4

    def __init__(self, log_file_path: str, level: int):
        """Not recommended to use. Please use the singleton instance.

        :param log_file_path: Path of the log file to append to;
        :param level: Initial logging level (one of the LV_* constants);
        """
        self._log_level = level
        self._log_file_path = log_file_path
        self._file = None  # placeholder; not used elsewhere in this class
        # Pending formatted lines, consumed by the daemon writer below.
        self._queue = queue.Queue()

        self._internal_lock = threading.Lock()
        # Per-level hit counters, see get_stats().
        self._level_stats = {}
        self._reset_stats()

        def loop(self: Logger):
            # Daemon writer: drains the queue in batches of up to 16 lines,
            # then appends them to the log file in one open/write cycle.
            # The poll timeout shrinks as the batch grows, so a filling
            # batch flushes quickly while an idle logger polls slowly.
            # Every error is swallowed: logging must never crash the app.
            buffer = []
            while True:
                try:
                    while len(buffer) < 0x10:
                        try:
                            timeout = 1 / ((len(buffer) + 1) ** 2)
                            buffer.append(self._queue.get(timeout=timeout))
                        except queue.Empty:
                            break
                    if buffer and self._log_file_path:
                        with open(
                            self._log_file_path,
                            "a",
                            encoding=Logger.__file_encoding,
                        ) as f:
                            f.writelines(buffer)
                        buffer.clear()
                except BaseException:
                    pass

        self.thread = threading.Thread(
            name=self.__class__.__name__, target=loop, args=(self,), daemon=True
        )
        self.thread.start()

    def _set_level(self, level: int):
        # Silently ignores None so callers may pass an unset config value.
        if level is not None:
            self._log_level = level

    def _reset_stats(self):
        # Zeroes every per-level counter (LV_NONE is never counted).
        with self._internal_lock:
            self._level_stats = {
                Logger.LV_ERROR: 0,
                Logger.LV_WARN: 0,
                Logger.LV_INFO: 0,
                Logger.LV_DEBUG: 0,
            }

    def _log(self, tag: str, msg: str):
        # Formats one line and enqueues it for the writer thread.
        try:
            self._queue.put(
                f"{datetime.now().strftime(Logger.__time_format)} [{tag}] {msg}\n"
            )
        except BaseException:
            pass

    def _error(self, msg: str):
        # Counts and writes the message when ERROR level is enabled.
        if self._log_level >= Logger.LV_ERROR:
            with self._internal_lock:
                self._level_stats[Logger.LV_ERROR] += 1
            self._log("ERROR", msg)

    def _warn(self, msg: str):
        # Counts and writes the message when WARN level is enabled.
        if self._log_level >= Logger.LV_WARN:
            with self._internal_lock:
                self._level_stats[Logger.LV_WARN] += 1
            self._log("WARN", msg)

    def _info(self, msg: str):
        # Counts and writes the message when INFO level is enabled.
        if self._log_level >= Logger.LV_INFO:
            with self._internal_lock:
                self._level_stats[Logger.LV_INFO] += 1
            self._log("INFO", msg)

    def _debug(self, msg: str):
        # Counts and writes the message when DEBUG level is enabled.
        if self._log_level >= Logger.LV_DEBUG:
            with self._internal_lock:
                self._level_stats[Logger.LV_DEBUG] += 1
            self._log("DEBUG", msg)

    @staticmethod
    def set_instance(log_file_path: str, level: int = LV_INFO):
        """Initializes the Logger static instance.
        If the instance has already been initialized, this method does nothing.

        :param log_file_path: The path to the log file;
        :param level: The logging level;
        :rtype: None;
        """
        if not Logger.__instance:
            Logger.set_instance_override(log_file_path, level)

    @staticmethod
    def set_instance_override(log_file_path: str, level: int = LV_INFO):
        """Initializes the Logger static instance forcibly.
        If the instance has already been initialized, this method overrides it.

        :param log_file_path: The path to the log file;
        :param level: The logging level;
        :rtype: None;
        """
        Logger.__instance = Logger(log_file_path, level)

    @staticmethod
    def set_level(level: int):
        """Sets the logging level.

        :param level: The new logging level;
        :rtype: None;
        """
        if Logger.__instance:
            Logger.__instance._set_level(level)

    @staticmethod
    def reset_stats():
        """Resets the logging level stats.

        :rtype: None;
        """
        if Logger.__instance:
            Logger.__instance._reset_stats()

    @staticmethod
    def get_stats(key: int):
        """Returns the logging level stats of the specified level key.

        :param key: The logging level;
        :returns: The number of hit count;
        :rtype: int;
        """
        if Logger.__instance:
            return Logger.__instance._level_stats[key]
        return 0

    @staticmethod
    def to_ew_stats_str():
        """Returns the error-warning logging level stats string.

        :returns: A string that can be printed to CLI;
        :rtype: str;
        """
        errors = Logger.get_stats(Logger.LV_ERROR)
        warns = Logger.get_stats(Logger.LV_WARN)
        if errors + warns <= 0:
            return f"{color()}正常"
        rst = ""
        if errors > 0:
            rst += f"{color(1)}{errors}{color()} 个错误"
        if warns > 0:
            if rst:
                rst += ","
            rst += f"{color(3)}{warns}{color()} 个警告"
        return rst

    @staticmethod
    def log(tag: str, msg: str):
        # Writes a line with a custom tag, bypassing level filter and stats.
        if Logger.__instance:
            Logger.__instance._log(tag, msg)

    @staticmethod
    def error(msg: str):
        # Static ERROR entry point; a no-op until set_instance() is called.
        if Logger.__instance:
            Logger.__instance._error(msg)

    @staticmethod
    def warn(msg: str):
        # Static WARN entry point; a no-op until set_instance() is called.
        if Logger.__instance:
            Logger.__instance._warn(msg)

    @staticmethod
    def info(msg: str):
        # Static INFO entry point; a no-op until set_instance() is called.
        if Logger.__instance:
            Logger.__instance._info(msg)

    @staticmethod
    def debug(msg: str):
        # Static DEBUG entry point; a no-op until set_instance() is called.
        if Logger.__instance:
            Logger.__instance._debug(msg)
204 |
--------------------------------------------------------------------------------
/src/utils/Profiler.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2022-2025, Harry Huang
3 | # @ BSD 3-Clause License
4 | from typing import Dict, List
5 |
6 | import threading
7 | import time
8 | from collections import defaultdict
9 | from contextlib import ContextDecorator
10 |
11 |
class CodeProfiler(ContextDecorator):
    """Utility class for profiling the time consumption of running a specified code block.
    Usage is shown below.

    ```
    with CodeProfiler('scope'):
        pass # The code block to test
    print(CodeProfiler.get_avg_time('scope'))
    ```
    """

    # Scope name -> list of recorded durations (seconds).
    _records: Dict[str, List[float]] = defaultdict(list)
    _internal_lock = threading.Lock()

    def __init__(self, name: str):
        self._name = name
        self._start_time = None

    def __enter__(self):
        self._start_time = time.perf_counter()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if self._start_time:
            with CodeProfiler._internal_lock:
                history = CodeProfiler._records[self._name]
                # Cap the history at 65536 samples per scope.
                while len(history) >= 65536:
                    history.pop(0)
                history.append(time.perf_counter() - self._start_time)
        return False  # Hand down the exception

    @staticmethod
    def get_avg_time(name: str):
        """Mean recorded duration for `name`, or None if nothing recorded."""
        samples = CodeProfiler._records.get(name, None)
        if not samples:
            return None
        return sum(samples) / len(samples)

    @staticmethod
    def get_avg_time_all():
        """Mean recorded duration for every known scope."""
        return {k: CodeProfiler.get_avg_time(k) for k in CodeProfiler._records}

    @staticmethod
    def get_total_time(name: str):
        """Total recorded duration for `name`, or None if nothing recorded."""
        samples = CodeProfiler._records.get(name, None)
        if not samples:
            return None
        return sum(samples)

    @staticmethod
    def get_total_time_all():
        """Total recorded duration for every known scope."""
        return {k: CodeProfiler.get_total_time(k) for k in CodeProfiler._records}
61 |
--------------------------------------------------------------------------------
/src/utils/SaverUtils.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2022-2025, Harry Huang
3 | # @ BSD 3-Clause License
4 | import os
5 | import os.path as osp
6 | import re
7 | import threading
8 | from contextlib import ContextDecorator
9 | from io import BytesIO
10 | from typing import Callable, Optional, Sequence
11 |
12 | import UnityPy.classes as uc
13 | from PIL import Image
14 |
15 | from .Profiler import CodeProfiler
16 | from .Config import Config, PerformanceLevel
17 | from .GlobalMethods import mkdir
18 | from .Logger import Logger
19 | from .TaskUtils import WorkerCtrl
20 |
21 |
22 | class EntryLock(ContextDecorator):
23 | """The entry lock class to prevent simultaneous access to the same entry."""
24 |
25 | _ENTRIES = set()
26 | _INTERNAL_LOCK = threading.Condition()
27 |
28 | def __init__(self, entry):
29 | self.entry = entry
30 |
31 | def __enter__(self):
32 | with EntryLock._INTERNAL_LOCK:
33 | while self.entry in EntryLock._ENTRIES:
34 | EntryLock._INTERNAL_LOCK.wait()
35 | EntryLock._ENTRIES.add(self.entry)
36 |
37 | def __exit__(self, exc_type, exc_val, exc_tb):
38 | with EntryLock._INTERNAL_LOCK:
39 | EntryLock._ENTRIES.discard(self.entry)
40 | EntryLock._INTERNAL_LOCK.notify_all()
41 |
42 | # EndClass
43 |
44 |
45 | class SafeSaver(WorkerCtrl):
46 | """The file saver class to save file and avoid file name collision."""
47 |
48 | __instance = None
49 | _EXT_IMAGE = ".png"
50 | _EXT_RAW = ""
51 |
52 | def __init__(self):
53 | """Not recommended to use. Please use the static methods."""
54 | max_workers = PerformanceLevel.get_thread_limit(Config.get("performance_level"))
55 | super(SafeSaver, self).__init__(
56 | self._save, max_workers=max_workers, name="Saver"
57 | )
58 |
59 | @staticmethod
60 | def get_instance():
61 | if not SafeSaver.__instance:
62 | SafeSaver.__instance = SafeSaver()
63 | return SafeSaver.__instance
64 |
65 | @staticmethod
66 | def save_bytes(
67 | data: bytes,
68 | destdir: str,
69 | name: str,
70 | ext: str,
71 | on_queued: Optional[Callable] = None,
72 | on_saved: Optional[Callable] = None,
73 | ):
74 | """Saves a binary data to a file.
75 |
76 | :param data: Bytes data;
77 | :param destdir: Destination directory;
78 | :param name: File name (without the extension);
79 | :param ext: File extension;
80 | :param on_queued: Callback `f()` invoked when the file was queued, `None` for ignore;
81 | :param on_saved: Callback `f(file_path_or_none_for_not_saved)`, `None` for ignore;
82 | :rtype: None;
83 | """
84 | if on_queued:
85 | on_queued()
86 | SafeSaver.get_instance().submit((data, destdir, name, ext, on_saved))
87 |
88 | @staticmethod
89 | def save_image(
90 | img: Image.Image,
91 | destdir: str,
92 | name: str,
93 | ext: str = _EXT_IMAGE,
94 | on_queued: Optional[Callable] = None,
95 | on_saved: Optional[Callable] = None,
96 | ):
97 | """Saves an image to a file.
98 |
99 | :param img: Image instance;
100 | :param destdir: Destination directory;
101 | :param name: File name (without the extension);
102 | :param ext: File extension, `png` for default;
103 | :param on_queued: Callback `f()` invoked when the file was queued, `None` for ignore;
104 | :param on_saved: Callback `f(file_path_or_none_for_not_saved)`, `None` for ignore;
105 | :rtype: None;
106 | """
107 | bio = BytesIO()
108 | img.save(bio, format=ext.lstrip("."))
109 | SafeSaver.save_bytes(bio.getvalue(), destdir, name, ext, on_queued, on_saved)
110 |
111 | @staticmethod
112 | def save_object(
113 | obj: uc.Object,
114 | destdir: str,
115 | name: str,
116 | on_queued: Optional[Callable] = None,
117 | on_saved: Optional[Callable] = None,
118 | ):
119 | """Saves the given Unity object as a file. If a object is not exportable, it does nothing.
120 |
121 | :param obj: The object to save as file;
122 | :param destdir: Destination directory;
123 | :param name: File name (without the extension);
124 | :param on_queued: Callback `f()` invoked when the file was queued, `None` for ignore;
125 | :param on_saved: Callback `f(file_path_or_none_for_not_saved)`, `None` for ignore;
126 | :rtype: None;
127 | """
128 | if obj.object_reader is None or obj.object_reader.byte_size == 0:
129 | # No data:
130 | pass
131 | elif isinstance(obj, (uc.Sprite, uc.Texture2D)):
132 | # As image file:
133 | if obj.image.width > 0 and obj.image.height > 0:
134 | SafeSaver.save_image(
135 | obj.image, destdir, name, SafeSaver._EXT_IMAGE, on_queued, on_saved
136 | )
137 | return
138 | elif isinstance(obj, uc.AudioClip):
139 | # As audio file:
140 | samples = obj.samples
141 | if samples:
142 | for name, byte in samples.items():
143 | SafeSaver.save_bytes(
144 | byte, destdir, name, SafeSaver._EXT_RAW, on_queued, on_saved
145 | )
146 | return
147 | elif isinstance(obj, uc.TextAsset):
148 | # As raw file:
149 | byte = obj.m_Script.encode("utf-8", "surrogateescape")
150 | SafeSaver.save_bytes(
151 | byte, destdir, name, SafeSaver._EXT_RAW, on_queued, on_saved
152 | )
153 | return
154 | elif isinstance(obj, uc.Mesh):
155 | # As mesh file (.obj):
156 | try:
157 | obj_data = obj.export()
158 | SafeSaver.save_bytes(
159 | obj_data.encode("utf-8"), destdir, name, ".obj", on_queued, on_saved
160 | )
161 | except Exception as e:
162 | Logger.warn(f"SafeSaver: Failed to export Mesh: {e}")
163 | return
164 | else:
165 | # Not an exportable type:
166 | pass
167 |
168 | @staticmethod
169 | def save_objects(
170 | lst: Sequence[uc.Object],
171 | destdir: str,
172 | on_queued: Optional[Callable] = None,
173 | on_saved: Optional[Callable] = None,
174 | ):
175 | """Saves all the Unity objects in the given list as files.
176 | If a object is not exportable, it does nothing.
177 |
178 | :param lst: The objects list;
179 | :param destdir: Destination directory;
180 | :param on_queued: Callback `f()` invoked when the file was queued, `None` for ignore;
181 | :param on_saved: Callback `f(file_path_or_none_for_not_saved)`, `None` for ignore;
182 | :rtype: None;
183 | """
184 | for i in lst:
185 | SafeSaver.save_object(
186 | i, destdir, getattr(i, "m_Name", "Unknown"), on_queued, on_saved
187 | )
188 |
189 | @staticmethod
190 | def _save(
191 | data: bytes, destdir: str, name: str, ext: str, on_saved: Optional[Callable]
192 | ):
193 | dest = osp.join(destdir, name + ext)
194 | try:
195 | with CodeProfiler("saver_save"):
196 | # Ensure files with identical name cannot be saved simultaneously
197 | with EntryLock(dest):
198 | # Ensure this new file is unique to prevent duplication
199 | if SafeSaver._is_unique(data, dest):
200 | # Modify the file name to avoid namesake
201 | dest = SafeSaver._purify_name(dest)
202 | # Save the file eventually
203 | mkdir(osp.dirname(dest))
204 | SafeSaver._save_bytes(data, dest)
205 | # Invoke callback with destination path as argument
206 | if on_saved:
207 | on_saved(dest)
208 | Logger.debug(f'Saver: Saved file "{dest}"')
209 | return
210 | except Exception as arg:
211 | Logger.error(
212 | f'Saver: Failed to save file "{dest}" because: Exception{type(arg)} {arg}'
213 | )
214 | # Invoke call back with `None` indicating the file was not saved
215 | if on_saved:
216 | on_saved(None)
217 |
218 | @staticmethod
219 | def _save_bytes(data: bytes, dest: str):
220 | with open(dest, "wb") as f:
221 | f.write(data)
222 |
223 | @staticmethod
224 | def _is_unique(data: bytes, dest: str):
225 | destdir = osp.dirname(dest)
226 | name, ext = osp.splitext(osp.basename(dest))
227 | if not osp.isdir(destdir):
228 | return True
229 | flist = filter(
230 | lambda x: x.startswith(name) and x.endswith(ext), os.listdir(destdir)
231 | )
232 | for i in flist:
233 | with open(osp.join(destdir, i), "rb") as f:
234 | if f.read() == data:
235 | Logger.debug(
236 | f'Saver: File "{i}" duplication was prevented, size {len(data)}'
237 | )
238 | return False
239 | return True
240 |
241 | @staticmethod
242 | def _purify_name(dest: str):
243 | destdir = osp.dirname(dest)
244 | name, ext = osp.splitext(osp.basename(dest))
245 | new_name = re.sub(r"[\\/:*?\"<>|\x00-\x1F]", "#", name)
246 | if new_name != name:
247 | Logger.debug(
248 | f'Saver: File name "{name}" was modified to "{new_name}" to prevent invalid characters'
249 | )
250 | name = new_name
251 |
252 | dest = osp.join(destdir, name + ext)
253 | tmp = 0
254 | while osp.isfile(dest):
255 | dest = osp.join(destdir, f"{name}${tmp}{ext}")
256 | tmp += 1
257 | return dest
258 |
259 | # EndClass
260 |
--------------------------------------------------------------------------------
/src/utils/TaskUtils.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2022-2025, Harry Huang
3 | # @ BSD 3-Clause License
4 | from typing import Callable, Optional, Union
5 |
6 | import queue
7 | import threading
8 | import time
9 |
10 | from .Config import PerformanceLevel, Config
11 | from .GlobalMethods import color, print, clear
12 | from .Logger import Logger
13 |
14 |
15 | class ThreadCtrl:
16 | """Controller for Multi Threading."""
17 |
18 | def __init__(self, max_subthread: Optional[int] = None):
19 | """Initializes a tool for multi threading."""
20 | self.__sts: "list[threading.Thread]" = []
21 | if not max_subthread:
22 | max_subthread = PerformanceLevel.get_thread_limit(
23 | Config.get("performance_level")
24 | )
25 | self.set_max_subthread(max_subthread)
26 |
27 | def set_max_subthread(self, max_subthread: int):
28 | """Sets the max number of sub threads."""
29 | self.__max: int = max(1, max_subthread)
30 |
31 | def count_subthread(self):
32 | """Gets the number of alive sub threads."""
33 | self.__sts = list(filter(lambda x: x.is_alive(), self.__sts))
34 | return len(self.__sts)
35 |
36 | def run_subthread(
37 | self,
38 | fun,
39 | args: Optional[tuple] = None,
40 | kwargs: Optional[dict] = None,
41 | name: Optional[str] = None,
42 | ):
43 | """Creates a sub thread and run it."""
44 | while self.count_subthread() >= self.__max:
45 | pass
46 | ts = threading.Thread(
47 | target=fun,
48 | args=args if args is not None else (),
49 | kwargs=kwargs if kwargs is not None else {},
50 | daemon=True,
51 | name=name,
52 | )
53 | self.__sts.append(ts)
54 | ts.start()
55 |
56 | # EndClass
57 |
58 |
59 | class WorkerCtrl:
60 | """Controller for Permanent Worker Threads."""
61 |
62 | LAYOFF_INTERVAL = 5
63 | BACKUP_THRESHOLD = 5
64 |
65 | def __init__(self, handler: Callable, max_workers: int = 1, name: str = ""):
66 | """Initializes a Worker Controller.
67 |
68 | :param handler: The handler function of the workers;
69 | :param max_workers: The maximum number of workers;
70 | :param name: The optional name for the workers;
71 | """
72 | if max_workers < 1:
73 | raise ValueError("max_workers should not be less than 1")
74 | self.__queue = queue.Queue()
75 | self.__handler = handler
76 | self.__opened = True
77 | self.__workers = []
78 | self.__idle_timestamp = time.time()
79 | self.__max_workers = max_workers
80 | self._name = name
81 | self._total_requested = Counter()
82 | self._total_processed = Counter()
83 | self._backup_worker()
84 | Logger.debug(f"Worker: Workers are ready to work for {name}!")
85 |
86 | def submit(self, data: tuple):
87 | """Submits new data to workers.
88 |
89 | :param data: A tuple that contains the arguments that the handler required;
90 | :rtype: None;
91 | """
92 | if self.__opened:
93 | self.__queue.put(data)
94 | self._total_requested.update()
95 | else:
96 | raise RuntimeError("The worker controller has terminated")
97 |
98 | def terminate(self, block: bool = False):
99 | """Requests the workers to terminate and stop receiving new data.
100 |
101 | :param block: Whether to wait for workers to complete.
102 | :rtype: None;
103 | """
104 | if self.__opened:
105 | self.__opened = False
106 | if block:
107 | self.__queue.join()
108 |
109 | def completed(self):
110 | """Returns `True` if there is no data in queue or in handler.
111 |
112 | :rtype: bool;
113 | """
114 | return self._total_requested.now() == self._total_processed.now()
115 |
116 | def get_total_requested(self):
117 | """Gets the total number of requested tasks.
118 |
119 | :rtype: int;
120 | """
121 | return self._total_requested.now()
122 |
123 | def get_total_processed(self):
124 | """Gets the total number of processed tasks.
125 |
126 | :rtype: int;
127 | """
128 | return self._total_processed.now()
129 |
130 | def reset_counter(self):
131 | """Resets the counter of requested tasks and processed tasks.
132 |
133 | :rtype: None;
134 | """
135 | if self.completed():
136 | self._total_requested = Counter()
137 | self._total_processed = Counter()
138 | else:
139 | raise RuntimeError("Cannot reset counter while the workers are busy")
140 |
141 | def _loop(self):
142 | while self.__opened or not self.__queue.empty():
143 | # Intelligent scheduling
144 | if self.__queue.empty():
145 | if self.__idle_timestamp <= 0:
146 | self.__idle_timestamp = time.time()
147 | elif self.__idle_timestamp + WorkerCtrl.LAYOFF_INTERVAL < time.time():
148 | cur_worker = threading.current_thread()
149 | if (
150 | cur_worker in self.__workers
151 | and self.__workers.index(cur_worker) != 0
152 | ):
153 | self._layoff_worker(cur_worker)
154 | break
155 | else:
156 | self.__idle_timestamp = 0
157 | if self.__queue.qsize() > WorkerCtrl.BACKUP_THRESHOLD:
158 | self._backup_worker()
159 | # Task receiving
160 | try:
161 | args = self.__queue.get(timeout=WorkerCtrl.LAYOFF_INTERVAL)
162 | try:
163 | self.__handler(*args)
164 | finally:
165 | self.__queue.task_done()
166 | self._total_processed.update()
167 | except queue.Empty:
168 | pass
169 |
170 | def _backup_worker(self):
171 | if len(self.__workers) < self.__max_workers:
172 | t = threading.Thread(
173 | target=self._loop, name=f"Worker:{self._name}", daemon=True
174 | )
175 | self.__workers.append(t)
176 | t.start()
177 | if len(self.__workers) >= self.__max_workers:
178 | Logger.debug("Worker: Workers are in full load, slogging guts out!")
179 |
180 | def _layoff_worker(self, worker: threading.Thread):
181 | if worker in self.__workers:
182 | self.__workers.remove(worker)
183 | if len(self.__workers) <= 1:
184 | Logger.debug("Worker: Workers nodded off, sleeping for new tasks!")
185 |
186 |
187 | class UICtrl:
188 | """UI Controller in the separated thread."""
189 |
190 | THREAD_NAME = "UIThread"
191 |
192 | def __init__(self, interval: float = 0.1):
193 | """Initializes a UI Controller.
194 |
195 | :param interval: Auto-refresh interval (seconds);
196 | """
197 | self.__lines = []
198 | self.__cache_lines = []
199 | self.__status = True
200 | self.set_refresh_rate(interval)
201 |
202 | def __loop(self):
203 | while self.__status:
204 | self.refresh(post_delay=self.__interval)
205 |
206 | def loop_start(self):
207 | """Starts auto-refresh."""
208 | self.__status = True
209 | self.__cache_lines = []
210 | threading.Thread(
211 | target=self.__loop, daemon=True, name=UICtrl.THREAD_NAME
212 | ).start()
213 |
214 | def loop_stop(self):
215 | """Stops auto-refresh."""
216 | self.__status = False
217 | self.__cache_lines = []
218 |
219 | def refresh(self, post_delay: float = 0, force_refresh: bool = False):
220 | """Requests a immediate refresh.
221 |
222 | :param post_delay: Set the post delay after this refresh (seconds);
223 | :param force_refresh: If `True`, do refresh regardless of whether the content has changed or not;
224 | :rtype: None;
225 | """
226 | if self.__lines != self.__cache_lines or force_refresh:
227 | try:
228 | self.__cache_lines = self.__lines[:]
229 | for i in range(len(self.__cache_lines)):
230 | print(self.__cache_lines[i], y=i + 1)
231 | except IndexError:
232 | pass
233 | if post_delay > 0:
234 | time.sleep(post_delay)
235 |
236 | def request(self, lines: "list[str]"):
237 | """Updates the content
238 |
239 | :param lines: A list containing the content of each line;
240 | :rtype: None;
241 | """
242 | self.__lines = lines
243 |
244 | def reset(self):
245 | """Clears the content."""
246 | clear()
247 | self.__lines = []
248 | self.__cache_lines = []
249 |
250 | def set_refresh_rate(self, interval: float):
251 | """Sets the auto-refresh interval.
252 |
253 | :param interval: Auto-refresh interval (seconds);
254 | :rtype: None;
255 | """
256 | self.__interval = interval
257 |
258 | # EndClass
259 |
260 |
261 | class Counter:
262 | """Cumulative Counter."""
263 |
264 | def __init__(self):
265 | """Initializes a cumulative counter."""
266 | self.__s = 0
267 |
268 | def update(self, val: Union[int, bool] = 1):
269 | """Updates the counter.
270 |
271 | :param val: Delta value in int or bool (`True` for 1 and `False` for 0);
272 | :returns: Current value;
273 | :rtype: int;
274 | """
275 | if isinstance(val, int):
276 | self.__s += val
277 | elif val is True:
278 | self.__s += 1
279 | return self.__s
280 |
281 | def now(self):
282 | """Gets the current value.
283 |
284 | :returns: Current value;
285 | :rtype: int;
286 | """
287 | return self.__s
288 |
289 | # EndClass
290 |
291 |
292 | class TaskReporter:
293 | """Task reporter providing functions to record time consumptions of one kind of tasks."""
294 |
295 | def __init__(self, weight: int, demand: int = 0, window_size: int = 100):
296 | """Initializes a task reporter with a sliding window for time tracking.
297 |
298 | :param weight: The weight per task, higher weight indicating more time consumption;
299 | :param demand: The initial number of the tasks to be done;
300 | :param window_size: The size of the sliding window for speed calculation;
301 | """
302 | self._weight = weight
303 | self._demand = demand
304 | self._done = 0
305 | self._timestamps = queue.Queue(maxsize=window_size)
306 | self._internal_lock = threading.Lock()
307 |
308 | def report(self, success: bool = True):
309 | """Reports that one task has been successfully done (or failed).
310 |
311 | :param success: `True` to let `done += 1`, `False` to let `demand -= 1`;
312 | :rtype: None;
313 | """
314 | with self._internal_lock:
315 | if success:
316 | self._done += 1
317 | if self._timestamps.full():
318 | # Remove the oldest timestamp if the queue is full
319 | self._timestamps.get()
320 | # Record the current completion timestamp
321 | self._timestamps.put(time.time())
322 | else:
323 | # Task failed, decrease the demand
324 | self._demand -= 1
325 |
326 | def update_demand(self, delta: int = 1):
327 | """Updates the number of the tasks to be done by the given value."""
328 | with self._internal_lock:
329 | self._demand += delta
330 |
331 | def get_demand(self):
332 | """Gets the number of the tasks to be done."""
333 | return self._demand
334 |
335 | def get_done(self):
336 | """Gets the number of the tasks done."""
337 | return self._done
338 |
339 | def get_speed(self):
340 | """Calculates the average time per task based on the sliding window.
341 |
342 | :returns: The average speed (tasks per second), `0` if no enough data;
343 | :rtype: float;
344 | """
345 | if self._timestamps.qsize() < 2:
346 | return 0.0
347 | timestamps = list(self._timestamps.queue)
348 | delta_time = float(timestamps[-1] - timestamps[0])
349 | task_count = len(timestamps) - 1
350 | return task_count / delta_time if delta_time > 0 else 0.0
351 |
352 | def to_progress_str(self):
353 | """Returns a string representing the done and demand of the tasks.
354 |
355 | :returns: A human-readable string;
356 | :rtype: str;
357 | """
358 | return f"{self._done}/{self._demand}"
359 |
360 | # EndClass
361 |
362 |
363 | class TaskReporterTracker:
364 | """Task reporter tracker providing functions to manage multiple task reporters."""
365 |
366 | def __init__(self, *reporters: TaskReporter):
367 | """Initializes a task reporter tracker with multiple task reporters.
368 |
369 | :param reporters: Some TaskReporter instances to be managed;
370 | """
371 | self._reporters = reporters
372 | self._start_at = time.time()
373 | self._cache_pg = -1.0
374 |
375 | def get_rt(self):
376 | """Gets the running time since this instance was initialized.
377 |
378 | :returns: Time (seconds);
379 | :rtype: None;
380 | """
381 | return time.time() - self._start_at
382 |
383 | def get_eta(self):
384 | """Calculates the total estimated time to complete all tasks across all reporters.
385 |
386 | :returns: The total remaining time (seconds), `0` if not available;
387 | :rtype: float;
388 | """
389 | eta = 0.0
390 | for reporter in self._reporters:
391 | s = reporter.get_speed()
392 | eta += (reporter._demand - reporter._done) / s if s > 0 else float("inf")
393 | return eta if eta != float("inf") else 0.0
394 |
395 | def get_progress(self, force_inc: bool = False):
396 | """Calculates the overall progress of tasks completed.
397 |
398 | :param force_inc: Whether prevent the progress to decrease;
399 | :returns: The overall progress in `[0.0, 1.0]`;
400 | :rtype: float;
401 | """
402 | done = sum(reporter._done * reporter._weight for reporter in self._reporters)
403 | demand = sum(
404 | reporter._demand * reporter._weight for reporter in self._reporters
405 | )
406 | pg = max(0.0, min(1.0, done / demand)) if demand > 0 else 1.0
407 | self._cache_pg = max(self._cache_pg, pg)
408 | return self._cache_pg if force_inc else pg
409 |
410 | def to_progress_bar_str(self, force_inc: bool = True, length: int = 25):
411 | """Gets a string representing the current progress.
412 |
413 | :param force_inc: Whether prevent the progress to decrease;
414 | :param length: The length of the progress bar;
415 | :returns: A progress bar string that can be printed to CLI;
416 | :rtype: str;
417 | """
418 | p = self.get_progress(force_inc)
419 | return f"[{TaskReporterTracker._format_progress_bar_str(p, length)}] {color(2, 1)}{p:.1%}"
420 |
421 | def to_rt_str(self):
422 | """Gets a string representing the running time since this instance was initialized.
423 |
424 | :returns: A human-readable string;
425 | :rtype: str;
426 | """
427 | return TaskReporterTracker._format_time_str(self.get_rt())
428 |
429 | def to_eta_str(self):
430 | """Gets a string representing the estimated time to complete all tasks.
431 |
432 | :returns: A human-readable string;
433 | :rtype: str;
434 | """
435 | return TaskReporterTracker._format_time_str(self.get_eta())
436 |
437 | @staticmethod
438 | def _format_time_str(seconds: float):
439 | h = int(seconds / 3600)
440 | m = int(seconds % 3600 / 60)
441 | s = int(seconds % 60)
442 | if h != 0:
443 | return f"{h}:{m:02}:{s:02}"
444 | if seconds != 0:
445 | return f"{m:02}:{s:02}"
446 | return "--:--"
447 |
448 | @staticmethod
449 | def _format_progress_bar_str(progress: float, length: int):
450 | try:
451 | add_chars = (" ", "▏", "▎", "▍", "▌", "▋", "▊", "▉", "█")
452 | max_idx = len(add_chars) - 1
453 | rst = ""
454 | unit = 1 / length
455 | for i in range(length):
456 | ratio = (progress - i * unit) / unit
457 | rst += add_chars[max(0, min(max_idx, round(ratio * max_idx)))]
458 | return rst
459 | except BaseException:
460 | return ""
461 |
462 | # EndClass
463 |
--------------------------------------------------------------------------------
/src/utils/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # Copyright (c) 2022-2025, Harry Huang
3 | # @ BSD 3-Clause License
4 |
--------------------------------------------------------------------------------
/test/res/client-2.2/activity-[uc]act1mainss.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/activity-[uc]act1mainss.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/activity-commonassets.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/activity-commonassets.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/arts-charportraits-pack1.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/arts-charportraits-pack1.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/arts-dynchars-char_2014_nian_2.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/arts-dynchars-char_2014_nian_2.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/arts-effects-[pack]map.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/arts-effects-[pack]map.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/arts-loadingillusts_1.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/arts-loadingillusts_1.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/arts-rglktopic.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/arts-rglktopic.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/arts-ui-common.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/arts-ui-common.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/audio-sound_beta_2-enemy-e_imp1.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/audio-sound_beta_2-enemy-e_imp1.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/audio-sound_beta_2-general_1.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/audio-sound_beta_2-general_1.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/audio-sound_beta_2-voice-char_002_amiya.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/audio-sound_beta_2-voice-char_002_amiya.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/avg-characters-avg_003_kalts_1.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/avg-characters-avg_003_kalts_1.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/battle-prefabs-[uc]skills.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/battle-prefabs-[uc]skills.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/battle-prefabs-effects-amiya.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/battle-prefabs-effects-amiya.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/battle-prefabs-enemies-enemy_40.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/battle-prefabs-enemies-enemy_40.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/chararts-char_002_amiya.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/chararts-char_002_amiya.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/charpack-char_002_amiya.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/charpack-char_002_amiya.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/gamedata-levels-enemydata.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/gamedata-levels-enemydata.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/i18n-string_map.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/i18n-string_map.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/npcpack-npc_001_doctor.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/npcpack-npc_001_doctor.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/refs-rglktp_rogue_1.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/refs-rglktp_rogue_1.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/retro-permanent_sidestory_1_grani_and_the_treasure_of_knights.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/retro-permanent_sidestory_1_grani_and_the_treasure_of_knights.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/scenes-activities-a001-level_a001_01-level_a001_01-lightingdata.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/scenes-activities-a001-level_a001_01-level_a001_01-lightingdata.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/scenes-activities-a001-level_a001_01-level_a001_01.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/scenes-activities-a001-level_a001_01-level_a001_01.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/skinpack-char_002_amiya.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/skinpack-char_002_amiya.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/spritepack-ui_camp_logo_h2_0.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/spritepack-ui_camp_logo_h2_0.ab
--------------------------------------------------------------------------------
/test/res/client-2.2/ui-[uc]battlefinish.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.2/ui-[uc]battlefinish.ab
--------------------------------------------------------------------------------
/test/res/client-2.4/arts-dynchars-char_2014_nian_nian#4.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.4/arts-dynchars-char_2014_nian_nian#4.ab
--------------------------------------------------------------------------------
/test/res/client-2.4/chararts-char_377_gdglow.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.4/chararts-char_377_gdglow.ab
--------------------------------------------------------------------------------
/test/res/client-2.4/chararts-char_388_mint.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.4/chararts-char_388_mint.ab
--------------------------------------------------------------------------------
/test/res/client-2.5/anon-3d8cc04a4457d205f1c975252a7e71cf.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.5/anon-3d8cc04a4457d205f1c975252a7e71cf.bin
--------------------------------------------------------------------------------
/test/res/client-2.5/chararts-char_1026_gvial2.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.5/chararts-char_1026_gvial2.ab
--------------------------------------------------------------------------------
/test/res/client-2.5/chararts-char_4179_monstr.ab:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/isHarryh/Ark-Unpacker/9705b48686de97112c33d87f02007daba89c3213/test/res/client-2.5/chararts-char_4179_monstr.ab
--------------------------------------------------------------------------------