├── .github
│   └── workflows
│       ├── update-global.yaml
│       ├── update-jp.yaml
│       └── updatemodels.yaml
├── .gitignore
├── README.md
├── css
│   └── style.css
├── favicon.ico
├── index.html
├── js
│   ├── canvas-exporter.js
│   ├── main.js
│   ├── pixi-spine.js
│   ├── pixi.js
│   └── utility.js
├── py
│   ├── generateAudioJsonGlobal.py
│   ├── generateAudioJsonJapan.py
│   ├── generateModelsJson.py
│   ├── getModelsGlobal.py
│   └── getModelsJapan.py
└── requirements.txt
/.github/workflows/update-global.yaml:
--------------------------------------------------------------------------------
1 | name: Update-Global
2 |
3 | on:
4 | workflow_dispatch:
5 | schedule:
6 | - cron: '0 19 * * *'
7 |
8 | permissions: write-all
9 |
10 | jobs:
11 | update:
12 | runs-on: ubuntu-latest
13 | steps:
14 | - name: Check out repository code
15 | uses: actions/checkout@v3
16 | with:
17 | ref: 'global' # Get the right branch
18 | fetch-depth: '0' # Fetch all branches first
19 | - name: Installing requirements
20 | run: |
21 | pip install -r requirements.txt
22 | - name: Retrieving models
23 | run: |
24 | python ./py/getModelsGlobal.py
25 | - name: Generating models.json
26 | run: |
27 | python ./py/generateModelsJson.py
28 | - name: Generating audio.json
29 | run: |
30 | python ./py/generateAudioJsonGlobal.py
31 | - name: Commit files
32 | run: |
33 | git add .
34 | git config --global user.email "bot@github.com"
35 | git config --global user.name "Github Bot"
36 | git commit -m "[Update] $(cat ./data/version.txt)"
37 | - name: Push changes
38 | uses: ad-m/github-push-action@master
39 | with:
40 | github_token: ${{ secrets.GITHUB_TOKEN }}
41 | force: true
42 | branch: global
--------------------------------------------------------------------------------
/.github/workflows/update-jp.yaml:
--------------------------------------------------------------------------------
1 | name: Update-JP
2 |
3 | on:
4 | workflow_dispatch:
5 |
6 | permissions: write-all
7 |
8 | jobs:
9 | update:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - name: Check out repository code
13 | uses: actions/checkout@v3
14 | with:
15 | ref: 'jp' # Get the right branch
16 | fetch-depth: '0' # Fetch all branches first
17 | - name: Installing requirements
18 | run: |
19 | pip install -r requirements.txt
20 | - name: Retrieving models
21 | run: |
22 | python ./py/getModelsJapan.py
23 | - name: Generating models.json
24 | run: |
25 | python ./py/generateModelsJson.py
26 | - name: Generating audio.json
27 | run: |
28 | python ./py/generateAudioJsonJapan.py
29 | - name: Commit files
30 | run: |
31 | git add .
32 | git config --global user.email "bot@github.com"
33 | git config --global user.name "Github Bot"
34 | git commit -m "[Update] $(cat ./data/version.txt)"
35 | - name: Push changes
36 | uses: ad-m/github-push-action@master
37 | with:
38 | github_token: ${{ secrets.GITHUB_TOKEN }}
39 | force: true
40 | branch: jp
--------------------------------------------------------------------------------
/.github/workflows/updatemodels.yaml:
--------------------------------------------------------------------------------
1 | name: Update models.json
2 |
3 | on:
4 | release:
5 | types: [created]
6 | workflow_dispatch:
7 |
8 | permissions: write-all
9 |
10 | jobs:
11 | update:
12 | runs-on: ubuntu-latest
13 | steps:
14 | - name: Check out repository code
15 | uses: actions/checkout@v3
16 | - name: Generating models.json
17 | run: |
18 | python ./py/generateModelsJson.py
19 | - name: Commit files
20 | run: |
21 | git config --local user.email "ucihaibna@yahoo.com"
22 | git config --local user.name "respectZ"
23 | git add .
24 | git commit -m "[Update] $(cat ./data/version.txt)"
25 | - name: Push changes
26 | uses: ad-m/github-push-action@master
27 | with:
28 | github_token: ${{ secrets.GITHUB_TOKEN }}
29 | force: true
30 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # poetry
98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
99 | # This is especially recommended for binary packages to ensure reproducibility, and is more
100 | # commonly ignored for libraries.
101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
102 | #poetry.lock
103 |
104 | # pdm
105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
106 | #pdm.lock
107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
108 | # in version control.
109 | # https://pdm.fming.dev/#use-with-ide
110 | .pdm.toml
111 |
112 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
113 | __pypackages__/
114 |
115 | # Celery stuff
116 | celerybeat-schedule
117 | celerybeat.pid
118 |
119 | # SageMath parsed files
120 | *.sage.py
121 |
122 | # Environments
123 | .env
124 | .venv
125 | env/
126 | venv/
127 | ENV/
128 | env.bak/
129 | venv.bak/
130 |
131 | # Spyder project settings
132 | .spyderproject
133 | .spyproject
134 |
135 | # Rope project settings
136 | .ropeproject
137 |
138 | # mkdocs documentation
139 | /site
140 |
141 | # mypy
142 | .mypy_cache/
143 | .dmypy.json
144 | dmypy.json
145 |
146 | # Pyre type checker
147 | .pyre/
148 |
149 | # pytype static type analyzer
150 | .pytype/
151 |
152 | # Cython debug symbols
153 | cython_debug/
154 |
155 | # PyCharm
156 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
157 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
158 | # and can be added to the global gitignore or merged into this file. For a more nuclear
159 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
160 | #.idea/
161 |
162 | # https://github.com/respectZ/blue-archive-spine
163 | env
164 |
165 | # Temporary files
166 | converted_models/
167 | raw_assets/
168 | raw_models/
169 | downloaded_resource/
170 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # blue-archive-spine
2 | Based on [respectZ/blue-archive-spine](https://github.com/respectZ/blue-archive-spine)
3 |
4 | Move to https://github.com/lwd-temp/blue-archive-spine-jp
5 |
6 | **Blue Archive is a registered trademark of NAT GAMES Co., Ltd. This repo is not affiliated with NEXON Korea Corp. & NEXON GAMES Co., Ltd. All game resources are copyrighted to the respective owners.**
7 |
8 | ## Quick Start
9 | * Use this template
10 | * Create `jp` and `global` branches from `resourceless`
11 | * Enable GitHub Actions, then manually trigger `Update-Global` and `Update-JP` from any branch (each workflow checks out the right branch regardless of where you trigger it) to get the first update; see the CLI example after this list
12 | * `Update-Global` runs daily and fails when no update is available; `Update-JP` does not run automatically
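If you prefer the command line over the Actions tab, the same manual trigger can be issued with the GitHub CLI (this assumes `gh` is installed and authenticated for your copy of the repo; the file names are those under `.github/workflows/`):

```
gh workflow run update-global.yaml --ref global
gh workflow run update-jp.yaml --ref jp
```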
13 |
14 | `Update-JP` needs to be triggered manually because there is no automated way to get the latest update JSON link (maybe there is, but it is not implemented here). `ba_api` in `getModelsJapan.py` must be updated manually (through reverse engineering or network capture) whenever a new update ships, as shown below.
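For reference, this is the hard-coded line in `py/getModelsJapan.py` that has to be pointed at the current update JSON link (the hashed path changes with each JP client release):

```python
# py/getModelsJapan.py -- replace the hashed path when a new JP client version ships
ba_api = "https://yostar-serverinfo.bluearchiveyostar.com/r60_826142735o1hiici1puy.json"
```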
15 |
16 | ## About Japan version
17 | The update JSON link is hard-coded into the game's OBB resources.
18 |
19 | We can get `LatestClientVersion` from `https://prod-noticeindex.bluearchiveyostar.com/prod/index.json`, but how is the random hash path (like `r48_2q1alt6gvk5igdsj4hl2.json`) under `yostar-serverinfo.bluearchiveyostar.com` generated?
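Reading `LatestClientVersion` itself is straightforward; a minimal sketch, mirroring `getVersion()` in `py/getModelsJapan.py`:

```python
import requests

# The notice index is public and reports the latest client version,
# but it does not reveal the hashed path under yostar-serverinfo.
index = requests.get(
    "https://prod-noticeindex.bluearchiveyostar.com/prod/index.json").json()
print(index["LatestClientVersion"])
```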
20 |
21 | BA Japan (Yostar) does not expose a stable resource-path API; the developers hard-code the resource link for each version into the game's OBB resources. The game only checks `LatestClientVersion` to see whether it is outdated, then asks the user to update from Google Play, which is how the client gets the latest resource link.
22 |
23 | Does this mean the user has to update the game from Google Play every month to get the latest monthly in-game event? Yes, that is exactly what happens. (BA Japan ships a Google Play update roughly every month, while BA Global updates roughly every three months.)
24 |
25 | You can extract `GameMainConfig` from the game OBB and decrypt it; the key `ServerInfoDataUrl` in `GameMainConfig` holds the resource link.
26 |
27 | ## About this repo
28 | This repo is a fork of [respectZ/blue-archive-spine](https://github.com/respectZ/blue-archive-spine). Some contributors and I modified the code to make it work better.
29 |
30 | `blue-archive-spine` is a tool to download and view the character arts and animations (Memorial Lobby) of the game [Blue Archive](https://bluearchive.nexon.com/home).
31 |
32 | It is recommended to use GitHub Actions to download the resources automatically. If you want to download the resources manually, refer to the original README below. Attention: do not open `index.html` directly from the filesystem; all resources must be served by an HTTP server.
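Any static file server will do; for example, Python's built-in one, started from the repository root (8000 is just an arbitrary port):

```
python -m http.server 8000
```

Then open http://localhost:8000/ in a browser.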
33 |
34 | ## License
35 | This repo is based on the work of @respectZ and @LXY1226. The original repo doesn't have a license, so I'm not sure whether it can be used for any purpose.
36 |
37 | ## Disclaimer
38 | **Blue Archive is a registered trademark of NAT GAMES Co., Ltd. This repo is not affiliated with NEXON Korea Corp. & NEXON GAMES Co., Ltd. All game resources are copyrighted to the respective owners.**
39 |
40 | ## The following is the original README.
41 |
42 | *Please note: some of the following information is outdated. Refer to the Quick Start section above.*
43 |
44 | This repo uses UnityPy (instead of the upstream's unitypack) to extract files.
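As a condensed sketch of what that extraction does (mirroring `extractCharacter()` in `py/getModelsGlobal.py`; the bundle path below is only a placeholder):

```python
import UnityPy

# Spine data ships as Unity bundles: TextAssets (.skel / .atlas) are written out
# as-is, and Texture2D objects are saved as PNG, as the py/ scripts do.
bundle = UnityPy.load("./downloaded_resource/spine-bundle")  # placeholder path
for obj in bundle.objects:
    if obj.type.name == "TextAsset":
        data = obj.read()
        if ".skel" in data.name or ".atlas" in data.name:
            with open(data.name, "wb") as f:
                f.write(data.script)
    elif obj.type.name == "Texture2D":
        data = obj.read()
        data.image.save(data.name + ".png")
```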
45 |
46 | -----
47 |
48 | # Information
49 | For viewing Blue Archive Spines.
50 |
51 | Have a look at [this branch](https://github.com/respectZ/blue-archive-spine/tree/resourceless) for resourceless.
52 |
53 | # Requirements
54 | - [decrunch](https://github.com/HearthSim/decrunch/)
55 | - [fsb5](https://github.com/HearthSim/python-fsb5)
56 | - [lz4](https://github.com/python-lz4/python-lz4)
57 | - [Pillow](https://python-pillow.org/)
58 | - [astc_decomp](https://github.com/K0lb3/astc_decomp/)
59 | - MSVC++ 14.0 Build Tools with Windows 10 SDK
60 |
61 | # Setup
62 | ## Building UnityPack
63 | ```
64 | setup.py build
65 | ```
66 | ## Installing UnityPack
67 | ```
68 | setup.py install
69 | ```
70 | or
71 | ```
72 | setup.py install --user
73 | ```
74 |
75 | # Downloading Models
76 | ```
77 | py/getModels.py
78 | ```
79 | Downloaded models are located at ./downloaded_resource
80 |
81 | Assets (Spine and Audio) are located at ./assets
82 |
83 | # Generating JSON Data
84 | ## data/audio.json
85 | ```
86 | py/generateAudioJson.py
87 | ```
88 | This will download VOC_JP audio instead of playing it directly from the server (CORS issue?)
89 |
90 | To play audio directly from BA's server, change py/generateAudioJson.py
91 | ```python
92 | _type = 1
93 | ```
94 | to
95 | ```python
96 | _type = 0
97 | ```
98 | ## data/models.json
99 | ```
100 | py/generateModelsJson.py
101 | ```
102 | ## And you're done!
103 | Just launch index.html
104 |
105 | ## Used Libraries
106 | - [pixi.js](https://pixijs.com/)
107 | - [pixi-spine](https://github.com/pixijs/spine)
108 | - [howler.js](https://howlerjs.com/)
109 | - [UIKit](https://getuikit.com/)
110 |
111 | Big kudos to the awesome [UnityPack](https://github.com/HearthSim/UnityPack)
112 |
--------------------------------------------------------------------------------
/css/style.css:
--------------------------------------------------------------------------------
1 | * {
2 | margin: 0;
3 | padding: 0;
4 | }
5 |
6 | /* width */
7 | ::-webkit-scrollbar {
8 | width: 10px;
9 | }
10 |
11 | /* Track */
12 | ::-webkit-scrollbar-track {
13 | background: rgb(255, 255, 255);
14 | }
15 |
16 | /* Handle */
17 | ::-webkit-scrollbar-thumb {
18 | background: rgb(104, 152, 255);
19 | }
20 |
21 | /* Handle on hover */
22 | ::-webkit-scrollbar-thumb:hover {
23 | background: rgb(58, 120, 255);
24 | }
25 |
26 | #canvasWrapper {
27 | box-sizing: border-box;
28 | width: 100vw;
29 | height: 100vh;
30 | display: flex;
31 | justify-content: center;
32 | align-items: center;
33 | }
--------------------------------------------------------------------------------
/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/SunsetMkt/blue-archive-spine/2b4b6f247f999f6892e04e924764cde96ca93e16/favicon.ico
--------------------------------------------------------------------------------
/index.html:
--------------------------------------------------------------------------------
(index.html markup is not preserved in this dump; only its visible text survives.)
Page title: Blue Archive Resource Viewer
Export modal status messages: "Rendering...", "Done !"
--------------------------------------------------------------------------------
/js/canvas-exporter.js:
--------------------------------------------------------------------------------
1 | // Main
2 | function exportAnimation(FPS = 60) {
3 | let exportCanvas = document.createElement("canvas");
4 | exportCanvas.id = "export-canvas";
5 | exportCanvas.style.display = "none";
6 | document.body.appendChild(exportCanvas);
7 | let exportVideo = document.createElement("video");
8 | exportVideo.controls = true;
9 | exportVideo.id = "export-video";
10 |
11 | let appExport = new PIXI.Application({
12 | width: window.innerWidth,
13 | height: window.innerHeight,
14 | view: exportCanvas,
15 | });
16 | appExport.loader
17 | .add("char", `./${option.models.value}`)
18 | .load(function (loader, res) {
19 | let exportChar = new PIXI.spine.Spine(res.char.spineData);
20 | exportChar.scale.x = exportChar.scale.y = char.scale.x;
21 | exportChar.x = char.x;
22 | exportChar.y = char.y;
23 | exportChar.state.setAnimation(0, option.animations.value, 0);
24 |
25 | appExport.stage.addChild(exportChar);
26 |
27 | // Export Section
28 | let videoStream = exportCanvas.captureStream(FPS); //default to 60
29 | let mediaRecorder = new MediaRecorder(videoStream);
30 |
31 | let chunks = [];
32 | mediaRecorder.ondataavailable = function (e) {
33 | chunks.push(e.data);
34 | };
35 |
36 | mediaRecorder.onstop = function (e) {
37 | let blob = new Blob(chunks, { type: option.exportType.value });
38 | chunks = [];
39 | let videoURL = URL.createObjectURL(blob);
40 | exportVideo.src = videoURL;
41 | };
45 |
46 | // Get Animation Length
47 | let animLength = 0;
48 | for (var i in char.spineData.animations) {
49 | if (char.spineData.animations[i].name == option.animations.value) {
50 | animLength = char.spineData.animations[i].duration;
51 | break;
52 | }
53 | }
54 |
55 | //Modal Popup
56 | document.getElementById("rendering").style.display = "block";
57 | document.getElementById("complete").style.display = "none";
58 | UIkit.modal(document.getElementById("modal-exporter")).show();
59 | // Progressbar
60 | document.getElementById("export-progress").value = 0;
61 | let progress = setInterval(function () {
62 | document.getElementById("export-progress").value += 1;
63 | }, animLength * 10);
64 |
65 | // Record
66 | mediaRecorder.start();
67 | setTimeout(function () {
68 | mediaRecorder.stop();
69 | //Free Resources
70 | appExport.stage.children.pop();
71 | appExport.loader.resources = {};
72 | exportCanvas.remove();
73 | clearInterval(progress);
74 |
75 | //Update modal
76 | document.getElementById("rendering").style.display = "none";
77 | document.getElementById("complete").style.display = "block";
78 | document.getElementById("result").appendChild(exportVideo);
79 | }, animLength * 1000);
80 | });
81 | }
82 |
83 | // char.state.setAnimation(0, "Idle_01", false);
84 | // mediaRecorder.start();
85 | // setTimeout(function (){ mediaRecorder.stop(); }, 4000);
86 |
--------------------------------------------------------------------------------
/js/main.js:
--------------------------------------------------------------------------------
1 | let app;
2 | let char;
3 | let audioList = []
4 | let audios;
5 | let isCharacterLoaded = false;
6 | let debug = 0; //set via console
7 |
8 |
9 | function loadChar(model = "./assets/spine/shiroko_home/Shiroko_home.skel") {
10 | isCharacterLoaded = false;
11 | // remove previous spine
12 | if (app.stage.children.length > 0) {
13 | app.stage.children.pop();
14 | app.loader.resources = {};
15 | }
16 | // remove previous audio
17 | if (audioList.length != 0) {
18 | for (var i in audioList) {
19 | audioList[i].stop();
20 | }
21 | audioList = [];
22 | }
23 | try {
24 | app.loader.resources = {};
25 | // load new spine
26 | app.loader
27 | .add('char', `./${model}`)
28 | .load(onAssetsLoaded);
29 | } catch (e) {
30 | console.error(e)
31 | }
32 | }
33 |
34 |
35 | function onAssetsLoaded(loader, res) {
36 | if (audioList.length != 0) {
37 | for (var i in audioList) {
38 | audioList[i].stop();
39 | }
40 | audioList = [];
41 | }
42 |
43 | char = new PIXI.spine.Spine(res.char.spineData);
44 |
45 | // console.log(char)
46 | // console.log(char.spineData.height)
47 | // console.log(char.spineData.width)
48 |
49 | // Scaler
50 | char.scale.x = 0.5;
51 | char.scale.y = 0.5;
52 |
53 | // Centerize
54 | char.x = window.innerWidth / 2;
55 | char.y = window.innerHeight / 1;
56 |
57 | //Set option value
58 | option.scale.value = 0.5;
59 | option.x.value = char.x;
60 | option.y.value = char.y;
61 |
62 | // Insert animations to index.html
63 | const animations = res.char.spineData.animations;
64 | let check = 0;
65 | option.animations.innerHTML = "";
66 | for (var i in animations) {
67 | let a = document.createElement("option");
68 | a.value = a.innerHTML = animations[i].name;
69 | option.animations.append(a)
70 | if (animations[i].name == "Idle_01")
71 | check = 1;
72 | }
73 |
74 | //Play Animation
75 | if (check) {
76 | char.state.setAnimation(0, "Idle_01", option.loop.checked);
77 | optionAnimations.value = "Idle_01";
78 | } else {
79 | char.state.setAnimation(0, animations[0].name, option.loop.checked);
80 | }
81 | // Voiceline Listener / Handler
82 | char.state.addListener({
83 | event: function (entry, event) {
84 | if (debug)
85 | console.log(event)
86 | if (event.stringValue == '')
87 | return;
88 | if (!option.talkSound.checked)
89 | return;
90 | let charName = option.models.options[option.models.selectedIndex].text.replace("_home", "")
91 | // Camelize
92 | if (charName.indexOf("_") != -1) {
93 | charName = charName.toLowerCase().replace(/([-_][a-z])/g, group =>
94 | group
95 | .toUpperCase()
96 | .replace('-', '')
97 | .replace('_', '')
98 | );
99 | }
100 | charName = charName.charAt(0).toUpperCase() + charName.slice(1);
101 | if (debug)
102 | console.log(charName)
103 | //Play
104 | if (charName == 'MashiroSwimsuit')
105 | charName = 'CH0061';
106 | if (charName == 'ShirokoRidingsuit')
107 | charName = 'ShirokoRidingSuit'
108 | let voice = new Howl({
109 | src: [audios[event.stringValue]]
110 | });
111 | // If already loaded, play it
112 | if (voice.state() == 'loaded')
113 | voice.play();
114 | else if (voice.state() == 'loading') {
115 | voice.on('load', function () {
116 | voice.play();
117 | })
118 | }
119 | audioList.push(voice);
120 | }
121 | })
122 | //Add to main canvas
123 | app.stage.addChild(char);
124 | isCharacterLoaded = true;
125 | }
126 |
127 | function playAnimation(name) {
128 | if (audioList.length != 0) {
129 | for (var i in audioList) {
130 | audioList[i].stop();
131 | }
132 | audioList = [];
133 | }
134 |
135 | char.state.setAnimation(0, name, option.loop.checked);
136 | }
--------------------------------------------------------------------------------
/js/utility.js:
--------------------------------------------------------------------------------
1 | function httpGet(theUrl) {
2 | var xmlHttp = new XMLHttpRequest();
3 | xmlHttp.open("GET", theUrl, false); // false for synchronous request
4 | xmlHttp.send(null);
5 | return xmlHttp.responseText;
6 | }
7 |
8 | function checkFile(url) {
9 | var xmlHttp = new XMLHttpRequest();
10 | xmlHttp.open("GET", url, false); // false for synchronous request
11 | xmlHttp.send(null);
12 | return xmlHttp.status == 200;
13 | }
14 |
15 | function camelCase(obj) {
16 | var newObj = {};
17 | for (var d in obj) {
18 | if (obj.hasOwnProperty(d)) {
19 | newObj[d.replace(/(\_\w)/g, function (k) {
20 | return k[1].toUpperCase();
21 | })] = obj[d];
22 | }
23 | }
24 | return newObj;
25 | }
--------------------------------------------------------------------------------
/py/generateAudioJsonGlobal.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 |
4 | import requests
5 |
6 | from getModelsGlobal import downloadFile, getResourceURL, updateBaData
7 |
8 | data = {}
9 |
10 | # 1 for offline, 0 for online but cors issue
11 | _type = 1
12 |
13 | option = {
14 | "skipExisting": True
15 | }
16 |
17 | if not (os.path.isdir("./data")):
18 | os.mkdir("./data")
19 |
20 | if __name__ == "__main__":
21 | # updateBaData first
22 | updateBaData()
23 |
24 | resUrl = getResourceURL()
25 | baseUrl = '/'.join(resUrl.split("/")[0:-1])
26 | res = requests.get(resUrl).json()["resources"]
27 | for asset in res:
28 | if "Audio/VOC_JP/" in asset["resource_path"] and "MemorialLobby" in asset["resource_path"]:
29 | keyEvent = ''.join(
30 | asset["resource_path"].split("/")[-1].split(".")[:-1])
31 | fname = ''.join(asset["resource_path"].split("/")[-1])
32 |
33 | # download ver
34 | if _type:
35 | path = f"./assets/audio/{fname}"
36 | print("="*30)
37 | print(fname)
38 | if os.path.isfile(path):
39 | print("Already downloaded. Skipping.")
40 | data[keyEvent] = path
41 | continue
42 | if not (os.path.isdir("./assets/audio")):
43 | os.mkdir("./assets/audio/")
44 | downloadFile(baseUrl + "/" + asset["resource_path"], path)
45 | data[keyEvent] = path
46 | else:
47 | # online ver (cors ?)
48 | data[keyEvent] = baseUrl + "/" + asset["resource_path"]
49 |
50 | print(data)
51 | with open("./data/audio.json", "w") as f:
52 | json.dump(data, f, indent=4)
53 | print("="*30)
54 | print("Done!")
55 |
--------------------------------------------------------------------------------
/py/generateAudioJsonJapan.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 |
4 | import requests
5 |
6 | from getModelsJapan import downloadFile, getBaseResourceURL
7 |
8 | data = {}
9 |
10 | # 1 for offline, 0 for online but cors issue
11 | _type = 1
12 |
13 | option = {
14 | "skipExisting": True
15 | }
16 |
17 | if not (os.path.isdir("./data")):
18 | os.mkdir("./data")
19 |
20 | if __name__ == "__main__":
21 | baseUrl = getBaseResourceURL() + '/MediaResources'
22 | resUrl = baseUrl + '/MediaCatalog.json'
23 | # https://prod-clientpatch.bluearchiveyostar.com/r47_1_22_46zlzvd7mur326newgu8_2 + /MediaResources/MediaCatalog.json
24 | res = requests.get(resUrl).json()["Table"]
25 | for asset in res:
26 | if "Audio/VOC_JP/" in res[asset]["path"] and "MemorialLobby" in res[asset]["path"]:
27 | keyEvent = ''.join(
28 | res[asset]["path"].split("/")[-1].split(".")[:-1])
29 | fname = ''.join(res[asset]["path"].split("/")[-1])
30 |
31 | # download ver
32 | if _type:
33 | path = f"./assets/audio/{fname}"
34 | print("="*30)
35 | print(fname)
36 | if os.path.isfile(path):
37 | print("Already downloaded. Skipping.")
38 | data[keyEvent] = path
39 | continue
40 | if not (os.path.isdir("./assets/audio")):
41 | os.mkdir("./assets/audio/")
42 | downloadFile(baseUrl + "/" + res[asset]["path"], path)
43 | data[keyEvent] = path
44 | else:
45 | # online ver (cors ?)
46 | data[keyEvent] = baseUrl + "/" + res[asset]["path"]
47 |
48 | print(data)
49 | with open("./data/audio.json", "w") as f:
50 | json.dump(data, f, indent=4)
51 | print("="*30)
52 | print("Done!")
53 |
--------------------------------------------------------------------------------
/py/generateModelsJson.py:
--------------------------------------------------------------------------------
1 | import json
2 | import os
3 |
4 | parentDir = os.listdir("assets/spine/")
5 |
6 | data = {}
7 |
8 | for i in parentDir:
9 | # There may be more than one skel in the folder, and there's also possibility of pack mistakes.
10 | # Currently, not handling this.
11 | print(i)
12 | file = [x for x in (os.listdir(f"assets/spine/{i}")) if ".skel" in x]
13 | if len(file) > 1:
14 | for j in file:
15 | if j[0] == "_":
16 | continue
17 | data[j[:-5]] = f"assets/spine/{i}/{j}"
18 | else:
19 | if i[0] == "_":
20 | continue
21 | data[i] = f"assets/spine/{i}/" + ''.join(file)
22 |
23 | """
24 | for i in parentDir:
25 | print(i)
26 | file = [x for x in (os.listdir(f"assets/spine/{i}")) if ".skel" in x]
27 | if len(file) > 1:
28 | # There should be only one skel in the folder
29 | # This happens when the developer packs by mistake
30 | for j in file:
31 | if j[0] == "_":
32 | continue
33 | if j[:-5].lower() not in i.lower():
34 | # skel name should be the same as the folder name
35 | continue
36 | data[j[:-5]] = f"assets/spine/{i}/{j}"
37 | elif len(file) == 0:
38 | # No skel file in the folder
39 | continue
40 | else:
41 | # Only one skel file in the folder
42 | if i[0] == "_":
43 | continue
44 | data[i] = f"assets/spine/{i}/" + ''.join(file)
45 | """
46 |
47 |
48 | if not (os.path.isdir("./data")):
49 | os.mkdir("./data")
50 |
51 | with open("./data/models.json", "w") as f:
52 | json.dump(data, f, indent=6, sort_keys=True)
53 |
--------------------------------------------------------------------------------
/py/getModelsGlobal.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import requests
4 | import UnityPy
5 |
6 | # conf
7 | option = {
8 | # will skip resources that are already downloaded.
9 | "skipExistingDownloadedResource": True,
10 | # will skip assets that already exist.
11 | "skipExistingAssets": True
12 | }
13 | ba_ps = "https://play.google.com/store/apps/details?id=com.nexon.bluearchive"
14 | ba_api = "https://api-pub.nexon.com/patch/v1.1/version-check"
15 | ba_api_data = {
16 | "market_game_id": "com.nexon.bluearchive",
17 | "language": "en",
18 | "advertising_id": "636a7b75-5516-427b-b140-45318d3d51f0",
19 | "market_code": "playstore",
20 | "country": "US",
21 | "sdk_version": "187",
22 | "curr_build_version": "1.36.120365",
23 | "curr_build_number": 120365,
24 | "curr_patch_version": 0
25 | }
26 |
27 |
28 | def getVersion():
29 | '''
30 | Return Blue Archive build version and build number.
31 | '''
32 | # There are two ways to get the version.
33 | # 1. Get the version from BA API
34 | # 2. Get the version from BA Play Store page
35 | # We will try to get the version from BA API first.
36 |
37 | # Get the version from BA API
38 | try:
39 | r = requests.post(ba_api, json=ba_api_data)
40 | r.raise_for_status()
41 | data = r.json()
42 | build_version = data['latest_build_version']
43 | ver = build_version
44 | print(ver)
45 | # build_number = data['latest_build_number']
46 | # return (build_version, int(build_number))
47 | except:
48 | # Get the version from BA Play Store page
49 | print("Failed to get version from BA API.")
50 | src = requests.get(ba_ps).text
51 | # lmao python sucks
52 | try:
53 | ver = eval(src.split("AF_initDataCallback({key: 'ds:5', hash: ")[1].split("'")[2].split("data:")[1].split(
54 | ", sideChannel: {}")[0].replace("null", "None").replace("false", "False").replace("true", "True"))
55 | ver = ver[1][2][140][0][0][0]
56 | print(ver)
57 | # ver = src.split('')[4].split('')[0]
58 | except:
59 | # Get the version from BA Play Store page with regex
60 | print('Fallback to regex')
61 | # Fallback
62 | import re
63 |
64 | # Find all [["*.*.*"]]
65 | ver = re.findall(r'\[\[\"+(\d+(.\d+)+(.\d+))+\"\]\]', src)
66 | print(ver)
67 | # Get the first one
68 | ver = ver[0][0]
69 |
70 | return (ver, int(ver.split(".")[-1]))
71 |
72 |
73 | def updateBaData():
74 | global ba_api_data
75 |
76 | ba_api_data = {
77 | "market_game_id": "com.nexon.bluearchive",
78 | "language": "en",
79 | "advertising_id": "636a7b75-5516-427b-b140-45318d3d51f0",
80 | "market_code": "playstore",
81 | "country": "US",
82 | "sdk_version": "187",
83 | "curr_build_version": getVersion()[0],
84 | "curr_build_number": getVersion()[1],
85 | "curr_patch_version": 0
86 | }
87 |
88 |
89 | def getResourceURL():
90 | '''
91 | Return resource url for Blue Archive
92 | '''
93 | data = requests.post(ba_api, json=ba_api_data).json()
94 | print(data)
95 | return data["patch"]["resource_path"]
96 |
97 |
98 | def getModelsList():
99 | '''
100 | Return list of Blue Archive characters url path.
101 | '''
102 | data = []
103 | res_url = getResourceURL()
104 | res = requests.get(res_url).json()
105 | for asset in res["resources"]:
106 | if "spinecharacters-" in asset["resource_path"] or "spinelobbies-" in asset["resource_path"] or "spinebackground-" in asset["resource_path"]:
107 | # append url and path
108 | data.append('/'.join(res_url.split("/")
109 | [0:-1]) + "/" + asset["resource_path"])
110 | return data
111 |
112 |
113 | def downloadFile(url, fname):
114 | src = requests.get(url).content
115 | with open(fname, 'wb') as f:
116 | f.write(src)
117 |
118 |
119 | def extractTextAsset(object, dest):
120 | # parse the object data
121 | data = object.read()
122 |
123 | # create destination path
124 | dest = os.path.join(dest, data.name)
125 |
126 | # touch folder
127 | os.makedirs(os.path.dirname(dest), exist_ok=True)
128 |
129 | # just save
130 | with open(dest, "wb") as f:
131 | f.write(data.script)
132 |
133 |
134 | def extractTexture2D(object, dest):
135 | # parse the object data
136 | data = object.read()
137 |
138 | # create destination path
139 | dest = os.path.join(dest, data.name)
140 |
141 | # touch folder
142 | os.makedirs(os.path.dirname(dest), exist_ok=True)
143 |
144 | # make sure that the extension is correct
145 | # you probably only want to do so with images/textures
146 | dest, ext = os.path.splitext(dest)
147 | dest = dest + ".png"
148 |
149 | img = data.image
150 | img.save(dest)
151 |
152 |
153 | def extractCharacter(src, dest):
154 | # load the bundle
155 | bundle = UnityPy.load(src)
156 |
157 | for obj in bundle.objects:
158 | # extract skel & atlas
159 | if obj.type.name == "TextAsset":
160 | data = obj.read()
161 | if ".atlas" in data.name or ".skel" in data.name:
162 | print(data.name)
163 | extractTextAsset(obj, dest)
164 | # extract texture
165 | elif obj.type.name == "Texture2D":
166 | data = obj.read()
167 |
168 | print(data.name + ".png")
169 | extractTexture2D(obj, dest)
170 |
171 |
172 | if __name__ == "__main__":
173 | # make folder
174 | if not (os.path.isdir("./downloaded_resource")):
175 | os.makedirs("./downloaded_resource")
176 | if not (os.path.isdir("./assets")):
177 | os.makedirs("./assets")
178 | if not (os.path.isdir("./assets/spine")):
179 | os.makedirs("./assets/spine")
180 | if not (os.path.isdir("./data")):
181 | os.makedirs("./data")
182 |
183 | # important
184 | updateBaData()
185 |
186 | ver = getResourceURL() # There are several ResourceURL to a version
187 | print(ver)
188 | if (os.path.isfile("./data/version.txt")):
189 | with open("./data/version.txt", "r") as f:
190 | ver_temp = f.read()
191 | if str(ver) == str(ver_temp):
192 | print(f"[{ver}] No new update. Stopping.")
193 | exit(1)
194 | else:
195 | print(f"Update {ver_temp} to {ver}")
196 | with open("./data/version.txt", "w") as f:
197 | f.write(ver)
198 | else:
199 | with open("./data/version.txt", "w") as f:
200 | f.write(ver)
201 |
202 | # get model list
203 | model_list = getModelsList()
204 |
205 | # download list of model list
206 | for index, model in enumerate(model_list, start=1):
207 | print("="*30)
208 | print(f"{index}/{len(model_list)}")
209 | fname = model.split("/")[-1]
210 | destDownload = f"./downloaded_resource/{fname}"
211 |
212 | print(fname)
213 |
214 | # skip if already exists
215 | if option["skipExistingDownloadedResource"] and os.path.isfile(destDownload):
216 | print("Already downloaded. Skipping.")
217 | continue
218 |
219 | # spinebackground, spinecharacters and spinelobbies only
220 | character_name = ''.join(fname.split("spinecharacters-")[1].split("-")[0] if "spinecharacters" in fname else fname.split(
221 | "spinelobbies-")[1].split("-")[0] if "spinelobbies" in fname else fname.split("spinebackground-")[1].split("-")[0])
222 | destExtract = f"./assets/spine/{character_name}"
223 |
224 | # skip if already exists
225 | if option["skipExistingAssets"] and os.path.isfile(destExtract):
226 | print("Already extracted. Skipping.")
227 | continue
228 |
229 | if not (os.path.isdir(destExtract)):
230 | os.makedirs(destExtract)
231 |
232 | downloadFile(model, destDownload)
233 | # extract
234 | try:
235 | extractCharacter(destDownload, destExtract)
236 | except:
237 | print("Error occured. Skipping.")
238 | import traceback
239 | traceback.print_exc()
240 |
--------------------------------------------------------------------------------
/py/getModelsJapan.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | import requests
4 | import UnityPy
5 |
6 | # conf
7 | option = {
8 | # will skip resources that are already downloaded.
9 | "skipExistingDownloadedResource": True,
10 | # will skip assets that already exist.
11 | "skipExistingAssets": True
12 | }
13 |
14 | ba_api = "https://yostar-serverinfo.bluearchiveyostar.com/r60_826142735o1hiici1puy.json"
15 |
16 | ba_api2 = "https://prod-noticeindex.bluearchiveyostar.com/prod/index.json"
17 |
18 |
19 | def getVersion():
20 | '''
21 | Return latest version of Blue Archive Japan
22 | Unused for now
23 | '''
24 | data = requests.get(ba_api2).json()
25 | return data["LatestClientVersion"]
26 |
27 |
28 | def getBaseResourceURL():
29 | '''
30 | Return resource url for Blue Archive
31 | '''
32 | data = requests.get(ba_api).json()
33 | print(data)
34 | return data["ConnectionGroups"][0]['OverrideConnectionGroups'][-1]['AddressablesCatalogUrlRoot']
35 | # https://prod-clientpatch.bluearchiveyostar.com/r47_1_22_46zlzvd7mur326newgu8_2 + /Android/bundleDownloadInfo.json
36 |
37 |
38 | def getModelsList():
39 | '''
40 | Return list of Blue Archive characters url path.
41 | '''
42 | data = []
43 | base_url = getBaseResourceURL()
44 | res_url = base_url + '/Android/bundleDownloadInfo.json'
45 | res = requests.get(res_url).json()
46 | for asset in res["BundleFiles"]:
47 | if "spinecharacters-" in asset["Name"] or "spinelobbies-" in asset["Name"] or "spinebackground-" in asset["Name"]:
48 | # append url and path
49 | data.append(base_url + '/Android/' + asset["Name"])
50 | return data
51 |
52 |
53 | def downloadFile(url, fname):
54 | src = requests.get(url).content
55 | with open(fname, 'wb') as f:
56 | f.write(src)
57 |
58 |
59 | def extractTextAsset(object, dest):
60 | # parse the object data
61 | data = object.read()
62 |
63 | # create destination path
64 | dest = os.path.join(dest, data.name)
65 |
66 | # touch folder
67 | os.makedirs(os.path.dirname(dest), exist_ok=True)
68 |
69 | # just save
70 | with open(dest, "wb") as f:
71 | f.write(data.script)
72 |
73 |
74 | def extractTexture2D(object, dest):
75 | # parse the object data
76 | data = object.read()
77 |
78 | # create destination path
79 | dest = os.path.join(dest, data.name)
80 |
81 | # touch folder
82 | os.makedirs(os.path.dirname(dest), exist_ok=True)
83 |
84 | # make sure that the extension is correct
85 | # you probably only want to do so with images/textures
86 | dest, ext = os.path.splitext(dest)
87 | dest = dest + ".png"
88 |
89 | img = data.image
90 | img.save(dest)
91 |
92 |
93 | def extractCharacter(src, dest):
94 | # load the bundle
95 | bundle = UnityPy.load(src)
96 |
97 | for obj in bundle.objects:
98 | # extract skel & atlas
99 | if obj.type.name == "TextAsset":
100 | data = obj.read()
101 | if ".atlas" in data.name or ".skel" in data.name:
102 | print(data.name)
103 | extractTextAsset(obj, dest)
104 | # extract texture
105 | elif obj.type.name == "Texture2D":
106 | data = obj.read()
107 |
108 | print(data.name + ".png")
109 | extractTexture2D(obj, dest)
110 |
111 |
112 | if __name__ == "__main__":
113 | # make folder
114 | if not (os.path.isdir("./downloaded_resource")):
115 | os.makedirs("./downloaded_resource")
116 | if not (os.path.isdir("./assets")):
117 | os.makedirs("./assets")
118 | if not (os.path.isdir("./assets/spine")):
119 | os.makedirs("./assets/spine")
120 | if not (os.path.isdir("./data")):
121 | os.makedirs("./data")
122 |
123 | # There are several ResourceURL to a version
124 | ver = getBaseResourceURL() + "/Android/bundleDownloadInfo.json"
125 | print(ver)
126 | if (os.path.isfile("./data/version.txt")):
127 | with open("./data/version.txt", "r") as f:
128 | ver_temp = f.read()
129 | if str(ver) == str(ver_temp):
130 | print(f"[{ver}] No new update. Stopping.")
131 | exit(1)
132 | else:
133 | print(f"Update {ver_temp} to {ver}")
134 | with open("./data/version.txt", "w") as f:
135 | f.write(ver)
136 | else:
137 | with open("./data/version.txt", "w") as f:
138 | f.write(ver)
139 |
140 | # get model list
141 | model_list = getModelsList()
142 |
143 | # download list of model list
144 | for index, model in enumerate(model_list, start=1):
145 | print("="*30)
146 | print(f"{index}/{len(model_list)}")
147 | fname = model.split("/")[-1]
148 | destDownload = f"./downloaded_resource/{fname}"
149 |
150 | print(fname)
151 |
152 | # skip if already exists
153 | if option["skipExistingDownloadedResource"] and os.path.isfile(destDownload):
154 | print("Already downloaded. Skipping.")
155 | continue
156 |
157 | # spinebackground, spinecharacters and spinelobbies only
158 | character_name = ''.join(fname.split("spinecharacters-")[1].split("-")[0] if "spinecharacters" in fname else fname.split(
159 | "spinelobbies-")[1].split("-")[0] if "spinelobbies" in fname else fname.split("spinebackground-")[1].split("-")[0])
160 | destExtract = f"./assets/spine/{character_name}"
161 |
162 | # skip if already exists
163 | if option["skipExistingAssets"] and os.path.isfile(destExtract):
164 | print("Already extracted. Skipping.")
165 | continue
166 |
167 | if not (os.path.isdir(destExtract)):
168 | os.makedirs(destExtract)
169 |
170 | downloadFile(model, destDownload)
171 | # extract
172 | try:
173 | extractCharacter(destDownload, destExtract)
174 | except:
175 | print("Error occured. Skipping.")
176 | import traceback
177 | traceback.print_exc()
178 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | requests==2.28.1
2 | UnityPy==1.9.24
--------------------------------------------------------------------------------