├── .gitattributes ├── .github └── workflows │ ├── run.yml │ └── tick.yml ├── .gitignore ├── README.md ├── main.py └── requirements.txt /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto eol=lf 2 | /**/*.gz linguist-generated=true 3 | /**/*.msgpack linguist-generated=true 4 | /**/*.min.json linguist-generated=true 5 | -------------------------------------------------------------------------------- /.github/workflows/run.yml: -------------------------------------------------------------------------------- 1 | name: 'Run' 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | version: 7 | description: 'Version' 8 | type: string 9 | commit: 10 | description: 'Commit and push' 11 | type: boolean 12 | required: true 13 | default: false 14 | export: 15 | description: 'Export' 16 | type: string 17 | required: true 18 | default: 'all' 19 | force: 20 | description: 'Undo and force' 21 | type: boolean 22 | required: true 23 | default: false 24 | undo: 25 | description: 'Reset to version' 26 | type: string 27 | 28 | jobs: 29 | run: 30 | runs-on: ubuntu-latest 31 | steps: 32 | - uses: actions/checkout@v2 33 | 34 | - uses: actions/setup-python@v2 35 | with: 36 | python-version: '3.x' 37 | 38 | - run: pip install -r requirements.txt 39 | 40 | - uses: actions/setup-java@v2 41 | with: 42 | distribution: 'temurin' 43 | java-version: '21' 44 | 45 | - uses: actions/cache@v3 46 | with: 47 | path: '.cache' 48 | key: mcmeta-${{ inputs.version }} 49 | restore-keys: | 50 | mcmeta- 51 | 52 | - run: python main.py${{ inputs.force && format(' --undo {0}', inputs.undo) || '' }}${{ inputs.version && format(' --version {0}', inputs.version) || '' }} --fetch --export ${{ inputs.export }}${{ inputs.commit && ' --commit --push' || '' }}${{ inputs.force && ' --force' || '' }} 53 | env: 54 | github-username: misode 55 | github-token: ${{ secrets.GITHUB_TOKEN }} 56 | github-repository: misode/mcmeta 57 | -------------------------------------------------------------------------------- /.github/workflows/tick.yml: -------------------------------------------------------------------------------- 1 | name: 'Tick' 2 | 3 | on: 4 | schedule: 5 | - cron: '*/15 * * * *' 6 | workflow_dispatch: 7 | 8 | jobs: 9 | check: 10 | runs-on: ubuntu-latest 11 | outputs: 12 | id: ${{ steps.check.outputs.id }} 13 | steps: 14 | - id: check 15 | name: Check Minecraft updates 16 | uses: ByMartrixX/minecraft-update-check-action@v0 17 | with: 18 | version-manifest-url: 'https://piston-meta.mojang.com/mc/game/version_manifest_v2.json' 19 | cache-base-key: 'mc-manifest-' 20 | 21 | update: 22 | needs: check 23 | if: ${{ needs.check.outputs.id != '' }} 24 | strategy: 25 | fail-fast: false 26 | matrix: 27 | include: 28 | - export: assets 29 | - export: assets-json 30 | - export: assets-tiny 31 | - export: data 32 | - export: data-json 33 | - export: registries 34 | - export: diff 35 | - export: atlas 36 | name: 'update-${{ matrix.export }}' 37 | runs-on: ubuntu-latest 38 | steps: 39 | - uses: actions/checkout@v2 40 | 41 | - uses: actions/setup-python@v2 42 | with: 43 | python-version: '3.x' 44 | 45 | - run: pip install -r requirements.txt 46 | 47 | - uses: actions/setup-java@v2 48 | with: 49 | distribution: 'temurin' 50 | java-version: '21' 51 | 52 | - uses: actions/cache@v3 53 | with: 54 | path: '.cache' 55 | key: mcmeta-${{ matrix.export }}-${{ needs.check.outputs.id }} 56 | restore-keys: | 57 | mcmeta-${{ matrix.export }}- 58 | 59 | - run: python main.py --version ${{ needs.check.outputs.id }} 
--fetch --export ${{ matrix.export }} --commit --push 60 | env: 61 | github-username: misode 62 | github-token: ${{ secrets.GITHUB_TOKEN }} 63 | github-repository: misode/mcmeta 64 | 65 | update-summary: 66 | needs: ['check', 'update'] 67 | runs-on: ubuntu-latest 68 | steps: 69 | - uses: actions/checkout@v2 70 | 71 | - uses: actions/setup-python@v2 72 | with: 73 | python-version: '3.x' 74 | 75 | - run: pip install -r requirements.txt 76 | 77 | - uses: actions/setup-java@v2 78 | with: 79 | distribution: 'temurin' 80 | java-version: '21' 81 | 82 | - uses: actions/cache@v3 83 | with: 84 | path: '.cache' 85 | key: mcmeta-summary-${{ needs.check.outputs.id }} 86 | restore-keys: | 87 | mcmeta-summary- 88 | 89 | - run: python main.py --version ${{ needs.check.outputs.id }} --fetch --export summary --commit --push 90 | env: 91 | github-username: misode 92 | github-token: ${{ secrets.GITHUB_TOKEN }} 93 | github-repository: misode/mcmeta 94 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | assets/ 2 | assets-json/ 3 | assets-tiny/ 4 | atlas/ 5 | data/ 6 | data-json/ 7 | diff/ 8 | generated/ 9 | libraries/ 10 | logs/ 11 | resources/ 12 | registries/ 13 | summary/ 14 | tmp/ 15 | versions/ 16 | *.jar 17 | *.zip 18 | eula.txt 19 | server.properties 20 | versions.json 21 | .env 22 | .cache 23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # mcmeta 2 | > Processed, version-controlled history of Minecraft's generated data and assets 3 | 4 | ## Repository structure 5 | Each of the following branches has a commit per version, starting from 1.14 and updated up to the latest snapshot. Each commit is tagged `<version>-<branch>`; an example of fetching a single branch or report follows the list. 6 | 7 | * [**summary**](https://github.com/misode/mcmeta/tree/summary) - Branch with condensed reports from the data generator and the assets, in a variety of formats. 8 | * [**blocks**](https://github.com/misode/mcmeta/blob/summary/blocks/data.json) - Block state properties and default states for all blocks. 9 | * [**commands**](https://github.com/misode/mcmeta/blob/summary/commands/data.json) - The Brigadier command tree. 10 | * [**item_components**](https://github.com/misode/mcmeta/blob/summary/item_components/data.json) - The default item components added in 1.20.5. 11 | * [**registries**](https://github.com/misode/mcmeta/blob/summary/registries/data.json) - Collections of resource locations, including the generated registries, data, and assets. 12 | * [**sounds**](https://github.com/misode/mcmeta/blob/summary/sounds/data.json) - The sounds.json from the assets. 13 | * [**versions**](https://github.com/misode/mcmeta/blob/summary/versions/data.json) - A list of versions up to that point, ordered with the most recent first. Each entry has the same format as the `version.json` at the root of each branch. 14 | * [**registries**](https://github.com/misode/mcmeta/tree/registries) - The same registries from **summary**, but in a separate file per registry key. 15 | * [**data**](https://github.com/misode/mcmeta/tree/data) - The vanilla data as it would appear in a data pack. 16 | * [**data-json**](https://github.com/misode/mcmeta/tree/data-json) - The same as **data**, but only containing JSON files, so excluding structures. 17 | * [**assets**](https://github.com/misode/mcmeta/tree/assets) - The vanilla assets as they would appear in a resource pack. 18 | * [**assets-json**](https://github.com/misode/mcmeta/tree/assets-json) - The same as **assets**, but only containing JSON files, so excluding textures, sounds, and shaders. 19 | * [**assets-tiny**](https://github.com/misode/mcmeta/tree/assets-tiny) - The same as **assets**, but only containing files from the jar, so excluding sounds and non-English languages. 20 | * [**diff**](https://github.com/misode/mcmeta/tree/diff) - A combination of **assets**, **data**, and **summary**, made to be easily viewable as a diff. 21 | * [**atlas**](https://github.com/misode/mcmeta/tree/atlas) - Texture atlases of blocks, items, and entities.
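Because every export lives on its own branch and every version gets a tag, a single slice of the data can be fetched without cloning the full history. A minimal sketch (the raw URL layout and the `1.21-data` tag are assumptions based on the structure above; substitute any branch, tag, or file path you need):

```sh
# Fetch one condensed report directly from the summary branch
curl -sO https://raw.githubusercontent.com/misode/mcmeta/summary/registries/data.min.json

# Shallow-clone a single export branch pinned to a version tag (illustrative tag name)
git clone --depth 1 --branch 1.21-data https://github.com/misode/mcmeta.git mcmeta-data
```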
22 | 23 | ## Sources 24 | * [Version manifest](https://piston-meta.mojang.com/mc/game/version_manifest_v2.json), a list of versions and their metadata; the client and server jars are downloaded by following its links 25 | * Sound files from Mojang's API, by following the asset index linked from the version manifest 26 | * The data generator, using one of the following commands (depending on the version): 27 | ```sh 28 | java -cp server.jar net.minecraft.data.Main --reports 29 | java -DbundlerMainClass=net.minecraft.data.Main -jar server.jar --reports 30 | ``` 31 | * Slicedlime's [examples repo](https://github.com/slicedlime/examples) for worldgen changes before 1.18-pre1 32 | 33 | ## Credits 34 | This project has taken inspiration from [Arcensoth/mcdata](https://github.com/Arcensoth/mcdata) and [SPGoding/vanilla-datapack](https://github.com/SPGoding/vanilla-datapack). 35 | 36 | ## Disclaimer 37 | *mcmeta is not an official Minecraft product, and is not endorsed by or associated with Mojang Studios. All data and assets were obtained through Mojang's internal data generator and public API. If Mojang ever has something against this data existing here, the repository will be promptly removed.* 38 | -------------------------------------------------------------------------------- /main.py: -------------------------------------------------------------------------------- 1 | # MIT License 2 | # 3 | # Copyright (c) 2022 Misode 4 | # 5 | # Permission is hereby granted, free of charge, to any person obtaining a copy 6 | # of this software and associated documentation files (the "Software"), to deal 7 | # in the Software without restriction, including without limitation the rights 8 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | # copies of the Software, and to permit persons to whom the Software is 10 | # furnished to do so, subject to the following conditions: 11 | # 12 | # The above copyright notice and this permission notice shall be included in all 13 | # copies or substantial portions of the Software. 14 | # 15 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | # SOFTWARE.
22 | 23 | import click 24 | import requests 25 | import requests.auth 26 | import zipfile 27 | import subprocess 28 | import json 29 | import os 30 | import os.path 31 | import glob 32 | import msgpack 33 | import gzip 34 | import shutil 35 | import dotenv 36 | import datetime 37 | import re 38 | import time 39 | import image_packer.packer 40 | import nbtlib 41 | import multiprocessing 42 | import traceback 43 | 44 | EXPORTS = ('assets', 'assets-json', 'assets-tiny', 'data', 'data-json', 'summary', 'registries', 'atlas', 'diff') 45 | 46 | APRIL_FOOLS = ('15w14a', '3D Shareware v1.34', '20w14infinite', '22w13oneblockatatime', '23w13a_or_b', '24w14potato', '25w14craftmine') 47 | 48 | @click.command() 49 | @click.option('--version', '-v') 50 | @click.option('--file', '-f', type=click.File(), help='Custom version JSON file') 51 | @click.option('--reset', is_flag=True, help='Whether to reset the exports') 52 | @click.option('--fetch', is_flag=True, help='Whether to fetch from the remote at the start') 53 | @click.option('--undo', help='The version to reset to') 54 | @click.option('--commit', is_flag=True, help='Whether to commit the exports') 55 | @click.option('--export', '-e', multiple=True, default=tuple(), type=click.Choice([*EXPORTS, 'all'], case_sensitive=True)) 56 | @click.option('--fixtags', is_flag=True, help='Whether to fix all the tags') 57 | @click.option('--push', is_flag=True, help='Whether to push to the remote after each commit') 58 | @click.option('--force', is_flag=True, help='Whether to force push') 59 | @click.option('--branch', help='The export branch prefix to use') 60 | def main(version: str | None, file: str | None, reset: bool, fetch: bool, undo: str | None, commit: bool, export: tuple[str], fixtags: bool, push: bool, force: bool, branch: str | None): 61 | dotenv.load_dotenv() 62 | if 'all' in export: 63 | export = EXPORTS 64 | 65 | versions = retry(fetch_versions, version, file) 66 | 67 | # process and commit each version in the range 68 | process_versions = expand_version_range(version, versions) 69 | n = len(process_versions) 70 | start_date = versions[process_versions[0]]['releaseTime'] if process_versions else None 71 | init_exports(start_date, reset, fetch, undo, export, branch) 72 | 73 | try: 74 | os.remove('versions.json') 75 | except OSError: 76 | pass 77 | 78 | if process_versions: 79 | click.echo(f'📃 Processing versions: {", ".join(process_versions)}') 80 | t0 = time.time() 81 | for i, v in enumerate(process_versions): 82 | click.echo(f'🚧 Processing {v}...') 83 | t1 = time.time() 84 | try: 85 | process(v, versions, export) 86 | except ValueError as e: 87 | click.echo(f'💥 Failed to process {v}: {e}') 88 | traceback.print_exc() 89 | return 90 | 91 | if commit: 92 | create_commit(v, versions[v]['releaseTime'], push, force, export, branch) 93 | t2 = time.time() 94 | if n == 1: 95 | click.echo(f'✅ Done {v} ({format_time(t2 - t1)})') 96 | else: 97 | remaining = t2 - t0 + int(t2 - t1) * (n - i - 1) 98 | click.echo(f'✅ Done {v} ({i+1} / {n}) {format_time(t2 - t1)} ({format_time(t2 - t0)} / {format_time(remaining)})') 99 | 100 | if fixtags: 101 | fix_tags(export, branch) 102 | 103 | if (not version or fixtags) and push: 104 | create_commit(None, None, push, force, export, branch) 105 | 106 | 107 | def format_time(seconds: float | int): 108 | seconds = int(seconds) 109 | if seconds <= 60: 110 | return f'{seconds}s' 111 | minutes = int(seconds/60) 112 | if minutes <= 60: 113 | return f'{minutes}m {seconds % 60}s' 114 | return f'{int(minutes/60)}h {minutes%60}m 
{seconds%60}s' 115 | 116 | 117 | def fetch_versions(version: str | None, file: str | None): 118 | # === fetch manifest === 119 | manifest = requests.get('https://piston-meta.mojang.com/mc/game/version_manifest_v2.json').json() 120 | for v in manifest['versions']: 121 | v['id'] = v['id'].replace(' Pre-Release ', '-pre') 122 | version_ids = [v['id'] for v in manifest['versions']] 123 | 124 | # Fix version order anomaly around 1.16.5 125 | v1165 = version_ids.index('1.16.5') 126 | v20w51a = version_ids.index('20w51a') 127 | v1164 = version_ids.index('1.16.4') 128 | version_ids = [*version_ids[:v1165], *version_ids[v20w51a:v1164], *version_ids[v1165:v20w51a], *version_ids[v1164:]] 129 | 130 | unordered_versions = { v['id']: dict(**v, index=version_ids.index(v['id'])) for v in manifest['versions'] } 131 | versions = { v: unordered_versions[v] for v in version_ids } 132 | 133 | if file: 134 | assert version 135 | launchermeta = json.load(file) 136 | versions[version] = { 137 | 'id': version, 138 | 'type': launchermeta['type'], 139 | 'url': launchermeta, 140 | 'releaseTime': launchermeta['releaseTime'], 141 | 'sha1': 'unknown', 142 | 'index': -1, 143 | } 144 | 145 | if version: 146 | if '..' in version: 147 | start, end = version.split('..') 148 | if start not in versions: 149 | raise ValueError(f'Version {start} not in versions list') 150 | if end not in versions: 151 | raise ValueError(f'Version {end} not in versions list') 152 | elif version not in versions: 153 | raise ValueError(f'Version {version} not in versions list') 154 | 155 | return versions 156 | 157 | 158 | def expand_version_range(version: str | None, versions: dict[str]): 159 | if version is None: 160 | return [] 161 | version_ids = list(versions.keys()) 162 | if '..' in version: 163 | start, end = version.split('..') 164 | start_i = version_ids.index(start) 165 | end_i = version_ids.index(end) 166 | if end_i > start_i: 167 | click.echo('❗ No versions in range') 168 | return [] 169 | return [version_ids[i] for i in range(start_i, end_i - 1, -1) if version_ids[i] not in APRIL_FOOLS] 170 | else: 171 | return [version] 172 | 173 | 174 | def get_version_meta(version: str, versions: dict[str], jar: str = None): 175 | def create_version_meta(): 176 | os.makedirs('tmp', exist_ok=True) 177 | 178 | def get_version_json(): 179 | if not jar: 180 | launchermeta = json.loads(fetch_meta('versionmeta', versions[version]).decode('utf-8')) 181 | client = fetch_meta('jar', launchermeta['downloads']['client'], cache=False) 182 | jar_path = 'tmp/client.jar' 183 | with open(jar_path, 'wb') as f: 184 | f.write(client) 185 | else: 186 | jar_path = jar 187 | 188 | with zipfile.ZipFile(jar_path, 'r') as f: 189 | f.extract('version.json', 'tmp') 190 | 191 | with open('tmp/version.json', 'r') as f: 192 | return json.load(f) 193 | 194 | data = retry(get_version_json) 195 | 196 | pack = data['pack_version'] 197 | 198 | meta = { 199 | 'id': version, 200 | 'name': data['name'], 201 | 'release_target': data.get('release_target', None), 202 | 'type': versions[version]['type'], 203 | 'stable': data['stable'], 204 | 'data_version': data['world_version'], 205 | 'protocol_version': data['protocol_version'], 206 | 'data_pack_version': pack if type(pack) == int else pack['data'], 207 | 'resource_pack_version': pack if type(pack) == int else pack['resource'], 208 | 'build_time': data['build_time'], 209 | 'release_time': versions[version]['releaseTime'], 210 | 'sha1': versions[version]['sha1'] 211 | } 212 | 213 | return json.dumps(meta, indent=None).encode('utf-8') 214 | 
215 | return json.loads(cache(f'version-{versions[version]["sha1"]}', create_version_meta).decode('utf-8')) 216 | 217 | 218 | def process(version: str, versions: dict[str], exports: tuple[str]): 219 | version_ids = list(versions.keys()) 220 | 221 | # === fetch version jars === 222 | click.echo(' ⬇️ Downloading version') 223 | launchermeta_bytes = retry(fetch_meta, 'versionmeta', versions[version]) 224 | launchermeta = json.loads(launchermeta_bytes.decode('utf-8')) 225 | 226 | for side in ['server', 'client']: 227 | side_content = retry(fetch_meta, 'jar', launchermeta['downloads'][side], cache=False) 228 | with open(f'{side}.jar', 'wb') as f: 229 | f.write(side_content) 230 | 231 | # === extract client jar === 232 | shutil.rmtree('assets/assets', ignore_errors=True) 233 | shutil.rmtree('assets-json/assets', ignore_errors=True) 234 | shutil.rmtree('assets-tiny/assets', ignore_errors=True) 235 | shutil.rmtree('data/data', ignore_errors=True) 236 | shutil.rmtree('data-json/data', ignore_errors=True) 237 | with zipfile.ZipFile('client.jar', 'r') as jar: 238 | for file in jar.namelist(): 239 | if file.endswith('.mcassetsroot'): 240 | continue 241 | if file.endswith('pack.mcmeta'): 242 | jar.extract(file, 'data') 243 | for part in ['assets', 'data']: 244 | if file.startswith(f'{part}/'): 245 | jar.extract(file, part) 246 | if f'{part}-json' in exports and file.endswith('.json'): 247 | jar.extract(file, f'{part}-json') 248 | if part == 'assets' and 'assets-tiny' in exports: 249 | jar.extract(file, f'{part}-tiny') 250 | 251 | # === update version metas === 252 | click.echo(' 🏷️ Updating versions') 253 | try: 254 | with open('versions.json', 'r') as f: 255 | version_metas = json.load(f) 256 | except: 257 | version_metas = [] 258 | 259 | if version not in [v['id'] for v in version_metas]: 260 | version_metas.append(get_version_meta(version, versions, 'client.jar')) 261 | has_version_ids = [v['id'] for v in version_metas] 262 | if 'summary' in exports: 263 | for v in expand_version_range(f'1.14..{version}', versions): 264 | if v not in has_version_ids: 265 | version_metas.append(get_version_meta(v, versions)) 266 | version_metas.sort(key=lambda v: versions[v['id']]['index']) 267 | version_meta = next(v for v in version_metas if v['id'] == version) 268 | 269 | with open('versions.json', 'w') as f: 270 | json.dump(version_metas, f) 271 | 272 | # === reconstruct data pack.mcmeta === 273 | if versions[version]['index'] <= versions['20w45a']['index']: 274 | pack = { 275 | 'pack': { 276 | 'description': 'The default data for Minecraft', 277 | 'pack_format': version_meta['data_pack_version'] 278 | } 279 | } 280 | for e in ['data', 'data-json']: 281 | os.makedirs(e, exist_ok=True) 282 | with open(f'{e}/pack.mcmeta', 'w') as f: 283 | json.dump(pack, f, indent=4) 284 | 285 | # === run data generators === 286 | if (versions[version]['index'] > versions['22w42a']['index'] and ('data' in exports or 'data-json' in exports)) or 'summary' in exports or 'registries' in exports or 'diff' in exports: 287 | click.echo(' ⚙️ Running data generator') 288 | shutil.rmtree('generated', ignore_errors=True) 289 | if versions[version]['index'] <= versions['21w39a']['index']: 290 | subprocess.run(['java', '-DbundlerMainClass=net.minecraft.data.Main', '-jar', 'server.jar', '--reports'], capture_output=True) 291 | else: 292 | subprocess.run(['java', '-cp', 'server.jar', 'net.minecraft.data.Main', '--reports'], capture_output=True) 293 | 294 | # === get vanilla worldgen === 295 | if 'data' in exports or 'data-json' in exports or 
'summary' in exports or 'registries' in exports or 'diff' in exports: 296 | if versions[version]['index'] <= versions['22w42a']['index']: 297 | pass 298 | elif versions[version]['index'] <= versions['22w19a']['index']: 299 | shutil.copytree('generated/reports/minecraft', 'data/data/minecraft', dirs_exist_ok=True) 300 | shutil.copytree('generated/reports/minecraft', 'data-json/data/minecraft', dirs_exist_ok=True) 301 | elif versions[version]['index'] <= versions['1.18-pre1']['index']: 302 | shutil.copytree('generated/reports/worldgen', 'data/data', dirs_exist_ok=True) 303 | shutil.copytree('generated/reports/worldgen', 'data-json/data', dirs_exist_ok=True) 304 | elif versions[version]['index'] <= versions['20w28a']['index']: 305 | click.echo(' ⬇️ Downloading vanilla worldgen') 306 | username = os.getenv('github-username') 307 | token = os.getenv('github-token') 308 | auth = requests.auth.HTTPBasicAuth(username, token) if username and token else None 309 | headers = { 'Accept': 'application/vnd.github.v3+json' } 310 | released = datetime.datetime.fromisoformat(versions[version]['releaseTime']) 311 | released += datetime.timedelta(days=1) 312 | res = requests.get(f'https://api.github.com/repos/slicedlime/examples/commits?until={released.isoformat()}', headers=headers, auth=auth) 313 | click.echo(f' Remaining GitHub requests: {res.headers["X-RateLimit-Remaining"]}/{res.headers["X-RateLimit-Limit"]}') 314 | commits = res.json() 315 | if 'message' in commits: 316 | raise ValueError(f'Cannot get vanilla worldgen: {commits["message"]}') 317 | for id in version_ids[versions[version]['index']:]: 318 | sha = next((c['sha'] for c in commits if re.match(f'Update to {id}\\.?$', c['commit']['message'])), None) 319 | if sha is None and id == '20w28a': 320 | sha = 'd304a1dcf330005e617a78cef4e492ab3e2c09b0' 321 | if sha: 322 | content = retry(fetch, f'slicedlime-{sha}', f'https://raw.githubusercontent.com/slicedlime/examples/{sha}/vanilla_worldgen.zip') 323 | with open('vanilla_worldgen.zip', 'wb') as f: 324 | f.write(content) 325 | zip = zipfile.ZipFile('vanilla_worldgen.zip', 'r') 326 | zip.extractall('data/data/minecraft') 327 | zip.extractall('data-json/data/minecraft') 328 | break 329 | 330 | # === reconstruct dimensions === 331 | if 'data' in exports or 'data-json' in exports or 'summary' in exports or 'diff' in exports: 332 | if versions[version]['index'] <= versions['22w11a']['index'] and not os.path.isdir('data/data/minecraft/dimension'): 333 | with open('data/data/minecraft/worldgen/world_preset/normal.json', 'r') as f: 334 | world_preset = json.load(f) 335 | for key, dimension in world_preset['dimensions'].items(): 336 | preset = dimension['generator'].get('biome_source', dict()).get('preset', '') 337 | try: 338 | with open(f'generated/reports/biome_parameters/{preset.replace(":", "/")}.json', 'r') as f: 339 | parameters = json.load(f) 340 | if parameters: 341 | parameters['type'] = 'minecraft:multi_noise' 342 | dimension['generator']['biome_source'] = parameters 343 | except: 344 | pass 345 | for e in ['data', 'data-json']: 346 | os.makedirs(f'{e}/data/minecraft/dimension/', exist_ok=True) 347 | with open(f'{e}/data/minecraft/dimension/{key.removeprefix("minecraft:")}.json', 'w') as f: 348 | json.dump(dimension, f, indent=2) 349 | 350 | # === stabilize ordering in some data files === 351 | if 'data' in exports or 'data-json' in exports or 'summary' in exports or 'diff' in exports: 352 | reorders = [ 353 | ('advancements/adventure/adventuring_time', 354 | [('criteria', None), ('requirements', 
lambda e: e[0])]), 355 | ('advancements/husbandry/complete_catalogue', 356 | [('criteria', None), ('requirements', lambda e: e[0])]), 357 | ('advancements/nether/all_effects', 358 | [('criteria.all_effects.conditions.effects', None)]), 359 | ('advancements/nether/all_potions', 360 | [('criteria.all_effects.conditions.effects', None)]), 361 | ('loot_tables/chests/shipwreck_supply', 362 | [('pools.0.entries.[name=minecraft:suspicious_stew].functions.0.effects', lambda e: e['type'])]), 363 | ('loot_tables/chests/ancient_city_ice_box', 364 | [('pools.0.entries.[name=minecraft:suspicious_stew].functions.0.effects', lambda e: e['type'])]), 365 | ('loot_tables/gameplay/hero_of_the_village/fletcher_gift', 366 | [('pools.0.entries', lambda e: (e.get('functions')[-1].get('tag') or e.get('functions')[-1].get('id')) if e.get('functions') else e.get('name'))]), 367 | ('worldgen/noise_settings/*', [('structures.structures', None)]), 368 | ('worldgen/noise_settings/*', [('structures', None)]), 369 | ('worldgen/configured_structure_feature/*', [('spawn_overrides', None)]), 370 | ('worldgen/structure/*', [('spawn_overrides', None)]), 371 | ('worldgen/flat_level_generator_preset/*', [('settings.structure_overrides', None)]), 372 | ('worldgen/world_preset/*', [('dimensions', None)]), 373 | ] 374 | 375 | for filepath, sorts in reorders: 376 | for file in glob.glob(f'data/data/minecraft/{filepath}.json'): 377 | with open(file, 'r') as f: 378 | root = json.load(f) 379 | 380 | for path, order in sorts: 381 | *parts, last = [int(p) if re.match('\\d+', p) else p for p in path.split('.')] 382 | node = root 383 | for p in parts: 384 | if node is None: 385 | break 386 | if type(p) == str and p.startswith('['): 387 | key, value = p[1:-1].split('=') 388 | node = next((e for e in node if key in e and e[key] == value), None) 389 | elif type(node) == list: 390 | node = node[p] 391 | elif hasattr(node, 'get'): 392 | node = node.get(p, None) 393 | else: 394 | node = None 395 | if node is None or last not in node: 396 | break 397 | if type(node[last]) == dict: 398 | node[last] = dict(sorted(node[last].items(), key=order)) 399 | elif type(node[last]) == list: 400 | node[last] = sorted(node[last], key=order) 401 | 402 | needs_export = set(['data', 'data-json']).intersection(exports) 403 | if 'diff' in exports: 404 | needs_export.add('data') 405 | for export in needs_export: 406 | with open(f'{export}{file.removeprefix("data")}', 'w') as f: 407 | json.dump(root, f, indent=2) 408 | 409 | # === download resources === 410 | if 'assets' in exports or 'assets-json' in exports or 'summary' in exports or 'diff' in exports: 411 | click.echo(' 🔊 Downloading assets') 412 | assets_hash = launchermeta['assetIndex']['sha1'] 413 | assets_url = launchermeta['assetIndex']['url'] 414 | assets_bytes = retry(fetch, f'assets-{assets_hash}', assets_url) 415 | assets = json.loads(assets_bytes.decode('utf-8')) 416 | 417 | click.echo(f' Downloading {len(assets["objects"])} resources') 418 | shutil.rmtree('resources', ignore_errors=True) 419 | os.makedirs('resources', exist_ok=True) 420 | with multiprocessing.Pool(20) as pool: 421 | pool.map(download_resource, assets['objects'].items()) 422 | 423 | for export, pattern in [('assets', '*.*'), ('assets-json', '*.json')]: 424 | if export in exports or (export == 'assets' and ('diff' in exports or 'summary' in exports)): 425 | for path in glob.glob(f'resources/**/{pattern}', recursive=True): 426 | if path.endswith('hash.txt') or path.endswith('pack.mcmeta'): 427 | continue 428 | target = 
f'{export}/assets{path.removeprefix("resources")}' 429 | os.makedirs(os.path.normpath(os.path.join(target, '..')), exist_ok=True) 430 | shutil.copyfile(path, target) 431 | shutil.copyfile('resources/pack.mcmeta', f'{export}/pack.mcmeta') 432 | 433 | if 'summary' in exports or 'diff' in exports: 434 | with open(f'resources/minecraft/sounds.json', 'r') as f: 435 | sounds: dict = json.load(f) 436 | 437 | # === collect summary of registries === 438 | if 'summary' in exports or 'registries' in exports or 'diff' in exports: 439 | click.echo(' 🔎 Collect registries') 440 | registries = dict() 441 | contents = dict() 442 | if os.path.isfile('generated/reports/registries.json'): 443 | with open('generated/reports/registries.json', 'r') as f: 444 | for key, data in json.load(f).items(): 445 | entries = [e.removeprefix('minecraft:') for e in data['entries'].keys()] 446 | registries[key.removeprefix('minecraft:')] = sorted(entries) 447 | 448 | def add_file_registry(id: str, path: str, ext: str = 'json'): 449 | files = glob.glob(f'{path}/**/*.{ext}', recursive=True) 450 | entries = [e.replace('\\', '/', -1).removeprefix(f'{path}/').removesuffix(f'.{ext}') for e in files] 451 | registries[id] = sorted(entries) 452 | if ext == 'json': 453 | content = dict() 454 | for i, file in enumerate(files): 455 | try: 456 | with open(file, 'r', encoding='utf-8') as f: 457 | content[entries[i]] = json.load(f) 458 | except BaseException as e: 459 | click.echo(f' ⚠️ Failed to read file {file}: {e}') 460 | contents[id] = content 461 | 462 | def add_folder_registry(id: str, path: str): 463 | files = glob.glob(f'{path}/*/') 464 | entries = [e.replace('\\', '/', -1).removeprefix(f'{path}/').removesuffix('/') for e in files] 465 | registries[id] = sorted(entries) 466 | 467 | registry_overrides = { 468 | 'advancements': 'advancement', 469 | 'loot_tables': 'loot_table', 470 | 'recipes': 'recipe', 471 | 'structures': 'structure', 472 | 'tag/blocks': 'tag/block', 473 | 'tag/entity_types': 'tag/entity_type', 474 | 'tag/fluids': 'tag/fluid', 475 | 'tag/game_events': 'tag/game_event', 476 | 'tag/items': 'tag/item', 477 | } 478 | 479 | experiments = [ 480 | e.replace('\\', '/', -1).removeprefix('data/data/minecraft/datapacks/').removesuffix('/') 481 | for e in glob.glob(f'data/data/minecraft/datapacks/*/') 482 | ] 483 | 484 | for experiment in [None, *experiments]: 485 | experiment_pattern = f'datapacks/{experiment}/data/minecraft/' if experiment else '' 486 | for pattern in ['', 'worldgen/', 'tags/', 'tags/worldgen/']: 487 | full_pattern = f'data/data/minecraft/{experiment_pattern}{pattern}' 488 | types = [ 489 | e.replace('\\', '/', -1).removeprefix(full_pattern).removesuffix('/') 490 | for e in glob.glob(f'{full_pattern}*/') 491 | ] 492 | for typ in [t for t in types if t not in ['tags', 'worldgen', 'datapacks']]: 493 | registry_key = (pattern + typ).replace('tags/', 'tag/') 494 | registry_key = registry_overrides.get(registry_key, registry_key) 495 | output_key = registry_key if experiment is None else f'experiment/{experiment}/{registry_key}' 496 | extension = 'nbt' if (pattern == '' and typ in ('structures','structure')) else 'json' 497 | add_file_registry(output_key, full_pattern + typ, extension) 498 | 499 | add_folder_registry('datapack', 'data/data/minecraft/datapacks') 500 | 501 | asset_registries = { 502 | 'atlases': 'atlas', 503 | 'blockstates': 'block_definition', 504 | 'equipment': 'equipment', 505 | 'font': 'font', 506 | 'items': 'item_definition', 507 | 'lang': 'lang', 508 | 'models': 'model', 509 | 'post_effect': 
'post_effect', 510 | } 511 | 512 | for path, key in asset_registries.items(): 513 | add_file_registry(key, f'assets/assets/minecraft/{path}') 514 | 515 | add_file_registry('resourcepack', 'assets/assets/minecraft/resourcepacks', 'zip') 516 | add_file_registry('sound', 'assets/assets/minecraft/sounds', 'ogg') 517 | add_file_registry('texture', 'assets/assets/minecraft/textures', 'png') 518 | 519 | registries['lang'] = [e for e in registries['lang'] if e != "deprecated"] 520 | 521 | # === simplify blocks report === 522 | if 'summary' in exports or 'diff' in exports: 523 | blocks = dict() 524 | block_definitions = dict() 525 | item_components = dict() 526 | if os.path.isfile('generated/reports/blocks.json'): 527 | with open('generated/reports/blocks.json', 'r') as f: 528 | for key, data in json.load(f).items(): 529 | properties = data.get('properties') 530 | if properties: 531 | default = next(s.get('properties') for s in data['states'] if s.get('default')) 532 | blocks[key.removeprefix('minecraft:')] = (properties, default) 533 | else: 534 | blocks[key.removeprefix('minecraft:')] = ({}, {}) 535 | definition = data.get('definition') 536 | if definition: 537 | block_definitions[key.removeprefix('minecraft:')] = definition 538 | if os.path.isfile('generated/reports/items.json'): 539 | with open('generated/reports/items.json', 'r') as f: 540 | for key, data in json.load(f).items(): 541 | components = data.get('components') 542 | if components: 543 | item_components[key.removeprefix('minecraft:')] = components 544 | 545 | # === read commands report === 546 | if 'summary' in exports or 'diff' in exports: 547 | commands = dict() 548 | if os.path.isfile('generated/reports/commands.json'): 549 | with open('generated/reports/commands.json', 'r') as f: 550 | commands = json.load(f) 551 | 552 | click.echo(' 🚚 Exporting') 553 | 554 | # === export summary === 555 | def create_summary(data, path, clear=True, bin=True): 556 | if clear: 557 | shutil.rmtree(path, ignore_errors=True) 558 | os.makedirs(path, exist_ok=True) 559 | with open(f'{path}/data.json', 'w') as f: 560 | json.dump(data, f, indent=2) 561 | f.write('\n') 562 | with open(f'{path}/data.min.json', 'w') as f: 563 | json.dump(data, f, separators=(',', ':')) 564 | f.write('\n') 565 | if bin: 566 | with open(f'{path}/data.msgpack', 'wb') as f: 567 | f.write(msgpack.packb(data)) 568 | with open(f'{path}/data.json.gz', 'wb') as f: 569 | f.write(gzip.compress(json.dumps(data).encode('utf-8'), mtime=0)) 570 | with open(f'{path}/data.msgpack.gz', 'wb') as f: 571 | f.write(gzip.compress(msgpack.packb(data), mtime=0)) 572 | 573 | if 'summary' in exports: 574 | create_summary(dict(sorted(registries.items())), 'summary/registries') 575 | create_summary(dict(sorted(blocks.items())), 'summary/blocks') 576 | create_summary(dict(sorted(block_definitions.items())), 'summary/block_definitions') 577 | create_summary(dict(sorted(item_components.items())), 'summary/item_components') 578 | create_summary(dict(sorted(sounds.items())), 'summary/sounds') 579 | create_summary(commands, 'summary/commands') 580 | create_summary(version_metas, 'summary/versions') 581 | 582 | for key in contents: 583 | part = 'assets' if key in asset_registries.values() else 'data' 584 | create_summary(dict(sorted(contents[key].items())), f'summary/{part}/{key}', bin=True) 585 | 586 | with open(f'summary/version.txt', 'w') as f: 587 | f.write(version + '\n') 588 | 589 | # === create texture atlas === 590 | if 'atlas' in exports: 591 | click.echo(' 🗺️ Packing textures into atlas') 592 | 
atlases = [ 593 | ('blocks', ['block'], 1024), 594 | ('items', ['item'], 512), 595 | ('entities', ['entity', 'entity/*', 'entity/*/*'], 2048), 596 | ('all', ['block', 'item', 'entity', 'entity/*', 'entity/*/*'], 2048) 597 | ] 598 | for name, folders, width in atlases: 599 | os.makedirs(f'atlas/{name}', exist_ok=True) 600 | prefix = 'assets/assets/minecraft/textures/' 601 | inputs = [f'{prefix}{f}/*.png' for f in folders] 602 | options = { 603 | 'bg_color': (0, 0, 0, 0), 604 | 'enable_auto_size': False, 605 | } 606 | image_packer.packer.pack(inputs, f'atlas/{name}/atlas.png', width, options) 607 | with open(f'atlas/{name}/atlas.json', 'r') as f: 608 | mapping = json.load(f) 609 | def key(filepath: str): 610 | return filepath.replace('\\', '/', -1).removeprefix(prefix).removesuffix('.png') 611 | mapping = { 612 | key(r['filepath']): [r['x'], r['y'], r['width'], r['height']] 613 | for r in mapping['regions'].values() 614 | } 615 | os.remove(f'atlas/{name}/atlas.json') 616 | create_summary(mapping, f'atlas/{name}', clear=False) 617 | 618 | # === create registries === 619 | if 'registries' in exports: 620 | os.makedirs('registries', exist_ok=True) 621 | for path in glob.glob(f'registries/*'): 622 | shutil.rmtree(path, ignore_errors=True) 623 | for key, entries in sorted(registries.items()): 624 | create_summary(entries, f'registries/{key}', bin=False) 625 | create_summary(sorted(registries.keys()), 'registries', clear=False, bin=False) 626 | 627 | # === create diff === 628 | if 'diff' in exports: 629 | os.makedirs('diff', exist_ok=True) 630 | 631 | shutil.rmtree('diff/data', ignore_errors=True) 632 | shutil.copytree('data/data', 'diff/data', dirs_exist_ok=True) 633 | for path in glob.glob(f'diff/data/**/*.nbt', recursive=True): 634 | nbt: nbtlib.Compound = nbtlib.load(path).root 635 | del nbt['DataVersion'] 636 | snbt = nbt.snbt(indent=2) 637 | with open(path.removesuffix('.nbt') + '.snbt', 'w') as f: 638 | f.write(snbt) 639 | f.write('\n') 640 | os.remove(path) 641 | 642 | shutil.rmtree('diff/assets', ignore_errors=True) 643 | shutil.copytree('assets/assets', 'diff/assets', dirs_exist_ok=True) 644 | shutil.rmtree('diff/assets/minecraft/lang', ignore_errors=True) 645 | os.makedirs('diff/assets/minecraft/lang', exist_ok=True) 646 | shutil.copyfile('assets/assets/minecraft/lang/en_us.json', 'diff/assets/minecraft/lang/en_us.json') 647 | shutil.copyfile('assets/assets/minecraft/lang/deprecated.json', 'diff/assets/minecraft/lang/deprecated.json') 648 | 649 | shutil.rmtree('diff/registries', ignore_errors=True) 650 | os.makedirs('diff/registries', exist_ok=True) 651 | for key, entries in sorted(registries.items()): 652 | os.makedirs(f'diff/registries/{os.path.dirname(key)}', exist_ok=True) 653 | with open(f'diff/registries/{key}.txt', 'w') as f: 654 | f.write('\n'.join(entries) + '\n') 655 | with open(f'diff/registries.txt', 'w') as f: 656 | f.write('\n'.join(sorted(registries.keys())) + '\n') 657 | 658 | shutil.rmtree('diff/commands', ignore_errors=True) 659 | os.makedirs('diff/commands', exist_ok=True) 660 | if 'children' not in commands: 661 | commands['children'] = {} 662 | for key, command in sorted(commands['children'].items()): 663 | with open(f'diff/commands/{key}.json', 'w') as f: 664 | json.dump(command, f, indent=2) 665 | f.write('\n') 666 | with open(f'diff/commands.txt', 'w') as f: 667 | f.write('\n'.join(sorted(commands['children'].keys())) + '\n') 668 | 669 | shutil.rmtree('diff/blocks', ignore_errors=True) 670 | os.makedirs('diff/blocks', exist_ok=True) 671 | for key, block in 
sorted(blocks.items()): 672 | with open(f'diff/blocks/{key}.json', 'w') as f: 673 | data = { 674 | 'definition': block_definitions.get(key, {}), 675 | 'properties': block[0], 676 | 'default': block[1], 677 | } 678 | json.dump(data, f, indent=2) 679 | f.write('\n') 680 | 681 | shutil.rmtree('diff/items', ignore_errors=True) 682 | os.makedirs('diff/items', exist_ok=True) 683 | for key, components in sorted(item_components.items()): 684 | with open(f'diff/items/{key}.json', 'w') as f: 685 | data = { 686 | 'components': item_components.get(key, []), 687 | } 688 | json.dump(data, f, indent=2) 689 | f.write('\n') 690 | 691 | # === export version.json to all === 692 | for export in exports: 693 | with open(f'{export}/version.json', 'w') as f: 694 | json.dump(version_meta, f, indent=2) 695 | f.write('\n') 696 | 697 | # === copy pack.mcmeta to json exports === 698 | for export in ['data']: 699 | if f'{export}-json' in exports: 700 | shutil.copyfile(f'{export}/pack.mcmeta', f'{export}-json/pack.mcmeta') 701 | 702 | 703 | def init_exports(start_date: str | None, reset: bool, fetch: bool, undo: str | None, exports: tuple[str], branch: str | None): 704 | for export in exports: 705 | export_branch = f'{branch}-{export}' if branch else export 706 | if reset: 707 | shutil.rmtree(export, ignore_errors=True) 708 | os.makedirs(export, exist_ok=True) 709 | os.chdir(export) 710 | subprocess.run(['git', 'init', '-q']) 711 | subprocess.run(['git', 'checkout', '-q', '-b', export_branch], capture_output=True) 712 | subprocess.run(['git', 'config', 'user.name', 'actions-user']) 713 | subprocess.run(['git', 'config', 'user.email', 'actions@github.com']) 714 | if os.getenv('github-repository'): 715 | remotes = subprocess.run(['git', 'remote'], capture_output=True).stdout.decode('utf-8').split('\n') 716 | remote = f'https://x-access-token:{os.getenv("github-token")}@github.com/{os.getenv("github-repository")}' 717 | subprocess.run(['git', 'remote', 'set-url' if 'origin' in remotes else 'add', 'origin', remote]) 718 | if fetch: 719 | subprocess.run(['git', 'fetch', '-q', '--tags', 'origin', export_branch]) 720 | subprocess.run(['git', 'reset', '-q', '--hard', f'origin/{export_branch}']) 721 | elif reset: 722 | assert start_date, 'Cannot reset without a version' 723 | shutil.copyfile('../.gitattributes', f'.gitattributes') 724 | subprocess.run(['git', 'add', '.'], capture_output=True) 725 | os.environ['GIT_AUTHOR_DATE'] = start_date 726 | os.environ['GIT_COMMITTER_DATE'] = start_date 727 | subprocess.run(['git', 'commit', '-q', '-m', f'🎉 Initial commit']) 728 | if undo: 729 | subprocess.run(['git', 'reset', '--hard', f'{undo}-{export}']) 730 | os.chdir('..') 731 | click.echo(f'🎉 Initialized {export} branch') 732 | 733 | 734 | def create_commit(version: str | None, date: str | None, push: bool, force: bool, exports: tuple[str], branch: str | None): 735 | for export in exports: 736 | export_branch = f'{branch}-{export}' if branch else export 737 | os.chdir(export) 738 | if version: 739 | assert date 740 | subprocess.run(['git', 'add', '.'], capture_output=True) 741 | os.environ['GIT_AUTHOR_DATE'] = date 742 | os.environ['GIT_COMMITTER_DATE'] = date 743 | subprocess.run(['git', 'commit', '-q', '-m', f'🚀 Update {export} for {version}']) 744 | subprocess.run(['git', 'tag', '-f', f'{version}-{export}']) 745 | if push: 746 | if force: 747 | subprocess.run(['git', 'push', '-f', '-q', '--tags', 'origin', export_branch]) 748 | else: 749 | subprocess.run(['git', 'push', '-q', '--tags', 'origin', export_branch]) 750 | 
os.chdir('..') 751 | if version: 752 | click.echo(f'🚀 Created commit on {export_branch} branch') 753 | elif push: 754 | click.echo(f'🚀 Pushed to {export_branch} branch') 755 | 756 | 757 | def fix_tags(exports: tuple[str], branch: str | None): 758 | for export in exports: 759 | export_branch = f'{branch}-{export}' if branch else export 760 | os.chdir(export) 761 | taglist = subprocess.run(['git', 'tag', '-l'], capture_output=True).stdout.decode('utf-8').split('\n') 762 | batch_size = 100 763 | for i in range(0, len(taglist), batch_size): 764 | batch = taglist[i:i + batch_size] 765 | subprocess.run(['git', 'tag', '-d', *batch], capture_output=True) 766 | click.echo(f'🔥 Deleted {len(taglist) - 1} tags in {export_branch} branch') 767 | commits = [c 768 | for c in subprocess.run(['git', 'log', '--format=%h %f'], capture_output=True).stdout.decode('utf-8').split('\n') 769 | if re.match('^.* .*$', c) and not c.endswith('Initial-commit') 770 | ] 771 | for c in commits: 772 | ref, message = c.split(' ') 773 | version = re.match(f'^Update-{export}-for-(.*)$', message.strip())[1] 774 | subprocess.run(['git', 'tag', f'{version}-{export}', ref.strip()], capture_output=True) 775 | os.chdir('..') 776 | click.echo(f'✨ Created {len(commits)} tags in {export_branch} branch') 777 | 778 | 779 | def fetch_meta(prefix: str, obj, cache=True): 780 | assert 'sha1' in obj 781 | assert 'url' in obj 782 | if cache: 783 | return fetch(f'{prefix}-{obj["sha1"]}', obj['url']) 784 | else: 785 | return requests.get(obj['url']).content 786 | 787 | 788 | def download_resource(resource: tuple): 789 | key, object = resource 790 | sound = get_resource(object['hash']) 791 | os.makedirs(os.path.normpath(os.path.join(f'resources/{key}', '..')), exist_ok=True) 792 | with open(f'resources/{key}', 'wb') as f: 793 | f.write(sound) 794 | 795 | 796 | def get_resource(hash: str): 797 | url = f'https://resources.download.minecraft.net/{hash[0:2]}/{hash}' 798 | return retry(fetch, f'resource-{hash}', url) 799 | 800 | 801 | def fetch(key: str, url: str): 802 | return cache(key, lambda: requests.get(url).content) 803 | 804 | 805 | def cache(key: str, factory): 806 | os.makedirs('.cache', exist_ok=True) 807 | cache_path = f'.cache/{key}' 808 | if os.path.exists(cache_path): 809 | with open(cache_path, 'rb') as f: 810 | return f.read() 811 | else: 812 | content = factory() 813 | with open(cache_path, 'wb') as f: 814 | f.write(content) 815 | return content 816 | 817 | 818 | def retry(fn, *args, **kwargs): 819 | retry_count = 0 820 | while True: 821 | try: 822 | return fn(*args, **kwargs) 823 | except Exception as e: 824 | if retry_count < 3: 825 | print(f"Retrying {fn.__name__} after error: {e}") 826 | time.sleep(4**retry_count) 827 | retry_count += 1 828 | else: 829 | e.add_note('Max retry attempts reached') 830 | raise e 831 | 832 | 833 | if __name__ == '__main__': 834 | main() 835 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | click==8.0.3 2 | requests==2.27.1 3 | msgpack==1.0.3 4 | python-dotenv==0.19.2 5 | image-packer==0.10.0 6 | nbtlib==1.12.1 7 | --------------------------------------------------------------------------------
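For completeness, the pipeline in `main.py` can also be run locally instead of through the workflows above. A minimal sketch, assuming Python 3, Java 21 on the PATH (needed for the data generator), and network access; the version id and export choice are illustrative:

```sh
pip install -r requirements.txt

# Process one version and write the condensed summary exports locally
python main.py --version 1.21 --export summary

# --version also accepts a range such as 1.20..1.21; add --commit and --push
# (plus the github-* environment variables) to mirror what the CI runs do
```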