├── .github
│   └── workflows
│       ├── PackResData.yml
│       └── Release.yml
├── .gitignore
├── README.md
├── config.json5
├── downloader.py
├── downloader.spec
├── downloader_old.py
├── logger_tt
│   └── log_config.json
├── pack_resdata.py
└── requirements.txt

/.github/workflows/PackResData.yml:
--------------------------------------------------------------------------------
name: Pack Artifact

on:
  schedule:
    - cron: '0 0 * * *'
  workflow_dispatch:

jobs:
  PackArtifact:
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@v3
      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.10'
      - name: Install Python dependencies
        run: |
          pip install -r requirements.txt
      - name: Pack Resdata
        run: |
          python pack_resdata.py
      - name: Upload
        uses: "marvinpinto/action-automatic-releases@latest"
        with:
          repo_token: "${{ secrets.GITHUB_TOKEN }}"
          automatic_release_tag: "ResData"
          prerelease: true
          files: resdata.zip
--------------------------------------------------------------------------------
/.github/workflows/Release.yml:
--------------------------------------------------------------------------------
name: Releases

on:
  push:
    paths-ignore:
      - ".github/**"
  workflow_dispatch:

jobs:
  BuildExecutable:
    runs-on: windows-latest
    steps:
      - uses: actions/checkout@v2
      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.10'
      - name: Install Python dependencies
        run: |
          pip install -r requirements.txt
      - name: Build Executable
        run: |
          pyinstaller downloader.spec
          dir dist
      - name: Create zip file
        run: Compress-Archive -Path dist/downloader/* -DestinationPath resource_downloader.zip -Force
      - name: Pre-Release
        uses: "marvinpinto/action-automatic-releases@latest"
        if: "!contains(github.ref, 'refs/tags/v')"
        with:
          repo_token: "${{ secrets.GITHUB_TOKEN }}"
          automatic_release_tag: "latest"
          prerelease: true
          files: |
            ./resource_downloader.zip
      - name: Tagged Release
        uses: "marvinpinto/action-automatic-releases@latest"
        if: "contains(github.ref, 'refs/tags/v')"
        with:
          repo_token: "${{ secrets.GITHUB_TOKEN }}"
          prerelease: false
          files: |
            ./resource_downloader.zip
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
build
dist
output
downloader.log*
resdata/
resdata.zip
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Resource Downloader
A script that can be used to manually download all resource data for GFL.
It uses multiprocessing to speed up downloads and automatically retries failed tasks.

## Performance
On my PC the script took around 5 minutes to download the base assets required to open the game,
and about 20 minutes in total to download all assets (including HD resources).
In comparison, I spent 20 minutes downloading the base assets through the game client and encountered 5 crashes in total.

## Usage
Probably due to the use of multiprocessing, I failed to pack this with pyinstaller, so a Python environment is required.
Run `pip install -r requirements.txt` to install all requirements, and then simply run `python downloader.py`.
All settings are stored in `config.json5`.
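All command-line flags defined in `downloader.py` override their counterparts in `config.json5` (a JSON5 file, hence the inline comments). For example, an invocation that fetches only the base and additional assets for the US server with 8 parallel processes might look like:

```
python downloader.py --region us -b -a --njobs 8
```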
--------------------------------------------------------------------------------
/config.json5:
--------------------------------------------------------------------------------
{
    "destination":"./output", // The directory used to save all data
    "region":"ch", // Server region, in {ch,tw,kr,us,jp}, the program will read resdata/{$REGION}_resdata.json to find assets
    "download_resdata":true, // Whether to download resdata.zip from the github repo. If false, you need to manually prepare ./resdata/{$REGION}_resdata.json
    "resdata_url":"https://github.com/gf-data-tools/gf-resource-downloader/releases/download/ResData/resdata.zip", // Where to download resdata.zip, by default the original repo.
    "use_abname":false, // Whether to use assetBundleName instead of resname, i.e. character_06typesmg_hd.ab instead of a96bd30db215af7e4f3f8db8169c63c4418620character06typesmghd.ab, useful for unpackers
    "download_base_assets":true, // Whether to download base assets (which are required to open the game)
    "download_add_assets":true, // Whether to download additional assets (which are required for the full gameplay experience)
    "download_passivity_assets":false, // Whether to download optional assets (hd pictures, voice, etc.)
    "timeout":30, // Timeout before retrying
    "max_retry":10, // Max retry times
    "processes":16, // Number of simultaneous processes
}
--------------------------------------------------------------------------------
/downloader.py:
--------------------------------------------------------------------------------
import argparse
import os
import zipfile
from pathlib import Path

import pyjson5
from gf_utils.download import Downloader

from logger_tt import logger, setup_logging

os.chdir(Path(__file__).resolve().parent)

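# logger_tt/log_config.json follows the stdlib logging dictConfig schema
# ("version": 1), with an extra "logger_tt" section holding that library's
# own options (e.g. "use_multiprocessing"); setup_logging applies it here.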
def setup_mp_logging():
    setup_logging(config_path="logger_tt/log_config.json")


def parse_args():
    parser = argparse.ArgumentParser(
        description="Arguments will override corresponding settings in config.json5"
    )
    parser.add_argument("--output", type=str, help="Output directory")
    parser.add_argument(
        "--region",
        type=str,
        help="Server region, the program will read resdata/{$REGION}_resdata.json to find assets",
    )
    parser.add_argument(
        "--downloadres",
        type=int,
        help="Whether to download resdata.zip, 1 for true and 0 for false",
        metavar="0/1",
    )
    parser.add_argument(
        "--url",
        type=str,
        help="URL for downloading resdata.zip, will override download_resdata if given",
    )
    parser.add_argument(
        "--abname",
        action="store_const",
        const=True,
        help="Save assets using assetBundleName instead of resname, useful for unpackers",
    )
    group = parser.add_argument_group(
        "Assets",
        description="Select assets you want to download (base/add/passivity), all settings in config.json5 will be overridden if any is passed",
    )
    group.add_argument(
        "-b",
        action="store_true",
        help="Download base assets (required to open the game)",
    )
    group.add_argument(
        "-a",
        action="store_true",
        help="Download additional assets (required for full gameplay experience)",
    )
    group.add_argument(
        "-p",
        action="store_true",
        help="Download passivity assets (optional assets: hd pictures, voice, etc.)",
    )
    group = parser.add_argument_group("Downloader related")
    group.add_argument("--timeout", type=float, help="Timeout before retry")
    group.add_argument("--retry", type=int, help="Max retry times")
    group.add_argument("--njobs", type=int, help="Number of simultaneous processes")
    args = parser.parse_args()
    with open("config.json5", encoding="utf-8") as f:
        config = pyjson5.load(f)
    if args.output is not None:
        config["destination"] = args.output
    if args.region is not None:
        config["region"] = args.region
    if args.downloadres is not None:
        config["download_resdata"] = bool(args.downloadres)
    if args.url is not None:
        config["resdata_url"] = args.url
        config["download_resdata"] = True
    if args.abname is not None:
        config["use_abname"] = args.abname
    if args.b or args.a or args.p:
        (
            config["download_base_assets"],
            config["download_add_assets"],
            config["download_passivity_assets"],
        ) = (
            args.b,
            args.a,
            args.p,
        )
    if args.timeout is not None:
        config["timeout"] = args.timeout
    if args.retry is not None:
        config["max_retry"] = args.retry
    if args.njobs is not None:
        config["processes"] = args.njobs
    return config


if __name__ == "__main__":
    config = parse_args()
    setup_mp_logging()
    downloader = Downloader(
        n_jobs=config["processes"], timeout=config["timeout"], retry=config["max_retry"]
    )

    out_dir = config["destination"]
    os.makedirs(out_dir, exist_ok=True)
    resdata_url = config["resdata_url"]
    if config["download_resdata"] is True:
        print("Downloading compressed resdata from github")
        logger.info("Downloading compressed resdata from github")
        Path("resdata.zip").unlink(missing_ok=True)
        downloader.download([[resdata_url, "./resdata.zip"]])
    else:
        logger.warning(
            f'Using local resdata/{config["region"]}_resdata.json, please ensure that it is up-to-date'
        )
    if os.path.exists("resdata.zip"):
        # resdata.zip may be absent if resdata/{region}_resdata.json was prepared manually
        zipfile.ZipFile("resdata.zip").extractall("./resdata")
    with open(f'resdata/{config["region"]}_resdata.json', "r", encoding="utf-8") as f:
        res_data = pyjson5.load(f)
    resurl = res_data["resUrl"]
    tasks = []
    selected_id = []
    selected_id += [0] if config["download_base_assets"] else []
    selected_id += [1] if config["download_add_assets"] else []
    selected_id += [2] if config["download_passivity_assets"] else []
    ab_keys = ["BaseAssetBundles", "AddAssetBundles", "passivityAssetBundles"]

    print("Collecting resource urls")
    logger.info("Collecting resource urls")
    for d in selected_id:
        key = ab_keys[d]
        for bundle in res_data[key]:
            resname = bundle["resname"] + ".ab"
            abname = bundle["assetBundleName"] + ".ab"
            size = bundle["sizeOriginal"]
            res_path = os.path.join(
                out_dir, abname if config["use_abname"] else resname
            )
            if os.path.exists(res_path):
                if os.path.getsize(res_path) == size:
                    logger.info(f"File {resname} already exists, thus will be skipped")
                    continue
                else:
                    os.remove(res_path)
            tasks.append([resurl + resname, res_path])

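    # bytesData entries are .dat files rather than .ab bundles; each entry's
    # fileInABC value names the asset group it ships with (0 = base, 1 = add,
    # 2 = passivity), matching the selected_id values built above.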
    for bundle in res_data["bytesData"]:
        if bundle["fileInABC"] in selected_id:
            resname = bundle["resname"] + ".dat"
            abname = bundle["fileName"] + ".dat"
            size = bundle["sizeCompress"]
            res_path = os.path.join(
                out_dir, abname if config["use_abname"] else resname
            )
            if os.path.exists(res_path):
                if os.path.getsize(res_path) == size:
                    logger.info(f"File {resname} already exists, thus will be skipped")
                    continue
                else:
                    os.remove(res_path)
            tasks.append([resurl + resname, res_path])

    print("Start downloading")
    logger.info("Start downloading")
    downloader.download(tasks)
--------------------------------------------------------------------------------
/downloader.spec:
--------------------------------------------------------------------------------
# -*- mode: python ; coding: utf-8 -*-


block_cipher = None


a = Analysis(
    ['downloader.py'],
    pathex=[],
    binaries=[],
    datas=[('config.json5','./'),('logger_tt','logger_tt')],
    hiddenimports=['logger_tt','logger_tt.handlers'],
    hookspath=[],
    runtime_hooks=[],
    excludes=[],
    win_no_prefer_redirects=False,
    win_private_assemblies=False,
    cipher=block_cipher,
    noarchive=False,
)
pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher)

exe = EXE(
    pyz,
    a.scripts,
    [],
    exclude_binaries=True,
    name='downloader',
    debug=False,
    bootloader_ignore_signals=False,
    strip=False,
    upx=True,
    console=True,
    disable_windowed_traceback=False,
    argv_emulation=False,
    target_arch=None,
    codesign_identity=None,
    entitlements_file=None,
)
coll = COLLECT(
    exe,
    a.binaries,
    a.zipfiles,
    a.datas,
    strip=False,
    upx=True,
    upx_exclude=[],
    name='downloader',
)
--------------------------------------------------------------------------------
/downloader_old.py:
--------------------------------------------------------------------------------
import pyjson5
import os
from urllib import request
import logging
import socket
from socket import timeout
from multiprocessing import Pool,freeze_support
from tqdm import tqdm
from pathlib import Path
import zipfile
from urllib.error import URLError
os.chdir(Path(__file__).resolve().parent)

with open('config.json5',encoding='utf-8') as f:
    config = pyjson5.load(f)

logger = logging.getLogger()
fh = logging.FileHandler('downloader.log')
fh.setLevel(config.get('log_level','DEBUG'))  # fall back, config.json5 does not define log_level
fh.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
logger.addHandler(fh)
sh = logging.StreamHandler()
sh.setLevel('WARNING')
sh.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
logger.addHandler(sh)
logger.setLevel('DEBUG')

MAX_RETRY=config['max_retry']

socket.setdefaulttimeout(int(config['timeout']))

def download(url, path):
    fname = os.path.split(path)[-1]
    logger.info(f'Start downloading {fname}')
    for i in range(MAX_RETRY):
        try:
            if not os.path.exists(path):
                request.urlretrieve(url,path+'.tmp')
                os.rename(path+'.tmp',path)
        except (URLError, timeout):
            logger.warning(f'Failed to download {fname} for {i+1}/{MAX_RETRY} tries')
            continue
        else:
            logger.info(f'Successfully downloaded {fname}')
            break
    else:
        logger.error(f'Exceeded max retry times, failed to download {fname} from {url}')
    return path

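# Pool.imap_unordered passes each task as a single argument, so star_download
# unpacks the (url, path) tuple before forwarding it to download().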
def star_download(x):
    return download(*x)

def multiprocess_download(tasks, processes=16):
    pool = Pool(processes=processes)
    for _ in tqdm(pool.imap_unordered(star_download, tasks),total=len(tasks)):
        pass

if __name__=='__main__':
    freeze_support()
    out_dir = config['destination']
    os.makedirs(out_dir,exist_ok=True)
    resdata_url = config['resdata_url']
    if config['download_resdata'] is True:
        print('Downloading compressed resdata from github')
        logger.info('Downloading compressed resdata from github')
        download(resdata_url, './resdata.zip')
    elif not os.path.exists('./resdata.zip'):
        raise FileNotFoundError(f'./resdata.zip does not exist, you should download it from {resdata_url} or set download_resdata to true in config')
    else:
        logger.warning('Using local resdata.zip, please ensure that it is up-to-date')
    data = zipfile.ZipFile('resdata.zip').read(f'{config["region"]}_resdata.json')
    res_data = pyjson5.loads(data.decode('utf-8'))
    resurl = res_data['resUrl']

    tasks = []
    selected_id = []
    selected_id += [0] if config['download_base_assets'] else []
    selected_id += [1] if config['download_add_assets'] else []
    selected_id += [2] if config['download_passivity_assets'] else []
    ab_keys = ['BaseAssetBundles', 'AddAssetBundles', 'passivityAssetBundles']

    print('Collecting resource urls')
    logger.info('Collecting resource urls')
    for d in selected_id:
        key = ab_keys[d]
        for bundle in res_data[key]:
            resname = bundle['resname']+'.ab'
            size = bundle['sizeOriginal']
            res_path = os.path.join(out_dir,resname)
            if os.path.exists(res_path):
                if os.path.getsize(res_path) == size:
                    logger.info(f'File {resname} already exists, thus will be skipped')
                    continue
                else:
                    os.remove(res_path)
            tasks.append((resurl+resname,res_path))

    for bundle in res_data['bytesData']:
        if bundle['fileInABC'] in selected_id:
            resname = bundle['resname']+'.dat'
            size = bundle['sizeCompress']
            res_path = os.path.join(out_dir,resname)
            if os.path.exists(res_path):
                if os.path.getsize(res_path) == size:
                    logger.info(f'File {resname} already exists, thus will be skipped')
                    continue
                else:
                    os.remove(res_path)
            tasks.append((resurl+resname,res_path))

    print('Start downloading')
    logger.info('Start downloading')
    multiprocess_download(tasks,config['processes'])
--------------------------------------------------------------------------------
/logger_tt/log_config.json:
--------------------------------------------------------------------------------
{
  "version": 1,
  "disable_existing_loggers": false,
  "formatters": {
    "simple": {
      "format": "[%(asctime)s] [%(name)s:%(lineno)d %(levelname)s] %(message)s",
      "datefmt": "%Y-%m-%d %H:%M:%S"
    },
    "brief": {
      "format": "[%(asctime)s] %(levelname)s: %(message)s",
      "datefmt": "%Y-%m-%d %H:%M:%S"
    }
  },

  "handlers": {
    "console": {
      "class": "logging.StreamHandler",
      "level": "WARNING",
      "formatter": "brief",
      "stream": "ext://sys.stdout"
    },

    "error_file_handler": {
      "class": "logging.handlers.TimedRotatingFileHandler",
      "level": "DEBUG",
      "formatter": "simple",
      "filename": "downloader.log",
      "backupCount": 15,
      "encoding": "utf8",
      "when": "midnight"
    },

    "buffer_stream_handler": {
      "class": "logger_tt.handlers.StreamHandlerWithBuffer",
      "level": "INFO",
      "formatter": "simple",
      "stream": "ext://sys.stdout",
      "buffer_time": 0.5,
      "buffer_lines": 0,
      "debug": false
    }
  },

  "loggers": {
    "urllib3": {
      "level": "ERROR",
      "handlers": ["console", "error_file_handler"],
      "propagate": false
    }
  },

  "root": {
    "level": "DEBUG",
    "handlers": ["console", "error_file_handler"]
  },

  "logger_tt": {
    "suppress": ["exchangelib"],
    "suppress_level_below": "WARNING",
    "capture_print": false,
    "strict": false,
    "guess_level": false,
    "full_context": 0,
    "use_multiprocessing": true,
    "limit_line_length": 1000,
    "analyze_raise_statement": false,
    "default_logger_formats": {
      "normal": ["%(name)s", "%(filename)s"],
      "thread": ["%(message)s", "%(threadName)s %(message)s"],
      "multiprocess": ["%(message)s", "%(processName)s %(message)s"],
      "both": ["%(message)s", "%(processName)s %(threadName)s %(message)s"]
    }
  }
}
--------------------------------------------------------------------------------
/pack_resdata.py:
--------------------------------------------------------------------------------
import os
import zipfile
from pathlib import Path
from urllib import request

os.chdir(Path(__file__).resolve().parent)

f = zipfile.ZipFile("resdata.zip", "w")
os.makedirs("./resdata", exist_ok=True)
regions = ["ch", "jp", "tw", "us", "kr"]
for region in regions:
    url = f"https://github.com/gf-data-tools/gf-data-{region}/raw/main/resdata_no_hash.json"
    request.urlretrieve(url, f"./resdata/{region}_resdata.json")
    f.write(
        f"./resdata/{region}_resdata.json",
        f"{region}_resdata.json",
        compress_type=zipfile.ZIP_BZIP2,
    )
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
pyjson5
tqdm
logger_tt
git+https://github.com/gf-data-tools/gf-utils.git
pyinstaller
--------------------------------------------------------------------------------