├── .gitignore ├── .luacheckrc ├── README.md ├── export.lua └── libs ├── BLPConverter ├── BLPConverter ├── LICENSE └── convert_all.py ├── casc ├── bin.c ├── bin.lua ├── blte.lua ├── bspatch.lua ├── dbc.lua ├── encoding.lua ├── init.lua ├── install.lua ├── jenkins96.lua ├── md5.lua ├── platform-fallback │ ├── guess.lua │ ├── unix.lua │ └── windows.lua ├── platform.lua ├── ribbit.lua └── root.lua ├── convert.sh └── dbc ├── bin.lua ├── headers ├── db2.lua ├── db5_6.lua ├── dbc.lua ├── dc1_2.lua └── dc3_4_5.lua └── init.lua /.gitignore: -------------------------------------------------------------------------------- 1 | Blizzard*/ 2 | Cache/ 3 | fails*.txt 4 | version.txt 5 | 6 | ## Sublime Text ## 7 | *.sublime-workspace 8 | -------------------------------------------------------------------------------- /.luacheckrc: -------------------------------------------------------------------------------- 1 | max_line_length = false 2 | max_cyclomatic_complexity = 14 3 | self = false 4 | unused_args = false 5 | std = "none" 6 | globals = { 7 | "_G" 8 | } 9 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | WoW Interface Export 2 | ==================== 3 | 4 | This tool will extract the interface files for World of Warcraft, either from a local install or from the CDN. 
5 | 6 | Usage: 7 | `lua export.lua [project] [branch] [filter]` 8 | 9 | ### project ### 10 | * `retail` - Default 11 | * `classic` - Most recent Classic version 12 | 13 | These can also be used to refer to a specific WoW Classic project 14 | 15 | * `wrath` - WotLK Classic 16 | * `classic_era` - Vanilla Classic 17 | * `vanilla` - Vanilla Classic 18 | 19 | ### branch ### 20 | * `live` - Default 21 | * `ptr` 22 | * `ptr2` - Retail client only 23 | * `beta` 24 | 25 | ### filter ### 26 | * `all` - Default 27 | * `code` 28 | * `art` 29 | * `png` - Like `art`, but also converts to png 30 | 31 | 32 | Tools used: 33 | * [LuaCasc](https://www.townlong-yak.com/casc/) 34 | * [LuaDBC](https://www.townlong-yak.com/casc/dbc/) 35 | * [LuaFileSystem](https://luarocks.org/modules/hisham/luafilesystem) 36 | * [LuaCSV](https://luarocks.org/modules/geoffleyland/csv) (optional) 37 | -------------------------------------------------------------------------------- /export.lua: -------------------------------------------------------------------------------- 1 | local project, branch, filter = ... 2 | 3 | --luacheck: globals require assert pcall type 4 | --luacheck: globals tostring setmetatable 5 | --luacheck: globals table io next os package 6 | 7 | local function write(text, ...) 
8 | _G.print(text:format(...)) 9 | end 10 | 11 | local projects = { 12 | retail = "wow", 13 | classic = "wow_classic", 14 | bcc = "wow_classic", 15 | wrath = "wow_classic", 16 | classic_era = "wow_classic_era", 17 | vanilla = "wow_classic_era", 18 | } 19 | local branches = { 20 | live = "", 21 | ptr = "t", 22 | ptr2 = "xptr", 23 | beta = "_beta", 24 | 25 | -- Classic PTR 26 | ptrC = "_ptr", 27 | } 28 | local fileTypes = { 29 | all = true, 30 | code = "Code", 31 | art = "Art", 32 | } 33 | 34 | if project then 35 | if branches[project] then 36 | branch, filter = project, branch 37 | project = "retail" 38 | end 39 | 40 | if fileTypes[project] then 41 | filter = project 42 | project = "retail" 43 | branch = "live" 44 | end 45 | 46 | if fileTypes[branch] then 47 | filter = branch 48 | branch = "live" 49 | end 50 | 51 | if project ~= "retail" and branch == "ptr" then 52 | branch = "ptrC" 53 | end 54 | else 55 | project, branch, filter = "retail", "live", "all" 56 | end 57 | 58 | local convertBLP = false 59 | if filter == "png" then 60 | convertBLP = true 61 | filter = "art" 62 | end 63 | filter = filter or "all" 64 | 65 | --[[ 66 | TODO: Add a `latest` branch that will search 67 | all branches for the newest version. 68 | ]] 69 | 70 | write("Extracting %s from %s %s...", filter, project, branch) 71 | local product = projects[project] .. branches[branch] 72 | 73 | package.path = package.path .. ";libs/?.lua;libs/?/init.lua;InterfaceExport/libs/?.lua;InterfaceExport/libs/?/init.lua" 74 | local casc = require("casc") 75 | local plat = require("casc.platform") 76 | local dbc = require("dbc") 77 | local csvOk, csv = pcall(require, "csv") if not csvOk then csv = nil end -- pcall returns (ok, module); storing only the boolean broke csv.open() below whenever lua-csv was installed 78 | 79 | local WOWDIR = "E:/World of Warcraft" 80 | local CACHE_DIR = "./InterfaceExport/Cache/" .. 
product 81 | local REGION = "us" 82 | local PATCH_BASE = ("http://%s.patch.battle.net:1119/%s"):format(REGION, product) 83 | local FILEID_PATH_MAP = { 84 | ["DBFilesClient/ManifestInterfaceData.db2"] = 1375801, 85 | ["DBFilesClient/GlobalStrings.db2"] = 1394440, 86 | ["DBFilesClient/UiTextureAtlas.db2"] = 897470, 87 | ["DBFilesClient/UiTextureAtlasMember.db2"] = 897532, 88 | -- These _may_ have proper names, but they're not known at present. 89 | ["Interface/ui-code-list.txt"] = 6067012, 90 | ["Interface/ui-code-doc-list.txt"] = 6076661, 91 | } 92 | 93 | local rmdir, convert do 94 | -- Based on code from casc.platform 95 | local dir_sep = package and package.config and package.config:sub(1,1) or "/" 96 | local command = "rmdir %s" 97 | if dir_sep == '/' then 98 | -- *nix 99 | command = "rm -r %s" 100 | end 101 | local function execute(...) 102 | local ok, status, sig = os.execute(...) 103 | if ok == true and (status == "exit" or status == "signal") then -- parenthesized: Lua 5.2+ os.execute returns ok, "exit"|"signal", code; the old unparenthesized form matched on "signal" even when ok was nil 104 | return sig 105 | else 106 | return ok or sig or ok, status, sig 107 | end 108 | end 109 | local function shellEscape(s) 110 | return '"' .. s:gsub('"', '"\\\\""') .. '"' 111 | end 112 | function rmdir(path) 113 | return execute(command:format(shellEscape(path))) 114 | end 115 | 116 | -- ./libs/BLPConverter/BLPConverter UI-AbilityPanel-BotLeft 117 | function convert(path) 118 | write("Convert file: %s", path) 119 | return execute(("./libs/BLPConverter/BLPConverter %s"):format(shellEscape(path))) 120 | end 121 | end 122 | 123 | local fileHandle do 124 | local function selectBuild(buildInfo) -- returns the index of the active .build.info row for our product, or nil (caller then falls back to the CDN) 125 | for i = 1, #buildInfo do 126 | --print(buildInfo[i].Product, buildInfo[i].Active) 127 | if buildInfo[i].Product == product and buildInfo[i].Active == 1 then 128 | return i 129 | end 130 | -- (removed a stray assert(0): it ran on every non-matching row and was a no-op anyway, since 0 is truthy in Lua; returning nil here is required for the CDN fallback) 131 | end 132 | end 133 | 134 | local base = WOWDIR .. "/Data" 135 | local buildKey, cdn, ckey, version = casc.localbuild(WOWDIR .. 
"/.build.info", selectBuild) 136 | if version then 137 | write("Product: %s Build: %s", product, tostring(version)) 138 | else 139 | write("Local build not found, checking CDN...") 140 | 141 | buildKey, cdn, ckey, version = casc.cdnbuild(PATCH_BASE, REGION) 142 | if version then 143 | write("CDN Product: %s Build: %s", product, tostring(version)) 144 | base = CACHE_DIR 145 | else 146 | write("Product %s not found", product) 147 | return 148 | end 149 | end 150 | 151 | local versionBuild = ("%s (%s)"):format(version:match("(%d+.%d+.%d).(%d*)")) 152 | if versionBuild then 153 | local file = assert(io.open("version.txt", "w")) 154 | file:write(versionBuild, "\n") 155 | end 156 | 157 | plat.mkdir(CACHE_DIR) 158 | local conf = { 159 | bkey = buildKey, 160 | base = base, 161 | cdn = cdn, 162 | ckey = ckey, 163 | cache = CACHE_DIR, 164 | cacheFiles = true, 165 | locale = casc.locale.US, 166 | requireRootFile = false, 167 | --verifyHashes = false, 168 | --log = print 169 | } 170 | 171 | fileHandle = assert(casc.open(conf)) 172 | end 173 | 174 | local GetFileList do 175 | local fileFilter = { 176 | xml = "code", 177 | lua = "code", 178 | toc = "code", 179 | xsd = "code", 180 | 181 | blp = "art" 182 | } 183 | 184 | local params = { 185 | header = true, 186 | } 187 | 188 | local l = setmetatable({}, {__index=function(s, a) s[a] = a:lower() return s[a] end}) 189 | local function SortPath(a, b) 190 | return l[a.fullPath] < l[b.fullPath] 191 | end 192 | 193 | local function CheckFile(fileType, files, id, path, name) 194 | --print(_, path, name) 195 | if fileFilter[(name:match("%.(...)$") or ""):lower()] == fileType then 196 | path = path:gsub("[/\\]+", "/") 197 | 198 | --print("CheckFile", path) 199 | files[#files + 1] = { 200 | path = path, 201 | id = id, 202 | fullPath = path .. name, 203 | } 204 | end 205 | end 206 | 207 | local function HasTextualManifest(fileType) 208 | local data 209 | 210 | -- At present art exports still use the old manifest. 
211 | if fileType == "code" then 212 | data = fileHandle:readFile("Interface/ui-toc-list.txt") 213 | end 214 | 215 | return data ~= nil 216 | end 217 | 218 | local function CheckFileList(fileType, files, listData) 219 | if listData == nil then 220 | return 221 | end 222 | 223 | for filePath in listData:gmatch("[^\r\n]+") do 224 | local fileDir, fileName = filePath:match("^(.+[\\/])(.-)$") 225 | local fileID = fileHandle.root:getFileID(filePath) 226 | CheckFile(fileType, files, fileID, fileDir, fileName) 227 | end 228 | end 229 | 230 | function GetFileList(fileType) 231 | local files = {} 232 | if csv and io.open("manifestinterfacedata.csv", "r")then 233 | -- from wow.tools table browser 234 | local csvFile = csv.open("manifestinterfacedata.csv", params) 235 | for fields in csvFile:lines() do 236 | CheckFile(fileType, files, fields.ID, fields.FilePath, fields.FileName) 237 | end 238 | elseif HasTextualManifest(fileType) then 239 | fileHandle.root:addFileIDPaths(FILEID_PATH_MAP) 240 | CheckFileList(fileType, files, fileHandle:readFile("Interface/ui-toc-list.txt")) 241 | CheckFileList(fileType, files, fileHandle:readFile("Interface/ui-code-list.txt")) 242 | CheckFileList(fileType, files, fileHandle:readFile("Interface/ui-code-doc-list.txt")) 243 | else 244 | fileHandle.root:addFileIDPaths(FILEID_PATH_MAP) 245 | 246 | local fileData = assert(fileHandle:readFile("DBFilesClient/ManifestInterfaceData.db2")) 247 | for id, path, name in dbc.rows(fileData, "ss") do 248 | if path:match("^[Ii][Nn][Tt][Ee][Rr][Ff][Aa][Cc][Ee][\\/_]") then 249 | CheckFile(fileType, files, id, path, name) 250 | end 251 | end 252 | end 253 | 254 | table.sort(files, SortPath) 255 | return files 256 | end 257 | end 258 | 259 | 260 | local progress = 0 261 | local function UpdateProgress(current) 262 | --if collectgarbage("count") > gcLimit then 263 | -- collectgarbage() 264 | --end 265 | 266 | if (current - progress) > 0.1 then 267 | write("%d%%", current * 100) 268 | progress = current 269 | end 
270 | if current == 1 then 271 | write("Done!") 272 | progress = 0 273 | end 274 | end 275 | 276 | 277 | local CreateDirectories do 278 | function CreateDirectories(files, root) 279 | local dirs = {} 280 | for i = 1, #files do 281 | local path = files[i].fullPath 282 | for endPoint in path:gmatch("()/") do 283 | local subPath = path:sub(1, endPoint - 1) 284 | local subLower = subPath:lower() 285 | 286 | if not dirs[subLower] then 287 | --print("dir", path, subPath, subLower) 288 | dirs[subLower] = subPath 289 | end 290 | end 291 | end 292 | 293 | local makeDirs = {} 294 | for _, subPath in next, dirs do 295 | table.insert(makeDirs, subPath) 296 | end 297 | table.sort(makeDirs) 298 | 299 | write("Creating %d folders...", #makeDirs) 300 | plat.mkdir(root) 301 | for i = 1, #makeDirs do 302 | if plat.mkdir(plat.path(root, makeDirs[i])) then 303 | write("Create folder: %s", makeDirs[i]) 304 | else 305 | write("Could not create folder: %s", makeDirs[i]) 306 | end 307 | 308 | UpdateProgress(i / #makeDirs) 309 | end 310 | 311 | return dirs 312 | end 313 | end 314 | 315 | 316 | local ExtractFiles do 317 | function ExtractFiles(fileType) 318 | local files = GetFileList(fileType) 319 | local root = "./" 320 | if filter == "all" then 321 | root = "BlizzardInterface" .. 
fileTypes[fileType] 322 | end 323 | local dirs = CreateDirectories(files, root) 324 | 325 | local file, filePath, fixedCase 326 | local function FixCase(b) 327 | local s = filePath:sub(1, b - 1) 328 | --print("fixedCase", filePath, b, s) 329 | return dirs[s:lower()]:match("([^/]+/)$") 330 | end 331 | 332 | local pathStatus, w, h, err = {} 333 | write("Creating %d files...", #files) 334 | for i = 1, #files do 335 | file = files[i] 336 | filePath = file.fullPath 337 | fixedCase = (filePath:gsub("[^/]+()/", FixCase)) 338 | if not pathStatus[file.path] then 339 | pathStatus[file.path] = 0 340 | end 341 | 342 | w = fileHandle:readFile(file.fullPath) 343 | if w then 344 | write("Create file: %s", fixedCase) 345 | h, err = io.open(plat.path(root, fixedCase), "wb") 346 | if h then 347 | h:write(w) 348 | h:close() 349 | pathStatus[file.path] = pathStatus[file.path] + 1 350 | 351 | if convertBLP then 352 | convert(plat.path(root, file.path)) 353 | end 354 | else 355 | write("Could not open file %s: %s", filePath, err) 356 | end 357 | else 358 | write("No data for file %s", filePath) 359 | end 360 | UpdateProgress(i / #files) 361 | end 362 | 363 | local emptyDirs = {} 364 | for path, total in next, pathStatus do 365 | if total <= 0 then 366 | table.insert(emptyDirs, path) 367 | end 368 | end 369 | 370 | table.sort(emptyDirs, function(a, b) 371 | return a > b 372 | end) 373 | 374 | write("Cleaning up empty directories...") 375 | for i = 1, #emptyDirs do 376 | local _, status = rmdir(plat.path(root, emptyDirs[i])) 377 | if not status then 378 | write("Removed: %s", emptyDirs[i]) 379 | end 380 | end 381 | end 382 | end 383 | 384 | 385 | if filter == "all" then 386 | ExtractFiles("code") 387 | ExtractFiles("art") 388 | else 389 | ExtractFiles(filter) 390 | end 391 | -------------------------------------------------------------------------------- /libs/BLPConverter/BLPConverter: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Gethe/InterfaceExport/8fe99f1686f013d03f2777ceeb78d5a9373908cd/libs/BLPConverter/BLPConverter -------------------------------------------------------------------------------- /libs/BLPConverter/LICENSE: -------------------------------------------------------------------------------- 1 | BLPConverter is made available under the MIT License. 2 | 3 | Copyright (c) 2011-2012 Philip Abbet 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. -------------------------------------------------------------------------------- /libs/BLPConverter/convert_all.py: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env python2 2 | 3 | import os 4 | import sys 5 | import subprocess 6 | import select 7 | from optparse import OptionParser 8 | 9 | 10 | # Setup of the command-line arguments parser 11 | text = "Usage: %prog [options] \n\nConvert (in-place) all the BLP files in and its subdirectories" 12 | parser = OptionParser(text, version="%prog 1.0") 13 | parser.add_option("--converter", action="store", default="/BLPConverter", type="string", 14 | dest="converter", metavar="CONVERTER", 15 | help="Relative path from this script to the BLPConverter executable") 16 | parser.add_option("--remove", action="store_true", default=False, 17 | dest="remove", help="Remove the BLP files successfully converted") 18 | parser.add_option("--verbose", action="store_true", default=False, 19 | dest="verbose", help="Verbose output") 20 | 21 | # Handling of the arguments 22 | (options, args) = parser.parse_args() 23 | 24 | # Check the parameters 25 | if len(args) != 1: 26 | print "No root folder provided" 27 | sys.exit(-1) 28 | 29 | root_folder = args[0] 30 | if root_folder[-1] != os.path.sep: 31 | root_folder += os.path.sep 32 | 33 | script_dir = os.path.dirname(os.path.realpath(__file__)) 34 | converter_dir = script_dir + options.converter 35 | 36 | try: 37 | subprocess.Popen('%s --help' % converter_dir, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True) 38 | except: 39 | print "Can't execute BLPConverter at '%s'" % converter_dir 40 | sys.exit(-1) 41 | 42 | 43 | # Walk the root folder 44 | counter_success_total = 0 45 | failed_total = [] 46 | for root, dirs, files in os.walk(root_folder): 47 | if root == root_folder: 48 | print "Processing '.'..." 49 | else: 50 | print "Processing '%s'..." 
% root[len(root_folder):] 51 | 52 | blps = filter(lambda x: x.lower().endswith('.blp'), files) 53 | 54 | counter_failed = 0 55 | 56 | if len(blps) > 0: 57 | current = os.getcwd() 58 | os.chdir(root) 59 | 60 | to_convert = blps 61 | while len(to_convert) > 0: 62 | p = subprocess.Popen('%s %s' % (converter_dir, ' '.join([ '"%s"' % image for image in to_convert[0:10] ])), stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True) 63 | p.wait() 64 | output = p.stdout.read() 65 | 66 | failed = filter(lambda x: not(x.endswith(': OK')) and (len(x) > 0), output.split('\n')) 67 | counter_failed += len(failed) 68 | 69 | failed_total.extend(failed) 70 | 71 | if options.verbose: 72 | print ' * ' + output[:-1].replace('\n', '\n * ') 73 | 74 | if options.remove: 75 | failed2 = map(lambda x: x[0:x.find(':')], failed) 76 | done = filter(lambda x: (x not in failed2) and (len(x) > 0), to_convert[0:10]) 77 | p = subprocess.Popen('rm -f %s' % (' '.join([ '"%s"' % image for image in done ])), stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True) 78 | p.wait() 79 | 80 | to_convert = to_convert[10:] 81 | 82 | os.chdir(current) 83 | 84 | if counter_failed > 0: 85 | print '%d images converted, %d images not converted' % (len(blps) - counter_failed, counter_failed) 86 | else: 87 | print '%d images converted' % (len(blps) - counter_failed) 88 | print 89 | 90 | counter_success_total += len(blps) - counter_failed 91 | 92 | print '----------------------------------------------------------' 93 | 94 | if len(failed_total) > 0: 95 | print 'TOTAL: %d images converted, %d images not converted' % (counter_success_total, len(failed_total)) 96 | print 97 | print 'Images not converted:' 98 | for image in failed_total: 99 | print ' * ' + image 100 | else: 101 | print 'TOTAL: %d images converted' % counter_success_total 102 | -------------------------------------------------------------------------------- /libs/casc/bin.c: 
-------------------------------------------------------------------------------- 1 | /* SPDX-FileCopyrightText: © 2023 foxlit */ 2 | /* SPDX-License-Identifier: Artistic-2.0 */ 3 | 4 | #include 5 | #include 6 | 7 | #if LUA_VERSION_NUM < 502 8 | #define luaL_newlib(L,l) (lua_newtable(L), luaL_register(L,NULL,l)) 9 | #endif 10 | 11 | LUALIB_API int sadd (lua_State *L) { 12 | size_t l1, l2, l3; 13 | const char *s1 = luaL_checklstring(L, 1, &l1); 14 | lua_Integer p1 = luaL_checkinteger(L, 2); 15 | const char *s2 = luaL_checklstring(L, 3, &l2); 16 | lua_Integer p2 = luaL_checkinteger(L, 4); 17 | lua_Integer length = luaL_checkinteger(L, 5); 18 | lua_Integer outIndex = luaL_checkinteger(L, 7); 19 | luaL_Buffer buff; 20 | luaL_checktype(L, 6, LUA_TTABLE); 21 | 22 | if (p1 < 1 || p1 > l1 || p2 < 0 || (p2 > 0 && p2 > l2)) luaL_error(L, "invalid starting indices"); 23 | if (p1+length-1 > l1) luaL_error(L, "source string is insufficiently long"); 24 | if (length < 0) luaL_error(L, "expected a non-negative length"); 25 | if (length == 0) { 26 | lua_pushstring(L, ""); 27 | return 1; 28 | } 29 | 30 | s1 += p1-1, s2 += p2-1; 31 | l1 -= p1-1, l2 -= p2-1; 32 | if (l2 < length) { 33 | l3 = length - l2; 34 | length = l2; 35 | } else { 36 | l3 = 0; 37 | } 38 | 39 | luaL_buffinit(L, &buff); 40 | while (length --> 0) 41 | luaL_addchar(&buff, *s1++ + *s2++); 42 | while (l3 --> 0) 43 | luaL_addchar(&buff, *s1++); 44 | 45 | luaL_pushresult(&buff); 46 | lua_settable(L, 6); 47 | lua_pushinteger(L, outIndex+1); 48 | 49 | return 1; 50 | } 51 | 52 | static struct luaL_Reg binlib[] = { 53 | {"sadd", sadd}, 54 | {NULL, NULL} 55 | }; 56 | 57 | int luaopen_casc_binc (lua_State *L) { 58 | luaL_newlib(L, binlib); 59 | return 1; 60 | } -------------------------------------------------------------------------------- /libs/casc/bin.lua: -------------------------------------------------------------------------------- 1 | -- SPDX-FileCopyrightText: © 2023 foxlit 2 | -- SPDX-License-Identifier: 
Artistic-2.0 3 | 4 | local M, sbyte, schar, sgsub, sformat, ssub = {}, string.byte, string.char, string.gsub, string.format, string.sub 5 | local inf, nan, floor, min = math.huge, math.huge-math.huge, math.floor, math.min 6 | local MAX_SLICE_SIZE = 4096 7 | local CM do 8 | local ok, m = pcall(require, "casc.binc") 9 | CM = ok and m 10 | end 11 | 12 | local hexbin = {} for i=0,255 do 13 | local h, b = sformat("%02x", i), schar(i) 14 | hexbin[sformat("%02X", i)], hexbin[h], hexbin[b] = b, b, h 15 | end 16 | 17 | local PRECISION_BITS, PRECISION_BYTES, PRECISION_SAFE1_BYTES, PRECISION_SAFE1_BITS do 18 | local x = 255 19 | for i=8,128 do 20 | local x1 = x-1 21 | local x2 = x1-1 22 | if not (x > 128 and x > x1 and x1 > x2 and (x2+2) == x and (x1+1) == x and (2^i-x) == 1) then 23 | break 24 | else 25 | PRECISION_BITS, x = i, x*2+1 26 | end 27 | end 28 | PRECISION_BYTES = (PRECISION_BITS - PRECISION_BITS%8)/8 29 | M.PRECISION_BITS, M.PRECISION_BYTES = PRECISION_BITS, PRECISION_BYTES 30 | PRECISION_SAFE1_BYTES = PRECISION_BYTES-1 31 | PRECISION_SAFE1_BITS = PRECISION_SAFE1_BYTES * 8 32 | end 33 | 34 | local function uint_le(s, n, pos) 35 | if n > PRECISION_BYTES then 36 | error('Requested integer is too wide: ' .. n .. ' bytes of precision required; ' .. PRECISION_BITS .. ' bits available.', 2) 37 | end 38 | local a, b, c, d, e, f = sbyte(s, (pos or 0)+1, (pos or 0)+n) 39 | return (f or 0)*256^5 + (e or 0)*256^4 + (d or 0)*256^3 + (c or 0)*256^2 + (b or 0)*256 + a + (n > 6 and 2^48*uint_le(s,n-6,pos+6) or 0) 40 | end 41 | local function int_le(s, n, pos) 42 | if n > PRECISION_BYTES then 43 | error('Requested integer is too wide: ' .. n .. ' bytes of precision required; have ' .. PRECISION_BITS .. 
' bits', 2) 44 | end 45 | local a, b, c, d, e, f = sbyte(s, (pos or 0)+1, (pos or 0)+n) 46 | local r = (f or 0)*256^5 + (e or 0)*256^4 + (d or 0)*256^3 + (c or 0)*256^2 + (b or 0)*256 + a + (n > 6 and 2^48*uint_le(s,n-6,pos+6) or 0) 47 | local lb = n <= 6 and (f or e or d or c or b or a) or sbyte(s, (pos or 0)+n) 48 | return r - (lb > 127 and 256^n or 0) 49 | end 50 | local function upint_le(s, w, pos) 51 | if w > PRECISION_BITS then 52 | error('Requested packed integer is too wide: ' .. w .. ' bits of precision required; ' .. PRECISION_BITS .. ' bits available.', 2) 53 | end 54 | local o = 0 55 | pos = pos or 0 56 | if w > PRECISION_SAFE1_BITS then 57 | w, o = PRECISION_SAFE1_BITS, 2^PRECISION_SAFE1_BITS*upint_le(s, w-PRECISION_SAFE1_BITS, pos+PRECISION_SAFE1_BITS) 58 | end 59 | local p8 = pos % 8 60 | local lo, iv = 2^p8, uint_le(s, PRECISION_BYTES, (pos-p8)/8) 61 | return o + ((iv - iv % lo)/lo % 2^w) 62 | end 63 | local function pint_le(s, w, pos) 64 | local o = upint_le(s, w, pos) 65 | return o - (o >= 2^(w-1) and 2^w or 0) 66 | end 67 | M.uint_le, M.int_le = uint_le, int_le 68 | M.upint_le, M.pint_le = upint_le, pint_le 69 | 70 | function M.uint16_le(s, pos) 71 | local a, b = sbyte(s, (pos or 0)+1, (pos or 0) + 2) 72 | return b*256 + a 73 | end 74 | function M.uint32_le(s, pos) 75 | local a, b, c, d = sbyte(s, (pos or 0)+1, (pos or 0) + 4) 76 | return d*256^3 + c*256^2 + b*256 + a 77 | end 78 | function M.uint16_be(s, pos) 79 | local a,b = sbyte(s, (pos or 0)+1, (pos or 0) + 2) 80 | return a*256 + b 81 | end 82 | function M.uint32_be(s, pos) 83 | local a,b,c,d = sbyte(s, (pos or 0)+1, (pos or 0) + 4) 84 | return a*256^3 + b*256^2 + c*256 + d 85 | end 86 | function M.uint40_be(s, pos) 87 | local a, b, c, d, e = sbyte(s, (pos or 0)+1, (pos or 0) + 5) 88 | return a*256^4 + b*256^3 + c*256^2 + d*256 + e 89 | end 90 | function M.float32_le(s, pos) 91 | local a, b, c, d = sbyte(s, (pos or 0) + 1, (pos or 0) + 4) 92 | local s, e, f = d > 127 and -1 or 1, (d % 128)*2 
+ (c > 127 and 1 or 0), a + b*256 + (c % 128)*256^2 93 | if e > 0 and e < 255 then 94 | return s * (1+f/2^23) * 2^(e-127) 95 | else 96 | return e == 0 and (s * f/2^23 * 2^-126) or f == 0 and (s * inf) or nan 97 | end 98 | end 99 | function M.int32_le(s, pos) 100 | local a, b, c, d = sbyte(s, (pos or 0)+1, (pos or 0) + 4) 101 | return (d or 0)*256^3 + (c or 0)*256^2 + (b or 0)*256 + a - (d > 127 and 2^32 or 0) 102 | end 103 | function M.int64ish_le(s, pos) 104 | local a, b, c, d, e, f, g, h = sbyte(s, (pos or 0)+1, (pos or 0) + 8) 105 | return ((h % 128) * 256^7 + g * 256^6 + f*256^5 + e*256^4 + d*256^3 + c*256^2 + b*256 + a) * (h > 127 and -1 or 1) 106 | end 107 | 108 | function M.to_le32(n) 109 | local n = n % 2^32 110 | return schar(floor(n) % 256, floor(n / 256) % 256, floor(n / 256^2) % 256, floor(n / 256^3) % 256) 111 | end 112 | 113 | function M.to_bin(hs) 114 | return hs and sgsub(hs, "%x%x", hexbin) 115 | end 116 | function M.to_hex(bs) 117 | return bs and sgsub(bs, ".", hexbin) 118 | end 119 | 120 | M.sadd = CM and CM.sadd or function(a, ap, b, bp, length, out, on) 121 | local bsz, rx, unpack = #b, length, unpack or table.unpack 122 | while rx > 0 do 123 | if bp > bsz then 124 | out[on], on, bp, ap, rx = ssub(a, ap, ap+rx-1), on+1, bp + rx, ap + rx, 0 125 | else 126 | local slice = min(rx, bsz-bp+1, MAX_SLICE_SIZE) 127 | local t1, t2 = {sbyte(b, bp, bp+slice-1)}, {sbyte(a, ap, ap+slice-1)} 128 | for i=1,slice do 129 | t1[i] = (t1[i] + t2[i]) % 256 130 | end 131 | out[on], on, ap, bp, rx = schar(unpack(t1)), on+1, ap + slice, bp + slice, rx - slice 132 | end 133 | end 134 | return on 135 | end 136 | 137 | return M -------------------------------------------------------------------------------- /libs/casc/blte.lua: -------------------------------------------------------------------------------- 1 | -- SPDX-FileCopyrightText: © 2023 foxlit 2 | -- SPDX-License-Identifier: Artistic-2.0 3 | 4 | local M, plat, bin = {}, require("casc.platform"), 
require("casc.bin") 5 | local uint32_le, uint32_be, decompress = bin.uint32_le, bin.uint32_be, plat.decompress 6 | 7 | local string_cursor do 8 | local function read(self, n) 9 | local p = self.pos 10 | self.pos = p + n 11 | return self.str:sub(p, p+n-1) 12 | end 13 | local function seek(self, dir, n) 14 | assert(dir == 'cur', 'String cursor only supports relative seeks') 15 | self.pos = self.pos + n 16 | end 17 | function string_cursor(s) 18 | return {str=s, read=read, seek=seek, pos=1} 19 | end 20 | end 21 | local function closeAndReturn(h, ...) 22 | h:close() 23 | return ... 24 | end 25 | 26 | local salsa20 do 27 | local UINT32_TRUNCATE, R = 2^32, { 28 | 4,0,12,7, 8,4,0,9, 12,8,4,13, 0,12,8,18, 29 | 9,5,1,7, 13,9,5,9, 1,13,9,13, 5,1,13,18, 30 | 14,10,6,7, 2,14,10,9, 6,2,14,13, 10,6,2,18, 31 | 3,15,11,7, 7,3,15,9, 11,7,3,13, 15,11,7,18, 32 | 1,0,3,7, 2,1,0,9, 3,2,1,13, 0,3,2,18, 33 | 6,5,4,7, 7,6,5,9, 4,7,6,13, 5,4,7,18, 34 | 11,10,9,7, 8,11,10,9, 9,8,11,13, 10,9,8,18, 35 | 12,15,14,7, 13,12,15,9, 14,13,12,13, 15,14,13,18} 36 | function salsa20(data, key, iv, s, e) 37 | local bxor, char, byte = plat.bxor, string.char, string.byte 38 | local lit = #key == 16 and "expand 16-byte k" or "expand 32-byte k" 39 | local k2, k = {}, {[0]= 40 | uint32_le(lit, 0), uint32_le(key, 0), uint32_le(key, 4), uint32_le(key, 8), 41 | uint32_le(key, 12), uint32_le(lit, 4), uint32_le(iv, 0), uint32_le(iv, 4), 42 | 0, 0, uint32_le(lit, 8), uint32_le(key, #key-16), 43 | uint32_le(key, #key-12), uint32_le(key, #key-8), uint32_le(key, #key-4), uint32_le(lit, 12) 44 | } 45 | local function syn(ob, ki, at) 46 | local k4, a, b, c, d = k2[ki], byte(data, at, at+3) 47 | local k1, k2, k3 = k4 % 0x100, k4 % 0x10000, k4 % 0x1000000 48 | a = a and bxor(a, k1) 49 | b = b and bxor(b, (k2-k1)/0x100) 50 | c = c and bxor(c, (k3-k2)/0x10000) 51 | d = d and bxor(d, (k4-k3)/0x1000000) 52 | if ob > 4 then 53 | return a, b, c, d, syn(ob-4, ki+1, at+4) 54 | elseif ob == 4 then 55 | return a, b, c, d 56 | 
	-- (tail of the salsa20 keystream helper: variadic return dispatch)
	elseif ob == 3 then
		return a, b, c
	elseif ob == 2 then
		return a, b
	elseif ob == 1 then
		return a
	end
end

-- Generate keystream over [s, e] in 64-byte Salsa20 blocks; k holds the
-- 16-word state, k2 the working copy, R the quarter-round operand indices.
local o, oc, nb = {}, 1, e-s+1
for p=s, e, 64 do
	for i=0,15 do
		k2[i] = k[i]
	end
	-- 20 rounds as 10 double-rounds.
	for i=0,18,2 do
		for j=1,#R,4 do
			local x, rl = R[j], R[j+3]
			-- 32-bit add then left-rotate by rl bits, built from float math.
			local hiPow, loPow, s = 2^(32-rl), 2^rl, (k2[R[j+1]] + k2[R[j+2]]) % UINT32_TRUNCATE
			local r = s % hiPow
			k2[x] = bxor(k2[x], r * loPow + (s - r)/hiPow)
		end
	end
	for i=0,15 do
		k2[i] = (k2[i] + k[i]) % UINT32_TRUNCATE
	end
	o[#o+1] = char(syn(nb > 64 and 64 or nb, 0, p))
	-- 64-bit block counter carried in state words k[8]/k[9].
	k[8] = k[8] + 1
	if k[8] == UINT32_TRUNCATE then
		k[8], k[9] = 0, k[9]+1
	end
	-- Periodically fold accumulated pieces to bound table growth.
	if #o == 1023 then
		o[oc] = table.concat(o, "", oc)
		for i=#o,oc + 1,-1 do o[i] = nil end
		oc = (oc % 512) + 1
	end
	nb = nb - 64
end
return table.concat(o, "")
end
end

-- Decode one BLTE chunk (chunk[s..e]) according to its 1-byte format tag:
-- 'N' = plain, 'Z' = deflated, 'E' = encrypted (algo byte 83 == 'S', Salsa20).
-- Returns plain content, or nil, error-message[, 'missing-key'].
local function decodeChunk(chunk, s, e, idx, keys)
	local format = chunk:sub(s, s)
	if format == 'N' then
		return chunk:sub(s+1, e)
	elseif format == 'Z' then
		local dc, er = decompress(chunk:sub(s+1, e))
		if not dc or #dc == 0 then
			return nil, 'BLTE: chunk decompression failed' .. (er and ': ' .. er or '')
		end
		return dc
	elseif format == 'E' then
		local knsz = chunk:byte(s+1)        -- key name length
		local ivsz = chunk:byte(s+2+knsz)   -- IV length
		local algo = chunk:byte(s+3+knsz+ivsz)
		if s+3+knsz+ivsz >= e then
			return nil, 'BLTE: Encrypted chunk is too short'
		elseif algo == 83 then
			-- Key name is stored reversed; look it up in the keyring.
			local kname = chunk:sub(s+2, s+1+knsz):reverse()
			local key = type(keys) == "table" and keys[kname]
			if not key then
				return nil, 'BLTE: missing encryption key Kx' .. bin.to_hex(kname), 'missing-key'
			end
			-- Pad the IV to 8 bytes, then XOR in the chunk index bytewise (LE).
			local iv = chunk:sub(s+3+knsz, s+2+knsz+ivsz)
			iv = (iv .. ("\0"):rep(8-#iv)):gsub("()(.)", function(p,c)
				return string.char(plat.bxor(c:byte(), math.floor(idx/256^(p-1)) % 256))
			end)
			local o, err = salsa20(chunk, key, iv, s+4+knsz+ivsz, e)
			if not o then return o, err end
			-- The decrypted payload carries its own inner format tag; recurse.
			return decodeChunk(o, 1, #o, idx, keys)
		else
			return nil, ('BLTE: Unsupported chunk format: Ex%02x'):format(algo or 1028)
		end
	else
		return nil, 'BLTE: unknown chunk format: ' .. tostring(format)
	end
end
-- Parse a BLTE stream from cursor h. dataSize is only used for headerless
-- single-chunk streams. Returns content, header-bytes (or error message),
-- and whether any encrypted chunk was zero-filled instead of decrypted.
local function parseBLTE(h, dataSize, keys, opts)
	local hadZerofilledContent = false
	local header = h:read(8)
	if type(header) ~= "string" or header:sub(1,4) ~= 'BLTE' then
		local err = type(header)
		if err == "string" then
			err = #header == 0 and "empty string" or ('%02x'):rep(math.min(#header, 4)):format(header:byte(1,4))
		end
		return nil, 'BLTE: expected header signature; got ' .. err
	end
	local ofs = uint32_be(header, 4) -- header size; 0 means one headerless chunk

	local chunks, ret, err, err2 = ofs > 0 and {}
	if ofs > 0 then
		local sz = h:read(4)
		-- NOTE(review): % 2^16 caps the chunk count at 65535; the count field
		-- is nominally 24 bits wide -- confirm against the BLTE format spec.
		local buf, cn = h:read(uint32_be(sz) % 2^16 * 24), 1
		header = header .. sz .. buf
		if #header > ofs then
			return nil, 'BLTE: header overread'
		end
		h:seek("cur", ofs-#header)
		-- 24-byte directory entries: encoded size, decoded size, MD5.
		for p=0, #buf-1, 24 do
			local esz, dsz, i = uint32_be(buf, p), uint32_be(buf, p+4), cn
			cn, chunks[cn], err, err2 = cn+1, decodeChunk(h:read(esz), 1, esz, cn-1, keys)
			if err2 == 'missing-key' and opts and opts.zerofillEncryptedChunks then
				hadZerofilledContent = true
				chunks[i] = ('\0'):rep(dsz)
				if opts.log then
					opts.log('WARN', err, 'Zero-filled chunk ' .. (cn-1) .. ' (' .. dsz .. ' bytes)')
				end
			end
			if not chunks[cn-1] then
				return nil, err
			end
		end
		ret = table.concat(chunks, "")
	else
		local chunk = h:read(dataSize-8)
		header, ret, err, err2 = header .. chunk, decodeChunk(chunk, 1, -1, 0, keys)
	end
	if not ret then
		hadZerofilledContent = nil
	end
	return ret, ret and header or err, hadZerofilledContent
end

-- Read and decode one BLTE-encoded entry from a local archive file.
function M.readArchive(path, offset, keys, opts)
	assert(type(path) == "string" and type(offset) == "number" and (type(keys) == "table" or not keys) and (type(opts) == "table" or opts == nil), 'Syntax: "content", "header" = blte.readArchive("path", offset[, keyring][, options])')
	assert(opts == nil or opts.log == nil or type(opts.log) == "function", "blte.readArchive: if specified, options.log must be a function")

	local h, err = io.open(path, "rb")
	if not h then
		return nil, err
	end
	h:seek("set", offset)

	-- The entry size lives at offset 16 of the 30-byte archive entry header.
	return closeAndReturn(h, parseBLTE(h, uint32_le(h:read(30), 16)-30, keys, opts))
end
-- Decode an in-memory BLTE string.
function M.readData(str, keys, opts)
	assert(type(str) == "string" and (type(keys) == "table" or not keys), 'Syntax: "content", "header" = blte.readData("data"[, keyring][, options])')
	assert(opts == nil or opts.log == nil or type(opts.log) == "function", "blte.readData: if specified, options.log must be a function")

	return parseBLTE(string_cursor(str), #str, keys, opts)
end

local function hexbyte(c)
	return string.char(tonumber(c,16))
end
-- Build a binary-keyed keyring from hex-string key tables, merging the
-- optional casc.keys module (unless skipModuleKeys) with the caller's table.
function M.newKeyRing(keys, skipModuleKeys)
	local ret, dk = {}
	if not skipModuleKeys then
		local ok, defKeys = pcall(require, "casc.keys")
		if not (type(defKeys) == 'table' or not ok) then
			return nil, 'blte.newKeyRing: casc.keys module, if available, must return a table'
		end
		dk = ok and defKeys
	end
	local kt = dk or keys
	for i=1,dk and keys and 2 or 1 do
		if kt ~= nil then
			assert(type(kt) == "table", 'blte.newKeyRing: table expected, got ' .. type(kt))
			for k,v in pairs(kt) do
				if not (type(k) == "string" and type(v) == "string" and k:match("^[%x%s]+$") and v:match("^[%x%s]+$")) then
					return nil, ('Invalid key table entry: %q=%q (%d)'):format(tostring(k), tostring(v), i)
				end
				-- Normalize whitespace-separated hex pairs into raw bytes.
				ret[k:gsub("%s*(%x%x)%s*", hexbyte)] = v:gsub("%s*(%x%x)%s*", hexbyte)
			end
		end
		kt = keys
	end
	return ret
end

return M
--------------------------------------------------------------------------------
/libs/casc/bspatch.lua:
--------------------------------------------------------------------------------
-- SPDX-FileCopyrightText: © 2023 foxlit
-- SPDX-License-Identifier: Artistic-2.0

local M, decompress, bin = {}, require("casc.platform").decompress, require("casc.bin")
local CONCAT_CHUNK_SIZE, CONCAT_STOP_LENGTH = 512, 16384

-- Apply a ZBSDIFF1 (zlib-compressed bsdiff) patch to `old`; returns the new
-- content, or nil and a "corrupt patch: ..." message.
function M.patch(old, patch)
	local ssub, int64ish_le, sadd = string.sub, bin.int64ish_le, bin.sadd

	if ssub(patch, 1, 8) ~= "ZBSDIFF1" then
		return nil, "corrupt patch: signature mismatch"
	end

	-- Header: compressed control/data block sizes and declared output size.
	local csz, dsz, nsz = int64ish_le(patch, 8), int64ish_le(patch, 16), int64ish_le(patch, 24)
	if #patch < 32 + csz + dsz then
		return nil, "corrupt patch: header size mismatch"
	end

	local control = decompress(ssub(patch, 33, 32+csz))
	local data = decompress(ssub(patch, 33+csz, 32+csz+dsz))
	local extra = decompress(ssub(patch, 33+csz+dsz))
	local o, on, oh, op, dp, ep, np = {}, 1, 1, 1,1,1, 0
	-- Control stream: 24-byte triples (diff length, extra length, seek).
	for p=0,#control-1,24 do
		local x, y = int64ish_le(control, p), int64ish_le(control, p+8)
		if x < 0 or y < 0 then
			return nil, "corrupt patch: negative block length"
		elseif np + x + y > nsz then
			return nil, "corrupt patch: overflows declared size"
		elseif #data < dp + x - 1 then
			return nil, "corrupt patch: overread data"
		elseif #extra < ep + y - 1 then
			return nil, "corrupt patch: overread extra"
		end
		if x > 0 then
			-- Diff block: add x bytes of `data` to the matching bytes of `old`.
			dp, op, on = dp + x, op + x, sadd(data, dp, old, op, x, o, on)
		end
		if y > 0 then
			-- Extra block: copy y literal bytes.
			o[on], on, ep = ssub(extra, ep, ep + y - 1), on+1, ep + y
		end
		-- Periodically concatenate finished pieces to bound table growth.
		if oh + CONCAT_CHUNK_SIZE < on then
			o[oh], on = table.concat(o, "", oh, on-1), oh + 1
			if #o[oh] > CONCAT_STOP_LENGTH then oh = oh + 1 end
		end
		op, np = op + int64ish_le(control, p+16), np + x + y
	end

	if np ~= nsz then
		return nil, "corrupt patch: underflows declared size"
	end

	return table.concat(o, "", 1, on-1)
end

return M
--------------------------------------------------------------------------------
/libs/casc/dbc.lua:
--------------------------------------------------------------------------------
-- Thin shim: re-export the standalone LuaDBC module under casc.dbc.
return require("dbc")
--------------------------------------------------------------------------------
/libs/casc/encoding.lua:
--------------------------------------------------------------------------------
-- SPDX-FileCopyrightText: © 2023 foxlit
-- SPDX-License-Identifier: Artistic-2.0

local M, bin = {}, require("casc.bin")
local uint32_be, uint16_le, ssub = bin.uint32_be, bin.uint16_le, string.sub

local encoding_mt = {} do
	local api = {}
	-- Parse one content page into `into` (content hash -> array of keys).
	-- chunk = {firstKey, checksumPos, pageOffset} from the page directory.
	local function parseContentChunk(data, chunk, into)
		local p, last, firstKey = chunk[3]-1, chunk[3] + 4059, chunk[1]
		repeat
			local c, chsh = uint16_le(data, p), ssub(data, p+7, p+22)
			into[chsh], p = c > 0 and {} or nil, p + 22
			for i=1, c do
				into[chsh][i], p = ssub(data, p+1, p+16), p + 16
			end
		until p > last or chsh < firstKey
	end
	-- Binary-search the page directory for the page that may contain `key`,
	-- lazily parsing it at most once; returns into[key] when freshly parsed.
	local function searchChunks(data, chunks, key, parse, into)
		if chunks.unloaded > 0 then
			local l, h = 1, #chunks
			while l ~= h do
				local m = math.ceil((l + h)/2)
				if chunks[m][1] <= key then
					l = m
				else
					h = m - 1
				end
			end
			local chunk = chunks[l]
			if not chunk[4] then
				chunk[4], chunks.unloaded = true, chunks.unloaded - 1
				parse(data, chunk, into)
				return into[key]
			end
		end
	end
	-- Map a raw (binary) content hash to its known encoding keys, if any.
	function api:getEncodingHash(rawContentHash)
		local dt = self._data
		local cache = dt.ccache
		return cache[rawContentHash] or searchChunks(dt.data, dt.content, rawContentHash, parseContentChunk, cache)
	end
	function encoding_mt:__tostring()
		return ("CASC:encoding (%d/%d c/d-chunks)"):format(#self._data.content, #self._data.encoding)
	end
	encoding_mt.__index = api
end

-- Parse an encoding file into a lazily-populated lookup object.
function M.parse(data)
	assert(type(data) == "string")
	assert(ssub(data,1,2) == "EN", "encoding magic mismatch")

	local numContentChunks = uint32_be(data, 9)
	local numEncodingChunks = uint32_be(data, 13)
	local recipeSize = uint32_be(data, 18)

	-- NUL-terminated ESpec recipe strings.
	local recipes = {}
	local p2, rid = 23, 0 repeat
		rid, recipes[rid], p2 = rid + 1, data:match("^(%Z+)%z()", p2)
	until p2 > 22 + recipeSize

	-- Page directories: {firstKey, md5Pos, pageOffset} per 4096-byte page.
	local contentChunks, p2 = {unloaded=numContentChunks}, 23 + recipeSize
	local cbase = p2 + 32 * numContentChunks
	for i=1,numContentChunks do
		contentChunks[i], p2 = {ssub(data, p2, p2+15), p2+16, cbase+(i-1)*4096}, p2 + 32
	end

	local encodingChunks, p2 = {unloaded=numEncodingChunks}, cbase + 4096*numContentChunks
	local ebase = p2 + 32 * numEncodingChunks
	for i=1,numEncodingChunks do
		encodingChunks[i], p2 = {ssub(data, p2, p2+15), p2+16, ebase+(i-1)*4096}, p2 + 32
	end

	local _data = {data=data, recipes=recipes, content=contentChunks, encoding=encodingChunks}
	_data.ccache, _data.ecache = {}, {}
	return setmetatable({_data=_data}, encoding_mt)
end

return M
--------------------------------------------------------------------------------
/libs/casc/init.lua:
--------------------------------------------------------------------------------
-- SPDX-FileCopyrightText: © 2023 foxlit
-- SPDX-License-Identifier: Artistic-2.0

local M = {_NAME="LuaCASC", _VERSION="LuaCASC 1.16"}
local plat, bin = require("casc.platform"), require("casc.bin")
local blte, bspatch = require("casc.blte"), require("casc.bspatch")
local encoding, root = require("casc.encoding"), require("casc.root")
local install, ribbit = require("casc.install"), require("casc.ribbit")

local uint32_le, uint32_be, uint40_be, to_bin, to_hex, ssub
	= bin.uint32_le, bin.uint32_be, bin.uint40_be, bin.to_bin, bin.to_hex, string.sub

local DATA_DIRECTORIES = {"Data", "HeroesData"}

-- Per-locale rating functions: given (localeFlags, typeFlags) return
-- 2 (preferred), 1 (acceptable), or nil (reject).
M.locale = {} do
	for k,v in pairs({US=0x02, KR=0x04, FR=0x10, DE=0x20, CN=0x40, ES=0x80, TW=0x0100, GB=0x0200, MX=0x1000, RU=0x2000, BR=0x4000, IT=0x8000, PT=0x010000}) do
		local isCN, m = k == "CN", v * 2
		M.locale[k] = function(loc, tf)
			return loc % m >= v and ((tf % 16 > 7) == isCN and 2 or 1) or nil
		end
	end
end
local function defaultLocale(loc, tf, cdn)
	return (loc % 0x400 < 0x200 and 0 or 2) + (tf % 16 < 8 and 1 or 0) - (cdn and 4 or 0)
end

-- Verify the MD5 of the first vararg against `hash` when casc.verifyHashes
-- is enabled; passes ... through on success, returns nil, emsg on mismatch.
local function checkMD5(casc, emsg, hash, ...)
	if casc.verifyHashes and type((...)) == "string" then
		local actual, expected = plat.md5((...)), #hash == 16 and to_hex(hash) or hash
		if actual ~= expected then
			return nil, emsg
		end
	end
	return ...
end
-- Verify an encoding-hash (of the BLTE header) and/or content-hash (of the
-- decoded payload) for a (content, header, hadZerofilledContent) result.
local function maybeCheckMD5s(casc, eMD5, cMD5, ...)
	local cnt, head, hadZerofilledContent = ...
	if cnt then
		if eMD5 then
			local ok, err = checkMD5(casc, "encoding hash mismatch", eMD5, head)
			if not ok then
				return nil, err
			end
		end
		if cMD5 then
			if hadZerofilledContent ~= true then
				return checkMD5(casc, "content hash mismatch", cMD5, cnt)
			else
				-- Zero-filled chunks would trivially fail the content check.
				casc.log("WARN", "Skipping content MD5 check; some chunks were not decrypted", eMD5, cMD5)
			end
		end
	end
	return ...
end

-- Recursive argument validator: (value, type1, type2) triples; errors at the
-- caller's position when a value matches neither accepted type.
local function checkArgs(name, idx, v, t1, t2, ...)
	if t1 then
		local tt = type(v)
		if tt ~= t1 and tt ~= t2 then
			error('Invalid argument #' .. idx .. " to " .. name .. ": expected " .. t1 .. (t2 and t2 ~= t1 and "/" .. t2 or "") .. ", got " .. tt, 3)
		end
		return checkArgs(name, idx+1, ...)
	end
end
local function checkFileExists(path)
	local h = io.open(path, "r")
	if h then
		h:close()
		return true
	end
end
-- Read an entire file addressed by plat.path(...); returns contents or nil, err.
local function readFile(...)
	local path = plat.path(...)
	if not path then
		return nil, "no path specified"
	end
	local h, err = io.open(path, "rb")
	if h then
		local c = h:read("*a")
		h:close()
		return c
	end
	return h, err
end
-- Fetch a CDN resource, trying each configured host; a host that succeeds is
-- moved to the front of casc.cdn for subsequent requests.
local function readCDN(casc, headers, ...)
	local cdn = casc.cdn
	for i=1,cdn and #cdn or 0 do
		local url = plat.url(cdn[i], ...)
		casc.log("INFO", "Downloading " .. url .. (headers and headers.Range and " [" .. tostring(headers.Range) .. "]" or ""))
		local ok, err, estat = plat.http(url, headers)
		if ok then
			if i ~= 1 then
				table.insert(casc.cdn, 1, table.remove(casc.cdn, i))
			end
			return ok
		else
			casc.log("WARN", "Failed to retrieve CDN data: " .. tostring(err), url)
			-- Non-HTTP failures (and sub-400 statuses) abort; 4xx/5xx tries the next host.
			if type(estat) ~= "number" or estat < 400 then
				return nil, err, estat
			end
		end
	end
	return nil, "not available via CDN", plat.url("*", ...)
end
-- Resolution order: local cache file, local install file, then CDN
-- (populating the cache on a successful download).
local function readCache(casc, cpath, lpath, ...)
	local ret, err = nil, "no read requested"
	if cpath then
		ret, err = readFile(cpath)
	end
	if not ret and lpath then
		ret, err = readFile(lpath)
	end
	if not ret and (casc and casc.cdn and ...) then
		ret, err = readCDN(casc, nil, ...)
		if ret and cpath then
			local h = io.open(cpath, "wb")
			if h then
				h:write(ret)
				h:close()
			end
		end
	end
	return ret, err
end
local function readCacheCommon(casc, cname, ...)
	return readCache(casc, plat.path(casc.cache, cname), plat.path(casc.base, ...), ...)
end
-- Split a hex hash into its CDN directory prefixes: "ab", "cd", full hash.
local function prefixHash(h)
	local a, b, c = h:match("((%x%x)(%x%x).+)")
	return b, c, a
end
-- Normalize a hash given in either form; returns lowercase-hex, raw-binary.
local function adjustHash(h)
	return #h == 32 and h:lower() or to_hex(h), #h == 16 and h or to_bin(h)
end
local function toBinHash(h)
	return #h == 32 and to_bin(h) or h
end
local function toHexHash(h)
	return #h == 16 and to_hex(h) or h
end

-- Parse pipe-delimited "Name!TYPE" info files (versions/cdns/.build.info)
-- into an array of row tables keyed by column name; DEC columns become numbers.
local function parseInfoData(data)
	local hname, htype, hn, ret = {}, {}, 1, {}
	local i, s, line = data:gmatch("[^\n\r]+")
	line = i(s, line)
	for e in line:gmatch("[^|]+") do
		hn, hname[hn], htype[hn] = hn + 1, e:match("^([^!]+)!([^:]+)")
	end

	for e in i,s,line do
		local l, ln = {}, 1
		for f in e:gmatch("|?([^|]*)") do
			if f ~= "" then
				l[hname[ln]] = htype[ln] == "DEC" and tonumber(f) or f
			end
			ln = ln + 1
		end
		if ln > 1 then
			ret[#ret+1] = l
		end
	end

	return ret
end
-- Parse "name = tokens..." config files; repeated names become arrays of
-- token arrays. Lines starting with # are comments.
local function parseConfigData(data, into)
	local ret, ln, last, lr = type(into) == "table" and into or {}, 1
	for l in data:gmatch("[^\r\n]+") do
		if l:match("^%s*[^#].-%S") then
			local name, args = l:match("^%s*(%S+)%s*=(.*)$")
			if name then
				last, ln, lr = {}, 1, ret[name]
				if lr then
					if type(lr[1]) == "table" then
						lr[#lr + 1] = last
					else
						ret[name] = {ret[name], last}
					end
				else
					ret[name] = last
				end
			end
			for s in (args or l):gmatch("%S+") do
				last[ln], ln = s, ln + 1
			end
		end
	end
	return ret
end
-- Parse a local .idx file into `into` (9-byte key prefix -> 40-bit packed
-- archive-id/offset); first mapping for a key wins. Returns the entry count.
local function parseLocalIndexData(data, into)
	-- TODO: verify the Blizzhash integrity blocks within
	local pos, sub, len = 8 + uint32_le(data), data.sub
	pos = pos + ((16 - pos % 16) % 16)
	len, pos = uint32_le(data, pos),
		pos + 9
	assert(len % 18 == 0, "Index data block length parity check")

	for i=1, len/18 do
		local key = sub(data, pos, pos + 8)
		if not into[key] then
			into[key] = uint40_be(data, pos+8)
		end
		pos = pos + 18
	end

	return len/18
end

local getContent
-- Try to reconstruct content cMD5 by patching an older version: rec holds
-- (old encoding hash, patch hash, patch size) triples from patch-entry lines.
local function getPatchedContent(casc, rec, ctag, cMD5)
	local cnt, err
	for i=1, rec and #rec or 0, 3 do
		local oc = getContent(casc, rec[i], nil, ctag, true)
		if oc then
			local pMD5 = toHexHash(rec[i+1])
			local pc, _perr = maybeCheckMD5s(casc, nil, pMD5, readCDN(casc, nil, "patch", prefixHash(pMD5)))
			if pc then
				cnt, err = maybeCheckMD5s(casc, nil, cMD5, bspatch.patch(oc, pc))
				if cnt then
					return cnt
				end
				casc.log("WARN", "Failed to apply patch: " .. tostring(err), toHexHash(rec[i]) .. "+" .. pMD5)
			end
		end
	end
	return nil, err
end
-- Retrieve content by encoding hash (optionally verified against cMD5),
-- trying in order: file cache, local archives, patch recipes, CDN.
function getContent(casc, eMD5, cMD5, ctag, useOnlyLocalData)
	local cnt, err, e2
	local eMD5, ehash = adjustHash(eMD5)
	local lcache = ctag and plat.path(casc.cache, ctag .. "." ..
		eMD5)

	-- 1) Previously cached decoded file.
	local ch = lcache and io.open(lcache, "rb")
	if ch then
		cnt, err, e2 = maybeCheckMD5s(casc, nil, cMD5, ch:read("*a"))
		ch:close()
		if cnt then return cnt end
	end

	-- 2) Local archives: the index maps a 9-byte key prefix to a packed
	-- 40-bit value (archive number in the high bits, offset in the low 30).
	local ehash9 = ssub(ehash, 1, 9)
	local lloc = casc.base and casc.index and casc.index[ehash9]
	if lloc then
		cnt, err, e2 = maybeCheckMD5s(casc, eMD5, cMD5, blte.readArchive(plat.path(casc.base, "data", ("data.%03d"):format(lloc / 2^30)), lloc % 2^30, casc.keys, casc))
		if cnt then return cnt end
	end

	if casc.cdn and not useOnlyLocalData then
		-- 3) Patch an older version we already have, if recipes are loaded.
		if cMD5 and casc.patchRecipes then
			cnt, err, e2 = getPatchedContent(casc, casc.patchRecipes[toBinHash(cMD5)], ctag, cMD5)
		end

		-- 4) CDN: either a "range:archive" entry from the CDN index, or a
		-- loose file addressed by its own encoding hash.
		local cloc = not cnt and (casc.indexCDN and casc.indexCDN[ehash9] or eMD5)
		if cloc then
			local range, name = cloc:match("(%d+%-%d+):(.+)")
			cnt, err, e2 = readCDN(casc, range and {Range="bytes=" .. range}, "data", prefixHash(name or cloc))
			if cnt then
				cnt, err, e2 = maybeCheckMD5s(casc, eMD5, cMD5, blte.readData(cnt, casc.keys, casc))
			end
		end
	end

	if cnt and lcache then
		local ch = io.open(lcache, "wb")
		if ch then
			ch:write(cnt)
			ch:close()
		end
	end

	if cnt then
		return cnt
	elseif err then
		return nil, err, e2
	end
	return nil, "could not retrieve file content", eMD5 .. "/" ..
		(cMD5 and toHexHash(cMD5) or "?")
end
-- Retrieve content by content hash: look up all known encoding keys, trying
-- local-only sources first (j==1), then allowing CDN fallback (j==2).
local function getContentByContentHash(casc, cMD5, ctag)
	local cMD5, chash = adjustHash(cMD5)
	local err, e2, cnt = "no known encodings for content hash", cMD5
	local keys = casc.encoding:getEncodingHash(chash)

	for j=1,keys and 2 or 0 do
		for i=1,#keys do
			cnt, err, e2 = getContent(casc, keys[i], cMD5, ctag, j == 1)
			if cnt then
				return cnt
			end
		end
	end

	return nil, err, e2
end
-- Pick the best-rated variant content hash for a path/fileID, preferring
-- locally-available variants per the rating function's third argument.
local function getVariantContentHash(casc, pathOrID, rateFunc)
	local rateFunc, idx, score, seen, vscore, chash = rateFunc or casc.locale, casc.index, -math.huge

	for _, vchash, vinfo in casc.root:getFileVariants(pathOrID) do
		local isLocal, keys = false, idx and casc.encoding:getEncodingHash(vchash)
		for i=1, keys and #keys or 0 do
			local key = keys[i]
			if idx[ssub(key, 1, 9)] then
				isLocal = true
				break
			end
		end
		seen, vscore = 1, rateFunc(vinfo[2], vinfo[1], not isLocal)
		if vscore and vscore > score then
			score, chash = vscore, vchash
		end
	end

	if not seen then
		return nil, type(pathOrID) == "string" and "path not in root file" or "fileID not in root file", pathOrID
	elseif not chash then
		return nil, "no acceptable variants are accessible", pathOrID
	end
	return chash
end
-- Default logger: only surface FAIL messages, to stderr.
local function defaultLog(mtype, text, extra)
	if mtype == "FAIL" then
		io.stderr:write(text .. (extra ~= nil and "\n\t" .. tostring(extra) or "") ..
			"\n")
	end
end
local function splitCDNHosts(hosts, path)
	local cdn = {}
	for host in hosts:gmatch("(%S+)") do
		cdn[#cdn+1] = plat.url("http://", host, path)
	end
	return cdn
end

-- Lazily-loaded CDN archive index: looking up a missing 9-byte key prefix
-- pulls archive indices one at a time until the key is found.
local indexCDN_mt = {} do
	-- Parse a CDN .index file into `into`: key prefix -> "start-end:archive".
	local function parseCDNIndexData(name, data, into)
		-- TODO: verify the MD5 integrity blocks within
		local dlen, p = #data-28, 0
		for i=1, math.floor(dlen/4100) - math.floor(dlen/844600) do
			for pos=p, p+4072, 24 do
				local len = uint32_be(data, pos+16)
				if len > 0 then
					local ofs = uint32_be(data, pos+20)
					into[ssub(data, pos+1, pos+9)] = ofs .. "-" .. (ofs+len-1) .. ":" .. name
				end
			end
			p = p + 4096
		end
	end

	function indexCDN_mt:__index(ehash)
		local archives, casc, v = self._source, self._owner
		-- Consume pending archive names from the back of _source.
		for i=#archives, 1, -1 do
			v, archives[i] = archives[i]
			local idat, err = readCache(casc,
				plat.path(casc.cache, "index." .. v),
				plat.path(casc.base, "indices", v .. ".index"),
				"data", prefixHash(v ..
".index") 353 | ) 354 | if err and not idat then 355 | casc.log("FAIL", "Failed to load CDN index", v) 356 | end 357 | parseCDNIndexData(v, idat, self) 358 | if rawget(self, ehash) then 359 | break 360 | end 361 | end 362 | if #archives == 0 then 363 | self._owner, self._source = nil 364 | setmetatable(self, nil) 365 | end 366 | return self[ehash] 367 | end 368 | end 369 | 370 | local handle = {} 371 | local handle_mt = {__index=handle} 372 | function handle:readFile(pathOrID, lang, cache) 373 | if cache == nil then cache = self.cacheFiles end 374 | lang = M.locale[lang] or lang 375 | checkArgs("cascHandle:readFile", 1, pathOrID,"string","number", lang,"function","nil", cache,"boolean","nil") 376 | 377 | local chash, err, e2 = getVariantContentHash(self, pathOrID, lang) 378 | if not chash then 379 | return nil, err, e2 380 | end 381 | 382 | return getContentByContentHash(self, chash, cache and "file") 383 | end 384 | function handle:readFileByEncodingHash(ehash, cache) 385 | if cache == nil then cache = self.cacheFiles end 386 | checkArgs("cascHandle:readFileByEncodingHash", 1, ehash,"string",nil, cache,"boolean","nil") 387 | return getContent(self, ehash, nil, cache and "file") 388 | end 389 | function handle:readFileByContentHash(chash, cache) 390 | if cache == nil then cache = self.cacheFiles end 391 | checkArgs("cascHandle:readFileByContentHash", 1, chash,"string",nil, cache,"boolean","nil") 392 | return getContentByContentHash(self, chash, cache and "file") 393 | end 394 | function handle:getFileContentHash(pathOrID, lang) 395 | lang = M.locale[lang] or lang 396 | checkArgs("cascHandle:getFileContentHash", 1, pathOrID,"string","number", lang,"function","nil") 397 | 398 | local chash, err, e2 = getVariantContentHash(self, pathOrID, lang) 399 | if not chash then 400 | return nil, err, e2 401 | end 402 | return toHexHash(chash) 403 | end 404 | function handle:getFileVariants(pathOrID) 405 | checkArgs("cascHandle:getFileVariants", 1, pathOrID,"string","number") 
	local ret, seen = {}, {}
	for _, vchash, vinfo in self.root:getFileVariants(pathOrID) do
		local key, langs, ln = to_hex(vchash), {}, 1
		for lang, rate in pairs(M.locale) do
			-- Each locale is attributed to the first variant that accepts it.
			if seen[lang] == nil and rate(vinfo[2], vinfo[1], true) then
				langs[ln], ln, seen[lang] = lang, ln + 1, vchash
			end
		end
		ret[key] = langs
	end

	if not next(ret) then
		return nil, "no known variants"
	end
	return ret
end
-- Set the handle's default locale rating (casc.locale key or function).
function handle:setLocale(locale)
	checkArgs("cascHandle:setLocale", 1, M.locale[locale] or locale,"function","nil")
	self.locale = M.locale[locale] or locale or defaultLocale
end
function handle_mt:__tostring()
	local cdns = self.cdn and table.concat(self.cdn, "|") or "--"
	return ("CASC: <%s;%s %s;%s>"):format(tostring(self.base), cdns, tostring(self.bkey), tostring(self.ckey))
end

-- Resolve a local install directory (or a flavor subdirectory with a
-- .flavor.info file) to its data root plus the active build selection.
local function selectLocalBuild(path)
	local buildInfoPath = plat.path(path, ".build.info")
	if checkFileExists(buildInfoPath) then
		return path, M.localbuild(buildInfoPath, M.selectActiveBuild)
	end
	buildInfoPath = plat.path(path, "..", ".build.info")
	local fi = checkFileExists(buildInfoPath) and readFile(plat.path(path, ".flavor.info"))
	fi = fi and parseInfoData(fi)
	local product = fi and fi[1] and fi[1]["Product Flavor"]
	if product then
		return plat.path(path, ".."), M.localbuild(buildInfoPath, M.selectActiveBuild, product)
	end
end
-- Normalize casc.open arguments ({conf} | path[, conf] | patchURL[, conf] |
-- legacy positional form) into a validated handle table.
local function parseOpenArgs(...)
	local conf, extra = ...
447 | if type(conf) == "table" then 448 | assert(type(conf.bkey) == "string", 'casc.open: conf.bkey must be a string') 449 | if not (conf.ckey and conf.cdn) then 450 | conf.ckey, conf.cdn = nil 451 | end 452 | for s in ("base ckey"):gmatch("%S+") do 453 | assert(conf[s] == nil or type(conf[s]) == "string", ('casc.open: if specified, conf.%s must be a string'):format(s)) 454 | end 455 | if type(conf.cdn) == "string" then 456 | conf.cdn = {conf.cdn} 457 | elseif type(conf.cdn) == "table" then 458 | for i=1,#conf.cdn do 459 | assert(type(conf.cdn[i]), "casc.open: if specified, conf.cdn must be a string or an array of strings") 460 | end 461 | elseif conf.cdn ~= nil then 462 | assert(false, "casc.open: if specified, conf.cdn must be a string or an array of strings") 463 | end 464 | assert(type(conf.base) == "string" or (type(conf.cdn) == "table" and conf.cdn[1]), 'casc.open: at least one of (conf.base, (conf.cdn, conf.ckey)) must be specified') 465 | conf.locale = M.locale[conf.locale] or (conf.locale == nil and defaultLocale) or conf.locale 466 | conf.verifyHashes = conf.verifyHashes == nil or conf.verifyHashes 467 | conf.log = conf.log == nil and defaultLog or conf.log 468 | conf.usePatchEntries = conf.usePatchEntries == nil or conf.usePatchEntries 469 | conf.mergeInstall = conf.mergeInstall ~= nil or conf.mergeInstall or false 470 | conf.requireRootFile = conf.requireRootFile ~= false 471 | conf.cacheFiles = conf.cacheFiles or false 472 | conf.zerofillEncryptedChunks = conf.zerofillEncryptedChunks or false 473 | assert(conf.locale == nil or type(conf.locale) == 'function', 'casc.open: if specified, conf.locale must be a function or a valid casc.locale key') 474 | assert(conf.keys == nil or type(conf.keys) == 'table', 'casc.open: if specificed, conf.keys must be a table') 475 | assert(type(conf.verifyHashes) == 'boolean', 'casc.open: if specified, conf.verifyHashes must be a boolean') 476 | assert(type(conf.log) == 'function', 'casc.open: if specified, conf.log must 
be a function') 477 | assert(type(conf.usePatchEntries) == 'boolean', 'casc.open: if specified, conf.usePatchEntries must be a boolean') 478 | assert(type(conf.requireRootFile) == 'boolean', 'casc.open: if specified, conf.requireRootFile must be a boolean') 479 | assert(type(conf.mergeInstall) == 'boolean' or type(conf.mergeInstall) == 'string', 'casc.open: if specified, conf.mergeInstall must be a string or a boolean') 480 | assert(type(conf.cacheFiles) == 'boolean', 'casc.open: if specified, conf.cacheFiles must be a boolean') 481 | assert(type(conf.zerofillEncryptedChunks) == 'boolean', 'casc.open: if specified, conf.zerofillEncryptedChunks must be a boolean') 482 | assert(conf.pathFileIDLookup == nil or type(conf.pathFileIDLookup) == 'function', 'casc.open: if specified, conf.pathFileIDLookup must be a function') 483 | assert(conf.cache == nil or type(conf.cache) == "boolean" or type(conf.cache) == "string", 'casc.open: if specified, conf.cache must be a string or a boolean') 484 | 485 | local c2 = {} 486 | for k in ("base bkey cdn ckey cache verifyHashes locale buildInfo mergeInstall usePatchEntries requireRootFile cacheFiles log keys zerofillEncryptedChunks pathFileIDLookup"):gmatch("%S+") do 487 | c2[k] = conf[k] 488 | end 489 | c2.cache = type(c2.cache) == "string" and c2.cache or (c2.cache ~= false and os.getenv("LUACASC_CACHE")) or nil 490 | 491 | return setmetatable(c2, handle_mt) 492 | 493 | elseif type(extra) == "string" and #extra == 32 then 494 | local base, build, cdn, cdnKey, cache = ... 
-- pre-1.3 casc.open() syntax 495 | return parseOpenArgs({base=base, bkey=build, cdn=cdn, ckey=cdnKey, cache=cache, verifyHashes=false}) 496 | 497 | elseif type(conf) == "string" then 498 | local base, build, cdn, cdnKey, _, info 499 | if conf:match("^%a+://.+#.") then 500 | build, cdn, cdnKey, _, info = M.cdnbuild(conf:match("(.+)#(.+)")) 501 | else 502 | local cbase 503 | cbase, build, cdn, cdnKey, _, info = selectLocalBuild(conf) 504 | for i=1, cbase and build and #DATA_DIRECTORIES or 0 do 505 | base = plat.path(cbase, DATA_DIRECTORIES[i]) 506 | if checkFileExists(plat.path(base, "config", prefixHash(build))) then 507 | break 508 | end 509 | base = nil 510 | end 511 | end 512 | if not build then 513 | return nil, cdn 514 | end 515 | local c = {base=base, bkey=build, cdn=cdn, ckey=cdnKey, verifyHashes=true, buildInfo=info} 516 | if type(extra) == "table" then 517 | for k,v in pairs(extra) do 518 | c[k] = v 519 | end 520 | end 521 | return parseOpenArgs(c) 522 | end 523 | error('Syntax: handle = casc.open({conf} or "rootPath"[, {conf}] or "patchURL"[, {conf}])', 3) 524 | end 525 | 526 | function M.conf(root, options) 527 | checkArgs("casc.conf", 1, root,"string",nil, options,"table","nil") 528 | return parseOpenArgs(root, options) 529 | end 530 | function M.open(conf, ...) 531 | local casc, err = parseOpenArgs(conf, ...) 532 | if not casc then return nil, err end 533 | casc.keys, err = blte.newKeyRing(casc.keys) 534 | if not casc.keys then return nil, err end 535 | 536 | casc.log("OPEN", "Loading build configuration", casc.bkey) 537 | local cdat, err = checkMD5(casc, "build configuration hash mismatch", casc.bkey, readCacheCommon(casc, "build." .. casc.bkey, "config", prefixHash(casc.bkey))) 538 | if not cdat then return nil, "build configuration: " .. 
		tostring(err) end
	casc.conf = parseConfigData(cdat)

	if casc.ckey then
		casc.log("OPEN", "Loading CDN configuration", casc.ckey)
		local ccdat, err = checkMD5(casc, "CDN configuration hash mismatch", casc.ckey, readCacheCommon(casc, "cdn." .. casc.ckey, "config", prefixHash(casc.ckey)))
		if not ccdat then return nil, "cdn configuration: " .. tostring(err) end
		parseConfigData(ccdat, casc.conf)
		-- Seed the lazy CDN archive index with the configured archive list.
		local source, archives = {}, casc.conf.archives
		for i=1,#archives do
			source[i] = archives[i]
		end
		casc.indexCDN = setmetatable({_owner=casc, _source=source}, indexCDN_mt)
	end

	if casc.base then
		casc.log("OPEN", "Scanning local indices")
		-- Keep only the newest .idx file (lexicographically largest name)
		-- per two-hex-digit bucket.
		local indexFiles, index, ic, ii = {}, {}, 0, 1
		for _, f in plat.files(plat.path(casc.base, "data"), "*.idx") do
			local id = f:match("(%x%x)%x+%....$"):lower()
			local old = indexFiles[id]
			if not old or old < f then
				indexFiles[id], ic = f, ic + 1
			end
		end
		for _, f in pairs(indexFiles) do
			casc.log("OPEN", "Loading local indices", f, ii, ic)
			local idat, err = readFile(f)
			if not idat then return nil, "local index " .. f .. ": " .. tostring(err) end
			parseLocalIndexData(idat, index)
			ii = ii + 1
		end
		casc.index = index
	end

	local pckey, pdkey = casc.conf["patch-config"] and casc.conf["patch-config"][1], casc.conf.patch and casc.conf.patch[1]
	if pckey and pdkey and casc.usePatchEntries then
		casc.log("OPEN", "Loading patch configuration", pckey)
		local pcdat, err = checkMD5(casc, "Patch configuration hash mismatch", pckey, readCacheCommon(casc, "pconf." .. pckey, "config", prefixHash(pckey)) )
		if not pcdat then return nil, "patch configuration: " .. tostring(err) end
		parseConfigData(pcdat, casc.conf)

		-- Index patch-entry lines by target content hash; each recipe is a
		-- flat array of (old hash, patch hash, patch size) triples.
		local recipes, entries = {}, casc.conf["patch-entry"]
		for i=1,entries and #entries or 0 do
			local v = entries[i]
			local chash = toBinHash(v[2])
			local rt, rn = recipes[chash] or {}
			recipes[chash], rn = rt, #rt+1
			for j=7,#v,4 do
				rt[rn], rt[rn+1], rt[rn+2], rn = toBinHash(v[j]), toBinHash(v[j+2]), tonumber(v[j+3]), rn + 3
			end
		end

		casc.patchRecipes = recipes
	end

	local ekey = casc.conf.encoding[2]
	casc.log("OPEN", "Loading encoding file", ekey)
	local edat, err = getContent(casc, ekey, casc.conf.encoding[1], "encoding")
	if not edat then return nil, "encoding file: " .. tostring(err) end
	casc.encoding = encoding.parse(edat)

	local rkey = casc.conf.root[1]
	casc.log("OPEN", "Loading root file", rkey)
	local rdat, err, e2 = getContentByContentHash(casc, rkey, "root")
	if rdat then
		casc.root, err, e2 = root.parse(rdat)
	end
	if not casc.root then
		if casc.requireRootFile then
			return nil, "root file: " .. tostring(err), e2
		end
		-- Tolerated failure: continue with an empty root (install-file or
		-- hash-based reads may still work).
		casc.root = root.empty()
		casc.log("FAIL", err or "Failed to load root file", rkey, e2)
	end
	casc.requireRootFile = nil

	if casc.root and casc.pathFileIDLookup then
		casc.root.pathFileIDLookup, casc.pathFileIDLookup = casc.pathFileIDLookup
	end

	if casc.mergeInstall and casc.conf.install then
		local ikey = casc.conf.install[1]
		casc.log("OPEN", "Loading install data", ikey)
		local ins, err = getContentByContentHash(casc, ikey, "install")
		if not ins then return nil, "missing install file: " .. tostring(err) end
		-- A string mergeInstall acts as a category filter for install entries.
		local universal, filter = {0, 0xffffff}, casc.mergeInstall ~= true and casc.mergeInstall or nil
		for name, hash in install.files(ins, filter) do
			casc.root:addFileVariant(name, hash, universal)
		end
	else
		casc.mergeInstall = nil
	end
	casc.log("OPEN", "Ready")

	return casc
end

-- Fetch versions + cdns product info, via ribbit:// or plain HTTP patchBase.
local function retrieveCDNVersionsInfo(patchBase)
	local versionsData, cdnsData, err

	if patchBase:match("^ribbit://") then
		local host, product = patchBase:match("^ribbit://([^/]+)/([^/]+)")
		versionsData, err = ribbit.retrieveProductInfo(host, product, "versions")
		if not versionsData then
			return nil, "ribbit versions retrieval: " .. tostring(err)
		end
		cdnsData, err = ribbit.retrieveProductInfo(host, product, "cdns")
		if not cdnsData then
			return nil, "ribbit cdns retrieval: " .. tostring(err)
		end
	else
		versionsData, err = plat.http(plat.url(patchBase, "versions"))
		if not versionsData then
			return nil, "patch versions retrieval: " .. tostring(err)
		end
		cdnsData, err = plat.http(plat.url(patchBase, "cdns"))
		if not cdnsData then
			return nil, "patch CDN retrieval: " .. tostring(err)
		end
	end

	return versionsData, cdnsData
end

-- Query remote build info; with a region returns buildKey, cdnBase, cdnKey,
-- version, info; without, returns the per-region table plus raw rows.
function M.cdnbuild(patchBase, region)
	checkArgs("casc.cdnbuild", 1, patchBase,"string",nil, region,"string","nil")

	local versions, cdns = retrieveCDNVersionsInfo(patchBase)
	if not versions then return nil, cdns end
	versions, cdns = parseInfoData(versions), parseInfoData(cdns)

	local reginfo = {}
	for i=1,#versions do
		local v = versions[i]
		reginfo[v.Region] = {cdnKey=v.CDNConfig, buildKey=v.BuildConfig, build=v.BuildId, version=v.VersionsName}
	end
	for i=1,#cdns do
		local c = cdns[i]
		local ri = reginfo[c.Name]
		if ri and c.Hosts and c.Path then
			ri.cdnBase = splitCDNHosts(c.Hosts, c.Path)
		end
	end
	if reginfo[region] and reginfo[region].cdnBase then
		local r = reginfo[region]
		return r.buildKey, r.cdnBase, r.cdnKey, r.version, r
	elseif region == nil then
		return reginfo, versions, cdns
	end
end
-- Read a local .build.info; with selectBuild returns the chosen row's build
-- data, otherwise a per-branch summary table plus the raw rows.
function M.localbuild(buildInfoPath, selectBuild, product)
	checkArgs("casc.localbuild", 1, buildInfoPath,"string",nil, selectBuild,"function","nil", product,"string","nil")

	local dat, err = readFile(buildInfoPath)
	if not dat then return nil, err end

	local info = parseInfoData(dat)
	if type(selectBuild) == "function" and info then
		local ii = info[selectBuild(info)]
		if ii then
			local cdn = splitCDNHosts(ii["CDN Hosts"], ii["CDN Path"])
			return ii["Build Key"], cdn, ii["CDN Key"], ii["Version"], ii
		end
	elseif info then
		local branches = {}
		for i=1,#info do
			local ii = info[i]
			local cdn = splitCDNHosts(ii["CDN Hosts"], ii["CDN Path"])
			branches[ii.Branch] = {cdnKey=ii["CDN Key"], buildKey=ii["Build Key"], version=ii["Version"], cdnBase=cdn, product=ii["Product"]}
		end
		return branches, info
	end
end
-- Select the index of the active build row in a .build.info table; falls
-- back to the last row matching `product` when none is marked Active.
function
M.selectActiveBuild(buildInfo, product) 713 | checkArgs("casc.selectActiveBuild", 1, buildInfo,"table",nil, product,"string","nil") 714 | local r 715 | for i=1,#buildInfo do 716 | if product == nil or buildInfo[i].Product == product then 717 | if buildInfo[i].Active == 1 then 718 | return i 719 | end 720 | r = i 721 | end 722 | end 723 | return product and r or nil 724 | end 725 | 726 | return M -------------------------------------------------------------------------------- /libs/casc/install.lua: -------------------------------------------------------------------------------- 1 | -- SPDX-FileCopyrightText: © 2023 foxlit 2 | -- SPDX-License-Identifier: Artistic-2.0 3 | 4 | local M, bin = {}, require("casc.bin") 5 | local byte = string.byte 6 | 7 | function M.parseInstall(installData) 8 | assert(type(installData) == "string", 'Syntax: casc.install.parseInstall("installData")') 9 | assert(installData:sub(1,2) == "IN", 'missing install header magic') 10 | local numFiles, numCategories = bin.uint32_be(installData, 6), bin.uint16_be(installData, 4) 11 | 12 | local categories, cbpos, pos, bitfieldLength = {}, {}, 11, math.ceil(numFiles/8) 13 | for i=1, numCategories do 14 | local name, ep = installData:match("^(%Z+)%z()", pos) 15 | categories[i], cbpos[i], pos = name, ep+2, ep + 2 + bitfieldLength 16 | end 17 | 18 | local files = {} 19 | for i=1, numFiles do 20 | local name, ep = installData:match("^(%Z+)%z()", pos) 21 | local cats, catmap, cbyte, cmod = "", {}, math.floor((i-1)/8), 2^(8 - ((i-1) % 8)) 22 | local ctest = cmod/2 23 | for j=1, numCategories do 24 | if byte(installData, cbpos[j]+cbyte) % cmod >= ctest then 25 | catmap[categories[j]], cats = true, (cats ~= "" and cats .. ";" or cats) --[[ fix: was (cats and ...); the empty string is truthy in Lua, so the old guard was dead and every tags value carried a stray leading ";" before the first category ]] .. 
categories[j] 26 | end 27 | end 28 | files[i], pos = { 29 | name=name, 30 | contentHash=bin.to_hex(installData:sub(ep, ep+15)), 31 | size=bin.uint32_be(installData, ep+15), 32 | tags=cats or "", 33 | tagSet=catmap, 34 | }, ep + 20 35 | end 36 | 37 | return files, categories 38 | end 39 | 40 | function M.files(installData, filter) 41 | assert(type(installData) == "string", 'Syntax: casc.install.files("installData"[, "filter"])') 42 | assert(filter == nil or type(filter) == "string", 'Syntax: casc.install.files("installData"[, "filter"])') 43 | 44 | local files, ft, i = M.parseInstall(installData), {}, 1 45 | if filter then 46 | for f in filter:gmatch("[^;]+") do 47 | ft[#ft+1] = f 48 | end 49 | end 50 | 51 | return function() 52 | local f, m repeat 53 | i, f, m = i + 1, files[i], true 54 | if f then 55 | for j=1, #ft do 56 | if not f.tagSet[ft[j]] then 57 | m = false 58 | break 59 | end 60 | end 61 | if m then 62 | return f.name, f.contentHash, f.size, f.tags 63 | end 64 | end 65 | until not f 66 | end 67 | end 68 | 69 | return M -------------------------------------------------------------------------------- /libs/casc/jenkins96.lua: -------------------------------------------------------------------------------- 1 | -- SPDX-FileCopyrightText: © 2023 foxlit 2 | -- SPDX-License-Identifier: Artistic-2.0 3 | 4 | local M, plat, bin = {}, require("casc.platform"), require("casc.bin") 5 | 6 | local rot, xor, uint32_le, to_le32 = plat.rol, plat.bxor, bin.uint32_le, bin.to_le32 7 | 8 | function M.hash(k) 9 | assert(type(k) == "string", 'Syntax: casc.jenkins96.hash("key")') 10 | 11 | if #k == 0 then return 0xdeadbeef, 0xdeadbeef end 12 | local a = 0xdeadbeef + #k 13 | local b, c, k = a, a, k .. 
(k ~= "" and ("\0"):rep((12 - #k % 12) % 12) or "") 14 | for i=0, #k-13, 12 do 15 | a, b, c = a + uint32_le(k, i), b + uint32_le(k, i+4), c + uint32_le(k, i+8) 16 | a, c = xor(a-c, rot(c, 4)), c + b 17 | b, a = xor(b-a, rot(a, 6)), a + c 18 | c, b = xor(c-b, rot(b, 8)), b + a 19 | a, c = xor(a-c, rot(c,16)), c + b 20 | b, a = xor(b-a, rot(a,19)), a + c 21 | c, b = xor(c-b, rot(b, 4)), b + a 22 | end 23 | local i = #k - 12 24 | a, b, c = a + uint32_le(k, i), b + uint32_le(k, i+4), c + uint32_le(k, i+8) 25 | c = xor(c, b) - rot(b,14) 26 | a = xor(a, c) - rot(c,11) 27 | b = xor(b, a) - rot(a,25) 28 | c = xor(c, b) - rot(b,16) 29 | a = xor(a, c) - rot(c,04) 30 | b = xor(b, a) - rot(a,14) 31 | c = xor(c, b) - rot(b,24) 32 | return c, b 33 | end 34 | 35 | function M.hash_path(path) 36 | assert(type(path) == "string", 'Syntax: casc.jenkins96.hash_path("path")') 37 | local c, b = M.hash((path:upper():gsub('/', '\\'))) 38 | return to_le32(b) .. to_le32(c) 39 | end 40 | 41 | return M -------------------------------------------------------------------------------- /libs/casc/md5.lua: -------------------------------------------------------------------------------- 1 | -- SPDX-FileCopyrightText: © 2023 foxlit 2 | -- SPDX-License-Identifier: Artistic-2.0 3 | 4 | local M, bit = {}, require("casc.platform") 5 | local bxor, band, bor, rol, bnot = bit.bxor, bit.band, bit.bor, bit.rol, bit.bnot 6 | local schar, sbyte, floor = string.char, string.byte, math.floor 7 | 8 | local s, K = {[0]= 9 | 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, 7, 12, 17, 22, 10 | 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, 5, 9, 14, 20, 11 | 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, 4, 11, 16, 23, 12 | 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21, 6, 10, 15, 21, 13 | }, {[0]= 14 | 0xd76aa478, 0xe8c7b756, 0x242070db, 0xc1bdceee, 0xf57c0faf, 0x4787c62a, 0xa8304613, 0xfd469501, 15 | 0x698098d8, 0x8b44f7af, 0xffff5bb1, 0x895cd7be, 0x6b901122, 0xfd987193, 0xa679438e, 0x49b40821, 16 | 0xf61e2562, 0xc040b340, 
0x265e5a51, 0xe9b6c7aa, 0xd62f105d, 0x02441453, 0xd8a1e681, 0xe7d3fbc8, 17 | 0x21e1cde6, 0xc33707d6, 0xf4d50d87, 0x455a14ed, 0xa9e3e905, 0xfcefa3f8, 0x676f02d9, 0x8d2a4c8a, 18 | 0xfffa3942, 0x8771f681, 0x6d9d6122, 0xfde5380c, 0xa4beea44, 0x4bdecfa9, 0xf6bb4b60, 0xbebfbc70, 19 | 0x289b7ec6, 0xeaa127fa, 0xd4ef3085, 0x04881d05, 0xd9d4d039, 0xe6db99e5, 0x1fa27cf8, 0xc4ac5665, 20 | 0xf4292244, 0x432aff97, 0xab9423a7, 0xfc93a039, 0x655b59c3, 0x8f0ccc92, 0xffeff47d, 0x85845dd1, 21 | 0x6fa87e4f, 0xfe2ce6e0, 0xa3014314, 0x4e0811a1, 0xf7537e82, 0xbd3af235, 0x2ad7d2bb, 0xeb86d391, 22 | } 23 | 24 | local function int64(n) 25 | return schar(floor(n) % 256, floor(n / 256) % 256, floor(n / 256^2) % 256, floor(n / 256^3) % 256, 26 | floor(n / 256^4) % 256, floor(n / 256^5) % 256, floor(n / 256^6) % 256, floor(n / 256^7) % 256) 27 | end 28 | 29 | function M.sumhexa(m) 30 | assert(#m < 2^50, "md5: input too long") -- for double lua_Number; you're on your own elsewhere 31 | local h0, h1, h2, h3, M, ll = 0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476, {}, #m % 64 32 | local pad, cut = m:sub(-ll, ll == 0 and 0 or -1) .. schar(128) .. schar(0):rep((56 - (#m + 1) % 64) % 64) .. 
int64(#m*8), #m - ll 33 | for p=1, #m+#pad-ll, 64 do 34 | if p > cut then 35 | m, p = pad, p - cut 36 | end 37 | for i=0,15 do 38 | local a, b, c, d = sbyte(m, p+i*4, p+i*4+3) 39 | M[i] = d * 256^3 + c * 256^2 + b * 256 + a 40 | end 41 | local A, B, C, D, F, g = h0, h1, h2, h3 42 | for i=0, 63 do 43 | if i <= 15 then 44 | F, g = bxor(D, band(B, bxor(C, D))), i 45 | elseif i <= 31 then 46 | F, g = bxor(C, band(D, bxor(B, C))), (5*i + 1) % 16 47 | elseif i <= 47 then 48 | F, g = bxor(bxor(B, C), D), (3*i + 5) % 16 49 | else 50 | F, g = bxor(C, bor(B, bnot(D))), (7*i) % 16 51 | end 52 | D, C, B, A = C, B, B + rol((A + F + K[i] + M[g]), s[i]), D 53 | end 54 | h0, h1, h2, h3 = (h0 + A) % 2^32, (h1 + B) % 2^32, (h2 + C) % 2^32, (h3 + D) % 2^32 55 | end 56 | 57 | return (("%08x%08x%08x%08x"):format(h0, h1, h2, h3):gsub("(..)(..)(..)(..)", "%4%3%2%1")) 58 | end 59 | 60 | return M -------------------------------------------------------------------------------- /libs/casc/platform-fallback/guess.lua: -------------------------------------------------------------------------------- 1 | -- SPDX-FileCopyrightText: © 2023 foxlit 2 | -- SPDX-License-Identifier: Artistic-2.0 3 | 4 | local dir_sep = package and package.config and package.config:sub(1,1) or "/" 5 | if dir_sep == '/' then 6 | return require("casc.platform-fallback.unix") 7 | elseif dir_sep == '\\' then 8 | return require("casc.platform-fallback.windows") 9 | end -------------------------------------------------------------------------------- /libs/casc/platform-fallback/unix.lua: -------------------------------------------------------------------------------- 1 | -- SPDX-FileCopyrightText: © 2023 foxlit 2 | -- SPDX-License-Identifier: Artistic-2.0 3 | 4 | local M, P = {}, require("casc.platform") 5 | 6 | M.commands = {toDevNull=' 2>/dev/null', ls='ls %s', mkdir='mkdir -p %s', gzip='gzip -dcq %s'} 7 | 8 | local function execute(...) 9 | local ok, status, sig = os.execute(...) 
10 | if ok == true and status == "exit" or status == "signal" then 11 | return sig 12 | else 13 | return ok or sig or ok, status, sig 14 | end 15 | end 16 | local function shellEscape(s) 17 | return "'" .. s:gsub("'", "'\\''") .. "'" 18 | end 19 | local function readAndDeleteFile(path) 20 | local h, err = io.open(path, "rb") 21 | if h then 22 | local c = h:read("*a") 23 | h:close() 24 | h, err = c, nil 25 | end 26 | os.remove(path) 27 | return h, err 28 | end 29 | 30 | function M.decompress(compressed) 31 | assert(type(compressed) == "string", 'Syntax: casc.platform.decompress("compressed")') 32 | assert(M.commands and M.commands.gzip and M.commands.toDevNull, 'unsupported platform') 33 | 34 | local f, f2 = P.tmpname(), P.tmpname() 35 | local h = io.open(f, "wb") 36 | h:write('\31\139\8\0\0\0\0\0') 37 | h:write(compressed) 38 | h:close() 39 | 40 | execute(M.commands.gzip:format(shellEscape(f)) .. " 1>" .. f2 .. " " .. M.commands.toDevNull) 41 | os.remove(f) 42 | 43 | return readAndDeleteFile(f2) 44 | end 45 | function M.mkdir(path) 46 | assert(type(path) == 'string', 'Syntax: casc.platform.mkdir("path")') 47 | assert(M.commands and M.commands.mkdir, 'unsupported platform') 48 | 49 | return execute(M.commands.mkdir:format(shellEscape(path))) 50 | end 51 | function M.files(dir, glob) 52 | assert(type(dir) == "string" and type(glob) == 'string', 'Syntax: casc.platform.files("dir", "glob")') 53 | assert(M.commands and M.commands.ls, 'unsupported platform') 54 | 55 | local t, ni, h = {}, 1, io.popen(M.commands.ls:format(shellEscape(P.path(dir, "")) .. glob), "r") 56 | for l in h:lines() do 57 | t[ni], ni = l, ni + 1 58 | end 59 | h:close() 60 | return pairs(t) 61 | end 62 | function M.http(url, h) 63 | local c, of = "curl -s -S -A 'luacasc+curl'", P.tmpname() 64 | if type(h) == "table" then 65 | for k,v in pairs(h) do 66 | c = c .. ' -H ' .. shellEscape(k .. ": " .. v) 67 | end 68 | end 69 | c = c .. ' -o ' .. shellEscape(of) .. ' ' .. 
shellEscape(url) 70 | local ret = execute(c) 71 | if ret == 0 then 72 | return readAndDeleteFile(of) 73 | end 74 | os.remove(of) 75 | return nil, "HTTP request failed; status " .. tostring(ret) 76 | end 77 | 78 | for k,v in pairs(M) do 79 | if P[k] == nil then 80 | P[k], P._IMPL[k] = v, "platform-fallback/unix" 81 | end 82 | end 83 | 84 | return M -------------------------------------------------------------------------------- /libs/casc/platform-fallback/windows.lua: -------------------------------------------------------------------------------- 1 | -- SPDX-FileCopyrightText: © 2023 foxlit 2 | -- SPDX-License-Identifier: Artistic-2.0 3 | 4 | local M, P = {}, require("casc.platform") 5 | 6 | M.TMP_PATH_PREFIX = os.getenv('TMP') or os.getenv('TEMP') or nil 7 | M.commands = {toDevNull=' 2>NUL', ls='(for %%a in (%s) do @echo %%~fa)', mkdir='mkdir %s', gzip='gzip -dcq %s'} 8 | 9 | local function execute(...) 10 | local ok, status, sig = os.execute(...) 11 | if ok == true and status == "exit" or status == "signal" then 12 | return sig 13 | else 14 | return ok or sig or ok, status, sig 15 | end 16 | end 17 | local function shellEscape(s) 18 | return '"' .. s:gsub('"', '\\"') .. '"' 19 | end 20 | local function readAndDeleteFile(path) 21 | local h, err = io.open(path, "rb") 22 | if h then 23 | local c = h:read("*a") 24 | h:close() 25 | h, err = c, nil 26 | end 27 | os.remove(path) 28 | return h, err 29 | end 30 | 31 | function M.decompress(compressed) 32 | assert(type(compressed) == "string", 'Syntax: casc.platform.decompress("compressed")') 33 | assert(M.commands and M.commands.gzip and M.commands.toDevNull, 'unsupported platform') 34 | 35 | local f, f2 = P.tmpname(), P.tmpname() 36 | local h = io.open(f, "wb") 37 | h:write('\31\139\8\0\0\0\0\0') 38 | h:write(compressed) 39 | h:close() 40 | 41 | execute(M.commands.gzip:format(shellEscape(f)) .. " 1>" .. f2 .. " " .. 
M.commands.toDevNull) 42 | os.remove(f) 43 | 44 | return readAndDeleteFile(f2) 45 | end 46 | function M.mkdir(path) 47 | assert(type(path) == 'string', 'Syntax: casc.platform.mkdir("path")') 48 | assert(M.commands and M.commands.mkdir, 'unsupported platform') 49 | 50 | return execute(M.commands.mkdir:format(shellEscape(path))) 51 | end 52 | function M.files(dir, glob) 53 | assert(type(dir) == "string" and type(glob) == 'string', 'Syntax: casc.platform.files("dir", "glob")') 54 | assert(M.commands and M.commands.ls, 'unsupported platform') 55 | 56 | local t, ni, h = {}, 1, io.popen(M.commands.ls:format(shellEscape(P.path(dir, "")) .. glob), "r") 57 | for l in h:lines() do 58 | t[ni], ni = l, ni + 1 59 | end 60 | h:close() 61 | return pairs(t) 62 | end 63 | function M.http(url, h) 64 | local c, of = "curl -s -S -A 'luacasc+curl'", P.tmpname() 65 | if type(h) == "table" then 66 | for k,v in pairs(h) do 67 | c = c .. ' -H ' .. shellEscape(k .. ": " .. v) 68 | end 69 | end 70 | c = c .. ' -o ' .. shellEscape(of) .. ' ' .. shellEscape(url) 71 | local ret = execute(c) 72 | if ret == 0 then 73 | return readAndDeleteFile(of) 74 | end 75 | os.remove(of) 76 | return nil, "HTTP request failed; status " .. 
tostring(ret) 77 | end 78 | 79 | 80 | for k,v in pairs(M) do 81 | if P[k] == nil then 82 | P[k], P._IMPL[k] = v, "platform-fallback/windows" 83 | end 84 | end 85 | 86 | return M -------------------------------------------------------------------------------- /libs/casc/platform.lua: -------------------------------------------------------------------------------- 1 | -- SPDX-FileCopyrightText: © 2023 foxlit 2 | -- SPDX-License-Identifier: Artistic-2.0 3 | 4 | local M = {_IMPL={}} 5 | 6 | local function maybe(m) 7 | local ok, v = pcall(require, m) 8 | return ok and v 9 | end 10 | 11 | local lfs = maybe("lfs") -- LuaFileSystem; https://github.com/lunarmodules/luafilesystem 12 | local zlib = maybe("zlib") -- lzlib; https://github.com/LuaDist/lzlib 13 | local bit = maybe("bit") -- Lua BitOp; https://bitop.luajit.org 14 | local socket = maybe("socket.http") -- LuaSocket; https://github.com/lunarmodules/luasocket 15 | local socket_root, socket_ltn12 = socket and maybe("socket"), socket and maybe("ltn12") 16 | local curl = not (socket_root and socket_ltn12) and maybe("luacurl") -- LuaCURL; -- https://luarocks.org/modules/luarocks/luacurl 17 | local md5 = maybe("md5") -- MD5; https://github.com/lunarmodules/md5 18 | 19 | local dir_sep = package and package.config and package.config:sub(1,1) or "/" 20 | local dir_sep_pq = dir_sep:gsub("[%[%].%-+*?()%%]", "%%%0") 21 | 22 | function M.path(a, b, ...) 23 | if a and b then 24 | local noUT = M.NO_PARENT_PATH_PATTERN 25 | noUT = noUT == nil and (dir_sep == "\\" and "^[A-Za-z]:$" or "^$") or noUT 26 | while b == ".." do 27 | local p, c = a:match("^(.-)([^" .. dir_sep_pq .. "]*)" .. dir_sep_pq .. "?$") 28 | if c == ".." then 29 | break 30 | elseif p == "" and noUT and c:match(noUT) then 31 | error("bad path traversal") 32 | elseif p == "" then 33 | p = c == "." and ".." or "." 34 | end 35 | return M.path(p, ...) 36 | end 37 | return M.path(a .. (a:sub(-1) ~= dir_sep and dir_sep or "") .. b, ...) 
38 | end 39 | return a 40 | end 41 | function M.url(a, b, ...) 42 | if a and b then 43 | return M.url(a .. ((a:sub(-1) == "/" or b:sub(1,1) == "/") and "" or "/") .. b, ...) 44 | end 45 | return a 46 | end 47 | function M.tmpname() 48 | local tn = os.tmpname() 49 | return (M.TMP_PATH_PREFIX or "") .. tn 50 | end 51 | 52 | if zlib and zlib.decompress then 53 | M.decompress, M._IMPL.decompress = zlib.decompress, "lzlib" 54 | end 55 | if lfs and lfs.mkdir then 56 | M.mkdir, M._IMPL.mkdir = lfs.mkdir, "LuaFileSystem" 57 | end 58 | if lfs and lfs.dir then 59 | function M.files(dir, glob) 60 | if not (type(dir) == "string" and type(glob) == 'string') then 61 | return error('Syntax: casc.platform.files("dir", "glob")', 2) 62 | end 63 | local pat = "^" .. glob:gsub("%.%-%+", "%%%0"):gsub("%*", ".*") .. "$" 64 | local t, ni = {}, 1 65 | local ok, it, is, ik = pcall(lfs.dir, dir) 66 | if ok then 67 | for f in it, is, ik do 68 | if f ~= "." and f ~= ".." and f:match(pat) then 69 | t[ni], ni = M.path(dir, f), ni + 1 70 | end 71 | end 72 | end 73 | return pairs(t) 74 | end 75 | M._IMPL.files = "LuaFileSystem" 76 | end 77 | 78 | local function checkBitModule(bit, name) 79 | if bit and bit.bnot and bit.bxor and bit.band and bit.bor then 80 | M.rol, M.bnot, M.bxor, M.band, M.bor = bit.rol or bit.lrotate, bit.bnot, bit.bxor, bit.band, bit.bor 81 | M._IMPL.bit = name 82 | return true 83 | end 84 | end 85 | if not (checkBitModule(bit, "bit module") or checkBitModule(bit32, "bit32 global")) then 86 | M._IMPL.bit = "LuaCASC" 87 | local MAX_INT32, bxorT, bxorW = 2^32 - 1, {[0]={[0]=0, 1}, {[0]=1, 0}}, 2 88 | function M.bnot(a) 89 | return MAX_INT32 - a 90 | end 91 | local function bxor(a, b) 92 | local res, c, a, b = 0, 1, a % 2^32, b % 2^32 93 | while a > 0 and b > 0 do 94 | local a2, b2 = a % bxorW, b % bxorW 95 | res = res + bxorT[a2][b2]*c 96 | a, b, c = (a - a2) / bxorW, (b - b2) / bxorW, c * bxorW 97 | end 98 | return res + (a + b) * c 99 | end 100 | local function band(a, b) 101 
| a, b = a % 2^32, b % 2^32 102 | return (a + b - bxor(a, b)) / 2 103 | end 104 | local function bor(a, b) 105 | return MAX_INT32 - band(MAX_INT32 - a, MAX_INT32 - b) 106 | end 107 | M.bxor, M.band, M.bor = bxor, band, bor 108 | for k=1,3 do 109 | for i=0, 2^2^k-1 do 110 | local ti = bxorT[i] or {} 111 | for j=0, 2^2^k-1 do 112 | ti[j] = ti[j] or (j < i and bxorT[j][i]) or bxor(i, j) 113 | end 114 | bxorT[i] = ti 115 | end 116 | bxorW = 2^2^k 117 | end 118 | end 119 | M.rol = M.rol or function(n, b) 120 | local n, e2 = n % 2^32, 2^(32-b) 121 | local lo = n % e2 122 | return lo * 2^b + (n - lo)/e2 123 | end 124 | 125 | if socket and socket.request and socket_ltn12 then 126 | socket.USERAGENT, socket.TIMEOUT = "luacasc", 5 127 | local RETRIES = 3 128 | M.http = function(url, h) 129 | for i=1,RETRIES do 130 | local sink = {} 131 | local ok, status, head = socket.request({url=url, sink=socket_ltn12.sink.table(sink), headers=h}) 132 | if ok then 133 | local cnt = table.concat(sink, "") 134 | if type(status) ~= "number" or status < 200 or status >= 300 then 135 | return nil, "HTTP request failed: " .. tostring(status) .. "; URL: " .. tostring(url), status, head, cnt 136 | end 137 | return status >= 200 and status < 300 and cnt or nil, status, head, cnt 138 | elseif i == RETRIES then 139 | return nil, "HTTP request failed: " .. tostring(status) .. "; URL: " .. tostring(url), status 140 | end 141 | end 142 | end 143 | M._IMPL.http = "LuaSocket" 144 | end 145 | if socket_root and socket_root.connect then 146 | M.socketQuery = function(host, port, query) 147 | local client, err, ok = socket_root.connect(host, port) 148 | if not client then 149 | return nil, "on connect: " .. tostring(err) --[[ fix: was string(err); `string` is the string library table, so calling it raised "attempt to call a table value" on every connect failure instead of returning nil, message (tostring matches the "on send" path below) ]] 150 | end 151 | client:settimeout(0, "t") 152 | ok, err = client:send(query) 153 | if not ok then 154 | return nil, "on send: " .. 
tostring(err) 155 | end 156 | client:settimeout(nil, "t") 157 | ok, err = client:receive("*a", "") 158 | client:close() 159 | return ok, err 160 | end 161 | M._IMPL.socketQuery = "LuaSocket" 162 | end 163 | 164 | if curl and not (M.http and M.socketQuery) then 165 | local function writeSink(sink, buf) 166 | sink[#sink+1] = buf 167 | return #buf 168 | end 169 | local function readSource(source, nb) 170 | local s, rest = source[1] or "" 171 | if s and #s > nb then 172 | s, rest = s:sub(1,nb-1), s:sub(nb) 173 | end 174 | source[1] = rest 175 | return s 176 | end 177 | if not M.http then 178 | function M.http(url, h) 179 | local c, o = curl.new(), {} 180 | c:setopt(curl.OPT_URL, url) 181 | c:setopt(curl.OPT_USERAGENT, "luacasc") 182 | c:setopt(curl.OPT_WRITEDATA, o) 183 | c:setopt(curl.OPT_WRITEFUNCTION, writeSink) 184 | if h and h.Range then 185 | c:setopt(curl.OPT_RANGE, h.Range:match("[%d%-]+")) 186 | end 187 | c:perform() 188 | local status, eno = c:getinfo(curl.INFO_RESPONSE_CODE), c:getinfo(curl.INFO_OS_ERRNO) 189 | c:close() 190 | if (status or 0) < 200 or status >= 300 then 191 | local err = "http request failed: " .. url .. "; http " .. tostring(status) .. "/" .. tostring(eno) 192 | return nil, err, status 193 | else 194 | return table.concat(o, "") 195 | end 196 | end 197 | M._IMPL.http = "LuaCURL" 198 | end 199 | if not M.socketQuery then 200 | function M.socketQuery(host, port, query) 201 | local c, o = curl.new(), {} 202 | c:setopt(curl.OPT_URL, "telnet://" .. host .. ":" .. 
port) 203 | c:setopt(curl.OPT_WRITEDATA, o) 204 | c:setopt(curl.OPT_READDATA, {query}) 205 | c:setopt(curl.OPT_WRITEFUNCTION, writeSink) 206 | c:setopt(curl.OPT_READFUNCTION, readSource) 207 | c:perform() 208 | c:close() 209 | if not o[1] then 210 | return nil, "no data in response" 211 | end 212 | return table.concat(o, "") 213 | end 214 | M._IMPL.socketQuery = "LuaCURL" 215 | end 216 | end 217 | 218 | if md5 and md5.sumhexa then 219 | M.md5, M._IMPL.md5 = md5.sumhexa, "MD5" 220 | else 221 | package.loaded["casc.platform"] = M 222 | local lmd5 = maybe("casc.md5") 223 | M.md5 = lmd5 and lmd5.sumhexa or nil 224 | M._IMPL.md5 = lmd5 and lmd5.sumhexa and "LuaCASC" or nil 225 | end 226 | 227 | return M -------------------------------------------------------------------------------- /libs/casc/ribbit.lua: -------------------------------------------------------------------------------- 1 | -- SPDX-FileCopyrightText: © 2023 foxlit 2 | -- SPDX-License-Identifier: Artistic-2.0 3 | 4 | local M = {} 5 | local plat = require("casc.platform") 6 | 7 | function M.retrieveProductInfo(hostport, product, infoFile) 8 | if type(hostport) ~= "string" or type(product) ~= "string" or type(infoFile) ~= "string" then 9 | error('Syntax: ribbit.retrieveProductInfo("host[:port]", "product", "infoFile")', 2) 10 | end 11 | if not plat.socketQuery then 12 | return nil, "ribbit requires socket access" 13 | end 14 | local host, port = hostport:match("^([^:]+):(%d+)$") 15 | if not host then 16 | host, port = hostport, 1119 17 | end 18 | 19 | local data, err = plat.socketQuery(host, port+0, "v1/products/" .. product .. "/" .. infoFile .. "\r\n") 20 | if not data then 21 | return nil, "socket error: " .. tostring(err) 22 | end 23 | local headers, restStart = data:match("^(.-)\r\n()\r\n") 24 | local boundary = headers and ("\r\n" .. 
headers):match('\r\nContent%-Type: multipart/alternative; boundary="([^"]+)"') 25 | if not boundary then 26 | return nil, "multipart reply expected; found no boundary" 27 | end 28 | 29 | local segments = {} 30 | local boundaryText, finalBoundaryText = "\r\n--" .. boundary .. "\r\n", "\r\n--" .. boundary .. "--\r\n" 31 | local bp, bpE = string.find(data, boundaryText, restStart, true) 32 | repeat 33 | local isFinal, np, npE = false, string.find(data, boundaryText, bpE, true) 34 | if not np then 35 | np, npE = string.find(data, finalBoundaryText, bpE, true) 36 | isFinal = np or error("ribbit: missing final boundary in multipart reply") 37 | end 38 | if np then 39 | local segment, headers, body = data:sub(bpE+1, np-2) 40 | if segment:sub(1,2) == "\r\n" then 41 | headers, body = nil, segment 42 | else 43 | headers, body = segment:match("^(.-)\r\n\r\n(.+)$") 44 | end 45 | segments[#segments+1] = {body=body, headers=headers} 46 | bp, bpE = np, npE 47 | end 48 | until isFinal 49 | -- There is a checksum header past the final boundary; ignore it. 
50 | 51 | if segments[1] then 52 | return segments[1].body 53 | end 54 | end 55 | 56 | return M -------------------------------------------------------------------------------- /libs/casc/root.lua: -------------------------------------------------------------------------------- 1 | -- SPDX-FileCopyrightText: © 2023 foxlit 2 | -- SPDX-License-Identifier: Artistic-2.0 3 | 4 | local M = {} 5 | 6 | local jenkins96, bin = require("casc.jenkins96"), require("casc.bin") 7 | local uint32_le, int32_le, to_bin, ssub = bin.uint32_le, bin.int32_le, bin.to_bin, string.sub 8 | 9 | local function toBinHash(h, hexLength) 10 | return #h == hexLength and to_bin(h) or h 11 | end 12 | 13 | local wow_mt = {__index={}} do 14 | local wow = wow_mt.__index 15 | local function nextVariant(e, i) 16 | i = (i or -1) + 2 17 | local h = e and e[i] 18 | if h then 19 | return i, h, e[i+1] 20 | end 21 | end 22 | function wow:getFileVariants(nameOrID) 23 | local fid = nameOrID 24 | if type(nameOrID) ~= "number" then 25 | fid = self:getFileID(nameOrID) 26 | end 27 | return nextVariant, self[2][fid] 28 | end 29 | function wow:addFileVariant(path, chash, flags) 30 | local nameMap, idVariantMap = self[1], self[2] 31 | local hpath = jenkins96.hash_path(path) 32 | local fid = nameMap[hpath] 33 | if not fid then 34 | fid = (self[3] or 10041)-1 35 | nameMap[hpath], idVariantMap[fid], self[3] = fid, {}, fid 36 | end 37 | local t = idVariantMap[fid] 38 | t[#t+1], t[#t+2] = toBinHash(chash, 32), flags 39 | end 40 | function wow:getFileID(path) 41 | local h = jenkins96.hash_path(path) 42 | local fid = self[1][h] 43 | fid = fid or (self.pathFileIDLookup and self.pathFileIDLookup(path, h)) or nil 44 | return fid 45 | end 46 | function wow:addFileIDPaths(map, pathsArePrehashed) 47 | local nameMap, idVariantMap = self[1], self[2] 48 | for k,v in pairs(map) do 49 | local tk, tv, path, fileID = type(k), type(v) 50 | if tk == "number" and tv == "string" then 51 | path, fileID = v, k 52 | elseif tk == "string" and tv 
== "number" then 53 | path, fileID = k, v 54 | end 55 | if not path then 56 | error("root:addFileIDPaths: map kv pairs should be (string, number) or (number, string).") 57 | end 58 | local h = pathsArePrehashed and to_bin(path, 16) or jenkins96.hash_path(path) 59 | if nameMap[h] == nil and idVariantMap[fileID] then 60 | nameMap[h] = fileID 61 | end 62 | end 63 | end 64 | end 65 | 66 | local function parseLegacy(data) 67 | local pos, dl, nameMap, idVariantMap = 0, #data, {}, {} 68 | while pos < dl do 69 | if dl < (pos+12) then 70 | return false, 'Root file invalid: block header overread', pos 71 | end 72 | local n, info = uint32_le(data, pos), {uint32_le(data, pos+4), uint32_le(data, pos+8)} 73 | local p2, lfid = pos + 12, 0 74 | if dl < (p2+28*n) then 75 | return false, 'Root file invalid: block content overread', p2 .. '+' .. (28*n) 76 | end 77 | pos = p2 + 4*n 78 | for i=1,n do 79 | local chash, tfid = ssub(data, pos+1, pos+16), lfid+int32_le(data, p2) 80 | local nhash = ssub(data, pos+17, pos+24) 81 | pos, p2 = pos + 24, p2 + 4 82 | local t, tsz = idVariantMap[tfid] or {} 83 | if (nameMap[nhash] or tfid) ~= tfid then 84 | local hexHash = ('%02x'):rep(#nhash):format(nhash:byte(1, #nhash)) 85 | return false, 'Root manifest invalid: file name maps to multiple file IDs', hexHash 86 | end 87 | nameMap[nhash], idVariantMap[tfid], tsz = tfid, t, #t 88 | t[tsz+1], t[tsz+2], lfid = chash, info, tfid + 1 89 | end 90 | end 91 | return setmetatable({nameMap, idVariantMap}, wow_mt) 92 | end 93 | 94 | local function parseMFST(data) 95 | if #data < 16 then 96 | return false, 'Root manifest invalid: data too short' 97 | end 98 | local hSize, hVersion, nFiles, nNamedFiles = uint32_le(data, 4), uint32_le(data, 8) 99 | if hVersion == 1 then 100 | nFiles, nNamedFiles = uint32_le(data, 12), uint32_le(data, 16) 101 | else 102 | -- Legacy format omits header size and version 103 | hSize, hVersion, nFiles, nNamedFiles = 12, -1, hSize, hVersion 104 | end 105 | local pos, dl, nameMap, 
idVariantMap = hSize, #data, {}, {} 106 | local readFiles, readNamedFiles = 0, 0 107 | local two28, two29 = 2^28, 2^29 108 | while pos < dl do 109 | if dl < (pos+12) then 110 | return false, 'Root manifest invalid: block header overread', pos 111 | end 112 | local n, info = uint32_le(data, pos), {uint32_le(data, pos+4), uint32_le(data, pos+8)} 113 | local p2, p3, lfid = pos + 12, pos + 12 + 20*n, 0 114 | local hasNameHashes = info[1] % two29 < two28 115 | local bSize = (hasNameHashes and 28 or 20) * n 116 | if dl < (p2+bSize) then 117 | return false, 'Root manifest invalid: block content overread', p2 .. '+' .. bSize 118 | end 119 | pos = p2 + 4*n 120 | for i=1,n do 121 | local chash, tfid = ssub(data, pos+1, pos+16), lfid+int32_le(data, p2) 122 | pos, p2 = pos + 16, p2 + 4 123 | local t, tsz = idVariantMap[tfid] or {} 124 | if hasNameHashes then 125 | local nhash = ssub(data, p3+1, p3+8) 126 | if (nameMap[nhash] or tfid) ~= tfid then 127 | local hexHash = ('%02x'):rep(#nhash):format(nhash:byte(1, #nhash)) 128 | return false, 'Root manifest invalid: file name maps to multiple file IDs', hexHash 129 | end 130 | nameMap[nhash], p3 = tfid, p3 + 8 131 | end 132 | idVariantMap[tfid], tsz = t, #t 133 | t[tsz+1], t[tsz+2], lfid = chash, info, tfid + 1 134 | readFiles, readNamedFiles = readFiles + 1, readNamedFiles + (hasNameHashes and 1 or 0) 135 | end 136 | pos = p3 137 | end 138 | if readFiles ~= nFiles then 139 | return false, 'Root manifest invalid: total file miscount', nFiles .. '/' .. readFiles 140 | elseif readNamedFiles ~= nNamedFiles then 141 | return false, 'Root manifest invalid: named file miscount', nNamedFiles .. '/' .. 
readNamedFiles
    end
    return setmetatable({nameMap, idVariantMap}, wow_mt)
end

-- Parses a WoW root manifest; dispatches on the MFST magic (stored reversed).
function M.parse(data)
    if data:sub(1,4) == "TSFM" then
        return parseMFST(data)
    end
    return parseLegacy(data)
end
-- Returns an empty root object with no name hashes and no file IDs.
function M.empty()
    return setmetatable({{}, {}}, wow_mt)
end

return M
--------------------------------------------------------------------------------
/libs/convert.sh:
--------------------------------------------------------------------------------
#! /usr/bin/env bash

python2 ../InterfaceExport/InterfaceExport/libs/BLPConverter/convert_all.py . --remove --verbose
--------------------------------------------------------------------------------
/libs/dbc/bin.lua:
--------------------------------------------------------------------------------
-- SPDX-FileCopyrightText: © 2023 foxlit
-- SPDX-License-Identifier: Artistic-2.0

-- Little-endian binary readers shared by the DBC/DB2 header parsers.
-- All byte/bit offsets are 0-based; a nil offset is treated as 0.
local M, sbyte = {}, string.byte
local inf, nan = math.huge, math.huge-math.huge

-- Probe the host's number type for its usable integer precision (53 bits for
-- IEEE doubles). Readers refuse requests wider than what can be represented
-- exactly.
local PRECISION_BITS, PRECISION_BYTES, PRECISION_SAFE1_BYTES, PRECISION_SAFE1_BITS do
    local x = 255
    for i=8,128 do
        local x1 = x-1
        local x2 = x1-1
        if not (x > 128 and x > x1 and x1 > x2 and (x2+2) == x and (x1+1) == x and (2^i-x) == 1) then
            break
        else
            PRECISION_BITS, x = i, x*2+1
        end
    end
    PRECISION_BYTES = (PRECISION_BITS - PRECISION_BITS%8)/8
    M.PRECISION_BITS, M.PRECISION_BYTES = PRECISION_BITS, PRECISION_BYTES
    PRECISION_SAFE1_BYTES = PRECISION_BYTES-1
    PRECISION_SAFE1_BITS = PRECISION_SAFE1_BYTES * 8
end

-- Reads an n-byte unsigned little-endian integer from s at byte offset pos.
local function uint_le(s, n, pos)
    if n > PRECISION_BYTES then
        error('Requested integer is too wide: ' .. n .. ' bytes of precision required; ' .. PRECISION_BITS .. ' bits available.', 2)
    end
    local a, b, c, d, e, f = sbyte(s, (pos or 0)+1, (pos or 0)+n)
    -- Fix: the wide (n > 6) recursion previously used a bare `pos+6`, which
    -- raised an arithmetic-on-nil error when pos was omitted, despite the
    -- sbyte call above deliberately tolerating a nil pos.
    return (f or 0)*256^5 + (e or 0)*256^4 + (d or 0)*256^3 + (c or 0)*256^2 + (b or 0)*256 + a + (n > 6 and 2^48*uint_le(s,n-6,(pos or 0)+6) or 0)
end
-- Reads an n-byte signed (two's complement) little-endian integer.
local function int_le(s, n, pos)
    if n > PRECISION_BYTES then
        error('Requested integer is too wide: ' .. n .. ' bytes of precision required; have ' .. PRECISION_BITS .. ' bits', 2)
    end
    local a, b, c, d, e, f = sbyte(s, (pos or 0)+1, (pos or 0)+n)
    -- Same nil-pos fix as uint_le above.
    local r = (f or 0)*256^5 + (e or 0)*256^4 + (d or 0)*256^3 + (c or 0)*256^2 + (b or 0)*256 + a + (n > 6 and 2^48*uint_le(s,n-6,(pos or 0)+6) or 0)
    -- lb is the most significant byte; bit 7 there is the sign bit.
    local lb = n <= 6 and (f or e or d or c or b or a) or sbyte(s, (pos or 0)+n)
    return r - (lb > 127 and 256^n or 0)
end
-- Reads a w-bit unsigned little-endian packed integer at bit offset pos.
local function upint_le(s, w, pos)
    if w > PRECISION_BITS then
        error('Requested packed integer is too wide: ' .. w .. ' bits of precision required; ' .. PRECISION_BITS .. ' bits available.', 2)
    end
    local o = 0
    pos = pos or 0
    if w > PRECISION_SAFE1_BITS then
        -- Split reads wider than a "safe" width into two recursive reads.
        w, o = PRECISION_SAFE1_BITS, 2^PRECISION_SAFE1_BITS*upint_le(s, w-PRECISION_SAFE1_BITS, pos+PRECISION_SAFE1_BITS)
    end
    local p8 = pos % 8
    local lo, iv = 2^p8, uint_le(s, PRECISION_BYTES, (pos-p8)/8)
    return o + ((iv - iv % lo)/lo % 2^w)
end
-- Reads a w-bit signed packed integer at bit offset pos.
local function pint_le(s, w, pos)
    local o = upint_le(s, w, pos)
    return o - (o >= 2^(w-1) and 2^w or 0)
end
M.uint_le, M.int_le = uint_le, int_le
M.upint_le, M.pint_le = upint_le, pint_le

function M.uint16_le(s, pos)
    local a, b = sbyte(s, (pos or 0)+1, (pos or 0) + 2)
    return b*256 + a
end
function M.uint32_le(s, pos)
    local a, b, c, d = sbyte(s, (pos or 0)+1, (pos or 0) + 4)
    return d*256^3 + c*256^2 + b*256 + a
end
-- Decodes an IEEE-754 binary32 float stored little-endian at byte offset pos.
function M.float32_le(s, pos)
    local a, b, c, d = sbyte(s, (pos or 0) + 1, (pos or 0) + 4)
    local s, e, f = d > 127 and -1 or 1, (d % 128)*2 + (c > 127 and 1 or 0), a + b*256 + (c % 128)*256^2
    if e > 0 and e < 255 then
        return s * (1+f/2^23) * 2^(e-127)
    else
        -- Denormals, infinities and NaN.
        return e == 0 and (s * f/2^23 * 2^-126) or f == 0 and (s * inf) or nan
    end
end
function M.int32_le(s, pos)
    local a, b, c, d = sbyte(s, (pos or 0)+1, (pos or 0) + 4)
    return (d or 0)*256^3 + (c or 0)*256^2 + (b or 0)*256 + a - (d > 127 and 2^32 or 0)
end

-- Reinterprets an unsigned 32-bit integer as an IEEE-754 binary32 float.
function M.u32_float(u)
    local a,b,c,d = u % 256, u % 65536, u % 16777216, u % 4294967296
    b, c, d = (b-a)/256, (c-b)/65536, (d-c)/16777216
    local s, e, f = d > 127 and -1 or 1, (d % 128)*2 + (c > 127 and 1 or 0), a + b*256 + (c % 128)*256^2
    if e > 0 and e < 255 then
        return s * (1+f/2^23) * 2^(e-127)
    else
        return e == 0 and (s * f/2^23 * 2^-126) or f == 0 and (s * inf) or nan
    end
end

return M
--------------------------------------------------------------------------------
/libs/dbc/headers/db2.lua:
--------------------------------------------------------------------------------
-- SPDX-FileCopyrightText: © 2023 foxlit
-- SPDX-License-Identifier: Artistic-2.0

-- WDB2/WCH2 header parser.
local M, bin = { fourCC = { WDB2 = 1, WCH2 = 1 } }, require("dbc.bin")
local uint32_le = bin.uint32_le

local function assertLEQ(a, b, message)
    if a > b then
        error(message .. ": " .. a .. " > " ..
b, 2)
    end
end

-- Parses a WDB2/WCH2 header: fixed 48-byte header, optional dense id map
-- (6 bytes per id when maxId > 0), then row data and the string block.
function M:parseHeader(data)
    local h = {rowBase=48}
    h.rows, h.fields, h.stride, h.stringLength = uint32_le(data, 4), uint32_le(data, 8), uint32_le(data, 12), uint32_le(data, 16)
    h.build, h.minId, h.maxId, h.locale = uint32_le(data, 24), uint32_le(data, 32), uint32_le(data, 36), uint32_le(data, 40)

    if h.maxId > 0 then
        local n, p, idMap = h.maxId-h.minId + 1, h.rowBase, {}
        h.idMap, h.rowBase = idMap, h.rowBase + 6 * n
        for i=1,n do
            idMap[i], p = uint32_le(data, p), p + 6
        end
    end
    assertLEQ(h.rowBase + h.rows*h.stride + h.stringLength, #data, "DB2 data too short")
    h.stringBase = h.rowBase + h.rows*h.stride + 1

    return h
end

return M
--------------------------------------------------------------------------------
/libs/dbc/headers/db5_6.lua:
--------------------------------------------------------------------------------
-- SPDX-FileCopyrightText: © 2023 foxlit
-- SPDX-License-Identifier: Artistic-2.0

-- WDB5/WDB6 header parser.
local M, bin = { fourCC = { WDB5 = 1, WDB6 = 1 } }, require("dbc.bin")
local uint32_le, uint16_le, float32_le, int_le = bin.uint32_le, bin.uint16_le, bin.float32_le, bin.int_le

local function assertLEQ(a, b, message)
    if a > b then
        error(message .. ": " .. a .. " > " .. b, 2)
    end
end

-- WDB6 common-data column types mapped to byte widths ('f' marks floats).
local DB6_coType = {[0]=4, [1]=2, [2]=1, [3]='f', [4]=4, [5]=8}
function M:parseHeader(data)
    local is6 = data:sub(4,4) == '6'
    local h, rkField, idField = {}
    h.rows, h.fields, h.stride, h.stringLength = uint32_le(data, 4), uint32_le(data, 8), uint32_le(data, 12), uint32_le(data, 16)
    h.build, h.minId, h.maxId, h.locale = uint32_le(data, 24), uint32_le(data, 28), uint32_le(data, 32), uint32_le(data, 36)
    h.cloneLength, h.flags, rkField = uint32_le(data, 40), uint16_le(data, 44), 1+uint16_le(data, 46)

    local hend = is6 and 56 or 48
    -- flags bit 0: offset map present (stringLength then holds its position);
    -- flags bit 2: external row-id map present.
    local hsize, ofsSize, idmSize = hend + 4*h.fields, h.flags % 2 > 0 and 6*(h.maxId-h.minId+1) or 0, h.flags % 8 > 3 and 4*h.rows or 0
    local stSize, stringEnd = ofsSize > 0 and 0 or h.stringLength
    assertLEQ(hsize + h.rows*h.stride + stSize + ofsSize + idmSize, #data, "DB5 data too short")

    if ofsSize > 0 and h.rows > 0 then
        -- Sparse table: collect {id, offset, size} for each non-empty row.
        local ot, nr, pos, minLen, maxLen, sz = {}, 1, h.stringLength, math.huge, -math.huge
        for i=h.minId, h.maxId do
            pos, sz = pos + 6, uint16_le(data, pos+4)
            if sz > 0 then
                ot[nr], nr = {i, uint32_le(data, pos-6), sz}, nr + 1
                minLen, maxLen = minLen < sz and minLen or sz, maxLen > sz and maxLen or sz
            end
        end
        h.rows, h.rowList, h.maxRowSize, h.minRowSize, h.inlineStrings, stringEnd = #ot, ot, maxLen, minLen, minLen ~= maxLen, h.stringLength + ofsSize
    else
        h.stringBase, stringEnd = hsize + h.rows*h.stride+1, hsize + h.rows*h.stride+stSize
        if idmSize > 0 then
            local idMap, p = {}, h.stringBase+h.stringLength-1
            for i=1,idMap and h.rows or 0 do
                idMap[i], p = uint32_le(data, p), p + 4
            end
            h.idMap = idMap
        end
    end
    h.rowBase, h.cloneOffset = hsize, h.cloneLength > 0 and stringEnd + ofsSize + idmSize or nil

    -- Field structure: each entry declares a size (bits) and in-row offset;
    -- gaps up to the next field's offset are expanded into array elements.
    local finfo, p = {}, hend
    for i=1,h.fields do
        local f, sz, o, no = true, (32-int_le(data, 2, p))/8, uint16_le(data, p+2)
        p, no, idField = p + 4, i == h.fields and h.stride or uint16_le(data, p+6), i == rkField and #finfo+1 or idField
        assert(i == 1 or o >= finfo[#finfo][2]+finfo[#finfo][1])
        repeat
            finfo[#finfo+1], o, f = {sz, o, f}, o + sz, false
        until (o+sz) > no
    end
    h.idField = not h.rowList and not h.idMap and idField or nil
    h.inlineStrings = ofsSize > 0 and (h.maxRowSize ~= h.minRowSize or h.maxRowSize > finfo[#finfo][1]+finfo[#finfo][2]) or nil
    if h.rows > 0 and not finfo[#finfo][3] and finfo[#finfo][1] < 4 then
        -- Detect trailing padding shared by every sampled row and drop it.
        local md, rl = 0, h.rowList
        for i=#finfo,1,-1 do
            local f = finfo[i]
            if f[3] then break end
            md = md + f[1]
        end
        for i=1,h.rows do
            local re = rl and (rl[i][2]+rl[i][3]) or (hsize+i*h.stride)
            md = #data:sub(re-md+1,re):match("%z*$")
            if md == 0 then break end
        end
        while md >= finfo[#finfo][1] do
            md, h.dropPadding, finfo[#finfo] = md - finfo[#finfo][1], (h.dropPadding or 0) + finfo[#finfo][1]
        end
    end

    -- WDB6 common-data block: per-column sparse {id -> value} maps.
    local coSize = is6 and uint32_le(data, 52) or 0
    if coSize > 0 then
        local _totalFields = uint32_le(data, 48)
        local cobase, p, nc = stringEnd + ofsSize + idmSize + h.cloneLength
        h.coVals, h.coTypes, p, nc = {}, '', cobase + 4, uint32_le(data, cobase)
        for i=1, nc do
            local n, t, c = uint32_le(data, p), data:byte(p+5), {}
            h.coVals[i], c.type, p = c, t, p + 5
            local tt = DB6_coType[t]
            if tt == 'f' then
                c.ttype = 'f'
                for i=1,n do
                    c[uint32_le(data, p)], p = float32_le(data, p+4), p + 8
                end
            else
                c.ttype = 'i'
                for i=1,n do
                    c[uint32_le(data, p)], p = int_le(data, tt, p+4), p + 4 + tt
                end
            end
            h.coTypes = h.coTypes .. c.ttype
        end
    end

    h.fieldInfo, h.fields = finfo, #finfo
    return h
end

return M
--------------------------------------------------------------------------------
/libs/dbc/headers/dbc.lua:
--------------------------------------------------------------------------------
-- SPDX-FileCopyrightText: © 2023 foxlit
-- SPDX-License-Identifier: Artistic-2.0

-- Classic WDBC header parser: fixed 20-byte header, rows, then strings.
local M, bin = { fourCC = { WDBC = 1 } }, require("dbc.bin")
local uint32_le = bin.uint32_le

local function assertLEQ(a, b, message)
    if a > b then
        error(message .. ": " .. a .. " > " .. b, 2)
    end
end

function M:parseHeader(data)
    local h = {rowBase=20}
    h.rows, h.fields, h.stride, h.stringLength = uint32_le(data, 4), uint32_le(data, 8), uint32_le(data, 12), uint32_le(data, 16)
    assertLEQ(20 + h.rows*h.stride + h.stringLength, #data, "DBC data too short")
    h.stringBase = h.rowBase + h.rows*h.stride + 1
    return h
end

return M
--------------------------------------------------------------------------------
/libs/dbc/headers/dc1_2.lua:
--------------------------------------------------------------------------------
-- SPDX-FileCopyrightText: © 2023 foxlit
-- SPDX-License-Identifier: Artistic-2.0

-- WDC1/WDC2 (and "1SLC") header parser.
local M, bin = { fourCC = { WDC1=1, WDC2=1, ["1SLC"]=1 } }, require("dbc.bin")
local uint32_le, uint16_le, int_le = bin.uint32_le, bin.uint16_le, bin.int_le

local function assertLEQ(a, b, message)
    if a > b then
        error(message .. ": " .. a .. " > " .. b, 2)
    end
end
-- Sets each truthy vararg as a key in the flag-set f.
local function setFlags(f, ...)
    for i=1,select("#", ...) do
        local v = select(i, ...)
if v then
            f[v] = true
        end
    end
end
-- Returns the flag-set's truthy keys, sorted, space-separated.
local function serializeFlags(f)
    local fa = {}
    for k, v in pairs(f) do
        if v then
            fa[#fa+1] = k
        end
    end
    table.sort(fa)
    return table.concat(fa, " ")
end

-- WDC1 stores header fields at different offsets than WDC2; dc1Map remaps
-- the (WDC2-relative) offsets used below when parsing a WDC1 header.
local identityMap = setmetatable({}, {__index=function(_,k) return k end})
local dc1Map = { [40]=44, [42]=46, [44]=48, [48]=52, [52]=56, [56]=68, [60]=72, [64]=76, [92]=40, [96]=60, [100]=64, [104]=80 }
function M:parseHeader(data)
    local isDC1 = data:sub(4,4) == '1'
    local h, om, feat = {}, isDC1 and dc1Map or identityMap, {}
    h.rows, h.fields, h.stride, h.stringLength = uint32_le(data, 4), uint32_le(data, 8), uint32_le(data, 12), uint32_le(data, 16)
    h.minId, h.maxId, h.locale = uint32_le(data, 28), uint32_le(data, 32), uint32_le(data, 36)
    h.flags = uint16_le(data, om[40])
    h.fields2, h.packStart, h.lookupCount = uint32_le(data, om[44]), uint32_le(data, om[48]), uint32_le(data, om[52])
    local cloneLength = uint32_le(data, om[92])
    local keyColumnID = 1+uint16_le(data, om[42])
    local rowListOffset = uint32_le(data, om[96])
    local idmLength = uint32_le(data, om[100])
    local packLength = uint32_le(data, om[56])
    local codLength = uint32_le(data, om[60])
    local palLength = uint32_le(data, om[64])
    local relLength = uint32_le(data, om[104])
    local staticHeaderEnd = (isDC1 and 84 or 108)
    local headerEnd = staticHeaderEnd + 4*h.fields
    h.rowBase = isDC1 and headerEnd or uint32_le(data, 80)

    h.stringBase = h.rowBase + h.rows*h.stride + 1
    setFlags(feat,
        packLength > 0 and "PackInfo", idmLength > 0 and "RowIDMap", codLength > 0 and "DefaultVals",
        palLength > 0 and "EnumFields", relLength > 0 and "ForeignKeys", rowListOffset > 0 and "OffsetMap",
        cloneLength > 0 and "CloneRows", h.stringLength > 2 and "StringBlock"
    )
    -- The in-row key column is only meaningful without an external id map.
    keyColumnID = idmLength == 0 and keyColumnID or nil

    local mainDataEnd = rowListOffset == 0 and (h.rowBase + h.rows*h.stride + h.stringLength) or (rowListOffset + (h.maxId - h.minId+1)*6)
    local auxDataOffset = mainDataEnd+idmLength+cloneLength+packLength
    local auxDataEnd = auxDataOffset+palLength+codLength
    local fileEnd = auxDataEnd + relLength
    if not isDC1 then
        fileEnd = mainDataEnd + idmLength + cloneLength + relLength
        if h.stringLength > 0 then
            h.rowRelativeStrings = true
        end
    end

    assert(h.fields == h.fields2, "DC1 header fields/fields2 values are inconsistent")
    assertLEQ(fileEnd, #data, "DC1 data too short")
    assert(isDC1 or uint32_le(data, 68) == 1, "DC2 multi-part tables are not supported")

    assert(packLength == 0 or packLength/24 == h.fields, "DC1 packing data size invalid")
    local packPos = packLength > 0 and (isDC1 and (mainDataEnd + idmLength + cloneLength) or (headerEnd))
    local adOffsetPA = packPos and (isDC1 and (auxDataOffset) or (packPos + packLength))
    local adOffsetCO = packPos and (adOffsetPA+palLength)
    local finfo, extraArrayFields, oddSizedFields, basicArrayFields = {}, 0, 0, 0
    for i=1, h.fields do
        local fi, sz, ofs = {}, 32-int_le(data, 2, staticHeaderEnd-4 + 4*i), uint16_le(data, staticHeaderEnd-2 + 4*i)
        finfo[#finfo+1], fi[1], fi[2] = fi, sz/8, ofs
        if packPos then
            fi.bitOffset, fi.bitSize, fi.adLength = uint16_le(data, packPos), uint16_le(data, packPos+2), uint32_le(data, packPos+4)
            fi.packType, fi.pa1, fi.pa2, fi.pa3 = uint32_le(data, packPos+8), uint32_le(data, packPos+12), uint32_le(data, packPos+16), uint32_le(data, packPos+20)
            packPos = packPos + 24
            if not (fi.packType == 0 or sz == 0 or fi.bitSize == sz) then
                error(('DC1 field/packing width mismatch: field %d, pack %d, sz %d, bsz %d'):format(i, fi.packType, sz, fi.bitSize))
            end
            if fi.packType == 0 and fi.bitSize ~= sz and sz > 0 then
                -- Unpacked array field: expand into one entry per element.
                assert(fi.bitSize % sz == 0, 'DC1 array field width parity check')
                basicArrayFields = basicArrayFields + (fi.bitSize/sz-1)
                for j=2,fi.bitSize/sz do
                    finfo[#finfo+1] = {sz/8, ofs+sz*(j-1)}
                end
            elseif fi.adLength == 0 then
            elseif fi.packType == 2 then
                fi.adOfs, adOffsetCO = adOffsetCO, adOffsetCO + fi.adLength
            else
                assert(fi.packType == 3 or fi.packType == 4, "Unknown DC1 field packing type: " .. fi.packType)
                fi.adOfs, adOffsetPA = adOffsetPA, adOffsetPA + fi.adLength
                if fi.packType == 4 and fi.pa3 >= 1 then
                    -- Palletized array: repeat the same field info per element.
                    fi.firstFieldIndex, extraArrayFields = #finfo, extraArrayFields + fi.pa3 - 1
                    for i=2,fi.pa3 do
                        finfo[#finfo+1] = fi
                    end
                end
            end
            oddSizedFields = oddSizedFields + (fi.packType ~= 0 and 1 or 0)
        end
        if i == keyColumnID then
            h.idField = #finfo
        end
    end
    feat.ArrayFields = basicArrayFields > 0
    feat.DictArrayFields = extraArrayFields > 0
    feat.PackedFields = oddSizedFields > 0

    if cloneLength > 0 then
        assert(cloneLength % 8 == 0, 'DC1 clone instructions length parity check')
        h.cloneOffset, h.cloneLength = mainDataEnd + idmLength, cloneLength
    end
    if relLength > 0 then
        local p = isDC1 and auxDataEnd or (mainDataEnd + idmLength + cloneLength)
        local count, min, max = uint32_le(data, p), uint32_le(data, p+4), uint32_le(data, p+8)
        h.fkField = {0,0, packType="ForeignKeyMap", adOfs=p+12, adLength=relLength-12, pa1=count, pa2=min, pa3=max}
        finfo[#finfo+1] = h.fkField
    end

    -- We don't treat arrays as single fields around here, so lie about the field count in the header
    -- (h.fields2 preserves the original count)
    h.fieldInfo, h.fields = finfo, #finfo

    if rowListOffset > 0 and h.rows > 0 then
        -- Sparse table: collect {id, offset, size} for each non-empty row.
        local ot, nr, pos, minLen, maxLen, sz = {}, 1, rowListOffset, math.huge, -math.huge
        for i=h.minId, h.maxId do
            pos, sz = pos + 6, uint16_le(data, pos+4)
            if sz > 0 then
                ot[nr], nr = {i, uint32_le(data, pos-6), sz}, nr + 1
                minLen, maxLen = minLen < sz and minLen or sz, maxLen > sz and maxLen or sz
            end
        end
        h.rows, h.rowList, h.maxRowSize, h.minRowSize, h.inlineStrings = #ot, ot, maxLen, minLen, minLen ~= maxLen
        feat.InlineStrings = h.inlineStrings
    elseif idmLength > 0 then
        local idMap, p = {}, mainDataEnd
        for i=1, h.rows do
            idMap[i], p = uint32_le(data, p), p + 4
        end
        h.idMap = idMap
    end
    h.featureDesc = serializeFlags(feat)

    if h.inlineStrings then
        h.rowRelativeStrings = nil
    end

    return h
end

return M
--------------------------------------------------------------------------------
/libs/dbc/headers/dc3_4_5.lua:
--------------------------------------------------------------------------------
-- SPDX-FileCopyrightText: © 2024 foxlit
-- SPDX-License-Identifier: Artistic-2.0

-- WDC3/WDC4/WDC5 header parser (multi-part tables).
local M, bin = { fourCC = { WDC3=1, WDC4=1, WDC5=1 } }, require("dbc.bin")
local uint32_le, uint16_le, int_le = bin.uint32_le, bin.uint16_le, bin.int_le

local function assertLEQ(a, b, message)
    if a > b then
        error(message .. ": " .. a .. " > " .. b, 2)
    end
end
-- Sets each truthy vararg as a key in the flag-set f.
local function setFlags(f, ...)
    for i=1,select("#", ...) do
        local v = select(i, ...)
        if v then
            f[v] = true
        end
    end
end
-- Returns the flag-set's truthy keys, sorted, space-separated.
local function serializeFlags(f)
    local fa = {}
    for k, v in pairs(f) do
        if v then
            fa[#fa+1] = k
        end
    end
    table.sort(fa)
    return table.concat(fa, " ")
end

function M:parseHeader(data, dbcM)
    local fourCC = data:sub(1,4)
    local is4, is5 = fourCC == 'WDC4', fourCC == 'WDC5'
    -- h0 is the extra header displacement; WDC5 prepends a 132-byte preamble.
    local h, feat, h0 = {parts={}}, {}, 0
    if is5 then
        local hv = uint32_le(data, 4)
        h0 = hv == 5 and 132 or error('DC5 unexpected header version [' .. hv ..
']')
    end
    h.rows, h.fields, h.stride, h.stringLength = uint32_le(data, h0+4), uint32_le(data, h0+8), uint32_le(data, h0+12), uint32_le(data, h0+16)
    h.minId, h.maxId, h.locale = uint32_le(data, h0+28), uint32_le(data, h0+32), uint32_le(data, h0+36)
    h.flags = uint16_le(data, h0+40)
    h.fields2, h.packStart, h.lookupCount = uint32_le(data, h0+44), uint32_le(data, h0+48), uint32_le(data, h0+52)
    local keyColumnID = 1+uint16_le(data, h0+42)
    local packLength = uint32_le(data, h0+56)
    local codLength = uint32_le(data, h0+60)
    local palLength = uint32_le(data, h0+64)
    local numParts = uint32_le(data, h0+68)
    local fieldInfoPos = h0 + 72 + numParts*40
    local packPos = packLength > 0 and (fieldInfoPos + 4*h.fields)
    assert(h.fields == h.fields2, "DC3+ header fields/fields2 values are inconsistent")
    assert(packLength == 0 or packLength/24 == h.fields, "DC3+ packing parity check")
    local eidLength, eidPos = 0, (packPos or fieldInfoPos) + packLength + palLength + codLength

    local hasExternalPrimaryID, hasStringBlock, hasInlineStrings, hasForeignKey = false, false, false, false
    -- Parts inherit shared header fields through h via their metatable.
    local partMeta = {__index=h}
    for i=1, numParts do
        local p = h0+32+i*40
        local pt = {
            partIndex=i,
            keyName = ("%02x%02x%02x%02x%02x%02x%02x%02x"):format(data:byte(p+1, p+8)),
            rowBase = uint32_le(data, p+8),
            rows = uint32_le(data, p+12),
            stringLength = uint32_le(data, p+16),
            rowsEnd = uint32_le(data, p+20),
            idmLength = uint32_le(data, p+24),
            relLength = uint32_le(data, p+28),
            rowListEntries = uint32_le(data, p+32),
            cloneLength = uint32_le(data, p+36)*8,
        }
        -- Walk the part's sections in file order, advancing p past each.
        if pt.rowsEnd > 0 then
            p = pt.rowsEnd
            pt.rowsLength = p - pt.rowBase
        else
            pt.rowsLength = h.stride * pt.rows
            p = pt.rowBase + pt.rowsLength + pt.stringLength
            pt.stringBase = p - pt.stringLength + 1
            pt.rowRelativeStrings = true
            hasStringBlock = hasStringBlock or (pt.stringLength > 0)
        end
        if pt.idmLength > 0 then
            pt.idmOffset, hasExternalPrimaryID, p = p, true, p + pt.idmLength
        end
        if pt.cloneLength > 0 then
            assert(pt.cloneLength % 8 == 0, 'DC3+ row cloning length parity check')
            pt.cloneOffset, p = p, p + pt.cloneLength
        end
        if pt.rowListEntries > 0 then
            pt.rowListOffset, hasExternalPrimaryID, p = p, true, p + pt.rowListEntries*6
        end
        if pt.relLength > 0 then
            pt.relBase, p = p, p + pt.relLength
        end
        if pt.rowListEntries > 0 then
            pt.rowIDListOffset, p = p, p + pt.rowListEntries*4
        end
        pt.partEndOffset = p
        -- An encrypted/absent part is left zero-filled in the file.
        pt.isPresent = (data:match("()%Z", pt.rowBase) or pt.partEndOffset) < pt.partEndOffset
        assertLEQ(pt.partEndOffset, #data, "DC3+ data too short")

        if pt.isPresent then
            if pt.rowListOffset and pt.rows > 0 then
                local lp, ip = pt.rowListOffset, pt.rowIDListOffset
                local ot, minLen, maxLen, sz = {}, math.huge, -math.huge
                for i=1, pt.rowListEntries do
                    lp, sz = lp + 6, uint16_le(data, lp+4)
                    if sz <= 0 then error('DC3+ unexpected zero-length row') end
                    ot[i], ip = {uint32_le(data, ip), uint32_le(data, lp-6), sz}, ip + 4
                    minLen, maxLen = minLen < sz and minLen or sz, maxLen > sz and maxLen or sz
                end
                -- When all inline strings in the part are of the same length, this doesn't work.
                -- There's normally enough variation /somewhere/, so this sets the flag globally
                -- via the h.inlineStrings __index if any part has length variability.
                pt.rows, pt.rowList, pt.maxRowSize, pt.minRowSize, pt.inlineStrings = #ot, ot, maxLen, minLen, minLen ~= maxLen or nil
                hasInlineStrings = hasInlineStrings or pt.inlineStrings

                if dbcM.PREFER_ERRORS and pt.idmLength > 0 then
                    local idmd = data:sub(pt.idmOffset+1, pt.idmOffset+pt.idmLength)
                    local rlid = data:sub(pt.rowIDListOffset+1, pt.rowIDListOffset+4*pt.rowListEntries)
                    assert(idmd == rlid, "DC3+ primary key disagreement")
                else
                    feat.IgnoredIDMaps = feat.IgnoredIDMaps or pt.idmLength > 0
                end
            elseif pt.idmLength > 0 then
                local idMap, p = {}, pt.idmOffset
                for i=1, pt.rows do
                    idMap[i], p = uint32_le(data, p), p + 4
                end
                pt.idMap = idMap
            end
            if pt.relLength > 0 then
                local p = pt.relBase
                local count, min, max = uint32_le(data, p), uint32_le(data, p+4), uint32_le(data, p+8)
                pt.fkField, hasForeignKey = {0,0, packType="ForeignKeyMap", adOfs=p+12, adLength=pt.relLength-12, pa1=count, pa2=min, pa3=max}, true
            end
        else
            feat.MissingParts = true
        end

        -- WDC4/WDC5 append per-part encrypted-ID lists after the aux data.
        if (is4 or is5) and pt.keyName ~= "0000000000000000" then
            local eidCount = uint32_le(data, eidPos)
            local eidPartLength = 4 + eidCount * 4
            if eidCount < pt.rows and dbcM.PREFER_ERRORS then
                error("DC4+ encrypted ID list underflow (part header declares more rows)")
            end
            eidLength, eidPos = eidLength + eidPartLength, eidPos + eidPartLength
        end

        h.parts[i] = setmetatable(pt, partMeta)
    end
    for i=1, numParts do
        assertLEQ(eidPos, h.parts[i].rowBase, "DC3+ part data in header region")
    end
    if hasStringBlock then -- addressing
        -- Use stringShift to map field-relative references to indices into a unified string block...
        local sbi, s, sn = {}, 0, 1
        for i=#h.parts, 1, -1 do
            local p = h.parts[i]
            p.stringShift, s = -s - p.stringBase, s + p.rowsLength
        end
        for i=1,#h.parts do
            local p = h.parts[i]
            if p.stringLength > 0 then
                sbi[sn], sbi[sn+1], sn = p.stringLength + (sbi[i+i-3] or 0), p.stringBase - (sbi[i+i-3] or 0), sn+2
            end
        end
        -- ... then convert unified string block indices to indices into the original data.
        if #sbi > 0 then
            sbi[#sbi+1], sbi[#sbi+2] = math.huge, #data+9e12+1
            local lastLow, lastHigh, lastShift, invalid = 0, sbi[1], sbi[2], sbi[#sbi]
            h.mapStringOffset = function(o)
                if o < 0 then
                    return invalid
                elseif o < lastLow or o >= lastHigh then
                    for i=1,#sbi,2 do
                        local si = sbi[i]
                        if si > o then
                            lastLow, lastHigh, lastShift = sbi[i-2] or 0, si, sbi[i+1]
                            break
                        end
                    end
                end
                return o + lastShift
            end
        end
    end
    if dbcM.PREFER_ERRORS and hasInlineStrings and hasStringBlock then
        error("DC3+ decoder does not support mixing in-line strings and string blocks")
    end
    h.inlineStrings = hasInlineStrings

    setFlags(feat,
        packLength > 0 and "PackInfo", codLength > 0 and "DefaultVals",
        palLength > 0 and "EnumFields", h.stringLength > 2 and "StringBlock",
        hasInlineStrings and "InlineStrings", hasInlineStrings and hasStringBlock and "MixedStrings",
        hasForeignKey and "ForeignKeys", h.lookupCount > 0 and "Lookup[" .. h.lookupCount .. "]",
        #h.parts > 1 and ("Parts[" .. #h.parts .. "]")
    )
    keyColumnID = (not hasExternalPrimaryID) and keyColumnID or nil

    local adOffsetPA = packPos and (packPos + packLength)
    local adOffsetCO = packPos and (adOffsetPA+palLength)
    local finfo, extraArrayFields, oddSizedFields, basicArrayFields, narrowedFields = {}, 0, 0, 0, ""
    for i=1, h.fields do
        local fi, sz, ofs = {}, 32-int_le(data, 2, fieldInfoPos-4 + 4*i), uint16_le(data, fieldInfoPos-2 + 4*i)
        finfo[#finfo+1], fi[1], fi[2] = fi, sz/8, ofs
        if packPos then
            fi.bitOffset, fi.bitSize, fi.adLength = uint16_le(data, packPos), uint16_le(data, packPos+2), uint32_le(data, packPos+4)
            fi.packType, fi.pa1, fi.pa2, fi.pa3 = uint32_le(data, packPos+8), uint32_le(data, packPos+12), uint32_le(data, packPos+16), uint32_le(data, packPos+20)
            packPos = packPos + 24
            if not (fi.packType == 0 or sz == 0 or fi.bitSize == sz) then
                error(('DC3+ field/packing width mismatch: field %d, pack %d, sz %d, bsz %d'):format(i, fi.packType, sz, fi.bitSize))
            end
            if fi.packType == 0 and fi.bitSize ~= sz and sz > 0 then
                -- Unpacked array field: expand into one entry per element.
                assert(fi.bitSize % sz == 0, 'DC3+ array field width parity check')
                basicArrayFields = basicArrayFields + (fi.bitSize/sz-1)
                for j=2,fi.bitSize/sz do
                    finfo[#finfo+1] = {sz/8, ofs+sz*(j-1)}
                end
            elseif fi.adLength == 0 then
            elseif fi.packType == 2 then
                fi.adOfs, adOffsetCO = adOffsetCO, adOffsetCO + fi.adLength
            else
                assert(fi.packType == 3 or fi.packType == 4, "DC3+ field packing type " .. tostring(fi.packType) .. " not implemented")
                fi.adOfs, adOffsetPA = adOffsetPA, adOffsetPA + fi.adLength
                if fi.adLength > 4 and dbcM.SCAN_PACK_WIDTH then
                    -- Scan the pallet data to see if entries fit 1 or 2 bytes.
                    local sbyte, p = string.byte, fi.adOfs+4
                    local w, b0,c0,d0, b1,c1,d1 = 1, sbyte(data, p-2, p)
                    for p=p, adOffsetPA-1, 4 do
                        b1,c1,d1 = sbyte(data, p+2, p+4)
                        if c1 ~= c0 or d1 ~= d0 then
                            w = 4
                            break
                        elseif b1 ~= b0 then
                            w = 2
                        end
                    end
                    fi.packWidth = w
                    if w < 4 then
                        narrowedFields = (narrowedFields ~= "" and narrowedFields .. ";" or "") .. i .. (w == 1 and "b" or "s")
                    end
                end
                if fi.packType == 4 and fi.pa3 >= 1 then
                    -- Palletized array: repeat the same field info per element.
                    fi.firstFieldIndex, extraArrayFields = #finfo, extraArrayFields + fi.pa3 - 1
                    for i=2, fi.pa3 do
                        finfo[#finfo+1] = fi
                    end
                end
            end
            oddSizedFields = oddSizedFields + (fi.packType ~= 0 and 1 or 0)
        end
        if i == keyColumnID then
            h.idField = #finfo
        end
    end
    if hasForeignKey then
        finfo[#finfo+1] = {0,0, packType="ForeignKeyMap"}
    end
    feat.ArrayFields = basicArrayFields > 0
    feat.DictArrayFields = extraArrayFields > 0
    feat.PackedFields = oddSizedFields > 0
    feat["NarrowedFields[" .. narrowedFields .. "]"] = narrowedFields ~= "" or nil
    if hasInlineStrings then
        h.rowRelativeStrings = nil
    end

    -- update h.fields to match the expanded field count; h.fields2 retains the header value
    h.fieldInfo, h.fields = finfo, #finfo
    h.featureDesc = serializeFlags(feat)

    return h
end

return M
--------------------------------------------------------------------------------
/libs/dbc/init.lua:
--------------------------------------------------------------------------------
-- SPDX-FileCopyrightText: © 2024 foxlit
-- SPDX-License-Identifier: Artistic-2.0

local M, bin = {MAX_GUESS_ATTEMPTS=50, TARGET_TEST_SET_SIZE=100, PREFER_ERRORS=true, SCAN_PACK_WIDTH=true, _VERSION="LuaDBC 1.15"}, require("dbc.bin")

local uint32_le, int32_le, float32_le = bin.uint32_le, bin.int32_le, bin.float32_le
local int_le, uint_le = bin.int_le, bin.uint_le
local pint_le, upint_le, u32_float = bin.pint_le, bin.upint_le, bin.u32_float

local POSSIBLE_FLOAT_PACK_TYPES = {[1]=true, [3]=true, [4]=true}
local FIELD_TYPE_BASE_EQUIV = {I="i", U="u"}
local FIELD_TYPE_ALLOW_NARROW = {i=1, u=1}

local function assertLEQ(a, b, message)
    if a > b then
        error(message .. ": " .. a .. " > " .. b, 2)
    end
end

-- Infers which 4-byte fields hold inline strings by tracing consistent
-- string/fixed-width interpretations across a sample of rows.
-- Returns a mask string ("s" = string, "." = fixed) plus all valid masks.
local function guessInlineStrings(s, fields, fi, fmt, pos)
    -- Per-field state: "?" undecided, "s" string, "." fixed-width.
    local stringState = {}
    local p = 0
    for i=1,fields do
        local fc = fmt:sub(i,i)
        if fc == "s" and fi[i][1] ~= 4 then
            error('Invalid signature: declared string field has unexpected width', 3)
        end
        if fi[i][1] == 4 and (fc == "?" or fc == "s" or fc == "") then
            stringState[i] = "?"
        else
            stringState[i] = "."
32 | end 33 | p = p + fi[i][1] 34 | end 35 | 36 | local traceArchive = {} 37 | for i=1,#pos do 38 | local base, oEnd, currentTrace = pos[i], pos[-i]-pos[i], {[0]={[-1]={nil,fi[1][1]}, [0]={{},{}}}} 39 | local function insertQueueEntry(fid, ofs, src, k) 40 | if ofs > oEnd then return end 41 | local builtNode = src do 42 | local srcNode = currentTrace[src][fid-1] 43 | builtNode = {src} 44 | if fid == 1 then 45 | for u=2,i do 46 | local otherRootFW = traceArchive[u-1][0][-1] 47 | builtNode[u] = {[otherRootFW[k]] = 1} 48 | end 49 | else 50 | local fsz = fi[fid][1] 51 | for u=2,i do 52 | for k2=1,2 do 53 | local srcArr = srcNode[k2] 54 | for i=1,#srcArr do 55 | local otherTrace, otherSrc = traceArchive[u-1], srcArr[i][u] 56 | for j in pairs(otherSrc) do 57 | local otherSrc = otherTrace[j] 58 | local otherFW = otherSrc and otherSrc[-1] 59 | local otherDst = k == 2 and (j+fsz) or (otherFW and otherFW[k]) 60 | local otherDstNode = otherTrace[otherDst] 61 | if otherDstNode and otherDstNode[fid] and otherDstNode[fid][k] then 62 | local as = builtNode[u] or {} 63 | as[otherDst] = 1 64 | builtNode[u] = as 65 | end 66 | end 67 | end 68 | end 69 | if not builtNode[u] then 70 | return 71 | end 72 | end 73 | end 74 | end 75 | 76 | local e = currentTrace[ofs] 77 | if not e then 78 | e = {} 79 | currentTrace[ofs] = e 80 | end 81 | 82 | if k == 1 then 83 | currentTrace[src][-1][k] = ofs 84 | end 85 | 86 | e[fid] = e[fid] or {{},{}} 87 | e = e[fid][k] 88 | e[#e+1] = builtNode 89 | end 90 | 91 | for ofs=0,oEnd do 92 | if currentTrace[ofs] then 93 | currentTrace[ofs][-1] = currentTrace[ofs][-1] or {} 94 | for ofid in pairs(currentTrace[ofs]) do 95 | local fid = ofid+1 96 | if fid > 0 and fid <= fields then 97 | local state = stringState[fid] 98 | if state ~= "." 
then 99 | local sr, nofs = s:match("(%Z*)()", 1+ofs+base) 100 | if not sr:match("[^%C\n\r\t]") then 101 | insertQueueEntry(fid, nofs-base, ofs, 1) 102 | end 103 | end 104 | if state ~= "s" then 105 | insertQueueEntry(fid, ofs + fi[fid][1], ofs, 2) 106 | end 107 | end 108 | end 109 | end 110 | end 111 | 112 | -- Remove parallel traces 113 | for _k, v in pairs(currentTrace) do 114 | for f, m in pairs(v) do 115 | for i=1,f == -1 and 0 or 2 do 116 | local a = m[i] 117 | for j=1,#a do 118 | a[j] = a[j][1] 119 | end 120 | end 121 | end 122 | end 123 | 124 | -- Mark anything that reaches into the null tail of the record as "needed" 125 | local nullEnd = #s:sub(1+pos[i], pos[-i]):match("(%z*)$") 126 | currentTrace[oEnd] = currentTrace[oEnd] or {} 127 | currentTrace[oEnd][fields] = currentTrace[oEnd][fields] or {{},{}} 128 | for i=oEnd-nullEnd,oEnd do 129 | local e = currentTrace[i] 130 | if e and e[fields] then 131 | e.n = e.n or {[fields]=1} 132 | end 133 | end 134 | 135 | -- Cull links which cannot lead to the null tail 136 | local seenFieldState = {} 137 | for i=oEnd,1,-1 do 138 | local e = currentTrace[i] 139 | if e == nil or not e.n then 140 | currentTrace[i] = nil 141 | else 142 | local n = e.n 143 | for f, a in pairs(e) do 144 | if f == -1 then 145 | elseif not n[f] then 146 | e[f] = nil 147 | else 148 | for k=1,2 do 149 | local a = a[k] 150 | seenFieldState[#a == 0 and 0 or k == 1 and -f or f] = true 151 | for j=1,#a do 152 | local e2 = currentTrace[a[j]] 153 | local nt = e2.n or {} 154 | nt[f-1], e2.n = 1, nt 155 | end 156 | end 157 | end 158 | end 159 | end 160 | end 161 | traceArchive[i] = currentTrace 162 | 163 | -- Some fields may only appear as strings or non-strings on goal-reaching traces; 164 | -- this can restrict future traces, and if no choices remain, return an early end. 165 | local fixedStringState = true 166 | for i=1,fields do 167 | if stringState[i] == "?" 
then 168 | local seenFixed, seenString = seenFieldState[i], seenFieldState[-i] 169 | if seenFixed and not seenString then 170 | stringState[i] = "." 171 | elseif seenString and not seenFixed then 172 | stringState[i] = "s" 173 | else 174 | fixedStringState = false 175 | end 176 | end 177 | end 178 | if fixedStringState then 179 | return table.concat(stringState, "", 1, fields) 180 | end 181 | end 182 | 183 | -- There are multiple options remaining. 184 | local validMasks, outputMaskBuffer = {}, {} 185 | local function backSearch(front, fid) 186 | if fid == 0 then 187 | validMasks[#validMasks+1] = table.concat(outputMaskBuffer, "", 1, fields) 188 | return 189 | end 190 | local az = fid == 1 and 0 or "NOT A VALID FOLLOW INDEX" 191 | local nfid, oc, f1, f2 = fid-1, 0 192 | for k=2,1,-1 do 193 | local nf = {} 194 | for i=1,#front do 195 | local nfi = {} 196 | for ii=1,#front[i] do 197 | local a = front[i][ii][k] 198 | for j=1,#a do 199 | local np = a[j] 200 | nfi[#nfi+1] = np == az or traceArchive[i][np][nfid] 201 | end 202 | end 203 | if not nfi[1] then 204 | f1, f2, nf = k == 1 and i or f1, k == 2 and i or f2 205 | break 206 | end 207 | nf[i] = nfi 208 | end 209 | if nf then 210 | outputMaskBuffer[fid] = k == 1 and "s" or "." 
				backSearch(nf, nfid)
				oc = oc + 1
			end
		end
	end
	local frontier = {} -- Merge every offset claiming to have a solution for [fields]
	for i=1,#traceArchive do
		local m1, m2 = {}, {}
		for _k, v in pairs(traceArchive[i]) do
			if v[fields] then
				for k=1,2 do
					local a,m = v[fields][k], k == 1 and m1 or m2
					for i=1,#a do
						m[a[i]] = 1
					end
				end
			end
		end
		local a1, a2 = {}, {}
		for k in pairs(m1) do a1[#a1+1] = k end
		for k in pairs(m2) do a2[#a2+1] = k end
		frontier[i] = {{a1, a2}}
	end
	backSearch(frontier, fields)

	-- First surviving mask is the preferred guess; all masks are also returned.
	return validMasks[1], validMasks
end
-- Expands a run-length count in a signature ("3u" -> "uuu").
local function extendType(count, sym)
	return sym:rep(tonumber(count))
end
-- Folds one sampled value into the (integer, float) plausibility scores:
-- ic always advances by one; fc gains 1 for floats of "reasonable" magnitude
-- [1e-7, 1e16], 0.75 for exact zero, and loses 1 otherwise.
local function updateFloatScores(ic, fc, _iv, fv)
	fv = fv < 0 and -fv or fv
	return ic+1, fc + (fv >= 1e-7 and fv <= 1e16 and 1 or fv == 0 and 0.75 or -1)
end
-- Heuristically infers a row signature (one of "uifsFL" per field) by sampling
-- row data. `fmt` optionally supplies caller-provided type hints; "?" entries
-- (and absent positions) are guessed from the sampled bytes.
local function guessTypes(s, header, fmt)
	local rows, fields = header.rows, header.fields
	if rows < 1 then return ("u"):rep(fields) end

	local sbase, fi, is = header.stringBase, header.fieldInfo, header.inlineStrings
	local redo, pos, slen = 0, {}, header.stringLength or 0
	local rowRelativeStrings = header.rowRelativeStrings
	local stringShift = header.stringShift or 0
	local stringBlockMap = header.mapStringOffset
	-- Sample row offsets: a few rows at both ends plus an evenly spaced subset
	-- (at most roughly M.TARGET_TEST_SET_SIZE interior rows). pos[i] is the
	-- current scan offset for sample i, pos[-i] its expected end, rpos[i] its
	-- original start (used to rewind on a retry).
	local rpos = {} do
		local t, dropPad = {}, header.dropPadding or 0
		for i=-5,4 do t[i % rows] = 1 end
		local sm = math.max(1,math.ceil((rows-15)/M.TARGET_TEST_SET_SIZE))
		for i=10, rows-5,sm do t[i] = 1 end
		local rl, base, stride = header.rowList, header.rowBase, header.stride
		for k in pairs(t) do
			pos[#pos+1] = rl and rl[k+1][2] or (base + k*stride)
			pos[-#pos], rpos[#pos] = pos[#pos]+(rl and rl[k+1][3] or stride)-dropPad, pos[#pos]
		end
	end

	-- Normalize the hint: strip table markers, map unknown letters to "?",
	-- expand counts, then expand a single "*x" to fill the field count.
	fmt = fmt and fmt:gsub("[{}]", ""):gsub("[^uifsFL%d*]", "?"):gsub("(%d+)(%D)", extendType) or "*?"
	fmt = fmt:gsub("%*(.)", ("%1"):rep(fields-#fmt+1))
	local userFormat, o = fmt

	local isFormatHint = fi and is and guessInlineStrings(s, fields, fi, fmt, pos)
	repeat
		fmt, o = fmt .. ("?"):rep(fields-#fmt), ""
		for i=1,redo > 0 and #pos or 0 do
			pos[i] = rpos[i]
		end

		local nsc, maybeStrings = 0, is and {}
		for j=1,fields do
			local fj = fi and fi[j]
			local fsz, ft = fj and fj[1] or 4, fmt:sub(j,j)
			if ft ~= "?" then
			elseif fj and fj.packType == "ForeignKeyMap" then
				ft = "F"
			elseif fsz == 0 then
				-- Bit-packed field: signedness comes from pack metadata, then
				-- sampled values are scored to detect packed floats.
				ft = fj and fj.packType == 1 and fj.pa3 and fj.pa3 % 2 == 1 and "i" or "u"
				local ic, fc = 0, 0
				for i=1, ft == "u" and POSSIBLE_FLOAT_PACK_TYPES[fj.packType] and #pos or 0 do
					local iv
					if fj.packType == 1 then
						iv = upint_le(s, fj.bitSize, rpos[i]*8+fj.bitOffset)
					elseif fj.packType == 3 or fj.packType == 4 then
						local cf = j
						local extraSkip, aw = fj.packType == 4 and 4*(cf-fj.firstFieldIndex) or 0, fj.packType == 4 and 4*fj.pa3 or 4
						local ix = upint_le(s, fj.bitSize, rpos[i]*8+fj.bitOffset)
						iv = uint_le(s, 4, fj.adOfs+extraSkip+ aw*ix)
					end
					if iv then
						ic, fc = updateFloatScores(ic, fc, iv, u32_float(iv))
					end
				end
				if fc > 0 and fc >= ic/1.25 then
					ft = "f"
				end
			elseif fsz == 4 then
				-- Score the sampled 32-bit values as string references (sc)
				-- versus integers (ic) versus floats (fc).
				local sc, ic, fc, nb = 0, 0, 0, 0
				for i=1,#pos do
					local uv, fv = uint32_le(s, pos[i]), float32_le(s, pos[i])
					nb = nb + (is and uv % 256 == 0 and 1 or 0)
					if uv ~= 0 then
						local sbase = rowRelativeStrings and 1+pos[i]+stringShift or sbase
						local spos = sbase and (sbase + uv - 1)
						if stringBlockMap then spos = stringBlockMap(spos) end
						local cs = is and s:match("^(%Z*)", 1+pos[i]) or (not is and uv < slen and spos and s:match("^%z(%Z+)", spos))
						if cs and (not is or (cs:match("^[%C\n\r\t]*$") and cs:match("%w%w%w"))) then
							sc, ic = sc + 1, ic - 1
						elseif is and cs == "" then
							sc = sc + math.random(0, 1)
						else
							sc, ic, fc = sc - (is and uv % 256 == 0 and 0 or 1), updateFloatScores(ic, fc, uv, fv)
						end
					end
				end
				if is and isFormatHint then
					sc = ic + (isFormatHint:sub(j,j) == "s" and 1 or -1)
				elseif is and (sc >= ic and redo > 1 and math.random() < 0.10) then
					sc, maybeStrings[#maybeStrings+1] = ic-1, j
				elseif nb == #pos and sc <= ic then
					maybeStrings[#maybeStrings+1] = j
				end
				ft = sc > ic and "s" or (fc > 0 and fc >= ic/1.25 and "f" or "i")
			elseif fsz == 8 and bin.PRECISION_BYTES < fsz then
				ft = "L"
			else
				ft = "u"
			end
			if ft == "s" and is then
				nsc = nsc + (userFormat:sub(j,j) == "s" and 0 or 1)
				for i=1,#pos do
					pos[i] = s:match("%Z*()", 1+pos[i])
				end
			else
				for i=1,#pos do
					pos[i] = pos[i]+fsz
				end
			end
			o = o .. ft
		end

		-- With inline strings, a wrong "s" guess desynchronizes the row scan;
		-- if a sampled row did not land near its expected end, retry with a
		-- randomly adjusted set of string columns (bounded by MAX_GUESS_ATTEMPTS).
		for i=1,is and not isFormatHint and #pos or 0 do
			local cur, exp = pos[i], pos[-i]
			if cur > exp or (exp-cur) > 3 then
				redo = redo + 1
				assert(redo < M.MAX_GUESS_ATTEMPTS, "Exceeded attempt limit; cannot guess row format")
				assert(maybeStrings[1] or nsc > 0, "Irrecoverable format guess")
				if math.random() > 0.5 and maybeStrings[1] then
					fmt, o = o:sub(1, maybeStrings[math.random(#maybeStrings)]-1) .. "s"
				else
					-- Keep only a random prefix of the guessed (non-user) "s"
					-- columns; user-specified "s" columns are always retained.
					local o2, c = "", math.random(nsc)-1
					for block, sp in o:gmatch("([^s]*)()s?") do
						if userFormat:sub(sp, sp) == "s" then
							o2 = o2 .. block .. "s"
						else
							o2, c = o2 .. block .. (c > 0 and "s" or ""), c-1
							if c < 0 then
								break
							end
						end
					end
					fmt, o = o2
				end
				break
			end
		end
	until o

	-- Append types for computed ("co") columns not stored in the row data.
	if header.coTypes and #header.coTypes > 0 then
		local ucTypes, coTypes = userFormat:sub(fields+1), header.coTypes
		for i=1,#header.coTypes do
			local ut = ucTypes:sub(i,i)
			o = o .. ((ut == "?" or ut == "") and coTypes:sub(i,i) or ut)
		end
	end
	return o
end

-- Iterator body used when a table has no rows to yield.
local function emptyNext()
end
-- Returns the index of the largest present part and the count of present parts.
local function findLargestPart(header)
	local largePartRows, largePartID, numPresentParts = 0, 0, 0
	for i=1,#header.parts do
		local p = header.parts[i]
		if p.isPresent and p.rows > 0 then
			numPresentParts = numPresentParts + 1
			if largePartRows < p.rows then
				largePartRows, largePartID = p.rows, i
			end
		end
	end
	return largePartID, numPresentParts
end
-- Compiles and returns a row-iterator function for (a part of) a table.
-- `format` is the caller's row signature; "?" entries are resolved via
-- guessTypes, shared across parts through `guessedTypes`.
local function createUnpacker(data, header, format, loose, partID, guessedTypes)
	if header.parts ~= nil and partID == nil then
		-- Multi-part table: build one unpacker per present part and chain them.
		local largePartID, numPresentParts = findLargestPart(header)
		if numPresentParts == 0 then
			return emptyNext
		elseif numPresentParts == 1 then
			return createUnpacker(data, header, format, loose, largePartID)
		end
		local guessedTypes do
			local largestPart = header.parts[largePartID]
			local format2 = loose and largestPart.idField and not format:match("%*") and format .. "*."
or format 412 | if format2:match(largestPart.inlineStrings and '[.?]' or '[?]') then 413 | guessedTypes = guessTypes(data, largestPart, format2) 414 | end 415 | end 416 | local iterators = {} 417 | for i=1,#header.parts do 418 | local p = header.parts[i] 419 | if p.isPresent and p.rows > 0 then 420 | iterators[#iterators+1] = createUnpacker(data, header, format, loose, i, guessedTypes) 421 | end 422 | end 423 | local cf, cn, nextRow = iterators[1], 2 424 | local function checkOutput(s, ...) 425 | if select("#", ...) > 0 or #iterators < cn then 426 | return ... 427 | end 428 | cf, cn = iterators[cn], cn + 1 429 | return nextRow(s) 430 | end 431 | function nextRow(s) 432 | return checkOutput(s, cf(s)) 433 | end 434 | return nextRow, data 435 | elseif partID and header.parts then 436 | header = header.parts[partID] 437 | end 438 | 439 | if header.rows == 0 then 440 | -- This does not validate the signature, but some formats contain conflicting nonsense when empty. 441 | return emptyNext 442 | end 443 | 444 | local rows, fields, stride, sbase, rbase = header.rows, header.fields, header.stride, header.stringBase, header.rowBase 445 | local fi, cf, rl, idf, copy = header.fieldInfo, 1, header.rowList, header.idField 446 | if header.cloneOffset and header.cloneLength > 0 then 447 | copy = {} 448 | for i=header.cloneOffset, header.cloneOffset+header.cloneLength-1, 8 do 449 | local cid, oid = uint32_le(data, i), uint32_le(data, i+4) 450 | copy[oid], copy[cid] = cid, copy[oid] 451 | end 452 | end 453 | if loose and idf and not format:match("%*") then 454 | format = format .. "*." 455 | end 456 | 457 | local ctPrefix = copy and '\t' or '' 458 | local p, po = {"lid"}, {[=[-- casc.dbc:iterator 459 | local smatch, uint_le, int_le, float32_le, upint_le, pint_le, u32_float, cov, idMap, ct, rows, stride, sbase, r0, rList, minId, dfo, fkMap, sbm = ... 
460 | local i, ii, lid = 0, nil, nil 461 | return function(data)]=], 462 | copy and '\tlid = ct[lid]\n\tif rows > i or lid then\n\t\tif not lid then' or '\tif rows > i then', 463 | ctPrefix..(rl and '\t\tr0, i, lid, ii = rList[i+1][2], i + 1, rList[i+1][1], rList[i+1][1]' or '\t\tr0, i, lid = r0 + stride, i + 1, ' .. (header.idMap and "idMap[i+1]" or "minId+i")), 464 | copy and '\t\tend' or nil, 465 | } 466 | 467 | local nfi = fi and fi[1] 468 | local skip, fsz, openTables, cr, crn = nfi and nfi[2] or 0, nfi and nfi[1] or 4, 0, 'r0+', 1 469 | local gt = format:match(header.inlineStrings and '[.?]' or '[?]') and (guessedTypes or guessTypes(data, header, format)) 470 | local tFields = (header.coVals and #header.coVals or 0) + fields 471 | local defaultOverrides = {} 472 | local missingLocalRowIndex = true 473 | local fkField, fkMap = header.fkField 474 | local rowRelativeStrings = header.rowRelativeStrings 475 | local stringShift = header.stringShift or 0 476 | local stringBlockMap = header.mapStringOffset 477 | 478 | for r, t, tp in format:gmatch("(%*?%d*)(.)()") do 479 | for i=1,r == '*' and (tFields-cf+1) or tonumber(r) or 1 do 480 | local t, pt, adv, sigType = t == '?' and gt:sub(cf, cf) or t, nfi and nfi.packType 481 | t, sigType = FIELD_TYPE_BASE_EQUIV[t] or t, t 482 | if cf == idf and fsz <= 5 and missingLocalRowIndex then 483 | if fsz == 0 and nfi and nfi.bitSize and nfi.bitOffset then 484 | local se = pt == 1 and (nfi.pa3 and nfi.pa3 % 2 == 1) 485 | po[#po+1] = ('%s\t\tlid = %s(data, %d, r0*8+%d)'):format(ctPrefix, se and "pint_le" or "upint_le", nfi.bitSize, nfi.bitOffset), 1 486 | else 487 | po[#po+1] = ctPrefix .. '\t\tlid = uint_le(data, ' .. fsz .. ', ' .. cr .. skip .. 
')' 488 | end 489 | missingLocalRowIndex = false 490 | end 491 | if t == '{' then 492 | p[#p+1], openTables = '{', openTables + 1 493 | if tp == 2 and #p == 2 then 494 | p[1], p[2] = '{', '[0]=lid' 495 | end 496 | elseif t == '}' then 497 | assert(openTables > 0, 'invalid signature: no table to close here') 498 | for j=#p-1, 1, -1 do 499 | if p[j] == '{' then 500 | p[j], openTables = '{' .. table.concat(p, ', ', j+1) .. '}', openTables - 1 501 | for k=#p,j+1,-1 do p[k] = nil end 502 | break 503 | end 504 | end 505 | elseif i > tFields then 506 | error("invalid signature: too many specified fields") 507 | elseif cf > fields and header.coTypes then 508 | local oid = cf-fields 509 | local hfType = header.coTypes:sub(oid, oid) 510 | adv, fsz = 1, 0 511 | if t == hfType or t == '?' then 512 | p[#p+1] = ('cov[%d][lid]'):format(oid) 513 | elseif t ~= '.' then 514 | error(("invalid signature: extra field type mismatch: %q expected, got %q"):format(hfType, t)) 515 | end 516 | elseif t == '.' then 517 | adv = 1 518 | if header.inlineStrings and gt:sub(cf,cf) == 's' and tp <= #format then 519 | po[#po+1], cr, crn = 2, ('\t\tlocal r%d = smatch(data, "%%Z*()", 1+%s%d)'):format(crn, cr, skip), 'r'..crn..'+', crn + 1 520 | skip, fsz = 0, 0 521 | end 522 | elseif nfi and pt == 2 then 523 | assert((nfi.adOfs and nfi.adLength) or nfi.adLength == 0, 'Default/Override field missing override data') 524 | if not defaultOverrides[cf] then 525 | local dv, ofs = nfi.pa1, nfi.adOfs or 0 526 | local ot = setmetatable({}, {__index=function() return dv end}) 527 | local vr = t == 'i' and int32_le or t == 'f' and float32_le or uint32_le 528 | for p=ofs,ofs+nfi.adLength-1,8 do 529 | local rid, v = uint32_le(data, p), vr(data, p+4) 530 | ot[rid] = v 531 | end 532 | defaultOverrides[cf] = ot 533 | end 534 | p[#p+1], adv = ('dfo[%d][lid or i]'):format(cf), 1 535 | elseif t == 'F' or pt == 'ForeignKeyMap' then 536 | assert(fkField, 'No foreign key information present') 537 | assert(t == 'F' or t == 
'u' or t == 'i', 'Signature assigns a strange type to a foreign key field: ' .. tostring(t)) 538 | if not fkMap then 539 | fkMap = {} 540 | local readInt = t == 'i' and int32_le or uint32_le 541 | for p=fkField.adOfs, fkField.adOfs+fkField.adLength-1, 8 do 542 | local v, idx = readInt(data, p), readInt(data, p+4) 543 | fkMap[idx] = v 544 | end 545 | end 546 | p[#p+1], adv = 'fkMap[ii or (i-1)]', pt == 'ForeignKeyMap' 547 | elseif (t == 'u' or t == 'i') and fsz == 0 and nfi and nfi.bitSize and nfi.bitOffset then 548 | local signed = (pt == 1 or pt == 5) and t == 'i' 549 | p[#p+1], adv = cf == idf and 'lid' or ('%s(data, %d, r0*8+%d)'):format(signed and "pint_le" or "upint_le", nfi.bitSize, nfi.bitOffset), 1 550 | if pt == 3 or pt == 4 then 551 | local extraSkip, aw = pt == 4 and 4*(cf-nfi.firstFieldIndex) or 0, pt == 4 and 4*nfi.pa3 or 4 552 | p[#p] = ('%s(data, %d, %d+%d*%s)'):format(t == 'u' and 'uint_le' or 'int_le', FIELD_TYPE_ALLOW_NARROW[sigType] and nfi.packWidth or 4, 553 | nfi.adOfs+extraSkip, aw, p[#p]) 554 | end 555 | elseif t == 'u' then 556 | assertLEQ(1, fsz, "Unacceptable field size (u)") 557 | p[#p+1], adv = cf == idf and 'lid' or ('uint_le(data, ' .. fsz .. ', ' .. cr .. skip .. ')'), 1 558 | elseif t == 'i' then 559 | assertLEQ(1, fsz, "Unacceptable field size (i)") 560 | p[#p+1], adv = 'int_le(data, ' .. fsz .. ', ' .. cr .. skip .. ')', 1 561 | elseif t == 'f' then 562 | if pt == 1 then 563 | p[#p+1], adv = ('u32_float(upint_le(data, %d, r0*8+%d))'):format(nfi.bitSize, nfi.bitOffset), 1 564 | elseif pt == 3 or pt == 4 then 565 | local extraSkip, aw = pt == 4 and 4*(cf-nfi.firstFieldIndex) or 0, pt == 4 and 4*nfi.pa3 or 4 566 | p[#p+1], adv = ('upint_le(data, %d, r0*8+%d)'):format(nfi.bitSize, nfi.bitOffset), 1 567 | p[#p] = ('u32_float(uint_le(data, 4, %d+%d*%s))'):format(nfi.adOfs+extraSkip, aw, p[#p]) 568 | else 569 | assertLEQ(1, fsz, "Unacceptable field size (f)") 570 | p[#p+1], adv = 'float32_le(data, ' .. cr .. skip .. 
')', 1 571 | end 572 | elseif t == 'L' then 573 | assert(fsz == 8, "Unacceptable field size (L)") 574 | p[#p+1], adv = '{ uint_le(data, 4, ' .. cr .. skip .. '), uint_le(data, 4, ' .. cr .. skip .. '+4), m=2^32}', 1 575 | elseif t == 's' then 576 | if header.inlineStrings then 577 | p[#p+1], adv = 's' .. crn, 1 578 | po[#po+1], cr, crn = ('\t\tlocal s%d, r%d = smatch(data, "(%%Z*)()", 1+%s%d)'):format(crn, crn, cr, skip, crn, crn), 'r'..crn..'+', crn + 1 579 | skip, fsz = 0, 0 580 | else 581 | assert(sbase, "invalid signature: 's' requires a string block") 582 | local base = rowRelativeStrings and cr .. (skip+1) .. (stringShift ~= 0 and " + " .. stringShift or "") or 'sbase' 583 | local expr = base .. ' + uint_le(data, 4, ' .. cr .. skip .. ')' 584 | if stringBlockMap then 585 | expr = 'sbm(' .. expr .. ')' 586 | end 587 | p[#p+1], adv = 'smatch(data, "%Z*", ' .. expr .. ')', 1 588 | end 589 | else 590 | error('Unknown signature field type "' .. t .. '"') 591 | end 592 | if adv then 593 | cf, nfi = cf + 1, fi and fi[cf+1] 594 | skip, fsz = skip + fsz, nfi and nfi[1] or 4 595 | end 596 | end 597 | end 598 | assert(openTables == 0, 'invalid signature: missing closing table marker' .. (openTables > 1 and "s" or "")) 599 | if not loose then 600 | local grace = fkField and 1 or 0 601 | assertLEQ(tFields, cf-1+grace, 'invalid signature: too few fields specified') 602 | assertLEQ(skip, header.stride, 'invalid signature: field length exceeds stride') 603 | end 604 | 605 | po[#po+1] = '\t\treturn ' 606 | local code = table.concat(po, '\n') .. table.concat(p, ", ") .. 
'\n\tend\nend' 607 | local minId = (header.minId or 0) == 0 and 1 or header.minId 608 | defaultOverrides = next(defaultOverrides) and defaultOverrides or nil 609 | return (loadstring or load)(code)(string.match, uint_le, int_le, float32_le, upint_le, pint_le, u32_float, 610 | header.coVals, header.idMap, copy, rows, stride, sbase, rbase - stride, header.rowList, minId, defaultOverrides, fkMap, 611 | stringBlockMap 612 | ), fi and (header.stride - (fields+1-cf)) or skip 613 | end 614 | 615 | local parsers = {} do 616 | local modules = {"dbc", "db2", "db5_6", "dc1_2", "dc3_4_5"} 617 | for i=1,#modules do 618 | local ok, m = pcall(require, "dbc.headers." .. modules[i]) 619 | if ok and type(m) == "table" and type(m.parseHeader) == "function" and type(m.fourCC) == "table" then 620 | for fourCC in pairs(m.fourCC) do 621 | parsers[fourCC] = m 622 | end 623 | end 624 | end 625 | end 626 | 627 | function M.header(data) 628 | assert(type(data) == "string", 'Syntax: header = dbc.header("data")') 629 | assertLEQ(4, #data, "dbc.header: data too short") 630 | local hm = parsers[data:sub(1,4)] or error("Unsupported DBC format [" .. (data:match("^%w%w%w%w") or ("%02x%02x%02x%02x"):format(data:byte(1,4))) .. 
"]") 631 | return hm:parseHeader(data, M) 632 | end 633 | 634 | function M.rows(data, sig, loose, header) 635 | assert(type(data) == "string" and type(sig) == "string", 'Syntax: dbc.rows("data", "rowSignature"[, loose[, header]])') 636 | assertLEQ(4, #data, "dbc.rows: data too short") 637 | 638 | local h = type(header) == "table" and header or M.header(data) 639 | local iter = createUnpacker(data, h, sig, loose) 640 | 641 | return iter, data 642 | end 643 | 644 | function M.fields(data, sig) 645 | assert(type(data) == "string" and (sig == nil or type(sig) == "string"), 'Syntax: casc.dbc.fields("data"[, "rowSignature"])') 646 | local h = M.header(data) 647 | return guessTypes(data, h.parts == nil and h or h.parts[findLargestPart(h)], sig) 648 | end 649 | 650 | return M --------------------------------------------------------------------------------