├── .github └── workflows │ ├── nuget.yml │ └── tests.yml ├── .gitignore ├── DBCD.Benchmark ├── Benchmarks │ ├── ReadingBenchmark.cs │ ├── StringTableBench.cs │ └── WritingBenchmark.cs ├── DBCD.Benchmark.csproj ├── Program.cs └── Utilities │ └── IO.cs ├── DBCD.IO ├── Attributes │ ├── CardinalityAttribute.cs │ ├── ForeignAttribute.cs │ ├── ForeignReferenceAttribute.cs │ ├── IndexAttribute.cs │ ├── LocaleAttribute.cs │ ├── RelationAttribute.cs │ └── SizeInBitsAttribute.cs ├── Common │ ├── BitReader.cs │ ├── BitWriter.cs │ ├── DBStructs.cs │ ├── HTFXStructs.cs │ ├── IDBRow.cs │ ├── IDBRowSerializer.cs │ ├── OrderedHashSet.cs │ └── Value32Comparer.cs ├── DB2Flags.cs ├── DBCD.IO.csproj ├── DBParser.cs ├── Extensions.cs ├── FieldCache.cs ├── HotfixReader.cs ├── Readers │ ├── BaseEncryptionSupportingReader.cs │ ├── BaseReader.cs │ ├── HTFXReader.cs │ ├── WDB2Reader.cs │ ├── WDB3Reader.cs │ ├── WDB4Reader.cs │ ├── WDB5Reader.cs │ ├── WDB6Reader.cs │ ├── WDBCReader.cs │ ├── WDC1Reader.cs │ ├── WDC2Reader.cs │ ├── WDC3Reader.cs │ ├── WDC4Reader.cs │ └── WDC5Reader.cs ├── Storage.cs └── Writers │ ├── BaseWriter.cs │ ├── WDB2Writer.cs │ ├── WDB3Writer.cs │ ├── WDB4Writer.cs │ ├── WDB5Writer.cs │ ├── WDB6Writer.cs │ ├── WDBCWriter.cs │ ├── WDC1Writer.cs │ ├── WDC2Writer.cs │ ├── WDC3Writer.cs │ ├── WDC4Writer.cs │ └── WDC5Writer.cs ├── DBCD.Tests ├── DBCD.Tests.csproj ├── Providers │ └── WagoDBCProvider.cs ├── ReadingTest.cs ├── Utilities │ └── IO.cs └── WritingTest.cs ├── DBCD.sln ├── DBCD ├── DBCD.cs ├── DBCD.csproj ├── DBCDBuilder.cs ├── DBCDStorage.cs ├── Extensions │ └── DB2ReaderExtension.cs ├── Helpers │ └── FieldAccessor.cs └── Providers │ ├── FilesystemDBCProvider.cs │ ├── FilesystemDBDProvider.cs │ ├── GithubBDBDProvider.cs │ ├── GithubDBDProvider.cs │ ├── IDBCProvider.cs │ └── IDBDProvider.cs ├── Directory.Build.props ├── Directory.Build.targets ├── LICENSE ├── README.md └── nuget-icon.png /.github/workflows/nuget.yml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://json.schemastore.org/github-workflow.json 2 | # Based on Gerald Barre's blog post: https://www.meziantou.net/publishing-a-nuget-package-following-best-practices-using-github.htm 3 | 4 | name: publish 5 | on: 6 | workflow_dispatch: # Allow running the workflow manually from the GitHub UI 7 | release: 8 | types: 9 | - published # Run the workflow when a new GitHub release is published 10 | 11 | env: 12 | DOTNET_SKIP_FIRST_TIME_EXPERIENCE: 1 13 | DOTNET_NOLOGO: true 14 | NuGetDirectory: ${{ github.workspace}}/nuget 15 | 16 | defaults: 17 | run: 18 | shell: pwsh 19 | 20 | jobs: 21 | create_nuget: 22 | runs-on: ubuntu-latest 23 | steps: 24 | - uses: actions/checkout@v3 25 | with: 26 | fetch-depth: 0 # Get all history to allow automatic versioning using MinVer 27 | 28 | # Install the .NET SDK indicated in the global.json file 29 | - name: Setup .NET 30 | uses: actions/setup-dotnet@v4 31 | with: 32 | dotnet-version: 9.x.x 33 | # Create the NuGet package in the folder from the environment variable NuGetDirectory 34 | - run: dotnet pack --configuration Release --output ${{ env.NuGetDirectory }} /p:ContinuousIntegrationBuild=true 35 | 36 | # Publish the NuGet package as an artifact, so they can be used in the following jobs 37 | - uses: actions/upload-artifact@v4.4.0 38 | with: 39 | name: nuget 40 | if-no-files-found: error 41 | retention-days: 7 42 | path: ${{ env.NuGetDirectory }}/*.nupkg 43 | 44 | validate_nuget: 45 | runs-on: ubuntu-latest 46 | needs: [ 
create_nuget ] 47 | steps: 48 | # Install the .NET SDK indicated in the global.json file 49 | - name: Setup .NET 50 | uses: actions/setup-dotnet@v4 51 | with: 52 | dotnet-version: 9.x.x 53 | 54 | # Download the NuGet package created in the previous job 55 | - uses: actions/download-artifact@v4.1.7 56 | with: 57 | name: nuget 58 | path: ${{ env.NuGetDirectory }} 59 | 60 | - name: Install nuget validator 61 | run: dotnet tool update Meziantou.Framework.NuGetPackageValidation.Tool --global 62 | 63 | # Validate metadata and content of the NuGet package 64 | # https://www.nuget.org/packages/Meziantou.Framework.NuGetPackageValidation.Tool#readme-body-tab 65 | # If some rules are not applicable, you can disable them 66 | # using the --excluded-rules or --excluded-rule-ids option 67 | - name: Validate package 68 | run: meziantou.validate-nuget-package (Get-ChildItem "${{ env.NuGetDirectory }}/*.nupkg") --excluded-rules Symbols 69 | 70 | run_test: 71 | runs-on: ubuntu-latest 72 | steps: 73 | - uses: actions/checkout@v3 74 | - name: Setup .NET 75 | uses: actions/setup-dotnet@v4 76 | with: 77 | dotnet-version: 9.x.x 78 | - name: Run tests 79 | run: dotnet test --configuration Release 80 | 81 | deploy: 82 | # Publish only when creating a GitHub Release 83 | # https://docs.github.com/en/repositories/releasing-projects-on-github/managing-releases-in-a-repository 84 | # You can update this logic if you want to manage releases differently 85 | if: github.event_name == 'release' 86 | runs-on: ubuntu-latest 87 | needs: [ validate_nuget, run_test ] 88 | steps: 89 | # Download the NuGet package created in the previous job 90 | - uses: actions/download-artifact@v4.1.7 91 | with: 92 | name: nuget 93 | path: ${{ env.NuGetDirectory }} 94 | 95 | - name: Setup .NET Core 96 | uses: actions/setup-dotnet@v4 97 | with: 98 | dotnet-version: 9.x.x 99 | 100 | # Publish all NuGet packages to NuGet.org 101 | # Use --skip-duplicate to prevent errors if a package with the same version already exists. 102 | # If you retry a failed workflow, already published packages will be skipped without error. 103 | - name: Publish NuGet package 104 | run: | 105 | foreach($file in (Get-ChildItem "${{ env.NuGetDirectory }}" -Recurse -Include *.nupkg)) { 106 | dotnet nuget push $file --api-key "${{ secrets.NUGET_APIKEY }}" --source https://api.nuget.org/v3/index.json --skip-duplicate 107 | } 108 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | on: 3 | push: 4 | branches: [ "master" ] 5 | pull_request: 6 | branches: [ "master" ] 7 | 8 | jobs: 9 | tests: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v4 13 | - name: Setup .NET 14 | uses: actions/setup-dotnet@v4 15 | with: 16 | dotnet-version: 9.x.x 17 | - name: Restore dependencies 18 | run: dotnet restore 19 | - name: Build 20 | run: dotnet build --no-restore 21 | - name: Test 22 | run: dotnet test --no-build --verbosity normal 23 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ## Ignore Visual Studio temporary files, build results, and 2 | ## files generated by popular Visual Studio add-ons. 
3 | ## 4 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore 5 | 6 | # User-specific files 7 | *.rsuser 8 | *.suo 9 | *.user 10 | *.userosscache 11 | *.sln.docstates 12 | 13 | # User-specific files (MonoDevelop/Xamarin Studio) 14 | *.userprefs 15 | 16 | # Mono auto generated files 17 | mono_crash.* 18 | 19 | # Build results 20 | [Dd]ebug/ 21 | [Dd]ebugPublic/ 22 | [Rr]elease/ 23 | [Rr]eleases/ 24 | x64/ 25 | x86/ 26 | [Aa][Rr][Mm]/ 27 | [Aa][Rr][Mm]64/ 28 | bld/ 29 | [Bb]in/ 30 | [Oo]bj/ 31 | [Ll]og/ 32 | 33 | # Visual Studio 2015/2017 cache/options directory 34 | .vs/ 35 | # Uncomment if you have tasks that create the project's static files in wwwroot 36 | #wwwroot/ 37 | 38 | # Visual Studio 2017 auto generated files 39 | Generated\ Files/ 40 | 41 | # MSTest test Results 42 | [Tt]est[Rr]esult*/ 43 | [Bb]uild[Ll]og.* 44 | 45 | # NUNIT 46 | *.VisualState.xml 47 | TestResult.xml 48 | 49 | # Build Results of an ATL Project 50 | [Dd]ebugPS/ 51 | [Rr]eleasePS/ 52 | dlldata.c 53 | 54 | # Benchmark Results 55 | BenchmarkDotNet.Artifacts/ 56 | 57 | # .NET Core 58 | project.lock.json 59 | project.fragment.lock.json 60 | artifacts/ 61 | 62 | # StyleCop 63 | StyleCopReport.xml 64 | 65 | # Files built by Visual Studio 66 | *_i.c 67 | *_p.c 68 | *_h.h 69 | *.ilk 70 | *.meta 71 | *.obj 72 | *.iobj 73 | *.pch 74 | *.pdb 75 | *.ipdb 76 | *.pgc 77 | *.pgd 78 | *.rsp 79 | *.sbr 80 | *.tlb 81 | *.tli 82 | *.tlh 83 | *.tmp 84 | *.tmp_proj 85 | *_wpftmp.csproj 86 | *.log 87 | *.vspscc 88 | *.vssscc 89 | .builds 90 | *.pidb 91 | *.svclog 92 | *.scc 93 | 94 | # Chutzpah Test files 95 | _Chutzpah* 96 | 97 | # Visual C++ cache files 98 | ipch/ 99 | *.aps 100 | *.ncb 101 | *.opendb 102 | *.opensdf 103 | *.sdf 104 | *.cachefile 105 | *.VC.db 106 | *.VC.VC.opendb 107 | 108 | # Visual Studio profiler 109 | *.psess 110 | *.vsp 111 | *.vspx 112 | *.sap 113 | 114 | # Visual Studio Trace Files 115 | *.e2e 116 | 117 | # TFS 2012 Local Workspace 118 | $tf/ 119 | 120 | # Guidance Automation Toolkit 121 | *.gpState 122 | 123 | # ReSharper is a .NET coding add-in 124 | _ReSharper*/ 125 | *.[Rr]e[Ss]harper 126 | *.DotSettings.user 127 | 128 | # JustCode is a .NET coding add-in 129 | .JustCode 130 | 131 | # TeamCity is a build add-in 132 | _TeamCity* 133 | 134 | # DotCover is a Code Coverage Tool 135 | *.dotCover 136 | 137 | # AxoCover is a Code Coverage Tool 138 | .axoCover/* 139 | !.axoCover/settings.json 140 | 141 | # Visual Studio code coverage results 142 | *.coverage 143 | *.coveragexml 144 | 145 | # NCrunch 146 | _NCrunch_* 147 | .*crunch*.local.xml 148 | nCrunchTemp_* 149 | 150 | # MightyMoose 151 | *.mm.* 152 | AutoTest.Net/ 153 | 154 | # Web workbench (sass) 155 | .sass-cache/ 156 | 157 | # Installshield output folder 158 | [Ee]xpress/ 159 | 160 | # DocProject is a documentation generator add-in 161 | DocProject/buildhelp/ 162 | DocProject/Help/*.HxT 163 | DocProject/Help/*.HxC 164 | DocProject/Help/*.hhc 165 | DocProject/Help/*.hhk 166 | DocProject/Help/*.hhp 167 | DocProject/Help/Html2 168 | DocProject/Help/html 169 | 170 | # Click-Once directory 171 | publish/ 172 | 173 | # Publish Web Output 174 | *.[Pp]ublish.xml 175 | *.azurePubxml 176 | # Note: Comment the next line if you want to checkin your web deploy settings, 177 | # but database connection strings (with potential passwords) will be unencrypted 178 | *.pubxml 179 | *.publishproj 180 | 181 | # Microsoft Azure Web App publish settings. 
Comment the next line if you want to 182 | # checkin your Azure Web App publish settings, but sensitive information contained 183 | # in these scripts will be unencrypted 184 | PublishScripts/ 185 | 186 | # NuGet Packages 187 | *.nupkg 188 | # The packages folder can be ignored because of Package Restore 189 | **/[Pp]ackages/* 190 | # except build/, which is used as an MSBuild target. 191 | !**/[Pp]ackages/build/ 192 | # Uncomment if necessary however generally it will be regenerated when needed 193 | #!**/[Pp]ackages/repositories.config 194 | # NuGet v3's project.json files produces more ignorable files 195 | *.nuget.props 196 | *.nuget.targets 197 | 198 | # Microsoft Azure Build Output 199 | csx/ 200 | *.build.csdef 201 | 202 | # Microsoft Azure Emulator 203 | ecf/ 204 | rcf/ 205 | 206 | # Windows Store app package directories and files 207 | AppPackages/ 208 | BundleArtifacts/ 209 | Package.StoreAssociation.xml 210 | _pkginfo.txt 211 | *.appx 212 | *.appxbundle 213 | *.appxupload 214 | 215 | # Visual Studio cache files 216 | # files ending in .cache can be ignored 217 | *.[Cc]ache 218 | # but keep track of directories ending in .cache 219 | !?*.[Cc]ache/ 220 | 221 | # Others 222 | ClientBin/ 223 | ~$* 224 | *~ 225 | *.dbmdl 226 | *.dbproj.schemaview 227 | *.jfm 228 | *.pfx 229 | *.publishsettings 230 | orleans.codegen.cs 231 | 232 | # Including strong name files can present a security risk 233 | # (https://github.com/github/gitignore/pull/2483#issue-259490424) 234 | #*.snk 235 | 236 | # Since there are multiple workflows, uncomment next line to ignore bower_components 237 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) 238 | #bower_components/ 239 | 240 | # RIA/Silverlight projects 241 | Generated_Code/ 242 | 243 | # Backup & report files from converting an old project file 244 | # to a newer Visual Studio version. Backup files are not needed, 245 | # because we have git ;-) 246 | _UpgradeReport_Files/ 247 | Backup*/ 248 | UpgradeLog*.XML 249 | UpgradeLog*.htm 250 | ServiceFabricBackup/ 251 | *.rptproj.bak 252 | 253 | # SQL Server files 254 | *.mdf 255 | *.ldf 256 | *.ndf 257 | 258 | # Business Intelligence projects 259 | *.rdl.data 260 | *.bim.layout 261 | *.bim_*.settings 262 | *.rptproj.rsuser 263 | *- Backup*.rdl 264 | 265 | # Microsoft Fakes 266 | FakesAssemblies/ 267 | 268 | # GhostDoc plugin setting file 269 | *.GhostDoc.xml 270 | 271 | # Node.js Tools for Visual Studio 272 | .ntvs_analysis.dat 273 | node_modules/ 274 | 275 | # Visual Studio 6 build log 276 | *.plg 277 | 278 | # Visual Studio 6 workspace options file 279 | *.opt 280 | 281 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 
282 | *.vbw 283 | 284 | # Visual Studio LightSwitch build output 285 | **/*.HTMLClient/GeneratedArtifacts 286 | **/*.DesktopClient/GeneratedArtifacts 287 | **/*.DesktopClient/ModelManifest.xml 288 | **/*.Server/GeneratedArtifacts 289 | **/*.Server/ModelManifest.xml 290 | _Pvt_Extensions 291 | 292 | # Paket dependency manager 293 | .paket/paket.exe 294 | paket-files/ 295 | 296 | # FAKE - F# Make 297 | .fake/ 298 | 299 | # CodeRush personal settings 300 | .cr/personal 301 | 302 | # Python Tools for Visual Studio (PTVS) 303 | __pycache__/ 304 | *.pyc 305 | 306 | # Cake - Uncomment if you are using it 307 | # tools/** 308 | # !tools/packages.config 309 | 310 | # Tabs Studio 311 | *.tss 312 | 313 | # Telerik's JustMock configuration file 314 | *.jmconfig 315 | 316 | # BizTalk build output 317 | *.btp.cs 318 | *.btm.cs 319 | *.odx.cs 320 | *.xsd.cs 321 | 322 | # OpenCover UI analysis results 323 | OpenCover/ 324 | 325 | # Azure Stream Analytics local run output 326 | ASALocalRun/ 327 | 328 | # MSBuild Binary and Structured Log 329 | *.binlog 330 | 331 | # NVidia Nsight GPU debugger configuration file 332 | *.nvuser 333 | 334 | # MFractors (Xamarin productivity tool) working folder 335 | .mfractor/ 336 | 337 | # Local History for Visual Studio 338 | .localhistory/ 339 | 340 | # BeatPulse healthcheck temp database 341 | healthchecksdb 342 | 343 | # Backup folder for Package Reference Convert tool in Visual Studio 2017 344 | MigrationBackup/ 345 | 346 | # Visual Studio Code 347 | .vscode 348 | 349 | # Private Providers 350 | DBCD/Providers/MirrorDBCProvider.cs 351 | -------------------------------------------------------------------------------- /DBCD.Benchmark/Benchmarks/ReadingBenchmark.cs: -------------------------------------------------------------------------------- 1 | using BenchmarkDotNet.Attributes; 2 | using DBCD.Providers; 3 | 4 | namespace DBCD.Benchmark.Benchmarks 5 | { 6 | [MemoryDiagnoser] 7 | public class ReadingBenchmark 8 | { 9 | private static readonly FilesystemDBDProvider localDBDProvider = new FilesystemDBDProvider("D:\\Projects\\WoWDBDefs\\definitions"); 10 | private static readonly FilesystemDBCProvider localDBCProvider = new FilesystemDBCProvider("D:\\Projects\\DBCDStaging\\DBCD.Tests\\bin\\Debug\\net8.0\\DBCCache\\11.0.2.56044"); 11 | private readonly string[] allDB2s = Directory.GetFiles("D:\\Projects\\DBCDStaging\\DBCD.Tests\\bin\\Debug\\net8.0\\DBCCache\\11.0.2.56044", "*.db2", SearchOption.AllDirectories).Select(x => Path.GetFileNameWithoutExtension(x)).ToArray(); 12 | private readonly string build = "11.0.2.56044"; 13 | 14 | [Benchmark] 15 | public int TestReadingAllDB2s() 16 | { 17 | var inputDBCD = new DBCD(localDBCProvider, localDBDProvider); 18 | 19 | //var build = "3.3.5.12340"; // WDBC 20 | //var build = "6.0.1.18179"; // WDB2 21 | //var build = "7.0.1.20740"; // WDB3, only 1 DBD sadly 22 | //var build = "7.0.1.20810"; // WDB4, only 2 DBDs sadly 23 | //var build = "7.2.0.23436"; // WDB5, only Map.db2 24 | //var build = "7.3.5.25928"; // WDB6 25 | //var build = "7.3.5.25928"; // WDC1 26 | //var build = "8.0.1.26231"; // WDC2 27 | //var build = "9.1.0.39653"; // WDC3 28 | //var build = "10.1.0.48480"; // WDC4 29 | var build = "11.0.2.56044"; // WDC5 30 | 31 | foreach (var tableName in allDB2s) 32 | { 33 | if (tableName == "UnitTestSparse") 34 | continue; 35 | 36 | if (!localDBDProvider.ContainsBuild(tableName, build)) 37 | continue; 38 | 39 | var storage = inputDBCD.Load(tableName, build); 40 | } 41 | 42 | return allDB2s.Count(); 43 | } 44 | } 45 | } 46 | 
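// The benchmark above drives the public DBCD API end to end. Below is a minimal, self-contained sketch of that
// API using only the members exercised in this file (the DBCD constructor, FilesystemDBCProvider,
// FilesystemDBDProvider and Load); the paths and the "ReadingExample" class are placeholders, and treating the
// returned storage as a dictionary of rows is an assumption about DBCDStorage that is not shown in this file.
internal static class ReadingExample
{
    public static int LoadSingleTable()
    {
        var dbdProvider = new DBCD.Providers.FilesystemDBDProvider("D:\\Projects\\WoWDBDefs\\definitions");
        var dbcProvider = new DBCD.Providers.FilesystemDBCProvider("DBCCache\\11.0.2.56044");
        var dbcd = new DBCD.DBCD(dbcProvider, dbdProvider);

        // Load a single table for a specific build instead of iterating every DB2 like the benchmark does.
        var storage = dbcd.Load("Map", "11.0.2.56044");
        return storage.Count; // assumed dictionary-like row count
    }
}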
-------------------------------------------------------------------------------- /DBCD.Benchmark/Benchmarks/StringTableBench.cs: -------------------------------------------------------------------------------- 1 | using BenchmarkDotNet.Attributes; 2 | using System.Text; 3 | 4 | namespace DBCD.Benchmark.Benchmarks 5 | { 6 | [MemoryDiagnoser] 7 | public class StringTableBench 8 | { 9 | private static byte[] InputBytes = File.ReadAllBytes("E:\\stringtable.bytes"); 10 | private static int StringTableSize = (int)InputBytes.Length; 11 | 12 | [Benchmark] 13 | public void OldMethod() 14 | { 15 | using (var stream = new MemoryStream(InputBytes)) 16 | using (var reader = new BinaryReader(stream)) 17 | { 18 | var StringTable = new Dictionary<long, string>(StringTableSize / 0x20); 19 | for (int i = 0; i < StringTableSize;) 20 | { 21 | long oldPos = reader.BaseStream.Position; 22 | StringTable[i] = reader.ReadCString(); 23 | i += (int)(reader.BaseStream.Position - oldPos); 24 | } 25 | } 26 | } 27 | 28 | [Benchmark] 29 | public void NewMethod() 30 | { 31 | using (var stream = new MemoryStream(InputBytes)) 32 | using (var reader = new BinaryReader(stream)) 33 | { 34 | var StringTable = reader.ReadStringTable(StringTableSize); 35 | } 36 | } 37 | } 38 | 39 | public static class BinaryReaderExtensions 40 | { 41 | public static string ReadCString(this BinaryReader reader) 42 | { 43 | var bytes = new List<byte>(); 44 | byte b; 45 | while ((b = reader.ReadByte()) != 0) 46 | bytes.Add(b); 47 | 48 | return Encoding.UTF8.GetString(bytes.ToArray()); 49 | } 50 | 51 | public static Dictionary<long, string> ReadStringTable(this BinaryReader reader, int stringTableSize, int baseOffset = 0, bool usePos = false) 52 | { 53 | var StringTable = new Dictionary<long, string>(stringTableSize / 0x20); 54 | 55 | if (stringTableSize == 0) 56 | return StringTable; 57 | 58 | var curOfs = 0; 59 | var decoded = Encoding.UTF8.GetString(reader.ReadBytes(stringTableSize)); 60 | foreach (var str in decoded.Split('\0')) 61 | { 62 | if (curOfs == stringTableSize) 63 | break; 64 | 65 | if (usePos) 66 | StringTable[(reader.BaseStream.Position - stringTableSize) + curOfs] = str; 67 | else 68 | StringTable[baseOffset + curOfs] = str; 69 | 70 | curOfs += Encoding.UTF8.GetByteCount(str) + 1; 71 | } 72 | 73 | return StringTable; 74 | } 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /DBCD.Benchmark/Benchmarks/WritingBenchmark.cs: -------------------------------------------------------------------------------- 1 | using BenchmarkDotNet.Attributes; 2 | using DBCD.Benchmark.Utilities; 3 | using DBCD.Providers; 4 | 5 | namespace DBCD.Benchmark.Benchmarks 6 | { 7 | [MemoryDiagnoser] 8 | public class WritingBenchmark 9 | { 10 | public static GithubDBDProvider DBDProvider { get; } = new GithubDBDProvider(true); 11 | public static string InputPath { get; } = $"{Directory.GetCurrentDirectory()}\\dbc"; 12 | public static DBCD InputDBCD { get; } = new DBCD(new FilesystemDBCProvider(InputPath), DBDProvider); 13 | public static DBCD SavedDBCD { get; } = new DBCD(new FilesystemDBCProvider("tmp"), DBDProvider); 14 | 15 | public static string Build { get; } = "9.1.0.39653"; 16 | 17 | [Benchmark] 18 | public void TestWritingAllDB2s() 19 | { 20 | string[] allDB2s = Directory.GetFiles(InputPath, "*.db2", SearchOption.TopDirectoryOnly); 21 | 22 | if (Directory.Exists("tmp")) 23 | Directory.Delete("tmp", true); 24 | Directory.CreateDirectory("tmp"); 25 | 26 | foreach (var db2File in allDB2s) 27 | { 28 | if (Utilities.IO.TryGetExactPath(db2File, out string
exactPath)) 29 | { 30 | var tableName = Path.GetFileNameWithoutExtension(exactPath); 31 | 32 | var originalStorage = InputDBCD.Load(tableName, Build); 33 | originalStorage.Save($"tmp/{tableName}.db2"); 34 | } 35 | } 36 | 37 | Directory.Delete("tmp", true); 38 | } 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /DBCD.Benchmark/DBCD.Benchmark.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Exe 5 | net9.0 6 | enable 7 | enable 8 | true 9 | false 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | -------------------------------------------------------------------------------- /DBCD.Benchmark/Program.cs: -------------------------------------------------------------------------------- 1 | // See https://aka.ms/new-console-template for more information 2 | using BenchmarkDotNet.Running; 3 | using DBCD.Benchmark.Benchmarks; 4 | 5 | BenchmarkRunner.Run(); -------------------------------------------------------------------------------- /DBCD.Benchmark/Utilities/IO.cs: -------------------------------------------------------------------------------- 1 | using System.Collections.Generic; 2 | using System.Globalization; 3 | using System.IO; 4 | using System.Linq; 5 | 6 | namespace DBCD.Benchmark.Utilities 7 | { 8 | internal class IO 9 | { 10 | public static bool TryGetExactPath(string path, out string exactPath) 11 | { 12 | bool result = false; 13 | exactPath = null; 14 | 15 | // DirectoryInfo accepts either a file path or a directory path, and most of its properties work for either. 16 | // However, its Exists property only works for a directory path. 17 | DirectoryInfo directory = new DirectoryInfo(path); 18 | if (File.Exists(path) || directory.Exists) 19 | { 20 | List parts = new List(); 21 | 22 | DirectoryInfo parentDirectory = directory.Parent; 23 | while (parentDirectory != null) 24 | { 25 | FileSystemInfo entry = parentDirectory.EnumerateFileSystemInfos(directory.Name).First(); 26 | parts.Add(entry.Name); 27 | 28 | directory = parentDirectory; 29 | parentDirectory = directory.Parent; 30 | } 31 | 32 | // Handle the root part (i.e., drive letter or UNC \\server\share). 
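// A root containing a drive letter (e.g. "c:\") only needs its casing normalized to upper case, whereas a UNC
// root ("\\server\share") is rebuilt segment by segment with title casing, because the parent-walking loop above
// can only recover the exact casing of entries below the root, never of the root itself.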
33 | string root = directory.FullName; 34 | if (root.Contains(':')) 35 | { 36 | root = root.ToUpper(); 37 | } 38 | else 39 | { 40 | string[] rootParts = root.Split('\\'); 41 | root = string.Join("\\", rootParts.Select(part => CultureInfo.CurrentCulture.TextInfo.ToTitleCase(part))); 42 | } 43 | 44 | parts.Add(root); 45 | parts.Reverse(); 46 | exactPath = Path.Combine(parts.ToArray()); 47 | result = true; 48 | } 49 | 50 | return result; 51 | } 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /DBCD.IO/Attributes/CardinalityAttribute.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | 3 | namespace DBCD.IO.Attributes 4 | { 5 | public class CardinalityAttribute : Attribute 6 | { 7 | public readonly int Count; 8 | 9 | public CardinalityAttribute(int count) => Count = count; 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /DBCD.IO/Attributes/ForeignAttribute.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | 3 | namespace DBCD.IO.Attributes 4 | { 5 | public class ForeignAttribute : Attribute 6 | { 7 | public readonly bool IsForeign; 8 | 9 | public ForeignAttribute(bool isForeign) => IsForeign = isForeign; 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /DBCD.IO/Attributes/ForeignReferenceAttribute.cs: -------------------------------------------------------------------------------- 1 |  2 | using System; 3 | using System.Collections.Generic; 4 | using System.Text; 5 | 6 | namespace DBCD.IO.Attributes 7 | { 8 | public class ForeignReferenceAttribute : Attribute 9 | { 10 | public readonly string ForeignTable; 11 | public readonly string ForeignColumn; 12 | 13 | public ForeignReferenceAttribute(string foreignTable, string foreignColumn) => (ForeignTable, ForeignColumn) = (foreignTable, foreignColumn); 14 | 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /DBCD.IO/Attributes/IndexAttribute.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | 3 | namespace DBCD.IO.Attributes 4 | { 5 | public class IndexAttribute : Attribute 6 | { 7 | public readonly bool NonInline; 8 | 9 | public IndexAttribute(bool noninline) => NonInline = noninline; 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /DBCD.IO/Attributes/LocaleAttribute.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | 3 | namespace DBCD.IO.Attributes 4 | { 5 | public class LocaleAttribute : Attribute 6 | { 7 | /// 8 | /// See https://wowdev.wiki/Localization 9 | /// 10 | public readonly int Locale; 11 | /// 12 | /// Number of available locales 13 | /// 14 | public readonly int LocaleCount; 15 | 16 | public LocaleAttribute(int locale, int localecount = 16) 17 | { 18 | Locale = locale; 19 | LocaleCount = localecount; 20 | } 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /DBCD.IO/Attributes/RelationAttribute.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | 3 | namespace DBCD.IO.Attributes 4 | { 5 | public class RelationAttribute : Attribute 6 | { 7 | public readonly Type FieldType; 8 | public readonly bool IsNonInline; 9 | public RelationAttribute(Type fieldType, bool 
isNonInline) => (this.FieldType, this.IsNonInline) = (fieldType, isNonInline); 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /DBCD.IO/Attributes/SizeInBitsAttribute.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Collections.Generic; 3 | using System.Text; 4 | 5 | namespace DBCD.IO.Attributes 6 | { 7 | public class SizeInBitsAttribute: Attribute 8 | { 9 | public readonly ushort Size; 10 | 11 | public SizeInBitsAttribute(ushort size) => Size = size; 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /DBCD.IO/Common/BitReader.cs: -------------------------------------------------------------------------------- 1 | using System.Collections.Generic; 2 | using System.Runtime.CompilerServices; 3 | using System.Text; 4 | 5 | namespace DBCD.IO.Common 6 | { 7 | class BitReader 8 | { 9 | private readonly byte[] Array; 10 | 11 | public int Position { get; set; } 12 | public int Offset { get; set; } 13 | 14 | public BitReader(byte[] data) => Array = data; 15 | 16 | public BitReader(byte[] data, int offset) 17 | { 18 | Array = data; 19 | Offset = offset; 20 | } 21 | 22 | [MethodImpl(MethodImplOptions.AggressiveInlining)] 23 | public uint ReadUInt32(int numBits) 24 | { 25 | uint result = Unsafe.As<byte, uint>(ref Array[Offset + (Position >> 3)]) << (32 - numBits - (Position & 7)) >> (32 - numBits); 26 | Position += numBits; 27 | return result; 28 | } 29 | 30 | [MethodImpl(MethodImplOptions.AggressiveInlining)] 31 | public ulong ReadUInt64(int numBits) 32 | { 33 | ulong result = Unsafe.As<byte, ulong>(ref Array[Offset + (Position >> 3)]) << (64 - numBits - (Position & 7)) >> (64 - numBits); 34 | Position += numBits; 35 | return result; 36 | } 37 | 38 | public Value32 ReadValue32(int numBits) 39 | { 40 | unsafe 41 | { 42 | ulong result = ReadUInt32(numBits); 43 | return *(Value32*)&result; 44 | } 45 | } 46 | 47 | public Value64 ReadValue64(int numBits) 48 | { 49 | unsafe 50 | { 51 | ulong result = ReadUInt64(numBits); 52 | return *(Value64*)&result; 53 | } 54 | } 55 | 56 | public Value64 ReadValue64Signed(int numBits) 57 | { 58 | unsafe 59 | { 60 | ulong result = ReadUInt64(numBits); 61 | ulong signedShift = 1UL << (numBits - 1); 62 | result = (signedShift ^ result) - signedShift; 63 | return *(Value64*)&result; 64 | } 65 | } 66 | 67 | public string ReadCString() 68 | { 69 | uint num; 70 | 71 | List<byte> bytes = new List<byte>(0x20); 72 | while ((num = ReadUInt32(8)) != 0) 73 | bytes.Add((byte)num); 74 | 75 | return Encoding.UTF8.GetString(bytes.ToArray()); 76 | } 77 | 78 | public override int GetHashCode() 79 | { 80 | unchecked 81 | { 82 | int hash = 0; 83 | for (int i = 0; i < Array.Length; i++) 84 | { 85 | hash += Array[i]; 86 | hash += hash << 10; 87 | hash ^= hash >> 6; 88 | 89 | } 90 | 91 | hash += hash << 3; 92 | hash ^= hash >> 11; 93 | hash += hash << 15; 94 | return hash; 95 | } 96 | } 97 | 98 | public BitReader Clone() => new BitReader(Array); 99 | } 100 | } 101 | -------------------------------------------------------------------------------- /DBCD.IO/Common/BitWriter.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Buffers; 3 | using System.IO; 4 | using System.Runtime.CompilerServices; 5 | using System.Text; 6 | 7 | namespace DBCD.IO.Common 8 | { 9 | class BitWriter : IEquatable<BitWriter>, IDisposable 10 | { 11 | private static readonly ArrayPool<byte> SharedPool = ArrayPool<byte>.Create(); 12 | 13 | public
int TotalBytesWrittenOut { get; private set; } 14 | 15 | private byte AccumulatedBitsCount; 16 | private byte[] Buffer; 17 | 18 | public BitWriter(int capacity) => Buffer = SharedPool.Rent(capacity); 19 | 20 | public byte this[int i] => Buffer[i]; 21 | 22 | [MethodImpl(MethodImplOptions.AggressiveInlining)] 23 | public void WriteAligned<T>(T value) where T : struct 24 | { 25 | EnsureSize(); 26 | Unsafe.WriteUnaligned(ref Buffer[TotalBytesWrittenOut], value); 27 | TotalBytesWrittenOut += Unsafe.SizeOf<T>(); 28 | } 29 | 30 | [MethodImpl(MethodImplOptions.AggressiveInlining)] 31 | public void WriteCStringAligned(string value) 32 | { 33 | byte[] data = Encoding.UTF8.GetBytes(value); 34 | Array.Resize(ref data, data.Length + 1); 35 | 36 | EnsureSize(data.Length); 37 | Unsafe.CopyBlockUnaligned(ref Buffer[TotalBytesWrittenOut], ref data[0], (uint)data.Length); 38 | 39 | TotalBytesWrittenOut += data.Length; 40 | } 41 | 42 | [MethodImpl(MethodImplOptions.AggressiveInlining)] 43 | public void Write<T>(T value, int nbits) where T : struct 44 | { 45 | Span<byte> pool = stackalloc byte[0x10]; 46 | if (AccumulatedBitsCount == 0 && (nbits & 7) == 0) 47 | { 48 | EnsureSize(); 49 | Unsafe.WriteUnaligned(ref Buffer[TotalBytesWrittenOut], value); 50 | TotalBytesWrittenOut += nbits / 8; 51 | } 52 | else 53 | { 54 | Unsafe.WriteUnaligned(ref pool[0], value); 55 | for (int i = 0; nbits > 0; i++) 56 | { 57 | WriteBits(Math.Min(nbits, 8), pool[i]); 58 | nbits -= 8; 59 | } 60 | } 61 | } 62 | 63 | [MethodImpl(MethodImplOptions.AggressiveInlining)] 64 | public void Write<T>(T value, int nbits, int offset) where T : struct 65 | { 66 | Span<byte> pool = stackalloc byte[0x10]; 67 | Unsafe.WriteUnaligned(ref pool[0], value); 68 | 69 | int byteOffset = offset >> 3; 70 | int lowLen = offset & 7; 71 | int highLen = 8 - lowLen; 72 | 73 | int i = 0; 74 | while ((nbits -= 8) >= 0) 75 | { 76 | // write last part of this byte 77 | Buffer[byteOffset] = (byte)((Buffer[byteOffset] & (0xFF >> highLen)) | (pool[i] << lowLen)); 78 | 79 | // write first part of next byte 80 | byteOffset++; 81 | Buffer[byteOffset] = (byte)((Buffer[byteOffset] & (0xFF << lowLen)) | (pool[i] >> highLen)); 82 | i++; 83 | } 84 | 85 | // write final bits 86 | if ((nbits &= 7) > 0) 87 | { 88 | lowLen = nbits; 89 | highLen = 8 - nbits; 90 | 91 | Buffer[byteOffset] = (byte)((Buffer[byteOffset] & (0xFF >> highLen)) | (pool[i] << lowLen)); 92 | } 93 | } 94 | 95 | [MethodImpl(MethodImplOptions.AggressiveInlining)] 96 | public void WriteCString(string value) 97 | { 98 | // Note: cstrings are always aligned to 8 bytes 99 | if (AccumulatedBitsCount == 0) 100 | { 101 | WriteCStringAligned(value); 102 | } 103 | else 104 | { 105 | byte[] data = Encoding.UTF8.GetBytes(value); 106 | for (int i = 0; i < data.Length; i++) 107 | WriteBits(8, data[i]); 108 | 109 | WriteBits(8, 0); 110 | } 111 | } 112 | 113 | [MethodImpl(MethodImplOptions.AggressiveInlining)] 114 | private void WriteBits(int bitCount, uint value) 115 | { 116 | EnsureSize(); 117 | 118 | for (int i = 0; i < bitCount; i++) 119 | { 120 | Buffer[TotalBytesWrittenOut] |= (byte)(((value >> i) & 0x1) << AccumulatedBitsCount); 121 | AccumulatedBitsCount++; 122 | 123 | if (AccumulatedBitsCount > 7) 124 | { 125 | TotalBytesWrittenOut++; 126 | AccumulatedBitsCount = 0; 127 | } 128 | } 129 | } 130 | 131 | [MethodImpl(MethodImplOptions.AggressiveInlining)] 132 | private void EnsureSize(int size = 8) 133 | { 134 | if (TotalBytesWrittenOut + size >= Buffer.Length) 135 | { 136 | byte[] rent = SharedPool.Rent(Buffer.Length + size); 137 | 138 |
Unsafe.CopyBlockUnaligned(ref rent[0], ref Buffer[0], (uint)rent.Length); 139 | 140 | SharedPool.Return(Buffer, true); 141 | 142 | Buffer = rent; 143 | } 144 | } 145 | 146 | public void Resize(int size) 147 | { 148 | if (TotalBytesWrittenOut < size) 149 | { 150 | EnsureSize(size - TotalBytesWrittenOut); 151 | TotalBytesWrittenOut = size; 152 | } 153 | } 154 | 155 | public void ResizeToMultiple(int divisor) 156 | { 157 | int remainder = TotalBytesWrittenOut % divisor; 158 | if (remainder != 0) 159 | { 160 | EnsureSize(divisor); 161 | TotalBytesWrittenOut += divisor - remainder; 162 | } 163 | } 164 | 165 | public void CopyTo(Stream stream) 166 | { 167 | stream.Write(Buffer, 0, TotalBytesWrittenOut); 168 | } 169 | 170 | public bool Equals(BitWriter other) 171 | { 172 | if (TotalBytesWrittenOut != other.TotalBytesWrittenOut) 173 | return false; 174 | if (ReferenceEquals(this, other)) 175 | return true; 176 | 177 | for (int i = 0; i < TotalBytesWrittenOut; i++) 178 | if (this[i] != other[i]) 179 | return false; 180 | 181 | return true; 182 | } 183 | 184 | public override int GetHashCode() 185 | { 186 | unchecked 187 | { 188 | const int p = 16777619; 189 | int hash = (int)2166136261; 190 | 191 | for (int i = 0; i < TotalBytesWrittenOut; i++) 192 | hash = (hash ^ Buffer[i]) * p; 193 | 194 | hash += hash << 13; 195 | hash ^= hash >> 7; 196 | hash += hash << 3; 197 | hash ^= hash >> 17; 198 | hash += hash << 5; 199 | return hash; 200 | } 201 | } 202 | 203 | public void Dispose() => SharedPool.Return(Buffer); 204 | } 205 | } -------------------------------------------------------------------------------- /DBCD.IO/Common/DBStructs.cs: -------------------------------------------------------------------------------- 1 | using System.Collections.Generic; 2 | using System.Runtime.CompilerServices; 3 | using System.Runtime.InteropServices; 4 | 5 | #pragma warning disable CS0649 6 | #pragma warning disable IDE0044 7 | 8 | namespace DBCD.IO.Common 9 | { 10 | public interface IEncryptableDatabaseSection 11 | { 12 | ulong TactKeyLookup { get; } 13 | int NumRecords { get; } 14 | } 15 | 16 | public interface IEncryptionSupportingReader 17 | { 18 | List GetEncryptedSections(); 19 | Dictionary GetEncryptedIDs(); 20 | } 21 | 22 | struct FieldMetaData 23 | { 24 | public short Bits; 25 | public short Offset; 26 | } 27 | 28 | [StructLayout(LayoutKind.Explicit)] 29 | struct ColumnMetaData 30 | { 31 | [FieldOffset(0)] 32 | public ushort RecordOffset; 33 | [FieldOffset(2)] 34 | public ushort Size; 35 | [FieldOffset(4)] 36 | public uint AdditionalDataSize; 37 | [FieldOffset(8)] 38 | public CompressionType CompressionType; 39 | [FieldOffset(12)] 40 | public ColumnCompressionData_Immediate Immediate; 41 | [FieldOffset(12)] 42 | public ColumnCompressionData_Pallet Pallet; 43 | [FieldOffset(12)] 44 | public ColumnCompressionData_Common Common; 45 | } 46 | 47 | struct ColumnCompressionData_Immediate 48 | { 49 | public int BitOffset; 50 | public int BitWidth; 51 | public int Flags; // 0x1 signed 52 | } 53 | 54 | struct ColumnCompressionData_Pallet 55 | { 56 | public int BitOffset; 57 | public int BitWidth; 58 | public int Cardinality; 59 | } 60 | 61 | struct ColumnCompressionData_Common 62 | { 63 | public Value32 DefaultValue; 64 | public int B; 65 | public int C; 66 | } 67 | 68 | struct Value32 69 | { 70 | unsafe fixed byte Value[4]; 71 | 72 | [MethodImpl(MethodImplOptions.AggressiveInlining)] 73 | public T GetValue() where T : struct 74 | { 75 | unsafe 76 | { 77 | fixed (byte* ptr = Value) 78 | return 
Unsafe.ReadUnaligned(ptr); 79 | } 80 | } 81 | 82 | [MethodImpl(MethodImplOptions.AggressiveInlining)] 83 | public static unsafe Value32 Create(T obj) where T : unmanaged 84 | { 85 | return *(Value32*)&obj; 86 | } 87 | 88 | [MethodImpl(MethodImplOptions.AggressiveInlining)] 89 | public static unsafe Value32 Create(object obj) 90 | { 91 | if (obj is byte b) 92 | return *(Value32*)&b; 93 | else if (obj is sbyte sb) 94 | return *(Value32*)&sb; 95 | else if (obj is short s) 96 | return *(Value32*)&s; 97 | else if (obj is ushort us) 98 | return *(Value32*)&us; 99 | else if (obj is int i) 100 | return *(Value32*)&i; 101 | else if (obj is uint ui) 102 | return *(Value32*)&ui; 103 | else if (obj is long l) 104 | return *(Value32*)&l; 105 | else if (obj is ulong ul) 106 | return *(Value32*)&ul; 107 | else if (obj is float f) 108 | return *(Value32*)&f; 109 | else 110 | throw new System.Exception("Invalid type"); 111 | } 112 | } 113 | 114 | struct Value64 115 | { 116 | unsafe fixed byte Value[8]; 117 | 118 | [MethodImpl(MethodImplOptions.AggressiveInlining)] 119 | public T GetValue() where T : struct 120 | { 121 | unsafe 122 | { 123 | fixed (byte* ptr = Value) 124 | return Unsafe.ReadUnaligned(ptr); 125 | } 126 | } 127 | } 128 | 129 | enum CompressionType 130 | { 131 | None = 0, 132 | Immediate = 1, 133 | Common = 2, 134 | Pallet = 3, 135 | PalletArray = 4, 136 | SignedImmediate = 5 137 | } 138 | 139 | struct ReferenceEntry 140 | { 141 | public int Id; 142 | public int Index; 143 | } 144 | 145 | class ReferenceData 146 | { 147 | public int NumRecords { get; set; } 148 | public int MinId { get; set; } 149 | public int MaxId { get; set; } 150 | public Dictionary Entries { get; set; } = new Dictionary(); 151 | } 152 | 153 | [StructLayout(LayoutKind.Sequential, Pack = 2)] 154 | struct SparseEntry 155 | { 156 | public uint Offset; 157 | public ushort Size; 158 | } 159 | 160 | [StructLayout(LayoutKind.Sequential, Pack = 2)] 161 | struct SectionHeader : IEncryptableDatabaseSection 162 | { 163 | public ulong TactKeyLookup; 164 | public int FileOffset; 165 | public int NumRecords; 166 | public int StringTableSize; 167 | public int CopyTableSize; 168 | public int SparseTableOffset; // CatalogDataOffset, absolute value, {uint offset, ushort size}[MaxId - MinId + 1] 169 | public int IndexDataSize; // int indexData[IndexDataSize / 4] 170 | public int ParentLookupDataSize; // uint NumRecords, uint minId, uint maxId, {uint id, uint index}[NumRecords], questionable usefulness... 171 | 172 | ulong IEncryptableDatabaseSection.TactKeyLookup => TactKeyLookup; 173 | int IEncryptableDatabaseSection.NumRecords => NumRecords; 174 | } 175 | 176 | [StructLayout(LayoutKind.Sequential, Pack = 2)] 177 | struct SectionHeaderWDC3 : IEncryptableDatabaseSection 178 | { 179 | public ulong TactKeyLookup; 180 | public int FileOffset; 181 | public int NumRecords; 182 | public int StringTableSize; 183 | public int OffsetRecordsEndOffset; // CatalogDataOffset, absolute value, {uint offset, ushort size}[MaxId - MinId + 1] 184 | public int IndexDataSize; // int indexData[IndexDataSize / 4] 185 | public int ParentLookupDataSize; // uint NumRecords, uint minId, uint maxId, {uint id, uint index}[NumRecords], questionable usefulness... 
186 | public int OffsetMapIDCount; 187 | public int CopyTableCount; 188 | 189 | ulong IEncryptableDatabaseSection.TactKeyLookup => TactKeyLookup; 190 | int IEncryptableDatabaseSection.NumRecords => NumRecords; 191 | } 192 | 193 | [StructLayout(LayoutKind.Sequential, Pack = 2)] 194 | struct SectionHeaderWDC4 : IEncryptableDatabaseSection 195 | { 196 | public ulong TactKeyLookup; 197 | public int FileOffset; 198 | public int NumRecords; 199 | public int StringTableSize; 200 | public int OffsetRecordsEndOffset; // CatalogDataOffset, absolute value, {uint offset, ushort size}[MaxId - MinId + 1] 201 | public int IndexDataSize; // int indexData[IndexDataSize / 4] 202 | public int ParentLookupDataSize; // uint NumRecords, uint minId, uint maxId, {uint id, uint index}[NumRecords], questionable usefulness... 203 | public int OffsetMapIDCount; 204 | public int CopyTableCount; 205 | 206 | ulong IEncryptableDatabaseSection.TactKeyLookup => this.TactKeyLookup; 207 | int IEncryptableDatabaseSection.NumRecords => this.NumRecords; 208 | } 209 | 210 | [StructLayout(LayoutKind.Sequential, Pack = 2)] 211 | struct SectionHeaderWDC5 : IEncryptableDatabaseSection 212 | { 213 | public ulong TactKeyLookup; 214 | public int FileOffset; 215 | public int NumRecords; 216 | public int StringTableSize; 217 | public int OffsetRecordsEndOffset; // CatalogDataOffset, absolute value, {uint offset, ushort size}[MaxId - MinId + 1] 218 | public int IndexDataSize; // int indexData[IndexDataSize / 4] 219 | public int ParentLookupDataSize; // uint NumRecords, uint minId, uint maxId, {uint id, uint index}[NumRecords], questionable usefulness... 220 | public int OffsetMapIDCount; 221 | public int CopyTableCount; 222 | 223 | ulong IEncryptableDatabaseSection.TactKeyLookup => this.TactKeyLookup; 224 | int IEncryptableDatabaseSection.NumRecords => this.NumRecords; 225 | } 226 | } 227 | -------------------------------------------------------------------------------- /DBCD.IO/Common/HTFXStructs.cs: -------------------------------------------------------------------------------- 1 | #pragma warning disable CS0169 2 | 3 | using System.Runtime.InteropServices; 4 | 5 | namespace DBCD.IO.Common 6 | { 7 | public interface IHotfixEntry 8 | { 9 | int PushId { get; } 10 | int DataSize { get; } 11 | uint TableHash { get; } 12 | int RecordId { get; } 13 | bool IsValid { get; } 14 | } 15 | 16 | [StructLayout(LayoutKind.Sequential, Pack = 1)] 17 | struct HotfixEntryV1 : IHotfixEntry 18 | { 19 | public int PushId { get; } 20 | public int DataSize { get; } 21 | public uint TableHash { get; } 22 | public int RecordId { get; } 23 | public bool IsValid { get; } 24 | 25 | private readonly byte pad1, pad2, pad3; 26 | } 27 | 28 | [StructLayout(LayoutKind.Sequential, Pack = 1)] 29 | struct HotfixEntryV2 : IHotfixEntry 30 | { 31 | public uint Version { get; } 32 | public int PushId { get; } 33 | public int DataSize { get; } 34 | public uint TableHash { get; } 35 | public int RecordId { get; } 36 | public bool IsValid { get; } 37 | 38 | private readonly byte pad1, pad2, pad3; 39 | } 40 | 41 | [StructLayout(LayoutKind.Sequential, Pack = 1)] 42 | struct HotfixEntryV7 : IHotfixEntry 43 | { 44 | public int PushId { get; } 45 | public uint TableHash { get; } 46 | public int RecordId { get; } 47 | public int DataSize { get; } 48 | public bool IsValid => op == 1; 49 | 50 | private readonly byte op, pad1, pad2, pad3; 51 | } 52 | 53 | [StructLayout(LayoutKind.Sequential, Pack = 1)] 54 | struct HotfixEntryV8 : IHotfixEntry 55 | { 56 | public int PushId { get; } 57 | 
public int UniqueId { get; } 58 | public uint TableHash { get; } 59 | public int RecordId { get; } 60 | public int DataSize { get; } 61 | public bool IsValid => op == 1; 62 | 63 | private readonly byte op, pad1, pad2, pad3; 64 | } 65 | 66 | [StructLayout(LayoutKind.Sequential, Pack = 1)] 67 | struct HotfixEntryV9 : IHotfixEntry 68 | { 69 | public int RegionId { get; } 70 | public int PushId { get; } 71 | public int UniqueId { get; } 72 | public uint TableHash { get; } 73 | public int RecordId { get; } 74 | public int DataSize { get; } 75 | public bool IsValid => op == 1; 76 | 77 | private readonly byte op, pad1, pad2, pad3; 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /DBCD.IO/Common/IDBRow.cs: -------------------------------------------------------------------------------- 1 | namespace DBCD.IO.Common 2 | { 3 | interface IDBRow 4 | { 5 | int Id { get; set; } 6 | BitReader Data { get; set; } 7 | void GetFields(FieldCache[] fields, T entry); 8 | IDBRow Clone(); 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /DBCD.IO/Common/IDBRowSerializer.cs: -------------------------------------------------------------------------------- 1 | using System.Collections.Generic; 2 | 3 | namespace DBCD.IO.Common 4 | { 5 | interface IDBRowSerializer where T : class 6 | { 7 | IDictionary Records { get; } 8 | 9 | void Serialize(IDictionary rows); 10 | 11 | void Serialize(int id, T row); 12 | 13 | void GetCopyRows(); 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /DBCD.IO/Common/Value32Comparer.cs: -------------------------------------------------------------------------------- 1 | using System.Collections.Generic; 2 | 3 | namespace DBCD.IO.Common 4 | { 5 | class Value32Comparer : IEqualityComparer 6 | { 7 | public bool Equals(Value32[] x, Value32[] y) 8 | { 9 | if (ReferenceEquals(x, y)) 10 | return true; 11 | if (x.Length != y.Length) 12 | return false; 13 | 14 | for (int i = 0; i < x.Length; i++) 15 | if (x[i].GetValue() != y[i].GetValue()) 16 | return false; 17 | 18 | return true; 19 | } 20 | 21 | public int GetHashCode(Value32[] obj) 22 | { 23 | unchecked 24 | { 25 | int s = 314, t = 159, hashCode = 0; 26 | for (int i = 0; i < obj.Length; i++) 27 | { 28 | hashCode = hashCode * s + obj[i].GetValue(); 29 | s *= t; 30 | } 31 | return hashCode; 32 | } 33 | } 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /DBCD.IO/DB2Flags.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | 3 | namespace DBCD.IO 4 | { 5 | [Flags] 6 | public enum DB2Flags 7 | { 8 | None = 0x0, 9 | Sparse = 0x1, 10 | SecondaryKey = 0x2, 11 | Index = 0x4, 12 | Unknown1 = 0x8, // modern client explicitly throws an exception 13 | BitPacked = 0x10 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /DBCD.IO/DBCD.IO.csproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | netstandard2.0;net8.0;net9.0 5 | 6 | 7 | 8 | true 9 | 10 | 11 | 12 | true 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /DBCD.IO/DBParser.cs: -------------------------------------------------------------------------------- 1 | using DBCD.IO.Common; 2 | using DBCD.IO.Readers; 3 | using DBCD.IO.Writers; 4 | using System; 5 | using 
System.Collections.Generic; 6 | using System.IO; 7 | using System.Linq; 8 | using System.Runtime.CompilerServices; 9 | 10 | [assembly:InternalsVisibleTo("DBCD")] 11 | 12 | namespace DBCD.IO 13 | { 14 | public class DBParser 15 | { 16 | private readonly BaseReader _reader; 17 | 18 | #region Header 19 | public Type RecordType { get; protected set; } 20 | public string Identifier { get; } 21 | public int RecordsCount => _reader.RecordsCount; 22 | public int FieldsCount => _reader.FieldsCount; 23 | public int RecordSize => _reader.RecordSize; 24 | public int StringTableSize => _reader.StringTableSize; 25 | public uint TableHash => _reader.TableHash; 26 | public uint LayoutHash => _reader.LayoutHash; 27 | public int IdFieldIndex => _reader.IdFieldIndex; 28 | public DB2Flags Flags => _reader.Flags; 29 | internal ColumnMetaData[] ColumnMeta => _reader.ColumnMeta; 30 | #endregion 31 | 32 | public DBParser(string fileName) : this(File.Open(fileName, FileMode.Open, FileAccess.Read, FileShare.Read)) { } 33 | 34 | public DBParser(Stream stream) 35 | { 36 | using (var bin = new BinaryReader(stream)) 37 | { 38 | Identifier = new string(bin.ReadChars(4)); 39 | stream.Position = 0; 40 | switch (Identifier) 41 | { 42 | case "WDC5": 43 | _reader = new WDC5Reader(stream); 44 | break; 45 | case "WDC4": 46 | _reader = new WDC4Reader(stream); 47 | break; 48 | case "WDC3": 49 | _reader = new WDC3Reader(stream); 50 | break; 51 | case "WDC2": 52 | case "1SLC": 53 | _reader = new WDC2Reader(stream); 54 | break; 55 | case "WDC1": 56 | _reader = new WDC1Reader(stream); 57 | break; 58 | case "WDB6": 59 | _reader = new WDB6Reader(stream); 60 | break; 61 | case "WDB5": 62 | _reader = new WDB5Reader(stream); 63 | break; 64 | case "WDB4": 65 | _reader = new WDB4Reader(stream); 66 | break; 67 | case "WDB3": 68 | _reader = new WDB3Reader(stream); 69 | break; 70 | case "WDB2": 71 | _reader = new WDB2Reader(stream); 72 | break; 73 | case "WDBC": 74 | _reader = new WDBCReader(stream); 75 | break; 76 | default: 77 | throw new Exception("DB type " + Identifier + " is not supported!"); 78 | } 79 | } 80 | } 81 | 82 | /// 83 | /// Get all records for 84 | /// 85 | public Storage GetRecords() where T : class, new() => new Storage(this); 86 | 87 | /// 88 | /// Populate the storage with values. 89 | /// 90 | public void PopulateRecords(IDictionary storage) where T : class, new() => ReadRecords(storage); 91 | 92 | protected virtual void ReadRecords(IDictionary storage) where T : class, new() 93 | { 94 | var fieldCache = (RecordType = typeof(T)).ToFieldCache(); 95 | 96 | _reader.Enumerate((row) => 97 | { 98 | T entry = new T(); 99 | row.GetFields(fieldCache, entry); 100 | lock (storage) 101 | storage.Add(row.Id, entry); 102 | }); 103 | } 104 | 105 | /// 106 | /// Get's all encrypted DB2 Sections. 107 | /// 108 | public Dictionary GetEncryptedSections() 109 | { 110 | var reader = _reader as IEncryptionSupportingReader; 111 | 112 | if (reader == null || reader.GetEncryptedSections() == null) 113 | { 114 | return new Dictionary(); 115 | } 116 | 117 | return reader.GetEncryptedSections().Where(s => s != null).ToDictionary(s => s.TactKeyLookup, s => s.NumRecords); 118 | } 119 | 120 | public Dictionary GetEncryptedIDs() 121 | { 122 | var reader = this._reader as IEncryptionSupportingReader; 123 | 124 | if (reader == null || reader.GetEncryptedIDs() == null) 125 | { 126 | return new Dictionary(); 127 | } 128 | 129 | return reader.GetEncryptedIDs(); 130 | } 131 | 132 | /// 133 | /// Write records to a new .db2 file. 
134 | /// 135 | public void WriteRecords<T>(IDictionary<int, T> storage, string filename) where T : class, new() => 136 | WriteRecords(storage, File.Open(filename, FileMode.Create, FileAccess.ReadWrite, FileShare.ReadWrite)); 137 | 138 | /// 139 | /// Write records to a new .db2 file. 140 | /// 141 | public void WriteRecords<T>(IDictionary<int, T> storage, Stream stream) where T : class, new() 142 | { 143 | if (typeof(T) != RecordType) 144 | throw new FormatException($"Invalid record type, expected {RecordType.Name}"); 145 | 146 | BaseWriter<T> writer; 147 | switch (Identifier) 148 | { 149 | case "WDC5": 150 | writer = new WDC5Writer<T>((WDC5Reader)_reader, storage, stream); 151 | break; 152 | case "WDC4": 153 | writer = new WDC4Writer<T>((WDC4Reader)_reader, storage, stream); 154 | break; 155 | case "WDC3": 156 | writer = new WDC3Writer<T>((WDC3Reader)_reader, storage, stream); 157 | break; 158 | case "WDC2": 159 | case "1SLC": 160 | writer = new WDC2Writer<T>((WDC2Reader)_reader, storage, stream); 161 | break; 162 | case "WDC1": 163 | writer = new WDC1Writer<T>((WDC1Reader)_reader, storage, stream); 164 | break; 165 | case "WDB6": 166 | writer = new WDB6Writer<T>((WDB6Reader)_reader, storage, stream); 167 | break; 168 | case "WDB5": 169 | writer = new WDB5Writer<T>((WDB5Reader)_reader, storage, stream); 170 | break; 171 | case "WDB4": 172 | writer = new WDB4Writer<T>((WDB4Reader)_reader, storage, stream); 173 | break; 174 | case "WDB3": 175 | writer = new WDB3Writer<T>((WDB3Reader)_reader, storage, stream); 176 | break; 177 | case "WDB2": 178 | writer = new WDB2Writer<T>((WDB2Reader)_reader, storage, stream); 179 | break; 180 | case "WDBC": 181 | writer = new WDBCWriter<T>((WDBCReader)_reader, storage, stream); 182 | break; 183 | } 184 | } 185 | 186 | /// 187 | /// Clears temporary data; however, this prevents further calls. 188 | /// 189 | public void ClearCache() => _reader.Clear(); 190 | } 191 | } -------------------------------------------------------------------------------- /DBCD.IO/Extensions.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Collections.Generic; 3 | using System.Diagnostics.Contracts; 4 | using System.IO; 5 | using System.Linq; 6 | using System.Linq.Expressions; 7 | using System.Reflection; 8 | using System.Runtime.CompilerServices; 9 | using System.Text; 10 | 11 | namespace DBCD.IO 12 | { 13 | static class Extensions 14 | { 15 | public static Action<T, object> GetSetter<T>(this FieldInfo fieldInfo) 16 | { 17 | var paramExpression = Expression.Parameter(typeof(T)); 18 | var propertyExpression = Expression.Field(paramExpression, fieldInfo); 19 | var valueExpression = Expression.Parameter(typeof(object)); 20 | var convertExpression = Expression.Convert(valueExpression, fieldInfo.FieldType); 21 | var assignExpression = Expression.Assign(propertyExpression, convertExpression); 22 | 23 | return Expression.Lambda<Action<T, object>>(assignExpression, paramExpression, valueExpression).Compile(); 24 | } 25 | 26 | public static Func<T, object> GetGetter<T>(this FieldInfo fieldInfo) 27 | { 28 | var paramExpression = Expression.Parameter(typeof(T)); 29 | var propertyExpression = Expression.Field(paramExpression, fieldInfo); 30 | var convertExpression = Expression.Convert(propertyExpression, typeof(object)); 31 | 32 | return Expression.Lambda<Func<T, object>>(convertExpression, paramExpression).Compile(); 33 | } 34 | 35 | public static T GetAttribute<T>(this FieldInfo fieldInfo) where T : Attribute 36 | { 37 | return Attribute.GetCustomAttribute(fieldInfo, typeof(T)) as T; 38 | } 39 | 40 | public static FieldCache<T>[] ToFieldCache<T>(this
Type type) 41 | { 42 | var fields = type.GetFields(); 43 | 44 | var cache = new FieldCache<T>[fields.Length]; 45 | for (int i = 0; i < fields.Length; i++) 46 | cache[i] = new FieldCache<T>(fields[i]); 47 | 48 | return cache; 49 | } 50 | 51 | public static T Read<T>(this BinaryReader reader) where T : struct 52 | { 53 | byte[] result = reader.ReadBytes(Unsafe.SizeOf<T>()); 54 | return Unsafe.ReadUnaligned<T>(ref result[0]); 55 | } 56 | 57 | 58 | /// <summary> 59 | /// Reads a NUL-separated string table from the current stream 60 | /// </summary> 61 | /// <param name="reader">BinaryReader instance</param> 62 | /// <param name="stringTableSize">Size of the string table</param> 63 | /// <param name="usePos">Use WDC2-style position-based table key numbering</param> 64 | /// <param name="baseOffset">Base offset to use for the string table keys</param> 65 | public static Dictionary<long, string> ReadStringTable(this BinaryReader reader, int stringTableSize, int baseOffset = 0, bool usePos = false) 66 | { 67 | var StringTable = new Dictionary<long, string>(stringTableSize / 0x20); 68 | 69 | if(stringTableSize == 0) 70 | return StringTable; 71 | 72 | var curOfs = 0; 73 | var decoded = Encoding.UTF8.GetString(reader.ReadBytes(stringTableSize)); 74 | foreach (var str in decoded.Split('\0')) 75 | { 76 | if (curOfs == stringTableSize) 77 | break; 78 | 79 | if(usePos) 80 | StringTable[(reader.BaseStream.Position - stringTableSize) + curOfs] = str; 81 | else 82 | StringTable[baseOffset + curOfs] = str; 83 | 84 | curOfs += Encoding.UTF8.GetByteCount(str) + 1; 85 | } 86 | 87 | return StringTable; 88 | } 89 | 90 | public static T[] ReadArray<T>(this BinaryReader reader) where T : struct 91 | { 92 | int numBytes = (int)reader.ReadInt64(); 93 | 94 | byte[] result = reader.ReadBytes(numBytes); 95 | 96 | reader.BaseStream.Position += (0 - numBytes) & 0x07; 97 | return result.CopyTo<T>(); 98 | } 99 | 100 | public static T[] ReadArray<T>(this BinaryReader reader, int size) where T : struct 101 | { 102 | int numBytes = Unsafe.SizeOf<T>() * size; 103 | 104 | byte[] result = reader.ReadBytes(numBytes); 105 | return result.CopyTo<T>(); 106 | } 107 | 108 | public static unsafe T[] CopyTo<T>(this byte[] src) where T : struct 109 | { 110 | T[] result = new T[src.Length / Unsafe.SizeOf<T>()]; 111 | 112 | if (src.Length > 0) 113 | Unsafe.CopyBlockUnaligned(Unsafe.AsPointer(ref result[0]), Unsafe.AsPointer(ref src[0]), (uint)src.Length); 114 | 115 | return result; 116 | } 117 | 118 | public static unsafe void WriteArray<T>(this BinaryWriter writer, T[] value) where T : struct 119 | { 120 | if (value.Length == 0) 121 | return; 122 | 123 | if (!(value is byte[] buffer)) 124 | { 125 | buffer = new byte[value.Length * Unsafe.SizeOf<T>()]; 126 | Unsafe.CopyBlockUnaligned(Unsafe.AsPointer(ref buffer[0]), Unsafe.AsPointer(ref value[0]), (uint)buffer.Length); 127 | } 128 | 129 | writer.Write(buffer); 130 | } 131 | 132 | public static void Write<T>(this BinaryWriter writer, T value) where T : struct 133 | { 134 | byte[] buffer = new byte[Unsafe.SizeOf<T>()]; 135 | Unsafe.WriteUnaligned(ref buffer[0], value); 136 | writer.Write(buffer); 137 | } 138 | 139 | public static bool HasFlagExt(this DB2Flags flag, DB2Flags valueToCheck) 140 | { 141 | return (flag & valueToCheck) == valueToCheck; 142 | } 143 | 144 | public static T MaxOrDefault<T>(this ICollection<T> source) 145 | { 146 | return source.DefaultIfEmpty().Max(); 147 | } 148 | 149 | public static T MinOrDefault<T>(this ICollection<T> source) 150 | { 151 | return source.DefaultIfEmpty().Min(); 152 | } 153 | } 154 | 155 | static class CStringExtensions 156 | { 157 | /// Reads the NULL terminated string from 158 | /// the current stream and advances the current position of the stream by string length + 1.
159 | /// 160 | /// 161 | [MethodImpl(MethodImplOptions.AggressiveInlining)] 162 | public static string ReadCString(this BinaryReader reader) 163 | { 164 | return reader.ReadCString(Encoding.UTF8); 165 | } 166 | 167 | /// Reads the NULL terminated string from 168 | /// the current stream and advances the current position of the stream by string length + 1. 169 | /// 170 | /// 171 | [MethodImpl(MethodImplOptions.AggressiveInlining)] 172 | public static string ReadCString(this BinaryReader reader, Encoding encoding) 173 | { 174 | var bytes = new System.Collections.Generic.List(0x20); 175 | byte b; 176 | while ((b = reader.ReadByte()) != 0) 177 | bytes.Add(b); 178 | return encoding.GetString(bytes.ToArray()); 179 | } 180 | 181 | public static void WriteCString(this BinaryWriter writer, string str) 182 | { 183 | var bytes = Encoding.UTF8.GetBytes(str); 184 | writer.Write(bytes); 185 | writer.Write((byte)0); 186 | } 187 | 188 | public static byte[] ToByteArray(this string str) 189 | { 190 | str = str.Replace(" ", string.Empty); 191 | 192 | var res = new byte[str.Length / 2]; 193 | for (int i = 0; i < res.Length; i++) 194 | { 195 | res[i] = Convert.ToByte(str.Substring(i * 2, 2), 16); 196 | } 197 | return res; 198 | } 199 | } 200 | 201 | /// 202 | /// A that provides extension methods for numeric types 203 | /// 204 | public static class NumericExtensions 205 | { 206 | [Pure] 207 | [MethodImpl(MethodImplOptions.AggressiveInlining)] 208 | public static int MostSignificantBit(this int n) 209 | { 210 | if (n == 0) return 1; 211 | else return ((int)(BitConverter.DoubleToInt64Bits(n) >> 52) & 0x7ff) - 1022; 212 | } 213 | 214 | [Pure] 215 | [MethodImpl(MethodImplOptions.AggressiveInlining)] 216 | public static int MostSignificantBit(this uint n) 217 | { 218 | if (n == 0) return 1; 219 | else return ((int)(BitConverter.DoubleToInt64Bits(n) >> 52) & 0x7ff) - 1022; 220 | } 221 | 222 | /// 223 | /// Calculates the upper bound of the log base 2 of the input value 224 | /// 225 | /// The input value to compute the bound for (with n > 0) 226 | public static int UpperBoundLog2(this int n) => 1 << MostSignificantBit(n); 227 | } 228 | } 229 | -------------------------------------------------------------------------------- /DBCD.IO/FieldCache.cs: -------------------------------------------------------------------------------- 1 | using DBCD.IO.Attributes; 2 | using System; 3 | using System.Reflection; 4 | 5 | namespace DBCD.IO 6 | { 7 | class FieldCache 8 | { 9 | public readonly FieldInfo Field; 10 | public readonly bool IsArray = false; 11 | public readonly bool IsLocalisedString = false; 12 | public readonly Action Setter; 13 | public readonly Func Getter; 14 | public readonly LocaleAttribute LocaleInfo; 15 | 16 | public bool IsForeign { get; set; } = false; 17 | public bool IsNonInlineRelation { get; set; } = false; 18 | public bool IsRelation { get; set; } = false; 19 | public bool IndexMapField { get; set; } = false; 20 | public int Cardinality { get; set; } = 1; 21 | 22 | // Type of the variable that is used to store the field 23 | // Might not match the information retrieved from client 24 | // metadata i.e. 
when field is a relation (as those are always uint32) 25 | public readonly Type FieldType; 26 | // Type of the variable as defined in client metadata 27 | public readonly Type MetaDataFieldType; 28 | 29 | public FieldCache(FieldInfo field) 30 | { 31 | Field = field; 32 | IsArray = field.FieldType.IsArray; 33 | IsLocalisedString = GetStringInfo(field, out LocaleInfo); 34 | Setter = field.GetSetter(); 35 | Getter = field.GetGetter(); 36 | Cardinality = GetCardinality(field); 37 | 38 | IndexAttribute indexAttribute = (IndexAttribute)Attribute.GetCustomAttribute(field, typeof(IndexAttribute)); 39 | IndexMapField = (indexAttribute != null) ? indexAttribute.NonInline : false; 40 | 41 | RelationAttribute relationAttribute = (RelationAttribute)Attribute.GetCustomAttribute(field, typeof(RelationAttribute)); 42 | IsRelation = (relationAttribute != null); 43 | IsNonInlineRelation = IsRelation && relationAttribute.IsNonInline; 44 | FieldType = field.FieldType; 45 | MetaDataFieldType = IsNonInlineRelation ? relationAttribute.FieldType : FieldType; 46 | 47 | ForeignAttribute foreignAttribute = (ForeignAttribute)Attribute.GetCustomAttribute(field, typeof(ForeignAttribute)); 48 | IsForeign = (foreignAttribute != null) ? foreignAttribute.IsForeign : false; 49 | } 50 | 51 | private int GetCardinality(FieldInfo field) 52 | { 53 | var cardinality = field.GetAttribute()?.Count; 54 | return cardinality.HasValue && cardinality > 0 ? cardinality.Value : 1; 55 | } 56 | 57 | private bool GetStringInfo(FieldInfo field, out LocaleAttribute attribute) 58 | { 59 | return (attribute = field.GetAttribute()) != null; 60 | } 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /DBCD.IO/HotfixReader.cs: -------------------------------------------------------------------------------- 1 | using DBCD.IO.Common; 2 | using DBCD.IO.Readers; 3 | using System; 4 | using System.Collections.Generic; 5 | using System.IO; 6 | using System.Linq; 7 | 8 | namespace DBCD.IO 9 | { 10 | public class HotfixReader 11 | { 12 | public delegate RowOp RowProcessor(IHotfixEntry row, bool shouldDelete); 13 | 14 | private readonly HTFXReader _reader; 15 | 16 | #region Header 17 | 18 | public int Version => _reader.Version; 19 | public int BuildId => _reader.BuildId; 20 | 21 | #endregion 22 | 23 | public HotfixReader(string fileName) : this(File.Open(fileName, FileMode.Open, FileAccess.Read, FileShare.Read)) { } 24 | 25 | public HotfixReader(Stream stream) 26 | { 27 | using (var bin = new BinaryReader(stream)) 28 | { 29 | var identifier = new string(bin.ReadChars(4)); 30 | stream.Position = 0; 31 | switch (identifier) 32 | { 33 | case "XFTH": 34 | _reader = new HTFXReader(stream); 35 | break; 36 | default: 37 | throw new Exception("Hotfix type " + identifier + " is not supported!"); 38 | } 39 | } 40 | } 41 | 42 | 43 | public void ApplyHotfixes(IDictionary storage, DBParser parser) where T : class, new() => ReadHotfixes(storage, parser); 44 | 45 | public void ApplyHotfixes(IDictionary storage, DBParser parser, RowProcessor processor) where T : class, new() 46 | => ReadHotfixes(storage, parser, processor); 47 | 48 | public void CombineCaches(params string[] files) 49 | { 50 | foreach (var file in files) 51 | { 52 | CombineCache(file); 53 | } 54 | } 55 | 56 | public void CombineCache(string file) 57 | { 58 | if (!File.Exists(file)) 59 | return; 60 | 61 | // parse the new cache 62 | var reader = new HTFXReader(file); 63 | if (reader.BuildId != BuildId) 64 | return; 65 | 66 | // add additional hotfix entries 
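                // Annotation: a minimal end-to-end sketch of how CombineCache/CombineCaches fit into
                // normal usage, based only on the APIs shown in this file, DBParser and Storage.cs;
                // the row class (MapRow) and the file names are hypothetical placeholders.
                //
                //     var parser   = new DBParser(File.OpenRead("Map.db2"));      // hypothetical path
                //     var storage  = new Storage<MapRow>(parser);                 // MapRow: user-defined row type
                //     var hotfixes = new HotfixReader("DBCache.bin");             // hypothetical cache path
                //     hotfixes.CombineCaches("DBCache2.bin", "DBCache3.bin");     // merged only if BuildId matches
                //     hotfixes.ApplyHotfixes(storage, parser);                    // adds/removes rows in place
                //     storage.Save("Map_patched.db2");                            // writes back via parser.WriteRecords
                //
                // Note that the Storage(Stream) constructor calls parser.ClearCache(), so a parser that
                // should also serve hotfix application or saving is better constructed and passed in
                // explicitly, as above.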
67 | _reader.Combine(reader); 68 | } 69 | 70 | protected virtual void ReadHotfixes(IDictionary storage, DBParser parser, RowProcessor processor = null) where T : class, new() 71 | { 72 | var fieldCache = typeof(T).ToFieldCache(); 73 | 74 | if (processor == null) 75 | processor = DefaultProcessor; 76 | 77 | // Id fields need to be excluded if not inline 78 | if (parser.Flags.HasFlagExt(DB2Flags.Index)) 79 | fieldCache[parser.IdFieldIndex].IndexMapField = true; 80 | 81 | // TODO verify hotfixes need to be applied sequentially 82 | var records = _reader.GetRecords(parser.TableHash).OrderBy(x => x.PushId); 83 | 84 | // Check if there are any valid cached records with data, don't remove row if so. 85 | // Example situation: Blizzard has invalidated TACTKey records in the same DBCache as valid ones. 86 | // Without the below check, valid cached TACTKey records would be removed by the invalidated records afterwards. 87 | // This only seems to be relevant for cached tables and specifically TACTKey, BroadcastText/ItemSparse only show up single times it seems. 88 | var shouldDelete = (parser.TableHash != 3744420815 && parser.TableHash != 35137211) || !records.Any(r => r.IsValid && r.PushId == -1 && r.DataSize > 0); 89 | 90 | foreach (var row in records) 91 | { 92 | var operation = processor(row, shouldDelete); 93 | 94 | if (operation == RowOp.Add) 95 | { 96 | T entry = new T(); 97 | row.GetFields(fieldCache, entry); 98 | storage[row.RecordId] = entry; 99 | } 100 | else if (operation == RowOp.Delete) 101 | { 102 | storage.Remove(row.RecordId); 103 | } 104 | } 105 | } 106 | 107 | public static RowOp DefaultProcessor(IHotfixEntry row, bool shouldDelete) 108 | { 109 | if (row.IsValid & row.DataSize > 0) 110 | return RowOp.Add; 111 | else if (shouldDelete) 112 | return RowOp.Delete; 113 | else 114 | return RowOp.Ignore; 115 | } 116 | } 117 | 118 | public enum RowOp 119 | { 120 | Add, 121 | Delete, 122 | Ignore 123 | } 124 | } 125 | -------------------------------------------------------------------------------- /DBCD.IO/Readers/BaseEncryptionSupportingReader.cs: -------------------------------------------------------------------------------- 1 | using System.Collections.Generic; 2 | using System.Linq; 3 | using DBCD.IO.Common; 4 | 5 | namespace DBCD.IO.Readers 6 | { 7 | abstract class BaseEncryptionSupportingReader : BaseReader, IEncryptionSupportingReader 8 | { 9 | protected List m_sections; 10 | protected Dictionary m_encryptedIDs; 11 | 12 | List IEncryptionSupportingReader.GetEncryptedSections() 13 | { 14 | return this.m_sections.Where(s => s.TactKeyLookup != 0).ToList(); 15 | } 16 | 17 | Dictionary IEncryptionSupportingReader.GetEncryptedIDs() 18 | { 19 | return this.m_encryptedIDs; 20 | } 21 | } 22 | } -------------------------------------------------------------------------------- /DBCD.IO/Readers/BaseReader.cs: -------------------------------------------------------------------------------- 1 | using DBCD.IO.Common; 2 | using System; 3 | using System.Collections.Generic; 4 | using System.Linq; 5 | using System.Threading.Tasks; 6 | 7 | namespace DBCD.IO.Readers 8 | { 9 | abstract class BaseReader 10 | { 11 | public int RecordsCount { get; protected set; } 12 | public int FieldsCount { get; protected set; } 13 | public int RecordSize { get; protected set; } 14 | public int StringTableSize { get; protected set; } 15 | 16 | // WDB2-WDB3 17 | public uint Build { get; protected set; } 18 | 19 | // WDB2+ 20 | public int MinIndex { get; protected set; } 21 | public int MaxIndex { get; protected set; } 22 
| 23 | // WDB3+ 24 | public DB2Flags Flags { get; protected set; } 25 | public int Locale { get; protected set; } 26 | 27 | // WDB5+ 28 | public uint TableHash { get; protected set; } 29 | public uint LayoutHash { get; protected set; } 30 | public int IdFieldIndex { get; protected set; } 31 | 32 | // WDC1+ 33 | public int PackedDataOffset { get; protected set; } 34 | 35 | // WDC5+ 36 | public uint SchemaVersion { get; protected set; } 37 | public string SchemaString { get; protected set; } 38 | 39 | #region Data 40 | 41 | public FieldMetaData[] Meta { get; protected set; } 42 | public int[] IndexData { get; protected set; } 43 | public ColumnMetaData[] ColumnMeta { get; protected set; } 44 | public Value32[][] PalletData { get; protected set; } 45 | public Dictionary[] CommonData { get; protected set; } 46 | public Dictionary StringTable { get; protected set; } 47 | public int[] ForeignKeyData { get; protected set; } 48 | 49 | protected Dictionary CopyData { get; set; } 50 | protected byte[] RecordsData { get; set; } 51 | protected Dictionary _Records { get; set; } = new Dictionary(); 52 | protected List SparseEntries { get; set; } 53 | 54 | #endregion 55 | 56 | #region Methods 57 | 58 | public void Enumerate(Action action) 59 | { 60 | Parallel.ForEach(_Records.Values, action); 61 | Parallel.ForEach(GetCopyRows(), action); 62 | } 63 | 64 | public void Clear() 65 | { 66 | IndexData = null; 67 | PalletData = null; 68 | ColumnMeta = null; 69 | RecordsData = null; 70 | ForeignKeyData = null; 71 | CommonData = null; 72 | 73 | _Records?.Clear(); 74 | StringTable?.Clear(); 75 | SparseEntries?.Clear(); 76 | CopyData?.Clear(); 77 | } 78 | 79 | private IEnumerable GetCopyRows() 80 | { 81 | if (CopyData == null || CopyData.Count == 0) 82 | yield break; 83 | 84 | // fix temp ids 85 | _Records = _Records.ToDictionary(x => x.Value.Id, x => x.Value); 86 | 87 | foreach (var copyRow in CopyData) 88 | { 89 | IDBRow rec = _Records[copyRow.Value].Clone(); 90 | rec.Data = rec.Data.Clone(); 91 | rec.Id = copyRow.Key; 92 | _Records[rec.Id] = rec; 93 | yield return rec; 94 | } 95 | 96 | CopyData.Clear(); 97 | } 98 | 99 | #endregion 100 | } 101 | 102 | } 103 | -------------------------------------------------------------------------------- /DBCD.IO/Readers/WDB2Reader.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Collections.Generic; 3 | using System.IO; 4 | using System.Linq; 5 | using System.Runtime.CompilerServices; 6 | using System.Text; 7 | using DBCD.IO.Common; 8 | 9 | namespace DBCD.IO.Readers 10 | { 11 | class WDB2Row : IDBRow 12 | { 13 | private BaseReader m_reader; 14 | private readonly int m_recordIndex; 15 | 16 | public int Id { get; set; } 17 | public BitReader Data { get; set; } 18 | 19 | public WDB2Row(BaseReader reader, BitReader data, int recordIndex) 20 | { 21 | m_reader = reader; 22 | Data = data; 23 | m_recordIndex = recordIndex + 1; 24 | 25 | Id = m_recordIndex = recordIndex + 1; 26 | } 27 | 28 | private static Dictionary, BaseReader, object>> simpleReaders = new Dictionary, BaseReader, object>> 29 | { 30 | [typeof(long)] = (data, stringTable, header) => GetFieldValue(data), 31 | [typeof(float)] = (data, stringTable, header) => GetFieldValue(data), 32 | [typeof(int)] = (data, stringTable, header) => GetFieldValue(data), 33 | [typeof(uint)] = (data, stringTable, header) => GetFieldValue(data), 34 | [typeof(short)] = (data, stringTable, header) => GetFieldValue(data), 35 | [typeof(ushort)] = (data, stringTable, header) => 
GetFieldValue(data), 36 | [typeof(sbyte)] = (data, stringTable, header) => GetFieldValue(data), 37 | [typeof(byte)] = (data, stringTable, header) => GetFieldValue(data), 38 | [typeof(string)] = (data, stringTable, header) => stringTable[GetFieldValue(data)], 39 | }; 40 | 41 | private static Dictionary, int, object>> arrayReaders = new Dictionary, int, object>> 42 | { 43 | [typeof(ulong[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 44 | [typeof(long[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 45 | [typeof(float[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 46 | [typeof(int[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 47 | [typeof(uint[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 48 | [typeof(ulong[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 49 | [typeof(ushort[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 50 | [typeof(short[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 51 | [typeof(byte[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 52 | [typeof(sbyte[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 53 | [typeof(string[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality).Select(i => stringTable[i]).ToArray(), 54 | }; 55 | 56 | public void GetFields(FieldCache[] fields, T entry) 57 | { 58 | for (int i = 0; i < fields.Length; i++) 59 | { 60 | FieldCache info = fields[i]; 61 | if (info.IndexMapField) 62 | { 63 | Id = GetFieldValue(Data); 64 | info.Setter(entry, Convert.ChangeType(Id, info.FieldType)); 65 | continue; 66 | } 67 | 68 | object value = null; 69 | 70 | if (info.IsArray) 71 | { 72 | if (arrayReaders.TryGetValue(info.FieldType, out var reader)) 73 | value = reader(Data, m_reader.StringTable, info.Cardinality); 74 | else 75 | throw new Exception("Unhandled array type: " + typeof(T).Name); 76 | } 77 | else if (info.IsLocalisedString) 78 | { 79 | Data.Position += 32 * info.LocaleInfo.Locale; 80 | value = simpleReaders[typeof(string)](Data, m_reader.StringTable, m_reader); 81 | Data.Position += 32 * (info.LocaleInfo.LocaleCount - info.LocaleInfo.Locale); 82 | } 83 | else 84 | { 85 | if (simpleReaders.TryGetValue(info.FieldType, out var reader)) 86 | value = reader(Data, m_reader.StringTable, m_reader); 87 | else 88 | throw new Exception("Unhandled field type: " + typeof(T).Name); 89 | } 90 | 91 | info.Setter(entry, value); 92 | } 93 | } 94 | 95 | private static T GetFieldValue(BitReader r) where T : struct 96 | { 97 | return r.ReadValue64(Unsafe.SizeOf() * 8).GetValue(); 98 | } 99 | 100 | private static T[] GetFieldValueArray(BitReader r, int cardinality) where T : struct 101 | { 102 | T[] array = new T[cardinality]; 103 | for (int i = 0; i < array.Length; i++) 104 | array[i] = r.ReadValue64(Unsafe.SizeOf() * 8).GetValue(); 105 | 106 | return array; 107 | } 108 | 109 | public IDBRow Clone() 110 | { 111 | return (IDBRow)MemberwiseClone(); 112 | } 113 | } 114 | 115 | class WDB2Reader : BaseReader 116 | { 117 | private const int HeaderSize = 28; 118 | private const int ExtendedHeaderSize = 48; 119 | private const uint WDB2FmtSig = 0x32424457; // WDB2 120 | 121 | public WDB2Reader(string dbcFile) : this(new FileStream(dbcFile, FileMode.Open)) { } 122 | 123 | public WDB2Reader(Stream stream) 124 | { 125 | using (var 
reader = new BinaryReader(stream, Encoding.UTF8)) 126 | { 127 | if (reader.BaseStream.Length < HeaderSize) 128 | throw new InvalidDataException("WDB2 file is corrupted!"); 129 | 130 | uint magic = reader.ReadUInt32(); 131 | 132 | if (magic != WDB2FmtSig) 133 | throw new InvalidDataException("WDB2 file is corrupted!"); 134 | 135 | RecordsCount = reader.ReadInt32(); 136 | FieldsCount = reader.ReadInt32(); 137 | RecordSize = reader.ReadInt32(); 138 | StringTableSize = reader.ReadInt32(); 139 | TableHash = reader.ReadUInt32(); 140 | Build = reader.ReadUInt32(); 141 | uint timestamp = reader.ReadUInt32(); 142 | 143 | if (RecordsCount == 0) 144 | return; 145 | 146 | // Extended header 147 | if (Build > 12880) 148 | { 149 | if (reader.BaseStream.Length < ExtendedHeaderSize) 150 | throw new InvalidDataException("WDB2 file is corrupted!"); 151 | 152 | MinIndex = reader.ReadInt32(); 153 | MaxIndex = reader.ReadInt32(); 154 | int locale = reader.ReadInt32(); 155 | int copyTableSize = reader.ReadInt32(); 156 | 157 | if (MaxIndex > 0) 158 | { 159 | int diff = MaxIndex - MinIndex + 1; 160 | reader.BaseStream.Position += diff * 4; // indicies uint[] 161 | reader.BaseStream.Position += diff * 2; // string lengths ushort[] 162 | } 163 | } 164 | 165 | byte[] data = reader.ReadBytes(RecordsCount * RecordSize); 166 | Array.Resize(ref data, data.Length + 8); // pad with extra zeros so we don't crash when reading 167 | RecordsData = data; 168 | 169 | for (int i = 0; i < RecordsCount; i++) 170 | { 171 | BitReader bitReader = new BitReader(RecordsData) { Position = i * RecordSize * 8 }; 172 | IDBRow rec = new WDB2Row(this, bitReader, i); 173 | _Records.Add(i, rec); 174 | } 175 | 176 | StringTable = reader.ReadStringTable(StringTableSize); 177 | } 178 | } 179 | } 180 | } 181 | -------------------------------------------------------------------------------- /DBCD.IO/Readers/WDB4Reader.cs: -------------------------------------------------------------------------------- 1 | using DBCD.IO.Common; 2 | using System; 3 | using System.Collections.Generic; 4 | using System.IO; 5 | using System.Linq; 6 | using System.Runtime.CompilerServices; 7 | using System.Text; 8 | 9 | namespace DBCD.IO.Readers 10 | { 11 | class WDB4Row : IDBRow 12 | { 13 | private BaseReader m_reader; 14 | private readonly int m_dataOffset; 15 | private readonly int m_dataPosition; 16 | private readonly int m_recordIndex; 17 | 18 | public int Id { get; set; } 19 | public BitReader Data { get; set; } 20 | 21 | public WDB4Row(BaseReader reader, BitReader data, int id, int recordIndex) 22 | { 23 | m_reader = reader; 24 | Data = data; 25 | m_recordIndex = recordIndex; 26 | 27 | Id = id; 28 | 29 | m_dataOffset = Data.Offset; 30 | m_dataPosition = Data.Position; 31 | } 32 | 33 | private static Dictionary, BaseReader, object>> simpleReaders = new Dictionary, BaseReader, object>> 34 | { 35 | [typeof(long)] = (data, stringTable, header) => GetFieldValue(data), 36 | [typeof(float)] = (data, stringTable, header) => GetFieldValue(data), 37 | [typeof(int)] = (data, stringTable, header) => GetFieldValue(data), 38 | [typeof(uint)] = (data, stringTable, header) => GetFieldValue(data), 39 | [typeof(short)] = (data, stringTable, header) => GetFieldValue(data), 40 | [typeof(ushort)] = (data, stringTable, header) => GetFieldValue(data), 41 | [typeof(sbyte)] = (data, stringTable, header) => GetFieldValue(data), 42 | [typeof(byte)] = (data, stringTable, header) => GetFieldValue(data), 43 | [typeof(string)] = (data, stringTable, header) => 
header.Flags.HasFlagExt(DB2Flags.Sparse) ? data.ReadCString() : stringTable[GetFieldValue(data)], 44 | }; 45 | 46 | private static Dictionary, int, object>> arrayReaders = new Dictionary, int, object>> 47 | { 48 | [typeof(ulong[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 49 | [typeof(long[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 50 | [typeof(float[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 51 | [typeof(int[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 52 | [typeof(uint[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 53 | [typeof(ulong[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 54 | [typeof(ushort[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 55 | [typeof(short[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 56 | [typeof(byte[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 57 | [typeof(sbyte[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 58 | [typeof(string[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality).Select(i => stringTable[i]).ToArray(), 59 | }; 60 | 61 | public void GetFields(FieldCache[] fields, T entry) 62 | { 63 | int indexFieldOffSet = 0; 64 | 65 | Data.Position = m_dataPosition; 66 | Data.Offset = m_dataOffset; 67 | 68 | for (int i = 0; i < fields.Length; i++) 69 | { 70 | FieldCache info = fields[i]; 71 | if (info.IndexMapField) 72 | { 73 | if (Id != -1) 74 | indexFieldOffSet++; 75 | else 76 | Id = GetFieldValue(Data); 77 | 78 | info.Setter(entry, Convert.ChangeType(Id, info.FieldType)); 79 | continue; 80 | } 81 | 82 | object value = null; 83 | int fieldIndex = i - indexFieldOffSet; 84 | 85 | // 0x2 SecondaryKey 86 | if (fieldIndex >= m_reader.FieldsCount) 87 | { 88 | info.Setter(entry, Convert.ChangeType(m_reader.ForeignKeyData[Id - m_reader.MinIndex], info.FieldType)); 89 | continue; 90 | } 91 | 92 | if (info.IsArray) 93 | { 94 | if (arrayReaders.TryGetValue(info.FieldType, out var reader)) 95 | value = reader(Data, m_reader.StringTable, info.Cardinality); 96 | else 97 | throw new Exception("Unhandled array type: " + typeof(T).Name); 98 | } 99 | else 100 | { 101 | if (simpleReaders.TryGetValue(info.FieldType, out var reader)) 102 | value = reader(Data, m_reader.StringTable, m_reader); 103 | else 104 | throw new Exception("Unhandled field type: " + typeof(T).Name); 105 | } 106 | 107 | info.Setter(entry, value); 108 | } 109 | } 110 | 111 | private static T GetFieldValue(BitReader r) where T : struct 112 | { 113 | return r.ReadValue64(Unsafe.SizeOf() * 8).GetValue(); 114 | } 115 | 116 | private static T[] GetFieldValueArray(BitReader r, int cardinality) where T : struct 117 | { 118 | T[] array = new T[cardinality]; 119 | for (int i = 0; i < array.Length; i++) 120 | array[i] = r.ReadValue64(Unsafe.SizeOf() * 8).GetValue(); 121 | 122 | return array; 123 | } 124 | 125 | public IDBRow Clone() 126 | { 127 | return (IDBRow)MemberwiseClone(); 128 | } 129 | } 130 | 131 | class WDB4Reader : BaseReader 132 | { 133 | private const int HeaderSize = 52; 134 | private const uint WDB4FmtSig = 0x34424457; // WDB4 135 | 136 | public WDB4Reader(string dbcFile) : this(new FileStream(dbcFile, FileMode.Open)) { } 137 | 138 | public WDB4Reader(Stream stream) 139 | { 140 | using (var reader = new BinaryReader(stream, 
Encoding.UTF8)) 141 | { 142 | if (reader.BaseStream.Length < HeaderSize) 143 | throw new InvalidDataException("WDB4 file is corrupted!"); 144 | 145 | uint magic = reader.ReadUInt32(); 146 | 147 | if (magic != WDB4FmtSig) 148 | throw new InvalidDataException("WDB4 file is corrupted!"); 149 | 150 | RecordsCount = reader.ReadInt32(); 151 | FieldsCount = reader.ReadInt32(); 152 | RecordSize = reader.ReadInt32(); 153 | StringTableSize = reader.ReadInt32(); 154 | TableHash = reader.ReadUInt32(); 155 | Build = reader.ReadUInt32(); 156 | uint timestamp = reader.ReadUInt32(); 157 | MinIndex = reader.ReadInt32(); 158 | MaxIndex = reader.ReadInt32(); 159 | Locale = reader.ReadInt32(); 160 | int copyTableSize = reader.ReadInt32(); 161 | Flags = (DB2Flags)reader.ReadUInt32(); 162 | 163 | if (RecordsCount == 0) 164 | return; 165 | 166 | if (!Flags.HasFlagExt(DB2Flags.Sparse)) 167 | { 168 | // records data 169 | byte[] data = reader.ReadBytes(RecordsCount * RecordSize); 170 | Array.Resize(ref data, data.Length + 8); // pad with extra zeros so we don't crash when reading 171 | RecordsData = data; 172 | 173 | // string table 174 | StringTable = reader.ReadStringTable(StringTableSize); 175 | } 176 | else 177 | { 178 | // sparse data with inlined strings 179 | RecordsData = reader.ReadBytes(StringTableSize - HeaderSize); 180 | 181 | int sparseCount = MaxIndex - MinIndex + 1; 182 | 183 | SparseEntries = new List(sparseCount); 184 | CopyData = new Dictionary(sparseCount); 185 | var sparseIdLookup = new Dictionary(sparseCount); 186 | 187 | for (int i = 0; i < sparseCount; i++) 188 | { 189 | SparseEntry sparse = reader.Read(); 190 | if (sparse.Offset == 0 || sparse.Size == 0) 191 | continue; 192 | 193 | if (sparseIdLookup.TryGetValue(sparse.Offset, out int copyId)) 194 | { 195 | CopyData[MinIndex + i] = copyId; 196 | } 197 | else 198 | { 199 | SparseEntries.Add(sparse); 200 | sparseIdLookup.Add(sparse.Offset, MinIndex + i); 201 | } 202 | } 203 | } 204 | 205 | // secondary key 206 | if (Flags.HasFlagExt(DB2Flags.SecondaryKey)) 207 | ForeignKeyData = reader.ReadArray(MaxIndex - MinIndex + 1); 208 | 209 | // index table 210 | if (Flags.HasFlagExt(DB2Flags.Index)) 211 | IndexData = reader.ReadArray(RecordsCount); 212 | 213 | // duplicate rows data 214 | if (CopyData == null) 215 | CopyData = new Dictionary(copyTableSize / 8); 216 | 217 | for (int i = 0; i < copyTableSize / 8; i++) 218 | CopyData[reader.ReadInt32()] = reader.ReadInt32(); 219 | 220 | int position = 0; 221 | for (int i = 0; i < RecordsCount; i++) 222 | { 223 | BitReader bitReader = new BitReader(RecordsData) { Position = 0 }; 224 | 225 | if (Flags.HasFlagExt(DB2Flags.Sparse)) 226 | { 227 | bitReader.Position = position; 228 | position += SparseEntries[i].Size * 8; 229 | } 230 | else 231 | { 232 | bitReader.Offset = i * RecordSize; 233 | } 234 | 235 | IDBRow rec = new WDB4Row(this, bitReader, Flags.HasFlagExt(DB2Flags.Index) ? 
IndexData[i] : -1, i); 236 | _Records.Add(i, rec); 237 | } 238 | } 239 | } 240 | } 241 | } 242 | -------------------------------------------------------------------------------- /DBCD.IO/Readers/WDBCReader.cs: -------------------------------------------------------------------------------- 1 | using DBCD.IO.Common; 2 | using System; 3 | using System.Collections.Generic; 4 | using System.IO; 5 | using System.Linq; 6 | using System.Runtime.CompilerServices; 7 | using System.Text; 8 | 9 | namespace DBCD.IO.Readers 10 | { 11 | class WDBCRow : IDBRow 12 | { 13 | private BaseReader m_reader; 14 | private readonly int m_recordIndex; 15 | 16 | public int Id { get; set; } 17 | public BitReader Data { get; set; } 18 | 19 | public WDBCRow(BaseReader reader, BitReader data, int recordIndex) 20 | { 21 | m_reader = reader; 22 | Data = data; 23 | m_recordIndex = recordIndex + 1; 24 | 25 | Id = m_recordIndex = recordIndex + 1; 26 | } 27 | 28 | private static Dictionary, BaseReader, object>> simpleReaders = new Dictionary, BaseReader, object>> 29 | { 30 | [typeof(long)] = (data, stringTable, header) => GetFieldValue(data), 31 | [typeof(float)] = (data, stringTable, header) => GetFieldValue(data), 32 | [typeof(int)] = (data, stringTable, header) => GetFieldValue(data), 33 | [typeof(uint)] = (data, stringTable, header) => GetFieldValue(data), 34 | [typeof(short)] = (data, stringTable, header) => GetFieldValue(data), 35 | [typeof(ushort)] = (data, stringTable, header) => GetFieldValue(data), 36 | [typeof(sbyte)] = (data, stringTable, header) => GetFieldValue(data), 37 | [typeof(byte)] = (data, stringTable, header) => GetFieldValue(data), 38 | [typeof(string)] = (data, stringTable, header) => stringTable[GetFieldValue(data)], 39 | }; 40 | 41 | private static Dictionary, int, object>> arrayReaders = new Dictionary, int, object>> 42 | { 43 | [typeof(ulong[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 44 | [typeof(long[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 45 | [typeof(float[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 46 | [typeof(int[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 47 | [typeof(uint[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 48 | [typeof(ulong[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 49 | [typeof(ushort[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 50 | [typeof(short[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 51 | [typeof(byte[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 52 | [typeof(sbyte[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality), 53 | [typeof(string[])] = (data, stringTable, cardinality) => GetFieldValueArray(data, cardinality).Select(i => stringTable[i]).ToArray(), 54 | }; 55 | 56 | public void GetFields(FieldCache[] fields, T entry) 57 | { 58 | for (int i = 0; i < fields.Length; i++) 59 | { 60 | FieldCache info = fields[i]; 61 | if (info.IndexMapField) 62 | { 63 | Id = GetFieldValue(Data); 64 | info.Setter(entry, Convert.ChangeType(Id, info.FieldType)); 65 | continue; 66 | } 67 | 68 | object value = null; 69 | 70 | if (info.IsArray) 71 | { 72 | if (arrayReaders.TryGetValue(info.FieldType, out var reader)) 73 | value = reader(Data, m_reader.StringTable, info.Cardinality); 74 | else 75 | throw new Exception("Unhandled array 
type: " + typeof(T).Name); 76 | } 77 | else if (info.IsLocalisedString) 78 | { 79 | Data.Position += 32 * info.LocaleInfo.Locale; 80 | value = simpleReaders[typeof(string)](Data, m_reader.StringTable, m_reader); 81 | Data.Position += 32 * (info.LocaleInfo.LocaleCount - info.LocaleInfo.Locale); 82 | } 83 | else 84 | { 85 | if (simpleReaders.TryGetValue(info.FieldType, out var reader)) 86 | value = reader(Data, m_reader.StringTable, m_reader); 87 | else 88 | throw new Exception("Unhandled field type: " + typeof(T).Name); 89 | } 90 | 91 | info.Setter(entry, value); 92 | } 93 | } 94 | 95 | private static T GetFieldValue(BitReader r) where T : struct 96 | { 97 | return r.ReadValue64(Unsafe.SizeOf() * 8).GetValue(); 98 | } 99 | 100 | private static T[] GetFieldValueArray(BitReader r, int cardinality) where T : struct 101 | { 102 | T[] array = new T[cardinality]; 103 | for (int i = 0; i < array.Length; i++) 104 | array[i] = r.ReadValue64(Unsafe.SizeOf() * 8).GetValue(); 105 | 106 | return array; 107 | } 108 | 109 | public IDBRow Clone() 110 | { 111 | return (IDBRow)MemberwiseClone(); 112 | } 113 | } 114 | 115 | class WDBCReader : BaseReader 116 | { 117 | private const int HeaderSize = 20; 118 | private const uint WDBCFmtSig = 0x43424457; // WDBC 119 | 120 | public WDBCReader(string dbcFile) : this(new FileStream(dbcFile, FileMode.Open)) { } 121 | 122 | public WDBCReader(Stream stream) 123 | { 124 | using (var reader = new BinaryReader(stream, Encoding.UTF8)) 125 | { 126 | if (reader.BaseStream.Length < HeaderSize) 127 | throw new InvalidDataException("WDBC file is corrupted!"); 128 | 129 | uint magic = reader.ReadUInt32(); 130 | 131 | if (magic != WDBCFmtSig) 132 | throw new InvalidDataException("WDBC file is corrupted!"); 133 | 134 | RecordsCount = reader.ReadInt32(); 135 | FieldsCount = reader.ReadInt32(); 136 | RecordSize = reader.ReadInt32(); 137 | StringTableSize = reader.ReadInt32(); 138 | 139 | if (RecordsCount == 0) 140 | return; 141 | 142 | byte[] data = reader.ReadBytes(RecordsCount * RecordSize); 143 | Array.Resize(ref data, data.Length + 8); // pad with extra zeros so we don't crash when reading 144 | RecordsData = data; 145 | 146 | for (int i = 0; i < RecordsCount; i++) 147 | { 148 | BitReader bitReader = new BitReader(RecordsData) { Position = i * RecordSize * 8 }; 149 | IDBRow rec = new WDBCRow(this, bitReader, i); 150 | _Records.Add(i, rec); 151 | } 152 | 153 | StringTable = reader.ReadStringTable(StringTableSize); 154 | } 155 | } 156 | } 157 | } 158 | -------------------------------------------------------------------------------- /DBCD.IO/Storage.cs: -------------------------------------------------------------------------------- 1 | using System.Collections.Generic; 2 | using System.IO; 3 | 4 | namespace DBCD.IO 5 | { 6 | public class Storage : SortedDictionary where T : class, new() 7 | { 8 | private readonly DBParser parser; 9 | 10 | #region Header 11 | 12 | public string Identifier => parser.Identifier; 13 | public int RecordsCount => parser.RecordsCount; 14 | public int FieldsCount => parser.FieldsCount; 15 | public int RecordSize => parser.RecordSize; 16 | public uint TableHash => parser.TableHash; 17 | public uint LayoutHash => parser.LayoutHash; 18 | public int IdFieldIndex => parser.IdFieldIndex; 19 | public DB2Flags Flags => parser.Flags; 20 | 21 | #endregion 22 | 23 | #region Constructors 24 | public Storage(string fileName) : this(File.Open(fileName, FileMode.Open, FileAccess.Read, FileShare.Read)) { } 25 | 26 | public Storage(Stream stream) : this(new 
DBParser(stream)) => parser.ClearCache(); 27 | 28 | public Storage(DBParser dbParser) 29 | { 30 | parser = dbParser; 31 | parser.PopulateRecords(this); 32 | } 33 | #endregion 34 | 35 | #region Methods 36 | 37 | public void Save(string fileName) => parser.WriteRecords(this, fileName); 38 | 39 | public void Save(Stream stream) => parser.WriteRecords(this, stream); 40 | 41 | #endregion 42 | } 43 | } -------------------------------------------------------------------------------- /DBCD.IO/Writers/WDB2Writer.cs: -------------------------------------------------------------------------------- 1 | using DBCD.IO.Common; 2 | using DBCD.IO.Readers; 3 | using System; 4 | using System.Collections.Generic; 5 | using System.IO; 6 | using System.Linq; 7 | 8 | namespace DBCD.IO.Writers 9 | { 10 | class WDB2RowSerializer : IDBRowSerializer where T : class 11 | { 12 | public IDictionary Records { get; private set; } 13 | public IDictionary StringLengths { get; private set; } 14 | 15 | private readonly BaseWriter m_writer; 16 | 17 | 18 | public WDB2RowSerializer(BaseWriter writer) 19 | { 20 | m_writer = writer; 21 | 22 | Records = new Dictionary(); 23 | StringLengths = new Dictionary(); 24 | } 25 | 26 | public void Serialize(IDictionary rows) 27 | { 28 | foreach (var row in rows) 29 | Serialize(row.Key, row.Value); 30 | } 31 | 32 | public void Serialize(int id, T row) 33 | { 34 | BitWriter bitWriter = new BitWriter(m_writer.RecordSize); 35 | StringLengths[id] = 0; 36 | 37 | for (int i = 0; i < m_writer.FieldCache.Length; i++) 38 | { 39 | FieldCache info = m_writer.FieldCache[i]; 40 | 41 | if (info.IsArray) 42 | { 43 | if (arrayWriters.TryGetValue(info.FieldType, out var writer)) 44 | { 45 | Array array = (Array)info.Getter(row); 46 | writer(bitWriter, m_writer, array); 47 | 48 | if (array is string[] strings) 49 | StringLengths[id] = (ushort)strings.Sum(x => x.Length == 0 ? 0 : x.Length + 1); 50 | } 51 | else 52 | throw new Exception("Unhandled array type: " + typeof(T).Name); 53 | } 54 | else 55 | { 56 | if (simpleWriters.TryGetValue(info.FieldType, out var writer)) 57 | { 58 | object value = info.Getter(row); 59 | writer(bitWriter, m_writer, value); 60 | 61 | if (value is string strings) 62 | StringLengths[id] = (ushort)(strings.Length == 0 ? 
0 : strings.Length + 1); 63 | } 64 | else 65 | throw new Exception("Unhandled field type: " + typeof(T).Name); 66 | } 67 | } 68 | 69 | // pad to record size 70 | bitWriter.Resize(m_writer.RecordSize); 71 | Records[id] = bitWriter; 72 | } 73 | 74 | public void GetCopyRows() 75 | { 76 | throw new NotImplementedException(); 77 | } 78 | 79 | 80 | private static Dictionary, object>> simpleWriters = new Dictionary, object>> 81 | { 82 | [typeof(long)] = (data, writer, value) => WriteFieldValue(data, value), 83 | [typeof(float)] = (data, writer, value) => WriteFieldValue(data, value), 84 | [typeof(int)] = (data, writer, value) => WriteFieldValue(data, value), 85 | [typeof(uint)] = (data, writer, value) => WriteFieldValue(data, value), 86 | [typeof(short)] = (data, writer, value) => WriteFieldValue(data, value), 87 | [typeof(ushort)] = (data, writer, value) => WriteFieldValue(data, value), 88 | [typeof(sbyte)] = (data, writer, value) => WriteFieldValue(data, value), 89 | [typeof(byte)] = (data, writer, value) => WriteFieldValue(data, value), 90 | [typeof(string)] = (data, writer, value) => WriteFieldValue(data, writer.InternString((string)value)), 91 | }; 92 | 93 | private readonly Dictionary, Array>> arrayWriters = new Dictionary, Array>> 94 | { 95 | [typeof(ulong[])] = (data, writer, array) => WriteFieldValueArray(data, array), 96 | [typeof(long[])] = (data, writer, array) => WriteFieldValueArray(data, array), 97 | [typeof(float[])] = (data, writer, array) => WriteFieldValueArray(data, array), 98 | [typeof(int[])] = (data, writer, array) => WriteFieldValueArray(data, array), 99 | [typeof(uint[])] = (data, writer, array) => WriteFieldValueArray(data, array), 100 | [typeof(ulong[])] = (data, writer, array) => WriteFieldValueArray(data, array), 101 | [typeof(ushort[])] = (data, writer, array) => WriteFieldValueArray(data, array), 102 | [typeof(short[])] = (data, writer, array) => WriteFieldValueArray(data, array), 103 | [typeof(byte[])] = (data, writer, array) => WriteFieldValueArray(data, array), 104 | [typeof(sbyte[])] = (data, writer, array) => WriteFieldValueArray(data, array), 105 | [typeof(string[])] = (data, writer, array) => WriteFieldValueArray(data, (array as string[]).Select(x => writer.InternString(x)).ToArray()), 106 | }; 107 | 108 | private static void WriteFieldValue(BitWriter r, object value) where TType : struct 109 | { 110 | r.WriteAligned((TType)value); 111 | } 112 | 113 | private static void WriteFieldValueArray(BitWriter r, Array value) where TType : struct 114 | { 115 | for (int i = 0; i < value.Length; i++) 116 | r.WriteAligned((TType)value.GetValue(i)); 117 | } 118 | 119 | } 120 | 121 | class WDB2Writer : BaseWriter where T : class 122 | { 123 | private const uint WDB2FmtSig = 0x32424457; // WDB2 124 | 125 | public WDB2Writer(WDB2Reader reader, IDictionary storage, Stream stream) : base(reader) 126 | { 127 | WDB2RowSerializer serializer = new WDB2RowSerializer(this); 128 | serializer.Serialize(storage); 129 | 130 | RecordsCount = storage.Count; 131 | 132 | using (var writer = new BinaryWriter(stream)) 133 | { 134 | writer.Write(WDB2FmtSig); 135 | writer.Write(RecordsCount); 136 | writer.Write(FieldsCount); 137 | writer.Write(RecordSize); 138 | writer.Write(StringTableSize); 139 | writer.Write(reader.TableHash); 140 | writer.Write(reader.Build); 141 | writer.Write((uint)DateTimeOffset.UtcNow.ToUnixTimeSeconds()); 142 | 143 | if (storage.Count == 0) 144 | return; 145 | 146 | // Extended header 147 | if (reader.Build > 12880) 148 | { 149 | if (reader.MaxIndex == 0) 150 | { 151 | 
writer.Write(0); 152 | writer.Write(0); 153 | writer.Write(reader.LayoutHash); 154 | writer.Write(0); // CopyTableSize 155 | } 156 | else 157 | { 158 | WriteIndices(writer, serializer, reader.LayoutHash); 159 | } 160 | } 161 | 162 | foreach (var record in serializer.Records) 163 | record.Value.CopyTo(writer.BaseStream); 164 | 165 | foreach (var str in StringTable) 166 | writer.WriteCString(str.Key); 167 | } 168 | } 169 | 170 | private void WriteIndices(BinaryWriter writer, WDB2RowSerializer serializer, uint layoutHash) 171 | { 172 | int min = serializer.Records.Keys.Min(); 173 | int max = serializer.Records.Keys.Max(); 174 | 175 | writer.Write(min); 176 | writer.Write(max); 177 | writer.Write(layoutHash); 178 | writer.Write(0); // CopyTableSize 179 | 180 | int index = 0; 181 | for (int i = min; i <= max; i++) 182 | { 183 | if (serializer.StringLengths.ContainsKey(i)) 184 | { 185 | writer.Write(++index); 186 | writer.Write(serializer.StringLengths[i]); 187 | } 188 | else 189 | { 190 | writer.Write(0); 191 | writer.Write((ushort)0); 192 | } 193 | } 194 | } 195 | } 196 | } 197 | -------------------------------------------------------------------------------- /DBCD.IO/Writers/WDB3Writer.cs: -------------------------------------------------------------------------------- 1 | using DBCD.IO.Common; 2 | using DBCD.IO.Readers; 3 | using System; 4 | using System.Collections.Generic; 5 | using System.IO; 6 | using System.Linq; 7 | 8 | namespace DBCD.IO.Writers 9 | { 10 | class WDB3RowSerializer : IDBRowSerializer where T : class 11 | { 12 | public IDictionary Records { get; private set; } 13 | 14 | private readonly BaseWriter m_writer; 15 | 16 | 17 | public WDB3RowSerializer(BaseWriter writer) 18 | { 19 | m_writer = writer; 20 | 21 | Records = new Dictionary(); 22 | } 23 | 24 | public void Serialize(IDictionary rows) 25 | { 26 | foreach (var row in rows) 27 | Serialize(row.Key, row.Value); 28 | } 29 | 30 | public void Serialize(int id, T row) 31 | { 32 | BitWriter bitWriter = new BitWriter(m_writer.RecordSize); 33 | 34 | for (int i = 0; i < m_writer.FieldCache.Length; i++) 35 | { 36 | FieldCache info = m_writer.FieldCache[i]; 37 | 38 | if (info.IndexMapField && m_writer.Flags.HasFlagExt(DB2Flags.Index)) 39 | continue; 40 | 41 | if (info.IsArray) 42 | { 43 | if (arrayWriters.TryGetValue(info.FieldType, out var writer)) 44 | writer(bitWriter, m_writer, (Array)info.Getter(row)); 45 | else 46 | throw new Exception("Unhandled array type: " + typeof(T).Name); 47 | } 48 | else 49 | { 50 | if (simpleWriters.TryGetValue(info.FieldType, out var writer)) 51 | writer(bitWriter, m_writer, info.Getter(row)); 52 | else 53 | throw new Exception("Unhandled field type: " + typeof(T).Name); 54 | } 55 | } 56 | 57 | // pad to record size 58 | if (!m_writer.Flags.HasFlagExt(DB2Flags.Sparse)) 59 | bitWriter.Resize(m_writer.RecordSize); 60 | else 61 | bitWriter.ResizeToMultiple(4); 62 | 63 | Records[id] = bitWriter; 64 | } 65 | 66 | public void GetCopyRows() 67 | { 68 | var copydata = Records.GroupBy(x => x.Value).Where(x => x.Count() > 1); 69 | foreach (var copygroup in copydata) 70 | { 71 | int key = copygroup.First().Key; 72 | foreach (var copy in copygroup.Skip(1)) 73 | m_writer.CopyData[copy.Key] = key; 74 | } 75 | } 76 | 77 | 78 | private static Dictionary, object>> simpleWriters = new Dictionary, object>> 79 | { 80 | [typeof(long)] = (data, writer, value) => WriteFieldValue(data, value), 81 | [typeof(float)] = (data, writer, value) => WriteFieldValue(data, value), 82 | [typeof(int)] = (data, writer, value) => 
WriteFieldValue(data, value), 83 | [typeof(uint)] = (data, writer, value) => WriteFieldValue(data, value), 84 | [typeof(short)] = (data, writer, value) => WriteFieldValue(data, value), 85 | [typeof(ushort)] = (data, writer, value) => WriteFieldValue(data, value), 86 | [typeof(sbyte)] = (data, writer, value) => WriteFieldValue(data, value), 87 | [typeof(byte)] = (data, writer, value) => WriteFieldValue(data, value), 88 | [typeof(string)] = (data, writer, value) => 89 | { 90 | if (writer.Flags.HasFlagExt(DB2Flags.Sparse)) 91 | data.WriteCStringAligned((string)value); 92 | else 93 | WriteFieldValue(data, writer.InternString((string)value)); 94 | } 95 | }; 96 | 97 | private readonly Dictionary, Array>> arrayWriters = new Dictionary, Array>> 98 | { 99 | [typeof(ulong[])] = (data, writer, array) => WriteFieldValueArray(data, array), 100 | [typeof(long[])] = (data, writer, array) => WriteFieldValueArray(data, array), 101 | [typeof(float[])] = (data, writer, array) => WriteFieldValueArray(data, array), 102 | [typeof(int[])] = (data, writer, array) => WriteFieldValueArray(data, array), 103 | [typeof(uint[])] = (data, writer, array) => WriteFieldValueArray(data, array), 104 | [typeof(ulong[])] = (data, writer, array) => WriteFieldValueArray(data, array), 105 | [typeof(ushort[])] = (data, writer, array) => WriteFieldValueArray(data, array), 106 | [typeof(short[])] = (data, writer, array) => WriteFieldValueArray(data, array), 107 | [typeof(byte[])] = (data, writer, array) => WriteFieldValueArray(data, array), 108 | [typeof(sbyte[])] = (data, writer, array) => WriteFieldValueArray(data, array), 109 | [typeof(string[])] = (data, writer, array) => WriteFieldValueArray(data, (array as string[]).Select(x => writer.InternString(x)).ToArray()), 110 | }; 111 | 112 | private static void WriteFieldValue(BitWriter r, object value) where TType : struct 113 | { 114 | r.WriteAligned((TType)value); 115 | } 116 | 117 | private static void WriteFieldValueArray(BitWriter r, Array value) where TType : struct 118 | { 119 | for (int i = 0; i < value.Length; i++) 120 | r.WriteAligned((TType)value.GetValue(i)); 121 | } 122 | } 123 | 124 | 125 | class WDB3Writer : BaseWriter where T : class 126 | { 127 | private const uint WDB3FmtSig = 0x33424457; // WDB3 128 | 129 | public WDB3Writer(WDB3Reader reader, IDictionary storage, Stream stream) : base(reader) 130 | { 131 | // always 2 empties 132 | StringTableSize++; 133 | 134 | WDB3RowSerializer serializer = new WDB3RowSerializer(this); 135 | serializer.Serialize(storage); 136 | serializer.GetCopyRows(); 137 | 138 | RecordsCount = serializer.Records.Count - CopyData.Count; 139 | if (Flags.HasFlagExt(DB2Flags.Sparse)) 140 | StringTableSize = 0; 141 | 142 | using (var writer = new BinaryWriter(stream)) 143 | { 144 | int minIndex = storage.Keys.MinOrDefault(); 145 | int maxIndex = storage.Keys.MaxOrDefault(); 146 | int copyTableSize = Flags.HasFlagExt(DB2Flags.Sparse) ? 
0 : CopyData.Count * 8; 147 | 148 | writer.Write(WDB3FmtSig); 149 | writer.Write(RecordsCount); 150 | writer.Write(FieldsCount); 151 | writer.Write(RecordSize); 152 | writer.Write(StringTableSize); 153 | writer.Write(reader.TableHash); 154 | writer.Write(reader.Build); 155 | writer.Write((uint)DateTimeOffset.UtcNow.ToUnixTimeSeconds()); 156 | writer.Write(minIndex); 157 | writer.Write(maxIndex); 158 | writer.Write(reader.Locale); 159 | writer.Write(copyTableSize); 160 | 161 | if (storage.Count == 0) 162 | return; 163 | 164 | // sparse data 165 | if (Flags.HasFlagExt(DB2Flags.Sparse)) 166 | { 167 | int sparseCount = maxIndex - minIndex + 1; 168 | uint recordsOffset = (uint)(writer.BaseStream.Position + (sparseCount * 6)); 169 | WriteOffsetRecords(writer, serializer, recordsOffset, sparseCount); 170 | } 171 | 172 | // secondary key 173 | if (Flags.HasFlagExt(DB2Flags.SecondaryKey)) 174 | WriteSecondaryKeyData(writer, storage, maxIndex - minIndex + 1); 175 | 176 | // record data 177 | foreach (var record in serializer.Records) 178 | if (!CopyData.ContainsKey(record.Key)) 179 | record.Value.CopyTo(writer.BaseStream); 180 | 181 | // string table 182 | if (!Flags.HasFlagExt(DB2Flags.Sparse)) 183 | { 184 | writer.WriteCString(""); 185 | foreach (var str in StringTable) 186 | writer.WriteCString(str.Key); 187 | } 188 | 189 | // index table 190 | if (Flags.HasFlagExt(DB2Flags.Index)) 191 | writer.WriteArray(serializer.Records.Keys.Except(CopyData.Keys).ToArray()); 192 | 193 | // copy table 194 | if (!Flags.HasFlagExt(DB2Flags.Sparse)) 195 | { 196 | foreach (var copyRecord in CopyData) 197 | { 198 | writer.Write(copyRecord.Key); 199 | writer.Write(copyRecord.Value); 200 | } 201 | } 202 | } 203 | } 204 | } 205 | } 206 | -------------------------------------------------------------------------------- /DBCD.IO/Writers/WDB4Writer.cs: -------------------------------------------------------------------------------- 1 | using DBCD.IO.Common; 2 | using DBCD.IO.Readers; 3 | using System; 4 | using System.Collections.Generic; 5 | using System.IO; 6 | using System.Linq; 7 | 8 | namespace DBCD.IO.Writers 9 | { 10 | class WDB4RowSerializer : IDBRowSerializer where T : class 11 | { 12 | public IDictionary Records { get; private set; } 13 | 14 | private readonly BaseWriter m_writer; 15 | 16 | 17 | public WDB4RowSerializer(BaseWriter writer) 18 | { 19 | m_writer = writer; 20 | 21 | Records = new Dictionary(); 22 | } 23 | 24 | public void Serialize(IDictionary rows) 25 | { 26 | foreach (var row in rows) 27 | Serialize(row.Key, row.Value); 28 | } 29 | 30 | public void Serialize(int id, T row) 31 | { 32 | BitWriter bitWriter = new BitWriter(m_writer.RecordSize); 33 | 34 | for (int i = 0; i < m_writer.FieldCache.Length; i++) 35 | { 36 | FieldCache info = m_writer.FieldCache[i]; 37 | 38 | if (info.IndexMapField && m_writer.Flags.HasFlagExt(DB2Flags.Index)) 39 | continue; 40 | 41 | if (info.IsArray) 42 | { 43 | if (arrayWriters.TryGetValue(info.FieldType, out var writer)) 44 | writer(bitWriter, m_writer, (Array)info.Getter(row)); 45 | else 46 | throw new Exception("Unhandled array type: " + typeof(T).Name); 47 | } 48 | else 49 | { 50 | if (simpleWriters.TryGetValue(info.FieldType, out var writer)) 51 | writer(bitWriter, m_writer, info.Getter(row)); 52 | else 53 | throw new Exception("Unhandled field type: " + typeof(T).Name); 54 | } 55 | } 56 | 57 | // pad to record size 58 | if (!m_writer.Flags.HasFlagExt(DB2Flags.Sparse)) 59 | bitWriter.Resize(m_writer.RecordSize); 60 | else 61 | bitWriter.ResizeToMultiple(4); 62 | 63 
| Records[id] = bitWriter; 64 | } 65 | 66 | public void GetCopyRows() 67 | { 68 | var copydata = Records.GroupBy(x => x.Value).Where(x => x.Count() > 1); 69 | foreach (var copygroup in copydata) 70 | { 71 | int key = copygroup.First().Key; 72 | foreach (var copy in copygroup.Skip(1)) 73 | m_writer.CopyData[copy.Key] = key; 74 | } 75 | } 76 | 77 | 78 | private static Dictionary, object>> simpleWriters = new Dictionary, object>> 79 | { 80 | [typeof(long)] = (data, writer, value) => WriteFieldValue(data, value), 81 | [typeof(float)] = (data, writer, value) => WriteFieldValue(data, value), 82 | [typeof(int)] = (data, writer, value) => WriteFieldValue(data, value), 83 | [typeof(uint)] = (data, writer, value) => WriteFieldValue(data, value), 84 | [typeof(short)] = (data, writer, value) => WriteFieldValue(data, value), 85 | [typeof(ushort)] = (data, writer, value) => WriteFieldValue(data, value), 86 | [typeof(sbyte)] = (data, writer, value) => WriteFieldValue(data, value), 87 | [typeof(byte)] = (data, writer, value) => WriteFieldValue(data, value), 88 | [typeof(string)] = (data, writer, value) => 89 | { 90 | if (writer.Flags.HasFlagExt(DB2Flags.Sparse)) 91 | data.WriteCStringAligned((string)value); 92 | else 93 | WriteFieldValue(data, writer.InternString((string)value)); 94 | } 95 | }; 96 | 97 | private readonly Dictionary, Array>> arrayWriters = new Dictionary, Array>> 98 | { 99 | [typeof(ulong[])] = (data, writer, array) => WriteFieldValueArray(data, array), 100 | [typeof(long[])] = (data, writer, array) => WriteFieldValueArray(data, array), 101 | [typeof(float[])] = (data, writer, array) => WriteFieldValueArray(data, array), 102 | [typeof(int[])] = (data, writer, array) => WriteFieldValueArray(data, array), 103 | [typeof(uint[])] = (data, writer, array) => WriteFieldValueArray(data, array), 104 | [typeof(ulong[])] = (data, writer, array) => WriteFieldValueArray(data, array), 105 | [typeof(ushort[])] = (data, writer, array) => WriteFieldValueArray(data, array), 106 | [typeof(short[])] = (data, writer, array) => WriteFieldValueArray(data, array), 107 | [typeof(byte[])] = (data, writer, array) => WriteFieldValueArray(data, array), 108 | [typeof(sbyte[])] = (data, writer, array) => WriteFieldValueArray(data, array), 109 | [typeof(string[])] = (data, writer, array) => WriteFieldValueArray(data, (array as string[]).Select(x => writer.InternString(x)).ToArray()), 110 | }; 111 | 112 | private static void WriteFieldValue(BitWriter r, object value) where TType : struct 113 | { 114 | r.WriteAligned((TType)value); 115 | } 116 | 117 | private static void WriteFieldValueArray(BitWriter r, Array value) where TType : struct 118 | { 119 | for (int i = 0; i < value.Length; i++) 120 | r.WriteAligned((TType)value.GetValue(i)); 121 | } 122 | } 123 | 124 | class WDB4Writer : BaseWriter where T : class 125 | { 126 | private const uint WDB4FmtSig = 0x34424457; // WDB4 127 | 128 | public WDB4Writer(WDB4Reader reader, IDictionary storage, Stream stream) : base(reader) 129 | { 130 | // always 2 empties 131 | StringTableSize++; 132 | 133 | WDB4RowSerializer serializer = new WDB4RowSerializer(this); 134 | serializer.Serialize(storage); 135 | serializer.GetCopyRows(); 136 | 137 | RecordsCount = serializer.Records.Count - CopyData.Count; 138 | 139 | using (var writer = new BinaryWriter(stream)) 140 | { 141 | int minIndex = storage.Keys.MinOrDefault(); 142 | int maxIndex = storage.Keys.MaxOrDefault(); 143 | int copyTableSize = Flags.HasFlagExt(DB2Flags.Sparse) ? 
0 : CopyData.Count * 8; 144 | 145 | writer.Write(WDB4FmtSig); 146 | writer.Write(RecordsCount); 147 | writer.Write(FieldsCount); 148 | writer.Write(RecordSize); 149 | writer.Write(StringTableSize); // if flags & 0x01 != 0, offset to the offset_map 150 | writer.Write(reader.TableHash); 151 | writer.Write(reader.Build); 152 | writer.Write((uint)DateTimeOffset.UtcNow.ToUnixTimeSeconds()); 153 | writer.Write(minIndex); 154 | writer.Write(maxIndex); 155 | writer.Write(reader.Locale); 156 | writer.Write(copyTableSize); 157 | writer.Write((uint)Flags); 158 | 159 | if (storage.Count == 0) 160 | return; 161 | 162 | // record data 163 | uint recordsOffset = (uint)writer.BaseStream.Position; 164 | foreach (var record in serializer.Records) 165 | if (!CopyData.ContainsKey(record.Key)) 166 | record.Value.CopyTo(writer.BaseStream); 167 | 168 | // string table 169 | if (!Flags.HasFlagExt(DB2Flags.Sparse)) 170 | { 171 | writer.WriteCString(""); 172 | foreach (var str in StringTable) 173 | writer.WriteCString(str.Key); 174 | } 175 | 176 | // sparse data 177 | if (Flags.HasFlagExt(DB2Flags.Sparse)) 178 | { 179 | // change the StringTableSize to the offset_map position 180 | long oldPos = writer.BaseStream.Position; 181 | writer.BaseStream.Position = 16; 182 | writer.Write((uint)oldPos); 183 | writer.BaseStream.Position = oldPos; 184 | 185 | WriteOffsetRecords(writer, serializer, recordsOffset, maxIndex - minIndex + 1); 186 | } 187 | 188 | // secondary key 189 | if (Flags.HasFlagExt(DB2Flags.SecondaryKey)) 190 | WriteSecondaryKeyData(writer, storage, maxIndex - minIndex + 1); 191 | 192 | // index table 193 | if (Flags.HasFlagExt(DB2Flags.Index)) 194 | writer.WriteArray(serializer.Records.Keys.Except(CopyData.Keys).ToArray()); 195 | 196 | // copy table 197 | if (!Flags.HasFlagExt(DB2Flags.Sparse)) 198 | { 199 | foreach (var copyRecord in CopyData) 200 | { 201 | writer.Write(copyRecord.Key); 202 | writer.Write(copyRecord.Value); 203 | } 204 | } 205 | } 206 | } 207 | } 208 | } 209 | -------------------------------------------------------------------------------- /DBCD.IO/Writers/WDB5Writer.cs: -------------------------------------------------------------------------------- 1 | using DBCD.IO.Common; 2 | using DBCD.IO.Readers; 3 | using System; 4 | using System.Collections.Generic; 5 | using System.IO; 6 | using System.Linq; 7 | 8 | namespace DBCD.IO.Writers 9 | { 10 | class WDB5RowSerializer : IDBRowSerializer where T : class 11 | { 12 | public IDictionary Records { get; private set; } 13 | 14 | private readonly BaseWriter m_writer; 15 | private readonly FieldMetaData[] m_fieldMeta; 16 | 17 | 18 | public WDB5RowSerializer(BaseWriter writer) 19 | { 20 | m_writer = writer; 21 | m_fieldMeta = m_writer.Meta; 22 | 23 | Records = new Dictionary(); 24 | } 25 | 26 | public void Serialize(IDictionary rows) 27 | { 28 | foreach (var row in rows) 29 | Serialize(row.Key, row.Value); 30 | } 31 | 32 | public void Serialize(int id, T row) 33 | { 34 | BitWriter bitWriter = new BitWriter(m_writer.RecordSize); 35 | 36 | int indexFieldOffSet = 0; 37 | 38 | for (int i = 0; i < m_writer.FieldCache.Length; i++) 39 | { 40 | FieldCache info = m_writer.FieldCache[i]; 41 | 42 | if (info.IndexMapField && m_writer.Flags.HasFlagExt(DB2Flags.Index)) 43 | { 44 | indexFieldOffSet++; 45 | continue; 46 | } 47 | 48 | int fieldIndex = i - indexFieldOffSet; 49 | 50 | if (info.IsArray) 51 | { 52 | if (arrayWriters.TryGetValue(info.FieldType, out var writer)) 53 | writer(bitWriter, m_writer, m_fieldMeta[fieldIndex], (Array)info.Getter(row)); 54 | else 
55 | throw new Exception("Unhandled array type: " + typeof(T).Name); 56 | } 57 | else 58 | { 59 | if (simpleWriters.TryGetValue(info.FieldType, out var writer)) 60 | writer(bitWriter, m_writer, m_fieldMeta[fieldIndex], info.Getter(row)); 61 | else 62 | throw new Exception("Unhandled field type: " + typeof(T).Name); 63 | } 64 | } 65 | 66 | // pad to record size 67 | if (!m_writer.Flags.HasFlagExt(DB2Flags.Sparse)) 68 | bitWriter.Resize(m_writer.RecordSize); 69 | else 70 | bitWriter.ResizeToMultiple(4); 71 | 72 | Records[id] = bitWriter; 73 | } 74 | 75 | public void GetCopyRows() 76 | { 77 | var copydata = Records.GroupBy(x => x.Value).Where(x => x.Count() > 1); 78 | foreach (var copygroup in copydata) 79 | { 80 | int key = copygroup.First().Key; 81 | foreach (var copy in copygroup.Skip(1)) 82 | m_writer.CopyData[copy.Key] = key; 83 | } 84 | } 85 | 86 | 87 | private static Dictionary, FieldMetaData, object>> simpleWriters = new Dictionary, FieldMetaData, object>> 88 | { 89 | [typeof(long)] = (data, writer, fieldMeta, value) => WriteFieldValue(data, fieldMeta, value), 90 | [typeof(float)] = (data, writer, fieldMeta, value) => WriteFieldValue(data, fieldMeta, value), 91 | [typeof(int)] = (data, writer, fieldMeta, value) => WriteFieldValue(data, fieldMeta, value), 92 | [typeof(uint)] = (data, writer, fieldMeta, value) => WriteFieldValue(data, fieldMeta, value), 93 | [typeof(short)] = (data, writer, fieldMeta, value) => WriteFieldValue(data, fieldMeta, value), 94 | [typeof(ushort)] = (data, writer, fieldMeta, value) => WriteFieldValue(data, fieldMeta, value), 95 | [typeof(sbyte)] = (data, writer, fieldMeta, value) => WriteFieldValue(data, fieldMeta, value), 96 | [typeof(byte)] = (data, writer, fieldMeta, value) => WriteFieldValue(data, fieldMeta, value), 97 | [typeof(string)] = (data, writer, fieldMeta, value) => 98 | { 99 | if (writer.Flags.HasFlagExt(DB2Flags.Sparse)) 100 | data.WriteCString((string)value); 101 | else 102 | WriteFieldValue(data, fieldMeta, writer.InternString((string)value)); 103 | } 104 | }; 105 | 106 | private static Dictionary, FieldMetaData, Array>> arrayWriters = new Dictionary, FieldMetaData, Array>> 107 | { 108 | [typeof(ulong[])] = (data, writer, fieldMeta, array) => WriteFieldValueArray(data, fieldMeta, array), 109 | [typeof(long[])] = (data, writer, fieldMeta, array) => WriteFieldValueArray(data, fieldMeta, array), 110 | [typeof(float[])] = (data, writer, fieldMeta, array) => WriteFieldValueArray(data, fieldMeta, array), 111 | [typeof(int[])] = (data, writer, fieldMeta, array) => WriteFieldValueArray(data, fieldMeta, array), 112 | [typeof(uint[])] = (data, writer, fieldMeta, array) => WriteFieldValueArray(data, fieldMeta, array), 113 | [typeof(ulong[])] = (data, writer, fieldMeta, array) => WriteFieldValueArray(data, fieldMeta, array), 114 | [typeof(ushort[])] = (data, writer, fieldMeta, array) => WriteFieldValueArray(data, fieldMeta, array), 115 | [typeof(short[])] = (data, writer, fieldMeta, array) => WriteFieldValueArray(data, fieldMeta, array), 116 | [typeof(byte[])] = (data, writer, fieldMeta, array) => WriteFieldValueArray(data, fieldMeta, array), 117 | [typeof(sbyte[])] = (data, writer, fieldMeta, array) => WriteFieldValueArray(data, fieldMeta, array), 118 | [typeof(string[])] = (data, writer, fieldMeta, array) => WriteFieldValueArray(data, fieldMeta, (array as string[]).Select(x => writer.InternString(x)).ToArray()), 119 | }; 120 | 121 | private static void WriteFieldValue(BitWriter r, FieldMetaData fieldMeta, object value) where TType : struct 122 | { 123 | 
r.Write((TType)value, 32 - fieldMeta.Bits); 124 | } 125 | 126 | private static void WriteFieldValueArray(BitWriter r, FieldMetaData fieldMeta, Array value) where TType : struct 127 | { 128 | for (int i = 0; i < value.Length; i++) 129 | r.Write((TType)value.GetValue(i), 32 - fieldMeta.Bits); 130 | } 131 | } 132 | 133 | class WDB5Writer : BaseWriter where T : class 134 | { 135 | private const uint WDB5FmtSig = 0x35424457; // WDB5 136 | 137 | public WDB5Writer(WDB5Reader reader, IDictionary storage, Stream stream) : base(reader) 138 | { 139 | // always 2 empties 140 | StringTableSize++; 141 | 142 | WDB5RowSerializer serializer = new WDB5RowSerializer(this); 143 | serializer.Serialize(storage); 144 | serializer.GetCopyRows(); 145 | 146 | RecordsCount = serializer.Records.Count - CopyData.Count; 147 | 148 | using (var writer = new BinaryWriter(stream)) 149 | { 150 | int minIndex = storage.Keys.MinOrDefault(); 151 | int maxIndex = storage.Keys.MaxOrDefault(); 152 | int copyTableSize = Flags.HasFlagExt(DB2Flags.Sparse) ? 0 : CopyData.Count * 8; 153 | 154 | writer.Write(WDB5FmtSig); 155 | writer.Write(RecordsCount); 156 | writer.Write(FieldsCount); 157 | writer.Write(RecordSize); 158 | writer.Write(StringTableSize); // if flags & 0x01 != 0, offset to the offset_map 159 | writer.Write(reader.TableHash); 160 | writer.Write(reader.LayoutHash); 161 | writer.Write(minIndex); 162 | writer.Write(maxIndex); 163 | writer.Write(reader.Locale); 164 | writer.Write(copyTableSize); // copytablesize 165 | writer.Write((uint)Flags); 166 | writer.Write((ushort)IdFieldIndex); 167 | 168 | // field meta 169 | writer.WriteArray(Meta); 170 | 171 | if (storage.Count == 0) 172 | return; 173 | 174 | // record data 175 | uint recordsOffset = (uint)writer.BaseStream.Position; 176 | foreach (var record in serializer.Records) 177 | if (!CopyData.ContainsKey(record.Key)) 178 | record.Value.CopyTo(writer.BaseStream); 179 | 180 | // string table 181 | if (!Flags.HasFlagExt(DB2Flags.Sparse)) 182 | { 183 | writer.WriteCString(""); 184 | foreach (var str in StringTable) 185 | writer.WriteCString(str.Key); 186 | } 187 | 188 | // sparse data 189 | if (Flags.HasFlagExt(DB2Flags.Sparse)) 190 | { 191 | // change the StringTableSize to the offset_map position 192 | long oldPos = writer.BaseStream.Position; 193 | writer.BaseStream.Position = 16; 194 | writer.Write((uint)oldPos); 195 | writer.BaseStream.Position = oldPos; 196 | 197 | WriteOffsetRecords(writer, serializer, recordsOffset, maxIndex - minIndex + 1); 198 | } 199 | 200 | // secondary key 201 | if (Flags.HasFlagExt(DB2Flags.SecondaryKey)) 202 | WriteSecondaryKeyData(writer, storage, maxIndex - minIndex + 1); 203 | 204 | // index table 205 | if (Flags.HasFlagExt(DB2Flags.Index)) 206 | writer.WriteArray(serializer.Records.Keys.Except(CopyData.Keys).ToArray()); 207 | 208 | // copy table 209 | if (!Flags.HasFlagExt(DB2Flags.Sparse)) 210 | { 211 | foreach (var copyRecord in CopyData) 212 | { 213 | writer.Write(copyRecord.Key); 214 | writer.Write(copyRecord.Value); 215 | } 216 | } 217 | } 218 | } 219 | } 220 | } 221 | -------------------------------------------------------------------------------- /DBCD.IO/Writers/WDBCWriter.cs: -------------------------------------------------------------------------------- 1 | using DBCD.IO.Common; 2 | using DBCD.IO.Readers; 3 | using System; 4 | using System.Collections.Generic; 5 | using System.IO; 6 | using System.Linq; 7 | 8 | namespace DBCD.IO.Writers 9 | { 10 | class WDBCRowSerializer : IDBRowSerializer where T : class 11 | { 12 | public 
IDictionary Records { get; private set; } 13 | 14 | private readonly BaseWriter m_writer; 15 | 16 | 17 | public WDBCRowSerializer(BaseWriter writer) 18 | { 19 | m_writer = writer; 20 | Records = new Dictionary(); 21 | } 22 | 23 | public void Serialize(IDictionary rows) 24 | { 25 | foreach (var row in rows) 26 | Serialize(row.Key, row.Value); 27 | } 28 | 29 | public void Serialize(int id, T row) 30 | { 31 | BitWriter bitWriter = new BitWriter(m_writer.RecordSize); 32 | 33 | for (int i = 0; i < m_writer.FieldCache.Length; i++) 34 | { 35 | FieldCache info = m_writer.FieldCache[i]; 36 | 37 | if (info.IsArray) 38 | { 39 | if (arrayWriters.TryGetValue(info.FieldType, out var writer)) 40 | writer(bitWriter, m_writer, (Array)info.Getter(row)); 41 | else 42 | throw new Exception("Unhandled array type: " + typeof(T).Name); 43 | } 44 | else 45 | { 46 | if (simpleWriters.TryGetValue(info.FieldType, out var writer)) 47 | writer(bitWriter, m_writer, info.Getter(row)); 48 | else 49 | throw new Exception("Unhandled field type: " + typeof(T).Name); 50 | } 51 | } 52 | 53 | // pad to record size 54 | bitWriter.Resize(m_writer.RecordSize); 55 | Records[id] = bitWriter; 56 | } 57 | 58 | public void GetCopyRows() 59 | { 60 | throw new NotImplementedException(); 61 | } 62 | 63 | 64 | private static Dictionary, object>> simpleWriters = new Dictionary, object>> 65 | { 66 | [typeof(long)] = (data, writer, value) => WriteFieldValue(data, value), 67 | [typeof(float)] = (data, writer, value) => WriteFieldValue(data, value), 68 | [typeof(int)] = (data, writer, value) => WriteFieldValue(data, value), 69 | [typeof(uint)] = (data, writer, value) => WriteFieldValue(data, value), 70 | [typeof(short)] = (data, writer, value) => WriteFieldValue(data, value), 71 | [typeof(ushort)] = (data, writer, value) => WriteFieldValue(data, value), 72 | [typeof(sbyte)] = (data, writer, value) => WriteFieldValue(data, value), 73 | [typeof(byte)] = (data, writer, value) => WriteFieldValue(data, value), 74 | [typeof(string)] = (data, writer, value) => WriteFieldValue(data, writer.InternString((string)value)), 75 | }; 76 | 77 | private readonly Dictionary, Array>> arrayWriters = new Dictionary, Array>> 78 | { 79 | [typeof(ulong[])] = (data, writer, array) => WriteFieldValueArray(data, array), 80 | [typeof(long[])] = (data, writer, array) => WriteFieldValueArray(data, array), 81 | [typeof(float[])] = (data, writer, array) => WriteFieldValueArray(data, array), 82 | [typeof(int[])] = (data, writer, array) => WriteFieldValueArray(data, array), 83 | [typeof(uint[])] = (data, writer, array) => WriteFieldValueArray(data, array), 84 | [typeof(ulong[])] = (data, writer, array) => WriteFieldValueArray(data, array), 85 | [typeof(ushort[])] = (data, writer, array) => WriteFieldValueArray(data, array), 86 | [typeof(short[])] = (data, writer, array) => WriteFieldValueArray(data, array), 87 | [typeof(byte[])] = (data, writer, array) => WriteFieldValueArray(data, array), 88 | [typeof(sbyte[])] = (data, writer, array) => WriteFieldValueArray(data, array), 89 | [typeof(string[])] = (data, writer, array) => WriteFieldValueArray(data, (array as string[]).Select(x => writer.InternString(x)).ToArray()), 90 | }; 91 | 92 | private static void WriteFieldValue(BitWriter r, object value) where TType : struct 93 | { 94 | r.WriteAligned((TType)value); 95 | } 96 | 97 | private static void WriteFieldValueArray(BitWriter r, Array value) where TType : struct 98 | { 99 | for (int i = 0; i < value.Length; i++) 100 | r.WriteAligned((TType)value.GetValue(i)); 101 | } 102 | } 103 
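// Both row serializers in this project resolve the write routine from the field's declared
// CLR type via the simpleWriters / arrayWriters dispatch tables above, instead of switching
// on the type at every call site. A rough sketch of how one WDBC field write resolves
// (illustrative only — "row" stands for any generated record type):
//
//   var info = m_writer.FieldCache[i];               // e.g. a public int field
//   var write = simpleWriters[info.FieldType];       // -> WriteFieldValue<int>
//   write(bitWriter, m_writer, info.Getter(row));    // aligned write into the fixed-size record
//
// Strings are the one special case: the value is interned into the shared string table and
// the returned offset is written in its place.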
| 104 | class WDBCWriter : BaseWriter where T : class 105 | { 106 | private const int HeaderSize = 20; 107 | private const uint WDBCFmtSig = 0x43424457; // WDBC 108 | 109 | public WDBCWriter(WDBCReader reader, IDictionary storage, Stream stream) : base(reader) 110 | { 111 | WDBCRowSerializer serializer = new WDBCRowSerializer(this); 112 | serializer.Serialize(storage); 113 | 114 | RecordsCount = storage.Count; 115 | 116 | using (var writer = new BinaryWriter(stream)) 117 | { 118 | writer.Write(WDBCFmtSig); 119 | writer.Write(RecordsCount); 120 | writer.Write(FieldsCount); 121 | writer.Write(RecordSize); 122 | writer.Write(StringTableSize); 123 | 124 | if (RecordsCount == 0) 125 | return; 126 | 127 | foreach (var record in serializer.Records) 128 | record.Value.CopyTo(writer.BaseStream); 129 | 130 | foreach (var str in StringTable) 131 | writer.WriteCString(str.Key); 132 | } 133 | } 134 | } 135 | } 136 | -------------------------------------------------------------------------------- /DBCD.Tests/DBCD.Tests.csproj: -------------------------------------------------------------------------------- 1 |  2 | 3 | 4 | net9.0 5 | false 6 | 9.0 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | -------------------------------------------------------------------------------- /DBCD.Tests/Providers/WagoDBCProvider.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Collections.Generic; 3 | using System.IO; 4 | using System.Linq; 5 | using System.Net.Http; 6 | using System.Text; 7 | 8 | namespace DBCD.Providers 9 | { 10 | /// 11 | /// Retrieves and returns DB2 files from wago.tools with 1-day caching. 12 | /// 13 | public class WagoDBCProvider : IDBCProvider 14 | { 15 | private readonly HttpClient client = new(); 16 | private readonly Dictionary DB2FileDataIDs = new(); 17 | 18 | public WagoDBCProvider() 19 | { 20 | if (DB2FileDataIDs.Count == 0) 21 | LoadDBDManifest(); 22 | } 23 | 24 | private struct DBDManifestEntry { 25 | public string tableName; 26 | public string tableHash; 27 | public uint dbcFileDataID; 28 | public uint db2FileDataID; 29 | } 30 | 31 | private void LoadDBDManifest() 32 | { 33 | var manifest = client.GetStringAsync("https://raw.githubusercontent.com/wowdev/WoWDBDefs/master/manifest.json").Result; 34 | var dbdManifest = Newtonsoft.Json.JsonConvert.DeserializeObject>(manifest); 35 | 36 | foreach(var entry in dbdManifest) 37 | DB2FileDataIDs[entry.tableName] = entry.db2FileDataID; 38 | } 39 | 40 | public string[] GetAllTableNames() 41 | { 42 | return DB2FileDataIDs.Keys.ToArray(); 43 | } 44 | 45 | public Stream StreamForTableName(string tableName, string build) 46 | { 47 | if (!DB2FileDataIDs.TryGetValue(tableName, out uint fileDataID)) 48 | throw new Exception("Unable to find table " + tableName + " in FDID lookup!"); 49 | 50 | if(!Directory.Exists("DBCCache")) 51 | Directory.CreateDirectory("DBCCache"); 52 | 53 | if (!Directory.Exists(Path.Combine("DBCCache", build))) 54 | Directory.CreateDirectory(Path.Combine("DBCCache", build)); 55 | 56 | var cacheFile = Path.Combine("DBCCache", build, tableName + ".db2"); 57 | if (File.Exists(cacheFile)) 58 | { 59 | //var lastWrite = File.GetLastWriteTime(cacheFile); 60 | //if (DateTime.Now - lastWrite < new TimeSpan(1, 0, 0, 0)) 61 | return new MemoryStream(File.ReadAllBytes(cacheFile)); 62 | } 63 | 64 | var bytes = client.GetByteArrayAsync("https://wago.tools/api/casc/" + fileDataID + "?version=" + build).Result; 65 | if (bytes.Length == 0 || (bytes.Length < 
40 && Encoding.ASCII.GetString(bytes).Contains("error"))) 66 | throw new FileNotFoundException(); 67 | 68 | File.WriteAllBytes(cacheFile, bytes); 69 | return new MemoryStream(bytes); 70 | } 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /DBCD.Tests/ReadingTest.cs: -------------------------------------------------------------------------------- 1 | using DBCD.Providers; 2 | using Microsoft.VisualStudio.TestTools.UnitTesting; 3 | using System; 4 | using System.IO; 5 | 6 | namespace DBCD.Tests 7 | { 8 | [TestClass] 9 | public class ReadingTest 10 | { 11 | static GithubDBDProvider githubDBDProvider = new(true); 12 | static readonly WagoDBCProvider wagoDBCProvider = new(); 13 | 14 | // Disabled as 7.1.0 definitions are not yet generally available 15 | /* 16 | [TestMethod] 17 | public void TestWDB5ReadingNoIndexData() 18 | { 19 | DBCD dbcd = new(wagoDBCProvider, githubDBDProvider); 20 | IDBCDStorage storage = dbcd.Load("Achievement_Category", "7.1.0.23222"); 21 | var row = storage[1]; 22 | Assert.AreEqual("Statistics", row["Name_lang"]); 23 | } 24 | */ 25 | [TestMethod] 26 | public void TestWDB5Reading() 27 | { 28 | DBCD dbcd = new(wagoDBCProvider, githubDBDProvider); 29 | IDBCDStorage storage = dbcd.Load("Map", "7.1.0.23222"); 30 | var row = storage[451]; 31 | Assert.AreEqual("development", row["Directory"]); 32 | } 33 | 34 | [TestMethod] 35 | public void TestWDC1Reading() 36 | { 37 | DBCD dbcd = new(wagoDBCProvider, githubDBDProvider); 38 | IDBCDStorage storage = dbcd.Load("Map", "7.3.5.25600"); 39 | 40 | var row = storage[451]; 41 | Assert.AreEqual("development", row["Directory"]); 42 | } 43 | 44 | [TestMethod] 45 | public void TestWDC2Reading() 46 | { 47 | DBCD dbcd = new(wagoDBCProvider, githubDBDProvider); 48 | IDBCDStorage storage = dbcd.Load("Map", "8.0.1.26231"); 49 | 50 | var row = storage[451]; 51 | Assert.AreEqual("development", row["Directory"]); 52 | } 53 | 54 | [TestMethod] 55 | public void TestWDC3Reading() 56 | { 57 | DBCD dbcd = new(wagoDBCProvider, githubDBDProvider); 58 | IDBCDStorage storage = dbcd.Load("Map", "9.2.7.45745"); 59 | 60 | var row = storage[451]; 61 | Assert.AreEqual("development", row["Directory"]); 62 | } 63 | 64 | [TestMethod] 65 | public void TestWDC4Reading() 66 | { 67 | DBCD dbcd = new(wagoDBCProvider, githubDBDProvider); 68 | IDBCDStorage storage = dbcd.Load("Map", "10.1.0.48480"); 69 | 70 | var row = storage[2574]; 71 | Assert.AreEqual("Dragon Isles", row["MapName_lang"]); 72 | } 73 | 74 | [TestMethod] 75 | public void TestWDC5Reading() 76 | { 77 | DBCD dbcd = new(wagoDBCProvider, githubDBDProvider); 78 | IDBCDStorage storage = dbcd.Load("Map", "10.2.5.52432"); 79 | 80 | var row = storage[2574]; 81 | Assert.AreEqual("Dragon Isles", row["MapName_lang"]); 82 | } 83 | 84 | [TestMethod] 85 | public void TestWDC5ReadingBDBDNoCache() 86 | { 87 | DBCD dbcd = new(wagoDBCProvider, GithubBDBDProvider.GetStream(true)); 88 | IDBCDStorage storage = dbcd.Load("Map", "10.2.5.52432"); 89 | 90 | var row = storage[2574]; 91 | Assert.AreEqual("Dragon Isles", row["MapName_lang"]); 92 | } 93 | 94 | [TestMethod] 95 | public void TestSparseReading() 96 | { 97 | DBCD dbcd = new(wagoDBCProvider, githubDBDProvider); 98 | IDBCDStorage storage = dbcd.Load("ItemSparse", "9.2.7.45745"); 99 | 100 | var row = storage[132172]; 101 | Assert.AreEqual("Crowbar", row["Display_lang"]); 102 | } 103 | 104 | [TestMethod] 105 | public void TestNonInlineRelation() 106 | { 107 | DBCD dbcd = new(wagoDBCProvider, githubDBDProvider); 108 | 
IDBCDStorage storage = dbcd.Load("MapDifficulty", "9.2.7.45745"); 109 | 110 | var row = storage[38]; 111 | Assert.AreEqual(451, row["MapID"]); 112 | } 113 | 114 | [TestMethod] 115 | public void TestEncryptedInfo() 116 | { 117 | DBCD dbcd = new DBCD(wagoDBCProvider, githubDBDProvider); 118 | 119 | var storage = dbcd.Load("SpellName", "11.0.2.55959"); 120 | 121 | foreach (var section in storage.GetEncryptedSections()) 122 | { 123 | Console.WriteLine($"Found encrypted section encrypted with key {section.Key} containing {section.Value} rows"); 124 | } 125 | } 126 | 127 | [TestMethod] 128 | public void TestGithubDBDProviderNoCache() 129 | { 130 | var noCacheProvider = new GithubDBDProvider(false); 131 | noCacheProvider.StreamForTableName("ItemSparse"); 132 | } 133 | 134 | [TestMethod] 135 | public void TestGithubDBDProviderWithCache() 136 | { 137 | githubDBDProvider.StreamForTableName("ItemSparse"); 138 | } 139 | 140 | [TestMethod] 141 | public void TestReadingAllDB2s() 142 | { 143 | return; // Only run this test manually 144 | var localDBDProvider = new FilesystemDBDProvider("D:\\Projects\\WoWDBDefs\\definitions"); 145 | 146 | //var build = "3.3.5.12340"; // WDBC 147 | //var build = "6.0.1.18179"; // WDB2 148 | //var build = "7.0.1.20740"; // WDB3, only 1 DBD sadly 149 | //var build = "7.0.1.20810"; // WDB4, only 2 DBDs sadly 150 | //var build = "7.2.0.23436"; // WDB5, only Map.db2 151 | //var build = "7.3.5.25928"; // WDB6 152 | //var build = "7.3.5.25928"; // WDC1 153 | //var build = "8.0.1.26231"; // WDC2 154 | //var build = "9.1.0.39653"; // WDC3 155 | //var build = "10.1.0.48480"; // WDC4 156 | var build = "11.0.2.56044"; // WDC5 157 | 158 | var localDBCProvider = new FilesystemDBCProvider(Path.Combine("DBCCache", build)); 159 | var dbcd = new DBCD(localDBCProvider, localDBDProvider); 160 | var allDB2s = wagoDBCProvider.GetAllTableNames(); 161 | 162 | var attemptedTables = 0; 163 | var successfulTables = 0; 164 | 165 | foreach (var tableName in allDB2s) 166 | { 167 | // I think this table is meant to crash the test, so we skip it 168 | if (tableName == "UnitTestSparse") 169 | continue; 170 | 171 | if (!localDBDProvider.ContainsBuild(tableName, build)) 172 | continue; 173 | 174 | attemptedTables++; 175 | 176 | try 177 | { 178 | var storage = dbcd.Load(tableName, build); 179 | successfulTables++; 180 | } 181 | catch (FileNotFoundException e) 182 | { 183 | Console.WriteLine($"Failed to load {tableName} for build {build}, does not exist in build."); 184 | successfulTables++; // this counts 185 | } 186 | catch (Exception e) 187 | { 188 | Console.WriteLine("Failed to load " + tableName + " for build " + build + ": " + e.Message + "\n" + e.StackTrace); 189 | } 190 | } 191 | 192 | Assert.AreEqual(attemptedTables, successfulTables); 193 | } 194 | 195 | //[TestMethod] 196 | //public void TestHotfixApplying() 197 | //{ 198 | // DBCD dbcd = new DBCD(dbcProvider, githubDBDProvider); 199 | 200 | // var storage = dbcd.Load("ItemSparse"); 201 | // var hotfix = new HotfixReader("hotfix.bin"); 202 | 203 | // var countBefore = storage.Count; 204 | // storage = storage.ApplyingHotfixes(hotfix); 205 | 206 | // var countAfter = storage.Count; 207 | 208 | // System.Console.WriteLine($"B: {countBefore} => A: {countAfter}"); 209 | //} 210 | 211 | 212 | //[TestMethod] 213 | //public void TestFilesystemDBDProvider() 214 | //{ 215 | // DBCD dbcd = new DBCD(dbcProvider, dbdProvider); 216 | // var storage = dbcd.Load("SpellName", locale: Locale.EnUS); 217 | // // Spell is present in Classic Era -> Retail: 
https://www.wowhead.com/spell=17/ 218 | // Assert.AreEqual("Power Word: Shield", storage[17]["Name_lang"]); 219 | //} 220 | } 221 | } 222 | -------------------------------------------------------------------------------- /DBCD.Tests/Utilities/IO.cs: -------------------------------------------------------------------------------- 1 | using System.Collections.Generic; 2 | using System.Globalization; 3 | using System.IO; 4 | using System.Linq; 5 | 6 | namespace DBCD.Tests.Utilities 7 | { 8 | internal class IO 9 | { 10 | public static bool TryGetExactPath(string path, out string exactPath) 11 | { 12 | bool result = false; 13 | exactPath = null; 14 | 15 | // DirectoryInfo accepts either a file path or a directory path, and most of its properties work for either. 16 | // However, its Exists property only works for a directory path. 17 | DirectoryInfo directory = new DirectoryInfo(path); 18 | if (File.Exists(path) || directory.Exists) 19 | { 20 | List parts = new List(); 21 | 22 | DirectoryInfo parentDirectory = directory.Parent; 23 | while (parentDirectory != null) 24 | { 25 | FileSystemInfo entry = parentDirectory.EnumerateFileSystemInfos(directory.Name).First(); 26 | parts.Add(entry.Name); 27 | 28 | directory = parentDirectory; 29 | parentDirectory = directory.Parent; 30 | } 31 | 32 | // Handle the root part (i.e., drive letter or UNC \\server\share). 33 | string root = directory.FullName; 34 | if (root.Contains(':')) 35 | { 36 | root = root.ToUpper(); 37 | } 38 | else 39 | { 40 | string[] rootParts = root.Split('\\'); 41 | root = string.Join("\\", rootParts.Select(part => CultureInfo.CurrentCulture.TextInfo.ToTitleCase(part))); 42 | } 43 | 44 | parts.Add(root); 45 | parts.Reverse(); 46 | exactPath = Path.Combine(parts.ToArray()); 47 | result = true; 48 | } 49 | 50 | return result; 51 | } 52 | 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /DBCD.Tests/WritingTest.cs: -------------------------------------------------------------------------------- 1 | using DBCD.Providers; 2 | using Microsoft.VisualStudio.TestTools.UnitTesting; 3 | using Newtonsoft.Json; 4 | using System; 5 | using System.Collections.Generic; 6 | using System.IO; 7 | using System.Linq; 8 | using System.Net.Http; 9 | 10 | namespace DBCD.Tests 11 | { 12 | [TestClass] 13 | public class WritingTest 14 | { 15 | public static string InputPath { get; } = $"{Directory.GetCurrentDirectory()}\\DBCCache"; 16 | 17 | public static string OutputPath = $"{Directory.GetCurrentDirectory()}\\tmp"; 18 | 19 | public static WagoDBCProvider wagoDBCProvider = new(); 20 | static GithubDBDProvider githubDBDProvider = new(true); 21 | 22 | [ClassInitialize()] 23 | public static void ClassInit(TestContext context) 24 | { 25 | if (Directory.Exists(OutputPath)) 26 | { 27 | Directory.Delete(OutputPath, true); 28 | } 29 | Directory.CreateDirectory(OutputPath); 30 | } 31 | 32 | [ClassCleanup()] 33 | public static void ClassCleanup() 34 | { 35 | Directory.Delete(OutputPath, true); 36 | } 37 | 38 | 39 | [TestMethod] 40 | public void TestWritingNewRowDb2() 41 | { 42 | DBCD dbcd = new(wagoDBCProvider, githubDBDProvider); 43 | IDBCDStorage storage = dbcd.Load("AlliedRace", "9.2.7.45745"); 44 | 45 | storage.Add(700000, storage.ConstructRow(700000)); 46 | storage.Save(Path.Join(OutputPath, "AlliedRace.db2")); 47 | 48 | DBCD localDbcd = new(new FilesystemDBCProvider(OutputPath), githubDBDProvider); 49 | IDBCDStorage outputStorage = localDbcd.Load("AlliedRace", "9.2.7.45745"); 50 | 51 | Assert.AreEqual(11, 
outputStorage.Count); 52 | } 53 | 54 | [TestMethod] 55 | public void TestWritingNewRowDb2WithArrayField() 56 | { 57 | DBCD dbcd = new(wagoDBCProvider, githubDBDProvider); 58 | IDBCDStorage storage = dbcd.Load("ItemDisplayInfo", "9.2.7.45745"); 59 | 60 | storage.Add(700000, storage.ConstructRow(700000)); 61 | storage.Save(Path.Join(OutputPath, "ItemDisplayInfo.db2")); 62 | 63 | DBCD localDbcd = new(new FilesystemDBCProvider(OutputPath), githubDBDProvider); 64 | IDBCDStorage outputStorage = localDbcd.Load("ItemDisplayInfo", "9.2.7.45745"); 65 | 66 | Assert.AreEqual(116146, outputStorage.Count); 67 | } 68 | 69 | [TestMethod] 70 | public void TestWritingNewRowDb2WithArrayOfStringsField() 71 | { 72 | DBCD dbcd = new(wagoDBCProvider, githubDBDProvider); 73 | IDBCDStorage storage = dbcd.Load("BattlePetEffectProperties", "9.2.7.45745"); 74 | 75 | storage.Add(10, storage.ConstructRow(10)); 76 | storage.Save(Path.Join(OutputPath, "BattlePetEffectProperties.db2")); 77 | 78 | DBCD localDbcd = new(new FilesystemDBCProvider(OutputPath), githubDBDProvider); 79 | IDBCDStorage outputStorage = localDbcd.Load("BattlePetEffectProperties", "9.2.7.45745"); 80 | 81 | Assert.AreEqual(134, outputStorage.Count); 82 | } 83 | 84 | [TestMethod] 85 | public void TestSavingSameStorageTwice() 86 | { 87 | DBCD dbcd = new(wagoDBCProvider, githubDBDProvider); 88 | IDBCDStorage storage = dbcd.Load("AlliedRace", "9.2.7.45745"); 89 | storage.Save(Path.Join(OutputPath, "AlliedRace.db2")); 90 | storage.Save(Path.Join(OutputPath, "AlliedRace.db2")); 91 | } 92 | 93 | [TestMethod] 94 | public void TestWritingAllDB2s() 95 | { 96 | return; // Only run this test manually 97 | 98 | var localDBDProvider = new FilesystemDBDProvider("D:\\Projects\\WoWDBDefs\\definitions"); 99 | 100 | //var build = "3.3.5.12340"; // WDBC 101 | //var build = "6.0.1.18179"; // WDB2 102 | //var build = "7.0.1.20740"; // WDB3, TODO: Find DBDs for a DB2 103 | //var build = "7.0.1.20810"; // WDB4, TODO: Find DBDs for a DB2 104 | //var build = "7.0.3.21479"; // WDB5, TODO: Find DBDs for a DB2 105 | //var build = "7.2.0.23436"; // WDB6 106 | //var build = "7.3.5.25928"; // WDC1 107 | //var build = "8.0.1.26231"; // WDC2 108 | var build = "9.2.7.45745"; // WDC3 109 | //var build = "10.1.0.48480"; // WDC4 110 | //var build = "11.0.2.56044"; // WDC5 111 | 112 | var allDB2s = wagoDBCProvider.GetAllTableNames(); 113 | 114 | if (Directory.Exists("tmp")) 115 | Directory.Delete("tmp", true); 116 | 117 | Directory.CreateDirectory("tmp"); 118 | 119 | var localDBCProvider = new FilesystemDBCProvider(Path.Combine("DBCCache", build)); 120 | var tmpDBCProvider = new FilesystemDBCProvider("tmp"); 121 | 122 | var InputDBCD = new DBCD(localDBCProvider, localDBDProvider); 123 | var SavedDBCD = new DBCD(tmpDBCProvider, localDBDProvider); 124 | 125 | var attemptedTables = 0; 126 | var successfulTables = 0; 127 | var identicalTables = 0; 128 | 129 | foreach (var tableName in allDB2s) 130 | { 131 | if (!localDBDProvider.ContainsBuild(tableName, build)) 132 | continue; 133 | 134 | if (tableName == "UnitTestSparse") 135 | continue; 136 | 137 | var originalValues = new List(); 138 | 139 | attemptedTables++; 140 | 141 | try 142 | { 143 | var originalStorage = InputDBCD.Load(tableName, build); 144 | 145 | //if(tableName == "ModelFileData") 146 | //{ 147 | // var row = originalStorage.ConstructRow(4252801); 148 | // row["FileDataID"] = 4252801; 149 | // row["Flags"] = (byte)0; 150 | // row["LodCount"] = (byte)3; 151 | // row["ModelResourcesID"] = (uint)62664; 152 | //} 153 | 154 | 
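// Round-trip validation: snapshot the original rows, save the table to tmp/, reload it with a
// second DBCD instance, and require the reloaded rows to serialize to the same JSON as the
// originals. The byte-for-byte comparison further down is only informational — a differing
// binary layout is logged, but only a data mismatch fails the test.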
originalValues.AddRange(originalStorage.Values); 155 | originalStorage.Save($"tmp/{tableName}.db2"); 156 | } 157 | catch (FileNotFoundException e) 158 | { 159 | // This is not a reading test, I could not care less 160 | attemptedTables--; 161 | continue; 162 | } 163 | catch (AggregateException e) 164 | { 165 | if (e.InnerException is HttpRequestException) 166 | { 167 | // This is not a reading test, I could not care less 168 | attemptedTables--; 169 | continue; 170 | } 171 | else 172 | { 173 | Console.WriteLine("Failed to write " + tableName + " for build " + build + ": " + e.Message + "\n" + e.StackTrace); 174 | continue; 175 | } 176 | } 177 | catch (Exception e) 178 | { 179 | Console.WriteLine("Failed to write " + tableName + " for build " + build + ": " + e.Message + "\n" + e.StackTrace); 180 | continue; 181 | } 182 | 183 | //try 184 | //{ 185 | var savedStorage = SavedDBCD.Load(tableName, build); 186 | successfulTables++; 187 | // Lazy compare 188 | var originalJson = JsonConvert.SerializeObject(originalValues, Formatting.Indented); 189 | var newJson = JsonConvert.SerializeObject(savedStorage.Values, Formatting.Indented); 190 | if (originalJson != newJson) 191 | { 192 | File.WriteAllText("original.json", originalJson); 193 | File.WriteAllText("new.json", newJson); 194 | 195 | throw new InvalidDataException($"The saved storage {tableName} should not differ from the original one!"); 196 | } 197 | 198 | using (var originalStream = localDBCProvider.StreamForTableName(tableName, build)) 199 | using (var originalMS = new MemoryStream()) 200 | using (var savedStream = tmpDBCProvider.StreamForTableName(tableName, build)) 201 | using (var savedMS = new MemoryStream()) 202 | { 203 | if (originalStream.Length != savedStream.Length) 204 | { 205 | Console.WriteLine(originalStream.Length + " vs " + savedStream.Length + " for " + tableName + " " + build); 206 | continue; 207 | } 208 | 209 | originalStream.CopyTo(originalMS); 210 | originalStream.Position = 0; 211 | 212 | savedStream.CopyTo(savedMS); 213 | savedStream.Position = 0; 214 | 215 | var originalBytes = originalMS.ToArray(); 216 | var savedBytes = savedMS.ToArray(); 217 | 218 | if (!originalBytes.SequenceEqual(savedBytes)) 219 | Console.WriteLine("Different bytes for " + tableName + " " + build); 220 | else 221 | identicalTables++; 222 | } 223 | //} 224 | //catch (Exception e) 225 | //{ 226 | // Console.WriteLine("Failed to load rewritten " + tableName + " for build " + build + ": " + e.Message + "\n" + e.StackTrace); 227 | //} 228 | } 229 | 230 | Console.WriteLine(successfulTables + "/" + attemptedTables + " written succesfully"); 231 | Console.WriteLine(identicalTables + "/" + successfulTables + " identical (" + (successfulTables - identicalTables) + " different)"); 232 | 233 | Assert.AreEqual(attemptedTables, successfulTables); 234 | 235 | //Directory.Delete("tmp", true); 236 | } 237 | } 238 | } 239 | -------------------------------------------------------------------------------- /DBCD.sln: -------------------------------------------------------------------------------- 1 |  2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | # Visual Studio Version 17 4 | VisualStudioVersion = 17.9.34616.47 5 | MinimumVisualStudioVersion = 10.0.40219.1 6 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "DBCD", "DBCD\DBCD.csproj", "{8267DA23-F629-4756-8D14-F532617FBCE4}" 7 | EndProject 8 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "DBCD.IO", "DBCD.IO\DBCD.IO.csproj", "{3172C02B-502C-449F-81D3-47BBF47297BD}" 9 | EndProject 
10 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "DBCD.Tests", "DBCD.Tests\DBCD.Tests.csproj", "{96CFC512-3818-487F-8FB6-7632E340ABB9}" 11 | EndProject 12 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "DBCD.Benchmark", "DBCD.Benchmark\DBCD.Benchmark.csproj", "{5EA3D33B-9901-48CB-B558-0D8A90F2CD7C}" 13 | EndProject 14 | Global 15 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 16 | Debug|Any CPU = Debug|Any CPU 17 | Release|Any CPU = Release|Any CPU 18 | EndGlobalSection 19 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 20 | {8267DA23-F629-4756-8D14-F532617FBCE4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 21 | {8267DA23-F629-4756-8D14-F532617FBCE4}.Debug|Any CPU.Build.0 = Debug|Any CPU 22 | {8267DA23-F629-4756-8D14-F532617FBCE4}.Release|Any CPU.ActiveCfg = Release|Any CPU 23 | {8267DA23-F629-4756-8D14-F532617FBCE4}.Release|Any CPU.Build.0 = Release|Any CPU 24 | {3172C02B-502C-449F-81D3-47BBF47297BD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 25 | {3172C02B-502C-449F-81D3-47BBF47297BD}.Debug|Any CPU.Build.0 = Debug|Any CPU 26 | {3172C02B-502C-449F-81D3-47BBF47297BD}.Release|Any CPU.ActiveCfg = Release|Any CPU 27 | {3172C02B-502C-449F-81D3-47BBF47297BD}.Release|Any CPU.Build.0 = Release|Any CPU 28 | {96CFC512-3818-487F-8FB6-7632E340ABB9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 29 | {96CFC512-3818-487F-8FB6-7632E340ABB9}.Debug|Any CPU.Build.0 = Debug|Any CPU 30 | {96CFC512-3818-487F-8FB6-7632E340ABB9}.Release|Any CPU.ActiveCfg = Release|Any CPU 31 | {96CFC512-3818-487F-8FB6-7632E340ABB9}.Release|Any CPU.Build.0 = Release|Any CPU 32 | {5EA3D33B-9901-48CB-B558-0D8A90F2CD7C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 33 | {5EA3D33B-9901-48CB-B558-0D8A90F2CD7C}.Debug|Any CPU.Build.0 = Debug|Any CPU 34 | {5EA3D33B-9901-48CB-B558-0D8A90F2CD7C}.Release|Any CPU.ActiveCfg = Release|Any CPU 35 | {5EA3D33B-9901-48CB-B558-0D8A90F2CD7C}.Release|Any CPU.Build.0 = Release|Any CPU 36 | EndGlobalSection 37 | GlobalSection(SolutionProperties) = preSolution 38 | HideSolutionNode = FALSE 39 | EndGlobalSection 40 | GlobalSection(ExtensibilityGlobals) = postSolution 41 | SolutionGuid = {A4100D30-F9D2-4353-9F01-1D5B67BC2BEA} 42 | EndGlobalSection 43 | EndGlobal 44 | -------------------------------------------------------------------------------- /DBCD/DBCD.cs: -------------------------------------------------------------------------------- 1 | using DBCD.IO; 2 | using DBCD.Providers; 3 | using DBDefsLib; 4 | using System; 5 | using System.Collections.Generic; 6 | using System.IO; 7 | 8 | namespace DBCD 9 | { 10 | 11 | public class DBCD 12 | { 13 | private readonly IDBCProvider dbcProvider; 14 | private readonly IDBDProvider dbdProvider; 15 | 16 | private readonly bool useBDBD; 17 | private readonly Dictionary BDBDCache; 18 | 19 | /// 20 | /// Creates a DBCD instance that uses the given DBC and DBD providers. 21 | /// 22 | /// The IDBCProvider for DBC files. 23 | /// The IDBDProvider for DBD files. 24 | public DBCD(IDBCProvider dbcProvider, IDBDProvider dbdProvider) 25 | { 26 | this.dbcProvider = dbcProvider; 27 | this.dbdProvider = dbdProvider; 28 | this.useBDBD = false; 29 | } 30 | 31 | /// 32 | /// Creates a DBCD instance that uses the given DBC provider and BDBD stream. 33 | /// 34 | /// The IDBCProvider for DBC files. 35 | /// The stream for a BDBD (Binary DBD) file to load all definitions from. 36 | /// WARNING: The usage of a BDBD file for supplying definitions is still experimental and currently has little to no advantages. 
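/// A minimal usage sketch (the provider wiring and build string are illustrative):
///   var dbcd = new DBCD(new FilesystemDBCProvider("DBCCache/10.2.5.52432"),
///                       GithubBDBDProvider.GetStream());
///   IDBCDStorage map = dbcd.Load("Map", "10.2.5.52432");
///   Console.WriteLine(map[2574]["MapName_lang"]);   // "Dragon Isles"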
37 | public DBCD(IDBCProvider dbcProvider, Stream bdbdStream) 38 | { 39 | this.dbcProvider = dbcProvider; 40 | this.useBDBD = true; 41 | this.BDBDCache = BDBDReader.Read(bdbdStream); 42 | } 43 | 44 | /// 45 | /// Loads a table by its name, and optionally build/locale. 46 | /// 47 | /// The name of the DBC/DB2 table to load. 48 | /// The source build of the table formatted as x.x.x.xxxxx (optional, recommended in general but required for tables older than Legion). 49 | /// The locale to use (optional, recommended for DBC files from WotLK or older). 50 | /// An instance of representing the loaded table. 51 | public IDBCDStorage Load(string tableName, string build = null, Locale locale = Locale.None) 52 | { 53 | var dbcStream = this.dbcProvider.StreamForTableName(tableName, build); 54 | 55 | Structs.DBDefinition databaseDefinition; 56 | 57 | if (!useBDBD) 58 | { 59 | var dbdStream = this.dbdProvider.StreamForTableName(tableName, build); 60 | var dbdReader = new DBDReader(); 61 | databaseDefinition = dbdReader.Read(dbdStream); 62 | } 63 | else 64 | { 65 | if (!BDBDCache.TryGetValue(tableName, out var tableInfo)) 66 | throw new FileNotFoundException($"Table {tableName} not found in BDBD."); 67 | 68 | databaseDefinition = tableInfo.dbd; 69 | } 70 | 71 | var builder = new DBCDBuilder(locale); 72 | 73 | var dbReader = new DBParser(dbcStream); 74 | var definition = builder.Build(dbReader, databaseDefinition, tableName, build); 75 | 76 | var type = typeof(DBCDStorage<>).MakeGenericType(definition.Item1); 77 | 78 | return (IDBCDStorage)Activator.CreateInstance(type, new object[2] { 79 | dbReader, 80 | definition.Item2 81 | }); 82 | } 83 | } 84 | 85 | public enum Locale 86 | { 87 | None = -1, 88 | EnUS = 0, 89 | EnGB = EnUS, 90 | KoKR = 1, 91 | FrFR = 2, 92 | DeDE = 3, 93 | EnCN = 4, 94 | ZhCN = EnCN, 95 | EnTW = 5, 96 | ZhTW = EnTW, 97 | EsES = 6, 98 | EsMX = 7, 99 | /* Available from TBC 2.1.0.6692 */ 100 | RuRU = 8, 101 | PtPT = 10, 102 | PtBR = PtPT, 103 | ItIT = 11, 104 | } 105 | } -------------------------------------------------------------------------------- /DBCD/DBCD.csproj: -------------------------------------------------------------------------------- 1 |  2 | 3 | 4 | netstandard2.0;net8.0;net9.0 5 | 6 | 7 | 8 | true 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /DBCD/DBCDBuilder.cs: -------------------------------------------------------------------------------- 1 | using DBDefsLib; 2 | using DBCD.IO; 3 | using DBCD.IO.Attributes; 4 | using System; 5 | using System.Collections.Generic; 6 | using System.IO; 7 | using System.Linq; 8 | using System.Reflection; 9 | using System.Reflection.Emit; 10 | 11 | namespace DBCD 12 | { 13 | 14 | public struct DBCDInfo 15 | { 16 | internal string tableName; 17 | 18 | internal string[] availableColumns; 19 | } 20 | 21 | internal class DBCDBuilder 22 | { 23 | private ModuleBuilder moduleBuilder; 24 | private int locStringSize; 25 | private readonly Locale locale; 26 | 27 | internal DBCDBuilder(Locale locale = Locale.None) 28 | { 29 | var assemblyName = new AssemblyName("DBCDDefinitons"); 30 | var assemblyBuilder = AssemblyBuilder.DefineDynamicAssembly(assemblyName, AssemblyBuilderAccess.Run); 31 | var moduleBuilder = assemblyBuilder.DefineDynamicModule(assemblyName.Name); 32 | 33 | this.moduleBuilder = moduleBuilder; 34 | this.locStringSize = 1; 35 | this.locale = locale; 36 | } 37 | 38 | internal Tuple Build(DBParser dbcReader, Structs.DBDefinition 
databaseDefinition, string name, string build) 39 | { 40 | if (name == null) 41 | name = Guid.NewGuid().ToString(); 42 | 43 | Structs.VersionDefinitions? versionDefinition = null; 44 | 45 | if (!string.IsNullOrWhiteSpace(build)) 46 | { 47 | var dbBuild = new Build(build); 48 | locStringSize = GetLocStringSize(dbBuild); 49 | Utils.GetVersionDefinitionByBuild(databaseDefinition, dbBuild, out versionDefinition); 50 | } 51 | 52 | if (versionDefinition == null && dbcReader.LayoutHash != 0) 53 | { 54 | var layoutHash = dbcReader.LayoutHash.ToString("X8"); 55 | Utils.GetVersionDefinitionByLayoutHash(databaseDefinition, layoutHash, out versionDefinition); 56 | } 57 | 58 | if (versionDefinition == null) 59 | { 60 | throw new FileNotFoundException("No definition found for this file."); 61 | } 62 | 63 | if (locStringSize > 1 && (int)locale >= locStringSize) 64 | { 65 | throw new FormatException("Invalid locale for this file."); 66 | } 67 | 68 | var typeBuilder = moduleBuilder.DefineType(name, TypeAttributes.Public); 69 | 70 | var fields = versionDefinition.Value.definitions; 71 | var columns = new List(fields.Length); 72 | bool localiseStrings = locale != Locale.None; 73 | 74 | var metadataIndex = 0; 75 | foreach (var fieldDefinition in fields) 76 | { 77 | var columnDefinition = databaseDefinition.columnDefinitions[fieldDefinition.name]; 78 | bool isLocalisedString = columnDefinition.type == "locstring" && locStringSize > 1; 79 | 80 | 81 | Type fieldType; 82 | if (fieldDefinition.isRelation && fieldDefinition.isNonInline) 83 | { 84 | fieldType = fieldDefinition.arrLength == 0 ? typeof(int) : typeof(int[]); 85 | } 86 | else 87 | { 88 | fieldType = FieldDefinitionToType(fieldDefinition, columnDefinition, localiseStrings); 89 | } 90 | 91 | var field = typeBuilder.DefineField(fieldDefinition.name, fieldType, FieldAttributes.Public); 92 | 93 | columns.Add(fieldDefinition.name); 94 | 95 | if (fieldDefinition.isID) 96 | { 97 | AddAttribute(field, fieldDefinition.isNonInline); 98 | } 99 | 100 | if (!fieldDefinition.isNonInline) 101 | { 102 | if (dbcReader.ColumnMeta != null && metadataIndex < dbcReader.ColumnMeta.Length) 103 | { 104 | AddAttribute(field, dbcReader.ColumnMeta[metadataIndex].Size); 105 | } 106 | metadataIndex++; 107 | } 108 | 109 | if (fieldDefinition.arrLength > 1) 110 | { 111 | AddAttribute(field, fieldDefinition.arrLength); 112 | } 113 | 114 | if (fieldDefinition.isRelation) 115 | { 116 | var metaDataFieldType = FieldDefinitionToType(fieldDefinition, columnDefinition, localiseStrings); 117 | AddAttribute(field, metaDataFieldType, fieldDefinition.isNonInline); 118 | } 119 | 120 | if (!string.IsNullOrEmpty(columnDefinition.foreignTable)) 121 | { 122 | AddAttribute(field, columnDefinition.foreignTable, columnDefinition.foreignColumn); 123 | } 124 | 125 | if (isLocalisedString) 126 | { 127 | if (localiseStrings) 128 | { 129 | AddAttribute(field, (int)locale, locStringSize); 130 | } 131 | else 132 | { 133 | AddAttribute(field, locStringSize); 134 | // add locstring mask field 135 | typeBuilder.DefineField(fieldDefinition.name + "_mask", typeof(uint), FieldAttributes.Public); 136 | columns.Add(fieldDefinition.name + "_mask"); 137 | } 138 | } 139 | } 140 | 141 | var type = typeBuilder.CreateTypeInfo(); 142 | 143 | var info = new DBCDInfo 144 | { 145 | availableColumns = columns.ToArray(), 146 | tableName = name 147 | }; 148 | 149 | return new Tuple(type, info); 150 | } 151 | 152 | private int GetLocStringSize(Build build) 153 | { 154 | // post wotlk 155 | if (build.expansion >= 4 || build.build > 
12340) 156 | return 1; 157 | 158 | // tbc - wotlk 159 | if (build.build >= 6692) 160 | return 16; 161 | 162 | // alpha - vanilla 163 | return 8; 164 | } 165 | 166 | private void AddAttribute(FieldBuilder field, params object[] parameters) where T : Attribute 167 | { 168 | var constructorParameters = parameters.Select(x => x.GetType()).ToArray(); 169 | var constructorInfo = typeof(T).GetConstructor(constructorParameters); 170 | var attributeBuilder = new CustomAttributeBuilder(constructorInfo, parameters); 171 | field.SetCustomAttribute(attributeBuilder); 172 | } 173 | 174 | private Type FieldDefinitionToType(Structs.Definition field, Structs.ColumnDefinition column, bool localiseStrings) 175 | { 176 | var isArray = field.arrLength != 0; 177 | 178 | switch (column.type) 179 | { 180 | case "int": 181 | { 182 | Type type = null; 183 | var signed = field.isSigned; 184 | 185 | switch (field.size) 186 | { 187 | case 8: 188 | type = signed ? typeof(sbyte) : typeof(byte); 189 | break; 190 | case 16: 191 | type = signed ? typeof(short) : typeof(ushort); 192 | break; 193 | case 32: 194 | type = signed ? typeof(int) : typeof(uint); 195 | break; 196 | case 64: 197 | type = signed ? typeof(long) : typeof(ulong); 198 | break; 199 | } 200 | 201 | return isArray ? type.MakeArrayType() : type; 202 | } 203 | case "string": 204 | return isArray ? typeof(string[]) : typeof(string); 205 | case "locstring": 206 | { 207 | if (isArray && locStringSize > 1) 208 | throw new NotSupportedException("Localised string arrays are not supported"); 209 | 210 | return (!localiseStrings && locStringSize > 1) || isArray ? typeof(string[]) : typeof(string); 211 | } 212 | case "float": 213 | return isArray ? typeof(float[]) : typeof(float); 214 | default: 215 | throw new ArgumentException("Unable to construct C# type from " + column.type); 216 | } 217 | } 218 | } 219 | } 220 | -------------------------------------------------------------------------------- /DBCD/DBCDStorage.cs: -------------------------------------------------------------------------------- 1 | using DBCD.Helpers; 2 | 3 | using DBCD.IO; 4 | using DBCD.IO.Attributes; 5 | using System; 6 | using System.Collections; 7 | using System.Collections.Generic; 8 | using System.Collections.ObjectModel; 9 | using System.Dynamic; 10 | using System.IO; 11 | using System.Linq; 12 | using System.Reflection; 13 | 14 | namespace DBCD 15 | { 16 | public class DBCDRow : DynamicObject 17 | { 18 | public int ID; 19 | 20 | private readonly dynamic raw; 21 | private FieldAccessor fieldAccessor; 22 | 23 | internal DBCDRow(int ID, dynamic raw, FieldAccessor fieldAccessor) 24 | { 25 | this.raw = raw; 26 | this.fieldAccessor = fieldAccessor; 27 | this.ID = ID; 28 | } 29 | 30 | public override bool TryGetMember(GetMemberBinder binder, out object result) 31 | { 32 | return fieldAccessor.TryGetMember(this.raw, binder.Name, out result); 33 | } 34 | 35 | public override bool TrySetMember(SetMemberBinder binder, object value) 36 | { 37 | return fieldAccessor.TrySetMember(this.raw, binder.Name, value); 38 | } 39 | 40 | public object this[string fieldName] 41 | { 42 | get => fieldAccessor[this.raw, fieldName]; 43 | set => fieldAccessor[this.raw, fieldName] = value; 44 | } 45 | 46 | public object this[string fieldName, int index] 47 | { 48 | get => ((Array)this[fieldName]).GetValue(index); 49 | set => ((Array)this[fieldName]).SetValue(value, index); 50 | } 51 | 52 | public T Field(string fieldName) 53 | { 54 | return (T)fieldAccessor[this.raw, fieldName]; 55 | } 56 | 57 | public T FieldAs(string 
fieldName) 58 | { 59 | return fieldAccessor.GetMemberAs(this.raw, fieldName); 60 | } 61 | 62 | public override IEnumerable GetDynamicMemberNames() 63 | { 64 | return fieldAccessor.FieldNames; 65 | } 66 | 67 | public T AsType() => (T)raw; 68 | 69 | public Type GetUnderlyingType() => raw.GetType(); 70 | } 71 | 72 | public class DynamicKeyValuePair 73 | { 74 | public T Key; 75 | public dynamic Value; 76 | 77 | internal DynamicKeyValuePair(T key, dynamic value) 78 | { 79 | this.Key = key; 80 | this.Value = value; 81 | } 82 | } 83 | 84 | public class RowConstructor 85 | { 86 | private readonly IDBCDStorage storage; 87 | public RowConstructor(IDBCDStorage storage) 88 | { 89 | this.storage = storage; 90 | } 91 | 92 | public bool Create(int index, Action f) 93 | { 94 | var constructedRow = storage.ConstructRow(index); 95 | if (storage.ContainsKey(index)) 96 | return false; 97 | else 98 | { 99 | f(constructedRow); 100 | storage.Add(index, constructedRow); 101 | } 102 | 103 | return true; 104 | } 105 | } 106 | 107 | public interface IDBCDStorage : IEnumerable>, IDictionary 108 | { 109 | string[] AvailableColumns { get; } 110 | uint LayoutHash { get; } 111 | 112 | DBCDRow ConstructRow(int index); 113 | 114 | Dictionary GetEncryptedSections(); 115 | Dictionary GetEncryptedIDs(); 116 | 117 | void ApplyingHotfixes(HotfixReader hotfixReader); 118 | void ApplyingHotfixes(HotfixReader hotfixReader, HotfixReader.RowProcessor processor); 119 | 120 | void Save(string filename); 121 | 122 | Dictionary ToDictionary(); 123 | } 124 | 125 | public class DBCDStorage : Dictionary, IDBCDStorage where T : class, new() 126 | { 127 | private readonly FieldAccessor fieldAccessor; 128 | private readonly Storage storage; 129 | private readonly DBCDInfo info; 130 | private readonly DBParser parser; 131 | 132 | string[] IDBCDStorage.AvailableColumns => this.info.availableColumns; 133 | public uint LayoutHash => this.storage.LayoutHash; 134 | public override string ToString() => $"{this.info.tableName}"; 135 | 136 | public DBCDStorage(Stream stream, DBCDInfo info) : this(new DBParser(stream), info) { } 137 | 138 | public DBCDStorage(DBParser dbParser, DBCDInfo info) : this(dbParser, dbParser.GetRecords(), info) { } 139 | 140 | public DBCDStorage(DBParser parser, Storage storage, DBCDInfo info) : base(new Dictionary()) 141 | { 142 | this.info = info; 143 | this.fieldAccessor = new FieldAccessor(typeof(T), info.availableColumns); 144 | this.parser = parser; 145 | this.storage = storage; 146 | 147 | foreach (var record in storage) 148 | base.Add(record.Key, new DBCDRow(record.Key, record.Value, fieldAccessor)); 149 | } 150 | 151 | 152 | public void ApplyingHotfixes(HotfixReader hotfixReader) 153 | { 154 | this.ApplyingHotfixes(hotfixReader, null); 155 | } 156 | 157 | public void ApplyingHotfixes(HotfixReader hotfixReader, HotfixReader.RowProcessor processor) 158 | { 159 | var mutableStorage = this.storage.ToDictionary(k => k.Key, v => v.Value); 160 | 161 | hotfixReader.ApplyHotfixes(mutableStorage, this.parser, processor); 162 | 163 | #if NETSTANDARD2_0 164 | foreach (var record in mutableStorage) 165 | base[record.Key] = new DBCDRow(record.Key, record.Value, fieldAccessor); 166 | #else 167 | foreach (var (id, row) in mutableStorage) 168 | base[id] = new DBCDRow(id, row, fieldAccessor); 169 | #endif 170 | foreach (var key in base.Keys.Except(mutableStorage.Keys)) 171 | base.Remove(key); 172 | } 173 | 174 | IEnumerator> IEnumerable>.GetEnumerator() 175 | { 176 | var enumerator = GetEnumerator(); 177 | while 
(enumerator.MoveNext()) 178 | yield return new DynamicKeyValuePair(enumerator.Current.Key, enumerator.Current.Value); 179 | } 180 | 181 | public Dictionary GetEncryptedSections() => this.parser.GetEncryptedSections(); 182 | public Dictionary GetEncryptedIDs() => this.parser.GetEncryptedIDs(); 183 | 184 | public void Save(string filename) 185 | { 186 | storage.Clear(); 187 | #if NETSTANDARD2_0 188 | var sortedDictionary = new SortedDictionary(this); 189 | foreach (var record in sortedDictionary) 190 | storage.Add(record.Key, record.Value.AsType()); 191 | #else 192 | foreach (var (id, record) in new SortedDictionary(this)) 193 | storage.Add(id, record.AsType()); 194 | #endif 195 | storage.Save(filename); 196 | } 197 | 198 | public DBCDRow ConstructRow(int index) 199 | { 200 | T raw = new(); 201 | var fields = typeof(T).GetFields(); 202 | // Array Fields need to be initialized to fill their length 203 | var arrayFields = fields.Where(x => x.FieldType.IsArray); 204 | foreach (var arrayField in arrayFields) 205 | { 206 | var count = arrayField.GetCustomAttribute().Count; 207 | var elementType = arrayField.FieldType.GetElementType(); 208 | var isStringField = elementType == typeof(string); 209 | 210 | Array rowRecords = Array.CreateInstance(elementType, count); 211 | for (var i = 0; i < count; i++) 212 | { 213 | if (isStringField) 214 | { 215 | rowRecords.SetValue(string.Empty, i); 216 | } else 217 | { 218 | rowRecords.SetValue(Activator.CreateInstance(elementType), i); 219 | } 220 | } 221 | arrayField.SetValue(raw, rowRecords); 222 | } 223 | 224 | // String Fields need to be initialized to empty string rather than null; 225 | var stringFields = fields.Where(x => x.FieldType == typeof(string)); 226 | foreach (var stringField in stringFields) 227 | { 228 | stringField.SetValue(raw, string.Empty); 229 | } 230 | return new DBCDRow(index, raw, fieldAccessor); 231 | } 232 | 233 | public Dictionary ToDictionary() 234 | { 235 | return this; 236 | } 237 | } 238 | } -------------------------------------------------------------------------------- /DBCD/Extensions/DB2ReaderExtension.cs: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wowdev/DBCD/12feeb97cefc2a3fcc43ed224801552565512f5f/DBCD/Extensions/DB2ReaderExtension.cs -------------------------------------------------------------------------------- /DBCD/Helpers/FieldAccessor.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.Collections.Generic; 3 | using System.Globalization; 4 | using System.Linq.Expressions; 5 | 6 | namespace DBCD.Helpers 7 | { 8 | internal class FieldAccessor 9 | { 10 | public IEnumerable FieldNames => _getters.Keys; 11 | 12 | private readonly Dictionary> _getters; 13 | private readonly Dictionary> _setters; 14 | 15 | private readonly CultureInfo _convertCulture; 16 | 17 | public FieldAccessor(Type type, string[] fields) 18 | { 19 | _getters = new Dictionary>(); 20 | _setters = new Dictionary>(); 21 | _convertCulture = CultureInfo.InvariantCulture; 22 | 23 | var ownerParameter = Expression.Parameter(typeof(object)); 24 | var valueParameter = Expression.Parameter(typeof(object)); 25 | 26 | foreach (var field in fields) 27 | { 28 | var fieldExpression = Expression.Field(Expression.Convert(ownerParameter, type), field); 29 | 30 | var conversionExpression = Expression.Convert(fieldExpression, typeof(object)); 31 | var getterExpression = Expression.Lambda>(conversionExpression, ownerParameter); 32 | 
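// The lambda compiled on the next line is roughly "(object owner) => (object)((T)owner).<field>":
// one delegate per column, built once when the accessor is constructed, so dynamic row access
// never falls back to FieldInfo.GetValue/SetValue reflection on each lookup.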
_getters.Add(field, getterExpression.Compile()); 33 | 34 | 35 | var assignExpression = Expression.Assign(fieldExpression, Expression.Convert(valueParameter, fieldExpression.Type)); 36 | var setterExpression = Expression.Lambda>(assignExpression, ownerParameter, valueParameter); 37 | _setters.Add(field, setterExpression.Compile()); 38 | } 39 | } 40 | 41 | public object this[object obj, string key] 42 | { 43 | get => _getters[key](obj); 44 | set => _setters[key](obj, value); 45 | } 46 | 47 | public bool TryGetMember(object obj, string field, out object value) 48 | { 49 | if (_getters.TryGetValue(field, out var getter)) 50 | { 51 | value = getter(obj); 52 | return true; 53 | } 54 | else 55 | { 56 | value = null; 57 | return false; 58 | } 59 | } 60 | 61 | public bool TrySetMember(object obj, string field, object value) 62 | { 63 | if (_setters.TryGetValue(field, out var setter)) 64 | { 65 | setter(obj, value); 66 | return true; 67 | } 68 | 69 | return false; 70 | } 71 | 72 | public T GetMemberAs(object obj, string field) 73 | { 74 | var value = _getters[field](obj); 75 | 76 | if (value is T direct) 77 | return direct; 78 | 79 | if (value is Array array) 80 | { 81 | return ConvertArray(array); 82 | } 83 | else 84 | { 85 | return (T)Convert.ChangeType(value, typeof(T), _convertCulture); 86 | } 87 | } 88 | 89 | private T ConvertArray(Array array) 90 | { 91 | var type = typeof(T); 92 | if (!type.IsArray) 93 | throw new InvalidCastException($"Cannot convert type '{array.GetType().Name}' to '{type.Name}'"); 94 | 95 | var elementType = type.GetElementType(); 96 | var result = Array.CreateInstance(elementType, array.Length); 97 | 98 | for (int i = 0; i < result.Length; i++) 99 | { 100 | object value = Convert.ChangeType(array.GetValue(i), elementType, _convertCulture); 101 | result.SetValue(value, i); 102 | } 103 | 104 | return (T)(object)result; 105 | } 106 | } 107 | } -------------------------------------------------------------------------------- /DBCD/Providers/FilesystemDBCProvider.cs: -------------------------------------------------------------------------------- 1 | using System.Collections.Generic; 2 | using System.IO; 3 | 4 | namespace DBCD.Providers 5 | { 6 | /// 7 | /// Loads DBC/DB2 files from a local directory. 
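/// Lookup is by table name only: "{directory}/{tableName}.db2" is tried first, then ".dbc";
/// the build argument only participates as part of the cache key when useCache is enabled.
/// Example (the directory is illustrative):
///   var provider = new FilesystemDBCProvider("DBCCache/9.2.7.45745", useCache: true);
///   var dbcd = new DBCD(provider, new GithubDBDProvider(true));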
8 | /// 9 | public class FilesystemDBCProvider : IDBCProvider 10 | { 11 | private readonly string Directory; 12 | private readonly bool UseCache; 13 | public Dictionary<(string, string), byte[]> Cache = new Dictionary<(string, string), byte[]>(); 14 | 15 | public FilesystemDBCProvider(string directory, bool useCache = false) => (this.Directory, this.UseCache) = (directory, useCache); 16 | 17 | public Stream StreamForTableName(string tableName, string build) 18 | { 19 | if (UseCache && Cache.TryGetValue((tableName, build), out var cachedData)) 20 | { 21 | return new MemoryStream(cachedData); 22 | } 23 | else 24 | { 25 | if (File.Exists(Path.Combine(Directory, $"{tableName}.db2"))) 26 | { 27 | var bytes = File.ReadAllBytes(Path.Combine(Directory, $"{tableName}.db2")); 28 | if (UseCache) 29 | Cache[(tableName, build)] = bytes; 30 | return new MemoryStream(bytes); 31 | } 32 | 33 | if (File.Exists(Path.Combine(Directory, $"{tableName}.dbc"))) 34 | { 35 | var bytes = File.ReadAllBytes(Path.Combine(Directory, $"{tableName}.dbc")); 36 | if(UseCache) 37 | Cache[(tableName, build)] = File.ReadAllBytes(Path.Combine(Directory, $"{tableName}.dbc")); 38 | return new MemoryStream(bytes); 39 | } 40 | 41 | throw new FileNotFoundException("Unable to find DBC/DB2 file on disk for table " + tableName); 42 | } 43 | } 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /DBCD/Providers/FilesystemDBDProvider.cs: -------------------------------------------------------------------------------- 1 | using DBDefsLib; 2 | using System.Collections.Generic; 3 | using System.IO; 4 | using System.Linq; 5 | 6 | namespace DBCD.Providers 7 | { 8 | /// 9 | /// Loads DBD files from a local directory, such as a checked out copy of WoWDBDefs. 10 | /// 11 | public class FilesystemDBDProvider : IDBDProvider 12 | { 13 | private readonly string directory; 14 | 15 | public FilesystemDBDProvider(string directory) => this.directory = directory; 16 | 17 | public Dictionary<(string, string), byte[]> Cache = new Dictionary<(string, string), byte[]>(); 18 | 19 | /// 20 | /// Function that checks if a certain build exists in a DBD file. Note that this causes a full read/parse of the file. 
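/// A build matches when it appears in a version's explicit build list or falls inside one of
/// its build ranges. Example (the definitions path is illustrative):
///   var dbds = new FilesystemDBDProvider(@"D:\Projects\WoWDBDefs\definitions");
///   if (dbds.ContainsBuild("Map", "9.2.7.45745")) { /* safe to call Load for this build */ }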
21 | /// </summary> 22 | public bool ContainsBuild(string tableName, string build) 23 | { 24 | if (!File.Exists(Path.Combine(directory, $"{tableName}.dbd"))) 25 | return false; 26 | 27 | var reader = new DBDReader(); 28 | var definition = reader.Read(StreamForTableName(tableName)); 29 | var targetBuild = new Build(build); 30 | 31 | foreach (var versionDefinition in definition.versionDefinitions) 32 | { 33 | if (versionDefinition.builds.Contains(targetBuild)) 34 | return true; 35 | 36 | if (versionDefinition.buildRanges.Any(range => range.Contains(targetBuild))) 37 | return true; 38 | } 39 | 40 | return false; 41 | } 42 | 43 | public Stream StreamForTableName(string tableName, string build = null) 44 | { 45 | if (Cache.TryGetValue((tableName, build), out var cachedData)) 46 | return new MemoryStream(cachedData); 47 | else 48 | { 49 | var data = File.ReadAllBytes(Path.Combine(directory, $"{tableName}.dbd")); 50 | Cache[(tableName, build)] = data; 51 | return new MemoryStream(data); 52 | } 53 | } 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /DBCD/Providers/GithubBDBDProvider.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.IO; 3 | using System.Net.Http; 4 | 5 | namespace DBCD.Providers 6 | { 7 | public static class GithubBDBDProvider 8 | { 9 | public static string BDBDUrl = "https://github.com/wowdev/WoWDBDefs/releases/latest/download/all.bdbd"; 10 | 11 | private static string CachePath { get; } = "BDBDCache/"; 12 | private static readonly TimeSpan CacheExpiryTime = new TimeSpan(1, 0, 0, 0); 13 | 14 | public static Stream GetStream(bool forceNew = false) 15 | { 16 | var currentFile = Path.Combine(CachePath, "all.bdbd"); 17 | if (File.Exists(currentFile)) 18 | { 19 | var fileInfo = new FileInfo(currentFile); 20 | if (fileInfo.Length == 0) 21 | { 22 | File.Delete(currentFile); 23 | } 24 | else 25 | { 26 | if (!forceNew && DateTime.Now - fileInfo.LastWriteTime < CacheExpiryTime) 27 | return new MemoryStream(File.ReadAllBytes(currentFile)); 28 | } 29 | } 30 | 31 | if (!Directory.Exists(CachePath)) 32 | Directory.CreateDirectory(CachePath); 33 | 34 | var bdbdStream = new MemoryStream(); 35 | using (var fileStream = new FileStream(currentFile, FileMode.Create, FileAccess.Write)) 36 | using (var client = new HttpClient()) 37 | { 38 | var response = client.GetAsync(BDBDUrl).Result; 39 | response.EnsureSuccessStatusCode(); 40 | response.Content.CopyToAsync(bdbdStream).Wait(); 41 | bdbdStream.Position = 0; // rewind before writing to the cache file; copying from the end would leave the cached file empty 42 | bdbdStream.CopyTo(fileStream); 43 | bdbdStream.Position = 0; 44 | } 45 | return bdbdStream; 46 | } 47 | } 48 | } -------------------------------------------------------------------------------- /DBCD/Providers/GithubDBDProvider.cs: -------------------------------------------------------------------------------- 1 | using System; 2 | using System.IO; 3 | using System.Net.Http; 4 | 5 | namespace DBCD.Providers 6 | { 7 | public class GithubDBDProvider : IDBDProvider 8 | { 9 | private static Uri BaseURI = new Uri("https://raw.githubusercontent.com/wowdev/WoWDBDefs/master/definitions/"); 10 | private HttpClient client = new HttpClient(); 11 | 12 | private static bool UseCache = false; 13 | private static string CachePath { get; } = "DBDCache/"; 14 | private static readonly TimeSpan CacheExpiryTime = new TimeSpan(1, 0, 0, 0); 15 | 16 | public GithubDBDProvider(bool useCache = false) 17 | { 18 | UseCache = useCache; 19 | if(useCache && !Directory.Exists(CachePath)) 20 | Directory.CreateDirectory(CachePath); 21 |
22 | client.BaseAddress = BaseURI; 23 | } 24 | 25 | public Stream StreamForTableName(string tableName, string build = null) 26 | { 27 | var query = $"{tableName}.dbd"; 28 | 29 | if(UseCache) 30 | { 31 | var cacheFile = Path.Combine(CachePath, query); 32 | if(File.Exists(cacheFile)) 33 | { 34 | var lastWrite = File.GetLastWriteTime(cacheFile); 35 | if(DateTime.Now - lastWrite < CacheExpiryTime) 36 | return new MemoryStream(File.ReadAllBytes(cacheFile)); 37 | } 38 | } 39 | 40 | var bytes = client.GetByteArrayAsync(query).Result; 41 | 42 | if(UseCache) 43 | File.WriteAllBytes(Path.Combine(CachePath, query), bytes); 44 | 45 | return new MemoryStream(bytes); 46 | } 47 | } 48 | } -------------------------------------------------------------------------------- /DBCD/Providers/IDBCProvider.cs: -------------------------------------------------------------------------------- 1 | using System.IO; 2 | 3 | namespace DBCD.Providers 4 | { 5 | public interface IDBCProvider 6 | { 7 | Stream StreamForTableName(string tableName, string build); 8 | } 9 | } -------------------------------------------------------------------------------- /DBCD/Providers/IDBDProvider.cs: -------------------------------------------------------------------------------- 1 | using System.IO; 2 | 3 | namespace DBCD.Providers 4 | { 5 | public interface IDBDProvider 6 | { 7 | Stream StreamForTableName(string tableName, string build = null); 8 | } 9 | } -------------------------------------------------------------------------------- /Directory.Build.props: -------------------------------------------------------------------------------- 1 | 2 | 3 | embedded 4 | latest 5 | 2.1.4 6 | 7 | 8 | WoWDev 9 | Library for reading/writing World of Warcraft's DBC/DB2 files 10 | git 11 | https://github.com/wowdev/DBCD.git 12 | true 13 | MIT 14 | README.md 15 | library,DBC,DB2,World of Warcraft 16 | https://github.com/wowdev/DBCD 17 | nuget-icon.png 18 | true 19 | true 20 | snupkg 21 | true 22 | 23 | 24 | True 25 | $(NoWarn);CS1591 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | -------------------------------------------------------------------------------- /Directory.Build.targets: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | $([System.IO.Path]::Combine('$(IntermediateOutputPath)','$(TargetFrameworkMoniker).AssemblyAttributes$(DefaultLanguageSourceExtension)')) 6 | 7 | 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 wowdev 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # DBCD 2 | C# library for reading and writing [DBC](https://wowdev.wiki/DBC)/[DB2](https://wowdev.wiki/DB2) database files from World of Warcraft with built-in support for [WoWDBDefs](https://github.com/wowdev/WoWDBDefs) definitions. 3 | 4 | ## Features 5 | - Reading of `WDBC` ([.dbc](https://wowdev.wiki/DBC)) and `WDB2`-`WDB6`, `WDC1`-`WDC5` ([.db2](https://wowdev.wiki/DB2)). 6 | - Experimental writing (`WDC3` is known to work; the other formats likely do as well, but are largely untested against actual WoW clients). 7 | - Applying hotfixes (DBCache.bin). 8 | 9 | ## Projects 10 | ### DBCD 11 | Contains the glue between DBCD.IO, DBDefsLib and the providers. 12 | 13 | ### DBCD.IO 14 | Contains the actual reading and writing of DBC/DB2 files. 15 | 16 | ## Limitations 17 | - _(Reading/Writing)_ Relies on [WoWDBDefs](https://github.com/wowdev/WoWDBDefs) (DBDs) for table structures; cannot load tables without DBDs (yet). 18 | - _(Writing)_ Does not support writing out DB2s with multiple sections. 19 | 20 | ## Example Usage 21 | ```csharp 22 | // A FilesystemDBCProvider to load DBCs/DB2s from a directory on disk. 23 | var localDBCProvider = new FilesystemDBCProvider("D:/DBC"); 24 | 25 | // A FilesystemDBDProvider to load DBDs from a folder; you can also use GithubDBDProvider to download them directly from GitHub. 26 | var localDBDProvider = new FilesystemDBDProvider("D:/WoWDBDefs/definitions"); 27 | 28 | // A new DBCD instance with the specified DBC/DBD providers. 29 | var dbcd = new DBCD(localDBCProvider, localDBDProvider); 30 | 31 | // Loads Map.db2 (note: table name without extension) for build 11.0.2.56044 (the build may be needed to select the correct definition). 32 | var storage = dbcd.Load("Map", "11.0.2.56044"); 33 | 34 | // Get the row with ID 2552. 35 | var row = storage[2552]; 36 | 37 | // Outputs "Khaz Algar (Surface)". 38 | Console.WriteLine((string)row["MapName_lang"]); 39 | ``` 40 | 41 | -------------------------------------------------------------------------------- /nuget-icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wowdev/DBCD/12feeb97cefc2a3fcc43ed224801552565512f5f/nuget-icon.png --------------------------------------------------------------------------------
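As a closing note on the provider classes reproduced above, here is one more README-style usage sketch. It is illustrative only and not part of the repository: it assumes nothing beyond the APIs shown in this document (the `FilesystemDBCProvider`/`FilesystemDBDProvider`/`GithubDBDProvider` constructors, `ContainsBuild`, `dbcd.Load` and the row indexer), and it reuses the placeholder paths, table name and build from the README example.

```csharp
using System;
using DBCD;
using DBCD.Providers;

// Local DB2s with the optional byte[] cache enabled, so repeated loads of the
// same table/build are served from memory instead of re-reading the file.
var dbcProvider = new FilesystemDBCProvider("D:/DBC", useCache: true);

// Prefer a local WoWDBDefs checkout; fall back to the GitHub definitions
// (cached on disk for a day) when the local copy lacks the requested build.
// ContainsBuild fully parses the DBD file, so treat it as an occasional check.
var localDBDProvider = new FilesystemDBDProvider("D:/WoWDBDefs/definitions");
IDBDProvider dbdProvider = localDBDProvider.ContainsBuild("Map", "11.0.2.56044")
    ? localDBDProvider
    : (IDBDProvider)new GithubDBDProvider(useCache: true);

var dbcd = new DBCD(dbcProvider, dbdProvider);
var storage = dbcd.Load("Map", "11.0.2.56044");

// Same row/field access pattern as the README example.
Console.WriteLine((string)storage[2552]["MapName_lang"]);
```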