├── .gitattributes
├── .gitignore
├── ArcTool
│   ├── ArcTool.csproj
│   ├── GarArchive.cs
│   ├── IarArchive.cs
│   ├── Program.cs
│   └── WarArchive.cs
├── SAS5Lib
│   ├── Misc.cs
│   ├── SAS5Lib.csproj
│   ├── SecArcFileList.cs
│   ├── SecCode
│   │   ├── ExecutorCommand.cs
│   │   ├── Expression.cs
│   │   ├── ExpressionOperation.cs
│   │   ├── OrphanExpression.cs
│   │   ├── ScenarioCode.cs
│   │   └── SecCodeProp.cs
│   ├── SecCodePage.cs
│   ├── SecOption
│   │   ├── OptionManager.cs
│   │   └── OptionType.cs
│   ├── SecResource
│   │   └── ResourceManager.cs
│   ├── SecScenarioProgram.cs
│   ├── SecSource.cs
│   └── SecVariable
│       ├── ObjectType.cs
│       ├── PresetVariables.cs
│       └── VariableManager.cs
├── SAS5Tool.sln
├── SecTool
│   ├── Program.cs
│   ├── SecTextTool.cs
│   └── SecTool.csproj
└── readme.md
/.gitattributes:
--------------------------------------------------------------------------------
1 | ###############################################################################
2 | # Set default behavior to automatically normalize line endings.
3 | ###############################################################################
4 | * text=auto
5 |
6 | ###############################################################################
7 | # Set default behavior for command prompt diff.
8 | #
9 | # This is needed for earlier builds of msysgit that do not have it on by
10 | # default for csharp files.
11 | # Note: This is only used by the command line.
12 | ###############################################################################
13 | #*.cs diff=csharp
14 |
15 | ###############################################################################
16 | # Set the merge driver for project and solution files
17 | #
18 | # Merging from the command prompt will add diff markers to the files if there
19 | # are conflicts (Merging from VS is not affected by the settings below, in VS
20 | # the diff markers are never inserted). Diff markers may cause the following
21 | # file extensions to fail to load in VS. An alternative would be to treat
22 | # these files as binary and thus will always conflict and require user
23 | # intervention with every merge. To do so, just uncomment the entries below
24 | ###############################################################################
25 | #*.sln merge=binary
26 | #*.csproj merge=binary
27 | #*.vbproj merge=binary
28 | #*.vcxproj merge=binary
29 | #*.vcproj merge=binary
30 | #*.dbproj merge=binary
31 | #*.fsproj merge=binary
32 | #*.lsproj merge=binary
33 | #*.wixproj merge=binary
34 | #*.modelproj merge=binary
35 | #*.sqlproj merge=binary
36 | #*.wwaproj merge=binary
37 |
38 | ###############################################################################
39 | # behavior for image files
40 | #
41 | # image files are treated as binary by default.
42 | ###############################################################################
43 | #*.jpg binary
44 | #*.png binary
45 | #*.gif binary
46 |
47 | ###############################################################################
48 | # diff behavior for common document formats
49 | #
50 | # Convert binary document formats to text before diffing them. This feature
51 | # is only available from the command line. Turn it on by uncommenting the
52 | # entries below.
53 | ###############################################################################
54 | #*.doc diff=astextplain
55 | #*.DOC diff=astextplain
56 | #*.docx diff=astextplain
57 | #*.DOCX diff=astextplain
58 | #*.dot diff=astextplain
59 | #*.DOT diff=astextplain
60 | #*.pdf diff=astextplain
61 | #*.PDF diff=astextplain
62 | #*.rtf diff=astextplain
63 | #*.RTF diff=astextplain
64 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | ## Ignore Visual Studio temporary files, build results, and
2 | ## files generated by popular Visual Studio add-ons.
3 | ##
4 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
5 |
6 | # User-specific files
7 | *.rsuser
8 | *.suo
9 | *.user
10 | *.userosscache
11 | *.sln.docstates
12 |
13 | # User-specific files (MonoDevelop/Xamarin Studio)
14 | *.userprefs
15 |
16 | # Mono auto generated files
17 | mono_crash.*
18 |
19 | # Build results
20 | [Dd]ebug/
21 | [Dd]ebugPublic/
22 | [Rr]elease/
23 | [Rr]eleases/
24 | x64/
25 | x86/
26 | [Ww][Ii][Nn]32/
27 | [Aa][Rr][Mm]/
28 | [Aa][Rr][Mm]64/
29 | bld/
30 | [Bb]in/
31 | [Oo]bj/
32 | [Oo]ut/
33 | [Ll]og/
34 | [Ll]ogs/
35 |
36 | # Visual Studio 2015/2017 cache/options directory
37 | .vs/
38 | # Uncomment if you have tasks that create the project's static files in wwwroot
39 | #wwwroot/
40 |
41 | # Visual Studio 2017 auto generated files
42 | Generated\ Files/
43 |
44 | # MSTest test Results
45 | [Tt]est[Rr]esult*/
46 | [Bb]uild[Ll]og.*
47 |
48 | # NUnit
49 | *.VisualState.xml
50 | TestResult.xml
51 | nunit-*.xml
52 |
53 | # Build Results of an ATL Project
54 | [Dd]ebugPS/
55 | [Rr]eleasePS/
56 | dlldata.c
57 |
58 | # Benchmark Results
59 | BenchmarkDotNet.Artifacts/
60 |
61 | # .NET Core
62 | project.lock.json
63 | project.fragment.lock.json
64 | artifacts/
65 |
66 | # ASP.NET Scaffolding
67 | ScaffoldingReadMe.txt
68 |
69 | # StyleCop
70 | StyleCopReport.xml
71 |
72 | # Files built by Visual Studio
73 | *_i.c
74 | *_p.c
75 | *_h.h
76 | *.ilk
77 | *.meta
78 | *.obj
79 | *.iobj
80 | *.pch
81 | *.pdb
82 | *.ipdb
83 | *.pgc
84 | *.pgd
85 | *.rsp
86 | *.sbr
87 | *.tlb
88 | *.tli
89 | *.tlh
90 | *.tmp
91 | *.tmp_proj
92 | *_wpftmp.csproj
93 | *.log
94 | *.vspscc
95 | *.vssscc
96 | .builds
97 | *.pidb
98 | *.svclog
99 | *.scc
100 |
101 | # Chutzpah Test files
102 | _Chutzpah*
103 |
104 | # Visual C++ cache files
105 | ipch/
106 | *.aps
107 | *.ncb
108 | *.opendb
109 | *.opensdf
110 | *.sdf
111 | *.cachefile
112 | *.VC.db
113 | *.VC.VC.opendb
114 |
115 | # Visual Studio profiler
116 | *.psess
117 | *.vsp
118 | *.vspx
119 | *.sap
120 |
121 | # Visual Studio Trace Files
122 | *.e2e
123 |
124 | # TFS 2012 Local Workspace
125 | $tf/
126 |
127 | # Guidance Automation Toolkit
128 | *.gpState
129 |
130 | # ReSharper is a .NET coding add-in
131 | _ReSharper*/
132 | *.[Rr]e[Ss]harper
133 | *.DotSettings.user
134 |
135 | # TeamCity is a build add-in
136 | _TeamCity*
137 |
138 | # DotCover is a Code Coverage Tool
139 | *.dotCover
140 |
141 | # AxoCover is a Code Coverage Tool
142 | .axoCover/*
143 | !.axoCover/settings.json
144 |
145 | # Coverlet is a free, cross platform Code Coverage Tool
146 | coverage*.json
147 | coverage*.xml
148 | coverage*.info
149 |
150 | # Visual Studio code coverage results
151 | *.coverage
152 | *.coveragexml
153 |
154 | # NCrunch
155 | _NCrunch_*
156 | .*crunch*.local.xml
157 | nCrunchTemp_*
158 |
159 | # MightyMoose
160 | *.mm.*
161 | AutoTest.Net/
162 |
163 | # Web workbench (sass)
164 | .sass-cache/
165 |
166 | # Installshield output folder
167 | [Ee]xpress/
168 |
169 | # DocProject is a documentation generator add-in
170 | DocProject/buildhelp/
171 | DocProject/Help/*.HxT
172 | DocProject/Help/*.HxC
173 | DocProject/Help/*.hhc
174 | DocProject/Help/*.hhk
175 | DocProject/Help/*.hhp
176 | DocProject/Help/Html2
177 | DocProject/Help/html
178 |
179 | # Click-Once directory
180 | publish/
181 |
182 | # Publish Web Output
183 | *.[Pp]ublish.xml
184 | *.azurePubxml
185 | # Note: Comment the next line if you want to checkin your web deploy settings,
186 | # but database connection strings (with potential passwords) will be unencrypted
187 | *.pubxml
188 | *.publishproj
189 |
190 | # Microsoft Azure Web App publish settings. Comment the next line if you want to
191 | # checkin your Azure Web App publish settings, but sensitive information contained
192 | # in these scripts will be unencrypted
193 | PublishScripts/
194 |
195 | # NuGet Packages
196 | *.nupkg
197 | # NuGet Symbol Packages
198 | *.snupkg
199 | # The packages folder can be ignored because of Package Restore
200 | **/[Pp]ackages/*
201 | # except build/, which is used as an MSBuild target.
202 | !**/[Pp]ackages/build/
203 | # Uncomment if necessary however generally it will be regenerated when needed
204 | #!**/[Pp]ackages/repositories.config
205 | # NuGet v3's project.json files produces more ignorable files
206 | *.nuget.props
207 | *.nuget.targets
208 |
209 | # Microsoft Azure Build Output
210 | csx/
211 | *.build.csdef
212 |
213 | # Microsoft Azure Emulator
214 | ecf/
215 | rcf/
216 |
217 | # Windows Store app package directories and files
218 | AppPackages/
219 | BundleArtifacts/
220 | Package.StoreAssociation.xml
221 | _pkginfo.txt
222 | *.appx
223 | *.appxbundle
224 | *.appxupload
225 |
226 | # Visual Studio cache files
227 | # files ending in .cache can be ignored
228 | *.[Cc]ache
229 | # but keep track of directories ending in .cache
230 | !?*.[Cc]ache/
231 |
232 | # Others
233 | ClientBin/
234 | ~$*
235 | *~
236 | *.dbmdl
237 | *.dbproj.schemaview
238 | *.jfm
239 | *.pfx
240 | *.publishsettings
241 | orleans.codegen.cs
242 |
243 | # Including strong name files can present a security risk
244 | # (https://github.com/github/gitignore/pull/2483#issue-259490424)
245 | #*.snk
246 |
247 | # Since there are multiple workflows, uncomment next line to ignore bower_components
248 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
249 | #bower_components/
250 |
251 | # RIA/Silverlight projects
252 | Generated_Code/
253 |
254 | # Backup & report files from converting an old project file
255 | # to a newer Visual Studio version. Backup files are not needed,
256 | # because we have git ;-)
257 | _UpgradeReport_Files/
258 | Backup*/
259 | UpgradeLog*.XML
260 | UpgradeLog*.htm
261 | ServiceFabricBackup/
262 | *.rptproj.bak
263 |
264 | # SQL Server files
265 | *.mdf
266 | *.ldf
267 | *.ndf
268 |
269 | # Business Intelligence projects
270 | *.rdl.data
271 | *.bim.layout
272 | *.bim_*.settings
273 | *.rptproj.rsuser
274 | *- [Bb]ackup.rdl
275 | *- [Bb]ackup ([0-9]).rdl
276 | *- [Bb]ackup ([0-9][0-9]).rdl
277 |
278 | # Microsoft Fakes
279 | FakesAssemblies/
280 |
281 | # GhostDoc plugin setting file
282 | *.GhostDoc.xml
283 |
284 | # Node.js Tools for Visual Studio
285 | .ntvs_analysis.dat
286 | node_modules/
287 |
288 | # Visual Studio 6 build log
289 | *.plg
290 |
291 | # Visual Studio 6 workspace options file
292 | *.opt
293 |
294 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
295 | *.vbw
296 |
297 | # Visual Studio LightSwitch build output
298 | **/*.HTMLClient/GeneratedArtifacts
299 | **/*.DesktopClient/GeneratedArtifacts
300 | **/*.DesktopClient/ModelManifest.xml
301 | **/*.Server/GeneratedArtifacts
302 | **/*.Server/ModelManifest.xml
303 | _Pvt_Extensions
304 |
305 | # Paket dependency manager
306 | .paket/paket.exe
307 | paket-files/
308 |
309 | # FAKE - F# Make
310 | .fake/
311 |
312 | # CodeRush personal settings
313 | .cr/personal
314 |
315 | # Python Tools for Visual Studio (PTVS)
316 | __pycache__/
317 | *.pyc
318 |
319 | # Cake - Uncomment if you are using it
320 | # tools/**
321 | # !tools/packages.config
322 |
323 | # Tabs Studio
324 | *.tss
325 |
326 | # Telerik's JustMock configuration file
327 | *.jmconfig
328 |
329 | # BizTalk build output
330 | *.btp.cs
331 | *.btm.cs
332 | *.odx.cs
333 | *.xsd.cs
334 |
335 | # OpenCover UI analysis results
336 | OpenCover/
337 |
338 | # Azure Stream Analytics local run output
339 | ASALocalRun/
340 |
341 | # MSBuild Binary and Structured Log
342 | *.binlog
343 |
344 | # NVidia Nsight GPU debugger configuration file
345 | *.nvuser
346 |
347 | # MFractors (Xamarin productivity tool) working folder
348 | .mfractor/
349 |
350 | # Local History for Visual Studio
351 | .localhistory/
352 |
353 | # BeatPulse healthcheck temp database
354 | healthchecksdb
355 |
356 | # Backup folder for Package Reference Convert tool in Visual Studio 2017
357 | MigrationBackup/
358 |
359 | # Ionide (cross platform F# VS Code tools) working folder
360 | .ionide/
361 |
362 | # Fody - auto-generated XML schema
363 | FodyWeavers.xsd
364 | /SecTool/Properties/launchSettings.json
365 | /ArcTool/Properties/launchSettings.json
366 |
--------------------------------------------------------------------------------
/ArcTool/ArcTool.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Exe
5 | true
6 | net8.0
7 | enable
8 | enable
9 | $(SolutionDir)Build\bin
10 | $(SolutionDir)Build\obj\ArcTool\
11 | False
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/ArcTool/GarArchive.cs:
--------------------------------------------------------------------------------
1 | using System.Diagnostics;
2 |
3 | namespace ArcTool
4 | {
5 | class GarArchive
6 | {
7 | class Block
8 | {
9 | public long Offset;
10 | public long Size;
11 |
12 | public Block(long offset, long size)
13 | {
14 | Offset = offset;
15 | Size = size;
16 | }
17 |
18 | public override string ToString()
19 | {
20 | return $"Offset: {Offset}, Size: {Size}";
21 | }
22 | }
23 |
24 | struct Collection
25 | {
26 | //int notused
27 | public byte[] CollectionProperty;
28 | //int collectionElemSize
29 | public List<CollectionElement> CollectionElem;
30 |
31 | public Collection(BinaryReader br)
32 | {
33 | var a = br.ReadInt32();
34 | var propertySize = br.ReadInt32();
35 | CollectionProperty = br.ReadBytes(propertySize);
36 | var b = br.ReadInt32();
37 | CollectionElem = [];
38 |
39 | var elemCount = br.ReadInt32();
40 | for (int i = 0; i < elemCount; i++)
41 | {
42 | CollectionElem.Add(new CollectionElement(br));
43 | }
44 | }
45 |
46 | public Collection(List<Tuple<int, string>> input)
47 | {
48 | //"\x03BID\x04NAME"
49 | CollectionProperty = [ 0x03, 0x42, 0x49, 0x44, 0x04, 0x4E, 0x41, 0x4D, 0x45 ];
50 | CollectionElem = [];
51 |
52 | foreach(var item in input)
53 | {
54 | CollectionElem.Add(new CollectionElement(item.Item1, item.Item2));
55 | }
56 | }
57 |
58 | public void Write(BinaryWriter bw)
59 | {
60 | bw.Write(2);
61 | bw.Write(CollectionProperty.Length);
62 | bw.Write(CollectionProperty);
63 | bw.Write(2);
64 | bw.Write(CollectionElem.Count);
65 | foreach(var elem in CollectionElem)
66 | {
67 | elem.Write(bw);
68 | }
69 | }
70 | }
71 |
72 | struct CollectionElement
73 | {
74 | //int propertyCount = 2;
75 | public List<Property> Properties;//BlockID, Name
76 |
77 | public CollectionElement(BinaryReader br)
78 | {
79 | Properties = [];
80 | var propCount = br.ReadInt32();
81 | for (int i = 0; i < propCount; i++)
82 | {
83 | Property p = new()
84 | {
85 | PropertyIndex = br.ReadInt32(),
86 | Value = new PropertyValueType(br.ReadBytes(br.ReadInt32()))
87 | };
88 | Properties.Add(p);
89 | }
90 | }
91 |
92 | public CollectionElement(int fileIndex, string fileName)
93 | {
94 | Properties = [];
95 | Properties.Add(new Property(0, new PropertyValueType(fileIndex)));
96 | Properties.Add(new Property(4, new PropertyValueType(fileName)));
97 | }
98 |
99 | public void Write(BinaryWriter bw)
100 | {
101 | bw.Write(2);
102 | foreach(var prop in Properties)
103 | {
104 | prop.Write(bw);
105 | }
106 | }
107 | }
108 |
109 | struct Property
110 | {
111 | public int PropertyIndex;//0 or 4
112 | //int valueSize
113 | public PropertyValueType Value;
114 |
115 | public Property(int index, PropertyValueType val)
116 | {
117 | PropertyIndex = index;
118 | Value = val;
119 | }
120 |
121 | public void Write(BinaryWriter bw)
122 | {
123 | bw.Write(PropertyIndex);
124 | if(Value.Value is int numVal)
125 | {
126 | bw.Write(5);
127 | bw.Write((byte)5);
128 | bw.Write(numVal);
129 | }
130 | else if(Value.Value is string strVal)
131 | {
132 | var b = CodepageManager.Instance.ImportGetBytes(strVal);
133 | bw.Write(b.Length + 1);
134 | bw.Write((byte)1);
135 | bw.Write(b);
136 | }
137 | }
138 |
139 | public override string ToString()
140 | {
141 | return $"{Value}";
142 | }
143 | }
144 |
145 | struct PropertyValueType
146 | {
147 | public byte Type;
148 | public object? Value;
149 |
150 | public PropertyValueType(byte[] input)
151 | {
152 | Type = input[0];
153 | switch(input[0])
154 | {
155 | case 1://File
156 | case 2:
157 | case 3://BID
158 | case 4://NAME
159 | Value = CodepageManager.Instance.ImportGetString(input[1..]);
160 | break;
161 | case 5://File
162 | Value = input[1] | input[2] << 8 | input[3] << 16 | input[4] << 24;
163 | break;
164 |
165 | }
166 | }
167 |
168 | public PropertyValueType(object obj)
169 | {
170 | Value = obj;
171 | }
172 |
173 | public override string ToString()
174 | {
175 | return $"{Value}";
176 | }
177 | }
178 |
179 | long m_offset;
180 |
181 | //Block -1:?
182 | //Block 0: Header
183 | //Block 1: コレクション
184 | //Block 2: BlockAllocationTable
185 | //Block n(n>2) : Files
186 | readonly Dictionary<int, Block> m_blockAllocationTable;
187 | Collection m_collection;
188 | BinaryReader m_arcReader;
189 |
190 | static readonly byte[] unkSectionData = [
191 | 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x74, 0x00, 0x00, 0x00,
192 | 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
193 | 0x01, 0x00, 0x00, 0x00, 0x1C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00,
194 | 0x2C, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x48, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 ];
195 |
196 | public GarArchive(string arcName)
197 | {
198 | m_blockAllocationTable = [];
199 |
200 | m_arcReader = new BinaryReader(File.OpenRead(arcName));
201 |
202 | var header = m_arcReader.ReadUInt32();
203 | Trace.Assert(header == 0x20524147);//"GAR "
204 |
205 | var ver1 = m_arcReader.ReadUInt16();
206 | var ver2 = m_arcReader.ReadUInt16();
207 |
208 | m_offset = m_arcReader.ReadInt64();
209 |
210 | var a = m_arcReader.ReadUInt32();
211 | var b = m_arcReader.ReadUInt32();
212 | var c = m_arcReader.ReadUInt32();
213 |
214 |
215 | m_arcReader.BaseStream.Position = m_offset;
216 | var blockCount = m_arcReader.ReadInt32();
217 | var unk = m_arcReader.ReadInt64();//reader.BaseStream.Position + 20
218 | for (int i = 0; i < blockCount; i++)
219 | {
220 | m_blockAllocationTable.TryAdd(m_arcReader.ReadInt32(), new Block(m_arcReader.ReadInt64(), m_arcReader.ReadInt64()));
221 | }
222 |
223 | if(!m_blockAllocationTable.TryGetValue(1, out var collectionBlock))
224 | {
225 | Trace.Assert(false);
226 | return;
227 | }
228 | m_arcReader.BaseStream.Position = collectionBlock.Offset;
229 | m_collection = new Collection(m_arcReader);
230 | }
231 |
232 | public void ExtractTo(string outputPath)
233 | {
234 | if(!Path.Exists(outputPath))
235 | {
236 | Directory.CreateDirectory(outputPath);
237 | }
238 |
239 | foreach(var elem in m_collection.CollectionElem)
240 | {
241 | if(elem.Properties[0].Value.Value is int blockIndex && elem.Properties[1].Value.Value is string fileName)
242 | {
243 | if(m_blockAllocationTable.TryGetValue(blockIndex, out var collectionBlock))
244 | {
245 | m_arcReader.BaseStream.Position = collectionBlock.Offset;
246 | File.WriteAllBytes(Path.Combine(outputPath, fileName), m_arcReader.ReadBytes(Convert.ToInt32(collectionBlock.Size)));
247 | }
248 | }
249 | }
250 | }
251 |
252 | public static List<Tuple<int, string>> Create(string folder, string outputArcName)
253 | {
254 | Dictionary<int, Block> blocks = [];
255 | using var writer = new BinaryWriter(File.Open(Path.Combine(folder, "..", outputArcName), FileMode.Create));
256 | writer.Write(0x20524147);
257 | writer.Write(1);
258 | long blockOffset = writer.BaseStream.Position;
259 | writer.Write(blockOffset);
260 | writer.Write(0);
261 | writer.Write(2);
262 | writer.Write(1);
263 | blocks.Add(0, new Block(0, writer.BaseStream.Position));
264 | blocks.Add(-1, new Block(writer.BaseStream.Position, unkSectionData.Length));
265 | writer.Write(unkSectionData);
266 |
267 | int fileIndex = 3;
268 | List<Tuple<int, string>> fileList = [];
269 | foreach(var file in Directory.EnumerateFiles(folder))
270 | {
271 | byte[] input = File.ReadAllBytes(file);
272 |
273 | blocks.Add(fileIndex, new Block(writer.BaseStream.Position, input.LongLength));
274 | fileList.Add(new (fileIndex, Path.GetFileName(file)));
275 |
276 | writer.Write(input);
277 |
278 | fileIndex++;
279 | }
280 | var curOffset = writer.BaseStream.Position;
281 |
282 | var collection = new Collection(fileList);
283 | collection.Write(writer);
284 | blocks.Add(1, new Block(curOffset, writer.BaseStream.Position - curOffset));
285 |
286 | curOffset = writer.BaseStream.Position;
287 | writer.BaseStream.Position = blockOffset;
288 | writer.Write(curOffset);
289 | writer.BaseStream.Position = curOffset;
290 |
291 | writer.Write(blocks.Count + 1);
292 | curOffset = writer.BaseStream.Position;
293 | writer.Write(curOffset);
294 |
295 | foreach(var k in blocks.Keys)
296 | {
297 | writer.Write(k);
298 | var blk = blocks[k];
299 | writer.Write(blk.Offset);
300 | writer.Write(blk.Size);
301 | }
302 | //last block
303 | writer.Write(2);
304 | writer.Write(curOffset - 4);
305 | var endOffset = writer.BaseStream.Position + 8;
306 | writer.Write(endOffset - curOffset + 24);
307 |
308 | writer.BaseStream.Position = curOffset;
309 | writer.Write(endOffset + 20);
310 |
311 | return fileList;
312 | }
313 | }
314 | }
315 |
--------------------------------------------------------------------------------
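
Usage sketch for the GarArchive class above: the "GAR " container is laid out as a block-allocation table (block 0 = header, block 1 = the collection/file-name table, block 2 = the allocation table itself, blocks 3 and up = file data), and the class exposes one entry point for unpacking and one for repacking. The driver code below is a hypothetical sketch, not part of the repository; it assumes it runs inside the ArcTool assembly (the class is internal), and the archive/folder names are placeholders.

    // Hypothetical driver code (archive and folder names are placeholders).
    var gar = new GarArchive("data.gar");            // parses the header, block table and collection
    gar.ExtractTo("data_gar_extracted");             // writes each named block out as a file
    // Repacking: every file in the folder becomes block 3, 4, ...; the returned list
    // pairs each block index with the file name stored in the new collection.
    var fileList = GarArchive.Create("data_gar_extracted", "data_new.gar");
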
/ArcTool/IarArchive.cs:
--------------------------------------------------------------------------------
1 | using System.Diagnostics;
2 | using System.IO.Compression;
3 | using System.Numerics;
4 | using System.Runtime.Intrinsics;
5 | using System.Runtime.Intrinsics.X86;
6 | using System.Text.RegularExpressions;
7 | using ICSharpCode.SharpZipLib.Zip.Compression;
8 | using ICSharpCode.SharpZipLib.Zip.Compression.Streams;
9 | using SixLabors.ImageSharp;
10 | using SixLabors.ImageSharp.PixelFormats;
11 |
12 | namespace ArcTool
13 | {
14 | class IarArchive
15 | {
16 | public List<long> m_arcFileOffset;
17 | public bool m_isArcLongOffset;
18 | public int m_arcVersion;
19 | public BinaryReader m_reader;
20 | public string m_arcName;
21 |
22 | public class IarImage
23 | {
24 | public static byte[] Deflate(byte[] buffer)
25 | {
26 | Deflater deflater = new Deflater(Deflater.BEST_COMPRESSION);
27 | using (MemoryStream memoryStream = new MemoryStream())
28 | using (DeflaterOutputStream deflaterOutputStream = new DeflaterOutputStream(memoryStream, deflater))
29 | {
30 | deflaterOutputStream.Write(buffer, 0, buffer.Length);
31 | deflaterOutputStream.Flush();
32 | deflaterOutputStream.Finish();
33 |
34 | return memoryStream.ToArray();
35 | }
36 | }
37 |
38 | public static byte[] Inflate(byte[] buffer)
39 | {
40 | byte[] block = new byte[256];
41 | MemoryStream outputStream = new MemoryStream();
42 |
43 | Inflater inflater = new Inflater();
44 | using (MemoryStream memoryStream = new MemoryStream(buffer))
45 | using (InflaterInputStream inflaterInputStream = new InflaterInputStream(memoryStream, inflater))
46 | {
47 | while (true)
48 | {
49 | int numBytes = inflaterInputStream.Read(block, 0, block.Length);
50 | if (numBytes < 1)
51 | break;
52 | outputStream.Write(block, 0, numBytes);
53 | }
54 | }
55 |
56 | return outputStream.ToArray();
57 | }
58 |
59 | public static void Extract(BinaryReader reader, long offset, int arcVersion, Dictionary<int, string> fileListDic, string outputPath)
60 | {
61 | var headPos = offset;
62 | reader.BaseStream.Position = offset;
63 |
64 | var Flags = reader.ReadInt16();
65 | var unk02 = reader.ReadByte();
66 | var Compressed = reader.ReadByte() != 0;
67 | var unk04 = reader.ReadInt32();
68 | var UnpackedSize = reader.ReadInt32();
69 | var PaletteSize = reader.ReadInt32();
70 | var PackedSize = reader.ReadInt32();
71 | var unk14 = reader.ReadInt32();
72 | var OffsetX = reader.ReadInt32();
73 | var OffsetY = reader.ReadInt32();
74 | var Width = reader.ReadInt32();
75 | var Height = reader.ReadInt32();
76 | var Stride = reader.ReadInt32();
77 |
78 | var metadataSize = Convert.ToInt32(GetImageHeaderSize(arcVersion) - (reader.BaseStream.Position - headPos));
79 | var metadataStr = Convert.ToBase64String(Deflate(reader.ReadBytes(metadataSize))).Replace('/', '`');
80 |
81 | var PaletteData = PaletteSize != 0 ? reader.ReadBytes(PaletteSize) : [];
82 |
83 |
84 | byte[]? ImageData;
85 | if (Compressed)
86 | {
87 | ImageData = new byte[UnpackedSize];
88 | IarDecompressor.Unpack(reader, ImageData);
89 | }
90 | else
91 | {
92 | ImageData = reader.ReadBytes(PackedSize);
93 | }
94 |
95 | using var imageDataReader = new BinaryReader(new MemoryStream(ImageData));
96 | //SubLayer
97 | if ((Flags & 0x1000) != 0)
98 | {
99 | using var layerImageOut = File.CreateText($"{outputPath}.layerImg");
100 | layerImageOut.WriteLine($"LayerImg({Width},{Height},{Flags},{OffsetX},{OffsetY},{Stride},{metadataStr});");
101 | int offset_x = 0, offset_y = 0;
102 |
103 | while (imageDataReader.BaseStream.Position != imageDataReader.BaseStream.Length)
104 | {
105 | int cmd = imageDataReader.ReadByte();
106 | switch (cmd)
107 | {
108 | case 0x21:
109 | offset_x += imageDataReader.ReadInt16();
110 | offset_y += imageDataReader.ReadInt16();
111 | break;
112 |
113 | case 0x00:
114 | case 0x20:
115 | {
116 | var indexImg = fileListDic[imageDataReader.ReadInt32()];
117 |
118 | OffsetX -= offset_x;
119 | OffsetY -= offset_y;
120 | if (cmd == 0x20)
121 | {
122 | layerImageOut.WriteLine($"Mask({indexImg},{offset_x},{offset_y});");
123 | }
124 | else
125 | {
126 | layerImageOut.WriteLine($"Blend({indexImg},{offset_x},{offset_y});");
127 | }
128 | break;
129 | }
130 | default:
131 | Trace.WriteLine($"Unknown layer type 0x{cmd:X8}", "IAR");
132 | break;
133 | }
134 | }
135 | layerImageOut.Flush();
136 | layerImageOut.Close();
137 | }
138 | //SubImage
139 | else if ((Flags & 0x800) != 0)
140 | {
141 | var baseImgName = fileListDic[imageDataReader.ReadInt32()];
142 |
143 | using var writer = new BinaryWriter(File.Open($"{outputPath}.base_{baseImgName}.{metadataStr}.subImg", FileMode.Create));
144 | writer.Write(Flags);
145 | writer.Write(Width);
146 | writer.Write(Height);
147 | writer.Write(OffsetX);
148 | writer.Write(OffsetY);
149 | writer.Write(Stride);
150 | writer.Write(PaletteSize);
151 | if (PaletteSize > 0 && PaletteData != null)
152 | writer.Write(PaletteData);
153 | writer.Write(imageDataReader.ReadBytes(UnpackedSize - 4));
154 | writer.Flush();
155 | writer.Close();
156 | }
157 | else
158 | {
159 | var BPP = (Flags & 0x3E) switch
160 | {
161 | 0x02 => 8,
162 | 0x1C => 24,
163 | 0x3C => 32,
164 | _ => 8
165 | };
166 |
167 | var NewStride = Width * (BPP / 8);
168 | if (Stride != NewStride)
169 | {
170 | var NewImageData = new byte[Height * NewStride];
171 |
172 | for (int i = 0; i < Height; i++)
173 | {
174 | Buffer.BlockCopy(ImageData, i * Stride, NewImageData, i * NewStride, NewStride);
175 | }
176 |
177 | ImageData = NewImageData;
178 | }
179 |
180 | switch (BPP)
181 | {
182 | case 8:
183 | {
184 | using var image = Image.LoadPixelData<L8>(ImageData, Width, Height);
185 | image.SaveAsPng($"{outputPath}.{OffsetX}_{OffsetY}.{metadataStr}.png");
186 | break;
187 | }
188 | case 24:
189 | {
190 | using var image = Image.LoadPixelData<Bgr24>(ImageData, Width, Height);
191 | image.SaveAsPng($"{outputPath}.{OffsetX}_{OffsetY}.{metadataStr}.png");
192 | break;
193 | }
194 | case 32:
195 | {
196 | using var image = Image.LoadPixelData<Bgra32>(ImageData, Width, Height);
197 | image.SaveAsPng($"{outputPath}.{OffsetX}_{OffsetY}.{metadataStr}.png");
198 | break;
199 | }
200 | default: throw new NotSupportedException("Unsupported IAR image format");
201 | }
202 | }
203 | }
204 |
205 | public static void Import(BinaryWriter writer, string inputFile, int fileIndex, Dictionary<string, int> fileMap, int arcVersion)
206 | {
207 | short Flags = 0;
208 | byte unk02 = 0;
209 | byte Compressed;
210 | int unk04 = 0;
211 | int UnpackedSize;
212 | int PaletteSize = 0;
213 | int PackedSize;
214 | int unk14 = 0;
215 | int OffsetX = 0;
216 | int OffsetY = 0;
217 | int Width = 0;
218 | int Height = 0;
219 | int Stride = 0;
220 | byte[]? ImageData = null;
221 | byte[]? PaletteData = null;
222 | byte[]? MetaData = null;
223 | string entryName = "";
224 | switch (Path.GetExtension(inputFile))
225 | {
226 | case ".png":
227 | {
228 | var fn = Path.GetFileName(inputFile);
229 | if(fn.Contains("subImg"))
230 | {
231 | return;
232 | }
233 | var match = Regex.Match(fn, @"(.+)\.(.+)_(.+)\.(.+)\.png");
234 | if(!match.Success)
235 | {
236 | Console.WriteLine($"Invalid png file name: {inputFile}.");
237 | return;
238 | }
239 | entryName = match.Groups[1].Value;
240 | using var image = Image.Load(inputFile);
241 | Width = image.Width;
242 | Height = image.Height;
243 | Stride = image.Width;
244 | OffsetX = Convert.ToInt32(match.Groups[2].Value);
245 | OffsetY = Convert.ToInt32(match.Groups[3].Value);
246 | MetaData = Inflate(Convert.FromBase64String(match.Groups[4].Value.Replace('`', '/')));
247 |
248 | switch (image.PixelType.BitsPerPixel)
249 | {
250 | case 8:
251 | {
252 | Flags = 2;
253 | ImageData = new byte[Stride * Height];
254 | image.CloneAs<L8>().CopyPixelDataTo(ImageData);
255 | break;
256 | }
257 | case 24:
258 | {
259 | Flags = 0x1C;
260 | Stride *= 3;
261 | ImageData = new byte[Stride * Height];
262 | image.CloneAs<Bgr24>().CopyPixelDataTo(ImageData);
263 | break;
264 | }
265 | case 32:
266 | {
267 | Flags = 0x3C;
268 | Stride *= 4;
269 | ImageData = new byte[Stride * Height];
270 | image.CloneAs<Bgra32>().CopyPixelDataTo(ImageData);
271 | break;
272 | }
273 | default:
274 | {
275 | Console.WriteLine($"Unsupported bpp({image.PixelType.BitsPerPixel}): {inputFile}.");
276 | return;
277 | }
278 | }
279 | break;
280 | }
281 | case ".layerImg":
282 | {
283 | entryName = Path.GetFileNameWithoutExtension(inputFile);
284 | var texts = File.ReadAllLines(inputFile);
285 | if (texts.Length == 0)
286 | {
287 | Console.WriteLine($"Empty layerImg: {inputFile}.");
288 | return;
289 | }
290 |
291 | var header = Regex.Match(texts[0], @"LayerImg\((.+),(.+),(.+),(.+),(.+),(.+),(.+)\);");
292 | if(!header.Success)
293 | {
294 | Console.WriteLine($"Invalid layerImg property: {inputFile}.");
295 | return;
296 | }
297 |
298 | Width = Convert.ToInt32(header.Groups[1].Value);
299 | Height = Convert.ToInt32(header.Groups[2].Value);
300 | Flags = Convert.ToInt16(header.Groups[3].Value);
301 | OffsetX = Convert.ToInt32(header.Groups[4].Value);
302 | OffsetY = Convert.ToInt32(header.Groups[5].Value);
303 | Stride = Convert.ToInt32(header.Groups[6].Value);
304 | MetaData = Inflate(Convert.FromBase64String(header.Groups[7].Value.Replace('`', '/')));
305 |
306 | var ms = new MemoryStream();
307 |
308 | {
309 | var imageDataWriter = new BinaryWriter(ms);
310 | short offset_x = 0, offset_y = 0;
311 | for (int i = 1; i < texts.Length; i++)
312 | {
313 | var cmd = Regex.Match(texts[i], @"(Mask|Blend)\((.+),(.+),(.+)\);");
314 |
315 | if (cmd.Success)
316 | {
317 | var x = Convert.ToInt16(cmd.Groups[3].Value);
318 | var y = Convert.ToInt16(cmd.Groups[4].Value);
319 |
320 | if (fileMap.TryGetValue(cmd.Groups[2].Value, out int k))
321 | {
322 | imageDataWriter.Write((byte)0x21);
323 | imageDataWriter.Write(Convert.ToInt16(x - offset_x));
324 | imageDataWriter.Write(Convert.ToInt16(y - offset_y));
325 | imageDataWriter.Write(cmd.Groups[1].Value == "Mask" ? (byte)0x20 : (byte)0x00);
326 | imageDataWriter.Write(k);
327 | }
328 | else
329 | {
330 | Console.WriteLine($"Cannot find file {cmd.Groups[2].Value} needed by layerImg {inputFile}, skipping command \"{texts[i]}\".");
331 | }
332 | offset_x = x;
333 | offset_y = y;
334 | }
335 | else
336 | {
337 | Console.WriteLine($"Invalid layerImg command: {texts[i]}.");
338 | }
339 | }
340 | }
341 | ImageData = ms.ToArray();
342 | break;
343 | }
344 | case ".subImg":
345 | {
346 | var match = Regex.Match(Path.GetFileName(inputFile), @"(.+)\.base_(.+)\.(.+)\.subImg");
347 | if(!match.Success)
348 | {
349 | Console.WriteLine($"Invalid subImage file name.");
350 | return;
351 | }
352 | entryName = match.Groups[1].Value;
353 |
354 | if (!fileMap.TryGetValue(match.Groups[2].Value, out int k))
355 | {
356 | Console.WriteLine($"Cannot find base image {match.Groups[2].Value} needed by {entryName}.");
357 | return;
358 | }
359 |
360 | MetaData = Inflate(Convert.FromBase64String(match.Groups[3].Value.Replace('`', '/')));
361 |
362 | var ms = new MemoryStream();
363 |
364 | {
365 | using var reader = new BinaryReader(File.Open(inputFile, FileMode.Open));
366 | Flags = reader.ReadInt16();
367 | Width = reader.ReadInt32();
368 | Height = reader.ReadInt32();
369 | OffsetX = reader.ReadInt32();
370 | OffsetY = reader.ReadInt32();
371 | Stride = reader.ReadInt32();
372 | PaletteSize = reader.ReadInt32();
373 |
374 | if (PaletteSize > 0)
375 | PaletteData = reader.ReadBytes(PaletteSize);
376 |
377 | var imageDataWriter = new BinaryWriter(ms);
378 | imageDataWriter.Write(k);
379 | imageDataWriter.Write(reader.ReadBytes(Convert.ToInt32(reader.BaseStream.Length - reader.BaseStream.Position)));
380 | }
381 | ImageData = ms.ToArray();
382 | break;
383 | }
384 | }
385 |
386 | Trace.Assert(ImageData != null);
387 | UnpackedSize = PackedSize = ImageData.Length;
388 | var packedData = IarCompressor.Pack(ImageData);
389 |
390 | if (UnpackedSize > packedData.Length)
391 | {
392 | Compressed = 1;
393 | ImageData = packedData;
394 | PackedSize = packedData.Length;
395 | }
396 | else
397 | {
398 | Compressed = 0;
399 | }
400 |
401 | var basePos = writer.BaseStream.Position;
402 |
403 | writer.Write(Flags);
404 | writer.Write(unk02);
405 | writer.Write(Compressed);
406 | writer.Write(unk04);
407 | writer.Write(UnpackedSize);
408 | writer.Write(PaletteSize);
409 | writer.Write(PackedSize);
410 | writer.Write(unk14);
411 | writer.Write(OffsetX);
412 | writer.Write(OffsetY);
413 | writer.Write(Width);
414 | writer.Write(Height);
415 | writer.Write(Stride);
416 |
417 | var metadataSize = Convert.ToInt32(GetImageHeaderSize(arcVersion) - (writer.BaseStream.Position - basePos));
418 | if(MetaData != null)
419 | {
420 | if(MetaData.Length > metadataSize)
421 | {
422 | Console.WriteLine($"Metadata size mismatch ({MetaData.Length}/{metadataSize}), truncating.");
423 | MetaData = MetaData[..metadataSize];
424 | writer.Write(MetaData);
425 | }
426 | else
427 | {
428 | writer.Write(MetaData);
429 | writer.BaseStream.Position += metadataSize - MetaData.Length;
430 | }
431 | }
432 | else
433 | {
434 | writer.BaseStream.Position += metadataSize;
435 | }
436 |
437 |
438 | if (PaletteSize != 0 && PaletteData != null)
439 | {
440 | writer.Write(PaletteData);
441 | }
442 | writer.Write(ImageData);
443 |
444 | fileMap.TryAdd(entryName, fileIndex);
445 | }
446 | static int GetImageHeaderSize(int iarVersion)
447 | {
448 | switch (iarVersion)
449 | {
450 | case 0x1000:
451 | return 0x30;
452 | case 0x2000:
453 | case 0x3000:
454 | return 0x40;
455 | case 0x4000:
456 | case 0x4001:
457 | case 0x4002:
458 | case 0x4003:
459 | return 0x48;
460 | default:
461 | return 0;
462 | }
463 | }
464 |
465 | internal sealed class IarDecompressor
466 | {
467 | class BitHelper
468 | {
469 | readonly BinaryReader m_reader;
470 | int m_bits = 1;
471 |
472 | public BitHelper(BinaryReader reader)
473 | {
474 | m_reader = reader;
475 | }
476 |
477 | public int GetNextBit()
478 | {
479 | if (1 == m_bits)
480 | {
481 | m_bits = m_reader.ReadUInt16() | 0x10000;
482 | }
483 | int b = m_bits & 1;
484 | m_bits >>= 1;
485 | return b;
486 | }
487 | }
488 |
489 | public static void Unpack(BinaryReader input, byte[] output)
490 | {
491 | var bh = new BitHelper(input);
492 |
493 | int dst = 0;
494 | while (dst < output.Length)
495 | {
496 | if (bh.GetNextBit() == 1)
497 | {
498 | output[dst++] = input.ReadByte();
499 | continue;
500 | }
501 | int offset, count;
502 | if (bh.GetNextBit() == 1)// 3 <= duplicate count <= 272
503 | {
504 | //1~8192
505 | int tmp = bh.GetNextBit();
506 | if (bh.GetNextBit() == 1)
507 | offset = 1;
508 | else if (bh.GetNextBit() == 1)
509 | offset = 0x201;
510 | else
511 | {
512 | tmp = (tmp << 1) | bh.GetNextBit();
513 | if (bh.GetNextBit() == 1)
514 | offset = 0x401;
515 | else
516 | {
517 | tmp = (tmp << 1) | bh.GetNextBit();
518 | if (bh.GetNextBit() == 1)
519 | offset = 0x801;
520 | else
521 | {
522 | offset = 0x1001;
523 | tmp = (tmp << 1) | bh.GetNextBit();
524 | }
525 | }
526 | }
527 | offset += (tmp << 8) | input.ReadByte();
528 |
529 | if (bh.GetNextBit() == 1)
530 | count = 3;
531 | else if (bh.GetNextBit() == 1)
532 | count = 4;
533 | else if (bh.GetNextBit() == 1)
534 | count = 5;
535 | else if (bh.GetNextBit() == 1)
536 | count = 6;
537 | else if (bh.GetNextBit() == 1)
538 | count = 7 + bh.GetNextBit();
539 | else if (bh.GetNextBit() == 1)
540 | count = 17 + input.ReadByte(); //17 ~ 272
541 | else
542 | {
543 | //9 ~ 16
544 | count = bh.GetNextBit() << 2;
545 | count |= bh.GetNextBit() << 1;
546 | count |= bh.GetNextBit();
547 | count += 9;
548 | }
549 | }
550 | else//duplicate count == 2
551 | {
552 | count = 2;
553 | if (bh.GetNextBit() == 1)
554 | {
555 | //offset = 0x100 ~ 0x8FF (256 ~ 2303)
556 | offset = bh.GetNextBit() << 10;
557 | offset |= bh.GetNextBit() << 9;
558 | offset |= bh.GetNextBit() << 8;
559 | offset = (offset | input.ReadByte()) + 0x100;
560 | }
561 | else
562 | {
563 | //offset = 0x1 ~ 0xFF
564 | offset = 1 + input.ReadByte();//maximum == 0xFE
565 | if (0x100 == offset)//offset == 0xFF -> End
566 | break;
567 | }
568 | }
569 | CopyOverlapped(output, dst - offset, dst, count);
570 | dst += count;
571 | }
572 | }
573 |
574 | public static void CopyOverlapped(byte[] data, int src, int dst, int count)
575 | {
576 | if (dst > src)
577 | {
578 | while (count > 0)
579 | {
580 | int preceding = Math.Min(dst - src, count);
581 | Buffer.BlockCopy(data, src, data, dst, preceding);
582 | dst += preceding;
583 | count -= preceding;
584 | }
585 | }
586 | else
587 | {
588 | Buffer.BlockCopy(data, src, data, dst, count);
589 | }
590 | }
591 | }
592 |
593 | internal sealed class IarCompressor
594 | {
595 | public class BufferWriter
596 | {
597 | private ushort m_ctl;
598 | private int m_pos;
599 | private readonly List<byte> m_outputBuf;
600 | private readonly List<byte> m_codeBuf;
601 |
602 | static readonly ushort[] elemA = [1, 0x201, 0x401, 0x801, 0x1001];
603 | static readonly ushort[] elemB = [0x40, 0x20, 8, 2, 0];
604 | public BufferWriter()
605 | {
606 | m_outputBuf = [];
607 | m_codeBuf = [];
608 | m_ctl = 0;
609 | m_pos = 0;
610 | }
611 |
612 | void SetBits(ushort val, int bitCount = 1)
613 | {
614 | while (bitCount != 0)
615 | {
616 | if (m_pos == 16)
617 | {
618 | Flush();
619 | }
620 |
621 | m_ctl |= (ushort)((val & 1) << m_pos++);
622 | val >>= 1;
623 |
624 | bitCount--;
625 | }
626 | }
627 |
628 | public void PutUncoded(byte input)
629 | {
630 | SetBits(1);
631 | m_codeBuf.Add(input);
632 | }
633 |
634 | public void PutPair(int offset, int length)
635 | {
636 | if (length < 2)
637 | throw new ArgumentException("Cannot put pair that length lower than 2.");
638 | if (length == 2)
639 | {
640 | SetBits(0, 2);
641 | if (offset <= 0xFF)
642 | {
643 | SetBits(0);
644 | m_codeBuf.Add(Convert.ToByte(offset - 1));
645 | }
646 | else
647 | {
648 | SetBits(1);
649 | offset -= 0x100;
650 | SetBits(Convert.ToUInt16((offset >> 10) & 1), 1);
651 | SetBits(Convert.ToUInt16((offset >> 9) & 1), 1);
652 | SetBits(Convert.ToUInt16((offset >> 8) & 1), 1);
653 | m_codeBuf.Add(Convert.ToByte(offset & 0xFF));
654 | }
655 | }
656 | else
657 | {
658 | //repeats greater than 2 bytes
659 | SetBits(2, 2);
660 | byte offsetPart = (byte)((offset & 0xFF) - 1);
661 |
662 | bool flag = false;
663 | for (int j = 0; j <= 0xF; j++)
664 | {
665 | if (flag)
666 | break;
667 | for (int i = 0; i < 5; i++)
668 | {
669 | if (elemA[i] > offset)
670 | continue;
671 | if ((elemA[i] + (j << 8) + offsetPart) == offset)
672 | {
673 | var bitlen = i switch
674 | {
675 | 0 => 2,
676 | 1 => 3,
677 | 2 => 5,
678 | 3 => 7,
679 | _ => 8,
680 | };
681 |
682 | int code;
683 | if (i < 2)
684 | {
685 | code = (j & 1) << 7 | elemB[i];
686 | }
687 | else if (i < 3)
688 | {
689 | // 1 << 7
690 | code = (j & 1) << 4 | (j & 2) << 6 | elemB[i];
691 | }
692 | else if (i < 4)
693 | {
694 | // 1 << 4 1 << 7
695 | code = (j & 1) << 2 | (j & 2) << 3 | (j & 4) << 5 | elemB[i];
696 | }
697 | else
698 | {
699 | // 1 << 2 1 << 4 1 << 7
700 | code = (j & 1) << 0 | (j & 2) << 1 | (j & 4) << 2 | (j & 8) << 4 | elemB[i];
701 | }
702 |
703 | for (int b = 0; b < bitlen; b++)
704 | {
705 | SetBits((ushort)((code & 0x80) >> 7));
706 | code <<= 1;
707 | }
708 | m_codeBuf.Add(Convert.ToByte(offsetPart));
709 | flag = true;
710 | break;
711 | }
712 | }
713 | }
714 |
715 | switch (length)
716 | {
717 | case 3:
718 | SetBits(1);
719 | break;
720 | case 4:
721 | SetBits(2, 2);//01
722 | break;
723 | case 5:
724 | SetBits(4, 3);//001
725 | break;
726 | case 6:
727 | SetBits(8, 4);//0001
728 | break;
729 | case 7:
730 | SetBits(16, 6);//000010
731 | break;
732 | case 8:
733 | SetBits(48, 6);//000011
734 | break;
735 | default:
736 | {
737 | var count = length;
738 | if (count <= 16)
739 | {
740 | SetBits(0, 6);
741 | count -= 9;
742 | SetBits((ushort)(count >> 2));
743 | SetBits((ushort)(count >> 1));
744 | SetBits((ushort)(count >> 0));
745 | }
746 | else
747 | {
748 | SetBits(32, 6);
749 | count -= 17;
750 | m_codeBuf.Add(Convert.ToByte(count));
751 | }
752 | break;
753 | }
754 | }
755 | }
756 | }
757 |
758 | public void Flush()
759 | {
760 | m_pos = 0;
761 |
762 | m_outputBuf.Add((byte)m_ctl);
763 | m_outputBuf.Add((byte)(m_ctl >> 8));
764 | m_ctl = 0;
765 |
766 | m_outputBuf.AddRange(m_codeBuf);
767 | m_codeBuf.Clear();
768 | }
769 |
770 | public byte[] GetBytes()
771 | {
772 | //Set End Flag
773 | SetBits(0, 3);
774 | m_codeBuf.Add(0xFF);
775 | Flush();
776 | return m_outputBuf.ToArray();
777 | }
778 | }
779 |
780 | //RingBuffer, must be power of 2
781 | const int BufferSize = 1 << 13;
782 | //Maximum matching size
783 | const int SearchSize = 255;
784 | //Minimum pair length
785 | const int THRESHOLD = 2;
786 |
787 | const int NIL = BufferSize;
788 | //Original source: https://github.com/opensource-apple/kext_tools/blob/master/compression.c
789 | public class EncodeState
790 | {
791 | /*
792 | * initialize state, mostly the trees
793 | *
794 | * For i = 0 to BufferSize - 1, rchild[i] and lchild[i] will be the right and left
795 | * children of node i. These nodes need not be initialized. Also, parent[i]
796 | * is the parent of node i. These (parent nodes) are initialized to NIL (= BufferSize), which stands
797 | * for 'not used.' For i = 0 to 255, rchild[BufferSize + i + 1] is the root of the
798 | * tree for strings that begin with character i. These are initialized to NIL.
799 | * Note there are 256 trees.
800 | */
801 | public int[] lchild = new int[BufferSize + 1];
802 | public int[] rchild = Enumerable.Repeat(NIL, BufferSize + 1 + 256).ToArray();
803 | public int[] parent = Enumerable.Repeat(NIL, BufferSize + 1).ToArray();
804 |
805 | public byte[] text_buf = Enumerable.Repeat((byte)0xFF, BufferSize + SearchSize + 1).ToArray();
806 | public int[] text_buf_map = new int[BufferSize + SearchSize + 1];
807 |
808 | public int match_position = 0;
809 | public int match_length = 0;
810 | };
811 |
812 | /*
813 | * Inserts string of (length=SearchSize, text_buf[index..index + SearchSize - 1]) into one of the trees
814 | * (text_buf[index]'th tree) and returns the longest-match position and length
815 | * via the global variables match_position and match_length.
816 | * If match_length = SearchSize, then removes the old node in favor of the new one,
817 | * because the old one will be deleted sooner. Note index plays double role,
818 | * as tree node and position in buffer.
819 | */
820 | static void InsertNode(EncodeState sp, int index)
821 | {
822 | int cmp = 1;
823 | int p = BufferSize + sp.text_buf[index] + 1;//find root node of text_buf[index]'s tree
824 | sp.rchild[index] = sp.lchild[index] = NIL;
825 | sp.match_length = 0;
826 | for (; ; )
827 | {
828 | if (cmp >= 0)
829 | {
830 | if (sp.rchild[p] != NIL)
831 | p = sp.rchild[p];
832 | else
833 | {
834 | sp.rchild[p] = index;
835 | sp.parent[index] = p;
836 | return;
837 | }
838 | }
839 | else
840 | {
841 | if (sp.lchild[p] != NIL)
842 | p = sp.lchild[p];
843 | else
844 | {
845 | sp.lchild[p] = index;
846 | sp.parent[index] = p;
847 | return;
848 | }
849 | }
850 |
851 | //Faster string comparison (32 bytes at a time via AVX2)
852 | var i = 1;
853 | while(i < SearchSize)
854 | {
855 | var u = Vector256.Create(sp.text_buf, index + i);
856 | var v = Vector256.Create(sp.text_buf, p + i);
857 | var w = BitOperations.TrailingZeroCount(~Avx2.MoveMask(Avx2.CompareEqual(u, v)));
858 |
859 | i += w;
860 | if(w != 32)
861 | {
862 | break;
863 | }
864 | }
865 | if (i > SearchSize)
866 | i = SearchSize;
867 |
868 | cmp = sp.text_buf[index + i] - sp.text_buf[p + i];
869 |
870 | if (i > sp.match_length)
871 | {
872 | sp.match_position = p;
873 | if ((sp.match_length = i) >= SearchSize)
874 | break;
875 | }
876 | }
877 | sp.parent[index] = sp.parent[p];
878 | sp.lchild[index] = sp.lchild[p];
879 | sp.rchild[index] = sp.rchild[p];
880 | sp.parent[sp.lchild[p]] = index;
881 | sp.parent[sp.rchild[p]] = index;
882 | if (sp.rchild[sp.parent[p]] == p)
883 | sp.rchild[sp.parent[p]] = index;
884 | else
885 | sp.lchild[sp.parent[p]] = index;
886 | sp.parent[p] = NIL; /* remove p */
887 | }
888 |
889 | /* deletes node p from tree */
890 | static void DeleteNode(EncodeState sp, int p)
891 | {
892 | int q;
893 | if (sp.parent[p] == NIL)
894 | return; /* not in tree */
895 | if (sp.rchild[p] == NIL)
896 | q = sp.lchild[p];
897 | else if (sp.lchild[p] == NIL)
898 | q = sp.rchild[p];
899 | else
900 | {
901 | q = sp.lchild[p];
902 | if (sp.rchild[q] != NIL)
903 | {
904 | do
905 | {
906 | q = sp.rchild[q];
907 | } while (sp.rchild[q] != NIL);
908 | sp.rchild[sp.parent[q]] = sp.lchild[q];
909 | sp.parent[sp.lchild[q]] = sp.parent[q];
910 | sp.lchild[q] = sp.lchild[p];
911 | sp.parent[sp.lchild[p]] = q;
912 | }
913 | sp.rchild[q] = sp.rchild[p];
914 | sp.parent[sp.rchild[p]] = q;
915 | }
916 | sp.parent[q] = sp.parent[p];
917 | if (sp.rchild[sp.parent[p]] == p)
918 | sp.rchild[sp.parent[p]] = q;
919 | else
920 | sp.lchild[sp.parent[p]] = q;
921 | sp.parent[p] = NIL;
922 | }
923 |
924 | public static byte[] Pack(byte[] input)
925 | {
926 | EncodeState sp = new();
927 |
928 | int i;
929 | int len, last_match_length;
930 |
931 | int r = BufferSize - SearchSize;
932 | int s = 0;
933 | int inputIdx = 0;
934 |
935 | /* Read F bytes into the last F bytes of the buffer(wait for search) */
936 | for (len = 0; len < SearchSize && inputIdx < input.Length; len++)
937 | {
938 | sp.text_buf[r + len] = input[inputIdx];
939 | sp.text_buf_map[r + len] = inputIdx;
940 | inputIdx++;
941 | }
942 |
943 | /*
944 | * Insert the whole string just read.
945 | * The global variables match_length and match_position are set.
946 | */
947 | InsertNode(sp, r);
948 |
949 | var bw = new BufferWriter();
950 |
951 | var encode_pos = 0;
952 | do
953 | {
954 | if (encode_pos == 0 || sp.match_length < THRESHOLD)
955 | {
956 | sp.match_length = 1;
957 | bw.PutUncoded(sp.text_buf[r]);
958 | encode_pos++;
959 | }
960 | else
961 | {
962 | var offset = encode_pos - sp.text_buf_map[sp.match_position];
963 |
964 | if(offset > 2048 && sp.match_length == THRESHOLD)
965 | {
966 | sp.match_length = 1;
967 | bw.PutUncoded(sp.text_buf[r]);
968 | encode_pos++;
969 | }
970 | else
971 | {
972 | bw.PutPair(offset, sp.match_length);
973 | encode_pos += sp.match_length;
974 | }
975 | }
976 |
977 | byte c;
978 | last_match_length = sp.match_length;
979 | for (i = 0; i < last_match_length && inputIdx < input.Length; i++)
980 | {
981 | DeleteNode(sp, s); /* Delete old strings and */
982 | c = input[inputIdx];
983 | sp.text_buf[s] = c; /* read new bytes */
984 | sp.text_buf_map[s] = inputIdx;
985 |
986 | /*
987 | * If the position is near the end of buffer, extend the buffer
988 | * to make string comparison easier.
989 | */
990 | if (s < (SearchSize - 1))
991 | {
992 | sp.text_buf[s + BufferSize] = c;
993 | sp.text_buf_map[s + BufferSize] = inputIdx;
994 | }
995 |
996 | inputIdx++;
997 | /* Since this is a ring buffer, increment the position modulo BufferSize. */
998 | s = (s + 1) & (BufferSize - 1);
999 | r = (r + 1) & (BufferSize - 1);
1000 |
1001 | /* Register the string in text_buf[r..r+SearchSize-1] */
1002 | InsertNode(sp, r);
1003 | }
1004 | while (i++ < last_match_length)
1005 | {
1006 | DeleteNode(sp, s);
1007 |
1008 | /* After the end of text, no need to read, */
1009 | s = (s + 1) & (BufferSize - 1);
1010 | r = (r + 1) & (BufferSize - 1);
1011 |
1012 | /* but buffer may not be empty. */
1013 | if ((--len) > 0)
1014 | InsertNode(sp, r);
1015 |
1016 | //Match length can't exceed the input length
1017 | if (sp.match_length > len)
1018 | sp.match_length = 1;
1019 | }
1020 |
1021 | } while (len > 0);
1022 |
1023 | return bw.GetBytes();
1024 | }
1025 | }
1026 | }
1027 |
1028 | public IarArchive(string fileName)
1029 | {
1030 | m_arcFileOffset = [];
1031 | m_arcName = Path.GetFileName(fileName);
1032 | m_reader = new BinaryReader(File.OpenRead(fileName));
1033 |
1034 | Trace.Assert(m_reader.ReadUInt32() == 0x20726169);//"iar "
1035 |
1036 | m_arcVersion = (m_reader.ReadInt16() << 12) | (int)m_reader.ReadInt16();
1037 | m_isArcLongOffset = m_arcVersion >= 0x3000;
1038 |
1039 | var headerSize = m_reader.ReadInt32();//0xC
1040 | var infoSize = m_reader.ReadInt32();//0x14
1041 |
1042 | var trash = m_reader.ReadInt64();
1043 |
1044 | var entryCount = m_reader.ReadInt32();
1045 | var fileCount = m_reader.ReadInt32();
1046 |
1047 | Trace.Assert(entryCount == fileCount);
1048 |
1049 | for(int i = 0; i < entryCount; i++)
1050 | {
1051 | m_arcFileOffset.Add(m_isArcLongOffset ? m_reader.ReadInt64() : m_reader.ReadInt32());
1052 | }
1053 |
1054 | Trace.Assert(m_reader.BaseStream.Position == m_arcFileOffset[0]);
1055 | }
1056 |
1057 | public void ExtractTo(Dictionary<int, string> fileListDic, string outputPath)
1058 | {
1059 | if (!Path.Exists(outputPath))
1060 | {
1061 | Directory.CreateDirectory(outputPath);
1062 | }
1063 |
1064 | foreach (var key in fileListDic.Keys)
1065 | {
1066 | Console.WriteLine($"Writing {fileListDic[key]}...");
1067 | IarImage.Extract(m_reader, m_arcFileOffset[key], m_arcVersion, fileListDic, Path.Combine(outputPath, fileListDic[key]));
1068 | }
1069 | }
1070 |
1071 | public void ExtractSingle(Dictionary<int, string> fileListDic, string file, string outputPath)
1072 | {
1073 | try
1074 | {
1075 | var k = fileListDic.First(p => p.Value == file);
1076 |
1077 | if (!Path.Exists(outputPath))
1078 | {
1079 | Directory.CreateDirectory(outputPath);
1080 | }
1081 |
1082 | Console.WriteLine($"Writing {file}...");
1083 | IarImage.Extract(m_reader, m_arcFileOffset[k.Key], m_arcVersion, fileListDic, Path.Combine(outputPath, fileListDic[k.Key]));
1084 | }
1085 | catch(Exception ex)
1086 | {
1087 | Console.WriteLine(ex.Message);
1088 | Console.WriteLine(ex.StackTrace);
1089 | }
1090 | }
1091 |
1092 | public static Dictionary<string, int> Create(string folder, string outputArcName)
1093 | {
1094 | Dictionary<string, int> fileList = [];
1095 |
1096 | var normalImgList = Directory.EnumerateFiles(folder, "*.png", SearchOption.TopDirectoryOnly);
1097 | var layerImgList = Directory.EnumerateFiles(folder, "*.layerImg", SearchOption.TopDirectoryOnly);
1098 | var subImgList = Directory.EnumerateFiles(folder, "*.subImg", SearchOption.TopDirectoryOnly);
1099 |
1100 | using var writer = new BinaryWriter(File.Open(Path.Combine(folder, "..", outputArcName), FileMode.Create));
1101 | writer.Write(0x20726169);
1102 | writer.Write(0x00010004);//Lock version to 0x4001
1103 | writer.Write(0xC);
1104 | writer.Write(0x14);
1105 | writer.Write(0);
1106 | writer.Write(0);
1107 |
1108 | var fileCountOffset = writer.BaseStream.Position;
1109 | writer.BaseStream.Position += 8 * (normalImgList.Count() + layerImgList.Count() + subImgList.Count() + 1);
1110 |
1111 | List<long> fileOffsets = [];
1112 |
1113 | int inArcIndex = 0;
1114 |
1115 | {
1116 | foreach(var file in normalImgList)
1117 | {
1118 | Console.WriteLine($"WritingImg: {file} ...");
1119 | fileOffsets.Add(writer.BaseStream.Position);
1120 | IarImage.Import(writer, file, inArcIndex++, fileList, 0x4001);
1121 | }
1122 |
1123 | foreach(var file in layerImgList)
1124 | {
1125 | Console.WriteLine($"WritingLayerImg: {file} ...");
1126 | fileOffsets.Add(writer.BaseStream.Position);
1127 | IarImage.Import(writer, file, inArcIndex++, fileList, 0x4001);
1128 | }
1129 |
1130 | foreach (var file in subImgList)
1131 | {
1132 | Console.WriteLine($"WritingSubImg: {file} ...");
1133 | fileOffsets.Add(writer.BaseStream.Position);
1134 | IarImage.Import(writer, file, inArcIndex++, fileList, 0x4001);
1135 | }
1136 | }
1137 |
1138 | writer.BaseStream.Position = fileCountOffset;
1139 | writer.Write(fileList.Count);
1140 | writer.Write(fileList.Count);
1141 | foreach(var o in fileOffsets)
1142 | {
1143 | writer.Write(o);
1144 | }
1145 |
1146 | return fileList;
1147 | }
1148 | }
1149 | }
1150 |
--------------------------------------------------------------------------------
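
The IarCompressor/IarDecompressor pair above implements the LZSS-style codec used for IAR image entries: a 16-bit control word selects between literal bytes, 2-byte matches with short or long offsets, and longer matches with variable-length count codes, and the stream is terminated by the 0xFF offset escape. A hypothetical round-trip check is sketched below; it is not part of the repository, it assumes same-assembly access (the codec types are internal nested classes) and an AVX2-capable CPU (InsertNode uses Avx2 intrinsics without an IsSupported guard), and the input path is a placeholder.

    // Hypothetical round-trip check; place in the ArcTool project so the internal types are visible.
    using System;
    using System.Diagnostics;
    using System.IO;

    namespace ArcTool
    {
        static class IarCodecCheck
        {
            public static void RoundTrip(string path)
            {
                byte[] original = File.ReadAllBytes(path);                         // placeholder input file
                byte[] packed = IarArchive.IarImage.IarCompressor.Pack(original);  // LZSS-style pack
                byte[] unpacked = new byte[original.Length];                       // unpacked size normally comes from the IAR entry header
                using var br = new BinaryReader(new MemoryStream(packed));
                IarArchive.IarImage.IarDecompressor.Unpack(br, unpacked);
                Debug.Assert(unpacked.AsSpan().SequenceEqual(original));
            }
        }
    }
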
/ArcTool/Program.cs:
--------------------------------------------------------------------------------
1 | using SAS5Lib.SecResource;
2 | using System.Text;
3 | using System.Text.RegularExpressions;
4 |
5 | namespace ArcTool
6 | {
7 | internal class Program
8 | {
9 | static void Main(string[] args)
10 | {
11 | Encoding.RegisterProvider(CodePagesEncodingProvider.Instance);
12 | CodepageManager.Instance.SetImportEncoding("sjis");
13 | CodepageManager.Instance.SetExportEncoding("utf8");
14 |
15 |
16 | if(args.Length < 4)
17 | {
18 | Console.WriteLine("SAS5Tool.ArcTool");
19 | Console.WriteLine("A tool to extract & import resource files inside .iar/.war/.gar archives.\n");
20 | Console.WriteLine("Usage:\nArcTool.exe unpack [