├── LICENSE
├── MediaFileProcessor
│   ├── .gitignore
│   ├── AOPInfrastructure
│   │   └── AOPInfrastructure.csproj
│   ├── ConsoleTest
│   │   ├── ConsoleTest.csproj
│   │   ├── ImageProcessorTests.cs
│   │   ├── Program.cs
│   │   ├── TestFile.cs
│   │   ├── TestsVideo
│   │   │   └── TestAVI.cs
│   │   ├── VideoProcessorTests.cs
│   │   └── VideoProcessorTestsLinux.cs
│   ├── MediaFileProcessor.sln
│   └── MediaFileProcessor
│       ├── Extensions
│       │   ├── FFmpegExtensions.cs
│       │   ├── FileDataExtensions.cs
│       │   ├── FilesSignatureExtensions.cs
│       │   └── StreamDecodeExtensions.cs
│       ├── MediaFileProcess.cs
│       ├── MediaFileProcessor.csproj
│       ├── Models
│       │   ├── Common
│       │   │   ├── MediaFile.cs
│       │   │   └── MultiStream.cs
│       │   ├── Enums
│       │   │   ├── AudioBitrateType.cs
│       │   │   ├── AudioBitstreamFilterType.cs
│       │   │   ├── AudioCodecType.cs
│       │   │   ├── AudioFilterType.cs
│       │   │   ├── AudioSampleRateType.cs
│       │   │   ├── AudioSyncMethodType.cs
│       │   │   ├── CpuFlags.cs
│       │   │   ├── DpiValueType.cs
│       │   │   ├── DvType.cs
│       │   │   ├── EncoderTimebaseType.cs
│       │   │   ├── FFmpegStrictness.cs
│       │   │   ├── FileFormatType.cs
│       │   │   ├── FilterType.cs
│       │   │   ├── HardwareAccelerationType.cs
│       │   │   ├── HelpOptionType.cs
│       │   │   ├── ImageFormatType.cs
│       │   │   ├── LanguageType.cs
│       │   │   ├── MediaFileInputType.cs
│       │   │   ├── MovFlagsType.cs
│       │   │   ├── PixelFormatType.cs
│       │   │   ├── PositionType.cs
│       │   │   ├── ReadRateType.cs
│       │   │   ├── SubtitleBitstreamFilterType.cs
│       │   │   ├── SubtitleCodecType.cs
│       │   │   ├── TargetStandardType.cs
│       │   │   ├── TargetType.cs
│       │   │   ├── VideoAspectRatioType.cs
│       │   │   ├── VideoBitstreamFilter.cs
│       │   │   ├── VideoCodecPresetType.cs
│       │   │   ├── VideoCodecProfileType.cs
│       │   │   ├── VideoCodecType.cs
│       │   │   ├── VideoSizeTyoe.cs
│       │   │   ├── VideoSyncMethodType.cs
│       │   │   └── ZipCompression.cs
│       │   ├── Settings
│       │   │   ├── BaseProcessingSettings.cs
│       │   │   ├── FFmpegProcessingSettings.cs
│       │   │   ├── FFprobeProcessingSettings.cs
│       │   │   ├── ImageMagickProcessingSettings.cs
│       │   │   └── PandocFileProcessingSettings.cs
│       │   ├── Video
│       │   │   ├── CropArea.cs
│       │   │   ├── Disposition.cs
│       │   │   ├── Format.cs
│       │   │   ├── SideDataList.cs
│       │   │   ├── StreamInfo.cs
│       │   │   ├── Tag.cs
│       │   │   └── VideoFileInfo.cs
│       │   └── ZipFile
│       │       └── ZipFileEntry.cs
│       ├── Processors
│       │   ├── DocumentFileProcessor.cs
│       │   ├── FileDownloadProcessor.cs
│       │   ├── ImageFileProcessor.cs
│       │   ├── Interfaces
│       │   │   ├── IDocumentFileProcessor.cs
│       │   │   ├── IImageFileProcessor.cs
│       │   │   └── IVideoFileProcessor.cs
│       │   ├── VideoFileProcessor.cs
│       │   └── ZipFileProcessor.cs
│       └── ava.jpg
├── README.md
└── testFiles
├── issue.mp4
├── sample.3gp
├── sample.aac
├── sample.asf
├── sample.avi
├── sample.bin
├── sample.bmp
├── sample.flac
├── sample.flv
├── sample.gif
├── sample.gxf
├── sample.ico
├── sample.jpg
├── sample.m2ts
├── sample.m4v
├── sample.mkv
├── sample.mov
├── sample.mp3
├── sample.mp4
├── sample.mpeg
├── sample.mxf
├── sample.ogg
├── sample.png
├── sample.psd
├── sample.rm
├── sample.svg
├── sample.tiff
├── sample.ts
├── sample.vob
├── sample.wav
├── sample.webm
├── sample.webp
├── sample.wma
└── sample.wmv
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 Askhat Pazylidinov
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/MediaFileProcessor/.gitignore:
--------------------------------------------------------------------------------
1 | ## Ignore Visual Studio temporary files, build results, and
2 | ## files generated by popular Visual Studio add-ons.
3 | ##
4 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
5 |
6 | # User-specific files
7 | *.rsuser
8 | *.suo
9 | *.user
10 | *.userosscache
11 | *.sln.docstates
12 |
13 | # User-specific files (MonoDevelop/Xamarin Studio)
14 | *.userprefs
15 |
16 | # Mono auto generated files
17 | mono_crash.*
18 |
19 | # Build results
20 | [Dd]ebug/
21 | [Dd]ebugPublic/
22 | [Rr]elease/
23 | [Rr]eleases/
24 | x64/
25 | x86/
26 | [Ww][Ii][Nn]32/
27 | [Aa][Rr][Mm]/
28 | [Aa][Rr][Mm]64/
29 | bld/
30 | [Bb]in/
31 | **/[Oo]bj/*
32 | [Oo]ut/
33 | [Ll]og/
34 | [Ll]ogs/
35 | [Dd]ist/
36 |
37 | # Visual Studio 2015/2017 cache/options directory
38 | .vs/
39 | # Uncomment if you have tasks that create the project's static files in wwwroot
40 | #wwwroot/
41 |
42 | # Visual Studio 2017 auto generated files
43 | Generated\ Files/
44 |
45 | # MSTest test Results
46 | [Tt]est[Rr]esult*/
47 | [Bb]uild[Ll]og.*
48 |
49 | # NUnit
50 | *.VisualState.xml
51 | TestResult.xml
52 | nunit-*.xml
53 |
54 | # Build Results of an ATL Project
55 | [Dd]ebugPS/
56 | [Rr]eleasePS/
57 | dlldata.c
58 |
59 | # Benchmark Results
60 | BenchmarkDotNet.Artifacts/
61 |
62 | # .NET Core
63 | project.lock.json
64 | project.fragment.lock.json
65 | artifacts/
66 |
67 | # ASP.NET Scaffolding
68 | ScaffoldingReadMe.txt
69 |
70 | # StyleCop
71 | StyleCopReport.xml
72 |
73 | # Files built by Visual Studio
74 | *_i.c
75 | *_p.c
76 | *_h.h
77 | *.ilk
78 | *.meta
79 | *.obj
80 | *.iobj
81 | *.pch
82 | *.pdb
83 | *.ipdb
84 | *.pgc
85 | *.pgd
86 | *.rsp
87 | *.sbr
88 | *.tlb
89 | *.tli
90 | *.tlh
91 | *.tmp
92 | *.tmp_proj
93 | *_wpftmp.csproj
94 | *.log
95 | *.vspscc
96 | *.vssscc
97 | .builds
98 | *.pidb
99 | *.svclog
100 | *.scc
101 |
102 | # Chutzpah Test files
103 | _Chutzpah*
104 |
105 | # Visual C++ cache files
106 | ipch/
107 | *.aps
108 | *.ncb
109 | *.opendb
110 | *.opensdf
111 | *.sdf
112 | *.cachefile
113 | *.VC.db
114 | *.VC.VC.opendb
115 |
116 | # Visual Studio profiler
117 | *.psess
118 | *.vsp
119 | *.vspx
120 | *.sap
121 |
122 | # Visual Studio Trace Files
123 | *.e2e
124 |
125 | # TFS 2012 Local Workspace
126 | $tf/
127 |
128 | # Guidance Automation Toolkit
129 | *.gpState
130 |
131 | # ReSharper is a .NET coding add-in
132 | _ReSharper*/
133 | *.[Rr]e[Ss]harper
134 | *.DotSettings.user
135 |
136 | # TeamCity is a build add-in
137 | _TeamCity*
138 |
139 | # DotCover is a Code Coverage Tool
140 | *.dotCover
141 |
142 | # AxoCover is a Code Coverage Tool
143 | .axoCover/*
144 | !.axoCover/settings.json
145 |
146 | # Coverlet is a free, cross platform Code Coverage Tool
147 | coverage*.json
148 | coverage*.xml
149 | coverage*.info
150 |
151 | # Visual Studio code coverage results
152 | *.coverage
153 | *.coveragexml
154 |
155 | # NCrunch
156 | _NCrunch_*
157 | .*crunch*.local.xml
158 | nCrunchTemp_*
159 |
160 | # MightyMoose
161 | *.mm.*
162 | AutoTest.Net/
163 |
164 | # Web workbench (sass)
165 | .sass-cache/
166 |
167 | # Installshield output folder
168 | [Ee]xpress/
169 |
170 | # DocProject is a documentation generator add-in
171 | DocProject/buildhelp/
172 | DocProject/Help/*.HxT
173 | DocProject/Help/*.HxC
174 | DocProject/Help/*.hhc
175 | DocProject/Help/*.hhk
176 | DocProject/Help/*.hhp
177 | DocProject/Help/Html2
178 | DocProject/Help/html
179 |
180 | # Click-Once directory
181 | publish/
182 |
183 | # Publish Web Output
184 | *.[Pp]ublish.xml
185 | *.azurePubxml
186 | # Note: Comment the next line if you want to checkin your web deploy settings,
187 | # but database connection strings (with potential passwords) will be unencrypted
188 | *.pubxml
189 | *.publishproj
190 |
191 | # Microsoft Azure Web App publish settings. Comment the next line if you want to
192 | # checkin your Azure Web App publish settings, but sensitive information contained
193 | # in these scripts will be unencrypted
194 | PublishScripts/
195 |
196 | # NuGet Packages
197 | *.nupkg
198 | # NuGet Symbol Packages
199 | *.snupkg
200 | # The packages folder can be ignored because of Package Restore
201 | **/[Pp]ackages/*
202 | # except build/, which is used as an MSBuild target.
203 | !**/[Pp]ackages/build/
204 | # Uncomment if necessary however generally it will be regenerated when needed
205 | #!**/[Pp]ackages/repositories.config
206 | # NuGet v3's project.json files produces more ignorable files
207 | *.nuget.props
208 | *.nuget.targets
209 |
210 | # Microsoft Azure Build Output
211 | csx/
212 | *.build.csdef
213 |
214 | # Microsoft Azure Emulator
215 | ecf/
216 | rcf/
217 |
218 | # Windows Store app package directories and files
219 | AppPackages/
220 | BundleArtifacts/
221 | Package.StoreAssociation.xml
222 | _pkginfo.txt
223 | *.appx
224 | *.appxbundle
225 | *.appxupload
226 |
227 | # Visual Studio cache files
228 | # files ending in .cache can be ignored
229 | *.[Cc]ache
230 | # but keep track of directories ending in .cache
231 | !?*.[Cc]ache/
232 |
233 | # Others
234 | ClientBin/
235 | ~$*
236 | *~
237 | *.dbmdl
238 | *.dbproj.schemaview
239 | *.jfm
240 | *.pfx
241 | *.publishsettings
242 | orleans.codegen.cs
243 |
244 | # Including strong name files can present a security risk
245 | # (https://github.com/github/gitignore/pull/2483#issue-259490424)
246 | #*.snk
247 |
248 | # Since there are multiple workflows, uncomment next line to ignore bower_components
249 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
250 | #bower_components/
251 |
252 | # RIA/Silverlight projects
253 | Generated_Code/
254 |
255 | # Backup & report files from converting an old project file
256 | # to a newer Visual Studio version. Backup files are not needed,
257 | # because we have git ;-)
258 | _UpgradeReport_Files/
259 | Backup*/
260 | UpgradeLog*.XML
261 | UpgradeLog*.htm
262 | ServiceFabricBackup/
263 | *.rptproj.bak
264 |
265 | # SQL Server files
266 | *.mdf
267 | *.ldf
268 | *.ndf
269 |
270 | # Business Intelligence projects
271 | *.rdl.data
272 | *.bim.layout
273 | *.bim_*.settings
274 | *.rptproj.rsuser
275 | *- [Bb]ackup.rdl
276 | *- [Bb]ackup ([0-9]).rdl
277 | *- [Bb]ackup ([0-9][0-9]).rdl
278 |
279 | # Microsoft Fakes
280 | FakesAssemblies/
281 |
282 | # GhostDoc plugin setting file
283 | *.GhostDoc.xml
284 |
285 | # Node.js Tools for Visual Studio
286 | .ntvs_analysis.dat
287 | node_modules/
288 |
289 | # Visual Studio 6 build log
290 | *.plg
291 |
292 | # Visual Studio 6 workspace options file
293 | *.opt
294 |
295 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
296 | *.vbw
297 |
298 | # Visual Studio LightSwitch build output
299 | **/*.HTMLClient/GeneratedArtifacts
300 | **/*.DesktopClient/GeneratedArtifacts
301 | **/*.DesktopClient/ModelManifest.xml
302 | **/*.Server/GeneratedArtifacts
303 | **/*.Server/ModelManifest.xml
304 | _Pvt_Extensions
305 |
306 | # Paket dependency manager
307 | .paket/paket.exe
308 | paket-files/
309 |
310 | # FAKE - F# Make
311 | .fake/
312 |
313 | # CodeRush personal settings
314 | .cr/personal
315 |
316 | # Python Tools for Visual Studio (PTVS)
317 | __pycache__/
318 | *.pyc
319 |
320 | # Cake - Uncomment if you are using it
321 | # tools/**
322 | # !tools/packages.config
323 |
324 | # Tabs Studio
325 | *.tss
326 |
327 | # Telerik's JustMock configuration file
328 | *.jmconfig
329 |
330 | # BizTalk build output
331 | *.btp.cs
332 | *.btm.cs
333 | *.odx.cs
334 | *.xsd.cs
335 |
336 | # OpenCover UI analysis results
337 | OpenCover/
338 |
339 | # Azure Stream Analytics local run output
340 | ASALocalRun/
341 |
342 | # MSBuild Binary and Structured Log
343 | *.binlog
344 |
345 | # NVidia Nsight GPU debugger configuration file
346 | *.nvuser
347 |
348 | # MFractors (Xamarin productivity tool) working folder
349 | .mfractor/
350 |
351 | # Local History for Visual Studio
352 | .localhistory/
353 |
354 | # BeatPulse healthcheck temp database
355 | healthchecksdb
356 |
357 | # Backup folder for Package Reference Convert tool in Visual Studio 2017
358 | MigrationBackup/
359 |
360 | # Ionide (cross platform F# VS Code tools) working folder
361 | .ionide/
362 |
363 | # Fody - auto-generated XML schema
364 | FodyWeavers.xsd
365 | *.assets.json
366 | *.assets.cache
367 | *.csproj.FileListAbsolute.txt
368 | *.csproj.nuget.cache
369 | *.csproj.nuget.dgspec.json
370 | *.csproj.nuget.g.props
371 |
372 | .DS_Store
373 | node_modules
374 | /dist
375 | # local env files
376 | .env.local
377 | .env.*.local
378 |
379 | # Log files
380 | npm-debug.log*
381 | yarn-debug.log*
382 | yarn-error.log*
383 | pnpm-debug.log*
384 |
385 | # Editor directories and files
386 | .idea
387 | .vscode
388 | *.suo
389 | *.ntvs*
390 | *.njsproj
391 | *.sw?
392 | *.rar
393 | /.idea
394 | */**/bin/Debug
395 | */**/bin/Release
396 | */**/obj/Debug
397 | */**/obj/Release
--------------------------------------------------------------------------------
/MediaFileProcessor/AOPInfrastructure/AOPInfrastructure.csproj:
--------------------------------------------------------------------------------
1 | <Project Sdk="Microsoft.NET.Sdk">
2 |
3 |     <PropertyGroup>
4 |         <TargetFramework>net7.0</TargetFramework>
5 |         <ImplicitUsings>enable</ImplicitUsings>
6 |         <Nullable>enable</Nullable>
7 |         <LangVersion>11</LangVersion>
8 |     </PropertyGroup>
9 |
10 |
11 | </Project>
--------------------------------------------------------------------------------
/MediaFileProcessor/ConsoleTest/ConsoleTest.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Exe
5 | net7.0
6 | enable
7 | enable
8 | Windows
9 | 11
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
--------------------------------------------------------------------------------
/MediaFileProcessor/ConsoleTest/ImageProcessorTests.cs:
--------------------------------------------------------------------------------
1 | using MediaFileProcessor.Models.Common;
2 | using MediaFileProcessor.Models.Enums;
3 | using MediaFileProcessor.Processors;
4 | namespace ConsoleTest;
5 |
6 | ///
7 | /// Test methods for ImageProcessor
8 | ///
9 | public class ImageProcessorTests
10 | {
11 | ///
12 | /// Test image
13 | ///
14 | private static readonly string _image = @"G:\MagickImageFile\testImage.jpg";
15 |
16 | ///
17 | /// Test Compress image
18 | ///
19 | public static async Task CompressImageTest(ImageFileProcessor processor)
20 | {
21 | //Test block with physical paths to input and output files
22 | await processor.CompressImageAsync(new MediaFile(_image), ImageFormatType.JPG, 60, FilterType.Lanczos, "x1080", @"G:\MagickImageFile\result.jpg", ImageFormatType.JPG);
23 |
24 | //Block for testing file processing as streams without specifying physical paths
25 | await using var stream = new FileStream(_image, FileMode.Open);
26 | var resultStream = await processor.CompressImageAsStreamAsync(new MediaFile(stream), ImageFormatType.JPG, 60, FilterType.Lanczos, "x1080", ImageFormatType.JPG);
27 | await using (var output = new FileStream(@"G:\MagickImageFile\result.jpg", FileMode.Create))
28 | resultStream.WriteTo(output);
29 |
30 | //Block for testing file processing as bytes without specifying physical paths
31 | var bytes = await File.ReadAllBytesAsync(_image);
32 | var resultBytes = await processor.CompressImageAsBytesAsync(new MediaFile(bytes), ImageFormatType.JPG, 60, FilterType.Lanczos, "x1080", ImageFormatType.JPG);
33 | await using (var output = new FileStream(@"G:\MagickImageFile\result.jpg", FileMode.Create))
34 | output.Write(resultBytes);
35 | }
36 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/ConsoleTest/Program.cs:
--------------------------------------------------------------------------------
1 | using System.Diagnostics;
2 | using System.Text;
3 | using ConsoleTest;
4 | using ConsoleTest.TestsVideo;
5 | using MediaFileProcessor.Extensions;
6 | using MediaFileProcessor.Models.Common;
7 | using MediaFileProcessor.Models.Enums;
8 |
9 | Console.ForegroundColor = ConsoleColor.Green;
10 | Console.WriteLine("Start");
11 | Console.ResetColor();
12 |
13 |
14 |
15 | Console.ForegroundColor = ConsoleColor.Green;
16 | Console.WriteLine("Done");
17 | Console.ResetColor();
--------------------------------------------------------------------------------
/MediaFileProcessor/ConsoleTest/TestFile.cs:
--------------------------------------------------------------------------------
1 | using MediaFileProcessor.Models.Enums;
2 | namespace ConsoleTest;
3 |
4 | public static class TestFile
5 | {
6 | private const string BasePath = @"testFiles/";
7 | public const string ResultFilePath = @"tests/";
8 | public static string GetPath(FileFormatType formatType)
9 | {
10 | var filePath = BasePath + $"sample.{formatType}".ToLowerInvariant().Replace("_", "");
11 |
12 | if(!File.Exists(filePath))
13 | throw new FileNotFoundException($"{filePath} not found");
14 |
15 | return filePath;
16 | }
17 |
18 | public static void VerifyFileSize(string path, int expectedSize)
19 | {
20 | var fileInfo = new FileInfo(path);
21 |
22 | if(expectedSize != fileInfo.Length)
23 | {
24 | Console.ForegroundColor = ConsoleColor.Red;
25 | Console.WriteLine($"ERROR ({path}): The expected size does not correspond to the real one");
26 | Console.ResetColor();
27 | }
28 | else
29 | {
30 | Console.ForegroundColor = ConsoleColor.Green;
31 | Console.WriteLine($"{path} file successfully created");
32 | Console.ResetColor();
33 | }
34 | }
35 | }
--------------------------------------------------------------------------------
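
For reference, a minimal sketch of how these helpers are meant to be used. FileFormatType.MP4 is assumed to be a member of the enum (its members are not shown in this dump); the lower-casing and underscore-stripping above would resolve it to testFiles/sample.mp4.

    using ConsoleTest;
    using MediaFileProcessor.Models.Enums;

    // Assumes FileFormatType.MP4 exists and testFiles/sample.mp4 is present in the working directory.
    var path = TestFile.GetPath(FileFormatType.MP4);                // "testFiles/sample.mp4"
    TestFile.VerifyFileSize(path, (int)new FileInfo(path).Length);  // prints a green success message
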
/MediaFileProcessor/MediaFileProcessor.sln:
--------------------------------------------------------------------------------
1 |
2 | Microsoft Visual Studio Solution File, Format Version 12.00
3 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "MediaFileProcessor", "MediaFileProcessor\MediaFileProcessor.csproj", "{79F0FC86-435E-4C4F-A8C8-4F060BD5C700}"
4 | EndProject
5 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ConsoleTest", "ConsoleTest\ConsoleTest.csproj", "{831343A2-AC68-482E-9E7B-E51C4BDEB242}"
6 | EndProject
7 | Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AOPInfrastructure", "AOPInfrastructure\AOPInfrastructure.csproj", "{8BB4DADC-4846-4405-9E7A-B75F437561C4}"
8 | EndProject
9 | Global
10 | GlobalSection(SolutionConfigurationPlatforms) = preSolution
11 | Debug|Any CPU = Debug|Any CPU
12 | Release|Any CPU = Release|Any CPU
13 | EndGlobalSection
14 | GlobalSection(ProjectConfigurationPlatforms) = postSolution
15 | {79F0FC86-435E-4C4F-A8C8-4F060BD5C700}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
16 | {79F0FC86-435E-4C4F-A8C8-4F060BD5C700}.Debug|Any CPU.Build.0 = Debug|Any CPU
17 | {79F0FC86-435E-4C4F-A8C8-4F060BD5C700}.Release|Any CPU.ActiveCfg = Release|Any CPU
18 | {79F0FC86-435E-4C4F-A8C8-4F060BD5C700}.Release|Any CPU.Build.0 = Release|Any CPU
19 | {831343A2-AC68-482E-9E7B-E51C4BDEB242}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
20 | {831343A2-AC68-482E-9E7B-E51C4BDEB242}.Debug|Any CPU.Build.0 = Debug|Any CPU
21 | {831343A2-AC68-482E-9E7B-E51C4BDEB242}.Release|Any CPU.ActiveCfg = Release|Any CPU
22 | {831343A2-AC68-482E-9E7B-E51C4BDEB242}.Release|Any CPU.Build.0 = Release|Any CPU
23 | {8BB4DADC-4846-4405-9E7A-B75F437561C4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
24 | {8BB4DADC-4846-4405-9E7A-B75F437561C4}.Debug|Any CPU.Build.0 = Debug|Any CPU
25 | {8BB4DADC-4846-4405-9E7A-B75F437561C4}.Release|Any CPU.ActiveCfg = Release|Any CPU
26 | {8BB4DADC-4846-4405-9E7A-B75F437561C4}.Release|Any CPU.Build.0 = Release|Any CPU
27 | EndGlobalSection
28 | EndGlobal
29 |
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Extensions/FFmpegExtensions.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Extensions;
2 |
3 | ///
4 | /// Provides extension methods for handling FFmpeg related operations.
5 | ///
6 | public static class FFmpegExtensions
7 | {
8 | ///
9 | /// Converts a TimeSpan duration to FFmpeg format.
10 | ///
11 | /// The TimeSpan duration to convert.
12 | /// The FFmpeg-formatted string representation of the duration.
13 | public static string ToFfmpegDuration(this TimeSpan duration)
14 | {
15 | var isNegative = duration.TotalSeconds < 0;
16 | var sign = isNegative ? "-" : "";
17 | var absDuration = isNegative ? -duration : duration;
18 | return $"{sign}{(int)absDuration.TotalHours}:{absDuration.Minutes:00}:{absDuration.Seconds:00}.{absDuration.Milliseconds:000}";
19 | }
20 | }
--------------------------------------------------------------------------------
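
A quick usage sketch of ToFfmpegDuration; the sample values are illustrative and the output strings follow directly from the format string above.

    using MediaFileProcessor.Extensions;

    var cut = TimeSpan.FromSeconds(3723.5);          // 1 h 2 min 3.5 s
    Console.WriteLine(cut.ToFfmpegDuration());       // "1:02:03.500"
    Console.WriteLine((-cut).ToFfmpegDuration());    // "-1:02:03.500", the sign is prepended separately

    // Hours are not wrapped at 24, so long durations stay valid for ffmpeg's -ss/-t options,
    // e.g. TimeSpan.FromHours(25) formats as "25:00:00.000".
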
/MediaFileProcessor/MediaFileProcessor/MediaFileProcess.cs:
--------------------------------------------------------------------------------
1 | using System.Diagnostics;
2 | using System.IO.Pipes;
3 | using System.Runtime.InteropServices;
4 | using MediaFileProcessor.Models.Settings;
5 | namespace MediaFileProcessor;
6 |
7 | ///
8 | /// Represents a media file processing class
9 | ///
10 | public sealed class MediaFileProcess : IDisposable
11 | {
12 | private Process? Process { get; set; }
13 |
14 | ///
15 | /// The processing settings for the media file process.
16 | ///
17 | private BaseProcessingSettings Settings { get; }
18 |
19 | ///
20 | /// The input streams for the process.
21 | ///
22 | private Stream[]? InputStreams { get; set; }
23 |
24 | ///
25 | /// Named pipes through which data will be transferred
26 | ///
27 | private NamedPipeServerStream[]? NamedPipes { get; set; }
28 |
29 | ///
30 | /// The pipe names for the process.
31 | ///
32 | private string[]? PipeNames { get; set; }
33 |
34 | ///
35 | /// Initializes a new instance of the class.
36 | ///
37 | /// The name of the process file.
38 | /// The arguments for the process file.
39 | /// The processing settings for the media file process.
40 | public MediaFileProcess(string processFileName, string arguments, BaseProcessingSettings settings)
41 | : this(processFileName, arguments, settings, null, null)
42 | {
43 | }
44 |
45 | ///
46 | /// Initializes a new instance of the class.
47 | ///
48 | /// The name of the process file.
49 | /// The arguments for the process file.
50 | /// The processing settings for the media file process.
51 | /// The input streams for the process. Optional parameter.
52 | public MediaFileProcess(string processFileName, string arguments, BaseProcessingSettings settings, Stream[] inputStreams)
53 | : this(processFileName, arguments, settings, inputStreams, null)
54 | {
55 | }
56 |
57 | ///
58 | /// Initializes a new instance of the class.
59 | ///
60 | /// The name of the process file.
61 | /// The arguments for the process file.
62 | /// The processing settings for the media file process.
63 | /// The pipe names for the process. Optional parameter.
64 | public MediaFileProcess(string processFileName, string arguments, BaseProcessingSettings settings, string[] pipeNames)
65 | : this(processFileName, arguments, settings, null, pipeNames)
66 | {
67 | }
68 |
69 | ///
70 | /// Initializes a new instance of the class.
71 | ///
72 | /// The name of the process file.
73 | /// The arguments for the process file.
74 | /// The processing settings for the media file process.
75 | /// The input streams for the process. Optional parameter.
76 | /// The pipe names for the process. Optional parameter.
77 | public MediaFileProcess(string processFileName,
78 | string arguments,
79 | BaseProcessingSettings settings,
80 | Stream[]? inputStreams,
81 | string[]? pipeNames)
82 | {
83 | Process = new Process();
84 | Settings = settings;
85 | InputStreams = inputStreams;
86 | PipeNames = pipeNames;
87 |
88 | Process.StartInfo.FileName = processFileName;
89 | Process.StartInfo.Arguments = arguments;
90 | Process.StartInfo.CreateNoWindow = Settings.CreateNoWindow;
91 | Process.StartInfo.UseShellExecute = Settings.UseShellExecute;
92 | Process.StartInfo.RedirectStandardInput = inputStreams is not null;
93 | Process.StartInfo.RedirectStandardOutput = Settings.IsStandartOutputRedirect;
94 | Process.StartInfo.RedirectStandardError = Settings.RedirectStandardError;
95 |
96 | Process.EnableRaisingEvents = Settings.EnableRaisingEvents;
97 | Process.StartInfo.WindowStyle = Settings.WindowStyle;
98 |
99 | if (Settings.ProcessOnExitedHandler is not null)
100 | Process.Exited += Settings.ProcessOnExitedHandler;
101 |
102 | if (Settings.OutputDataReceivedEventHandler is not null)
103 | Process.OutputDataReceived += Settings.OutputDataReceivedEventHandler;
104 |
105 | if (Settings.ErrorDataReceivedHandler is not null)
106 | Process.ErrorDataReceived += Settings.ErrorDataReceivedHandler;
107 |
108 | Process.Exited += ProcessOnExited;
109 | AppDomain.CurrentDomain.UnhandledException += UnhandledExceptionHandler;
110 | AppDomain.CurrentDomain.ProcessExit += DomainProcessExitHandler;
111 | }
112 |
113 | ///
114 | /// The event handler that will be triggered when the process exits.
115 | /// It waits for the process to exit and unsubscribes from the event.
116 | ///
117 | /// The sender object
118 | /// The event arguments
119 | private void ProcessOnExited(object sender, EventArgs e)
120 | {
121 | if(Process is null)
122 | return;
123 |
124 | Process.WaitForExit();
125 | Process.Exited -= ProcessOnExited;
126 | }
127 |
128 | ///
129 | /// Asynchronously executes the process and returns the output as a memory stream if
130 | /// RedirectStandardOutput is set to true.
131 | ///
132 | /// A cancellation token to cancel the operation
133 | /// The memory stream containing the output of the process, or null if
134 | /// RedirectStandardOutput is set to false
135 | public async Task<MemoryStream?> ExecuteAsync(CancellationToken cancellationToken)
136 | {
137 | if(Process is null)
138 | throw new ObjectDisposedException("The process is no longer executable. "
139 | + "Any arguments like named pipes or input or output streams can be closed or their pointers shifted");
140 |
141 | MemoryStream? outputStream = null;
142 | if(Process.StartInfo.RedirectStandardOutput)
143 | outputStream = new MemoryStream();
144 |
145 | await Task.Factory.StartNew(() => Run(outputStream, cancellationToken), cancellationToken, TaskCreationOptions.LongRunning, TaskScheduler.Default);
146 |
147 | outputStream?.Seek(0, SeekOrigin.Begin);
148 |
149 | return outputStream;
150 | }
151 |
152 | ///
153 | /// This method is responsible for running the process.
154 | ///
155 | /// The stream to which the result of the process will be written
156 | /// if it is specified and the process is configured to issue the result in StandardOutput
157 | /// A cancellation token to cancel the operation
158 | private void Run(MemoryStream? outputStream, CancellationToken cancellationToken)
159 | {
160 | try
161 | {
162 | //Wrapping the process in a using statement to ensure it is disposed properly.
163 | using (Process)
164 | {
165 | //A flag to keep track if the process has already exited.
166 | var processExited = false;
167 |
168 | //Starting the process.
169 | Process!.Start();
170 |
171 | //Registers a callback for when the cancellation token is cancelled.
172 | //This will kill the process if it is still running.
173 | cancellationToken.Register(() =>
174 | {
175 | try
176 | {
177 | //Check if the process has already exited or if the process exit has already been handled.
178 | if (processExited || Process.HasExited)
179 | return;
180 |
181 | //Killing the process.
182 | Dispose();
183 | }
184 | catch(Exception)
185 | {
186 | //Ignoring any exceptions that may occur while trying to kill the process.
187 | }
188 | });
189 |
190 | //If an error data received handler is set, start reading error data.
191 | if(Settings.ErrorDataReceivedHandler is not null)
192 | Process.BeginErrorReadLine();
193 |
194 | //If an output data received handler is set, start reading output data.
195 | if(Settings.OutputDataReceivedEventHandler is not null)
196 | Process.BeginOutputReadLine();
197 |
198 | //Tasks for handling input and output streams.
199 | Task? inputTask = null;
200 | Task? outputTask = null;
201 |
202 | //If input streams are available, start a task to write the input to the process' standard input.
203 | if(InputStreams is not null)
204 | inputTask = Task.Run(WriteStandartInput, cancellationToken);
205 |
206 | //If an output stream is available, start a task to read the process' standard output.
207 | if(outputStream is not null)
208 | outputTask = Task.Run(() => ReadStandartOutput(outputStream), cancellationToken);
209 |
210 | //Wait for both tasks to complete.
211 | Task.WaitAll(inputTask ?? Task.CompletedTask, outputTask ?? Task.CompletedTask);
212 |
213 | //Wait for the process to exit.
214 | Process.WaitForExit();
215 |
216 | //Set the processExited flag to true.
217 | processExited = true;
218 | }
219 | }
220 | catch (Exception e)
221 | {
222 | //Write the exception to the console.
223 | Console.WriteLine(e);
224 |
225 | //Rethrow the exception.
226 | throw;
227 | }
228 | }
229 |
230 | // This method writes the input data to the standard input of the process
231 | private void WriteStandartInput()
232 | {
233 | // If there is only one input stream
234 | if(InputStreams?.Length == 1)
235 | try
236 | {
237 | // Copy data from the input stream to the standard input of the process
238 | InputStreams[0].CopyTo(Process!.StandardInput.BaseStream);
239 |
240 | // Flush the data to the standard input of the process
241 | Process.StandardInput.Flush();
242 |
243 | // Close the standard input stream
244 | Process.StandardInput.Close();
245 | }
246 | catch (Exception)
247 | {
248 | // ignore any exceptions that occur during the copying process
249 | }
250 |
251 | // If there are multiple input streams
252 | if (!(InputStreams?.Length > 1))
253 | return;
254 |
255 | // Based on the operating system, use the appropriate method for handling multiple input streams
256 | if(RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
257 | MultiInputWindowsOs();
258 | else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
259 | MultiInpuLinuxOs();
260 | else if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
261 |
262 | // Throw an exception if the operating system is not supported for multi inputs
263 | throw new NotSupportedException("Operating System not supported for multi inputs");
264 | else
265 |
266 | // Throw an exception if the operating system cannot be recognized
267 | throw new NotSupportedException("Operating System not recognized");
268 | }
269 |
270 | ///
271 | /// This method is used to handle the scenario when the OS is Windows and multiple input streams are being processed.
272 | ///
273 | private void MultiInputWindowsOs()
274 | {
275 | // Create an array of NamedPipeServerStream instances with the size of the input streams
276 | NamedPipes = new NamedPipeServerStream[InputStreams!.Length];
277 |
278 | // Loop through the pipes array and initialize each instance of NamedPipeServerStream with the name of the pipe
279 | // and other required parameters for pipe transmission.
280 | for (var i = 0; i < NamedPipes.Length; i++)
281 | NamedPipes[i] = new NamedPipeServerStream(PipeNames![i], PipeDirection.InOut, 1, PipeTransmissionMode.Byte, PipeOptions.Asynchronous);
282 |
283 | // Create an array of tasks with the size of the pipes array
284 | var tasks = new Task[NamedPipes.Length];
285 |
286 | // Loop through the pipes array and connect each pipe with its corresponding input stream
287 | for (var i = 0; i < NamedPipes.Length; i++)
288 | {
289 | var pipe = NamedPipes[i];
290 | var inputStream = InputStreams[i];
291 |
292 | // Wait for the connection to be established
293 | pipe.WaitForConnection();
294 |
295 | // Create a new task to copy data from the input stream to the pipe
296 | tasks[i] = inputStream.CopyToAsync(pipe)
297 | .ContinueWith(_ =>
298 | {
299 | // Wait for the data to be transmitted completely
300 | pipe.WaitForPipeDrain();
301 |
302 | // Disconnect the pipe after the data has been transmitted
303 | pipe.Disconnect();
304 | });
305 | }
306 |
307 | // Wait for all tasks to complete
308 | Task.WaitAll(tasks);
309 | }
310 |
311 | // MultiInputLinuxOS is a method that handles the input streams and pipes them to the process when the
312 | // operating system is Linux.
313 | private void MultiInpuLinuxOs()
314 | {
315 | // Create an array of tasks to handle each input stream
316 | var tasks = new Task[InputStreams!.Length];
317 |
318 | // Loop through the pipe names and open a file stream for each one
319 | for (var i = 0; i < PipeNames!.Length; i++)
320 | {
321 | var fs = File.OpenWrite($"{PipeNames[i]}");
322 |
323 | // Start a task to copy the input stream data to the file stream
324 | tasks[i] = InputStreams[i].CopyToAsync(fs);
325 | }
326 |
327 | // Wait for all tasks to complete
328 | Task.WaitAll(tasks);
329 | }
330 |
331 | // ReadStandartOutput is a method that reads the standard output from the process and writes it to the
332 | // provided output stream.
333 | private void ReadStandartOutput(MemoryStream outputStream)
334 | {
335 | // Create a buffer to store the data being read from the process
336 | var buffer = new byte[64 * 1024];
337 |
338 | // Variable to store the number of bytes read in the last read operation
339 | int lastRead;
340 |
341 | // Read data from the process while there is still data available
342 | do
343 | {
344 | // Read data from the process and store the number of bytes read
345 | lastRead = Process!.StandardOutput.BaseStream.Read(buffer, 0, buffer.Length);
346 |
347 | // Write the data to the output stream
348 | outputStream.Write(buffer, 0, lastRead);
349 | } while (lastRead > 0);
350 | }
351 |
352 | ///
353 | /// Implements the IDisposable interface to release unmanaged resources used by this object.
354 | ///
355 | public void Dispose(bool disposing)
356 | {
357 | if(disposing)
358 | return;
359 |
360 | if (InputStreams != null)
361 | {
362 | foreach(var inputStream in InputStreams)
363 | inputStream.Dispose();
364 | InputStreams = null;
365 | }
366 |
367 | if (NamedPipes != null)
368 | {
369 | foreach(var namedPipe in NamedPipes)
370 | namedPipe.Dispose();
371 | NamedPipes = null;
372 | }
373 |
374 | if (PipeNames != null)
375 | {
376 | foreach (var pipeFile in PipeNames.Where(File.Exists))
377 | {
378 | File.Delete(pipeFile);
379 | }
380 |
381 | PipeNames = null;
382 | }
383 |
384 | // Dispose of the process and kill it
385 | if(Process is null)
386 | return;
387 |
388 | try
389 | {
390 | // Kill the process if it is still running, then release its resources
391 | if(!Process.HasExited)
392 | Process.Kill();
393 | }
394 | catch (InvalidOperationException e)
395 | {
396 | if(e.Message != "No process is associated with this object.")
397 | throw;
398 | }
399 | finally
400 | {
401 | Process.Dispose();
402 | Process = null;
403 | }
404 | }
405 |
406 | ///
407 | /// Dispose pattern
408 | ///
409 | public void Dispose()
410 | {
411 | Dispose(false);
412 | GC.SuppressFinalize(this);
413 | }
414 |
415 | ///
416 | /// Destructor
417 | ///
418 | ~MediaFileProcess()
419 | {
420 | Dispose(false);
421 | }
422 |
423 | ///
424 | /// This method is executed in the event of an application crash so that child executable processes do not remain alive in the background
425 | ///
426 | private void UnhandledExceptionHandler(object sender, UnhandledExceptionEventArgs e)
427 | {
428 | Dispose(false);
429 | }
430 |
431 | ///
432 | /// This method is executed in the event of an application exit so that child executable processes do not remain alive in the background
433 | ///
434 | private void DomainProcessExitHandler(object sender, EventArgs e)
435 | {
436 | Dispose(false);
437 | }
438 | }
--------------------------------------------------------------------------------
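
As an illustration of the wrapper above, a minimal sketch that launches ffmpeg and captures its standard output as a MemoryStream. It assumes an ffmpeg binary is on PATH and that FFmpegProcessingSettings (from Models/Settings) exposes settable CreateNoWindow, UseShellExecute, IsStandartOutputRedirect and RedirectStandardError properties; neither assumption is confirmed by this excerpt.

    using MediaFileProcessor;
    using MediaFileProcessor.Models.Settings;

    // Sketch only: the settings type and its setters are assumed, not shown in this dump.
    var settings = new FFmpegProcessingSettings
    {
        CreateNoWindow = true,
        UseShellExecute = false,
        IsStandartOutputRedirect = true,    // makes ExecuteAsync return a MemoryStream
        RedirectStandardError = false
    };

    using var process = new MediaFileProcess("ffmpeg", "-i input.mp4 -f mp3 pipe:1", settings);

    var output = await process.ExecuteAsync(CancellationToken.None)
                 ?? throw new InvalidOperationException("Standard output was not redirected.");
    await File.WriteAllBytesAsync("output.mp3", output.ToArray());
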
/MediaFileProcessor/MediaFileProcessor/MediaFileProcessor.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | netstandard2.1
5 | enable
6 | enable
7 | 11
8 | true
9 | Library for processing various files (videos, photos, documents, images).
10 | Askhat Pazylidinov
11 |
12 | This library is a universal wrapper for executable processes in the operating system (Windows/Linux).
13 | The library allows files to be exchanged with processes through named pipes, streams, byte arrays, and directory paths.
14 | It also has some useful features, such as the ability to decode a stream on the fly and get a set of files from it by their signatures.
15 |
16 | ava.jpg
17 | https://github.com/askatmaster/MediaFileProcessor
18 | ffmpeg, ffprobe, pandoc, imageMagick, MediaFileProcessor.
19 | README.md
20 | LICENSE
21 | https://github.com/askatmaster/MediaFileProcessor
22 | git
23 |
24 | In this version, the library implements wrappers over such projects as FFmpeg, ImageMagick and Pandoc.
25 | This library can also be used to interact with third-party processes.
26 |
27 | OpenSource
28 | 1.0.4
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Common/MediaFile.cs:
--------------------------------------------------------------------------------
1 | using MediaFileProcessor.Extensions;
2 | using MediaFileProcessor.Models.Enums;
3 | namespace MediaFileProcessor.Models.Common;
4 |
5 | ///
6 | /// The `MediaFile` class represents a media file with either file path, named pipe, stream or byte array as input.
7 | ///
8 | public class MediaFile
9 | {
10 | ///
11 | /// The file path of the media file.
12 | ///
13 | public string? InputFilePath { get; }
14 |
15 | ///
16 | /// File format. If null then the format could not be determined
17 | ///
18 | public FileFormatType? FormatType { get; }
19 |
20 | ///
21 | /// The stream of the media file.
22 | ///
23 | public Stream? InputFileStream { get; }
24 |
25 | ///
26 | /// The input type of the media file.
27 | ///
28 | public MediaFileInputType InputType { get; }
29 |
30 | ///
31 | /// Initializes a new instance of the `MediaFile` class with file Path or template NamedPipe.
32 | ///
33 | /// The file path or template of the media file.
34 | public MediaFile(string inputArgument)
35 | {
36 | var fileExtension = inputArgument.GetExtension();
37 |
38 | if(fileExtension is not null)
39 | {
40 | InputType = MediaFileInputType.Path;
41 | InputFilePath = $"\"{inputArgument}\" ";
42 | FormatType = inputArgument.GetFileFormatType();
43 | }
44 | else
45 | {
46 | InputFilePath = $" {inputArgument.ToPipeDir()} ";
47 | InputType = MediaFileInputType.NamedPipe;
48 | }
49 | }
50 |
51 | ///
52 | /// Initializes a new instance of the `MediaFile` class with stream input.
53 | ///
54 | /// The stream of the media file.
55 | public MediaFile(Stream inputFileStream)
56 | {
57 | if (!inputFileStream.CanRead)
58 | throw new NotSupportedException("The stream does not support reading.");
59 |
60 | var buffer = new byte[2024];
61 | var read = inputFileStream.Read(buffer, 0, buffer.Length);
62 | if(read > 0)
63 | FormatType = buffer.GetFormat();
64 | inputFileStream.Seek(0, SeekOrigin.Begin);
65 | InputFileStream = inputFileStream;
66 | InputType = MediaFileInputType.Stream;
67 | }
68 |
69 | ///
70 | /// Initializes a new instance of the `MediaFile` class with byte array input.
71 | ///
72 | /// The byte array of the media file.
73 | public MediaFile(byte[] bytes)
74 | {
75 | if(bytes.Length is 0)
76 | throw new ArgumentException("Byte array is empty");
77 |
78 | FormatType = bytes.GetFormat();
79 | InputFileStream = new MemoryStream(bytes);
80 | InputType = MediaFileInputType.Stream;
81 | }
82 | }
--------------------------------------------------------------------------------
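
A short sketch of the three construction modes shown above (the file name is illustrative).

    using MediaFileProcessor.Models.Common;

    // From a physical path: the extension determines InputType.Path and the format.
    var fromPath = new MediaFile("sample.mp4");

    // From a readable stream: the first bytes are sniffed for a signature, then the stream is rewound.
    await using var fs = new FileStream("sample.mp4", FileMode.Open, FileAccess.Read);
    var fromStream = new MediaFile(fs);

    // From a byte array: wrapped into a MemoryStream internally.
    var fromBytes = new MediaFile(await File.ReadAllBytesAsync("sample.mp4"));

    Console.WriteLine(fromPath.InputType);    // Path
    Console.WriteLine(fromStream.FormatType); // the detected format, or null if it could not be determined
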
/MediaFileProcessor/MediaFileProcessor/Models/Common/MultiStream.cs:
--------------------------------------------------------------------------------
1 | using System.Collections;
2 |
3 | namespace MediaFileProcessor.Models.Common;
4 |
5 | ///
6 | /// The `MultiStream` class is a concrete implementation of the `Stream` abstract class.
7 | /// It allows multiple streams to be combined into one virtual stream that can be read from.
8 | ///
9 | public sealed class MultiStream : Stream
10 | {
11 | ///
12 | /// Constructor
13 | ///
14 | public MultiStream()
15 | {
16 | }
17 |
18 | ///
19 | /// Constructor
20 | ///
21 | public MultiStream(Stream[] streams)
22 | {
23 | streamList.AddRange(streams);
24 | }
25 |
26 | ///
27 | /// Constructor
28 | ///
29 | public MultiStream(List<Stream> streams)
30 | {
31 | streamList.AddRange(streams);
32 | }
33 |
34 | ///
35 | /// Constructor
36 | ///
37 | public MultiStream(IEnumerable<Stream> streams)
38 | {
39 | streamList.AddRange(streams.ToArray());
40 | }
41 |
42 | ///
43 | /// Constructor
44 | ///
45 | public MultiStream(ICollection<Stream> streams)
46 | {
47 | streamList.AddRange(streams.ToArray());
48 | }
49 |
50 | ///
51 | /// A list of streams to be combined into the virtual stream.
52 | ///
53 | private readonly ArrayList streamList = new ();
54 |
55 | ///
56 | /// The current position within the virtual stream.
57 | ///
58 | private long position;
59 |
60 | ///
61 | /// Gets a value indicating whether the current stream supports reading.
62 | ///
63 | ///
64 | /// Always returns `true` as the `MultiStream` class supports reading.
65 | ///
66 | public override bool CanRead => true;
67 |
68 | ///
69 | /// Gets a value indicating whether the current stream supports seeking.
70 | ///
71 | ///
72 | /// Always returns `true` as the `MultiStream` class supports seeking.
73 | ///
74 | public override bool CanSeek => true;
75 |
76 | ///
77 | /// Gets a value indicating whether the current stream supports writing.
78 | ///
79 | ///
80 | /// Always returns `false` as the `MultiStream` class does not support writing.
81 | ///
82 | public override bool CanWrite => false;
83 |
84 | ///
85 | /// Streams count
86 | ///
87 | public int Count => streamList.Count;
88 |
89 | ///
90 | /// Gets the length of the virtual stream, which is the sum of the lengths of all streams in the `streamList`.
91 | ///
92 | ///
93 | /// The length of the virtual stream as a `long`.
94 | ///
95 | public override long Length =>
96 | streamList.Cast<Stream>()
97 | .Sum(stream => stream.Length);
98 |
99 | ///
100 | /// Gets or sets the position within the virtual stream.
101 | ///
102 | ///
103 | /// The current position within the virtual stream as a `long`.
104 | ///
105 | public override long Position
106 | {
107 | get => position;
108 | set => Seek(value, SeekOrigin.Begin);
109 | }
110 |
111 | ///
112 | /// Changes the position within the virtual stream.
113 | ///
114 | /// A `long` offset to move the position by.
115 | /// The origin from which to calculate the new position.
116 | /// Must be either `SeekOrigin.Begin`, `SeekOrigin.Current`, or `SeekOrigin.End`.
117 | ///
118 | /// The new position within the virtual stream as a `long`.
119 | ///
120 | public override long Seek(long offset, SeekOrigin origin)
121 | {
122 | var len = Length;
123 |
124 | switch (origin)
125 | {
126 | case SeekOrigin.Begin:
127 | position = offset;
128 |
129 | break;
130 | case SeekOrigin.Current:
131 | position += offset;
132 |
133 | break;
134 | case SeekOrigin.End:
135 | position = len - offset;
136 |
137 | break;
138 | default:
139 | throw new ArgumentOutOfRangeException(nameof(origin), origin, null);
140 | }
141 |
142 | if (position > len)
143 | position = len;
144 | else if (position < 0)
145 | position = 0;
146 |
147 | return position;
148 | }
149 |
150 | ///
151 | /// Reads all the data in the multiple streams and returns it as an array of byte arrays.
152 | ///
153 | /// An array of byte arrays containing the data of the multiple streams
154 | public byte[][] ReadAsDataArray()
155 | {
156 | var list = new List<byte[]>(streamList.Count);
157 |
158 | foreach (Stream stream in streamList)
159 | {
160 | var buffer = new byte[stream.Length];
161 | stream.Read(buffer, 0, (int)stream.Length);
162 | list.Add(buffer);
163 | stream.Seek(0, SeekOrigin.Begin);
164 | }
165 |
166 | return list.ToArray();
167 | }
168 |
169 | ///
170 | /// Reads all the data in the multiple streams and returns it as an arrays.
171 | ///
172 | /// An array of Stream containing the data of the multiple streams
173 | public Stream[] ReadAsStreamArray()
174 | {
175 | var list = new List<Stream>(streamList.Count);
176 |
177 | foreach (Stream stream in streamList)
178 | {
179 | stream.Seek(0, SeekOrigin.Begin);
180 | list.Add(stream);
181 | }
182 |
183 | return list.ToArray();
184 | }
185 |
186 | ///
187 | /// Reads a specified number of bytes from the multiple streams into a buffer, starting at a specified index.
188 | ///
189 | /// The buffer to read the data into
190 | /// The starting index in the buffer
191 | /// The number of bytes to read
192 | /// The total number of bytes read into the buffer
193 | public override int Read(byte[] buffer, int offset, int count)
194 | {
195 | long len = 0;
196 | var result = 0;
197 | var bufPos = offset;
198 |
199 | foreach (Stream stream in streamList)
200 | {
201 | if (position < len + stream.Length)
202 | {
203 | stream.Position = position - len;
204 | var bytesRead = stream.Read(buffer, bufPos, count);
205 | result += bytesRead;
206 | bufPos += bytesRead;
207 | position += bytesRead;
208 |
209 | if (bytesRead < count)
210 | count -= bytesRead;
211 | else
212 | break;
213 | }
214 |
215 | len += stream.Length;
216 | }
217 |
218 | return result;
219 | }
220 |
221 | ///
222 | /// Adds a new stream to the multiple streams list.
223 | ///
224 | /// The stream to add
225 | public void AddStream(Stream stream)
226 | {
227 | streamList.Add(stream);
228 | }
229 |
230 | ///
231 | /// Sets the length of the multiple streams.
232 | /// WARNING!!! This method cannot be implemented in this class
233 | ///
234 | /// The length to set
235 | /// This method is not implemented in this class
236 | public override void SetLength(long value)
237 | {
238 | }
239 |
240 | ///
241 | /// Clears all buffers for the multiple streams and causes any buffered data to be written to the underlying devices.
242 | ///
243 | public override void Flush()
244 | {
245 | foreach (var t in streamList)
246 | ((Stream)t).Flush();
247 | }
248 |
249 | ///
250 | /// Writes a specified number of bytes to the multiple streams from a buffer, starting at a specified index.
251 | /// WARNING!!! This method cannot be implemented in this class
252 | ///
253 | public override void Write(byte[] buffer, int offset, int count)
254 | {
255 | }
256 | }
--------------------------------------------------------------------------------
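
A brief sketch of combining in-memory streams with the MultiStream above, using only the members shown in this file.

    using MediaFileProcessor.Models.Common;

    var multi = new MultiStream();
    multi.AddStream(new MemoryStream(new byte[] { 1, 2, 3 }));
    multi.AddStream(new MemoryStream(new byte[] { 4, 5 }));

    Console.WriteLine(multi.Length);    // 5, the sum of the individual stream lengths
    Console.WriteLine(multi.Count);     // 2

    // Pull the data back out per stream (each stream is rewound afterwards).
    byte[][] parts = multi.ReadAsDataArray();   // parts[0] = { 1, 2, 3 }, parts[1] = { 4, 5 }

    // Or read sequentially across stream boundaries through the single virtual position.
    var buffer = new byte[5];
    multi.Read(buffer, 0, buffer.Length);       // buffer = { 1, 2, 3, 4, 5 }
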
/MediaFileProcessor/MediaFileProcessor/Models/Enums/AudioBitrateType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Enum representing audio bitrate values in bits per second.
5 | ///
6 | public enum AudioBitrateType
7 | {
8 | ///
9 | /// Bitrate of 64 kilobits per second.
10 | ///
11 | _64k = 64000,
12 |
13 | ///
14 | /// Bitrate of 96 kilobits per second.
15 | ///
16 | _96k = 96000,
17 |
18 | ///
19 | /// Bitrate of 128 kilobits per second.
20 | ///
21 | _128k = 128000,
22 |
23 | ///
24 | /// Bitrate of 160 kilobits per second.
25 | ///
26 | _160k = 160000,
27 |
28 | ///
29 | /// Bitrate of 192 kilobits per second.
30 | ///
31 | _192k = 192000,
32 |
33 | ///
34 | /// Bitrate of 256 kilobits per second.
35 | ///
36 | _256k = 256000,
37 |
38 | ///
39 | /// Bitrate of 320 kilobits per second.
40 | ///
41 | _320k = 320000
42 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/AudioBitstreamFilterType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Represents some common bitstream filters for audio in FFmpeg.
5 | ///
6 | public enum AudioBitstreamFilterType
7 | {
8 | ///
9 | /// Converts AAC ADTS to an MPEG-4 Audio Specific Config.
10 | ///
11 | Aac_Adtstoasc = 1,
12 |
13 | ///
14 | /// Add noise to the input audio.
15 | ///
16 | Noise = 2,
17 |
18 | ///
19 | /// Remove extradata from the input audio.
20 | ///
21 | Remove_Extra = 3
22 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/AudioCodecType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Enumeration for the supported audio codec types.
5 | ///
6 | public enum AudioCodecType
7 | {
8 | ///
9 | /// The audio codec type for copying the original audio without encoding or compression.
10 | ///
11 | ///
12 | /// This option can be useful when the original audio quality is already high,
13 | /// and you want to maintain the original quality without any further processing.
14 | ///
15 | COPY = 0,
16 |
17 | ///
18 | /// The audio codec type for MP3 encoding.
19 | ///
20 | ///
21 | /// MP3 is a popular audio format that uses lossy compression to reduce file size,
22 | /// while maintaining good audio quality for most applications.
23 | ///
24 | MP3 = 1,
25 |
26 | ///
27 | /// The audio codec type for WMA encoding.
28 | ///
29 | ///
30 | /// WMA is a proprietary audio format developed by Microsoft,
31 | /// and it offers both lossless and lossy compression options.
32 | ///
33 | WMA = 2,
34 |
35 | ///
36 | /// The audio codec type for WAV encoding.
37 | ///
38 | ///
39 | /// WAV is a standard audio format for storing uncompressed audio data,
40 | /// and it is commonly used for storing high-quality audio recordings.
41 | ///
42 | WAV = 3,
43 |
44 | ///
45 | /// The audio codec type for AAC encoding.
46 | ///
47 | ///
48 | /// AAC is a popular audio format that uses lossy compression,
49 | /// and it is widely used for storing audio on portable devices and streaming audio over the internet.
50 | ///
51 | AAC = 4,
52 |
53 | ///
54 | /// The audio codec type for FLAC encoding.
55 | ///
56 | ///
57 | /// FLAC is a lossless audio format that is commonly used for storing high-quality audio recordings,
58 | /// and it provides efficient compression without sacrificing audio quality.
59 | ///
60 | FLAC = 5,
61 |
62 | ///
63 | /// The audio codec type for ALAC encoding.
64 | ///
65 | ///
66 | /// ALAC is a lossless audio format developed by Apple,
67 | /// and it is commonly used for storing high-quality audio recordings on Apple devices.
68 | ///
69 | ALAC = 6,
70 |
71 | ///
72 | /// The audio codec type for WMA encoding.
73 | ///
74 | ///
75 | /// WMA is a proprietary audio format developed by Microsoft,
76 | /// and it offers both lossless and lossy compression options.
77 | ///
78 | WMAV2 = 7,
79 |
80 | ///
81 | /// AC3 (Audio Codec 3), also known as Dolby Digital, is a lossy audio compression format developed by Dolby Laboratories.
82 | /// It is widely used in surround sound systems, such as those in movie theaters and home theaters.
83 | /// AC3 supports multiple audio channels, making it suitable for multi-channel audio setups (e.g., 5.1 or 7.1 surround sound).
84 | ///
85 | AC3 = 8,
86 |
87 | ///
88 | /// The pcm_s16le audio codec in FFmpeg represents uncompressed Pulse Code Modulation (PCM) audio data with a signed 16-bit little-endian format.
89 | /// PCM is a method used to digitally represent analog signals, and it is the standard form of digital audio in computers, compact discs, digital telephony, and other digital audio applications.
90 | /// Here's a brief explanation of the pcm_s16le codec:
91 | /// PCM (Pulse Code Modulation): A technique used to convert analog audio signals into digital data without any compression.
92 | /// S16: Refers to a signed 16-bit integer format, meaning each audio sample is represented by a 16-bit value, allowing for a dynamic range of approximately 96 decibels (dB).
93 | /// LE (Little-Endian): Represents the byte order of the 16-bit values, where the least significant byte comes first.
94 | ///
95 | PCM_S16LE = 9,
96 |
97 |
98 | ///
99 | /// libopus is an FFmpeg audio codec that allows you to encode or decode audio streams using the Opus audio format.
100 | /// Opus is a versatile, open, and royalty-free audio codec designed for a wide range of applications, from low-latency real-time communication to high-quality audio streaming.
101 | /// It is well-suited for both speech and music, and it provides low-latency and high compression efficiency.
102 | ///
103 | LIBOPUS = 10
104 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/AudioFilterType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Audio Filter Type
5 | ///
6 | public enum AudioFilterType
7 | {
8 | ///
9 | /// Changes the volume of the audio.
10 | /// Example of use: volume=2 (increases volume by 2 times)
11 | ///
12 | Volume,
13 |
14 | ///
15 | /// Applies equalization to audio.
16 | /// Usage example: equalizer=f=1000:width_type=h:width=200:g=-10 (reduces the frequency of 1000 Hz by 10 dB)
17 | ///
18 | Equalizer,
19 |
20 | ///
21 | /// Adds reverb to audio.
22 | /// Usage example: aecho=0.8:0.9:1000|1800:0.3|0.25 (parameters for easy reverb)
23 | ///
24 | Aecho,
25 |
26 | ///
27 | /// Changes the audio playback speed without changing the pitch.
28 | /// Example of use: atempo=1.5 (increases playback speed by 1.5 times)
29 | ///
30 | Atempo,
31 |
32 | ///
33 | /// Changes the pitch of the audio without changing the playback speed.
34 | /// Example use: asetrate=480001.5 (increases pitch by 1.5 times)
35 | ///
36 | Asetrate,
37 |
38 | ///
39 | /// Removes noise from the audio recording.
40 | /// Usage example: anlmdenoise=s=1 (standard noise removal profile)
41 | ///
42 | Anlmdenoise
43 | }
--------------------------------------------------------------------------------
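
The comments above use ffmpeg's -af filter syntax (for example atempo=1.5 or volume=2). Purely as an illustration, a hedged sketch of turning an enum member into such an argument; the ToString-based mapping below is an assumption for the sketch, not an API of this library.

    using MediaFileProcessor.Models.Enums;

    // Illustrative helper only: lower-cases the member name to get the ffmpeg filter name.
    static string ToAudioFilterArgument(AudioFilterType filter, string parameters)
        => $"-af \"{filter.ToString().ToLowerInvariant()}={parameters}\"";

    Console.WriteLine(ToAudioFilterArgument(AudioFilterType.Atempo, "1.5")); // -af "atempo=1.5"
    Console.WriteLine(ToAudioFilterArgument(AudioFilterType.Volume, "2"));   // -af "volume=2"
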
/MediaFileProcessor/MediaFileProcessor/Models/Enums/AudioSampleRateType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Enumeration for the supported audio sample rate types.
5 | ///
6 | public enum AudioSampleRateType
7 | {
8 | ///
9 | /// The audio sample rate type for 22050 Hz.
10 | ///
11 | ///
12 | /// 22050 Hz is a standard sample rate for many audio applications,
13 | /// including voice recordings, podcasting, and speech recognition.
14 | ///
15 | Hz22050 = 0,
16 |
17 | ///
18 | /// The audio sample rate type for 44100 Hz.
19 | ///
20 | ///
21 | /// 44100 Hz is a standard sample rate for audio CDs and many other audio applications.
22 | ///
23 | Hz44100 = 1,
24 |
25 | ///
26 | /// The audio sample rate type for 48000 Hz.
27 | ///
28 | ///
29 | /// 48000 Hz is a common sample rate for professional audio applications,
30 | /// including digital audio workstations and video post-production.
31 | ///
32 | Hz48000 = 2
33 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/AudioSyncMethodType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Enum representing audio sync values.
5 | ///
6 | public enum AudioSyncMethodType
7 | {
8 | ///
9 | /// Audio is stretched/squeezed to match the timestamps.
10 | ///
11 | Stretch,
12 |
13 | ///
14 | /// Audio is passed through as is.
15 | ///
16 | Passthrough
17 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/CpuFlags.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Enumeration of CPU flags used in x86 architecture.
5 | ///
6 | public enum CpuFlags
7 | {
8 | ///
9 | /// MMX is a SIMD instruction set designed by Intel, introduced in 1997 with their P5-based Pentium line of microprocessors.
10 | ///
11 | MMX = 1,
12 |
13 | ///
14 | /// MMXEXT is a SIMD instruction set extension designed by AMD.
15 | ///
16 | MMXEXT = 2,
17 |
18 | ///
19 | /// SSE (Streaming SIMD Extensions) is a SIMD instruction set extension to the x86 architecture, designed by Intel.
20 | ///
21 | SSE = 3,
22 |
23 | ///
24 | /// SSE2 is one of the Intel SIMD processor supplementary instruction sets.
25 | ///
26 | SSE2 = 4,
27 |
28 | ///
29 | /// SSE2Slow is an extension of the SSE2 instruction set.
30 | ///
31 | SSE2Slow = 5,
32 |
33 | ///
34 | /// SSE3 is the third iteration of the SSE instruction set for the IA-32 architecture.
35 | ///
36 | SSE3 = 6,
37 |
38 | ///
39 | /// SSE3Slow is an extension of the SSE3 instruction set.
40 | ///
41 | SSE3Slow = 7,
42 |
43 | ///
44 | /// SSSE3 adds 16 new instructions to SSE3.
45 | ///
46 | SSSE3 = 8,
47 |
48 | ///
49 | /// Atom is a system on chip platform designed for smartphones and tablet computers, launched by Intel in 2008.
50 | ///
51 | Atom = 9,
52 |
53 | ///
54 |     /// SSE4.1 is a SIMD instruction set used in Intel processors, introduced in 2006 with their Core microarchitecture.
55 | ///
56 | SSE4_1 = 10,
57 |
58 | ///
59 | /// SSE4.2, like its predecessor SSE4.1, is a SIMD CPU instruction set used in the Intel Core microarchitecture and AMD’s K10 microarchitecture.
60 | ///
61 | SSE4_2 = 11,
62 |
63 | ///
64 | /// AVX (Advanced Vector Extensions) are extensions to the x86 instruction set architecture for microprocessors from Intel and AMD.
65 | ///
66 | AVX = 12,
67 |
68 | ///
69 | /// AVX2 extends the AVX instruction set with new integer instructions.
70 | ///
71 | AVX2 = 13,
72 |
73 | ///
74 | /// XOP is a SIMD instruction set designed by AMD.
75 | ///
76 | XOP = 14,
77 |
78 | ///
79 | /// FMA3 is an instruction set designed by Intel and is used in certain AMD processors.
80 | ///
81 | FMA3 = 15,
82 |
83 | ///
84 | /// FMA4 is an instruction set designed by AMD for their processors.
85 | ///
86 | FMA4 = 16,
87 |
88 | ///
89 |     /// 3DNow! is AMD's SIMD extension to the x86 instruction set, adding floating-point operations to MMX; it was introduced with the K6-2 microprocessor.
90 | ///
91 | _3dnow = 17,
92 |
93 | ///
94 | /// 3dnowext is an extension of the 3DNow! SIMD instruction set.
95 | ///
96 | _3dnowext = 18,
97 |
98 | ///
99 | /// BMI1 (Bit Manipulation Instruction Set 1) is an x86 instruction set that introduces several new instructions that offer new ways of performing bitwise operations.
100 | ///
101 | BMI1 = 19,
102 |
103 | ///
104 | /// BMI2 (Bit Manipulation Instruction Set 2) is an extension to the x86 instruction set used by Intel and AMD processors.
105 | ///
106 | BMI2 = 20,
107 |
108 | ///
109 |     /// CMOV (Conditional Move) is an x86 instruction that moves data based on the state of the condition flags.
110 | ///
111 | CMOV = 21,
112 |
113 | ///
114 |     /// ARMv5TE is a version of the ARM family of general-purpose 32-bit RISC microprocessor architectures.
115 | ///
116 | ARMv5TE = 22,
117 |
118 | ///
119 | /// ARMv6 is a group of ARM architecture CPU cores designed and licensed by ARM Holdings.
120 | ///
121 | ARMv6 = 23,
122 |
123 | ///
124 | /// ARMv6t2 introduces the Thumb-2 instruction set for better performance.
125 | ///
126 | ARMv6t2 = 24,
127 |
128 | ///
129 | /// Vector Floating Point (VFP) is used to improve the performance of floating-point operations.
130 | ///
131 | Vector = 25,
132 |
133 | ///
134 | /// Advanced SIMD, also known as Neon, is a combined 64- and 128-bit SIMD instruction set.
135 | ///
136 | Neon = 26,
137 |
138 | ///
139 | /// SETEND allows changing of the byte endianness (big endian or little endian).
140 | ///
141 | SETEND = 27,
142 |
143 | ///
144 | /// ARMv8 introduces the 64-bit instruction set.
145 | ///
146 | ARMv8 = 28,
147 |
148 | ///
149 | /// Altivec is a SIMD extension created by Motorola for PowerPC microprocessors.
150 | ///
151 | Altivec = 29,
152 |
153 | ///
154 |     /// VFP (Vector Floating Point) is the ARM architecture's floating-point extension.
155 | ///
156 | VFP = 30,
157 |
158 | ///
159 | /// The Pentium II is a microprocessor from Intel. It is based on the P6 microarchitecture.
160 | ///
161 | pentium2 = 31,
162 |
163 | ///
164 | /// The Pentium III is a microprocessor from Intel that is based on its predecessor, the Pentium II.
165 | ///
166 | pentium3 = 32,
167 |
168 | ///
169 | /// The Pentium 4 is a line of single-core central processing units (CPUs) for desktops, laptops and entry-level servers introduced by Intel.
170 | ///
171 | pentium4 = 33,
172 |
173 | ///
174 | /// The K6 is a series of microprocessors that were designed by Advanced Micro Devices (AMD).
175 | ///
176 | k6 = 34,
177 |
178 | ///
179 | /// K6-2 is a model in the AMD K6 line of microprocessors.
180 | ///
181 | k62 = 35,
182 |
183 | ///
184 | /// The Athlon is a series of 32-bit microprocessors designed and marketed by AMD to compete with Intel's Pentium processors.
185 | ///
186 | athlon = 36,
187 |
188 | ///
189 | /// Athlon XP is a family of 32-bit microprocessors designed and manufactured by AMD based on the Athlon microarchitecture.
190 | ///
191 | athlonxp = 37,
192 |
193 | ///
194 | /// K8 is a microprocessor architecture designed by AMD as the successor to the K7 architecture.
195 | ///
196 | k8 = 38
197 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/DpiValueType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Represents some common DPI values for FFmpeg.
5 | ///
6 | public enum DpiValueType
7 | {
8 | ///
9 | /// 72 DPI, common for web images.
10 | ///
11 | Dpi72 = 72,
12 |
13 | ///
14 | /// 96 DPI, common for Windows and other desktop screens.
15 | ///
16 | Dpi96 = 96,
17 |
18 | ///
19 | /// 150 DPI, higher resolution for print images.
20 | ///
21 | Dpi150 = 150,
22 |
23 | ///
24 | /// 300 DPI, high resolution for print images.
25 | ///
26 | Dpi300 = 300,
27 |
28 | ///
29 | /// 600 DPI, very high resolution for print images.
30 | ///
31 | Dpi600 = 600
32 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/DvType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Represents the DV types for FFmpeg.
5 | ///
6 | public enum DvType
7 | {
8 | ///
9 | /// Standard DV format.
10 | ///
11 | DV,
12 |
13 | ///
14 | /// DVCPRO50 format.
15 | ///
16 | DV50,
17 |
18 | ///
19 | /// DV25 format.
20 | ///
21 | DV25
22 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/EncoderTimebaseType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Encoder timebase when stream copying
5 | ///
6 | public enum EncoderTimebaseType
7 | {
8 | ///
9 |     /// Use the decoder timebase.
10 |     /// The time base is copied to the output encoder from the corresponding input decoder.
11 |     ///
12 |     Decoder = 0,
13 |
14 |     ///
15 |     /// Use the demuxer timebase.
16 |     /// The time base is copied to the output encoder from the corresponding input demuxer. This is sometimes required to avoid non-monotonically increasing timestamps when copying video streams with variable frame rate.
17 | ///
18 | Demuxer = 1,
19 |
20 | ///
21 | /// Try to make the choice automatically, in order to generate a sane output.
22 | ///
23 | Auto = -1
24 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/FFmpegStrictness.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Represents the strictness levels for FFmpeg.
5 | ///
6 | public enum FFmpegStrictness
7 | {
8 | ///
9 | /// Allow all FFmpeg features, including those considered experimental.
10 | ///
11 | Experimental = -2,
12 |
13 | ///
14 |     /// Allow unofficial extensions (everything except features marked as experimental).
15 | ///
16 | Unofficial = -1,
17 |
18 | ///
19 | /// This is the default value. It allows all official FFmpeg features.
20 | ///
21 | Normal = 0,
22 |
23 | ///
24 | /// Only allow standards-compliant features. This might disable some features that work but are not fully standards-compliant.
25 | ///
26 | Strict = 1,
27 |
28 | ///
29 | /// Similar to Strict, but even more restrictive.
30 | ///
31 | Very = 2
32 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/FileFormatType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Enumeration for the supported file format types.
5 | ///
6 | public enum FileFormatType
7 | {
8 | ///
9 | /// The file format type for 3GP files.
10 | ///
11 | ///
12 | /// 3GP is a multimedia container format that is commonly used for storing video and audio on mobile devices.
13 | ///
14 | _3GP = 0,
15 |
16 | ///
17 | /// The file format type for MP4 files.
18 | ///
19 | ///
20 | /// MP4 is a multimedia container format that is widely used for storing video and audio,
21 | /// and it provides efficient compression without sacrificing audio/video quality.
22 | ///
23 | MP4 = 1,
24 |
25 | ///
26 | /// The file format type for ICO files.
27 | ///
28 | ///
29 | /// ICO is a file format for storing icons in Microsoft Windows,
30 | /// and it can store multiple images of different sizes and color depths in a single file.
31 | ///
32 | ICO = 2,
33 |
34 | ///
35 | /// The file format type for BIN files.
36 | ///
37 | ///
38 | /// BIN is a binary file format that is commonly used for storing executable code or data.
39 | ///
40 | BIN = 3,
41 |
42 | ///
43 | /// The file format type for MOV files.
44 | ///
45 | ///
46 | /// MOV is a multimedia container format that is commonly used for storing video and audio on Apple devices.
47 | ///
48 | MOV = 4,
49 |
50 | ///
51 | /// The file format type for TIFF files.
52 | ///
53 | ///
54 | /// TIFF is a file format for storing raster graphics images,
55 | /// and it is widely used in the printing and publishing industry.
56 | ///
57 | TIFF = 5,
58 |
59 | ///
60 | /// The file format type for MATROSKA files.
61 | ///
62 | ///
63 | /// MATROSKA is a free and open-source multimedia container format,
64 | /// and it is commonly used for storing high-definition video and audio.
65 | ///
66 | MKV = 7,
67 | ///
68 | /// The file format type for AVI files.
69 | ///
70 | ///
71 | /// AVI is a popular multimedia container format that was developed by Microsoft,
72 | /// and it is widely used for storing video and audio.
73 | ///
74 | AVI = 8,
75 |
76 | ///
77 | /// The file format type for MPEG files.
78 | ///
79 | ///
80 | /// MPEG is a family of multimedia file formats that are used for storing video and audio,
81 | /// and it provides high compression with good video quality.
82 | ///
83 | MPEG = 9,
84 |
85 | ///
86 | /// The file format type for MPEG-TS files.
87 | ///
88 | ///
89 |     /// MPEG-TS (MPEG transport stream) is a standard container format for the transmission of video and audio,
90 |     /// and it is widely used in digital broadcasting and for delivering video to set-top boxes and other digital devices.
91 | ///
92 | TS = 10,
93 |
94 | ///
95 | /// WAV (Waveform Audio File Format) is a standard digital audio file format developed by Microsoft and IBM.
96 | /// It is widely used on Windows-based computers and is also supported on other operating systems.
97 | ///
98 | WAV = 11,
99 |
100 | ///
101 | /// The file format type for GIF files.
102 | ///
103 | ///
104 | /// GIF is a file format for storing graphics and animations,
105 | /// and it is widely used for simple animations on the web.
106 | ///
107 | GIF = 12,
108 |
109 | ///
110 | /// The file format type for VOB files.
111 | ///
112 | ///
113 | /// VOB is a file format for storing video data on DVDs,
114 | /// and it is used to store the main movie, menus, and other video content on a DVD.
115 | ///
116 | VOB = 13,
117 |
118 | ///
119 | /// The file format type for M2TS files.
120 | ///
121 | ///
122 | /// M2TS is a file format for storing high-definition video and audio on Blu-ray discs,
123 | /// and it provides high-quality video and audio playback.
124 | ///
125 | M2TS = 14,
126 |
127 | ///
128 | /// The file format type for MXF files.
129 | ///
130 | ///
131 | /// MXF is a file format for storing professional-quality video and audio,
132 | /// and it is commonly used in the television and film industries.
133 | ///
134 | MXF = 15,
135 |
136 | ///
137 | /// The file format type for WEBM files.
138 | ///
139 | ///
140 | /// WEBM is a free and open-source multimedia file format that is designed for the web,
141 | /// and it provides efficient compression with good video quality.
142 | ///
143 | WEBM = 16,
144 |
145 | ///
146 | /// GXF file format. This format is commonly used in broadcast applications to store video and audio content.
147 | ///
148 | GXF = 17,
149 |
150 | ///
151 | /// FLV file format. This format is commonly used to deliver video over the internet and supports video and audio codecs such as H.263, VP6, and MP3.
152 | ///
153 | FLV = 18,
154 |
155 | ///
156 | /// OGG file format. This format is commonly used for streaming audio and video content over the internet and uses open source codecs such as Theora and Vorbis.
157 | ///
158 | OGG = 19,
159 |
160 | ///
161 | /// WMV file format. This format is developed by Microsoft and commonly used for delivering video content over the internet. It supports Windows Media Audio and Windows Media Video codecs.
162 | ///
163 | WMV = 20,
164 |
165 | ///
166 | /// BMP file format. This format is used to store bitmap images and widely supported across various platforms.
167 | ///
168 | BMP = 21,
169 |
170 | ///
171 | /// ASF file format. This format is developed by Microsoft and commonly used for delivering video and audio content over the internet. It supports a wide range of codecs including Windows Media Audio and Video.
172 | ///
173 | ASF = 22,
174 |
175 | ///
176 | /// JPG file format. This format is used to store images and is commonly used for digital photography and graphics.
177 | ///
178 | JPG = 23,
179 |
180 | ///
181 |     /// WebP is an image file format that is designed to be used on the web.
182 | /// It uses both lossy and lossless compression algorithms to reduce the size of images without significantly reducing their quality.
183 | /// WebP is designed to be a smaller, faster-loading alternative to other image file formats such as JPEG and PNG.
184 | /// It is supported by most modern web browsers and is often used by web developers to optimize the performance of their websites.
185 | ///
186 | WEBP = 24,
187 |
188 | ///
189 | /// IMAGE2PIPE file format. This format is used to stream images through pipes, it's supported by FFmpeg.
190 | ///
191 | IMAGE2PIPE = 25,
192 |
193 | ///
194 | /// The IMAGE2 format. This format is commonly used for image data.
195 | ///
196 | IMAGE2 = 26,
197 |
198 | ///
199 | /// The PNG format. This is a lossless image format that is commonly used for web graphics.
200 | ///
201 | PNG = 27,
202 |
203 | ///
204 | /// The raw video format. This is a format that stores video data in its raw form, without any compression or encoding.
205 | ///
206 | RAWVIDEO = 28,
207 |
208 | ///
209 | /// The MP3 format. This is a popular audio compression format that provides good sound quality with small file sizes.
210 | ///
211 | MP3 = 29,
212 |
213 | ///
214 | /// The raw format. This format is used to store data in its raw, unprocessed form.
215 | ///
216 | RAW = 30,
217 |
218 | ///
219 | /// The SVG format. This is a vector graphics format that is commonly used for creating images and graphics for the web.
220 | ///
221 | SVG = 31,
222 |
223 | ///
224 | /// The PSD format. This is the native format of Adobe Photoshop, and is used to store image data in a layered format.
225 | ///
226 | PSD = 32,
227 |
228 | ///
229 | /// The RM format. This is a multimedia format that is commonly used for streaming audio and video over the internet.
230 | ///
231 | RM = 33,
232 |
233 | ///
234 | /// M4V is a video file format that is similar to the more popular MP4 format.
235 | /// M4V files are typically used to store video content that has been downloaded or purchased from the iTunes Store,
236 | /// and they are often protected by Apple's FairPlay DRM (Digital Rights Management) system to prevent unauthorized distribution.
237 | ///
238 | M4V = 34,
239 |
240 | ///
241 | /// WMA (Windows Media Audio) is a proprietary audio compression format developed by Microsoft.
242 | /// It was first released in 1999 as a competitor to MP3, which was the dominant audio format at the time.
243 | ///
244 | WMA = 35,
245 |
246 | ///
247 |     /// An alternative to the MP3 format that was popularized by Apple.
248 |     /// It is a lossy compressed format, but generally offers better sound quality than MP3 at the same bitrate.
249 | /// Used for downloading from iTunes and streaming from Apple Music.
250 | ///
251 | AAC = 36,
252 |
253 | ///
254 |     /// Lossless compression format that supports Hi-Res sample rates and rich metadata storage;
255 |     /// files are roughly half the size of the equivalent WAV. Because it is royalty-free,
256 |     /// it is a popular choice for downloading and archiving albums in Hi-Res audio.
257 |     /// Its main drawback is the lack of native support on Apple devices (and, therefore, incompatibility with iTunes).
258 | ///
259 | FLAC = 37,
260 |
261 | ///
262 | /// Apple audio format
263 | ///
264 | M4A = 38
265 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/FilterType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Enum to represent different types of filters that can be used.
5 | ///
6 | public enum FilterType
7 | {
8 | ///
9 | /// Point filter.
10 | ///
11 | Point = 0,
12 |
13 | ///
14 | /// Box filter.
15 | ///
16 | Box = 1,
17 |
18 | ///
19 | /// Triangle filter.
20 | ///
21 | Triangle = 2,
22 |
23 | ///
24 | /// Cubic spline filter.
25 | ///
26 | CubicSpline = 3,
27 |
28 | ///
29 | /// Quadratic filter.
30 | ///
31 | Quadratic = 4,
32 |
33 | ///
34 | /// Gaussian filter.
35 | ///
36 | Gaussian = 5,
37 |
38 | ///
39 | /// Hermite filter.
40 | ///
41 | Hermite = 6,
42 |
43 | ///
44 | /// Cubic filter.
45 | ///
46 | Cubic = 7,
47 |
48 | ///
49 | /// Catrom filter.
50 | ///
51 | Catrom = 8,
52 |
53 | ///
54 | /// Mitchell filter.
55 | ///
56 | Mitchell = 9,
57 |
58 | ///
59 | /// Lanczos filter.
60 | ///
61 | Lanczos = 10,
62 |
63 | ///
64 | /// Blackman filter.
65 | ///
66 | Blackman = 11,
67 |
68 | ///
69 | /// Kaiser filter.
70 | ///
71 | Kaiser = 12,
72 |
73 | ///
74 | /// Welsh filter.
75 | ///
76 | Welsh = 13,
77 |
78 | ///
79 | /// Hanning filter.
80 | ///
81 | Hanning = 14,
82 |
83 | ///
84 | /// Bartlett filter.
85 | ///
86 | Bartlett = 15,
87 |
88 | ///
89 | /// Bohman filter.
90 | ///
91 | Bohman = 16
92 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/HardwareAccelerationType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Enum to represent different types of hardware acceleration.
5 | ///
6 | public enum HardwareAccelerationType
7 | {
8 | ///
9 | /// Automatically choose the best hardware acceleration method.
10 | ///
11 | AUTO = 0,
12 |
13 | ///
14 | /// DirectX Video Acceleration 2 (DXVA2) hardware acceleration.
15 | ///
16 | DXVA2 = 1,
17 |
18 | ///
19 | /// Video Decode and Presentation API for Unix (VDPAU) hardware acceleration.
20 | ///
21 | VDPAU = 2,
22 |
23 | ///
24 | /// Direct3D 11 Video Acceleration (D3D11VA) hardware acceleration.
25 | ///
26 | D3D11VA = 3,
27 |
28 | ///
29 | /// Video Acceleration API (VAAPI) hardware acceleration.
30 | ///
31 | VAAPI = 4,
32 |
33 | ///
34 | /// Intel Quick Sync Video (QSV) hardware acceleration.
35 | ///
36 | QSV = 5,
37 |
38 | ///
39 | /// NVIDIA CUDA hardware acceleration.
40 | ///
41 | CUDA = 6,
42 |
43 | ///
44 | /// NVIDIA NVDEC hardware acceleration.
45 | ///
46 | NVDEC = 7,
47 |
48 | ///
49 | /// NVIDIA CUVID hardware acceleration.
50 | ///
51 | CUVID = 8
52 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/HelpOptionType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Enumeration of different types of help options.
5 | ///
6 | public enum HelpOptionType
7 | {
8 | ///
9 | /// Print advanced tool options in addition to the basic tool options.
10 | ///
11 | Long,
12 |
13 | ///
14 | /// Print complete list of options, including shared and private options
15 | /// for encoders, decoders, demuxers, muxers, filters, etc.
16 | ///
17 | Full,
18 |
19 | ///
20 | /// Print detailed information about the decoder named decoder_name.
21 | /// Use the -decoders option to get a list of all decoders.
22 | ///
23 | Decoder,
24 |
25 | ///
26 | /// Print detailed information about the encoder named encoder_name.
27 | /// Use the -encoders option to get a list of all encoders.
28 | ///
29 | Encoder,
30 |
31 | ///
32 | /// Print detailed information about the demuxer named demuxer_name.
33 | /// Use the -formats option to get a list of all demuxers and muxers.
34 | ///
35 | Demuxer,
36 |
37 | ///
38 | /// Print detailed information about the muxer named muxer_name.
39 | /// Use the -formats option to get a list of all muxers and demuxers.
40 | ///
41 | Muxer,
42 |
43 | ///
44 | /// Print detailed information about the filter named filter_name.
45 | /// Use the -filters option to get a list of all filters.
46 | ///
47 | Filter,
48 |
49 | ///
50 | /// Print detailed information about the bitstream filter named bitstream_filter_name.
51 | /// Use the -bsfs option to get a list of all bitstream filters.
52 | ///
53 | Bsf,
54 |
55 | ///
56 | /// Print detailed information about the protocol named protocol_name.
57 | /// Use the -protocols option to get a list of all protocols.
58 | ///
59 | Protocol
60 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/ImageFormatType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Enum to represent different image formats.
5 | ///
6 | public enum ImageFormatType
7 | {
8 | ///
9 | /// Joint Photographic Experts Group (JPG) image format.
10 | ///
11 | JPG = 0,
12 |
13 | ///
14 | /// Portable Network Graphics (PNG) image format.
15 | ///
16 | PNG = 1,
17 |
18 | ///
19 | /// Bitmap (BMP) image format.
20 | ///
21 | BMP = 2,
22 |
23 | ///
24 | /// Tagged Image File Format (TIFF) image format.
25 | ///
26 | TIFF = 3,
27 |
28 | ///
29 | /// Joint Photographic Experts Group (JPEG) image format.
30 | ///
31 | JPEG = 4,
32 |
33 | ///
34 | /// RAW image format.
35 | ///
36 | RAW = 5,
37 |
38 | ///
39 | /// Scalable Vector Graphics (SVG) image format.
40 | ///
41 | SVG = 6,
42 |
43 | ///
44 | /// Adobe Photoshop Document (PSD) image format.
45 | ///
46 | PSD = 7
47 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/LanguageType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Represents some common languages for FFmpeg.
5 | ///
6 | public enum LanguageType
7 | {
8 | ///
9 | /// English language.
10 | ///
11 | Eng,
12 |
13 | ///
14 | /// French language.
15 | ///
16 | Fra,
17 |
18 | ///
19 | /// German language.
20 | ///
21 | Deu,
22 |
23 | ///
24 | /// Spanish language.
25 | ///
26 | Esp,
27 |
28 | ///
29 | /// Russian language.
30 | ///
31 | Rus
32 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/MediaFileInputType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Enum to represent different types of media file inputs.
5 | ///
6 | public enum MediaFileInputType
7 | {
8 | ///
9 | /// Input media file is specified by its file path.
10 | ///
11 | Path = 0,
12 |
13 | ///
14 | /// Input media file is specified by a stream.
15 | ///
16 | Stream = 1,
17 |
18 | ///
19 | /// Input media file is specified by a named pipe.
20 | ///
21 | NamedPipe = 3
22 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/MovFlagsType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Represents some common MOV format flags for FFmpeg.
5 | ///
6 | public enum MovFlagsType
7 | {
8 | ///
9 | /// Move the moov atom to the beginning of the file.
10 | ///
11 | Faststart = 1,
12 |
13 | ///
14 | /// Each fragment starts with a keyframe.
15 | ///
16 | FragKeyframe = 2,
17 |
18 | ///
19 | /// Create an empty moov atom. Generally used with live streams.
20 | ///
21 | EmptyMoov = 3,
22 |
23 | ///
24 |     /// Disable writing of Nero chapter atoms ('chpl').
25 | ///
26 | DisableChpl = 4,
27 |
28 | ///
29 |     /// Write an ISML (IIS Smooth Streaming) live header so the output can be pushed to a publishing point.
30 | ///
31 | IsmlLive = 5
32 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/PixelFormatType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Specifies the pixel format for video encoding or decoding.
5 | ///
6 | public enum PixelFormatType
7 | {
8 | ///
9 | /// 8 bits per pixel, YUV 4:0:0, full scale (JPEG).
10 | ///
11 | YUV400P = 0,
12 |
13 | ///
14 | /// 8 bits per pixel, planar YUV 4:1:0, (1 Cr & Cb sample per 4x4 Y samples).
15 | ///
16 | YUV410P = 1,
17 |
18 | ///
19 | /// 8 bits per pixel, planar YUV 4:2:0, (1 Cr & Cb sample per 2x2 Y samples).
20 | ///
21 | YUV420P = 2,
22 |
23 | ///
24 | /// 8 bits per pixel, planar YUV 4:2:2, (1 Cr & Cb sample per 2x1 Y samples).
25 | ///
26 | YUV422P = 3,
27 |
28 | ///
29 | /// 8 bits per pixel, planar YUV 4:4:4, (1 Cr & Cb sample per 1x1 Y samples).
30 | ///
31 | YUV444P = 4,
32 |
33 | ///
34 | /// 8 bits per pixel, packed YUV 4:2:2, Y0 Cb Y1 Cr.
35 | ///
36 | YUYV422 = 5,
37 |
38 | ///
39 | /// 24 bits per pixel, packed RGB 8:8:8, RGBRGB...
40 | ///
41 | RGB24 = 6,
42 |
43 | ///
44 | /// 24 bits per pixel, packed RGB 8:8:8, BGRBGR...
45 | ///
46 | BGR24 = 7,
47 |
48 | ///
49 | /// 32 bits per pixel, packed RGB 8:8:8, XRGBXRGB...
50 | ///
51 | XRGB = 8,
52 |
53 | ///
54 | /// 32 bits per pixel, packed RGB 8:8:8, XBGRXBGR...
55 | ///
56 | XBGR = 9,
57 |
58 | ///
59 | /// 32 bits per pixel, packed RGBA 8:8:8:8, RGBA...
60 | ///
61 | RGBA = 10,
62 |
63 | ///
64 |     /// 32 bits per pixel, packed BGRA 8:8:8:8, BGRABGRA...
65 | ///
66 | BGRA = 11,
67 |
68 | ///
69 | /// 8 bits per pixel, grayscale.
70 | ///
71 | GRAY = 12,
72 |
73 | ///
74 |     /// 1 bit per pixel, monochrome, 0 is white, 1 is black, pixels within each byte are ordered from the msb to the lsb.
75 | ///
76 | MONOWHITE = 13,
77 |
78 | ///
79 |     /// 1 bit per pixel, monochrome, 0 is black, 1 is white, pixels within each byte are ordered from the msb to the lsb.
80 | ///
81 | MONOBLACK = 14,
82 |
83 | ///
84 |     /// 8 bits per pixel, paletted (256-color palette).
85 | ///
86 | PAL8 = 15,
87 | ///
88 |     /// 8 bits per pixel, planar YUV 4:1:1, (1 Cr & Cb sample per 4x1 Y samples).
89 | ///
90 | YUV411P = 16,
91 |
92 | ///
93 | /// 9 bits per pixel, planar YUV 4:0:0, (1 Cr & Cb sample per 1x1 Y samples).
94 |     /// 9 bits per pixel, grayscale, big-endian.
95 | GRAY9BE = 17,
96 |
97 | ///
98 | /// 9 bits per pixel, planar YUV 4:0:0, (1 Cr & Cb sample per 1x1 Y samples).
99 |     /// 9 bits per pixel, grayscale, little-endian.
100 | GRAY9LE = 18,
101 |
102 | ///
103 |     /// 10 bits per pixel, grayscale, big-endian.
104 | ///
105 | GRAY10BE = 19,
106 |
107 | ///
108 |     /// 10 bits per pixel, grayscale, little-endian.
109 | ///
110 | GRAY10LE = 20,
111 |
112 | ///
113 |     /// 12 bits per pixel, grayscale, big-endian.
114 | ///
115 | GRAY12BE = 21,
116 |
117 | ///
118 |     /// 12 bits per pixel, grayscale, little-endian.
119 | ///
120 | GRAY12LE = 22,
121 |
122 | ///
123 |     /// 16 bits per pixel, grayscale, big-endian.
124 | ///
125 | GRAY16BE = 23,
126 |
127 | ///
128 |     /// 16 bits per pixel, grayscale, little-endian.
129 | ///
130 | GRAY16LE = 24,
131 |
132 | ///
133 |     /// 10 bits per pixel, planar YUV 4:2:2, (1 Cr & Cb sample per 2x1 Y samples).
134 | ///
135 | YUV422P10BE = 25,
136 |
137 | ///
138 |     /// 10 bits per pixel, planar YUV 4:2:2, (1 Cr & Cb sample per 2x1 Y samples).
139 | ///
140 | YUV422P10LE = 26,
141 |
142 | ///
143 |     /// 12 bits per pixel, planar YUV 4:2:2, (1 Cr & Cb sample per 2x1 Y samples).
144 | ///
145 | YUV422P12BE = 27,
146 |
147 | ///
148 |     /// 12 bits per pixel, planar YUV 4:2:2, (1 Cr & Cb sample per 2x1 Y samples).
149 | ///
150 | YUV422P12LE = 28,
151 |
152 | ///
153 |     /// 16 bits per pixel, planar YUV 4:2:2, (1 Cr & Cb sample per 2x1 Y samples).
154 | ///
155 | YUV422P16BE = 29,
156 | ///
157 |     /// 16 bits per pixel, planar YUV 4:2:2, (1 Cr & Cb sample per 2x1 Y samples).
158 | ///
159 | YUV422P16LE = 30,
160 | ///
161 | /// 10 bits per pixel, planar YUV 4:2:0, (1 Cr & Cb sample per 2x2 Y samples).
162 | ///
163 | YUV420P10BE = 31,
164 |
165 | ///
166 | /// 10 bits per pixel, planar YUV 4:2:0, (1 Cr & Cb sample per 2x2 Y samples).
167 | ///
168 | YUV420P10LE = 32,
169 |
170 | ///
171 | /// 12 bits per pixel, planar YUV 4:2:0, (1 Cr & Cb sample per 2x2 Y samples).
172 | ///
173 | YUV420P12BE = 33,
174 |
175 | ///
176 | /// 12 bits per pixel, planar YUV 4:2:0, (1 Cr & Cb sample per 2x2 Y samples).
177 | ///
178 | YUV420P12LE = 34,
179 |
180 | ///
181 | /// 16 bits per pixel, planar YUV 4:2:0, (1 Cr & Cb sample per 2x2 Y samples).
182 | ///
183 | YUV420P16BE = 35,
184 |
185 | ///
186 | /// 16 bits per pixel, planar YUV 4:2:0, (1 Cr & Cb sample per 2x2 Y samples).
187 | ///
188 | YUV420P16LE = 36,
189 |
190 | ///
191 | /// 10 bits per pixel, planar YUV 4:4:4, (1 Cr & Cb sample per 1x1 Y samples).
192 | ///
193 | YUV444P10BE = 37,
194 |
195 | ///
196 | /// 10 bits per pixel, planar YUV 4:4:4, (1 Cr & Cb sample per 1x1 Y samples).
197 | ///
198 | YUV444P10LE = 38,
199 |
200 | ///
201 | /// 12 bits per pixel, planar YUV 4:4:4, (1 Cr & Cb sample per 1x1 Y samples).
202 | ///
203 | YUV444P12BE = 39,
204 |
205 | ///
206 | /// 12 bits per pixel, planar YUV 4:4:4, (1 Cr & Cb sample per 1x1 Y samples).
207 | ///
208 | YUV444P12LE = 40,
209 |
210 | ///
211 | /// 16 bits per pixel, planar YUV 4:4:4, (1 Cr & Cb sample per 1x1 Y samples).
212 | ///
213 | YUV444P16BE = 41,
214 |
215 | ///
216 | /// 16 bits per pixel, planar YUV 4:4:4, (1 Cr & Cb sample per 1x1 Y samples).
217 | ///
218 | YUV444P16LE = 42,
219 |
220 | ///
221 | /// 8 bits per pixel, packed RGB 3:3:2, RGBRGB...
222 | ///
223 | RGB8 = 43,
224 |
225 | ///
226 |     /// 4 bits per pixel, packed RGB 1:2:1, RGBRGB...
227 | ///
228 | RGB4 = 44,
229 | ///
230 |     /// 8 bits per pixel, packed RGB 1:2:1 (one pixel per byte), RGBRGB...
231 | ///
232 | RGB4_BYTE = 45,
233 | ///
234 | /// 16 bits per pixel, packed RGB 5:6:5, RGBRGB...
235 | ///
236 | RGB565 = 46,
237 |
238 | ///
239 | /// 24 bits per pixel, packed RGB 8:8:8, RGBXRGBX...
240 | ///
241 | RGB24X = 47,
242 |
243 | ///
244 |     /// 16 bits per pixel, packed XRGB 1:5:5:5.
245 | ///
246 | XRGB1555 = 48,
247 |
248 | ///
249 |     /// 16 bits per pixel, packed ARGB 1:5:5:5.
250 | ///
251 | ARGB1555 = 49,
252 |
253 | ///
254 |     /// 64 bits per pixel, packed RGBA 16:16:16:16, big-endian.
255 | ///
256 | RGBA64BE = 50,
257 |
258 | ///
259 |     /// 64 bits per pixel, packed RGBA 16:16:16:16, little-endian.
260 | ///
261 | RGBA64LE = 51,
262 |
263 | ///
264 | /// 16 bits per pixel, planar GBR 4:4:4 24bpp, GGG... BBB...
265 | ///
266 | GBRP = 52,
267 |
268 | ///
269 | /// 16 bits per pixel, planar GBR 4:4:4 24bpp, BBB... GGG...
270 | ///
271 | GBRP9BE = 53,
272 |
273 | ///
274 | /// 16 bits per pixel, planar GBR 4:4:4 24bpp, BBB... GGG...
275 | ///
276 | GBRP9LE = 54,
277 |
278 | ///
279 | /// 16 bits per pixel, planar GBR 4:4:4 24bpp, BBB... GGG...
280 | ///
281 | GBRP10BE = 55,
282 |
283 | ///
284 | /// 16 bits per pixel, planar GBR 4:4:4 24bpp, BBB... GGG...
285 | ///
286 | GBRP10LE = 56,
287 |
288 | ///
289 | /// 16 bits per pixel, planar GBR 4:4:4 24bpp, BBB... GGG...
290 | ///
291 | GBRP12BE = 57,
292 |
293 | ///
294 | /// 16 bits per pixel, planar GBR 4:4:4 24bpp, BBB... GGG...
295 | ///
296 | GBRP12LE = 58,
297 |
298 | ///
299 | /// 16 bits per pixel, planar GBR 4:4:4 24bpp, BBB... GGG...
300 | ///
301 | GBRP14BE = 59,
302 |
303 | ///
304 | /// 16 bits per pixel, planar GBR 4:4:4 24bpp, BBB... GGG...
305 | ///
306 | GBRP14LE = 60,
307 |
308 | ///
309 | /// 16 bits per pixel, planar GBR 4:4:4 24bpp, BBB... GGG...
310 | ///
311 | GBRP16BE = 61,
312 | ///
313 | ///16 bits per pixel, planar GBR 4:4:4 24bpp, BBB... GGG...
314 | ///
315 | GBRP16LE = 62,
316 |
317 | ///
318 |     /// 10 bits per pixel, planar YUV 4:2:0 with alpha, (1 Cr & Cb sample per 2x2 Y samples).
319 | ///
320 | YUVA420P10BE = 63,
321 |
322 | ///
323 |     /// 10 bits per pixel, planar YUV 4:2:0 with alpha, (1 Cr & Cb sample per 2x2 Y samples).
324 | ///
325 | YUVA420P10LE = 64,
326 |
327 | ///
328 |     /// 12 bits per pixel, planar YUV 4:2:0 with alpha, (1 Cr & Cb sample per 2x2 Y samples).
329 | ///
330 | YUVA420P12BE = 65,
331 |
332 | ///
333 |     /// 12 bits per pixel, planar YUV 4:2:0 with alpha, (1 Cr & Cb sample per 2x2 Y samples).
334 | ///
335 | YUVA420P12LE = 66,
336 |
337 | ///
338 |     /// 16 bits per pixel, planar YUV 4:2:0 with alpha, (1 Cr & Cb sample per 2x2 Y samples).
339 | ///
340 | YUVA420P16BE = 67,
341 |
342 | ///
343 |     /// 16 bits per pixel, planar YUV 4:2:0 with alpha, (1 Cr & Cb sample per 2x2 Y samples).
344 | ///
345 | YUVA420P16LE = 68,
346 |
347 | ///
348 | /// 10 bits per pixel, planar YUV 4:2:2, (1 Cr & Cb sample per 2x1 Y samples).
349 | ///
350 | YUVA422P10BE = 69,
351 |
352 | ///
353 | /// 10 bits per pixel, planar YUV 4:2:2, (1 Cr & Cb sample per 2x1 Y samples).
354 | ///
355 | YUVA422P10LE = 70,
356 |
357 | ///
358 | /// 12 bits per pixel, planar YUV 4:2:2, (1 Cr & Cb sample per 2x1 Y samples).
359 | ///
360 | YUVA422P12BE = 71,
361 |
362 | ///
363 | /// 12 bits per pixel, planar YUV 4:2:2, (1 Cr & Cb sample per 2x1 Y samples).
364 | ///
365 | YUVA422P12LE = 72,
366 |
367 | ///
368 | /// 16 bits per pixel, planar YUV 4:2:2, (1 Cr & Cb sample per 2x1 Y samples).
369 | ///
370 | YUVA422P16BE = 73,
371 |
372 | ///
373 | /// 16 bits per pixel, planar YUV 4:2:2, (1 Cr & Cb sample per 2x1 Y samples).
374 | ///
375 | YUVA422P16LE = 74,
376 |
377 | ///
378 | /// 10 bits per pixel, planar YUV 4:4:4, (1 Cr & Cb sample per 1x1 Y samples).
379 | ///
380 | YUVA444P10BE = 75,
381 |
382 | ///
383 | /// 10 bits per pixel, planar YUV 4:4:4, (1 Cr & Cb sample per 1x1 Y samples).
384 | ///
385 | YUVA444P10LE = 76,
386 |
387 | ///
388 | /// 12 bits per pixel, planar YUV 4:4:4, (1 Cr & Cb sample per 1x1 Y samples).
389 | ///
390 | YUVA444P12BE = 77,
391 |
392 | ///
393 | /// 12 bits per pixel, planar YUV 4:4:4, (1 Cr & Cb sample per 1x1 Y samples).
394 | ///
395 | YUVA444P12LE = 78,
396 |
397 | ///
398 | /// 16 bits per pixel, planar YUV 4:4:4, (1 Cr & Cb sample per 1x1 Y samples).
399 | ///
400 | YUVA444P16BE = 79,
401 |
402 | ///
403 | /// 16 bits per pixel, planar YUV 4:4:4, (1 Cr & Cb sample per 1x1 Y samples).
404 | ///
405 | YUVA444P16LE = 80,
406 |
407 | ///
408 | /// Alias for YUVJ411P for backward compatibility.
409 | ///
410 | YUVJ411P = 81,
411 |
412 | ///
413 | /// Alias for YUVJ420P for backward compatibility.
414 | ///
415 | YUVJ420P = 82,
416 |
417 | ///
418 | /// Alias for YUVJ422P for backward compatibility.
419 | ///
420 | YUVJ422P = 83,
421 |
422 | ///
423 | /// Alias for YUVJ444P for backward compatibility.
424 | ///
425 | YUVJ444P = 84,
426 |
427 | ///
428 | /// Alias for XYZ12BE for backward compatibility.
429 | ///
430 | XYZ12 = 85,
431 |
432 | ///
433 | /// Alias for NV24 for backward compatibility.
434 | ///
435 | NV24 = 86,
436 |
437 | ///
438 | /// Alias for NV42 for backward compatibility.
439 | ///
440 | NV42 = 87,
441 |
442 | ///
443 | /// 24 bits per pixel, packed BGR 8:8:8, BGRBGR..
444 | ///
445 | BGR8 = 88
446 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/PositionType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Enum to represent different positions.
5 | ///
6 | public enum PositionType
7 | {
8 | ///
9 | /// Upper left position.
10 | ///
11 | UpperLeft = 0,
12 |
13 | ///
14 | /// Upper right position.
15 | ///
16 | UpperRight = 1,
17 |
18 | ///
19 | /// Right position.
20 | ///
21 | Right = 2,
22 |
23 | ///
24 | /// Left position.
25 | ///
26 | Left = 3,
27 |
28 | ///
29 | /// Up position.
30 | ///
31 | Up = 4,
32 |
33 | ///
34 | /// Bottom right position.
35 | ///
36 | BottomRight = 5,
37 |
38 | ///
39 | /// Bottom left position.
40 | ///
41 | BottomLeft = 6,
42 |
43 | ///
44 | /// Center position.
45 | ///
46 | Center = 7,
47 |
48 | ///
49 | /// Bottom position.
50 | ///
51 | Bottom = 8
52 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/ReadRateType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Represents some common read rates for FFmpeg.
5 | ///
6 | public enum ReadRateType
7 | {
8 | ///
9 | /// 1 KB per second.
10 | ///
11 | Kb1PerSec = 1024,
12 |
13 | ///
14 | /// 10 KB per second.
15 | ///
16 | Kb10PerSec = 10240,
17 |
18 | ///
19 | /// 100 KB per second.
20 | ///
21 | Kb100PerSec = 102400,
22 |
23 | ///
24 | /// 1 MB per second.
25 | ///
26 | Mb1PerSec = 1048576,
27 |
28 | ///
29 | /// 10 MB per second.
30 | ///
31 | Mb10PerSec = 10485760
32 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/SubtitleBitstreamFilterType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Represents some common bitstream filters for subtitles in FFmpeg.
5 | ///
6 | public enum SubtitleBitstreamFilterType
7 | {
8 | ///
9 | /// Convert MOV text to srt.
10 | ///
11 | Mov2TextSub = 1,
12 |
13 | ///
14 | /// Convert text subtitles to MOV text.
15 | ///
16 | Text2MovSub = 2
17 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/SubtitleCodecType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Enum representing subtitle codec values.
5 | ///
6 | public enum SubtitleCodecType
7 | {
8 | ///
9 | /// Subtitle codec used in MP4 files.
10 | ///
11 | Mov_Text,
12 |
13 | ///
14 | /// Codec for SubRip subtitles (usually with .srt extension).
15 | ///
16 | Srt,
17 |
18 | ///
19 | /// Codec for Advanced SubStation Alpha subtitles (usually with .ass extension).
20 | ///
21 | Ass,
22 |
23 | ///
24 | /// Codec for SubStation Alpha subtitles (usually with .ssa extension).
25 | ///
26 | Ssa,
27 |
28 | ///
29 | /// Codec for WebVTT subtitles (usually with .vtt extension).
30 | ///
31 | WebVTT,
32 |
33 | ///
34 | /// Codec for DVD subtitles.
35 | ///
36 | Dvd_Subtitle,
37 |
38 | ///
39 | /// Copy subtitles without changes.
40 | ///
41 | Copy
42 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/TargetStandardType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Enum to represent different target standards.
5 | ///
6 | public enum TargetStandardType
7 | {
8 | ///
9 | /// Phase Alternating Line (PAL) standard.
10 | ///
11 | PAL = 0,
12 |
13 | ///
14 | /// National Television System Committee (NTSC) standard.
15 | ///
16 | NTSC = 1,
17 |
18 | ///
19 | /// Film standard.
20 | ///
21 | FILM = 2
22 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/TargetType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Enum to represent different target types.
5 | ///
6 | public enum TargetType
7 | {
8 | ///
9 | /// Video Compact Disc (VCD) target type.
10 | ///
11 | VCD = 0,
12 |
13 | ///
14 | /// Super Video Compact Disc (SVCD) target type.
15 | ///
16 | SVCD = 1,
17 |
18 | ///
19 | /// Digital Video Disc (DVD) target type.
20 | ///
21 | DVD = 2,
22 |
23 | ///
24 | /// Digital Video (DV) target type.
25 | ///
26 | DV = 3,
27 |
28 | ///
29 | /// DV50 target type.
30 | ///
31 | DV50 = 4
32 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/VideoAspectRatioType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Enum to represent different video aspect ratios.
5 | ///
6 | public enum VideoAspectRatioType
7 | {
8 | ///
9 | /// 3:2 aspect ratio.
10 | ///
11 | R3_2 = 0,
12 |
13 | ///
14 | /// 4:3 aspect ratio.
15 | ///
16 | R4_3 = 1,
17 |
18 | ///
19 | /// 5:3 aspect ratio.
20 | ///
21 | R5_3 = 2,
22 |
23 | ///
24 | /// 5:4 aspect ratio.
25 | ///
26 | R5_4 = 3,
27 |
28 | ///
29 | /// 16:9 aspect ratio.
30 | ///
31 | R16_9 = 4,
32 |
33 | ///
34 | /// 16:10 aspect ratio.
35 | ///
36 | R16_10 = 5,
37 |
38 | ///
39 | /// 17:9 aspect ratio.
40 | ///
41 | R17_9 = 6
42 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/VideoBitstreamFilter.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Represents some common bitstream filters for FFmpeg.
5 | ///
6 | public enum VideoBitstreamFilter
7 | {
8 | ///
9 | /// Convert H.264 bitstream from length-prefixed mode to start code mode.
10 | ///
11 | H264_Mp4ToAnnexB = 1,
12 |
13 | ///
14 |     /// Damage or drop packet contents without decoding them (useful for testing error resilience).
15 | ///
16 | Noise = 2,
17 |
18 | ///
19 | /// Remove extradata from the input video.
20 | ///
21 | RemoveExtra = 3
22 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/VideoCodecPresetType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Enum to represent different video codec preset types.
5 | ///
6 | public enum VideoCodecPresetType
7 | {
8 | ///
9 | /// Ultrafast preset type.
10 | ///
11 | ULTRAFAST = 0,
12 |
13 | ///
14 | /// Superfast preset type.
15 | ///
16 | SUPERFAST = 1,
17 |
18 | ///
19 | /// Veryfast preset type.
20 | ///
21 | VERYFAST = 2,
22 |
23 | ///
24 | /// Faster preset type.
25 | ///
26 | FASTER = 3,
27 |
28 | ///
29 | /// Fast preset type.
30 | ///
31 | FAST = 4,
32 |
33 | ///
34 | /// Medium preset type.
35 | ///
36 | MEDIUM = 5,
37 |
38 | ///
39 | /// Slow preset type.
40 | ///
41 | SLOW = 6,
42 |
43 | ///
44 | /// Slower preset type.
45 | ///
46 | SLOWER = 7,
47 |
48 | ///
49 | /// Veryslow preset type.
50 | ///
51 | VERYSLOW = 8
52 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/VideoCodecProfileType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Enum to represent different video codec profile types.
5 | ///
6 | public enum VideoCodecProfileType
7 | {
8 | ///
9 | /// Default profile type.
10 | ///
11 | DEFAULT = 0,
12 |
13 | ///
14 | /// Baseline profile type.
15 | ///
16 | BASELINE = 1,
17 |
18 | ///
19 | /// Main profile type.
20 | ///
21 | MAIN = 2,
22 |
23 | ///
24 | /// High profile type.
25 | ///
26 | HIGH = 3,
27 |
28 | ///
29 | /// High10 profile type.
30 | ///
31 | HIGH10 = 4,
32 |
33 | ///
34 | /// High422 profile type.
35 | ///
36 | HIGH422 = 5,
37 |
38 | ///
39 | /// High444 profile type.
40 | ///
41 | HIGH444 = 6
42 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/VideoCodecType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Video codec type
5 | ///
6 | public enum VideoCodecType
7 | {
8 | ///
9 | /// Copy codec type
10 | ///
11 | COPY = 0,
12 |
13 | ///
14 | /// Multicolor charset for Commodore 64 (codec a64_multi)
15 | ///
16 | A64MULTI = 1,
17 |
18 | ///
19 | /// Multicolor charset for Commodore 64 = , extended with 5th color (colram) (codec a64_multi5)
20 | ///
21 | A64MULTI5 = 2,
22 |
23 | ///
24 | ///Alias/Wavefront PIX image
25 | ///
26 | ALIAS_PIX = 3,
27 |
28 | ///
29 | ///AMV Video
30 | ///
31 | AMV = 4,
32 |
33 | ///
34 | ///APNG (Animated Portable Network Graphics) image
35 | ///
36 | APNG = 5,
37 |
38 | ///
39 | ///ASUS V1
40 | ///
41 | ASV1 = 6,
42 |
43 | ///
44 | ///ASUS V2
45 | ///
46 | ASV2 = 7,
47 |
48 | ///
49 | ///Avid 1:1 10-bit RGB Packer
50 | ///
51 | AVRP = 8,
52 |
53 | ///
54 | ///Avid Meridien Uncompressed
55 | ///
56 | AVUI = 9,
57 |
58 | ///
59 | /// Uncompressed packed MS 4:4:4:4
60 | ///
61 | AYUV = 10,
62 |
63 | ///
64 | ///BMP (Windows and OS/2 bitmap)
65 | ///
66 | BMP = 11,
67 |
68 | ///
69 | ///Cinepak
70 | ///
71 | CINEPAK = 12,
72 |
73 | ///
74 | ///Cirrus Logic AccuPak
75 | ///
76 | CLJR = 13,
77 |
78 | ///
79 | ///SMPTE VC-2 (codec dirac)
80 | ///
81 | VC2 = 14,
82 |
83 | ///
84 | ///VC3/DNxHD
85 | ///
86 | DNXHD = 15,
87 |
88 | ///
89 | ///DPX (Digital Picture Exchange) image
90 | ///
91 | DPX = 16,
92 |
93 | ///
94 | /// DV (Digital Video)
95 | ///
96 | DVVIDEO = 17,
97 |
98 | ///
99 | /// FFmpeg video codec #1
100 | ///
101 | FFV1 = 18,
102 |
103 | ///
104 | /// Huffyuv FFmpeg variant
105 | ///
106 | FFVHUFF = 19,
107 |
108 | ///
109 | /// Flexible Image Transport System
110 | ///
111 | FITS = 20,
112 |
113 | ///
114 | /// Flash Screen Video
115 | ///
116 | FLASHSV = 21,
117 |
118 | ///
119 | /// Flash Screen Video Version 2
120 | ///
121 | FLASHSV2 = 22,
122 |
123 | ///
124 | /// FLV / Sorenson Spark / Sorenson H.263 (Flash Video) (codec flv1)
125 | ///
126 | FLV = 23,
127 |
128 | ///
129 | /// GIF (Graphics Interchange Format)
130 | ///
131 | GIF = 24,
132 |
133 | ///
134 | /// H.261
135 | ///
136 | H261 = 25,
137 |
138 | ///
139 | /// H.263 / H.263-1996
140 | ///
141 | H263 = 26,
142 |
143 | ///
144 |     /// H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10
145 | ///
146 | H264 = 27,
147 |
148 | ///
149 | /// H.263+ / H.263-1998 / H.263 version 2
150 | ///
151 | H263P = 28,
152 |
153 | ///
154 | /// libx264 H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10 (codec h264)
155 | ///
156 | LIBX264 = 29,
157 |
158 | ///
159 | /// libx264 H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10 RGB (codec h264)
160 | ///
161 | LIBX264RGB = 30,
162 |
163 | ///
164 | /// AMD AMF H.264 Encoder (codec h264)
165 | ///
166 | H264_AMF = 31,
167 |
168 | ///
169 | /// NVIDIA NVENC H.264 encoder (codec h264)
170 | ///
171 | H264_NVENC = 32,
172 |
173 | ///
174 | /// H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10 (Intel Quick Sync Video acceleration) (codec h264)
175 | ///
176 | H264_QSV = 33,
177 |
178 | ///
179 | /// NVIDIA NVENC H.264 encoder (codec h264)
180 | ///
181 | NVENC = 34,
182 |
183 | ///
184 | /// NVIDIA NVENC H.264 encoder (codec h264)
185 | ///
186 | NVENC_H264 = 35,
187 |
188 | ///
189 | /// Vidvox Hap
190 | ///
191 | HAP = 36,
192 |
193 | ///
194 | /// libx265 H.265 / HEVC (codec hevc)
195 | ///
196 | LIBX265 = 37,
197 |
198 | ///
199 | /// NVIDIA NVENC hevc encoder (codec hevc)
200 | ///
201 | NVENC_HEVC = 38,
202 |
203 | ///
204 | /// AMD AMF HEVC encoder (codec hevc)
205 | ///
206 | HEVC_AMF = 39,
207 |
208 | ///
209 | /// NVIDIA NVENC hevc encoder (codec hevc)
210 | ///
211 | HEVC_NVENC = 40,
212 |
213 | ///
214 | /// HEVC (Intel Quick Sync Video acceleration) (codec hevc)
215 | ///
216 | HEVC_QSV = 41,
217 |
218 | ///
219 | /// Huffyuv / HuffYUV
220 | ///
221 | HUFFYUV = 42,
222 |
223 | ///
224 | /// JPEG 2000
225 | ///
226 | JPEG2000 = 43,
227 |
228 | ///
229 | /// OpenJPEG JPEG 2000 (codec jpeg2000)
230 | ///
231 | LIBOPENJPEG = 44,
232 |
233 | ///
234 | /// JPEG-LS
235 | ///
236 | JPEGLS = 45,
237 |
238 | ///
239 | /// Lossless JPEG
240 | ///
241 | LJPEG = 46,
242 |
243 | ///
244 | /// MagicYUV video
245 | ///
246 | MAGICYUV = 47,
247 |
248 | ///
249 | /// MJPEG (Motion JPEG)
250 | ///
251 | MJPEG = 48,
252 |
253 | ///
254 | /// MJPEG (Intel Quick Sync Video acceleration) (codec mjpeg)
255 | ///
256 | MJPEG_QSV = 49,
257 |
258 | ///
259 | /// MPEG-1 video
260 | ///
261 | MPEG1VIDEO = 50,
262 |
263 | ///
264 | /// MPEG-2 video
265 | ///
266 | MPEG2VIDEO = 51,
267 |
268 | ///
269 | /// MPEG-2 video (Intel Quick Sync Video acceleration) (codec mpeg2video)
270 | ///
271 | MPEG2_QSV = 52,
272 |
273 | ///
274 | /// MPEG-4 part 2
275 | ///
276 | MPEG4 = 53,
277 |
278 | ///
279 | /// libxvidcore MPEG-4 part 2 (codec mpeg4)
280 | ///
281 | LIBXVID = 54,
282 |
283 | ///
284 | /// MPEG-4 part 2 Microsoft variant version 2
285 | ///
286 | MSMPEG4V2 = 55,
287 |
288 | ///
289 | /// MPEG-4 part 2 Microsoft variant version 3 (codec msmpeg4v3)
290 | ///
291 | MSMPEG4 = 56,
292 |
293 | ///
294 | /// Microsoft Video-1
295 | ///
296 | MSVIDEO1 = 57,
297 |
298 | ///
299 | /// PAM (Portable AnyMap) image
300 | ///
301 | PAM = 58,
302 |
303 | ///
304 | /// PBM (Portable BitMap) image
305 | ///
306 | PBM = 59,
307 |
308 | ///
309 | /// PC Paintbrush PCX image
310 | ///
311 | PCX = 60,
312 |
313 | ///
314 | /// PGM (Portable GrayMap) image
315 | ///
316 | PGM = 61,
317 |
318 | ///
319 | /// PGMYUV (Portable GrayMap YUV) image
320 | ///
321 | PGMYUV = 62,
322 |
323 | ///
324 | /// PNG (Portable Network Graphics) image
325 | ///
326 | PNG = 63,
327 |
328 | ///
329 | /// PPM (Portable PixelMap) image
330 | ///
331 | PPM = 64,
332 |
333 | ///
334 | /// Apple ProRes
335 | ///
336 | PRORES = 65,
337 |
338 | ///
339 | /// Apple ProRes (codec prores)
340 | ///
341 | PRORES_AW = 66,
342 |
343 | ///
344 | /// Apple ProRes (iCodec Pro) (codec prores)
345 | ///
346 | PRORES_KS = 67,
347 |
348 | ///
349 | /// QuickTime Animation (RLE) video
350 | ///
351 | QTRLE = 68,
352 |
353 | ///
354 | /// AJA Kona 10-bit RGB Codec
355 | ///
356 | R10K = 69,
357 |
358 | ///
359 | /// Uncompressed RGB 10-bit
360 | ///
361 | R210 = 70,
362 |
363 | ///
364 | /// raw video
365 | ///
366 | RAWVIDEO = 71,
367 |
368 | ///
369 | /// id RoQ video (codec roq)
370 | ///
371 | ROQVIDEO = 72,
372 |
373 | ///
374 | /// RealVideo 1.0
375 | ///
376 | RV10 = 73,
377 |
378 | ///
379 | /// RealVideo 2.0
380 | ///
381 | RV20 = 74,
382 |
383 | ///
384 | /// SGI image
385 | ///
386 | SGI = 75,
387 |
388 | ///
389 | /// Snow
390 | ///
391 | SNOW = 76,
392 |
393 | ///
394 | /// Sun Rasterfile image
395 | ///
396 | SUNRAST = 77,
397 |
398 | ///
399 | /// Sorenson Vector Quantizer 1 / Sorenson Video 1 / SVQ1
400 | ///
401 | SVQ1 = 78,
402 |
403 | ///
404 | /// Truevision Targa image
405 | ///
406 | TARGA = 79,
407 |
408 | ///
409 | /// libtheora Theora (codec theora)
410 | ///
411 | LIBTHEORA = 80,
412 |
413 | ///
414 | /// TIFF image
415 | ///
416 | TIFF = 81,
417 |
418 | ///
419 | /// Ut Video
420 | ///
421 | UTVIDEO = 82,
422 |
423 | ///
424 | /// Uncompressed 4:2:2 10-bit
425 | ///
426 | V210 = 83,
427 |
428 | ///
429 | /// Uncompressed packed 4:4:4
430 | ///
431 | V308 = 84,
432 |
433 | ///
434 | /// Uncompressed packed QT 4:4:4:4
435 | ///
436 | V408 = 85,
437 |
438 | ///
439 | /// Uncompressed 4:4:4 10-bit
440 | ///
441 | V410 = 86,
442 |
443 | ///
444 | /// libvpx VP8 (codec vp8)
445 | ///
446 | LIBVPX = 87,
447 |
448 | ///
449 | /// VP9 video (Intel Quick Sync Video acceleration) (codec vp9)
450 | ///
451 | VP9_QSV = 88,
452 |
453 | ///
454 | /// libwebp WebP image (codec webp)
455 | ///
456 | LIBWEBP_ANIM = 89,
457 |
458 | ///
459 | /// libwebp WebP image (codec webp)
460 | ///
461 | LIBWEBP = 90,
462 |
463 | ///
464 | /// Windows Media Video 7
465 | ///
466 | WMV1 = 91,
467 |
468 | ///
469 | /// Windows Media Video 8
470 | ///
471 | WMV2 = 92,
472 |
473 | ///
474 | /// AVFrame to AVPacket passthrough
475 | ///
476 | WRAPPED_AVFRAME = 93,
477 |
478 | ///
479 | /// XBM (X BitMap) image
480 | ///
481 | XBM = 94,
482 |
483 | ///
484 | /// X-face image
485 | ///
486 | XFACE = 95,
487 |
488 | ///
489 | /// XWD (X Window Dump) image
490 | ///
491 | XWD = 96,
492 |
493 | ///
494 | /// Uncompressed YUV 4:1:1 12-bit
495 | ///
496 | Y41P = 97,
497 |
498 | ///
499 | /// Uncompressed packed 4:2:0
500 | ///
501 | YUV4 = 98,
502 |
503 | ///
504 | /// LCL (LossLess Codec Library) ZLIB
505 | ///
506 | ZLIB = 99,
507 |
508 | ///
509 | /// Zip Motion Blocks Video
510 | ///
511 | ZMBV = 100
512 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Enums/VideoSizeTyoe.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Enumeration for different video sizes.
5 | ///
6 | public enum VideoSizeType
7 | {
8 | ///
9 | /// 16 times Common Intermediate Format (CIF)
10 | ///
11 | _16CIF,
12 |
13 | ///
14 | /// 2K resolution (approximately 2048 pixels wide)
15 | ///
16 | _2K,
17 |
18 | ///
19 | /// 2K resolution with a flat aspect ratio
20 | ///
21 | _2KFLAT,
22 |
23 | ///
24 | /// 2K resolution with a widescreen aspect ratio
25 | ///
26 | _2KSCOPE,
27 |
28 | ///
29 | /// 4 times Common Intermediate Format (CIF)
30 | ///
31 | _4CIF,
32 |
33 | ///
34 | /// 4K resolution (approximately 4096 pixels wide)
35 | ///
36 | _4K,
37 |
38 | ///
39 | /// 4K resolution with a flat aspect ratio
40 | ///
41 | _4KFLAT,
42 |
43 | ///
44 | /// 4K resolution with a widescreen aspect ratio
45 | ///
46 | _4KSCOPE,
47 |
48 | ///
49 | /// Color Graphics Array (CGA) resolution
50 | ///
51 | CGA,
52 |
53 | ///
54 | /// Common Intermediate Format (CIF) resolution
55 | ///
56 | CIF,
57 |
58 | ///
59 | /// Enhanced Graphics Array (EGA) resolution
60 | ///
61 | EGA,
62 |
63 | ///
64 | /// Film resolution
65 | ///
66 | FILM,
67 |
68 | ///
69 |     /// FWQVGA resolution (432x240)
70 | ///
71 | FWQVGA,
72 |
73 | ///
74 | /// HD1080 resolution (1920x1080)
75 | ///
76 | HD1080,
77 |
78 | ///
79 | /// HD480 resolution (854x480)
80 | ///
81 | HD480,
82 |
83 | ///
84 | /// HD720 resolution (1280x720)
85 | ///
86 | HD720,
87 |
88 | ///
89 | /// HQVGA resolution (240x160)
90 | ///
91 | HQVGA,
92 |
93 | ///
94 | /// HSXGA resolution (5120x4096)
95 | ///
96 | HSXGA,
97 | ///
98 |     /// Half VGA (HVGA) resolution (480x320).
99 | ///
100 | HVGA,
101 |
102 | ///
103 |     /// Ninth HD (nHD) resolution (640x360).
104 | ///
105 | NHD,
106 |
107 | ///
108 | /// The National Television System Committee television standard.
109 | ///
110 | NTSC,
111 |
112 | ///
113 | /// The National Television System Committee standard for film.
114 | ///
115 | NTSC_FILM,
116 |
117 | ///
118 | /// The Phase Alternating Line television standard used in Europe and other countries.
119 | ///
120 | PAL,
121 |
122 | ///
123 | /// Quarter Common Intermediate Format used in videoconferencing and mobile devices.
124 | ///
125 | QCIF,
126 |
127 | ///
128 |     /// Quarter HD (qHD) resolution (960x540).
129 | ///
130 | QHD,
131 |
132 | ///
133 | /// Quarter National Television System Committee.
134 | ///
135 | QNTSC,
136 |
137 | ///
138 | /// Quarter Phase Alternating Line.
139 | ///
140 | QPAL,
141 |
142 | ///
143 | /// Quarter Quarter VGA.
144 | ///
145 | QQVGA,
146 |
147 | ///
148 |     /// Quad Super Extended Graphics Array (2560x2048).
149 | ///
150 | QSXGA,
151 |
152 | ///
153 | /// Quarter VGA.
154 | ///
155 | QVGA,
156 |
157 | ///
158 |     /// Quad Extended Graphics Array (2048x1536).
159 | ///
160 | QXGA,
161 |
162 | ///
163 |     /// Square-pixel NTSC (640x480).
164 | ///
165 | SNTSC,
166 |
167 | ///
168 |     /// Square-pixel PAL (768x576).
169 | ///
170 | SPAL,
171 |
172 | ///
173 | /// Super QCIF.
174 | ///
175 | SQCIF,
176 |
177 | ///
178 | /// Super Video Graphics Array.
179 | ///
180 | SVGA,
181 |
182 | ///
183 | /// Super Extended Graphics Array.
184 | ///
185 | SXGA,
186 |
187 | ///
188 | /// Ultra Extended Graphics Array.
189 | ///
190 | UXGA,
191 |
192 | ///
193 | /// Video Graphics Array.
194 | ///
195 | VGA,
196 |
197 | ///
198 | /// Widescreen High Super Extended Graphics Array.
199 | ///
200 | WHSXGA,
201 |
202 | ///
203 | /// Widescreen High Ultra Extended Graphics Array.
204 | ///
205 | WHUXGA,
206 |
207 | ///
208 | /// Widescreen Organic Extended Graphics Array.
209 | ///
210 | WOXGA,
211 |
212 | ///
213 |     /// Wide Quad Super Extended Graphics Array (3200x2048).
214 | ///
215 | WQSXGA,
216 |
217 | ///
218 |     /// Wide Quad Ultra Extended Graphics Array (3840x2400).
219 | ///
220 | WQUXGA,
221 |
222 | ///
223 | /// Widescreen Quarter Video Graphics Array.
224 | ///
225 | WQVGA,
226 |
227 | ///
228 | /// Widescreen Super Extended Graphics Array.
229 | ///
230 | WSXGA,
231 |
232 | ///
233 | /// Widescreen Ultra Extended Graphics Array.
234 | ///
235 | WUXGA,
236 |
237 | ///
238 | /// Widescreen Video Graphics Array.
239 | ///
240 | WVGA,
241 |
242 | ///
243 | /// Widescreen XGA.
244 | ///
245 | WXGA,
246 |
247 | ///
248 | /// Custom size
249 | ///
250 | CUSTOM
251 | }
--------------------------------------------------------------------------------
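Editor's note: the size presets above correspond to ffmpeg's named -s/-video_size values (for example hd720 is 1280x720 and cif is 352x288); the leading underscores exist only because C# identifiers cannot start with a digit. A minimal illustrative mapping follows, assuming the enum is named VideoSizeType (the file name carries a typo) and that the names are simply lowercased for ffmpeg; the helper is not part of the library.

using System;
using MediaFileProcessor.Models.Enums;

internal static class VideoSizeNameSketch
{
    // Hypothetical helper: derive ffmpeg's preset name from the enum member name
    // (e.g. _4KFLAT -> "4kflat", HD1080 -> "hd1080").
    public static string ToFFmpegSizeName(VideoSizeType size) => size switch
    {
        VideoSizeType.NTSC_FILM => "ntsc-film", // ffmpeg spells this preset with a hyphen
        VideoSizeType.CUSTOM => throw new NotSupportedException("CUSTOM needs an explicit WIDTHxHEIGHT string"),
        _ => size.ToString().TrimStart('_').ToLowerInvariant()
    };
}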
/MediaFileProcessor/MediaFileProcessor/Models/Enums/VideoSyncMethodType.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Enum representing video sync values.
5 | ///
6 | public enum VideoSyncMethodType
7 | {
8 | ///
9 | /// Each frame is passed with its timestamp from the demuxer to the muxer.
10 | ///
11 | Passthrough,
12 |
13 | ///
14 | /// Frames are duplicated or dropped as needed to achieve exactly the requested constant frame rate.
15 | ///
16 | Cfr,
17 |
18 | ///
19 | /// Frames are passed through with their timestamps, or dropped so that no two frames share the same timestamp.
20 | ///
21 | Vfr
22 | }
--------------------------------------------------------------------------------
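Editor's note: these values mirror ffmpeg's frame synchronization modes (the -vsync option, renamed -fps_mode in newer builds). A hedged sketch of the mapping; the helper below is illustrative and not part of the library.

using System;
using MediaFileProcessor.Models.Enums;

internal static class VideoSyncSketch
{
    // Illustrative mapping to the names ffmpeg accepts for -vsync / -fps_mode.
    public static string ToFFmpegArgument(VideoSyncMethodType method) => method switch
    {
        VideoSyncMethodType.Passthrough => "passthrough",
        VideoSyncMethodType.Cfr => "cfr",
        VideoSyncMethodType.Vfr => "vfr",
        _ => throw new ArgumentOutOfRangeException(nameof(method))
    };
}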
/MediaFileProcessor/MediaFileProcessor/Models/Enums/ZipCompression.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Enums;
2 |
3 | ///
4 | /// Compression method enumeration
5 | ///
6 | public enum ZipCompression : ushort
7 | {
8 | ///
9 | /// Uncompressed storage
10 | ///
11 | Store = 0,
12 |
13 | ///
14 | /// Deflate compression method
15 | ///
16 | Deflate = 8
17 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Settings/BaseProcessingSettings.cs:
--------------------------------------------------------------------------------
1 | using System.Diagnostics;
2 | using System.Text;
3 | namespace MediaFileProcessor.Models.Settings;
4 |
5 | ///
6 | /// Abstract class that provides basic properties and methods for processing settings
7 | ///
8 | public abstract class BaseProcessingSettings
9 | {
10 | ///
11 | /// A string builder to store the process arguments
12 | ///
13 | protected readonly StringBuilder _stringBuilder = new ();
14 |
15 | ///
16 | /// Property that indicates whether the process standard error should be redirected
17 | ///
18 | public bool RedirectStandardError => ErrorDataReceivedHandler is not null;
19 |
20 | ///
21 | /// Property that indicates whether to suppress creation of a console window for the process
22 | ///
23 | public bool CreateNoWindow { get; set; } = true;
24 |
25 | ///
26 | /// Property that indicates whether to use shell execute for the process
27 | ///
28 | public bool UseShellExecute { get; set; }
29 |
30 | ///
31 | /// Property that indicates whether to raise events for the process
32 | ///
33 | public bool EnableRaisingEvents { get; set; } = true;
34 |
35 | ///
36 | /// Property that sets the window style for the process
37 | ///
38 | public ProcessWindowStyle WindowStyle { get; set; } = ProcessWindowStyle.Hidden;
39 |
40 | ///
41 | /// Property that sets the process exit handler
42 | ///
43 | public EventHandler? ProcessOnExitedHandler { get; set; }
44 |
45 | ///
46 | /// Property that sets the output data received event handler
47 | ///
48 | public DataReceivedEventHandler? OutputDataReceivedEventHandler { get; set; }
49 |
50 | ///
51 | /// Property to hold a reference to the ErrorDataReceivedHandler event handler.
52 | ///
53 | public DataReceivedEventHandler? ErrorDataReceivedHandler { get; set; }
54 |
55 | ///
56 | /// Arguments for Specifying the Output Data Format
57 | ///
58 | protected string? OutputFileArguments { get; set; }
59 |
60 | ///
61 | /// Property to determine if standard output is redirected or not.
62 | ///
63 | public bool IsStandartOutputRedirect => OutputFileArguments == null || OutputFileArguments.Trim() == "-";
64 |
65 | ///
66 | /// Property to hold a dictionary of pipe names and their associated streams.
67 | ///
68 | internal Dictionary<string, Stream>? PipeNames { get; set; }
69 |
70 | ///
71 | /// Property to hold a list of input streams.
72 | ///
73 | protected List<Stream>? InputStreams { get; set; }
74 |
75 | ///
76 | /// Virtual method to get process arguments.
77 | ///
78 | /// Determines whether output arguments should be set or not. Default is true.
79 | /// A string representing the process arguments.
80 | public virtual string GetProcessArguments(bool setOutputArguments = true)
81 | {
82 | return _stringBuilder.ToString();
83 | }
84 |
85 | ///
86 | /// Virtual method to get input streams.
87 | ///
88 | /// An array of input streams.
89 | public virtual Stream[]? GetInputStreams()
90 | {
91 | return InputStreams?.ToArray();
92 | }
93 |
94 | ///
95 | /// Virtual method to get input pipe names.
96 | ///
97 | /// An array of input pipe names.
98 | public virtual string[]? GetInputPipeNames()
99 | {
100 | return PipeNames?.Keys.ToArray();
101 | }
102 | }
--------------------------------------------------------------------------------
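Editor's note: this base class drives how the library configures System.Diagnostics.Process. RedirectStandardError is derived purely from whether ErrorDataReceivedHandler is set, and IsStandartOutputRedirect stays true until an output file argument is supplied. A small sketch using the PandocFileProcessingSettings subclass shown next (top-level statements, illustrative only):

using System;
using MediaFileProcessor.Models.Settings;

var settings = new PandocFileProcessingSettings();

// Attaching handlers is enough to opt in to error redirection and exit notification.
settings.ErrorDataReceivedHandler = (_, e) => Console.Error.WriteLine(e.Data);
settings.ProcessOnExitedHandler = (_, _) => Console.WriteLine("process exited");

Console.WriteLine(settings.RedirectStandardError);    // True, because a handler is attached
Console.WriteLine(settings.IsStandartOutputRedirect); // True, no output file argument was set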
/MediaFileProcessor/MediaFileProcessor/Models/Settings/PandocFileProcessingSettings.cs:
--------------------------------------------------------------------------------
1 | using MediaFileProcessor.Extensions;
2 | using MediaFileProcessor.Models.Common;
3 | using MediaFileProcessor.Models.Enums;
4 |
5 | namespace MediaFileProcessor.Models.Settings;
6 |
7 | ///
8 | /// Settings for document file processing
9 | ///
10 | public class PandocFileProcessingSettings : BaseProcessingSettings
11 | {
12 | ///
13 | /// To produce a standalone document (e.g. a valid HTML file including 'head' and 'body' tags)
14 | ///
15 | public PandocFileProcessingSettings Standalone()
16 | {
17 | _stringBuilder.Append(" -s ");
18 |
19 | return this;
20 | }
21 |
22 | ///
23 | /// The input format can be specified using the -f/--from option
24 | ///
25 | public PandocFileProcessingSettings From(string format)
26 | {
27 | _stringBuilder.Append($" -f {format}");
28 |
29 | return this;
30 | }
31 |
32 | ///
33 | /// The output format using the -t/--to option
34 | ///
35 | public PandocFileProcessingSettings To(string format)
36 | {
37 | _stringBuilder.Append($" -t {format}");
38 |
39 | return this;
40 | }
41 |
42 | ///
43 | /// Specify the user data directory to search for pandoc data files
44 | ///
45 | public PandocFileProcessingSettings DataDirectory(string directory)
46 | {
47 | _stringBuilder.Append($" --data-dir={directory}");
48 |
49 | return this;
50 | }
51 |
52 | ///
53 | /// Specify a set of default option settings
54 | ///
55 | public PandocFileProcessingSettings DefaultOptionSettings(string file)
56 | {
57 | _stringBuilder.Append($" -d {file}");
58 |
59 | return this;
60 | }
61 |
62 | ///
63 | /// Shift heading levels by a positive or negative integer
64 | ///
65 | public PandocFileProcessingSettings ShiftHeadingLevel(string number)
66 | {
67 | _stringBuilder.Append($" --shift-heading-level-by={number}");
68 |
69 | return this;
70 | }
71 |
72 | ///
73 | /// Specify an executable to be used as a filter transforming the pandoc AST after the input is parsed and before the output is written
74 | ///
75 | public PandocFileProcessingSettings Filter(string program)
76 | {
77 | _stringBuilder.Append($" --filter={program}");
78 |
79 | return this;
80 | }
81 |
82 | ///
83 | /// Set the metadata field KEY to the value VAL. A value specified on the command line overrides a value specified in the document using YAML metadata blocks
84 | ///
85 | public PandocFileProcessingSettings Metadata(string value)
86 | {
87 | _stringBuilder.Append($" --metadata={value}");
88 |
89 | return this;
90 | }
91 |
92 | ///
93 | /// Read metadata from the supplied YAML (or JSON) file
94 | ///
95 | public PandocFileProcessingSettings MetadataFile(string file)
96 | {
97 | _stringBuilder.Append($" --metadata-file={file}");
98 |
99 | return this;
100 | }
101 |
102 | ///
103 | /// Preserve tabs instead of converting them to spaces
104 | ///
105 | public PandocFileProcessingSettings PreserveTabs()
106 | {
107 | _stringBuilder.Append(" --preserve-tabs ");
108 |
109 | return this;
110 | }
111 |
112 | ///
113 | /// Parse untranslatable HTML and LaTeX as raw
114 | ///
115 | public PandocFileProcessingSettings ParseRaw()
116 | {
117 | _stringBuilder.Append(" --parse-raw ");
118 |
119 | return this;
120 | }
121 |
122 | ///
123 | /// Normalize the document, including converting it to NFC Unicode normalization form
124 | ///
125 | public PandocFileProcessingSettings Normalize()
126 | {
127 | _stringBuilder.Append(" --normalize ");
128 |
129 | return this;
130 | }
131 |
132 | ///
133 | /// Link to a CSS stylesheet
134 | ///
135 | public PandocFileProcessingSettings CssUrl(string url)
136 | {
137 | _stringBuilder.Append(" --css={url} ");
138 |
139 | return this;
140 | }
141 |
142 | ///
143 | /// Print the default template for FORMAT
144 | ///
145 | public PandocFileProcessingSettings PrintDefaultTemplate(string format)
146 | {
147 | _stringBuilder.Append(" -D {format} ");
148 |
149 | return this;
150 | }
151 |
152 | ///
153 | /// Parse each file individually before combining for multifile documents.
154 | ///
155 | public PandocFileProcessingSettings FileScope()
156 | {
157 | _stringBuilder.Append(" --file-scope ");
158 |
159 | return this;
160 | }
161 |
162 | ///
163 | /// Additional settings that are not currently provided in the wrapper
164 | ///
165 | public PandocFileProcessingSettings CustomArguments(string arg)
166 | {
167 | _stringBuilder.Append(arg);
168 |
169 | return this;
170 | }
171 |
172 | ///
173 | /// Redirect receipt input to stdin
174 | ///
175 | private string StandartInputRedirectArgument => " - ";
176 |
177 | ///
178 | /// Setting Output Arguments
179 | ///
180 | public PandocFileProcessingSettings SetOutputFileArguments(string? arg)
181 | {
182 | OutputFileArguments = arg;
183 |
184 | return this;
185 | }
186 |
187 | ///
188 | /// This method sets the input files for document file processing
189 | ///
190 | /// An array of media files that represent the input files for the processing
191 | /// Thrown when the input 'files' argument is null
192 | /// An instance of the PandocFileProcessingSettings object, representing the current state of the processing settings
193 | public PandocFileProcessingSettings SetInputFiles(params MediaFile[]? files)
194 | {
195 | // Check if the input files are specified
196 | if (files is null)
197 | throw new ArgumentException("'CustomInputs' Arguments must be specified if there are no input files");
198 |
199 | // If the number of input files is 0, throw an exception
200 | switch (files.Length)
201 | {
202 | case 0:
203 | throw new NotSupportedException("No input files");
204 |
205 | // If there is only one input file
206 | case 1:
207 | // Check the type of input file (Path, NamedPipe or Stream)
208 | // and append the file path to the string builder
209 | _stringBuilder.Append(files[0].InputType is MediaFileInputType.Path or MediaFileInputType.NamedPipe
210 | ? files[0].InputFilePath!
211 | : StandartInputRedirectArgument);
212 |
213 | // Set input streams for the files
214 | SetInputStreams(files);
215 |
216 | return this;
217 | }
218 |
219 | // If at most one of the input files is passed as a stream
220 | if (files.Count(x => x.InputType == MediaFileInputType.Stream) <= 1)
221 | {
222 | // Aggregate the input file paths (or the standard input redirect argument) into a single string
223 | // and append it to the string builder
224 | _stringBuilder.Append(files.Aggregate(string.Empty,
225 | (current, file) =>
226 | current
227 | + " "
228 | + (file.InputType is MediaFileInputType.Path or MediaFileInputType.NamedPipe
229 | ? file.InputFilePath!
230 | : StandartInputRedirectArgument)));
231 |
232 | // Set input streams for the files
233 | SetInputStreams(files);
234 |
235 | return this;
236 | }
237 |
238 | // If more than one input file is passed as a stream, assign each stream a named pipe channel
239 | _stringBuilder.Append(files.Aggregate(string.Empty,
240 | (current, file) => current
241 | + " "
242 | + (file.InputType is MediaFileInputType.Path or MediaFileInputType.NamedPipe
243 | ? file.InputFilePath!
244 | : SetPipeChannel(Guid.NewGuid().ToString(), file))));
245 |
246 | // Set input streams for the files
247 | SetInputStreams(files);
248 |
249 | return this;
250 | }
251 |
252 | ///
253 | /// Summary arguments to process
254 | ///
255 | public override string GetProcessArguments(bool setOutputArguments = true)
256 | {
257 | if (setOutputArguments)
258 | return _stringBuilder + GetOutputArguments();
259 |
260 | return _stringBuilder.ToString();
261 | }
262 |
263 | ///
264 | /// Get output arguments
265 | ///
266 | private string GetOutputArguments()
267 | {
268 | return " -o " + (OutputFileArguments ?? " - ");
269 | }
270 |
271 | ///
272 | /// Get streams to transfer to a process
273 | ///
274 | public override Stream[]? GetInputStreams()
275 | {
276 | return InputStreams?.ToArray();
277 | }
278 |
279 | ///
280 | /// Pipe names for input streams
281 | ///
282 | public override string[]? GetInputPipeNames()
283 | {
284 | return PipeNames?.Keys.ToArray();
285 | }
286 |
287 | ///
288 | /// If the file is transmitted through a stream then assign a channel name to that stream
289 | ///
290 | private string SetPipeChannel(string pipeName, MediaFile file)
291 | {
292 | PipeNames ??= new Dictionary<string, Stream>();
293 | PipeNames.Add(pipeName, file.InputFileStream!);
294 |
295 | return pipeName.ToPipeDir();
296 | }
297 |
298 | ///
299 | /// Set input streams from files
300 | /// If the input files are streams
301 | ///
302 | private void SetInputStreams(params MediaFile[]? files)
303 | {
304 | // If null, return without doing anything
305 | if (files is null)
306 | return;
307 |
308 | // Check if there is only one input file with Stream type
309 | if (files.Count(x => x.InputType == MediaFileInputType.Stream) == 1)
310 | {
311 | // If yes, create the InputStreams list if it is null
312 | InputStreams ??= new List<Stream>();
313 |
314 | // Add the single stream to InputStreams list
315 | InputStreams.Add(files.First(x => x.InputType == MediaFileInputType.Stream).InputFileStream!);
316 | }
317 |
318 | // Check if PipeNames list is not null and has at least one element
319 | if (!(PipeNames?.Count > 0))
320 | return;
321 |
322 | // If yes, create the InputStreams list if it is null
323 | InputStreams ??= new List<Stream>();
324 |
325 | // Add all streams from PipeNames list to InputStreams
326 | InputStreams.AddRange(PipeNames.Select(pipeName => pipeName.Value));
327 | }
328 | }
--------------------------------------------------------------------------------
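Editor's note: a usage sketch for the fluent builder above. Only the option methods and GetProcessArguments come from this file; constructing a MediaFile directly from a path is an assumption about Models/Common/MediaFile.cs, which is not shown here.

using System;
using MediaFileProcessor.Models.Common;
using MediaFileProcessor.Models.Settings;

var settings = new PandocFileProcessingSettings()
              .Standalone()
              .From("docx")
              .To("pdf")
              .SetInputFiles(new MediaFile(@"C:\docs\report.docx")) // assumed path constructor
              .SetOutputFileArguments(@"C:\docs\report.pdf");

// Inspect the assembled pandoc argument string (an " -o <file>" suffix is appended
// when setOutputArguments is true).
Console.WriteLine(settings.GetProcessArguments());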
/MediaFileProcessor/MediaFileProcessor/Models/Video/CropArea.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Video;
2 |
3 | ///
4 | /// Represents a crop area with its X, Y, Width and Height values.
5 | ///
6 | public class CropArea
7 | {
8 | ///
9 | /// The X coordinate of the top-left corner of the crop area.
10 | ///
11 | public int X { get; set; }
12 |
13 | ///
14 | /// The Y coordinate of the top-left corner of the crop area.
15 | ///
16 | public int Y { get; set; }
17 |
18 | ///
19 | /// The width of the crop area.
20 | ///
21 | public int Width { get; set; }
22 |
23 | ///
24 | /// The height of the crop area.
25 | ///
26 | public int Height { get; set; }
27 | }
--------------------------------------------------------------------------------
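Editor's note: CropArea maps directly onto ffmpeg's crop filter, whose syntax is crop=width:height:x:y. A hypothetical helper (not part of the library) that formats the filter string:

using MediaFileProcessor.Models.Video;

internal static class CropAreaSketch
{
    // Illustration only: format an ffmpeg crop filter expression from a CropArea.
    public static string ToCropFilter(CropArea area)
        => $"crop={area.Width}:{area.Height}:{area.X}:{area.Y}";
}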
/MediaFileProcessor/MediaFileProcessor/Models/Video/Disposition.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Video;
2 |
3 | ///
4 | /// Class representing the disposition of media file streams.
5 | ///
6 | public class Disposition
7 | {
8 | ///
9 | /// Indicates the default stream
10 | ///
11 | public int Default { get; set; }
12 |
13 | ///
14 | /// Indicates the dub stream
15 | ///
16 | public int Dub { get; set; }
17 |
18 | ///
19 | /// Indicates the original stream
20 | ///
21 | public int Original { get; set; }
22 |
23 | ///
24 | /// Indicates the comment stream
25 | ///
26 | public int Comment { get; set; }
27 |
28 | ///
29 | /// Indicates the lyrics stream
30 | ///
31 | public int Lyrics { get; set; }
32 |
33 | ///
34 | /// Indicates the karaoke stream
35 | ///
36 | public int Karaoke { get; set; }
37 |
38 | ///
39 | /// Indicates the forced stream
40 | ///
41 | public int Forced { get; set; }
42 |
43 | ///
44 | /// Indicates the hearing impaired stream
45 | ///
46 | public int HearingImpaired { get; set; }
47 |
48 | ///
49 | /// Indicates the visual impaired stream
50 | ///
51 | public int VisualImpaired { get; set; }
52 |
53 | ///
54 | /// Indicates the clean effects stream
55 | ///
56 | public int CleanEffects { get; set; }
57 |
58 | ///
59 | /// Represents the number of attached pictures.
60 | ///
61 | public int AttachedPic { get; set; }
62 |
63 | ///
64 | /// Represents the number of timed thumbnails
65 | ///
66 | public int TimedThumbnails { get; set; }
67 |
68 | ///
69 | /// Represents the number of captions
70 | ///
71 | public int Captions { get; set; }
72 |
73 | ///
74 | /// Represents the number of descriptions
75 | ///
76 | public int Descriptions { get; set; }
77 |
78 | ///
79 | /// Represents the number of metadata
80 | ///
81 | public int Metadata { get; set; }
82 |
83 | ///
84 | /// Represents the number of dependent items
85 | ///
86 | public int Dependent { get; set; }
87 |
88 | ///
89 | /// Represents the number of still images
90 | ///
91 | public int StillImage { get; set; }
92 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Video/Format.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Video;
2 |
3 | ///
4 | /// Represents the format of a video file
5 | ///
6 | public class Format
7 | {
8 | ///
9 | /// Represents the duration of the video file in string format
10 | ///
11 | public string? Duration { get; set; }
12 |
13 | ///
14 | /// Represents the size of the video file in string format
15 | ///
16 | public string? Size { get; set; }
17 |
18 | ///
19 | /// Represents the bit rate of the video file in string format
20 | ///
21 | public string? BitRate { get; set; }
22 |
23 | ///
24 | /// Represents the tag information of the video file
25 | ///
26 | public Tag? Tag { get; set; }
27 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Video/SideDataList.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Video;
2 |
3 | ///
4 | /// Represents a list of side data for a media file
5 | ///
6 | public class SideDataList
7 | {
8 | ///
9 | /// Represents the type of the side data in string format
10 | ///
11 | public string? SideDataType { get; set; }
12 |
13 | ///
14 | /// Represents the display matrix of the side data in string format
15 | ///
16 | public string? Displaymatrix { get; set; }
17 |
18 | ///
19 | /// Represents the rotation of the side data
20 | ///
21 | public int Rotation { get; set; }
22 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Video/StreamInfo.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Video;
2 |
3 | ///
4 | /// Represents the information of a stream in a media file
5 | ///
6 | public class StreamInfo
7 | {
8 | ///
9 | /// Represents the index of the stream
10 | ///
11 | public int Index { get; set; }
12 |
13 | ///
14 | /// Represents the codec name of the stream in string format
15 | ///
16 | public string? CodecName { get; set; }
17 |
18 | ///
19 | /// Represents the codec long name of the stream in string format
20 | ///
21 | public string? CodecLongName { get; set; }
22 |
23 | ///
24 | /// Represents the profile of the stream in string format
25 | ///
26 | public string? Profile { get; set; }
27 |
28 | ///
29 | /// Represents the codec type of the stream in string format
30 | ///
31 | public string? CodecType { get; set; }
32 |
33 | ///
34 | /// Represents the codec tag string of the stream in string format
35 | ///
36 | public string? CodecTagString { get; set; }
37 |
38 | ///
39 | /// Represents the codec tag of the stream in string format
40 | ///
41 | public string? CodecTag { get; set; }
42 |
43 | ///
44 | /// Represents the width of the stream in pixels
45 | ///
46 | public int Width { get; set; }
47 |
48 | ///
49 | /// Represents the height of the stream in pixels
50 | ///
51 | public int Height { get; set; }
52 |
53 | ///
54 | /// Represents the coded width of the stream in pixels
55 | ///
56 | public int CodedWidth { get; set; }
57 |
58 | ///
59 | /// Represents the coded height of the stream in pixels
60 | ///
61 | public int CodedHeight { get; set; }
62 |
63 | ///
64 | /// Represents the number of closed captions in the video.
65 | ///
66 | public int ClosedCaptions { get; set; }
67 |
68 | ///
69 | /// Represents the presence of film grain effect in the video.
70 | ///
71 | public int FilmGrain { get; set; }
72 |
73 | ///
74 | /// Indicates if the video has B frames.
75 | ///
76 | public int HasBFrames { get; set; }
77 |
78 | ///
79 | /// Represents the sample aspect ratio of the video.
80 | ///
81 | public string? SampleAspectRatio { get; set; }
82 |
83 | ///
84 | /// Represents the display aspect ratio of the video.
85 | ///
86 | public string? DisplayAspectRatio { get; set; }
87 |
88 | ///
89 | /// Represents the pixel format of the video.
90 | ///
91 | public string? PixFmt { get; set; }
92 |
93 | ///
94 | /// Represents the level of the video.
95 | ///
96 | public int Level { get; set; }
97 |
98 | ///
99 | /// Represents the color range of the video.
100 | ///
101 | public string? ColorRange { get; set; }
102 |
103 | ///
104 | /// Represents the color space of the video.
105 | ///
106 | public string? ColorSpace { get; set; }
107 |
108 | ///
109 | /// Represents the color transfer characteristic of the video.
110 | ///
111 | public string? ColorTransfer { get; set; }
112 |
113 | ///
114 | /// Represents the color primaries of the video.
115 | ///
116 | public string? ColorPrimaries { get; set; }
117 |
118 | ///
119 | /// Represents the chroma location of the video.
120 | ///
121 | public string? ChromaLocation { get; set; }
122 |
123 | ///
124 | /// Represents the field order of the video.
125 | ///
126 | public string? FieldOrder { get; set; }
127 |
128 | ///
129 | /// Represents the number of reference frames in the video.
130 | ///
131 | public int Refs { get; set; }
132 |
133 | ///
134 | /// Indicates if the video is in AVC format.
135 | ///
136 | public string? IsAvc { get; set; }
137 |
138 | ///
139 | /// The size of the NAL unit, expressed as the number of bytes of the prefix that determine the size of each NAL unit.
140 | ///
141 | public string? NalLengthSize { get; set; }
142 |
143 | ///
144 | /// The identifier of the stream.
145 | ///
146 | public string? Id { get; set; }
147 |
148 | ///
149 | /// The real frame rate of the stream.
150 | ///
151 | public string? RFrameRate { get; set; }
152 |
153 | ///
154 | /// The average frame rate of the stream.
155 | ///
156 | public string? AvgFrameRate { get; set; }
157 |
158 | ///
159 | /// The time base of the stream.
160 | ///
161 | public string? TimeBase { get; set; }
162 |
163 | ///
164 | /// The presentation timestamp of the first frame of the stream, in stream time base.
165 | ///
166 | public int StartPts { get; set; }
167 |
168 | ///
169 | /// The start time of the stream, expressed as a string.
170 | ///
171 | public string? StartTime { get; set; }
172 |
173 | ///
174 | /// The duration of the stream, expressed in stream time base units.
175 | ///
176 | public int DurationTs { get; set; }
177 |
178 | ///
179 | /// The duration of the stream, expressed as a string.
180 | ///
181 | public string? Duration { get; set; }
182 |
183 | ///
184 | /// The bit rate of the stream, expressed as a string.
185 | ///
186 | public string? BitRate { get; set; }
187 |
188 | ///
189 | /// The number of bits per raw audio sample.
190 | ///
191 | public string? BitsPerRawSample { get; set; }
192 |
193 | ///
194 | /// The number of frames in the stream.
195 | ///
196 | public string? NbFrames { get; set; }
197 |
198 | ///
199 | /// The size of the extra data in the stream.
200 | ///
201 | public int ExtradataSize { get; set; }
202 |
203 | ///
204 | /// The disposition of the stream, if any.
205 | ///
206 | public Disposition? Disposition { get; set; }
207 |
208 | ///
209 | /// The tag associated with the stream, if any.
210 | ///
211 | public Tag? Tag { get; set; }
212 |
213 | ///
214 | /// A list of side data in the stream, if any.
215 | ///
216 | public List<SideDataList>? SideDataList { get; set; }
217 |
218 | ///
219 | /// The sample format of the stream.
220 | ///
221 | public string? SampleFmt { get; set; }
222 |
223 | ///
224 | /// The sample rate of the stream.
225 | ///
226 | public string? SampleRate { get; set; }
227 |
228 | ///
229 | /// The number of channels in the stream.
230 | ///
231 | public int? Channels { get; set; }
232 |
233 | ///
234 | /// The channel layout of the stream.
235 | ///
236 | public string? ChannelLayout { get; set; }
237 |
238 | ///
239 | /// The number of bits per sample in the stream.
240 | ///
241 | public int? BitsPerSample { get; set; }
242 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Video/Tag.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Video;
2 |
3 | ///
4 | /// Represents a tag information for a media file
5 | ///
6 | public class Tag
7 | {
8 | ///
9 | /// Represents the creation time of the tag
10 | ///
11 | public DateTime CreationTime { get; set; }
12 |
13 | ///
14 | /// Represents the language of the tag in string format
15 | ///
16 | public string? Language { get; set; }
17 |
18 | ///
19 | /// Represents the handler name of the tag in string format
20 | ///
21 | public string? HandlerName { get; set; }
22 |
23 | ///
24 | /// Represents the vendor ID of the tag in string format
25 | ///
26 | public string? VendorId { get; set; }
27 | }
--------------------------------------------------------------------------------
/MediaFileProcessor/MediaFileProcessor/Models/Video/VideoFileInfo.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Models.Video;
2 |
3 | ///
4 | /// Detailed information about the video file
5 | ///
6 | public class VideoFileInfo
7 | {
8 | ///
9 | /// Represents the information of a stream in a media file
10 | ///
11 | public StreamInfo[]? Streams { get; set; }
12 |
13 | ///
14 | /// Represents the format of a video file
15 | ///
16 | public Format? Format { get; set; }
17 | }
--------------------------------------------------------------------------------
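Editor's note: these Models/Video classes mirror the JSON that `ffprobe -print_format json -show_format -show_streams` emits (snake_case keys such as codec_name and avg_frame_rate). A hedged deserialization sketch with System.Text.Json; the snake-case naming policy shown requires .NET 8, and the library itself may use a different deserializer or explicit attributes.

using System;
using System.IO;
using System.Text.Json;
using MediaFileProcessor.Models.Video;

// Sketch only: map ffprobe's snake_case JSON onto VideoFileInfo/StreamInfo/Format.
var options = new JsonSerializerOptions
{
    PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower, // .NET 8+
    PropertyNameCaseInsensitive = true
};

string ffprobeJson = File.ReadAllText("probe.json"); // saved output of the ffprobe command above
var info = JsonSerializer.Deserialize<VideoFileInfo>(ffprobeJson, options);

// Note: some keys (for example ffprobe's "tags") may still need explicit mapping attributes.
Console.WriteLine(info?.Format?.Duration);
Console.WriteLine(info?.Streams?[0].CodecName);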
/MediaFileProcessor/MediaFileProcessor/Models/ZipFile/ZipFileEntry.cs:
--------------------------------------------------------------------------------
1 | using MediaFileProcessor.Models.Enums;
2 |
3 | namespace MediaFileProcessor.Models.ZipFile;
4 |
5 | ///
6 | /// Represents an entry in Zip file directory
7 | ///
8 | public class ZipFileEntry
9 | {
10 | ///
11 | /// Compression method
12 | ///
13 | public ZipCompression Method;
14 |
15 | ///
16 | /// Full path and filename as stored in Zip
17 | ///
18 | public string? FilenameInZip;
19 |
20 | ///
21 | /// Original file size
22 | ///
23 | public long FileSize;
24 |
25 | ///
26 | /// Compressed file size
27 | ///
28 | public long CompressedSize;
29 |
30 | ///
31 | /// Offset of header information inside Zip storage
32 | ///
33 | public long HeaderOffset;
34 |
35 | ///
36 | /// Offset of file inside Zip
37 | ///
38 | public long FileOffset;
39 |
40 | ///
41 | /// Size of header information
42 | ///
43 | public uint HeaderSize;
44 |
45 | ///
46 | /// 32-bit checksum of entire file
47 | ///
48 | public uint Crc32;
49 |
50 | ///
51 | /// Last modification time of file
52 | ///
53 | public DateTime ModifyTime;
54 |
55 | ///
56 | /// Creation time of file
57 | ///
58 | public DateTime CreationTime;
59 |
60 | ///
61 | /// Last access time of file
62 | ///
63 | public DateTime AccessTime;
64 |
65 | ///
66 | /// User comment for file
67 | ///
68 | public string? Comment;
69 |
70 | ///
71 | /// True if UTF8 encoding for filename and comments, false if default (CP 437)
72 | ///
73 | public bool EncodeUTF8;
74 |
75 | ///
76 | /// Overridden method
77 | ///
78 | /// Filename in Zip
79 | public override string? ToString()
80 | {
81 | return FilenameInZip;
82 | }
83 | }
--------------------------------------------------------------------------------
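Editor's note: ZipFileEntry instances are produced by ZipFileProcessor.ReadCentralDir; the same Open/ReadCentralDir/ExtractFile calls appear in DocumentFileProcessor and ImageFileProcessor below. A short listing sketch:

using System;
using System.IO;
using MediaFileProcessor.Processors;

// Enumerate the central directory of an archive and print each entry.
using var zip = ZipFileProcessor.Open("archive.zip", FileAccess.Read);

foreach (var entry in zip.ReadCentralDir())
    Console.WriteLine($"{entry.FilenameInZip}: {entry.CompressedSize}/{entry.FileSize} bytes, {entry.Method}");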
/MediaFileProcessor/MediaFileProcessor/Processors/DocumentFileProcessor.cs:
--------------------------------------------------------------------------------
1 | using MediaFileProcessor.Models.Common;
2 | using MediaFileProcessor.Models.Settings;
3 | using MediaFileProcessor.Processors.Interfaces;
4 | namespace MediaFileProcessor.Processors;
5 |
6 | ///
7 | /// This class is responsible for converting Document files.
8 | ///
9 | public class DocumentFileProcessor : IDocumentFileProcessor
10 | {
11 | ///
12 | /// The name of the pandoc executable.
13 | ///
14 | private readonly string _pandoc = "pandoc";
15 |
16 | public DocumentFileProcessor(string pandocExePath)
17 | {
18 | _pandoc = pandocExePath;
19 | }
20 |
21 | public DocumentFileProcessor() { }
22 |
23 | ///
24 | /// The address from which the pandoc executable can be downloaded.
25 | ///
26 | private const string ZipAddress = "https://github.com/jgm/pandoc/releases/download/3.0.1/pandoc-3.0.1-windows-x86_64.zip";
27 |
28 | ///
29 | public async Task<MemoryStream?> ExecuteAsync(PandocFileProcessingSettings settings, CancellationToken cancellationToken)
30 | {
31 | using var process = new MediaFileProcess(_pandoc,
32 | settings.GetProcessArguments(),
33 | settings,
34 | settings.GetInputStreams(),
35 | settings.GetInputPipeNames());
36 |
37 | return await process.ExecuteAsync(cancellationToken);
38 | }
39 |
40 | ///
41 | /// Converts a .docx file to a PDF file asynchronously.
42 | ///
43 | /// The .docx file to be converted.
44 | /// The file name of the converted PDF file. If `null`, the PDF file is not saved to disk.
45 | /// A cancellation token that can be used to cancel the operation.
46 | /// A `MemoryStream` containing the converted PDF file.
47 | private async Task<MemoryStream?> ExecuteConvertDocxToPdfAsync(MediaFile file, string? outputFile, CancellationToken cancellationToken)
48 | {
49 | var settings = new PandocFileProcessingSettings().From("docx").To("pdf").Standalone().SetInputFiles(file).SetOutputFileArguments(outputFile);
50 |
51 | return await ExecuteAsync(settings, cancellationToken);
52 | }
53 |
54 | ///
55 | public async Task ConvertDocxToPdfAsync(MediaFile file, string? outputFile, CancellationToken? cancellationToken = null)
56 | {
57 | await ExecuteConvertDocxToPdfAsync(file, outputFile, cancellationToken ?? default);
58 | }
59 |
60 | ///
61 | public async Task<MemoryStream> ConvertDocxToPdfAsStreamAsync(MediaFile file, CancellationToken? cancellationToken = null)
62 | {
63 | return (await ExecuteConvertDocxToPdfAsync(file, null, cancellationToken ?? default))!;
64 | }
65 |
66 | ///
67 | public async Task<byte[]> ConvertDocxToPdfAsBytesAsync(MediaFile file, CancellationToken? cancellationToken = null)
68 | {
69 | return (await ExecuteConvertDocxToPdfAsync(file, null, cancellationToken ?? default))!.ToArray();
70 | }
71 |
72 | ///
73 | /// Downloads executable files pandoc.exe from a remote ZIP archive.
74 | ///
75 | ///
76 | /// Thrown when pandoc.exe is not found in the ZIP archive.
77 | ///
78 | public static async Task DownloadExecutableFilesAsync()
79 | {
80 | var fileName = $"{Guid.NewGuid()}.zip";
81 | var pandocFound = false;
82 |
83 | try
84 | {
85 | // Downloads the ZIP archive from the remote location specified by ZipAddress.
86 | await FileDownloadProcessor.DownloadFileAsync(new Uri(ZipAddress), fileName);
87 |
88 | // Open an existing zip file for reading
89 | using var zip = ZipFileProcessor.Open(fileName, FileAccess.Read);
90 |
91 | // Read the central directory collection
92 | var dir = zip.ReadCentralDir();
93 |
94 | // Look for the desired file
95 | foreach (var entry in dir.Where(entry => Path.GetFileName(entry.FilenameInZip) == "pandoc.exe"))
96 | {
97 | zip.ExtractFile(entry, "pandoc.exe"); // File found, extract it}
98 | pandocFound = true;
99 | }
100 |
101 | if(!pandocFound)
102 | throw new FileNotFoundException("pandoc.exe not found");
103 | }
104 | finally
105 | {
106 | // Delete the downloaded ZIP archive after extracting the required files.
107 | if(File.Exists(fileName))
108 | File.Delete(fileName);
109 | }
110 | }
111 | }
--------------------------------------------------------------------------------
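Editor's note: a usage sketch for the processor above. pandoc must be available on PATH, or its full path must be passed to the constructor; constructing MediaFile from a file path is an assumption about a type not shown in this section.

using MediaFileProcessor.Models.Common;
using MediaFileProcessor.Processors;

var processor = new DocumentFileProcessor();

// Convert a .docx to PDF and write it to disk.
await processor.ConvertDocxToPdfAsync(new MediaFile(@"C:\docs\report.docx"), @"C:\docs\report.pdf");

// Or keep the PDF in memory instead.
using var pdf = await processor.ConvertDocxToPdfAsStreamAsync(new MediaFile(@"C:\docs\report.docx"));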
/MediaFileProcessor/MediaFileProcessor/Processors/FileDownloadProcessor.cs:
--------------------------------------------------------------------------------
1 | namespace MediaFileProcessor.Processors;
2 |
3 | ///
4 | /// File Download handler
5 | ///
6 | public static class FileDownloadProcessor
7 | {
8 | ///
9 | /// Download file
10 | ///
11 | /// The address to download from
12 | /// The destination file name
13 | /// Whether to delete the downloaded data if the download fails
14 | public static async Task DownloadFileAsync(Uri url, string fileName, bool deleteIfFail = true)
15 | {
16 | try
17 | {
18 | using var client = new HttpClient();
19 |
20 | // Add a user agent header in case the requested URI contains a query.
21 | client.DefaultRequestHeaders.Add("user-agent", "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.2; .NET CLR 1.0.3705;)");
22 |
23 | using var response = await client.GetAsync(url, HttpCompletionOption.ResponseHeadersRead);
24 |
25 | using var content = response.Content;
26 |
27 | // Get the total size of the file
28 | var totalBytes = content.Headers.ContentLength.GetValueOrDefault();
29 |
30 | await using var fileStream = new FileStream(fileName, FileMode.Create, FileAccess.Write, FileShare.None);
31 |
32 | // Get the stream of the content
33 | await using var contentStream = await content.ReadAsStreamAsync();
34 |
35 | // Read the content stream
36 | var buffer = new byte[8192];
37 | int bytesRead;
38 | long bytesReceived = 0;
39 |
40 | while ((bytesRead = await contentStream.ReadAsync(buffer, 0, buffer.Length)) > 0)
41 | {
42 | // Write the data to the file
43 | await fileStream.WriteAsync(buffer, 0, bytesRead);
44 | bytesReceived += bytesRead;
45 |
46 | // Calculate the download progress in percentages
47 | var percentage = (double)bytesReceived / totalBytes * 100;
48 |
49 | // Round the percentage to the nearest tenth
50 | percentage = Math.Round(percentage, 1);
51 |
52 | // Set the cursor position to the beginning of the line
53 | Console.SetCursorPosition(0, Console.CursorTop);
54 |
55 | // Print the download progress percentage to the console
56 | Console.Write(percentage + "%");
57 | }
58 | }
59 | catch
60 | {
61 | // Delete the partially downloaded file only when the download fails
62 | if (deleteIfFail && File.Exists(fileName))
63 | File.Delete(fileName);
64 |
65 | throw;
66 | }
67 | }
68 | }
--------------------------------------------------------------------------------
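Editor's note: a minimal usage sketch of the downloader above; progress percentages are written to the console while the file is streamed to disk.

using System;
using MediaFileProcessor.Processors;

// Download a file; the third parameter controls whether a partial file is deleted on failure.
await FileDownloadProcessor.DownloadFileAsync(
    new Uri("https://example.com/sample.mp4"),
    "sample.mp4",
    deleteIfFail: true);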
/MediaFileProcessor/MediaFileProcessor/Processors/ImageFileProcessor.cs:
--------------------------------------------------------------------------------
1 | using MediaFileProcessor.Models.Common;
2 | using MediaFileProcessor.Models.Enums;
3 | using MediaFileProcessor.Models.Settings;
4 | using MediaFileProcessor.Processors.Interfaces;
5 | namespace MediaFileProcessor.Processors;
6 |
7 | ///
8 | /// This class is responsible for processing image files.
9 | ///
10 | public class ImageFileProcessor : IImageFileProcessor
11 | {
12 | ///
13 | /// The name of the convert executable.
14 | ///
15 | private readonly string _convert = "convert";
16 |
17 | ///
18 | /// Constructor
19 | ///
20 | /// The name of the convert executable.
21 | public ImageFileProcessor(string convertExePath)
22 | {
23 | _convert = convertExePath;
24 | }
25 |
26 | ///
27 | /// Constructor
28 | ///
29 | public ImageFileProcessor() { }
30 |
31 | ///
32 | /// The address from which the convert executable can be downloaded.
33 | ///
34 | private const string ZipAddress = "https://imagemagick.org/archive/binaries/ImageMagick-7.1.1-25-portable-Q16-x64.zip";
35 |
36 | ///
37 | public async Task<MemoryStream?> ExecuteAsync(ImageMagickProcessingSettings settings, CancellationToken cancellationToken)
38 | {
39 | using var process = new MediaFileProcess(_convert,
40 | settings.GetProcessArguments(),
41 | settings,
42 | settings.GetInputStreams(),
43 | settings.GetInputPipeNames());
44 |
45 | return await process.ExecuteAsync(cancellationToken);
46 | }
47 |
48 | //======================================================================================================================================================================
49 |
50 | ///
51 | /// Executes the compress image asynchronously.
52 | ///
53 | /// The image file to be compressed.
54 | /// The format of the input image.
55 | /// The quality of the output image.
56 | /// The type of filter to be applied to the image.
57 | /// The size of the thumbnail to be generated.
58 | /// The path of the output file.
59 | /// The format of the output image.
60 | /// The cancellation token used to cancel the operation.
61 | /// A memory stream that contains the compressed image.
62 | private async Task<MemoryStream?> ExecuteCompressImageAsync(MediaFile file,
63 | ImageFormatType inputFormatType,
64 | int quality,
65 | FilterType filterType,
66 | string thumbnail,
67 | string? outputFile,
68 | ImageFormatType outputFormatType,
69 | CancellationToken cancellationToken)
70 | {
71 | var settings = new ImageMagickProcessingSettings().Format(inputFormatType)
72 | .SetInputFiles(file)
73 | .Quality(quality)
74 | .Filter(filterType)
75 | .SamplingFactor("4:2:0")
76 | .Define("jpeg:dct-method=float")
77 | .Thumbnail(thumbnail)
78 | .Format(outputFormatType)
79 | .SetOutputFileArguments(outputFile);
80 |
81 | return await ExecuteAsync(settings, cancellationToken);
82 | }
83 |
84 | ///
85 | public async Task CompressImageAsync(MediaFile file,
86 | ImageFormatType inputFormatType,
87 | int quality,
88 | FilterType filterType,
89 | string thumbnail,
90 | string outputFile,
91 | ImageFormatType outputFormatType,
92 | CancellationToken? cancellationToken = null)
93 | {
94 | await ExecuteCompressImageAsync(file, inputFormatType, quality, filterType, thumbnail, outputFile, outputFormatType, cancellationToken ?? default);
95 | }
96 |
97 | ///
98 | public async Task<MemoryStream> CompressImageAsStreamAsync(MediaFile file,
99 | ImageFormatType inputFormatType,
100 | int quality,
101 | FilterType filterType,
102 | string thumbnail,
103 | ImageFormatType outputFormatType,
104 | CancellationToken? cancellationToken = null)
105 | {
106 | return (await ExecuteCompressImageAsync(file, inputFormatType, quality, filterType, thumbnail, null, outputFormatType, cancellationToken ?? default))!;
107 | }
108 |
109 | ///
110 | public async Task<byte[]> CompressImageAsBytesAsync(MediaFile file,
111 | ImageFormatType inputFormatType,
112 | int quality,
113 | FilterType filterType,
114 | string thumbnail,
115 | ImageFormatType outputFormatType,
116 | CancellationToken? cancellationToken = null)
117 | {
118 | return (await ExecuteCompressImageAsync(file, inputFormatType, quality, filterType, thumbnail, null, outputFormatType, cancellationToken ?? default))!
119 | .ToArray();
120 | }
121 |
122 | //======================================================================================================================================================================
123 |
124 | ///
125 | /// Executes the conversion of an image file from one format to another.
126 | ///
127 | /// The file to convert.
128 | /// The format of the input file.
129 | /// The path to the output file (optional).
130 | /// The format of the output file (optional).
131 | /// The cancellation token used to cancel the operation.
132 | /// A memory stream containing the output image file.
133 | private async Task<MemoryStream?> ExecuteConvertImageAsync(MediaFile file,
134 | ImageFormatType inputFormatType,
135 | string? outputFile,
136 | ImageFormatType? outputFormat,
137 | CancellationToken cancellationToken)
138 | {
139 | var settings = new ImageMagickProcessingSettings().Format(inputFormatType).SetInputFiles(file).Format(outputFormat).SetOutputFileArguments(outputFile);
140 |
141 | return await ExecuteAsync(settings, cancellationToken);
142 | }
143 |
144 | ///
145 | public async Task ConvertImageAsync(MediaFile file, ImageFormatType inputFormatType, string outputFile, CancellationToken? cancellationToken = null)
146 | {
147 | await ExecuteConvertImageAsync(file, inputFormatType, outputFile, null, cancellationToken ?? default);
148 | }
149 |
150 | ///
151 | public async Task<MemoryStream> ConvertImageAsStreamAsync(MediaFile file,
152 | ImageFormatType inputFormatType,
153 | ImageFormatType? outputFormat,
154 | CancellationToken? cancellationToken = null)
155 | {
156 | return (await ExecuteConvertImageAsync(file, inputFormatType, null, outputFormat, cancellationToken ?? default))!;
157 | }
158 |
159 | ///
160 | public async Task<byte[]> ConvertImageAsBytesAsync(MediaFile file, ImageFormatType inputFormatType, ImageFormatType? outputFormat, CancellationToken? cancellationToken = null)
161 | {
162 | return (await ExecuteConvertImageAsync(file, inputFormatType, null, outputFormat, cancellationToken ?? default))!.ToArray();
163 | }
164 |
165 | //======================================================================================================================================================================
166 |
167 | ///
168 | /// Executes the resize image operation with the specified parameters asynchronously.
169 | ///
170 | /// The input file.
171 | /// The input format of the image file.
172 | /// The size to resize the image to in the format of "widthxheight".
173 | /// The desired output format for the image. If null, the output format will be the same as the input format.
174 | /// The path to the output file. If null, the image will not be saved to a file.
175 | /// A cancellation token that can be used to cancel the asynchronous operation.
176 | /// A task that represents the asynchronous operation. The task result is the memory stream containing the resized image data.
177 | private async Task<MemoryStream?> ExecuteResizeImageAsync(MediaFile file,
178 | ImageFormatType inputFormatType,
179 | string size,
180 | ImageFormatType? outputFormat,
181 | string? outputFile,
182 | CancellationToken cancellationToken)
183 | {
184 | var settings = new ImageMagickProcessingSettings().Resize(size)
185 | .Quality(92)
186 | .Format(inputFormatType)
187 | .SetInputFiles(file)
188 | .Format(outputFormat)
189 | .SetOutputFileArguments(outputFile);
190 |
191 | return await ExecuteAsync(settings, cancellationToken);
192 | }
193 |
194 | ///
195 | public async Task ResizeImageAsync(MediaFile file, ImageFormatType inputFormatType, string size, string outputFile, CancellationToken? cancellationToken = null)
196 | {
197 | await ExecuteResizeImageAsync(file, inputFormatType, size, null, outputFile, cancellationToken ?? default);
198 | }
199 |
200 | ///
201 | public async Task<MemoryStream> ResizeImageAsStreamAsync(MediaFile file,
202 | ImageFormatType inputFormatType,
203 | string size,
204 | ImageFormatType? outputFormat,
205 | CancellationToken? cancellationToken = null)
206 | {
207 | return (await ExecuteResizeImageAsync(file, inputFormatType, size, outputFormat, null, cancellationToken ?? default))!;
208 | }
209 |
210 | ///
211 | public async Task<byte[]> ResizeImageAsBytesAsync(MediaFile file,
212 | ImageFormatType inputFormatType,
213 | string size,
214 | ImageFormatType? outputFormat,
215 | CancellationToken? cancellationToken = null)
216 | {
217 | return (await ExecuteResizeImageAsync(file, inputFormatType, size, outputFormat, null, cancellationToken ?? default))!.ToArray();
218 | }
219 |
220 | //======================================================================================================================================================================
221 |
222 | ///
223 | /// This method converts a series of images into a single animated gif.
224 | ///
225 | /// The input file(s) to be converted into a gif.
226 | /// The delay between each frame in the gif in hundredths of a second.
227 | /// The format of the input files.
228 | /// The file path where the resulting gif will be saved. If set to null, the method returns the gif as a stream.
229 | /// A cancellation token that can be used by other objects or threads to receive notice of cancellation.
230 | /// The resulting gif as a memory stream, or null if outputFile is not null.
231 | private async Task<MemoryStream?> ExecuteImagesToGifAsync(MediaFile file, int delay, ImageFormatType inputFormatType, string? outputFile, CancellationToken cancellationToken)
232 | {
233 | var settings = new ImageMagickProcessingSettings().Delay(delay)
234 | .Format(inputFormatType)
235 | .SetInputFiles(file)
236 | .Format(FileFormatType.GIF)
237 | .SetOutputFileArguments(outputFile);
238 |
239 | return await ExecuteAsync(settings, cancellationToken);
240 | }
241 |
242 | ///
243 | public async Task ImagesToGifAsync(MediaFile file, int delay, ImageFormatType inputFormatType, string? outputFile, CancellationToken? cancellationToken = null)
244 | {
245 | await ExecuteImagesToGifAsync(file, delay, inputFormatType, outputFile, cancellationToken ?? default);
246 | }
247 |
248 | ///
249 | public async Task<MemoryStream> ImagesToGifAsStreamAsync(MediaFile file, int delay, ImageFormatType inputFormatType, CancellationToken? cancellationToken = null)
250 | {
251 | return (await ExecuteImagesToGifAsync(file, delay, inputFormatType, null, cancellationToken ?? default))!;
252 | }
253 |
254 | ///
255 | public async Task<byte[]> ImagesToGifAsBytesAsync(MediaFile file, int delay, ImageFormatType inputFormatType, CancellationToken? cancellationToken = null)
256 | {
257 | return (await ExecuteImagesToGifAsync(file, delay, inputFormatType, null, cancellationToken ?? default))!.ToArray();
258 | }
259 |
260 | ///
261 | /// Downloads executable files convert.exe from a remote ZIP archive.
262 | ///
263 | ///
264 | /// Thrown when convert.exe is not found in the ZIP archive.
265 | ///
266 | public static async Task DownloadExecutableFilesAsync()
267 | {
268 | var fileName = $"{Guid.NewGuid()}.zip";
269 |
270 | var convertFound = false;
271 |
272 | try
273 | {
274 | // Downloads the ZIP archive from the remote location specified by ZipAddress.
275 | await FileDownloadProcessor.DownloadFileAsync(new Uri(ZipAddress), fileName);
276 |
277 | // Open an existing zip file for reading
278 | using var zip = ZipFileProcessor.Open(fileName, FileAccess.Read);
279 |
280 | // Read the central directory collection
281 | var dir = zip.ReadCentralDir();
282 |
283 | // Look for the desired file
284 | foreach (var entry in dir.Where(entry => Path.GetFileName(entry.FilenameInZip) == "convert.exe"))
285 | {
286 | zip.ExtractFile(entry, "convert.exe"); // File found, extract it
287 | convertFound = true;
288 | }
289 |
290 | // Check if both the files were found in the ZIP archive.
291 | if(!convertFound)
292 | throw new FileNotFoundException("convert.exe not found");
293 | }
294 | finally
295 | {
296 | // Delete the downloaded ZIP archive after extracting the required files.
297 | if(File.Exists(fileName))
298 | File.Delete(fileName);
299 | }
300 | }
301 | }
--------------------------------------------------------------------------------
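Editor's note: a usage sketch for the image processor above. ImageMagick's convert must be available (or fetched via DownloadExecutableFilesAsync); the MediaFile path constructor and the exact ImageFormatType member names (JPG, PNG) are assumptions here.

using MediaFileProcessor.Models.Common;
using MediaFileProcessor.Models.Enums;
using MediaFileProcessor.Processors;

var processor = new ImageFileProcessor();

// Resize a JPEG to 640x480 and write it to disk.
await processor.ResizeImageAsync(new MediaFile("sample.jpg"), ImageFormatType.JPG, "640x480", "resized.jpg");

// Convert a PNG to JPEG and keep the bytes in memory.
byte[] jpegBytes = await processor.ConvertImageAsBytesAsync(new MediaFile("sample.png"), ImageFormatType.PNG, ImageFormatType.JPG);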
/MediaFileProcessor/MediaFileProcessor/Processors/Interfaces/IDocumentFileProcessor.cs:
--------------------------------------------------------------------------------
1 | using MediaFileProcessor.Models.Common;
2 | using MediaFileProcessor.Models.Settings;
3 | namespace MediaFileProcessor.Processors.Interfaces;
4 |
5 | ///
6 | /// Interface for DocumentFileProcessor
7 | ///
8 | public interface IDocumentFileProcessor
9 | {
10 | ///
11 | /// Executes the conversion of the document file to PDF asynchronously.
12 | ///
13 | /// The settings used for the conversion process.
14 | /// A cancellation token that can be used to cancel the operation.
15 | /// A `MemoryStream` containing the converted PDF file.
16 | Task<MemoryStream?> ExecuteAsync(PandocFileProcessingSettings settings, CancellationToken cancellationToken);
17 |
18 | ///
19 | /// Converts a .docx file to a PDF file and saves it to disk asynchronously.
20 | ///
21 | /// The .docx file to be converted.
22 | /// The file name of the converted PDF file.
23 | /// A cancellation token that can be used to cancel the operation.
24 | Task ConvertDocxToPdfAsync(MediaFile file, string? outputFile, CancellationToken? cancellationToken = null);
25 |
26 | ///
27 | /// Converts the DOCX file to a PDF file as a stream.
28 | ///
29 | /// The media file to be converted.
30 | /// The cancellation token to cancel the operation.
31 | /// The memory stream that contains the converted PDF file.
32 | Task<MemoryStream> ConvertDocxToPdfAsStreamAsync(MediaFile file, CancellationToken? cancellationToken = null);
33 |
34 | ///
35 | /// Converts the DOCX file to a PDF file as a byte array.
36 | ///
37 | /// The media file to be converted.
38 | /// The cancellation token to cancel the operation.
39 | /// The byte array that contains the converted PDF file.
40 | Task<byte[]> ConvertDocxToPdfAsBytesAsync(MediaFile file, CancellationToken? cancellationToken = null);
41 | }
--------------------------------------------------------------------------------
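Editor's note: the Processors/Interfaces folder makes the processors easy to register in a dependency injection container. The sketch below uses Microsoft.Extensions.DependencyInjection purely as an illustration; that package and this registration pattern are not part of the library.

using MediaFileProcessor.Processors;
using MediaFileProcessor.Processors.Interfaces;
using Microsoft.Extensions.DependencyInjection;

var services = new ServiceCollection();
services.AddSingleton<IDocumentFileProcessor, DocumentFileProcessor>();
services.AddSingleton<IImageFileProcessor, ImageFileProcessor>();
services.AddSingleton<IVideoFileProcessor, VideoFileProcessor>();

using var provider = services.BuildServiceProvider();
var docProcessor = provider.GetRequiredService<IDocumentFileProcessor>();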
/MediaFileProcessor/MediaFileProcessor/Processors/Interfaces/IImageFileProcessor.cs:
--------------------------------------------------------------------------------
1 | using MediaFileProcessor.Models.Common;
2 | using MediaFileProcessor.Models.Enums;
3 | using MediaFileProcessor.Models.Settings;
4 | namespace MediaFileProcessor.Processors.Interfaces;
5 |
6 | ///
7 | /// Interface for ImageFileProcessor
8 | ///
9 | public interface IImageFileProcessor
10 | {
11 | ///
12 | /// Executes image processing asynchronously.
13 | ///
14 | /// The settings used for the processing process.
15 | /// A cancellation token that can be used to cancel the operation.
16 | /// A `MemoryStream` containing the processed image.
17 | Task<MemoryStream?> ExecuteAsync(ImageMagickProcessingSettings settings, CancellationToken cancellationToken);
18 |
19 | ///
20 | /// Compresses the image asynchronously.
21 | ///
22 | /// The image file to be compressed.
23 | /// The format of the input image.
24 | /// The quality of the output image.
25 | /// The type of filter to be applied to the image.
26 | /// The size of the thumbnail to be generated.
27 | /// The path of the output file.
28 | /// The format of the output image.
29 | /// The cancellation token used to cancel the operation.
30 | /// A task that represents the asynchronous operation.
31 | Task CompressImageAsync(MediaFile file,
32 | ImageFormatType inputFormatType,
33 | int quality,
34 | FilterType filterType,
35 | string thumbnail,
36 | string outputFile,
37 | ImageFormatType outputFormatType,
38 | CancellationToken? cancellationToken = null);
39 |
40 | ///
41 | /// Compresses an image file as a memory stream.
42 | ///
43 | /// The image file to be compressed.
44 | /// The format of the input image file.
45 | /// The quality level of the output image, with 0 being the lowest and 100 being the highest.
46 | /// The type of filter to be applied during the compression process.
47 | /// A string that specifies the size and location of a thumbnail to extract from the input image.
48 | /// The format of the output image file.
49 | /// A cancellation token that can be used to cancel the operation.
50 | /// A memory stream that contains the compressed image data.
51 | Task<MemoryStream> CompressImageAsStreamAsync(MediaFile file,
52 | ImageFormatType inputFormatType,
53 | int quality,
54 | FilterType filterType,
55 | string thumbnail,
56 | ImageFormatType outputFormatType,
57 | CancellationToken? cancellationToken = null);
58 |
59 | ///
60 | /// Compresses an image to a memory stream as a byte array.
61 | ///
62 | /// The input image file to be compressed.
63 | /// The format of the input image file.
64 | /// The quality of the compressed image, where quality value varies between 0 and 100.
65 | /// The filter type to be applied to the compressed image.
66 | /// The thumbnail to be generated from the compressed image.
67 | /// The format of the output compressed image.
68 | /// A cancellation token to cancel the asynchronous operation.
69 | /// The compressed image as a byte array in memory stream.
70 | Task<byte[]> CompressImageAsBytesAsync(MediaFile file,
71 | ImageFormatType inputFormatType,
72 | int quality,
73 | FilterType filterType,
74 | string thumbnail,
75 | ImageFormatType outputFormatType,
76 | CancellationToken? cancellationToken = null);
77 |
78 | ///
79 | /// Converts an image from one format to another and saves the result to a specified file.
80 | ///
81 | /// The input image file to be converted.
82 | /// The format of the input image file.
83 | /// The file path of the converted image.
84 | /// A cancellation token to observe while waiting for the task to complete.
85 | Task ConvertImageAsync(MediaFile file, ImageFormatType inputFormatType, string outputFile, CancellationToken? cancellationToken = null);
86 |
87 | ///
88 | /// Converts an image and returns the result as a memory stream.
89 | ///
90 | /// The media file to convert
91 | /// The input format of the image
92 | /// The desired output format of the image. If null, the output format will be the same as the input format.
93 | /// The cancellation token used to cancel the operation
94 | /// A memory stream containing the converted image data
95 | Task<MemoryStream> ConvertImageAsStreamAsync(MediaFile file, ImageFormatType inputFormatType, ImageFormatType? outputFormat, CancellationToken? cancellationToken = null);
96 |
97 | ///
98 | /// Converts the image to a byte array format.
99 | ///
100 | /// The file to be converted.
101 | /// The input format of the file.
102 | /// The desired output format of the image. If null, the original format will be kept.
103 | /// A cancellation token that can be used by other objects or threads to receive notice of cancellation.
104 | /// The converted image as a byte array.
105 | Task<byte[]> ConvertImageAsBytesAsync(MediaFile file, ImageFormatType inputFormatType, ImageFormatType? outputFormat, CancellationToken? cancellationToken = null);
106 |
107 | ///
108 | /// Resizes the image asynchronously.
109 | ///
110 | /// The image file to be resized.
111 | /// The input format of the image file.
112 | /// The size to which the image should be resized to in the format of "width x height".
113 | /// The full path and filename of the output file.
114 | /// The optional cancellation token to cancel the operation.
115 | /// A task that represents the asynchronous resize operation.
116 | Task ResizeImageAsync(MediaFile file, ImageFormatType inputFormatType, string size, string outputFile, CancellationToken? cancellationToken = null);
117 |
118 | ///
119 | /// Resizes an image and returns the result as a memory stream.
120 | ///
121 | /// The input image file.
122 | /// The input image format.
123 | /// The size to resize the image to, in the format "widthxheight".
124 | /// The output image format, if different from the input format. Can be null.
125 | /// A cancellation token that can be used to cancel the operation.
126 | /// A memory stream containing the resized image.
127 | Task<MemoryStream> ResizeImageAsStreamAsync(MediaFile file,
128 | ImageFormatType inputFormatType,
129 | string size,
130 | ImageFormatType? outputFormat,
131 | CancellationToken? cancellationToken = null);
132 |
133 | ///
134 | /// Resizes an image and returns the result as a byte array.
135 | ///
136 | /// The input image file to be resized.
137 | /// The format of the input image file.
138 | /// The target size of the resized image, in the format "widthxheight".
139 | /// The format of the output image, if different from the input format. Can be null.
140 | /// A cancellation token that can be used to cancel the operation.
141 | /// A byte array representing the resized image.
142 | Task<byte[]> ResizeImageAsBytesAsync(MediaFile file, ImageFormatType inputFormatType, string size, ImageFormatType? outputFormat, CancellationToken? cancellationToken = null);
143 |
144 | ///
145 | /// Converts a set of image files into a single GIF image.
146 | ///
147 | /// The set of image files to be converted into a single GIF image.
148 | /// The delay between frames in the output GIF image, in hundredths of a second.
149 | /// The format of the input image files.
150 | /// The file path of the output GIF image. If it is not specified, the result will be returned as a memory stream.
151 | /// The cancellation token to cancel the operation.
152 | Task ImagesToGifAsync(MediaFile file, int delay, ImageFormatType inputFormatType, string? outputFile, CancellationToken? cancellationToken = null);
153 |
154 | /// <summary>
155 | /// Converts multiple images to a GIF image as a MemoryStream.
156 | /// </summary>
157 | /// <param name="file">The input MediaFile.</param>
158 | /// <param name="delay">The delay in milliseconds between each frame of the resulting GIF image.</param>
159 | /// <param name="inputFormatType">The input image format.</param>
160 | /// <param name="cancellationToken">A CancellationToken to cancel the asynchronous operation.</param>
161 | /// <returns>A MemoryStream that contains the resulting GIF image.</returns>
162 | Task<MemoryStream> ImagesToGifAsStreamAsync(MediaFile file, int delay, ImageFormatType inputFormatType, CancellationToken? cancellationToken = null);
163 |
164 | /// <summary>
165 | /// Converts multiple images to an animated GIF.
166 | /// </summary>
167 | /// <param name="file">The `MediaFile` object representing the images to convert.</param>
168 | /// <param name="delay">The delay in milliseconds between each image frame in the GIF animation.</param>
169 | /// <param name="inputFormatType">The input image format.</param>
170 | /// <param name="cancellationToken">A `CancellationToken` to observe while waiting for the task to complete.</param>
171 | /// <returns>A `byte[]` containing the animated GIF data.</returns>
172 | Task<byte[]> ImagesToGifAsBytesAsync(MediaFile file, int delay, ImageFormatType inputFormatType, CancellationToken? cancellationToken = null);
173 | }
--------------------------------------------------------------------------------
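A rough usage sketch for the image interface above, since only the declarations appear in this dump. It assumes ImageFileProcessor (Processors/ImageFileProcessor.cs) is the concrete IImageFileProcessor implementation with a parameterless constructor, that MediaFile can be created from a file path, and that the enum members are named JPG/PNG; check the actual sources for the real constructor and enum names.

using System.IO;
using MediaFileProcessor.Models.Common;
using MediaFileProcessor.Models.Enums;
using MediaFileProcessor.Processors;
using MediaFileProcessor.Processors.Interfaces;

// Hypothetical wiring: the constructor shapes and enum member names below are
// assumptions for illustration, not taken from the files shown in this dump.
IImageFileProcessor imageProcessor = new ImageFileProcessor();
var photo = new MediaFile(@"testFiles/sample.jpg");   // assumed MediaFile(string path) constructor

// Convert the JPG to a PNG file on disk.
await imageProcessor.ConvertImageAsync(photo, ImageFormatType.JPG, "converted.png");

// Resize in memory; the size string follows the "widthxheight" convention from the docs above.
using var resized = await imageProcessor.ResizeImageAsStreamAsync(photo, ImageFormatType.JPG, "640x480", ImageFormatType.PNG);
await File.WriteAllBytesAsync("resized.png", resized.ToArray());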
/MediaFileProcessor/MediaFileProcessor/Processors/Interfaces/IVideoFileProcessor.cs:
--------------------------------------------------------------------------------
1 | using MediaFileProcessor.Models.Common;
2 | using MediaFileProcessor.Models.Enums;
3 | using MediaFileProcessor.Models.Settings;
4 | namespace MediaFileProcessor.Processors.Interfaces;
5 |
6 | /// <summary>
7 | /// Interface for VideoFileProcessor
8 | /// </summary>
9 | public interface IVideoFileProcessor
10 | {
11 | /// <summary>
12 | /// Executes the video processing with the provided settings and cancellation token.
13 | /// </summary>
14 | /// <param name="settings">The video processing settings to use for processing the video.</param>
15 | /// <param name="cancellationToken">The cancellation token used to cancel the processing if necessary.</param>
16 | /// <returns>A MemoryStream representing the processed video, or null if the process was cancelled.</returns>
17 | Task<MemoryStream?> ExecuteAsync(FFmpegProcessingSettings settings, CancellationToken cancellationToken);
18 |
19 | /// <summary>
20 | /// Converts images to video and saves it to the specified file path.
21 | /// </summary>
22 | /// <param name="file">The file that contains the list of images to be converted to video.</param>
23 | /// <param name="frameRate">The number of frames per second for the resulting video.</param>
24 | /// <param name="outputFile">The path to the output video file. The file will be overwritten if it already exists.</param>
25 | /// <param name="outputFormat">The format type for the output video file.</param>
26 | /// <param name="cancellationToken">A <see cref="CancellationToken"/> used to cancel the asynchronous operation.</param>
27 | Task ConvertImagesToVideoAsync(MediaFile file,
28 | int frameRate,
29 | string? outputFile = null,
30 | FileFormatType? outputFormat = null,
31 | CancellationToken? cancellationToken = null);
32 |
33 | /// <summary>
34 | /// Extracts audio from a video file and saves it to a specified file path.
35 | /// </summary>
36 | /// <param name="file">The video file to extract audio from.</param>
37 | /// <param name="outputFormat">The format of the output audio file.</param>
38 | /// <param name="outputFile">The file path where the extracted audio will be saved.</param>
39 | /// <param name="cancellationToken">A <see cref="CancellationToken"/> used to cancel the operation.</param>
40 | Task ExtractAudioFromVideoAsync(MediaFile file,
41 | string? outputFile = null,
42 | FileFormatType? outputFormat = null,
43 | CancellationToken? cancellationToken = null);
44 |
45 | /// <summary>
46 | /// Converts the given `MediaFile` to the specified `FileFormatType` and saves it to the file at the specified `outputFile` path.
47 | /// </summary>
48 | /// <param name="file">The `MediaFile` to be converted.</param>
49 | /// <param name="outputFile">The path where the converted file will be saved.</param>
50 | /// <param name="outputFormat">The desired `FileFormatType` of the converted file.</param>
51 | /// <param name="cancellationToken">An optional `CancellationToken` that can be used to cancel the operation.</param>
52 | /// <returns>A `Task` representing the asynchronous operation.</returns>
53 | Task ConvertVideoAsync(MediaFile file,
54 | string? outputFile = null,
55 | FileFormatType? outputFormat = null,
56 | CancellationToken? cancellationToken = null);
57 |
58 | /// <summary>
59 | /// Adds a watermark to the given video file and saves the result to the specified output path.
60 | /// </summary>
61 | /// <param name="videoFile">The video file to add the watermark to.</param>
62 | /// <param name="watermarkFile">The watermark file to add to the video.</param>
63 | /// <param name="position">The position of the watermark in the video.</param>
64 | /// <param name="outputFile">The path to the output file. If null, the output will not be saved to disk.</param>
65 | /// <param name="outputFormat">The format of the output file.</param>
66 | /// <param name="cancellationToken">A CancellationToken to observe while waiting for the task to complete.</param>
67 | Task AddWaterMarkToVideoAsync(MediaFile videoFile,
68 | MediaFile watermarkFile,
69 | PositionType position,
70 | string? outputFile = null,
71 | FileFormatType? outputFormat = null,
72 | CancellationToken? cancellationToken = null);
73 |
74 | /// <summary>
75 | /// Extracts the video from a media file asynchronously.
76 | /// </summary>
77 | /// <param name="file">The media file from which to extract the video.</param>
78 | /// <param name="outputFile">The output file path for the extracted video.</param>
79 | /// <param name="outputFormat">The format of the output file.</param>
80 | /// <param name="cancellationToken">A cancellation token to cancel the operation. Default is null.</param>
81 | /// <returns>A task representing the asynchronous operation.</returns>
82 | Task ExtractVideoFromFileAsync(MediaFile file,
83 | string? outputFile = null,
84 | FileFormatType? outputFormat = null,
85 | CancellationToken? cancellationToken = null);
86 |
87 | /// <summary>
88 | /// Adds audio to a video file asynchronously.
89 | /// </summary>
90 | /// <param name="audioFile">The audio file to add to the video.</param>
91 | /// <param name="videoFile">The video file to add the audio to.</param>
92 | /// <param name="outputFile">The output file path for the video with the added audio track.</param>
93 | /// <param name="outputFormat">The output format type of the video with the added audio track.</param>
94 | /// <param name="cancellationToken">A cancellation token to cancel the asynchronous operation. Default is null.</param>
95 | Task AddAudioToVideoAsync(MediaFile audioFile,
96 | MediaFile videoFile,
97 | string? outputFile = null,
98 | FileFormatType? outputFormat = null,
99 | CancellationToken? cancellationToken = null);
100 |
101 | /// <summary>
102 | /// Converts a video file to a GIF file and saves it to the specified output path.
103 | /// </summary>
104 | /// <param name="file">The video file to be converted.</param>
105 | /// <param name="fps">The number of frames per second for the output GIF.</param>
106 | /// <param name="scale">The scale of the output GIF in pixels.</param>
107 | /// <param name="loop">The number of times the output GIF will loop.</param>
108 | /// <param name="outputFile">The path where the output GIF will be saved.</param>
109 | /// <param name="cancellationToken">A cancellation token that can be used to cancel the operation.</param>
110 | Task ConvertVideoToGifAsync(MediaFile file, int fps, int scale, int loop, string? outputFile = null, CancellationToken? cancellationToken = null);
111 |
112 | /// <summary>
113 | /// Asynchronously compresses the input video file.
114 | /// </summary>
115 | /// <param name="file">The input media file to be compressed.</param>
116 | /// <param name="compressionRatio">The compression ratio to be used for compression.</param>
117 | /// <param name="output">The desired name of the output file, including the file extension.</param>
118 | /// <param name="cancellationToken">An optional cancellation token to stop the compression process.</param>
119 | Task CompressVideoAsync(MediaFile file,
120 | int compressionRatio,
121 | string? output = null,
122 | CancellationToken? cancellationToken = null);
123 |
124 | /// <summary>
125 | /// Concatenates multiple videos into one video.
126 | /// </summary>
127 | /// <param name="files">Array of input videos to concatenate.</param>
128 | /// <param name="outputFile">Output file path for the concatenated video.</param>
129 | /// <param name="outputFormat">Format of the output file.</param>
130 | /// <param name="cancellationToken">Cancellation token for cancelling the task.</param>
131 | Task ConcatVideosAsync(MediaFile[] files,
132 | string? outputFile = null,
133 | FileFormatType? outputFormat = null,
134 | CancellationToken? cancellationToken = null);
135 |
136 | /// <summary>
137 | /// Gets information about a video file, such as its size, duration, and bit rate.
138 | /// </summary>
139 | /// <param name="videoFile">The video file for which information needs to be retrieved.</param>
140 | /// <param name="cancellationToken">A cancellation token to cancel the async operation.</param>
141 | /// <returns>A string that contains the video file's information in JSON format.</returns>
142 | Task<string> GetVideoInfoAsync(MediaFile videoFile, CancellationToken? cancellationToken = null);
143 |
144 | /// <summary>
145 | /// Asynchronously adds hard subtitles to a video file.
146 | /// </summary>
147 | /// <param name="videoFile">The video file to add subtitles to.</param>
148 | /// <param name="subsFile">The subtitles file to add to the video.</param>
149 | /// <param name="language">The language of the subtitles.</param>
150 | /// <param name="outputFile">The output file name for the processed video file with added subtitles.</param>
151 | /// <param name="outputFormat">The format type for the output file.</param>
152 | /// <param name="cancellationToken">A CancellationToken that can be used to cancel the operation.</param>
153 | Task AddSubtitlesAsync(MediaFile videoFile,
154 | MediaFile subsFile,
155 | LanguageType language,
156 | string? outputFile = null,
157 | FileFormatType? outputFormat = null,
158 | CancellationToken? cancellationToken = null);
159 | }
160 |
--------------------------------------------------------------------------------
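Similarly, a rough usage sketch for IVideoFileProcessor above. It assumes VideoFileProcessor (Processors/VideoFileProcessor.cs) is the concrete implementation with a parameterless constructor; the MediaFile constructor, the enum member names (FileFormatType.MP3/MP4, PositionType.UpperLeft) and the file paths are illustrative assumptions only, not taken from this dump.

using System;
using MediaFileProcessor.Models.Common;
using MediaFileProcessor.Models.Enums;
using MediaFileProcessor.Processors;
using MediaFileProcessor.Processors.Interfaces;

// Hypothetical wiring: constructor shapes and enum member names are assumptions.
IVideoFileProcessor videoProcessor = new VideoFileProcessor();
var video = new MediaFile(@"testFiles/sample.mp4");                               // assumed MediaFile(string path)
var watermark = new MediaFile(@"MediaFileProcessor/MediaFileProcessor/ava.jpg");

// Pull the audio track out into an MP3 file.
await videoProcessor.ExtractAudioFromVideoAsync(video, "audio.mp3", FileFormatType.MP3);

// Burn a watermark into a corner of the frame and save the result as MP4.
await videoProcessor.AddWaterMarkToVideoAsync(video, watermark, PositionType.UpperLeft, "watermarked.mp4", FileFormatType.MP4);

// Retrieve the video metadata (size, duration, bit rate) as a JSON string.
string info = await videoProcessor.GetVideoInfoAsync(video);
Console.WriteLine(info);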
/MediaFileProcessor/MediaFileProcessor/ava.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/MediaFileProcessor/MediaFileProcessor/ava.jpg
--------------------------------------------------------------------------------
/testFiles/issue.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/issue.mp4
--------------------------------------------------------------------------------
/testFiles/sample.3gp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.3gp
--------------------------------------------------------------------------------
/testFiles/sample.aac:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.aac
--------------------------------------------------------------------------------
/testFiles/sample.asf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.asf
--------------------------------------------------------------------------------
/testFiles/sample.avi:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.avi
--------------------------------------------------------------------------------
/testFiles/sample.bin:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.bin
--------------------------------------------------------------------------------
/testFiles/sample.bmp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.bmp
--------------------------------------------------------------------------------
/testFiles/sample.flac:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.flac
--------------------------------------------------------------------------------
/testFiles/sample.flv:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.flv
--------------------------------------------------------------------------------
/testFiles/sample.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.gif
--------------------------------------------------------------------------------
/testFiles/sample.gxf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.gxf
--------------------------------------------------------------------------------
/testFiles/sample.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.ico
--------------------------------------------------------------------------------
/testFiles/sample.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.jpg
--------------------------------------------------------------------------------
/testFiles/sample.m2ts:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.m2ts
--------------------------------------------------------------------------------
/testFiles/sample.m4v:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.m4v
--------------------------------------------------------------------------------
/testFiles/sample.mkv:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.mkv
--------------------------------------------------------------------------------
/testFiles/sample.mov:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.mov
--------------------------------------------------------------------------------
/testFiles/sample.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.mp3
--------------------------------------------------------------------------------
/testFiles/sample.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.mp4
--------------------------------------------------------------------------------
/testFiles/sample.mpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.mpeg
--------------------------------------------------------------------------------
/testFiles/sample.mxf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.mxf
--------------------------------------------------------------------------------
/testFiles/sample.ogg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.ogg
--------------------------------------------------------------------------------
/testFiles/sample.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.png
--------------------------------------------------------------------------------
/testFiles/sample.psd:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.psd
--------------------------------------------------------------------------------
/testFiles/sample.rm:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.rm
--------------------------------------------------------------------------------
/testFiles/sample.tiff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.tiff
--------------------------------------------------------------------------------
/testFiles/sample.ts:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.ts
--------------------------------------------------------------------------------
/testFiles/sample.vob:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.vob
--------------------------------------------------------------------------------
/testFiles/sample.wav:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.wav
--------------------------------------------------------------------------------
/testFiles/sample.webm:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.webm
--------------------------------------------------------------------------------
/testFiles/sample.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.webp
--------------------------------------------------------------------------------
/testFiles/sample.wma:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.wma
--------------------------------------------------------------------------------
/testFiles/sample.wmv:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/askatmaster/MediaFileProcessor/28b340afa6deae303ed995e56a8d2c1250e3f383/testFiles/sample.wmv
--------------------------------------------------------------------------------