├── .gitattributes
├── .gitignore
├── FFMediaToolkit.sln
├── FFMediaToolkit.snk
├── FFMediaToolkit
│   ├── Audio
│   │   ├── AudioData.cs
│   │   └── SampleFormat.cs
│   ├── Common
│   │   ├── ContainerMetadata.cs
│   │   ├── FFDictionary.cs
│   │   ├── Internal
│   │   │   ├── AudioFrame.cs
│   │   │   ├── ImageConverter.cs
│   │   │   ├── MediaFrame.cs
│   │   │   ├── MediaPacket.cs
│   │   │   └── VideoFrame.cs
│   │   ├── MediaType.cs
│   │   └── Wrapper{T}.cs
│   ├── Decoding
│   │   ├── AudioStream.cs
│   │   ├── AudioStreamInfo.cs
│   │   ├── ContainerOptions.cs
│   │   ├── Internal
│   │   │   ├── AvioStream.cs
│   │   │   ├── Decoder.cs
│   │   │   ├── DecoderFactory.cs
│   │   │   └── InputContainer.cs
│   │   ├── MediaChapter.cs
│   │   ├── MediaFile.cs
│   │   ├── MediaInfo.cs
│   │   ├── MediaOptions.cs
│   │   ├── MediaStream.cs
│   │   ├── StreamInfo.cs
│   │   ├── VideoStream.cs
│   │   └── VideoStreamInfo.cs
│   ├── Encoding
│   │   ├── AudioCodec.cs
│   │   ├── AudioEncoderSettings.cs
│   │   ├── AudioOutputStream.cs
│   │   ├── ContainerFormat.cs
│   │   ├── EncoderPreset.cs
│   │   ├── Internal
│   │   │   ├── OutputContainer.cs
│   │   │   ├── OutputStreamFactory.cs
│   │   │   └── OutputStream{TFrame}.cs
│   │   ├── MediaBuilder.cs
│   │   ├── MediaOutput.cs
│   │   ├── VideoCodec.cs
│   │   ├── VideoEncoderSettings.cs
│   │   └── VideoOutputStream.cs
│   ├── FFMediaToolkit.csproj
│   ├── FFMediaToolkit.ruleset
│   ├── FFmpegException.cs
│   ├── FFmpegLoader.cs
│   ├── Graphics
│   │   ├── ImageData.cs
│   │   └── ImagePixelFormat.cs
│   ├── Helpers
│   │   ├── ExceptionHandler.cs
│   │   ├── Extensions.cs
│   │   ├── MathHelper.cs
│   │   └── StringConverter.cs
│   ├── Interop
│   │   └── NativeMethods.cs
│   └── LogLevel.cs
├── LICENSE
├── README.md
├── appveyor.yml
└── build.ps1
/.gitattributes:
--------------------------------------------------------------------------------
1 | ###############################################################################
2 | # Set default behavior to automatically normalize line endings.
3 | ###############################################################################
4 | * text=auto
5 |
6 | ###############################################################################
7 | # Set default behavior for command prompt diff.
8 | #
9 | # This is needed for earlier builds of msysgit that do not have it on by
10 | # default for csharp files.
11 | # Note: This is only used by command line
12 | ###############################################################################
13 | #*.cs diff=csharp
14 |
15 | ###############################################################################
16 | # Set the merge driver for project and solution files
17 | #
18 | # Merging from the command prompt will add diff markers to the files if there
19 | # are conflicts (Merging from VS is not affected by the settings below, in VS
20 | # the diff markers are never inserted). Diff markers may cause the following
21 | # file extensions to fail to load in VS. An alternative would be to treat
22 | # these files as binary and thus will always conflict and require user
23 | # intervention with every merge. To do so, just uncomment the entries below
24 | ###############################################################################
25 | #*.sln merge=binary
26 | #*.csproj merge=binary
27 | #*.vbproj merge=binary
28 | #*.vcxproj merge=binary
29 | #*.vcproj merge=binary
30 | #*.dbproj merge=binary
31 | #*.fsproj merge=binary
32 | #*.lsproj merge=binary
33 | #*.wixproj merge=binary
34 | #*.modelproj merge=binary
35 | #*.sqlproj merge=binary
36 | #*.wwaproj merge=binary
37 |
38 | ###############################################################################
39 | # behavior for image files
40 | #
41 | # image files are treated as binary by default.
42 | ###############################################################################
43 | #*.jpg binary
44 | #*.png binary
45 | #*.gif binary
46 |
47 | ###############################################################################
48 | # diff behavior for common document formats
49 | #
50 | # Convert binary document formats to text before diffing them. This feature
51 | # is only available from the command line. Turn it on by uncommenting the
52 | # entries below.
53 | ###############################################################################
54 | #*.doc diff=astextplain
55 | #*.DOC diff=astextplain
56 | #*.docx diff=astextplain
57 | #*.DOCX diff=astextplain
58 | #*.dot diff=astextplain
59 | #*.DOT diff=astextplain
60 | #*.pdf diff=astextplain
61 | #*.PDF diff=astextplain
62 | #*.rtf diff=astextplain
63 | #*.RTF diff=astextplain
64 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | ## Ignore Visual Studio temporary files, build results, and
2 | ## files generated by popular Visual Studio add-ons.
3 | ##
4 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
5 |
6 | # User-specific files
7 | *.rsuser
8 | *.suo
9 | *.user
10 | *.userosscache
11 | *.sln.docstates
12 |
13 | # User-specific files (MonoDevelop/Xamarin Studio)
14 | *.userprefs
15 |
16 | # Build results
17 | [Dd]ebug/
18 | [Dd]ebugPublic/
19 | [Rr]elease/
20 | [Rr]eleases/
21 | x64/
22 | x86/
23 | [Aa][Rr][Mm]/
24 | [Aa][Rr][Mm]64/
25 | bld/
26 | [Bb]in/
27 | [Oo]bj/
28 | [Ll]og/
29 |
30 | # Visual Studio 2015/2017 cache/options directory
31 | .vs/
32 | # Uncomment if you have tasks that create the project's static files in wwwroot
33 | #wwwroot/
34 |
35 | # Visual Studio 2017 auto generated files
36 | Generated\ Files/
37 |
38 | # MSTest test Results
39 | [Tt]est[Rr]esult*/
40 | [Bb]uild[Ll]og.*
41 |
42 | # NUNIT
43 | *.VisualState.xml
44 | TestResult.xml
45 |
46 | # Build Results of an ATL Project
47 | [Dd]ebugPS/
48 | [Rr]eleasePS/
49 | dlldata.c
50 |
51 | # Benchmark Results
52 | BenchmarkDotNet.Artifacts/
53 |
54 | # .NET Core
55 | project.lock.json
56 | project.fragment.lock.json
57 | artifacts/
58 |
59 | # StyleCop
60 | StyleCopReport.xml
61 |
62 | # Files built by Visual Studio
63 | *_i.c
64 | *_p.c
65 | *_h.h
66 | *.ilk
67 | *.meta
68 | *.obj
69 | *.iobj
70 | *.pch
71 | *.pdb
72 | *.ipdb
73 | *.pgc
74 | *.pgd
75 | *.rsp
76 | *.sbr
77 | *.tlb
78 | *.tli
79 | *.tlh
80 | *.tmp
81 | *.tmp_proj
82 | *_wpftmp.csproj
83 | *.log
84 | *.vspscc
85 | *.vssscc
86 | .builds
87 | *.pidb
88 | *.svclog
89 | *.scc
90 |
91 | # Chutzpah Test files
92 | _Chutzpah*
93 |
94 | # Visual C++ cache files
95 | ipch/
96 | *.aps
97 | *.ncb
98 | *.opendb
99 | *.opensdf
100 | *.sdf
101 | *.cachefile
102 | *.VC.db
103 | *.VC.VC.opendb
104 |
105 | # Visual Studio profiler
106 | *.psess
107 | *.vsp
108 | *.vspx
109 | *.sap
110 |
111 | # Visual Studio Trace Files
112 | *.e2e
113 |
114 | # TFS 2012 Local Workspace
115 | $tf/
116 |
117 | # Guidance Automation Toolkit
118 | *.gpState
119 |
120 | # ReSharper is a .NET coding add-in
121 | _ReSharper*/
122 | *.[Rr]e[Ss]harper
123 | *.DotSettings.user
124 |
125 | # JustCode is a .NET coding add-in
126 | .JustCode
127 |
128 | # TeamCity is a build add-in
129 | _TeamCity*
130 |
131 | # DotCover is a Code Coverage Tool
132 | *.dotCover
133 |
134 | # AxoCover is a Code Coverage Tool
135 | .axoCover/*
136 | !.axoCover/settings.json
137 |
138 | # Visual Studio code coverage results
139 | *.coverage
140 | *.coveragexml
141 |
142 | # NCrunch
143 | _NCrunch_*
144 | .*crunch*.local.xml
145 | nCrunchTemp_*
146 |
147 | # MightyMoose
148 | *.mm.*
149 | AutoTest.Net/
150 |
151 | # Web workbench (sass)
152 | .sass-cache/
153 |
154 | # Installshield output folder
155 | [Ee]xpress/
156 |
157 | # DocProject is a documentation generator add-in
158 | DocProject/buildhelp/
159 | DocProject/Help/*.HxT
160 | DocProject/Help/*.HxC
161 | DocProject/Help/*.hhc
162 | DocProject/Help/*.hhk
163 | DocProject/Help/*.hhp
164 | DocProject/Help/Html2
165 | DocProject/Help/html
166 |
167 | # Click-Once directory
168 | publish/
169 |
170 | # Publish Web Output
171 | *.[Pp]ublish.xml
172 | *.azurePubxml
173 | # Note: Comment the next line if you want to checkin your web deploy settings,
174 | # but database connection strings (with potential passwords) will be unencrypted
175 | *.pubxml
176 | *.publishproj
177 |
178 | # Microsoft Azure Web App publish settings. Comment the next line if you want to
179 | # checkin your Azure Web App publish settings, but sensitive information contained
180 | # in these scripts will be unencrypted
181 | PublishScripts/
182 |
183 | # NuGet Packages
184 | *.nupkg
185 | # The packages folder can be ignored because of Package Restore
186 | **/[Pp]ackages/*
187 | # except build/, which is used as an MSBuild target.
188 | !**/[Pp]ackages/build/
189 | # Uncomment if necessary however generally it will be regenerated when needed
190 | #!**/[Pp]ackages/repositories.config
191 | # NuGet v3's project.json files produces more ignorable files
192 | *.nuget.props
193 | *.nuget.targets
194 |
195 | # Microsoft Azure Build Output
196 | csx/
197 | *.build.csdef
198 |
199 | # Microsoft Azure Emulator
200 | ecf/
201 | rcf/
202 |
203 | # Windows Store app package directories and files
204 | AppPackages/
205 | BundleArtifacts/
206 | Package.StoreAssociation.xml
207 | _pkginfo.txt
208 | *.appx
209 |
210 | # Visual Studio cache files
211 | # files ending in .cache can be ignored
212 | *.[Cc]ache
213 | # but keep track of directories ending in .cache
214 | !?*.[Cc]ache/
215 |
216 | # Others
217 | ClientBin/
218 | ~$*
219 | *~
220 | *.dbmdl
221 | *.dbproj.schemaview
222 | *.jfm
223 | *.pfx
224 | *.publishsettings
225 | orleans.codegen.cs
226 |
227 | # Including strong name files can present a security risk
228 | # (https://github.com/github/gitignore/pull/2483#issue-259490424)
229 | #*.snk
230 |
231 | # Since there are multiple workflows, uncomment next line to ignore bower_components
232 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
233 | #bower_components/
234 |
235 | # RIA/Silverlight projects
236 | Generated_Code/
237 |
238 | # Backup & report files from converting an old project file
239 | # to a newer Visual Studio version. Backup files are not needed,
240 | # because we have git ;-)
241 | _UpgradeReport_Files/
242 | Backup*/
243 | UpgradeLog*.XML
244 | UpgradeLog*.htm
245 | ServiceFabricBackup/
246 | *.rptproj.bak
247 |
248 | # SQL Server files
249 | *.mdf
250 | *.ldf
251 | *.ndf
252 |
253 | # Business Intelligence projects
254 | *.rdl.data
255 | *.bim.layout
256 | *.bim_*.settings
257 | *.rptproj.rsuser
258 | *- Backup*.rdl
259 |
260 | # Microsoft Fakes
261 | FakesAssemblies/
262 |
263 | # GhostDoc plugin setting file
264 | *.GhostDoc.xml
265 |
266 | # Node.js Tools for Visual Studio
267 | .ntvs_analysis.dat
268 | node_modules/
269 |
270 | # Visual Studio 6 build log
271 | *.plg
272 |
273 | # Visual Studio 6 workspace options file
274 | *.opt
275 |
276 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
277 | *.vbw
278 |
279 | # Visual Studio LightSwitch build output
280 | **/*.HTMLClient/GeneratedArtifacts
281 | **/*.DesktopClient/GeneratedArtifacts
282 | **/*.DesktopClient/ModelManifest.xml
283 | **/*.Server/GeneratedArtifacts
284 | **/*.Server/ModelManifest.xml
285 | _Pvt_Extensions
286 |
287 | # Paket dependency manager
288 | .paket/paket.exe
289 | paket-files/
290 |
291 | # FAKE - F# Make
292 | .fake/
293 |
294 | # JetBrains Rider
295 | .idea/
296 | *.sln.iml
297 |
298 | # CodeRush personal settings
299 | .cr/personal
300 |
301 | # Python Tools for Visual Studio (PTVS)
302 | __pycache__/
303 | *.pyc
304 |
305 | # Cake - Uncomment if you are using it
306 | # tools/**
307 | # !tools/packages.config
308 |
309 | # Tabs Studio
310 | *.tss
311 |
312 | # Telerik's JustMock configuration file
313 | *.jmconfig
314 |
315 | # BizTalk build output
316 | *.btp.cs
317 | *.btm.cs
318 | *.odx.cs
319 | *.xsd.cs
320 |
321 | # OpenCover UI analysis results
322 | OpenCover/
323 |
324 | # Azure Stream Analytics local run output
325 | ASALocalRun/
326 |
327 | # MSBuild Binary and Structured Log
328 | *.binlog
329 |
330 | # NVidia Nsight GPU debugger configuration file
331 | *.nvuser
332 |
333 | # MFractors (Xamarin productivity tool) working folder
334 | .mfractor/
335 |
336 | # Local History for Visual Studio
337 | .localhistory/
338 |
339 | # BeatPulse healthcheck temp database
340 | healthchecksdb
341 | /.ionide
342 |
--------------------------------------------------------------------------------
/FFMediaToolkit.sln:
--------------------------------------------------------------------------------
1 |
2 | Microsoft Visual Studio Solution File, Format Version 12.00
3 | # Visual Studio Version 16
4 | VisualStudioVersion = 16.0.28721.148
5 | MinimumVisualStudioVersion = 10.0.40219.1
6 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "FFMediaToolkit", "FFMediaToolkit\FFMediaToolkit.csproj", "{F9957DDC-E1BF-4318-BC33-6B7C73C3536E}"
7 | EndProject
8 | Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Solution Items", "Solution Items", "{1D0BC7FA-6327-4A4F-86CE-3A4C54F876ED}"
9 | ProjectSection(SolutionItems) = preProject
10 | appveyor.yml = appveyor.yml
11 | build.ps1 = build.ps1
12 | README.md = README.md
13 | EndProjectSection
14 | EndProject
15 | Global
16 | GlobalSection(SolutionConfigurationPlatforms) = preSolution
17 | Debug|Any CPU = Debug|Any CPU
18 | Release|Any CPU = Release|Any CPU
19 | EndGlobalSection
20 | GlobalSection(ProjectConfigurationPlatforms) = postSolution
21 | {F9957DDC-E1BF-4318-BC33-6B7C73C3536E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
22 | {F9957DDC-E1BF-4318-BC33-6B7C73C3536E}.Debug|Any CPU.Build.0 = Debug|Any CPU
23 | {F9957DDC-E1BF-4318-BC33-6B7C73C3536E}.Release|Any CPU.ActiveCfg = Release|Any CPU
24 | {F9957DDC-E1BF-4318-BC33-6B7C73C3536E}.Release|Any CPU.Build.0 = Release|Any CPU
25 | EndGlobalSection
26 | GlobalSection(SolutionProperties) = preSolution
27 | HideSolutionNode = FALSE
28 | EndGlobalSection
29 | GlobalSection(ExtensibilityGlobals) = postSolution
30 | SolutionGuid = {A4EC0BD8-5AC2-46FB-9CC1-20AA18D1E60C}
31 | EndGlobalSection
32 | EndGlobal
33 |
--------------------------------------------------------------------------------
/FFMediaToolkit.snk:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/radek-k/FFMediaToolkit/80a113d9ff20d906c6864cface450fc83771018b/FFMediaToolkit.snk
--------------------------------------------------------------------------------
/FFMediaToolkit/Audio/AudioData.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Audio
2 | {
3 | using System;
4 | using FFMediaToolkit.Common.Internal;
5 |
6 | ///
7 | /// Represents a lightweight container for audio data.
8 | ///
9 | public ref struct AudioData
10 | {
11 | private readonly AudioFrame frame;
12 |
13 | ///
14 | /// Initializes a new instance of the struct.
15 | ///
16 | /// frame object containing raw audio data.
17 | internal AudioData(AudioFrame frame)
18 | {
19 | this.frame = frame;
20 | }
21 |
22 | ///
23 | /// Gets the number of samples.
24 | ///
25 | public int NumSamples => frame.NumSamples;
26 |
27 | ///
28 | /// Gets the number of channels.
29 | ///
30 | public int NumChannels => frame.NumChannels;
31 |
32 | ///
33 | /// Fetches raw audio data from this audio frame for specified channel.
34 | ///
35 | /// The index of the audio channel to retrieve, allowed range: [0..NumChannels).
36 | /// The span of samples in the range [-1.0, 1.0].
37 | public Span<float> GetChannelData(uint channel)
38 | {
39 | return frame.GetChannelData(channel);
40 | }
41 |
42 | ///
43 | /// Copies raw multichannel audio data from this frame to a heap allocated array.
44 | ///
45 | ///
46 | /// A jagged array with NumChannels rows and NumSamples columns;
47 | /// samples are in the range [-1.0, 1.0].
48 | ///
49 | public float[][] GetSampleData()
50 | {
51 | return frame.GetSampleData();
52 | }
53 |
54 | ///
55 | /// Updates the specified channel of this audio frame with the given sample data.
56 | ///
57 | /// An array of samples with length NumSamples.
58 | /// The index of the audio channel to update, allowed range: [0..NumChannels).
59 | public void UpdateChannelData(float[] samples, uint channel)
60 | {
61 | frame.UpdateChannelData(samples, channel);
62 | }
63 |
64 | ///
65 | /// Updates this audio frame with the specified multi-channel sample data.
66 | ///
67 | ///
68 | /// A 2D jagged array of multi-channel sample data
69 | /// with NumChannels rows and NumSamples columns.
70 | ///
71 | public void UpdateFromSampleData(float[][] samples)
72 | {
73 | frame.UpdateFromSampleData(samples);
74 | }
75 |
76 | ///
77 | /// Releases all unmanaged resources associated with this instance.
78 | ///
79 | public void Dispose()
80 | {
81 | frame.Dispose();
82 | }
83 | }
84 | }
85 |
--------------------------------------------------------------------------------
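A minimal usage sketch for the AudioData type above, assuming the decoding API that appears later in this dump (MediaFile.Open, MediaFile.Audio, AudioStream.TryGetNextFrame); the input path is hypothetical.

    using System;
    using FFMediaToolkit.Decoding;

    // Hypothetical input path; MediaFile and AudioStream are shown later in this repository.
    using var file = MediaFile.Open(@"C:\media\sample.mp3");

    while (file.Audio.TryGetNextFrame(out var audio))
    {
        // AudioData is a ref struct over the converted frame, so read it before requesting the next one.
        Span<float> left = audio.GetChannelData(0);   // planar float samples in [-1.0, 1.0]
        Console.WriteLine($"{audio.NumChannels} channels x {left.Length} samples");
        audio.Dispose();                              // releases the underlying converted frame
    }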
/FFMediaToolkit/Audio/SampleFormat.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Audio
2 | {
3 | using FFmpeg.AutoGen;
4 |
5 | ///
6 | /// Enumerates common audio sample formats supported by FFmpeg.
7 | ///
8 | public enum SampleFormat
9 | {
10 | ///
11 | /// Unsupported/Unknown.
12 | ///
13 | None = AVSampleFormat.AV_SAMPLE_FMT_NONE,
14 |
15 | ///
16 | /// Unsigned 8-bit integer.
17 | ///
18 | UnsignedByte = AVSampleFormat.AV_SAMPLE_FMT_U8,
19 |
20 | ///
21 | /// Signed 16-bit integer.
22 | ///
23 | SignedWord = AVSampleFormat.AV_SAMPLE_FMT_S16,
24 |
25 | ///
26 | /// Signed 32-bit integer.
27 | ///
28 | SignedDWord = AVSampleFormat.AV_SAMPLE_FMT_S32,
29 |
30 | ///
31 | /// Single precision floating point.
32 | ///
33 | Single = AVSampleFormat.AV_SAMPLE_FMT_FLT,
34 |
35 | ///
36 | /// Double precision floating point.
37 | ///
38 | Double = AVSampleFormat.AV_SAMPLE_FMT_DBL,
39 |
40 | ///
41 | /// Signed 16-bit integer (planar).
42 | ///
43 | SignedWordP = AVSampleFormat.AV_SAMPLE_FMT_S16P,
44 |
45 | ///
46 | /// Signed 32-bit integer (planar).
47 | ///
48 | SignedDWordP = AVSampleFormat.AV_SAMPLE_FMT_S32P,
49 |
50 | ///
51 | /// Single precision floating point (planar).
52 | ///
53 | SingleP = AVSampleFormat.AV_SAMPLE_FMT_FLTP,
54 |
55 | ///
56 | /// Double precision floating point (planar).
57 | ///
58 | DoubleP = AVSampleFormat.AV_SAMPLE_FMT_DBLP,
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Common/ContainerMetadata.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Common
2 | {
3 | using System.Collections.Generic;
4 | using FFmpeg.AutoGen;
5 |
6 | ///
7 | /// Represents multimedia file metadata info.
8 | ///
9 | public class ContainerMetadata
10 | {
11 | private const string TitleKey = "title";
12 | private const string AuthorKey = "author";
13 | private const string AlbumKey = "album";
14 | private const string YearKey = "year";
15 | private const string GenreKey = "genre";
16 | private const string DescriptionKey = "description";
17 | private const string LanguageKey = "language";
18 | private const string CopyrightKey = "copyright";
19 | private const string RatingKey = "rating";
20 | private const string TrackKey = "track";
21 | private const string DateKey = "date";
22 |
23 | ///
24 | /// Initializes a new instance of the class.
25 | ///
26 | public ContainerMetadata() => Metadata = new Dictionary<string, string>();
27 |
28 | ///
29 | /// Initializes a new instance of the class.
30 | ///
31 | /// The source metadata dictionary.
32 | internal unsafe ContainerMetadata(AVDictionary* sourceMetadata)
33 | => Metadata = FFDictionary.ToDictionary(sourceMetadata, true);
34 |
35 | ///
36 | /// Gets or sets the multimedia title.
37 | ///
38 | public string Title
39 | {
40 | get => Metadata.ContainsKey(TitleKey) ? Metadata[TitleKey] : string.Empty;
41 | set => Metadata[TitleKey] = value;
42 | }
43 |
44 | ///
45 | /// Gets or sets the multimedia author info.
46 | ///
47 | public string Author
48 | {
49 | get => Metadata.ContainsKey(AuthorKey) ? Metadata[AuthorKey] : string.Empty;
50 | set => Metadata[AuthorKey] = value;
51 | }
52 |
53 | ///
54 | /// Gets or sets the multimedia album name.
55 | ///
56 | public string Album
57 | {
58 | get => Metadata.ContainsKey(AlbumKey) ? Metadata[AlbumKey] : string.Empty;
59 | set => Metadata[AlbumKey] = value;
60 | }
61 |
62 | ///
63 | /// Gets or sets multimedia release date/year.
64 | ///
65 | public string Year
66 | {
67 | get => Metadata.ContainsKey(YearKey)
68 | ? Metadata[YearKey]
69 | : (Metadata.ContainsKey(DateKey) ? Metadata[DateKey] : string.Empty);
70 | set => Metadata[YearKey] = value;
71 | }
72 |
73 | ///
74 | /// Gets or sets the multimedia genre.
75 | ///
76 | public string Genre
77 | {
78 | get => Metadata.ContainsKey(GenreKey) ? Metadata[GenreKey] : string.Empty;
79 | set => Metadata[GenreKey] = value;
80 | }
81 |
82 | ///
83 | /// Gets or sets the multimedia description.
84 | ///
85 | public string Description
86 | {
87 | get => Metadata.ContainsKey(DescriptionKey) ? Metadata[DescriptionKey] : string.Empty;
88 | set => Metadata[DescriptionKey] = value;
89 | }
90 |
91 | ///
92 | /// Gets or sets the multimedia language.
93 | ///
94 | public string Language
95 | {
96 | get => Metadata.ContainsKey(LanguageKey) ? Metadata[LanguageKey] : string.Empty;
97 | set => Metadata[LanguageKey] = value;
98 | }
99 |
100 | ///
101 | /// Gets or sets the multimedia copyright info.
102 | ///
103 | public string Copyright
104 | {
105 | get => Metadata.ContainsKey(CopyrightKey) ? Metadata[CopyrightKey] : string.Empty;
106 | set => Metadata[CopyrightKey] = value;
107 | }
108 |
109 | ///
110 | /// Gets or sets the multimedia rating.
111 | ///
112 | public string Rating
113 | {
114 | get => Metadata.ContainsKey(RatingKey) ? Metadata[RatingKey] : string.Empty;
115 | set => Metadata[RatingKey] = value;
116 | }
117 |
118 | ///
119 | /// Gets or sets the multimedia track number string.
120 | ///
121 | public string TrackNumber
122 | {
123 | get => Metadata.ContainsKey(TrackKey) ? Metadata[TrackKey] : string.Empty;
124 | set => Metadata[TrackKey] = value;
125 | }
126 |
127 | ///
128 | /// Gets or sets the dictionary containing all metadata fields.
129 | ///
130 | public Dictionary<string, string> Metadata { get; set; }
131 | }
132 | }
133 |
--------------------------------------------------------------------------------
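A short, illustrative sketch of reading the metadata class above, assuming it is exposed through MediaFile.Info.Metadata as in the rest of this library; the path is hypothetical.

    using System;
    using FFMediaToolkit.Decoding;

    using var file = MediaFile.Open(@"C:\media\song.mp3");
    var meta = file.Info.Metadata;

    Console.WriteLine($"{meta.Title} - {meta.Author} ({meta.Year})");

    // Raw key/value pairs, including tags that have no typed property:
    foreach (var pair in meta.Metadata)
    {
        Console.WriteLine($"{pair.Key} = {pair.Value}");
    }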
/FFMediaToolkit/Common/FFDictionary.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Common
2 | {
3 | using System;
4 | using System.Collections.Generic;
5 | using FFMediaToolkit.Helpers;
6 | using FFmpeg.AutoGen;
7 |
8 | ///
9 | /// Represents a wrapper of an FFmpeg AVDictionary. Used for applying codec and container settings.
10 | ///
11 | internal unsafe class FFDictionary : Wrapper<AVDictionary>
12 | {
13 | private bool requireDisposing;
14 |
15 | ///
16 | /// Initializes a new instance of the class.
17 | ///
18 | /// Should the dictionary be disposed automatically?.
19 | public FFDictionary(bool dispose = true)
20 | : base(null)
21 | {
22 | requireDisposing = dispose;
23 | }
24 |
25 | ///
26 | /// Initializes a new instance of the class.
27 | ///
28 | /// The dictionary to copy.
29 | /// Should the dictionary be disposed automatically?.
30 | public FFDictionary(Dictionary<string, string> dictionary, bool dispose = true)
31 | : base(null)
32 | {
33 | Copy(dictionary);
34 | requireDisposing = dispose;
35 | }
36 |
37 | ///
38 | /// Gets the number of elements in the dictionary.
39 | ///
40 | public int Count => Pointer == null ? 0 : ffmpeg.av_dict_count(Pointer);
41 |
42 | ///
43 | /// Gets or sets the value with the specified key.
44 | ///
45 | /// The key.
46 | /// The value.
47 | public string this[string key]
48 | {
49 | get => Get(key);
50 | set => Set(key, value);
51 | }
52 |
53 | ///
54 | /// Converts a native AVDictionary to a managed string dictionary.
55 | ///
56 | /// The AVDictionary to convert.
57 | /// If set, the AV_DICT_IGNORE_SUFFIX flag will be used.
58 | /// The converted dictionary.
59 | public static Dictionary<string, string> ToDictionary(AVDictionary* dictionary, bool ignoreSuffix = false)
60 | {
61 | var result = new Dictionary<string, string>();
62 |
63 | var item = ffmpeg.av_dict_get(dictionary, string.Empty, null, ignoreSuffix ? ffmpeg.AV_DICT_IGNORE_SUFFIX : 0);
64 |
65 | while (item != null)
66 | {
67 | result[new IntPtr(item->key).Utf8ToString()] = new IntPtr(item->value).Utf8ToString();
68 | item = ffmpeg.av_dict_get(dictionary, string.Empty, item, ignoreSuffix ? ffmpeg.AV_DICT_IGNORE_SUFFIX : 0);
69 | }
70 |
71 | return result;
72 | }
73 |
74 | ///
75 | /// Gets the value with specified key.
76 | ///
77 | /// The dictionary key.
78 | /// Whether the key match is case-sensitive.
79 | /// The value with the specified key, or null if the key does not exist.
80 | public string Get(string key, bool matchCase = true)
81 | {
82 | var ptr = ffmpeg.av_dict_get(Pointer, key, null, matchCase ? ffmpeg.AV_DICT_MATCH_CASE : 0);
83 | return ptr != null ? new IntPtr(ptr).Utf8ToString() : null;
84 | }
85 |
86 | ///
87 | /// Sets the value for the specified key.
88 | ///
89 | /// The key.
90 | /// The value.
91 | public void Set(string key, string value)
92 | {
93 | var ptr = Pointer;
94 | ffmpeg.av_dict_set(&ptr, key, value, 0);
95 | UpdatePointer(ptr);
96 | }
97 |
98 | ///
99 | /// Copies items from the specified dictionary to this FFDictionary.
100 | ///
101 | /// The dictionary to copy.
102 | public void Copy(Dictionary<string, string> dictionary)
103 | {
104 | foreach (var item in dictionary)
105 | {
106 | this[item.Key] = item.Value;
107 | }
108 | }
109 |
110 | ///
111 | /// Updates the pointer to the dictionary.
112 | ///
113 | /// The pointer to the AVDictionary.
114 | internal void Update(AVDictionary* pointer) => UpdatePointer(pointer);
115 |
116 | ///
117 | protected override void OnDisposing()
118 | {
119 | if (requireDisposing && Pointer != null && Count > 0)
120 | {
121 | var ptr = Pointer;
122 | ffmpeg.av_dict_free(&ptr);
123 | }
124 | }
125 | }
126 | }
127 |
--------------------------------------------------------------------------------
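FFDictionary is internal, so the sketch below only illustrates the pattern it supports: build a native AVDictionary from managed key/value options, hand its pointer to an FFmpeg call, and convert whatever was not consumed back with ToDictionary. The option keys are examples, not an exhaustive list.

    using System.Collections.Generic;
    using FFMediaToolkit.Common;

    var options = new FFDictionary(new Dictionary<string, string>
    {
        ["probesize"] = "5000000",
        ["analyzeduration"] = "2000000",
    });

    // options.Pointer (an AVDictionary*) is what gets passed to calls such as avformat_open_input;
    // afterwards, FFDictionary.ToDictionary(options.Pointer) reads back the remaining entries.
    options.Dispose();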
/FFMediaToolkit/Common/Internal/AudioFrame.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Common.Internal
2 | {
3 | using System;
4 | using FFMediaToolkit.Audio;
5 | using FFMediaToolkit.Helpers;
6 | using FFmpeg.AutoGen;
7 |
8 | ///
9 | /// Represents an audio frame.
10 | ///
11 | internal unsafe class AudioFrame : MediaFrame
12 | {
13 | ///
14 | /// Initializes a new instance of the class with empty frame data.
15 | ///
16 | public AudioFrame()
17 | : base(ffmpeg.av_frame_alloc())
18 | {
19 | }
20 |
21 | ///
22 | /// Initializes a new instance of the class using existing .
23 | ///
24 | /// The audio AVFrame.
25 | public AudioFrame(AVFrame* frame)
26 | : base(frame)
27 | {
28 | if (frame->GetMediaType() != MediaType.Audio)
29 | throw new ArgumentException("Cannot create an AudioFrame instance from the AVFrame with type: " + frame->GetMediaType());
30 | }
31 |
32 | ///
33 | /// Gets the number of samples.
34 | ///
35 | public int NumSamples => Pointer != null ? Pointer->nb_samples : default;
36 |
37 | ///
38 | /// Gets the sample rate.
39 | ///
40 | public int SampleRate => Pointer != null ? Pointer->sample_rate : default;
41 |
42 | ///
43 | /// Gets the number of channels.
44 | ///
45 | public int NumChannels => Pointer != null ? Pointer->ch_layout.nb_channels : default;
46 |
47 | ///
48 | /// Gets the audio sample format.
49 | ///
50 | public SampleFormat SampleFormat => Pointer != null ? (SampleFormat)Pointer->format : SampleFormat.None;
51 |
52 | ///
53 | /// Gets the channel layout.
54 | ///
55 | internal AVChannelLayout ChannelLayout => Pointer != null ? Pointer->ch_layout : default;
56 |
57 | ///
58 | /// Creates an audio frame with given dimensions and allocates a buffer for it.
59 | ///
60 | /// The sample rate of the audio frame.
61 | /// The number of channels in the audio frame.
62 | /// The number of samples in the audio frame.
63 | /// The channel layout to be used by the audio frame.
64 | /// The audio sample format.
65 | /// The timestamp when the frame has to be decoded.
66 | /// The timestamp when the frame has to be presented.
67 | /// The new audio frame.
68 | public static AudioFrame Create(int sample_rate, int num_channels, int num_samples, AVChannelLayout channel_layout, SampleFormat sampleFormat, long decodingTimestamp, long presentationTimestamp)
69 | {
70 | var frame = ffmpeg.av_frame_alloc();
71 |
72 | frame->sample_rate = sample_rate;
73 |
74 | frame->nb_samples = num_samples;
75 | frame->ch_layout = channel_layout;
76 | frame->format = (int)sampleFormat;
77 |
78 | frame->pts = presentationTimestamp;
79 | frame->pkt_dts = decodingTimestamp;
80 |
81 | ffmpeg.av_frame_get_buffer(frame, 32);
82 |
83 | return new AudioFrame(frame);
84 | }
85 |
86 | ///
87 | /// Creates an empty frame for decoding.
88 | ///
89 | /// The empty AudioFrame.
90 | public static AudioFrame CreateEmpty() => new AudioFrame();
91 |
92 | ///
93 | /// Fetches raw audio data from this audio frame for specified channel.
94 | ///
95 | /// The index of the audio channel to retrieve, allowed range: [0..NumChannels).
96 | /// The span of samples in the range [-1.0, 1.0].
97 | public Span<float> GetChannelData(uint channel)
98 | {
99 | if (SampleFormat != SampleFormat.SingleP)
100 | throw new Exception("Cannot extract channel data from an AudioFrame with a SampleFormat not equal to SampleFormat.SingleP");
101 | return new Span<float>(Pointer->data[channel], NumSamples);
102 | }
103 |
104 | ///
105 | /// Copies raw multichannel audio data from this frame to a heap allocated array.
106 | ///
107 | ///
108 | /// A jagged array with NumChannels rows and NumSamples columns;
109 | /// samples are in the range [-1.0, 1.0].
110 | ///
111 | public float[][] GetSampleData()
112 | {
113 | if (SampleFormat != SampleFormat.SingleP)
114 | throw new Exception("Cannot extract sample data from an AudioFrame with a SampleFormat not equal to SampleFormat.SingleP");
115 |
116 | var samples = new float[NumChannels][];
117 |
118 | for (uint ch = 0; ch < NumChannels; ch++)
119 | {
120 | samples[ch] = new float[NumSamples];
121 |
122 | var channelData = GetChannelData(ch);
123 | var sampleData = new Span<float>(samples[ch], 0, NumSamples);
124 |
125 | channelData.CopyTo(sampleData);
126 | }
127 |
128 | return samples;
129 | }
130 |
131 | ///
132 | /// Updates the specified channel of this audio frame with the given sample data.
133 | ///
134 | /// An array of samples with length NumSamples.
135 | /// The index of the audio channel to update, allowed range: [0..NumChannels).
136 | public void UpdateChannelData(float[] samples, uint channel)
137 | {
138 | if (SampleFormat != SampleFormat.SingleP)
139 | throw new Exception("Cannot update channel data of an AudioFrame with a SampleFormat not equal to SampleFormat.SingleP");
140 |
141 | var frameData = GetChannelData(channel);
142 | var sampleData = new Span<float>(samples, 0, NumSamples);
143 |
144 | sampleData.CopyTo(frameData);
145 | }
146 |
147 | ///
148 | /// Updates this audio frame with the specified multi-channel sample data.
149 | ///
150 | ///
151 | /// A 2D jagged array of multi-channel sample data
152 | /// with NumChannels rows and NumSamples columns.
153 | ///
154 | public void UpdateFromSampleData(float[][] samples)
155 | {
156 | if (SampleFormat != SampleFormat.SingleP)
157 | throw new Exception("Cannot update sample data of an AudioFrame with a SampleFormat not equal to SampleFormat.SingleP");
158 |
159 | for (uint ch = 0; ch < NumChannels; ch++)
160 | {
161 | var newData = new Span<float>(samples[ch], 0, NumSamples);
162 | var frameData = GetChannelData(ch);
163 | newData.CopyTo(frameData);
164 | }
165 | }
166 |
167 | ///
168 | /// Updates this audio frame with the specified audio data.
169 | /// (the NumSamples and NumChannels of the audio data
170 | /// should match the respective values for this instance!).
171 | ///
172 | /// The audio data.
173 | public void UpdateFromAudioData(AudioData audioData)
174 | {
175 | if (SampleFormat != SampleFormat.SingleP)
176 | throw new Exception("Cannot update data of an AudioFrame with a SampleFormat not equal to SampleFormat.SingleP");
177 |
178 | for (uint ch = 0; ch < NumChannels; ch++)
179 | {
180 | var newData = audioData.GetChannelData(ch);
181 | var currData = GetChannelData(ch);
182 |
183 | newData.CopyTo(currData);
184 | }
185 | }
186 |
187 | ///
188 | internal override unsafe void Update(AVFrame* newFrame)
189 | {
190 | if (newFrame->GetMediaType() != MediaType.Audio)
191 | {
192 | throw new ArgumentException("The new frame doesn't contain audio data.");
193 | }
194 |
195 | base.Update(newFrame);
196 | }
197 | }
198 | }
199 |
--------------------------------------------------------------------------------
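An illustrative sketch of the internal AudioFrame API above: allocate a planar-float frame and fill one channel. The sample rate, channel count, and frame size are arbitrary values chosen for the example.

    using FFMediaToolkit.Audio;
    using FFMediaToolkit.Common.Internal;
    using FFmpeg.AutoGen;

    unsafe
    {
        AVChannelLayout layout;
        ffmpeg.av_channel_layout_default(&layout, 2); // stereo

        using var frame = AudioFrame.Create(
            sample_rate: 44100,
            num_channels: 2,
            num_samples: 1024,
            channel_layout: layout,
            sampleFormat: SampleFormat.SingleP,
            decodingTimestamp: 0,
            presentationTimestamp: 0);

        // Write 1024 silent samples into channel 0.
        frame.UpdateChannelData(new float[1024], 0);
    }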
/FFMediaToolkit/Common/Internal/ImageConverter.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Common.Internal
2 | {
3 | using System.Drawing;
4 | using FFMediaToolkit.Graphics;
5 | using FFmpeg.AutoGen;
6 |
7 | ///
8 | /// A class used to convert FFmpeg AVFrames to ImageData objects with a specified image size and color format.
9 | ///
10 | internal unsafe class ImageConverter : Wrapper<SwsContext>
11 | {
12 | // sws_scale requires up to 16 extra bytes allocated in the input buffer when resizing an image
13 | // (reference: https://www.ffmpeg.org/doxygen/6.0/frame_8h_source.html#l00340)
14 | private const int BufferPaddingSize = 16;
15 |
16 | private readonly Size destinationSize;
17 | private readonly AVPixelFormat destinationFormat;
18 |
19 | private Size lastSourceSize;
20 | private AVPixelFormat lastSourcePixelFormat;
21 | private byte[] tmpBuffer = { };
22 |
23 | ///
24 | /// Initializes a new instance of the class.
25 | ///
26 | /// Destination image size.
27 | /// Destination image format.
28 | public ImageConverter(Size destinationSize, AVPixelFormat destinationFormat)
29 | : base(null)
30 | {
31 | this.destinationSize = destinationSize;
32 | this.destinationFormat = destinationFormat;
33 | }
34 |
35 | ///
36 | /// Overrides the image buffer with the converted bitmap. Used in encoding.
37 | ///
38 | /// The input bitmap.
39 | /// The VideoFrame to override.
40 | internal void FillAVFrame(ImageData bitmap, VideoFrame destinationFrame)
41 | {
42 | UpdateContext(bitmap.ImageSize, (AVPixelFormat)bitmap.PixelFormat);
43 |
44 | var requiredBufferLength = (bitmap.Stride * bitmap.ImageSize.Height) + BufferPaddingSize;
45 | var shouldUseTmpBuffer = bitmap.ImageSize != destinationSize && bitmap.Data.Length < requiredBufferLength;
46 |
47 | if (shouldUseTmpBuffer)
48 | {
49 | if (tmpBuffer.Length < requiredBufferLength)
50 | {
51 | tmpBuffer = new byte[requiredBufferLength];
52 | }
53 |
54 | bitmap.Data.CopyTo(tmpBuffer);
55 | }
56 |
57 | var source = shouldUseTmpBuffer ? tmpBuffer : bitmap.Data;
58 | fixed (byte* ptr = source)
59 | {
60 | var data = new byte*[4] { ptr, null, null, null };
61 | var linesize = new int[4] { bitmap.Stride, 0, 0, 0 };
62 | ffmpeg.sws_scale(Pointer, data, linesize, 0, bitmap.ImageSize.Height, destinationFrame.Pointer->data, destinationFrame.Pointer->linesize);
63 | }
64 | }
65 |
66 | ///
67 | /// Converts a video AVFrame to the specified bitmap. Used in decoding.
68 | ///
69 | /// The video frame to convert.
70 | /// The destination bitmap data pointer.
71 | /// Size of the single bitmap row.
72 | internal void AVFrameToBitmap(VideoFrame videoFrame, byte* destination, int stride)
73 | {
74 | UpdateContext(videoFrame.Layout, videoFrame.PixelFormat);
75 |
76 | var data = new byte*[4] { destination, null, null, null };
77 | var linesize = new int[4] { stride, 0, 0, 0 };
78 | ffmpeg.sws_scale(Pointer, videoFrame.Pointer->data, videoFrame.Pointer->linesize, 0, videoFrame.Layout.Height, data, linesize);
79 | }
80 |
81 | ///
82 | protected override void OnDisposing() => ffmpeg.sws_freeContext(Pointer);
83 |
84 | private void UpdateContext(Size sourceSize, AVPixelFormat sourceFormat)
85 | {
86 | if (sourceSize != lastSourceSize || sourceFormat != lastSourcePixelFormat)
87 | {
88 | ffmpeg.sws_freeContext(Pointer);
89 |
90 | var scaleMode = sourceSize == destinationSize ? ffmpeg.SWS_POINT : ffmpeg.SWS_BICUBIC;
91 | var swsContext = ffmpeg.sws_getContext(sourceSize.Width, sourceSize.Height, sourceFormat, destinationSize.Width, destinationSize.Height, destinationFormat, scaleMode, null, null, null);
92 |
93 | if (swsContext == null)
94 | {
95 | throw new FFmpegException("Cannot allocate SwsContext.");
96 | }
97 |
98 | UpdatePointer(swsContext);
99 | lastSourceSize = sourceSize;
100 | lastSourcePixelFormat = sourceFormat;
101 | }
102 | }
103 | }
104 | }
105 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Common/Internal/MediaFrame.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Common.Internal
2 | {
3 | using FFmpeg.AutoGen;
4 |
5 | ///
6 | /// Represents a base class of audio and video frames.
7 | ///
8 | internal abstract unsafe class MediaFrame : Wrapper<AVFrame>
9 | {
10 | ///
11 | /// Initializes a new instance of the class.
12 | ///
13 | /// The AVFrame object.
14 | public MediaFrame(AVFrame* frame)
15 | : base(frame)
16 | {
17 | }
18 |
19 | ///
20 | /// Gets or sets the frame PTS value in the stream time base units.
21 | ///
22 | public long PresentationTimestamp
23 | {
24 | get => Pointer->pts;
25 | set => Pointer->pts = value;
26 | }
27 |
28 | ///
29 | /// Gets or sets the frame DTS value in the stream time base units.
30 | ///
31 | public long DecodingTimestamp
32 | {
33 | get => Pointer->pkt_dts;
34 | set => Pointer->pkt_dts = value;
35 | }
36 |
37 | ///
38 | /// Changes the pointer to the media frame.
39 | ///
40 | /// The new pointer to an AVFrame object.
41 | internal virtual void Update(AVFrame* newFrame) => UpdatePointer(newFrame);
42 |
43 | ///
44 | protected override void OnDisposing()
45 | {
46 | var ptr = Pointer;
47 | ffmpeg.av_frame_free(&ptr);
48 | }
49 | }
50 | }
51 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Common/Internal/MediaPacket.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Common.Internal
2 | {
3 | using FFmpeg.AutoGen;
4 |
5 | ///
6 | /// Represents an FFmpeg media packet.
7 | ///
8 | internal unsafe sealed class MediaPacket : Wrapper<AVPacket>
9 | {
10 | ///
11 | /// Initializes a new instance of the class.
12 | ///
13 | /// The AVPacket object.
14 | private MediaPacket(AVPacket* packet)
15 | : base(packet)
16 | {
17 | }
18 |
19 | ///
20 | /// Gets or sets a value indicating whether this packet is a key packet.
21 | ///
22 | public bool IsKeyPacket
23 | {
24 | get => (Pointer->flags & ffmpeg.AV_PKT_FLAG_KEY) > 0;
25 | set => Pointer->flags |= value ? ffmpeg.AV_PKT_FLAG_KEY : ~ffmpeg.AV_PKT_FLAG_KEY;
26 | }
27 |
28 | ///
29 | /// Gets or sets the stream index.
30 | ///
31 | public int StreamIndex
32 | {
33 | get => Pointer->stream_index;
34 | set => Pointer->stream_index = value;
35 | }
36 |
37 | ///
38 | /// Gets the presentation timestamp of the packet, or null if the pts value is AV_NOPTS_VALUE.
39 | ///
40 | public long? Timestamp => Pointer->pts != ffmpeg.AV_NOPTS_VALUE ? Pointer->pts : (long?)null;
41 |
42 | ///
43 | /// Converts an instance of MediaPacket to the unmanaged AVPacket pointer.
44 | ///
45 | /// A MediaPacket instance.
46 | public static implicit operator AVPacket*(MediaPacket packet) => packet.Pointer;
47 |
48 | ///
49 | /// Allocates a new empty packet.
50 | ///
51 | /// The new MediaPacket.
52 | public static MediaPacket AllocateEmpty()
53 | {
54 | var packet = ffmpeg.av_packet_alloc();
55 | packet->stream_index = -1;
56 | return new MediaPacket(packet);
57 | }
58 |
59 | ///
60 | /// Creates a flush packet.
61 | ///
62 | /// The stream index.
63 | /// The flush packet.
64 | public static MediaPacket CreateFlushPacket(int streamIndex)
65 | {
66 | var packet = ffmpeg.av_packet_alloc();
67 | packet->stream_index = streamIndex;
68 | packet->data = null;
69 | packet->size = 0;
70 | return new MediaPacket(packet);
71 | }
72 |
73 | ///
74 | /// Sets valid PTS/DTS values. Used only in encoding.
75 | ///
76 | /// The encoder time base.
77 | /// The time base of media stream.
78 | public void RescaleTimestamp(AVRational codecTimeBase, AVRational streamTimeBase) => ffmpeg.av_packet_rescale_ts(Pointer, codecTimeBase, streamTimeBase);
79 |
80 | ///
81 | /// Wipes the packet data.
82 | ///
83 | public void Wipe() => ffmpeg.av_packet_unref(Pointer);
84 |
85 | ///
86 | protected override void OnDisposing()
87 | {
88 | var ptr = Pointer;
89 | ffmpeg.av_packet_free(&ptr);
90 | }
91 | }
92 | }
93 |
--------------------------------------------------------------------------------
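MediaPacket is internal; this sketch shows the demuxing loop shape it is designed for, relying on the implicit AVPacket* conversion when calling into FFmpeg. The format context is assumed to have been opened elsewhere.

    using FFMediaToolkit.Common.Internal;
    using FFmpeg.AutoGen;

    internal static class PacketLoopSketch
    {
        // Reads every packet from an already-opened container, recycling a single MediaPacket.
        public static unsafe void DrainPackets(AVFormatContext* format)
        {
            using var packet = MediaPacket.AllocateEmpty();

            while (ffmpeg.av_read_frame(format, packet) >= 0) // MediaPacket converts to AVPacket*
            {
                // ...dispatch on packet.StreamIndex to the appropriate decoder...
                packet.Wipe(); // av_packet_unref, so the same packet can be refilled
            }
        }
    }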
/FFMediaToolkit/Common/Internal/VideoFrame.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Common.Internal
2 | {
3 | using System;
4 | using System.Drawing;
5 | using FFMediaToolkit.Graphics;
6 | using FFMediaToolkit.Helpers;
7 | using FFmpeg.AutoGen;
8 |
9 | ///
10 | /// Represents a video frame.
11 | ///
12 | internal unsafe class VideoFrame : MediaFrame
13 | {
14 | ///
15 | /// Initializes a new instance of the class with empty frame data.
16 | ///
17 | public VideoFrame()
18 | : base(ffmpeg.av_frame_alloc())
19 | {
20 | }
21 |
22 | ///
23 | /// Initializes a new instance of the class using existing .
24 | ///
25 | /// The video AVFrame.
26 | public VideoFrame(AVFrame* frame)
27 | : base(frame)
28 | {
29 | if (frame->GetMediaType() == MediaType.Audio)
30 | throw new ArgumentException("Cannot create a VideoFrame instance from the AVFrame containing audio.");
31 | }
32 |
33 | ///
34 | /// Gets the frame dimensions.
35 | ///
36 | public Size Layout => Pointer != null ? new Size(Pointer->width, Pointer->height) : default;
37 |
38 | ///
39 | /// Gets the frame pixel format.
40 | ///
41 | public AVPixelFormat PixelFormat => Pointer != null ? (AVPixelFormat)Pointer->format : default;
42 |
43 | ///
44 | /// Creates a video frame with given dimensions and allocates a buffer for it.
45 | ///
46 | /// The dimensions of the video frame.
47 | /// The video pixel format.
48 | /// The new video frame.
49 | public static VideoFrame Create(Size dimensions, AVPixelFormat pixelFormat)
50 | {
51 | var frame = ffmpeg.av_frame_alloc();
52 |
53 | frame->width = dimensions.Width;
54 | frame->height = dimensions.Height;
55 | frame->format = (int)pixelFormat;
56 |
57 | ffmpeg.av_frame_get_buffer(frame, 32);
58 |
59 | return new VideoFrame(frame);
60 | }
61 |
62 | ///
63 | /// Creates an empty frame for decoding.
64 | ///
65 | /// The empty VideoFrame.
66 | public static VideoFrame CreateEmpty() => new VideoFrame();
67 |
68 | ///
69 | /// Overrides this video frame data with the converted bitmap using the specified ImageConverter object.
70 | ///
71 | /// The bitmap to convert.
72 | /// An ImageConverter object, used for caching the FFmpeg SwsContext when converting many frames of the same video.
73 | public void UpdateFromBitmap(ImageData bitmap, ImageConverter converter) => converter.FillAVFrame(bitmap, this);
74 |
75 | ///
76 | /// Converts this video frame to an ImageData bitmap with the specified pixel format.
77 | ///
78 | /// An ImageConverter object, used for caching the FFmpeg SwsContext when converting many frames of the same video.
79 | /// The output bitmap pixel format.
80 | /// The output bitmap size.
81 | /// An ImageData instance containing the converted bitmap data.
82 | public ImageData ToBitmap(ImageConverter converter, ImagePixelFormat targetFormat, Size targetSize)
83 | {
84 | var bitmap = ImageData.CreatePooled(targetSize, targetFormat); // Rents memory for the output bitmap.
85 | fixed (byte* ptr = bitmap.Data)
86 | {
87 | // Converts the raw video frame using the given size and pixel format and writes it to the ImageData bitmap.
88 | converter.AVFrameToBitmap(this, ptr, bitmap.Stride);
89 | }
90 |
91 | return bitmap;
92 | }
93 |
94 | ///
95 | internal override unsafe void Update(AVFrame* newFrame)
96 | {
97 | if (newFrame->GetMediaType() != MediaType.Video)
98 | {
99 | throw new ArgumentException("The new frame doesn't contain video data.");
100 | }
101 |
102 | base.Update(newFrame);
103 | }
104 | }
105 | }
106 |
--------------------------------------------------------------------------------
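An internal-API sketch of the decode-side conversion path above, producing a BGR24 bitmap the same size as the source frame; the pixel format choice is arbitrary for the example.

    using FFMediaToolkit.Common.Internal;
    using FFMediaToolkit.Graphics;
    using FFmpeg.AutoGen;

    internal static class FrameConversionSketch
    {
        public static ImageData ToBgr24(VideoFrame decoded)
        {
            // In real code the converter caches its SwsContext and should be reused across frames of a stream.
            using var converter = new ImageConverter(decoded.Layout, AVPixelFormat.AV_PIX_FMT_BGR24);
            return decoded.ToBitmap(converter, ImagePixelFormat.Bgr24, decoded.Layout);
        }
    }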
/FFMediaToolkit/Common/MediaType.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Common
2 | {
3 | ///
4 | /// Represents the multimedia stream types.
5 | ///
6 | public enum MediaType
7 | {
8 | ///
9 | /// Other media type not supported by the FFMediaToolkit.
10 | ///
11 | None,
12 |
13 | ///
14 | /// Video.
15 | ///
16 | Video,
17 |
18 | ///
19 | /// Audio.
20 | ///
21 | Audio,
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Common/Wrapper{T}.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Common
2 | {
3 | using System;
4 |
5 | ///
6 | /// A base class for wrappers of unmanaged objects with IDisposable implementation.
7 | ///
8 | /// The type of the unmanaged object.
9 | internal abstract unsafe class Wrapper<T> : IDisposable
10 | where T : unmanaged
11 | {
12 | private IntPtr pointer;
13 | private bool isDisposed;
14 |
15 | ///
16 | /// Initializes a new instance of the class.
17 | ///
18 | /// A pointer to the unmanaged object.
19 | protected Wrapper(T* pointer) => this.pointer = new IntPtr(pointer);
20 |
21 | ///
22 | /// Finalizes an instance of the class.
23 | ///
24 | ~Wrapper() => Disposing(false);
25 |
26 | ///
27 | /// Gets a pointer to the underlying object.
28 | ///
29 | public T* Pointer => isDisposed ? null : (T*)pointer;
30 |
31 | ///
32 | /// Gets a reference to the wrapped pointer field.
33 | ///
34 | internal ref readonly IntPtr PointerRef => ref pointer;
35 |
36 | ///
37 | public void Dispose() => Disposing(true);
38 |
39 | ///
40 | /// Updates the pointer to the object.
41 | ///
42 | /// The new pointer.
43 | protected void UpdatePointer(T* newPointer) => pointer = new IntPtr(newPointer);
44 |
45 | ///
46 | /// Free the unmanaged resources.
47 | ///
48 | protected abstract void OnDisposing();
49 |
50 | private void Disposing(bool dispose)
51 | {
52 | if (isDisposed)
53 | return;
54 |
55 | OnDisposing();
56 |
57 | isDisposed = true;
58 |
59 | if (dispose)
60 | GC.SuppressFinalize(this);
61 | }
62 | }
63 | }
--------------------------------------------------------------------------------
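A hypothetical example of the pattern Wrapper<T> standardizes: a subclass owns one native object, exposes its pointer, and frees it exactly once from OnDisposing (reached via Dispose or the finalizer). ResamplerWrapper is not part of the library; it only demonstrates the shape of a derived wrapper.

    using FFMediaToolkit.Common;
    using FFmpeg.AutoGen;

    internal sealed unsafe class ResamplerWrapper : Wrapper<SwrContext>
    {
        public ResamplerWrapper()
            : base(ffmpeg.swr_alloc())
        {
        }

        protected override void OnDisposing()
        {
            var ptr = Pointer;
            ffmpeg.swr_free(&ptr);
        }
    }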
/FFMediaToolkit/Decoding/AudioStream.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Decoding
2 | {
3 | using System;
4 | using System.IO;
5 | using FFMediaToolkit.Audio;
6 | using FFMediaToolkit.Common.Internal;
7 | using FFMediaToolkit.Decoding.Internal;
8 | using FFMediaToolkit.Helpers;
9 | using FFmpeg.AutoGen;
10 |
11 | ///
12 | /// Represents an audio stream in the MediaFile.
13 | ///
14 | public unsafe class AudioStream : MediaStream
15 | {
16 | private SwrContext* swrContext;
17 | private bool isDisposed;
18 |
19 | ///
20 | /// Initializes a new instance of the class.
21 | ///
22 | /// The audio stream.
23 | /// The decoder settings.
24 | internal AudioStream(Decoder stream, MediaOptions options)
25 | : base(stream, options)
26 | {
27 | var layout = Info.ChannelLayout;
28 | SwrContext* context;
29 | ffmpeg.swr_alloc_set_opts2(
30 | &context,
31 | &layout,
32 | (AVSampleFormat)SampleFormat.SingleP,
33 | Info.SampleRate,
34 | &layout,
35 | (AVSampleFormat)Info.SampleFormat,
36 | Info.SampleRate,
37 | 0,
38 | null).ThrowIfError("Cannot allocate SwrContext");
39 | ffmpeg.swr_init(context);
40 | swrContext = context;
41 | }
42 |
43 | ///
44 | /// Gets information about this stream.
45 | ///
46 | public new AudioStreamInfo Info => base.Info as AudioStreamInfo;
47 |
48 | ///
49 | /// Reads the next frame from the audio stream.
50 | ///
51 | /// The decoded audio data.
52 | public new AudioData GetNextFrame()
53 | {
54 | var frame = base.GetNextFrame() as AudioFrame;
55 |
56 | var converted = AudioFrame.Create(
57 | frame.SampleRate,
58 | frame.NumChannels,
59 | frame.NumSamples,
60 | frame.ChannelLayout,
61 | SampleFormat.SingleP,
62 | frame.DecodingTimestamp,
63 | frame.PresentationTimestamp);
64 |
65 | ffmpeg.swr_convert_frame(swrContext, converted.Pointer, frame.Pointer);
66 |
67 | return new AudioData(converted);
68 | }
69 |
70 | ///
71 | /// Reads the next frame from the audio stream.
72 | /// A false return value indicates that the end of the stream has been reached.
73 | /// The method throws an exception if another error occurs.
74 | ///
75 | /// The decoded audio data.
76 | /// false if the end of the stream has been reached; otherwise true.
77 | public bool TryGetNextFrame(out AudioData data)
78 | {
79 | try
80 | {
81 | data = GetNextFrame();
82 | return true;
83 | }
84 | catch (EndOfStreamException)
85 | {
86 | data = default;
87 | return false;
88 | }
89 | }
90 |
91 | ///
92 | /// Reads the audio frame found at the specified timestamp.
93 | ///
94 | /// The frame timestamp.
95 | /// The decoded audio data.
96 | public new AudioData GetFrame(TimeSpan time)
97 | {
98 | var frame = base.GetFrame(time) as AudioFrame;
99 |
100 | var converted = AudioFrame.Create(
101 | frame.SampleRate,
102 | frame.NumChannels,
103 | frame.NumSamples,
104 | frame.ChannelLayout,
105 | SampleFormat.SingleP,
106 | frame.DecodingTimestamp,
107 | frame.PresentationTimestamp);
108 |
109 | ffmpeg.swr_convert_frame(swrContext, converted.Pointer, frame.Pointer);
110 |
111 | return new AudioData(converted);
112 | }
113 |
114 | ///
115 | /// Reads the audio data found at the specified timestamp.
116 | /// A false return value indicates that the end of the stream has been reached.
117 | /// The method throws an exception if another error occurs.
118 | ///
119 | /// The frame timestamp.
120 | /// The decoded audio data.
121 | /// false if the end of the stream has been reached; otherwise true.
122 | public bool TryGetFrame(TimeSpan time, out AudioData data)
123 | {
124 | try
125 | {
126 | data = GetFrame(time);
127 | return true;
128 | }
129 | catch (EndOfStreamException)
130 | {
131 | data = default;
132 | return false;
133 | }
134 | }
135 |
136 | ///
137 | public override void Dispose()
138 | {
139 | if (!isDisposed)
140 | {
141 | fixed (SwrContext** ptr = &swrContext)
142 | {
143 | ffmpeg.swr_free(ptr);
144 | }
145 |
146 | isDisposed = true;
147 | }
148 |
149 | base.Dispose();
150 | }
151 | }
152 | }
153 |
--------------------------------------------------------------------------------
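A usage sketch for the stream above: seek to a position with TryGetFrame, then keep reading sequentially with TryGetNextFrame. The path is hypothetical and the MediaFile/Audio properties are assumed from the rest of this repository.

    using System;
    using FFMediaToolkit.Decoding;

    using var file = MediaFile.Open(@"C:\media\podcast.mp3");
    var audio = file.Audio;

    // Jump (approximately) to the 30-second mark.
    if (audio.TryGetFrame(TimeSpan.FromSeconds(30), out var data))
    {
        Console.WriteLine($"Frame at 30s: {data.NumSamples} samples x {data.NumChannels} channels");
    }

    // Continue decoding until the end of the stream.
    while (audio.TryGetNextFrame(out var next))
    {
        // ...process next.GetChannelData(c) for each channel...
    }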
/FFMediaToolkit/Decoding/AudioStreamInfo.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Decoding
2 | {
3 | using FFMediaToolkit.Audio;
4 | using FFMediaToolkit.Common;
5 | using FFMediaToolkit.Decoding.Internal;
6 | using FFmpeg.AutoGen;
7 |
8 | ///
9 | /// Represents information about the audio stream.
10 | ///
11 | public class AudioStreamInfo : StreamInfo
12 | {
13 | ///
14 | /// Initializes a new instance of the class.
15 | ///
16 | /// A generic stream.
17 | /// The input container.
18 | internal unsafe AudioStreamInfo(AVStream* stream, InputContainer container)
19 | : base(stream, MediaType.Audio, container)
20 | {
21 | var codec = stream->codecpar;
22 | NumChannels = codec->ch_layout.nb_channels;
23 | SampleRate = codec->sample_rate;
24 | SamplesPerFrame = codec->frame_size > 0 ? codec->frame_size : codec->sample_rate / 20;
25 | SampleFormat = (SampleFormat)codec->format;
26 |
27 | AVChannelLayout layout;
28 | ffmpeg.av_channel_layout_default(&layout, codec->ch_layout.nb_channels);
29 | ChannelLayout = layout;
30 | }
31 |
32 | ///
33 | /// Gets the number of audio channels stored in the stream.
34 | ///
35 | public int NumChannels { get; }
36 |
37 | ///
38 | /// Gets the number of samples per second of the audio stream.
39 | ///
40 | public int SampleRate { get; }
41 |
42 | ///
43 | /// Gets the average number of samples per frame (chunk of samples) calculated from metadata.
44 | /// It is used to calculate timestamps in the internal decoder methods.
45 | ///
46 | public int SamplesPerFrame { get; }
47 |
48 | ///
49 | /// Gets the audio sample format.
50 | ///
51 | public SampleFormat SampleFormat { get; }
52 |
53 | ///
54 | /// Gets the channel layout for this stream.
55 | ///
56 | internal AVChannelLayout ChannelLayout { get; }
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Decoding/ContainerOptions.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Decoding
2 | {
3 | using System.Collections.Generic;
4 | using FFmpeg.AutoGen;
5 |
6 | ///
7 | /// Represents a set of demuxer options and flags.
8 | /// See https://ffmpeg.org/ffmpeg-formats.html#Format-Options.
9 | ///
10 | public class ContainerOptions
11 | {
12 | ///
13 | /// Initializes a new instance of the class with the default settings.
14 | ///
15 | public ContainerOptions()
16 | {
17 | }
18 |
19 | ///
20 | /// Discard corrupted packets.
21 | /// Port of 'discardcorrupt'.
22 | ///
23 | public bool FlagDiscardCorrupt { get; set; }
24 |
25 | ///
26 | /// Enable fast, but inaccurate seeks for some formats.
27 | /// Port of 'fastseek'.
28 | ///
29 | public bool FlagEnableFastSeek { get; set; }
30 |
31 | ///
32 | /// Do not fill in missing values that can be exactly calculated.
33 | /// Port of 'nofillin'.
34 | ///
35 | public bool FlagEnableNoFillIn { get; set; }
36 |
37 | ///
38 | /// Generate missing PTS if DTS is present.
39 | /// Port of 'genpts'.
40 | ///
41 | public bool FlagGeneratePts { get; set; }
42 |
43 | ///
44 | /// Ignore DTS if PTS is set.
45 | /// Port of 'igndts'.
46 | ///
47 | public bool FlagIgnoreDts { get; set; }
48 |
49 | ///
50 | /// Ignore index.
51 | /// Port of 'ignidx'.
52 | ///
53 | public bool FlagIgnoreIndex { get; set; }
54 |
55 | ///
56 | /// Reduce the latency introduced by optional buffering.
57 | /// Port of 'nobuffer'.
58 | ///
59 | public bool FlagNoBuffer { get; set; }
60 |
61 | ///
62 | /// Try to interleave output packets by DTS.
63 | /// Port of 'sortdts'.
64 | ///
65 | public bool FlagSortDts { get; set; }
66 |
67 | ///
68 | /// Allow seeking to non-keyframes on demuxer level when supported.
69 | /// Port of seek2any.
70 | ///
71 | public bool SeekToAny { get; set; }
72 |
73 | ///
74 | /// Gets or sets the private demuxer-specific options.
75 | ///
76 | public Dictionary<string, string> PrivateOptions { get; set; } = new Dictionary<string, string>();
77 |
78 | ///
79 | /// Applies flag settings specified in this class to an instance of AVFormatContext.
80 | ///
81 | /// An empty instance of AVFormatContext before opening the stream.
82 | internal unsafe void ApplyFlags(AVFormatContext* formatContext)
83 | {
84 | ref var formatFlags = ref formatContext->flags;
85 |
86 | if (FlagDiscardCorrupt)
87 | {
88 | formatFlags |= ffmpeg.AVFMT_FLAG_DISCARD_CORRUPT;
89 | }
90 |
91 | if (FlagEnableFastSeek)
92 | {
93 | formatFlags |= ffmpeg.AVFMT_FLAG_FAST_SEEK;
94 | }
95 |
96 | if (FlagEnableNoFillIn)
97 | {
98 | formatFlags |= ffmpeg.AVFMT_FLAG_NOFILLIN;
99 | }
100 |
101 | if (FlagGeneratePts)
102 | {
103 | formatFlags |= ffmpeg.AVFMT_FLAG_GENPTS;
104 | }
105 |
106 | if (FlagIgnoreDts)
107 | {
108 | formatFlags |= ffmpeg.AVFMT_FLAG_IGNDTS;
109 | }
110 |
111 | if (FlagIgnoreIndex)
112 | {
113 | formatFlags |= ffmpeg.AVFMT_FLAG_IGNIDX;
114 | }
115 |
116 | if (FlagNoBuffer)
117 | {
118 | formatFlags |= ffmpeg.AVFMT_FLAG_NOBUFFER;
119 | }
120 |
121 | if (FlagSortDts)
122 | {
123 | formatFlags |= ffmpeg.AVFMT_FLAG_SORT_DTS;
124 | }
125 |
126 | formatContext->seek2any = SeekToAny ? 1 : 0;
127 | }
128 | }
129 | }
130 |
--------------------------------------------------------------------------------
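A sketch of applying these demuxer flags when opening a file. It assumes MediaOptions exposes a ContainerOptions property named DemuxerOptions (the property name is an assumption here); the path and option values are illustrative only.

    using FFMediaToolkit.Decoding;

    var options = new MediaOptions
    {
        DemuxerOptions = new ContainerOptions
        {
            FlagDiscardCorrupt = true,
            FlagGeneratePts = true,
            SeekToAny = true,
            PrivateOptions = { ["probesize"] = "5000000" },
        },
    };

    using var file = MediaFile.Open(@"C:\media\recording.ts", options);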
/FFMediaToolkit/Decoding/Internal/AvioStream.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Decoding.Internal
2 | {
3 | using System;
4 | using System.IO;
5 | using System.Runtime.InteropServices;
6 |
7 | using FFmpeg.AutoGen;
8 |
9 | ///
10 | /// A stream wrapper.
11 | ///
12 | internal unsafe class AvioStream
13 | {
14 | private readonly Stream inputStream;
15 |
16 | private byte[] readBuffer = null;
17 |
18 | ///
19 | /// Initializes a new instance of the class.
20 | ///
21 | /// Multimedia file stream.
22 | public AvioStream(Stream input)
23 | {
24 | inputStream = input ?? throw new ArgumentNullException(nameof(input));
25 | }
26 |
27 | ///
28 | /// A method for refilling the buffer. For stream protocols,
29 | /// must never return 0 but rather a proper AVERROR code.
30 | ///
31 | /// An opaque pointer.
32 | /// A buffer that needs to be filled with stream data.
33 | /// The size of the buffer.
34 | /// Number of read bytes.
35 | public int Read(void* opaque, byte* buffer, int bufferLength)
36 | {
37 | if (readBuffer == null)
38 | {
39 | readBuffer = new byte[bufferLength];
40 | }
41 |
42 | int readed = inputStream.Read(readBuffer, 0, readBuffer.Length);
43 |
44 | if (readed < 1)
45 | {
46 | return ffmpeg.AVERROR_EOF;
47 | }
48 |
49 | Marshal.Copy(readBuffer, 0, (IntPtr)buffer, readed);
50 |
51 | return readed;
52 | }
53 |
54 | ///
55 | /// A method for seeking to specified byte position.
56 | ///
57 | /// An opaque pointer.
58 | /// The offset in a stream.
59 | /// The seek option.
60 | /// Position within the current stream or stream size.
61 | public long Seek(void* opaque, long offset, int whence)
62 | {
63 | if (!inputStream.CanSeek)
64 | {
65 | return -1;
66 | }
67 |
68 | return whence == ffmpeg.AVSEEK_SIZE ?
69 | inputStream.Length :
70 | inputStream.Seek(offset, SeekOrigin.Begin);
71 | }
72 | }
73 | }
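A short sketch of how this wrapper is reached from user code, assuming the stream-based MediaFile.Open overload defined later in this section. Note that Read returns AVERROR_EOF instead of 0 at end of stream, and Seek reports the total length when FFmpeg passes AVSEEK_SIZE.

using (var fileStream = File.OpenRead("video.mp4"))                 // a seekable System.IO.Stream
using (var media = MediaFile.Open(fileStream, new MediaOptions()))
{
    // FFmpeg pulls data through AvioStream.Read/Seek via the custom AVIOContext
}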
--------------------------------------------------------------------------------
/FFMediaToolkit/Decoding/Internal/Decoder.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Decoding.Internal
2 | {
3 | using System;
4 | using System.Collections.Generic;
5 | using System.IO;
6 | using FFMediaToolkit.Common;
7 | using FFMediaToolkit.Common.Internal;
8 | using FFMediaToolkit.Helpers;
9 | using FFmpeg.AutoGen;
10 |
11 | ///
12 | /// Represents an input multimedia stream.
13 | ///
14 | internal unsafe class Decoder : Wrapper<AVCodecContext>
15 | {
16 | private readonly int bufferLimit;
17 | private int bufferSize = 0;
18 | private bool reuseLastPacket;
19 | private bool flushing = false;
20 | private MediaPacket packet;
21 |
22 | ///
23 | /// Initializes a new instance of the class.
24 | ///
25 | /// The underlying codec.
26 | /// The multimedia stream.
27 | /// The container that owns the stream.
28 | public Decoder(AVCodecContext* codec, AVStream* stream, InputContainer owner)
29 | : base(codec)
30 | {
31 | bufferLimit = owner.MaxBufferSize * 1024 * 1024; // convert megabytes to bytes
32 | OwnerFile = owner;
33 | Info = StreamInfo.Create(stream, owner);
34 | switch (Info.Type)
35 | {
36 | case MediaType.Audio:
37 | RecentlyDecodedFrame = new AudioFrame();
38 | break;
39 | case MediaType.Video:
40 | RecentlyDecodedFrame = new VideoFrame();
41 | break;
42 | default:
43 | throw new Exception("Tried to create a decoder from an unsupported stream or codec type.");
44 | }
45 |
46 | BufferedPackets = new Queue<MediaPacket>();
47 | }
48 |
49 | ///
50 | /// Gets information about the stream.
51 | ///
52 | public StreamInfo Info { get; }
53 |
54 | ///
55 | /// Gets the media container that owns this stream.
56 | ///
57 | public InputContainer OwnerFile { get; }
58 |
59 | ///
60 | /// Gets the recently decoded frame.
61 | ///
62 | public MediaFrame RecentlyDecodedFrame { get; }
63 |
64 | ///
65 | /// Gets a value indicating whether the codec's packet buffer is empty.
66 | ///
67 | public bool IsBufferEmpty => BufferedPackets.Count == 0;
68 |
69 | ///
70 | /// Gets a FIFO collection of media packets that the codec has buffered.
71 | ///
72 | private Queue<MediaPacket> BufferedPackets { get; }
73 |
74 | ///
75 | /// Adds the specified packet to the codec buffer.
76 | ///
77 | /// The packet to be buffered.
78 | public void BufferPacket(MediaPacket packet)
79 | {
80 | BufferedPackets.Enqueue(packet);
81 | bufferSize += packet.Pointer->size;
82 |
83 | if (bufferSize > bufferLimit)
84 | {
85 | var deletedPacket = BufferedPackets.Dequeue();
86 | bufferSize -= deletedPacket.Pointer->size;
87 | deletedPacket.Dispose();
88 | }
89 | }
90 |
91 | ///
92 | /// Reads the next frame from the stream.
93 | ///
94 | /// The decoded frame.
95 | public MediaFrame GetNextFrame()
96 | {
97 | ReadNextFrame();
98 | return RecentlyDecodedFrame;
99 | }
100 |
101 | ///
102 | /// Decodes frames until reaching the specified time stamp. Useful for seeking a few frames forward.
103 | ///
104 | /// The target time stamp.
105 | public void SkipFrames(long targetTs)
106 | {
107 | do
108 | {
109 | ReadNextFrame();
110 | }
111 | while (RecentlyDecodedFrame.PresentationTimestamp < targetTs);
112 | }
113 |
114 | ///
115 | /// Discards all packet data buffered by this instance.
116 | ///
117 | public void DiscardBufferedData()
118 | {
119 | ffmpeg.avcodec_flush_buffers(Pointer);
120 |
121 | foreach (var packet in BufferedPackets)
122 | {
123 | packet.Wipe();
124 | packet.Dispose();
125 | }
126 |
127 | BufferedPackets.Clear();
128 | bufferSize = 0;
129 | flushing = false;
130 | }
131 |
132 | ///
133 | protected override void OnDisposing()
134 | {
135 | RecentlyDecodedFrame.Dispose();
136 | fixed (void* pointerRef = &PointerRef)
137 | {
138 | ffmpeg.avcodec_free_context((AVCodecContext**)pointerRef);
139 | }
140 | }
141 |
142 | private void ReadNextFrame()
143 | {
144 | ffmpeg.av_frame_unref(RecentlyDecodedFrame.Pointer);
145 | int error;
146 |
147 | do
148 | {
149 | if (!flushing)
150 | {
151 | DecodePacket(); // Gets the next packet and sends it to the decoder
152 | }
153 |
154 | error = ffmpeg.avcodec_receive_frame(Pointer, RecentlyDecodedFrame.Pointer); // Tries to decode frame from the packets.
155 | }
156 | while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN) || error == -35); // The EAGAIN code means that the frame decoding has not been completed and more packets are needed.
157 |
158 | if (error == ffmpeg.AVERROR_EOF)
159 | {
160 | throw new EndOfStreamException("End of file.");
161 | }
162 |
163 | error.ThrowIfError("An error occurred while decoding the frame.");
164 | }
165 |
166 | private void DecodePacket()
167 | {
168 | if (!reuseLastPacket)
169 | {
170 | if (IsBufferEmpty)
171 | {
172 | flushing = !OwnerFile.GetPacketFromStream(Info.Index);
173 | }
174 |
175 | packet = BufferedPackets.Dequeue();
176 | bufferSize -= packet.Pointer->size;
177 | }
178 |
179 | // Sends the packet to the decoder.
180 | var result = ffmpeg.avcodec_send_packet(Pointer, packet);
181 |
182 | reuseLastPacket = result == ffmpeg.AVERROR(ffmpeg.EAGAIN);
183 |
184 | if (!reuseLastPacket)
185 | {
186 | packet.Wipe();
187 | packet.Dispose();
188 | result.ThrowIfError("Cannot send a packet to the decoder.");
189 | }
190 | }
191 | }
192 | }
193 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Decoding/Internal/DecoderFactory.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Decoding.Internal
2 | {
3 | using FFMediaToolkit.Common;
4 | using FFMediaToolkit.Helpers;
5 | using FFmpeg.AutoGen;
6 |
7 | ///
8 | /// Contains a method for opening media streams.
9 | ///
10 | internal unsafe class DecoderFactory
11 | {
12 | ///
13 | /// Opens the stream with the specified index in the media container.
14 | ///
15 | /// The media container.
16 | /// The media options.
17 | /// The stream.
18 | /// The opened Decoder, or null if no suitable decoder was found.
19 | internal static Decoder OpenStream(InputContainer container, MediaOptions options, AVStream* stream)
20 | {
21 | var format = container.Pointer;
22 | AVCodec* codec = null;
23 |
24 | var index = ffmpeg.av_find_best_stream(format, stream->codecpar->codec_type, stream->index, -1, &codec, 0);
25 | index.IfError(ffmpeg.AVERROR_DECODER_NOT_FOUND, "Cannot find a codec for the specified stream.");
26 | if (index < 0)
27 | {
28 | return null;
29 | }
30 |
31 | var codecContext = ffmpeg.avcodec_alloc_context3(codec);
32 | ffmpeg.avcodec_parameters_to_context(codecContext, stream->codecpar)
33 | .ThrowIfError("Cannot open the stream codec!");
34 | codecContext->pkt_timebase = stream->time_base;
35 |
36 | var dict = new FFDictionary(options.DecoderOptions, false);
37 | fixed (void* dictPtrRef = &dict.PointerRef)
38 | {
39 | ffmpeg.avcodec_open2(codecContext, codec, (AVDictionary**)dictPtrRef)
40 | .ThrowIfError("Cannot open the stream codec!");
41 | }
42 |
43 | return new Decoder(codecContext, stream, container);
44 | }
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Decoding/Internal/InputContainer.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Decoding.Internal
2 | {
3 | using System;
4 | using System.IO;
5 |
6 | using FFMediaToolkit.Common;
7 | using FFMediaToolkit.Common.Internal;
8 | using FFMediaToolkit.Helpers;
9 |
10 | using FFmpeg.AutoGen;
11 |
12 | ///
13 | /// Represents the multimedia file container.
14 | ///
15 | internal unsafe class InputContainer : Wrapper<AVFormatContext>
16 | {
17 | private readonly avio_alloc_context_read_packet readCallback;
18 |
19 | private readonly avio_alloc_context_seek seekCallBack;
20 |
21 | private InputContainer(AVFormatContext* formatContext, int bufferSizeLimit)
22 | : base(formatContext)
23 | {
24 | Decoders = new Decoder[Pointer->nb_streams];
25 | MaxBufferSize = bufferSizeLimit;
26 | }
27 |
28 | private InputContainer(AVFormatContext* formatContext, avio_alloc_context_read_packet read, avio_alloc_context_seek seek, int bufferSizeLimit)
29 | : base(formatContext)
30 | {
31 | Decoders = new Decoder[Pointer->nb_streams];
32 | MaxBufferSize = bufferSizeLimit;
33 | readCallback = read;
34 | seekCallBack = seek;
35 | }
36 |
37 | private delegate void AVFormatContextDelegate(AVFormatContext* context);
38 |
39 | ///
40 | /// List of all stream codecs that have been opened from the file.
41 | ///
42 | public Decoder[] Decoders { get; }
43 |
44 | ///
45 | /// Gets the memory limit (in megabytes) for packets stored in each decoder's buffer.
46 | ///
47 | public int MaxBufferSize { get; }
48 |
49 | ///
50 | /// Opens a media container and stream codecs from the given path.
51 | ///
52 | /// A path to the multimedia file.
53 | /// The media settings.
54 | /// A new instance of the class.
55 | public static InputContainer LoadFile(string path, MediaOptions options) => MakeContainer(path, options, _ => { });
56 |
57 | ///
58 | /// Opens a media container and stream codecs from the given stream.
59 | ///
60 | /// A stream of the multimedia file.
61 | /// The media settings.
62 | /// A new instance of the class.
63 | public static InputContainer LoadStream(Stream stream, MediaOptions options) => MakeContainer(stream, options);
64 |
65 | ///
66 | /// Seeks all streams in the container to the first key frame before the specified time stamp.
67 | ///
68 | /// The target time stamp in a stream time base.
69 | /// The stream index. It will be used only to get the correct time base value.
70 | public void SeekFile(long targetTs, int streamIndex)
71 | {
72 | ffmpeg.av_seek_frame(Pointer, streamIndex, targetTs, ffmpeg.AVSEEK_FLAG_BACKWARD).ThrowIfError($"Seek to {targetTs} failed.");
73 |
74 | foreach (var decoder in Decoders)
75 | {
76 | decoder?.DiscardBufferedData();
77 | }
78 | }
79 |
80 | ///
81 | /// Reads a packet from the specified stream index and buffers it in the respective codec.
82 | ///
83 | /// Index of the stream to read from.
84 | /// True if the requested packet was read; false if EOF occurred and a flush packet was sent to the buffer.
85 | public bool GetPacketFromStream(int streamIndex)
86 | {
87 | MediaPacket packet;
88 | do
89 | {
90 | if (!TryReadNextPacket(out packet))
91 | {
92 | Decoders[streamIndex].BufferPacket(MediaPacket.CreateFlushPacket(streamIndex));
93 | return false;
94 | }
95 |
96 | var stream = Decoders[packet.StreamIndex];
97 | if (stream == null)
98 | {
99 | packet.Wipe();
100 | packet.Dispose();
101 | packet = null;
102 | }
103 | else
104 | {
105 | stream.BufferPacket(packet);
106 | }
107 | }
108 | while (packet?.StreamIndex != streamIndex);
109 | return true;
110 | }
111 |
112 | ///
113 | protected override void OnDisposing()
114 | {
115 | foreach (var decoder in Decoders)
116 | {
117 | decoder?.Dispose();
118 | }
119 |
120 | var ptr = Pointer;
121 | ffmpeg.avformat_close_input(&ptr);
122 | }
123 |
124 | private static AVFormatContext* MakeContext(string url, MediaOptions options, AVFormatContextDelegate contextDelegate)
125 | {
126 | FFmpegLoader.LoadFFmpeg();
127 |
128 | var context = ffmpeg.avformat_alloc_context();
129 | options.DemuxerOptions.ApplyFlags(context);
130 | var dict = new FFDictionary(options.DemuxerOptions.PrivateOptions, false).Pointer;
131 |
132 | contextDelegate(context);
133 |
134 | ffmpeg.avformat_open_input(&context, url, null, &dict)
135 | .ThrowIfError("An error occurred while opening the file");
136 |
137 | ffmpeg.avformat_find_stream_info(context, null)
138 | .ThrowIfError("Cannot find stream info");
139 |
140 | return context;
141 | }
142 |
143 | private static InputContainer MakeContainer(Stream input, MediaOptions options)
144 | {
145 | var avioStream = new AvioStream(input);
146 | var read = (avio_alloc_context_read_packet)avioStream.Read;
147 | var seek = (avio_alloc_context_seek)avioStream.Seek;
148 |
149 | var context = MakeContext(null, options, ctx =>
150 | {
151 | int bufferLength = 4096;
152 | var avioBuffer = (byte*)ffmpeg.av_malloc((ulong)bufferLength);
153 |
154 | ctx->pb = ffmpeg.avio_alloc_context(avioBuffer, bufferLength, 0, null, read, null, seek);
155 | if (ctx->pb == null)
156 | {
157 | throw new FFmpegException("Cannot allocate AVIOContext.");
158 | }
159 | });
160 |
161 | var container = new InputContainer(context, read, seek, options.PacketBufferSizeLimit);
162 | container.OpenStreams(options);
163 | return container;
164 | }
165 |
166 | private static InputContainer MakeContainer(string url, MediaOptions options, AVFormatContextDelegate contextDelegate)
167 | {
168 | var context = MakeContext(url, options, contextDelegate);
169 |
170 | var container = new InputContainer(context, options.PacketBufferSizeLimit);
171 | container.OpenStreams(options);
172 | return container;
173 | }
174 |
175 | ///
176 | /// Opens the streams in the file using the specified MediaOptions.
177 | ///
178 | /// The MediaOptions object.
179 | private void OpenStreams(MediaOptions options)
180 | {
181 | for (int i = 0; i < Pointer->nb_streams; i++)
182 | {
183 | var stream = Pointer->streams[i];
184 | if (options.ShouldLoadStreamsOfType(stream->codecpar->codec_type))
185 | {
186 | try
187 | {
188 | Decoders[i] = DecoderFactory.OpenStream(this, options, stream);
189 | }
190 | catch (Exception)
191 | {
192 | Decoders[i] = null;
193 | }
194 | }
195 | }
196 | }
197 |
198 | ///
199 | /// Reads the next packet from this file.
200 | ///
201 | private bool TryReadNextPacket(out MediaPacket packet)
202 | {
203 | packet = MediaPacket.AllocateEmpty();
204 | var result = ffmpeg.av_read_frame(Pointer, packet.Pointer); // Gets the next packet from the file.
205 |
206 | // Check if the end of file error occurred
207 | if (result < 0)
208 | {
209 | packet.Dispose();
210 | if (result == ffmpeg.AVERROR_EOF)
211 | {
212 | return false;
213 | }
214 | else
215 | {
216 | result.ThrowIfError("Cannot read next packet from the file");
217 | }
218 | }
219 |
220 | return true;
221 | }
222 | }
223 | }
--------------------------------------------------------------------------------
/FFMediaToolkit/Decoding/MediaChapter.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Decoding
2 | {
3 | using System;
4 | using System.Collections.Generic;
5 |
6 | ///
7 | /// Represents a chapter in a media file.
8 | ///
9 | public class MediaChapter
10 | {
11 | ///
12 | /// Initializes a new instance of the class.
13 | ///
14 | /// The starting time of this chapter.
15 | /// The ending time of this chapter.
16 | /// This chapter's metadata.
17 | internal MediaChapter(TimeSpan start, TimeSpan end, Dictionary<string, string> metadata)
18 | {
19 | StartTime = start;
20 | EndTime = end;
21 | Metadata = metadata;
22 | }
23 |
24 | ///
25 | /// Gets the start time of this chapter.
26 | ///
27 | public TimeSpan StartTime { get; }
28 |
29 | ///
30 | /// Gets the end time of this chapter.
31 | ///
32 | public TimeSpan EndTime { get; }
33 |
34 | ///
35 | /// Gets the duration of this chapter.
36 | ///
37 | public TimeSpan Duration => EndTime - StartTime;
38 |
39 | ///
40 | /// Gets the metadata for this chapter (such as name).
41 | ///
42 | public Dictionary<string, string> Metadata { get; }
43 | }
44 | }
--------------------------------------------------------------------------------
/FFMediaToolkit/Decoding/MediaFile.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Decoding
2 | {
3 | using System;
4 | using System.IO;
5 | using System.Linq;
6 | using FFMediaToolkit.Common;
7 | using FFMediaToolkit.Decoding.Internal;
8 |
9 | ///
10 | /// Represents a multimedia file.
11 | ///
12 | public class MediaFile : IDisposable
13 | {
14 | private readonly InputContainer container;
15 | private bool isDisposed;
16 |
17 | private unsafe MediaFile(InputContainer container, MediaOptions options)
18 | {
19 | this.container = container;
20 |
21 | var video = container.Decoders.Where(codec => codec?.Info.Type == MediaType.Video);
22 | var audio = container.Decoders.Where(codec => codec?.Info.Type == MediaType.Audio);
23 |
24 | VideoStreams = video.Select(codec => new VideoStream(codec, options)).ToArray();
25 | AudioStreams = audio.Select(codec => new AudioStream(codec, options)).ToArray();
26 |
27 | Info = new MediaInfo(container.Pointer);
28 | }
29 |
30 | ///
31 | /// Gets all the video streams in the media file.
32 | ///
33 | public VideoStream[] VideoStreams { get; }
34 |
35 | ///
36 | /// Gets the first video stream in the media file.
37 | ///
38 | public VideoStream Video => VideoStreams.FirstOrDefault();
39 |
40 | ///
41 | /// Gets a value indicating whether the file contains video streams.
42 | ///
43 | public bool HasVideo => VideoStreams.Length > 0;
44 |
45 | ///
46 | /// Gets all the audio streams in the media file.
47 | ///
48 | public AudioStream[] AudioStreams { get; }
49 |
50 | ///
51 | /// Gets the first audio stream in the media file.
52 | ///
53 | public AudioStream Audio => AudioStreams.FirstOrDefault();
54 |
55 | ///
56 | /// Gets a value indicating whether the file contains audio streams.
57 | ///
58 | public bool HasAudio => AudioStreams.Length > 0;
59 |
60 | ///
61 | /// Gets information about the media container.
62 | ///
63 | public MediaInfo Info { get; }
64 |
65 | ///
66 | /// Opens a media file from the specified path with default settings.
67 | ///
68 | /// A path to the media file.
69 | /// The opened .
70 | public static MediaFile Open(string path) => Open(path, new MediaOptions());
71 |
72 | ///
73 | /// Opens a media file from the specified path.
74 | ///
75 | /// A path to the media file.
76 | /// The decoder settings.
77 | /// The opened .
78 | public static MediaFile Open(string path, MediaOptions options)
79 | {
80 | try
81 | {
82 | var container = InputContainer.LoadFile(path, options);
83 | return new MediaFile(container, options);
84 | }
85 | catch (DirectoryNotFoundException)
86 | {
87 | throw;
88 | }
89 | catch (Exception ex)
90 | {
91 | throw new Exception("Failed to open the media file", ex);
92 | }
93 | }
94 |
95 | ///
96 | /// Opens a media stream with default settings.
97 | ///
98 | /// A stream of the multimedia file.
99 | /// The opened .
100 | public static MediaFile Open(Stream stream) => Open(stream, new MediaOptions());
101 |
102 | ///
103 | /// Opens a media stream.
104 | ///
105 | /// A stream of the multimedia file.
106 | /// The decoder settings.
107 | /// The opened .
108 | public static MediaFile Open(Stream stream, MediaOptions options)
109 | {
110 | try
111 | {
112 | var container = InputContainer.LoadStream(stream, options);
113 | return new MediaFile(container, options);
114 | }
115 | catch (Exception ex)
116 | {
117 | throw new Exception("Failed to open the media stream", ex);
118 | }
119 | }
120 |
121 | ///
122 | public void Dispose()
123 | {
124 | if (isDisposed)
125 | {
126 | return;
127 | }
128 |
129 | var video = VideoStreams.Cast<MediaStream>();
130 | var audio = AudioStreams.Cast<MediaStream>();
131 |
132 | var streams = video.Concat(audio);
133 |
134 | foreach (var stream in streams)
135 | stream.Dispose();
136 |
137 | container.Dispose();
138 |
139 | isDisposed = true;
140 | }
141 | }
142 | }
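A minimal sketch of opening a file and inspecting it, using only members defined in this section:

using (var file = MediaFile.Open(@"C:\video.mp4"))
{
    Console.WriteLine($"Container: {file.Info.ContainerFormat}, duration: {file.Info.Duration}");
    Console.WriteLine($"Video streams: {file.VideoStreams.Length}, audio streams: {file.AudioStreams.Length}");

    if (file.HasVideo)
    {
        Console.WriteLine($"First video stream position: {file.Video.Position}");
    }
}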
--------------------------------------------------------------------------------
/FFMediaToolkit/Decoding/MediaInfo.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Decoding
2 | {
3 | using System;
4 | using System.Collections.ObjectModel;
5 | using System.IO;
6 | using FFMediaToolkit.Common;
7 | using FFMediaToolkit.Helpers;
8 | using FFmpeg.AutoGen;
9 |
10 | ///
11 | /// Contains information about the media container.
12 | ///
13 | public class MediaInfo
14 | {
15 | private readonly Lazy<FileInfo> fileInfo;
16 |
17 | ///
18 | /// Initializes a new instance of the class.
19 | ///
20 | /// The input container context.
21 | internal unsafe MediaInfo(AVFormatContext* container)
22 | {
23 | FilePath = new IntPtr(container->url).Utf8ToString();
24 | ContainerFormat = new IntPtr(container->iformat->name).Utf8ToString();
25 | Metadata = new ContainerMetadata(container->metadata);
26 | Bitrate = container->bit_rate > 0 ? container->bit_rate : 0;
27 |
28 | var timeBase = new AVRational { num = 1, den = ffmpeg.AV_TIME_BASE };
29 | Duration = container->duration != ffmpeg.AV_NOPTS_VALUE ?
30 | container->duration.ToTimeSpan(timeBase) :
31 | TimeSpan.Zero;
32 | StartTime = container->start_time != ffmpeg.AV_NOPTS_VALUE ?
33 | container->start_time.ToTimeSpan(timeBase) :
34 | TimeSpan.Zero;
35 | Chapters = new ReadOnlyCollection<MediaChapter>(ParseChapters(container));
36 |
37 | fileInfo = new Lazy<FileInfo>(() =>
38 | {
39 | try
40 | {
41 | var info = new FileInfo(FilePath);
42 | return info;
43 | }
44 | catch (Exception)
45 | {
46 | return null;
47 | }
48 | });
49 | }
50 |
51 | ///
52 | /// Gets the file path used to open the container.
53 | ///
54 | public string FilePath { get; }
55 |
56 | ///
57 | /// Gets a FileInfo object for the media file.
58 | /// It contains the file size, directory, and the last access, creation and write timestamps.
59 | /// Returns null if not available, for example when a Stream was used to open the MediaFile.
60 | ///
61 | public FileInfo FileInfo => fileInfo.Value;
62 |
63 | ///
64 | /// Gets the container format name.
65 | ///
66 | public string ContainerFormat { get; }
67 |
68 | ///
69 | /// Gets the container bitrate in bits per second (b/s). 0 if unknown.
70 | ///
71 | public long Bitrate { get; }
72 |
73 | ///
74 | /// Gets the duration of the media container.
75 | ///
76 | public TimeSpan Duration { get; }
77 |
78 | ///
79 | /// Gets the start time of the media container.
80 | ///
81 | public TimeSpan StartTime { get; }
82 |
83 | ///
84 | /// Gets the container file metadata. Streams may contain additional metadata.
85 | ///
86 | public ContainerMetadata Metadata { get; }
87 |
88 | ///
89 | /// Gets a collection of chapters existing in the media file.
90 | ///
91 | public ReadOnlyCollection<MediaChapter> Chapters { get; }
92 |
93 | private static unsafe MediaChapter[] ParseChapters(AVFormatContext* container)
94 | {
95 | var streamChapters = new MediaChapter[container->nb_chapters];
96 |
97 | for (var i = 0; i < container->nb_chapters; i++)
98 | {
99 | var chapter = container->chapters[i];
100 | var meta = chapter->metadata;
101 | var startTimespan = chapter->start.ToTimeSpan(chapter->time_base);
102 | var endTimespan = chapter->end.ToTimeSpan(chapter->time_base);
103 | streamChapters[i] =
104 | new MediaChapter(startTimespan, endTimespan, FFDictionary.ToDictionary(meta, true));
105 | }
106 |
107 | return streamChapters;
108 | }
109 | }
110 | }
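For illustration, enumerating the chapters parsed above (the file variable is assumed to be an opened MediaFile from the previous sketch):

foreach (var chapter in file.Info.Chapters)
{
    Console.WriteLine($"Chapter: {chapter.StartTime} - {chapter.EndTime} ({chapter.Duration})");
}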
--------------------------------------------------------------------------------
/FFMediaToolkit/Decoding/MediaOptions.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Decoding
2 | {
3 | using System;
4 | using System.Collections.Generic;
5 | using System.Drawing;
6 | using FFMediaToolkit.Graphics;
7 | using FFmpeg.AutoGen;
8 |
9 | ///
10 | /// Represents the audio/video streams loading modes.
11 | ///
12 | [Flags]
13 | public enum MediaMode
14 | {
15 | ///
16 | /// Enables loading only video streams.
17 | ///
18 | Video = 1 << AVMediaType.AVMEDIA_TYPE_VIDEO,
19 |
20 | ///
21 | /// Enables loading only audio streams.
22 | ///
23 | Audio = 1 << AVMediaType.AVMEDIA_TYPE_AUDIO,
24 |
25 | ///
26 | /// Enables loading both audio and video streams if they exist.
27 | ///
28 | AudioVideo = Audio | Video,
29 | }
30 |
31 | ///
32 | /// Represents the multimedia file container options.
33 | ///
34 | public class MediaOptions
35 | {
36 | private const string Threads = "threads";
37 |
38 | ///
39 | /// Initializes a new instance of the class.
40 | ///
41 | public MediaOptions()
42 | {
43 | }
44 |
45 | ///
46 | /// Gets or sets the limit of memory used by the packet buffer. Default limit is 40 MB per stream.
47 | ///
48 | public int PacketBufferSizeLimit { get; set; } = 40;
49 |
50 | ///
51 | /// Gets or sets the demuxer settings.
52 | ///
53 | public ContainerOptions DemuxerOptions { get; set; } = new ContainerOptions();
54 |
55 | ///
56 | /// Gets or sets the target pixel format for decoded video frames conversion. The default value is Bgr24.
57 | ///
58 | public ImagePixelFormat VideoPixelFormat { get; set; } = ImagePixelFormat.Bgr24;
59 |
60 | ///
61 | /// Gets or sets the target video size for decoded video frame conversion. Null means no rescaling.
62 | ///
63 | public Size? TargetVideoSize { get; set; }
64 |
65 | ///
66 | /// Gets or sets the threshold value used to choose the best seek method. Set this to the video GOP size (if known) to improve stream seek performance.
67 | ///
68 | public int VideoSeekThreshold { get; set; } = 12;
69 |
70 | ///
71 | /// Gets or sets the threshold value used to choose the best seek method.
72 | ///
73 | public int AudioSeekThreshold { get; set; } = 12;
74 |
75 | ///
76 | /// Gets or sets the number of decoder threads (set via the 'threads' flag). The default value is null, which means 'auto'.
77 | ///
78 | public int? DecoderThreads
79 | {
80 | get => DecoderOptions.TryGetValue(Threads, out string value) &&
81 | int.TryParse(value, out var count) ? (int?)count : null;
82 | set
83 | {
84 | if (value.HasValue)
85 | {
86 | DecoderOptions[Threads] = value.ToString();
87 | }
88 | else
89 | {
90 | DecoderOptions.Remove(Threads);
91 | }
92 | }
93 | }
94 |
95 | ///
96 | /// Gets or sets the dictionary with global options for the multimedia decoders.
97 | ///
98 | public Dictionary<string, string> DecoderOptions { get; set; } = new Dictionary<string, string>();
99 |
100 | ///
101 | /// Gets or sets which streams (audio/video) will be loaded.
102 | ///
103 | public MediaMode StreamsToLoad { get; set; } = MediaMode.AudioVideo;
104 |
105 | ///
106 | /// Determines whether streams of a certain AVMediaType should be loaded
107 | /// (based on the StreamsToLoad property).
108 | ///
109 | /// A given AVMediaType.
110 | /// True if streams of the given type are to be loaded.
111 | public bool ShouldLoadStreamsOfType(AVMediaType type)
112 | {
113 | var mode = (MediaMode)(1 << (int)type);
114 | return StreamsToLoad.HasFlag(mode);
115 | }
116 | }
117 | }
118 |
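A hedged configuration example; every value shown is optional, and Size comes from System.Drawing as imported above:

var options = new MediaOptions
{
    StreamsToLoad = MediaMode.Video,            // skip audio decoders entirely
    TargetVideoSize = new Size(1280, 720),      // rescale decoded frames; null keeps the original size
    VideoSeekThreshold = 30,                    // match the source GOP length for faster seeking
    DecoderThreads = 4,                         // null (the default) means 'auto'
};
options.DemuxerOptions.FlagGeneratePts = true;  // demuxer flags are set through ContainerOptions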
--------------------------------------------------------------------------------
/FFMediaToolkit/Decoding/MediaStream.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Decoding
2 | {
3 | using System;
4 | using FFMediaToolkit.Common.Internal;
5 | using FFMediaToolkit.Decoding.Internal;
6 | using FFMediaToolkit.Helpers;
7 |
8 | ///
9 | /// A base for streams of any kind of media.
10 | ///
11 | public class MediaStream : IDisposable
12 | {
13 | private bool isDisposed;
14 |
15 | ///
16 | /// Initializes a new instance of the class.
17 | ///
18 | /// The associated codec.
19 | /// Extra options.
20 | internal MediaStream(Decoder stream, MediaOptions options)
21 | {
22 | Stream = stream;
23 | Options = options;
24 |
25 | Threshold = TimeSpan.FromSeconds(0.5).ToTimestamp(Info.TimeBase);
26 | }
27 |
28 | ///
29 | /// Gets information about this stream.
30 | ///
31 | public StreamInfo Info => Stream.Info;
32 |
33 | ///
34 | /// Gets the timestamp of the recently decoded frame in the media stream.
35 | ///
36 | public TimeSpan Position => Math.Max(Stream.RecentlyDecodedFrame.PresentationTimestamp, 0).ToTimeSpan(Info.TimeBase);
37 |
38 | ///
39 | /// Gets a value indicating whether the stream's packet buffer is empty.
40 | ///
41 | public bool IsBufferEmpty => Stream.IsBufferEmpty;
42 |
43 | ///
44 | /// Gets the options configured for this MediaStream.
45 | ///
46 | protected MediaOptions Options { get; }
47 |
48 | private Decoder Stream { get; }
49 |
50 | private long Threshold { get; }
51 |
52 | ///
53 | /// Discards all buffered frame data associated with this stream.
54 | ///
55 | [Obsolete("Do not call this method. Buffered data is automatically discarded when required")]
56 | public void DiscardBufferedData() => Stream.DiscardBufferedData();
57 |
58 | ///
59 | public virtual void Dispose()
60 | {
61 | if (!isDisposed)
62 | {
63 | Stream.DiscardBufferedData();
64 | Stream.Dispose();
65 | isDisposed = true;
66 | }
67 | }
68 |
69 | ///
70 | /// Gets the data belonging to the next frame in the stream.
71 | ///
72 | /// The next frame's data.
73 | internal MediaFrame GetNextFrame() => Stream.GetNextFrame();
74 |
75 | ///
76 | /// Seeks the stream to the specified time and returns the nearest frame's data.
77 | ///
78 | /// A specific point in time in this stream.
79 | /// The nearest frame's data.
80 | internal MediaFrame GetFrame(TimeSpan time)
81 | {
82 | var ts = time.ToTimestamp(Info.TimeBase);
83 | var frame = GetFrameByTimestamp(ts);
84 | return frame;
85 | }
86 |
87 | private MediaFrame GetFrameByTimestamp(long ts)
88 | {
89 | var frame = Stream.RecentlyDecodedFrame;
90 | ts = Math.Max(0, Math.Min(ts, Info.DurationRaw));
91 |
92 | if (ts > frame.PresentationTimestamp && ts < frame.PresentationTimestamp + Threshold)
93 | {
94 | return Stream.GetNextFrame();
95 | }
96 | else if (ts != frame.PresentationTimestamp)
97 | {
98 | if (ts < frame.PresentationTimestamp || ts >= frame.PresentationTimestamp + Threshold)
99 | {
100 | Stream.OwnerFile.SeekFile(ts, Info.Index);
101 | }
102 |
103 | Stream.SkipFrames(ts);
104 | }
105 |
106 | return Stream.RecentlyDecodedFrame;
107 | }
108 | }
109 | }
110 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Decoding/StreamInfo.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Decoding
2 | {
3 | using System;
4 | using System.Collections.ObjectModel;
5 | using FFMediaToolkit.Common;
6 | using FFMediaToolkit.Decoding.Internal;
7 | using FFMediaToolkit.Helpers;
8 | using FFmpeg.AutoGen;
9 |
10 | ///
11 | /// Represents generic information about the stream, specialized by subclasses for specific
12 | /// kinds of streams.
13 | ///
14 | public class StreamInfo
15 | {
16 | ///
17 | /// Initializes a new instance of the class.
18 | ///
19 | /// A generic stream.
20 | /// The media type of the stream.
21 | /// The input container.
22 | internal unsafe StreamInfo(AVStream* stream, MediaType type, InputContainer container)
23 | {
24 | var codecId = stream->codecpar->codec_id;
25 | Metadata = new ReadOnlyDictionary<string, string>(FFDictionary.ToDictionary(stream->metadata, true));
26 | CodecName = ffmpeg.avcodec_get_name(codecId);
27 | CodecId = codecId.FormatEnum(12);
28 | Index = stream->index;
29 | Type = type;
30 |
31 | TimeBase = stream->time_base;
32 | RealFrameRate = stream->r_frame_rate;
33 | AvgFrameRate = stream->avg_frame_rate.ToDouble();
34 | IsVariableFrameRate = RealFrameRate.ToDouble() != AvgFrameRate;
35 |
36 | if (stream->duration >= 0)
37 | {
38 | Duration = stream->duration.ToTimeSpan(stream->time_base);
39 | DurationRaw = stream->duration;
40 | }
41 | else
42 | {
43 | Duration = TimeSpan.FromTicks(container.Pointer->duration * 10);
44 | DurationRaw = Duration.ToTimestamp(TimeBase);
45 | }
46 |
47 | if (stream->start_time >= 0)
48 | {
49 | StartTime = stream->start_time.ToTimeSpan(stream->time_base);
50 | }
51 |
52 | if (stream->nb_frames > 0)
53 | {
54 | IsFrameCountProvidedByContainer = true;
55 | NumberOfFrames = (int)stream->nb_frames;
56 | #pragma warning disable CS0618 // Type or member is obsolete
57 | FrameCount = NumberOfFrames.Value;
58 | }
59 | else
60 | {
61 | FrameCount = Duration.ToFrameNumber(stream->avg_frame_rate);
62 | if (!IsVariableFrameRate)
63 | {
64 | NumberOfFrames = FrameCount;
65 | #pragma warning restore CS0618 // Type or member is obsolete
66 | }
67 | else
68 | {
69 | NumberOfFrames = null;
70 | }
71 | }
72 | }
73 |
74 | ///
75 | /// Gets the stream index.
76 | ///
77 | public int Index { get; }
78 |
79 | ///
80 | /// Gets the codec name.
81 | ///
82 | public string CodecName { get; }
83 |
84 | ///
85 | /// Gets the codec identifier.
86 | ///
87 | public string CodecId { get; }
88 |
89 | ///
90 | /// Gets the stream's type.
91 | ///
92 | public MediaType Type { get; }
93 |
94 | ///
95 | /// Gets a value indicating whether the number of frames is known from the multimedia container metadata.
96 | ///
97 | public bool IsFrameCountProvidedByContainer { get; }
98 |
99 | ///
100 | /// Gets the stream time base.
101 | ///
102 | public AVRational TimeBase { get; }
103 |
104 | ///
105 | /// Gets the number of frames value from the container metadata, if available (see IsFrameCountProvidedByContainer).
106 | /// Otherwise, it is estimated from the video duration and average frame rate.
107 | /// This value may not be accurate if the video has a variable frame rate (see the IsVariableFrameRate property).
108 | ///
109 | [Obsolete("Please use \"StreamInfo.NumberOfFrames\" property instead.")]
110 | public int FrameCount { get; }
111 |
112 | ///
113 | /// Gets the number of frames, taken from the container metadata or estimated in constant frame rate videos. Returns null if not available.
114 | ///
115 | public int? NumberOfFrames { get; }
116 |
117 | ///
118 | /// Gets the stream duration.
119 | ///
120 | public TimeSpan Duration { get; }
121 |
122 | ///
123 | /// Gets the stream start time. Null if undefined.
124 | ///
125 | public TimeSpan? StartTime { get; }
126 |
127 | ///
128 | /// Gets the average frame rate as a double value.
129 | ///
130 | public double AvgFrameRate { get; }
131 |
132 | ///
133 | /// Gets the frame rate as an AVRational value.
134 | /// It is used to calculate timestamps in the internal decoder methods.
135 | ///
136 | public AVRational RealFrameRate { get; }
137 |
138 | ///
139 | /// Gets a value indicating whether the video is variable frame rate (VFR).
140 | ///
141 | public bool IsVariableFrameRate { get; }
142 |
143 | ///
144 | /// Gets the stream metadata.
145 | ///
146 | public ReadOnlyDictionary<string, string> Metadata { get; }
147 |
148 | ///
149 | /// Gets the duration of the stream in the time base units.
150 | ///
151 | internal long DurationRaw { get; }
152 |
153 | ///
154 | /// Creates the appropriate type of StreamInfo class depending on the kind
155 | /// of stream passed in.
156 | ///
157 | /// The represented stream.
158 | /// The input container.
159 | /// The resulting new object.
160 | internal static unsafe StreamInfo Create(AVStream* stream, InputContainer owner)
161 | {
162 | if (stream->codecpar->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO)
163 | return new AudioStreamInfo(stream, owner);
164 | if (stream->codecpar->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
165 | return new VideoStreamInfo(stream, owner);
166 | return new StreamInfo(stream, MediaType.None, owner);
167 | }
168 | }
169 | }
--------------------------------------------------------------------------------
/FFMediaToolkit/Decoding/VideoStreamInfo.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Decoding
2 | {
3 | using System;
4 | using System.Drawing;
5 | using System.Runtime.InteropServices;
6 | using FFMediaToolkit.Common;
7 | using FFMediaToolkit.Decoding.Internal;
8 | using FFMediaToolkit.Helpers;
9 | using FFmpeg.AutoGen;
10 |
11 | ///
12 | /// Represents information about the video stream.
13 | ///
14 | public class VideoStreamInfo : StreamInfo
15 | {
16 | ///
17 | /// Initializes a new instance of the class.
18 | ///
19 | /// A generic stream.
20 | /// The input container.
21 | internal unsafe VideoStreamInfo(AVStream* stream, InputContainer container)
22 | : base(stream, MediaType.Video, container)
23 | {
24 | var codec = stream->codecpar;
25 | IsInterlaced = codec->field_order != AVFieldOrder.AV_FIELD_PROGRESSIVE &&
26 | codec->field_order != AVFieldOrder.AV_FIELD_UNKNOWN;
27 | FrameSize = new Size(codec->width, codec->height);
28 | PixelFormat = ((AVPixelFormat)codec->format).FormatEnum(11);
29 | AVPixelFormat = (AVPixelFormat)codec->format;
30 |
31 | var matrix = (IntPtr)ffmpeg.av_packet_side_data_get(codec->coded_side_data, codec->nb_coded_side_data, AVPacketSideDataType.AV_PKT_DATA_DISPLAYMATRIX);
32 | Rotation = CalculateRotation(matrix);
33 | }
34 |
35 | ///
36 | /// Gets the clockwise rotation angle computed from the display matrix.
37 | ///
38 | public double Rotation { get; }
39 |
40 | ///
41 | /// Gets a value indicating whether the frames in the stream are interlaced.
42 | ///
43 | public bool IsInterlaced { get; }
44 |
45 | ///
46 | /// Gets the video frame dimensions.
47 | ///
48 | public Size FrameSize { get; }
49 |
50 | ///
51 | /// Gets a lowercase string representing the video pixel format.
52 | ///
53 | public string PixelFormat { get; }
54 |
55 | ///
56 | /// Gets the video pixel format.
57 | ///
58 | internal AVPixelFormat AVPixelFormat { get; }
59 |
60 | private static double CalculateRotation(IntPtr displayMatrix)
61 | {
62 | const int matrixLength = 9;
63 |
64 | if (displayMatrix == IntPtr.Zero)
65 | return 0;
66 |
67 | var matrix = new int[matrixLength];
68 | Marshal.Copy(displayMatrix, matrix, 0, matrixLength);
69 |
70 | var scale = new double[2];
71 | scale[0] = (matrix[0] != 0 && matrix[3] != 0) ? CalculateHypotenuse(matrix[0], matrix[3]) : 1;
72 | scale[1] = (matrix[1] != 0 && matrix[4] != 0) ? CalculateHypotenuse(matrix[1], matrix[4]) : 1;
73 |
74 | var rotation = Math.Atan2(matrix[1] / scale[1], matrix[0] / scale[0]) * 180 / Math.PI;
75 | rotation -= 360 * Math.Floor((rotation / 360) + (0.9 / 360));
76 |
77 | return rotation;
78 | }
79 |
80 | private static double CalculateHypotenuse(int a, int b) => Math.Sqrt((a * a) + (b * b));
81 | }
82 | }
83 |
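A small sketch of reading these properties; it assumes the Info property of the first video stream of an opened MediaFile is (or can be cast to) a VideoStreamInfo, since StreamInfo.Create returns that subclass for video streams:

var videoInfo = (VideoStreamInfo)file.Video.Info;
Console.WriteLine(
    $"{videoInfo.CodecName}: {videoInfo.FrameSize.Width}x{videoInfo.FrameSize.Height}, " +
    $"{videoInfo.AvgFrameRate:F2} fps, rotation {videoInfo.Rotation} deg, interlaced: {videoInfo.IsInterlaced}");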
--------------------------------------------------------------------------------
/FFMediaToolkit/Encoding/AudioCodec.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Encoding
2 | {
3 | using FFmpeg.AutoGen;
4 |
5 | ///
6 | /// This enum contains only supported audio encoders.
7 | /// If you want to use a codec not included in this enum, you can cast a raw AVCodecID value to AudioCodec.
8 | ///
9 | public enum AudioCodec
10 | {
11 | ///
12 | /// Default audio codec for the selected container format.
13 | ///
14 | Default = AVCodecID.AV_CODEC_ID_NONE,
15 |
16 | ///
17 | /// AAC (Advanced Audio Coding) audio codec
18 | ///
19 | AAC = AVCodecID.AV_CODEC_ID_AAC,
20 |
21 | ///
22 | /// ATSC A/52A (AC-3) audio codec
23 | ///
24 | AC3 = AVCodecID.AV_CODEC_ID_AC3,
25 |
26 | ///
27 | /// MP3 (MPEG audio layer 3) audio codec
28 | ///
29 | MP3 = AVCodecID.AV_CODEC_ID_MP3,
30 |
31 | ///
32 | /// Windows Media Audio V2 audio codec
33 | ///
34 | WMA = AVCodecID.AV_CODEC_ID_WMAV2,
35 |
36 | ///
37 | /// OGG Vorbis audio codec
38 | ///
39 | Vorbis = AVCodecID.AV_CODEC_ID_VORBIS,
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Encoding/AudioEncoderSettings.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Encoding
2 | {
3 | using System.Collections.Generic;
4 | using FFMediaToolkit.Audio;
5 | using FFmpeg.AutoGen;
6 |
7 | ///
8 | /// Represents an audio encoder configuration.
9 | ///
10 | public class AudioEncoderSettings
11 | {
12 | ///
13 | /// Initializes a new instance of the class with default audio settings values.
14 | ///
15 | /// The sample rate of the stream.
16 | /// The number of channels in the stream.
17 | /// The audio encoder.
18 | public AudioEncoderSettings(int sampleRate, int channels, AudioCodec codec = AudioCodec.Default)
19 | {
20 | SampleRate = sampleRate;
21 | Channels = channels;
22 | Codec = codec;
23 | CodecOptions = new Dictionary<string, string>();
24 | }
25 |
26 | ///
27 | /// Gets or sets the audio stream bitrate (in bits per second). The default value is 128,000 b/s.
28 | ///
29 | public int Bitrate { get; set; } = 128_000;
30 |
31 | ///
32 | /// Gets or sets the audio stream sample rate (samples per second). The default value is 44,100 samples/sec.
33 | ///
34 | public int SampleRate { get; set; } = 44_100;
35 |
36 | ///
37 | /// Gets or sets the number of channels in the audio stream. The default value is 2.
38 | ///
39 | public int Channels { get; set; } = 2;
40 |
41 | ///
42 | /// Gets or sets the number of samples per audio frame. Default is 2205 (1/20th of a second at 44.1kHz).
43 | ///
44 | public int SamplesPerFrame { get; set; } = 2205;
45 |
46 | ///
47 | /// Gets the time base of the audio stream. Always equal to SamplesPerFrame / SampleRate.
48 | ///
49 | public AVRational TimeBase => new AVRational { num = SamplesPerFrame, den = SampleRate };
50 |
51 | ///
52 | /// Gets or sets the sample format to be used by the audio codec. The default value is SignedWord (16-bit integer).
53 | ///
54 | public SampleFormat SampleFormat { get; set; } = SampleFormat.SignedWord;
55 |
56 | ///
57 | /// Gets or sets the dictionary with custom codec options.
58 | ///
59 | public Dictionary<string, string> CodecOptions { get; set; }
60 |
61 | ///
62 | /// Gets or sets the codec for this stream.
63 | /// If set to Default, the encoder will use the default audio codec for the current container.
64 | /// If the CodecName property is set, this value will be ignored.
65 | ///
66 | public AudioCodec Codec { get; set; }
67 |
68 | ///
69 | /// Gets or sets the encoder name. When set, it overrides the Codec property.
70 | ///
71 | public string CodecName { get; set; }
72 | }
73 | }
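An illustrative configuration; the AAC-related values (frame size 1024, planar float samples) are common encoder expectations rather than requirements imposed by this class:

var audioSettings = new AudioEncoderSettings(sampleRate: 44100, channels: 2, codec: AudioCodec.AAC)
{
    Bitrate = 192_000,                     // bits per second
    SamplesPerFrame = 1024,                // typical AAC frame size
    SampleFormat = SampleFormat.SingleP,   // planar float, commonly expected by the AAC encoder
};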
--------------------------------------------------------------------------------
/FFMediaToolkit/Encoding/AudioOutputStream.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Encoding
2 | {
3 | using System;
4 | using FFMediaToolkit.Audio;
5 | using FFMediaToolkit.Common.Internal;
6 | using FFMediaToolkit.Encoding.Internal;
7 | using FFMediaToolkit.Helpers;
8 | using FFmpeg.AutoGen;
9 |
10 | ///
11 | /// Represents an audio encoder stream.
12 | ///
13 | public unsafe class AudioOutputStream : IDisposable
14 | {
15 | private readonly OutputStream<AudioFrame> stream;
16 | private readonly AudioFrame frame;
17 |
18 | private SwrContext* swrContext;
19 |
20 | private bool isDisposed;
21 | private long lastFramePts = -1;
22 |
23 | ///
24 | /// Initializes a new instance of the class.
25 | ///
26 | /// The audio stream.
27 | /// The stream setting.
28 | internal AudioOutputStream(OutputStream<AudioFrame> stream, AudioEncoderSettings config)
29 | {
30 | this.stream = stream;
31 |
32 | AVChannelLayout channelLayout;
33 | ffmpeg.av_channel_layout_default(&channelLayout, config.Channels);
34 | SwrContext* context;
35 | ffmpeg.swr_alloc_set_opts2(
36 | &context,
37 | &channelLayout,
38 | (AVSampleFormat)config.SampleFormat,
39 | config.SampleRate,
40 | &channelLayout,
41 | (AVSampleFormat)SampleFormat.SingleP,
42 | config.SampleRate,
43 | 0,
44 | null).ThrowIfError("Cannot allocate SwrContext");
45 | ffmpeg.swr_init(context);
46 |
47 | swrContext = context;
48 | Configuration = config;
49 | frame = AudioFrame.Create(config.SampleRate, config.Channels, config.SamplesPerFrame, channelLayout, SampleFormat.SingleP, 0, 0);
50 | }
51 |
52 | ///
53 | /// Gets the audio encoding configuration used to create this stream.
54 | ///
55 | public AudioEncoderSettings Configuration { get; }
56 |
57 | ///
58 | /// Gets the current duration of this stream.
59 | ///
60 | public TimeSpan CurrentDuration => lastFramePts.ToTimeSpan(Configuration.TimeBase);
61 |
62 | ///
63 | /// Writes the specified audio data to the stream as the next frame.
64 | ///
65 | /// The audio data to write.
66 | /// (optional) custom PTS value for the frame.
67 | public void AddFrame(AudioData data, long customPtsValue)
68 | {
69 | if (customPtsValue <= lastFramePts)
70 | throw new Exception("Cannot add a frame that occurs chronologically before the most recently written frame!");
71 |
72 | frame.UpdateFromAudioData(data);
73 |
74 | var converted = AudioFrame.Create(
75 | frame.SampleRate,
76 | frame.NumChannels,
77 | frame.NumSamples,
78 | frame.ChannelLayout,
79 | Configuration.SampleFormat,
80 | frame.DecodingTimestamp,
81 | frame.PresentationTimestamp);
82 | converted.PresentationTimestamp = customPtsValue;
83 |
84 | ffmpeg.swr_convert_frame(swrContext, converted.Pointer, frame.Pointer);
85 |
86 | stream.Push(converted);
87 | converted.Dispose();
88 |
89 | lastFramePts = customPtsValue;
90 | }
91 |
92 | ///
93 | /// Writes the specified sample data to the stream as the next frame.
94 | ///
95 | /// The sample data to write.
96 | /// (optional) custom PTS value for the frame.
97 | public void AddFrame(float[][] samples, long customPtsValue)
98 | {
99 | if (customPtsValue <= lastFramePts)
100 | throw new Exception("Cannot add a frame that occurs chronologically before the most recently written frame!");
101 |
102 | frame.UpdateFromSampleData(samples);
103 | frame.PresentationTimestamp = customPtsValue;
104 | stream.Push(frame);
105 |
106 | lastFramePts = customPtsValue;
107 | }
108 |
109 | ///
110 | /// Writes the specified audio data to the stream as the next frame.
111 | ///
112 | /// The audio data to write.
113 | /// Custom timestamp for this frame.
114 | public void AddFrame(AudioData data, TimeSpan customTime) => AddFrame(data, customTime.ToTimestamp(Configuration.TimeBase));
115 |
116 | ///
117 | /// Writes the specified audio data to the stream as the next frame.
118 | ///
119 | /// The audio data to write.
120 | public void AddFrame(AudioData data) => AddFrame(data, lastFramePts + 1);
121 |
122 | ///
123 | /// Writes the specified sample data to the stream as the next frame.
124 | ///
125 | /// The sample data to write.
126 | /// Custom timestamp for this frame.
127 | public void AddFrame(float[][] samples, TimeSpan customTime) => AddFrame(samples, customTime.ToTimestamp(Configuration.TimeBase));
128 |
129 | ///
130 | /// Writes the specified sample data to the stream as the next frame.
131 | ///
132 | /// The sample data to write.
133 | public void AddFrame(float[][] samples) => AddFrame(samples, lastFramePts + 1);
134 |
135 | ///
136 | public void Dispose()
137 | {
138 | if (isDisposed)
139 | {
140 | return;
141 | }
142 |
143 | stream.Dispose();
144 | frame.Dispose();
145 |
146 | fixed (SwrContext** ptr = &swrContext)
147 | {
148 | ffmpeg.swr_free(ptr);
149 | }
150 |
151 | isDisposed = true;
152 | }
153 | }
154 | }
155 |
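A hedged sketch of feeding samples into this stream. The audioStream instance is assumed to come from a MediaOutput built elsewhere (MediaBuilder/MediaOutput are not part of this excerpt), and GetNextChunk() is a hypothetical source returning float[channel][sample] arrays with SamplesPerFrame samples per channel:

float[][] chunk = GetNextChunk();                                  // hypothetical sample source
audioStream.AddFrame(chunk);                                       // PTS = lastFramePts + 1, i.e. one frame later
audioStream.AddFrame(GetNextChunk(), TimeSpan.FromSeconds(0.10));  // or supply an explicit timestamp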
--------------------------------------------------------------------------------
/FFMediaToolkit/Encoding/ContainerFormat.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Encoding
2 | {
3 | using System.ComponentModel;
4 |
5 | ///
6 | /// Video container formats supported by FFMediaToolkit.
7 | ///
8 | public enum ContainerFormat
9 | {
10 | ///
11 | /// The 3GPP container format (.3gp)
12 | ///
13 | [Description("3gp")]
14 | Container3GP,
15 |
16 | ///
17 | /// The 3GPP2 container format (.3g2)
18 | ///
19 | [Description("3g2")]
20 | Container3GP2,
21 |
22 | ///
23 | /// The Microsoft Advanced Systems Formats container format (.asf)
24 | /// Use this container when encoding a .wmv (Windows Media) video file.
25 | ///
26 | [Description("asf")]
27 | ASF,
28 |
29 | ///
30 | /// The Audio Video Interleave container format (.avi)
31 | ///
32 | [Description("avi")]
33 | AVI,
34 |
35 | ///
36 | /// The Flash Video container format (.flv)
37 | ///
38 | [Description("flv")]
39 | FLV,
40 |
41 | ///
42 | /// The Matroska Multimedia Container format (.mkv)
43 | ///
44 | [Description("mkv")]
45 | MKV,
46 |
47 | ///
48 | /// The QuickTime container format (.mov)
49 | ///
50 | [Description("mov")]
51 | MOV,
52 |
53 | ///
54 | /// The MPEG-4 container format (.mp4)
55 | ///
56 | [Description("mp4")]
57 | MP4,
58 |
59 | ///
60 | /// The Ogg container format (.ogv extension for video files)
61 | ///
62 | [Description("ogv")]
63 | Ogg,
64 |
65 | ///
66 | /// The WebM container format (.webm)
67 | ///
68 | [Description("webm")]
69 | WebM,
70 | }
71 | }
72 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Encoding/EncoderPreset.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Encoding
2 | {
3 | using System.ComponentModel;
4 |
5 | ///
6 | /// The presets for H264, H265 (HEVC), and H266 (VVC) video encoders.
7 | /// Fast presets = faster encoding, worse compression.
8 | /// Slow presets = longer encoding, better compression.
9 | ///
10 | public enum EncoderPreset
11 | {
12 | ///
13 | /// Port of 'ultrafast'
14 | ///
15 | [Description("ultrafast")]
16 | UltraFast = 0,
17 |
18 | ///
19 | /// Port of 'superfast'
20 | ///
21 | [Description("superfast")]
22 | SuperFast = 1,
23 |
24 | ///
25 | /// Port of 'veryfast'
26 | ///
27 | [Description("veryfast")]
28 | VeryFast = 2,
29 |
30 | ///
31 | /// Port of 'faster'
32 | ///
33 | [Description("faster")]
34 | Faster = 3,
35 |
36 | ///
37 | /// Port of 'fast'
38 | ///
39 | [Description("fast")]
40 | Fast = 4,
41 |
42 | ///
43 | /// The default preset. Port of 'medium'
44 | ///
45 | [Description("medium")]
46 | Medium = 5,
47 |
48 | ///
49 | /// Port of 'slow'
50 | ///
51 | [Description("slow")]
52 | Slow = 6,
53 |
54 | ///
55 | /// Port of 'slower'
56 | ///
57 | [Description("slower")]
58 | Slower = 7,
59 |
60 | ///
61 | /// Port of 'veryslow'
62 | ///
63 | [Description("veryslow")]
64 | VerySlow = 8,
65 | }
66 | }
67 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Encoding/Internal/OutputContainer.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Encoding.Internal
2 | {
3 | using System;
4 | using System.Collections.Generic;
5 | using FFMediaToolkit.Common;
6 | using FFMediaToolkit.Common.Internal;
7 | using FFMediaToolkit.Helpers;
8 | using FFmpeg.AutoGen;
9 |
10 | ///
11 | /// Represents the multimedia file container used for encoding.
12 | ///
13 | internal unsafe class OutputContainer : Wrapper<AVFormatContext>
14 | {
15 | private OutputContainer(AVFormatContext* formatContext)
16 | : base(formatContext)
17 | {
18 | Video = new List<(OutputStream<VideoFrame>, VideoEncoderSettings)>();
19 | Audio = new List<(OutputStream<AudioFrame>, AudioEncoderSettings)>();
20 | }
21 |
22 | ///
23 | /// Gets the video streams.
24 | ///
25 | public List<(OutputStream<VideoFrame> stream, VideoEncoderSettings config)> Video { get; }
26 |
27 | ///
28 | /// Gets the audio streams.
29 | ///
30 | public List<(OutputStream<AudioFrame> stream, AudioEncoderSettings config)> Audio { get; }
31 |
32 | ///
33 | /// Gets a value indicating whether the file is created.
34 | ///
35 | public bool IsFileCreated { get; private set; }
36 |
37 | ///
38 | /// Gets a dictionary containing format options.
39 | ///
40 | internal FFDictionary ContainerOptions { get; private set; } = new FFDictionary();
41 |
42 | ///
43 | /// Creates an empty FFmpeg format container for encoding.
44 | ///
45 | /// An output file extension. It is used only to guess the container format.
46 | /// A new instance of the OutputContainer class.
47 | /// Before you write frames to the container, you must call the CreateFile method to create an output file.
48 | public static OutputContainer Create(string extension)
49 | {
50 | FFmpegLoader.LoadFFmpeg();
51 |
52 | var format = ffmpeg.av_guess_format(null, "x." + extension, null);
53 |
54 | if (format == null)
55 | throw new NotSupportedException($"Cannot find a container format for the \"{extension}\" file extension.");
56 |
57 | AVFormatContext* formatContext = null;
58 | ffmpeg.avformat_alloc_output_context2(&formatContext, format, null, null).ThrowIfError("Cannot allocate output format context");
59 |
60 | return new OutputContainer(formatContext);
61 | }
62 |
63 | ///
64 | /// Applies a set of metadata fields to the output file.
65 | ///
66 | /// The metadata object to set.
67 | public void SetMetadata(ContainerMetadata metadata)
68 | {
69 | foreach (var item in metadata.Metadata)
70 | {
71 | ffmpeg.av_dict_set(&Pointer->metadata, item.Key, item.Value, 0);
72 | }
73 | }
74 |
75 | ///
76 | /// Adds a new video stream to the container. Usable only in encoding, before locking file.
77 | ///
78 | /// The stream configuration.
79 | public void AddVideoStream(VideoEncoderSettings config)
80 | {
81 | if (IsFileCreated)
82 | {
83 | throw new InvalidOperationException("The stream must be added before creating a file.");
84 | }
85 |
86 | Video.Add((OutputStreamFactory.CreateVideo(this, config), config));
87 | }
88 |
89 | ///
90 | /// Adds a new audio stream to the container. Usable only in encoding, before locking file.
91 | ///
92 | /// The stream configuration.
93 | public void AddAudioStream(AudioEncoderSettings config)
94 | {
95 | if (IsFileCreated)
96 | {
97 | throw new InvalidOperationException("The stream must be added before creating a file.");
98 | }
99 |
100 | Audio.Add((OutputStreamFactory.CreateAudio(this, config), config));
101 | }
102 |
103 | ///
104 | /// Creates a media file for this container and writes format header into it.
105 | ///
106 | /// A path to create the file.
107 | public void CreateFile(string path)
108 | {
109 | if (IsFileCreated)
110 | {
111 | return;
112 | }
113 |
114 | if (Video == null)
115 | {
116 | throw new InvalidOperationException("Cannot create empty media file. You have to add video stream before locking the file");
117 | }
118 |
119 | var ptr = ContainerOptions.Pointer;
120 |
121 | ffmpeg.avio_open(&Pointer->pb, path, ffmpeg.AVIO_FLAG_WRITE).ThrowIfError("Cannot create the output file.");
122 | ffmpeg.avformat_write_header(Pointer, &ptr);
123 |
124 | IsFileCreated = true;
125 | }
126 |
127 | ///
128 | /// Writes the specified packet to the container using the av_interleaved_write_frame method.
129 | ///
130 | /// The media packet to write.
131 | public void WritePacket(MediaPacket packet)
132 | {
133 | if (!IsFileCreated)
134 | {
135 | throw new InvalidOperationException("The file must be opened before writing a packet. Use the OutputContainer.CreateFile() method.");
136 | }
137 |
138 | ffmpeg.av_interleaved_write_frame(Pointer, packet);
139 | }
140 |
141 | ///
142 | protected override void OnDisposing()
143 | {
144 | foreach (var output in Video)
145 | {
146 | output.stream.Dispose();
147 | }
148 |
149 | foreach (var output in Audio)
150 | {
151 | output.stream.Dispose();
152 | }
153 |
154 | if (IsFileCreated)
155 | {
156 | ffmpeg.av_write_trailer(Pointer);
157 | ffmpeg.avio_close(Pointer->pb);
158 | }
159 |
160 | ffmpeg.avformat_free_context(Pointer);
161 | }
162 | }
163 | }
164 |
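A sketch of the internal encoding workflow implied by the methods above (internal API, shown only to clarify call order; videoSettings is a VideoEncoderSettings instance):

var container = OutputContainer.Create("mp4");   // guesses the muxer from the extension
container.AddVideoStream(videoSettings);         // streams must be added before the file is created
container.CreateFile(@"C:\output.mp4");          // opens the AVIO context and writes the format header
// ... output streams encode frames and call WritePacket(packet) ...
container.Dispose();                             // writes the trailer and frees the format context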
--------------------------------------------------------------------------------
/FFMediaToolkit/Encoding/Internal/OutputStreamFactory.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Encoding.Internal
2 | {
3 | using System;
4 | using FFMediaToolkit.Common;
5 | using FFMediaToolkit.Common.Internal;
6 | using FFMediaToolkit.Helpers;
7 | using FFmpeg.AutoGen;
8 |
9 | ///
10 | /// Contains methods for creating media streams.
11 | ///
12 | internal static unsafe class OutputStreamFactory
13 | {
14 | ///
15 | /// Creates a new video stream for the specified OutputContainer.
16 | ///
17 | /// The media container.
18 | /// The stream settings.
19 | /// The new video stream.
20 | public static OutputStream<VideoFrame> CreateVideo(OutputContainer container, VideoEncoderSettings config)
21 | {
22 | AVCodec* codec;
23 | if (!string.IsNullOrEmpty(config.CodecName))
24 | {
25 | codec = ffmpeg.avcodec_find_encoder_by_name(config.CodecName);
26 | }
27 | else
28 | {
29 | var codecId = config.Codec == VideoCodec.Default ? container.Pointer->oformat->video_codec : (AVCodecID)config.Codec;
30 | if (codecId == AVCodecID.AV_CODEC_ID_NONE)
31 | throw new InvalidOperationException("The media container doesn't support video!");
32 |
33 | codec = ffmpeg.avcodec_find_encoder(codecId);
34 | }
35 |
36 | if (codec == null)
37 | throw new InvalidOperationException($"Cannot find the requested video codec");
38 |
39 | if (codec->type != AVMediaType.AVMEDIA_TYPE_VIDEO)
40 | throw new InvalidOperationException($"The {codec->id} encoder doesn't support video!");
41 |
42 | var stream = ffmpeg.avformat_new_stream(container.Pointer, codec);
43 | if (stream == null)
44 | throw new InvalidOperationException("Cannot allocate AVStream");
45 |
46 | stream->time_base = config.TimeBase;
47 | stream->avg_frame_rate = config.FramerateRational;
48 |
49 | var codecContext = ffmpeg.avcodec_alloc_context3(codec);
50 | if (codecContext == null)
51 | throw new InvalidOperationException("Cannot allocate AVCodecContext");
52 |
53 | codecContext->codec_type = AVMediaType.AVMEDIA_TYPE_VIDEO;
54 | codecContext->codec_id = codec->id;
55 | codecContext->width = config.VideoWidth;
56 | codecContext->height = config.VideoHeight;
57 | codecContext->pix_fmt = (AVPixelFormat)config.VideoFormat;
58 | codecContext->bit_rate = config.Bitrate;
59 | codecContext->framerate = config.FramerateRational;
60 | codecContext->time_base = stream->time_base;
61 | codecContext->gop_size = config.KeyframeRate;
62 |
63 | var dict = new FFDictionary(config.CodecOptions);
64 |
65 | if (config.CRF.HasValue && config.Codec.IsMatch(VideoCodec.H264, VideoCodec.H265, VideoCodec.VP9, VideoCodec.VP8))
66 | {
67 | dict["crf"] = config.CRF.Value.ToString();
68 | }
69 |
70 | if (config.QP.HasValue && config.Codec.IsMatch(VideoCodec.H266))
71 | {
72 | dict["qp"] = config.QP.Value.ToString();
73 | }
74 |
75 | if (config.Codec.IsMatch(VideoCodec.H264, VideoCodec.H265, VideoCodec.H266))
76 | {
77 | dict["preset"] = config.EncoderPreset.GetDescription();
78 | }
79 |
80 | if ((container.Pointer->oformat->flags & ffmpeg.AVFMT_GLOBALHEADER) != 0)
81 | {
82 | codecContext->flags |= ffmpeg.AV_CODEC_FLAG_GLOBAL_HEADER;
83 | }
84 |
85 | var ptr = dict.Pointer;
86 | ffmpeg.avcodec_open2(codecContext, codec, &ptr).ThrowIfError("Failed to open video encoder.");
87 | dict.Update(ptr);
88 |
89 | ffmpeg.avcodec_parameters_from_context(stream->codecpar, codecContext).ThrowIfError("Cannot copy encoder parameters to output stream");
90 |
91 | return new OutputStream(stream, codecContext, container);
92 | }
93 |
94 | ///
95 | /// Creates a new audio stream for the specified .
96 | ///
97 | /// The media container.
98 | /// The stream settings.
99 | /// The new audio stream.
100 | public static OutputStream CreateAudio(OutputContainer container, AudioEncoderSettings config)
101 | {
102 | AVCodec* codec;
103 | if (!string.IsNullOrEmpty(config.CodecName))
104 | {
105 | codec = ffmpeg.avcodec_find_encoder_by_name(config.CodecName);
106 | }
107 | else
108 | {
109 | var codecId = config.Codec == AudioCodec.Default ? container.Pointer->oformat->audio_codec : (AVCodecID)config.Codec;
110 | if (codecId == AVCodecID.AV_CODEC_ID_NONE)
111 | throw new InvalidOperationException("The media container doesn't support audio!");
112 |
113 | codec = ffmpeg.avcodec_find_encoder(codecId);
114 | }
115 |
116 | if (codec == null)
117 |                 throw new InvalidOperationException("Cannot find the requested audio codec");
118 |
119 | if (codec->type != AVMediaType.AVMEDIA_TYPE_AUDIO)
120 | throw new InvalidOperationException($"The {codec->id} encoder doesn't support audio!");
121 |
122 | var stream = ffmpeg.avformat_new_stream(container.Pointer, codec);
123 | if (stream == null)
124 | throw new InvalidOperationException("Cannot allocate AVStream");
125 |
126 | var codecContext = ffmpeg.avcodec_alloc_context3(codec);
127 | if (codecContext == null)
128 | throw new InvalidOperationException("Cannot allocate AVCodecContext");
129 |
130 | stream->codecpar->codec_id = codec->id;
131 | stream->codecpar->codec_type = AVMediaType.AVMEDIA_TYPE_AUDIO;
132 | stream->codecpar->sample_rate = config.SampleRate;
133 | stream->codecpar->frame_size = config.SamplesPerFrame;
134 | stream->codecpar->format = (int)config.SampleFormat;
135 |
136 | ffmpeg.av_channel_layout_default(&stream->codecpar->ch_layout, config.Channels);
137 | stream->codecpar->bit_rate = config.Bitrate;
138 |
139 | ffmpeg.avcodec_parameters_to_context(codecContext, stream->codecpar).ThrowIfError("Cannot copy stream parameters to encoder");
140 | codecContext->time_base = config.TimeBase;
141 |
142 | if ((container.Pointer->oformat->flags & ffmpeg.AVFMT_GLOBALHEADER) != 0)
143 | {
144 | codecContext->flags |= ffmpeg.AV_CODEC_FLAG_GLOBAL_HEADER;
145 | }
146 |
147 | var dict = new FFDictionary(config.CodecOptions);
148 | var ptr = dict.Pointer;
149 |
150 | ffmpeg.avcodec_open2(codecContext, codec, &ptr).ThrowIfError("Failed to open audio encoder.");
151 |
152 | dict.Update(ptr);
153 |
154 | ffmpeg.avcodec_parameters_from_context(stream->codecpar, codecContext).ThrowIfError("Cannot copy encoder parameters to output stream");
155 |
156 | return new OutputStream(stream, codecContext, container);
157 | }
158 | }
159 | }
160 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Encoding/Internal/OutputStream{TFrame}.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Encoding.Internal
2 | {
3 | using System;
4 | using System.Runtime.CompilerServices;
5 | using FFMediaToolkit.Common;
6 | using FFMediaToolkit.Common.Internal;
7 | using FFMediaToolkit.Helpers;
8 | using FFmpeg.AutoGen;
9 |
10 | ///
11 |     /// Represents an output multimedia stream.
12 | ///
13 | /// The type of frames in the stream.
14 | internal unsafe class OutputStream : Wrapper
15 | where TFrame : MediaFrame
16 | {
17 | private readonly MediaPacket packet;
18 | private readonly AVCodecContext* codecContext;
19 |
20 | ///
21 | /// Initializes a new instance of the class.
22 | ///
23 | /// The multimedia stream.
24 | /// Codec context.
25 | /// The container that owns the stream.
26 | public OutputStream(AVStream* stream, AVCodecContext* codec, OutputContainer owner)
27 | : base(stream)
28 | {
29 | OwnerFile = owner;
30 | codecContext = codec;
31 | packet = MediaPacket.AllocateEmpty();
32 | }
33 |
34 | ///
35 | /// Gets the media container that owns this stream.
36 | ///
37 | public OutputContainer OwnerFile { get; }
38 |
39 | ///
40 | /// Gets the stream index.
41 | ///
42 | public int Index => Pointer->index;
43 |
44 | ///
45 | /// Gets the stream time base.
46 | ///
47 | public AVRational TimeBase => Pointer->time_base;
48 |
49 | ///
50 | /// Writes the specified frame to this stream.
51 | ///
52 | /// The media frame.
53 | public void Push(TFrame frame)
54 | {
55 | ffmpeg.avcodec_send_frame(codecContext, frame.Pointer)
56 | .ThrowIfError("Cannot send a frame to the encoder.");
57 |             // avcodec_receive_packet returns 0 when a complete packet is ready; a negative value (e.g. EAGAIN) means the encoder needs more frames first.
58 | if (ffmpeg.avcodec_receive_packet(codecContext, packet) == 0)
59 | {
60 | packet.RescaleTimestamp(codecContext->time_base, TimeBase);
61 | packet.StreamIndex = Index;
62 |
63 | OwnerFile.WritePacket(packet);
64 | }
65 |
66 | packet.Wipe();
67 | }
68 |
69 | ///
70 | protected override void OnDisposing()
71 | {
72 | FlushEncoder();
73 | packet.Dispose();
74 |
75 | fixed (AVCodecContext** codecContextRef = &codecContext)
76 | {
77 | ffmpeg.avcodec_free_context(codecContextRef);
78 | }
79 | }
80 |
81 | private void FlushEncoder()
82 | {
83 |             ffmpeg.avcodec_send_frame(codecContext, null); // A null frame puts the encoder into draining mode so the remaining buffered packets can be received.
84 | while (true)
85 | {
86 | if (ffmpeg.avcodec_receive_packet(codecContext, packet) == 0)
87 | {
88 | packet.RescaleTimestamp(codecContext->time_base, TimeBase);
89 | packet.StreamIndex = Index;
90 | OwnerFile.WritePacket(packet);
91 | }
92 | else
93 | {
94 | break;
95 | }
96 |
97 | packet.Wipe();
98 | }
99 | }
100 | }
101 | }
102 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Encoding/MediaBuilder.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Encoding
2 | {
3 | using System;
4 | using System.IO;
5 | using FFMediaToolkit.Common;
6 | using FFMediaToolkit.Encoding.Internal;
7 | using FFMediaToolkit.Helpers;
8 |
9 | ///
10 | /// Represents a multimedia file creator.
11 | ///
12 | public class MediaBuilder
13 | {
14 | private readonly OutputContainer container;
15 | private readonly string outputPath;
16 |
17 | private MediaBuilder(string path, ContainerFormat? format)
18 | {
19 | if (!Path.IsPathRooted(path))
20 | throw new ArgumentException($"The path \"{path}\" is not valid.");
21 |
22 | if (!Path.HasExtension(path) && format == null)
23 | throw new ArgumentException("The file path has no extension.");
24 |
25 | container = OutputContainer.Create(format?.GetDescription() ?? Path.GetExtension(path));
26 | outputPath = path;
27 | }
28 |
29 | ///
30 | /// Sets up a multimedia container with the specified .
31 | ///
32 | /// A path to create the output file.
33 | /// A container format.
34 | /// The instance.
35 | public static MediaBuilder CreateContainer(string path, ContainerFormat format) => new MediaBuilder(path, format);
36 |
37 | ///
38 | /// Sets up a multimedia container with the format guessed from the file extension.
39 | ///
40 | /// A path to create the output file.
41 | /// The instance.
42 | public static MediaBuilder CreateContainer(string path) => new MediaBuilder(path, null);
43 |
44 | ///
45 | /// Applies a custom container option.
46 | ///
47 | /// The option key.
48 | /// The value to set.
49 | /// The instance.
50 | public MediaBuilder UseFormatOption(string key, string value)
51 | {
52 | container.ContainerOptions[key] = value;
53 | return this;
54 | }
55 |
56 | ///
57 | /// Applies a set of metadata fields to the output file.
58 | ///
59 | /// The metadata object to set.
60 | /// The instance.
61 | public MediaBuilder UseMetadata(ContainerMetadata metadata)
62 | {
63 | container.SetMetadata(metadata);
64 | return this;
65 | }
66 |
67 | ///
68 | /// Adds a new video stream to the file.
69 | ///
70 | /// The video stream settings.
71 | /// This object.
72 | public MediaBuilder WithVideo(VideoEncoderSettings settings)
73 | {
74 | if (FFmpegLoader.IsFFmpegGplLicensed == false && (settings.Codec == VideoCodec.H264 || settings.Codec == VideoCodec.H265))
75 | {
76 | throw new NotSupportedException("The LGPL-licensed FFmpeg build does not contain libx264 and libx265 codecs.");
77 | }
78 |
79 | container.AddVideoStream(settings);
80 | return this;
81 | }
82 |
83 | ///
84 | /// Adds a new audio stream to the file.
85 | ///
86 |         /// The audio stream settings.
87 | /// This object.
88 | public MediaBuilder WithAudio(AudioEncoderSettings settings)
89 | {
90 | container.AddAudioStream(settings);
91 | return this;
92 | }
93 |
94 | ///
95 |         /// Creates the multimedia file with the configured streams.
96 | ///
97 | /// A new .
98 | public MediaOutput Create()
99 | {
100 | container.CreateFile(outputPath);
101 |
102 | return new MediaOutput(container);
103 | }
104 | }
105 | }
106 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Encoding/MediaOutput.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Encoding
2 | {
3 | using System;
4 | using System.Linq;
5 | using FFMediaToolkit.Encoding.Internal;
6 |
7 | ///
8 | /// Represents a multimedia output file.
9 | ///
10 | public class MediaOutput : IDisposable
11 | {
12 | private readonly OutputContainer container;
13 | private bool isDisposed;
14 |
15 | ///
16 | /// Initializes a new instance of the class.
17 | ///
18 | /// The object.
19 | internal MediaOutput(OutputContainer mediaContainer)
20 | {
21 | container = mediaContainer;
22 |
23 | VideoStreams = container.Video
24 | .Select(o => new VideoOutputStream(o.stream, o.config))
25 | .ToArray();
26 |
27 | AudioStreams = container.Audio
28 | .Select(o => new AudioOutputStream(o.stream, o.config))
29 | .ToArray();
30 | }
31 |
32 | ///
33 | /// Finalizes an instance of the class.
34 | ///
35 | ~MediaOutput() => Dispose();
36 |
37 | ///
38 | /// Gets the video streams in the media file.
39 | ///
40 | public VideoOutputStream[] VideoStreams { get; }
41 |
42 | ///
43 | /// Gets the audio streams in the media file.
44 | ///
45 | public AudioOutputStream[] AudioStreams { get; }
46 |
47 | ///
48 | /// Gets the first video stream in the media file.
49 | ///
50 | public VideoOutputStream Video => VideoStreams.FirstOrDefault();
51 |
52 | ///
53 | /// Gets the first audio stream in the media file.
54 | ///
55 | public AudioOutputStream Audio => AudioStreams.FirstOrDefault();
56 |
57 | ///
58 | public void Dispose()
59 | {
60 | if (isDisposed)
61 | return;
62 |
63 | container.Dispose();
64 |
65 | isDisposed = true;
66 | }
67 | }
68 | }
69 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Encoding/VideoCodec.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Encoding
2 | {
3 | using FFmpeg.AutoGen;
4 |
5 | ///
6 | /// This enum contains only supported video encoders.
7 |     /// If you want to use a codec not included in this enum, you can cast an AVCodecID value to it.
8 | ///
9 | public enum VideoCodec
10 | {
11 | ///
12 | /// Default video codec for the selected container format.
13 | ///
14 | Default = AVCodecID.AV_CODEC_ID_NONE,
15 |
16 | ///
17 | /// H.263 codec
18 | ///
19 | H263 = AVCodecID.AV_CODEC_ID_H263,
20 |
21 | ///
22 | /// H.263-I codec
23 | ///
24 | H263I = AVCodecID.AV_CODEC_ID_H263I,
25 |
26 | ///
27 | /// H.263-P codec
28 | ///
29 | H263P = AVCodecID.AV_CODEC_ID_H263P,
30 |
31 | ///
32 | /// Advanced Video Coding (AVC) - H.264 codec
33 | ///
34 | H264 = AVCodecID.AV_CODEC_ID_H264,
35 |
36 | ///
37 | /// High Efficiency Video Coding (HEVC) - H.265 codec
38 | ///
39 | H265 = AVCodecID.AV_CODEC_ID_HEVC,
40 |
41 | ///
42 |         /// Versatile Video Coding (VVC) - H.266 codec
43 | ///
44 | H266 = AVCodecID.AV_CODEC_ID_VVC,
45 |
46 | ///
47 | /// Microsoft Windows Media Video 9 (WMV3)
48 | ///
49 | WMV = AVCodecID.AV_CODEC_ID_WMV3,
50 |
51 | ///
52 | /// MPEG-1 video codec.
53 | ///
54 | MPEG = AVCodecID.AV_CODEC_ID_MPEG1VIDEO,
55 |
56 | ///
57 | /// MPEG-2 (H.262) video codec.
58 | ///
59 | MPEG2 = AVCodecID.AV_CODEC_ID_MPEG2VIDEO,
60 |
61 | ///
62 | /// MPEG-4 Part 2 video codec.
63 | ///
64 | MPEG4 = AVCodecID.AV_CODEC_ID_MPEG4,
65 |
66 | ///
67 | /// VP8 codec.
68 | ///
69 | VP8 = AVCodecID.AV_CODEC_ID_VP8,
70 |
71 | ///
72 | /// VP9 codec.
73 | ///
74 | VP9 = AVCodecID.AV_CODEC_ID_VP9,
75 |
76 | ///
77 | /// Theora codec.
78 | ///
79 | Theora = AVCodecID.AV_CODEC_ID_THEORA,
80 |
81 | ///
82 | /// Dirac codec.
83 | ///
84 | Dirac = AVCodecID.AV_CODEC_ID_DIRAC,
85 |
86 | ///
87 | /// Motion JPEG video codec.
88 | ///
89 | MJPEG = AVCodecID.AV_CODEC_ID_MJPEG,
90 |
91 | ///
92 | /// AV1 codec.
93 | ///
94 | AV1 = AVCodecID.AV_CODEC_ID_AV1,
95 |
96 | ///
97 | /// DV codec.
98 | ///
99 | DV = AVCodecID.AV_CODEC_ID_DVVIDEO,
100 |
101 | ///
102 | /// Cinepak codec.
103 | ///
104 | Cinepak = AVCodecID.AV_CODEC_ID_CINEPAK,
105 | }
106 | }
107 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Encoding/VideoEncoderSettings.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Encoding
2 | {
3 | using System.Collections.Generic;
4 |
5 | using FFMediaToolkit.Graphics;
6 | using FFmpeg.AutoGen;
7 |
8 | ///
9 | /// Represents a video encoder configuration.
10 | ///
11 | public class VideoEncoderSettings
12 | {
13 | ///
14 | /// Initializes a new instance of the class with default video settings values.
15 | ///
16 | /// The video frame width.
17 | /// The video frame height.
18 | /// The video frames per seconds (fps) value.
19 | /// The video encoder.
20 | public VideoEncoderSettings(int width, int height, int framerate = 30, VideoCodec codec = VideoCodec.Default)
21 | {
22 | VideoWidth = width;
23 | VideoHeight = height;
24 | Framerate = framerate;
25 | Codec = codec;
26 | CodecOptions = new Dictionary();
27 | }
28 |
29 | ///
30 |         /// Gets or sets the video stream bitrate (bits per second). The default value is 5,000,000 b/s.
31 | /// If CRF (for H.264/H.265) is set, this value will be ignored.
32 | ///
33 | public int Bitrate { get; set; } = 5_000_000;
34 |
35 | ///
36 | /// Gets or sets the GoP value. The default value is 12.
37 | ///
38 | public int KeyframeRate { get; set; } = 12;
39 |
40 | ///
41 | /// Gets or sets the video frame width.
42 | ///
43 | public int VideoWidth { get; set; }
44 |
45 | ///
46 | /// Gets or sets the video frame height.
47 | ///
48 | public int VideoHeight { get; set; }
49 |
50 | ///
51 | /// Gets or sets the output video pixel format. The default value is YUV420p.
52 | /// Added frames will be automatically converted to this format.
53 | ///
54 | public ImagePixelFormat VideoFormat { get; set; } = ImagePixelFormat.Yuv420;
55 |
56 | ///
57 | /// Gets or sets video frame rate (FPS) value. The default value is 30 frames/s.
58 | ///
59 | public int Framerate
60 | {
61 | get => FramerateRational.num / FramerateRational.den;
62 | set => FramerateRational = new AVRational { num = value, den = 1 };
63 | }
64 |
65 | ///
66 |         /// Gets or sets the video frame rate as an FFmpeg value. Optional. Overwrites the property.
67 | ///
68 | public AVRational FramerateRational { get; set; }
69 |
70 | ///
71 |         /// Gets the calculated time base for the video stream. Value is always equal to the reciprocal of the frame rate.
72 | ///
73 | public AVRational TimeBase => new AVRational { num = FramerateRational.den, den = FramerateRational.num };
74 |
75 | ///
76 |         /// Gets or sets the Constant Rate Factor. Supported only by the H.264, H.265, VP8 and VP9 codecs.
77 | ///
78 | public int? CRF { get; set; }
79 |
80 | ///
81 | /// Gets or sets the Quantization Parameter. It supports only the H.266 codec.
82 | ///
83 | public int? QP { get; set; }
84 |
85 | ///
86 | /// Gets or sets the encoder preset. It supports only H.264, H.265, and H.266 codecs.
87 | ///
88 | public EncoderPreset EncoderPreset { get; set; }
89 |
90 | ///
91 | /// Gets or sets the dictionary with custom codec options.
92 | ///
93 | public Dictionary CodecOptions { get; set; }
94 |
95 | ///
96 | /// Gets or sets the codec for this stream.
97 |         /// If set to , the encoder will use the default video codec for the current container.
98 | /// If property is set, this value will be ignored.
99 | ///
100 | public VideoCodec Codec { get; set; }
101 |
102 | ///
103 | /// Gets or sets the encoder name. Overwrites property.
104 | ///
105 | public string CodecName { get; set; }
106 | }
107 | }
108 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Encoding/VideoOutputStream.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Encoding
2 | {
3 | using System;
4 | using System.Drawing;
5 | using FFMediaToolkit.Common.Internal;
6 | using FFMediaToolkit.Encoding.Internal;
7 | using FFMediaToolkit.Graphics;
8 | using FFMediaToolkit.Helpers;
9 | using FFmpeg.AutoGen;
10 |
11 | ///
12 | /// Represents a video encoder stream.
13 | ///
14 | public class VideoOutputStream : IDisposable
15 | {
16 | private readonly OutputStream stream;
17 | private readonly VideoFrame encodedFrame;
18 | private readonly ImageConverter converter;
19 |
20 | private bool isDisposed;
21 | private long lastFramePts = -1;
22 |
23 | ///
24 | /// Initializes a new instance of the class.
25 | ///
26 | /// The video stream.
27 | /// The stream setting.
28 | internal VideoOutputStream(OutputStream stream, VideoEncoderSettings config)
29 | {
30 | this.stream = stream;
31 | Configuration = config;
32 |
33 | var frameSize = new Size(config.VideoWidth, config.VideoHeight);
34 | encodedFrame = VideoFrame.Create(frameSize, (AVPixelFormat)config.VideoFormat);
35 | converter = new ImageConverter(frameSize, (AVPixelFormat)config.VideoFormat);
36 | }
37 |
38 | ///
39 | /// Gets the video encoding configuration used to create this stream.
40 | ///
41 | public VideoEncoderSettings Configuration { get; }
42 |
43 | ///
44 | /// Gets the current duration of this stream.
45 | ///
46 | public TimeSpan CurrentDuration => lastFramePts.ToTimeSpan(Configuration.TimeBase);
47 |
48 | ///
49 | /// Writes the specified bitmap to the video stream as the next frame.
50 | ///
51 | /// The bitmap to write.
52 |         /// Custom PTS value for the frame.
53 | public void AddFrame(ImageData frame, long customPtsValue)
54 | {
55 | if (customPtsValue <= lastFramePts)
56 | throw new Exception("Cannot add a frame that occurs chronologically before the most recently written frame!");
57 |
58 | encodedFrame.UpdateFromBitmap(frame, converter);
59 | encodedFrame.PresentationTimestamp = customPtsValue;
60 | stream.Push(encodedFrame);
61 |
62 | lastFramePts = customPtsValue;
63 | }
64 |
65 | ///
66 | /// Writes the specified bitmap to the video stream as the next frame.
67 | ///
68 | /// The bitmap to write.
69 | /// Custom timestamp for this frame.
70 | public void AddFrame(ImageData frame, TimeSpan customTime) => AddFrame(frame, customTime.ToTimestamp(Configuration.TimeBase));
71 |
72 | ///
73 | /// Writes the specified bitmap to the video stream as the next frame.
74 | ///
75 | /// The bitmap to write.
76 | public void AddFrame(ImageData frame) => AddFrame(frame, lastFramePts + 1);
77 |
78 | ///
79 | public void Dispose()
80 | {
81 | if (isDisposed)
82 | {
83 | return;
84 | }
85 |
86 | stream.Dispose();
87 | encodedFrame.Dispose();
88 | converter.Dispose();
89 |
90 | isDisposed = true;
91 | }
92 | }
93 | }
94 |
--------------------------------------------------------------------------------
/FFMediaToolkit/FFMediaToolkit.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | netstandard2.0;netstandard2.1
5 | true
6 | disable
7 | true
8 | Radosław Kmiotek
9 | radek-k
10 | Copyright (c) 2019-2025 Radosław Kmiotek
11 | Cross-platform audio/video processing library based on FFmpeg native libraries. Supports audio/video frames extraction (fast access to any frame by timestamp), reading file metadata and encoding media files from bitmap images and audio data.
12 | ffmpeg;video;audio;encoder;encoding;decoder;decoding;h264;mp4;c#;netstandard;netcore;frame-extraction
13 | https://github.com/radek-k/FFMediaToolkit
14 | https://github.com/radek-k/FFMediaToolkit
15 |
16 | $(AllowedOutputExtensionsInPackageBuildOutputFolder);.pdb
17 | true
18 | true
19 | MIT
20 | true
21 | ..\FFMediaToolkit.snk
22 | true
23 | snupkg
24 |
25 |
26 |
27 | v
28 | normal
29 | 4.3
30 |
31 |
32 |
33 | FFMediaToolkit.ruleset
34 |
35 |
36 |
37 | FFMediaToolkit.ruleset
38 |
39 |
40 |
41 |
42 |
43 | all
44 | runtime; build; native; contentfiles; analyzers; buildtransitive
45 |
46 |
47 | all
48 | runtime; build; native; contentfiles; analyzers; buildtransitive
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
--------------------------------------------------------------------------------
/FFMediaToolkit/FFMediaToolkit.ruleset:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
--------------------------------------------------------------------------------
/FFMediaToolkit/FFmpegException.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit
2 | {
3 | using System;
4 | using FFMediaToolkit.Helpers;
5 |
6 | ///
7 |     /// Represents an exception thrown when an FFmpeg method call returns an error code.
8 | ///
9 | [Serializable]
10 | public class FFmpegException : Exception
11 | {
12 | ///
13 | /// Initializes a new instance of the class.
14 | ///
15 | public FFmpegException()
16 | {
17 | }
18 |
19 | ///
20 |         /// Initializes a new instance of the class using a message.
21 | ///
22 | /// The exception message.
23 | public FFmpegException(string message)
24 | : base(message)
25 | => ErrorMessage = string.Empty;
26 |
27 | ///
28 |         /// Initializes a new instance of the class using a message and an error code.
29 | ///
30 | /// The exception message.
31 | /// The error code returned by the FFmpeg method.
32 | public FFmpegException(string message, int errorCode)
33 | : base(CreateMessage(message, errorCode))
34 | {
35 | ErrorCode = errorCode;
36 | ErrorMessage = StringConverter.DecodeMessage(errorCode);
37 | }
38 |
39 | ///
40 | /// Gets the error code returned by the FFmpeg method.
41 | ///
42 | public int? ErrorCode { get; }
43 |
44 | ///
45 | /// Gets the message text decoded from error code.
46 | ///
47 | public string ErrorMessage { get; }
48 |
49 | private static string CreateMessage(string msg, int errCode)
50 | => $"{msg} Error code: {errCode} : {StringConverter.DecodeMessage(errCode)}";
51 | }
52 | }
53 |
--------------------------------------------------------------------------------
/FFMediaToolkit/FFmpegLoader.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit
2 | {
3 | using System;
4 | using System.IO;
5 | using System.Runtime.InteropServices;
6 | using FFMediaToolkit.Interop;
7 | using FFmpeg.AutoGen;
8 |
9 | ///
10 | /// Contains methods for managing FFmpeg libraries.
11 | ///
12 | public static class FFmpegLoader
13 | {
14 | private static LogLevel logLevel = LogLevel.Error;
15 | private static bool isPathSet;
16 |
17 | ///
18 | /// Delegate for log message callback.
19 | ///
20 | /// The message.
21 | public delegate void LogCallbackDelegate(string message);
22 |
23 | ///
24 | /// Log message callback event.
25 | ///
26 | public static event LogCallbackDelegate LogCallback;
27 |
28 | ///
29 | /// Gets or sets the verbosity level of FFMpeg logs printed to standard error/output.
30 | /// Default value is .
31 | ///
32 | public static LogLevel LogVerbosity
33 | {
34 | get => logLevel;
35 | set
36 | {
37 | if (IsFFmpegLoaded)
38 | {
39 | ffmpeg.av_log_set_level((int)value);
40 | }
41 |
42 | logLevel = value;
43 | }
44 | }
45 |
46 | ///
47 | /// Gets or sets path to the directory containing FFmpeg binaries.
48 | ///
49 | /// Thrown when FFmpeg was already loaded.
50 | /// Thrown when specified directory does not exist.
51 | public static string FFmpegPath
52 | {
53 | get => ffmpeg.RootPath ?? string.Empty;
54 | set
55 | {
56 | if (IsFFmpegLoaded)
57 | {
58 | throw new InvalidOperationException("FFmpeg libraries were already loaded!");
59 | }
60 |
61 | if (!Directory.Exists(value))
62 | {
63 | throw new DirectoryNotFoundException("The specified FFmpeg directory does not exist!");
64 | }
65 |
66 | ffmpeg.RootPath = value;
67 | isPathSet = true;
68 | }
69 | }
70 |
71 | ///
72 | /// Gets the FFmpeg version info string.
73 | /// Empty when FFmpeg libraries were not yet loaded.
74 | ///
75 | public static string FFmpegVersion { get; private set; } = string.Empty;
76 |
77 | ///
78 | /// Gets a value indicating whether the loaded FFmpeg binary files are licensed under the GPL.
79 | /// Null when FFmpeg libraries were not yet loaded.
80 | ///
81 | public static bool? IsFFmpegGplLicensed { get; private set; }
82 |
83 | ///
84 | /// Gets the FFmpeg license text
85 | /// Empty when FFmpeg libraries were not yet loaded.
86 | ///
87 | public static string FFmpegLicense { get; private set; } = string.Empty;
88 |
89 | ///
90 | /// Gets a value indicating whether the FFmpeg binary files were successfully loaded.
91 | ///
92 | internal static bool IsFFmpegLoaded { get; private set; }
93 |
94 | ///
95 |         /// Manually loads FFmpeg libraries from the specified  (or the default path for the current platform if not set).
96 |         /// If you do not call this method, FFmpeg will be loaded automatically when a media file is first opened or created.
97 | ///
98 | ///
99 | /// Thrown when default FFmpeg directory does not exist.
100 | /// On Windows you have to specify a path to a directory containing the FFmpeg shared build DLL files.
101 | ///
102 | ///
103 | /// Thrown when required FFmpeg libraries do not exist or when you try to load 64bit binaries from 32bit application process.
104 | ///
105 | public static void LoadFFmpeg()
106 | {
107 | if (IsFFmpegLoaded)
108 | {
109 | return;
110 | }
111 |
112 | if (!isPathSet)
113 | {
114 | try
115 | {
116 | FFmpegPath = NativeMethods.GetFFmpegDirectory();
117 | }
118 | catch (DirectoryNotFoundException)
119 | {
120 |                     throw new DirectoryNotFoundException("Cannot find the default FFmpeg directory.\n" +
121 |                         "On Windows you have to set \"FFmpegLoader.FFmpegPath\" to the full path of the directory containing the FFmpeg 7.x shared build \".dll\" files.\n" +
122 |                         "For more information please see https://github.com/radek-k/FFMediaToolkit#setup");
123 | }
124 | }
125 |
126 | try
127 | {
128 | FFmpegVersion = ffmpeg.av_version_info();
129 | FFmpegLicense = ffmpeg.avcodec_license();
130 | IsFFmpegGplLicensed = FFmpegLicense.StartsWith("GPL");
131 | }
132 | catch (DllNotFoundException ex)
133 | {
134 | HandleLibraryLoadError(ex);
135 | }
136 | catch (NotSupportedException ex)
137 | {
138 | HandleLibraryLoadError(ex);
139 | }
140 |
141 | IsFFmpegLoaded = true;
142 | LogVerbosity = logLevel;
143 | }
144 |
145 | ///
146 | /// Start logging ffmpeg output.
147 | ///
148 | public static unsafe void SetupLogging()
149 | {
150 | ffmpeg.av_log_set_level(ffmpeg.AV_LOG_VERBOSE);
151 |
152 | // do not convert to local function
153 | av_log_set_callback_callback logCallback = (p0, level, format, vl) =>
154 | {
155 | if (level > ffmpeg.av_log_get_level())
156 | return;
157 |
158 | var lineSize = 1024;
159 | var lineBuffer = stackalloc byte[lineSize];
160 | var printPrefix = 1;
161 | ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
162 | var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
163 | LogCallback?.Invoke(line);
164 | };
165 |
166 | ffmpeg.av_log_set_callback(logCallback);
167 | }
168 |
169 | ///
170 | /// Throws a FFmpeg library loading exception.
171 | ///
172 | /// The original exception.
173 | internal static void HandleLibraryLoadError(Exception exception)
174 | {
175 |             throw new DllNotFoundException($"Cannot load FFmpeg libraries from {FFmpegPath} directory.\nRequired FFmpeg version: 7.x (shared build)\nMake sure the \"Build\" -> \"Prefer 32-bit\" option in the project settings is turned off.\nFor more information please see https://github.com/radek-k/FFMediaToolkit#setup", exception);
176 | }
177 | }
178 | }
179 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Graphics/ImageData.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Graphics
2 | {
3 | using System;
4 | using System.Buffers;
5 | using System.Drawing;
6 |
7 | ///
8 |     /// Represents a lightweight container for bitmap images.
9 | ///
10 | public ref struct ImageData
11 | {
12 | private readonly Span span;
13 | private readonly IMemoryOwner pooledMemory;
14 |
15 | ///
16 | /// Initializes a new instance of the struct using a as the data source.
17 | ///
18 | /// The bitmap data.
19 | /// The pixel format.
20 | /// The image dimensions.
21 | /// When data span size doesn't match size calculated from width, height and the pixel format.
22 | public ImageData(Span data, ImagePixelFormat pixelFormat, Size imageSize)
23 | {
24 | var size = EstimateStride(imageSize.Width, pixelFormat) * imageSize.Height;
25 | if (data.Length < size)
26 | {
27 | throw new ArgumentException("Pixel buffer size doesn't match size required by this image format.");
28 | }
29 |
30 | span = data;
31 | pooledMemory = null;
32 |
33 | ImageSize = imageSize;
34 | PixelFormat = pixelFormat;
35 | }
36 |
37 | ///
38 | /// Initializes a new instance of the struct using a as the data source.
39 | ///
40 | /// The bitmap data.
41 | /// The pixel format.
42 | /// The image width.
43 | /// The image height.
44 | /// When data span size doesn't match size calculated from width, height and the pixel format.
45 | public ImageData(Span data, ImagePixelFormat pixelFormat, int width, int height)
46 | : this(data, pixelFormat, new Size(width, height))
47 | {
48 | }
49 |
50 | private ImageData(IMemoryOwner memory, Size size, ImagePixelFormat pixelFormat)
51 | {
52 | span = null;
53 | pooledMemory = memory;
54 |
55 | ImageSize = size;
56 | PixelFormat = pixelFormat;
57 | }
58 |
59 | ///
60 | /// Gets the object containing the bitmap data.
61 | ///
62 | public Span Data => IsPooled ? pooledMemory.Memory.Span : span;
63 |
64 | ///
65 | /// Gets a value indicating whether this instance of uses memory pooling.
66 | ///
67 | public bool IsPooled => pooledMemory != null;
68 |
69 | ///
70 | /// Gets the image size.
71 | ///
72 | public Size ImageSize { get; }
73 |
74 | ///
75 | /// Gets the bitmap pixel format.
76 | ///
77 | public ImagePixelFormat PixelFormat { get; }
78 |
79 | ///
80 | /// Gets the estimated number of bytes in one row of image pixels.
81 | ///
82 | public int Stride => EstimateStride(ImageSize.Width, PixelFormat);
83 |
84 | ///
85 | /// Rents a memory buffer from pool and creates a new instance of class from it.
86 | ///
87 | /// The image dimensions.
88 | /// The bitmap pixel format.
89 | /// The new instance.
90 | public static ImageData CreatePooled(Size imageSize, ImagePixelFormat pixelFormat)
91 | {
92 | var size = EstimateStride(imageSize.Width, pixelFormat) * imageSize.Height;
93 | var pool = MemoryPool.Shared;
94 | var memory = pool.Rent(size);
95 | return new ImageData(memory, imageSize, pixelFormat);
96 | }
97 |
98 | ///
99 | /// Creates a new instance of the class using a byte array as the data source.
100 | ///
101 | /// The byte array containing bitmap data.
102 | /// The bitmap pixel format.
103 | /// The image dimensions.
104 | /// A new instance.
105 | public static ImageData FromArray(byte[] pixels, ImagePixelFormat pixelFormat, Size imageSize)
106 | => new ImageData(new Span(pixels), pixelFormat, imageSize);
107 |
108 | ///
109 | /// Creates a new instance of the class using a byte array as the data source.
110 | ///
111 | /// The byte array containing bitmap data.
112 | /// The bitmap pixel format.
113 | /// The image width.
114 | /// The image height.
115 | /// A new instance.
116 | public static ImageData FromArray(byte[] pixels, ImagePixelFormat pixelFormat, int width, int height)
117 | => FromArray(pixels, pixelFormat, new Size(width, height));
118 |
119 | ///
120 | /// Creates a new instance of the class using a pointer to the unmanaged memory as the data source.
121 | ///
122 |         /// A pointer to the bitmap data in unmanaged memory.
123 | /// The bitmap pixel format.
124 | /// The image dimensions.
125 | /// A new instance.
126 | public static ImageData FromPointer(IntPtr pointer, ImagePixelFormat pixelFormat, Size imageSize)
127 | {
128 | var span = CreateSpan(pointer, imageSize, pixelFormat);
129 | return new ImageData(span, pixelFormat, imageSize);
130 | }
131 |
132 | ///
133 | /// Creates a new instance of the class using a pointer to the unmanaged memory as the data source.
134 | ///
135 |         /// A pointer to the bitmap data in unmanaged memory.
136 | /// The bitmap pixel format.
137 | /// The image width.
138 | /// The image height.
139 | /// A new instance.
140 | public static ImageData FromPointer(IntPtr pointer, ImagePixelFormat pixelFormat, int width, int height)
141 | => FromPointer(pointer, pixelFormat, new Size(width, height));
142 |
143 | ///
144 | /// Gets the estimated image line size based on the pixel format and width.
145 | ///
146 | /// The image width.
147 | /// The image pixel format.
148 | /// The size of a single line of the image measured in bytes.
149 | public static int EstimateStride(int width, ImagePixelFormat format) => 4 * (((GetBitsPerPixel(format) * width) + 31) / 32);
150 |
151 | private static unsafe Span CreateSpan(IntPtr pointer, Size imageSize, ImagePixelFormat pixelFormat)
152 | {
153 | var size = EstimateStride(imageSize.Width, pixelFormat) * imageSize.Height;
154 | return new Span((void*)pointer, size);
155 | }
156 |
157 | private static int GetBitsPerPixel(ImagePixelFormat format)
158 | {
159 | switch (format)
160 | {
161 | case ImagePixelFormat.Bgr24:
162 | return 24;
163 | case ImagePixelFormat.Bgra32:
164 | return 32;
165 | case ImagePixelFormat.Rgb24:
166 | return 24;
167 | case ImagePixelFormat.Rgba32:
168 | return 32;
169 | case ImagePixelFormat.Argb32:
170 | return 32;
171 | case ImagePixelFormat.Uyvy422:
172 | return 16;
173 | case ImagePixelFormat.Yuv420:
174 | return 12;
175 | case ImagePixelFormat.Yuv422:
176 | return 16;
177 | case ImagePixelFormat.Yuv444:
178 | return 24;
179 | case ImagePixelFormat.Gray16:
180 | return 16;
181 | case ImagePixelFormat.Gray8:
182 | return 8;
183 | case ImagePixelFormat.Rgba64:
184 | return 64;
185 | default:
186 | return 0;
187 | }
188 | }
189 | }
190 | }
191 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Graphics/ImagePixelFormat.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Graphics
2 | {
3 | using FFmpeg.AutoGen;
4 |
5 | ///
6 | /// Represents the most used image pixel formats. Partially compatible with .
7 | ///
8 | public enum ImagePixelFormat
9 | {
10 | ///
11 | /// Represents a BGR 24bpp bitmap pixel format. Used by default in GDI+ and WPF graphics.
12 | ///
13 | Bgr24 = AVPixelFormat.AV_PIX_FMT_BGR24,
14 |
15 | ///
16 | /// Represents a BGRA(with alpha channel) 32bpp bitmap pixel format.
17 | ///
18 | Bgra32 = AVPixelFormat.AV_PIX_FMT_BGRA,
19 |
20 | ///
21 | /// Represents a RGB 24bpp bitmap pixel format.
22 | ///
23 | Rgb24 = AVPixelFormat.AV_PIX_FMT_RGB24,
24 |
25 | ///
26 | /// Represents a RGBA(with alpha channel) 32bpp bitmap pixel format.
27 | ///
28 | Rgba32 = AVPixelFormat.AV_PIX_FMT_RGBA,
29 |
30 | ///
31 | /// Represents a ARGB(with alpha channel) 32bpp bitmap pixel format.
32 | ///
33 | Argb32 = AVPixelFormat.AV_PIX_FMT_ARGB,
34 |
35 | ///
36 | /// Represents a RGBA(with alpha channel) 64bpp bitmap pixel format.
37 | ///
38 | Rgba64 = AVPixelFormat.AV_PIX_FMT_RGBA64LE,
39 |
40 | ///
41 | /// Represents a UYVY422 pixel format.
42 | ///
43 | Uyvy422 = AVPixelFormat.AV_PIX_FMT_UYVY422,
44 |
45 | ///
46 | /// Represents a YUV 24bpp 4:4:4 video pixel format.
47 | ///
48 | Yuv444 = AVPixelFormat.AV_PIX_FMT_YUV444P,
49 |
50 | ///
51 | /// Represents a YUV 16bpp 4:2:2 video pixel format.
52 | ///
53 | Yuv422 = AVPixelFormat.AV_PIX_FMT_YUV422P,
54 |
55 | ///
56 | /// Represents a YUV 12bpp 4:2:0 video pixel format.
57 | ///
58 | Yuv420 = AVPixelFormat.AV_PIX_FMT_YUV420P,
59 |
60 | ///
61 | /// Represents a YUV 15bpp 4:2:0 video pixel format.
62 | ///
63 | Yuv42010 = AVPixelFormat.AV_PIX_FMT_YUV420P10LE,
64 |
65 | ///
66 | /// Represents a Gray 16bpp little-endian video pixel format.
67 | ///
68 | Gray16 = AVPixelFormat.AV_PIX_FMT_GRAY16LE,
69 |
70 | ///
71 | /// Represents a Gray 8bpp video pixel format.
72 | ///
73 | Gray8 = AVPixelFormat.AV_PIX_FMT_GRAY8,
74 | }
75 | }
76 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Helpers/ExceptionHandler.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Helpers
2 | {
3 | using System.Runtime.CompilerServices;
4 |
5 | ///
6 | /// Contains common methods for handling FFMpeg exceptions.
7 | ///
8 | internal static class ExceptionHandler
9 | {
10 | ///
11 | /// A delegate for error code handling.
12 | ///
13 | /// The error code.
14 | internal delegate void ErrorHandler(int errorCode);
15 |
16 | ///
17 |         /// Checks if the specified integer is an error code and throws an exception if it is.
18 | ///
19 | /// The exit code returned by a method.
20 | /// The exception message.
21 | [MethodImpl(MethodImplOptions.AggressiveInlining)]
22 | internal static void ThrowIfError(this int errorCode, string exceptionMessage)
23 | {
24 | if (errorCode < 0)
25 | {
26 | throw new FFmpegException(exceptionMessage, errorCode);
27 | }
28 | }
29 |
30 | ///
31 | /// Checks if the integer is equal to the specified and executes the method.
32 | ///
33 | /// The exit code returned by a method.
34 | /// The error code to handle.
35 |         /// The method to execute if the error is handled.
36 |         /// If set to true, this method returns 0 instead of the original error code after handling it.
37 |         /// The original error code, or 0 if the error was handled.
38 | internal static int IfError(this int errorCode, int handledError, ErrorHandler action, bool handles = true)
39 | {
40 | if (errorCode == handledError)
41 | {
42 | action(errorCode);
43 | }
44 |
45 | return handles ? 0 : errorCode;
46 | }
47 |
48 | ///
49 | /// Checks if the integer is equal to the and throws an .
50 | ///
51 | /// The exit code returned by a method.
52 | /// The error code to handle.
53 | /// The exception message.
54 | /// The original error code.
55 | internal static int IfError(this int errorCode, int handledError, string exceptionMessage)
56 | => errorCode.IfError(handledError, x => throw new FFmpegException(exceptionMessage, x));
57 | }
58 | }
59 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Helpers/Extensions.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Helpers
2 | {
3 | using System;
4 | using System.ComponentModel;
5 | using FFMediaToolkit.Common;
6 | using FFmpeg.AutoGen;
7 |
8 | ///
9 | /// Contains extension methods.
10 | ///
11 | internal static class Extensions
12 | {
13 | ///
14 |         /// Gets the description attribute value of the specified enumeration value.
15 | ///
16 | /// The enum value.
17 | /// The description attribute string of this enum value.
18 | public static string GetDescription(this Enum value)
19 | {
20 | var field = value.GetType().GetField(value.ToString());
21 | return Attribute.GetCustomAttribute(field, typeof(DescriptionAttribute)) is DescriptionAttribute attribute
22 | ? attribute.Description : value.ToString();
23 | }
24 |
25 | ///
26 |         /// Checks if this object is equal to at least one of the specified objects.
27 | ///
28 | /// Type of the objects.
29 | /// This object.
30 | /// Objects to check.
31 |         /// if this object is equal to at least one of the specified objects.
32 | public static bool IsMatch(this T value, params T[] valueToCompare)
33 | where T : struct, Enum
34 | {
35 | foreach (T x in valueToCompare)
36 | {
37 | if (value.Equals(x))
38 | {
39 | return true;
40 | }
41 | }
42 |
43 | return false;
44 | }
45 |
46 | ///
47 | /// Normalizes this enumeration value - makes it lowercase and trims the specified amount of chars.
48 | ///
49 | /// The enumeration value to format.
50 | /// Number of chars to trim.
51 | /// The normalized string.
52 | internal static string FormatEnum(this Enum value, int charsToTrim) => value.ToString().Substring(charsToTrim).ToLower();
53 |
54 | ///
55 | /// Gets the type of content in the .
56 | ///
57 | /// The .
58 | /// The type of frame content.
59 | internal static MediaType GetMediaType(this AVFrame frame)
60 | {
61 | if (frame.width > 0 && frame.height > 0)
62 | {
63 | return MediaType.Video;
64 | }
65 |
66 | if (frame.ch_layout.nb_channels > 0)
67 | {
68 | return MediaType.Audio;
69 | }
70 |
71 | return MediaType.None;
72 | }
73 | }
74 | }
75 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Helpers/MathHelper.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Helpers
2 | {
3 | using System;
4 | using FFmpeg.AutoGen;
5 |
6 | ///
7 | /// Contains extension methods for math types.
8 | ///
9 | internal static class MathHelper
10 | {
11 | ///
12 | /// Converts a rational number to a double.
13 | ///
14 | /// The to convert.
15 | /// The value.
16 | public static double ToDouble(this AVRational rational)
17 | => rational.den == 0 ? 0 : Convert.ToDouble(rational.num) / Convert.ToDouble(rational.den);
18 |
19 | ///
20 | /// Converts the given in the units to a object.
21 | ///
22 | /// The timestamp.
23 | /// The time base unit.
24 | /// The converted .
25 | public static TimeSpan ToTimeSpan(this long timestamp, AVRational timeBase)
26 | {
27 | var ts = Convert.ToDouble(timestamp);
28 | var tb = timeBase.ToDouble();
29 |
30 | return TimeSpan.FromMilliseconds(ts * tb * 1000);
31 | }
32 |
33 | ///
34 | /// Converts the frame number to a .
35 | ///
36 | /// The frame number.
37 | /// The stream frame rate.
38 | /// The converted .
39 | public static TimeSpan ToTimeSpan(this int frameNumber, double fps) => TimeSpan.FromMilliseconds(frameNumber * (1000 / fps));
40 |
41 | ///
42 |         /// Converts this to a frame number based on the specified frame rate.
43 | ///
44 | /// The time.
45 | /// The stream frame rate.
46 | /// The frame number.
47 | public static int ToFrameNumber(this TimeSpan time, AVRational framerate)
48 | => (int)(time.TotalSeconds * framerate.num / framerate.den);
49 |
50 | ///
51 | /// Converts a frame index to a timestamp in the units.
52 | ///
53 | /// The frame number.
54 | /// The stream frame rate.
55 | /// The stream time base.
56 | /// The timestamp.
57 | public static long ToTimestamp(this int frameNumber, AVRational fps, AVRational timeBase)
58 | {
59 | long num = frameNumber * fps.den * timeBase.den;
60 | long den = fps.num * timeBase.num;
61 | return Convert.ToInt64(num / (double)den);
62 | }
63 |
64 | ///
65 | /// Converts the to a timestamp in the units.
66 | ///
67 | /// The time.
68 | /// The stream time base.
69 | /// The timestamp.
70 | public static long ToTimestamp(this TimeSpan time, AVRational timeBase)
71 | => Convert.ToInt64(time.TotalSeconds * timeBase.den / timeBase.num);
72 |
73 | ///
74 | /// Clamps the specified number between min and max values.
75 | ///
76 | /// The value to clamp.
77 | /// The minimum value.
78 | /// The maximum value.
79 | /// The clamped value.
80 | public static int Clamp(this int number, int min, int max)
81 | {
82 | if (number < min)
83 | {
84 | return min;
85 | }
86 |
87 | if (number > max)
88 | {
89 | return max;
90 | }
91 |
92 | return number;
93 | }
94 | }
95 | }
96 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Helpers/StringConverter.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Helpers
2 | {
3 | using System;
4 | using System.Runtime.InteropServices;
5 | using System.Text;
6 | using FFmpeg.AutoGen;
7 |
8 | ///
9 | /// Contains string conversion methods.
10 | ///
11 | internal static class StringConverter
12 | {
13 | ///
14 | /// Creates a new from a pointer to the unmanaged UTF-8 string.
15 | ///
16 | /// A pointer to the unmanaged string.
17 | /// The converted string.
18 | public static string Utf8ToString(this IntPtr pointer)
19 | {
20 |             var length = 0;
21 |
22 |             while (Marshal.ReadByte(pointer, length) != 0)
23 |             {
24 |                 ++length;
25 |             }
26 |
27 |             var buffer = new byte[length];
28 |             Marshal.Copy(pointer, buffer, 0, length);
29 |
30 | return Encoding.UTF8.GetString(buffer);
31 | }
32 |
33 | ///
34 | /// Gets the FFmpeg error message based on the error code.
35 | ///
36 | /// The error code.
37 | /// The decoded error message.
38 | public static unsafe string DecodeMessage(int errorCode)
39 | {
40 | const int bufferSize = 1024;
41 | var buffer = stackalloc byte[bufferSize];
42 | ffmpeg.av_strerror(errorCode, buffer, bufferSize);
43 |
44 | var message = new IntPtr(buffer).Utf8ToString();
45 | return message;
46 | }
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/FFMediaToolkit/Interop/NativeMethods.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit.Interop
2 | {
3 | using System;
4 | using System.Runtime.InteropServices;
5 |
6 | ///
7 | /// Contains the native operating system methods.
8 | ///
9 | internal static class NativeMethods
10 | {
11 |         private static string MacOSDefaultDirectory => "/usr/local/lib";
12 |
13 | private static string LinuxDefaultDirectory => "/usr/lib/{0}-linux-gnu";
14 |
15 | private static string WindowsDefaultDirectory => @"\runtimes\{0}\native";
16 |
17 | ///
18 | /// Gets the default FFmpeg directory for current platform.
19 | ///
20 | /// A path to the default directory for FFmpeg libraries.
21 | internal static string GetFFmpegDirectory()
22 | {
23 | if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
24 | {
25 | string archName;
26 | switch (RuntimeInformation.ProcessArchitecture)
27 | {
28 | case Architecture.X86:
29 | archName = "win-x86";
30 | break;
31 | case Architecture.X64:
32 | archName = "win-x64";
33 | break;
34 | case Architecture.Arm64:
35 | archName = "win-arm64";
36 | break;
37 | default:
38 | throw new PlatformNotSupportedException("This OS architecture is not supported by the FFMediaToolkit");
39 | }
40 |
41 | return AppDomain.CurrentDomain.BaseDirectory + string.Format(WindowsDefaultDirectory, archName);
42 | }
43 | else if (RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
44 | {
45 | string archName;
46 | switch (RuntimeInformation.ProcessArchitecture)
47 | {
48 | case Architecture.X86:
49 | archName = "x86";
50 | break;
51 | case Architecture.X64:
52 | archName = "x86_64";
53 | break;
54 | case Architecture.Arm:
55 | archName = "arm";
56 | break;
57 | case Architecture.Arm64:
58 | archName = "aarch64";
59 | break;
60 | default:
61 | throw new PlatformNotSupportedException("This OS architecture is not supported by the FFMediaToolkit");
62 | }
63 |
64 | return string.Format(LinuxDefaultDirectory, archName);
65 | }
66 | else if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
67 | {
68 |                 return MacOSDefaultDirectory;
69 | }
70 | else
71 | {
72 | throw new PlatformNotSupportedException("This OS platform is not supported by the FFMediaToolkit");
73 | }
74 | }
75 | }
76 | }
77 |
--------------------------------------------------------------------------------
/FFMediaToolkit/LogLevel.cs:
--------------------------------------------------------------------------------
1 | namespace FFMediaToolkit
2 | {
3 | using FFmpeg.AutoGen;
4 |
5 | ///
6 | /// FFMpeg logging verbosity levels.
7 | ///
8 | public enum LogLevel
9 | {
10 | ///
11 | /// Doesn't print any messages.
12 | ///
13 | Quiet = ffmpeg.AV_LOG_QUIET,
14 |
15 | ///
16 | /// Prints only error messages.
17 | ///
18 | Error = ffmpeg.AV_LOG_ERROR,
19 |
20 | ///
21 | /// Prints error and warning messages.
22 | ///
23 | Warning = ffmpeg.AV_LOG_WARNING,
24 |
25 | ///
26 | /// Prints errors, warnings and informational messages.
27 | ///
28 | Info = ffmpeg.AV_LOG_INFO,
29 |
30 | ///
31 | /// Prints the most detailed messages.
32 | ///
33 | Verbose = ffmpeg.AV_LOG_VERBOSE,
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019 Radosław Kmiotek
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # FFMediaToolkit
2 |
3 | [](https://ci.appveyor.com/project/radek-k/ffmediatoolkit) [](https://www.nuget.org/packages/FFMediaToolkit/)
4 | [](https://github.com/radek-k/FFMediaToolkit/blob/master/LICENSE)
5 |
6 | > ⚠ **This library is not recommended for production use.**
7 |
8 | **FFMediaToolkit** is a .NET library for creating and reading multimedia files. It uses the native FFmpeg libraries via the [FFmpeg.AutoGen](https://github.com/Ruslan-B/FFmpeg.AutoGen) bindings.
9 |
10 | ## Features
11 |
12 | - Decoding/encoding audio-video files in many formats supported by FFmpeg.
13 | - Extracting audio data as floating point arrays.
14 | - Access to any video frame by timestamp.
15 | - Creating videos from images with metadata, pixel format, bitrate, CRF, FPS, GoP, dimensions and other codec settings.
16 | - Supports reading multimedia chapters and metadata.
17 |
18 | ## Code samples
19 |
20 | - Extract all video frames as PNG files
21 |
22 | ```c#
23 | int i = 0;
24 | var file = MediaFile.Open(@"C:\videos\movie.mp4");
25 | while(file.Video.TryGetNextFrame(out var imageData))
26 | {
27 | imageData.ToBitmap().Save($@"C:\images\frame_{i++}.png");
28 | // See the #Usage details for example .ToBitmap() implementation
29 | // The .Save() method may be different depending on your graphics library
30 | }
31 | ```
32 |
33 | - Video decoding
34 |
35 | ```c#
36 | // Opens a multimedia file.
37 | // You can use the MediaOptions properties to set decoder options.
38 | var file = MediaFile.Open(@"C:\videos\movie.mp4");
39 |
40 | // Gets the frame at 5th second of the video.
41 | var frame5s = file.Video.GetFrame(TimeSpan.FromSeconds(5));
42 |
43 | // Print information about the video stream.
44 | Console.WriteLine($"Bitrate: {file.Info.Bitrate / 1000.0} kb/s");
45 | var info = file.Video.Info;
46 | Console.WriteLine($"Duration: {info.Duration}");
47 | Console.WriteLine($"Frame count: {info.NumberOfFrames?.ToString() ?? "N/A"}");
48 | var frameRateInfo = info.IsVariableFrameRate ? "average" : "constant";
49 | Console.WriteLine($"Frame rate: {info.AvgFrameRate} fps ({frameRateInfo})");
50 | Console.WriteLine($"Frame size: {info.FrameSize}");
51 | Console.WriteLine($"Pixel format: {info.PixelFormat}");
52 | Console.WriteLine($"Codec: {info.CodecName}");
53 | Console.WriteLine($"Is interlaced: {info.IsInterlaced}");
54 | ```
55 |
56 | - Encode video from images.
57 |
58 | ```c#
59 | // You can set the codec, bitrate, frame rate and many other options here.
60 | var settings = new VideoEncoderSettings(width: 1920, height: 1080, framerate: 30, codec: VideoCodec.H264);
61 | settings.EncoderPreset = EncoderPreset.Fast;
62 | settings.CRF = 17;
63 | using(var file = MediaBuilder.CreateContainer(@"C:\videos\example.mp4").WithVideo(settings).Create())
64 | {
65 | while(file.Video.FramesCount < 300)
66 | {
67 | file.Video.AddFrame(/*Your code*/);
68 | }
69 | }
70 | ```
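
Frames can also be written at explicit timestamps instead of consecutive frame numbers. The following is a minimal sketch based on the `AddFrame(ImageData, TimeSpan)` overload and the `CurrentDuration` property of `VideoOutputStream`; the output path is a placeholder, the pixel data is left unfilled, and `Size` comes from `System.Drawing`.

```c#
var settings = new VideoEncoderSettings(width: 1280, height: 720, framerate: 30, codec: VideoCodec.H264);
using(var file = MediaBuilder.CreateContainer(@"C:\videos\timestamps.mp4").WithVideo(settings).Create())
{
    // Rents a reusable pixel buffer from the shared memory pool.
    var frame = ImageData.CreatePooled(new Size(1280, 720), ImagePixelFormat.Bgr24);

    for (var time = TimeSpan.Zero; time < TimeSpan.FromSeconds(10); time += TimeSpan.FromMilliseconds(100))
    {
        // Fill frame.Data with your pixels here, then encode the frame at an explicit position.
        file.Video.AddFrame(frame, time);
    }

    Console.WriteLine($"Encoded duration: {file.Video.CurrentDuration}");
}
```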
71 |
72 | ## Setup
73 |
74 | Install the **FFMediaToolkit** package from [NuGet](https://www.nuget.org/packages/FFMediaToolkit/).
75 |
76 | ```shell
77 | dotnet add package FFMediaToolkit
78 | ```
79 |
80 | ```Package
81 | PM> Install-Package FFMediaToolkit
82 | ```
83 |
84 | **FFmpeg libraries are not included in the package.** To use FFMediaToolkit, you need the **FFmpeg shared build** binaries: `avcodec`, `avformat`, `avutil`, `swresample`, `swscale`.
85 |
86 | - **Windows** - You can download the binaries from [BtbN/FFmpeg-Builds](https://github.com/BtbN/FFmpeg-Builds/releases) or [gyan.dev](https://www.gyan.dev/ffmpeg/builds/). You only need the `*.dll` files from the `.\bin` directory (**not `.\lib`**) of the ZIP package. Place them in the `.\runtimes\win-x64\native\` (64-bit) subdirectory of the application output directory or set `FFmpegLoader.FFmpegPath`.
87 | - **Linux** - Download FFmpeg using your package manager.
88 | - **macOS**, **iOS**, **Android** - Not supported.
89 |
90 | **You need to set `FFmpegLoader.FFmpegPath` to the full path of the directory containing the FFmpeg libraries.**
91 | > If you want to use 64-bit FFmpeg, you have to disable the *Build* -> *Prefer 32-bit* option in Visual Studio project properties.
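
For example, the sketch below (directory path is a placeholder) points the loader at a custom FFmpeg directory, optionally loads the libraries eagerly, and forwards FFmpeg log lines to your own handler. All of the members used here are on the `FFmpegLoader` class.

```c#
// Must be set before the first MediaFile.Open / MediaBuilder call.
FFmpegLoader.FFmpegPath = @"C:\ffmpeg\bin\x64";

// Optional: load the libraries now and print version/license info.
FFmpegLoader.LoadFFmpeg();
Console.WriteLine($"Loaded FFmpeg {FFmpegLoader.FFmpegVersion} ({FFmpegLoader.FFmpegLicense})");

// Optional: receive FFmpeg log messages through a callback.
FFmpegLoader.LogVerbosity = LogLevel.Info;
FFmpegLoader.LogCallback += line => Console.Write(line);
FFmpegLoader.SetupLogging();
```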
92 |
93 | ## Usage details
94 |
95 | FFMediaToolkit uses the [*ref struct*](https://docs.microsoft.com/pl-pl/dotnet/csharp/language-reference/keywords/ref#ref-struct-types) `ImageData` for bitmap images. The `.Data` property contains the pixel data in a [`Span<byte>`](https://docs.microsoft.com/pl-pl/dotnet/api/system.span-1?view=netstandard-2.1).
96 |
97 | > **If you want to process or save the `ImageData`, you should convert it to another graphics object, using one of the following methods.**
98 |
99 | > **These methods are not included in the program to avoid additional dependencies and provide compatibility with many graphics libraries.**
100 |
101 | - **For [ImageSharp](https://github.com/SixLabors/ImageSharp) library (.NET Standard/Core - cross-platform):**
102 |
103 | ```c#
104 | using SixLabors.ImageSharp;
105 | using SixLabors.ImageSharp.PixelFormats;
106 | ...
107 | public static Image<Bgr24> ToBitmap(this ImageData imageData)
108 | {
109 |     return Image.LoadPixelData<Bgr24>(imageData.Data, imageData.ImageSize.Width, imageData.ImageSize.Height);
110 | }
111 | ```
112 |
113 | - **For .NET Framework `System.Drawing.Bitmap` (Windows only):**
114 |
115 | ```c#
116 | // ImageData -> Bitmap (unsafe)
117 | public static unsafe Bitmap ToBitmap(this ImageData bitmap)
118 | {
119 | fixed(byte* p = bitmap.Data)
120 | {
121 | return new Bitmap(bitmap.ImageSize.Width, bitmap.ImageSize.Height, bitmap.Stride, PixelFormat.Format24bppRgb, new IntPtr(p));
122 | }
123 | }
124 |
125 | // Bitmap -> ImageData (safe)
126 | ...
127 | var rect = new Rectangle(Point.Empty, bitmap.Size);
128 | var bitLock = bitmap.LockBits(rect, ImageLockMode.ReadOnly, PixelFormat.Format24bppRgb);
129 | var bitmapData = ImageData.FromPointer(bitLock.Scan0, ImagePixelFormat.Bgr24, bitmap.Size);
130 |
131 | mediaFile.Video.AddFrame(bitmapData); // Encode the frame
132 |
133 | bitmap.UnlockBits(bitLock); // UnlockBits() must be called after encoding the frame
134 | ...
135 | ```
136 |
137 | - **For .NET Framework/Core desktop apps with WPF UI (Windows only):**
138 |
139 | ```c#
140 | using System.Windows.Media.Imaging;
141 | ...
142 | // ImageData -> BitmapSource (unsafe)
143 | public static unsafe BitmapSource ToBitmap(this ImageData bitmapData)
144 | {
145 | fixed(byte* ptr = bitmapData.Data)
146 | {
147 | return BitmapSource.Create(bitmapData.ImageSize.Width, bitmapData.ImageSize.Height, 96, 96, PixelFormats.Bgr32, null, new IntPtr(ptr), bitmapData.Data.Length, bitmapData.Stride);
148 | }
149 | }
150 |
151 | // BitmapSource -> ImageData (safe)
152 | public static ImageData ToImageData(this BitmapSource bitmap)
153 | {
154 | var wb = new WriteableBitmap(bitmap);
155 | return ImageData.FromPointer(wb.BackBuffer, ImagePixelFormat.Bgra32, wb.PixelWidth, wb.PixelHeight);
156 | }
157 | ```
158 |
159 | - **FFMediaToolkit will also work with any other graphics library that can create images from a `Span`, byte array, or memory pointer** (see the sketch below).
160 |
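For instance, a rough sketch for [SkiaSharp](https://github.com/mono/SkiaSharp) could look like the extension below. SkiaSharp is not covered by the snippets above, the `ToSKImage` name is arbitrary, and the code assumes the frame was decoded as 32-bit BGRA, since SkiaSharp has no 24-bit BGR color type:

```c#
using SkiaSharp;
...
// ImageData -> SKImage. FromPixelCopy copies the pixel data, so the result
// stays valid after the underlying frame buffer is reused.
public static SKImage ToSKImage(this ImageData imageData)
{
    var info = new SKImageInfo(imageData.ImageSize.Width, imageData.ImageSize.Height, SKColorType.Bgra8888);
    return SKImage.FromPixelCopy(info, imageData.Data.ToArray(), imageData.Stride);
}
```
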
161 | ## Visual Basic usage
162 | Writing a decoded frame directly to the WPF `WriteableBitmap` buffer using the pointer-based `TryGetFrame` overload:
163 | ````vb
164 | Dim file As FileStream = New FileStream("path to the video file", FileMode.Open, FileAccess.Read)
165 | Dim media As MediaFile = MediaFile.Load(file)
166 | Dim bmp As WriteableBitmap = New WriteableBitmap(media.Video.Info.FrameSize.Width, media.Video.Info.FrameSize.Height, 96, 96, PixelFormats.Bgr24, Nothing)
167 | bmp.Lock()
168 | Dim decoded As Boolean = media.Video.TryGetFrame(TimeSpan.FromMinutes(1), bmp.BackBuffer, bmp.BackBufferStride)
169 | If decoded Then
170 | bmp.AddDirtyRect(New Int32Rect(0, 0, media.Video.Info.FrameSize.Width, media.Video.Info.FrameSize.Height))
171 | End If
172 | bmp.Unlock()
173 | imageBox.Source = bmp
174 | ````
175 | Converting `ImageData` to a byte array:
176 | ````vb
177 | Dim data() As Byte = media.Video.GetNextFrame().Data.ToArray()
178 | ````
179 | ## Licensing
180 |
181 | This project is licensed under the [MIT](https://github.com/radek-k/FFMediaToolkit/blob/master/LICENSE) license.
182 |
--------------------------------------------------------------------------------
/appveyor.yml:
--------------------------------------------------------------------------------
1 | image: Visual Studio 2022
2 | version: '{build}'
3 | pull_requests:
4 | do_not_increment_build_number: true
5 | branches:
6 | only:
7 | - master
8 | - develop
9 | nuget:
10 | disable_publish_on_pr: true
11 | build_script:
12 | - ps: .\build.ps1
13 | test: off
14 | artifacts:
15 | - path: .\artifacts\*.*nupkg
16 | name: NuGet
17 | deploy:
18 | - provider: NuGet
19 | server: https://ci.appveyor.com/nuget/radek41-3uj048yghh6h
20 | api_key:
21 | secure: N4Ceb4PGYD3hKN+N2ZkaXtocmrZeUb4jBrFLr/tvSzc=
22 | skip_symbols: true
23 | on:
24 | branch: master
25 | - provider: NuGet
26 | server: https://ci.appveyor.com/nuget/radek41-3uj048yghh6h
27 | api_key:
28 | secure: N4Ceb4PGYD3hKN+N2ZkaXtocmrZeUb4jBrFLr/tvSzc=
29 | skip_symbols: true
30 | on:
31 | branch: develop
32 | - provider: NuGet
33 | name: production
34 | api_key:
35 | secure: ma6VOuNBMjh9UUtr2LvxSnLepmxr855/6U4/bMT/oqgEgol1yCQVrh1VWw4NLIPW
36 | on:
37 | branch: master
38 | APPVEYOR_REPO_TAG: true
39 | - provider: GitHub
40 | release: $(APPVEYOR_REPO_TAG_NAME)
41 | description: 'Release description'
42 | auth_token:
43 | secure: 5obJpl3w5Trw5/FpDqp15U+qxbAC5AztY0k3F8hjsZ3wecH0tX0qc66LA/HVxCSNnHf5NarxmkS0xr6952ByAmiwPDPcBJPF98xtkg5pdnMOhhken8Ux/oekK/sQzf2g
44 | artifact: /.*\.*nupkg/
45 | draft: true
46 | prerelease: false
47 | force_update: true
48 | on:
49 | branch: master
50 | APPVEYOR_REPO_TAG: true
51 |
--------------------------------------------------------------------------------
/build.ps1:
--------------------------------------------------------------------------------
1 | function Exec
2 | {
3 | [CmdletBinding()]
4 | param(
5 | [Parameter(Position=0,Mandatory=1)][scriptblock]$cmd,
6 | [Parameter(Position=1,Mandatory=0)][string]$errorMessage = ($msgs.error_bad_command -f $cmd)
7 | )
8 | & $cmd
9 | if ($lastexitcode -ne 0) {
10 | throw ("Exec: " + $errorMessage)
11 | }
12 | }
13 |
14 | if(Test-Path .\artifacts) { Remove-Item .\artifacts -Force -Recurse }
15 |
16 | exec { & dotnet restore }
17 |
18 | if($env:APPVEYOR_REPO_TAG_NAME -eq $NULL)
19 | {
20 | # $revision = @{ $true = $env:APPVEYOR_BUILD_NUMBER; $false = 1 }[$env:APPVEYOR_BUILD_NUMBER -ne $NULL]
21 | # $suffix = "dev{0:D4}" -f [convert]::ToInt32($revision, 10)
22 | # echo "build: Development build - no commit tag. Package version suffix is $suffix"
23 | echo "build: Development build - commit tag not detected"
24 | exec { & dotnet pack .\FFMediaToolkit\FFMediaToolkit.csproj -c Debug /p:ContinuousIntegrationBuild=true -o .\artifacts --include-symbols }
25 | # --version-suffix=$suffix
26 | }
27 | else
28 | {
29 | echo "build: Release build - tagged commit detected."
30 |
31 | exec { & dotnet pack .\FFMediaToolkit\FFMediaToolkit.csproj -c Release /p:ContinuousIntegrationBuild=true -o .\artifacts --include-symbols }
32 | }
--------------------------------------------------------------------------------