├── .appveyor.yml
├── .github
└── workflows
│ ├── nuget.yml
│ └── windows-dev.yml
├── .gitignore
├── LICENSE
├── README.md
├── src
├── SIPSorceryMedia.Windows.csproj
├── SIPSorceryMedia.Windows.sln
├── WindowsAudioEndPoint.cs
├── WindowsVideoEndPoint.cs
└── icon.png
└── test
└── VideoCaptureTest
├── Program.cs
└── VideoCaptureTest.csproj
/.appveyor.yml:
--------------------------------------------------------------------------------
1 | version: 1.{build}
2 | image: Visual Studio 2022
3 | configuration: Release
4 | before_build:
5 | - cmd: nuget restore -DisableParallelProcessing src\SIPSorceryMedia.Windows.csproj
6 | build:
7 | project: src\SIPSorceryMedia.Windows.csproj
8 | publish_nuget: false
9 | verbosity: quiet
10 | after_build:
11 | - dotnet pack src\SIPSorceryMedia.Windows.csproj -p:NuspecFile=SIPSorceryMedia.Windows.nuspec -c Release --no-build
12 | artifacts:
13 | - path: '**\*.nupkg'
14 | # - path: '**\*.snupkg'
15 | deploy:
16 | - provider: NuGet
17 | server: # remove to push to NuGet.org
18 | api_key:
19 | secure: GWtnKGaBRjWgQ8jTe+9zzlr83Gr15mS/poFyqLWWEeWAIndh0uyaBpAXxozsCcC5
20 | skip_symbols: false
21 | symbol_server: # remove to push symbols to SymbolSource.org
22 | artifact: /.*(\.|\.s)nupkg/
23 | on:
24 | APPVEYOR_REPO_TAG: true # deploy on tag push only
25 | - provider: NuGet
26 | server: https://nuget.pkg.github.com/sipsorcery/index.json
27 | artifact: /.*(\.|\.s)nupkg/
28 | username: sipsorcery
29 | api_key:
30 | secure: E58r+OknoQn8+bsPRT6l3U2K4kfOpDiGCo1C75LkVg+R/RBHpY//J8UCXEfVvyRB
31 | on:
32 | APPVEYOR_REPO_TAG: true # deploy on tag push only
33 |
--------------------------------------------------------------------------------
/.github/workflows/nuget.yml:
--------------------------------------------------------------------------------
1 | name: Publish to NuGet
2 |
3 | on:
4 | push:
5 | tags:
6 | - 'v*' # Triggers only on tags starting with "v"
7 |
8 | jobs:
9 | build-and-publish:
10 | runs-on: windows-latest # ← must run on Windows for net*-windows projects
11 | strategy:
12 | matrix:
13 | framework: ['net8.0-windows10.0.17763.0']
14 |
15 | steps:
16 | - name: Checkout code
17 | uses: actions/checkout@v3
18 |
19 | - name: Setup .NET SDK
20 | uses: actions/setup-dotnet@v3
21 | with:
22 | dotnet-version: '8.0.x'
23 |
24 | - name: Restore dependencies
25 | run: dotnet restore src/SIPSorceryMedia.Windows.csproj
26 |
27 | - name: Build
28 | run: dotnet build src/SIPSorceryMedia.Windows.csproj -c Release --no-restore
29 |
30 | - name: Pack NuGet package
31 | run: dotnet pack src/SIPSorceryMedia.Windows.csproj -c Release -o ./artifacts --no-build
32 |
33 | - name: Publish to NuGet.org
34 | if: startsWith(github.ref, 'refs/tags/v')
35 | env:
36 | NUGET_API_KEY: ${{ secrets.NUGET_API_KEY }}
37 | run: dotnet nuget push .\artifacts\*.nupkg --source https://api.nuget.org/v3/index.json --api-key $env:NUGET_API_KEY
38 |
39 |
--------------------------------------------------------------------------------
/.github/workflows/windows-dev.yml:
--------------------------------------------------------------------------------
1 | name: Run on master commits
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 |
8 | jobs:
9 | build-and-publish:
10 | runs-on: windows-latest
11 |
12 | steps:
13 | - name: Checkout code
14 | uses: actions/checkout@v3
15 |
16 | - name: Setup .NET SDK
17 | uses: actions/setup-dotnet@v3
18 | with:
19 | dotnet-version: '8.0.x'
20 |
21 | - name: Restore dependencies
22 | run: dotnet restore src/SIPSorceryMedia.Windows.csproj
23 |
24 | - name: Build
25 | run: dotnet build src/SIPSorceryMedia.Windows.csproj -c Release --no-restore
26 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | ## Ignore Visual Studio temporary files, build results, and
2 | ## files generated by popular Visual Studio add-ons.
3 | ##
4 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
5 |
6 | # User-specific files
7 | *.rsuser
8 | *.suo
9 | *.user
10 | *.userosscache
11 | *.sln.docstates
12 |
13 | # User-specific files (MonoDevelop/Xamarin Studio)
14 | *.userprefs
15 |
16 | # Mono auto generated files
17 | mono_crash.*
18 |
19 | # Build results
20 | [Dd]ebug/
21 | [Dd]ebugPublic/
22 | [Rr]elease/
23 | [Rr]eleases/
24 | x64/
25 | x86/
26 | [Aa][Rr][Mm]/
27 | [Aa][Rr][Mm]64/
28 | bld/
29 | [Bb]in/
30 | [Oo]bj/
31 | [Ll]og/
32 | [Ll]ogs/
33 |
34 | # Visual Studio 2015/2017 cache/options directory
35 | .vs/
36 | # Uncomment if you have tasks that create the project's static files in wwwroot
37 | #wwwroot/
38 |
39 | # Visual Studio 2017 auto generated files
40 | Generated\ Files/
41 |
42 | # MSTest test Results
43 | [Tt]est[Rr]esult*/
44 | [Bb]uild[Ll]og.*
45 |
46 | # NUnit
47 | *.VisualState.xml
48 | TestResult.xml
49 | nunit-*.xml
50 |
51 | # Build Results of an ATL Project
52 | [Dd]ebugPS/
53 | [Rr]eleasePS/
54 | dlldata.c
55 |
56 | # Benchmark Results
57 | BenchmarkDotNet.Artifacts/
58 |
59 | # .NET Core
60 | project.lock.json
61 | project.fragment.lock.json
62 | artifacts/
63 |
64 | # StyleCop
65 | StyleCopReport.xml
66 |
67 | # Files built by Visual Studio
68 | *_i.c
69 | *_p.c
70 | *_h.h
71 | *.ilk
72 | *.meta
73 | *.obj
74 | *.iobj
75 | *.pch
76 | *.pdb
77 | *.ipdb
78 | *.pgc
79 | *.pgd
80 | *.rsp
81 | *.sbr
82 | *.tlb
83 | *.tli
84 | *.tlh
85 | *.tmp
86 | *.tmp_proj
87 | *_wpftmp.csproj
88 | *.log
89 | *.vspscc
90 | *.vssscc
91 | .builds
92 | *.pidb
93 | *.svclog
94 | *.scc
95 |
96 | # Chutzpah Test files
97 | _Chutzpah*
98 |
99 | # Visual C++ cache files
100 | ipch/
101 | *.aps
102 | *.ncb
103 | *.opendb
104 | *.opensdf
105 | *.sdf
106 | *.cachefile
107 | *.VC.db
108 | *.VC.VC.opendb
109 |
110 | # Visual Studio profiler
111 | *.psess
112 | *.vsp
113 | *.vspx
114 | *.sap
115 |
116 | # Visual Studio Trace Files
117 | *.e2e
118 |
119 | # TFS 2012 Local Workspace
120 | $tf/
121 |
122 | # Guidance Automation Toolkit
123 | *.gpState
124 |
125 | # ReSharper is a .NET coding add-in
126 | _ReSharper*/
127 | *.[Rr]e[Ss]harper
128 | *.DotSettings.user
129 |
130 | # TeamCity is a build add-in
131 | _TeamCity*
132 |
133 | # DotCover is a Code Coverage Tool
134 | *.dotCover
135 |
136 | # AxoCover is a Code Coverage Tool
137 | .axoCover/*
138 | !.axoCover/settings.json
139 |
140 | # Visual Studio code coverage results
141 | *.coverage
142 | *.coveragexml
143 |
144 | # NCrunch
145 | _NCrunch_*
146 | .*crunch*.local.xml
147 | nCrunchTemp_*
148 |
149 | # MightyMoose
150 | *.mm.*
151 | AutoTest.Net/
152 |
153 | # Web workbench (sass)
154 | .sass-cache/
155 |
156 | # Installshield output folder
157 | [Ee]xpress/
158 |
159 | # DocProject is a documentation generator add-in
160 | DocProject/buildhelp/
161 | DocProject/Help/*.HxT
162 | DocProject/Help/*.HxC
163 | DocProject/Help/*.hhc
164 | DocProject/Help/*.hhk
165 | DocProject/Help/*.hhp
166 | DocProject/Help/Html2
167 | DocProject/Help/html
168 |
169 | # Click-Once directory
170 | publish/
171 |
172 | # Publish Web Output
173 | *.[Pp]ublish.xml
174 | *.azurePubxml
175 | # Note: Comment the next line if you want to checkin your web deploy settings,
176 | # but database connection strings (with potential passwords) will be unencrypted
177 | *.pubxml
178 | *.publishproj
179 |
180 | # Microsoft Azure Web App publish settings. Comment the next line if you want to
181 | # checkin your Azure Web App publish settings, but sensitive information contained
182 | # in these scripts will be unencrypted
183 | PublishScripts/
184 |
185 | # NuGet Packages
186 | *.nupkg
187 | # NuGet Symbol Packages
188 | *.snupkg
189 | # The packages folder can be ignored because of Package Restore
190 | **/[Pp]ackages/*
191 | # except build/, which is used as an MSBuild target.
192 | !**/[Pp]ackages/build/
193 | # Uncomment if necessary however generally it will be regenerated when needed
194 | #!**/[Pp]ackages/repositories.config
195 | # NuGet v3's project.json files produces more ignorable files
196 | *.nuget.props
197 | *.nuget.targets
198 |
199 | # Microsoft Azure Build Output
200 | csx/
201 | *.build.csdef
202 |
203 | # Microsoft Azure Emulator
204 | ecf/
205 | rcf/
206 |
207 | # Windows Store app package directories and files
208 | AppPackages/
209 | BundleArtifacts/
210 | Package.StoreAssociation.xml
211 | _pkginfo.txt
212 | *.appx
213 | *.appxbundle
214 | *.appxupload
215 |
216 | # Visual Studio cache files
217 | # files ending in .cache can be ignored
218 | *.[Cc]ache
219 | # but keep track of directories ending in .cache
220 | !?*.[Cc]ache/
221 |
222 | # Others
223 | ClientBin/
224 | ~$*
225 | *~
226 | *.dbmdl
227 | *.dbproj.schemaview
228 | *.jfm
229 | *.pfx
230 | *.publishsettings
231 | orleans.codegen.cs
232 |
233 | # Including strong name files can present a security risk
234 | # (https://github.com/github/gitignore/pull/2483#issue-259490424)
235 | #*.snk
236 |
237 | # Since there are multiple workflows, uncomment next line to ignore bower_components
238 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
239 | #bower_components/
240 |
241 | # RIA/Silverlight projects
242 | Generated_Code/
243 |
244 | # Backup & report files from converting an old project file
245 | # to a newer Visual Studio version. Backup files are not needed,
246 | # because we have git ;-)
247 | _UpgradeReport_Files/
248 | Backup*/
249 | UpgradeLog*.XML
250 | UpgradeLog*.htm
251 | ServiceFabricBackup/
252 | *.rptproj.bak
253 |
254 | # SQL Server files
255 | *.mdf
256 | *.ldf
257 | *.ndf
258 |
259 | # Business Intelligence projects
260 | *.rdl.data
261 | *.bim.layout
262 | *.bim_*.settings
263 | *.rptproj.rsuser
264 | *- [Bb]ackup.rdl
265 | *- [Bb]ackup ([0-9]).rdl
266 | *- [Bb]ackup ([0-9][0-9]).rdl
267 |
268 | # Microsoft Fakes
269 | FakesAssemblies/
270 |
271 | # GhostDoc plugin setting file
272 | *.GhostDoc.xml
273 |
274 | # Node.js Tools for Visual Studio
275 | .ntvs_analysis.dat
276 | node_modules/
277 |
278 | # Visual Studio 6 build log
279 | *.plg
280 |
281 | # Visual Studio 6 workspace options file
282 | *.opt
283 |
284 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
285 | *.vbw
286 |
287 | # Visual Studio LightSwitch build output
288 | **/*.HTMLClient/GeneratedArtifacts
289 | **/*.DesktopClient/GeneratedArtifacts
290 | **/*.DesktopClient/ModelManifest.xml
291 | **/*.Server/GeneratedArtifacts
292 | **/*.Server/ModelManifest.xml
293 | _Pvt_Extensions
294 |
295 | # Paket dependency manager
296 | .paket/paket.exe
297 | paket-files/
298 |
299 | # FAKE - F# Make
300 | .fake/
301 |
302 | # CodeRush personal settings
303 | .cr/personal
304 |
305 | # Python Tools for Visual Studio (PTVS)
306 | __pycache__/
307 | *.pyc
308 |
309 | # Cake - Uncomment if you are using it
310 | # tools/**
311 | # !tools/packages.config
312 |
313 | # Tabs Studio
314 | *.tss
315 |
316 | # Telerik's JustMock configuration file
317 | *.jmconfig
318 |
319 | # BizTalk build output
320 | *.btp.cs
321 | *.btm.cs
322 | *.odx.cs
323 | *.xsd.cs
324 |
325 | # OpenCover UI analysis results
326 | OpenCover/
327 |
328 | # Azure Stream Analytics local run output
329 | ASALocalRun/
330 |
331 | # MSBuild Binary and Structured Log
332 | *.binlog
333 |
334 | # NVidia Nsight GPU debugger configuration file
335 | *.nvuser
336 |
337 | # MFractors (Xamarin productivity tool) working folder
338 | .mfractor/
339 |
340 | # Local History for Visual Studio
341 | .localhistory/
342 |
343 | # BeatPulse healthcheck temp database
344 | healthchecksdb
345 |
346 | # Backup folder for Package Reference Convert tool in Visual Studio 2017
347 | MigrationBackup/
348 |
349 | # Ionide (cross platform F# VS Code tools) working folder
350 | .ionide/
351 | *.bmp
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | # License
2 |
3 | ## 1. BSD 3-Clause "New" or "Revised" License
4 |
5 | Copyright (c) 2020–2025 Aaron Clauson
6 | All rights reserved.
7 |
8 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
9 |
10 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
11 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
12 | 3. Neither the name “SIP Sorcery,” nor “Aaron Clauson,” nor the names of any contributors may be used to endorse or promote products derived from this software without specific prior written permission.
13 |
14 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
15 |
16 | ---
17 |
18 | ## 2. Additional Restriction: Boycott, Divestment, Sanctions (BDS) – Attribution-NonCommercial-ShareAlike
19 |
20 | **Boycott Divestment Sanctions – Attribution-NonCommercial-ShareAlike (BDS BY-NC-SA)**
21 |
22 | This software **must not be used** to further the Apartheid policies of Israel. Use, modification, or distribution **inside** Israel and the Occupied Territories is strictly forbidden until the demands of the Boycott, Divestment and Sanctions movement have been met:
23 |
24 | * Israel has ended the occupation and colonization of all Arab lands occupied in 1967 and dismantled the Wall;
25 | * Arab-Palestinian citizens of Israel have been granted full equality; and
26 | * Palestinian refugees have obtained the right to return to their homes and properties as stipulated in UN Resolution 194.
27 |
28 | For all other geographic regions **outside** of Israel and the Occupied Territories, use, modification, and distribution are permitted under the terms of the **BSD 3-Clause "New" or "Revised" License** above (Section 1), provided that any future use, modification, or distribution carries the above BDS restriction and abides by the ShareAlike and NonCommercial principles.
29 |
30 | This restriction is **not** intended to limit the rights of Israelis or any other people residing outside of Israel and the Occupied Territories.
31 |
32 | In any instance where the BSD 3-Clause License conflicts with the above restriction, the above restriction shall be interpreted as superior, and all other non-conflicting provisions of the BSD 3-Clause license shall remain in effect.
33 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # SIPSorceryMedia.Windows
2 |
3 | This project is an example of developing a C# library that can access Windows audio and video devices and that integrates with the [SIPSorcery](https://github.com/sipsorcery-org/sipsorcery) real-time communications library.
4 |
5 | The classes in this project provide functions to:
6 |
7 | - Supply audio samples from Windows audio capture devices.
8 | - Render audio samples to Windows audio output devices.
9 | - Supply video samples from Windows video capture devices.
10 |
--------------------------------------------------------------------------------
/src/SIPSorceryMedia.Windows.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 | true
10 | true
11 | True
12 | SIPSorceryMedia.Windows
13 | net8.0-windows10.0.17763.0
14 | 10.0.17763.0
15 | true
16 | Aaron Clauson
17 | Copyright © 2020-2025 Aaron Clauson
18 | SIP Sorcery PTY LTD
19 | BSD-3-Clause
20 | SIPSorceryMedia.Windows
21 | Audio and video end points for Windows capture devices.
22 | Provides audio and video device access for Windows for use with the main SIPSorcery real-time communications library.
23 | SIPSorceryMedia.Windows
24 | http://www.sipsorcery.com/mainsite/favicon.ico
25 | icon.png
26 | https://github.com/sipsorcery-org/SIPSorceryMedia.Windows
27 | README.md
28 | 8.0.14
29 | 8.0.14.0
30 | 8.0.14.0
31 | git
32 | master
33 | WebRTC VoIP SIPSorcery Audio Video Codecs Encoders Decoders
34 | -v8.0.14: Improvements to audio sink handling and implemented IAudioEndPoint interface.
35 | -v8.0.11: Support more than one audio channel.
36 | -v8.0.7: Updated for changes to abstractions library.
37 | -v6.0.5: Updated for changes to abstractions library.
38 | -v6.0.4: Updated for changes to abstractions library.
39 | -v6.0.1-pre: Fixed invalid cast exception acquiring webcam frames (properly this time, not just in test app).
40 | -v6.0.0-pre: Target .net6 and latest Windows SDK version.
41 | -v0.0.32-pre: Updated to the latest Windows SDK version.
42 | -v0.0.31-pre: Adjust audio playback rate dependent on selected audio format (fixes G722 playback).
43 | -v0.0.30-pre: Updated to use latest abstractions package with change to IAudioEncoder and IVideoEncoder interfaces.
44 | -v0.0.29-pre: Added new method to list video formats for a Windows video capture device.
45 | -v0.0.28-pre: Removed dependency on SIPSorceryMedia.Encoders. Now using IVideoEncoder interface for more flexibility.
46 | -v0.0.27-pre: Updated to use latest encoders package.
47 | -v0.0.26-pre: Wired up video source raw sample event for convenient hook into locally generated video samples.
48 | -v0.0.25-pre: Updated to use latest abstractions and encoder packages.
49 | -v0.0.24-pre: Fixed endian issue converting microphone samples to signed PCM.
50 | -v0.0.23-pre: Updated for SDP refactor in main library for improved audio and video format management.
51 | -v0.0.21-pre: Updated to use latest abstractions and encoding packages.
52 | -v0.0.20-pre: Refactored encoding functions into SIPSorceryMedia.Encoding.
53 | -v0.0.19-pre: Added support for codec manager.
54 | -v0.0.18-pre: Added events for video and audio source failures.
55 | -v0.0.16-pre: Added webcam support.
56 | -v0.0.15-pre: Initial pre-release
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 | true
66 | snupkg
67 |
68 |
69 |
70 |
--------------------------------------------------------------------------------
/src/SIPSorceryMedia.Windows.sln:
--------------------------------------------------------------------------------
1 |
2 | Microsoft Visual Studio Solution File, Format Version 12.00
3 | # Visual Studio Version 16
4 | VisualStudioVersion = 16.0.30320.27
5 | MinimumVisualStudioVersion = 10.0.40219.1
6 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "SIPSorceryMedia.Windows", "SIPSorceryMedia.Windows.csproj", "{9FE0CD36-9AA6-4F54-92FC-685ECD0CDA94}"
7 | EndProject
8 | Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "test", "test", "{3C0AB998-716E-4FFD-9742-1470B6E4BE73}"
9 | EndProject
10 | Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "VideoCaptureTest", "..\test\VideoCaptureTest\VideoCaptureTest.csproj", "{264CD1C2-2725-436E-8DC9-81FF119B95B4}"
11 | EndProject
12 | Global
13 | GlobalSection(SolutionConfigurationPlatforms) = preSolution
14 | Debug|Any CPU = Debug|Any CPU
15 | Debug|x64 = Debug|x64
16 | Debug|x86 = Debug|x86
17 | Release|Any CPU = Release|Any CPU
18 | Release|x64 = Release|x64
19 | Release|x86 = Release|x86
20 | EndGlobalSection
21 | GlobalSection(ProjectConfigurationPlatforms) = postSolution
22 | {9FE0CD36-9AA6-4F54-92FC-685ECD0CDA94}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
23 | {9FE0CD36-9AA6-4F54-92FC-685ECD0CDA94}.Debug|Any CPU.Build.0 = Debug|Any CPU
24 | {9FE0CD36-9AA6-4F54-92FC-685ECD0CDA94}.Debug|x64.ActiveCfg = Debug|Any CPU
25 | {9FE0CD36-9AA6-4F54-92FC-685ECD0CDA94}.Debug|x64.Build.0 = Debug|Any CPU
26 | {9FE0CD36-9AA6-4F54-92FC-685ECD0CDA94}.Debug|x86.ActiveCfg = Debug|Any CPU
27 | {9FE0CD36-9AA6-4F54-92FC-685ECD0CDA94}.Debug|x86.Build.0 = Debug|Any CPU
28 | {9FE0CD36-9AA6-4F54-92FC-685ECD0CDA94}.Release|Any CPU.ActiveCfg = Release|Any CPU
29 | {9FE0CD36-9AA6-4F54-92FC-685ECD0CDA94}.Release|Any CPU.Build.0 = Release|Any CPU
30 | {9FE0CD36-9AA6-4F54-92FC-685ECD0CDA94}.Release|x64.ActiveCfg = Release|Any CPU
31 | {9FE0CD36-9AA6-4F54-92FC-685ECD0CDA94}.Release|x64.Build.0 = Release|Any CPU
32 | {9FE0CD36-9AA6-4F54-92FC-685ECD0CDA94}.Release|x86.ActiveCfg = Release|Any CPU
33 | {9FE0CD36-9AA6-4F54-92FC-685ECD0CDA94}.Release|x86.Build.0 = Release|Any CPU
34 | {264CD1C2-2725-436E-8DC9-81FF119B95B4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
35 | {264CD1C2-2725-436E-8DC9-81FF119B95B4}.Debug|Any CPU.Build.0 = Debug|Any CPU
36 | {264CD1C2-2725-436E-8DC9-81FF119B95B4}.Debug|x64.ActiveCfg = Debug|Any CPU
37 | {264CD1C2-2725-436E-8DC9-81FF119B95B4}.Debug|x64.Build.0 = Debug|Any CPU
38 | {264CD1C2-2725-436E-8DC9-81FF119B95B4}.Debug|x86.ActiveCfg = Debug|Any CPU
39 | {264CD1C2-2725-436E-8DC9-81FF119B95B4}.Debug|x86.Build.0 = Debug|Any CPU
40 | {264CD1C2-2725-436E-8DC9-81FF119B95B4}.Release|Any CPU.ActiveCfg = Release|Any CPU
41 | {264CD1C2-2725-436E-8DC9-81FF119B95B4}.Release|Any CPU.Build.0 = Release|Any CPU
42 | {264CD1C2-2725-436E-8DC9-81FF119B95B4}.Release|x64.ActiveCfg = Release|Any CPU
43 | {264CD1C2-2725-436E-8DC9-81FF119B95B4}.Release|x64.Build.0 = Release|Any CPU
44 | {264CD1C2-2725-436E-8DC9-81FF119B95B4}.Release|x86.ActiveCfg = Release|Any CPU
45 | {264CD1C2-2725-436E-8DC9-81FF119B95B4}.Release|x86.Build.0 = Release|Any CPU
46 | EndGlobalSection
47 | GlobalSection(SolutionProperties) = preSolution
48 | HideSolutionNode = FALSE
49 | EndGlobalSection
50 | GlobalSection(NestedProjects) = preSolution
51 | {264CD1C2-2725-436E-8DC9-81FF119B95B4} = {3C0AB998-716E-4FFD-9742-1470B6E4BE73}
52 | EndGlobalSection
53 | GlobalSection(ExtensibilityGlobals) = postSolution
54 | SolutionGuid = {49C7CDF5-E940-4CEB-AE5D-4A8377759FC7}
55 | EndGlobalSection
56 | EndGlobal
57 |
--------------------------------------------------------------------------------
/src/WindowsAudioEndPoint.cs:
--------------------------------------------------------------------------------
1 | //-----------------------------------------------------------------------------
2 | // Filename: WindowsAudioEndPoint.cs
3 | //
4 | // Description: Example of an RTP session that uses NAUdio for audio
5 | // capture and rendering on Windows.
6 | //
7 | // Author(s):
8 | // Aaron Clauson (aaron@sipsorcery.com)
9 | //
10 | // History:
11 | // 17 Apr 2020 Aaron Clauson Created, Dublin, Ireland.
12 | // 01 Jun 2020 Aaron Clauson Refactored to use RtpAudioSession base class.
13 | // 15 Aug 2020 Aaron Clauson Moved from examples into SIPSorceryMedia.Windows
14 | // assembly.
15 | // 21 Jan 2021 Aaron Clauson Adjust playback rate dependent on selected audio format.
16 | //
17 | // License:
18 | // BSD 3-Clause "New" or "Revised" License and the additional
19 | // BDS BY-NC-SA restriction, see included LICENSE file.
20 | //-----------------------------------------------------------------------------
21 |
22 | using System;
23 | using System.Collections.Generic;
24 | using System.Linq;
25 | using System.Net;
26 | using System.Threading.Tasks;
27 | using Microsoft.Extensions.Logging;
28 | using NAudio.Wave;
29 | using SIPSorceryMedia.Abstractions;
30 |
31 | namespace SIPSorceryMedia.Windows
32 | {
33 | public class WindowsAudioEndPoint : IAudioEndPoint
34 | {
35 | private const int DEVICE_BITS_PER_SAMPLE = 16;
36 | private const int DEFAULT_DEVICE_CHANNELS = 1;
37 | private const int INPUT_BUFFERS = 2; // See https://github.com/sipsorcery/sipsorcery/pull/148.
38 | private const int DEFAULT_PLAYBACK_BUFFER_MILLISECONDS = 20;
39 | private const int AUDIO_INPUTDEVICE_INDEX = -1;
40 | private const int AUDIO_OUTPUTDEVICE_INDEX = -1;
41 |
42 | /// <summary>
43 | /// Microphone input is sampled at 8KHz.
44 | /// </summary>
45 | public readonly static AudioSamplingRatesEnum DefaultAudioSourceSamplingRate = AudioSamplingRatesEnum.Rate8KHz;
46 |
47 | public readonly static AudioSamplingRatesEnum DefaultAudioPlaybackRate = AudioSamplingRatesEnum.Rate8KHz;
48 |
49 | private ILogger logger = SIPSorcery.LogFactory.CreateLogger();
50 |
51 | private WaveFormat _waveSinkFormat;
52 | private WaveFormat _waveSourceFormat;
53 |
54 | ///
55 | /// Audio render device.
56 | ///
57 | private WaveOutEvent _waveOutEvent;
58 |
59 | ///
60 | /// Buffer for audio samples to be rendered.
61 | ///
62 | private BufferedWaveProvider _waveProvider;
63 |
64 | ///
65 | /// Audio capture device.
66 | ///
67 | private WaveInEvent _waveInEvent;
68 |
69 | private IAudioEncoder _audioEncoder;
70 | private MediaFormatManager _audioFormatManager;
71 |
72 | private bool _disableSink;
73 | private int _audioOutDeviceIndex;
74 | private int _audioInDeviceIndex;
75 | private bool _disableSource;
76 |
77 | protected bool _isAudioSourceStarted;
78 | protected bool _isAudioSinkStarted;
79 | protected bool _isAudioSourcePaused;
80 | protected bool _isAudioSinkPaused;
81 | protected bool _isAudioSourceClosed;
82 | protected bool _isAudioSinkClosed;
83 |
84 | /// <summary>
85 | /// Obsolete. Use the OnAudioSourceEncodedFrameReady event instead.
86 | /// </summary>
87 | public event EncodedSampleDelegate OnAudioSourceEncodedSample;
88 |
89 | /// <summary>
90 | /// Event handler for when an encoded audio frame is ready to be sent to the RTP transport layer.
91 | /// The sample contained in this event is already encoded with the chosen audio format (codec) and ready for transmission.
92 | /// </summary>
93 | public event Action OnAudioSourceEncodedFrameReady;
94 |
95 | /// <summary>
96 | /// This audio source DOES NOT generate raw samples. Subscribe to the encoded samples event
97 | /// to get samples ready for passing to the RTP transport layer.
98 | /// </summary>
99 | [Obsolete("The audio source only generates encoded samples.")]
100 | public event RawAudioSampleDelegate OnAudioSourceRawSample { add { } remove { } }
101 |
102 | public event SourceErrorDelegate OnAudioSourceError;
103 |
104 | public event SourceErrorDelegate OnAudioSinkError;
105 |
106 | /// <summary>
107 | /// Creates a new basic RTP session that captures and renders audio to/from the default system devices.
108 | /// </summary>
109 | /// An audio encoder that can be used to encode and decode
110 | /// specific audio codecs.
111 | /// Optional. An external source to use in combination with the source
112 | /// provided by this end point. The application will need to signal which source is active.
113 | /// Set to true to disable the use of the audio source functionality, i.e.
114 | /// don't capture input from the microphone.
115 | /// Set to true to disable the use of the audio sink functionality, i.e.
116 | /// don't playback audio to the speaker.
117 | public WindowsAudioEndPoint(IAudioEncoder audioEncoder,
118 | int audioOutDeviceIndex = AUDIO_OUTPUTDEVICE_INDEX,
119 | int audioInDeviceIndex = AUDIO_INPUTDEVICE_INDEX,
120 | bool disableSource = false,
121 | bool disableSink = false)
122 | {
123 | logger = SIPSorcery.LogFactory.CreateLogger();
124 |
125 | _audioFormatManager = new MediaFormatManager(audioEncoder.SupportedFormats);
126 | _audioEncoder = audioEncoder;
127 |
128 | _audioOutDeviceIndex = audioOutDeviceIndex;
129 | _audioInDeviceIndex = audioInDeviceIndex;
130 | _disableSource = disableSource;
131 | _disableSink = disableSink;
132 |
133 | if (!_disableSink)
134 | {
135 | InitPlaybackDevice(_audioOutDeviceIndex, DefaultAudioPlaybackRate.GetHashCode(), DEFAULT_DEVICE_CHANNELS); // NOTE(review): GetHashCode() is relied on to yield the enum's numeric value; the explicit (int) cast used for the capture device below is the clearer idiom.
136 |
137 | if (audioEncoder.SupportedFormats?.Count == 1)
138 | {
139 | SetAudioSinkFormat(audioEncoder.SupportedFormats[0]); // Single supported format: nothing to negotiate, select it immediately.
140 | }
141 | }
142 |
143 | if (!_disableSource)
144 | {
145 | InitCaptureDevice(_audioInDeviceIndex, (int)DefaultAudioSourceSamplingRate, DEFAULT_DEVICE_CHANNELS);
146 |
147 | if (audioEncoder.SupportedFormats?.Count == 1)
148 | {
149 | SetAudioSourceFormat(audioEncoder.SupportedFormats[0]); // Single supported format: nothing to negotiate, select it immediately.
150 | }
151 | }
152 | }
153 |
154 | public void RestrictFormats(Func filter) => _audioFormatManager.RestrictFormats(filter); // Narrows the negotiable formats via the shared format manager.
155 | public List GetAudioSourceFormats() => _audioFormatManager.GetSourceFormats();
156 | public List GetAudioSinkFormats() => _audioFormatManager.GetSourceFormats(); // NOTE(review): sink formats are deliberately served from the same source list; a single format manager is shared by source and sink.
157 |
158 | public bool HasEncodedAudioSubscribers() => OnAudioSourceEncodedSample != null;
159 | public bool IsAudioSourcePaused() => _isAudioSourcePaused;
160 | public bool IsAudioSinkPaused() => _isAudioSinkPaused;
161 | public void ExternalAudioSourceRawSample(AudioSamplingRatesEnum samplingRate, uint durationMilliseconds, short[] sample) =>
162 | throw new NotImplementedException(); // This end point only produces encoded samples; raw external samples are unsupported.
163 |
164 | public void SetAudioSourceFormat(AudioFormat audioFormat) // Selects the codec for capture and re-opens the capture device if its sample rate differs.
165 | {
166 | _audioFormatManager.SetSelectedFormat(audioFormat);
167 |
168 | if (!_disableSource)
169 | {
170 | if (_waveSourceFormat.SampleRate != _audioFormatManager.SelectedFormat.ClockRate)
171 | {
172 | // Reinitialise the audio capture device.
173 | logger.LogDebug($"Windows audio end point adjusting capture rate from {_waveSourceFormat.SampleRate} to {_audioFormatManager.SelectedFormat.ClockRate}.");
174 |
175 | InitCaptureDevice(_audioInDeviceIndex, _audioFormatManager.SelectedFormat.ClockRate, _audioFormatManager.SelectedFormat.ChannelCount);
176 | }
177 | }
178 | }
179 |
180 | public void SetAudioSinkFormat(AudioFormat audioFormat) // Selects the codec for playback and re-opens the output device if its sample rate differs.
181 | {
182 | _audioFormatManager.SetSelectedFormat(audioFormat);
183 |
184 | if (!_disableSink)
185 | {
186 | if (_waveSinkFormat.SampleRate != _audioFormatManager.SelectedFormat.ClockRate)
187 | {
188 | // Reinitialise the audio output device.
189 | logger.LogDebug($"Windows audio end point adjusting playback rate from {_waveSinkFormat.SampleRate} to {_audioFormatManager.SelectedFormat.ClockRate}.");
190 |
191 | InitPlaybackDevice(_audioOutDeviceIndex, _audioFormatManager.SelectedFormat.ClockRate, _audioFormatManager.SelectedFormat.ChannelCount);
192 | }
193 | }
194 | }
195 |
196 | public MediaEndPoints ToMediaEndPoints() // Exposes this instance as the audio source and/or sink, honouring the disable flags set at construction.
197 | {
198 | return new MediaEndPoints
199 | {
200 | AudioSource = _disableSource ? null : this,
201 | AudioSink = _disableSink ? null : this,
202 | };
203 | }
204 |
205 | /// <summary>
206 | /// Starts the audio capturing/source device and the audio sink device.
207 | /// </summary>
208 | public Task Start()
209 | {
210 | if (!_isAudioSourceStarted && _waveInEvent != null)
211 | {
212 | StartAudio();
213 | }
214 |
215 | if (!_isAudioSinkStarted && _waveOutEvent != null)
216 | {
217 | StartAudioSink();
218 | }
219 |
220 | return Task.CompletedTask;
221 | }
222 |
223 | /// <summary>
224 | /// Closes the audio devices.
225 | /// </summary>
226 | public Task Close()
227 | {
228 | if (!_isAudioSourceClosed && _waveInEvent != null)
229 | {
230 | CloseAudio();
231 | }
232 |
233 | if (!_isAudioSinkClosed && _waveOutEvent != null)
234 | {
235 | CloseAudioSink();
236 | }
237 |
238 | return Task.CompletedTask;
239 | }
240 |
241 | public Task Pause() // Pauses the capture (source) and render (sink) devices when they are active and not already paused.
242 | {
243 | if (!_isAudioSourcePaused && _waveInEvent != null)
244 | {
245 | PauseAudio();
246 | }
247 |
248 | if (!_isAudioSinkPaused && _waveOutEvent != null)
249 | {
250 | PauseAudioSink();
251 | }
252 |
253 | return Task.CompletedTask;
254 | }
255 |
256 | public Task Resume() // Resumes the capture (source) and render (sink) devices that were previously paused.
257 | {
258 | if (_isAudioSourcePaused && _waveInEvent != null)
259 | {
260 | ResumeAudio();
261 | }
262 |
263 | if (_isAudioSinkPaused && _waveOutEvent != null)
264 | {
265 | ResumeAudioSink();
266 | }
267 |
268 | return Task.CompletedTask;
269 | }
270 |
271 | private void InitPlaybackDevice(int audioOutDeviceIndex, int audioSinkSampleRate, int channels) // (Re)creates the NAudio playback chain for the given device/rate; failures are reported via OnAudioSinkError rather than thrown.
272 | {
273 | try
274 | {
275 | _waveOutEvent?.Stop(); // Stop any previously initialised output device before replacing it.
276 |
277 | _waveSinkFormat = new WaveFormat(
278 | audioSinkSampleRate,
279 | DEVICE_BITS_PER_SAMPLE,
280 | channels);
281 |
282 | // Playback device.
283 | _waveOutEvent = new WaveOutEvent();
284 | _waveOutEvent.DeviceNumber = audioOutDeviceIndex;
285 | _waveProvider = new BufferedWaveProvider(_waveSinkFormat);
286 | _waveProvider.DiscardOnBufferOverflow = true; // Drop samples rather than throw if rendering falls behind.
287 | _waveOutEvent.Init(_waveProvider);
288 | }
289 | catch (Exception excp)
290 | {
291 | logger.LogWarning(0, excp, "WindowsAudioEndPoint failed to initialise playback device.");
292 | OnAudioSinkError?.Invoke($"WindowsAudioEndPoint failed to initialise playback device. {excp.Message}");
293 | }
294 | }
295 |
296 | private void InitCaptureDevice(int audioInDeviceIndex, int audioSourceSampleRate, int audioSourceChannels)
297 | {
298 | if (WaveInEvent.DeviceCount > 0)
299 | {
300 | if (WaveInEvent.DeviceCount > audioInDeviceIndex)
301 | {
302 | if (_waveInEvent != null)
303 | {
304 | _waveInEvent.DataAvailable -= LocalAudioSampleAvailable;
305 | _waveInEvent.StopRecording();
306 | }
307 |
308 | _waveSourceFormat = new WaveFormat(
309 | audioSourceSampleRate,
310 | DEVICE_BITS_PER_SAMPLE,
311 | audioSourceChannels);
312 |
313 | _waveInEvent = new WaveInEvent();
314 |
315 | // Note NAudio recommends a buffer size of 100ms but codecs like Opus can only handle 20ms buffers.
316 | _waveInEvent.BufferMilliseconds = DEFAULT_PLAYBACK_BUFFER_MILLISECONDS;
317 |
318 | _waveInEvent.NumberOfBuffers = INPUT_BUFFERS;
319 | _waveInEvent.DeviceNumber = audioInDeviceIndex;
320 | _waveInEvent.WaveFormat = _waveSourceFormat;
321 | _waveInEvent.DataAvailable += LocalAudioSampleAvailable;
322 | }
323 | else
324 | {
325 | logger.LogWarning($"The requested audio input device index {audioInDeviceIndex} exceeds the maximum index of {WaveInEvent.DeviceCount - 1}.");
326 | OnAudioSourceError?.Invoke($"The requested audio input device index {audioInDeviceIndex} exceeds the maximum index of {WaveInEvent.DeviceCount - 1}.");
327 | }
328 | }
329 | else
330 | {
331 | logger.LogWarning("No audio capture devices are available.");
332 | OnAudioSourceError?.Invoke("No audio capture devices are available.");
333 | }
334 | }
335 |
336 | ///
337 | /// Event handler for audio sample being supplied by local capture device.
338 | ///
339 | private void LocalAudioSampleAvailable(object sender, WaveInEventArgs args)
340 | {
341 | // Note NAudio.Wave.WaveBuffer.ShortBuffer does not take into account little endian.
342 | // https://github.com/naudio/NAudio/blob/master/NAudio/Wave/WaveOutputs/WaveBuffer.cs
343 |
344 | byte[] buffer = args.Buffer.Take(args.BytesRecorded).ToArray();
345 | short[] pcm = buffer.Where((x, i) => i % 2 == 0).Select((y, i) => BitConverter.ToInt16(buffer, i * 2)).ToArray();
346 | byte[] encodedSample = _audioEncoder.EncodeAudio(pcm, _audioFormatManager.SelectedFormat);
347 |
348 | OnAudioSourceEncodedSample?.Invoke((uint)encodedSample.Length, encodedSample);
349 |
350 | if (OnAudioSourceEncodedFrameReady != null)
351 | {
352 | var encodedAudioFrame = new EncodedAudioFrame(0,
353 | _audioFormatManager.SelectedFormat,
354 | GetEncodSampleDurationMs(pcm.Length, _audioFormatManager.SelectedFormat),
355 | encodedSample);
356 | OnAudioSourceEncodedFrameReady(encodedAudioFrame);
357 | }
358 | }
359 |
360 | private uint GetEncodSampleDurationMs(int totalPcmSamples, AudioFormat audioFormat)
361 | {
362 | int numChannels = audioFormat.ChannelCount;
363 | int sampleRate = audioFormat.ClockRate;
364 | int frames = totalPcmSamples / numChannels;
365 | double durationMsD = sampleRate > 0 ? (frames / (double)sampleRate) * 1000.0 : 0;
366 | return (uint)Math.Round(durationMsD);
367 | }
368 |
369 | ///
370 | /// Event handler for playing audio samples received from the remote call party.
371 | ///
372 | /// Raw PCM sample from remote party.
373 | public void GotAudioSample(byte[] pcmSample)
374 | {
375 | if (_waveProvider != null)
376 | {
377 | _waveProvider.AddSamples(pcmSample, 0, pcmSample.Length);
378 | }
379 | }
380 |
381 | ///
382 | /// Obsolete. Use the method instead.
383 | ///
384 | [Obsolete("Use GotEncodedMediaFrame instead.")]
385 | public void GotAudioRtp(IPEndPoint remoteEndPoint, uint ssrc, uint seqnum, uint timestamp, int payloadID, bool marker, byte[] payload)
386 | {
387 | if (_waveProvider != null && _audioEncoder != null)
388 | {
389 | var pcmSample = _audioEncoder.DecodeAudio(payload, _audioFormatManager.SelectedFormat);
390 | byte[] pcmBytes = pcmSample.SelectMany(BitConverter.GetBytes).ToArray();
391 | _waveProvider?.AddSamples(pcmBytes, 0, pcmBytes.Length);
392 | }
393 | }
394 |
395 | ///
396 | /// Handler for receiving an encoded audio frame from the remote party.
397 | ///
398 | /// Encoded audio frame received from the remote party.
399 | public void GotEncodedMediaFrame(EncodedAudioFrame encodedMediaFrame)
400 | {
401 | var audioFormat = encodedMediaFrame.AudioFormat;
402 |
403 | if (_waveProvider != null && _audioEncoder != null && !audioFormat.IsEmpty())
404 | {
405 | var pcmSample = _audioEncoder.DecodeAudio(encodedMediaFrame.EncodedAudio, audioFormat);
406 | byte[] pcmBytes = pcmSample.SelectMany(BitConverter.GetBytes).ToArray();
407 | _waveProvider?.AddSamples(pcmBytes, 0, pcmBytes.Length);
408 | }
409 | }
410 |
        /// <summary>
        /// Pauses the audio sink (playback) device. Use Pause to pause both the audio source and sink.
        /// </summary>
        public Task PauseAudioSink()
        {
            _isAudioSinkPaused = true;
            _waveOutEvent?.Pause();
            return Task.CompletedTask;
        }
417 |
        /// <summary>
        /// Resumes the audio sink (playback) device. Use Resume to resume both the audio source and sink.
        /// </summary>
        public Task ResumeAudioSink()
        {
            _isAudioSinkPaused = false;
            _waveOutEvent?.Play();
            return Task.CompletedTask;
        }
424 |
        /// <summary>
        /// Starts the audio sink (playback) device if it has not already been started.
        /// </summary>
        public Task StartAudioSink()
        {
            if (!_isAudioSinkStarted)
            {
                _isAudioSinkStarted = true;
                _waveOutEvent?.Play();
            }
            return Task.CompletedTask;
        }
434 |
        /// <summary>
        /// Closes (stops) the audio sink (playback) device if it has not already been closed.
        /// </summary>
        public Task CloseAudioSink()
        {
            if (!_isAudioSinkClosed)
            {
                _isAudioSinkClosed = true;

                _waveOutEvent?.Stop();
            }

            return Task.CompletedTask;
        }
446 |
        /// <summary>
        /// Pauses the audio source only. Use <see cref="Pause"/> to pause both the audio source and sink.
        /// </summary>
        public Task PauseAudio()
        {
            _isAudioSourcePaused = true;
            _waveInEvent?.StopRecording();

            return Task.CompletedTask;
        }
457 |
        /// <summary>
        /// Resumes the audio source only. Use <see cref="Resume"/> to resume both the audio source and sink.
        /// </summary>
        public Task ResumeAudio()
        {
            _isAudioSourcePaused = false;
            _waveInEvent?.StartRecording();

            return Task.CompletedTask;
        }
468 |
        /// <summary>
        /// Starts the audio source only. Use <see cref="Start"/> to start both the audio source and sink.
        /// </summary>
        public Task StartAudio()
        {
            if (!_isAudioSourceStarted)
            {
                _isAudioSourceStarted = true;
                _waveInEvent?.StartRecording();
            }

            return Task.CompletedTask;
        }
482 |
        /// <summary>
        /// Closes (stops) the audio source only. Use <see cref="Close"/> to stop both the audio source and sink.
        /// </summary>
        public Task CloseAudio()
        {
            if (!_isAudioSourceClosed)
            {
                _isAudioSourceClosed = true;

                // Detach the handler before stopping so no further samples are raised.
                if (_waveInEvent != null)
                {
                    _waveInEvent.DataAvailable -= LocalAudioSampleAvailable;
                    _waveInEvent.StopRecording();
                }
            }

            return Task.CompletedTask;
        }
501 | }
502 | }
503 |
--------------------------------------------------------------------------------
/src/WindowsVideoEndPoint.cs:
--------------------------------------------------------------------------------
1 | //-----------------------------------------------------------------------------
2 | // Filename: WindowsVideoEndPoint.cs
3 | //
4 | // Description: Implements a video source and sink for Windows.
5 | //
6 | // Author(s):
7 | // Aaron Clauson (aaron@sipsorcery.com)
8 | //
9 | // History:
10 | // 20 Aug 2020 Aaron Clauson Created, Dublin, Ireland.
11 | //
12 | // License:
13 | // BSD 3-Clause "New" or "Revised" License and the additional
14 | // BSD BY-NC-SA restriction, see included LICENSE.md file.
15 | //-----------------------------------------------------------------------------
16 |
17 | using Microsoft.Extensions.Logging;
18 | using SIPSorceryMedia.Abstractions;
19 | using System;
20 | using System.Collections.Generic;
21 | using System.Linq;
22 | using System.Net;
23 | using System.Runtime.InteropServices;
24 | using System.Threading;
25 | using System.Threading.Tasks;
26 | using Windows.Devices.Enumeration;
27 | using Windows.Graphics.Imaging;
28 | using Windows.Media.Capture;
29 | using Windows.Media.Capture.Frames;
30 | using Windows.Media.Devices;
31 | using Windows.Media.MediaProperties;
32 | using WinRT;
33 |
34 | namespace SIPSorceryMedia.Windows
35 | {
    // COM interop definition used to obtain a raw pointer to the pixel data of a
    // locked bitmap buffer (via IMemoryBufferReference). The GUID is the well-known
    // IID for Windows.Foundation.IMemoryBufferByteAccess and must not be changed.
    [ComImport]
    [Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
    [InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
    unsafe interface IMemoryBufferByteAccess
    {
        // Returns a pointer to the underlying buffer and its capacity in bytes.
        void GetBuffer(out byte* buffer, out uint capacity);
    }
43 |
    /// <summary>
    /// Minimal descriptor for a local video capture device.
    /// </summary>
    public struct VideoCaptureDeviceInfo
    {
        // Windows device identifier (DeviceInformation.Id).
        public string ID;
        // Human readable device name (DeviceInformation.Name).
        public string Name;
    }
49 |
    /// <summary>
    /// Implements a video source (local capture device) and video sink (decoder) for
    /// Windows using the Windows Media Foundation capture APIs.
    /// </summary>
    public class WindowsVideoEndPoint : IVideoEndPoint, IDisposable
    {
        // RTP clock rate for video; used to convert frame durations to RTP timestamp units.
        private const int VIDEO_SAMPLING_RATE = 90000;
        // Fallback frame rate used when the capture device did not report a usable one.
        private const int DEFAULT_FRAMES_PER_SECOND = 30;
        // Media Foundation subtype string for NV12 frames.
        private readonly string MF_NV12_PIXEL_FORMAT = MediaEncodingSubtypes.Nv12;
        // Media Foundation subtype GUID string for I420 frames.
        public const string MF_I420_PIXEL_FORMAT = "{30323449-0000-0010-8000-00AA00389B71}";

        // NV12 seems to be what the Software Bitmaps provided from Windows Media Foundation tend to prefer.
        //private readonly vpxmd.VpxImgFmt EncoderInputFormat = vpxmd.VpxImgFmt.VPX_IMG_FMT_NV12;
        private readonly VideoPixelFormatsEnum EncoderInputFormat = VideoPixelFormatsEnum.NV12;

        private static ILogger logger = SIPSorcery.LogFactory.CreateLogger();

        // Tracks the supported/selected video formats for negotiation.
        // NOTE(review): generic argument appears stripped in this copy; presumably
        // MediaFormatManager<VideoFormat> — confirm against the original source.
        private MediaFormatManager _videoFormatManager;
        private IVideoEncoder _videoEncoder;
        // Set via ForceKeyFrame and cleared after the next encode pass.
        private bool _forceKeyFrame = false;
        private bool _isInitialised;
        private bool _isStarted;
        private bool _isPaused;
        private bool _isClosed;
        private MediaCapture _mediaCapture;
        private MediaFrameReader _mediaFrameReader;
        // Most recently captured frame; swapped via Interlocked.Exchange in FrameArrivedHandler.
        private SoftwareBitmap _backBuffer;
        private string _videoDeviceID;
        // Requested capture dimensions/rate; overwritten with the actual negotiated values
        // once the device is initialised.
        private uint _width = 0;
        private uint _height = 0;
        private uint _fpsNumerator = 0;
        private uint _fpsDenominator = 1;
        // Latch so the device-failed handler only fires the error path once.
        private bool _videoCaptureDeviceFailed;
        // Timestamp of the last captured frame, used to compute raw sample spacing.
        private DateTime _lastFrameAt = DateTime.MinValue;

        /// <summary>
        /// This event is fired when local video samples are available, for applications that
        /// wish to display the local video stream. It is fired after the sample has been
        /// encoded and is ready for transmission.
        /// </summary>
        public event RawVideoSampleDelegate OnVideoSourceRawSample;

#pragma warning disable 0067
        /// <summary>
        /// Not used by this component - use OnVideoSourceRawSample instead.
        /// </summary>
        public event RawVideoSampleFasterDelegate OnVideoSourceRawSampleFaster;
#pragma warning restore 0067

        /// <summary>
        /// This event will be fired whenever a video sample is encoded and is ready to transmit to the remote party.
        /// </summary>
        public event EncodedSampleDelegate OnVideoSourceEncodedSample;

        /// <summary>
        /// This event is fired after the sink decodes a video frame from the remote party.
        /// </summary>
        public event VideoSinkSampleDecodedDelegate OnVideoSinkDecodedSample;

#pragma warning disable 0067
        /// <summary>
        /// Not used by this component - use OnVideoSinkDecodedSample instead.
        /// </summary>
        public event VideoSinkSampleDecodedFasterDelegate OnVideoSinkDecodedSampleFaster;
#pragma warning restore 0067

        /// <summary>
        /// This event will be fired if there is a problem acquiring the capture device.
        /// </summary>
        public event SourceErrorDelegate OnVideoSourceError;
117 |
118 | ///
119 | /// Attempts to create a new video source from a local video capture device.
120 | ///
121 | /// A video encoder that can be used to encode and decode video frames.
122 | /// Optional. If specified the video capture device will be requested to initialise with this frame
123 | /// width. If the attempt fails an exception is thrown. If not specified the device's default frame width will
124 | /// be used.
125 | /// Optional. If specified the video capture device will be requested to initialise with this frame
126 | /// height. If the attempt fails an exception is thrown. If not specified the device's default frame height will
127 | /// be used.
128 | /// Optional. If specified the video capture device will be requested to initialise with this frame
129 | /// rate. If the attempt fails an exception is thrown. If not specified the device's default frame rate will
130 | /// be used.
131 | public WindowsVideoEndPoint(IVideoEncoder videoEncoder,
132 | string videoDeviceID = null,
133 | uint width = 0,
134 | uint height = 0,
135 | uint fps = 0)
136 | {
137 | _videoEncoder = videoEncoder;
138 | _videoDeviceID = videoDeviceID;
139 | _width = width;
140 | _height = height;
141 | _fpsNumerator = fps;
142 |
143 | _mediaCapture = new MediaCapture();
144 | _mediaCapture.Failed += VideoCaptureDevice_Failed;
145 | _videoFormatManager = new MediaFormatManager(videoEncoder.SupportedFormats);
146 |
147 | if(videoEncoder.SupportedFormats?.Count == 0)
148 | {
149 | _videoFormatManager.SetSelectedFormat(videoEncoder.SupportedFormats[0]);
150 | }
151 | }
152 |
        // NOTE(review): generic arguments appear stripped in this copy of the file
        // (e.g. Func<VideoFormat, bool>, List<VideoFormat>) — confirm against the original.

        // Restricts the set of formats the format manager will offer during negotiation.
        public void RestrictFormats(Func filter) => _videoFormatManager.RestrictFormats(filter);
        // Gets the video formats this end point can supply as a source.
        public List GetVideoSourceFormats() => _videoFormatManager.GetSourceFormats();
        // Sets the negotiated format for the video source.
        public void SetVideoSourceFormat(VideoFormat videoFormat) => _videoFormatManager.SetSelectedFormat(videoFormat);
        // Sink formats are the same set as the source formats; both delegate to the format manager.
        public List GetVideoSinkFormats() => _videoFormatManager.GetSourceFormats();
        // Sets the negotiated format for the video sink.
        public void SetVideoSinkFormat(VideoFormat videoFormat) => _videoFormatManager.SetSelectedFormat(videoFormat);
        // External raw samples are not supported by this end point; use SIPSorceryMedia.Encoders instead.
        public void ExternalVideoSourceRawSample(uint durationMilliseconds, int width, int height, byte[] sample, VideoPixelFormatsEnum pixelFormat) =>
            throw new ApplicationException("The Windows Video End Point does not support external samples. Use the video end point from SIPSorceryMedia.Encoders.");

        // External raw samples are not supported by this end point; use SIPSorceryMedia.Encoders instead.
        public void ExternalVideoSourceRawSampleFaster(uint durationMilliseconds, RawImage rawImage) =>
            throw new ApplicationException("The Windows Video End Point does not support external samples. Use the video end point from SIPSorceryMedia.Encoders.");

        // Requests that the next encoded frame be a key frame; the flag is cleared after encoding.
        public void ForceKeyFrame() => _forceKeyFrame = true;
        // Individual RTP packets are not supported; full frames must be supplied via GotVideoFrame.
        public void GotVideoRtp(IPEndPoint remoteEndPoint, uint ssrc, uint seqnum, uint timestamp, int payloadID, bool marker, byte[] payload) =>
            throw new ApplicationException("The Windows Video End Point requires full video frames rather than individual RTP packets.");
        // True when at least one subscriber is attached to the encoded sample event.
        public bool HasEncodedVideoSubscribers() => OnVideoSourceEncodedSample != null;
        // True when the video source has been paused via PauseVideo.
        public bool IsVideoSourcePaused() => _isPaused;
169 |
170 | private async void VideoCaptureDevice_Failed(MediaCapture sender, MediaCaptureFailedEventArgs errorEventArgs)
171 | {
172 | if (!_videoCaptureDeviceFailed)
173 | {
174 | _videoCaptureDeviceFailed = true;
175 |
176 | //logger.LogWarning($"Video capture device failed. {errorEventArgs.Message}.");
177 | OnVideoSourceError?.Invoke(errorEventArgs.Message);
178 |
179 | await CloseVideoCaptureDevice().ConfigureAwait(false);
180 | }
181 | }
182 |
183 | ///
184 | /// Initialises the video capture device. Ideally should be called before attempting to use the device,
185 | /// which happens after calling . By initialising first any problem with the requested
186 | /// frame size and rate parameters can be caught.
187 | ///
188 | /// True if the local video capture device was successfully initialised. False if not.
189 | public Task InitialiseVideoSourceDevice()
190 | {
191 | if (!_isInitialised)
192 | {
193 | _isInitialised = true;
194 | return InitialiseDevice(_width, _height, _fpsNumerator);
195 | }
196 | else
197 | {
198 | return Task.FromResult(true);
199 | }
200 | }
201 |
202 | public MediaEndPoints ToMediaEndPoints()
203 | {
204 | return new MediaEndPoints
205 | {
206 | VideoSource = this,
207 | VideoSink = this
208 | };
209 | }
210 |
211 | ///
212 | /// Attempts to initialise the local video capture device.
213 | ///
214 | /// The frame width to attempt to initialise the video capture device with. Set as 0 to use default.
215 | /// The frame height to attempt to initialise the video capture device with. Set as 0 to use default.
216 | /// The frame rate, in frames per second, to attempt to initialise the video capture device with.
217 | /// Set as 0 to use default.
218 | private async Task InitialiseDevice(uint width, uint height, uint fps)
219 | {
220 | var mediaCaptureSettings = new MediaCaptureInitializationSettings()
221 | {
222 | StreamingCaptureMode = StreamingCaptureMode.Video,
223 | SharingMode = MediaCaptureSharingMode.ExclusiveControl,
224 | MediaCategory = MediaCategory.Communications
225 | };
226 |
227 | if (_videoDeviceID != null)
228 | {
229 | var vidCapDevices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture).AsTask().ConfigureAwait(false);
230 | var vidDevice = vidCapDevices.FirstOrDefault(x => x.Id == _videoDeviceID || x.Name == _videoDeviceID);
231 |
232 | if (vidDevice == null)
233 | {
234 | logger.LogWarning($"Could not find video capture device for specified ID {_videoDeviceID}, using default device.");
235 | }
236 | else
237 | {
238 | logger.LogInformation($"Video capture device {vidDevice.Name} selected.");
239 | mediaCaptureSettings.VideoDeviceId = vidDevice.Id;
240 | }
241 | }
242 |
243 | await _mediaCapture.InitializeAsync(mediaCaptureSettings).AsTask().ConfigureAwait(false);
244 |
245 | MediaFrameSourceInfo colorSourceInfo = null;
246 | foreach (var srcInfo in _mediaCapture.FrameSources)
247 | {
248 | if (srcInfo.Value.Info.MediaStreamType == MediaStreamType.VideoRecord &&
249 | srcInfo.Value.Info.SourceKind == MediaFrameSourceKind.Color)
250 | {
251 | colorSourceInfo = srcInfo.Value.Info;
252 | break;
253 | }
254 | }
255 |
256 | var colorFrameSource = _mediaCapture.FrameSources[colorSourceInfo.Id];
257 |
258 | var preferredFormat = colorFrameSource.SupportedFormats.Where(format =>
259 | {
260 | return format.VideoFormat.Width >= _width &&
261 | format.VideoFormat.Width >= _height &&
262 | (format.FrameRate.Numerator / format.FrameRate.Denominator) >= fps
263 | && format.Subtype == MF_NV12_PIXEL_FORMAT;
264 | }).FirstOrDefault();
265 |
266 | if (preferredFormat == null)
267 | {
268 | // Try again without the pixel format.
269 | preferredFormat = colorFrameSource.SupportedFormats.Where(format =>
270 | {
271 | return format.VideoFormat.Width >= _width &&
272 | format.VideoFormat.Width >= _height &&
273 | (format.FrameRate.Numerator / format.FrameRate.Denominator) >= fps;
274 | }).FirstOrDefault();
275 | }
276 |
277 | if (preferredFormat == null)
278 | {
279 | // Still can't get what we want. Log a warning message and take the default.
280 | logger.LogWarning($"The video capture device did not support the requested format (or better) {_width}x{_height} {fps}fps. Using default mode.");
281 |
282 | preferredFormat = colorFrameSource.SupportedFormats.First();
283 | }
284 |
285 | if (preferredFormat == null)
286 | {
287 | throw new ApplicationException("The video capture device does not support a compatible video format for the requested parameters.");
288 | }
289 |
290 | await colorFrameSource.SetFormatAsync(preferredFormat).AsTask().ConfigureAwait(false);
291 |
292 | _mediaFrameReader = await _mediaCapture.CreateFrameReaderAsync(colorFrameSource).AsTask().ConfigureAwait(false);
293 | _mediaFrameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Realtime;
294 |
295 | // Frame source and format have now been successfully set.
296 | _width = preferredFormat.VideoFormat.Width;
297 | _height = preferredFormat.VideoFormat.Height;
298 | _fpsNumerator = preferredFormat.FrameRate.Numerator;
299 | _fpsDenominator = preferredFormat.FrameRate.Denominator;
300 |
301 | //double fpsSelected = _fpsNumerator / _fpsDenominator;
302 | //string pixFmt = preferredFormat.Subtype == MF_I420_PIXEL_FORMAT ? "I420" : preferredFormat.Subtype;
303 | //string deviceName = colorFrameSource.Info.DeviceInformation.Name;
304 | //logger.LogInformation($"Video capture device {deviceName} successfully initialised: {_width}x{_height} {fpsSelected:0.##}fps pixel format {pixFmt}.");
305 |
306 | PrintFrameSourceInfo(colorFrameSource);
307 |
308 | _mediaFrameReader.FrameArrived += FrameArrivedHandler;
309 |
310 | return true;
311 | }
312 |
313 | public void GotVideoFrame(IPEndPoint remoteEndPoint, uint timestamp, byte[] frame, VideoFormat format)
314 | {
315 | if (!_isClosed)
316 | {
317 | //DateTime startTime = DateTime.Now;
318 |
319 | //List decodedFrames = _vp8Decoder.Decode(frame, frame.Length, out var width, out var height);
320 | var decodedFrames = _videoEncoder.DecodeVideo(frame, EncoderInputFormat, _videoFormatManager.SelectedFormat.Codec);
321 |
322 | if (decodedFrames == null)
323 | {
324 | logger.LogWarning("VPX decode of video sample failed.");
325 | }
326 | else
327 | {
328 | foreach (var decodedFrame in decodedFrames)
329 | {
330 | // Windows bitmaps expect BGR when supplying System.Drawing.Imaging.PixelFormat.Format24bppRgb.
331 | //byte[] bgr = PixelConverter.I420toBGR(decodedFrame.Sample, (int)decodedFrame.Width, (int)decodedFrame.Height);
332 | //Console.WriteLine($"VP8 decode took {DateTime.Now.Subtract(startTime).TotalMilliseconds}ms.");
333 | OnVideoSinkDecodedSample(decodedFrame.Sample, decodedFrame.Width, decodedFrame.Height, (int)(decodedFrame.Width * 3), VideoPixelFormatsEnum.Bgr);
334 | }
335 | }
336 | }
337 | }
338 |
339 | public Task PauseVideo()
340 | {
341 | _isPaused = true;
342 |
343 | if (_mediaFrameReader != null)
344 | {
345 | return _mediaFrameReader.StopAsync().AsTask();
346 | }
347 | else
348 | {
349 | return Task.CompletedTask;
350 | }
351 | }
352 |
353 | public Task ResumeVideo()
354 | {
355 | _isPaused = false;
356 |
357 | if (_mediaFrameReader != null)
358 | {
359 | return _mediaFrameReader.StartAsync().AsTask();
360 | }
361 | else
362 | {
363 | return Task.CompletedTask;
364 | }
365 | }
366 |
367 | public async Task StartVideo()
368 | {
369 | if (!_isStarted)
370 | {
371 | _isStarted = true;
372 |
373 | if (!_isInitialised)
374 | {
375 | await InitialiseVideoSourceDevice().ConfigureAwait(false);
376 | }
377 |
378 | await _mediaFrameReader.StartAsync().AsTask().ConfigureAwait(false);
379 | }
380 | }
381 |
382 | public async Task CloseVideo()
383 | {
384 | if (!_isClosed)
385 | {
386 | _isClosed = true;
387 |
388 | await CloseVideoCaptureDevice().ConfigureAwait(false);
389 |
390 | if (_videoEncoder != null)
391 | {
392 | lock (_videoEncoder)
393 | {
394 | Dispose();
395 | }
396 | }
397 | else
398 | {
399 | Dispose();
400 | }
401 | }
402 | }
403 |
404 | ///
405 | /// Attempts to list the system video capture devices and supported video modes.
406 | ///
407 | public static async Task> GetVideoCatpureDevices()
408 | {
409 | var vidCapDevices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
410 |
411 | if (vidCapDevices != null)
412 | {
413 | return vidCapDevices.Select(x => new VideoCaptureDeviceInfo { ID = x.Id, Name = x.Name }).ToList();
414 | }
415 | else
416 | {
417 | return null;
418 | }
419 | }
420 |
421 | ///
422 | /// Attempts to list the system video capture devices and supported video modes.
423 | ///
424 | public static async Task ListDevicesAndFormats()
425 | {
426 | var vidCapDevices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
427 | foreach (var vidCapDevice in vidCapDevices)
428 | {
429 | // The block below is how the reference documentation shows how to list modes but as of Sep 2020 it does not work.
430 | // https://docs.microsoft.com/en-us/uwp/api/windows.media.capture.mediacapture.findallvideoprofiles?view=winrt-19041.
431 | //logger.LogDebug($"Supported formats for video capture device {vidCapDevice.Name}:");
432 | //foreach (var recordProfiles in MediaCapture.FindAllVideoProfiles(vidCapDevice.Id).Select(x => x.SupportedRecordMediaDescription))
433 | //{
434 | // logger.LogDebug($"Support profile count {recordProfiles.Count}");
435 | // foreach (var profile in recordProfiles)
436 | // {
437 | // logger.LogDebug($"Capture device frame source {profile.Width}x{profile.Height} {profile.FrameRate:0.##}fps {profile.Subtype}");
438 | // }
439 | //}
440 |
441 | var mediaCaptureSettings = new MediaCaptureInitializationSettings()
442 | {
443 | StreamingCaptureMode = StreamingCaptureMode.Video,
444 | SharingMode = MediaCaptureSharingMode.SharedReadOnly,
445 | VideoDeviceId = vidCapDevice.Id
446 | };
447 |
448 | MediaCapture mediaCapture = new MediaCapture();
449 | await mediaCapture.InitializeAsync(mediaCaptureSettings);
450 |
451 | foreach (var srcFmtList in mediaCapture.FrameSources.Values.Select(x => x.SupportedFormats).Select(y => y.ToList()))
452 | {
453 | foreach (var srcFmt in srcFmtList)
454 | {
455 | var vidFmt = srcFmt.VideoFormat;
456 | float vidFps = vidFmt.MediaFrameFormat.FrameRate.Numerator / vidFmt.MediaFrameFormat.FrameRate.Denominator;
457 | string pixFmt = vidFmt.MediaFrameFormat.Subtype == MF_I420_PIXEL_FORMAT ? "I420" : vidFmt.MediaFrameFormat.Subtype;
458 | logger.LogDebug($"Video Capture device {vidCapDevice.Name} format {vidFmt.Width}x{vidFmt.Height} {vidFps:0.##}fps {pixFmt}");
459 | }
460 | }
461 | }
462 | }
463 |
464 | ///
465 | /// Gets a list of supported video frame formats for a webcam.
466 | ///
467 | /// The name of the webcam to get the video formats for.
468 | /// A list of supported video frame formats for the specified webcam.
469 | public static async Task> GetDeviceFrameFormats(string deviceName)
470 | {
471 | if(string.IsNullOrEmpty(deviceName))
472 | {
473 | throw new ArgumentNullException(nameof(deviceName), "A webcam name must be specified to get the video formats for.");
474 | }
475 |
476 | List formats = new List();
477 |
478 | var vidCapDevices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
479 | foreach (var vidCapDevice in vidCapDevices)
480 | {
481 | if(vidCapDevice.Name.ToLower() == deviceName.ToLower())
482 | {
483 | var mediaCaptureSettings = new MediaCaptureInitializationSettings()
484 | {
485 | StreamingCaptureMode = StreamingCaptureMode.Video,
486 | SharingMode = MediaCaptureSharingMode.SharedReadOnly,
487 | VideoDeviceId = vidCapDevice.Id
488 | };
489 |
490 | MediaCapture mediaCapture = new MediaCapture();
491 | await mediaCapture.InitializeAsync(mediaCaptureSettings);
492 |
493 | foreach (var srcFmtList in mediaCapture.FrameSources.Values.Select(x => x.SupportedFormats).Select(y => y.ToList()))
494 | {
495 | foreach (var srcFmt in srcFmtList)
496 | {
497 | formats.Add(srcFmt.VideoFormat);
498 | }
499 | }
500 | }
501 | }
502 |
503 | return formats;
504 | }
505 |
506 | private async Task CloseVideoCaptureDevice()
507 | {
508 | if (_mediaFrameReader != null)
509 | {
510 | _mediaFrameReader.FrameArrived -= FrameArrivedHandler;
511 | await _mediaFrameReader.StopAsync().AsTask().ConfigureAwait(false);
512 | }
513 |
514 | if (_mediaCapture != null && _mediaCapture.CameraStreamState == CameraStreamState.Streaming)
515 | {
516 | await _mediaCapture.StopRecordAsync().AsTask().ConfigureAwait(false);
517 | }
518 | }
519 |
        /// <summary>
        /// Event handler for video frames from the local video capture device. Converts the
        /// captured frame to NV12, encodes it for transmission and optionally raises a raw
        /// BGR sample for local display.
        /// </summary>
        private async void FrameArrivedHandler(MediaFrameReader sender, MediaFrameArrivedEventArgs e)
        {
            if (!_isClosed)
            {
                // Only do the conversion/encode work when a format has been negotiated and
                // someone is listening for the output.
                if (!_videoFormatManager.SelectedFormat.IsEmpty() && (OnVideoSourceEncodedSample != null || OnVideoSourceRawSample != null))
                {
                    using (var mediaFrameReference = sender.TryAcquireLatestFrame())
                    {
                        var videoMediaFrame = mediaFrameReference?.VideoMediaFrame;
                        var softwareBitmap = videoMediaFrame?.SoftwareBitmap;

                        // Some sources only supply a Direct3D surface; copy it into a software bitmap.
                        if (softwareBitmap == null && videoMediaFrame != null)
                        {
                            var videoFrame = videoMediaFrame.GetVideoFrame();
                            softwareBitmap = await SoftwareBitmap.CreateCopyFromSurfaceAsync(videoFrame.Direct3DSurface);
                        }

                        if (softwareBitmap != null)
                        {
                            int width = softwareBitmap.PixelWidth;
                            int height = softwareBitmap.PixelHeight;

                            // The encoder input expects NV12; convert if the source supplied something else.
                            if (softwareBitmap.BitmapPixelFormat != BitmapPixelFormat.Nv12)
                            {
                                softwareBitmap = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Nv12, BitmapAlphaMode.Ignore);
                            }

                            // Swap the processed frame to _backBuffer and dispose of the unused image.
                            softwareBitmap = Interlocked.Exchange(ref _backBuffer, softwareBitmap);

                            using (BitmapBuffer buffer = _backBuffer.LockBuffer(BitmapBufferAccessMode.Read))
                            {
                                using (var reference = buffer.CreateReference())
                                {
                                    unsafe
                                    {
                                        byte* dataInBytes;
                                        uint capacity;
                                        // Copy the locked bitmap bytes into a managed NV12 buffer.
                                        // NOTE(review): the generic argument appears stripped in this copy;
                                        // presumably reference.As<IMemoryBufferByteAccess>() — confirm.
                                        reference.As().GetBuffer(out dataInBytes, out capacity);
                                        byte[] nv12Buffer = new byte[capacity];
                                        Marshal.Copy((IntPtr)dataInBytes, nv12Buffer, 0, (int)capacity);

                                        if (OnVideoSourceEncodedSample != null)
                                        {
                                            // The lock serialises encoding against disposal in CloseVideo.
                                            lock (_videoEncoder)
                                            {
                                                var encodedBuffer = _videoEncoder.EncodeVideo(width, height, nv12Buffer, EncoderInputFormat, _videoFormatManager.SelectedFormat.Codec);

                                                if (encodedBuffer != null)
                                                {
                                                    // Duration in RTP timestamp units = clock rate / frame rate.
                                                    uint fps = (_fpsDenominator > 0 && _fpsNumerator > 0) ? _fpsNumerator / _fpsDenominator : DEFAULT_FRAMES_PER_SECOND;
                                                    uint durationRtpTS = VIDEO_SAMPLING_RATE / fps;
                                                    OnVideoSourceEncodedSample.Invoke(durationRtpTS, encodedBuffer);
                                                }

                                                // Clear the key frame request once an encode pass has run.
                                                if (_forceKeyFrame)
                                                {
                                                    _forceKeyFrame = false;
                                                }
                                            }
                                        }

                                        if (OnVideoSourceRawSample != null)
                                        {
                                            // Milliseconds since the previous frame, 0 for the first frame.
                                            uint frameSpacing = 0;
                                            if (_lastFrameAt != DateTime.MinValue)
                                            {
                                                frameSpacing = Convert.ToUInt32(DateTime.Now.Subtract(_lastFrameAt).TotalMilliseconds);
                                            }

                                            var bgrBuffer = PixelConverter.NV12toBGR(nv12Buffer, width, height, width * 3);

                                            OnVideoSourceRawSample(frameSpacing, width, height, bgrBuffer, VideoPixelFormatsEnum.Bgr);
                                        }
                                    }
                                }
                            }
                        }

                        // Release the previous back buffer and the frame displaced by the exchange.
                        _backBuffer?.Dispose();
                        softwareBitmap?.Dispose();
                    }

                    _lastFrameAt = DateTime.Now;
                }
            }
        }
610 |
        /// <summary>
        /// Copies data from an RGB buffer to a software bitmap.
        /// </summary>
        /// <param name="buffer">The RGB buffer to copy from.</param>
        /// <param name="sbmp">The software bitmap to copy the data to.</param>
        /// <param name="pixelFormat">The pixel layout of the source buffer; Rgb, Bgr and Bgra
        /// are handled, any other value leaves the bitmap untouched.</param>
        private void SetBitmapData(byte[] buffer, SoftwareBitmap sbmp, VideoPixelFormatsEnum pixelFormat)
        {
            using (BitmapBuffer bmpBuffer = sbmp.LockBuffer(BitmapBufferAccessMode.Write))
            {
                using (var reference = bmpBuffer.CreateReference())
                {
                    unsafe
                    {
                        byte* dataInBytes;
                        uint capacity;
                        ((IMemoryBufferByteAccess)reference).GetBuffer(out dataInBytes, out capacity);
                        int posn = 0;

                        // Fill-in the RGBA plane, writing 4 destination bytes per pixel.
                        BitmapPlaneDescription bufferLayout = bmpBuffer.GetPlaneDescription(0);
                        for (int i = 0; i < bufferLayout.Height; i++)
                        {
                            for (int j = 0; j < bufferLayout.Width; j++)
                            {
                                // NOTE: Same as for System.Drawing.Bitmap pixel formats that have "rgb" in their name, such as
                                // BitmapPixelFormat.Rgba8, use a buffer format of BGR. Many issues on StackOverflow regarding this,
                                // e.g. https://stackoverflow.com/questions/5106505/converting-gdi-pixelformat-to-wpf-pixelformat.
                                // Notice the switch of the Blue and Red pixels below.
                                if (pixelFormat == VideoPixelFormatsEnum.Rgb)
                                {
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 0] = buffer[posn++];
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 1] = buffer[posn++];
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 2] = buffer[posn++];
                                    // Opaque alpha since the source has no alpha channel.
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 3] = (byte)255;
                                }
                                else if (pixelFormat == VideoPixelFormatsEnum.Bgr)
                                {
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 2] = buffer[posn++];
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 1] = buffer[posn++];
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 0] = buffer[posn++];
                                    // Opaque alpha since the source has no alpha channel.
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 3] = (byte)255;
                                }
                                //if (pixelFormat == VideoPixelFormatsEnum.Rgba)
                                //{
                                //    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 0] = buffer[posn++];
                                //    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 1] = buffer[posn++];
                                //    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 2] = buffer[posn++];
                                //    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 3] = buffer[posn++];
                                //}
                                else if (pixelFormat == VideoPixelFormatsEnum.Bgra)
                                {
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 2] = buffer[posn++];
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 1] = buffer[posn++];
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 0] = buffer[posn++];
                                    dataInBytes[bufferLayout.StartIndex + bufferLayout.Stride * i + 4 * j + 3] = buffer[posn++];
                                }
                            }
                        }
                    }
                }
            }
        }
673 |
674 | ///
675 | /// Diagnostic method to print the details of a video frame source.
676 | ///
677 | private void PrintFrameSourceInfo(MediaFrameSource frameSource)
678 | {
679 | var width = frameSource.CurrentFormat.VideoFormat.Width;
680 | var height = frameSource.CurrentFormat.VideoFormat.Height;
681 | var fpsNumerator = frameSource.CurrentFormat.FrameRate.Numerator;
682 | var fpsDenominator = frameSource.CurrentFormat.FrameRate.Denominator;
683 |
684 | double fps = fpsNumerator / fpsDenominator;
685 | string pixFmt = frameSource.CurrentFormat.Subtype;
686 | string deviceName = frameSource.Info.DeviceInformation.Name;
687 |
688 | logger.LogInformation($"Video capture device {deviceName} successfully initialised: {width}x{height} {fps:0.##}fps pixel format {pixFmt}.");
689 | }
690 |
/// <summary>
/// Releases the video encoder owned by this end point.
/// </summary>
public void Dispose()
{
    // Snapshot the field so a concurrent writer cannot change it between the
    // null check and the lock acquisition (lock(null) would throw).
    var encoder = _videoEncoder;
    if (encoder != null)
    {
        lock (encoder)
        {
            encoder.Dispose();
        }
    }
}
701 |
// Pausing the video sink is a no-op for this end point.
public Task PauseVideoSink() => Task.CompletedTask;
706 |
// Resuming the video sink is a no-op for this end point.
public Task ResumeVideoSink() => Task.CompletedTask;
711 |
// Starting the video sink requires no work; the sink is always ready.
public Task StartVideoSink() => Task.CompletedTask;
716 |
// Closing the video sink requires no teardown for this end point.
public Task CloseVideoSink() => Task.CompletedTask;
721 |
// Delegates the generic start to the video specific implementation.
public Task Start() => StartVideo();
723 |
// Delegates the generic close to the video specific implementation.
public Task Close() => CloseVideo();
725 |
// Delegates the generic pause to the video specific implementation.
public Task Pause() => PauseVideo();
727 |
// Delegates the generic resume to the video specific implementation.
public Task Resume() => ResumeVideo();
729 | }
730 | }
731 |
--------------------------------------------------------------------------------
/src/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/sipsorcery-org/SIPSorceryMedia.Windows/92a99772973d5c4f1ae49c7994e0ab9c6787f159/src/icon.png
--------------------------------------------------------------------------------
/test/VideoCaptureTest/Program.cs:
--------------------------------------------------------------------------------
1 | //-----------------------------------------------------------------------------
2 | // Filename: Program.cs
3 | //
4 | // Description: Test program for the WinRT Media Foundation Wrapper to activate
5 | // and capture output from a webcam.
6 | //
7 | // Main reference:
8 | // https://docs.microsoft.com/en-us/windows/uwp/audio-video-camera/process-media-frames-with-mediaframereader.
9 | //
10 | // Author(s):
11 | // Aaron Clauson (aaron@sipsorcery.com)
12 | //
13 | // History:
14 | // 30 Sep 2020 Aaron Clauson Created, Dublin, Ireland.
15 | //
16 | // License:
17 | // BSD 3-Clause "New" or "Revised" License, see included LICENSE.md file.
18 | //-----------------------------------------------------------------------------
19 |
20 | using System;
21 | using System.Drawing;
22 | using System.Drawing.Imaging;
23 | using System.Linq;
24 | using System.Runtime.InteropServices;
25 | using System.Threading;
26 | using System.Threading.Tasks;
27 | using System.Windows.Forms;
28 | using Windows.Devices.Enumeration;
29 | using Windows.Graphics.Imaging;
30 | using Windows.Media.Capture;
31 | using Windows.Media.Capture.Frames;
32 | using Windows.Media.MediaProperties;
33 | using WinRT;
34 |
35 | namespace test
36 | {
// COM interop interface giving unsafe, direct access to the raw bytes behind an
// IMemoryBufferReference (used to read SoftwareBitmap pixel data without copying).
[ComImport]
[Guid("5B0D3235-4DBA-4D44-865E-8F1D0E4FD04D")]
[InterfaceType(ComInterfaceType.InterfaceIsIUnknown)]
unsafe interface IMemoryBufferByteAccess
{
    // Returns a pointer to the underlying buffer and its capacity in bytes.
    void GetBuffer(out byte* buffer, out uint capacity);
}
44 |
45 | class Program
46 | {
//private const string WEBCAM_NAME = "Logitech QuickCam Pro 9000";
// Name of the webcam to open; must exactly match the device name Windows reports.
private const string WEBCAM_NAME = "HD Pro Webcam C920";

// These are the input formats the VP8 encoder supports. If the webcam
// supports them natively a pixel conversion can be saved.
private static readonly string MF_NV12_PIXEL_FORMAT = MediaEncodingSubtypes.Nv12.ToUpper();
// Media Foundation subtype GUID for the I420 (planar YUV 4:2:0) pixel format.
private static readonly string MF_I420_PIXEL_FORMAT = "{30323449-0000-0010-8000-00AA00389B71}";
private static readonly string MF_RGB24_PIXEL_FORMAT = MediaEncodingSubtypes.Rgb24.ToUpper();

// The pixel format that will be requested from the capture device.
private static readonly string MF_SELECTED_PIXEL_FORMAT = MF_NV12_PIXEL_FORMAT;

// Minimum capture resolution and frame rate used when selecting a device format.
private static int FRAME_WIDTH = 640;
private static int FRAME_HEIGHT = 480;
private static int FRAME_RATE = 30;

// WinForms window and picture box used to display the captured frames.
private static Form _form;
private static PictureBox _picBox;
64 |
/// <summary>
/// Entry point. Starts the webcam capture and renders the frames into a
/// Windows Forms picture box until the window is closed.
/// </summary>
static async Task Main()
{
    Console.WriteLine("Video Capture Test");

    //await ListDevicesAndFormats();
    //Console.ReadLine();

    var mediaFrameReader = await StartVideoCapture().ConfigureAwait(false);

    if (mediaFrameReader != null)
    {
        // Open a Window to display the video feed from the video capture device.
        _form = new Form();
        _form.AutoSize = true;
        _form.BackgroundImageLayout = ImageLayout.Center;
        _picBox = new PictureBox
        {
            Size = new Size(FRAME_WIDTH, FRAME_HEIGHT),
            Location = new Point(0, 0),
            Visible = true
        };
        _form.Controls.Add(_picBox);

        // Re-entrancy guard: frames arriving while the previous one is still
        // being processed are dropped rather than queued.
        bool taskRunning = false;
        SoftwareBitmap backBuffer = null;

        // Lambda handler for captured frames.
        mediaFrameReader.FrameArrived += async (MediaFrameReader sender, MediaFrameArrivedEventArgs e) =>
        {
            if (taskRunning)
            {
                return;
            }
            taskRunning = true;

            var mediaFrameReference = sender.TryAcquireLatestFrame();
            var videoMediaFrame = mediaFrameReference?.VideoMediaFrame;
            var softwareBitmap = videoMediaFrame?.SoftwareBitmap;

            // If the frame is backed by GPU memory copy it into a CPU accessible bitmap.
            if (softwareBitmap == null && videoMediaFrame != null)
            {
                var videoFrame = videoMediaFrame.GetVideoFrame();
                softwareBitmap = await SoftwareBitmap.CreateCopyFromSurfaceAsync(videoFrame.Direct3DSurface);
            }

            if (softwareBitmap != null)
            {
                Console.WriteLine($"Software bitmap pixel fmt {softwareBitmap.BitmapPixelFormat}, alpha mode {softwareBitmap.BitmapAlphaMode}.");

                // The System.Drawing interop below requires BGRA8 premultiplied.
                if (softwareBitmap.BitmapPixelFormat != Windows.Graphics.Imaging.BitmapPixelFormat.Bgra8 ||
                    softwareBitmap.BitmapAlphaMode != Windows.Graphics.Imaging.BitmapAlphaMode.Premultiplied)
                {
                    softwareBitmap = SoftwareBitmap.Convert(softwareBitmap, BitmapPixelFormat.Bgra8, BitmapAlphaMode.Premultiplied);
                }

                int width = softwareBitmap.PixelWidth;
                int height = softwareBitmap.PixelHeight;

                Console.WriteLine($"Software bitmap frame size {width}x{height}.");

                // Swap the processed frame to backBuffer and dispose of the unused image.
                softwareBitmap = Interlocked.Exchange(ref backBuffer, softwareBitmap);
                softwareBitmap?.Dispose();

                _form.BeginInvoke(new Action(() =>
                {
                    if (_picBox.Width != width || _picBox.Height != height)
                    {
                        _picBox.Size = new Size(width, height);
                    }

                    using (BitmapBuffer buffer = backBuffer.LockBuffer(BitmapBufferAccessMode.Read))
                    {
                        using (var reference = buffer.CreateReference())
                        {
                            unsafe
                            {
                                byte* dataInBytes;
                                uint capacity;
                                // FIX: the generic type argument had been stripped; As<T> is the
                                // WinRT cast helper for COM interop interfaces.
                                reference.As<IMemoryBufferByteAccess>().GetBuffer(out dataInBytes, out capacity);

                                // NOTE(review): this Bitmap wraps unmanaged memory owned by the
                                // BitmapBuffer and is only valid inside this using scope — confirm
                                // the PictureBox copies the pixels before the buffer is unlocked.
                                Bitmap bmpImage = new Bitmap((int)width, (int)height, (int)(capacity / height), PixelFormat.Format32bppArgb, (IntPtr)dataInBytes);
                                _picBox.Image = bmpImage;
                            }
                        }
                    }
                }));
            }
            else
            {
                Console.WriteLine("null");
            }

            taskRunning = false;
        };

        Console.WriteLine("Starting media frame reader.");
        _ = Task.Run(async () => await mediaFrameReader.StartAsync()).ConfigureAwait(false);

        Console.WriteLine("Starting Windows Forms message loop.");
        Application.EnableVisualStyles();
        Application.Run(_form);
    }
    else
    {
        Console.WriteLine("Could not acquire a media frame reader.");
    }
}
173 |
174 | ///
175 | /// Initialise the capture device and set the source format.
176 | ///
177 | private static async Task StartVideoCapture()
178 | {
179 | var mediaCaptureSettings = new MediaCaptureInitializationSettings()
180 | {
181 | StreamingCaptureMode = StreamingCaptureMode.Video,
182 | SharingMode = MediaCaptureSharingMode.ExclusiveControl,
183 | // It shouldn't be necessary to force the CPU.
184 | // Better to allow the system to use the GPU if possible.
185 | //MemoryPreference = MediaCaptureMemoryPreference.Cpu,
186 | VideoDeviceId = await GetDeviceID(WEBCAM_NAME),
187 | MediaCategory = MediaCategory.Communications,
188 | };
189 |
190 | var mediaCapture = new MediaCapture();
191 | await mediaCapture.InitializeAsync(mediaCaptureSettings);
192 |
193 | MediaFrameSourceInfo colorSourceInfo = null;
194 | foreach (var srcInfo in mediaCapture.FrameSources)
195 | {
196 | if (srcInfo.Value.Info.MediaStreamType == MediaStreamType.VideoRecord &&
197 | srcInfo.Value.Info.SourceKind == MediaFrameSourceKind.Color)
198 | {
199 | colorSourceInfo = srcInfo.Value.Info;
200 | break;
201 | }
202 | }
203 |
204 | var colorFrameSource = mediaCapture.FrameSources[colorSourceInfo.Id];
205 |
206 | var preferredFormat = colorFrameSource.SupportedFormats.Where(format =>
207 | {
208 | return format.VideoFormat.Width >= FRAME_WIDTH &&
209 | (format.FrameRate.Numerator / format.FrameRate.Denominator) >= FRAME_RATE
210 | // Setting the pixel format has proven to be very error prone AND the software bitmap from the frame
211 | // reader can end up with a different format any way. On my older logitech 9000 webcam attempting to
212 | // set the pixel format to I420, and thus save a conversion, resulted in the media frame reader only
213 | // having buffered samples which was crazy slow and seemed to be operating in some kind of preview
214 | // mode where the camera was closed and re-opened for each frame.
215 | // The best approach seems to try for NV12 and if not available let the system choose. In the
216 | // frame reader loop then do a software bitmap conversion if NV12 wasn't chosen.
217 | && format.Subtype == MF_SELECTED_PIXEL_FORMAT;
218 | }).FirstOrDefault();
219 |
220 | if (preferredFormat == null)
221 | {
222 | // Our desired format is not supported
223 | return null;
224 | }
225 |
226 | await colorFrameSource.SetFormatAsync(preferredFormat);
227 |
228 | var mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(colorFrameSource);
229 | //var mediaFrameReader = await mediaCapture.CreateFrameReaderAsync(colorFrameSource, MediaEncodingSubtypes.Rgb24.ToUpper());
230 | mediaFrameReader.AcquisitionMode = MediaFrameReaderAcquisitionMode.Realtime;
231 |
232 | PrintFrameSourceInfo(colorFrameSource);
233 |
234 | return mediaFrameReader;
235 | }
236 |
237 | ///
238 | /// Diagnostic method to print the details of a video frame source.
239 | ///
240 | private static void PrintFrameSourceInfo(MediaFrameSource frameSource)
241 | {
242 | var width = frameSource.CurrentFormat.VideoFormat.Width;
243 | var height = frameSource.CurrentFormat.VideoFormat.Height;
244 | var fpsNumerator = frameSource.CurrentFormat.FrameRate.Numerator;
245 | var fpsDenominator = frameSource.CurrentFormat.FrameRate.Denominator;
246 |
247 | double fps = fpsNumerator / fpsDenominator;
248 | string pixFmt = frameSource.CurrentFormat.Subtype;
249 | string deviceName = frameSource.Info.DeviceInformation.Name;
250 |
251 | Console.WriteLine($"Video capture device {deviceName} successfully initialised: {width}x{height} {fps:0.##}fps pixel format {pixFmt}.");
252 | }
253 |
254 | ///
255 | /// Gets the ID of a video device from its name.
256 | ///
257 | private static async Task GetDeviceID(string deviceName)
258 | {
259 | var vidCapDevices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
260 | var vidDevice = vidCapDevices.FirstOrDefault(x => x.Name == deviceName);
261 |
262 | if (vidDevice == null)
263 | {
264 | Console.WriteLine($"Could not find video capture device for name {deviceName}.");
265 | return null;
266 | }
267 | else
268 | {
269 | return vidDevice.Id;
270 | }
271 | }
272 |
273 | ///
274 | /// Attempts to list the system video capture devices and supported video modes.
275 | ///
276 | public static async Task ListDevicesAndFormats()
277 | {
278 | var vidCapDevices = await DeviceInformation.FindAllAsync(DeviceClass.VideoCapture);
279 | foreach (var vidCapDevice in vidCapDevices)
280 | {
281 | // The block below is how the reference documentation shows how to list modes but as of Sep 2020 it does not work.
282 | // https://docs.microsoft.com/en-us/uwp/api/windows.media.capture.mediacapture.findallvideoprofiles?view=winrt-19041.
283 | //logger.LogDebug($"Supported formats for video capture device {vidCapDevice.Name}:");
284 | //foreach (var recordProfiles in MediaCapture.FindAllVideoProfiles(vidCapDevice.Id).Select(x => x.SupportedRecordMediaDescription))
285 | //{
286 | // logger.LogDebug($"Support profile count {recordProfiles.Count}");
287 | // foreach (var profile in recordProfiles)
288 | // {
289 | // logger.LogDebug($"Capture device frame source {profile.Width}x{profile.Height} {profile.FrameRate:0.##}fps {profile.Subtype}");
290 | // }
291 | //}
292 |
293 | var mediaCaptureSettings = new MediaCaptureInitializationSettings()
294 | {
295 | StreamingCaptureMode = StreamingCaptureMode.Video,
296 | SharingMode = MediaCaptureSharingMode.SharedReadOnly,
297 | VideoDeviceId = vidCapDevice.Id
298 | };
299 |
300 | MediaCapture mediaCapture = new MediaCapture();
301 | await mediaCapture.InitializeAsync(mediaCaptureSettings);
302 |
303 | foreach (var srcFmtList in mediaCapture.FrameSources.Values.Select(x => x.SupportedFormats).Select(y => y.ToList()))
304 | {
305 | foreach (var srcFmt in srcFmtList)
306 | {
307 | var vidFmt = srcFmt.VideoFormat;
308 | float vidFps = vidFmt.MediaFrameFormat.FrameRate.Numerator / vidFmt.MediaFrameFormat.FrameRate.Denominator;
309 | string pixFmt = vidFmt.MediaFrameFormat.Subtype == MF_I420_PIXEL_FORMAT ? "I420" : vidFmt.MediaFrameFormat.Subtype;
310 | Console.WriteLine($"Video Capture device {vidCapDevice.Name} format {vidFmt.Width}x{vidFmt.Height} {vidFps:0.##}fps {pixFmt}");
311 | }
312 | }
313 | }
314 | }
315 | }
316 | }
317 |
--------------------------------------------------------------------------------
/test/VideoCaptureTest/VideoCaptureTest.csproj:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Exe
5 | net6.0-windows10.0.22000
6 | true
7 | true
8 |
9 |
10 |
11 | true
12 |
13 |
14 |
15 |
--------------------------------------------------------------------------------