├── .clang-format ├── .gitignore ├── CMakeLists.txt ├── CMakePresets.json ├── LICENSE ├── README.md ├── rc ├── icon.ico ├── icon.rc └── mute.wav └── src ├── basic ├── basic.cpp ├── basic.h ├── frame.cpp ├── frame.h └── timer.h ├── capturer ├── audio_capturer.cpp ├── audio_capturer.h ├── dxgi_capturer.cpp ├── dxgi_capturer.h ├── finder.cpp ├── finder.h ├── gdi_capturer.cpp ├── gdi_capturer.h ├── video_capturer.cpp ├── video_capturer.h ├── wgc │ ├── App.cpp │ ├── App.h │ ├── SimpleCapture.cpp │ ├── SimpleCapture.h │ ├── capture.interop.h │ ├── composition.interop.h │ ├── d3dHelpers.h │ ├── direct3d11.interop.h │ ├── pch.cpp │ ├── pch.h │ ├── winrt.cpp │ └── winrt.h ├── wgc_capturer.cpp └── wgc_capturer.h ├── d3d ├── buffer_filler.cpp ├── buffer_filler.h ├── convert.cpp ├── convert.h ├── gen_frame.cpp └── gen_frame.h ├── encoder ├── abstract_encoder.cpp ├── abstract_encoder.h ├── audio_encoder.cpp ├── audio_encoder.h ├── audio_mixer.cpp ├── audio_mixer.h ├── video_encoder.cpp └── video_encoder.h ├── main.cpp ├── muxer ├── av_muxer.cpp └── av_muxer.h ├── recorder ├── audio_recorder.cpp ├── audio_recorder.h ├── video_recorder.cpp └── video_recorder.h └── ui ├── audio_render.cpp ├── audio_render.h ├── audio_widget.cpp ├── audio_widget.h ├── av_recorder.cpp ├── av_recorder.h ├── settings_page.cpp ├── settings_page.h ├── video_render.cpp ├── video_render.h ├── video_widget.cpp └── video_widget.h /.clang-format: -------------------------------------------------------------------------------- 1 | { BasedOnStyle: WebKit, AlignTrailingComments: true, Cpp11BracedListStyle: true, SortIncludes: false } -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .vscode 2 | .cache 3 | build 4 | debug 5 | *.dll 6 | *.mp4 7 | .vs 8 | out -------------------------------------------------------------------------------- /CMakeLists.txt: 
-------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 3.5) # CMake install : https://cmake.org/download/ 2 | project(AvRecorder) 3 | set(CMAKE_INCLUDE_CURRENT_DIR ON) 4 | set(CMAKE_PREFIX_PATH "e:/Qt/5.15.0/msvc2019_64") # Qt Kit Dir 5 | set(CMAKE_AUTOUIC ON) 6 | set(CMAKE_AUTOMOC ON) 7 | set(CMAKE_AUTORCC ON) 8 | set(CMAKE_CXX_STANDARD 17) 9 | set(CMAKE_CXX_STANDARD_REQUIRED ON) 10 | 11 | # ffmpeg 12 | include_directories(e:/ffmpeg/include) 13 | link_directories(e:/ffmpeg/lib) 14 | 15 | include_directories(./src) 16 | 17 | 18 | find_package(Qt5 COMPONENTS Widgets REQUIRED) # Qt COMPONENTS 19 | aux_source_directory(./src/basic basicSrcs) 20 | aux_source_directory(./src/capturer capturerSrcs) 21 | aux_source_directory(./src/encoder encoderSrcs) 22 | aux_source_directory(./src/muxer muxerSrcs) 23 | aux_source_directory(./src/recorder recorderSrcs) 24 | aux_source_directory(./src/ui uiSrcs) 25 | aux_source_directory(./src/capturer/wgc wgcSrcs) 26 | aux_source_directory(./src/d3d d3dSrcs) 27 | 28 | # Specify MSVC UTF-8 encoding 29 | add_compile_options("$<$<C_COMPILER_ID:MSVC>:/utf-8>") 30 | add_compile_options("$<$<CXX_COMPILER_ID:MSVC>:/utf-8>") 31 | 32 | add_executable(${PROJECT_NAME} 33 | # WIN32 # If you need a terminal for debug, please comment this statement 34 | ${basicSrcs} 35 | ${capturerSrcs} 36 | ${encoderSrcs} 37 | ${muxerSrcs} 38 | ${recorderSrcs} 39 | ${uiSrcs} 40 | ${wgcSrcs} 41 | ${d3dSrcs} 42 | src/main.cpp 43 | rc/icon.rc 44 | ) 45 | 46 | target_link_libraries( 47 | ${PROJECT_NAME} 48 | # Qt5 Shared Library 49 | PRIVATE Qt5::Widgets 50 | 51 | # ffmpeg 52 | avcodec 53 | avdevice 54 | avfilter 55 | avformat 56 | avutil 57 | swresample 58 | postproc 59 | swscale 60 | 61 | # windows 62 | ksuser 63 | d3d11 64 | user32 65 | winmm 66 | windowsapp 67 | dwmapi 68 | dxgi 69 | ) -------------------------------------------------------------------------------- /CMakePresets.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "version": 3, 3 | "configurePresets": [ 4 | { 5 | "name": "windows-base", 6 | "description": "面向具有 Visual Studio 开发环境的 Windows。", 7 | "hidden": true, 8 | "generator": "Ninja", 9 | "binaryDir": "${sourceDir}/out/build/${presetName}", 10 | "installDir": "${sourceDir}/out/install/${presetName}", 11 | "cacheVariables": { 12 | "CMAKE_C_COMPILER": "cl.exe", 13 | "CMAKE_CXX_COMPILER": "cl.exe" 14 | }, 15 | "condition": { 16 | "type": "equals", 17 | "lhs": "${hostSystemName}", 18 | "rhs": "Windows" 19 | } 20 | }, 21 | { 22 | "name": "x64-debug", 23 | "displayName": "x64 Debug", 24 | "description": "使用 Visual Studio 开发环境定向到 Windows (64 位)。(Debug)", 25 | "inherits": "windows-base", 26 | "architecture": { 27 | "value": "x64", 28 | "strategy": "external" 29 | }, 30 | "cacheVariables": { "CMAKE_BUILD_TYPE": "Debug" } 31 | }, 32 | { 33 | "name": "x64-release", 34 | "displayName": "x64 Release", 35 | "description": "使用 Visual Studio 开发环境定向到 Windows (64 位)。(Release)", 36 | "inherits": "x64-debug", 37 | "cacheVariables": { "CMAKE_BUILD_TYPE": "Release" } 38 | }, 39 | { 40 | "name": "x86-debug", 41 | "displayName": "x86 Debug", 42 | "description": "用 Visual Studio 开发环境定向到 Windows (32 位)。(Debug)", 43 | "inherits": "windows-base", 44 | "architecture": { 45 | "value": "x86", 46 | "strategy": "external" 47 | }, 48 | "cacheVariables": { "CMAKE_BUILD_TYPE": "Debug" } 49 | }, 50 | { 51 | "name": "x86-release", 52 | "displayName": "x86 Release", 53 | "description": "用 Visual Studio 开发环境定向到 Windows (32 位)。(Release)", 54 | "inherits": "x86-debug", 55 | "cacheVariables": { "CMAKE_BUILD_TYPE": "Release" } 56 | } 57 | ] 58 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free 
Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | Preamble 9 | 10 | The GNU General Public License is a free, copyleft license for 11 | software and other kinds of works. 12 | 13 | The licenses for most software and other practical works are designed 14 | to take away your freedom to share and change the works. By contrast, 15 | the GNU General Public License is intended to guarantee your freedom to 16 | share and change all versions of a program--to make sure it remains free 17 | software for all its users. We, the Free Software Foundation, use the 18 | GNU General Public License for most of our software; it applies also to 19 | any other work released this way by its authors. You can apply it to 20 | your programs, too. 21 | 22 | When we speak of free software, we are referring to freedom, not 23 | price. Our General Public Licenses are designed to make sure that you 24 | have the freedom to distribute copies of free software (and charge for 25 | them if you wish), that you receive source code or can get it if you 26 | want it, that you can change the software or use pieces of it in new 27 | free programs, and that you know you can do these things. 28 | 29 | To protect your rights, we need to prevent others from denying you 30 | these rights or asking you to surrender the rights. Therefore, you have 31 | certain responsibilities if you distribute copies of the software, or if 32 | you modify it: responsibilities to respect the freedom of others. 33 | 34 | For example, if you distribute copies of such a program, whether 35 | gratis or for a fee, you must pass on to the recipients the same 36 | freedoms that you received. You must make sure that they, too, receive 37 | or can get the source code. And you must show them these terms so they 38 | know their rights. 
39 | 40 | Developers that use the GNU GPL protect your rights with two steps: 41 | (1) assert copyright on the software, and (2) offer you this License 42 | giving you legal permission to copy, distribute and/or modify it. 43 | 44 | For the developers' and authors' protection, the GPL clearly explains 45 | that there is no warranty for this free software. For both users' and 46 | authors' sake, the GPL requires that modified versions be marked as 47 | changed, so that their problems will not be attributed erroneously to 48 | authors of previous versions. 49 | 50 | Some devices are designed to deny users access to install or run 51 | modified versions of the software inside them, although the manufacturer 52 | can do so. This is fundamentally incompatible with the aim of 53 | protecting users' freedom to change the software. The systematic 54 | pattern of such abuse occurs in the area of products for individuals to 55 | use, which is precisely where it is most unacceptable. Therefore, we 56 | have designed this version of the GPL to prohibit the practice for those 57 | products. If such problems arise substantially in other domains, we 58 | stand ready to extend this provision to those domains in future versions 59 | of the GPL, as needed to protect the freedom of users. 60 | 61 | Finally, every program is threatened constantly by software patents. 62 | States should not allow patents to restrict development and use of 63 | software on general-purpose computers, but in those that do, we wish to 64 | avoid the special danger that patents applied to a free program could 65 | make it effectively proprietary. To prevent this, the GPL assures that 66 | patents cannot be used to render the program non-free. 67 | 68 | The precise terms and conditions for copying, distribution and 69 | modification follow. 70 | 71 | TERMS AND CONDITIONS 72 | 73 | 0. Definitions. 74 | 75 | "This License" refers to version 3 of the GNU General Public License. 
76 | 77 | "Copyright" also means copyright-like laws that apply to other kinds of 78 | works, such as semiconductor masks. 79 | 80 | "The Program" refers to any copyrightable work licensed under this 81 | License. Each licensee is addressed as "you". "Licensees" and 82 | "recipients" may be individuals or organizations. 83 | 84 | To "modify" a work means to copy from or adapt all or part of the work 85 | in a fashion requiring copyright permission, other than the making of an 86 | exact copy. The resulting work is called a "modified version" of the 87 | earlier work or a work "based on" the earlier work. 88 | 89 | A "covered work" means either the unmodified Program or a work based 90 | on the Program. 91 | 92 | To "propagate" a work means to do anything with it that, without 93 | permission, would make you directly or secondarily liable for 94 | infringement under applicable copyright law, except executing it on a 95 | computer or modifying a private copy. Propagation includes copying, 96 | distribution (with or without modification), making available to the 97 | public, and in some countries other activities as well. 98 | 99 | To "convey" a work means any kind of propagation that enables other 100 | parties to make or receive copies. Mere interaction with a user through 101 | a computer network, with no transfer of a copy, is not conveying. 102 | 103 | An interactive user interface displays "Appropriate Legal Notices" 104 | to the extent that it includes a convenient and prominently visible 105 | feature that (1) displays an appropriate copyright notice, and (2) 106 | tells the user that there is no warranty for the work (except to the 107 | extent that warranties are provided), that licensees may convey the 108 | work under this License, and how to view a copy of this License. If 109 | the interface presents a list of user commands or options, such as a 110 | menu, a prominent item in the list meets this criterion. 111 | 112 | 1. Source Code. 
113 | 114 | The "source code" for a work means the preferred form of the work 115 | for making modifications to it. "Object code" means any non-source 116 | form of a work. 117 | 118 | A "Standard Interface" means an interface that either is an official 119 | standard defined by a recognized standards body, or, in the case of 120 | interfaces specified for a particular programming language, one that 121 | is widely used among developers working in that language. 122 | 123 | The "System Libraries" of an executable work include anything, other 124 | than the work as a whole, that (a) is included in the normal form of 125 | packaging a Major Component, but which is not part of that Major 126 | Component, and (b) serves only to enable use of the work with that 127 | Major Component, or to implement a Standard Interface for which an 128 | implementation is available to the public in source code form. A 129 | "Major Component", in this context, means a major essential component 130 | (kernel, window system, and so on) of the specific operating system 131 | (if any) on which the executable work runs, or a compiler used to 132 | produce the work, or an object code interpreter used to run it. 133 | 134 | The "Corresponding Source" for a work in object code form means all 135 | the source code needed to generate, install, and (for an executable 136 | work) run the object code and to modify the work, including scripts to 137 | control those activities. However, it does not include the work's 138 | System Libraries, or general-purpose tools or generally available free 139 | programs which are used unmodified in performing those activities but 140 | which are not part of the work. 
For example, Corresponding Source 141 | includes interface definition files associated with source files for 142 | the work, and the source code for shared libraries and dynamically 143 | linked subprograms that the work is specifically designed to require, 144 | such as by intimate data communication or control flow between those 145 | subprograms and other parts of the work. 146 | 147 | The Corresponding Source need not include anything that users 148 | can regenerate automatically from other parts of the Corresponding 149 | Source. 150 | 151 | The Corresponding Source for a work in source code form is that 152 | same work. 153 | 154 | 2. Basic Permissions. 155 | 156 | All rights granted under this License are granted for the term of 157 | copyright on the Program, and are irrevocable provided the stated 158 | conditions are met. This License explicitly affirms your unlimited 159 | permission to run the unmodified Program. The output from running a 160 | covered work is covered by this License only if the output, given its 161 | content, constitutes a covered work. This License acknowledges your 162 | rights of fair use or other equivalent, as provided by copyright law. 163 | 164 | You may make, run and propagate covered works that you do not 165 | convey, without conditions so long as your license otherwise remains 166 | in force. You may convey covered works to others for the sole purpose 167 | of having them make modifications exclusively for you, or provide you 168 | with facilities for running those works, provided that you comply with 169 | the terms of this License in conveying all material for which you do 170 | not control copyright. Those thus making or running the covered works 171 | for you must do so exclusively on your behalf, under your direction 172 | and control, on terms that prohibit them from making any copies of 173 | your copyrighted material outside their relationship with you. 
174 | 175 | Conveying under any other circumstances is permitted solely under 176 | the conditions stated below. Sublicensing is not allowed; section 10 177 | makes it unnecessary. 178 | 179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 180 | 181 | No covered work shall be deemed part of an effective technological 182 | measure under any applicable law fulfilling obligations under article 183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 184 | similar laws prohibiting or restricting circumvention of such 185 | measures. 186 | 187 | When you convey a covered work, you waive any legal power to forbid 188 | circumvention of technological measures to the extent such circumvention 189 | is effected by exercising rights under this License with respect to 190 | the covered work, and you disclaim any intention to limit operation or 191 | modification of the work as a means of enforcing, against the work's 192 | users, your or third parties' legal rights to forbid circumvention of 193 | technological measures. 194 | 195 | 4. Conveying Verbatim Copies. 196 | 197 | You may convey verbatim copies of the Program's source code as you 198 | receive it, in any medium, provided that you conspicuously and 199 | appropriately publish on each copy an appropriate copyright notice; 200 | keep intact all notices stating that this License and any 201 | non-permissive terms added in accord with section 7 apply to the code; 202 | keep intact all notices of the absence of any warranty; and give all 203 | recipients a copy of this License along with the Program. 204 | 205 | You may charge any price or no price for each copy that you convey, 206 | and you may offer support or warranty protection for a fee. 207 | 208 | 5. Conveying Modified Source Versions. 
209 | 210 | You may convey a work based on the Program, or the modifications to 211 | produce it from the Program, in the form of source code under the 212 | terms of section 4, provided that you also meet all of these conditions: 213 | 214 | a) The work must carry prominent notices stating that you modified 215 | it, and giving a relevant date. 216 | 217 | b) The work must carry prominent notices stating that it is 218 | released under this License and any conditions added under section 219 | 7. This requirement modifies the requirement in section 4 to 220 | "keep intact all notices". 221 | 222 | c) You must license the entire work, as a whole, under this 223 | License to anyone who comes into possession of a copy. This 224 | License will therefore apply, along with any applicable section 7 225 | additional terms, to the whole of the work, and all its parts, 226 | regardless of how they are packaged. This License gives no 227 | permission to license the work in any other way, but it does not 228 | invalidate such permission if you have separately received it. 229 | 230 | d) If the work has interactive user interfaces, each must display 231 | Appropriate Legal Notices; however, if the Program has interactive 232 | interfaces that do not display Appropriate Legal Notices, your 233 | work need not make them do so. 234 | 235 | A compilation of a covered work with other separate and independent 236 | works, which are not by their nature extensions of the covered work, 237 | and which are not combined with it such as to form a larger program, 238 | in or on a volume of a storage or distribution medium, is called an 239 | "aggregate" if the compilation and its resulting copyright are not 240 | used to limit the access or legal rights of the compilation's users 241 | beyond what the individual works permit. Inclusion of a covered work 242 | in an aggregate does not cause this License to apply to the other 243 | parts of the aggregate. 244 | 245 | 6. 
Conveying Non-Source Forms. 246 | 247 | You may convey a covered work in object code form under the terms 248 | of sections 4 and 5, provided that you also convey the 249 | machine-readable Corresponding Source under the terms of this License, 250 | in one of these ways: 251 | 252 | a) Convey the object code in, or embodied in, a physical product 253 | (including a physical distribution medium), accompanied by the 254 | Corresponding Source fixed on a durable physical medium 255 | customarily used for software interchange. 256 | 257 | b) Convey the object code in, or embodied in, a physical product 258 | (including a physical distribution medium), accompanied by a 259 | written offer, valid for at least three years and valid for as 260 | long as you offer spare parts or customer support for that product 261 | model, to give anyone who possesses the object code either (1) a 262 | copy of the Corresponding Source for all the software in the 263 | product that is covered by this License, on a durable physical 264 | medium customarily used for software interchange, for a price no 265 | more than your reasonable cost of physically performing this 266 | conveying of source, or (2) access to copy the 267 | Corresponding Source from a network server at no charge. 268 | 269 | c) Convey individual copies of the object code with a copy of the 270 | written offer to provide the Corresponding Source. This 271 | alternative is allowed only occasionally and noncommercially, and 272 | only if you received the object code with such an offer, in accord 273 | with subsection 6b. 274 | 275 | d) Convey the object code by offering access from a designated 276 | place (gratis or for a charge), and offer equivalent access to the 277 | Corresponding Source in the same way through the same place at no 278 | further charge. You need not require recipients to copy the 279 | Corresponding Source along with the object code. 
If the place to 280 | copy the object code is a network server, the Corresponding Source 281 | may be on a different server (operated by you or a third party) 282 | that supports equivalent copying facilities, provided you maintain 283 | clear directions next to the object code saying where to find the 284 | Corresponding Source. Regardless of what server hosts the 285 | Corresponding Source, you remain obligated to ensure that it is 286 | available for as long as needed to satisfy these requirements. 287 | 288 | e) Convey the object code using peer-to-peer transmission, provided 289 | you inform other peers where the object code and Corresponding 290 | Source of the work are being offered to the general public at no 291 | charge under subsection 6d. 292 | 293 | A separable portion of the object code, whose source code is excluded 294 | from the Corresponding Source as a System Library, need not be 295 | included in conveying the object code work. 296 | 297 | A "User Product" is either (1) a "consumer product", which means any 298 | tangible personal property which is normally used for personal, family, 299 | or household purposes, or (2) anything designed or sold for incorporation 300 | into a dwelling. In determining whether a product is a consumer product, 301 | doubtful cases shall be resolved in favor of coverage. For a particular 302 | product received by a particular user, "normally used" refers to a 303 | typical or common use of that class of product, regardless of the status 304 | of the particular user or of the way in which the particular user 305 | actually uses, or expects or is expected to use, the product. A product 306 | is a consumer product regardless of whether the product has substantial 307 | commercial, industrial or non-consumer uses, unless such uses represent 308 | the only significant mode of use of the product. 
309 | 310 | "Installation Information" for a User Product means any methods, 311 | procedures, authorization keys, or other information required to install 312 | and execute modified versions of a covered work in that User Product from 313 | a modified version of its Corresponding Source. The information must 314 | suffice to ensure that the continued functioning of the modified object 315 | code is in no case prevented or interfered with solely because 316 | modification has been made. 317 | 318 | If you convey an object code work under this section in, or with, or 319 | specifically for use in, a User Product, and the conveying occurs as 320 | part of a transaction in which the right of possession and use of the 321 | User Product is transferred to the recipient in perpetuity or for a 322 | fixed term (regardless of how the transaction is characterized), the 323 | Corresponding Source conveyed under this section must be accompanied 324 | by the Installation Information. But this requirement does not apply 325 | if neither you nor any third party retains the ability to install 326 | modified object code on the User Product (for example, the work has 327 | been installed in ROM). 328 | 329 | The requirement to provide Installation Information does not include a 330 | requirement to continue to provide support service, warranty, or updates 331 | for a work that has been modified or installed by the recipient, or for 332 | the User Product in which it has been modified or installed. Access to a 333 | network may be denied when the modification itself materially and 334 | adversely affects the operation of the network or violates the rules and 335 | protocols for communication across the network. 
336 | 337 | Corresponding Source conveyed, and Installation Information provided, 338 | in accord with this section must be in a format that is publicly 339 | documented (and with an implementation available to the public in 340 | source code form), and must require no special password or key for 341 | unpacking, reading or copying. 342 | 343 | 7. Additional Terms. 344 | 345 | "Additional permissions" are terms that supplement the terms of this 346 | License by making exceptions from one or more of its conditions. 347 | Additional permissions that are applicable to the entire Program shall 348 | be treated as though they were included in this License, to the extent 349 | that they are valid under applicable law. If additional permissions 350 | apply only to part of the Program, that part may be used separately 351 | under those permissions, but the entire Program remains governed by 352 | this License without regard to the additional permissions. 353 | 354 | When you convey a copy of a covered work, you may at your option 355 | remove any additional permissions from that copy, or from any part of 356 | it. (Additional permissions may be written to require their own 357 | removal in certain cases when you modify the work.) You may place 358 | additional permissions on material, added by you to a covered work, 359 | for which you have or can give appropriate copyright permission. 
360 | 361 | Notwithstanding any other provision of this License, for material you 362 | add to a covered work, you may (if authorized by the copyright holders of 363 | that material) supplement the terms of this License with terms: 364 | 365 | a) Disclaiming warranty or limiting liability differently from the 366 | terms of sections 15 and 16 of this License; or 367 | 368 | b) Requiring preservation of specified reasonable legal notices or 369 | author attributions in that material or in the Appropriate Legal 370 | Notices displayed by works containing it; or 371 | 372 | c) Prohibiting misrepresentation of the origin of that material, or 373 | requiring that modified versions of such material be marked in 374 | reasonable ways as different from the original version; or 375 | 376 | d) Limiting the use for publicity purposes of names of licensors or 377 | authors of the material; or 378 | 379 | e) Declining to grant rights under trademark law for use of some 380 | trade names, trademarks, or service marks; or 381 | 382 | f) Requiring indemnification of licensors and authors of that 383 | material by anyone who conveys the material (or modified versions of 384 | it) with contractual assumptions of liability to the recipient, for 385 | any liability that these contractual assumptions directly impose on 386 | those licensors and authors. 387 | 388 | All other non-permissive additional terms are considered "further 389 | restrictions" within the meaning of section 10. If the Program as you 390 | received it, or any part of it, contains a notice stating that it is 391 | governed by this License along with a term that is a further 392 | restriction, you may remove that term. 
If a license document contains 393 | a further restriction but permits relicensing or conveying under this 394 | License, you may add to a covered work material governed by the terms 395 | of that license document, provided that the further restriction does 396 | not survive such relicensing or conveying. 397 | 398 | If you add terms to a covered work in accord with this section, you 399 | must place, in the relevant source files, a statement of the 400 | additional terms that apply to those files, or a notice indicating 401 | where to find the applicable terms. 402 | 403 | Additional terms, permissive or non-permissive, may be stated in the 404 | form of a separately written license, or stated as exceptions; 405 | the above requirements apply either way. 406 | 407 | 8. Termination. 408 | 409 | You may not propagate or modify a covered work except as expressly 410 | provided under this License. Any attempt otherwise to propagate or 411 | modify it is void, and will automatically terminate your rights under 412 | this License (including any patent licenses granted under the third 413 | paragraph of section 11). 414 | 415 | However, if you cease all violation of this License, then your 416 | license from a particular copyright holder is reinstated (a) 417 | provisionally, unless and until the copyright holder explicitly and 418 | finally terminates your license, and (b) permanently, if the copyright 419 | holder fails to notify you of the violation by some reasonable means 420 | prior to 60 days after the cessation. 421 | 422 | Moreover, your license from a particular copyright holder is 423 | reinstated permanently if the copyright holder notifies you of the 424 | violation by some reasonable means, this is the first time you have 425 | received notice of violation of this License (for any work) from that 426 | copyright holder, and you cure the violation prior to 30 days after 427 | your receipt of the notice. 
428 | 429 | Termination of your rights under this section does not terminate the 430 | licenses of parties who have received copies or rights from you under 431 | this License. If your rights have been terminated and not permanently 432 | reinstated, you do not qualify to receive new licenses for the same 433 | material under section 10. 434 | 435 | 9. Acceptance Not Required for Having Copies. 436 | 437 | You are not required to accept this License in order to receive or 438 | run a copy of the Program. Ancillary propagation of a covered work 439 | occurring solely as a consequence of using peer-to-peer transmission 440 | to receive a copy likewise does not require acceptance. However, 441 | nothing other than this License grants you permission to propagate or 442 | modify any covered work. These actions infringe copyright if you do 443 | not accept this License. Therefore, by modifying or propagating a 444 | covered work, you indicate your acceptance of this License to do so. 445 | 446 | 10. Automatic Licensing of Downstream Recipients. 447 | 448 | Each time you convey a covered work, the recipient automatically 449 | receives a license from the original licensors, to run, modify and 450 | propagate that work, subject to this License. You are not responsible 451 | for enforcing compliance by third parties with this License. 452 | 453 | An "entity transaction" is a transaction transferring control of an 454 | organization, or substantially all assets of one, or subdividing an 455 | organization, or merging organizations. 
If propagation of a covered 456 | work results from an entity transaction, each party to that 457 | transaction who receives a copy of the work also receives whatever 458 | licenses to the work the party's predecessor in interest had or could 459 | give under the previous paragraph, plus a right to possession of the 460 | Corresponding Source of the work from the predecessor in interest, if 461 | the predecessor has it or can get it with reasonable efforts. 462 | 463 | You may not impose any further restrictions on the exercise of the 464 | rights granted or affirmed under this License. For example, you may 465 | not impose a license fee, royalty, or other charge for exercise of 466 | rights granted under this License, and you may not initiate litigation 467 | (including a cross-claim or counterclaim in a lawsuit) alleging that 468 | any patent claim is infringed by making, using, selling, offering for 469 | sale, or importing the Program or any portion of it. 470 | 471 | 11. Patents. 472 | 473 | A "contributor" is a copyright holder who authorizes use under this 474 | License of the Program or a work on which the Program is based. The 475 | work thus licensed is called the contributor's "contributor version". 476 | 477 | A contributor's "essential patent claims" are all patent claims 478 | owned or controlled by the contributor, whether already acquired or 479 | hereafter acquired, that would be infringed by some manner, permitted 480 | by this License, of making, using, or selling its contributor version, 481 | but do not include claims that would be infringed only as a 482 | consequence of further modification of the contributor version. For 483 | purposes of this definition, "control" includes the right to grant 484 | patent sublicenses in a manner consistent with the requirements of 485 | this License. 
486 | 487 | Each contributor grants you a non-exclusive, worldwide, royalty-free 488 | patent license under the contributor's essential patent claims, to 489 | make, use, sell, offer for sale, import and otherwise run, modify and 490 | propagate the contents of its contributor version. 491 | 492 | In the following three paragraphs, a "patent license" is any express 493 | agreement or commitment, however denominated, not to enforce a patent 494 | (such as an express permission to practice a patent or covenant not to 495 | sue for patent infringement). To "grant" such a patent license to a 496 | party means to make such an agreement or commitment not to enforce a 497 | patent against the party. 498 | 499 | If you convey a covered work, knowingly relying on a patent license, 500 | and the Corresponding Source of the work is not available for anyone 501 | to copy, free of charge and under the terms of this License, through a 502 | publicly available network server or other readily accessible means, 503 | then you must either (1) cause the Corresponding Source to be so 504 | available, or (2) arrange to deprive yourself of the benefit of the 505 | patent license for this particular work, or (3) arrange, in a manner 506 | consistent with the requirements of this License, to extend the patent 507 | license to downstream recipients. "Knowingly relying" means you have 508 | actual knowledge that, but for the patent license, your conveying the 509 | covered work in a country, or your recipient's use of the covered work 510 | in a country, would infringe one or more identifiable patents in that 511 | country that you have reason to believe are valid. 
512 | 513 | If, pursuant to or in connection with a single transaction or 514 | arrangement, you convey, or propagate by procuring conveyance of, a 515 | covered work, and grant a patent license to some of the parties 516 | receiving the covered work authorizing them to use, propagate, modify 517 | or convey a specific copy of the covered work, then the patent license 518 | you grant is automatically extended to all recipients of the covered 519 | work and works based on it. 520 | 521 | A patent license is "discriminatory" if it does not include within 522 | the scope of its coverage, prohibits the exercise of, or is 523 | conditioned on the non-exercise of one or more of the rights that are 524 | specifically granted under this License. You may not convey a covered 525 | work if you are a party to an arrangement with a third party that is 526 | in the business of distributing software, under which you make payment 527 | to the third party based on the extent of your activity of conveying 528 | the work, and under which the third party grants, to any of the 529 | parties who would receive the covered work from you, a discriminatory 530 | patent license (a) in connection with copies of the covered work 531 | conveyed by you (or copies made from those copies), or (b) primarily 532 | for and in connection with specific products or compilations that 533 | contain the covered work, unless you entered into that arrangement, 534 | or that patent license was granted, prior to 28 March 2007. 535 | 536 | Nothing in this License shall be construed as excluding or limiting 537 | any implied license or other defenses to infringement that may 538 | otherwise be available to you under applicable patent law. 539 | 540 | 12. No Surrender of Others' Freedom. 541 | 542 | If conditions are imposed on you (whether by court order, agreement or 543 | otherwise) that contradict the conditions of this License, they do not 544 | excuse you from the conditions of this License. 
If you cannot convey a 545 | covered work so as to satisfy simultaneously your obligations under this 546 | License and any other pertinent obligations, then as a consequence you may 547 | not convey it at all. For example, if you agree to terms that obligate you 548 | to collect a royalty for further conveying from those to whom you convey 549 | the Program, the only way you could satisfy both those terms and this 550 | License would be to refrain entirely from conveying the Program. 551 | 552 | 13. Use with the GNU Affero General Public License. 553 | 554 | Notwithstanding any other provision of this License, you have 555 | permission to link or combine any covered work with a work licensed 556 | under version 3 of the GNU Affero General Public License into a single 557 | combined work, and to convey the resulting work. The terms of this 558 | License will continue to apply to the part which is the covered work, 559 | but the special requirements of the GNU Affero General Public License, 560 | section 13, concerning interaction through a network will apply to the 561 | combination as such. 562 | 563 | 14. Revised Versions of this License. 564 | 565 | The Free Software Foundation may publish revised and/or new versions of 566 | the GNU General Public License from time to time. Such new versions will 567 | be similar in spirit to the present version, but may differ in detail to 568 | address new problems or concerns. 569 | 570 | Each version is given a distinguishing version number. If the 571 | Program specifies that a certain numbered version of the GNU General 572 | Public License "or any later version" applies to it, you have the 573 | option of following the terms and conditions either of that numbered 574 | version or of any later version published by the Free Software 575 | Foundation. If the Program does not specify a version number of the 576 | GNU General Public License, you may choose any version ever published 577 | by the Free Software Foundation. 
578 | 579 | If the Program specifies that a proxy can decide which future 580 | versions of the GNU General Public License can be used, that proxy's 581 | public statement of acceptance of a version permanently authorizes you 582 | to choose that version for the Program. 583 | 584 | Later license versions may give you additional or different 585 | permissions. However, no additional obligations are imposed on any 586 | author or copyright holder as a result of your choosing to follow a 587 | later version. 588 | 589 | 15. Disclaimer of Warranty. 590 | 591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 599 | 600 | 16. Limitation of Liability. 601 | 602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 610 | SUCH DAMAGES. 611 | 612 | 17. Interpretation of Sections 15 and 16. 
613 | 614 | If the disclaimer of warranty and limitation of liability provided 615 | above cannot be given local legal effect according to their terms, 616 | reviewing courts shall apply local law that most closely approximates 617 | an absolute waiver of all civil liability in connection with the 618 | Program, unless a warranty or assumption of liability accompanies a 619 | copy of the Program in return for a fee. 620 | 621 | END OF TERMS AND CONDITIONS 622 | 623 | How to Apply These Terms to Your New Programs 624 | 625 | If you develop a new program, and you want it to be of the greatest 626 | possible use to the public, the best way to achieve this is to make it 627 | free software which everyone can redistribute and change under these terms. 628 | 629 | To do so, attach the following notices to the program. It is safest 630 | to attach them to the start of each source file to most effectively 631 | state the exclusion of warranty; and each file should have at least 632 | the "copyright" line and a pointer to where the full notice is found. 633 | 634 | <one line to give the program's name and a brief idea of what it does.> 635 | Copyright (C) <year> <name of author> 636 | 637 | This program is free software: you can redistribute it and/or modify 638 | it under the terms of the GNU General Public License as published by 639 | the Free Software Foundation, either version 3 of the License, or 640 | (at your option) any later version. 641 | 642 | This program is distributed in the hope that it will be useful, 643 | but WITHOUT ANY WARRANTY; without even the implied warranty of 644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 645 | GNU General Public License for more details. 646 | 647 | You should have received a copy of the GNU General Public License 648 | along with this program. If not, see <https://www.gnu.org/licenses/>. 649 | 650 | Also add information on how to contact you by electronic and paper mail. 
651 | 652 | If the program does terminal interaction, make it output a short 653 | notice like this when it starts in an interactive mode: 654 | 655 | <program> Copyright (C) <year> <name of author> 656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 657 | This is free software, and you are welcome to redistribute it 658 | under certain conditions; type `show c' for details. 659 | 660 | The hypothetical commands `show w' and `show c' should show the appropriate 661 | parts of the General Public License. Of course, your program's commands 662 | might be different; for a GUI interface, you would use an "about box". 663 | 664 | You should also get your employer (if you work as a programmer) or school, 665 | if any, to sign a "copyright disclaimer" for the program, if necessary. 666 | For more information on this, and how to apply and follow the GNU GPL, see 667 | <https://www.gnu.org/licenses/>. 668 | 669 | The GNU General Public License does not permit incorporating your program 670 | into proprietary programs. If your program is a subroutine library, you 671 | may consider it more useful to permit linking proprietary applications with 672 | the library. If this is what you want to do, use the GNU Lesser General 673 | Public License instead of this License. But first, please read 674 | <https://www.gnu.org/licenses/why-not-lgpl.html>. 
675 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 7 | # AvRecorder 8 | Record audio and video via WGC, DXGI, BitBlt, CoreAudio, DirectX11 and FFmpeg 9 | 10 | Built by MSVC 2022 CMake + Qt_64 5.15.0 + FFmpeg 5.1.0 11 | 12 | ## 目前完成的功能 13 | 14 | * 可以进行 RTSP/RTMP 推流 (直播) 15 | * 使用 D3D11 加速像素色彩空间转换 16 | * 使用 DXGI 录制桌面 17 | * 使用 BitBlt 录制窗口 18 | * 使用 WGC 捕获桌面和窗口 19 | * 多显示器的支持 20 | * 使用 DirectX11 对捕获的画面进行渲染 21 | * 使用 FFmpeg 对捕获的画面进行编码 22 | * 使用 CoreAudio 录制麦克风和扬声器 23 | * 使用 FFmpeg 对麦克风和扬声器进行混音,并且具备控制音频音量的功能 24 | * 使用 FFmpeg 对音频和视频进行编码 25 | * 可以对音频进行调幅 26 | * 帧率控制 27 | 28 | ## 待完成功能(咕咕咕) 29 | 30 | * 暂停录制 31 | * 截取画面 32 | * 画面合成 33 | 34 | ## 遇到的坑 35 | 36 | * 音频编码直接使用 `avcodec_find_encoder(AV_CODEC_ID_AAC)` AAC 编码器牛逼! 37 | * 鼠标绘制方面:不要使用 DrawIcon, 而应该使用 DrawIconEx 并且最后的参数设置为 DI_NORMAL | DI_COMPAT, 不然绘制出来的鼠标会有锯齿 38 | * DXGI 截屏为了能够让 GDI 进行鼠标绘制, _textureDesc.MiscFlags = D3D11_RESOURCE_MISC_GDI_COMPATIBLE 应这样设置 39 | * 帧率控制: Sleep 千万不要简单的让时间除以帧率, 因为 C++ 中整数运算会直接截断, 导致视频的截取的频率会变快 40 | * GDI 截屏将画面复制到 AVFrame 时要一行一行的复制, 不然画面可能会撕裂,原因是内存对齐 41 | * 硬件编码必须每次 alloc_frame 和 free_frame, 不然会导致内存泄漏 42 | * 音频混音过滤链必须每次 unref, 不然会导致内存泄漏 43 | * 音频捕获当电脑没有播放器播放声音时, 扬声器会停止工作, 这会导致一系列严重的问题: 混音失败、编码失败、音画不同步等, 这里采用的方案是循环播放一个静音的音频,强制让扬声器工作 44 | * DXGI 截屏当桌面没有画面刷新时, 会返回一个错误码, 不过这没事, 直接让编码器编码上一帧的缓存即可 45 | 46 | ## 关于性能 47 | * CPU 软件编码设置为 veryfast, 以降低 CPU 的占用 48 | * 截取画面从 GPU 到 CPU 需要调用 Map 函数, 这个函数实际上很坑的, 因为他必须等待 GPU 把这帧画面绘制完成才能工作, 这样 CPU 就得搁那干等, 究极浪费性能, 解决方案就是多缓存几个 Texture, 这样做就是增加了延迟, 但是这无所谓, 录屏谁在乎那几帧的延迟呢,肉眼是看不出来的 49 | * 像素色彩空间转换 CPU 占用高, D3D11 能在硬件层面完成像素转换 50 | 51 | ## 代码参考 52 | 53 | * https://github.com/NVIDIA/video-sdk-samples/blob/master/nvEncDXGIOutputDuplicationSample/Preproc.cpp 54 | 55 | * https://github.com/microsoft/Windows.UI.Composition-Win32-Samples/tree/master/cpp/ScreenCaptureforHWND 
-------------------------------------------------------------------------------- /rc/icon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vector-wlc/AvRecorder/94c6d95521c29774312f5dd93d0f35c878901675/rc/icon.ico -------------------------------------------------------------------------------- /rc/icon.rc: -------------------------------------------------------------------------------- 1 | IDI_ICON1 ICON "icon.ico" -------------------------------------------------------------------------------- /rc/mute.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vector-wlc/AvRecorder/94c6d95521c29774312f5dd93d0f35c878901675/rc/mute.wav -------------------------------------------------------------------------------- /src/basic/basic.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-06 11:53:50 5 | * @Description: 6 | */ 7 | #include "basic/basic.h" 8 | 9 | std::mutex __mtx; -------------------------------------------------------------------------------- /src/basic/basic.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-01-28 11:01:58 5 | * @Description: 6 | */ 7 | #ifndef __BASIC_FUCN_H__ 8 | #define __BASIC_FUCN_H__ 9 | #define __STDC_FORMAT_MACROS 10 | 11 | #include 12 | #include 13 | #include 14 | #include 15 | 16 | extern "C" { 17 | #include 18 | #include 19 | } 20 | 21 | // *************** 22 | // MUTEX 23 | extern std::mutex __mtx; 24 | 25 | // *************** 26 | // debug function 27 | 28 | #define __AVDEBUG 29 | 30 | #ifdef __AVDEBUG 31 | #define __DebugPrint(fmtStr, ...) \ 32 | std::printf("[" __FILE__ ", line:%d] " fmtStr "\n", __LINE__, ##__VA_ARGS__) 33 | #define __Str(exp) #exp 34 | #define __Check(retVal, ...) 
\ 35 | do { \ 36 | if (!(__VA_ARGS__)) { \ 37 | __DebugPrint(__Str(__VA_ARGS__) " failed"); \ 38 | return retVal; \ 39 | } \ 40 | } while (false) 41 | 42 | #else 43 | #define __DebugPrint(fmtStr, ...) 44 | #define __Check(retVal, ...) \ 45 | do { \ 46 | if (!(__VA_ARGS__)) { \ 47 | return retVal; \ 48 | } \ 49 | } while (false) 50 | #endif 51 | 52 | #define __CheckNo(...) __Check(, __VA_ARGS__) 53 | #define __CheckBool(...) __Check(false, __VA_ARGS__) 54 | #define __CheckNullptr(...) __Check(nullptr, __VA_ARGS__) 55 | 56 | enum class MediaType { 57 | AUDIO, 58 | VIDEO 59 | }; 60 | 61 | // *************** 62 | // memory function 63 | 64 | template 65 | void Free(T*& ptr, Func&& func) 66 | { 67 | static_assert(std::is_convertible_v>, "Type Func should be std::function"); 68 | if (ptr == nullptr) { 69 | return; 70 | } 71 | 72 | func(); 73 | ptr = nullptr; 74 | } 75 | 76 | //*************** 77 | // time function 78 | 79 | // Sleep x ms 80 | inline void SleepMs(int timeMs) 81 | { 82 | std::this_thread::sleep_for(std::chrono::milliseconds(timeMs)); 83 | } 84 | 85 | // 对于音频编码器的全局设置 86 | constexpr int AUDIO_SAMPLE_RATE = 48000; 87 | constexpr int AUDIO_CHANNEL = 1; 88 | constexpr AVSampleFormat AUDIO_FMT = AV_SAMPLE_FMT_FLTP; 89 | constexpr int MICROPHONE_INDEX = 0; 90 | constexpr int SPEAKER_INDEX = 1; 91 | 92 | #endif -------------------------------------------------------------------------------- /src/basic/frame.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-19 14:23:39 5 | * @Description: 6 | */ 7 | #include "basic/frame.h" 8 | 9 | extern "C" { 10 | #include 11 | } 12 | 13 | AVFrame* Frame::Alloc(AVSampleFormat sampleFmt, 14 | const AVChannelLayout* channel_layout, 15 | int sampleRate, int nbSamples) 16 | { 17 | AVFrame* frame = nullptr; 18 | __CheckNullptr(frame = av_frame_alloc()); 19 | frame->format = sampleFmt; 20 | 
av_channel_layout_copy(&frame->ch_layout, channel_layout); 21 | frame->sample_rate = sampleRate; 22 | frame->nb_samples = nbSamples; 23 | 24 | /* allocate the buffers for the frame data */ 25 | __CheckNullptr(av_frame_get_buffer(frame, 0) >= 0); 26 | return frame; 27 | } 28 | 29 | Frame::Frame(AVSampleFormat sampleFmt, 30 | const AVChannelLayout* channel_layout, int sampleRate, 31 | int nbSamples) 32 | { 33 | __CheckNo(frame = Alloc(sampleFmt, channel_layout, sampleRate, nbSamples)); 34 | } 35 | 36 | Frame::Frame(AVFrame* frame) 37 | { 38 | if (frame == nullptr) { 39 | this->frame = nullptr; 40 | return; 41 | } 42 | __CheckNo(this->frame = Alloc(AVSampleFormat(frame->format), &frame->ch_layout, frame->sample_rate, frame->nb_samples)); 43 | __CheckNo(av_frame_copy(this->frame, frame) >= 0); 44 | } 45 | 46 | Frame::Frame(AVPixelFormat pixFmt, int width, int height) 47 | { 48 | __CheckNo(frame = Alloc(pixFmt, width, height)); 49 | } 50 | 51 | AVFrame* Frame::Alloc(AVPixelFormat pixFmt, int width, int height) 52 | { 53 | AVFrame* frame = nullptr; 54 | __CheckNullptr(frame = av_frame_alloc()); 55 | 56 | frame->format = pixFmt; 57 | frame->width = width; 58 | frame->height = height; 59 | 60 | /* allocate the buffers for the frame data */ 61 | __CheckNullptr(av_frame_get_buffer(frame, 0) >= 0); 62 | return frame; 63 | } 64 | 65 | Frame::Frame(AVFrame* frame) 66 | { 67 | if (frame == nullptr) { 68 | this->frame = nullptr; 69 | return; 70 | } 71 | __CheckNo(this->frame = Alloc(AVPixelFormat(frame->format), frame->width, frame->height)); 72 | __CheckNo(av_frame_copy(this->frame, frame) >= 0); 73 | } 74 | 75 | bool FfmpegConverter::SetSize(int width, int height) 76 | { 77 | Free(_swsCtx, [this] { sws_freeContext(_swsCtx); }); 78 | Free(_frameTo, [this] { av_frame_free(&_frameTo); }); 79 | // 创建格式转换 80 | __CheckBool(_swsCtx = sws_getContext( 81 | width, height, _from, 82 | width, height, _to, 83 | 0, NULL, NULL, NULL)); 84 | 85 | __CheckBool(_frameTo = Frame::Alloc(_to, width, 
height)); 86 | return true; 87 | } 88 | 89 | AVFrame* FfmpegConverter::Trans(AVFrame* frameFrom) 90 | { 91 | // 如果是空指针,直接把缓存返回 92 | if (frameFrom == nullptr) { 93 | return _frameTo; 94 | } 95 | __CheckNullptr( 96 | sws_scale(_swsCtx, (const uint8_t* const*)frameFrom->data, 97 | frameFrom->linesize, 0, frameFrom->height, _frameTo->data, 98 | _frameTo->linesize) 99 | >= 0); 100 | return _frameTo; 101 | } 102 | 103 | FfmpegConverter::~FfmpegConverter() 104 | { 105 | Free(_swsCtx, [this] { sws_freeContext(_swsCtx); }); 106 | Free(_frameTo, [this] { av_frame_free(&_frameTo); }); 107 | } 108 | -------------------------------------------------------------------------------- /src/basic/frame.h: -------------------------------------------------------------------------------- 1 | #ifndef __FRAME_H__ 2 | #define __FRAME_H__ 3 | #include "basic/basic.h" 4 | 5 | class __BasicFrame { 6 | public: 7 | AVFrame* frame = nullptr; 8 | __BasicFrame() = default; 9 | __BasicFrame(__BasicFrame&& rhs) noexcept 10 | { 11 | frame = rhs.frame; 12 | rhs.frame = nullptr; 13 | } 14 | __BasicFrame& operator=(__BasicFrame&& rhs) 15 | { 16 | Free(frame, [this] { av_frame_free(&frame); }); 17 | frame = rhs.frame; 18 | rhs.frame = nullptr; 19 | return *this; 20 | } 21 | __BasicFrame(const __BasicFrame& rhs) = delete; 22 | __BasicFrame& operator=(const __BasicFrame& rhs) = delete; 23 | ~__BasicFrame() 24 | { 25 | Free(frame, [this] { av_frame_free(&frame); }); 26 | } 27 | }; 28 | 29 | template 30 | class Frame; 31 | 32 | template <> 33 | class Frame : public __BasicFrame { 34 | public: 35 | static AVFrame* Alloc(AVSampleFormat sampleFmt, 36 | const AVChannelLayout* channel_layout, 37 | int sampleRate, int nbSamples); 38 | 39 | Frame(AVSampleFormat sampleFmt, 40 | const AVChannelLayout* channel_layout, int sampleRate, 41 | int nbSamples); 42 | 43 | Frame(AVFrame* frame); 44 | Frame() = default; 45 | }; 46 | 47 | template <> 48 | class Frame : public __BasicFrame { 49 | public: 50 | static AVFrame* 
Alloc(AVPixelFormat pixFmt, int width, int height); 51 | Frame(AVPixelFormat pixFmt, int width, int height); 52 | Frame(AVFrame* frame); 53 | Frame() = default; 54 | }; 55 | 56 | struct SwsContext; 57 | 58 | class FfmpegConverter { 59 | private: 60 | AVPixelFormat _from; 61 | AVPixelFormat _to; 62 | 63 | public: 64 | FfmpegConverter(AVPixelFormat from, AVPixelFormat to) 65 | : _from(from) 66 | , _to(to) 67 | { 68 | } 69 | bool SetSize(int width, int height); 70 | AVFrame* Trans(AVFrame* frameFrom); 71 | ~FfmpegConverter(); 72 | 73 | private: 74 | AVFrame* _frameTo = nullptr; 75 | SwsContext* _swsCtx = nullptr; 76 | }; 77 | 78 | #endif -------------------------------------------------------------------------------- /src/basic/timer.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-06 10:37:38 5 | * @Description: 6 | */ 7 | #ifndef __TIMER_H__ 8 | #define __TIMER_H__ 9 | 10 | #include "basic/basic.h" 11 | #include 12 | #include 13 | 14 | class Timer { 15 | public: 16 | ~Timer() 17 | { 18 | Stop(); 19 | } 20 | 21 | // interval 为 0 表示时刻执行 22 | template 23 | void Start(int fps, Func&& func) 24 | { 25 | static_assert(std::is_convertible_v>, "func need to be std::function"); 26 | _fps = fps; 27 | _tickCnt = 0; 28 | _isOverload = false; 29 | __CheckNo(!_isRunning); 30 | using namespace std::chrono; 31 | _isRunning = true; 32 | _beginTime = high_resolution_clock::now(); 33 | if (_fps > 0) { 34 | auto task = [this, func = std::forward(func)]() mutable { 35 | while (_isRunning) { 36 | // 这里不能直接使用整数除法 37 | // 因为整数除法有截断,导致最终睡眠的时间少一些 38 | uint64_t goalTime = int((double(1000) / _fps * _tickCnt) + 0.5); 39 | ++_tickCnt; 40 | auto nowTime = high_resolution_clock::now(); 41 | auto duration = duration_cast(nowTime - _beginTime).count(); 42 | int64_t sleepTime = goalTime - duration; 43 | if (sleepTime > 0) { 44 | SleepMs(sleepTime); 45 | } 46 | #ifdef __AVDEBUG 47 | // else if 
(sleepTime < 0) { 48 | // printf("Time out : %lld\n", -sleepTime); 49 | // } 50 | #endif 51 | _isOverload = -sleepTime > 1000; // 捕获的过载时间设置为 1s 52 | func(); 53 | } 54 | }; 55 | _thread = new std::thread(std::move(task)); 56 | timeBeginPeriod(1); 57 | return; 58 | } 59 | 60 | auto task = [this, func = std::forward(func)]() mutable { 61 | while (_isRunning) { 62 | func(); 63 | } 64 | }; 65 | _thread = new std::thread(std::move(task)); 66 | } 67 | 68 | void Stop() 69 | { 70 | _isRunning = false; 71 | if (_thread == nullptr) { 72 | return; 73 | } 74 | if (_fps > 0) { 75 | timeEndPeriod(1); 76 | } 77 | _thread->join(); 78 | delete _thread; 79 | 80 | _thread = nullptr; 81 | } 82 | 83 | bool IsOverload() const { return _isOverload; } 84 | 85 | private: 86 | int _fps = 100; 87 | int _isRunning = false; 88 | int _isOverload = false; 89 | std::vector vec; 90 | std::chrono::time_point _beginTime; 91 | std::thread* _thread = nullptr; 92 | uint64_t _tickCnt = 0; 93 | }; 94 | 95 | #endif -------------------------------------------------------------------------------- /src/capturer/audio_capturer.cpp: -------------------------------------------------------------------------------- 1 | 2 | #include "audio_capturer.h" 3 | 4 | #include "basic/basic.h" 5 | #include 6 | 7 | #define DEFAULT_SAMPLE_RATE 48000 // 默认采样率:48kHz 8 | #define DEFAULT_BITS_PER_SAMPLE 16 // 默认位深:16bit 9 | #define DEFAULT_CHANNELS 1 // 默认音频通道数:1 10 | #define DEFAULT_AUDIO_PACKET_INTERVAL 10 // 默认音频包发送间隔:10ms 11 | 12 | bool AudioCapturer::Init(Type deviceType, CallBack callback, void* userInfo) 13 | { 14 | Stop(); 15 | _userInfo = userInfo; 16 | _callback = callback; 17 | _deviceType = deviceType; 18 | __CheckBool(_CreateDeviceEnumerator(&_pDeviceEnumerator)); 19 | __CheckBool(_CreateDevice(_pDeviceEnumerator, &_pDevice)); 20 | __CheckBool(_CreateAudioClient(_pDevice, &_pAudioClient)); 21 | 22 | if (!_IsFormatSupported(_pAudioClient)) { 23 | __CheckBool(_GetPreferFormat(_pAudioClient, &_formatex)); 24 | } 25 | 
__CheckBool(_InitAudioClient(_pAudioClient, &_formatex)); 26 | __CheckBool(_CreateAudioCaptureClient(_pAudioClient, &_pAudioCaptureClient)); 27 | _isInit = true; 28 | return true; 29 | } 30 | 31 | bool AudioCapturer::Start() 32 | { 33 | __CheckBool(_isInit); 34 | _loopFlag = true; 35 | // 用于强制打开扬声器 36 | PlaySoundA("./rc/mute.wav", nullptr, SND_FILENAME | SND_ASYNC | SND_LOOP); 37 | _captureThread = new std::thread( 38 | [this] { _ThreadRun(_pAudioClient, _pAudioCaptureClient); }); 39 | return true; 40 | } 41 | 42 | void AudioCapturer::Stop() 43 | { 44 | // CoUninitialize(); 45 | _isInit = false; 46 | _loopFlag = false; 47 | Free(_captureThread, [this] { 48 | _captureThread->join(); 49 | delete _captureThread; 50 | }); 51 | Free(_pAudioCaptureClient, [this] { _pAudioCaptureClient->Release(); }); 52 | if (_pAudioClient != nullptr) { 53 | _pAudioClient->Stop(); 54 | } 55 | PlaySoundA(nullptr, nullptr, SND_FILENAME | SND_ASYNC | SND_LOOP); 56 | 57 | Free(_pAudioClient, [this] { _pAudioClient->Release(); }); 58 | Free(_pDevice, [this] { _pDevice->Release(); }); 59 | Free(_pDeviceEnumerator, [this] { _pDeviceEnumerator->Release(); }); 60 | } 61 | 62 | bool AudioCapturer::_CreateDeviceEnumerator(IMMDeviceEnumerator** enumerator) 63 | { 64 | // __CheckBool(SUCCEEDED(CoInitializeEx(nullptr, COINIT_MULTITHREADED))); 65 | // __CheckBool(SUCCEEDED(CoInitializeEx(nullptr, COINIT_APARTMENTTHREADED))); 66 | __CheckBool(SUCCEEDED(CoCreateInstance(__uuidof(MMDeviceEnumerator), NULL, CLSCTX_ALL, 67 | __uuidof(IMMDeviceEnumerator), 68 | reinterpret_cast(enumerator)))); 69 | return true; 70 | } 71 | bool AudioCapturer::_CreateDevice(IMMDeviceEnumerator* enumerator, IMMDevice** device) 72 | { 73 | EDataFlow enDataFlow = _deviceType == Microphone ? 
eCapture : eRender; 74 | ERole enRole = eConsole; 75 | __CheckBool(SUCCEEDED(enumerator->GetDefaultAudioEndpoint(enDataFlow, enRole, device))); 76 | return true; 77 | } 78 | bool AudioCapturer::_CreateAudioClient(IMMDevice* device, IAudioClient** audioClient) 79 | { 80 | __CheckBool(SUCCEEDED(device->Activate(__uuidof(IAudioClient), CLSCTX_ALL, NULL, 81 | (void**)audioClient))); 82 | return true; 83 | } 84 | bool AudioCapturer::_IsFormatSupported(IAudioClient* audioClient) 85 | { 86 | memset(&_formatex, 0, sizeof(_formatex)); 87 | WAVEFORMATEX* format = &_formatex.Format; 88 | format->nSamplesPerSec = DEFAULT_SAMPLE_RATE; 89 | format->wBitsPerSample = DEFAULT_BITS_PER_SAMPLE; 90 | format->nChannels = DEFAULT_CHANNELS; 91 | 92 | WAVEFORMATEX* closestMatch = nullptr; 93 | 94 | HRESULT hr = audioClient->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED, 95 | format, &closestMatch); 96 | if (hr == AUDCLNT_E_UNSUPPORTED_FORMAT) // 0x88890008 97 | { 98 | if (closestMatch == nullptr) // 如果找不到最相近的格式,closestMatch可能为nullptr 99 | { 100 | return false; 101 | } 102 | 103 | format->nSamplesPerSec = closestMatch->nSamplesPerSec; 104 | format->wBitsPerSample = closestMatch->wBitsPerSample; 105 | format->nChannels = closestMatch->nChannels; 106 | 107 | return true; 108 | } 109 | 110 | return false; 111 | } 112 | bool AudioCapturer::_GetPreferFormat(IAudioClient* audioClient, 113 | WAVEFORMATEXTENSIBLE* formatex) 114 | { 115 | WAVEFORMATEX* format = nullptr; 116 | __CheckBool(SUCCEEDED(audioClient->GetMixFormat(&format))); 117 | formatex->Format.nSamplesPerSec = format->nSamplesPerSec; 118 | formatex->Format.wBitsPerSample = format->wBitsPerSample; 119 | formatex->Format.nChannels = format->nChannels; 120 | return true; 121 | } 122 | bool AudioCapturer::_InitAudioClient(IAudioClient* audioClient, 123 | WAVEFORMATEXTENSIBLE* formatex) 124 | { 125 | AUDCLNT_SHAREMODE shareMode = AUDCLNT_SHAREMODE_SHARED; // share Audio Engine with other applications 126 | DWORD streamFlags = _deviceType == 
Microphone ? 0 : AUDCLNT_STREAMFLAGS_LOOPBACK; 127 | streamFlags |= AUDCLNT_STREAMFLAGS_AUTOCONVERTPCM; // A channel matrixer and a sample 128 | // rate converter are inserted 129 | streamFlags |= AUDCLNT_STREAMFLAGS_SRC_DEFAULT_QUALITY; // a sample rate converter 130 | // with better quality than 131 | // the default conversion but 132 | // with a higher performance 133 | // cost is used 134 | REFERENCE_TIME hnsBufferDuration = 0; 135 | WAVEFORMATEX* format = &formatex->Format; 136 | format->wFormatTag = WAVE_FORMAT_EXTENSIBLE; 137 | format->nBlockAlign = (format->wBitsPerSample >> 3) * format->nChannels; 138 | format->nAvgBytesPerSec = format->nBlockAlign * format->nSamplesPerSec; 139 | format->cbSize = sizeof(WAVEFORMATEXTENSIBLE) - sizeof(WAVEFORMATEX); 140 | formatex->Samples.wValidBitsPerSample = format->wBitsPerSample; 141 | formatex->dwChannelMask = format->nChannels == 1 ? KSAUDIO_SPEAKER_MONO : KSAUDIO_SPEAKER_STEREO; 142 | formatex->SubFormat = KSDATAFORMAT_SUBTYPE_PCM; 143 | 144 | __CheckBool(SUCCEEDED(audioClient->Initialize(shareMode, streamFlags, hnsBufferDuration, 0, 145 | format, nullptr))); 146 | return true; 147 | } 148 | 149 | bool AudioCapturer::_CreateAudioCaptureClient(IAudioClient* audioClient, 150 | IAudioCaptureClient** audioCaptureClient) 151 | { 152 | __CheckBool(SUCCEEDED(audioClient->GetService(IID_PPV_ARGS(audioCaptureClient)))); 153 | return true; 154 | } 155 | 156 | bool AudioCapturer::_ThreadRun(IAudioClient* audio_client, 157 | IAudioCaptureClient* audio_capture_client) 158 | { 159 | UINT32 num_success = 0; 160 | BYTE* p_audio_data = nullptr; 161 | UINT32 num_frames_to_read = 0; 162 | DWORD dw_flag = 0; 163 | UINT32 num_frames_in_next_packet = 0; 164 | audio_client->Start(); 165 | while (_loopFlag) { 166 | SleepMs(5); 167 | while (true) { 168 | __CheckBool(SUCCEEDED(audio_capture_client->GetNextPacketSize(&num_frames_in_next_packet))); 169 | if (num_frames_in_next_packet == 0) { 170 | break; 171 | } 172 | 173 | 
__CheckBool(SUCCEEDED(audio_capture_client->GetBuffer(&p_audio_data, &num_frames_to_read, 174 | &dw_flag, nullptr, nullptr))); 175 | 176 | size_t size = (_formatex.Format.wBitsPerSample >> 3) * _formatex.Format.nChannels * num_frames_to_read; 177 | _callback(p_audio_data, size, _userInfo); 178 | __CheckBool(SUCCEEDED(audio_capture_client->ReleaseBuffer(num_frames_to_read))); 179 | } 180 | } 181 | 182 | audio_client->Stop(); 183 | return true; 184 | } 185 | -------------------------------------------------------------------------------- /src/capturer/audio_capturer.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-19 15:34:07 5 | * @Description: 6 | */ 7 | 8 | #ifndef __AUDIO_CAPTURER_H__ 9 | #define __AUDIO_CAPTURER_H__ 10 | 11 | #include 12 | #include 13 | #include 14 | 15 | #include 16 | #include 17 | 18 | class AudioCapturer { 19 | public: 20 | enum Type { 21 | Microphone, 22 | Speaker 23 | }; 24 | using CallBack = void (*)(void* data, size_t size, void* userInfo); 25 | 26 | bool Init(Type deviceType, CallBack callback, void* userInfo = nullptr); 27 | bool Start(); 28 | const WAVEFORMATEX& GetFormat() const { return _formatex.Format; } 29 | 30 | void Stop(); 31 | 32 | private: 33 | bool _isInit = false; 34 | CallBack _callback; 35 | Type _deviceType; 36 | IMMDeviceEnumerator* _pDeviceEnumerator = nullptr; 37 | IMMDevice* _pDevice = nullptr; 38 | IAudioClient* _pAudioClient = nullptr; 39 | IAudioCaptureClient* _pAudioCaptureClient = nullptr; 40 | std::thread* _captureThread = nullptr; 41 | bool _loopFlag = false; 42 | WAVEFORMATEXTENSIBLE _formatex; 43 | void* _userInfo = nullptr; 44 | 45 | bool _CreateDeviceEnumerator(IMMDeviceEnumerator** enumerator); 46 | bool _CreateDevice(IMMDeviceEnumerator* enumerator, IMMDevice** device); 47 | bool _CreateAudioClient(IMMDevice* device, IAudioClient** audioClient); 48 | bool _IsFormatSupported(IAudioClient* 
audioClient); 49 | bool _GetPreferFormat(IAudioClient* audioClient, 50 | WAVEFORMATEXTENSIBLE* formatex); 51 | bool _InitAudioClient(IAudioClient* audioClient, 52 | WAVEFORMATEXTENSIBLE* formatex); 53 | bool _CreateAudioCaptureClient(IAudioClient* audioClient, 54 | IAudioCaptureClient** audioCaptureClient); 55 | bool _ThreadRun(IAudioClient* audio_client, 56 | IAudioCaptureClient* audio_capture_client); 57 | }; 58 | 59 | #endif -------------------------------------------------------------------------------- /src/capturer/dxgi_capturer.cpp: -------------------------------------------------------------------------------- 1 | #include "dxgi_capturer.h" 2 | #include 3 | 4 | DxgiCapturer::DxgiCapturer() 5 | { 6 | ZeroMemory(&_desc, sizeof(_desc)); 7 | } 8 | 9 | DxgiCapturer::~DxgiCapturer() 10 | { 11 | Close(); 12 | } 13 | 14 | bool DxgiCapturer::Open(int left, int top, int width, int height) 15 | { 16 | Close(); 17 | HRESULT hr = S_OK; 18 | _isAttached = false; 19 | 20 | if (_bInit) { 21 | return false; 22 | } 23 | 24 | // Driver types supported 25 | D3D_DRIVER_TYPE DriverTypes[] = { 26 | D3D_DRIVER_TYPE_HARDWARE, 27 | D3D_DRIVER_TYPE_WARP, 28 | D3D_DRIVER_TYPE_REFERENCE, 29 | }; 30 | UINT NumDriverTypes = ARRAYSIZE(DriverTypes); 31 | 32 | // Feature levels supported 33 | D3D_FEATURE_LEVEL FeatureLevels[] = { 34 | D3D_FEATURE_LEVEL_11_0, 35 | D3D_FEATURE_LEVEL_10_1, 36 | D3D_FEATURE_LEVEL_10_0, 37 | D3D_FEATURE_LEVEL_9_1}; 38 | UINT NumFeatureLevels = ARRAYSIZE(FeatureLevels); 39 | 40 | D3D_FEATURE_LEVEL FeatureLevel; 41 | 42 | // Create D3D device 43 | for (UINT DriverTypeIndex = 0; DriverTypeIndex < NumDriverTypes; ++DriverTypeIndex) { 44 | hr = D3D11CreateDevice(nullptr, DriverTypes[DriverTypeIndex], nullptr, 0, FeatureLevels, 45 | NumFeatureLevels, D3D11_SDK_VERSION, &_hDevice, &FeatureLevel, &_hContext); 46 | if (SUCCEEDED(hr)) { 47 | break; 48 | } 49 | } 50 | __CheckBool(SUCCEEDED(hr)); 51 | 52 | // Get DXGI device 53 | IDXGIDevice* hDxgiDevice = nullptr; 54 | 
__CheckBool(SUCCEEDED(_hDevice->QueryInterface(__uuidof(IDXGIDevice), reinterpret_cast(&hDxgiDevice)))); 55 | 56 | // Get DXGI adapter 57 | IDXGIAdapter* hDxgiAdapter = nullptr; 58 | hr = hDxgiDevice->GetParent(__uuidof(IDXGIAdapter), reinterpret_cast(&hDxgiAdapter)); 59 | Free(hDxgiDevice, [=] { hDxgiDevice->Release(); }); 60 | __CheckBool(SUCCEEDED(hr)); 61 | 62 | // Get output 63 | INT nOutput = 0; 64 | IDXGIOutput* hDxgiOutput = nullptr; 65 | DXGI_OUTPUT_DESC dxgiOutDesc; 66 | ZeroMemory(&dxgiOutDesc, sizeof(dxgiOutDesc)); 67 | 68 | for (int idx = 0; SUCCEEDED(hr = hDxgiAdapter->EnumOutputs(idx, &hDxgiOutput)); ++idx) { 69 | // get output description struct 70 | hDxgiOutput->GetDesc(&dxgiOutDesc); 71 | if (dxgiOutDesc.DesktopCoordinates.left == left 72 | && dxgiOutDesc.DesktopCoordinates.top == top) { // 寻找显示器 73 | break; 74 | } 75 | } 76 | Free(hDxgiAdapter, [=] { hDxgiAdapter->Release(); }); 77 | __CheckBool(SUCCEEDED(hr)); 78 | 79 | // QI for Output 1 80 | IDXGIOutput1* hDxgiOutput1 = nullptr; 81 | hr = hDxgiOutput->QueryInterface(__uuidof(hDxgiOutput1), reinterpret_cast(&hDxgiOutput1)); 82 | Free(hDxgiOutput, [=] { hDxgiOutput->Release(); }); 83 | __CheckBool(SUCCEEDED(hr)); 84 | 85 | // Create desktop duplication 86 | hr = hDxgiOutput1->DuplicateOutput(_hDevice, &_hDeskDupl); 87 | Free(hDxgiOutput1, [=] { hDxgiOutput1->Release(); }); 88 | __CheckBool(SUCCEEDED(hr)); 89 | 90 | // Set ColorSpace 91 | D3D11_VIDEO_PROCESSOR_COLOR_SPACE inputColorSpace; 92 | inputColorSpace.Usage = 1; 93 | inputColorSpace.RGB_Range = 0; 94 | inputColorSpace.YCbCr_Matrix = 1; 95 | inputColorSpace.YCbCr_xvYCC = 0; 96 | inputColorSpace.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_0_255; 97 | 98 | D3D11_VIDEO_PROCESSOR_COLOR_SPACE outputColorSpace; 99 | outputColorSpace.Usage = 0; 100 | outputColorSpace.RGB_Range = 0; 101 | outputColorSpace.YCbCr_Matrix = 1; 102 | outputColorSpace.YCbCr_xvYCC = 0; 103 | outputColorSpace.Nominal_Range = 
D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_16_235; 104 | _rgbToNv12.Open(_hDevice, _hContext, inputColorSpace, outputColorSpace); 105 | _nv12Frame = Frame::Alloc(AV_PIX_FMT_NV12, width, height); 106 | _xrgbFrame = Frame::Alloc(AV_PIX_FMT_BGR0, width, height); 107 | __CheckBool(_nv12Frame); 108 | __CheckBool(_xrgbFrame); 109 | // 初始化成功 110 | _bInit = true; 111 | return true; 112 | } 113 | void DxgiCapturer::Close() 114 | { 115 | if (!_bInit) { 116 | return; 117 | } 118 | 119 | _bInit = false; 120 | _nv12Buffers.Clear(); 121 | _xrgbBuffers.Clear(); 122 | _rgbToNv12.Close(); 123 | Free(_nv12Frame, [this] { av_frame_free(&_nv12Frame); }); 124 | Free(_xrgbFrame, [this] { av_frame_free(&_xrgbFrame); }); 125 | Free(_hDeskDupl, [this] { _hDeskDupl->Release(); }); 126 | Free(_hDevice, [this] { _hDevice->Release(); }); 127 | Free(_hContext, [this] { _hContext->Release(); }); 128 | } 129 | 130 | HDC DxgiCapturer::GetHdc() 131 | { 132 | _isCaptureSuccess = false; 133 | if (!_bInit) { 134 | return nullptr; 135 | } 136 | 137 | IDXGIResource* hDesktopResource = nullptr; 138 | DXGI_OUTDUPL_FRAME_INFO FrameInfo; 139 | HRESULT hr = _hDeskDupl->AcquireNextFrame(0, &FrameInfo, &hDesktopResource); 140 | if (FAILED(hr)) { 141 | if (hr == DXGI_ERROR_WAIT_TIMEOUT) { // 这里是因为当桌面没有动画更新时就会有一个错误值,不进行错误打印 142 | return nullptr; 143 | } 144 | return nullptr; 145 | } 146 | 147 | // query next frame staging buffer 148 | ID3D11Texture2D* srcImage = nullptr; 149 | hr = hDesktopResource->QueryInterface(__uuidof(ID3D11Texture2D), reinterpret_cast(&srcImage)); 150 | Free(hDesktopResource, [=] { hDesktopResource->Release(); }); 151 | __CheckNullptr(SUCCEEDED(hr)); 152 | 153 | srcImage->GetDesc(&_desc); 154 | 155 | // create a new staging buffer for fill frame image 156 | auto desc = _desc; 157 | desc.ArraySize = 1; 158 | desc.BindFlags = D3D11_BIND_FLAG::D3D11_BIND_RENDER_TARGET; 159 | desc.MiscFlags = D3D11_RESOURCE_MISC_GDI_COMPATIBLE; 160 | desc.SampleDesc.Count = 1; 161 | desc.SampleDesc.Quality = 0; 162 | 
desc.MipLevels = 1; 163 | desc.CPUAccessFlags = 0; 164 | desc.Usage = D3D11_USAGE_DEFAULT; 165 | hr = _hDevice->CreateTexture2D(&desc, nullptr, &_gdiImage); 166 | if (FAILED(hr)) { 167 | __DebugPrint("Create _gdiImage failed"); 168 | Free(srcImage, [=] { srcImage->Release(); }); 169 | Free(_hDeskDupl, [this] { _hDeskDupl->ReleaseFrame(); }); 170 | return nullptr; 171 | } 172 | 173 | // copy next staging buffer to new staging buffer 174 | _hContext->CopyResource(_gdiImage, srcImage); 175 | Free(srcImage, [=] { srcImage->Release(); }); 176 | _hDeskDupl->ReleaseFrame(); 177 | 178 | // create staging buffer for map bits 179 | _hStagingSurf = nullptr; 180 | hr = _gdiImage->QueryInterface(__uuidof(IDXGISurface), (void**)(&_hStagingSurf)); 181 | if (FAILED(hr)) { 182 | __DebugPrint("_gdiImage->QueryInterface failed"); 183 | Free(_gdiImage, [this] { _gdiImage->Release(); }); 184 | return nullptr; 185 | } 186 | 187 | _isCaptureSuccess = true; 188 | HDC hdc = nullptr; 189 | // if GetDc is failed, the hdc is nullptr 190 | _hStagingSurf->GetDC(FALSE, &hdc); 191 | return hdc; 192 | } 193 | 194 | AVFrame* DxgiCapturer::GetFrame() 195 | { 196 | if (!_isCaptureSuccess) { 197 | return nullptr; 198 | } 199 | _isCaptureSuccess = false; 200 | _hStagingSurf->ReleaseDC(nullptr); 201 | 202 | // 创建一个临时的纹理 203 | ID3D11Texture2D* tmpImage = nullptr; 204 | _desc.MiscFlags = 2050; 205 | __CheckNullptr(SUCCEEDED(_hDevice->CreateTexture2D(&_desc, nullptr, &tmpImage))); 206 | _hContext->CopyResource(tmpImage, _gdiImage); 207 | 208 | // 首先尝试创建 NV12 纹理 209 | AVFrame* frame = nullptr; 210 | auto tmpFormat = _desc.Format; 211 | _desc.Format = DXGI_FORMAT_NV12; 212 | if (GenNv12Frame(_hDevice, _hContext, _desc, tmpImage, 213 | _nv12Buffers, _nv12Frame, _rgbToNv12)) { 214 | frame = _nv12Frame; 215 | } else { 216 | _desc.Format = tmpFormat; 217 | GenRgbFrame(_hDevice, _hContext, _desc, _gdiImage, 218 | _xrgbBuffers, _xrgbFrame); 219 | frame = _xrgbFrame; 220 | } 221 | Free(_hStagingSurf, [this] { 
_hStagingSurf->Release(); }); 222 | Free(tmpImage, [&tmpImage] { tmpImage->Release(); }); 223 | Free(_gdiImage, [this] { _gdiImage->Release(); }); 224 | 225 | return frame; 226 | } -------------------------------------------------------------------------------- /src/capturer/dxgi_capturer.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-04 15:50:31 5 | * @Description: 6 | */ 7 | #ifndef __DXGI_CAPTURER_H__ 8 | #define __DXGI_CAPTURER_H__ 9 | 10 | #include 11 | #include 12 | #include 13 | #include "d3d/gen_frame.h" 14 | class DxgiCapturer { 15 | public: 16 | DxgiCapturer(); 17 | ~DxgiCapturer(); 18 | 19 | public: 20 | bool Open(int left, int top, int width, int height); 21 | void Close(); 22 | 23 | public: 24 | HDC GetHdc(); 25 | AVFrame* GetFrame(); 26 | 27 | private: 28 | bool _bInit = false; 29 | bool _isCaptureSuccess = false; 30 | 31 | ID3D11Device* _hDevice = nullptr; 32 | ID3D11DeviceContext* _hContext = nullptr; 33 | IDXGIOutputDuplication* _hDeskDupl = nullptr; 34 | IDXGISurface1* _hStagingSurf = nullptr; 35 | ID3D11Texture2D* _gdiImage = nullptr; 36 | D3D11_TEXTURE2D_DESC _desc; 37 | bool _isAttached = false; 38 | AVFrame* _xrgbFrame = nullptr; 39 | AVFrame* _nv12Frame = nullptr; 40 | BufferFiller _xrgbBuffers; 41 | BufferFiller _nv12Buffers; 42 | D3dConverter _rgbToNv12; 43 | }; 44 | 45 | #endif 46 | -------------------------------------------------------------------------------- /src/capturer/finder.cpp: -------------------------------------------------------------------------------- 1 | #include "finder.h" 2 | 3 | const std::vector& WindowFinder::GetList(bool isUpdate) 4 | { 5 | if (!isUpdate) { 6 | return _list; 7 | } 8 | _list.clear(); 9 | EnumWindows(_EnumWindowsProc, (LPARAM) nullptr); 10 | return _list; 11 | } 12 | 13 | std::vector WindowFinder::_list; 14 | 15 | std::wstring WindowFinder::_GetWindowTextStd(HWND hwnd) 16 | { 17 | std::array 
windowText; 18 | ::GetWindowTextW(hwnd, windowText.data(), (int)windowText.size()); 19 | std::wstring title(windowText.data()); 20 | return title; 21 | } 22 | BOOL CALLBACK WindowFinder::_EnumWindowsProc(HWND hwnd, LPARAM lParam) 23 | { 24 | auto title = _GetWindowTextStd(hwnd); 25 | if (!IsAltTabWindow(hwnd, title)) { 26 | return TRUE; 27 | } 28 | _list.push_back({hwnd, std::move(title)}); 29 | return TRUE; 30 | } 31 | 32 | bool WindowFinder::IsAltTabWindow(HWND hwnd, const std::wstring& title) 33 | { 34 | HWND shellWindow = GetShellWindow(); 35 | 36 | if (hwnd == shellWindow) { 37 | return false; 38 | } 39 | 40 | if (title.length() == 0 || title == L"NVIDIA GeForce Overlay") { 41 | return false; 42 | } 43 | 44 | if (!IsWindowVisible(hwnd)) { 45 | return false; 46 | } 47 | 48 | if (GetAncestor(hwnd, GA_ROOT) != hwnd) { 49 | return false; 50 | } 51 | 52 | LONG style = GetWindowLong(hwnd, GWL_STYLE); 53 | if (!((style & WS_DISABLED) != WS_DISABLED)) { 54 | return false; 55 | } 56 | 57 | DWORD cloaked = FALSE; 58 | HRESULT hrTemp = DwmGetWindowAttribute(hwnd, DWMWA_CLOAKED, &cloaked, sizeof(cloaked)); 59 | if (SUCCEEDED(hrTemp) && cloaked == DWM_CLOAKED_SHELL) { 60 | return false; 61 | } 62 | 63 | return !IsIconic(hwnd); 64 | } 65 | 66 | const std::vector& MonitorFinder::GetList(bool isUpdate) 67 | { 68 | if (!isUpdate) { 69 | return _list; 70 | } 71 | _list.clear(); 72 | EnumDisplayMonitors(nullptr, nullptr, _MonitorEnumProc, (LPARAM) nullptr); 73 | return _list; 74 | } 75 | 76 | std::vector MonitorFinder::_list; 77 | 78 | BOOL CALLBACK MonitorFinder::_MonitorEnumProc( 79 | HMONITOR hMonitor, // handle to display monitor 80 | HDC hdcMonitor, // handle to monitor-appropriate device context 81 | LPRECT lprcMonitor, // pointer to monitor intersection rectangle 82 | LPARAM dwData // data passed from EnumDisplayMonitors 83 | ) 84 | { 85 | std::wstring name = L"显示器" + std::to_wstring(_list.size() + 1); 86 | MONITORINFO monitorInfo; 87 | monitorInfo.cbSize = 
sizeof(monitorInfo); 88 | GetMonitorInfoW(hMonitor, &monitorInfo); 89 | Info info; 90 | info.monitor = hMonitor; 91 | info.rect = monitorInfo.rcMonitor; 92 | info.title = std::move(name); 93 | _list.push_back(std::move(info)); 94 | return TRUE; 95 | } 96 | -------------------------------------------------------------------------------- /src/capturer/finder.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-24 16:28:40 5 | * @Description: 6 | */ 7 | #pragma once 8 | #define UNICODE 9 | 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | 16 | class WindowFinder { 17 | public: 18 | struct Info { 19 | HWND hwnd = nullptr; 20 | std::wstring title; 21 | }; 22 | 23 | static const std::vector& GetList(bool isUpdate = false); 24 | 25 | private: 26 | static std::vector _list; 27 | static std::wstring _GetWindowTextStd(HWND hwnd); 28 | static BOOL CALLBACK _EnumWindowsProc(HWND hwnd, LPARAM lParam); 29 | static bool IsAltTabWindow(HWND hwnd, const std::wstring& title); 30 | }; 31 | 32 | class MonitorFinder { 33 | public: 34 | struct Info { 35 | HMONITOR monitor = nullptr; 36 | std::wstring title; 37 | RECT rect; 38 | }; 39 | 40 | static const std::vector& GetList(bool isUpdate = false); 41 | 42 | private: 43 | static std::vector _list; 44 | 45 | static BOOL CALLBACK _MonitorEnumProc( 46 | HMONITOR hMonitor, // handle to display monitor 47 | HDC hdcMonitor, // handle to monitor-appropriate device context 48 | LPRECT lprcMonitor, // pointer to monitor intersection rectangle 49 | LPARAM dwData // data passed from EnumDisplayMonitors 50 | ); 51 | }; 52 | -------------------------------------------------------------------------------- /src/capturer/gdi_capturer.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-05 14:30:33 5 | * @Description: 6 
| */ 7 | #include "gdi_capturer.h" 8 | #include "basic/basic.h" 9 | 10 | bool GdiCapturer::Open(HWND hwnd, int width, int height) 11 | { 12 | Close(); 13 | _width = width; 14 | _height = height; 15 | _srcHdc = GetWindowDC(hwnd); 16 | _dstHdc = CreateCompatibleDC(_srcHdc); 17 | _bitmap = CreateCompatibleBitmap(_srcHdc, width, height); 18 | SelectObject(_dstHdc, _bitmap); 19 | 20 | _bitmapInfo.bmiHeader.biSize = sizeof(BITMAPINFOHEADER); 21 | _bitmapInfo.bmiHeader.biPlanes = 1; 22 | _bitmapInfo.bmiHeader.biBitCount = 24; 23 | _bitmapInfo.bmiHeader.biWidth = width; 24 | _bitmapInfo.bmiHeader.biHeight = height; 25 | _bitmapInfo.bmiHeader.biCompression = BI_RGB; 26 | _bitmapInfo.bmiHeader.biSizeImage = width * height; 27 | 28 | // 创建缓存帧 29 | _frame = Frame::Alloc(AV_PIX_FMT_BGR24, width, height); 30 | return true; 31 | } 32 | 33 | HDC GdiCapturer::GetHdc(int borderWidth, int borderHeight) 34 | { 35 | __CheckNullptr( 36 | BitBlt(_dstHdc, 0, 0, _width, _height, 37 | _srcHdc, borderWidth / 2, borderHeight - borderWidth / 2, SRCCOPY)); 38 | 39 | return _dstHdc; 40 | } 41 | 42 | AVFrame* GdiCapturer::GetFrame() 43 | { 44 | auto linesize = _frame->linesize[0]; 45 | for (int row = 0; row < _height; ++row) { 46 | __CheckNullptr(GetDIBits(_dstHdc, _bitmap, _height - 1 - row, 1, _frame->data[0] + row * linesize, &_bitmapInfo, DIB_RGB_COLORS)); 47 | } 48 | return _frame; 49 | } 50 | 51 | void GdiCapturer::Close() 52 | { 53 | Free(_frame, [this] { av_frame_free(&_frame); }); 54 | Free(_dstHdc, [this] { DeleteObject(_dstHdc); }); 55 | Free(_bitmap, [this] { DeleteObject(_bitmap); }); 56 | } 57 | 58 | GdiCapturer::~GdiCapturer() 59 | { 60 | Close(); 61 | } 62 | -------------------------------------------------------------------------------- /src/capturer/gdi_capturer.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-05 14:31:02 5 | * @Description: 6 | */ 7 | #ifndef 
__GDI_CAPTURER_H__ 8 | #define __GDI_CAPTURER_H__ 9 | 10 | #include "basic/frame.h" 11 | #include 12 | 13 | class GdiCapturer { 14 | public: 15 | bool Open(HWND hwnd, int width, int height); 16 | HDC GetHdc(int borderWidth, int borderHeight); 17 | AVFrame* GetFrame(); 18 | void Close(); 19 | ~GdiCapturer(); 20 | 21 | private: 22 | HDC _srcHdc = nullptr; 23 | HDC _dstHdc = nullptr; 24 | HBITMAP _bitmap = nullptr; 25 | BITMAPINFO _bitmapInfo; 26 | int _width = 0; 27 | int _height = 0; 28 | AVFrame* _frame = nullptr; 29 | }; 30 | 31 | #endif -------------------------------------------------------------------------------- /src/capturer/video_capturer.cpp: -------------------------------------------------------------------------------- 1 | #include "video_capturer.h" 2 | #include "basic/frame.h" 3 | #include "capturer/finder.h" 4 | 5 | bool VideoCapturer::Open(HWND hwnd, Method method) 6 | { 7 | Close(); 8 | __CheckBool(hwnd); 9 | _srcHwnd = hwnd; 10 | __CheckBool(_GetHwndSize(_srcHwnd)); 11 | _usingMethod = method; 12 | _type = WINDOW; 13 | switch (method) { 14 | case WGC: { 15 | _wgcCapturer = WgcCapturer::New(); 16 | __CheckBool(_wgcCapturer->StartCapturerWindow(hwnd, _width, _height)); 17 | break; 18 | } 19 | 20 | default: { // GDI 21 | _gdiCapturer = new GdiCapturer; 22 | __CheckBool(_gdiCapturer->Open(hwnd, _width, _height)); 23 | break; 24 | } 25 | } 26 | 27 | return true; 28 | } 29 | 30 | bool VideoCapturer::Open(int monitorIdx, Method method) 31 | { 32 | Close(); 33 | auto&& monitorInfo = MonitorFinder::GetList()[monitorIdx]; 34 | _rect = monitorInfo.rect; 35 | _borderHeight = 0; 36 | _borderWidth = 0; 37 | _width = _rect.right - _rect.left; 38 | _height = _rect.bottom - _rect.top; 39 | _usingMethod = method; 40 | _type = MONITOR; 41 | switch (method) { 42 | case WGC: { 43 | auto monitor = monitorInfo.monitor; 44 | _wgcCapturer = WgcCapturer::New(); 45 | __CheckBool(_wgcCapturer->StartCapturerMonitor(monitor, _width, _height)); 46 | break; 47 | } 48 | 49 | 
default: { // DXGI 50 | _dxgiCapturer = new DxgiCapturer; 51 | __CheckBool(_dxgiCapturer->Open(_rect.left, _rect.top, _width, _height)); 52 | break; 53 | } 54 | } 55 | return true; 56 | } 57 | 58 | AVFrame* VideoCapturer::GetFrame() 59 | { 60 | switch (_usingMethod) { 61 | case WGC: // 该捕获方式自动就将鼠标画好了,我们不需要再自己画鼠标 62 | return _wgcCapturer->GetFrame(); 63 | case DXGI: { 64 | auto hdc = _dxgiCapturer->GetHdc(); 65 | if (_isDrawCursor && hdc) { 66 | _DrawCursor(hdc); 67 | } 68 | return _dxgiCapturer->GetFrame(); 69 | } 70 | default: // GDI 71 | auto hdc = _gdiCapturer->GetHdc(_borderWidth, _borderHeight); 72 | if (_isDrawCursor && hdc) { 73 | _DrawCursor(hdc); 74 | } 75 | return _gdiCapturer->GetFrame(); 76 | } 77 | } 78 | 79 | void VideoCapturer::SetDrawCursor(bool isDrawCursor) 80 | { 81 | _isDrawCursor = isDrawCursor; 82 | if (_usingMethod == WGC) { 83 | _wgcCapturer->SetDrawCursor(_isDrawCursor); 84 | } 85 | } 86 | 87 | void VideoCapturer::Close() 88 | { 89 | Free(_dxgiCapturer, [this] { _dxgiCapturer->Close(); delete _dxgiCapturer; }); 90 | Free(_gdiCapturer, [this] { _gdiCapturer->Close(); delete _gdiCapturer; }); 91 | Free(_wgcCapturer, [this] { _wgcCapturer->Close(); }); 92 | } 93 | 94 | VideoCapturer::~VideoCapturer() 95 | { 96 | Close(); 97 | } 98 | 99 | int VideoCapturer::GetWidth() const 100 | { 101 | return _width; 102 | } 103 | int VideoCapturer::GetHeight() const 104 | { 105 | return _height; 106 | } 107 | 108 | bool VideoCapturer::_GetHwndSize(HWND hwnd) 109 | { 110 | RECT rect; 111 | __CheckBool(GetClientRect(hwnd, &rect)); 112 | _rect = rect; 113 | _width = (rect.right - rect.left); 114 | _height = (rect.bottom - rect.top); 115 | __CheckBool(GetWindowRect(hwnd, &rect)); 116 | _borderHeight = rect.bottom - rect.top - _height; 117 | _borderWidth = rect.right - rect.left - _width; 118 | if (_borderHeight < 0) { 119 | _borderHeight = 0; 120 | } 121 | if (_borderWidth < 0) { 122 | _borderWidth = 0; 123 | } 124 | return true; 125 | } 126 | 127 | void 
VideoCapturer::_DrawCursor(HDC hdc) 128 | { 129 | CURSORINFO ci; 130 | ci.cbSize = sizeof(CURSORINFO); 131 | __CheckNo(GetCursorInfo(&ci)); 132 | int cursorX = ci.ptScreenPos.x; 133 | int cursorY = ci.ptScreenPos.y; 134 | 135 | if (cursorX > _rect.right || cursorX < _rect.left 136 | || cursorY > _rect.bottom || cursorY < _rect.top) { 137 | return; // 超出显示范围 138 | } 139 | 140 | if (ci.flags == CURSOR_SHOWING) { 141 | // 将光标画到屏幕所在位置 142 | int x = cursorX - _rect.left; 143 | int y = cursorY - _rect.top; 144 | __CheckNo(DrawIconEx(hdc, x, y, ci.hCursor, 0, 0, 0, NULL, DI_NORMAL | DI_COMPAT)); 145 | } 146 | } -------------------------------------------------------------------------------- /src/capturer/video_capturer.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-01 18:10:33 5 | * @Description: 6 | */ 7 | #ifndef __AV_CAPTURER_H__ 8 | #define __AV_CAPTURER_H__ 9 | 10 | #include "dxgi_capturer.h" 11 | #include "gdi_capturer.h" 12 | #include "wgc_capturer.h" 13 | 14 | class VideoCapturer { 15 | public: 16 | enum Method { 17 | GDI, 18 | DXGI, 19 | WGC 20 | }; 21 | 22 | enum Type { 23 | WINDOW, 24 | MONITOR 25 | }; 26 | ~VideoCapturer(); 27 | bool Open(HWND hwnd, Method method); 28 | bool Open(int monitorIdx, Method method); 29 | AVFrame* GetFrame(); 30 | void SetDrawCursor(bool isDrawCursor); 31 | void Close(); 32 | int GetWidth() const; 33 | int GetHeight() const; 34 | Method GetMethod() const { return _usingMethod; } 35 | 36 | private: 37 | bool _GetHwndSize(HWND hwnd); 38 | void _DrawCursor(HDC hdc); 39 | Method _usingMethod = WGC; 40 | RECT _rect; 41 | Type _type = MONITOR; 42 | DxgiCapturer* _dxgiCapturer = nullptr; 43 | GdiCapturer* _gdiCapturer = nullptr; 44 | WgcCapturer* _wgcCapturer = nullptr; 45 | int _width = 0; 46 | int _height = 0; 47 | int _borderHeight = 0; 48 | int _borderWidth = 0; 49 | HWND _srcHwnd = nullptr; 50 | bool _isDrawCursor = true; 51 
| }; 52 | #endif 53 | -------------------------------------------------------------------------------- /src/capturer/wgc/App.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-24 16:28:40 5 | * @Description: 6 | */ 7 | // D3D 8 | #include 9 | #include 10 | #include 11 | #include 12 | 13 | #include "pch.h" 14 | #include "App.h" 15 | #include "basic/frame.h" 16 | 17 | using namespace winrt; 18 | using namespace Windows::System; 19 | using namespace Windows::Foundation; 20 | using namespace Windows::UI; 21 | using namespace Windows::UI::Composition; 22 | using namespace Windows::Graphics::Capture; 23 | 24 | void App::Initialize( 25 | ContainerVisual const& root) 26 | { 27 | auto queue = DispatcherQueue::GetForCurrentThread(); 28 | 29 | m_compositor = root.Compositor(); 30 | m_root = m_compositor.CreateContainerVisual(); 31 | m_content = m_compositor.CreateSpriteVisual(); 32 | m_brush = m_compositor.CreateSurfaceBrush(); 33 | 34 | m_root.RelativeSizeAdjustment({1, 1}); 35 | root.Children().InsertAtTop(m_root); 36 | 37 | m_content.AnchorPoint({0.5f, 0.5f}); 38 | m_content.RelativeOffsetAdjustment({0.5f, 0.5f, 0}); 39 | m_content.RelativeSizeAdjustment({1, 1}); 40 | m_content.Size({-80, -80}); 41 | m_content.Brush(m_brush); 42 | m_brush.HorizontalAlignmentRatio(0.5f); 43 | m_brush.VerticalAlignmentRatio(0.5f); 44 | m_brush.Stretch(CompositionStretch::Uniform); 45 | auto shadow = m_compositor.CreateDropShadow(); 46 | shadow.Mask(m_brush); 47 | m_content.Shadow(shadow); 48 | m_root.Children().InsertAtTop(m_content); 49 | 50 | auto d3dDevice = CreateD3DDevice(); 51 | auto dxgiDevice = d3dDevice.as(); 52 | m_device = CreateDirect3DDevice(dxgiDevice.get()); 53 | } 54 | 55 | void App::Close() 56 | { 57 | if (m_capture) { 58 | m_capture->Close(); 59 | delete m_capture; 60 | m_capture = nullptr; 61 | } 62 | } 63 | 64 | bool App::StartCaptureWindow(HWND hwnd, int width, 
int height) 65 | { 66 | Close(); 67 | auto item = CreateCaptureItemForWindow(hwnd); 68 | __CheckBool(item); 69 | m_capture = new SimpleCapture(m_device, item, width, height); 70 | auto surface = m_capture->CreateSurface(m_compositor); 71 | m_brush.Surface(surface); 72 | m_capture->StartCapture(); 73 | return true; 74 | } 75 | 76 | void App::SetDrawCursor(bool isDrawCursor) 77 | { 78 | if (m_capture == nullptr) { 79 | return; 80 | } 81 | m_capture->SetDrawCursor(isDrawCursor); 82 | } 83 | 84 | bool App::StartCaptureMonitor(HMONITOR monitor, int width, int height) 85 | { 86 | Close(); 87 | auto item = CreateCaptureItemForMonitor(monitor); 88 | __CheckBool(item); 89 | m_capture = new SimpleCapture(m_device, item, width, height); 90 | auto surface = m_capture->CreateSurface(m_compositor); 91 | m_brush.Surface(surface); 92 | m_capture->StartCapture(); 93 | return true; 94 | } -------------------------------------------------------------------------------- /src/capturer/wgc/App.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-24 16:28:40 5 | * @Description: 6 | */ 7 | #pragma once 8 | 9 | #include "basic/frame.h" 10 | #include "SimpleCapture.h" 11 | 12 | class App { 13 | public: 14 | App() { } 15 | ~App() { } 16 | 17 | void Initialize( 18 | winrt::Windows::UI::Composition::ContainerVisual const& root); 19 | 20 | bool StartCaptureWindow(HWND hwnd, int width, int height); 21 | bool StartCaptureMonitor(HMONITOR monitor, int width, int height); 22 | void SetDrawCursor(bool isDrawCursor); 23 | void Close(); 24 | AVFrame* GetFrame() { return m_capture->GetFrame(); } 25 | 26 | private: 27 | winrt::Windows::UI::Composition::Compositor m_compositor {nullptr}; 28 | winrt::Windows::UI::Composition::ContainerVisual m_root {nullptr}; 29 | winrt::Windows::UI::Composition::SpriteVisual m_content {nullptr}; 30 | winrt::Windows::UI::Composition::CompositionSurfaceBrush m_brush 
{nullptr}; 31 | 32 | winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice m_device {nullptr}; 33 | SimpleCapture* m_capture = nullptr; 34 | }; -------------------------------------------------------------------------------- /src/capturer/wgc/SimpleCapture.cpp: -------------------------------------------------------------------------------- 1 | 2 | // D3D 3 | #include 4 | #include 5 | #include 6 | #include 7 | 8 | #include "pch.h" 9 | #include "SimpleCapture.h" 10 | #include "basic/basic.h" 11 | 12 | using namespace winrt; 13 | using namespace Windows; 14 | using namespace Windows::Foundation; 15 | using namespace Windows::System; 16 | using namespace Windows::Graphics; 17 | using namespace Windows::Graphics::Capture; 18 | using namespace Windows::Graphics::DirectX; 19 | using namespace Windows::Graphics::DirectX::Direct3D11; 20 | using namespace Windows::Foundation::Numerics; 21 | using namespace Windows::UI; 22 | using namespace Windows::UI::Composition; 23 | 24 | #undef min 25 | #undef max 26 | 27 | SimpleCapture::SimpleCapture( 28 | IDirect3DDevice const& device, 29 | GraphicsCaptureItem const& item, 30 | int width, int height) 31 | { 32 | m_item = item; 33 | m_device = device; 34 | 35 | // Set up 36 | auto d3dDevice = GetDXGIInterfaceFromObject(m_device); 37 | d3dDevice->GetImmediateContext(m_d3dContext.put()); 38 | auto size = m_item.Size(); 39 | 40 | m_swapChain = CreateDXGISwapChain( 41 | d3dDevice, 42 | static_cast(size.Width), 43 | static_cast(size.Height), 44 | static_cast(DirectXPixelFormat::B8G8R8A8UIntNormalized), 45 | 2); 46 | 47 | // Create framepool, define pixel format (DXGI_FORMAT_B8G8R8A8_UNORM), and frame size. 
48 | m_framePool = Direct3D11CaptureFramePool::Create( 49 | m_device, 50 | DirectXPixelFormat::B8G8R8A8UIntNormalized, 51 | 2, 52 | size); 53 | 54 | m_session = m_framePool.CreateCaptureSession(m_item); 55 | m_lastSize = size; 56 | m_frameArrived = m_framePool.FrameArrived(auto_revoke, {this, &SimpleCapture::OnFrameArrived}); 57 | 58 | // Set ColorSpace 59 | D3D11_VIDEO_PROCESSOR_COLOR_SPACE inputColorSpace; 60 | inputColorSpace.Usage = 1; 61 | inputColorSpace.RGB_Range = 0; 62 | inputColorSpace.YCbCr_Matrix = 1; 63 | inputColorSpace.YCbCr_xvYCC = 0; 64 | inputColorSpace.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_0_255; 65 | 66 | D3D11_VIDEO_PROCESSOR_COLOR_SPACE outputColorSpace; 67 | outputColorSpace.Usage = 0; 68 | outputColorSpace.RGB_Range = 0; 69 | outputColorSpace.YCbCr_Matrix = 1; 70 | outputColorSpace.YCbCr_xvYCC = 0; 71 | outputColorSpace.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_16_235; 72 | m_rgbToNv12.Open(d3dDevice.get(), m_d3dContext.get(), inputColorSpace, outputColorSpace); 73 | m_nv12Frame = Frame::Alloc(AV_PIX_FMT_NV12, width, height); 74 | m_xrgbFrame = Frame::Alloc(AV_PIX_FMT_BGR0, width, height); 75 | __CheckNo(m_nv12Frame); 76 | __CheckNo(m_xrgbFrame); 77 | m_isCapture = true; 78 | m_cnt = 5; 79 | } 80 | 81 | // Start sending capture frames 82 | void SimpleCapture::StartCapture() 83 | { 84 | CheckClosed(); 85 | m_session.StartCapture(); 86 | } 87 | 88 | ICompositionSurface SimpleCapture::CreateSurface( 89 | Compositor const& compositor) 90 | { 91 | CheckClosed(); 92 | return CreateCompositionSurfaceForSwapChain(compositor, m_swapChain.get()); 93 | } 94 | 95 | // Process captured frames 96 | void SimpleCapture::Close() 97 | { 98 | auto expected = false; 99 | if (m_closed.compare_exchange_strong(expected, true)) { 100 | m_frameArrived.revoke(); 101 | m_framePool.Close(); 102 | m_session.Close(); 103 | m_swapChain = nullptr; 104 | m_framePool = nullptr; 105 | m_session = nullptr; 106 | m_item = nullptr; 107 | } 108 | 
m_nv12Buffers.Clear(); 109 | m_xrgbBuffers.Clear(); 110 | m_rgbToNv12.Close(); 111 | Free(m_nv12Frame, [this] { av_frame_free(&m_nv12Frame); }); 112 | Free(m_xrgbFrame, [this] { av_frame_free(&m_xrgbFrame); }); 113 | } 114 | 115 | void SimpleCapture::OnFrameArrived( 116 | Direct3D11CaptureFramePool const& sender, 117 | winrt::Windows::Foundation::IInspectable const&) 118 | { 119 | auto newSize = false; 120 | auto frame = sender.TryGetNextFrame(); 121 | auto frameContentSize = frame.ContentSize(); 122 | if (frameContentSize.Width != m_lastSize.Width || frameContentSize.Height != m_lastSize.Height) { 123 | // The thing we have been capturing has changed size. 124 | // We need to resize our swap chain first, then blit the pixels. 125 | // After we do that, retire the frame and then recreate our frame pool. 126 | newSize = true; 127 | m_lastSize = frameContentSize; 128 | m_swapChain->ResizeBuffers( 129 | 2, 130 | static_cast(m_lastSize.Width), 131 | static_cast(m_lastSize.Height), 132 | static_cast(DirectXPixelFormat::B8G8R8A8UIntNormalized), 133 | 0); 134 | m_nv12Buffers.Clear(); 135 | m_xrgbBuffers.Clear(); 136 | } 137 | if (m_cnt > 0) { 138 | --m_cnt; 139 | } 140 | m_isCapture = m_isCapture && !newSize || m_cnt > 0; 141 | if (m_isCapture) { 142 | auto frameSurface = GetDXGIInterfaceFromObject(frame.Surface()); 143 | D3D11_TEXTURE2D_DESC desc; 144 | frameSurface->GetDesc(&desc); 145 | auto d3dDevice = GetDXGIInterfaceFromObject(m_device); 146 | 147 | // 首先尝试创建 NV12 纹理 148 | auto tmpFormat = desc.Format; 149 | desc.Format = DXGI_FORMAT_NV12; 150 | if (GenNv12Frame(d3dDevice.get(), m_d3dContext.get(), desc, frameSurface.get(), 151 | m_nv12Buffers, m_nv12Frame, m_rgbToNv12)) { 152 | m_pixType = _PixType::NV12; 153 | } else { 154 | desc.Format = tmpFormat; 155 | GenRgbFrame(d3dDevice.get(), m_d3dContext.get(), desc, frameSurface.get(), 156 | m_xrgbBuffers, m_xrgbFrame); 157 | m_pixType = _PixType::RGB; 158 | } 159 | } 160 | 161 | // com_ptr backBuffer; 162 | // 
check_hresult(m_swapChain->GetBuffer(0, guid_of(), backBuffer.put_void())); 163 | // m_d3dContext->CopyResource(backBuffer.get(), m_bufferFiller.GetMap()); 164 | 165 | // DXGI_PRESENT_PARAMETERS presentParameters = {0}; 166 | // auto hr = m_swapChain->Present1(1, 0, &presentParameters); 167 | 168 | if (newSize) { 169 | m_framePool.Recreate( 170 | m_device, 171 | DirectXPixelFormat::B8G8R8A8UIntNormalized, 172 | 2, 173 | m_lastSize); 174 | } 175 | } 176 | -------------------------------------------------------------------------------- /src/capturer/wgc/SimpleCapture.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-24 16:28:40 5 | * @Description: 6 | */ 7 | #pragma once 8 | 9 | #include 10 | #include "d3d/gen_frame.h" 11 | 12 | class SimpleCapture { 13 | public: 14 | SimpleCapture( 15 | winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice const& device, 16 | winrt::Windows::Graphics::Capture::GraphicsCaptureItem const& item, 17 | int width, int height); 18 | ~SimpleCapture() { Close(); } 19 | 20 | void StartCapture(); 21 | winrt::Windows::UI::Composition::ICompositionSurface CreateSurface( 22 | winrt::Windows::UI::Composition::Compositor const& compositor); 23 | 24 | void SetDrawCursor(bool isDrawCursor) { m_session.IsCursorCaptureEnabled(isDrawCursor); } 25 | 26 | void Close(); 27 | 28 | AVFrame* GetFrame() const noexcept { return m_pixType == NV12 ? 
m_nv12Frame : m_xrgbFrame; } 29 | 30 | private: 31 | void OnFrameArrived( 32 | winrt::Windows::Graphics::Capture::Direct3D11CaptureFramePool const& sender, 33 | winrt::Windows::Foundation::IInspectable const& args); 34 | 35 | void CheckClosed() 36 | { 37 | if (m_closed.load() == true) { 38 | throw winrt::hresult_error(RO_E_CLOSED); 39 | } 40 | } 41 | 42 | private: 43 | enum _PixType { 44 | NV12, 45 | RGB 46 | }; 47 | 48 | winrt::Windows::Graphics::Capture::GraphicsCaptureItem m_item {nullptr}; 49 | winrt::Windows::Graphics::Capture::Direct3D11CaptureFramePool m_framePool {nullptr}; 50 | winrt::Windows::Graphics::Capture::GraphicsCaptureSession m_session {nullptr}; 51 | winrt::Windows::Graphics::SizeInt32 m_lastSize; 52 | 53 | winrt::Windows::Graphics::DirectX::Direct3D11::IDirect3DDevice m_device {nullptr}; 54 | winrt::com_ptr m_swapChain {nullptr}; 55 | winrt::com_ptr m_d3dContext {nullptr}; 56 | 57 | std::atomic m_closed = false; 58 | winrt::Windows::Graphics::Capture::Direct3D11CaptureFramePool::FrameArrived_revoker m_frameArrived; 59 | AVFrame* m_xrgbFrame = nullptr; 60 | AVFrame* m_nv12Frame = nullptr; 61 | BufferFiller m_xrgbBuffers; 62 | BufferFiller m_nv12Buffers; 63 | D3dConverter m_rgbToNv12; 64 | _PixType m_pixType; 65 | bool m_isCapture = true; 66 | int m_cnt = 5; 67 | }; -------------------------------------------------------------------------------- /src/capturer/wgc/capture.interop.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-24 16:28:40 5 | * @Description: 6 | */ 7 | #pragma once 8 | #include 9 | #include 10 | #include 11 | 12 | inline auto CreateCaptureItemForWindow(HWND hwnd) 13 | { 14 | auto activation_factory = winrt::get_activation_factory(); 15 | auto interop_factory = activation_factory.as(); 16 | winrt::Windows::Graphics::Capture::GraphicsCaptureItem item = {nullptr}; 17 | interop_factory->CreateForWindow(hwnd, winrt::guid_of(), 
reinterpret_cast(winrt::put_abi(item))); 18 | return item; 19 | } 20 | 21 | inline auto CreateCaptureItemForMonitor(HMONITOR monitor) 22 | { 23 | auto activation_factory = winrt::get_activation_factory(); 24 | auto interop_factory = activation_factory.as(); 25 | winrt::Windows::Graphics::Capture::GraphicsCaptureItem item = {nullptr}; 26 | interop_factory->CreateForMonitor(monitor, winrt::guid_of(), reinterpret_cast(winrt::put_abi(item))); 27 | return item; 28 | } -------------------------------------------------------------------------------- /src/capturer/wgc/composition.interop.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include 3 | #include 4 | #include 5 | 6 | inline auto CreateCompositionGraphicsDevice( 7 | winrt::Windows::UI::Composition::Compositor const& compositor, 8 | ::IUnknown* device) 9 | { 10 | winrt::Windows::UI::Composition::CompositionGraphicsDevice graphicsDevice{ nullptr }; 11 | auto compositorInterop = compositor.as(); 12 | winrt::com_ptr graphicsInterop; 13 | winrt::check_hresult(compositorInterop->CreateGraphicsDevice(device, graphicsInterop.put())); 14 | winrt::check_hresult(graphicsInterop->QueryInterface(winrt::guid_of(), 15 | reinterpret_cast(winrt::put_abi(graphicsDevice)))); 16 | return graphicsDevice; 17 | } 18 | 19 | inline void ResizeSurface( 20 | winrt::Windows::UI::Composition::CompositionDrawingSurface const& surface, 21 | winrt::Windows::Foundation::Size const& size) 22 | { 23 | auto surfaceInterop = surface.as(); 24 | SIZE newSize = {}; 25 | newSize.cx = static_cast(std::round(size.Width)); 26 | newSize.cy = static_cast(std::round(size.Height)); 27 | winrt::check_hresult(surfaceInterop->Resize(newSize)); 28 | } 29 | 30 | inline auto SurfaceBeginDraw( 31 | winrt::Windows::UI::Composition::CompositionDrawingSurface const& surface) 32 | { 33 | auto surfaceInterop = surface.as(); 34 | winrt::com_ptr context; 35 | POINT offset = {}; 36 | 
winrt::check_hresult(surfaceInterop->BeginDraw(nullptr, __uuidof(ID2D1DeviceContext), context.put_void(), &offset)); 37 | context->SetTransform(D2D1::Matrix3x2F::Translation((FLOAT)offset.x,(FLOAT) offset.y)); 38 | return context; 39 | } 40 | 41 | inline void SurfaceEndDraw( 42 | winrt::Windows::UI::Composition::CompositionDrawingSurface const& surface) 43 | { 44 | auto surfaceInterop = surface.as(); 45 | winrt::check_hresult(surfaceInterop->EndDraw()); 46 | } 47 | 48 | inline auto CreateCompositionSurfaceForSwapChain( 49 | winrt::Windows::UI::Composition::Compositor const& compositor, 50 | ::IUnknown* swapChain) 51 | { 52 | winrt::Windows::UI::Composition::ICompositionSurface surface{ nullptr }; 53 | auto compositorInterop = compositor.as(); 54 | winrt::com_ptr surfaceInterop; 55 | winrt::check_hresult(compositorInterop->CreateCompositionSurfaceForSwapChain(swapChain, surfaceInterop.put())); 56 | winrt::check_hresult(surfaceInterop->QueryInterface(winrt::guid_of(), 57 | reinterpret_cast(winrt::put_abi(surface)))); 58 | return surface; 59 | } -------------------------------------------------------------------------------- /src/capturer/wgc/d3dHelpers.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include "composition.interop.h" 3 | 4 | struct SurfaceContext 5 | { 6 | public: 7 | SurfaceContext(std::nullptr_t) {} 8 | SurfaceContext( 9 | winrt::Windows::UI::Composition::CompositionDrawingSurface surface) 10 | { 11 | m_surface = surface; 12 | m_d2dContext = SurfaceBeginDraw(m_surface); 13 | } 14 | ~SurfaceContext() 15 | { 16 | SurfaceEndDraw(m_surface); 17 | m_d2dContext = nullptr; 18 | m_surface = nullptr; 19 | } 20 | 21 | winrt::com_ptr GetDeviceContext() { return m_d2dContext; } 22 | 23 | private: 24 | winrt::com_ptr m_d2dContext; 25 | winrt::Windows::UI::Composition::CompositionDrawingSurface m_surface{ nullptr }; 26 | }; 27 | 28 | struct D3D11DeviceLock 29 | { 30 | public: 31 | 
D3D11DeviceLock(std::nullopt_t) {} 32 | D3D11DeviceLock(ID3D11Multithread* pMultithread) 33 | { 34 | m_multithread.copy_from(pMultithread); 35 | m_multithread->Enter(); 36 | } 37 | ~D3D11DeviceLock() 38 | { 39 | m_multithread->Leave(); 40 | m_multithread = nullptr; 41 | } 42 | private: 43 | winrt::com_ptr m_multithread; 44 | }; 45 | 46 | inline auto 47 | CreateWICFactory() 48 | { 49 | winrt::com_ptr wicFactory; 50 | winrt::check_hresult( 51 | ::CoCreateInstance( 52 | CLSID_WICImagingFactory, 53 | nullptr, 54 | CLSCTX_INPROC_SERVER, 55 | winrt::guid_of(), 56 | wicFactory.put_void())); 57 | 58 | return wicFactory; 59 | } 60 | 61 | inline auto 62 | CreateD2DDevice( 63 | winrt::com_ptr const& factory, 64 | winrt::com_ptr const& device) 65 | { 66 | winrt::com_ptr result; 67 | winrt::check_hresult(factory->CreateDevice(device.as().get(), result.put())); 68 | return result; 69 | } 70 | 71 | inline auto 72 | CreateD3DDevice( 73 | D3D_DRIVER_TYPE const type, 74 | winrt::com_ptr& device) 75 | { 76 | WINRT_ASSERT(!device); 77 | 78 | UINT flags = D3D11_CREATE_DEVICE_BGRA_SUPPORT; 79 | 80 | //#ifdef _DEBUG 81 | // flags |= D3D11_CREATE_DEVICE_DEBUG; 82 | //#endif 83 | 84 | return D3D11CreateDevice( 85 | nullptr, 86 | type, 87 | nullptr, 88 | flags, 89 | nullptr, 0, 90 | D3D11_SDK_VERSION, 91 | device.put(), 92 | nullptr, 93 | nullptr); 94 | } 95 | 96 | inline auto 97 | CreateD3DDevice() 98 | { 99 | winrt::com_ptr device; 100 | HRESULT hr = CreateD3DDevice(D3D_DRIVER_TYPE_HARDWARE, device); 101 | 102 | if (DXGI_ERROR_UNSUPPORTED == hr) 103 | { 104 | hr = CreateD3DDevice(D3D_DRIVER_TYPE_WARP, device); 105 | } 106 | 107 | winrt::check_hresult(hr); 108 | return device; 109 | } 110 | 111 | inline auto 112 | CreateD2DFactory() 113 | { 114 | D2D1_FACTORY_OPTIONS options{}; 115 | 116 | //#ifdef _DEBUG 117 | // options.debugLevel = D2D1_DEBUG_LEVEL_INFORMATION; 118 | //#endif 119 | 120 | winrt::com_ptr factory; 121 | 122 | winrt::check_hresult(D2D1CreateFactory( 123 | 
D2D1_FACTORY_TYPE_SINGLE_THREADED, 124 | options, 125 | factory.put())); 126 | 127 | return factory; 128 | } 129 | 130 | inline auto 131 | CreateDXGISwapChain( 132 | winrt::com_ptr const& device, 133 | const DXGI_SWAP_CHAIN_DESC1* desc) 134 | { 135 | auto dxgiDevice = device.as(); 136 | winrt::com_ptr adapter; 137 | winrt::check_hresult(dxgiDevice->GetParent(winrt::guid_of(), adapter.put_void())); 138 | winrt::com_ptr factory; 139 | winrt::check_hresult(adapter->GetParent(winrt::guid_of(), factory.put_void())); 140 | 141 | winrt::com_ptr swapchain; 142 | winrt::check_hresult(factory->CreateSwapChainForComposition( 143 | device.get(), 144 | desc, 145 | nullptr, 146 | swapchain.put())); 147 | 148 | return swapchain; 149 | } 150 | 151 | inline auto 152 | CreateDXGISwapChain( 153 | winrt::com_ptr const& device, 154 | uint32_t width, 155 | uint32_t height, 156 | DXGI_FORMAT format, 157 | uint32_t bufferCount) 158 | { 159 | DXGI_SWAP_CHAIN_DESC1 desc = {}; 160 | desc.Width = width; 161 | desc.Height = height; 162 | desc.Format = format; 163 | desc.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT; 164 | desc.SampleDesc.Count = 1; 165 | desc.SampleDesc.Quality = 0; 166 | desc.BufferCount = bufferCount; 167 | desc.Scaling = DXGI_SCALING_STRETCH; 168 | desc.SwapEffect = DXGI_SWAP_EFFECT_FLIP_SEQUENTIAL; 169 | desc.AlphaMode = DXGI_ALPHA_MODE_PREMULTIPLIED; 170 | 171 | return CreateDXGISwapChain(device, &desc); 172 | } -------------------------------------------------------------------------------- /src/capturer/wgc/direct3d11.interop.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include 3 | 4 | extern "C" 5 | { 6 | HRESULT __stdcall CreateDirect3D11DeviceFromDXGIDevice(::IDXGIDevice* dxgiDevice, 7 | ::IInspectable** graphicsDevice); 8 | 9 | HRESULT __stdcall CreateDirect3D11SurfaceFromDXGISurface(::IDXGISurface* dgxiSurface, 10 | ::IInspectable** graphicsSurface); 11 | } 12 | 13 | struct 
__declspec(uuid("A9B3D012-3DF2-4EE3-B8D1-8695F457D3C1")) 14 | IDirect3DDxgiInterfaceAccess : ::IUnknown 15 | { 16 | virtual HRESULT __stdcall GetInterface(GUID const& id, void** object) = 0; 17 | }; 18 | 19 | inline auto CreateDirect3DDevice(IDXGIDevice* dxgi_device) 20 | { 21 | winrt::com_ptr<::IInspectable> d3d_device; 22 | winrt::check_hresult(CreateDirect3D11DeviceFromDXGIDevice(dxgi_device, d3d_device.put())); 23 | return d3d_device.as(); 24 | } 25 | 26 | inline auto CreateDirect3DSurface(IDXGISurface* dxgi_surface) 27 | { 28 | winrt::com_ptr<::IInspectable> d3d_surface; 29 | winrt::check_hresult(CreateDirect3D11SurfaceFromDXGISurface(dxgi_surface, d3d_surface.put())); 30 | return d3d_surface.as(); 31 | } 32 | 33 | template 34 | auto GetDXGIInterfaceFromObject(winrt::Windows::Foundation::IInspectable const& object) 35 | { 36 | auto access = object.as(); 37 | winrt::com_ptr result; 38 | winrt::check_hresult(access->GetInterface(winrt::guid_of(), result.put_void())); 39 | return result; 40 | } -------------------------------------------------------------------------------- /src/capturer/wgc/pch.cpp: -------------------------------------------------------------------------------- 1 | 2 | // D3D 3 | #include 4 | #include 5 | #include 6 | #include 7 | 8 | 9 | #include "pch.h" -------------------------------------------------------------------------------- /src/capturer/wgc/pch.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-24 16:28:40 5 | * @Description: 6 | */ 7 | #pragma once 8 | #define UNICODE 9 | 10 | #include 11 | #include 12 | 13 | // WinRT 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | #include 23 | 24 | #include 25 | #include 26 | 27 | // STL 28 | #include 29 | #include 30 | 31 | // D3D 32 | #include 33 | #include 34 | #include 35 | #include 36 | 37 | // Helpers 38 | #include 
"composition.interop.h" 39 | #include "d3dHelpers.h" 40 | #include "direct3d11.interop.h" 41 | #include "capture.interop.h" -------------------------------------------------------------------------------- /src/capturer/wgc/winrt.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-24 16:28:40 5 | * @Description: 6 | */ 7 | 8 | #include "pch.h" 9 | #include "App.h" 10 | #include "SimpleCapture.h" 11 | #include 12 | 13 | using namespace winrt; 14 | using namespace Windows::UI; 15 | using namespace Windows::UI::Composition; 16 | using namespace Windows::UI::Composition::Desktop; 17 | 18 | // Direct3D11CaptureFramePool requires a DispatcherQueue 19 | winrt::Windows::System::DispatcherQueueController CreateDispatcherQueueController() 20 | { 21 | namespace abi = ABI::Windows::System; 22 | 23 | DispatcherQueueOptions options { 24 | sizeof(DispatcherQueueOptions), 25 | DQTYPE_THREAD_CURRENT, 26 | DQTAT_COM_STA}; 27 | 28 | Windows::System::DispatcherQueueController controller {nullptr}; 29 | check_hresult(CreateDispatcherQueueController(options, reinterpret_cast(put_abi(controller)))); 30 | return controller; 31 | } 32 | 33 | DesktopWindowTarget CreateDesktopWindowTarget(Compositor const& compositor, HWND window) 34 | { 35 | namespace abi = ABI::Windows::UI::Composition::Desktop; 36 | auto interop = compositor.as(); 37 | DesktopWindowTarget target {nullptr}; 38 | check_hresult(interop->CreateDesktopWindowTarget(window, true, reinterpret_cast(put_abi(target)))); 39 | return target; 40 | } 41 | -------------------------------------------------------------------------------- /src/capturer/wgc/winrt.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | // WinRT 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | 14 | #include 15 | #include 16 | 
17 | using namespace winrt; 18 | using namespace Windows::UI; 19 | using namespace Windows::UI::Composition; 20 | using namespace Windows::UI::Composition::Desktop; 21 | 22 | // Direct3D11CaptureFramePool requires a DispatcherQueue 23 | winrt::Windows::System::DispatcherQueueController CreateDispatcherQueueController(); 24 | DesktopWindowTarget CreateDesktopWindowTarget(Compositor const& compositor, HWND window); 25 | -------------------------------------------------------------------------------- /src/capturer/wgc_capturer.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-26 10:10:47 5 | * @Description: 6 | */ 7 | 8 | #include "wgc_capturer.h" 9 | #include 10 | 11 | winrt::Windows::System::DispatcherQueue* WgcCapturer::queuePtr = nullptr; 12 | winrt::Windows::UI::Composition::ContainerVisual* WgcCapturer::rootPtr = nullptr; 13 | std::list WgcCapturer::_capturers; 14 | QWidget* __widget = nullptr; 15 | 16 | void WgcCapturer::Init() 17 | { 18 | if (queuePtr != nullptr) { 19 | return; 20 | } 21 | // Init COM 22 | init_apartment(apartment_type::single_threaded); 23 | // Create a DispatcherQueue for our thread 24 | static auto controller = CreateDispatcherQueueController(); 25 | // Initialize Composition 26 | static auto compositor = Compositor(); 27 | __widget = new QWidget; 28 | // __widget->resize(800, 600); 29 | // __widget->show(); 30 | static auto target = CreateDesktopWindowTarget(compositor, (HWND)__widget->winId()); 31 | static auto root = compositor.CreateContainerVisual(); 32 | root.RelativeSizeAdjustment({1.0f, 1.0f}); 33 | target.Root(root); 34 | 35 | // Enqueue our capture work on the dispatcher 36 | static auto queue = controller.DispatcherQueue(); 37 | queuePtr = &queue; 38 | rootPtr = &root; 39 | // 首先 New 一个 Capturer 备用 40 | New(); 41 | } 42 | 43 | void WgcCapturer::Uninit() 44 | { 45 | delete __widget; 46 | while (!_capturers.empty()) { 47 | 
delete *_capturers.begin(); 48 | _capturers.erase(_capturers.begin()); 49 | } 50 | } 51 | 52 | WgcCapturer* WgcCapturer::New() 53 | { 54 | // 将上一个 new 好的对象返回,并重新预备一个新的 55 | if (_capturers.empty()) { 56 | _capturers.push_back(new WgcCapturer); 57 | } 58 | return *(--_capturers.end()); 59 | } 60 | 61 | void WgcCapturer::Delete(WgcCapturer* ptr) 62 | { 63 | // auto iter = std::find(_capturers.begin(), _capturers.end(), ptr); 64 | // if (iter == _capturers.end()) { 65 | // return; 66 | // } 67 | // if (*iter != nullptr) { 68 | // delete *iter; 69 | // } 70 | // _capturers.erase(iter); 71 | } 72 | 73 | WgcCapturer::WgcCapturer() 74 | { 75 | _app = new App; 76 | _isAppInit = false; 77 | auto success = queuePtr->TryEnqueue([=]() -> void { 78 | _app->Initialize(*rootPtr); 79 | _isAppInit = true; 80 | }); 81 | WINRT_VERIFY(success); 82 | } 83 | 84 | WgcCapturer::~WgcCapturer() 85 | { 86 | if (_app) { 87 | delete _app; 88 | _app = nullptr; 89 | } 90 | } 91 | 92 | bool WgcCapturer::StartCapturerMonitor(HMONITOR monitor, int width, int height) 93 | { 94 | return _app->StartCaptureMonitor(monitor, width, height); 95 | } 96 | 97 | bool WgcCapturer::StartCapturerWindow(HWND hwnd, int width, int height) 98 | { 99 | return _app->StartCaptureWindow(hwnd, width, height); 100 | } 101 | -------------------------------------------------------------------------------- /src/capturer/wgc_capturer.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-24 18:21:15 5 | * @Description: 6 | */ 7 | 8 | #ifndef __WGC_CAPTURER_H__ 9 | #define __WGC_CAPTURER_H__ 10 | #include "wgc/pch.h" 11 | #include "wgc/winrt.h" 12 | #include "wgc/App.h" 13 | #include "basic/frame.h" 14 | #include 15 | 16 | class WgcCapturer { 17 | public: 18 | bool StartCapturerWindow(HWND hwnd, int width, int height); 19 | bool StartCapturerMonitor(HMONITOR monitor, int width, int height); 20 | void SetDrawCursor(bool 
isDrawCursor) { _app->SetDrawCursor(isDrawCursor); } 21 | static void Init(); 22 | static WgcCapturer* New(); 23 | static void Delete(WgcCapturer* ptr); 24 | static void Uninit(); 25 | void Close() 26 | { 27 | if (_app != nullptr) { 28 | _app->Close(); 29 | } 30 | } 31 | AVFrame* GetFrame() { return _app->GetFrame(); } 32 | 33 | private: 34 | WgcCapturer(); 35 | ~WgcCapturer(); 36 | App* _app = nullptr; 37 | bool _isAppInit = false; 38 | static std::list _capturers; 39 | static winrt::Windows::System::DispatcherQueue* queuePtr; 40 | static winrt::Windows::UI::Composition::ContainerVisual* rootPtr; 41 | }; 42 | #endif -------------------------------------------------------------------------------- /src/d3d/buffer_filler.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-26 10:43:22 5 | * @Description: 6 | */ 7 | #include "buffer_filler.h" 8 | #include "basic/basic.h" 9 | 10 | bool BufferFiller::Fill(ID3D11Device* device, D3D11_TEXTURE2D_DESC desc, int maxCnt) 11 | { 12 | desc.ArraySize = 1; 13 | desc.BindFlags = 0; 14 | desc.MiscFlags = 0; 15 | desc.SampleDesc.Count = 1; 16 | desc.SampleDesc.Quality = 0; 17 | desc.MipLevels = 1; 18 | desc.CPUAccessFlags = D3D11_CPU_ACCESS_READ; 19 | desc.Usage = D3D11_USAGE_STAGING; 20 | if (_buffers.size() == maxCnt) { 21 | ID3D11Texture2D* dstImg = nullptr; 22 | if (FAILED(device->CreateTexture2D(&desc, nullptr, &dstImg))) { 23 | return false; 24 | } 25 | _buffers[_mapIdx] = dstImg; 26 | _mapIdx = (_mapIdx + 1) % _buffers.size(); 27 | return true; 28 | } 29 | while (_buffers.size() < maxCnt) { 30 | ID3D11Texture2D* dstImg = nullptr; 31 | if (FAILED(device->CreateTexture2D(&desc, nullptr, &dstImg))) { 32 | break; 33 | } 34 | _buffers.push_back(dstImg); 35 | } 36 | __CheckBool(!_buffers.empty()); 37 | _copyIdx = 0; 38 | _mapIdx = (_copyIdx + 1) % _buffers.size(); 39 | return true; 40 | } 41 | 42 | bool BufferFiller::Reset() 
43 | { 44 | _buffers[_mapIdx]->Release(); 45 | _buffers[_mapIdx] = nullptr; 46 | _copyIdx = (_copyIdx + 1) % _buffers.size(); 47 | return true; 48 | } 49 | 50 | void BufferFiller::Clear() 51 | { 52 | for (auto&& dstImg : _buffers) { 53 | Free(dstImg, [&dstImg] { dstImg->Release(); }); 54 | } 55 | _buffers.clear(); 56 | } 57 | -------------------------------------------------------------------------------- /src/d3d/buffer_filler.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-26 10:35:06 5 | * @Description: 6 | */ 7 | /* 8 | * @Coding: utf-8 9 | * @Author: vector-wlc 10 | * @Date: 2023-02-26 10:35:06 11 | * @Description: 12 | */ 13 | #ifndef __BUFFER_FILLER_H__ 14 | #define __BUFFER_FILLER_H__ 15 | #include 16 | #include 17 | 18 | class BufferFiller { 19 | public: 20 | bool Fill(ID3D11Device* device, D3D11_TEXTURE2D_DESC desc, int maxCnt = 3); 21 | bool Reset(); 22 | ID3D11Texture2D* GetCopy() { return _buffers[_copyIdx]; } 23 | ID3D11Texture2D* GetMap() { return _buffers[_mapIdx]; } 24 | void Clear(); 25 | ~BufferFiller() 26 | { 27 | Clear(); 28 | } 29 | 30 | private: 31 | int _mapIdx = 0; 32 | int _copyIdx = 0; 33 | std::vector _buffers; 34 | }; 35 | 36 | #endif; -------------------------------------------------------------------------------- /src/d3d/convert.cpp: -------------------------------------------------------------------------------- 1 | 2 | #include "convert.h" 3 | using namespace std; 4 | 5 | #if !defined(SAFE_RELEASE) 6 | #define SAFE_RELEASE(X) \ 7 | if (X) { \ 8 | X->Release(); \ 9 | X = nullptr; \ 10 | } 11 | #endif 12 | 13 | #if !defined(PRINTERR1) 14 | #define PRINTERR1(x) printf(__FUNCTION__ ": Error 0x%08x at line %d in file %s\n", x, __LINE__, __FILE__); 15 | #endif 16 | 17 | #if !defined(PRINTERR) 18 | #define PRINTERR(x, y) printf(__FUNCTION__ ": Error 0x%08x in %s at line %d in file %s\n", x, y, __LINE__, __FILE__); 19 | #endif 20 
| 21 | /// Initialize Video Context 22 | HRESULT D3dConverter::Open(ID3D11Device* pDev, ID3D11DeviceContext* pCtx, 23 | const D3D11_VIDEO_PROCESSOR_COLOR_SPACE& inColorSpace, D3D11_VIDEO_PROCESSOR_COLOR_SPACE& outColorSpace) 24 | { 25 | m_pDev = pDev; 26 | m_pCtx = pCtx; 27 | m_pDev->AddRef(); 28 | m_pCtx->AddRef(); 29 | /// Obtain Video device and Video device context 30 | HRESULT hr = m_pDev->QueryInterface(__uuidof(ID3D11VideoDevice), (void**)&m_pVid); 31 | if (FAILED(hr)) { 32 | PRINTERR(hr, "QAI for ID3D11VideoDevice"); 33 | } 34 | hr = m_pCtx->QueryInterface(__uuidof(ID3D11VideoContext), (void**)&m_pVidCtx); 35 | if (FAILED(hr)) { 36 | PRINTERR(hr, "QAI for ID3D11VideoContext"); 37 | } 38 | _inColorSpace = inColorSpace; 39 | _outColorSpace = outColorSpace; 40 | return hr; 41 | } 42 | 43 | /// Release all Resources 44 | void D3dConverter::Close() 45 | { 46 | for (auto& it : viewMap) { 47 | ID3D11VideoProcessorOutputView* pVPOV = it.second; 48 | pVPOV->Release(); 49 | } 50 | SAFE_RELEASE(m_pVP); 51 | SAFE_RELEASE(m_pVPEnum); 52 | SAFE_RELEASE(m_pVidCtx); 53 | SAFE_RELEASE(m_pVid); 54 | SAFE_RELEASE(m_pCtx); 55 | SAFE_RELEASE(m_pDev); 56 | } 57 | 58 | /// Perform Colorspace conversion 59 | HRESULT D3dConverter::Convert(ID3D11Texture2D* pIn, ID3D11Texture2D* pOut) 60 | { 61 | HRESULT hr = S_OK; 62 | 63 | D3D11_TEXTURE2D_DESC inDesc = {0}; 64 | D3D11_TEXTURE2D_DESC outDesc = {0}; 65 | pIn->GetDesc(&inDesc); 66 | pOut->GetDesc(&outDesc); 67 | 68 | /// Check if VideoProcessor needs to be reconfigured 69 | /// Reconfiguration is required if input/output dimensions have changed 70 | if (m_pVP) { 71 | if (m_inDesc.Width != inDesc.Width || m_inDesc.Height != inDesc.Height || m_outDesc.Width != outDesc.Width || m_outDesc.Height != outDesc.Height) { 72 | SAFE_RELEASE(m_pVPEnum); 73 | SAFE_RELEASE(m_pVP); 74 | } 75 | } 76 | 77 | if (!m_pVP) { 78 | /// Initialize Video Processor 79 | m_inDesc = inDesc; 80 | m_outDesc = outDesc; 81 | D3D11_VIDEO_PROCESSOR_CONTENT_DESC 
contentDesc = { 82 | D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE, 83 | {0, 0}, inDesc.Width, inDesc.Height, 84 | {0, 0}, outDesc.Width, outDesc.Height, 85 | D3D11_VIDEO_USAGE_PLAYBACK_NORMAL}; 86 | hr = m_pVid->CreateVideoProcessorEnumerator(&contentDesc, &m_pVPEnum); 87 | if (FAILED(hr)) { 88 | PRINTERR(hr, "CreateVideoProcessorEnumerator"); 89 | } 90 | hr = m_pVid->CreateVideoProcessor(m_pVPEnum, 0, &m_pVP); 91 | if (FAILED(hr)) { 92 | PRINTERR(hr, "CreateVideoProcessor"); 93 | } 94 | 95 | m_pVidCtx->VideoProcessorSetStreamColorSpace(m_pVP, 0, &_inColorSpace); 96 | m_pVidCtx->VideoProcessorSetOutputColorSpace(m_pVP, &_outColorSpace); 97 | } 98 | 99 | /// Obtain Video Processor Input view from input texture 100 | ID3D11VideoProcessorInputView* pVPIn = nullptr; 101 | D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC inputVD = {0, D3D11_VPIV_DIMENSION_TEXTURE2D, {0, 0}}; 102 | hr = m_pVid->CreateVideoProcessorInputView(pIn, m_pVPEnum, &inputVD, &pVPIn); 103 | if (FAILED(hr)) { 104 | PRINTERR(hr, "CreateVideoProcessInputView"); 105 | return hr; 106 | } 107 | 108 | /// Obtain Video Processor Output view from output texture 109 | ID3D11VideoProcessorOutputView* pVPOV = nullptr; 110 | D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC ovD = {D3D11_VPOV_DIMENSION_TEXTURE2D}; 111 | hr = m_pVid->CreateVideoProcessorOutputView(pOut, m_pVPEnum, &ovD, &pVPOV); 112 | if (FAILED(hr)) { 113 | SAFE_RELEASE(pVPIn); 114 | PRINTERR(hr, "CreateVideoProcessorOutputView"); 115 | return hr; 116 | } 117 | 118 | /// Create a Video Processor Stream to run the operation 119 | D3D11_VIDEO_PROCESSOR_STREAM stream = {TRUE, 0, 0, 0, 0, nullptr, pVPIn, nullptr}; 120 | 121 | /// Perform the Colorspace conversion 122 | hr = m_pVidCtx->VideoProcessorBlt(m_pVP, pVPOV, 0, 1, &stream); 123 | if (FAILED(hr)) { 124 | SAFE_RELEASE(pVPIn); 125 | PRINTERR(hr, "VideoProcessorBlt"); 126 | return hr; 127 | } 128 | SAFE_RELEASE(pVPIn); 129 | SAFE_RELEASE(pVPOV); 130 | return hr; 131 | } 132 | 
-------------------------------------------------------------------------------- /src/d3d/convert.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-03-01 09:48:51 5 | * @Description: 6 | */ 7 | /* 8 | * Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved. 9 | * 10 | * Redistribution and use in source and binary forms, with or without 11 | * modification, are permitted provided that the following conditions 12 | * are met: 13 | * * Redistributions of source code must retain the above copyright 14 | * notice, this list of conditions and the following disclaimer. 15 | * * Redistributions in binary form must reproduce the above copyright 16 | * notice, this list of conditions and the following disclaimer in the 17 | * documentation and/or other materials provided with the distribution. 18 | * * Neither the name of NVIDIA CORPORATION nor the names of its 19 | * contributors may be used to endorse or promote products derived 20 | * from this software without specific prior written permission. 21 | * 22 | * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY 23 | * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 24 | * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 25 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR 26 | * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 27 | * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 28 | * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR 29 | * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY 30 | * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 31 | * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 32 | * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
33 | */ 34 | 35 | #pragma once 36 | #include 37 | #include 38 | #include 39 | 40 | class D3dConverter { 41 | /// Simple Preprocessor class 42 | /// Uses DXVAHD VideoProcessBlt to perform colorspace conversion 43 | private: 44 | /// D3D11 device to be used for Processing 45 | ID3D11Device* m_pDev = nullptr; 46 | /// D3D11 device context to be used for Processing 47 | ID3D11DeviceContext* m_pCtx = nullptr; 48 | /// D3D11 video device to be used for Processing, obtained from d3d11 device 49 | ID3D11VideoDevice* m_pVid = nullptr; 50 | /// D3D11 video device context to be used for Processing, obtained from d3d11 device 51 | ID3D11VideoContext* m_pVidCtx = nullptr; 52 | /// DXVAHD video processor configured for processing. 53 | /// Needs to be reconfigured based on input and output textures for each Convert() call 54 | ID3D11VideoProcessor* m_pVP = nullptr; 55 | /// DXVAHD VpBlt output target. Obtained from the output texture passed to Convert() 56 | ID3D11VideoProcessorOutputView* m_pVPOut = nullptr; 57 | /// D3D11 video processor enumerator. Required to configure Video processor streams 58 | ID3D11VideoProcessorEnumerator* m_pVPEnum = nullptr; 59 | /// Mapping of Texture2D handle and corresponding Video Processor output view handle 60 | /// Optimization to avoid having to create video processor output views in each Convert() call 61 | std::unordered_map viewMap; 62 | /// Input and Output Texture2D properties. 
63 | /// Required to optimize Video Processor stream usage 64 | D3D11_TEXTURE2D_DESC m_inDesc = {0}; 65 | D3D11_TEXTURE2D_DESC m_outDesc = {0}; 66 | D3D11_VIDEO_PROCESSOR_COLOR_SPACE _inColorSpace; 67 | D3D11_VIDEO_PROCESSOR_COLOR_SPACE _outColorSpace; 68 | 69 | public: 70 | /// Initialize Video Context 71 | HRESULT Open(ID3D11Device* pDev, ID3D11DeviceContext* pCtx, 72 | const D3D11_VIDEO_PROCESSOR_COLOR_SPACE& inColorSpace, D3D11_VIDEO_PROCESSOR_COLOR_SPACE& outColorSpace); 73 | /// Perform Colorspace conversion 74 | HRESULT Convert(ID3D11Texture2D* pIn, ID3D11Texture2D* pOut); 75 | /// Release all resources 76 | void Close(); 77 | 78 | public: 79 | /// Constructor 80 | // RGBToNV12(ID3D11Device *pDev, ID3D11DeviceContext *pCtx); 81 | /// Destructor. Release all resources before destroying object 82 | ~D3dConverter() 83 | { 84 | Close(); 85 | } 86 | }; 87 | -------------------------------------------------------------------------------- /src/d3d/gen_frame.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-03-01 12:35:29 5 | * @Description: 6 | */ 7 | 8 | #include "gen_frame.h" 9 | #include 10 | 11 | #undef min 12 | #undef max 13 | 14 | bool GenNv12Frame(ID3D11Device* device, ID3D11DeviceContext* ctx, const D3D11_TEXTURE2D_DESC& desc, 15 | ID3D11Texture2D* img, BufferFiller& buffers, AVFrame*& outFrame, D3dConverter& rgbToNv12) 16 | { 17 | winrt::com_ptr nv12Img = nullptr; 18 | if (FAILED(device->CreateTexture2D(&desc, nullptr, nv12Img.put()))) { 19 | return false; 20 | } 21 | __CheckBool(SUCCEEDED(rgbToNv12.Convert(img, nv12Img.get()))); 22 | // 填充缓冲区 23 | __CheckBool(buffers.Fill(device, desc)); 24 | 25 | ctx->CopyResource(buffers.GetCopy(), nv12Img.get()); 26 | D3D11_MAPPED_SUBRESOURCE resource; 27 | __CheckBool(SUCCEEDED(ctx->Map(buffers.GetMap(), 0, D3D11_MAP_READ, 0, &resource))); 28 | auto height = std::min(outFrame->height, (int)desc.Height); 29 | 
auto width = outFrame->width; 30 | auto srcLinesize = resource.RowPitch; 31 | auto dstLinesize = outFrame->linesize[0]; 32 | auto srcData = (uint8_t*)resource.pData; 33 | auto titleHeight = std::max(int(desc.Height - height), 0); 34 | auto copyLine = std::min(std::min(width, (int)srcLinesize), dstLinesize); 35 | auto border = (desc.Width - width) / 2; 36 | __mtx.lock(); 37 | 38 | // Y 39 | int Ystart = (titleHeight - border) * srcLinesize + border; 40 | auto dstData = outFrame->data[0]; 41 | for (int row = 0; row < height; ++row) { 42 | memcpy(dstData + row * dstLinesize, srcData + Ystart + row * srcLinesize, width); 43 | } 44 | 45 | // UV 46 | dstData = outFrame->data[1]; 47 | int UVStart = srcLinesize * desc.Height + (titleHeight - border) / 2 * srcLinesize + border / 2 * 2; 48 | for (int row = 0; row < height / 2; ++row) { 49 | memcpy(dstData + row * dstLinesize, srcData + UVStart + row * srcLinesize, width); 50 | } 51 | 52 | __mtx.unlock(); 53 | ctx->Unmap(buffers.GetMap(), 0); 54 | __CheckBool(buffers.Reset()); 55 | return true; 56 | } 57 | bool GenRgbFrame(ID3D11Device* device, ID3D11DeviceContext* ctx, const D3D11_TEXTURE2D_DESC& desc, 58 | ID3D11Texture2D* img, BufferFiller& buffers, AVFrame*& outFrame) 59 | { 60 | __CheckBool(buffers.Fill(device, desc)); 61 | ctx->CopyResource(buffers.GetCopy(), img); 62 | D3D11_MAPPED_SUBRESOURCE resource; 63 | __CheckBool(SUCCEEDED(ctx->Map(buffers.GetMap(), 0, D3D11_MAP_READ, 0, &resource))); 64 | auto height = std::min(outFrame->height, (int)desc.Height); 65 | auto width = outFrame->width; 66 | auto srcLinesize = resource.RowPitch; 67 | auto dstLinesize = outFrame->linesize[0]; 68 | auto srcData = (uint8_t*)resource.pData; 69 | auto dstData = outFrame->data[0]; 70 | auto titleHeight = std::max(int(desc.Height - height), 0); 71 | auto copyLine = std::min(std::min(width * 4, (int)srcLinesize), dstLinesize); 72 | auto border = (desc.Width - width) / 2; 73 | __mtx.lock(); 74 | for (int row = 0; row < height; ++row) { 75 | 
auto offset = (titleHeight + row - border) * srcLinesize + border * 4; 76 | memcpy(dstData + row * dstLinesize, srcData + offset, copyLine); 77 | } 78 | __mtx.unlock(); 79 | ctx->Unmap(buffers.GetMap(), 0); 80 | __CheckBool(buffers.Reset()); 81 | return true; 82 | } -------------------------------------------------------------------------------- /src/d3d/gen_frame.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-03-01 12:35:36 5 | * @Description: 6 | */ 7 | #ifndef __GEN_FRAME_H__ 8 | #define __GEN_FRAME_H__ 9 | 10 | #include 11 | #include "buffer_filler.h" 12 | #include "basic/frame.h" 13 | #include "convert.h" 14 | 15 | bool GenNv12Frame(ID3D11Device* device, ID3D11DeviceContext* ctx, const D3D11_TEXTURE2D_DESC& desc, 16 | ID3D11Texture2D* img, BufferFiller& buffers, AVFrame*& outFrame, D3dConverter& rgbToNv12); 17 | bool GenRgbFrame(ID3D11Device* device, ID3D11DeviceContext* ctx, const D3D11_TEXTURE2D_DESC& desc, 18 | ID3D11Texture2D* img, BufferFiller& buffers, AVFrame*& outFrame); 19 | #endif -------------------------------------------------------------------------------- /src/encoder/abstract_encoder.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-19 16:27:59 5 | * @Description: 6 | */ 7 | #include "abstract_encoder.h" 8 | 9 | AVPacket* AbstractEncoder::Encode() 10 | { 11 | int ret = avcodec_receive_packet(_codecCtx, _packet); 12 | if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { 13 | return nullptr; 14 | } else if (ret < 0) { 15 | __DebugPrint("avcodec_receive_packet : Error during encoding"); 16 | return nullptr; 17 | } 18 | return _packet; 19 | } -------------------------------------------------------------------------------- /src/encoder/abstract_encoder.h: -------------------------------------------------------------------------------- 
// Base class shared by the audio and video encoders: owns the reusable
// AVPacket used to drain the codec, plus the codec/context pointers that
// subclasses allocate in their Open()/_Init() paths.
class AbstractEncoder {
public:
    AbstractEncoder()
    {
        // Reusable output packet, refilled by Encode() and freed in the destructor.
        _packet = av_packet_alloc();
    }
    // Non-owning accessor; the context's lifetime is tied to this encoder.
    AVCodecContext* GetCtx() const
    {
        return _codecCtx;
    }

    // Submit one frame for encoding (isEnd == true flushes the codec).
    virtual bool PushFrame(AVFrame* frame, bool isEnd, uint64_t pts) = 0;
    // Drain one encoded packet; returns nullptr when none is ready (or on EOF/error).
    AVPacket* Encode();
    // Optional per-frame cleanup hook (e.g. releasing a hardware frame).
    virtual void AfterEncode() {};
    virtual void Close() = 0;
    virtual ~AbstractEncoder()
    {
        Free(_packet, [this] { av_packet_free(&_packet); });
    }

protected:
    bool _isOpen = false;                   // set by the subclass once Open() succeeds
    AVPacket* _packet = nullptr;            // scratch packet owned by this class
    const AVCodec* _codec = nullptr;        // located by the subclass, not owned
    AVCodecContext* _codecCtx = nullptr;    // allocated/freed by the subclass
};
_codecCtx->bit_rate = audioParam.bitRate; 33 | _codecCtx->sample_rate = AUDIO_SAMPLE_RATE; 34 | AVChannelLayout layout; 35 | layout.order = AV_CHANNEL_ORDER_NATIVE; 36 | layout.nb_channels = 1; 37 | layout.u.mask = AV_CH_LAYOUT_MONO; 38 | av_channel_layout_copy(&_codecCtx->ch_layout, &layout); 39 | if (fmtCtx->oformat->flags & AVFMT_GLOBALHEADER) { 40 | _codecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; 41 | } 42 | return true; 43 | } 44 | 45 | bool Encoder::PushFrame(AVFrame* frame, bool isEnd, uint64_t pts) 46 | { 47 | if (!isEnd) { 48 | __CheckBool(frame); 49 | } else { 50 | frame = nullptr; 51 | } 52 | if (frame != nullptr) { 53 | frame->pts = pts; 54 | } 55 | __CheckBool(avcodec_send_frame(_codecCtx, frame) >= 0); 56 | return true; 57 | } 58 | -------------------------------------------------------------------------------- /src/encoder/audio_encoder.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-19 16:16:28 5 | * @Description: 6 | */ 7 | #ifndef __AUDIO_ENCODER_H__ 8 | #define __AUDIO_ENCODER_H__ 9 | 10 | #include "abstract_encoder.h" 11 | 12 | template <> 13 | class Encoder : public AbstractEncoder { 14 | public: 15 | struct Param { 16 | int bitRate; 17 | }; 18 | ~Encoder() { Close(); } 19 | 20 | bool Open(const Param& audioParma, AVFormatContext* fmtCtx); 21 | virtual void Close() override; 22 | virtual bool PushFrame(AVFrame* frame, bool isEnd, uint64_t pts) override; 23 | 24 | private: 25 | bool _Init(const Param& audioParam, AVFormatContext* fmtCtx); 26 | }; 27 | 28 | #endif -------------------------------------------------------------------------------- /src/encoder/audio_mixer.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-17 19:33:07 5 | * @Description: 6 | */ 7 | #include "audio_mixer.h" 8 | #include "basic/basic.h" 9 | #include 10 
// Convert between a PCM buffer size in bytes and a sample count.
// Both directions assume non-negative arguments (bits * channels > 0).
int SizeToNbSamples(int size, int bitsPerSample, int nbChannels)
{
    // bytes -> bits, then divide by the bits occupied by one full frame.
    const int bitsPerFrame = bitsPerSample * nbChannels;
    return (size << 3) / bitsPerFrame;
}

int NbSamplesToSize(int nbSamples, int bitsPerSample, int nbChannels)
{
    // total bits across all channels, converted back to bytes
    const int totalBits = nbSamples * bitsPerSample * nbChannels;
    return totalBits >> 3;
}
// Append `length` bytes of raw sample data to the queue, packing it into
// fixed-size frames of _usedLinesize bytes each. `_front` is the write
// offset inside the current tail frame; data that does not fit spills into
// a freshly emplaced frame.
void FrameQueue::Push(uint8_t* data, int length)
{
    if (length > _usedLinesize) { // recurse: split oversized input in two halves
        Push(data, length / 2);
        Push(data + length / 2, length / 2 + length % 2);
        return;
    }
    auto&& frame = _queue.back().frame;
    // Number of bytes that will NOT fit into the current tail frame.
    int secondLength = _front + length - _usedLinesize;
    if (secondLength <= 0) { // the current frame still has enough room
        memcpy(frame->data[0] + _front, data, length);
        _front += length;
        return;
    }
    // The current frame is too small: top it up first...
    int firstLength = length - secondLength;
    if (firstLength > 0) {
        memcpy(frame->data[0] + _front, data, firstLength);
    }
    // ...then start a new frame for the remainder.
    _queue.emplace(_fmt, &_layout, _sampleRate, _nbSamples);
    memcpy(_queue.back().frame->data[0], data + firstLength, secondLength);
    _front = secondLength;
}
av_opt_set_sample_fmt(_swrCtx, "out_sample_fmt", outFmt, 0); 120 | if (swr_init(_swrCtx) < 0) { 121 | Close(); 122 | __DebugPrint("swr_init(_swrCtx) failed\n"); 123 | return false; 124 | } 125 | __CheckBool(_toQueue.Init(outChannelNums, outSampleRate, outFmt, outNbSample)); 126 | __CheckBool(_swrFrame = Frame::Alloc(outFmt, &tmpLayout, outSampleRate, outSampleRate / 100 * 2)); 127 | 128 | return true; 129 | } 130 | 131 | void Resampler::Close() 132 | { 133 | Free(_swrCtx, [this] { swr_free(&_swrCtx); }); 134 | Free(_swrFrame, [this] { av_frame_free(&_swrFrame); }); 135 | } 136 | 137 | bool Resampler::Convert(uint8_t* data, int size) 138 | { 139 | std::vector> ret; 140 | if (data == nullptr) { 141 | return false; 142 | } 143 | _fromQueue.Push(data, size); 144 | for (; true;) { // 转换 145 | auto frame = _fromQueue.Pop(); 146 | if (frame.frame == nullptr) { 147 | break; 148 | } 149 | __CheckNullptr(swr_convert(_swrCtx, _swrFrame->data, _swrFrame->nb_samples, // 150 | (const uint8_t**)frame.frame->data, frame.frame->nb_samples)); 151 | _toQueue.Push(_swrFrame->data[0], _swrFrame->linesize[0]); 152 | } 153 | return true; 154 | } 155 | 156 | AVFrame* AudioMixer::Convert(uint32_t index, uint8_t* inBuf, uint32_t size) 157 | { 158 | std::lock_guard locker(_mutex); 159 | auto iter = _audioInputInfos.find(index); 160 | __CheckNullptr(iter != _audioInputInfos.end()); 161 | __CheckNullptr(iter->second.resampler->Convert(inBuf, size)); 162 | return _AdjustVolume() ? 
_outputFrame : nullptr; 163 | } 164 | 165 | bool AudioMixer::_AdjustVolume() 166 | { 167 | // 检测所有流之间是不是相差太大了以及缓存的数据是不是太多了 168 | // 如果缓存的数据太多,直接将所有的队列删除同样的数据 169 | // 如果两个流直接数据相差太大,将多的那个减到和少的那个一样 170 | constexpr int MAX_DIFF = 10; 171 | constexpr int MAX_BUF_SIZE = 20; 172 | int minSize = INT_MAX; 173 | int maxSize = INT_MIN; 174 | FrameQueue* maxQueue = nullptr; 175 | #undef min 176 | for (auto&& iter : _audioInputInfos) { 177 | auto&& queue = iter.second.resampler->GetQueue(); 178 | if (queue.IsEmpty()) { 179 | return false; 180 | } 181 | minSize = std::min(minSize, (int)queue.GetSize()); 182 | if (maxSize < (int)queue.GetSize()) { 183 | maxSize = queue.GetSize(); 184 | maxQueue = &queue; 185 | } 186 | } 187 | 188 | if (maxSize - minSize > MAX_DIFF) { 189 | __DebugPrint("Clear MAX_DIFF"); 190 | for (int i = 0; i < maxSize - minSize; ++i) { 191 | maxQueue->Pop(); 192 | } 193 | } 194 | 195 | for (auto iter = _audioInputInfos.begin(); iter != _audioInputInfos.end(); ++iter) { 196 | auto&& frameQueue = iter->second.resampler->GetQueue(); 197 | if (minSize > MAX_BUF_SIZE) { 198 | __DebugPrint("Clear MAX_BUF_SIZE"); 199 | for (int i = 0; i < minSize - 2; ++i) { 200 | frameQueue.Pop(); 201 | } 202 | } 203 | auto frame = frameQueue.Pop(); 204 | auto scale = iter->second.scale; 205 | auto writeStream = (float*)(_outputFrame->data[0]); 206 | auto readStream = (float*)(frame.frame->data[0]); 207 | iter->second.volume = readStream[0] * scale; 208 | 209 | if (iter == _audioInputInfos.begin()) { 210 | if (std::abs(scale - 1) < 0.01) { // 这种情况可以直接使用 memcpy 而不是下面那种低效率的逐个赋值 211 | memcpy(writeStream, readStream, _outputFrame->linesize[0]); 212 | continue; 213 | } 214 | // 要进行 scale, 只能逐个赋值 215 | // 所以这里要清零 216 | memset(writeStream, 0, _outputFrame->linesize[0]); 217 | } 218 | // 逐个计算赋值 219 | for (int idx = 0; idx < _outputFrame->nb_samples; ++idx) { 220 | writeStream[idx] += readStream[idx] * scale; 221 | if (writeStream[idx] > 0.99) { 222 | writeStream[idx] = 0.99; 223 | } 224 | 
} 225 | } 226 | return true; 227 | } 228 | 229 | AudioMixer::AudioMixer() 230 | : _inited(false) 231 | { 232 | } 233 | 234 | AudioMixer::~AudioMixer() 235 | { 236 | // delete out_buf; 237 | if (_inited) { 238 | Close(); 239 | } 240 | } 241 | 242 | bool AudioMixer::AddAudioInput(uint32_t index, uint32_t sampleRate, 243 | uint32_t channels, uint32_t bitsPerSample, 244 | AVSampleFormat format) 245 | { 246 | std::lock_guard locker(_mutex); 247 | __CheckBool(!_inited); 248 | // 根据index保存是否已经存在 249 | __CheckBool(_audioInputInfos.find(index) == _audioInputInfos.end()); 250 | 251 | auto& filterInfo = _audioInputInfos[index]; 252 | // 设置音频相关参数 253 | filterInfo.sampleRate = sampleRate; 254 | filterInfo.channels = channels; 255 | filterInfo.bitsPerSample = bitsPerSample; 256 | filterInfo.format = format; 257 | filterInfo.name = std::string("input") + std::to_string(index); 258 | return true; 259 | } 260 | 261 | bool AudioMixer::AddAudioOutput(const uint32_t sampleRate, 262 | const uint32_t channels, 263 | const uint32_t bitsPerSample, 264 | const AVSampleFormat format) 265 | { 266 | std::lock_guard locker(_mutex); 267 | __CheckBool(!_inited); 268 | // 设置音频相关参数 269 | _audioOutputInfo.sampleRate = sampleRate; 270 | _audioOutputInfo.channels = channels; 271 | _audioOutputInfo.bitsPerSample = bitsPerSample; 272 | _audioOutputInfo.format = format; 273 | _audioOutputInfo.name = "output"; 274 | return true; 275 | } 276 | 277 | bool AudioMixer::SetOutFrameSize(int outFrameSize) 278 | { 279 | if (_outFrameSize == outFrameSize) { 280 | return true; 281 | } 282 | _outFrameSize = outFrameSize; 283 | for (auto&& filterInfoPair : _audioInputInfos) { 284 | auto&& filterInfo = filterInfoPair.second; 285 | filterInfo.resampler = std::make_unique(); 286 | __CheckBool(filterInfo.resampler->Open(filterInfo.channels, filterInfo.sampleRate, filterInfo.format, 287 | _audioOutputInfo.channels, _audioOutputInfo.sampleRate, _audioOutputInfo.format, outFrameSize)); 288 | } 289 | AVChannelLayout 
tmpLayout; 290 | av_channel_layout_default(&tmpLayout, _audioOutputInfo.channels); 291 | Free(_outputFrame, [this] { av_frame_free(&_outputFrame); }); 292 | __CheckBool(_outputFrame = Frame::Alloc(_audioOutputInfo.format, &tmpLayout, _audioOutputInfo.sampleRate, outFrameSize)); 293 | _inited = true; 294 | return true; 295 | } 296 | 297 | bool AudioMixer::Close() 298 | { 299 | if (!_inited) { 300 | return true; 301 | } 302 | _inited = false; 303 | std::lock_guard locker(_mutex); 304 | _audioInputInfos.clear(); 305 | Free(_outputFrame, [this] { av_frame_free(&_outputFrame); }); 306 | _outFrameSize = 0; 307 | return true; 308 | } 309 | 310 | AudioMixer::AudioInfo* AudioMixer::GetInputInfo(uint32_t index) 311 | { 312 | auto iter = _audioInputInfos.find(index); 313 | return iter == _audioInputInfos.end() ? nullptr : &(iter->second); 314 | } 315 | -------------------------------------------------------------------------------- /src/encoder/audio_mixer.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-17 19:33:07 5 | * @Description: 6 | */ 7 | #ifndef __AUDIO_MIXER_H__ 8 | #define __AUDIO_MIXER_H__ 9 | 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | 19 | extern "C" { 20 | #include 21 | #include 22 | #include 23 | #include 24 | #include 25 | #include 26 | } 27 | 28 | #include "basic/frame.h" 29 | 30 | #define __PCM1_FRAME_SIZE (4096 * 2) 31 | #define __PCM2_FRAME_SIZE (4096) 32 | #define __PCM_OUT_FRAME_SIZE (40000) 33 | 34 | // 循环缓存空间 35 | class FrameQueue { 36 | public: 37 | bool Init(int channelNums, int sampleRate, AVSampleFormat fmt, int nbSamples); 38 | Frame Pop(); 39 | void Push(uint8_t* data, int length); 40 | bool IsEmpty() const { return _queue.size() < 2; } 41 | auto GetSize() const { return _queue.size(); } 42 | 43 | private: 44 | int _front = 0; 45 | AVChannelLayout _layout; 46 | int 
// Wraps FFmpeg's SwrContext: buffers raw input bytes into frames, converts
// them to the configured output format/rate, and queues the results as
// fixed-size output frames for the mixer to consume.
class Resampler {
public:
    // Configure input/output formats; outNbSample is the output frame size.
    bool Open(int inChannelNums, int inSampleRate, AVSampleFormat inFmt,
        int outChannelNums, int outSampleRate, AVSampleFormat outFmt, int outNbSample);
    // Feed `size` bytes of raw input; converted frames accumulate in GetQueue().
    bool Convert(uint8_t* data, int size);
    void Close();
    // Queue of converted, output-format frames (reference into this object).
    FrameQueue& GetQueue() { return _toQueue; }
    ~Resampler() { Close(); }

private:
    AVFrame* _swrFrame = nullptr;  // scratch frame receiving swr_convert output
    SwrContext* _swrCtx = nullptr; // owned; freed in Close()
    FrameQueue _fromQueue;         // buffered raw input frames
    FrameQueue _toQueue;           // converted output frames
};
-------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-03 10:01:33 5 | * @Description: 6 | */ 7 | #include "video_encoder.h" 8 | 9 | extern "C" { 10 | #include 11 | } 12 | 13 | std::vector Encoder::_usableEncoders; 14 | 15 | Encoder::Encoder() 16 | { 17 | } 18 | 19 | bool Encoder::Open(const Param& encodeParam, AVFormatContext* fmtCtx) 20 | { 21 | Close(); 22 | _isOpen = false; 23 | __CheckBool(_Init(encodeParam, fmtCtx)); 24 | 25 | // 打开编码器 26 | __CheckBool(avcodec_open2(_codecCtx, _codec, nullptr) >= 0); 27 | 28 | _isOpen = true; 29 | return true; 30 | } 31 | 32 | bool Encoder::PushFrame(AVFrame* frame, bool isEnd, uint64_t pts) 33 | { 34 | if (!isEnd) { 35 | __CheckBool(_Trans(frame)); 36 | frame = _bufferFrame; 37 | __CheckBool(frame); 38 | } else { 39 | frame = nullptr; // 直接刷新编码器缓存 40 | } 41 | if (frame != nullptr) { 42 | frame->pts = pts; 43 | } 44 | __CheckBool(avcodec_send_frame(_codecCtx, frame) >= 0); 45 | return true; 46 | } 47 | 48 | void Encoder::AfterEncode() 49 | { 50 | if (_isHardware) { 51 | Free(_hwFrame, [this] { av_frame_free(&_hwFrame); }); 52 | } 53 | } 54 | 55 | void Encoder::Close() 56 | { 57 | if (_codecCtx != nullptr) { 58 | avcodec_close(_codecCtx); 59 | } 60 | 61 | Free(_codecCtx, [this] { avcodec_free_context(&_codecCtx); }); 62 | Free(_hwDeviceCtx, [this] { av_buffer_unref(&_hwDeviceCtx); }); 63 | _converter = nullptr; 64 | } 65 | 66 | const std::vector& Encoder::GetUsableEncoders() 67 | { 68 | if (_usableEncoders.empty()) { 69 | _FindUsableEncoders(); 70 | } 71 | return _usableEncoders; 72 | } 73 | 74 | void Encoder::_FindUsableEncoders() 75 | { 76 | // 尝试打开编码器看看编码器能不能用 77 | Param param; 78 | param.bitRate = 1000; 79 | param.fps = 30; 80 | param.width = 1920; 81 | param.height = 1080; 82 | Encoder encoder; 83 | AVFormatContext* fmtCtx = nullptr; 84 | 85 | __CheckNo(avformat_alloc_output_context2(&fmtCtx, nullptr, nullptr, "test.mp4") 
>= 0); 86 | for (const auto& name : _encoderNames) { 87 | if (strcmp(name, "libx264") == 0) { // 软件编码器必定支持 88 | _usableEncoders.push_back(name); 89 | continue; 90 | } 91 | param.name = name; 92 | if (encoder.Open(param, fmtCtx)) { 93 | _usableEncoders.push_back(name); 94 | } 95 | encoder.Close(); 96 | } 97 | Free(fmtCtx, [&fmtCtx] { avformat_free_context(fmtCtx); }); 98 | } 99 | 100 | bool Encoder::_Init(const Param& encodeParam, AVFormatContext* fmtCtx) 101 | { 102 | _isHardware = encodeParam.name != "libx264"; 103 | AVHWDeviceType hwType; 104 | if (encodeParam.name == "libx264") { 105 | _pixFmt = AV_PIX_FMT_NV12; 106 | } else if (encodeParam.name == "h264_nvenc") { 107 | _pixFmt = AV_PIX_FMT_CUDA; 108 | hwType = AV_HWDEVICE_TYPE_CUDA; 109 | } else if (encodeParam.name == "h264_qsv") { 110 | _pixFmt = AV_PIX_FMT_QSV; 111 | hwType = AV_HWDEVICE_TYPE_QSV; 112 | } else if (encodeParam.name == "h264_amf") { 113 | _pixFmt = AV_PIX_FMT_VULKAN; 114 | hwType = AV_HWDEVICE_TYPE_VULKAN; 115 | } 116 | _isHardware = _pixFmt != AV_PIX_FMT_NV12; 117 | if (_isHardware && av_hwdevice_ctx_create(&_hwDeviceCtx, hwType, nullptr, nullptr, 0) < 0) { // 硬件解码 118 | __DebugPrint("av_hwdevice_ctx_create failed\n"); 119 | return false; 120 | } 121 | __CheckBool(_codec = avcodec_find_encoder_by_name(encodeParam.name.c_str())); 122 | __CheckBool(_codecCtx = avcodec_alloc_context3(_codec)); 123 | _codecCtx->bit_rate = encodeParam.bitRate; 124 | _codecCtx->width = encodeParam.width; 125 | _codecCtx->height = encodeParam.height; 126 | _codecCtx->time_base = {1, encodeParam.fps}; 127 | _codecCtx->framerate = {encodeParam.fps, 1}; 128 | 129 | // 影响缓冲区大小 130 | _codecCtx->gop_size = 10; 131 | _codecCtx->max_b_frames = 1; 132 | _codecCtx->pix_fmt = _pixFmt; 133 | 134 | /* Some formats want stream headers to be separate. 
*/ 135 | if (fmtCtx->oformat->flags & AVFMT_GLOBALHEADER) { 136 | _codecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER; 137 | } 138 | 139 | if (!_isHardware) { // 软件编码设置为快,避免占用过高的 CPU ,反正硬盘不值钱 140 | av_opt_set(_codecCtx->priv_data, "preset", "veryfast", 0); 141 | } 142 | 143 | __CheckBool(!_isHardware || _SetHwFrameCtx()); 144 | return true; 145 | } 146 | bool Encoder::_SetHwFrameCtx() 147 | { 148 | AVBufferRef* hwFramesRef; 149 | AVHWFramesContext* framesCtx = nullptr; 150 | 151 | __CheckBool(hwFramesRef = av_hwframe_ctx_alloc(_hwDeviceCtx)); 152 | framesCtx = (AVHWFramesContext*)(hwFramesRef->data); 153 | framesCtx->format = _pixFmt; 154 | framesCtx->sw_format = AV_PIX_FMT_NV12; 155 | framesCtx->width = _codecCtx->width; 156 | framesCtx->height = _codecCtx->height; 157 | framesCtx->initial_pool_size = 20; 158 | if (av_hwframe_ctx_init(hwFramesRef) < 0) { 159 | __DebugPrint("av_hwframe_ctx_init failed\n"); 160 | av_buffer_unref(&hwFramesRef); 161 | return false; 162 | } 163 | __CheckBool(_codecCtx->hw_frames_ctx = av_buffer_ref(hwFramesRef)); 164 | av_buffer_unref(&hwFramesRef); 165 | return true; 166 | } 167 | 168 | bool Encoder::_Trans(AVFrame* frame) 169 | { 170 | std::lock_guard lk(__mtx); 171 | if (!_isOpen) { 172 | return false; 173 | } 174 | if (frame->format == AV_PIX_FMT_NV12) { 175 | _bufferFrame = frame; 176 | } else { 177 | if (_converter == nullptr) { 178 | _converter = std::make_unique(AVPixelFormat(frame->format), AV_PIX_FMT_NV12); 179 | _converter->SetSize(frame->width, frame->height); 180 | } 181 | _bufferFrame = _converter->Trans(frame); 182 | } 183 | if (_isHardware) { 184 | _bufferFrame = _ToHardware(); 185 | } 186 | __CheckBool(_bufferFrame); 187 | return true; 188 | } 189 | 190 | AVFrame* Encoder::_ToHardware() 191 | { 192 | if (_bufferFrame == nullptr) { 193 | return nullptr; 194 | } 195 | __CheckNullptr(_hwFrame = av_frame_alloc()); 196 | __CheckNullptr(av_hwframe_get_buffer(_codecCtx->hw_frames_ctx, _hwFrame, 0) >= 0); 197 | 
__CheckNullptr(_hwFrame->hw_frames_ctx); 198 | __CheckNullptr(av_hwframe_transfer_data(_hwFrame, _bufferFrame, 0) >= 0); 199 | return _hwFrame; 200 | } 201 | -------------------------------------------------------------------------------- /src/encoder/video_encoder.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-19 15:41:37 5 | * @Description: 6 | */ 7 | #ifndef __VIDEO_ENCODER_H__ 8 | #define __VIDEO_ENCODER_H__ 9 | 10 | #include "abstract_encoder.h" 11 | #include "basic/frame.h" 12 | 13 | template <> 14 | class Encoder : public AbstractEncoder { 15 | public: 16 | struct Param { 17 | int bitRate; 18 | int width; 19 | int height; 20 | int fps; 21 | std::string name; 22 | }; 23 | Encoder(); 24 | ~Encoder() { Close(); } 25 | bool Open(const Param& encodeParam, AVFormatContext* fmtCtx); 26 | virtual bool PushFrame(AVFrame* frame, bool isEnd, uint64_t pts) override; 27 | virtual void AfterEncode() override; 28 | virtual void Close() override; 29 | static const std::vector& GetUsableEncoders(); 30 | 31 | private: 32 | bool 33 | _Init(const Param& encodeParam, AVFormatContext* fmtCtx); 34 | bool _SetHwFrameCtx(); 35 | bool _Trans(AVFrame* frame); 36 | AVFrame* _ToHardware(); 37 | static void _FindUsableEncoders(); 38 | bool _isHardware = false; 39 | std::unique_ptr _converter = nullptr; 40 | AVFrame* _bufferFrame = nullptr; 41 | static constexpr const char* _encoderNames[4] = { 42 | "h264_nvenc", 43 | "h264_qsv", 44 | "h264_amf", 45 | "libx264", 46 | }; 47 | static std::vector _usableEncoders; 48 | AVBufferRef* _hwDeviceCtx = nullptr; 49 | AVFrame* _hwFrame = nullptr; 50 | AVPixelFormat _pixFmt = AV_PIX_FMT_NV12; 51 | }; 52 | 53 | #endif -------------------------------------------------------------------------------- /src/main.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: 
vector-wlc 4 | * @Date: 2023-02-03 10:00:08 5 | * @Description: 6 | */ 7 | #include "ui/av_recorder.h" 8 | #include 9 | 10 | int main(int argc, char* argv[]) 11 | { 12 | QApplication a(argc, argv); 13 | AvRecorder w; 14 | 15 | w.show(); 16 | return a.exec(); 17 | } 18 | -------------------------------------------------------------------------------- /src/muxer/av_muxer.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-19 19:36:28 5 | * @Description: 6 | */ 7 | #include "av_muxer.h" 8 | 9 | bool AvMuxer::Open(std::string_view filePath, std::string_view format) 10 | { 11 | Close(); 12 | _isOpenFile = false; 13 | _filePath = filePath; 14 | __CheckBool(avformat_alloc_output_context2(&_fmtCtx, nullptr, format.data(), _filePath.c_str()) >= 0); 15 | __CheckBool(_fmtCtx); 16 | return true; 17 | } 18 | 19 | bool AvMuxer::WriteHeader() 20 | { 21 | av_dump_format(_fmtCtx, 0, _filePath.data(), 1); 22 | // 打开输出文件 23 | if (!(_fmtCtx->oformat->flags & AVFMT_NOFILE)) { 24 | __CheckBool(avio_open(&_fmtCtx->pb, _filePath.c_str(), AVIO_FLAG_WRITE) >= 0); 25 | } 26 | // 写入文件头 27 | __CheckBool(avformat_write_header(_fmtCtx, nullptr) >= 0); 28 | _isOpenFile = true; 29 | return true; 30 | } 31 | 32 | int AvMuxer::AddVideoStream(const Encoder::Param& param) 33 | { 34 | __Check(-1, _fmtCtx->oformat->video_codec != AV_CODEC_ID_NONE); 35 | Info info; 36 | info.pts = 0; 37 | info.fps = param.fps; 38 | auto encoder = new Encoder; 39 | __Check(-1, encoder->Open(param, _fmtCtx)); 40 | info.type = MediaType::VIDEO; 41 | info.encoder = encoder; 42 | __Check(-1, _AddStream(info)); 43 | _infos.back().stream->time_base = {1, info.fps}; 44 | return info.streamIndex; 45 | } 46 | 47 | int AvMuxer::AddAudioStream(const Encoder::Param& param) 48 | { 49 | __Check(-1, _fmtCtx->oformat->audio_codec != AV_CODEC_ID_NONE); 50 | Info info; 51 | info.pts = 0; 52 | info.fps = AUDIO_SAMPLE_RATE; 53 | auto 
encoder = new Encoder; 54 | info.type = MediaType::AUDIO; 55 | info.encoder = encoder; 56 | __Check(-1, encoder->Open(param, _fmtCtx)); 57 | __Check(-1, _AddStream(info)); 58 | _infos.back().stream->time_base = {1, AUDIO_SAMPLE_RATE}; 59 | return info.streamIndex; 60 | } 61 | 62 | bool AvMuxer::Write(AVFrame* frame, int streamIndex, bool isEnd) 63 | { 64 | // 此函数不能被多个流同时调用 65 | std::lock_guard lk(_mtx); 66 | __CheckBool(_infos.size() > streamIndex); 67 | auto&& info = _infos[streamIndex]; 68 | if (info.isEnd) { 69 | return true; 70 | } 71 | if (isEnd) { 72 | info.isEnd = isEnd; 73 | frame = nullptr; 74 | } 75 | __CheckBool(info.encoder); 76 | // 检测流之间时间是不是差的太多,如果差的太多,直接弃掉数据多的流数据 77 | if (!_CheckTime(double(info.pts) / info.fps)) { 78 | info.isEncodeOverload = true; 79 | return false; 80 | } 81 | info.isEncodeOverload = false; 82 | __CheckBool(info.encoder->PushFrame(frame, isEnd, info.pts)); 83 | info.pts += info.type == MediaType::AUDIO ? info.encoder->GetCtx()->frame_size : 1; // 更新 pts 84 | AVPacket* packet = nullptr; 85 | while ((packet = info.encoder->Encode())) { 86 | av_packet_rescale_ts(packet, info.encoder->GetCtx()->time_base, info.stream->time_base); 87 | packet->stream_index = info.stream->index; 88 | __CheckBool(av_interleaved_write_frame(_fmtCtx, packet) >= 0); 89 | } 90 | info.encoder->AfterEncode(); 91 | return true; 92 | } 93 | 94 | bool AvMuxer::_CheckTime(double time) 95 | { 96 | auto minTime = double(_infos.front().pts) / _infos.front().fps; 97 | for (int idx = 1; idx < _infos.size(); ++idx) { 98 | minTime = std::min(double(_infos[idx].pts) / _infos[idx].fps, minTime); 99 | } 100 | if (time - minTime > 0.1) { // 说明相差的太多了,下一帧不能再送往编码器 101 | return false; 102 | } 103 | return true; 104 | } 105 | 106 | void AvMuxer::Close() 107 | { 108 | if (_fmtCtx == nullptr) { 109 | return; 110 | } 111 | // 清空编码器缓存 112 | for (int index = 0; index < _infos.size(); ++index) { 113 | __DebugPrint("stream: %d, time:%f", index, double(_infos[index].pts) / 
_infos[index].fps); 114 | } 115 | if (_isOpenFile) { 116 | __CheckNo(av_write_trailer(_fmtCtx) >= 0); 117 | Free(_fmtCtx->pb, [this] { avio_closep(&_fmtCtx->pb); }); 118 | } 119 | _isOpenFile = false; 120 | 121 | for (auto&& info : _infos) { 122 | info.encoder->Close(); 123 | Free(info.encoder, [&info] {info.encoder->Close(); delete info.encoder; }); 124 | } 125 | _infos.clear(); 126 | Free(_fmtCtx, [this] { avformat_free_context(_fmtCtx); }); 127 | } 128 | 129 | bool AvMuxer::_AddStream(Info& info) 130 | { 131 | __CheckBool(info.stream = avformat_new_stream(_fmtCtx, nullptr)); 132 | info.stream->id = _fmtCtx->nb_streams - 1; 133 | __CheckBool(avcodec_parameters_from_context(info.stream->codecpar, info.encoder->GetCtx()) >= 0); 134 | info.streamIndex = _fmtCtx->nb_streams - 1; 135 | info.pts = 0; 136 | info.isEnd = false; 137 | _infos.push_back(info); 138 | return true; 139 | } 140 | 141 | AVCodecContext* AvMuxer::GetCodecCtx(int streamIndex) 142 | { 143 | __CheckNullptr(streamIndex >= 0 && _infos.size() > streamIndex); 144 | return _infos[streamIndex].encoder->GetCtx(); 145 | } 146 | 147 | bool AvMuxer::IsEncodeOverload() const 148 | { 149 | for (auto&& info : _infos) { 150 | if (info.isEncodeOverload) { 151 | return true; 152 | } 153 | } 154 | return false; 155 | } -------------------------------------------------------------------------------- /src/muxer/av_muxer.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-01-31 17:19:09 5 | * @Description: 6 | */ 7 | #ifndef __AV_MUXER_H__ 8 | #define __AV_MUXER_H__ 9 | 10 | #include "encoder/audio_encoder.h" 11 | #include "encoder/video_encoder.h" 12 | 13 | class AvMuxer { 14 | public: 15 | struct Info { 16 | MediaType type; 17 | AbstractEncoder* encoder = nullptr; 18 | AVStream* stream = nullptr; 19 | int streamIndex = -1; 20 | int fps = 30; 21 | uint64_t pts = 0; 22 | bool isEnd = false; 23 | bool isEncodeOverload 
= false; 24 | }; 25 | ~AvMuxer() 26 | { 27 | Close(); 28 | } 29 | bool Open(std::string_view filePath, std::string_view format = "mp4"); 30 | bool WriteHeader(); 31 | // 返回值为创建的流的索引 ,-1表示创建失败 32 | int AddVideoStream(const Encoder::Param& param); 33 | int AddAudioStream(const Encoder::Param& param); 34 | bool Write(AVFrame* frame, int streamIndex, bool isEnd = false); 35 | void Close(); 36 | AVCodecContext* GetCodecCtx(int streamIndex); 37 | bool IsEncodeOverload() const; 38 | 39 | private: 40 | std::mutex _mtx; 41 | bool _isOpenFile = false; 42 | bool _AddStream(Info& info); 43 | bool _CheckTime(double time); 44 | std::vector _infos; 45 | AVFormatContext* _fmtCtx = nullptr; 46 | std::string _filePath; 47 | }; 48 | 49 | #endif -------------------------------------------------------------------------------- /src/recorder/audio_recorder.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-17 19:33:07 5 | * @Description: 6 | */ 7 | #include "audio_recorder.h" 8 | 9 | bool AudioRecorder::Open( 10 | const std::vector& deviceTypes, 11 | Encoder::Param& param, 12 | const uint32_t sampleRate, 13 | const uint32_t channels, 14 | const uint32_t bitsPerSample, 15 | const AVSampleFormat format) 16 | { 17 | Close(); 18 | Info mixInfo; 19 | mixInfo.mixer = &_mixer; 20 | mixInfo.isRecord = &_isRecord; 21 | mixInfo.streamIndex = &_streamIndex; 22 | 23 | for (int index = 0; index < deviceTypes.size(); ++index) { 24 | mixInfo.mixIndex = index; 25 | _infos.push_back(mixInfo); 26 | } 27 | for (int index = 0; index < deviceTypes.size(); ++index) { 28 | auto&& capturer = deviceTypes[index] == AudioCapturer::Microphone ? 
_micCapturer : _speakerCapturer; 29 | if (!capturer.Init(deviceTypes[index], _Callback, &(_infos[index]))) { 30 | continue; 31 | } 32 | auto&& format = capturer.GetFormat(); 33 | __CheckBool(_mixer.AddAudioInput(index, format.nSamplesPerSec, format.nChannels, 34 | format.wBitsPerSample, _GetAVSampleFormat(format.wBitsPerSample))); 35 | } 36 | __CheckBool(_mixer.AddAudioOutput(sampleRate, channels, bitsPerSample, format)); 37 | _param = param; 38 | __CheckBool(_mixer.SetOutFrameSize(1024)); 39 | 40 | for (int index = 0; index < deviceTypes.size(); ++index) { 41 | if (_mixer.GetInputInfo(index) != nullptr) { 42 | auto&& capturer = deviceTypes[index] == AudioCapturer::Microphone ? _micCapturer : _speakerCapturer; 43 | __CheckBool(capturer.Start()); 44 | } 45 | } 46 | 47 | return true; 48 | } 49 | 50 | void AudioRecorder::Close() 51 | { 52 | StopRecord(); 53 | _micCapturer.Stop(); 54 | _speakerCapturer.Stop(); 55 | _mixer.Close(); 56 | _infos.clear(); 57 | } 58 | 59 | void AudioRecorder::SetVolumeScale(float scale, int mixIndex) 60 | { 61 | auto info = _mixer.GetInputInfo(mixIndex); 62 | if (info != nullptr) { 63 | info->scale = scale; 64 | } 65 | } 66 | 67 | bool AudioRecorder::LoadMuxer(AvMuxer& muxer) 68 | { 69 | for (auto&& info : _infos) { 70 | info.muxer = &muxer; 71 | } 72 | __CheckBool((_streamIndex = muxer.AddAudioStream(_param)) != -1); 73 | return true; 74 | } 75 | 76 | bool AudioRecorder::StartRecord() 77 | { 78 | _isRecord = true; 79 | return true; 80 | } 81 | 82 | void AudioRecorder::StopRecord() 83 | { 84 | _isRecord = false; 85 | } 86 | 87 | void AudioRecorder::_Callback(void* data, size_t size, void* userInfo) 88 | { 89 | auto info = (Info*)userInfo; 90 | auto inputInfo = info->mixer->GetInputInfo(info->mixIndex); 91 | auto frame = info->mixer->Convert(info->mixIndex, (uint8_t*)data, size); 92 | if (frame == nullptr) { 93 | return; 94 | } 95 | if (*(info->isRecord)) { 96 | __CheckNo(info->streamIndex && *(info->streamIndex) != -1); 97 | int frameSize = 
info->muxer->GetCodecCtx(*info->streamIndex)->frame_size; 98 | if (info->mixer->GetOutFrameSize() != frameSize) { 99 | __DebugPrint("Change frame size from %d to %d", info->mixer->GetOutFrameSize(), frameSize); 100 | info->mixer->SetOutFrameSize(frameSize); 101 | return; 102 | } 103 | __CheckNo(info->muxer->Write(frame, *(info->streamIndex))); 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /src/recorder/audio_recorder.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-19 20:01:09 5 | * @Description: 6 | */ 7 | #ifndef __AUDIO_RECORDER_H__ 8 | #define __AUDIO_RECORDER_H__ 9 | 10 | #include "capturer/audio_capturer.h" 11 | #include "encoder/audio_mixer.h" 12 | #include "muxer/av_muxer.h" 13 | 14 | class AudioRecorder { 15 | public: 16 | struct Info { 17 | AudioMixer* mixer = nullptr; 18 | AvMuxer* muxer = nullptr; 19 | bool* isRecord = nullptr; 20 | int mixIndex; 21 | int* streamIndex = nullptr; 22 | }; 23 | 24 | bool Open(const std::vector& deviceTypes, 25 | Encoder::Param& param, 26 | const uint32_t sampleRate = AUDIO_SAMPLE_RATE, 27 | const uint32_t channels = AUDIO_CHANNEL, 28 | const uint32_t bitsPerSample = 32, 29 | const AVSampleFormat format = AUDIO_FMT); 30 | bool LoadMuxer(AvMuxer& muxer); 31 | bool StartRecord(); 32 | void StopRecord(); 33 | void Close(); 34 | auto GetCaptureInfo(int mixIndex) 35 | { 36 | return _mixer.GetInputInfo(mixIndex); 37 | } 38 | void SetVolumeScale(float scale, int mixIndex); 39 | 40 | private: 41 | AudioCapturer _micCapturer; 42 | AudioCapturer _speakerCapturer; 43 | AudioMixer _mixer; 44 | std::vector _infos; 45 | bool _isRecord = false; 46 | int _streamIndex; 47 | Encoder::Param _param; 48 | static void _Callback(void* data, size_t size, void* userInfo); 49 | AVSampleFormat _GetAVSampleFormat(int wBitsPerSample) 50 | { 51 | return wBitsPerSample == 16 ? 
AV_SAMPLE_FMT_S16 : AV_SAMPLE_FMT_S32; 52 | } 53 | }; 54 | 55 | #endif -------------------------------------------------------------------------------- /src/recorder/video_recorder.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-20 10:59:34 5 | * @Description: 6 | */ 7 | #include "video_recorder.h" 8 | 9 | bool VideoRecorder::Open(HWND srcHwnd, Encoder::Param& param, VideoCapturer::Method type) 10 | { 11 | Close(); 12 | __CheckBool(_capturer.Open(srcHwnd, type)); 13 | __CheckBool(_Open(param)); 14 | return true; 15 | } 16 | 17 | bool VideoRecorder::Open(int monitorIdx, Encoder::Param& param, VideoCapturer::Method type) 18 | { 19 | Close(); 20 | __CheckBool(_capturer.Open(monitorIdx, type)); 21 | __CheckBool(_Open(param)); 22 | return true; 23 | } 24 | 25 | bool VideoRecorder::_Open(Encoder::Param& param) 26 | { 27 | __CheckBool(_encodeFrame = Frame::Alloc( 28 | AV_PIX_FMT_NV12, _capturer.GetWidth(), _capturer.GetHeight())); 29 | { 30 | std::lock_guard renderLk(_renderMtx); 31 | __CheckBool(_renderFrame = Frame::Alloc( 32 | AV_PIX_FMT_NV12, _capturer.GetWidth(), _capturer.GetHeight())); 33 | } 34 | 35 | // 开始捕获画面 36 | _captureTimer.Start(param.fps, [this] { 37 | auto srcFrame = _capturer.GetFrame(); 38 | if (srcFrame != nullptr) { 39 | std::lock_guard muxLk(__mtx); 40 | if (srcFrame->format != _encodeFrame->format) { 41 | std::lock_guard renderLk(_renderMtx); 42 | Free(_encodeFrame, [this] { av_frame_free(&_encodeFrame); }); 43 | __CheckNo(_encodeFrame = Frame::Alloc( 44 | AVPixelFormat(srcFrame->format), _capturer.GetWidth(), _capturer.GetHeight())); 45 | } 46 | av_frame_copy(_encodeFrame, srcFrame); 47 | } 48 | }); 49 | param.width = _capturer.GetWidth(); 50 | param.height = _capturer.GetHeight(); 51 | _param = param; 52 | return true; 53 | } 54 | 55 | AVFrame* VideoRecorder::GetRenderFrame() 56 | { 57 | std::lock_guard renderLk(_renderMtx); 58 | if 
(_encodeFrame == nullptr) { 59 | return nullptr; 60 | } 61 | if (_renderFrame->format != _encodeFrame->format) { 62 | Free(_renderFrame, [this] { av_frame_free(&_renderFrame); }); 63 | __CheckNullptr(_renderFrame = Frame::Alloc( 64 | AVPixelFormat(_encodeFrame->format), _capturer.GetWidth(), _capturer.GetHeight())); 65 | } 66 | av_frame_copy(_renderFrame, _encodeFrame); 67 | return _renderFrame; 68 | } 69 | 70 | bool VideoRecorder::LoadMuxer(AvMuxer& muxer) 71 | { 72 | _muxer = &muxer; 73 | __CheckBool((_streamIndex = muxer.AddVideoStream(_param)) != -1); 74 | return true; 75 | } 76 | 77 | bool VideoRecorder::StartRecord() 78 | { 79 | _totalPts = 0; 80 | _lossPts = 0; 81 | _muxTimer.Start(_param.fps, [this] { 82 | ++_totalPts; 83 | if (!_muxer->Write(_encodeFrame, _streamIndex)) { 84 | ++_lossPts; 85 | } 86 | }); 87 | _isRecord = true; 88 | return true; 89 | } 90 | void VideoRecorder::StopRecord() 91 | { 92 | _isRecord = false; 93 | _muxTimer.Stop(); 94 | } 95 | 96 | void VideoRecorder::Close() 97 | { 98 | StopRecord(); 99 | _captureTimer.Stop(); 100 | _capturer.Close(); 101 | Free(_encodeFrame, [this] { av_frame_free(&_encodeFrame); }); 102 | Free(_renderFrame, [this] { av_frame_free(&_renderFrame); }); 103 | } -------------------------------------------------------------------------------- /src/recorder/video_recorder.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-20 10:33:43 5 | * @Description: 6 | */ 7 | #ifndef __VIDEO_RECORDER_H__ 8 | #define __VIDEO_RECORDER_H__ 9 | 10 | #include "basic/timer.h" 11 | #include "capturer/video_capturer.h" 12 | #include "muxer/av_muxer.h" 13 | #include 14 | #include 15 | 16 | class VideoRecorder { 17 | public: 18 | bool Open(HWND srcHwnd, Encoder::Param& param, VideoCapturer::Method type); 19 | bool Open(int monitorIdx, Encoder::Param& param, VideoCapturer::Method type); 20 | bool LoadMuxer(AvMuxer& muxer); 21 | bool 
StartRecord(); 22 | void StopRecord(); 23 | auto GetCapturerType() { return _capturer.GetMethod(); } 24 | AVFrame* GetRenderFrame(); 25 | // 停止录制 26 | void Close(); 27 | void SetIsDrawCursor(bool isDraw) 28 | { 29 | _capturer.SetDrawCursor(isDraw); 30 | } 31 | bool IsCaptureOverload() const { return _captureTimer.IsOverload(); } 32 | double GetLossRate() { return _lossPts == 0 ? 0 : (double)_lossPts / _totalPts; } 33 | 34 | private: 35 | bool _Open(Encoder::Param& param); 36 | VideoCapturer _capturer; 37 | AvMuxer* _muxer = nullptr; 38 | bool _isRecord = false; 39 | int _streamIndex = -1; 40 | AVFrame* _encodeFrame = nullptr; 41 | AVFrame* _renderFrame = nullptr; 42 | Encoder::Param _param; 43 | Timer _captureTimer; 44 | Timer _muxTimer; 45 | std::mutex _renderMtx; 46 | uint64_t _totalPts = 0; 47 | uint64_t _lossPts = 0; 48 | }; 49 | #endif -------------------------------------------------------------------------------- /src/ui/audio_render.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-21 09:51:51 5 | * @Description: 6 | */ 7 | #include "audio_render.h" 8 | #include 9 | #include "basic/basic.h" 10 | 11 | AudioRender::AudioRender(QLabel* parent) 12 | : QLabel(parent) 13 | { 14 | } 15 | void AudioRender::ShowVolume(float volume) 16 | { 17 | float val = 0; 18 | if (volume < 0) { 19 | volume = -volume; 20 | } 21 | 22 | if (volume > 0.001) { 23 | val = (20 * log10(volume) + 60) / 60; 24 | } 25 | 26 | auto diff = val - _lastShowVal; 27 | if (diff < -0.015) { 28 | diff = -0.015; 29 | } 30 | _lastShowVal += diff; 31 | } 32 | 33 | void AudioRender::paintEvent(QPaintEvent* event) 34 | { 35 | int val = _lastShowVal * width(); 36 | QPainter painter(this); 37 | QPen pen(Qt::green, height()); 38 | painter.setPen(pen); 39 | painter.drawLine(0, 0, val, 0); 40 | pen.setColor(Qt::gray); 41 | painter.setPen(pen); 42 | painter.drawLine(val, 0, width(), 0); 43 | } 
-------------------------------------------------------------------------------- /src/ui/audio_render.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-21 09:49:18 5 | * @Description: 6 | */ 7 | #ifndef __AUDIO_RENDER_H__ 8 | #define __AUDIO_RENDER_H__ 9 | 10 | // 这里直接使用 Qt 中的 QLabel 进行音量的渲染 11 | 12 | #include 13 | 14 | class AudioRender : public QLabel { 15 | public: 16 | AudioRender(QLabel* parent = nullptr); 17 | void ShowVolume(float volume); 18 | 19 | protected: 20 | virtual void paintEvent(QPaintEvent* event) override; 21 | float _lastShowVal = 0; 22 | }; 23 | #endif -------------------------------------------------------------------------------- /src/ui/audio_widget.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-20 19:18:17 5 | * @Description: 6 | */ 7 | 8 | #include "audio_widget.h" 9 | #include 10 | 11 | AudioWidget::AudioWidget(QWidget* parent) 12 | : QWidget(parent) 13 | { 14 | _CreateUi(); 15 | _CreateConnect(); 16 | _mutebox->setChecked(true); 17 | } 18 | 19 | void AudioWidget::_CreateUi() 20 | { 21 | auto hLayout = new QHBoxLayout; 22 | _nameLabel = new QLabel; 23 | _mutebox = new QCheckBox("静音"); 24 | _render = new AudioRender; 25 | _volumeBox = new QDoubleSpinBox; 26 | _volumeBox->setMinimum(0); 27 | _volumeBox->setValue(1); 28 | hLayout->addWidget(_nameLabel); 29 | hLayout->addWidget(_mutebox); 30 | auto scaleLayout = new QHBoxLayout; 31 | scaleLayout->addWidget(new QLabel("调幅:")); 32 | scaleLayout->addWidget(_volumeBox); 33 | hLayout->addLayout(scaleLayout); 34 | auto vLayout = new QVBoxLayout; 35 | vLayout->addLayout(hLayout); 36 | vLayout->addWidget(_render); 37 | setLayout(vLayout); 38 | } 39 | 40 | void AudioWidget::_CreateConnect() 41 | { 42 | connect(_mutebox, &QCheckBox::stateChanged, [this](int) { 43 | if 
(_mutebox->isChecked()) { 44 | emit SetVolumeScale(0); 45 | _volumeBox->setEnabled(false); 46 | } else { 47 | _volumeBox->setEnabled(true); 48 | emit SetVolumeScale(_volumeBox->value()); 49 | } 50 | }); 51 | 52 | void (QDoubleSpinBox::*valueChanged)(double) = &(QDoubleSpinBox::valueChanged); 53 | 54 | connect(_volumeBox, valueChanged, [this] { 55 | emit SetVolumeScale(_volumeBox->value()); 56 | }); 57 | } -------------------------------------------------------------------------------- /src/ui/audio_widget.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-20 19:18:35 5 | * @Description: 6 | */ 7 | #ifndef __AUDIO_WIDGET_H__ 8 | #define __AUDIO_WIDGET_H__ 9 | 10 | #include "audio_render.h" 11 | #include 12 | #include 13 | #include 14 | #include 15 | #include 16 | 17 | class AudioWidget : public QWidget { 18 | Q_OBJECT 19 | public: 20 | AudioWidget(QWidget* parent = nullptr); 21 | void ShowVolume(float volume) 22 | { 23 | _render->ShowVolume(volume); 24 | _render->update(); 25 | } 26 | void SetName(const std::string& name) 27 | { 28 | _nameLabel->setText(name.c_str()); 29 | } 30 | 31 | double GetVolume() 32 | { 33 | return _mutebox->isChecked() ? 
0 : _volumeBox->value(); 34 | } 35 | 36 | private: 37 | void _CreateUi(); 38 | void _CreateConnect(); 39 | QLabel* _nameLabel = nullptr; 40 | AudioRender* _render = nullptr; 41 | QCheckBox* _mutebox = nullptr; 42 | QDoubleSpinBox* _volumeBox = nullptr; 43 | float _lastShowVal = 0; 44 | 45 | signals: 46 | void SetVolumeScale(float scale); 47 | }; 48 | 49 | #endif -------------------------------------------------------------------------------- /src/ui/av_recorder.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2020-02-03 10:00:08 5 | * @Description: 6 | */ 7 | 8 | #include "av_recorder.h" 9 | #include 10 | #include 11 | #include 12 | 13 | AvRecorder::AvRecorder(QWidget* parent) 14 | : QMainWindow(parent) 15 | { 16 | _settingsParam.audioParam.bitRate = 160'000; 17 | _settingsParam.videoParam.bitRate = 8000'000; 18 | _settingsParam.videoParam.fps = 30; 19 | _settingsParam.videoParam.name = Encoder::GetUsableEncoders().front(); 20 | _settingsParam.outputDir = "."; 21 | _settingsParam.liveUrl = "rtmp://127.0.0.1:1935"; 22 | _settingsParam.liveName = "stream"; 23 | 24 | WgcCapturer::Init(); 25 | _InitUi(); 26 | _InitConnect(); 27 | } 28 | 29 | void AvRecorder::_InitConnect() 30 | { 31 | // 启动 32 | auto timer = new QTimer(this); 33 | connect(timer, &QTimer::timeout, [this, timer] { 34 | _isLocked = true; 35 | _StopPreview(); 36 | _StopCapture(); 37 | _StartCapture(VideoCapturer::WGC); 38 | _StartPreview(); 39 | _isLocked = false; 40 | timer->stop(); 41 | }); 42 | timer->start(100); 43 | 44 | connect(_recordBtn, &QPushButton::released, [this] { 45 | if (!_isRecord) { 46 | auto fileName = _settingsParam.outputDir; 47 | if (fileName.back() != '\\') { 48 | fileName.push_back('\\'); 49 | } 50 | auto format = "mp4"; 51 | fileName += QDateTime::currentDateTime().toString("yyyy-MM-dd-hh-mm-ss").toStdString() + "." 
+ format; 52 | // fileName += std::string("test.") + format; 53 | __CheckNo(_StartStream(fileName, format)); 54 | _liveBtn->setEnabled(false); 55 | _recordBtn->setText("停止录制"); 56 | } else { 57 | _StopStream(); 58 | _liveBtn->setEnabled(true); 59 | _recordBtn->setText("开始录制"); 60 | } 61 | _isRecord = !_isRecord; 62 | }); 63 | connect(_liveBtn, &QPushButton::released, [this] { 64 | if (!_isLive) { 65 | auto fileName = _settingsParam.liveUrl + "/" + _settingsParam.liveName; 66 | bool isRtsp = _settingsParam.liveUrl.find("rtsp") != std::string::npos; 67 | __CheckNo(_StartStream(fileName, isRtsp ? "rtsp" : "flv")); 68 | _recordBtn->setEnabled(false); 69 | _liveBtn->setText("停止直播"); 70 | } else { 71 | _StopStream(); 72 | _recordBtn->setEnabled(true); 73 | _liveBtn->setText("开始直播"); 74 | } 75 | _isLive = !_isLive; 76 | }); 77 | connect(_microphoneWidget, &AudioWidget::SetVolumeScale, [this](float scale) { 78 | _audioRecorder.SetVolumeScale(scale, MICROPHONE_INDEX); 79 | }); 80 | connect(_speakerWidget, &AudioWidget::SetVolumeScale, [this](float scale) { 81 | _audioRecorder.SetVolumeScale(scale, SPEAKER_INDEX); 82 | }); 83 | connect(_updateListBtn, &QPushButton::released, [this] { 84 | _UpdateCaptureList(); 85 | }); 86 | connect(_captureListWidget, &QListWidget::currentTextChanged, [this](const QString& text) { 87 | if (text.isEmpty() || _isLocked) { 88 | return; 89 | } 90 | _isLocked = true; 91 | _StopPreview(); 92 | _StopCapture(); 93 | _StartCapture(VideoCapturer::WGC); 94 | _StartPreview(); 95 | _isLocked = false; 96 | }); 97 | connect(_isDrawCursorBox, &QCheckBox::stateChanged, [this] { 98 | _videoRecorder.SetIsDrawCursor(_isDrawCursorBox->isChecked()); 99 | }); 100 | connect(_captureMethodBox, &QComboBox::currentTextChanged, [this](const QString& text) { 101 | if (_isLocked || text.isEmpty()) { 102 | return; 103 | } 104 | _StopPreview(); 105 | _StopCapture(); 106 | if (text == "WGC") { 107 | _StartCapture(VideoCapturer::WGC); 108 | } else if (text == "DXGI") { 109 | 
_StartCapture(VideoCapturer::DXGI); 110 | } else { 111 | _StartCapture(VideoCapturer::GDI); 112 | } 113 | _StartPreview(); 114 | }); 115 | connect(_settingsBtn, &QPushButton::released, [this] { 116 | auto settingsPage = std::make_unique(&_settingsParam, this); 117 | settingsPage->exec(); 118 | _isLocked = true; 119 | _StopPreview(); 120 | _StopCapture(); 121 | _StartCapture(VideoCapturer::WGC); 122 | _StartPreview(); 123 | _isLocked = false; 124 | }); 125 | 126 | _otherTimer.callOnTimeout([this] { 127 | if (windowState() == Qt::WindowMinimized) { 128 | return; 129 | } 130 | // 音频 131 | auto info = _audioRecorder.GetCaptureInfo(MICROPHONE_INDEX); 132 | _microphoneWidget->ShowVolume(info == nullptr ? 0 : info->volume); 133 | info = _audioRecorder.GetCaptureInfo(SPEAKER_INDEX); 134 | _speakerWidget->ShowVolume(info == nullptr ? 0 : info->volume); 135 | // 状态栏 136 | if (_isRecord || _isLive) { 137 | int interval = _recordTime.secsTo(QTime::currentTime()); 138 | int sec = interval % 60; 139 | interval /= 60; 140 | int minute = interval % 60; 141 | int hour = interval / 60; 142 | _captureTimeLabel->setText( 143 | QString("%1:%2:%3").arg(hour, 2, 10, QChar('0')).arg(minute, 2, 10, QChar('0')).arg(sec, 2, 10, QChar('0'))); 144 | auto lossRate = _videoRecorder.GetLossRate(); 145 | int num = lossRate * 10000; 146 | _videolossRate->setText(QString("丢帧率: %1.%2%").arg(num / 100, 2, 10, QChar('0')).arg(num % 100, 2, 10, QChar('0'))); 147 | } else if (_captureTimeLabel->text() != "00:00:00") { 148 | _captureTimeLabel->setText("00:00:00"); 149 | } 150 | }); 151 | } 152 | 153 | AvRecorder::~AvRecorder() 154 | { 155 | _StopStream(); 156 | _StopPreview(); 157 | _StopCapture(); 158 | WgcCapturer::Uninit(); 159 | } 160 | 161 | void AvRecorder::_StartCapture(VideoCapturer::Method method) 162 | { 163 | if (_isLocked) { 164 | _captureMethodBox->clear(); 165 | _captureMethodBox->addItem("WGC"); 166 | } 167 | 168 | // 判断是要捕获屏幕还是窗口 169 | int idx = _captureListWidget->currentRow(); 170 | if 
(idx < 0) { 171 | idx = 0; 172 | _captureListWidget->setCurrentRow(idx); 173 | } 174 | 175 | int monitorCnt = MonitorFinder::GetList().size(); 176 | if (idx < monitorCnt) { // 捕获屏幕 177 | if (_captureMethodBox->count() < 2) { 178 | _captureMethodBox->addItem("DXGI"); 179 | } 180 | 181 | _videoRecorder.Open(idx, _settingsParam.videoParam, method); 182 | 183 | } else { 184 | if (_captureMethodBox->count() < 2) { 185 | _captureMethodBox->addItem("GDI"); 186 | } 187 | auto hwnd = WindowFinder::GetList()[idx - monitorCnt].hwnd; 188 | 189 | _videoRecorder.Open(hwnd, _settingsParam.videoParam, method); 190 | } 191 | _DealCapture(); 192 | _isDrawCursorBox->setEnabled(true); 193 | _recordBtn->setEnabled(true); 194 | _liveBtn->setEnabled(true); 195 | _videoRecorder.SetIsDrawCursor(_isDrawCursorBox->isChecked()); 196 | _audioRecorder.SetVolumeScale(_microphoneWidget->GetVolume(), MICROPHONE_INDEX); 197 | _audioRecorder.SetVolumeScale(_speakerWidget->GetVolume(), SPEAKER_INDEX); 198 | } 199 | 200 | void AvRecorder::_DealCapture() 201 | { 202 | __CheckNo(_audioRecorder.Open({AudioCapturer::Microphone, AudioCapturer::Speaker}, _settingsParam.audioParam)); 203 | _microphoneWidget->setEnabled(_audioRecorder.GetCaptureInfo(MICROPHONE_INDEX) != nullptr); 204 | _speakerWidget->setEnabled(_audioRecorder.GetCaptureInfo(SPEAKER_INDEX) != nullptr); 205 | _fpsLabel->setText(QString("FPS: %1").arg(_settingsParam.videoParam.fps)); 206 | _videoEncodeLabel->setText(("编码器: " + _settingsParam.videoParam.name).c_str()); 207 | } 208 | 209 | void AvRecorder::_StopCapture() 210 | { 211 | _videoRecorder.Close(); 212 | _audioRecorder.Close(); 213 | } 214 | 215 | void AvRecorder::_StartPreview() 216 | { 217 | __CheckNo(_videoRender.Open(_videoWidget->GetHwnd(), _settingsParam.videoParam.width, _settingsParam.videoParam.height)); 218 | _videoWidget->SetScaleFixSize(_settingsParam.videoParam.width, _settingsParam.videoParam.height); 219 | 220 | // 视频需要做到和帧率一样的渲染速度,QTimer 达不到要求 221 | // 需要自己封装一个计时器 222 | 
_videoRenderTimer.Start(_settingsParam.videoParam.fps, [this] { 223 | if (windowState() == Qt::WindowMinimized) { 224 | return; 225 | } 226 | // 视频 227 | auto frame = _videoRecorder.GetRenderFrame(); 228 | __CheckNo(_videoRender.Render(frame)); 229 | }); 230 | 231 | // 刷新率设置为 25 232 | _otherTimer.start(40); 233 | } 234 | 235 | void AvRecorder::_StopPreview() 236 | { 237 | _videoRenderTimer.Stop(); 238 | _videoRender.Close(); 239 | _otherTimer.stop(); 240 | } 241 | 242 | bool AvRecorder::_StartStream(std::string_view path, std::string_view format) 243 | { 244 | __CheckBool(_avMuxer.Open(path, format)); 245 | __CheckBool(_audioRecorder.LoadMuxer(_avMuxer)); 246 | __CheckBool(_videoRecorder.LoadMuxer(_avMuxer)); 247 | __CheckBool(_avMuxer.WriteHeader()); 248 | __CheckBool(_audioRecorder.StartRecord()); 249 | __CheckBool(_videoRecorder.StartRecord()); 250 | _recordTime = QTime::currentTime(); 251 | _captureStatusLabel->setText("状态: 正在工作"); 252 | _settingsBtn->setEnabled(false); 253 | _captureListWidget->setEnabled(false); 254 | _updateListBtn->setEnabled(false); 255 | _captureMethodBox->setEnabled(false); 256 | return true; 257 | } 258 | 259 | void AvRecorder::_StopStream() 260 | { 261 | _audioRecorder.StopRecord(); 262 | _videoRecorder.StopRecord(); 263 | _avMuxer.Close(); 264 | _captureStatusLabel->setText("状态: 正常"); 265 | _settingsBtn->setEnabled(true); 266 | _captureListWidget->setEnabled(true); 267 | _updateListBtn->setEnabled(true); 268 | _captureMethodBox->setEnabled(true); 269 | } 270 | 271 | void AvRecorder::_UpdateCaptureList() 272 | { 273 | _captureListWidget->clear(); 274 | auto&& monitorList = MonitorFinder::GetList(true); 275 | for (auto&& monitor : monitorList) { 276 | _captureListWidget->addItem("屏幕: " + QString::fromStdWString(monitor.title)); 277 | } 278 | auto&& windowList = WindowFinder::GetList(true); 279 | for (auto&& window : windowList) { 280 | _captureListWidget->addItem("窗口: " + QString::fromStdWString(window.title)); 281 | } 282 | } 283 | 284 
| void AvRecorder::_InitUi() 285 | { 286 | setFont(QFont("Microsoft Yahei")); 287 | resize(800, 600); 288 | _videoWidget = new VideoWidget; 289 | auto hLayout = new QHBoxLayout; 290 | hLayout->addLayout(_InitAudioUi(), 2); 291 | hLayout->addLayout(_InitListUi(), 2); 292 | hLayout->addLayout(_InitOtherUi(), 1); 293 | _InitStatusBarUi(); 294 | auto widget = new QWidget; 295 | auto layout = new QVBoxLayout; 296 | layout->addWidget(_videoWidget, 4); 297 | layout->addLayout(hLayout, 1); 298 | widget->setLayout(layout); 299 | this->setCentralWidget(widget); 300 | _UpdateCaptureList(); 301 | } 302 | 303 | QVBoxLayout* AvRecorder::_InitListUi() 304 | { 305 | auto layout = new QVBoxLayout; 306 | _captureListWidget = new QListWidget; 307 | layout->addWidget(_captureListWidget); 308 | return layout; 309 | } 310 | 311 | QVBoxLayout* AvRecorder::_InitAudioUi() 312 | { 313 | _microphoneWidget = new AudioWidget; 314 | _speakerWidget = new AudioWidget; 315 | _microphoneWidget->SetName("麦克风"); 316 | _speakerWidget->SetName("扬声器"); 317 | auto layout = new QVBoxLayout; 318 | layout->addWidget(_microphoneWidget); 319 | layout->addWidget(_speakerWidget); 320 | return layout; 321 | } 322 | 323 | QVBoxLayout* AvRecorder::_InitOtherUi() 324 | { 325 | _isDrawCursorBox = new QCheckBox("绘制鼠标指针"); 326 | _isDrawCursorBox->setChecked(true); 327 | _isDrawCursorBox->setEnabled(false); 328 | _updateListBtn = new QPushButton("刷新窗口列表"); 329 | _recordBtn = new QPushButton("开始录制"); 330 | _recordBtn->setEnabled(false); 331 | _liveBtn = new QPushButton("开始直播"); 332 | _liveBtn->setEnabled(false); 333 | _settingsBtn = new QPushButton("设置"); 334 | auto layout = new QVBoxLayout; 335 | layout->addWidget(_isDrawCursorBox); 336 | layout->addWidget(_updateListBtn); 337 | layout->addWidget(_recordBtn); 338 | layout->addWidget(_liveBtn); 339 | layout->addWidget(_settingsBtn); 340 | return layout; 341 | } 342 | 343 | void AvRecorder::_InitStatusBarUi() 344 | { 345 | auto layout = new QHBoxLayout; 346 | 
_videoEncodeLabel = new QLabel; 347 | auto hLayout = new QHBoxLayout; 348 | hLayout->addWidget(new QLabel("捕获方式:")); 349 | _captureMethodBox = new QComboBox; 350 | hLayout->addWidget(_captureMethodBox); 351 | _captureStatusLabel = new QLabel("状态: 正常"); 352 | _captureTimeLabel = new QLabel("00:00:00"); 353 | _videolossRate = new QLabel("丢帧率: 00.00%"); 354 | _fpsLabel = new QLabel("FPS: 30"); 355 | auto statusBar = this->statusBar(); 356 | statusBar->layout()->setSpacing(20); 357 | statusBar->layout()->addWidget(_videoEncodeLabel); 358 | auto widget = new QWidget; 359 | widget->setLayout(hLayout); 360 | statusBar->layout()->addWidget(widget); 361 | statusBar->layout()->addWidget(_videolossRate); 362 | statusBar->layout()->addWidget(_captureStatusLabel); 363 | statusBar->layout()->addWidget(_captureTimeLabel); 364 | statusBar->layout()->addWidget(_fpsLabel); 365 | } -------------------------------------------------------------------------------- /src/ui/av_recorder.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-03 10:00:08 5 | * @Description: 6 | */ 7 | #pragma once 8 | 9 | #include "audio_widget.h" 10 | #include "recorder/audio_recorder.h" 11 | #include "recorder/video_recorder.h" 12 | #include "ui/settings_page.h" 13 | #include "video_render.h" 14 | #include "video_widget.h" 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | #include 23 | #include 24 | 25 | class AvRecorder : public QMainWindow { 26 | Q_OBJECT 27 | 28 | public: 29 | AvRecorder(QWidget* parent = nullptr); 30 | ~AvRecorder(); 31 | 32 | private: 33 | AudioRecorder _audioRecorder; 34 | VideoRecorder _videoRecorder; 35 | AvMuxer _avMuxer; 36 | VideoRender _videoRender; 37 | VideoWidget* _videoWidget = nullptr; 38 | AudioWidget* _microphoneWidget = nullptr; 39 | AudioWidget* _speakerWidget = nullptr; 40 | QPushButton* _recordBtn = nullptr; 41 | 
QPushButton* _liveBtn = nullptr; 42 | QPushButton* _settingsBtn = nullptr; 43 | QCheckBox* _isDrawCursorBox = nullptr; 44 | Timer _videoRenderTimer; 45 | QTimer _otherTimer; 46 | QListWidget* _captureListWidget = nullptr; 47 | QPushButton* _updateListBtn = nullptr; 48 | bool _isRecord = false; 49 | bool _isLive = false; 50 | void _InitUi(); 51 | QComboBox* _captureMethodBox = nullptr; 52 | QLabel* _captureStatusLabel = nullptr; 53 | QLabel* _captureTimeLabel = nullptr; 54 | QLabel* _fpsLabel = nullptr; 55 | QLabel* _videoEncodeLabel = nullptr; 56 | QLabel* _videolossRate = nullptr; 57 | SettingsPage::Param _settingsParam; 58 | QVBoxLayout* _InitListUi(); 59 | QVBoxLayout* _InitAudioUi(); 60 | QVBoxLayout* _InitOtherUi(); 61 | QTime _recordTime; 62 | bool _isLocked = false; 63 | void _InitStatusBarUi(); 64 | void _UpdateCaptureList(); 65 | void _StartCapture(VideoCapturer::Method method); 66 | void _StopCapture(); 67 | void _StartPreview(); 68 | void _DealCapture(); 69 | void _StopPreview(); 70 | bool _StartStream(std::string_view path, std::string_view format); 71 | void _StopStream(); 72 | void _InitConnect(); 73 | }; -------------------------------------------------------------------------------- /src/ui/settings_page.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-21 14:29:50 5 | * @Description: 6 | */ 7 | 8 | #include "settings_page.h" 9 | #include "encoder/video_encoder.h" 10 | #include 11 | 12 | SettingsPage::SettingsPage(Param* param, QWidget* parent) 13 | : QDialog(parent) 14 | , _param(param) 15 | { 16 | setFont(QFont("Microsoft Yahei")); 17 | _InitUi(); 18 | _InitConnect(); 19 | } 20 | 21 | void SettingsPage::_InitConnect() 22 | { 23 | connect(_applyBtn, &QPushButton::released, [this] { 24 | _WriteSettings(); 25 | }); 26 | 27 | connect(_cancelBtn, &QPushButton::released, [this] { 28 | this->close(); 29 | }); 30 | 31 | connect(_yesBtn, 
&QPushButton::released, [this] { 32 | _WriteSettings(); 33 | this->close(); 34 | }); 35 | 36 | connect(_selDirBtn, &QPushButton::released, [this] { 37 | QString selectedDir = QFileDialog::getExistingDirectory(this, "选择输出目录", "./", QFileDialog::ShowDirsOnly); 38 | // 若目录路径不为空 39 | if (!selectedDir.isEmpty()) { 40 | // 显示选择的目录路径 41 | _fileDirEdit->setText(selectedDir); 42 | } 43 | }); 44 | } 45 | 46 | void SettingsPage::_WriteSettings() 47 | { 48 | _param->videoParam.bitRate = _videoBitRateBox->value() * 1000; 49 | _param->videoParam.fps = _videoFpsBox->value(); 50 | _param->videoParam.name = _videoEncoderBox->currentText().toStdString(); 51 | _param->audioParam.bitRate = _audioBitRateBox->value() * 1000; 52 | _param->outputDir = _fileDirEdit->text().toStdString(); 53 | _param->liveUrl = _liveUrlEdit->text().toStdString(); 54 | _param->liveName = _liveNameEdit->text().toStdString(); 55 | } 56 | 57 | void SettingsPage::_InitUi() 58 | { 59 | setWindowTitle("Settings"); 60 | auto layout = new QVBoxLayout; 61 | layout->addWidget(_InitVideoUi()); 62 | layout->addWidget(_InitAudioUi()); 63 | layout->addWidget(_InitOutputUi()); 64 | layout->addWidget(_InitLiveUi()); 65 | auto hLayout = new QHBoxLayout; 66 | _applyBtn = new QPushButton("应用"); 67 | _cancelBtn = new QPushButton("取消"); 68 | _yesBtn = new QPushButton("确定"); 69 | hLayout->setAlignment(Qt::AlignRight); 70 | hLayout->addWidget(_applyBtn); 71 | hLayout->addWidget(_cancelBtn); 72 | hLayout->addWidget(_yesBtn); 73 | layout->addLayout(hLayout); 74 | setLayout(layout); 75 | } 76 | 77 | QGroupBox* SettingsPage::_InitVideoUi() 78 | { 79 | auto groupBox = new QGroupBox("视频"); 80 | auto layout = new QVBoxLayout; 81 | _videoBitRateBox = new QSpinBox; 82 | _videoBitRateBox->setMinimum(0); 83 | _videoBitRateBox->setMaximum(INT_MAX); 84 | _videoBitRateBox->setValue(_param->videoParam.bitRate / 1000); 85 | _videoFpsBox = new QSpinBox; 86 | _videoFpsBox->setMinimum(0); 87 | _videoFpsBox->setMaximum(60); 88 | 
_videoFpsBox->setValue(_param->videoParam.fps); 89 | _videoEncoderBox = new QComboBox; 90 | auto&& encoders = Encoder::GetUsableEncoders(); 91 | for (auto&& encoder : encoders) { 92 | _videoEncoderBox->addItem(encoder.c_str()); 93 | } 94 | _videoEncoderBox->setCurrentText(_param->videoParam.name.c_str()); 95 | layout->addLayout(_CreateDescription("比特率(kB):", "越高的比特率越清晰, 但越占用硬件资源", _videoBitRateBox)); 96 | layout->addLayout(_CreateDescription("帧率:", "越高的帧率越流畅, 但越占用硬件资源", _videoFpsBox)); 97 | layout->addLayout(_CreateDescription("编码器:", "libx264 为软件编码, CPU占用高但兼容性强, 其他为硬件编码, 效果与软件编码相反", _videoEncoderBox)); 98 | groupBox->setLayout(layout); 99 | return groupBox; 100 | } 101 | QGroupBox* SettingsPage::_InitAudioUi() 102 | { 103 | auto groupBox = new QGroupBox("音频"); 104 | auto layout = new QVBoxLayout; 105 | _audioBitRateBox = new QSpinBox; 106 | _audioBitRateBox->setMinimum(0); 107 | _audioBitRateBox->setMaximum(INT_MAX); 108 | _audioBitRateBox->setValue(_param->audioParam.bitRate / 1000); 109 | layout->addLayout(_CreateDescription("比特率(kB):", "越高的比特率越清晰, 但越占用硬件资源", _audioBitRateBox)); 110 | groupBox->setLayout(layout); 111 | return groupBox; 112 | } 113 | 114 | QGroupBox* SettingsPage::_InitOutputUi() 115 | { 116 | auto groupBox = new QGroupBox("输出"); 117 | auto layout = new QHBoxLayout; 118 | _fileDirEdit = new QLineEdit(_param->outputDir.c_str()); 119 | _selDirBtn = new QPushButton("选择"); 120 | layout->addWidget(_fileDirEdit); 121 | layout->addWidget(_selDirBtn); 122 | groupBox->setLayout(layout); 123 | return groupBox; 124 | } 125 | 126 | QGroupBox* SettingsPage::_InitLiveUi() 127 | { 128 | auto groupBox = new QGroupBox("直播"); 129 | auto layout = new QVBoxLayout; 130 | _liveUrlEdit = new QLineEdit(_param->liveUrl.c_str()); 131 | _liveNameEdit = new QLineEdit(_param->liveName.c_str()); 132 | auto liveUrlLayout = new QHBoxLayout(); 133 | liveUrlLayout->addWidget(new QLabel("地址:")); 134 | liveUrlLayout->addWidget(_liveUrlEdit); 135 | auto liveNameLayout = new 
QHBoxLayout(); 136 | liveNameLayout->addWidget(new QLabel("名称(密钥):")); 137 | liveNameLayout->addWidget(_liveNameEdit); 138 | layout->addLayout(liveUrlLayout); 139 | layout->addLayout(liveNameLayout); 140 | groupBox->setLayout(layout); 141 | return groupBox; 142 | } 143 | QHBoxLayout* SettingsPage::_CreateDescription(std::string_view text, std::string_view textEx, QWidget* widget) 144 | { 145 | auto layout = new QHBoxLayout; 146 | auto label = new QLabel(text.data()); 147 | label->setToolTip(textEx.data()); 148 | layout->addWidget(label); 149 | layout->addWidget(widget); 150 | return layout; 151 | } -------------------------------------------------------------------------------- /src/ui/settings_page.h: -------------------------------------------------------------------------------- 1 | #ifndef __SETTINGS_H__ 2 | #define __SETTINGS_H__ 3 | 4 | #include "encoder/audio_encoder.h" 5 | #include "encoder/video_encoder.h" 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | 15 | class SettingsPage : public QDialog { 16 | public: 17 | struct Param { 18 | Encoder::Param audioParam; 19 | Encoder::Param videoParam; 20 | std::string outputDir; 21 | std::string liveUrl; 22 | std::string liveName; 23 | }; 24 | SettingsPage(Param* param, QWidget* parent = nullptr); 25 | 26 | private: 27 | void _InitUi(); 28 | void _InitConnect(); 29 | void _WriteSettings(); 30 | QGroupBox* _InitVideoUi(); 31 | QGroupBox* _InitAudioUi(); 32 | QGroupBox* _InitOutputUi(); 33 | QGroupBox* _InitLiveUi(); 34 | Param* _param = nullptr; 35 | QSpinBox* _videoBitRateBox = nullptr; 36 | QSpinBox* _videoFpsBox = nullptr; 37 | QComboBox* _videoEncoderBox = nullptr; 38 | QSpinBox* _audioBitRateBox = nullptr; 39 | QLineEdit* _fileDirEdit = nullptr; 40 | QLineEdit* _liveUrlEdit = nullptr; 41 | QLineEdit* _liveNameEdit = nullptr; 42 | QPushButton* _selDirBtn = nullptr; 43 | QPushButton* _applyBtn = nullptr; 44 | QPushButton* _cancelBtn = nullptr; 45 | 
QPushButton* _yesBtn = nullptr; 46 | 47 | QHBoxLayout* _CreateDescription(std::string_view text, std::string_view textEx, QWidget* widget); 48 | }; 49 | 50 | #endif -------------------------------------------------------------------------------- /src/ui/video_render.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-07 22:34:39 5 | * @Description: 6 | */ 7 | #include "video_render.h" 8 | #include 9 | 10 | template 11 | using MWComPtr = Microsoft::WRL::ComPtr; 12 | 13 | VideoRender::VideoRender() 14 | { 15 | } 16 | VideoRender::~VideoRender() 17 | { 18 | Close(); 19 | } 20 | 21 | bool VideoRender::Open(HWND hwnd, unsigned int width, unsigned int height) 22 | { 23 | Close(); 24 | DXGI_SWAP_CHAIN_DESC sd; 25 | ZeroMemory(&sd, sizeof(sd)); 26 | sd.BufferCount = 1; 27 | sd.BufferDesc.Width = 0; 28 | sd.BufferDesc.Height = 0; 29 | sd.BufferDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM; 30 | sd.BufferDesc.RefreshRate.Numerator = 0; 31 | sd.BufferDesc.RefreshRate.Denominator = 0; 32 | sd.BufferDesc.Scaling = DXGI_MODE_SCALING_UNSPECIFIED; 33 | sd.BufferDesc.ScanlineOrdering = DXGI_MODE_SCANLINE_ORDER_UNSPECIFIED; 34 | sd.BufferUsage = DXGI_USAGE_RENDER_TARGET_OUTPUT; 35 | sd.OutputWindow = hwnd; 36 | sd.SampleDesc.Count = 1; 37 | sd.SampleDesc.Quality = 0; 38 | sd.Windowed = TRUE; 39 | sd.Flags = 0; 40 | sd.BufferCount = 1; 41 | sd.SwapEffect = DXGI_SWAP_EFFECT_DISCARD; 42 | __CheckBool(SUCCEEDED( 43 | D3D11CreateDeviceAndSwapChain( 44 | nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr, 0, nullptr, 0, 45 | D3D11_SDK_VERSION, &sd, &_swapChain, &_device, nullptr, &_context))); 46 | _width = width; 47 | _height = height; 48 | __CheckBool(SUCCEEDED(_swapChain->ResizeBuffers(0, width, height, DXGI_FORMAT_R8G8B8A8_UNORM, 0))); 49 | 50 | MWComPtr pBackBuffer; 51 | __CheckBool(SUCCEEDED(_swapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), &pBackBuffer))); 52 | 53 | // Create a 
render-target view 54 | MWComPtr renderTargetView; 55 | __CheckBool(SUCCEEDED(_device->CreateRenderTargetView(pBackBuffer.Get(), nullptr, &renderTargetView))); 56 | 57 | _context->OMSetRenderTargets(1, &renderTargetView, nullptr); 58 | 59 | return true; 60 | } 61 | 62 | void VideoRender::Close() 63 | { 64 | Free(_swapChain, [this] { _swapChain->Release(); }); 65 | Free(_device, [this] { _device->Release(); }); 66 | Free(_context, [this] { _context->Release(); }); 67 | _swConverter = nullptr; 68 | _hwConverter = nullptr; 69 | } 70 | 71 | bool VideoRender::_Convert(AVFrame* frame, ID3D11Texture2D* texture) 72 | { 73 | if (_lastFmt != frame->format) { 74 | _swConverter = nullptr; 75 | _hwConverter = nullptr; 76 | _lastFmt = frame->format; 77 | } 78 | if (_HardwareConvert(frame, texture)) { 79 | return true; 80 | } 81 | __CheckBool(_SoftwareConvert(frame, texture)); 82 | return true; 83 | } 84 | 85 | bool VideoRender::_SoftwareConvert(AVFrame* frame, ID3D11Texture2D* texture) 86 | { 87 | if (_swConverter == nullptr) { 88 | auto fmt = AVPixelFormat(frame->format); 89 | _swConverter = std::make_unique(fmt, AV_PIX_FMT_RGBA); 90 | _swConverter->SetSize(_width, _height); 91 | } 92 | __CheckBool(_bufferFrame = _swConverter->Trans(frame)); 93 | _context->UpdateSubresource(texture, 0, nullptr, _bufferFrame->data[0], _bufferFrame->linesize[0], 0); 94 | return true; 95 | } 96 | 97 | bool VideoRender::_HardwareConvert(AVFrame* frame, ID3D11Texture2D* texture) 98 | { 99 | D3D11_TEXTURE2D_DESC desc; 100 | texture->GetDesc(&desc); 101 | switch (frame->format) { 102 | case AV_PIX_FMT_BGR0: 103 | desc.Format = DXGI_FORMAT_B8G8R8X8_UNORM; 104 | break; 105 | case AV_PIX_FMT_NV12: 106 | desc.Format = DXGI_FORMAT_NV12; 107 | break; 108 | default: 109 | return false; 110 | } 111 | MWComPtr tmpTexture = nullptr; 112 | __CheckBool(SUCCEEDED(_device->CreateTexture2D(&desc, nullptr, &tmpTexture))); 113 | 114 | desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE; 115 | desc.Usage = 
D3D11_USAGE_STAGING; 116 | desc.ArraySize = 1; 117 | desc.BindFlags = 0; 118 | desc.MiscFlags = 0; 119 | desc.SampleDesc.Count = 1; 120 | desc.SampleDesc.Quality = 0; 121 | desc.MipLevels = 1; 122 | MWComPtr cpuTexture = nullptr; 123 | __CheckBool(SUCCEEDED(_device->CreateTexture2D(&desc, nullptr, &cpuTexture))); 124 | D3D11_MAPPED_SUBRESOURCE resource; 125 | __CheckBool(SUCCEEDED(_context->Map(cpuTexture.Get(), 0, D3D11_MAP_WRITE, 0, &resource))); 126 | if (desc.Format == DXGI_FORMAT_NV12) { 127 | int srcRow = frame->linesize[0]; 128 | int dstRow = resource.RowPitch; 129 | for (int row = 0; row < frame->height; ++row) { // Y 130 | memcpy((uint8_t*)resource.pData + row * dstRow, 131 | frame->data[0] + srcRow * row, frame->width); 132 | } 133 | for (int row = 0; row < frame->height / 2; ++row) { // UV 134 | memcpy((uint8_t*)resource.pData + (row + frame->height) * dstRow, 135 | frame->data[1] + srcRow * row, frame->width); 136 | } 137 | } else { 138 | int srcRow = frame->linesize[0]; 139 | int dstRow = resource.RowPitch; 140 | for (int row = 0; row < frame->height; ++row) { 141 | memcpy((uint8_t*)resource.pData + row * dstRow, 142 | frame->data[0] + srcRow * row, frame->width * 4); 143 | } 144 | } 145 | _context->Unmap(cpuTexture.Get(), 0); 146 | _context->CopyResource(tmpTexture.Get(), cpuTexture.Get()); 147 | 148 | if (_hwConverter == nullptr) { 149 | _hwConverter = std::make_unique(); 150 | D3D11_VIDEO_PROCESSOR_COLOR_SPACE inputColorSpace; 151 | inputColorSpace.Usage = 1; 152 | inputColorSpace.RGB_Range = 0; 153 | inputColorSpace.YCbCr_Matrix = 1; 154 | inputColorSpace.YCbCr_xvYCC = 0; 155 | inputColorSpace.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_0_255; 156 | 157 | D3D11_VIDEO_PROCESSOR_COLOR_SPACE outputColorSpace; 158 | outputColorSpace.Usage = 0; 159 | outputColorSpace.RGB_Range = 0; 160 | outputColorSpace.YCbCr_Matrix = 1; 161 | outputColorSpace.YCbCr_xvYCC = 0; 162 | outputColorSpace.Nominal_Range = D3D11_VIDEO_PROCESSOR_NOMINAL_RANGE_16_235; 
163 | if (FAILED(_hwConverter->Open(_device, _context, outputColorSpace, inputColorSpace))) { 164 | return false; 165 | } 166 | } 167 | if (FAILED(_hwConverter->Convert(tmpTexture.Get(), texture))) { 168 | return false; 169 | } 170 | return true; 171 | } 172 | 173 | bool VideoRender::Render(AVFrame* frame) 174 | { 175 | if (frame == nullptr) { 176 | __CheckBool(SUCCEEDED(_swapChain->Present(0, 0))); 177 | return true; 178 | } 179 | __CheckBool(_device != nullptr && _swapChain != nullptr && _context != nullptr); 180 | MWComPtr pBackBuffer; 181 | __CheckBool(SUCCEEDED(_swapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), &pBackBuffer))); 182 | _Convert(frame, pBackBuffer.Get()); 183 | __CheckBool(SUCCEEDED(_swapChain->Present(0, 0))); 184 | pBackBuffer->Release(); 185 | return true; 186 | } -------------------------------------------------------------------------------- /src/ui/video_render.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-02 21:33:51 5 | * @Description: 6 | */ 7 | 8 | #ifndef __VIDEO_RENDER_H__ 9 | #define __VIDEO_RENDER_H__ 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | 16 | #include "basic/frame.h" 17 | #include "d3d/convert.h" 18 | 19 | class VideoRender { 20 | public: 21 | VideoRender(); 22 | ~VideoRender(); 23 | 24 | public: 25 | bool Open(HWND hwnd, unsigned int width, unsigned int height); 26 | void Close(); 27 | bool Render(AVFrame* frame); 28 | 29 | private: 30 | bool _Convert(AVFrame* frame, ID3D11Texture2D* texture); // 将图片的格式转为 D3D 能渲染的格式 31 | bool _HardwareConvert(AVFrame* frame, ID3D11Texture2D* texture); 32 | bool _SoftwareConvert(AVFrame* frame, ID3D11Texture2D* texture); 33 | IDXGISwapChain* _swapChain = nullptr; 34 | ID3D11Device* _device = nullptr; 35 | ID3D11DeviceContext* _context = nullptr; 36 | std::unique_ptr _swConverter = nullptr; 37 | std::unique_ptr _hwConverter = nullptr; 38 | int _width 
= 0; 39 | int _height = 0; 40 | int _lastFmt; 41 | AVFrame* _bufferFrame = nullptr; 42 | }; 43 | 44 | #endif -------------------------------------------------------------------------------- /src/ui/video_widget.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-03 18:01:32 5 | * @Description: 6 | */ 7 | #include "video_widget.h" 8 | #include 9 | #include 10 | 11 | VideoWidget::VideoWidget(QWidget* parent) 12 | : QWidget(parent) 13 | { 14 | _viewWidget = new QWidget(this); 15 | _viewWidget->setUpdatesEnabled(false); 16 | _ratioWidth = width(); 17 | _ratioHeight = height(); 18 | _ResizeWithScaled(); 19 | } 20 | 21 | HWND VideoWidget::GetHwnd() const 22 | { 23 | return (HWND)_viewWidget->winId(); 24 | } 25 | 26 | void VideoWidget::resizeEvent(QResizeEvent* event) 27 | { 28 | _ResizeWithScaled(); 29 | } 30 | 31 | void VideoWidget::SetScaleFixSize(int width, int height) 32 | { 33 | _ratioWidth = width; 34 | _ratioHeight = height; 35 | _ResizeWithScaled(); 36 | } 37 | 38 | void VideoWidget::_ResizeWithScaled() 39 | { 40 | int viewWidth = this->width(); 41 | int viewHeight = this->height(); 42 | if (_ratioHeight <= 0 || _ratioWidth <= 0 43 | || viewWidth <= 0 || viewHeight <= 0) { 44 | _viewWidget->resize(1, 1); 45 | return; 46 | } 47 | if (viewWidth < float(_ratioWidth) / _ratioHeight * viewHeight) { 48 | // 宽度不足,则缩小高度 49 | viewHeight = _ratioHeight * viewWidth / _ratioWidth; 50 | } else { 51 | // 否则缩小宽度 52 | viewWidth = _ratioWidth * viewHeight / _ratioHeight; 53 | } 54 | 55 | // 窗口居中 56 | // 获取父窗口的中心位置 57 | // 然后将子窗口的中心位置设置为父窗口的中心位置 58 | int centralX = this->width() / 2; 59 | int centralY = this->height() / 2; 60 | int viewX = centralX - viewWidth / 2; 61 | int viewY = centralY - viewHeight / 2; 62 | _viewWidget->resize(viewWidth, viewHeight); 63 | _viewWidget->move(viewX, viewY); 64 | } 
-------------------------------------------------------------------------------- /src/ui/video_widget.h: -------------------------------------------------------------------------------- 1 | /* 2 | * @Coding: utf-8 3 | * @Author: vector-wlc 4 | * @Date: 2023-02-03 15:51:31 5 | * @Description: 6 | */ 7 | #ifndef __VIDEO_LABEL_H__ 8 | #define __VIDEO_LABEL_H__ 9 | #include 10 | 11 | class QPaintEvent; 12 | 13 | class VideoWidget : public QWidget { 14 | Q_OBJECT 15 | public: 16 | VideoWidget(QWidget* parent = nullptr); 17 | HWND GetHwnd() const; 18 | void SetScaleFixSize(int width, int height); 19 | 20 | protected: 21 | virtual void resizeEvent(QResizeEvent* event) override; 22 | void _ResizeWithScaled(); 23 | int _ratioWidth = 0; 24 | int _ratioHeight = 0; 25 | QWidget* _viewWidget = nullptr; 26 | }; 27 | 28 | #endif --------------------------------------------------------------------------------