├── .clang-format
├── .github
│   └── workflows
│       └── msbuild.yml
├── .gitignore
├── .gitmodules
├── CMakeLists.txt
├── LICENSE
├── README.md
├── gs.key
├── img
│   ├── delay.png
│   ├── img.png
│   └── img1.png
├── qml.qrc
├── qml
│   ├── RecordTimer.qml
│   ├── TipsBox.qml
│   └── main.qml
└── src
    ├── QmlNativeAPI.h
    ├── main.cpp
    ├── player
    │   ├── GifEncoder.cpp
    │   ├── GifEncoder.h
    │   ├── JpegEncoder.cpp
    │   ├── JpegEncoder.h
    │   ├── Mp4Encoder.cpp
    │   ├── Mp4Encoder.h
    │   ├── QQuickRealTimePlayer.cpp
    │   ├── QQuickRealTimePlayer.h
    │   ├── RealTimeRenderer.cpp
    │   ├── RealTimeRenderer.h
    │   ├── ffmpegDecode.cpp
    │   ├── ffmpegDecode.h
    │   └── ffmpegInclude.h
    ├── util
    │   ├── base64.cpp
    │   ├── base64.h
    │   ├── mini.cpp
    │   ├── mini.h
    │   ├── util.cpp
    │   └── util.h
    └── wifi
        ├── Rtp.h
        ├── RxFrame.h
        ├── WFBDefine.h
        ├── WFBProcessor.cpp
        ├── WFBProcessor.h
        ├── WFBReceiver.cpp
        ├── WFBReceiver.h
        ├── fec.c
        └── fec.h
/.clang-format:
--------------------------------------------------------------------------------
1 | # This is for clang-format >= 9.0.
2 | #
3 | # clang-format --version
4 | # clang-format version 9.0.1 (Red Hat 9.0.1-2.module+el8.2.0+5494+7b8075cf)
5 | #
6 | # See https://clang.llvm.org/docs/ClangFormatStyleOptions.html for details.
7 | # Some options change between versions.
8 | ---
9 | Language: Cpp
10 | # Based on the WebKit style, https://www.webkit.org/coding/coding-style.html
11 | BasedOnStyle: WebKit
12 | 
13 | # The options below are sorted alphabetically
14 | 
15 | # Do not indent public/protected/private
16 | AccessModifierOffset: -4
17 | # Break consistently when the argument list is too long
18 | AlignAfterOpenBracket: AlwaysBreak
19 | # clang-format >= 13 required, aligns inner columns of things like maps
20 | # AlignArrayOfStructures: Left
21 | # Align escaped newlines at the far right of ColumnLimit
22 | AlignEscapedNewlines: Right
23 | # Do not allow short blocks on a single line, i.e. no one-liners like: if (x) return;
24 | AllowShortBlocksOnASingleLine: false
25 | # Only allow inline functions on a single line
26 | AllowShortFunctionsOnASingleLine: Inline
27 | # Always break after template declarations
28 | AlwaysBreakTemplateDeclarations: Yes
29 | # Do not break before the opening brace
30 | BreakBeforeBraces: Custom
31 | BraceWrapping:
32 |   AfterCaseLabel: false
33 |   AfterClass: false
34 |   # BraceWrappingAfterControlStatementStyle: MultiLine
35 |   AfterEnum: false
36 |   AfterFunction: false
37 |   AfterNamespace: false
38 |   AfterStruct: false
39 |   AfterUnion: false
40 |   AfterExternBlock: false
41 |   BeforeCatch: false
42 |   BeforeElse: false
43 |   BeforeLambdaBody: false
44 |   BeforeWhile: false
45 |   IndentBraces: false
46 |   SplitEmptyFunction: false
47 |   SplitEmptyRecord: false
48 |   SplitEmptyNamespace: false
49 | # Break before `,` in constructor initializer lists so they line up with `:`
50 | BreakConstructorInitializers: BeforeComma
51 | # When an inheritance list is too long, also break before `,`
52 | BreakInheritanceList: BeforeComma
53 | # Column limit 120
54 | ColumnLimit: 120
55 | # No spaces just inside C++11 braced lists; false would add them
56 | Cpp11BracedListStyle: false
57 | # Fix the comment after a namespace to: // namespace_name
58 | FixNamespaceComments: true
59 | # clang-format >= 13 required, keeps lambda bodies at the outer indentation level (the default adds one level)
60 | # LambdaBodyIndentation: OuterScope
61 | # Do not indent inside namespaces
62 | NamespaceIndentation: None
63 | # PPIndentWidth: 2
64 | # Attach */& to the variable, on the right
65 | PointerAlignment: Right
66 | # Add a space between a variable and a C++11 {} initializer
67 | SpaceBeforeCpp11BracedList: true
68 | # Space before `:` in inheritance lists
69 | SpaceBeforeInheritanceColon: true
70 | # No space before (), except after do/for/if/switch/while
71 | SpaceBeforeParens: ControlStatements
72 | # No space inside empty {}
73 | SpaceInEmptyBlock: false
74 | # A tab is 4 columns
75 | TabWidth: 4
76 | # Do not use tabs
77 | UseTab: Never
78 | ---
79 | Language: Java
80 | ---
81 | Language: JavaScript
82 | ...
83 | -------------------------------------------------------------------------------- /.github/workflows/msbuild.yml: -------------------------------------------------------------------------------- 1 | name: MSBuild 2 | 3 | on: 4 | push: 5 | branches: [ "main" ] 6 | pull_request: 7 | branches: [ "main" ] 8 | 9 | env: 10 | SOLUTION_FILE_PATH: . 11 | BUILD_CONFIGURATION: Release 12 | 13 | permissions: 14 | contents: read 15 | 16 | jobs: 17 | build: 18 | runs-on: windows-latest 19 | 20 | steps: 21 | - uses: actions/checkout@v4 22 | 23 | - name: Add MSBuild to PATH 24 | uses: ilammy/msvc-dev-cmd@v1.4.1 25 | 26 | - name: Set up cache for vcpkg/installed 27 | id: cache-vcpkg 28 | uses: actions/cache@v3 29 | with: 30 | path: vcpkg/installed 31 | key: ${{ runner.os }}-vcpkg-${{ hashFiles('vcpkg/installed/**/*') }} 32 | restore-keys: | 33 | ${{ runner.os }}-vcpkg- 34 | 35 | - name: Build 36 | working-directory: ${{env.GITHUB_WORKSPACE}} 37 | run: | 38 | git submodule update --init 39 | if (-Not (Test-Path -Path vcpkg )) { New-Item -ItemType Directory -Path vcpkg } 40 | cd vcpkg 41 | git init 42 | git remote add origin https://github.com/microsoft/vcpkg.git 43 | git fetch origin 44 | git checkout -b master --track origin/master 45 | git checkout b27651341123a59f7187b42ef2bc476284afb310 46 | .\bootstrap-vcpkg.bat 47 | .\vcpkg integrate install 48 | .\vcpkg install libusb libpcap libsodium ffmpeg qt5 sdl2 vcpkg-tool-ninja 49 | cd .. 50 | cmake "-DCMAKE_TOOLCHAIN_FILE=D:/a/fpv4win/fpv4win/vcpkg/scripts/buildsystems/vcpkg.cmake" -S ./ -B "build" 51 | cmake --build build --config Release --target fpv4win 52 | cp gs.key build/Release/ 53 | Invoke-WebRequest -Uri https://github.com/pbatard/libwdi/releases/download/v1.5.0/zadig-2.8.exe -OutFile build/Release/zadig-2.8.exe 54 | Invoke-WebRequest -Uri https://aka.ms/vs/17/release/vc_redist.x64.exe -OutFile build/Release/vcredist_x64.exe 55 | 56 | - name: Upload Artifact 57 | id: upload-artifact 58 | uses: actions/upload-artifact@v4 59 | with: 60 | name: fpv4win 61 | path: build/Release/ 62 | 63 | release: 64 | if: false 65 | needs: build 66 | runs-on: windows-latest 67 | steps: 68 | - name: Download Artifact 69 | uses: actions/download-artifact@v4 70 | with: 71 | name: fpv4win 72 | path: build/Release/ 73 | 74 | - name: Create Release 75 | id: create_release 76 | uses: actions/create-release@v1 77 | env: 78 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 79 | with: 80 | tag_name: ${{ github.ref }} 81 | release_name: Release ${{ github.ref }} 82 | body: | 83 | Release for version ${{ github.ref }} 84 | draft: false 85 | prerelease: false 86 | 87 | - name: Upload Release Asset 88 | id: upload-release-asset 89 | uses: actions/upload-release-asset@v1 90 | env: 91 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 92 | with: 93 | upload_url: ${{ steps.create_release.outputs.upload_url }} 94 | asset_path: ./build/Release/* 95 | asset_name: fpv4win-${{ github.ref }}.zip 96 | asset_content_type: application/zip -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Compiled Object files 2 | **/.DS_Store 3 | *.slo 4 | *.lo 5 | *.o 6 | *.obj 7 | 8 | # Precompiled Headers 9 | *.gch 10 | *.pch 11 | 12 | # Compiled Dynamic libraries 13 | *.so 14 | *.dylib 15 | *.dll 16 | 17 | # Fortran module files 18 | *.mod 19 | *.smod 20 | 21 | # Compiled Static libraries 22 | *.lai 23 | *.la 24 | *.a 25 | *.lib 26 | 27 | # Executables 28 | *.exe 29 | *.out 30 | *.app 31 
|
32 | **/cmake-build-debug
33 | **/CMakeCache.txt
34 | **/cmake_install.cmake
35 | **/install_manifest.txt
36 | **/CMakeFiles/
37 | **/CTestTestfile.cmake
38 | **/Makefile
39 | **/*.cbp
40 | **/CMakeScripts
41 | **/compile_commands.json
42 | 
43 | include/divisible/*
44 | 
45 | 
46 | ## Local
47 | 
48 | .idea/*.xml
49 | 
50 | build/**/*
51 | 
52 | include/*
53 | lib/*
54 | bin/*
55 | test/test_runner
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "3rd/devourer"]
2 | path = 3rd/devourer
3 | url = https://github.com/OpenIPC/devourer.git
4 | [submodule "3rd/rtl8812au-monitor-pcap"]
5 | path = 3rd/rtl8812au-monitor-pcap
6 | url = https://github.com/TalusL/rtl8812au-monitor-pcap.git
7 | 
--------------------------------------------------------------------------------
/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 3.20)
2 | project(fpv4win)
3 | 
4 | set(CMAKE_CXX_STANDARD 20)
5 | set(CMAKE_AUTOMOC ON)
6 | set(CMAKE_AUTORCC ON)
7 | set(CMAKE_AUTOUIC ON)
8 | 
9 | set(CMAKE_INCLUDE_CURRENT_DIR ON)
10 | 
11 | find_package(PkgConfig REQUIRED)
12 | 
13 | find_package(FFmpeg REQUIRED)
14 | find_package(SDL2 REQUIRED)
15 | find_package(unofficial-sodium CONFIG REQUIRED)
16 | pkg_check_modules(LIBUSB REQUIRED IMPORTED_TARGET libusb-1.0)
17 | include_directories(${FFMPEG_INCLUDE_DIRS})
18 | 
19 | find_package(Qt5 COMPONENTS Quick Multimedia REQUIRED)
20 | 
21 | 
22 | if(CMAKE_BUILD_TYPE MATCHES "Debug")
23 |     message("build debug")
24 |     add_definitions(-DDEBUG_MODE)
25 | endif()
26 | 
27 | 
28 | file(GLOB_RECURSE SRC_LIST
29 |     3rd/rtl8812au-monitor-pcap/*.c
30 |     3rd/rtl8812au-monitor-pcap/*.cpp
31 |     src/*.cpp
32 |     src/*.c
33 |     src/*.h
34 | )
35 | list(FILTER SRC_LIST EXCLUDE REGEX ".*/3rd.*/src/main.cpp")
36 | 
37 | include_directories(src 3rd/rtl8812au-monitor-pcap/src 3rd/rtl8812au-monitor-pcap/hal)
38 | add_executable(${PROJECT_NAME} ${SRC_LIST} qml.qrc)
39 | 
40 | target_link_libraries(${PROJECT_NAME}
41 |     Qt5::Quick ${FFMPEG_LIBRARIES}
42 |     ${SDL2_LIBRARIES}
43 |     PkgConfig::LIBUSB
44 |     unofficial-sodium::sodium)
45 | 
46 | if (WIN32 AND NOT DEFINED CMAKE_TOOLCHAIN_FILE)
47 |     set(DEBUG_SUFFIX)
48 |     if (MSVC AND CMAKE_BUILD_TYPE MATCHES "Debug")
49 |         set(DEBUG_SUFFIX "d")
50 |     endif ()
51 |     set(QT_INSTALL_PATH "${CMAKE_PREFIX_PATH}")
52 |     if (NOT EXISTS "${QT_INSTALL_PATH}/bin")
53 |         set(QT_INSTALL_PATH "${QT_INSTALL_PATH}/..")
54 |         if (NOT EXISTS "${QT_INSTALL_PATH}/bin")
55 |             set(QT_INSTALL_PATH "${QT_INSTALL_PATH}/..")
56 |         endif ()
57 |     endif ()
58 |     if (EXISTS "${QT_INSTALL_PATH}/plugins/platforms/qwindows${DEBUG_SUFFIX}.dll")
59 |         add_custom_command(TARGET ${PROJECT_NAME} POST_BUILD
60 |             COMMAND ${CMAKE_COMMAND} -E make_directory
61 |             "$<TARGET_FILE_DIR:${PROJECT_NAME}>/plugins/platforms/")
62 |         add_custom_command(TARGET ${PROJECT_NAME} POST_BUILD
63 |             COMMAND ${CMAKE_COMMAND} -E copy
64 |             "${QT_INSTALL_PATH}/plugins/platforms/qwindows${DEBUG_SUFFIX}.dll"
65 |             "$<TARGET_FILE_DIR:${PROJECT_NAME}>/plugins/platforms/")
66 |     endif ()
67 |     foreach (QT_LIB Core)
68 |         add_custom_command(TARGET ${PROJECT_NAME} POST_BUILD
69 |             COMMAND ${CMAKE_COMMAND} -E copy
70 |             "${QT_INSTALL_PATH}/bin/Qt5${QT_LIB}${DEBUG_SUFFIX}.dll"
71 |             "$<TARGET_FILE_DIR:${PROJECT_NAME}>")
72 |     endforeach (QT_LIB)
73 | endif ()
74 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # WiFi
Broadcast FPV client for Windows platform.
2 | 
3 | fpv4win is an app for Windows that packages multiple components together to decode an H264/H265 video feed broadcast by wfb-ng over the air.
4 | 
5 | 
6 | - [devourer](https://github.com/openipc/devourer): A userspace rtl8812au driver initially created by [buldo](https://github.com/buldo) and converted to C by [josephnef](https://github.com/josephnef).
7 | - [wfb-ng](https://github.com/svpcom/wfb-ng): A library that allows broadcasting the video feed over the air.
8 | 
9 | Only the rtl8812au WiFi adapter is supported.
10 | 
11 | It is recommended to use it with [OpenIPC](https://github.com/OpenIPC) FPV.
12 | 
13 | ![img.png](img/img.png)
14 | 
15 | ### Usage
16 | - 1. Download [Zadig](https://github.com/pbatard/libwdi/releases/download/v1.5.0/zadig-2.8.exe).
17 | - 2. Install the libusb driver for your adapter (you may need to enable [Options] -> [List All Devices] to show your adapter).
18 | 
19 | ![img.png](img/img1.png)
20 | 
21 | - 3. Install [vcredist_x64.exe](https://aka.ms/vs/17/release/vc_redist.x64.exe).
22 | - 4. Select your 8812au adapter.
23 | - 5. Select your WFB key.
24 | - 6. Select your drone channel.
25 | - 7. Enjoy!
26 | 
27 | ### Delay test
28 | 
29 | ![img.png](img/delay.png)
30 | 
31 | ### Todo
32 | - OSD
33 | - Hardware-accelerated decoding
34 | - ~~Record MP4 file~~
35 | - ~~Capture frame to JPG~~
36 | - Stream to RTMP/RTSP/SRT/WHIP server
37 | - Receive multiple video streams using a single adapter
38 | - ONVIF/GB28181/SIP client
39 | 
40 | ### How to build
41 | - Take a look at the
42 |   [GitHub Actions workflow](https://github.com/openipc/fpv4win/blob/main/.github/workflows/msbuild.yml)
43 | 
--------------------------------------------------------------------------------
/gs.key:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenIPC/fpv4win/ade46c872280d036c3c32e609362b014fa5cb6e5/gs.key
--------------------------------------------------------------------------------
/img/delay.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenIPC/fpv4win/ade46c872280d036c3c32e609362b014fa5cb6e5/img/delay.png
--------------------------------------------------------------------------------
/img/img.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenIPC/fpv4win/ade46c872280d036c3c32e609362b014fa5cb6e5/img/img.png
--------------------------------------------------------------------------------
/img/img1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenIPC/fpv4win/ade46c872280d036c3c32e609362b014fa5cb6e5/img/img1.png
--------------------------------------------------------------------------------
/qml.qrc:
--------------------------------------------------------------------------------
1 | <RCC>
2 |     <qresource prefix="/">
3 |         <file>qml/main.qml</file>
4 |         <file>qml/TipsBox.qml</file>
5 |         <file>qml/RecordTimer.qml</file>
6 |     </qresource>
7 | </RCC>
8 | 
--------------------------------------------------------------------------------
/qml/RecordTimer.qml:
--------------------------------------------------------------------------------
1 | import QtQuick 2.12
2 | import QtQuick.Controls 2.5
3 | Rectangle{
4 |     id:recordTimer
5 |     visible: false
6 |     property var startTime: 0
7 |     property var recordLen: 0
8 |     color:"#bb333333"
9 |     width:childrenRect.width
10 |     height:childrenRect.height
11 |     radius: 5
12 |     z:999
13 |     Text {
14 |         width:20
15 |         id:redCyc
16 |         visible: false
17 |         padding: 5
18 |
anchors.verticalCenter: parent.verticalCenter 19 | anchors.right: time.left + 16 20 | text: '' 21 | font.pixelSize: 16 22 | Timer { 23 | interval: 1000; 24 | running: true; 25 | repeat: true; 26 | onTriggered: ()=>{ 27 | redCyc.visible = !redCyc.visible 28 | } 29 | } 30 | } 31 | Text { 32 | width:parent.width-20 33 | id:time 34 | padding: 5 35 | anchors.verticalCenter: parent.verticalCenter 36 | anchors.right: parent.right 37 | text:parseInt(recordTimer.recordLen) + 'S' 38 | font.pixelSize: 12 39 | color: "#ffffff" 40 | } 41 | Timer { 42 | id:timer 43 | interval: 100; 44 | running: false; 45 | repeat: true; 46 | onTriggered: ()=>{ 47 | recordTimer.recordLen = (new Date().getTime() - startTime)/1000; 48 | } 49 | } 50 | property var start :function(){ 51 | recordTimer.visible = true; 52 | recordTimer.startTime = new Date().getTime(); 53 | timer.start(); 54 | } 55 | property var stop :function(){ 56 | recordTimer.visible = false; 57 | timer.stop(); 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /qml/TipsBox.qml: -------------------------------------------------------------------------------- 1 | import QtQuick 2.12 2 | import QtQuick.Controls 2.5 3 | 4 | Rectangle{ 5 | id:tipsBox 6 | visible: false 7 | color:"#bb333333" 8 | width:childrenRect.width 9 | height:childrenRect.height 10 | anchors.verticalCenter: parent.verticalCenter 11 | anchors.horizontalCenter: parent.horizontalCenter 12 | property string tips: '' 13 | property var timeout: 3000 14 | radius: 5 15 | property var showPop : function(msg,time){ 16 | tips = msg; 17 | hideTimer.interval = time?time:timeout 18 | tipsBox.visible = true; 19 | hideTimer.restart(); 20 | } 21 | property var hide : function(){ 22 | tipsBox.visible = false; 23 | tipsBox.tips = "" 24 | hideTimer.stop(); 25 | } 26 | Timer { 27 | id:hideTimer 28 | interval: tipsBox.timeout; 29 | running: false; 30 | repeat: false; 31 | onTriggered: ()=>{ 32 | tipsBox.hide(); 33 | } 34 | } 35 | Text { 36 | padding: 10 37 | anchors.verticalCenter: parent.verticalCenter 38 | anchors.horizontalCenter: parent.horizontalCenter 39 | text: tipsBox.tips 40 | font.pointSize: 16 41 | color: "#ffffff" 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /qml/main.qml: -------------------------------------------------------------------------------- 1 | import QtQuick 2.15 2 | import QtQuick.Controls 2.15 3 | import realTimePlayer 1.0 4 | import Qt.labs.platform 1.1 5 | 6 | 7 | ApplicationWindow { 8 | visible: true 9 | width: 1024 10 | height: 768 11 | id:window 12 | title: qsTr("") 13 | 14 | 15 | 16 | QQuickRealTimePlayer { 17 | x: 0 18 | y: 0 19 | id: player 20 | width: parent.width - 200 21 | height:parent.height 22 | property var playingFile 23 | Component.onCompleted: { 24 | NativeApi.onRtpStream.connect((sdpFile)=>{ 25 | playingFile = sdpFile; 26 | play(sdpFile) 27 | }); 28 | onPlayStopped.connect(()=>{ 29 | stop(); 30 | play(playingFile) 31 | }); 32 | } 33 | TipsBox{ 34 | id:tips 35 | z:999 36 | tips:'' 37 | } 38 | Rectangle { 39 | width: parent.width 40 | height:30 41 | anchors.bottom : parent.bottom 42 | color: Qt.rgba(0,0,0,0.3) 43 | border.color: "#55222222" 44 | border.width: 1 45 | Row{ 46 | height:parent.height 47 | padding:5 48 | spacing:5 49 | Text { 50 | anchors.verticalCenter: parent.verticalCenter 51 | text: "0bps" 52 | font.pixelSize: 12 53 | width:60 54 | horizontalAlignment: Text.Center 55 | color: "#ffffff" 56 | Component.onCompleted: { 57 | 
player.onBitrate.connect((btr)=>{ 58 | if(btr>1000*1000){ 59 | text = Number(btr/1000/1000).toFixed(2) + 'Mbps'; 60 | }else if(btr>1000){ 61 | text = Number(btr/1000).toFixed(2) + 'Kbps'; 62 | }else{ 63 | text = btr+ 'bps'; 64 | } 65 | }); 66 | } 67 | } 68 | 69 | 70 | } 71 | Row{ 72 | anchors.right:parent.right 73 | height:parent.height 74 | padding:5 75 | spacing:5 76 | Rectangle { 77 | height:20 78 | width:30 79 | radius:5 80 | color: "#55222222" 81 | border.color: "#88ffffff" 82 | border.width: 1 83 | Text { 84 | horizontalAlignment: Text.Center 85 | anchors.verticalCenter: parent.verticalCenter 86 | anchors.horizontalCenter: parent.horizontalCenter 87 | text: "JPG" 88 | font.pixelSize: 12 89 | color: "#ffffff" 90 | } 91 | MouseArea { 92 | cursorShape: Qt.PointingHandCursor 93 | anchors.fill: parent 94 | onClicked:{ 95 | let f = player.captureJpeg(); 96 | if(f!==''){ 97 | tips.showPop('Saved '+f,3000); 98 | }else{ 99 | tips.showPop('Capture failed! '+f,3000); 100 | } 101 | } 102 | } 103 | } 104 | Rectangle { 105 | height: 20 106 | width: 50 107 | radius: 5 108 | color: "#55222222" 109 | border.color: "#88ffffff" 110 | border.width: 1 111 | Text { 112 | visible:!recordTimer.started 113 | horizontalAlignment: Text.Center 114 | anchors.verticalCenter: parent.verticalCenter 115 | anchors.horizontalCenter: parent.horizontalCenter 116 | text: "MP4" 117 | font.pixelSize: 12 118 | color: "#ffffff" 119 | } 120 | RecordTimer{ 121 | id:recordTimer 122 | width:parent.width 123 | height: parent.height 124 | property bool started:false 125 | function clickEvent() { 126 | if(!recordTimer.started){ 127 | recordTimer.started = player.startRecord(); 128 | if(recordTimer.started){ 129 | recordTimer.start(); 130 | }else{ 131 | tips.showPop('Record failed! ',3000); 132 | } 133 | }else{ 134 | recordTimer.started = false; 135 | let f = player.stopRecord(); 136 | if(f!==''){ 137 | tips.showPop('Saved '+f,3000); 138 | }else{ 139 | tips.showPop('Record failed! 
',3000); 140 | } 141 | recordTimer.stop(); 142 | } 143 | } 144 | } 145 | MouseArea { 146 | cursorShape: Qt.PointingHandCursor 147 | anchors.fill: parent 148 | onClicked:{ 149 | recordTimer.clickEvent(); 150 | } 151 | } 152 | } 153 | } 154 | } 155 | } 156 | Rectangle { 157 | x: parent.width - 200 158 | y: 0 159 | width: 200 160 | height: parent.height 161 | color: '#cccccc' 162 | 163 | 164 | Column { 165 | padding: 5 166 | anchors.left: parent.left 167 | 168 | Rectangle { 169 | // Size of the background adapts to the text size plus some padding 170 | width: 190 171 | height: selDevText.height + 10 172 | color: "#1c80c9" 173 | 174 | Text { 175 | id: selDevText 176 | x: 5 177 | anchors.verticalCenter: parent.verticalCenter 178 | text: "RTL8812AU VID:PID" 179 | font.pixelSize: 16 180 | color: "#ffffff" 181 | } 182 | } 183 | ComboBox { 184 | id: selectDev 185 | width: 190 186 | model: ListModel { 187 | id: comboBoxModel 188 | Component.onCompleted: { 189 | var dongleList = NativeApi.GetDongleList(); 190 | for (var i = 0; i < dongleList.length; i++) { 191 | comboBoxModel.append({text: dongleList[i]}); 192 | } 193 | selectDev.currentIndex = 0; // Set default selection 194 | } 195 | } 196 | currentIndex: 0 197 | } 198 | Row{ 199 | width: 190 200 | Column { 201 | width:95 202 | Rectangle { 203 | // Size of the background adapts to the text size plus some padding 204 | width: parent.width 205 | height: selChText.height + 10 206 | color: "#1c80c9" 207 | 208 | Text { 209 | width: parent.width 210 | id: selChText 211 | x: 5 212 | anchors.verticalCenter: parent.verticalCenter 213 | text: "Channel" 214 | font.pixelSize: 16 215 | color: "#ffffff" 216 | } 217 | } 218 | ComboBox { 219 | id: selectChannel 220 | width: parent.width 221 | model: [ 222 | '1','2','3','4','5','6','7','8','9','10','11','12','13', 223 | '32','36','40','44','48','52','56','60','64','68','96','100','104','108','112','116','120', 224 | '124','128','132','136','140','144','149','153','157','161','169','173','177' 225 | ] 226 | currentIndex: 39 227 | Component.onCompleted: { 228 | let ch = NativeApi.GetConfig()["config.channel"]; 229 | if(ch&&ch!==''){ 230 | currentIndex = model.indexOf(ch); 231 | } 232 | } 233 | } 234 | } 235 | Column { 236 | width:95 237 | Rectangle { 238 | // Size of the background adapts to the text size plus some padding 239 | width: parent.width 240 | height: selCodecText.height + 10 241 | color: "#1c80c9" 242 | 243 | Text { 244 | width: parent.width 245 | id: selCodecText 246 | x: 5 247 | anchors.verticalCenter: parent.verticalCenter 248 | text: "Codec" 249 | font.pixelSize: 16 250 | color: "#ffffff" 251 | } 252 | } 253 | ComboBox { 254 | id: selectCodec 255 | width: parent.width 256 | model: ['AUTO','H264','H265'] 257 | currentIndex: 0 258 | Component.onCompleted: { 259 | let codec = NativeApi.GetConfig()["config.codec"]; 260 | if (codec&&codec !== '') { 261 | currentIndex = model.indexOf(codec); 262 | } 263 | } 264 | } 265 | } 266 | } 267 | Column { 268 | width:190 269 | Rectangle { 270 | // Size of the background adapts to the text size plus some padding 271 | width: parent.width 272 | height: selBwText.height + 10 273 | color: "#1c80c9" 274 | 275 | Text { 276 | width: parent.width 277 | id: selBwText 278 | x: 5 279 | anchors.verticalCenter: parent.verticalCenter 280 | text: "Channel Width" 281 | font.pixelSize: 16 282 | color: "#ffffff" 283 | } 284 | } 285 | ComboBox { 286 | id: selectBw 287 | width: parent.width 288 | model: [ 289 | '20', 290 | '40', 291 | '80', 292 | '160', 293 | '80_80', 294 | '5', 295 | 
'10', 296 | 'MAX' 297 | ] 298 | currentIndex: 0 299 | Component.onCompleted: { 300 | let chw = NativeApi.GetConfig()["config.channelWidth"]; 301 | if (chw&&chw !== '') { 302 | currentIndex = Number(chw); 303 | } 304 | } 305 | } 306 | } 307 | Rectangle { 308 | // Size of the background adapts to the text size plus some padding 309 | width: 190 310 | height: actionText.height + 10 311 | color: "#1c80c9" 312 | 313 | Text { 314 | id: keyText 315 | x: 5 316 | anchors.verticalCenter: parent.verticalCenter 317 | text: "Key" 318 | font.pixelSize: 16 319 | color: "#ffffff" 320 | } 321 | } 322 | Column { 323 | FileDialog { 324 | id: fileDialog 325 | title: "Select key File" 326 | nameFilters: ["Key Files (*.key)"] 327 | 328 | onAccepted: { 329 | keySelector.text = file; 330 | keySelector.text = keySelector.text.replace('file:///','') 331 | } 332 | } 333 | Button { 334 | width: 190 335 | id:keySelector 336 | text: "gs.key" 337 | onClicked: fileDialog.open() 338 | Component.onCompleted: { 339 | let key = NativeApi.GetConfig()["config.key"]; 340 | if (key && key !== '') { 341 | text = key; 342 | } 343 | } 344 | } 345 | } 346 | Rectangle { 347 | // Size of the background adapts to the text size plus some padding 348 | width: 190 349 | height: actionText.height + 10 350 | color: "#1c80c9" 351 | 352 | Text { 353 | id: actionText 354 | x: 5 355 | anchors.verticalCenter: parent.verticalCenter 356 | text: "Action" 357 | font.pixelSize: 16 358 | color: "#ffffff" 359 | } 360 | } 361 | Column { 362 | padding:5 363 | Rectangle { 364 | // Size of the background adapts to the text size plus some padding 365 | width: 180 366 | height: actionStartText.height + 10 367 | color: "#2fdcf3" 368 | radius: 10 369 | 370 | Text { 371 | id: actionStartText 372 | property bool started : false; 373 | x: 5 374 | anchors.centerIn: parent 375 | text: started?"STOP":"START" 376 | font.pixelSize: 32 377 | color: "#ffffff" 378 | } 379 | MouseArea{ 380 | cursorShape: Qt.PointingHandCursor 381 | anchors.fill: parent 382 | Component.onCompleted: { 383 | NativeApi.onWifiStop.connect(()=>{ 384 | actionStartText.started = false; 385 | player.stop(); 386 | }); 387 | } 388 | onClicked: function(){ 389 | if(!actionStartText.started){ 390 | actionStartText.started = NativeApi.Start( 391 | selectDev.currentText, 392 | Number(selectChannel.currentText), 393 | Number(selectBw.currentIndex), 394 | keySelector.text, 395 | selectCodec.currentText 396 | ); 397 | }else{ 398 | NativeApi.Stop(); 399 | player.stop(); 400 | if(recordTimer.started){ 401 | recordTimer.clickEvent(); 402 | } 403 | } 404 | } 405 | } 406 | } 407 | } 408 | Rectangle { 409 | // Size of the background adapts to the text size plus some padding 410 | width: 190 411 | height: countText.height + 10 412 | color: "#1c80c9" 413 | 414 | Text { 415 | id: countText 416 | x: 5 417 | anchors.verticalCenter: parent.verticalCenter 418 | text: "Packet(RTP/WFB/802.11)" 419 | font.pixelSize: 16 420 | color: "#ffffff" 421 | } 422 | } 423 | Row { 424 | padding:5 425 | width: 190 426 | Text { 427 | id: rtpPktCountText 428 | x: 5 429 | text: ""+NativeApi.rtpPktCount 430 | font.pixelSize: 16 431 | color: "#000000" 432 | } 433 | Text { 434 | x: 5 435 | text: "/" 436 | font.pixelSize: 16 437 | color: "#000000" 438 | } 439 | Text { 440 | id: wfbPktCountText 441 | x: 5 442 | text: ""+NativeApi.wfbFrameCount 443 | font.pixelSize: 16 444 | color: "#000000" 445 | } 446 | Text { 447 | x: 5 448 | text: "/" 449 | font.pixelSize: 16 450 | color: "#000000" 451 | } 452 | Text { 453 | id: airPktCountText 454 | x: 5 
455 | text: ""+NativeApi.wifiFrameCount 456 | font.pixelSize: 16 457 | color: "#000000" 458 | } 459 | } 460 | Rectangle { 461 | id:logTitle 462 | z:2 463 | // Size of the background adapts to the text size plus some padding 464 | width: 190 465 | height: logText.height + 10 466 | color: "#1c80c9" 467 | 468 | Text { 469 | id: logText 470 | x: 5 471 | anchors.verticalCenter: parent.verticalCenter 472 | text: "WiFi Driver Log" 473 | font.pixelSize: 16 474 | color: "#FFFFFF" 475 | } 476 | } 477 | Rectangle { 478 | width:190 479 | height:window.height - 430 480 | color:"#f3f1f1" 481 | clip:true 482 | 483 | Component { 484 | id: contactDelegate 485 | Item { 486 | height:log.height 487 | Row { 488 | padding:2 489 | Text { 490 | id:log 491 | width: 190 492 | wrapMode: Text.Wrap 493 | font.pixelSize: 10 494 | text: '['+level+'] '+msg 495 | color: { 496 | let colors = { 497 | error: "#ff0000", 498 | info: "#0f7340", 499 | warn: "#e8c538", 500 | debug: "#3296de", 501 | } 502 | return colors[level]; 503 | } 504 | } 505 | } 506 | } 507 | } 508 | 509 | ListView { 510 | z:1 511 | anchors.top :logTitle.bottom 512 | anchors.fill: parent 513 | anchors.margins:5 514 | model: ListModel {} 515 | delegate: contactDelegate 516 | Component.onCompleted: { 517 | NativeApi.onLog.connect((level,msg)=>{ 518 | model.append({"level": level, "msg": msg}); 519 | positionViewAtIndex(count - 1, ListView.End) 520 | }); 521 | } 522 | } 523 | } 524 | } 525 | } 526 | } -------------------------------------------------------------------------------- /src/QmlNativeAPI.h: -------------------------------------------------------------------------------- 1 | // 2 | // Created by liangzhuohua on 2022/4/20. 3 | // 4 | 5 | #ifndef CTRLCENTER_QMLNATIVEAPI_H 6 | #define CTRLCENTER_QMLNATIVEAPI_H 7 | #include "wifi/WFBReceiver.h" 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include 15 | #include 16 | 17 | using namespace toolkit; 18 | 19 | #define CONFIG "config." 20 | #define CONFIG_FILE "config.ini" 21 | #define CONFIG_CHANNEL CONFIG "channel" 22 | #define CONFIG_CHANNEL_WIDTH CONFIG "channelWidth" 23 | #define CONFIG_CHANNEL_KEY CONFIG "key" 24 | #define CONFIG_CHANNEL_CODEC CONFIG "codec" 25 | 26 | /** 27 | * C++封装留给qml使用的api 28 | */ 29 | class QmlNativeAPI : public QObject { 30 | Q_OBJECT 31 | Q_PROPERTY(qulonglong wifiFrameCount READ wifiFrameCount NOTIFY onWifiFrameCount) 32 | Q_PROPERTY(qulonglong wfbFrameCount READ wfbFrameCount NOTIFY onWfbFrameCount) 33 | Q_PROPERTY(qulonglong rtpPktCount READ rtpPktCount NOTIFY onRtpPktCount) 34 | public: 35 | static QmlNativeAPI &Instance() { 36 | static QmlNativeAPI api; 37 | return api; 38 | } 39 | explicit QmlNativeAPI(QObject *parent = nullptr) 40 | : QObject(parent) { 41 | // load config 42 | try { 43 | mINI::Instance().parseFile(CONFIG_FILE); 44 | } catch (...) 
{ 45 | } 46 | }; 47 | // Get config 48 | Q_INVOKABLE QJsonObject GetConfig() { 49 | QJsonObject config; 50 | for (const auto &item : mINI::Instance()) { 51 | config[QString(item.first.c_str())] = QString(item.second.c_str()); 52 | } 53 | return config; 54 | } 55 | // get all dongle 56 | Q_INVOKABLE static QList GetDongleList() { 57 | QList l; 58 | for (auto &item : WFBReceiver::Instance().GetDongleList()) { 59 | l.push_back(QString(item.c_str())); 60 | } 61 | return l; 62 | }; 63 | Q_INVOKABLE static bool 64 | Start(const QString &vidPid, int channel, int channelWidth, const QString &keyPath, const QString &codec) { 65 | // save config 66 | mINI::Instance()[CONFIG_CHANNEL] = channel; 67 | mINI::Instance()[CONFIG_CHANNEL_WIDTH] = channelWidth; 68 | mINI::Instance()[CONFIG_CHANNEL_KEY] = keyPath.toStdString(); 69 | mINI::Instance()[CONFIG_CHANNEL_CODEC] = codec.toStdString(); 70 | mINI::Instance().dumpFile(CONFIG_FILE); 71 | // alloc port 72 | QmlNativeAPI::Instance().playerPort = QmlNativeAPI::Instance().GetFreePort(); 73 | QmlNativeAPI::Instance().playerCodec = codec; 74 | return WFBReceiver::Instance().Start(vidPid.toStdString(), channel, channelWidth, keyPath.toStdString()); 75 | } 76 | Q_INVOKABLE static bool Stop() { 77 | std::async([]() { WFBReceiver::Instance().Stop(); }); 78 | return true; 79 | } 80 | Q_INVOKABLE static void BuildSdp(const QString &filePath, const QString &codec, int payloadType, int port) { 81 | QString dirPath = QFileInfo(filePath).absolutePath(); 82 | QDir dir(dirPath); 83 | if (!dir.exists()) { 84 | dir.mkpath(dirPath); 85 | } 86 | std::ofstream sdpFos(filePath.toStdString()); 87 | sdpFos << "v=0\n"; 88 | sdpFos << "o=- 0 0 IN IP4 127.0.0.1\n"; 89 | sdpFos << "s=No Name\n"; 90 | sdpFos << "c=IN IP4 127.0.0.1\n"; 91 | sdpFos << "t=0 0\n"; 92 | sdpFos << "m=video " << port << " RTP/AVP " << payloadType << "\n"; 93 | sdpFos << "a=rtpmap:" << payloadType << " " << codec.toStdString() << "/90000\n"; 94 | sdpFos.flush(); 95 | sdpFos.close(); 96 | // log 97 | QmlNativeAPI::Instance().PutLog( 98 | "debug", 99 | "Build Player SDP: Codec:" + codec.toStdString() + " PT:" + std::to_string(payloadType) 100 | + " Port:" + std::to_string(port)); 101 | } 102 | void PutLog(const std::string &level, const std::string &msg) { 103 | emit onLog(QString(level.c_str()), QString(msg.c_str())); 104 | } 105 | void NotifyWifiStop() { emit onWifiStop(); } 106 | int NotifyRtpStream(int pt, uint16_t ssrc) { 107 | // get free port 108 | const QString sdpFile = "sdp/sdp.sdp"; 109 | BuildSdp(sdpFile, playerCodec, pt, playerPort); 110 | emit onRtpStream(sdpFile); 111 | return QmlNativeAPI::Instance().playerPort; 112 | } 113 | void UpdateCount() { 114 | emit onWifiFrameCount(wifiFrameCount_); 115 | emit onWfbFrameCount(wfbFrameCount_); 116 | emit onRtpPktCount(rtpPktCount_); 117 | } 118 | qulonglong wfbFrameCount() { return wfbFrameCount_; } 119 | qulonglong rtpPktCount() { return rtpPktCount_; } 120 | qulonglong wifiFrameCount() { return wifiFrameCount_; } 121 | Q_INVOKABLE int GetPlayerPort() { return playerPort; } 122 | Q_INVOKABLE QString GetPlayerCodec() const { return playerCodec; } 123 | int GetFreePort() { return 52356; } 124 | qulonglong wfbFrameCount_ = 0; 125 | qulonglong wifiFrameCount_ = 0; 126 | qulonglong rtpPktCount_ = 0; 127 | int playerPort = 0; 128 | QString playerCodec; 129 | signals: 130 | // onlog 131 | void onLog(QString level, QString msg); 132 | void onWifiStop(); 133 | void onWifiFrameCount(qulonglong count); 134 | void onWfbFrameCount(qulonglong count); 135 | void 
onRtpPktCount(qulonglong count); 136 | void onRtpStream(QString sdp); 137 | }; 138 | 139 | #endif // CTRLCENTER_QMLNATIVEAPI_H 140 | -------------------------------------------------------------------------------- /src/main.cpp: -------------------------------------------------------------------------------- 1 | #include "src/QmlNativeAPI.h" 2 | #include 3 | #include 4 | #include 5 | #include 6 | 7 | #pragma comment(lib, "ws2_32.lib") 8 | 9 | #ifdef DEBUG_MODE 10 | #include 11 | #pragma comment(lib, "DbgHelp.lib") 12 | // 创建Dump文件 13 | void CreateDumpFile(LPCWSTR lpstrDumpFilePathName, EXCEPTION_POINTERS *pException) { 14 | HANDLE hDumpFile = CreateFile( 15 | reinterpret_cast(lpstrDumpFilePathName), GENERIC_WRITE, 0, nullptr, CREATE_ALWAYS, 16 | FILE_ATTRIBUTE_NORMAL, nullptr); 17 | // Dump信息 18 | MINIDUMP_EXCEPTION_INFORMATION dumpInfo; 19 | dumpInfo.ExceptionPointers = pException; 20 | dumpInfo.ThreadId = GetCurrentThreadId(); 21 | dumpInfo.ClientPointers = TRUE; 22 | // 写入Dump文件内容 23 | MiniDumpWriteDump( 24 | GetCurrentProcess(), GetCurrentProcessId(), hDumpFile, MiniDumpNormal, &dumpInfo, nullptr, nullptr); 25 | CloseHandle(hDumpFile); 26 | } 27 | // 处理Unhandled Exception的回调函数 28 | LONG ApplicationCrashHandler(EXCEPTION_POINTERS *pException) { 29 | CreateDumpFile(L"dump.dmp", pException); 30 | return EXCEPTION_EXECUTE_HANDLER; 31 | } 32 | 33 | #endif 34 | 35 | int main(int argc, char *argv[]) { 36 | #ifdef DEBUG_MODE 37 | SetUnhandledExceptionFilter((LPTOP_LEVEL_EXCEPTION_FILTER)ApplicationCrashHandler); 38 | #endif 39 | 40 | QGuiApplication app(argc, argv); 41 | 42 | QQmlApplicationEngine engine; 43 | 44 | qmlRegisterType("realTimePlayer", 1, 0, "QQuickRealTimePlayer"); 45 | 46 | auto &qmlNativeApi = QmlNativeAPI::Instance(); 47 | engine.rootContext()->setContextProperty("NativeApi", &qmlNativeApi); 48 | 49 | engine.load(QUrl(QStringLiteral("qrc:/qml/main.qml"))); 50 | 51 | return QGuiApplication::exec(); 52 | } 53 | -------------------------------------------------------------------------------- /src/player/GifEncoder.cpp: -------------------------------------------------------------------------------- 1 | // 2 | // Created by liangzhuohua on 2022/4/22. 
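// GifEncoder: records decoded video frames into an animated GIF.
// open() sets up a "gif" muxer and its encoder, scales the output to 640 pixels wide
// (height follows the source aspect ratio) in AV_PIX_FMT_RGB8, and remembers the target
// frame rate; encodeFrame() converts each incoming AVFrame with sws_scale when needed and
// muxes the encoded packet; close() writes the trailer and closes the output. Frame pacing
// is left to the caller via getFrameRate()/getLastEncodeTime().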
3 | // 4 | 5 | #include "GifEncoder.h" 6 | #include 7 | #include 8 | 9 | bool GifEncoder::open(int width, int height, AVPixelFormat pixelFormat, int frameRate, const string &outputPath) { 10 | // 编码上下文 11 | _formatCtx = shared_ptr(avformat_alloc_context(), &avformat_free_context); 12 | // 设置格式 13 | _formatCtx->oformat = av_guess_format("gif", nullptr, nullptr); 14 | // 创建流 15 | AVStream *pAVStream = avformat_new_stream(_formatCtx.get(), nullptr); 16 | if (pAVStream == nullptr) { 17 | return false; 18 | } 19 | // 查找编码器 20 | const AVCodec *pCodec = avcodec_find_encoder(_formatCtx->oformat->video_codec); 21 | if (!pCodec) { 22 | return false; 23 | } 24 | // 设置编码参数 25 | _codecCtx = shared_ptr( 26 | avcodec_alloc_context3(pCodec), [](AVCodecContext *ctx) { avcodec_free_context(&ctx); }); 27 | _frameRate = frameRate; 28 | _codecCtx->codec_id = _formatCtx->oformat->video_codec; 29 | _codecCtx->codec_type = AVMEDIA_TYPE_VIDEO; 30 | _codecCtx->pix_fmt = AV_PIX_FMT_RGB8; 31 | _codecCtx->width = 640; 32 | _codecCtx->height = (int)(640.0 * height / width); 33 | _codecCtx->time_base = AVRational { 1, frameRate }; 34 | // 根据需要创建颜色空间转换器 35 | if (_codecCtx->pix_fmt != pixelFormat) { 36 | // 颜色转换器 37 | _imgConvertCtx = sws_getCachedContext( 38 | _imgConvertCtx, width, height, pixelFormat, _codecCtx->width, _codecCtx->height, _codecCtx->pix_fmt, 39 | SWS_BICUBIC, nullptr, nullptr, nullptr); 40 | if (!_imgConvertCtx) { 41 | return false; 42 | } 43 | } 44 | // 打开编码器 45 | if (avcodec_open2(_codecCtx.get(), pCodec, nullptr) < 0) { 46 | return false; 47 | } 48 | // 设置编码器参数 49 | avcodec_parameters_from_context(pAVStream->codecpar, _codecCtx.get()); 50 | // 写文件头 51 | if (avformat_write_header(_formatCtx.get(), nullptr) < 0) { 52 | return false; 53 | } 54 | // 打开文件输出 55 | if (avio_open(&_formatCtx->pb, outputPath.c_str(), AVIO_FLAG_READ_WRITE) < 0) { 56 | return false; 57 | } 58 | _opened = true; 59 | return true; 60 | } 61 | 62 | bool GifEncoder::encodeFrame(const shared_ptr &frame) { 63 | if (!_opened) { 64 | return false; 65 | } 66 | // 编码锁 67 | lock_guard lck(_encodeMtx); 68 | // 颜色转换 69 | if (_codecCtx->pix_fmt != frame->format) { 70 | // 分配帧空间 71 | if (!_tmpFrame) { 72 | _tmpFrame = shared_ptr(av_frame_alloc(), [](AVFrame *f) { av_frame_free(&f); }); 73 | if (!_tmpFrame) { 74 | return false; 75 | } 76 | _tmpFrame->width = _codecCtx->width; 77 | _tmpFrame->height = _codecCtx->height; 78 | _tmpFrame->format = _codecCtx->pix_fmt; 79 | int size = av_image_get_buffer_size(_codecCtx->pix_fmt, _codecCtx->width, _codecCtx->height, 1); 80 | _buff.resize(size); 81 | int ret = av_image_fill_arrays( 82 | _tmpFrame->data, _tmpFrame->linesize, _buff.data(), _codecCtx->pix_fmt, _codecCtx->width, 83 | _codecCtx->width, 1); 84 | if (ret < 0) { 85 | return false; 86 | } 87 | } 88 | // 转换为GIF编码需要的颜色和高度 89 | int h = sws_scale( 90 | _imgConvertCtx, frame->data, frame->linesize, 0, frame->height, _tmpFrame->data, _tmpFrame->linesize); 91 | if (h != _codecCtx->height) { 92 | return false; 93 | } 94 | } 95 | // 创建pkt 96 | int size = (_codecCtx->width) * (_codecCtx->height); 97 | // 调整包大小 98 | shared_ptr pkt = shared_ptr(av_packet_alloc(), [](AVPacket *pkt) { av_packet_free(&pkt); }); 99 | av_new_packet(pkt.get(), size); 100 | // 记录帧编码时间 101 | _lastEncodeTime 102 | = std::chrono::duration_cast(std::chrono::system_clock::now().time_since_epoch()) 103 | .count(); 104 | // 发送帧到编码上下文 105 | int ret = avcodec_send_frame(_codecCtx.get(), _tmpFrame.get()); 106 | if (ret < 0) { 107 | return false; 108 | } 109 | // 获取已经编码完成的帧 110 | 
avcodec_receive_packet(_codecCtx.get(), pkt.get()); 111 | // 写文件 112 | av_write_frame(_formatCtx.get(), pkt.get()); 113 | return true; 114 | } 115 | 116 | void GifEncoder::close() { 117 | lock_guard lck(_encodeMtx); 118 | if (!_opened) { 119 | return; 120 | } 121 | if (_formatCtx) { 122 | // 写文件尾 123 | av_write_trailer(_formatCtx.get()); 124 | } 125 | if (_codecCtx) { 126 | // 关闭编码器 127 | avcodec_close(_codecCtx.get()); 128 | } 129 | // 关闭文件 130 | avio_close(_formatCtx->pb); 131 | _opened = false; 132 | } 133 | 134 | GifEncoder::~GifEncoder() { 135 | qWarning() << __FUNCTION__; 136 | close(); 137 | } 138 | 139 | bool GifEncoder::isOpened() { 140 | lock_guard lck(_encodeMtx); 141 | return _opened; 142 | } 143 | -------------------------------------------------------------------------------- /src/player/GifEncoder.h: -------------------------------------------------------------------------------- 1 | // 2 | // Created by liangzhuohua on 2022/4/22. 3 | // 4 | 5 | #ifndef CTRLCENTER_GIFENCODER_H 6 | #define CTRLCENTER_GIFENCODER_H 7 | #include "ffmpegInclude.h" 8 | #include 9 | #include 10 | #include 11 | #include 12 | 13 | using namespace std; 14 | class GifEncoder { 15 | public: 16 | ~GifEncoder(); 17 | // 初始化编码器 18 | bool open(int width, int height, AVPixelFormat pixelFormat, int frameRate, const string &outputPath); 19 | // 编码帧 20 | bool encodeFrame(const shared_ptr &frame); 21 | // 关闭编码器 22 | void close(); 23 | // 帧率 24 | int getFrameRate() const { return _frameRate; } 25 | // 上次编码时间 26 | uint64_t getLastEncodeTime() const { return _lastEncodeTime; } 27 | // 是否已经打开 28 | bool isOpened(); 29 | 30 | protected: 31 | mutex _encodeMtx; 32 | // 编码上下文 33 | shared_ptr _formatCtx; 34 | // 编码上下文 35 | shared_ptr _codecCtx; 36 | // 色彩空间转换 37 | SwsContext *_imgConvertCtx; 38 | // 颜色转换临时frame 39 | shared_ptr _tmpFrame; 40 | vector _buff; 41 | // 最后编码时间 42 | uint64_t _lastEncodeTime = 0; 43 | // 帧率 44 | int _frameRate = 0; 45 | // 是否打开 46 | volatile bool _opened = false; 47 | }; 48 | 49 | #endif // CTRLCENTER_GIFENCODER_H 50 | -------------------------------------------------------------------------------- /src/player/JpegEncoder.cpp: -------------------------------------------------------------------------------- 1 | // 2 | // Created by liangzhuohua on 2022/2/28. 
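// JpegEncoder: saves a single decoded AVFrame as a JPEG file.
// convertToYUV420P() rescales the source frame to AV_PIX_FMT_YUVJ420P with sws_scale when
// the input is not already 4:2:0; encodeJpeg() then feeds the frame to the encoder chosen
// by the "mjpeg" muxer and writes the resulting packet to outFilePath.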
3 | // 4 | 5 | #include "JpegEncoder.h" 6 | 7 | #include 8 | inline bool convertToYUV420P(const shared_ptr &frame, shared_ptr &yuvFrame) { 9 | int width = frame->width; 10 | int height = frame->height; 11 | 12 | // Allocate YUV frame 13 | yuvFrame = shared_ptr(av_frame_alloc(), [](AVFrame *f){ 14 | av_frame_free(&f); 15 | }); 16 | if (!yuvFrame) { 17 | return false; 18 | } 19 | yuvFrame->format = AV_PIX_FMT_YUVJ420P; 20 | yuvFrame->width = width; 21 | yuvFrame->height = height; 22 | 23 | // Allocate buffer for YUV frame 24 | int ret = av_frame_get_buffer(yuvFrame.get(), 32); 25 | if (ret < 0) { 26 | return false; 27 | } 28 | 29 | // Convert RGB to YUV420P 30 | struct SwsContext *sws_ctx = sws_getContext(width, height, static_cast(frame->format), 31 | width, height, AV_PIX_FMT_YUVJ420P, 32 | 0, nullptr, nullptr, nullptr); 33 | if (!sws_ctx) { 34 | return false; 35 | } 36 | 37 | // Perform RGB to YUV conversion 38 | ret = sws_scale(sws_ctx, frame->data, frame->linesize, 0, height, 39 | yuvFrame->data, yuvFrame->linesize); 40 | if (ret <= 0) { 41 | sws_freeContext(sws_ctx); 42 | return false; 43 | } 44 | 45 | // Cleanup 46 | sws_freeContext(sws_ctx); 47 | 48 | return true; 49 | } 50 | 51 | bool JpegEncoder::encodeJpeg(const string &outFilePath, const shared_ptr &frame) { 52 | if (!(frame && frame->height && frame->width && frame->linesize[0])) { 53 | return false; 54 | } 55 | 56 | // 编码上下文 57 | shared_ptr pFormatCtx 58 | = shared_ptr(avformat_alloc_context(), &avformat_free_context); 59 | 60 | // 设置格式 61 | pFormatCtx->oformat = av_guess_format("mjpeg", nullptr, nullptr); 62 | 63 | // 初始化上下文 64 | if (avio_open(&pFormatCtx->pb, outFilePath.c_str(), AVIO_FLAG_READ_WRITE) < 0) { 65 | return false; 66 | } 67 | 68 | // 创建流 69 | AVStream *pAVStream = avformat_new_stream(pFormatCtx.get(), nullptr); 70 | if (pAVStream == nullptr) { 71 | return false; 72 | } 73 | 74 | // 查找编码器 75 | const AVCodec *pCodec = avcodec_find_encoder(pFormatCtx->oformat->video_codec); 76 | if (!pCodec) { 77 | return false; 78 | } 79 | // 设置编码参数 80 | shared_ptr codecCtx = shared_ptr( 81 | avcodec_alloc_context3(pCodec), [](AVCodecContext *ctx) { avcodec_free_context(&ctx); }); 82 | codecCtx->codec_id = pFormatCtx->oformat->video_codec; 83 | codecCtx->codec_type = AVMEDIA_TYPE_VIDEO; 84 | codecCtx->pix_fmt = static_cast(frame->format); 85 | codecCtx->width = frame->width; 86 | codecCtx->height = frame->height; 87 | codecCtx->time_base = AVRational { 1, 25 }; 88 | 89 | // Convert frame to YUV420P if it's not already in that format 90 | shared_ptr yuvFrame; 91 | if ( 92 | frame->format != AV_PIX_FMT_YUVJ420P && 93 | frame->format != AV_PIX_FMT_YUV420P 94 | ) { 95 | if (!convertToYUV420P(frame, yuvFrame)) { 96 | return false; 97 | } 98 | codecCtx->pix_fmt = AV_PIX_FMT_YUVJ420P; 99 | } else { 100 | yuvFrame = frame; // If already YUV420P, use as is 101 | } 102 | 103 | // 打开编码器 104 | if (avcodec_open2(codecCtx.get(), pCodec, nullptr) < 0) { 105 | return false; 106 | } 107 | // 设置编码器参数 108 | avcodec_parameters_from_context(pAVStream->codecpar, codecCtx.get()); 109 | 110 | // 写文件头 111 | avformat_write_header(pFormatCtx.get(), nullptr); 112 | int y_size = (codecCtx->width) * (codecCtx->height); 113 | // 调整包大小 114 | shared_ptr pkt = shared_ptr(av_packet_alloc(), [](AVPacket *pkt) { av_packet_free(&pkt); }); 115 | av_new_packet(pkt.get(), y_size); 116 | 117 | // 发送帧到编码上下文 118 | int ret = avcodec_send_frame(codecCtx.get(), yuvFrame.get()); 119 | if (ret < 0) { 120 | return false; 121 | } 122 | // 获取已经编码完成的帧 123 | 
avcodec_receive_packet(codecCtx.get(), pkt.get()); 124 | // 写文件 125 | av_write_frame(pFormatCtx.get(), pkt.get()); 126 | // 写文件尾 127 | av_write_trailer(pFormatCtx.get()); 128 | // 关闭编码器 129 | avcodec_close(codecCtx.get()); 130 | // 关闭文件 131 | avio_close(pFormatCtx->pb); 132 | return true; 133 | } 134 | -------------------------------------------------------------------------------- /src/player/JpegEncoder.h: -------------------------------------------------------------------------------- 1 | // 2 | // Created by liangzhuohua on 2022/2/28. 3 | // 4 | 5 | #ifndef CTRLCENTER_JPEGENCODER_H 6 | #define CTRLCENTER_JPEGENCODER_H 7 | #include "ffmpegInclude.h" 8 | #include 9 | #include 10 | 11 | using namespace std; 12 | class JpegEncoder { 13 | public: 14 | static bool encodeJpeg(const string &outFilePath, const shared_ptr &frame); 15 | }; 16 | 17 | #endif // CTRLCENTER_JPEGENCODER_H 18 | -------------------------------------------------------------------------------- /src/player/Mp4Encoder.cpp: -------------------------------------------------------------------------------- 1 | // 2 | // Created by liangzhuohua on 2022/3/1. 3 | // 4 | 5 | #include "Mp4Encoder.h" 6 | 7 | Mp4Encoder::Mp4Encoder(const string &saveFilePath) { 8 | // 分配 9 | _formatCtx = shared_ptr(avformat_alloc_context(), &avformat_free_context); 10 | // 设置格式 11 | _formatCtx->oformat = av_guess_format("mov", nullptr, nullptr); 12 | // 文件保存路径 13 | _saveFilePath = saveFilePath; 14 | } 15 | 16 | Mp4Encoder::~Mp4Encoder() { 17 | if (_isOpen) { 18 | stop(); 19 | } 20 | } 21 | 22 | void Mp4Encoder::addTrack(AVStream *stream) { 23 | AVStream *os = avformat_new_stream(_formatCtx.get(), nullptr); 24 | if (!os) { 25 | return; 26 | } 27 | int ret = avcodec_parameters_copy(os->codecpar, stream->codecpar); 28 | if (ret < 0) { 29 | return; 30 | } 31 | os->codecpar->codec_tag = 0; 32 | if (stream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) { 33 | audioIndex = os->index; 34 | _originAudioTimeBase = stream->time_base; 35 | } else if (stream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) { 36 | videoIndex = os->index; 37 | _originVideoTimeBase = stream->time_base; 38 | } 39 | } 40 | 41 | bool Mp4Encoder::start() { 42 | // 初始化上下文 43 | if (avio_open(&_formatCtx->pb, _saveFilePath.c_str(), AVIO_FLAG_READ_WRITE) < 0) { 44 | return false; 45 | } 46 | // 写输出流头信息 47 | AVDictionary *opts = nullptr; 48 | av_dict_set(&opts, "movflags", "frag_keyframe+empty_moov", 0); 49 | int ret = avformat_write_header(_formatCtx.get(), &opts); 50 | if (ret < 0) { 51 | return false; 52 | } 53 | _isOpen = true; 54 | return true; 55 | } 56 | 57 | void Mp4Encoder::writePacket(const shared_ptr &pkt, bool isVideo) { 58 | if (!_isOpen) { 59 | return; 60 | } 61 | #ifdef I_FRAME_FIRST 62 | // 未获取视频关键帧前先忽略音频 63 | if (videoIndex >= 0 && !writtenKeyFrame && !isVideo) { 64 | return; 65 | } 66 | // 跳过非关键帧,使关键帧前置 67 | if (!writtenKeyFrame && pkt->flags & AV_PKT_FLAG_KEY) { 68 | return; 69 | } 70 | writtenKeyFrame = true; 71 | #endif 72 | if (isVideo) { 73 | pkt->stream_index = videoIndex; 74 | av_packet_rescale_ts(pkt.get(), _originVideoTimeBase, _formatCtx->streams[videoIndex]->time_base); 75 | } else { 76 | pkt->stream_index = audioIndex; 77 | av_packet_rescale_ts(pkt.get(), _originAudioTimeBase, _formatCtx->streams[audioIndex]->time_base); 78 | } 79 | pkt->pos = -1; 80 | av_write_frame(_formatCtx.get(), pkt.get()); 81 | } 82 | 83 | void Mp4Encoder::stop() { 84 | _isOpen = false; 85 | // 写文件尾 86 | av_write_trailer(_formatCtx.get()); 87 | // 关闭文件 88 | avio_close(_formatCtx->pb); 89 | } 90 
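// Usage sketch (illustrative, not taken verbatim from this file): this mirrors how
// QQuickRealTimePlayer::startRecord()/stopRecord() further down drive the recorder.
// `decoder` and `pkt` are placeholders for the ffmpeg decoder instance and the packets
// delivered through its _gotPktCallback.
//
//     Mp4Encoder recorder("mp4/recording.mp4");                                    // illustrative path
//     recorder.addTrack(decoder->pFormatCtx->streams[decoder->videoStreamIndex]);  // copy codec parameters
//     if (recorder.start()) {                          // opens the file, writes a fragmented-MP4 header
//         recorder.writePacket(pkt, /*isVideo=*/true); // rescales timestamps and muxes the packet
//         recorder.stop();                             // writes the trailer and closes the file
//     }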
| -------------------------------------------------------------------------------- /src/player/Mp4Encoder.h: -------------------------------------------------------------------------------- 1 | // 2 | // Created by liangzhuohua on 2022/3/1. 3 | // 4 | 5 | #ifndef CTRLCENTER_MP4ENCODER_H 6 | #define CTRLCENTER_MP4ENCODER_H 7 | #include "ffmpegInclude.h" 8 | #include 9 | #include 10 | 11 | using namespace std; 12 | class Mp4Encoder { 13 | public: 14 | explicit Mp4Encoder(const string &saveFilePath); 15 | ~Mp4Encoder(); 16 | // 开启 17 | bool start(); 18 | // 关闭 19 | void stop(); 20 | // 增加轨道 21 | void addTrack(AVStream *stream); 22 | // 写packet 23 | void writePacket(const shared_ptr &pkt, bool isVideo); 24 | // 音视频index 25 | int videoIndex = -1; 26 | int audioIndex = -1; 27 | // 文件保存路径 28 | string _saveFilePath; 29 | 30 | private: 31 | // 是否已经初始化 32 | bool _isOpen = false; 33 | // 编码上下文 34 | shared_ptr _formatCtx; 35 | // 原始视频流时间基 36 | AVRational _originVideoTimeBase {}; 37 | // 原始音频流时间基 38 | AVRational _originAudioTimeBase {}; 39 | // 已经写入关键帧 40 | bool writtenKeyFrame = false; 41 | }; 42 | 43 | #endif // CTRLCENTER_MP4ENCODER_H 44 | -------------------------------------------------------------------------------- /src/player/QQuickRealTimePlayer.cpp: -------------------------------------------------------------------------------- 1 |  2 | #include "QQuickRealTimePlayer.h" 3 | #include "JpegEncoder.h" 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | // GIF默认帧率 12 | #define DEFAULT_GIF_FRAMERATE 10 13 | 14 | //************TaoItemRender************// 15 | class TItemRender : public QQuickFramebufferObject::Renderer { 16 | public: 17 | TItemRender(); 18 | 19 | void render() override; 20 | QOpenGLFramebufferObject *createFramebufferObject(const QSize &size) override; 21 | void synchronize(QQuickFramebufferObject *) override; 22 | 23 | private: 24 | RealTimeRenderer m_render; 25 | QQuickWindow *m_window = nullptr; 26 | }; 27 | 28 | TItemRender::TItemRender() { 29 | m_render.init(); 30 | } 31 | 32 | void TItemRender::render() { 33 | m_render.paint(); 34 | m_window->resetOpenGLState(); 35 | } 36 | 37 | QOpenGLFramebufferObject *TItemRender::createFramebufferObject(const QSize &size) { 38 | QOpenGLFramebufferObjectFormat format; 39 | format.setAttachment(QOpenGLFramebufferObject::CombinedDepthStencil); 40 | format.setSamples(4); 41 | m_render.resize(size.width(), size.height()); 42 | return new QOpenGLFramebufferObject(size, format); 43 | } 44 | 45 | void TItemRender::synchronize(QQuickFramebufferObject *item) { 46 | 47 | auto *pItem = qobject_cast(item); 48 | if (pItem) { 49 | if (!m_window) { 50 | m_window = pItem->window(); 51 | } 52 | if (pItem->infoDirty()) { 53 | m_render.updateTextureInfo(pItem->videoWidth(), pItem->videoHeght(), pItem->videoFormat()); 54 | pItem->makeInfoDirty(false); 55 | } 56 | if (pItem->playStop) { 57 | m_render.clear(); 58 | return; 59 | } 60 | bool got = false; 61 | shared_ptr frame = pItem->getFrame(got); 62 | if (got && frame->linesize[0]) { 63 | m_render.updateTextureData(frame); 64 | } 65 | } 66 | } 67 | 68 | //************QQuickRealTimePlayer************// 69 | QQuickRealTimePlayer::QQuickRealTimePlayer(QQuickItem *parent) 70 | : QQuickFramebufferObject(parent) { 71 | SDL_Init(SDL_INIT_AUDIO); 72 | // 按每秒60帧的帧率更新界面 73 | startTimer(1000 / 100); 74 | } 75 | 76 | void QQuickRealTimePlayer::timerEvent(QTimerEvent *event) { 77 | Q_UNUSED(event); 78 | update(); 79 | } 80 | 81 | shared_ptr QQuickRealTimePlayer::getFrame(bool 
&got) { 82 | got = false; 83 | shared_ptr frame; 84 | { 85 | lock_guard lck(mtx); 86 | // 帧缓冲区已被清空,跳过渲染 87 | if (videoFrameQueue.empty()) { 88 | return {}; 89 | } 90 | // 从帧缓冲区取出帧 91 | frame = videoFrameQueue.front(); 92 | got = true; 93 | // 缓冲区出队被渲染的帧 94 | videoFrameQueue.pop(); 95 | } 96 | // 缓冲,追帧机制 97 | _lastFrame = frame; 98 | return frame; 99 | } 100 | 101 | void QQuickRealTimePlayer::onVideoInfoReady(int width, int height, int format) { 102 | if (m_videoWidth != width) { 103 | m_videoWidth = width; 104 | makeInfoDirty(true); 105 | } 106 | if (m_videoHeight != height) { 107 | m_videoHeight = height; 108 | makeInfoDirty(true); 109 | } 110 | if (m_videoFormat != format) { 111 | m_videoFormat = format; 112 | makeInfoDirty(true); 113 | } 114 | } 115 | 116 | QQuickFramebufferObject::Renderer *QQuickRealTimePlayer::createRenderer() const { 117 | return new TItemRender; 118 | } 119 | 120 | void QQuickRealTimePlayer::play(const QString &playUrl) { 121 | playStop = false; 122 | if (analysisThread.joinable()) { 123 | analysisThread.join(); 124 | } 125 | // 启动分析线程 126 | analysisThread = std::thread([this, playUrl]() { 127 | auto decoder_ = make_shared(); 128 | url = playUrl.toStdString(); 129 | // 打开并分析输入 130 | bool ok = decoder_->OpenInput(url); 131 | if (!ok) { 132 | emit onError("视频加载出错", -2); 133 | return; 134 | } 135 | decoder = decoder_; 136 | // 启动解码线程 137 | decodeThread = std::thread([this]() { 138 | while (!playStop) { 139 | try { 140 | // 循环解码 141 | auto frame = decoder->GetNextFrame(); 142 | if (!frame) { 143 | continue; 144 | } 145 | { 146 | // 解码获取到视频帧,放入帧缓冲队列 147 | lock_guard lck(mtx); 148 | if (videoFrameQueue.size() > 10) { 149 | videoFrameQueue.pop(); 150 | } 151 | videoFrameQueue.push(frame); 152 | } 153 | } catch (const exception &e) { 154 | emit onError(e.what(), -2); 155 | // 出错,停止 156 | break; 157 | } 158 | } 159 | playStop = true; 160 | // 解码已经停止,触发信号 161 | emit onPlayStopped(); 162 | }); 163 | decodeThread.detach(); 164 | 165 | if (!isMuted && decoder->HasAudio()) { 166 | // 开启音频 167 | enableAudio(); 168 | } 169 | // 是否存在音频 170 | emit onHasAudio(decoder->HasAudio()); 171 | 172 | if (decoder->HasVideo()) { 173 | onVideoInfoReady(decoder->GetWidth(), decoder->GetHeight(), decoder->GetVideoFrameFormat()); 174 | } 175 | 176 | // 码率计算回调 177 | decoder->onBitrate = [this](uint64_t bitrate) { emit onBitrate(static_cast(bitrate)); }; 178 | }); 179 | analysisThread.detach(); 180 | } 181 | 182 | void QQuickRealTimePlayer::stop() { 183 | playStop = true; 184 | if (decoder && decoder->pFormatCtx) { 185 | decoder->pFormatCtx->interrupt_callback.callback = [](void *) { return 1; }; 186 | } 187 | if (analysisThread.joinable()) { 188 | analysisThread.join(); 189 | } 190 | if (decodeThread.joinable()) { 191 | decodeThread.join(); 192 | } 193 | while (!videoFrameQueue.empty()) { 194 | lock_guard lck(mtx); 195 | // 清空缓冲 196 | videoFrameQueue.pop(); 197 | } 198 | SDL_CloseAudio(); 199 | if (decoder) { 200 | decoder->CloseInput(); 201 | } 202 | } 203 | 204 | void QQuickRealTimePlayer::setMuted(bool muted) { 205 | if (!decoder->HasAudio()) { 206 | return; 207 | } 208 | if (!muted && decoder) { 209 | decoder->ClearAudioBuff(); 210 | // 初始化声音 211 | if (!enableAudio()) { 212 | return; 213 | } 214 | } else { 215 | disableAudio(); 216 | } 217 | isMuted = muted; 218 | emit onMutedChanged(muted); 219 | } 220 | 221 | QQuickRealTimePlayer::~QQuickRealTimePlayer() { 222 | stop(); 223 | } 224 | 225 | QString QQuickRealTimePlayer::captureJpeg() { 226 | if (!_lastFrame) { 227 | return ""; 228 | } 229 | 
QString dirPath = QFileInfo("jpg/l").absolutePath(); 230 | QDir dir(dirPath); 231 | if (!dir.exists()) { 232 | dir.mkpath(dirPath); 233 | } 234 | stringstream ss; 235 | ss << "jpg/"; 236 | ss << std::chrono::duration_cast(std::chrono::system_clock::now().time_since_epoch()) 237 | .count() 238 | << ".jpg"; 239 | auto ok = JpegEncoder::encodeJpeg(ss.str(), _lastFrame); 240 | // 截图 241 | return ok ? QString(ss.str().c_str()) : ""; 242 | } 243 | 244 | bool QQuickRealTimePlayer::startRecord() { 245 | if (playStop && !_lastFrame) { 246 | return false; 247 | } 248 | QString dirPath = QFileInfo("mp4/l").absolutePath(); 249 | QDir dir(dirPath); 250 | if (!dir.exists()) { 251 | dir.mkpath(dirPath); 252 | } 253 | // 保存路径 254 | stringstream ss; 255 | ss << "mp4/"; 256 | ss << std::chrono::duration_cast(std::chrono::system_clock::now().time_since_epoch()) 257 | .count() 258 | << ".mp4"; 259 | // 创建MP4编码器 260 | _mp4Encoder = make_shared(ss.str()); 261 | 262 | // 添加音频流 263 | if (decoder->HasAudio()) { 264 | _mp4Encoder->addTrack(decoder->pFormatCtx->streams[decoder->audioStreamIndex]); 265 | } 266 | // 添加视频流 267 | if (decoder->HasVideo()) { 268 | _mp4Encoder->addTrack(decoder->pFormatCtx->streams[decoder->videoStreamIndex]); 269 | } 270 | if (!_mp4Encoder->start()) { 271 | return false; 272 | } 273 | // 设置获得NALU回调 274 | decoder->_gotPktCallback = [this](const shared_ptr &packet) { 275 | // 输入编码器 276 | _mp4Encoder->writePacket(packet, packet->stream_index == decoder->videoStreamIndex); 277 | }; 278 | // 启动编码器 279 | return true; 280 | } 281 | 282 | QString QQuickRealTimePlayer::stopRecord() { 283 | if (!_mp4Encoder) { 284 | return {}; 285 | } 286 | _mp4Encoder->stop(); 287 | decoder->_gotPktCallback = nullptr; 288 | return { _mp4Encoder->_saveFilePath.c_str() }; 289 | } 290 | 291 | int QQuickRealTimePlayer::getVideoWidth() { 292 | if (!decoder) { 293 | return 0; 294 | } 295 | return decoder->width; 296 | } 297 | 298 | int QQuickRealTimePlayer::getVideoHeight() { 299 | if (!decoder) { 300 | return 0; 301 | } 302 | return decoder->height; 303 | } 304 | 305 | bool QQuickRealTimePlayer::enableAudio() { 306 | if (!decoder->HasAudio()) { 307 | return false; 308 | } 309 | // 音频参数 310 | SDL_AudioSpec audioSpec; 311 | audioSpec.freq = decoder->GetAudioSampleRate(); 312 | audioSpec.format = AUDIO_S16; 313 | audioSpec.channels = decoder->GetAudioChannelCount(); 314 | audioSpec.silence = 1; 315 | audioSpec.samples = decoder->GetAudioFrameSamples(); 316 | audioSpec.padding = 0; 317 | audioSpec.size = 0; 318 | audioSpec.userdata = this; 319 | // 音频样本读取回调 320 | audioSpec.callback = [](void *Thiz, Uint8 *stream, int len) { 321 | auto *pThis = static_cast(Thiz); 322 | SDL_memset(stream, 0, len); 323 | pThis->decoder->ReadAudioBuff(stream, len); 324 | if (pThis->isMuted) { 325 | SDL_memset(stream, 0, len); 326 | } 327 | }; 328 | // 关闭音频 329 | SDL_CloseAudio(); 330 | // 开启声音 331 | if (SDL_OpenAudio(&audioSpec, nullptr) == 0) { 332 | // 播放声音 333 | SDL_PauseAudio(0); 334 | } else { 335 | emit onError("开启音频出错,如需听声音请插入音频外设\n" + QString(SDL_GetError()), -1); 336 | return false; 337 | } 338 | return true; 339 | } 340 | 341 | void QQuickRealTimePlayer::disableAudio() { 342 | SDL_CloseAudio(); 343 | } 344 | 345 | bool QQuickRealTimePlayer::startGifRecord() { 346 | if (playStop) { 347 | return false; 348 | } 349 | // 保存路径 350 | stringstream ss; 351 | ss << QStandardPaths::writableLocation(QStandardPaths::DesktopLocation).toStdString() << "/"; 352 | ss << std::chrono::duration_cast(std::chrono::system_clock::now().time_since_epoch()) 
353 | .count() 354 | << ".gif"; 355 | if (!(decoder && decoder->HasVideo())) { 356 | return false; 357 | } 358 | // Create the GIF encoder 359 | _gifEncoder = make_shared<GifEncoder>(); 360 | if (!_gifEncoder->open( 361 | decoder->width, decoder->height, decoder->GetVideoFrameFormat(), DEFAULT_GIF_FRAMERATE, ss.str())) { 362 | return false; 363 | } 364 | // Set the callback for decoded frames 365 | decoder->_gotFrameCallback = [this](const shared_ptr<AVFrame> &frame) { 366 | if (!_gifEncoder) { 367 | return; 368 | } 369 | if (!_gifEncoder->isOpened()) { 370 | return; 371 | } 372 | // Skip frames according to the GIF frame rate 373 | uint64_t now 374 | = std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::system_clock::now().time_since_epoch()) 375 | .count(); 376 | if (_gifEncoder->getLastEncodeTime() + 1000 / _gifEncoder->getFrameRate() > now) { 377 | return; 378 | } 379 | // Encode 380 | _gifEncoder->encodeFrame(frame); 381 | }; 382 | 383 | return true; 384 | } 385 | 386 | void QQuickRealTimePlayer::stopGifRecord() { 387 | decoder->_gotFrameCallback = nullptr; 388 | if (!_gifEncoder) { 389 | return; 390 | } 391 | _gifEncoder->close(); 392 | } -------------------------------------------------------------------------------- /src/player/QQuickRealTimePlayer.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | #include "RealTimeRenderer.h" 3 | #include "ffmpegDecode.h" 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | 10 | #include "GifEncoder.h" 11 | #include "Mp4Encoder.h" 12 | 13 | class TItemRender; 14 | 15 | class QQuickRealTimePlayer : public QQuickFramebufferObject { 16 | Q_OBJECT 17 | Q_PROPERTY(bool isMuted READ getMuted WRITE setMuted NOTIFY onMutedChanged) 18 | Q_PROPERTY(bool hasAudio READ hasAudio NOTIFY onHasAudio) 19 | public: 20 | explicit QQuickRealTimePlayer(QQuickItem *parent = nullptr); 21 | ~QQuickRealTimePlayer() override; 22 | void timerEvent(QTimerEvent *event) override; 23 | 24 | shared_ptr<AVFrame> getFrame(bool &got); 25 | 26 | bool infoDirty() const { return m_infoChanged; } 27 | void makeInfoDirty(bool dirty) { m_infoChanged = dirty; } 28 | int videoWidth() const { return m_videoWidth; } 29 | int videoHeght() const { return m_videoHeight; } 30 | int videoFormat() const { return m_videoFormat; } 31 | bool getMuted() const { return isMuted; } 32 | // Play 33 | Q_INVOKABLE void play(const QString &playUrl); 34 | // Stop 35 | Q_INVOKABLE void stop(); 36 | // Mute 37 | Q_INVOKABLE void setMuted(bool muted = false); 38 | // Screenshot 39 | Q_INVOKABLE QString captureJpeg(); 40 | // Recording 41 | Q_INVOKABLE bool startRecord(); 42 | Q_INVOKABLE QString stopRecord(); 43 | // GIF recording 44 | Q_INVOKABLE bool startGifRecord(); 45 | Q_INVOKABLE void stopGifRecord(); 46 | // Get video width 47 | Q_INVOKABLE int getVideoWidth(); 48 | // Get video height 49 | Q_INVOKABLE int getVideoHeight(); 50 | 51 | signals: 52 | // Playback has stopped 53 | void onPlayStopped(); 54 | // Error occurred 55 | void onError(QString msg, int code); 56 | // Recording volume 57 | void gotRecordVol(double vol); 58 | // Bitrate update 59 | void onBitrate(long bitrate); 60 | // Mute state changed 61 | void onMutedChanged(bool muted); 62 | // Whether the stream has audio 63 | void onHasAudio(bool has); 64 | 65 | friend class TItemRender; 66 | 67 | protected: 68 | // FFmpeg decoder 69 | shared_ptr<FFmpegDecoder> decoder; 70 | // Playback URL 71 | string url; 72 | // Playback stopped flag 73 | volatile bool playStop = true; 74 | // Mute flag 75 | volatile bool isMuted = true; 76 | // Frame queue 77 | std::queue<shared_ptr<AVFrame>> videoFrameQueue; 78 | mutex mtx; 79 | // Decode thread 80 | std::thread decodeThread; 81 | // Analysis thread 82 | std::thread analysisThread; 83 | // Last output frame 84 | shared_ptr<AVFrame> _lastFrame; 85 | // Video info is ready 86 | void onVideoInfoReady(int width, 
int height, int format); 87 | // 播放音频 88 | bool enableAudio(); 89 | // 停止播放音频 90 | void disableAudio(); 91 | // MP4录制器 92 | shared_ptr _mp4Encoder; 93 | // GIF录制器 94 | shared_ptr _gifEncoder; 95 | // 是否有声音 96 | bool hasAudio() { 97 | if (!decoder) { 98 | return false; 99 | } 100 | return decoder->HasAudio(); 101 | } 102 | 103 | public: 104 | Renderer *createRenderer() const override; 105 | int m_videoWidth {}; 106 | int m_videoHeight {}; 107 | int m_videoFormat {}; 108 | bool m_infoChanged = false; 109 | }; 110 | -------------------------------------------------------------------------------- /src/player/RealTimeRenderer.cpp: -------------------------------------------------------------------------------- 1 | #include "RealTimeRenderer.h" 2 | #include "libavutil/pixfmt.h" 3 | #include 4 | 5 | #define VSHCODE \ 6 | R"( 7 | attribute highp vec3 qt_Vertex; 8 | attribute highp vec2 texCoord; 9 | 10 | uniform mat4 u_modelMatrix; 11 | uniform mat4 u_viewMatrix; 12 | uniform mat4 u_projectMatrix; 13 | 14 | varying vec2 v_texCoord; 15 | void main(void) 16 | { 17 | gl_Position = u_projectMatrix * u_viewMatrix * u_modelMatrix * vec4(qt_Vertex, 1.0f); 18 | v_texCoord = texCoord; 19 | } 20 | 21 | )" 22 | 23 | #define FSHCPDE \ 24 | R"( 25 | varying vec2 v_texCoord; 26 | uniform sampler2D tex_y; 27 | uniform sampler2D tex_u; 28 | uniform sampler2D tex_v; 29 | uniform int pixFmt; 30 | void main(void) 31 | { 32 | vec3 yuv; 33 | vec3 rgb; 34 | if (pixFmt == 0 || pixFmt == 12) { 35 | //yuv420p 36 | yuv.x = texture2D(tex_y, v_texCoord).r; 37 | yuv.y = texture2D(tex_u, v_texCoord).r - 0.5; 38 | yuv.z = texture2D(tex_v, v_texCoord).r - 0.5; 39 | rgb = mat3( 1.0, 1.0, 1.0, 40 | 0.0, -0.3455, 1.779, 41 | 1.4075, -0.7169, 0.0) * yuv; 42 | } else if( pixFmt == 23 ){ 43 | // NV12 44 | yuv.x = texture2D(tex_y, v_texCoord).r; 45 | yuv.y = texture2D(tex_u, v_texCoord).r - 0.5; 46 | yuv.z = texture2D(tex_u, v_texCoord).a - 0.5; 47 | rgb = mat3( 1.0, 1.0, 1.0, 48 | 0.0, -0.3455, 1.779, 49 | 1.4075, -0.7169, 0.0) * yuv; 50 | 51 | } else { 52 | //YUV444P 53 | yuv.x = texture2D(tex_y, v_texCoord).r; 54 | yuv.y = texture2D(tex_u, v_texCoord).r - 0.5; 55 | yuv.z = texture2D(tex_v, v_texCoord).r - 0.5; 56 | 57 | rgb.x = clamp( yuv.x + 1.402 *yuv.z, 0.0, 1.0); 58 | rgb.y = clamp( yuv.x - 0.34414 * yuv.y - 0.71414 * yuv.z, 0.0, 1.0); 59 | rgb.z = clamp( yuv.x + 1.772 * yuv.y, 0.0, 1.0); 60 | } 61 | gl_FragColor = vec4(rgb, 1.0); 62 | } 63 | 64 | )" 65 | 66 | static void safeDeleteTexture(QOpenGLTexture *texture) { 67 | if (texture) { 68 | if (texture->isBound()) { 69 | texture->release(); 70 | } 71 | if (texture->isCreated()) { 72 | texture->destroy(); 73 | } 74 | delete texture; 75 | texture = nullptr; 76 | } 77 | } 78 | 79 | RealTimeRenderer::RealTimeRenderer() { 80 | qWarning() << __FUNCTION__; 81 | } 82 | 83 | RealTimeRenderer::~RealTimeRenderer() { 84 | qWarning() << __FUNCTION__; 85 | safeDeleteTexture(mTexY); 86 | safeDeleteTexture(mTexU); 87 | safeDeleteTexture(mTexV); 88 | } 89 | 90 | void RealTimeRenderer::init() { 91 | qWarning() << __FUNCTION__; 92 | initializeOpenGLFunctions(); 93 | glDepthMask(GL_TRUE); 94 | glEnable(GL_TEXTURE_2D); 95 | initShader(); 96 | initGeometry(); 97 | } 98 | void RealTimeRenderer::resize(int width, int height) { 99 | 100 | m_itemWidth = width; 101 | m_itemHeight = height; 102 | glViewport(0, 0, width, height); 103 | float bottom = -1.0f; 104 | float top = 1.0f; 105 | float n = 1.0f; 106 | float f = 100.0f; 107 | mProjectionMatrix.setToIdentity(); 108 | mProjectionMatrix.frustum(-1.0, 
1.0, bottom, top, n, f); 109 | } 110 | void RealTimeRenderer::initShader() { 111 | if (!mProgram.addShaderFromSourceCode(QOpenGLShader::Vertex, VSHCODE)) { 112 | qWarning() << " add vertex shader file failed."; 113 | return; 114 | } 115 | if (!mProgram.addShaderFromSourceCode(QOpenGLShader::Fragment, FSHCPDE)) { 116 | qWarning() << " add fragment shader file failed."; 117 | return; 118 | } 119 | mProgram.bindAttributeLocation("qt_Vertex", 0); 120 | mProgram.bindAttributeLocation("texCoord", 1); 121 | mProgram.link(); 122 | mProgram.bind(); 123 | } 124 | void RealTimeRenderer::initTexture() { 125 | // yuv420p 126 | mTexY = new QOpenGLTexture(QOpenGLTexture::Target2D); 127 | mTexY->setFormat(QOpenGLTexture::LuminanceFormat); 128 | // mTexY->setFixedSamplePositions(false); 129 | mTexY->setMinificationFilter(QOpenGLTexture::Nearest); 130 | mTexY->setMagnificationFilter(QOpenGLTexture::Nearest); 131 | mTexY->setWrapMode(QOpenGLTexture::ClampToEdge); 132 | 133 | mTexU = new QOpenGLTexture(QOpenGLTexture::Target2D); 134 | mTexU->setFormat(mPixFmt == AV_PIX_FMT_NV12?QOpenGLTexture::LuminanceAlphaFormat:QOpenGLTexture::LuminanceFormat); 135 | // mTexU->setFixedSamplePositions(false); 136 | mTexU->setMinificationFilter(QOpenGLTexture::Nearest); 137 | mTexU->setMagnificationFilter(QOpenGLTexture::Nearest); 138 | mTexU->setWrapMode(QOpenGLTexture::ClampToEdge); 139 | 140 | mTexV = new QOpenGLTexture(QOpenGLTexture::Target2D); 141 | mTexV->setFormat(QOpenGLTexture::LuminanceFormat); 142 | // mTexV->setFixedSamplePositions(false); 143 | mTexV->setMinificationFilter(QOpenGLTexture::Nearest); 144 | mTexV->setMagnificationFilter(QOpenGLTexture::Nearest); 145 | mTexV->setWrapMode(QOpenGLTexture::ClampToEdge); 146 | } 147 | 148 | void RealTimeRenderer::initGeometry() { 149 | mVertices << QVector3D(-1, 1, 0.0f) << QVector3D(1, 1, 0.0f) << QVector3D(1, -1, 0.0f) << QVector3D(-1, -1, 0.0f); 150 | mTexcoords << QVector2D(0, 1) << QVector2D(1, 1) << QVector2D(1, 0) << QVector2D(0, 0); 151 | 152 | mViewMatrix.setToIdentity(); 153 | mViewMatrix.lookAt(QVector3D(0.0f, 0.0f, 1.001f), QVector3D(0.0f, 0.0f, -5.0f), QVector3D(0.0f, 1.0f, 0.0f)); 154 | mModelMatrix.setToIdentity(); 155 | } 156 | void RealTimeRenderer::updateTextureInfo(int width, int height, int format) { 157 | mPixFmt = format; 158 | if(!inited) { 159 | inited = true; 160 | initTexture(); 161 | } 162 | if (format == AV_PIX_FMT_YUV420P || format == AV_PIX_FMT_YUVJ420P) { 163 | // yuv420p 164 | mTexY->setSize(width, height); 165 | mTexY->allocateStorage(QOpenGLTexture::Luminance, QOpenGLTexture::UInt8); 166 | 167 | mTexU->setSize(width / 2, height / 2); 168 | mTexU->allocateStorage(QOpenGLTexture::Luminance, QOpenGLTexture::UInt8); 169 | 170 | mTexV->setSize(width / 2, height / 2); 171 | mTexV->allocateStorage(QOpenGLTexture::Luminance, QOpenGLTexture::UInt8); 172 | } else if (format == AV_PIX_FMT_NV12) { 173 | mTexY->setSize(width, height); 174 | mTexY->allocateStorage(QOpenGLTexture::Luminance, QOpenGLTexture::UInt8); 175 | 176 | mTexU->setSize(width / 2, height / 2); 177 | mTexU->allocateStorage(QOpenGLTexture::LuminanceAlpha, QOpenGLTexture::UInt8); 178 | 179 | // NV12 not use for v 180 | mTexV->setSize(2, 2); 181 | mTexV->allocateStorage(QOpenGLTexture::Luminance, QOpenGLTexture::UInt8); 182 | } else { 183 | // 先按yuv444p处理 184 | mTexY->setSize(width, height); 185 | mTexY->allocateStorage(QOpenGLTexture::Luminance, QOpenGLTexture::UInt8); 186 | 187 | mTexU->setSize(width, height); 188 | mTexU->allocateStorage(QOpenGLTexture::Luminance, 
QOpenGLTexture::UInt8); 189 | 190 | mTexV->setSize(width, height); 191 | mTexV->allocateStorage(QOpenGLTexture::Luminance, QOpenGLTexture::UInt8); 192 | } 193 | mTextureAlloced = true; 194 | } 195 | 196 | void RealTimeRenderer::updateTextureData(const std::shared_ptr &data) { 197 | double frameWidth = m_itemWidth; 198 | double frameHeight = m_itemHeight; 199 | if (m_itemWidth * (1.0 * data->height / data->width) < m_itemHeight) { 200 | frameHeight = frameWidth * (1.0 * data->height / data->width); 201 | } else { 202 | frameWidth = frameHeight * (1.0 * data->width / data->height); 203 | } 204 | double x = (m_itemWidth - frameWidth) / 2; 205 | double y = (m_itemHeight - frameHeight) / 2; 206 | // GL顶点坐标转换 207 | auto x1 = (float)(-1 + 2.0 / m_itemWidth * x); 208 | auto y1 = (float)(1 - 2.0 / m_itemHeight * y); 209 | auto x2 = (float)(2.0 / m_itemWidth * frameWidth + x1); 210 | auto y2 = (float)(y1 - 2.0 / m_itemHeight * frameHeight); 211 | 212 | mVertices.clear(); 213 | mVertices << QVector3D(x1, y1, 0.0f) << QVector3D(x2, y1, 0.0f) << QVector3D(x2, y2, 0.0f) 214 | << QVector3D(x1, y2, 0.0f); 215 | 216 | QOpenGLPixelTransferOptions options; 217 | if (data->linesize[0]) { 218 | options.setRowLength(data->linesize[0]); 219 | options.setImageHeight(data->height); 220 | mTexY->setData(QOpenGLTexture::Luminance, QOpenGLTexture::UInt8, data->data[0], &options); 221 | } 222 | if (data->linesize[1]) { 223 | if (data->format == AV_PIX_FMT_NV12) { 224 | options.setRowLength(data->linesize[1] / 2); 225 | options.setImageHeight(data->height / 2); 226 | mTexU->setData(QOpenGLTexture::LuminanceAlpha, QOpenGLTexture::UInt8, data->data[1], &options); 227 | } else { 228 | options.setRowLength(data->linesize[1]); 229 | options.setImageHeight(data->height); 230 | mTexU->setData(QOpenGLTexture::Luminance, QOpenGLTexture::UInt8, data->data[1], &options); 231 | } 232 | } 233 | if (data->linesize[2]) { 234 | options.setRowLength(data->linesize[2]); 235 | options.setImageHeight(data->height); 236 | mTexV->setData(QOpenGLTexture::Luminance, QOpenGLTexture::UInt8, data->data[2], &options); 237 | } 238 | } 239 | void RealTimeRenderer::paint() { 240 | glDepthMask(true); 241 | glClearColor(0.0f, 0.0f, 0.0f, 1.0f); 242 | glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); 243 | if (!mTextureAlloced) { 244 | return; 245 | } 246 | if (mNeedClear) { 247 | mNeedClear = false; 248 | return; 249 | } 250 | mProgram.bind(); 251 | 252 | mModelMatHandle = mProgram.uniformLocation("u_modelMatrix"); 253 | mViewMatHandle = mProgram.uniformLocation("u_viewMatrix"); 254 | mProjectMatHandle = mProgram.uniformLocation("u_projectMatrix"); 255 | mVerticesHandle = mProgram.attributeLocation("qt_Vertex"); 256 | mTexCoordHandle = mProgram.attributeLocation("texCoord"); 257 | // 顶点 258 | mProgram.enableAttributeArray(mVerticesHandle); 259 | mProgram.setAttributeArray(mVerticesHandle, mVertices.constData()); 260 | 261 | // 纹理坐标 262 | mProgram.enableAttributeArray(mTexCoordHandle); 263 | mProgram.setAttributeArray(mTexCoordHandle, mTexcoords.constData()); 264 | 265 | // MVP矩阵 266 | mProgram.setUniformValue(mModelMatHandle, mModelMatrix); 267 | mProgram.setUniformValue(mViewMatHandle, mViewMatrix); 268 | mProgram.setUniformValue(mProjectMatHandle, mProjectionMatrix); 269 | 270 | // pixFmt 271 | mProgram.setUniformValue("pixFmt", mPixFmt); 272 | 273 | // 纹理 274 | // Y 275 | glActiveTexture(GL_TEXTURE0); 276 | mTexY->bind(); 277 | 278 | // U 279 | glActiveTexture(GL_TEXTURE1); 280 | mTexU->bind(); 281 | 282 | // V 283 | glActiveTexture(GL_TEXTURE2); 
284 | mTexV->bind(); 285 | 286 | mProgram.setUniformValue("tex_y", 0); 287 | mProgram.setUniformValue("tex_u", 1); 288 | mProgram.setUniformValue("tex_v", 2); 289 | 290 | glDrawArrays(GL_TRIANGLE_FAN, 0, mVertices.size()); 291 | 292 | mProgram.disableAttributeArray(mVerticesHandle); 293 | mProgram.disableAttributeArray(mTexCoordHandle); 294 | mProgram.release(); 295 | } 296 | 297 | void RealTimeRenderer::clear() { 298 | mNeedClear = true; 299 | } 300 | -------------------------------------------------------------------------------- /src/player/RealTimeRenderer.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include "libavutil/frame.h" 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | 12 | class YUVData { 13 | public: 14 | QByteArray Y; 15 | QByteArray U; 16 | QByteArray V; 17 | int yLineSize; 18 | int uLineSize; 19 | int vLineSize; 20 | int height; 21 | }; 22 | 23 | class TItemRender; 24 | class RealTimeRenderer : public QOpenGLFunctions { 25 | public: 26 | RealTimeRenderer(); 27 | ~RealTimeRenderer(); 28 | void init(); 29 | void paint(); 30 | void resize(int width, int height); 31 | void updateTextureInfo(int width, int height, int format); 32 | void updateTextureData(const std::shared_ptr &data); 33 | void clear(); 34 | 35 | friend class TItemRender; 36 | 37 | protected: 38 | void initTexture(); 39 | void initShader(); 40 | void initGeometry(); 41 | 42 | private: 43 | QOpenGLShaderProgram mProgram; 44 | QOpenGLTexture *mTexY = nullptr; 45 | QOpenGLTexture *mTexU = nullptr; 46 | QOpenGLTexture *mTexV = nullptr; 47 | QVector mVertices; 48 | QVector mTexcoords; 49 | int mModelMatHandle {}, mViewMatHandle {}, mProjectMatHandle {}; 50 | int mVerticesHandle {}; 51 | int mTexCoordHandle {}; 52 | 53 | QMatrix4x4 mModelMatrix; 54 | QMatrix4x4 mViewMatrix; 55 | QMatrix4x4 mProjectionMatrix; 56 | GLint mPixFmt = 0; 57 | bool mTextureAlloced = false; 58 | 59 | int m_itemWidth = 0; 60 | int m_itemHeight = 0; 61 | 62 | bool mNeedClear = false; 63 | 64 | volatile bool inited = false; 65 | }; 66 | -------------------------------------------------------------------------------- /src/player/ffmpegDecode.cpp: -------------------------------------------------------------------------------- 1 | #include "ffmpegDecode.h" 2 | #include 3 | #include 4 | #include 5 | #include 6 | 7 | #define MAX_AUDIO_PACKET (2 * 1024 * 1024) 8 | 9 | bool FFmpegDecoder::OpenInput(string &inputFile) { 10 | CloseInput(); 11 | 12 | if (!isHwDecoderEnable) { 13 | hwDecoderType = av_hwdevice_find_type_by_name("d3d11va"); 14 | if (hwDecoderType != AV_HWDEVICE_TYPE_NONE) { 15 | isHwDecoderEnable = true; 16 | } 17 | } 18 | 19 | AVDictionary *param = nullptr; 20 | 21 | av_dict_set(¶m, "preset", "ultrafast", 0); 22 | av_dict_set(¶m, "tune", "zerolatency", 0); 23 | av_dict_set(¶m, "buffer_size", "425984", 0); 24 | av_dict_set(¶m, "rtsp_transport", "tcp", 0); 25 | av_dict_set(¶m, "protocol_whitelist", "file,udp,tcp,rtp,rtmp,rtsp,http", 0); 26 | 27 | // 打开输入 28 | if (avformat_open_input(&pFormatCtx, inputFile.c_str(), nullptr, ¶m) != 0) { 29 | CloseInput(); 30 | return false; 31 | } 32 | // 超时机制 33 | static const int timeout = 10; 34 | auto startTime = std::make_shared(); 35 | *startTime = QDateTime::currentSecsSinceEpoch(); 36 | pFormatCtx->interrupt_callback.callback = [](void *ctx) -> int { 37 | uint64_t now = QDateTime::currentSecsSinceEpoch(); 38 | return now - *(uint64_t *)ctx > timeout; 39 | }; 40 | 
pFormatCtx->interrupt_callback.opaque = startTime.get(); 41 | 42 | if (avformat_find_stream_info(pFormatCtx, nullptr) < 0) { 43 | CloseInput(); 44 | return false; 45 | } 46 | 47 | // Analysis timed out, bail out; the format is probably invalid 48 | if (QDateTime::currentSecsSinceEpoch() - *startTime > timeout) { 49 | CloseInput(); 50 | return false; 51 | } 52 | pFormatCtx->interrupt_callback.callback = nullptr; 53 | pFormatCtx->interrupt_callback.opaque = nullptr; 54 | 55 | // Open video/audio input 56 | hasVideoStream = OpenVideo(); 57 | hasAudioStream = OpenAudio(); 58 | 59 | isOpen = true; 60 | 61 | // Convert time bases 62 | if (videoStreamIndex != -1) { 63 | videoFramePerSecond = av_q2d(pFormatCtx->streams[videoStreamIndex]->r_frame_rate); 64 | videoBaseTime = av_q2d(pFormatCtx->streams[videoStreamIndex]->time_base); 65 | } 66 | if (audioStreamIndex != -1) { 67 | audioBaseTime = av_q2d(pFormatCtx->streams[audioStreamIndex]->time_base); 68 | } 69 | 70 | // Create the audio decode buffer 71 | if (hasAudioStream) { 72 | audioFifoBuffer = shared_ptr<AVFifo>( 73 | av_fifo_alloc2(0, GetAudioFrameSamples() * GetAudioChannelCount() * 10, AV_FIFO_FLAG_AUTO_GROW)); 74 | } 75 | return true; 76 | } 77 | 78 | bool FFmpegDecoder::CloseInput() { 79 | isOpen = false; 80 | 81 | lock_guard<mutex> lck(_releaseLock); 82 | 83 | // Close the streams 84 | CloseVideo(); 85 | CloseAudio(); 86 | if (pFormatCtx) { 87 | avformat_close_input(&pFormatCtx); 88 | pFormatCtx = nullptr; 89 | } 90 | 91 | return true; 92 | } 93 | 94 | void freeFrame(AVFrame *f) { 95 | av_frame_free(&f); 96 | } 97 | void freePkt(AVPacket *f) { 98 | av_packet_free(&f); 99 | } 100 | void freeSwrCtx(SwrContext *s) { 101 | swr_free(&s); 102 | } 103 | 104 | shared_ptr<AVFrame> FFmpegDecoder::GetNextFrame() { 105 | // Lock so the decoder cannot be released while this method runs, which would crash 106 | lock_guard<mutex> lck(_releaseLock); 107 | shared_ptr<AVFrame> res; 108 | if (videoStreamIndex == -1 && audioStreamIndex == -1) { 109 | return res; 110 | } 111 | if (!isOpen) { 112 | return res; 113 | } 114 | 115 | // Read the input stream 116 | while (true) { 117 | if (!pFormatCtx) { 118 | throw runtime_error("分配解析器出错"); 119 | } 120 | shared_ptr<AVPacket> packet = shared_ptr<AVPacket>(av_packet_alloc(), &freePkt); 121 | int ret = av_read_frame(pFormatCtx, packet.get()); 122 | if (ret < 0) { 123 | char errStr[AV_ERROR_MAX_STRING_SIZE]; 124 | av_strerror(ret, errStr, AV_ERROR_MAX_STRING_SIZE); 125 | throw runtime_error("解析视频出错 " + string(errStr)); 126 | } 127 | // Compute bitrate 128 | { 129 | bytesSecond += packet->size; 130 | uint64_t now = std::chrono::duration_cast<std::chrono::milliseconds>( 131 | std::chrono::system_clock::now().time_since_epoch()) 132 | .count(); 133 | if (now - lastCountBitrateTime >= 1000) { 134 | // Bitrate calculation interval elapsed 135 | bitrate = bytesSecond * 8 * 1000 / (now - lastCountBitrateTime); 136 | bytesSecond = 0; 137 | if (onBitrate) { 138 | onBitrate(bitrate); 139 | } 140 | lastCountBitrateTime = now; 141 | } 142 | } 143 | if (packet->stream_index == videoStreamIndex) { 144 | // NALU callback 145 | if (_gotPktCallback) { 146 | _gotPktCallback(packet); 147 | } 148 | // Handle video data 149 | shared_ptr<AVFrame> pVideoYuv = shared_ptr<AVFrame>(av_frame_alloc(), &freeFrame); 150 | // Decode the video frame 151 | bool isDecodeComplite = DecodeVideo(packet.get(), pVideoYuv); 152 | if (isDecodeComplite) { 153 | res = pVideoYuv; 154 | } 155 | // Frame callback 156 | if (_gotFrameCallback) { 157 | _gotFrameCallback(pVideoYuv); 158 | } 159 | break; 160 | } else if (packet->stream_index == audioStreamIndex) { 161 | // NALU callback 162 | if (_gotPktCallback) { 163 | _gotPktCallback(packet); 164 | } 165 | // Handle audio data 166 | if (packet->dts != AV_NOPTS_VALUE) { 167 | int audioFrameSize = MAX_AUDIO_PACKET; 168 | shared_ptr<uint8_t> pFrameAudio = shared_ptr<uint8_t>(new uint8_t[audioFrameSize]); 169 | 
// 解码音频祯 170 | int nDecodedSize = DecodeAudio(audioStreamIndex, packet.get(), pFrameAudio.get(), audioFrameSize); 171 | // 解码成功,解码数据写入音频缓存 172 | if (nDecodedSize > 0) { 173 | writeAudioBuff(pFrameAudio.get(), nDecodedSize); 174 | } 175 | } 176 | if (!HasVideo()) { 177 | return res; 178 | } 179 | } 180 | } 181 | return res; 182 | } 183 | 184 | bool FFmpegDecoder::hwDecoderInit(AVCodecContext *ctx, const enum AVHWDeviceType type) { 185 | if (av_hwdevice_ctx_create(&hwDeviceCtx, type, nullptr, nullptr, 0) < 0) { 186 | return false; 187 | } 188 | ctx->hw_device_ctx = av_buffer_ref(hwDeviceCtx); 189 | 190 | return true; 191 | } 192 | 193 | bool FFmpegDecoder::OpenVideo() { 194 | bool res = false; 195 | 196 | if (pFormatCtx) { 197 | videoStreamIndex = -1; 198 | 199 | for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) { 200 | if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) { 201 | videoStreamIndex = i; 202 | const AVCodec *codec = avcodec_find_decoder(pFormatCtx->streams[i]->codecpar->codec_id); 203 | 204 | // 如果有存在视频,检测硬件解码器 205 | if (codec && isHwDecoderEnable) { 206 | for (int configIndex = 0;; configIndex++) { 207 | const AVCodecHWConfig *config = avcodec_get_hw_config(codec, configIndex); 208 | if (!config) { 209 | isHwDecoderEnable = false; 210 | break; 211 | } 212 | if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX 213 | && config->device_type == hwDecoderType) { 214 | hwPixFmt = config->pix_fmt; 215 | break; 216 | } 217 | } 218 | } 219 | 220 | if (codec) { 221 | pVideoCodecCtx = avcodec_alloc_context3(codec); 222 | if (pVideoCodecCtx) { 223 | if (isHwDecoderEnable) { 224 | isHwDecoderEnable = hwDecoderInit(pVideoCodecCtx, hwDecoderType); 225 | } 226 | 227 | if (avcodec_parameters_to_context(pVideoCodecCtx, pFormatCtx->streams[i]->codecpar) >= 0) { 228 | res = !(avcodec_open2(pVideoCodecCtx, codec, nullptr) < 0); 229 | if (res) { 230 | width = pVideoCodecCtx->width; 231 | height = pVideoCodecCtx->height; 232 | } 233 | } 234 | } 235 | } 236 | 237 | break; 238 | } 239 | } 240 | 241 | if (!res) { 242 | CloseVideo(); 243 | } 244 | } 245 | 246 | return res; 247 | } 248 | 249 | bool FFmpegDecoder::DecodeVideo(const AVPacket *av_pkt, shared_ptr &pOutFrame) { 250 | bool res = false; 251 | 252 | if (pVideoCodecCtx && av_pkt && pOutFrame) { 253 | int ret = avcodec_send_packet(pVideoCodecCtx, av_pkt); 254 | if (ret < 0) { 255 | char errStr[AV_ERROR_MAX_STRING_SIZE]; 256 | av_strerror(ret, errStr, AV_ERROR_MAX_STRING_SIZE); 257 | throw runtime_error("发送视频包出错 " + string(errStr)); 258 | } 259 | 260 | if (isHwDecoderEnable) { 261 | // Initialize the hardware frame. 262 | if (!hwFrame) { 263 | hwFrame = shared_ptr(av_frame_alloc(), &freeFrame); 264 | } 265 | 266 | ret = avcodec_receive_frame(pVideoCodecCtx, hwFrame.get()); 267 | } else { 268 | ret = avcodec_receive_frame(pVideoCodecCtx, pOutFrame.get()); 269 | } 270 | 271 | if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) { 272 | // No output available right now or end of stream 273 | res = false; 274 | } else if (ret < 0) { 275 | char errStr[AV_ERROR_MAX_STRING_SIZE]; 276 | av_strerror(ret, errStr, AV_ERROR_MAX_STRING_SIZE); 277 | throw runtime_error("解码视频出错 " + string(errStr)); 278 | } else { 279 | // Successfully decoded a frame 280 | res = true; 281 | } 282 | 283 | if (isHwDecoderEnable) { 284 | if (dropCurrentVideoFrame) { 285 | pOutFrame.reset(); 286 | return false; 287 | } 288 | 289 | // Copy data from the hw surface to the out frame. 
290 | ret = av_hwframe_transfer_data(pOutFrame.get(), hwFrame.get(), 0); 291 | 292 | if (ret < 0) { 293 | char errStr[AV_ERROR_MAX_STRING_SIZE]; 294 | av_strerror(ret, errStr, AV_ERROR_MAX_STRING_SIZE); 295 | throw runtime_error("Decode video frame error. " + string(errStr)); 296 | } 297 | } 298 | } 299 | 300 | return res; 301 | } 302 | 303 | bool FFmpegDecoder::OpenAudio() { 304 | bool res = false; 305 | 306 | if (pFormatCtx) { 307 | audioStreamIndex = -1; 308 | 309 | for (unsigned int i = 0; i < pFormatCtx->nb_streams; i++) { 310 | if (pFormatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) { 311 | audioStreamIndex = i; 312 | const AVCodec *codec = avcodec_find_decoder(pFormatCtx->streams[i]->codecpar->codec_id); 313 | 314 | if (codec) { 315 | pAudioCodecCtx = avcodec_alloc_context3(codec); 316 | if (pAudioCodecCtx) { 317 | if (avcodec_parameters_to_context(pAudioCodecCtx, pFormatCtx->streams[i]->codecpar) >= 0) { 318 | res = !(avcodec_open2(pAudioCodecCtx, codec, nullptr) < 0); 319 | } 320 | } 321 | } 322 | 323 | break; 324 | } 325 | } 326 | 327 | if (!res) { 328 | CloseAudio(); 329 | } 330 | } 331 | 332 | return res; 333 | } 334 | 335 | void FFmpegDecoder::CloseVideo() { 336 | if (pVideoCodecCtx) { 337 | avcodec_close(pVideoCodecCtx); 338 | pVideoCodecCtx = nullptr; 339 | videoStreamIndex = 0; 340 | } 341 | } 342 | 343 | void FFmpegDecoder::CloseAudio() { 344 | if (pAudioCodecCtx) { 345 | avcodec_close(pAudioCodecCtx); 346 | pAudioCodecCtx = nullptr; 347 | audioStreamIndex = 0; 348 | } 349 | } 350 | 351 | int FFmpegDecoder::DecodeAudio(int nStreamIndex, const AVPacket *avpkt, uint8_t *pOutBuffer, size_t nOutBufferSize) { 352 | int decodedSize = 0; 353 | 354 | int packetSize = avpkt->size; 355 | const uint8_t *pPacketData = avpkt->data; 356 | 357 | while (packetSize > 0) { 358 | int sizeToDecode = nOutBufferSize; 359 | uint8_t *pDest = pOutBuffer + decodedSize; 360 | AVFrame *audioFrame = av_frame_alloc(); 361 | if (!audioFrame) { 362 | throw std::runtime_error("Failed to allocate audio frame"); 363 | return 0; 364 | } 365 | 366 | int packetDecodedSize = avcodec_receive_frame(pAudioCodecCtx, audioFrame); 367 | 368 | if (packetDecodedSize >= 0) { 369 | if (audioFrame->format != AV_SAMPLE_FMT_S16) { 370 | // Convert frame to AV_SAMPLE_FMT_S16 if needed 371 | if (!swrCtx) { 372 | SwrContext *ptr = nullptr; 373 | swr_alloc_set_opts2( 374 | &ptr, &pAudioCodecCtx->ch_layout, AV_SAMPLE_FMT_S16, pAudioCodecCtx->sample_rate, 375 | &pAudioCodecCtx->ch_layout, static_cast(audioFrame->format), 376 | pAudioCodecCtx->sample_rate, 0, nullptr); 377 | 378 | auto ret = swr_init(ptr); 379 | if (ret < 0) { 380 | char errStr[AV_ERROR_MAX_STRING_SIZE]; 381 | av_strerror(ret, errStr, AV_ERROR_MAX_STRING_SIZE); 382 | throw runtime_error("解码音频出错 " + string(errStr)); 383 | return 0; 384 | } 385 | swrCtx = shared_ptr(ptr, &freeSwrCtx); 386 | } 387 | 388 | // Convert audio frame to S16 format 389 | int samples = swr_convert( 390 | swrCtx.get(), &pDest, audioFrame->nb_samples, (const uint8_t **)audioFrame->data, 391 | audioFrame->nb_samples); 392 | sizeToDecode 393 | = samples * pAudioCodecCtx->ch_layout.nb_channels * av_get_bytes_per_sample(AV_SAMPLE_FMT_S16); 394 | } else { 395 | // Copy S16 audio data directly 396 | sizeToDecode = av_samples_get_buffer_size( 397 | nullptr, pAudioCodecCtx->ch_layout.nb_channels, audioFrame->nb_samples, AV_SAMPLE_FMT_S16, 1); 398 | memcpy(pDest, audioFrame->data[0], sizeToDecode); 399 | } 400 | } 401 | 402 | av_frame_free(&audioFrame); 403 | 404 | if (packetDecodedSize 
< 0) { 405 | decodedSize = 0; 406 | break; 407 | } 408 | 409 | packetSize -= packetDecodedSize; 410 | pPacketData += packetDecodedSize; 411 | 412 | if (sizeToDecode <= 0) { 413 | continue; 414 | } 415 | 416 | decodedSize += sizeToDecode; 417 | } 418 | 419 | return decodedSize; 420 | } 421 | 422 | void FFmpegDecoder::writeAudioBuff(uint8_t *aSample, size_t aSize) { 423 | lock_guard lck(abBuffMtx); 424 | if (av_fifo_can_write(audioFifoBuffer.get()) < aSize) { 425 | std::vector tmp; 426 | tmp.resize(aSize); 427 | av_fifo_read(audioFifoBuffer.get(), tmp.data(), aSize); 428 | } 429 | av_fifo_write(audioFifoBuffer.get(), aSample, aSize); 430 | } 431 | 432 | size_t FFmpegDecoder::ReadAudioBuff(uint8_t *aSample, size_t aSize) { 433 | lock_guard lck(abBuffMtx); 434 | if (av_fifo_elem_size(audioFifoBuffer.get()) < aSize) { 435 | return 0; 436 | } 437 | av_fifo_read(audioFifoBuffer.get(), aSample, aSize); 438 | return aSize; 439 | } 440 | void FFmpegDecoder::ClearAudioBuff() { 441 | lock_guard lck(abBuffMtx); 442 | av_fifo_reset2(audioFifoBuffer.get()); 443 | } 444 | -------------------------------------------------------------------------------- /src/player/ffmpegDecode.h: -------------------------------------------------------------------------------- 1 | /* 2 | It is FFmpeg decoder class. Sample for article from unick-soft.ru 3 | */ 4 | 5 | #ifndef __FFMPEG_DECODER__ 6 | #define __FFMPEG_DECODER__ 7 | 8 | #include "ffmpegInclude.h" 9 | #include 10 | #include 11 | #include 12 | #include 13 | 14 | class QQuickRealTimePlayer; 15 | 16 | using namespace std; 17 | 18 | class FFmpegDecoder { 19 | public: 20 | FFmpegDecoder() 21 | : pImgConvertCtx(nullptr) 22 | , audioBaseTime(0.0) 23 | , videoBaseTime(0.0) 24 | , videoFramePerSecond(0.0) 25 | , isOpen(false) 26 | , audioStreamIndex(-1) 27 | , videoStreamIndex(-1) 28 | , pAudioCodecCtx(nullptr) 29 | , pVideoCodecCtx(nullptr) 30 | , pFormatCtx(nullptr) { 31 | ; 32 | } 33 | 34 | public: 35 | virtual ~FFmpegDecoder() { FFmpegDecoder::CloseInput(); } 36 | 37 | // 打开输入 38 | virtual bool OpenInput(std::string &inputFile); 39 | 40 | // 关闭输入并释放资源 41 | virtual bool CloseInput(); 42 | 43 | // 获取下一帧 44 | virtual shared_ptr GetNextFrame(); 45 | 46 | // 获取宽度 47 | int GetWidth() const { return width; } 48 | 49 | // 获取高度 50 | int GetHeight() const { return height; } 51 | 52 | // 获取FPS 53 | double GetFps() const { return videoFramePerSecond; } 54 | 55 | // 输入流是否存在音频 56 | bool HasAudio() const { return hasAudioStream; } 57 | 58 | // 输入流是否存在视频 59 | bool HasVideo() const { return hasVideoStream; } 60 | 61 | // 读音频fifo 62 | size_t ReadAudioBuff(uint8_t *aSample, size_t aSize); 63 | // 清空音频fifo 64 | void ClearAudioBuff(); 65 | // 音频采样率 66 | int GetAudioSampleRate() const { return pAudioCodecCtx->sample_rate; } 67 | // 音频声道数 68 | int GetAudioChannelCount() const { return pAudioCodecCtx->ch_layout.nb_channels; } 69 | // 音频样本格式 70 | AVSampleFormat GetAudioSampleFormat() const { return AV_SAMPLE_FMT_S16; } 71 | // 视频帧格式 72 | AVPixelFormat GetVideoFrameFormat() const { 73 | if (isHwDecoderEnable) { 74 | return AV_PIX_FMT_NV12; 75 | } 76 | return pVideoCodecCtx->pix_fmt; 77 | } 78 | // 获取音频frame大小 79 | int GetAudioFrameSamples() { return pAudioCodecCtx->sample_rate * 2 / 25; } 80 | // 有元 81 | friend class QQuickRealTimePlayer; 82 | 83 | private: 84 | // 打开视频流 85 | bool OpenVideo(); 86 | 87 | // 打开音频流 88 | bool OpenAudio(); 89 | 90 | // 关闭视频流 91 | void CloseVideo(); 92 | 93 | // 关闭音频流 94 | void CloseAudio(); 95 | 96 | // 解码音频帧 97 | int DecodeAudio(int nStreamIndex, const AVPacket 
*avpkt, uint8_t *pOutBuffer, size_t nOutBufferSize); 98 | 99 | // Decode a video frame 100 | bool DecodeVideo(const AVPacket *avpkt, shared_ptr<AVFrame> &pOutFrame); 101 | 102 | // Write data into the audio FIFO 103 | void writeAudioBuff(uint8_t *aSample, size_t aSize); 104 | 105 | // Callback invoked when a NALU (packet) is received 106 | std::function<void(const shared_ptr<AVPacket> &packet)> _gotPktCallback = nullptr; 107 | // Callback invoked when a decoded frame is available 108 | std::function<void(const shared_ptr<AVFrame> &frame)> _gotFrameCallback = nullptr; 109 | 110 | // Initialize the hardware decoder 111 | bool hwDecoderInit(AVCodecContext *ctx, enum AVHWDeviceType type); 112 | 113 | // FFmpeg demuxing context 114 | AVFormatContext *pFormatCtx = nullptr; 115 | 116 | // FFmpeg video codec context 117 | AVCodecContext *pVideoCodecCtx = nullptr; 118 | 119 | // FFmpeg audio codec context 120 | AVCodecContext *pAudioCodecCtx = nullptr; 121 | 122 | // FFmpeg audio sample format conversion 123 | shared_ptr<SwrContext> swrCtx; 124 | 125 | // Video stream index 126 | int videoStreamIndex = 0; 127 | 128 | // Audio stream index 129 | int audioStreamIndex = 0; 130 | 131 | // Whether the input was opened successfully 132 | volatile bool isOpen = false; 133 | 134 | // Video frame rate 135 | double videoFramePerSecond = 0; 136 | 137 | // FFmpeg video time base 138 | double videoBaseTime = 0; 139 | 140 | // FFmpeg audio time base 141 | double audioBaseTime = 0; 142 | 143 | // FFmpeg video format conversion 144 | struct SwsContext *pImgConvertCtx = nullptr; 145 | 146 | // Global decoder release lock 147 | mutex _releaseLock; 148 | 149 | // Whether a video stream exists 150 | bool hasVideoStream {}; 151 | // Whether an audio stream exists 152 | bool hasAudioStream {}; 153 | 154 | // Video width 155 | int width {}; 156 | 157 | // Video height 158 | int height {}; 159 | 160 | volatile uint64_t bytesSecond = 0; 161 | uint64_t bitrate = 0; 162 | uint64_t lastCountBitrateTime = 0; 163 | function<void(uint64_t bitrate)> onBitrate; 164 | 165 | // Audio queue 166 | mutex abBuffMtx; 167 | shared_ptr<AVFifo> audioFifoBuffer; 168 | 169 | // Hardware decoding 170 | enum AVHWDeviceType hwDecoderType; 171 | bool isHwDecoderEnable = false; 172 | enum AVPixelFormat hwPixFmt; 173 | AVBufferRef *hwDeviceCtx = nullptr; 174 | volatile bool dropCurrentVideoFrame = false; 175 | // Hardware frame 176 | shared_ptr<AVFrame> hwFrame; 177 | }; 178 | 179 | #endif 180 | -------------------------------------------------------------------------------- /src/player/ffmpegInclude.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Include ffmpeg files 3 | */ 4 | 5 | #pragma once 6 | 7 | #ifndef __STDC_CONSTANT_MACROS 8 | #define __STDC_CONSTANT_MACROS 9 | #endif 10 | 11 | extern "C" { 12 | #include 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | } 19 | -------------------------------------------------------------------------------- /src/util/base64.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2006 Ryan Martell. (rdm4@martellventures.com) 3 | * 4 | * This file is part of FFmpeg. 5 | * 6 | * FFmpeg is free software; you can redistribute it and/or 7 | * modify it under the terms of the GNU Lesser General Public 8 | * License as published by the Free Software Foundation; either 9 | * version 2.1 of the License, or (at your option) any later version. 10 | * 11 | * FFmpeg is distributed in the hope that it will be useful, 12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 14 | * Lesser General Public License for more details. 
15 | * 16 | * You should have received a copy of the GNU Lesser General Public 17 | * License along with FFmpeg; if not, write to the Free Software 18 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 19 | */ 20 | 21 | /** 22 | * @file 23 | * @brief Base64 encode/decode 24 | * @author Ryan Martell (with lots of Michael) 25 | */ 26 | 27 | // #include "common.h" 28 | #include "base64.h" 29 | #include "stdio.h" 30 | #include 31 | #include 32 | /* ---------------- private code */ 33 | static const uint8_t map2[] 34 | = { 0x3e, 0xff, 0xff, 0xff, 0x3f, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0xff, 35 | 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 36 | 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 37 | 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 38 | 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33 }; 39 | 40 | int av_base64_decode(uint8_t *out, const char *in, int out_size) { 41 | int i, v; 42 | uint8_t *dst = out; 43 | 44 | v = 0; 45 | for (i = 0; in[i] && in[i] != '='; i++) { 46 | unsigned int index = in[i] - 43; 47 | if (index >= FF_ARRAY_ELEMS(map2) || map2[index] == 0xff) 48 | return -1; 49 | v = (v << 6) + map2[index]; 50 | if (i & 3) { 51 | if (dst - out < out_size) { 52 | *dst++ = v >> (6 - 2 * (i & 3)); 53 | } 54 | } 55 | } 56 | 57 | return dst - out; 58 | } 59 | 60 | /***************************************************************************** 61 | * b64_encode: Stolen from VLC's http.c. 62 | * Simplified by Michael. 63 | * Fixed edge cases and made it work from data (vs. strings) by Ryan. 64 | *****************************************************************************/ 65 | 66 | char *av_base64_encode(char *out, int out_size, const uint8_t *in, int in_size) { 67 | static const char b64[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; 68 | char *ret, *dst; 69 | unsigned i_bits = 0; 70 | int i_shift = 0; 71 | int bytes_remaining = in_size; 72 | 73 | if (in_size >= UINT_MAX / 4 || out_size < AV_BASE64_SIZE(in_size)) { 74 | return NULL; 75 | } 76 | ret = dst = out; 77 | while (bytes_remaining) { 78 | i_bits = (i_bits << 8) + *in++; 79 | bytes_remaining--; 80 | i_shift += 8; 81 | 82 | do { 83 | *dst++ = b64[(i_bits << 6 >> i_shift) & 0x3f]; 84 | i_shift -= 6; 85 | } while (i_shift > 6 || (bytes_remaining == 0 && i_shift > 0)); 86 | } 87 | while ((dst - ret) & 3) 88 | *dst++ = '='; 89 | *dst = '\0'; 90 | 91 | return ret; 92 | } 93 | 94 | string encodeBase64(const string &txt) { 95 | if (txt.empty()) { 96 | return ""; 97 | } 98 | int size = AV_BASE64_SIZE(txt.size()) + 10; 99 | std::shared_ptr txt_enc(new char[size], [](char *ptr) { delete[] ptr; }); 100 | auto ret = av_base64_encode(txt_enc.get(), size, (uint8_t *)txt.data(), txt.size()); 101 | if (!ret) { 102 | return ""; 103 | } 104 | return ret; 105 | } 106 | 107 | string decodeBase64(const string &txt) { 108 | if (txt.empty()) { 109 | return ""; 110 | } 111 | int size = txt.size() * 3 / 4 + 10; 112 | std::shared_ptr txt_dec(new char[size], [](char *ptr) { delete[] ptr; }); 113 | size = av_base64_decode((uint8_t *)txt_dec.get(), txt.data(), size); 114 | if (size <= 0) { 115 | return ""; 116 | } 117 | return string(txt_dec.get(), size); 118 | } 119 | 120 | #ifdef TEST 121 | 122 | #undef printf 123 | 124 | #define MAX_DATA_SIZE 1024 125 | #define MAX_ENCODED_SIZE 2048 
126 | 127 | static int test_encode_decode(const uint8_t *data, unsigned int data_size, const char *encoded_ref) { 128 | char encoded[MAX_ENCODED_SIZE]; 129 | uint8_t data2[MAX_DATA_SIZE]; 130 | int data2_size, max_data2_size = MAX_DATA_SIZE; 131 | 132 | if (!av_base64_encode(encoded, MAX_ENCODED_SIZE, data, data_size)) { 133 | printf("Failed: cannot encode the input data\n"); 134 | return 1; 135 | } 136 | if (encoded_ref && strcmp(encoded, encoded_ref)) { 137 | printf( 138 | "Failed: encoded string differs from reference\n" 139 | "Encoded:\n%s\nReference:\n%s\n", 140 | encoded, encoded_ref); 141 | return 1; 142 | } 143 | 144 | if ((data2_size = av_base64_decode(data2, encoded, max_data2_size)) < 0) { 145 | printf( 146 | "Failed: cannot decode the encoded string\n" 147 | "Encoded:\n%s\n", 148 | encoded); 149 | return 1; 150 | } 151 | if (memcmp(data2, data, data_size)) { 152 | printf("Failed: encoded/decoded data differs from original data\n"); 153 | return 1; 154 | } 155 | 156 | printf("Passed!\n"); 157 | return 0; 158 | } 159 | 160 | int main(void) { 161 | int i, error_count = 0; 162 | struct test { 163 | const uint8_t *data; 164 | const char *encoded_ref; 165 | } tests[] = { 166 | { "", "" }, 167 | { "1", "MQ==" }, 168 | { "22", "MjI=" }, 169 | { "333", "MzMz" }, 170 | { "4444", "NDQ0NA==" }, 171 | { "55555", "NTU1NTU=" }, 172 | { "666666", "NjY2NjY2" }, 173 | { "abc:def", "YWJjOmRlZg==" }, 174 | }; 175 | 176 | printf("Encoding/decoding tests\n"); 177 | for (i = 0; i < FF_ARRAY_ELEMS(tests); i++) 178 | error_count += test_encode_decode(tests[i].data, strlen(tests[i].data), tests[i].encoded_ref); 179 | 180 | return error_count; 181 | } 182 | 183 | #endif 184 | -------------------------------------------------------------------------------- /src/util/base64.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2006 Ryan Martell. (rdm4@martellventures.com) 3 | * 4 | * This file is part of FFmpeg. 5 | * 6 | * FFmpeg is free software; you can redistribute it and/or 7 | * modify it under the terms of the GNU Lesser General Public 8 | * License as published by the Free Software Foundation; either 9 | * version 2.1 of the License, or (at your option) any later version. 10 | * 11 | * FFmpeg is distributed in the hope that it will be useful, 12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 14 | * Lesser General Public License for more details. 15 | * 16 | * You should have received a copy of the GNU Lesser General Public 17 | * License along with FFmpeg; if not, write to the Free Software 18 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 19 | */ 20 | 21 | #define FF_ARRAY_ELEMS(a) (sizeof(a) / sizeof((a)[0])) 22 | #ifndef AVUTIL_BASE64_H 23 | #define AVUTIL_BASE64_H 24 | 25 | #include 26 | #include 27 | using namespace std; 28 | 29 | /** 30 | * Decode a base64-encoded string. 31 | * 32 | * @param out buffer for decoded data 33 | * @param in null-terminated input string 34 | * @param out_size size in bytes of the out buffer, must be at 35 | * least 3/4 of the length of in 36 | * @return number of bytes written, or a negative value in case of 37 | * invalid input 38 | */ 39 | int av_base64_decode(uint8_t *out, const char *in, int out_size); 40 | 41 | /** 42 | * Encode data to base64 and null-terminate. 
43 | * 44 | * @param out buffer for encoded data 45 | * @param out_size size in bytes of the output buffer, must be at 46 | * least AV_BASE64_SIZE(in_size) 47 | * @param in_size size in bytes of the 'in' buffer 48 | * @return 'out' or NULL in case of error 49 | */ 50 | char *av_base64_encode(char *out, int out_size, const uint8_t *in, int in_size); 51 | 52 | /** 53 | * Calculate the output size needed to base64-encode x bytes. 54 | */ 55 | #define AV_BASE64_SIZE(x) (((x) + 2) / 3 * 4 + 1) 56 | 57 | /** 58 | * 编码base64 59 | * @param txt 明文 60 | * @return 密文 61 | */ 62 | string encodeBase64(const string &txt); 63 | 64 | /** 65 | * 解码base64 66 | * @param txt 密文 67 | * @return 明文 68 | */ 69 | string decodeBase64(const string &txt); 70 | 71 | #endif /* AVUTIL_BASE64_H */ 72 | -------------------------------------------------------------------------------- /src/util/mini.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 r-lyeh (https://github.com/r-lyeh) 3 | * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com> 4 | * 5 | * This software is provided 'as-is', without any express or implied 6 | * warranty. In no event will the authors be held liable for any damages 7 | * arising from the use of this software. 8 | * 9 | * Permission is granted to anyone to use this software for any purpose, 10 | * including commercial applications, and to alter it and redistribute it 11 | * freely, subject to the following restrictions: 12 | * 13 | * 1. The origin of this software must not be misrepresented; you must not 14 | * claim that you wrote the original software. If you use this software 15 | * in a product, an acknowledgment in the product documentation would be 16 | * appreciated but is not required. 17 | * 2. Altered source versions must be plainly marked as such, and must not be 18 | * misrepresented as being the original software. 19 | * 3. This notice may not be removed or altered from any source distribution. 20 | */ 21 | 22 | #include "mini.h" 23 | 24 | using namespace std; 25 | 26 | namespace toolkit { 27 | 28 | template <> 29 | mINI_basic &mINI_basic::Instance() { 30 | static mINI_basic instance; 31 | return instance; 32 | } 33 | 34 | template <> 35 | bool variant::as() const { 36 | if (empty() || isdigit(front())) { 37 | // 数字开头 38 | return as_default(); 39 | } 40 | if (strToLower(std::string(*this)) == "true") { 41 | return true; 42 | } 43 | if (strToLower(std::string(*this)) == "false") { 44 | return false; 45 | } 46 | // 未识别字符串 47 | return as_default(); 48 | } 49 | 50 | template <> 51 | uint8_t variant::as() const { 52 | return 0xFF & as_default(); 53 | } 54 | 55 | } // namespace toolkit 56 | -------------------------------------------------------------------------------- /src/util/mini.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2015 r-lyeh (https://github.com/r-lyeh) 3 | * Copyright (c) 2016-2019 xiongziliang <771730766@qq.com> 4 | * 5 | * This software is provided 'as-is', without any express or implied 6 | * warranty. In no event will the authors be held liable for any damages 7 | * arising from the use of this software. 8 | * 9 | * Permission is granted to anyone to use this software for any purpose, 10 | * including commercial applications, and to alter it and redistribute it 11 | * freely, subject to the following restrictions: 12 | * 13 | * 1. 
The origin of this software must not be misrepresented; you must not 14 | * claim that you wrote the original software. If you use this software 15 | * in a product, an acknowledgment in the product documentation would be 16 | * appreciated but is not required. 17 | * 2. Altered source versions must be plainly marked as such, and must not be 18 | * misrepresented as being the original software. 19 | * 3. This notice may not be removed or altered from any source distribution. 20 | */ 21 | #ifndef UTIL_MINI_H 22 | #define UTIL_MINI_H 23 | 24 | #include "util.h" 25 | #include 26 | #include 27 | #include 28 | #include 29 | #include 30 | 31 | namespace toolkit { 32 | 33 | template 34 | class mINI_basic : public std::map { 35 | // Public API : existing map<> interface plus following methods 36 | public: 37 | void parse(const std::string &text) { 38 | // reset, split lines and parse 39 | std::vector lines = tokenize(text, "\n"); 40 | std::string symbol, tag; 41 | for (auto &line : lines) { 42 | // trim blanks 43 | line = trim(line); 44 | // split line into tokens and parse tokens 45 | if (line.empty() || line.front() == ';' || line.front() == '#') { 46 | continue; 47 | } 48 | if (line.size() >= 3 && line.front() == '[' && line.back() == ']') { 49 | tag = trim(line.substr(1, line.size() - 2)); 50 | } else { 51 | auto at = line.find('='); 52 | symbol = trim(tag + "." + line.substr(0, at)); 53 | (*this)[symbol] = (at == std::string::npos ? std::string() : trim(line.substr(at + 1))); 54 | } 55 | } 56 | } 57 | 58 | void parseFile(const std::string &fileName = exePath() + ".ini") { 59 | std::ifstream in(fileName, std::ios::in | std::ios::binary | std::ios::ate); 60 | if (!in.good()) { 61 | std::stringstream ss; 62 | ss << "invalid ini file:" << fileName; 63 | throw std::invalid_argument(ss.str()); 64 | } 65 | auto size = in.tellg(); 66 | in.seekg(0, std::ios::beg); 67 | std::string buf; 68 | buf.resize(size); 69 | in.read((char *)buf.data(), size); 70 | parse(buf); 71 | } 72 | 73 | std::string 74 | dump(const std::string &header = "; auto-generated by mINI class {", const std::string &footer = "; } ---") const { 75 | std::string output(header + (header.empty() ? "" : "\r\n")), tag; 76 | for (auto &pr : *this) { 77 | std::vector kv = tokenize(pr.first, "."); 78 | if (tag != kv[0]) { 79 | output += "\r\n[" + (tag = kv[0]) + "]\r\n"; 80 | } 81 | output += kv[1] + "=" + pr.second + "\r\n"; 82 | } 83 | return output + "\r\n" + footer + (footer.empty() ? 
"" : "\r\n"); 84 | } 85 | 86 | void dumpFile(const std::string &fileName = exePath() + ".ini") { 87 | std::ofstream out(fileName, std::ios::out | std::ios::binary | std::ios::trunc); 88 | auto dmp = dump(); 89 | out.write(dmp.data(), dmp.size()); 90 | } 91 | 92 | static mINI_basic &Instance(); 93 | 94 | private: 95 | std::vector tokenize(const std::string &self, const std::string &chars) const { 96 | std::vector tokens(1); 97 | std::string map(256, '\0'); 98 | for (char ch : chars) { 99 | map[(uint8_t)ch] = '\1'; 100 | } 101 | for (char ch : self) { 102 | if (!map.at((uint8_t)ch)) { 103 | tokens.back().push_back(ch); 104 | } else if (tokens.back().size()) { 105 | tokens.push_back(std::string()); 106 | } 107 | } 108 | while (tokens.size() && tokens.back().empty()) { 109 | tokens.pop_back(); 110 | } 111 | return tokens; 112 | } 113 | }; 114 | 115 | // handy variant class as key/values 116 | struct variant : public std::string { 117 | template 118 | variant(const T &t) 119 | : std::string(std::to_string(t)) {} 120 | 121 | template 122 | variant(const char (&s)[N]) 123 | : std::string(s, N) {} 124 | 125 | variant(const char *cstr) 126 | : std::string(cstr) {} 127 | 128 | variant(const std::string &other = std::string()) 129 | : std::string(other) {} 130 | 131 | template 132 | operator T() const { 133 | return as(); 134 | } 135 | 136 | template 137 | bool operator==(const T &t) const { 138 | return 0 == this->compare(variant(t)); 139 | } 140 | 141 | bool operator==(const char *t) const { return this->compare(t) == 0; } 142 | 143 | template 144 | T as() const { 145 | return as_default(); 146 | } 147 | 148 | private: 149 | template 150 | T as_default() const { 151 | T t; 152 | std::stringstream ss; 153 | return ss << *this && ss >> t ? t : T(); 154 | } 155 | }; 156 | 157 | template <> 158 | bool variant::as() const; 159 | 160 | template <> 161 | uint8_t variant::as() const; 162 | 163 | using mINI = mINI_basic; 164 | 165 | } // namespace toolkit 166 | #endif // UTIL_MINI_H 167 | -------------------------------------------------------------------------------- /src/util/util.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 The ZLToolKit project authors. All Rights Reserved. 3 | * 4 | * This file is part of ZLToolKit(https://github.com/ZLMediaKit/ZLToolKit). 5 | * 6 | * Use of this source code is governed by MIT license that can be found in the 7 | * LICENSE file in the root of the source tree. All contributing project authors 8 | * may be found in the AUTHORS file in the root of the source tree. 9 | */ 10 | 11 | #include 12 | #include 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | 19 | #include "util.h" 20 | 21 | #if defined(_WIN32) 22 | #include 23 | #include 24 | #include 25 | #include 26 | #include 27 | #include 28 | #pragma comment(lib, "shlwapi.lib") 29 | extern "C" const IMAGE_DOS_HEADER __ImageBase; 30 | #endif // defined(_WIN32) 31 | 32 | #if defined(__MACH__) || defined(__APPLE__) 33 | #include 34 | #include /* _NSGetExecutablePath */ 35 | 36 | int uv_exepath(char *buffer, int *size) { 37 | /* realpath(exepath) may be > PATH_MAX so double it to be on the safe side. 
*/ 38 | char abspath[PATH_MAX * 2 + 1]; 39 | char exepath[PATH_MAX + 1]; 40 | uint32_t exepath_size; 41 | size_t abspath_size; 42 | 43 | if (buffer == nullptr || size == nullptr || *size == 0) 44 | return -EINVAL; 45 | 46 | exepath_size = sizeof(exepath); 47 | if (_NSGetExecutablePath(exepath, &exepath_size)) 48 | return -EIO; 49 | 50 | if (realpath(exepath, abspath) != abspath) 51 | return -errno; 52 | 53 | abspath_size = strlen(abspath); 54 | if (abspath_size == 0) 55 | return -EIO; 56 | 57 | *size -= 1; 58 | if ((size_t)*size > abspath_size) 59 | *size = abspath_size; 60 | 61 | memcpy(buffer, abspath, *size); 62 | buffer[*size] = '\0'; 63 | 64 | return 0; 65 | } 66 | 67 | #endif // defined(__MACH__) || defined(__APPLE__) 68 | 69 | #define PATH_MAX 4096 70 | 71 | using namespace std; 72 | 73 | namespace toolkit { 74 | 75 | static constexpr char CCH[] = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"; 76 | 77 | string makeRandStr(int sz, bool printable) { 78 | string ret; 79 | ret.resize(sz); 80 | std::mt19937 rng(std::random_device {}()); 81 | for (int i = 0; i < sz; ++i) { 82 | if (printable) { 83 | uint32_t x = rng() % (sizeof(CCH) - 1); 84 | ret[i] = CCH[x]; 85 | } else { 86 | ret[i] = rng() % 0xFF; 87 | } 88 | } 89 | return ret; 90 | } 91 | 92 | bool is_safe(uint8_t b) { 93 | return b >= ' ' && b < 128; 94 | } 95 | 96 | string hexdump(const void *buf, size_t len) { 97 | string ret("\r\n"); 98 | char tmp[8]; 99 | const uint8_t *data = (const uint8_t *)buf; 100 | for (size_t i = 0; i < len; i += 16) { 101 | for (int j = 0; j < 16; ++j) { 102 | if (i + j < len) { 103 | int sz = snprintf(tmp, sizeof(tmp), "%.2x ", data[i + j]); 104 | ret.append(tmp, sz); 105 | } else { 106 | int sz = snprintf(tmp, sizeof(tmp), " "); 107 | ret.append(tmp, sz); 108 | } 109 | } 110 | for (int j = 0; j < 16; ++j) { 111 | if (i + j < len) { 112 | ret += (is_safe(data[i + j]) ? data[i + j] : '.'); 113 | } else { 114 | ret += (' '); 115 | } 116 | } 117 | ret += ('\n'); 118 | } 119 | return ret; 120 | } 121 | 122 | string hexmem(const void *buf, size_t len) { 123 | string ret; 124 | char tmp[8]; 125 | const uint8_t *data = (const uint8_t *)buf; 126 | for (size_t i = 0; i < len; ++i) { 127 | int sz = sprintf(tmp, "%.2x ", data[i]); 128 | ret.append(tmp, sz); 129 | } 130 | return ret; 131 | } 132 | 133 | string exePath(bool isExe /*= true*/) { 134 | char buffer[PATH_MAX * 2 + 1] = { 0 }; 135 | int n = -1; 136 | #if defined(_WIN32) 137 | n = GetModuleFileNameA(isExe ? 
nullptr : (HINSTANCE)&__ImageBase, buffer, sizeof(buffer)); 138 | #elif defined(__MACH__) || defined(__APPLE__) 139 | n = sizeof(buffer); 140 | if (uv_exepath(buffer, &n) != 0) { 141 | n = -1; 142 | } 143 | #elif defined(__linux__) 144 | n = readlink("/proc/self/exe", buffer, sizeof(buffer)); 145 | #endif 146 | 147 | string filePath; 148 | if (n <= 0) { 149 | filePath = "./"; 150 | } else { 151 | filePath = buffer; 152 | } 153 | 154 | #if defined(_WIN32) 155 | // windows下把路径统一转换层unix风格,因为后续都是按照unix风格处理的 156 | for (auto &ch : filePath) { 157 | if (ch == '\\') { 158 | ch = '/'; 159 | } 160 | } 161 | #endif // defined(_WIN32) 162 | 163 | return filePath; 164 | } 165 | 166 | string exeDir(bool isExe /*= true*/) { 167 | auto path = exePath(isExe); 168 | return path.substr(0, path.rfind('/') + 1); 169 | } 170 | 171 | string exeName(bool isExe /*= true*/) { 172 | auto path = exePath(isExe); 173 | return path.substr(path.rfind('/') + 1); 174 | } 175 | 176 | // string转小写 177 | std::string &strToLower(std::string &str) { 178 | transform(str.begin(), str.end(), str.begin(), towlower); 179 | return str; 180 | } 181 | 182 | // string转大写 183 | std::string &strToUpper(std::string &str) { 184 | transform(str.begin(), str.end(), str.begin(), towupper); 185 | return str; 186 | } 187 | 188 | // string转小写 189 | std::string strToLower(std::string &&str) { 190 | transform(str.begin(), str.end(), str.begin(), towlower); 191 | return std::move(str); 192 | } 193 | 194 | // string转大写 195 | std::string strToUpper(std::string &&str) { 196 | transform(str.begin(), str.end(), str.begin(), towupper); 197 | return std::move(str); 198 | } 199 | 200 | vector split(const string &s, const char *delim) { 201 | vector ret; 202 | size_t last = 0; 203 | auto index = s.find(delim, last); 204 | while (index != string::npos) { 205 | if (index - last > 0) { 206 | ret.push_back(s.substr(last, index - last)); 207 | } 208 | last = index + strlen(delim); 209 | index = s.find(delim, last); 210 | } 211 | if (!s.size() || s.size() - last > 0) { 212 | ret.push_back(s.substr(last)); 213 | } 214 | return ret; 215 | } 216 | 217 | #define TRIM(s, chars) \ 218 | do { \ 219 | string map(0xFF, '\0'); \ 220 | for (auto &ch : chars) { \ 221 | map[(unsigned char &)ch] = '\1'; \ 222 | } \ 223 | while (s.size() && map.at((unsigned char &)s.back())) \ 224 | s.pop_back(); \ 225 | while (s.size() && map.at((unsigned char &)s.front())) \ 226 | s.erase(0, 1); \ 227 | } while (0); 228 | 229 | // 去除前后的空格、回车符、制表符 230 | std::string &trim(std::string &s, const string &chars) { 231 | TRIM(s, chars); 232 | return s; 233 | } 234 | 235 | std::string trim(std::string &&s, const string &chars) { 236 | TRIM(s, chars); 237 | return std::move(s); 238 | } 239 | 240 | void replace(string &str, const string &old_str, const string &new_str) { 241 | if (old_str.empty() || old_str == new_str) { 242 | return; 243 | } 244 | auto pos = str.find(old_str); 245 | if (pos == string::npos) { 246 | return; 247 | } 248 | str.replace(pos, old_str.size(), new_str); 249 | replace(str, old_str, new_str); 250 | } 251 | 252 | bool start_with(const string &str, const string &substr) { 253 | return str.find(substr) == 0; 254 | } 255 | 256 | bool end_with(const string &str, const string &substr) { 257 | auto pos = str.rfind(substr); 258 | return pos != string::npos && pos == str.size() - substr.size(); 259 | } 260 | 261 | #if defined(_WIN32) 262 | void sleep(int second) { 263 | Sleep(1000 * second); 264 | } 265 | void usleep(int micro_seconds) { 266 | 
this_thread::sleep_for(std::chrono::microseconds(micro_seconds)); 267 | } 268 | 269 | const char *strcasestr(const char *big, const char *little) { 270 | string big_str = big; 271 | string little_str = little; 272 | strToLower(big_str); 273 | strToLower(little_str); 274 | auto pos = strstr(big_str.data(), little_str.data()); 275 | if (!pos) { 276 | return nullptr; 277 | } 278 | return big + (pos - big_str.data()); 279 | } 280 | 281 | int vasprintf(char **strp, const char *fmt, va_list ap) { 282 | // _vscprintf tells you how big the buffer needs to be 283 | int len = _vscprintf(fmt, ap); 284 | if (len == -1) { 285 | return -1; 286 | } 287 | size_t size = (size_t)len + 1; 288 | char *str = (char *)malloc(size); 289 | if (!str) { 290 | return -1; 291 | } 292 | // _vsprintf_s is the "secure" version of vsprintf 293 | int r = vsprintf_s(str, len + 1, fmt, ap); 294 | if (r == -1) { 295 | free(str); 296 | return -1; 297 | } 298 | *strp = str; 299 | return r; 300 | } 301 | 302 | int asprintf(char **strp, const char *fmt, ...) { 303 | va_list ap; 304 | va_start(ap, fmt); 305 | int r = vasprintf(strp, fmt, ap); 306 | va_end(ap); 307 | return r; 308 | } 309 | 310 | #endif // WIN32 311 | 312 | static inline uint64_t getCurrentMicrosecondOrigin() { 313 | #if !defined(_WIN32) 314 | struct timeval tv; 315 | gettimeofday(&tv, nullptr); 316 | return tv.tv_sec * 1000000LL + tv.tv_usec; 317 | #else 318 | return std::chrono::duration_cast(std::chrono::system_clock::now().time_since_epoch()) 319 | .count(); 320 | #endif 321 | } 322 | 323 | string getTimeStr(const char *fmt, time_t time) { 324 | if (!time) { 325 | time = ::time(nullptr); 326 | } 327 | auto tm = getLocalTime(time); 328 | char buffer[64]; 329 | auto success = std::strftime(buffer, sizeof(buffer), fmt, &tm); 330 | return 0 == success ? string(fmt) : buffer; 331 | } 332 | 333 | struct tm getLocalTime(time_t sec) { 334 | struct tm tm; 335 | #ifdef _WIN32 336 | localtime_s(&tm, &sec); 337 | #else 338 | localtime_r(&sec, &tm); 339 | #endif //_WIN32 340 | return tm; 341 | } 342 | 343 | static thread_local string thread_name; 344 | 345 | static string limitString(const char *name, size_t max_size) { 346 | string str = name; 347 | if (str.size() + 1 > max_size) { 348 | auto erased = str.size() + 1 - max_size + 3; 349 | str.replace(5, erased, "..."); 350 | } 351 | return str; 352 | } 353 | 354 | } // namespace toolkit 355 | -------------------------------------------------------------------------------- /src/util/util.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2016 The ZLToolKit project authors. All Rights Reserved. 3 | * 4 | * This file is part of ZLToolKit(https://github.com/ZLMediaKit/ZLToolKit). 5 | * 6 | * Use of this source code is governed by MIT license that can be found in the 7 | * LICENSE file in the root of the source tree. All contributing project authors 8 | * may be found in the AUTHORS file in the root of the source tree. 
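The time helpers just above (getCurrentMicrosecondOrigin, getTimeStr, getLocalTime) hide the gettimeofday/chrono and localtime_s/localtime_r platform differences. A minimal usage sketch, assuming `util.h`/`util.cpp` from this repo are on the include path and linked; the file name `timestr_demo.cpp` is only illustrative:

```cpp
// timestr_demo.cpp - usage sketch for getTimeStr()/getLocalTime() shown above.
#include <cstdio>
#include <ctime>
#include <string>

#include "util.h" // src/util/util.h in this repo

int main() {
    // A zero `time` argument means "format the current time".
    std::string now = toolkit::getTimeStr("%Y-%m-%d %H:%M:%S");

    // getLocalTime() wraps localtime_s (Windows) / localtime_r (POSIX).
    struct tm tm = toolkit::getLocalTime(::time(nullptr));

    std::printf("%s (year %d)\n", now.c_str(), tm.tm_year + 1900);
    return 0;
}
```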
9 | */ 10 | 11 | #ifndef UTIL_UTIL_H_ 12 | #define UTIL_UTIL_H_ 13 | 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | #include 22 | #include 23 | #if defined(_WIN32) 24 | #undef FD_SETSIZE 25 | // 修改默认64为1024路 26 | #define FD_SETSIZE 1024 27 | #else 28 | #include 29 | #include 30 | #include 31 | #include 32 | #endif // defined(_WIN32) 33 | 34 | #if defined(__APPLE__) 35 | #include "TargetConditionals.h" 36 | #if TARGET_IPHONE_SIMULATOR 37 | #define OS_IPHONE 38 | #elif TARGET_OS_IPHONE 39 | #define OS_IPHONE 40 | #endif 41 | #endif //__APPLE__ 42 | 43 | #define INSTANCE_IMP(class_name, ...) \ 44 | class_name &class_name::Instance() { \ 45 | static std::shared_ptr s_instance(new class_name(__VA_ARGS__)); \ 46 | static class_name &s_instance_ref = *s_instance; \ 47 | return s_instance_ref; \ 48 | } 49 | 50 | namespace toolkit { 51 | 52 | #define StrPrinter ::toolkit::_StrPrinter() 53 | class _StrPrinter : public std::string { 54 | public: 55 | _StrPrinter() {} 56 | 57 | template 58 | _StrPrinter &operator<<(T &&data) { 59 | _stream << std::forward(data); 60 | this->std::string::operator=(_stream.str()); 61 | return *this; 62 | } 63 | 64 | std::string operator<<(std::ostream &(*f)(std::ostream &)) const { return *this; } 65 | 66 | private: 67 | std::stringstream _stream; 68 | }; 69 | 70 | // 禁止拷贝基类 71 | class noncopyable { 72 | protected: 73 | noncopyable() {} 74 | ~noncopyable() {} 75 | 76 | private: 77 | // 禁止拷贝 78 | noncopyable(const noncopyable &that) = delete; 79 | noncopyable(noncopyable &&that) = delete; 80 | noncopyable &operator=(const noncopyable &that) = delete; 81 | noncopyable &operator=(noncopyable &&that) = delete; 82 | }; 83 | 84 | // 可以保存任意的对象 85 | class Any { 86 | public: 87 | using Ptr = std::shared_ptr; 88 | 89 | Any() = default; 90 | ~Any() = default; 91 | 92 | template 93 | void set(ArgsType &&...args) { 94 | _data.reset(new C(std::forward(args)...), [](void *ptr) { delete (C *)ptr; }); 95 | } 96 | template 97 | C &get() { 98 | if (!_data) { 99 | throw std::invalid_argument("Any is empty"); 100 | } 101 | C *ptr = (C *)_data.get(); 102 | return *ptr; 103 | } 104 | 105 | operator bool() { return _data.operator bool(); } 106 | bool empty() { return !bool(); } 107 | 108 | private: 109 | std::shared_ptr _data; 110 | }; 111 | 112 | // 用于保存一些外加属性 113 | class AnyStorage : public std::unordered_map { 114 | public: 115 | AnyStorage() = default; 116 | ~AnyStorage() = default; 117 | using Ptr = std::shared_ptr; 118 | }; 119 | 120 | // 对象安全的构建和析构 121 | // 构建后执行onCreate函数 122 | // 析构前执行onDestory函数 123 | // 在函数onCreate和onDestory中可以执行构造或析构中不能调用的方法,比如说shared_from_this或者虚函数 124 | class Creator { 125 | public: 126 | template 127 | static std::shared_ptr create(ArgsType &&...args) { 128 | std::shared_ptr ret(new C(std::forward(args)...), [](C *ptr) { 129 | ptr->onDestory(); 130 | delete ptr; 131 | }); 132 | ret->onCreate(); 133 | return ret; 134 | } 135 | 136 | private: 137 | Creator() = default; 138 | ~Creator() = default; 139 | }; 140 | 141 | template 142 | class ObjectStatistic { 143 | public: 144 | ObjectStatistic() { ++getCounter(); } 145 | 146 | ~ObjectStatistic() { --getCounter(); } 147 | 148 | static size_t count() { return getCounter().load(); } 149 | 150 | private: 151 | static std::atomic &getCounter(); 152 | }; 153 | 154 | #define StatisticImp(Type) \ 155 | template <> \ 156 | std::atomic &ObjectStatistic::getCounter() { \ 157 | static std::atomic instance(0); \ 158 | return instance; \ 159 | } 160 | 161 | std::string 
makeRandStr(int sz, bool printable = true); 162 | std::string hexdump(const void *buf, size_t len); 163 | std::string hexmem(const void *buf, size_t len); 164 | std::string exePath(bool isExe = true); 165 | std::string exeDir(bool isExe = true); 166 | std::string exeName(bool isExe = true); 167 | 168 | std::vector split(const std::string &s, const char *delim); 169 | // 去除前后的空格、回车符、制表符... 170 | std::string &trim(std::string &s, const std::string &chars = " \r\n\t"); 171 | std::string trim(std::string &&s, const std::string &chars = " \r\n\t"); 172 | // string转小写 173 | std::string &strToLower(std::string &str); 174 | std::string strToLower(std::string &&str); 175 | // string转大写 176 | std::string &strToUpper(std::string &str); 177 | std::string strToUpper(std::string &&str); 178 | // 替换子字符串 179 | void replace(std::string &str, const std::string &old_str, const std::string &new_str); 180 | // 判断是否为ip 181 | bool isIP(const char *str); 182 | // 字符串是否以xx开头 183 | bool start_with(const std::string &str, const std::string &substr); 184 | // 字符串是否以xx结尾 185 | bool end_with(const std::string &str, const std::string &substr); 186 | 187 | #ifndef bzero 188 | #define bzero(ptr, size) memset((ptr), 0, (size)); 189 | #endif // bzero 190 | 191 | #if defined(ANDROID) 192 | template 193 | std::string to_string(T value) { 194 | std::ostringstream os; 195 | os << std::forward(value); 196 | return os.str(); 197 | } 198 | #endif // ANDROID 199 | 200 | #if defined(_WIN32) 201 | int gettimeofday(struct timeval *tp, void *tzp); 202 | void usleep(int micro_seconds); 203 | void sleep(int second); 204 | int vasprintf(char **strp, const char *fmt, va_list ap); 205 | int asprintf(char **strp, const char *fmt, ...); 206 | const char *strcasestr(const char *big, const char *little); 207 | 208 | #if !defined(strcasecmp) 209 | #define strcasecmp _stricmp 210 | #endif 211 | 212 | #ifndef ssize_t 213 | #ifdef _WIN64 214 | #define ssize_t int64_t 215 | #else 216 | #define ssize_t int32_t 217 | #endif 218 | #endif 219 | #endif // WIN32 220 | 221 | /** 222 | * 获取时间差, 返回值单位为秒 223 | */ 224 | long getGMTOff(); 225 | 226 | /** 227 | * 获取1970年至今的毫秒数 228 | * @param system_time 是否为系统时间(系统时间可以回退),否则为程序启动时间(不可回退) 229 | */ 230 | uint64_t getCurrentMillisecond(bool system_time = false); 231 | 232 | /** 233 | * 获取1970年至今的微秒数 234 | * @param system_time 是否为系统时间(系统时间可以回退),否则为程序启动时间(不可回退) 235 | */ 236 | uint64_t getCurrentMicrosecond(bool system_time = false); 237 | 238 | /** 239 | * 获取时间字符串 240 | * @param fmt 时间格式,譬如%Y-%m-%d %H:%M:%S 241 | * @return 时间字符串 242 | */ 243 | std::string getTimeStr(const char *fmt, time_t time = 0); 244 | 245 | /** 246 | * 根据unix时间戳获取本地时间 247 | * @param sec unix时间戳 248 | * @return tm结构体 249 | */ 250 | struct tm getLocalTime(time_t sec); 251 | 252 | /** 253 | * 设置线程名 254 | */ 255 | void setThreadName(const char *name); 256 | 257 | /** 258 | * 获取线程名 259 | */ 260 | std::string getThreadName(); 261 | 262 | /** 263 | * 设置当前线程cpu亲和性 264 | * @param i cpu索引,如果为-1,那么取消cpu亲和性 265 | * @return 是否成功,目前只支持linux 266 | */ 267 | bool setThreadAffinity(int i); 268 | 269 | /** 270 | * 根据typeid(class).name()获取类名 271 | */ 272 | std::string demangle(const char *mangled); 273 | 274 | inline std::string getEnv(const std::string &key) { 275 | auto ekey = key.c_str(); 276 | if (*ekey == '$') { 277 | ++ekey; 278 | } 279 | auto value = *ekey ? getenv(ekey) : nullptr; 280 | return value ? 
value : ""; 281 | } 282 | 283 | } // namespace toolkit 284 | #endif /* UTIL_UTIL_H_ */ 285 | -------------------------------------------------------------------------------- /src/wifi/Rtp.h: -------------------------------------------------------------------------------- 1 | // 2 | // Created by liangzhuohua on 2024/6/13. 3 | // 4 | 5 | #ifndef FPV_WFB_RTP_H 6 | #define FPV_WFB_RTP_H 7 | 8 | #if defined(_WIN32) 9 | #pragma pack(push, 1) 10 | #endif // defined(_WIN32) 11 | 12 | class RtpHeader { 13 | public: 14 | #if defined(__BYTE_ORDER) && __BYTE_ORDER == __BIG_ENDIAN || defined(_WIN32) && REG_DWORD == REG_DWORD_BIG_ENDIAN 15 | // 版本号,固定为2 16 | uint32_t version : 2; 17 | // padding 18 | uint32_t padding : 1; 19 | // 扩展 20 | uint32_t ext : 1; 21 | // csrc 22 | uint32_t csrc : 4; 23 | // mark 24 | uint32_t mark : 1; 25 | // 负载类型 26 | uint32_t pt : 7; 27 | #else 28 | // csrc 29 | uint32_t csrc : 4; 30 | // 扩展 31 | uint32_t ext : 1; 32 | // padding 33 | uint32_t padding : 1; 34 | // 版本号,固定为2 35 | uint32_t version : 2; 36 | // 负载类型 37 | uint32_t pt : 7; 38 | // mark 39 | uint32_t mark : 1; 40 | #endif 41 | // 序列号 42 | uint32_t seq : 16; 43 | // 时间戳 44 | uint32_t stamp; 45 | // ssrc 46 | uint32_t ssrc; 47 | // 负载,如果有csrc和ext,前面为 4 * csrc + (4 + 4 * ext_len) 48 | uint8_t payload; 49 | 50 | public: 51 | #define AV_RB16(x) ((((const uint8_t *)(x))[0] << 8) | ((const uint8_t *)(x))[1]) 52 | 53 | size_t getCsrcSize() const { 54 | // 每个csrc占用4字节 55 | return csrc << 2; 56 | } 57 | 58 | uint8_t *getCsrcData() { 59 | if (!csrc) { 60 | return nullptr; 61 | } 62 | return &payload; 63 | } 64 | 65 | size_t getExtSize() const { 66 | // rtp有ext 67 | if (!ext) { 68 | return 0; 69 | } 70 | auto ext_ptr = &payload + getCsrcSize(); 71 | // uint16_t reserved = AV_RB16(ext_ptr); 72 | // 每个ext占用4字节 73 | return AV_RB16(ext_ptr + 2) << 2; 74 | } 75 | 76 | uint16_t getExtReserved() const { 77 | // rtp有ext 78 | if (!ext) { 79 | return 0; 80 | } 81 | auto ext_ptr = &payload + getCsrcSize(); 82 | return AV_RB16(ext_ptr); 83 | } 84 | 85 | uint8_t *getExtData() { 86 | if (!ext) { 87 | return nullptr; 88 | } 89 | auto ext_ptr = &payload + getCsrcSize(); 90 | // 多出的4个字节分别为reserved、ext_len 91 | return ext_ptr + 4; 92 | } 93 | 94 | size_t getPayloadOffset() const { 95 | // 有ext时,还需要忽略reserved、ext_len 4个字节 96 | return getCsrcSize() + (ext ? 
(4 + getExtSize()) : 0); 97 | } 98 | 99 | uint8_t *getPayloadData() { return &payload + getPayloadOffset(); } 100 | 101 | size_t getPaddingSize(size_t rtp_size) const { 102 | if (!padding) { 103 | return 0; 104 | } 105 | auto end = (uint8_t *)this + rtp_size - 1; 106 | return *end; 107 | } 108 | 109 | ssize_t getPayloadSize(size_t rtp_size) const { 110 | auto invalid_size = getPayloadOffset() + getPaddingSize(rtp_size); 111 | return (ssize_t)rtp_size - invalid_size - 12; 112 | } 113 | 114 | std::string dumpString(size_t rtp_size) const { 115 | std::stringstream printer; 116 | printer << "version:" << (int)version << "\r\n"; 117 | printer << "padding:" << getPaddingSize(rtp_size) << "\r\n"; 118 | printer << "ext:" << getExtSize() << "\r\n"; 119 | printer << "csrc:" << getCsrcSize() << "\r\n"; 120 | printer << "mark:" << (int)mark << "\r\n"; 121 | printer << "pt:" << (int)pt << "\r\n"; 122 | printer << "seq:" << ntohs(seq) << "\r\n"; 123 | printer << "stamp:" << ntohl(stamp) << "\r\n"; 124 | printer << "ssrc:" << ntohl(ssrc) << "\r\n"; 125 | printer << "rtp size:" << rtp_size << "\r\n"; 126 | printer << "payload offset:" << getPayloadOffset() << "\r\n"; 127 | printer << "payload size:" << getPayloadSize(rtp_size) << "\r\n"; 128 | return printer.str(); 129 | } 130 | 131 | /////////////////////////////////////////////////////////////////////// 132 | } PACKED; 133 | 134 | #if defined(_WIN32) 135 | #pragma pack(pop) 136 | #endif // defined(_WIN32) 137 | 138 | #endif // FPV_WFB_RTP_H 139 | -------------------------------------------------------------------------------- /src/wifi/RxFrame.h: -------------------------------------------------------------------------------- 1 | // 2 | // Created by gaeta on 2024-03-31. 3 | // 4 | 5 | #ifndef LIBUSBDEMO_RXFRAME_H 6 | #define LIBUSBDEMO_RXFRAME_H 7 | 8 | #include 9 | #include 10 | #include 11 | 12 | enum class RadioPort { /* define your RadioPort enum */ }; 13 | 14 | class RxFrame { 15 | private: 16 | std::span _data; 17 | static constexpr std::array _dataHeader 18 | = { uint8_t(0x08), uint8_t(0x01) }; // Frame control value for QoS Data 19 | 20 | public: 21 | RxFrame(const std::span &data) 22 | : _data(data) { 23 | DataAsMemory = _data; 24 | } 25 | 26 | std::span DataAsMemory; // useless in c++ 27 | 28 | std::span ControlField() const { return { _data.data(), 2 }; } 29 | std::span Duration() const { return { _data.data() + 2, 2 }; } 30 | std::span MacAp() const { return { _data.data() + 4, 6 }; } // receiverAddress 31 | std::span MacSrcUniqueIdPart() const { return { _data.data() + 10, 1 }; } // transmitterAddress 32 | std::span MacSrcNoncePart1() const { return { _data.data() + 11, 4 }; } 33 | std::span MacSrcRadioPort() const { return { _data.data() + 15, 1 }; } 34 | std::span MacDstUniqueIdPart() const { return { _data.data() + 16, 1 }; } // destinationAddress 35 | std::span MacDstNoncePart2() const { return { _data.data() + 17, 4 }; } 36 | std::span MacDstRadioPort() const { return { _data.data() + 21, 1 }; } 37 | std::span SequenceControl() const { return { _data.data() + 22, 2 }; } 38 | std::span PayloadSpan() const { return { _data.data() + 24, _data.size() - 28 }; } 39 | std::span GetNonce() const { 40 | std::array data; 41 | std::copy(_data.begin() + 11, _data.begin() + 15, data.begin()); 42 | std::copy(_data.begin() + 17, _data.begin() + 21, data.begin() + 4); 43 | return { data.data(), data.size() }; 44 | } 45 | 46 | // RadioPort get_valid_radio_port() const { 47 | // return RadioPort::Fromuint8_t(_data[15]); 48 | // } 49 | 50 | bool 
IsValidWfbFrame() const { 51 | if (_data.empty()) 52 | return false; 53 | if (!IsDataFrame()) 54 | return false; 55 | if (PayloadSpan().empty()) 56 | return false; 57 | if (!HasValidAirGndId()) 58 | return false; 59 | if (!HasValidRadioPort()) 60 | return false; 61 | // TODO: add `frame.PayloadSpan().size() > RAW_WIFI_FRAME_MAX_PAYLOAD_SIZE` 62 | return true; 63 | } 64 | 65 | uint8_t GetValidAirGndId() const { return _data[10]; } 66 | 67 | bool MatchesChannelID(const uint8_t *channel_id) const { 68 | // 0x57, 0x42, 0xaa, 0xbb, 0xcc, 0xdd, // last four bytes are replaced by channel_id (x2) 69 | return _data[10] == 0x57 && _data[11] == 0x42 && _data[12] == channel_id[0] && _data[13] == channel_id[1] 70 | && _data[14] == channel_id[2] && _data[15] == channel_id[3] && _data[16] == 0x57 && _data[17] == 0x42 71 | && _data[18] == channel_id[0] && _data[19] == channel_id[1] && _data[20] == channel_id[2] 72 | && _data[21] == channel_id[3]; 73 | } 74 | 75 | private: 76 | bool IsDataFrame() const { return _data.size() >= 2 && _data[0] == _dataHeader[0] && _data[1] == _dataHeader[1]; } 77 | 78 | bool HasValidAirGndId() const { return _data.size() >= 18 && _data[10] == _data[16]; } 79 | 80 | bool HasValidRadioPort() const { return _data.size() >= 22 && _data[15] == _data[21]; } 81 | }; 82 | 83 | class WifiFrame { 84 | public: 85 | WifiFrame(const std::span &rawData) { 86 | // Frame Control (2 bytes) 87 | frameControl = (rawData[1] << 8) | rawData[0]; 88 | 89 | // Duration/ID (2 bytes) 90 | durationID = (rawData[3] << 8) | rawData[2]; 91 | 92 | // Receiver Address (6 bytes) 93 | receiverAddress.assign(rawData.begin() + 4, rawData.begin() + 10); 94 | 95 | // Transmitter Address (6 bytes) 96 | transmitterAddress.assign(rawData.begin() + 10, rawData.begin() + 16); 97 | 98 | // Destination Address (6 bytes) 99 | destinationAddress.assign(rawData.begin() + 16, rawData.begin() + 22); 100 | 101 | // Source Address (6 bytes) 102 | // sourceAddress.assign(rawData.begin() + 22, rawData.begin() + 28); 103 | 104 | // Sequence Control (2 bytes) 105 | sequenceControl = (rawData[22] << 8) | rawData[22]; 106 | 107 | // Frame Body (variable length) 108 | // For simplicity, let's assume the body starts at byte 30 109 | // frameBody.assign(rawData.begin() + 30, rawData.end() - 4); 110 | // 111 | // // Frame Check Sequence (4 bytes, assuming little-endian) 112 | // frameCheckSequence = (rawData[rawData.size() - 1] << 24) | 113 | // (rawData[rawData.size() - 2] << 16) | 114 | // (rawData[rawData.size() - 3] << 8) | 115 | // rawData[rawData.size() - 4]; 116 | } 117 | uint16_t frameControl; 118 | uint16_t durationID; 119 | std::vector receiverAddress; 120 | std::vector transmitterAddress; 121 | std::vector destinationAddress; 122 | std::vector sourceAddress; 123 | uint16_t sequenceControl; 124 | std::vector frameBody; 125 | uint32_t frameCheckSequence; 126 | }; 127 | 128 | #endif // LIBUSBDEMO_RXFRAME_H 129 | -------------------------------------------------------------------------------- /src/wifi/WFBDefine.h: -------------------------------------------------------------------------------- 1 | // 2 | // Created by Talus on 2024/6/12. 
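RxFrame above is a fixed-offset view over a received 802.11 QoS data frame: IsValidWfbFrame() only checks the two frame-control bytes, a non-empty payload, and that the air/ground id (offsets 10/16) and radio port (offsets 15/21) are duplicated consistently. A self-contained sketch of that layout; the span element type was stripped in this listing, so `std::span<const uint8_t>` and the file name are assumptions:

```cpp
// rxframe_demo.cpp - usage sketch for RxFrame (assumes src/wifi/RxFrame.h is on the include path).
#include <cstdint>
#include <iostream>
#include <span>
#include <vector>

#include "RxFrame.h"

int main() {
    // Minimal fake frame: 24-byte 802.11 header + 4-byte payload + 4-byte FCS.
    std::vector<uint8_t> raw(32, 0);
    raw[0] = 0x08; raw[1] = 0x01;   // frame control must match RxFrame::_dataHeader
    raw[10] = 0x57; raw[16] = 0x57; // air/ground unique id must match (offsets 10 and 16)
    raw[15] = 0x01; raw[21] = 0x01; // radio port must match (offsets 15 and 21)

    RxFrame frame(std::span<const uint8_t>(raw.data(), raw.size()));
    std::cout << std::boolalpha << "valid wfb frame: " << frame.IsValidWfbFrame() << "\n";
    return 0;
}
```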
3 | // 4 | 5 | #ifndef WFBDEFINE_H 6 | #define WFBDEFINE_H 7 | 8 | extern "C" { 9 | #include "fec.h" 10 | } 11 | 12 | #include 13 | #include 14 | #include 15 | #include 16 | #include 17 | 18 | using namespace std; 19 | 20 | inline uint32_t htobe32(uint32_t host_32bits) { 21 | // 检查主机字节序是否为小端模式 22 | uint16_t test = 0x1; 23 | bool is_little_endian = *((uint8_t *)&test) == 0x1; 24 | 25 | if (is_little_endian) { 26 | // 如果是小端字节序,则转换为大端字节序 27 | return ((host_32bits & 0x000000FF) << 24) | ((host_32bits & 0x0000FF00) << 8) 28 | | ((host_32bits & 0x00FF0000) >> 8) | ((host_32bits & 0xFF000000) >> 24); 29 | } else { 30 | // 如果已经是大端字节序,则直接返回 31 | return host_32bits; 32 | } 33 | } 34 | 35 | inline uint64_t be64toh(uint64_t big_endian_64bits) { 36 | // 如果本地字节序是小端,需要进行转换 37 | #if defined(_WIN32) || defined(_WIN64) 38 | // 如果是 Windows 平台 39 | return _byteswap_uint64(big_endian_64bits); 40 | #else 41 | // 如果是其他平台,假设是大端或者已经有对应的函数实现 42 | return big_endian_64bits; 43 | #endif 44 | } 45 | 46 | // 定义 be32toh 函数,将大端 32 位整数转换为主机字节顺序 47 | inline uint32_t be32toh(uint32_t big_endian_32bits) { 48 | // 如果本地字节序是小端,需要进行转换 49 | #if defined(_WIN32) || defined(_WIN64) 50 | // 如果是 Windows 平台,使用 _byteswap_ulong 函数 51 | return _byteswap_ulong(big_endian_32bits); 52 | #else 53 | // 如果是其他平台,假设是大端或者已经有对应的函数实现 54 | return big_endian_32bits; 55 | #endif 56 | } 57 | 58 | // 定义 be16toh 函数,将大端 16 位整数转换为主机字节顺序 59 | inline uint16_t be16toh(uint16_t big_endian_16bits) { 60 | // 如果本地字节序是小端,需要进行转换 61 | #if defined(_WIN32) || defined(_WIN64) 62 | // 如果是 Windows 平台,使用 _byteswap_ushort 函数 63 | return _byteswap_ushort(big_endian_16bits); 64 | #else 65 | // 如果是其他平台,假设是大端或者已经有对应的函数实现 66 | return big_endian_16bits; 67 | #endif 68 | } 69 | 70 | static uint8_t ieee80211_header[] = { 71 | 0x08, 0x01, 0x00, 0x00, // data frame, not protected, from STA to DS via an AP, duration not set 72 | 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // receiver is broadcast 73 | 0x57, 0x42, 0xaa, 0xbb, 0xcc, 0xdd, // last four bytes will be replaced by channel_id 74 | 0x57, 0x42, 0xaa, 0xbb, 0xcc, 0xdd, // last four bytes will be replaced by channel_id 75 | 0x00, 0x00, // (seq_num << 4) + fragment_num 76 | }; 77 | 78 | #define IEEE80211_RADIOTAP_MCS_HAVE_BW 0x01 79 | #define IEEE80211_RADIOTAP_MCS_HAVE_MCS 0x02 80 | #define IEEE80211_RADIOTAP_MCS_HAVE_GI 0x04 81 | #define IEEE80211_RADIOTAP_MCS_HAVE_FMT 0x08 82 | 83 | #define IEEE80211_RADIOTAP_MCS_BW_20 0 84 | #define IEEE80211_RADIOTAP_MCS_BW_40 1 85 | #define IEEE80211_RADIOTAP_MCS_BW_20L 2 86 | #define IEEE80211_RADIOTAP_MCS_BW_20U 3 87 | #define IEEE80211_RADIOTAP_MCS_SGI 0x04 88 | #define IEEE80211_RADIOTAP_MCS_FMT_GF 0x08 89 | 90 | #define IEEE80211_RADIOTAP_MCS_HAVE_FEC 0x10 91 | #define IEEE80211_RADIOTAP_MCS_HAVE_STBC 0x20 92 | #define IEEE80211_RADIOTAP_MCS_FEC_LDPC 0x10 93 | #define IEEE80211_RADIOTAP_MCS_STBC_MASK 0x60 94 | #define IEEE80211_RADIOTAP_MCS_STBC_1 1 95 | #define IEEE80211_RADIOTAP_MCS_STBC_2 2 96 | #define IEEE80211_RADIOTAP_MCS_STBC_3 3 97 | #define IEEE80211_RADIOTAP_MCS_STBC_SHIFT 5 98 | 99 | #define MCS_KNOWN \ 100 | (IEEE80211_RADIOTAP_MCS_HAVE_MCS | IEEE80211_RADIOTAP_MCS_HAVE_BW | IEEE80211_RADIOTAP_MCS_HAVE_GI \ 101 | | IEEE80211_RADIOTAP_MCS_HAVE_STBC | IEEE80211_RADIOTAP_MCS_HAVE_FEC) 102 | 103 | static uint8_t radiotap_header[] __attribute__((unused)) = { 104 | 0x00, 0x00, // <-- radiotap version 105 | 0x0d, 0x00, // <- radiotap header length 106 | 0x00, 0x80, 0x08, 0x00, // <-- radiotap present flags: RADIOTAP_TX_FLAGS + RADIOTAP_MCS 107 | 0x08, 0x00, // RADIOTAP_F_TX_NOACK 108 | 
MCS_KNOWN, 0x00, 0x00 // bitmap, flags, mcs_index 109 | }; 110 | 111 | typedef struct { 112 | uint64_t block_idx; 113 | uint8_t **fragments; 114 | uint8_t *fragment_map; 115 | uint8_t fragment_to_send_idx; 116 | uint8_t has_fragments; 117 | } rx_ring_item_t; 118 | 119 | static inline int modN(int x, int base) { 120 | return (base + (x % base)) % base; 121 | } 122 | 123 | class antennaItem { 124 | public: 125 | antennaItem(void) 126 | : count_all(0) 127 | , rssi_sum(0) 128 | , rssi_min(0) 129 | , rssi_max(0) {} 130 | 131 | void log_rssi(int8_t rssi) { 132 | if (count_all == 0) { 133 | rssi_min = rssi; 134 | rssi_max = rssi; 135 | } else { 136 | rssi_min = min(rssi, rssi_min); 137 | rssi_max = max(rssi, rssi_max); 138 | } 139 | rssi_sum += rssi; 140 | count_all += 1; 141 | } 142 | 143 | int32_t count_all; 144 | int32_t rssi_sum; 145 | int8_t rssi_min; 146 | int8_t rssi_max; 147 | }; 148 | 149 | typedef std::unordered_map antenna_stat_t; 150 | 151 | #define RX_RING_SIZE 40 152 | 153 | #pragma pack(push, 1) 154 | typedef struct { 155 | uint8_t packet_type; 156 | uint8_t session_nonce[crypto_box_NONCEBYTES]; // random data 157 | } wsession_hdr_t; 158 | #pragma pack(pop) 159 | 160 | #pragma pack(push, 1) 161 | typedef struct { 162 | uint64_t epoch; // Drop session packets from old epoch 163 | uint32_t channel_id; // (link_id << 8) + port_number 164 | uint8_t fec_type; // Now only supported type is WFB_FEC_VDM_RS 165 | uint8_t k; // FEC k 166 | uint8_t n; // FEC n 167 | uint8_t session_key[crypto_aead_chacha20poly1305_KEYBYTES]; 168 | } wsession_data_t; 169 | #pragma pack(pop) 170 | 171 | // Data packet. Embed FEC-encoded data 172 | #pragma pack(push, 1) 173 | typedef struct { 174 | uint8_t packet_type; 175 | uint64_t data_nonce; // big endian, data_nonce = (block_idx << 8) + fragment_idx 176 | } wblock_hdr_t; 177 | #pragma pack(pop) 178 | 179 | // Plain data packet after FEC decode 180 | #pragma pack(push, 1) 181 | typedef struct { 182 | uint8_t flags; 183 | uint16_t packet_size; // big endian 184 | } wpacket_hdr_t; 185 | #pragma pack(pop) 186 | 187 | #define MAX_PAYLOAD_SIZE \ 188 | (MAX_PACKET_SIZE - sizeof(radiotap_header) - sizeof(ieee80211_header) - sizeof(wblock_hdr_t) \ 189 | - crypto_aead_chacha20poly1305_ABYTES - sizeof(wpacket_hdr_t)) 190 | #define MAX_FEC_PAYLOAD \ 191 | (MAX_PACKET_SIZE - sizeof(radiotap_header) - sizeof(ieee80211_header) - sizeof(wblock_hdr_t) \ 192 | - crypto_aead_chacha20poly1305_ABYTES) 193 | #define MAX_PACKET_SIZE 1510 194 | #define MAX_FORWARDER_PACKET_SIZE (MAX_PACKET_SIZE - sizeof(radiotap_header) - sizeof(ieee80211_header)) 195 | 196 | #define BLOCK_IDX_MASK ((1LLU << 56) - 1) 197 | #define MAX_BLOCK_IDX ((1LLU << 55) - 1) 198 | 199 | // packet types 200 | #define WFB_PACKET_DATA 0x1 201 | #define WFB_PACKET_KEY 0x2 202 | 203 | // FEC types 204 | #define WFB_FEC_VDM_RS 0x1 // Reed-Solomon on Vandermonde matrix 205 | 206 | // packet flags 207 | #define WFB_PACKET_FEC_ONLY 0x1 208 | 209 | #define SESSION_KEY_ANNOUNCE_MSEC 1000 210 | #define RX_ANT_MAX 4 211 | 212 | #endif // WFBDEFINE_H 213 | -------------------------------------------------------------------------------- /src/wifi/WFBProcessor.cpp: -------------------------------------------------------------------------------- 1 | // 2 | // Created by Talus on 2024/6/12. 
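WFBDefine.h above fixes two packed identifiers: `channel_id = (link_id << 8) + port_number` (see `wsession_data_t`) and `data_nonce = (block_idx << 8) + fragment_idx` (see `wblock_hdr_t`), with the block index limited by `BLOCK_IDX_MASK`. A standalone sketch of that bit layout; the concrete values are made up, and the shifts simply mirror what `Aggregator::process_packet` does below:

```cpp
// nonce_layout_demo.cpp - sketch of the WFB id/nonce packing documented in WFBDefine.h.
#include <cstdint>
#include <cstdio>

int main() {
    // channel_id = (link_id << 8) + port_number
    uint32_t link_id = 7669206, port = 0;
    uint32_t channel_id = (link_id << 8) + port;

    // data_nonce = (block_idx << 8) + fragment_idx, block_idx capped to 56 bits
    uint64_t block_idx = 12345, fragment_idx = 7;
    uint64_t data_nonce = (block_idx << 8) + fragment_idx;

    uint64_t recovered_block = (data_nonce >> 8) & ((1LLU << 56) - 1); // BLOCK_IDX_MASK
    unsigned recovered_frag = (unsigned)(data_nonce & 0xff);

    std::printf("channel_id=0x%08x block=%llu fragment=%u\n",
                (unsigned)channel_id, (unsigned long long)recovered_block, recovered_frag);
    return 0;
}
```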
3 | // 4 | 5 | #include "WFBProcessor.h" 6 | 7 | #include 8 | #include 9 | #include 10 | #include 11 | #include 12 | 13 | using namespace std; 14 | 15 | Aggregator::Aggregator(const string &keypair, uint64_t epoch, uint32_t channel_id, const DataCB &cb) 16 | : fec_p(NULL) 17 | , fec_k(-1) 18 | , fec_n(-1) 19 | , seq(0) 20 | , rx_ring_front(0) 21 | , rx_ring_alloc(0) 22 | , last_known_block((uint64_t)-1) 23 | , epoch(epoch) 24 | , channel_id(channel_id) 25 | , count_p_all(0) 26 | , count_p_dec_err(0) 27 | , count_p_dec_ok(0) 28 | , count_p_fec_recovered(0) 29 | , count_p_lost(0) 30 | , count_p_bad(0) 31 | , count_p_override(0) 32 | , dcb(cb) { 33 | memset(session_key, '\0', sizeof(session_key)); 34 | 35 | FILE *fp; 36 | if ((fp = fopen(keypair.c_str(), "rb")) == NULL) { 37 | throw runtime_error(format("Unable to open {}: {}", keypair.c_str(), strerror(errno))); 38 | } 39 | if (fread(rx_secretkey, crypto_box_SECRETKEYBYTES, 1, fp) != 1) { 40 | fclose(fp); 41 | throw runtime_error(format("Unable to read rx secret key: {}", strerror(errno))); 42 | } 43 | if (fread(tx_publickey, crypto_box_PUBLICKEYBYTES, 1, fp) != 1) { 44 | fclose(fp); 45 | throw runtime_error(format("Unable to read tx public key: {}", strerror(errno))); 46 | } 47 | fclose(fp); 48 | } 49 | 50 | Aggregator::~Aggregator() { 51 | if (fec_p != NULL) { 52 | deinit_fec(); 53 | } 54 | } 55 | 56 | void Aggregator::init_fec(int k, int n) { 57 | 58 | fec_k = k; 59 | fec_n = n; 60 | fec_p = fec_new(fec_k, fec_n); 61 | 62 | rx_ring_front = 0; 63 | rx_ring_alloc = 0; 64 | last_known_block = (uint64_t)-1; 65 | seq = 0; 66 | 67 | for (int ring_idx = 0; ring_idx < RX_RING_SIZE; ring_idx++) { 68 | rx_ring[ring_idx].block_idx = 0; 69 | rx_ring[ring_idx].fragment_to_send_idx = 0; 70 | rx_ring[ring_idx].has_fragments = 0; 71 | rx_ring[ring_idx].fragments = new uint8_t *[fec_n]; 72 | for (int i = 0; i < fec_n; i++) { 73 | rx_ring[ring_idx].fragments[i] = new uint8_t[MAX_FEC_PAYLOAD]; 74 | } 75 | rx_ring[ring_idx].fragment_map = new uint8_t[fec_n]; 76 | memset(rx_ring[ring_idx].fragment_map, '\0', fec_n * sizeof(uint8_t)); 77 | } 78 | } 79 | 80 | void Aggregator::deinit_fec(void) { 81 | 82 | for (int ring_idx = 0; ring_idx < RX_RING_SIZE; ring_idx++) { 83 | delete[] rx_ring[ring_idx].fragment_map; // allocated with new[], so release with delete[] 84 | for (int i = 0; i < fec_n; i++) { 85 | delete[] rx_ring[ring_idx].fragments[i]; 86 | } 87 | delete[] rx_ring[ring_idx].fragments; 88 | } 89 | 90 | fec_free(fec_p); 91 | fec_p = NULL; 92 | fec_k = -1; 93 | fec_n = -1; 94 | } 95 | 96 | int Aggregator::rx_ring_push(void) { 97 | if (rx_ring_alloc < RX_RING_SIZE) { 98 | int idx = modN(rx_ring_front + rx_ring_alloc, RX_RING_SIZE); 99 | rx_ring_alloc += 1; 100 | return idx; 101 | } 102 | 103 | /* 104 | Ring overflow. This means that there are more unfinished blocks than the ring size. 105 | Possible solutions: 106 | 1. Increase the ring size. Do this if you see a large variance in packet travel time through the WiFi card or network 107 | stack. Some cards can cause this through internal packet reordering, different chipsets and/or firmware, or RX hosts 108 | with different CPU power. 109 | 2. Reduce the packet injection speed or try to unify the RX hardware. 
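The ring bookkeeping here leans entirely on `modN()` (WFBDefine.h) to wrap indices around `RX_RING_SIZE`. A tiny standalone sketch of that arithmetic; it redefines `modN` locally so it compiles on its own, and the numbers are arbitrary:

```cpp
// ring_index_demo.cpp - sketch of the modN() ring arithmetic used by rx_ring_push().
#include <cstdio>

static inline int modN(int x, int base) { return (base + (x % base)) % base; } // same as WFBDefine.h

int main() {
    const int RX_RING_SIZE = 40;
    int rx_ring_front = 38, rx_ring_alloc = 3;

    // The next free slot wraps past the end of the ring: (38 + 3) % 40 == 1.
    std::printf("next slot: %d\n", modN(rx_ring_front + rx_ring_alloc, RX_RING_SIZE));

    // modN() also handles negative offsets, e.g. the distance from the front to slot 1.
    std::printf("distance:  %d\n", modN(1 - rx_ring_front, RX_RING_SIZE)); // 3
    return 0;
}
```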
110 | */ 111 | 112 | #if 0 113 | fprintf(stderr, "Override block 0x%" PRIx64 " flush %d fragments\n", rx_ring[rx_ring_front].block_idx, rx_ring[rx_ring_front].has_fragments); 114 | #endif 115 | 116 | count_p_override += 1; 117 | 118 | for (int f_idx = rx_ring[rx_ring_front].fragment_to_send_idx; f_idx < fec_k; f_idx++) { 119 | if (rx_ring[rx_ring_front].fragment_map[f_idx]) { 120 | send_packet(rx_ring_front, f_idx); 121 | } 122 | } 123 | 124 | // override last item in ring 125 | int ring_idx = rx_ring_front; 126 | rx_ring_front = modN(rx_ring_front + 1, RX_RING_SIZE); 127 | return ring_idx; 128 | } 129 | 130 | int Aggregator::get_block_ring_idx(uint64_t block_idx) { 131 | // check if block is already in the ring 132 | for (int i = rx_ring_front, c = rx_ring_alloc; c > 0; i = modN(i + 1, RX_RING_SIZE), c--) { 133 | if (rx_ring[i].block_idx == block_idx) 134 | return i; 135 | } 136 | 137 | // check if block is already known and not in the ring then it is already processed 138 | if (last_known_block != (uint64_t)-1 && block_idx <= last_known_block) { 139 | return -1; 140 | } 141 | 142 | int new_blocks 143 | = (int)min(last_known_block != (uint64_t)-1 ? block_idx - last_known_block : 1, (uint64_t)RX_RING_SIZE); 144 | assert(new_blocks > 0); 145 | 146 | last_known_block = block_idx; 147 | int ring_idx = -1; 148 | 149 | for (int i = 0; i < new_blocks; i++) { 150 | ring_idx = rx_ring_push(); 151 | rx_ring[ring_idx].block_idx = block_idx + i + 1 - new_blocks; 152 | rx_ring[ring_idx].fragment_to_send_idx = 0; 153 | rx_ring[ring_idx].has_fragments = 0; 154 | memset(rx_ring[ring_idx].fragment_map, '\0', fec_n * sizeof(uint8_t)); 155 | } 156 | return ring_idx; 157 | } 158 | 159 | void Aggregator::process_packet( 160 | const uint8_t *buf, size_t size, uint8_t wlan_idx, const uint8_t *antenna, const int8_t *rssi) { 161 | wsession_data_t new_session_data; 162 | count_p_all += 1; 163 | 164 | if (size == 0) 165 | return; 166 | 167 | if (size > MAX_FORWARDER_PACKET_SIZE) { 168 | fprintf(stderr, "Long packet (fec payload)\n"); 169 | count_p_bad += 1; 170 | return; 171 | } 172 | 173 | switch (buf[0]) { 174 | case WFB_PACKET_DATA: 175 | if (size < sizeof(wblock_hdr_t) + sizeof(wpacket_hdr_t)) { 176 | fprintf(stderr, "Short packet (fec header)\n"); 177 | count_p_bad += 1; 178 | return; 179 | } 180 | break; 181 | 182 | case WFB_PACKET_KEY: 183 | if (size != sizeof(wsession_hdr_t) + sizeof(wsession_data_t) + crypto_box_MACBYTES) { 184 | fprintf(stderr, "Invalid session key packet\n"); 185 | count_p_bad += 1; 186 | return; 187 | } 188 | 189 | if (crypto_box_open_easy( 190 | (uint8_t *)&new_session_data, buf + sizeof(wsession_hdr_t), 191 | sizeof(wsession_data_t) + crypto_box_MACBYTES, ((wsession_hdr_t *)buf)->session_nonce, tx_publickey, 192 | rx_secretkey) 193 | != 0) { 194 | fprintf(stderr, "Unable to decrypt session key\n"); 195 | count_p_dec_err += 1; 196 | return; 197 | } 198 | 199 | if (be64toh(new_session_data.epoch) < epoch) { 200 | fprintf( 201 | stderr, "Session epoch doesn't match: %" PRIu64 " < %" PRIu64 "\n", be64toh(new_session_data.epoch), 202 | epoch); 203 | count_p_dec_err += 1; 204 | return; 205 | } 206 | 207 | if (be32toh(new_session_data.channel_id) != channel_id) { 208 | fprintf( 209 | stderr, "Session channel_id doesn't match: %d != %d\n", be32toh(new_session_data.channel_id), 210 | channel_id); 211 | count_p_dec_err += 1; 212 | return; 213 | } 214 | 215 | if (new_session_data.fec_type != WFB_FEC_VDM_RS) { 216 | fprintf(stderr, "Unsupported FEC codec type: %d\n", new_session_data.fec_type); 
217 | count_p_dec_err += 1; 218 | return; 219 | } 220 | 221 | if (new_session_data.n < 1) { 222 | fprintf(stderr, "Invalid FEC N: %d\n", new_session_data.n); 223 | count_p_dec_err += 1; 224 | return; 225 | } 226 | 227 | if (new_session_data.k < 1 || new_session_data.k > new_session_data.n) { 228 | fprintf(stderr, "Invalid FEC K: %d\n", new_session_data.k); 229 | count_p_dec_err += 1; 230 | return; 231 | } 232 | 233 | count_p_dec_ok += 1; 234 | 235 | if (memcmp(session_key, new_session_data.session_key, sizeof(session_key)) != 0) { 236 | epoch = be64toh(new_session_data.epoch); 237 | memcpy(session_key, new_session_data.session_key, sizeof(session_key)); 238 | 239 | if (fec_p != NULL) { 240 | deinit_fec(); 241 | } 242 | 243 | init_fec(new_session_data.k, new_session_data.n); 244 | 245 | fflush(stdout); 246 | } 247 | return; 248 | 249 | default: 250 | fprintf(stderr, "Unknown packet type 0x%x\n", buf[0]); 251 | count_p_bad += 1; 252 | return; 253 | } 254 | 255 | uint8_t decrypted[MAX_FEC_PAYLOAD]; 256 | long long unsigned int decrypted_len; 257 | wblock_hdr_t *block_hdr = (wblock_hdr_t *)buf; 258 | 259 | if (crypto_aead_chacha20poly1305_decrypt( 260 | decrypted, &decrypted_len, NULL, buf + sizeof(wblock_hdr_t), size - sizeof(wblock_hdr_t), buf, 261 | sizeof(wblock_hdr_t), (uint8_t *)(&(block_hdr->data_nonce)), session_key) 262 | != 0) { 263 | fprintf(stderr, "Unable to decrypt packet #0x%" PRIx64 "\n", be64toh(block_hdr->data_nonce)); 264 | count_p_dec_err += 1; 265 | return; 266 | } 267 | 268 | count_p_dec_ok += 1; 269 | 270 | assert(decrypted_len <= MAX_FEC_PAYLOAD); 271 | 272 | uint64_t block_idx = be64toh(block_hdr->data_nonce) >> 8; 273 | uint8_t fragment_idx = (uint8_t)(be64toh(block_hdr->data_nonce) & 0xff); 274 | 275 | // Should never happend due to generating new session key on tx side 276 | if (block_idx > MAX_BLOCK_IDX) { 277 | fprintf(stderr, "block_idx overflow\n"); 278 | count_p_bad += 1; 279 | return; 280 | } 281 | 282 | if (fragment_idx >= fec_n) { 283 | fprintf(stderr, "Invalid fragment_idx: %d\n", fragment_idx); 284 | count_p_bad += 1; 285 | return; 286 | } 287 | 288 | int ring_idx = get_block_ring_idx(block_idx); 289 | 290 | // ignore already processed blocks 291 | if (ring_idx < 0) 292 | return; 293 | 294 | rx_ring_item_t *p = &rx_ring[ring_idx]; 295 | 296 | // ignore already processed fragments 297 | if (p->fragment_map[fragment_idx]) 298 | return; 299 | 300 | memset(p->fragments[fragment_idx], '\0', MAX_FEC_PAYLOAD); 301 | memcpy(p->fragments[fragment_idx], decrypted, decrypted_len); 302 | 303 | p->fragment_map[fragment_idx] = 1; 304 | p->has_fragments += 1; 305 | 306 | // Check if we use current (oldest) block 307 | // then we can optimize and don't wait for all K fragments 308 | // and send packets if there are no gaps in fragments from the beginning of this block 309 | if (ring_idx == rx_ring_front) { 310 | // check if any packets without gaps 311 | while (p->fragment_to_send_idx < fec_k && p->fragment_map[p->fragment_to_send_idx]) { 312 | send_packet(ring_idx, p->fragment_to_send_idx); 313 | p->fragment_to_send_idx += 1; 314 | } 315 | 316 | // remove block if full 317 | if (p->fragment_to_send_idx == fec_k) { 318 | rx_ring_front = modN(rx_ring_front + 1, RX_RING_SIZE); 319 | rx_ring_alloc -= 1; 320 | assert(rx_ring_alloc >= 0); 321 | return; 322 | } 323 | } 324 | 325 | // 1. This is not the oldest block but with sufficient number of fragments (K) to decode 326 | // 2. 
This is the oldest block but with gaps and total number of fragments is K 327 | if (p->fragment_to_send_idx < fec_k && p->has_fragments == fec_k) { 328 | // send all queued packets in all unfinished blocks before and remove them 329 | int nrm = modN(ring_idx - rx_ring_front, RX_RING_SIZE); 330 | 331 | while (nrm > 0) { 332 | for (int f_idx = rx_ring[rx_ring_front].fragment_to_send_idx; f_idx < fec_k; f_idx++) { 333 | if (rx_ring[rx_ring_front].fragment_map[f_idx]) { 334 | send_packet(rx_ring_front, f_idx); 335 | } 336 | } 337 | rx_ring_front = modN(rx_ring_front + 1, RX_RING_SIZE); 338 | rx_ring_alloc -= 1; 339 | nrm -= 1; 340 | } 341 | 342 | assert(rx_ring_alloc > 0); 343 | assert(ring_idx == rx_ring_front); 344 | 345 | // Search for missed data fragments and apply FEC only if needed 346 | for (int f_idx = p->fragment_to_send_idx; f_idx < fec_k; f_idx++) { 347 | if (!p->fragment_map[f_idx]) { 348 | // Recover missed fragments using FEC 349 | apply_fec(ring_idx); 350 | 351 | // Count total number of recovered fragments 352 | for (; f_idx < fec_k; f_idx++) { 353 | if (!p->fragment_map[f_idx]) { 354 | count_p_fec_recovered += 1; 355 | } 356 | } 357 | break; 358 | } 359 | } 360 | 361 | while (p->fragment_to_send_idx < fec_k) { 362 | send_packet(ring_idx, p->fragment_to_send_idx); 363 | p->fragment_to_send_idx += 1; 364 | } 365 | 366 | // remove block 367 | rx_ring_front = modN(rx_ring_front + 1, RX_RING_SIZE); 368 | rx_ring_alloc -= 1; 369 | assert(rx_ring_alloc >= 0); 370 | } 371 | } 372 | 373 | void Aggregator::send_packet(int ring_idx, int fragment_idx) { 374 | wpacket_hdr_t *packet_hdr = (wpacket_hdr_t *)(rx_ring[ring_idx].fragments[fragment_idx]); 375 | uint8_t *payload = (rx_ring[ring_idx].fragments[fragment_idx]) + sizeof(wpacket_hdr_t); 376 | uint8_t flags = packet_hdr->flags; 377 | uint16_t packet_size = be16toh(packet_hdr->packet_size); 378 | uint32_t packet_seq = rx_ring[ring_idx].block_idx * fec_k + fragment_idx; 379 | 380 | if (packet_seq > seq + 1 && seq > 0) { 381 | count_p_lost += (packet_seq - seq - 1); 382 | } 383 | 384 | seq = packet_seq; 385 | 386 | if (packet_size > MAX_PAYLOAD_SIZE) { 387 | fprintf(stderr, "Corrupted packet %u\n", seq); 388 | count_p_bad += 1; 389 | } else if (!(flags & WFB_PACKET_FEC_ONLY)) { 390 | if (dcb) { 391 | dcb(payload, packet_size); 392 | } 393 | } 394 | } 395 | 396 | void Aggregator::apply_fec(int ring_idx) { 397 | assert(fec_k >= 1); 398 | assert(fec_n >= 1); 399 | assert(fec_k <= fec_n); 400 | assert(fec_p != nullptr); 401 | 402 | // 动态分配内存 403 | unsigned *index = new unsigned[fec_k]; 404 | uint8_t **in_blocks = new uint8_t *[fec_k]; 405 | uint8_t **out_blocks = new uint8_t *[fec_n - fec_k]; 406 | int j = fec_k; 407 | int ob_idx = 0; 408 | 409 | for (int i = 0; i < fec_k; i++) { 410 | if (rx_ring[ring_idx].fragment_map[i]) { 411 | in_blocks[i] = rx_ring[ring_idx].fragments[i]; 412 | index[i] = i; 413 | } else { 414 | for (; j < fec_n; j++) { 415 | if (rx_ring[ring_idx].fragment_map[j]) { 416 | in_blocks[i] = rx_ring[ring_idx].fragments[j]; 417 | out_blocks[ob_idx++] = rx_ring[ring_idx].fragments[i]; 418 | index[i] = j; 419 | j++; 420 | break; 421 | } 422 | } 423 | } 424 | } 425 | 426 | fec_decode(fec_p, (const uint8_t **)in_blocks, out_blocks, index, MAX_FEC_PAYLOAD); 427 | 428 | // 释放动态分配的内存 429 | delete[] index; 430 | delete[] in_blocks; 431 | delete[] out_blocks; 432 | } -------------------------------------------------------------------------------- /src/wifi/WFBProcessor.h: 
-------------------------------------------------------------------------------- 1 | // 2 | // Created by Talus on 2024/6/12. 3 | // 4 | 5 | #ifndef WFBPROCESSOR_H 6 | #define WFBPROCESSOR_H 7 | 8 | #include "WFBDefine.h" 9 | #include 10 | 11 | class BaseAggregator { 12 | public: 13 | virtual void 14 | process_packet(const uint8_t *buf, size_t size, uint8_t wlan_idx, const uint8_t *antenna, const int8_t *rssi) 15 | = 0; 16 | }; 17 | 18 | class Aggregator : public BaseAggregator { 19 | public: 20 | using DataCB = std::function; 21 | Aggregator(const std::string &keypair, uint64_t epoch, uint32_t channel_id, const DataCB &cb = nullptr); 22 | ~Aggregator(); 23 | virtual void 24 | process_packet(const uint8_t *buf, size_t size, uint8_t wlan_idx, const uint8_t *antenna, const int8_t *rssi); 25 | 26 | private: 27 | void init_fec(int k, int n); 28 | void deinit_fec(void); 29 | void send_packet(int ring_idx, int fragment_idx); 30 | void apply_fec(int ring_idx); 31 | int get_block_ring_idx(uint64_t block_idx); 32 | int rx_ring_push(void); 33 | fec_t *fec_p; 34 | int fec_k; // RS number of primary fragments in block 35 | int fec_n; // RS total number of fragments in block 36 | int sockfd; 37 | uint32_t seq; 38 | rx_ring_item_t rx_ring[RX_RING_SIZE]; 39 | int rx_ring_front; // current packet 40 | int rx_ring_alloc; // number of allocated entries 41 | uint64_t last_known_block; // id of last known block 42 | uint64_t epoch; // current epoch 43 | const uint32_t channel_id; // (link_id << 8) + port_number 44 | 45 | // rx->tx keypair 46 | uint8_t rx_secretkey[crypto_box_SECRETKEYBYTES]; 47 | uint8_t tx_publickey[crypto_box_PUBLICKEYBYTES]; 48 | uint8_t session_key[crypto_aead_chacha20poly1305_KEYBYTES]; 49 | 50 | antenna_stat_t antenna_stat; 51 | uint32_t count_p_all; 52 | uint32_t count_p_dec_err; 53 | uint32_t count_p_dec_ok; 54 | uint32_t count_p_fec_recovered; 55 | uint32_t count_p_lost; 56 | uint32_t count_p_bad; 57 | uint32_t count_p_override; 58 | // on data output 59 | DataCB dcb; 60 | }; 61 | 62 | #endif // WFBPROCESSOR_H 63 | -------------------------------------------------------------------------------- /src/wifi/WFBReceiver.cpp: -------------------------------------------------------------------------------- 1 | // 2 | // Created by Talus on 2024/6/10. 
3 | // 4 | 5 | #include "WFBReceiver.h" 6 | #include "QmlNativeAPI.h" 7 | #include "RxFrame.h" 8 | #include "WFBProcessor.h" 9 | #include "WiFiDriver.h" 10 | #include "logger.h" 11 | 12 | #include 13 | #include 14 | #include 15 | #include 16 | 17 | #include "Rtp.h" 18 | 19 | std::vector WFBReceiver::GetDongleList() { 20 | std::vector list; 21 | 22 | libusb_context *findctx; 23 | // Initialize libusb 24 | libusb_init(&findctx); 25 | 26 | // Get list of USB devices 27 | libusb_device **devs; 28 | ssize_t count = libusb_get_device_list(findctx, &devs); 29 | if (count < 0) { 30 | return list; 31 | } 32 | 33 | // Iterate over devices 34 | for (ssize_t i = 0; i < count; ++i) { 35 | libusb_device *dev = devs[i]; 36 | struct libusb_device_descriptor desc; 37 | if (libusb_get_device_descriptor(dev, &desc) == 0) { 38 | // Check if the device is using libusb driver 39 | if (desc.bDeviceClass == LIBUSB_CLASS_PER_INTERFACE) { 40 | std::stringstream ss; 41 | ss << std::setw(4) << std::setfill('0') << std::hex << desc.idVendor << ":"; 42 | ss << std::setw(4) << std::setfill('0') << std::hex << desc.idProduct; 43 | list.push_back(ss.str()); 44 | } 45 | } 46 | } 47 | std::sort(list.begin(), list.end(), [](std::string &a, std::string &b) { 48 | static std::vector specialStrings = { "0b05:17d2", "0bda:8812", "0bda:881a" }; 49 | auto itA = std::find(specialStrings.begin(), specialStrings.end(), a); 50 | auto itB = std::find(specialStrings.begin(), specialStrings.end(), b); 51 | if (itA != specialStrings.end() && itB == specialStrings.end()) { 52 | return true; 53 | } 54 | if (itB != specialStrings.end() && itA == specialStrings.end()) { 55 | return false; 56 | } 57 | return a < b; 58 | }); 59 | 60 | // Free the list of devices 61 | libusb_free_device_list(devs, 1); 62 | 63 | // Deinitialize libusb 64 | libusb_exit(findctx); 65 | return list; 66 | } 67 | bool WFBReceiver::Start(const std::string &vidPid, uint8_t channel, int channelWidth, const std::string &kPath) { 68 | 69 | QmlNativeAPI::Instance().wifiFrameCount_ = 0; 70 | QmlNativeAPI::Instance().wfbFrameCount_ = 0; 71 | QmlNativeAPI::Instance().rtpPktCount_ = 0; 72 | QmlNativeAPI::Instance().UpdateCount(); 73 | 74 | keyPath = kPath; 75 | if (usbThread) { 76 | return false; 77 | } 78 | int rc; 79 | 80 | // get vid pid 81 | std::istringstream iss(vidPid); 82 | unsigned int wifiDeviceVid, wifiDevicePid; 83 | char c; 84 | iss >> std::hex >> wifiDeviceVid >> c >> wifiDevicePid; 85 | 86 | auto logger = std::make_shared( 87 | [](const std::string &level, const std::string &msg) { QmlNativeAPI::Instance().PutLog(level, msg); }); 88 | 89 | rc = libusb_init(&ctx); 90 | if (rc < 0) { 91 | return false; 92 | } 93 | dev_handle = libusb_open_device_with_vid_pid(ctx, wifiDeviceVid, wifiDevicePid); 94 | if (dev_handle == nullptr) { 95 | logger->error("Cannot find device {:04x}:{:04x}", wifiDeviceVid, wifiDevicePid); 96 | libusb_exit(ctx); 97 | return false; 98 | } 99 | 100 | /*Check if kenel driver attached*/ 101 | if (libusb_kernel_driver_active(dev_handle, 0)) { 102 | rc = libusb_detach_kernel_driver(dev_handle, 0); // detach driver 103 | } 104 | rc = libusb_claim_interface(dev_handle, 0); 105 | 106 | if (rc < 0) { 107 | return false; 108 | } 109 | 110 | usbThread = std::make_shared([=]() { 111 | WiFiDriver wifi_driver { logger }; 112 | try { 113 | rtlDevice = wifi_driver.CreateRtlDevice(dev_handle); 114 | rtlDevice->Init( 115 | [](const Packet &p) { 116 | WFBReceiver::Instance().handle80211Frame(p); 117 | QmlNativeAPI::Instance().UpdateCount(); 118 | }, 119 | 
SelectedChannel { 120 | .Channel = channel, 121 | .ChannelOffset = 0, 122 | .ChannelWidth = static_cast(channelWidth), 123 | }); 124 | } catch (const std::runtime_error &e) { 125 | logger->error(e.what()); 126 | } catch (...) { 127 | } 128 | auto rc = libusb_release_interface(dev_handle, 0); 129 | if (rc < 0) { 130 | // error 131 | } 132 | logger->info("==========stopped=========="); 133 | libusb_close(dev_handle); 134 | libusb_exit(ctx); 135 | dev_handle = nullptr; 136 | ctx = nullptr; 137 | Stop(); 138 | usbThread.reset(); 139 | }); 140 | usbThread->detach(); 141 | 142 | return true; 143 | } 144 | void WFBReceiver::handle80211Frame(const Packet &packet) { 145 | 146 | QmlNativeAPI::Instance().wifiFrameCount_++; 147 | RxFrame frame(packet.Data); 148 | if (!frame.IsValidWfbFrame()) { 149 | return; 150 | } 151 | QmlNativeAPI::Instance().wfbFrameCount_++; 152 | 153 | static int8_t rssi[4] = { 1, 1, 1, 1 }; 154 | static uint8_t antenna[4] = { 1, 1, 1, 1 }; 155 | 156 | static uint32_t link_id = 7669206; // sha1 hash of link_domain="default" 157 | static uint8_t video_radio_port = 0; 158 | static uint64_t epoch = 0; 159 | 160 | static uint32_t video_channel_id_f = (link_id << 8) + video_radio_port; 161 | static auto video_channel_id_be = htobe32(video_channel_id_f); 162 | 163 | static uint8_t *video_channel_id_be8 = reinterpret_cast<uint8_t *>(&video_channel_id_be); 164 | 165 | static std::mutex agg_mutex; 166 | static std::unique_ptr<Aggregator> video_aggregator = std::make_unique<Aggregator>( 167 | keyPath.c_str(), epoch, video_channel_id_f, 168 | [](uint8_t *payload, uint16_t packet_size) { WFBReceiver::Instance().handleRtp(payload, packet_size); }); 169 | 170 | std::lock_guard<std::mutex> lock(agg_mutex); 171 | if (frame.MatchesChannelID(video_channel_id_be8)) { 172 | video_aggregator->process_packet( 173 | packet.Data.data() + sizeof(ieee80211_header), packet.Data.size() - sizeof(ieee80211_header) - 4, 0, 174 | antenna, rssi); 175 | } 176 | } 177 | 178 | static unsigned long long sendFd = INVALID_SOCKET; 179 | static volatile bool playing = false; 180 | 181 | 182 | #define GET_H264_NAL_UNIT_TYPE(buffer_ptr) (buffer_ptr[0] & 0x1F) 183 | inline bool isH264(const uint8_t *data) { 184 | auto h264NalType = GET_H264_NAL_UNIT_TYPE(data); 185 | return h264NalType == 24 || h264NalType == 28; 186 | } 187 | 188 | void WFBReceiver::handleRtp(uint8_t *payload, uint16_t packet_size) { 189 | QmlNativeAPI::Instance().rtpPktCount_++; 190 | QmlNativeAPI::Instance().UpdateCount(); 191 | if (rtlDevice->should_stop) { 192 | return; 193 | } 194 | if (packet_size < 12) { 195 | return; 196 | } 197 | 198 | sockaddr_in serverAddr {}; 199 | serverAddr.sin_family = AF_INET; 200 | serverAddr.sin_port = htons(QmlNativeAPI::Instance().playerPort); 201 | serverAddr.sin_addr.s_addr = inet_addr("127.0.0.1"); 202 | 203 | auto *header = (RtpHeader *)payload; 204 | 205 | if (!playing) { 206 | playing = true; 207 | if (QmlNativeAPI::Instance().playerCodec == "AUTO") { 208 | // decide between H.264 and H.265: NAL unit types 24 (STAP-A) and 28 (FU-A) only occur in H.264 RTP payloads 209 | if (isH264(header->getPayloadData())) { 210 | QmlNativeAPI::Instance().playerCodec = "H264"; 211 | QmlNativeAPI::Instance().PutLog("debug", 212 | "judge Codec " + QmlNativeAPI::Instance().playerCodec.toStdString()); 213 | } else { 214 | QmlNativeAPI::Instance().playerCodec = "H265"; 215 | QmlNativeAPI::Instance().PutLog("debug", 216 | "judge Codec " + QmlNativeAPI::Instance().playerCodec.toStdString()); 217 | } 218 | } 219 | QmlNativeAPI::Instance().NotifyRtpStream(header->pt, ntohl(header->ssrc)); 220 | } 221 | 222 | // send video to player 223 | sendto( 224 | sendFd,
reinterpret_cast(payload), packet_size, 0, (sockaddr *)&serverAddr, sizeof(serverAddr)); 225 | } 226 | 227 | bool WFBReceiver::Stop() { 228 | playing = false; 229 | if (rtlDevice) { 230 | rtlDevice->should_stop = true; 231 | } 232 | QmlNativeAPI::Instance().NotifyWifiStop(); 233 | 234 | return true; 235 | } 236 | 237 | WFBReceiver::WFBReceiver() { 238 | WSADATA wsaData; 239 | if (WSAStartup(MAKEWORD(2, 2), &wsaData) != 0) { 240 | std::cerr << "WSAStartup failed." << std::endl; 241 | return; 242 | } 243 | sendFd = socket(AF_INET, SOCK_DGRAM, 0); 244 | } 245 | 246 | WFBReceiver::~WFBReceiver() { 247 | closesocket(sendFd); 248 | sendFd = INVALID_SOCKET; 249 | WSACleanup(); 250 | Stop(); 251 | } 252 | -------------------------------------------------------------------------------- /src/wifi/WFBReceiver.h: -------------------------------------------------------------------------------- 1 | // 2 | // Created by Talus on 2024/6/10. 3 | // 4 | 5 | #ifndef WFBRECEIVER_H 6 | #define WFBRECEIVER_H 7 | #include "FrameParser.h" 8 | #include "Rtl8812aDevice.h" 9 | #include 10 | #include 11 | #include 12 | #include 13 | #include 14 | 15 | class WFBReceiver { 16 | public: 17 | WFBReceiver(); 18 | ~WFBReceiver(); 19 | static WFBReceiver &Instance() { 20 | static WFBReceiver wfb_receiver; 21 | return wfb_receiver; 22 | } 23 | std::vector GetDongleList(); 24 | bool Start(const std::string &vidPid, uint8_t channel, int channelWidth, const std::string &keyPath); 25 | bool Stop(); 26 | void handle80211Frame(const Packet &pkt); 27 | void handleRtp(uint8_t *payload, uint16_t packet_size); 28 | 29 | protected: 30 | libusb_context *ctx; 31 | libusb_device_handle *dev_handle; 32 | std::shared_ptr usbThread; 33 | std::unique_ptr rtlDevice; 34 | std::string keyPath; 35 | }; 36 | 37 | #endif // WFBRECEIVER_H 38 | -------------------------------------------------------------------------------- /src/wifi/fec.c: -------------------------------------------------------------------------------- 1 | /** 2 | * zfec -- fast forward error correction library with Python interface 3 | * https://tahoe-lafs.org/trac/zfec/ 4 | 5 | This package implements an "erasure code", or "forward error correction code". 6 | You may use this package under the GNU General Public License, version 2 or, at your option, any later version. 7 | */ 8 | 9 | #include "fec.h" 10 | 11 | #include 12 | #include 13 | #include 14 | #include 15 | 16 | /* 17 | * Primitive polynomials - see Lin & Costello, Appendix A, 18 | * and Lee & Messerschmitt, p. 453. 19 | */ 20 | static const char *const Pp = "101110001"; 21 | 22 | /* 23 | * To speed up computations, we have tables for logarithm, exponent and 24 | * inverse of a number. We use a table for multiplication as well (it takes 25 | * 64K, no big deal even on a PDA, especially because it can be 26 | * pre-initialized an put into a ROM!), otherwhise we use a table of 27 | * logarithms. In any case the macro gf_mul(x,y) takes care of 28 | * multiplications. 29 | */ 30 | 31 | static gf gf_exp[510]; /* index->poly form conversion table */ 32 | static int gf_log[256]; /* Poly->index form conversion table */ 33 | static gf inverse[256]; /* inverse of field elem. */ 34 | /* inv[\alpha**i]=\alpha**(GF_SIZE-i-1) */ 35 | 36 | /* 37 | * modnn(x) computes x % GF_SIZE, where GF_SIZE is 2**GF_BITS - 1, 38 | * without a slow divide. 
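The comment above says the loop reduces x modulo 255 without a divide; it works because 256 ≡ 1 (mod 255), so folding the high byte back into the low byte preserves the residue. A quick standalone check over the index range the table code actually produces (gf_log values are at most 255, so sums stay within 0..510):

```cpp
// modnn_check.cpp - verifies the divide-free reduction used in fec.c.
#include <cassert>
#include <cstdio>

typedef unsigned char gf;

static gf modnn(int x) { // mirrors modnn() in fec.c
    while (x >= 255) {
        x -= 255;
        x = (x >> 8) + (x & 255);
    }
    return x;
}

int main() {
    for (int x = 0; x <= 510; x++)
        assert(modnn(x) == x % 255);
    std::puts("modnn(x) == x % 255 for x in 0..510");
    return 0;
}
```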
39 | */ 40 | static gf modnn(int x) { 41 | while (x >= 255) { 42 | x -= 255; 43 | x = (x >> 8) + (x & 255); 44 | } 45 | return x; 46 | } 47 | 48 | #define SWAP(a, b, t) \ 49 | { \ 50 | t tmp; \ 51 | tmp = a; \ 52 | a = b; \ 53 | b = tmp; \ 54 | } 55 | 56 | /* 57 | * gf_mul(x,y) multiplies two numbers. It is much faster to use a 58 | * multiplication table. 59 | * 60 | * USE_GF_MULC, GF_MULC0(c) and GF_ADDMULC(x) can be used when multiplying 61 | * many numbers by the same constant. In this case the first call sets the 62 | * constant, and others perform the multiplications. A value related to the 63 | * multiplication is held in a local variable declared with USE_GF_MULC . See 64 | * usage in _addmul1(). 65 | */ 66 | static gf gf_mul_table[256][256]; 67 | 68 | #define gf_mul(x, y) gf_mul_table[x][y] 69 | 70 | #define USE_GF_MULC register gf *__gf_mulc_ 71 | 72 | #define GF_MULC0(c) __gf_mulc_ = gf_mul_table[c] 73 | #define GF_ADDMULC(dst, x) dst ^= __gf_mulc_[x] 74 | 75 | /* 76 | * Generate GF(2**m) from the irreducible polynomial p(X) in p[0]..p[m] 77 | * Lookup tables: 78 | * index->polynomial form gf_exp[] contains j= \alpha^i; 79 | * polynomial form -> index form gf_log[ j = \alpha^i ] = i 80 | * \alpha=x is the primitive element of GF(2^m) 81 | * 82 | * For efficiency, gf_exp[] has size 2*GF_SIZE, so that a simple 83 | * multiplication of two numbers can be resolved without calling modnn 84 | */ 85 | static void _init_mul_table(void) { 86 | int i, j; 87 | for (i = 0; i < 256; i++) 88 | for (j = 0; j < 256; j++) 89 | gf_mul_table[i][j] = gf_exp[modnn(gf_log[i] + gf_log[j])]; 90 | 91 | for (j = 0; j < 256; j++) 92 | gf_mul_table[0][j] = gf_mul_table[j][0] = 0; 93 | } 94 | 95 | #define NEW_GF_MATRIX(rows, cols) (gf *)malloc(rows *cols) 96 | 97 | /* 98 | * initialize the data structures used for computations in GF. 99 | */ 100 | static void generate_gf(void) { 101 | int i; 102 | gf mask; 103 | 104 | mask = 1; /* x ** 0 = 1 */ 105 | gf_exp[8] = 0; /* will be updated at the end of the 1st loop */ 106 | /* 107 | * first, generate the (polynomial representation of) powers of \alpha, 108 | * which are stored in gf_exp[i] = \alpha ** i . 109 | * At the same time build gf_log[gf_exp[i]] = i . 110 | * The first 8 powers are simply bits shifted to the left. 111 | */ 112 | for (i = 0; i < 8; i++, mask <<= 1) { 113 | gf_exp[i] = mask; 114 | gf_log[gf_exp[i]] = i; 115 | /* 116 | * If Pp[i] == 1 then \alpha ** i occurs in poly-repr 117 | * gf_exp[8] = \alpha ** 8 118 | */ 119 | if (Pp[i] == '1') 120 | gf_exp[8] ^= mask; 121 | } 122 | /* 123 | * now gf_exp[8] = \alpha ** 8 is complete, so can also 124 | * compute its inverse. 125 | */ 126 | gf_log[gf_exp[8]] = 8; 127 | /* 128 | * Poly-repr of \alpha ** (i+1) is given by poly-repr of 129 | * \alpha ** i shifted left one-bit and accounting for any 130 | * \alpha ** 8 term that may occur when poly-repr of 131 | * \alpha ** i is shifted. 132 | */ 133 | mask = 1 << 7; 134 | for (i = 9; i < 255; i++) { 135 | if (gf_exp[i - 1] >= mask) 136 | gf_exp[i] = gf_exp[8] ^ ((gf_exp[i - 1] ^ mask) << 1); 137 | else 138 | gf_exp[i] = gf_exp[i - 1] << 1; 139 | gf_log[gf_exp[i]] = i; 140 | } 141 | /* 142 | * log(0) is not defined, so use a special value 143 | */ 144 | gf_log[0] = 255; 145 | /* set the extended gf_exp values for fast multiply */ 146 | for (i = 0; i < 255; i++) 147 | gf_exp[i + 255] = gf_exp[i]; 148 | 149 | /* 150 | * again special cases. 0 has no inverse. 
This used to 151 | * be initialized to 255, but it should make no difference 152 | * since noone is supposed to read from here. 153 | */ 154 | inverse[0] = 0; 155 | inverse[1] = 1; 156 | for (i = 2; i <= 255; i++) 157 | inverse[i] = gf_exp[255 - gf_log[i]]; 158 | } 159 | 160 | /* 161 | * Various linear algebra operations that i use often. 162 | */ 163 | 164 | /* 165 | * addmul() computes dst[] = dst[] + c * src[] 166 | * This is used often, so better optimize it! Currently the loop is 167 | * unrolled 16 times, a good value for 486 and pentium-class machines. 168 | * The case c=0 is also optimized, whereas c=1 is not. These 169 | * calls are unfrequent in my typical apps so I did not bother. 170 | */ 171 | #define addmul(dst, src, c, sz) \ 172 | if (c != 0) \ 173 | _addmul1(dst, src, c, sz) 174 | 175 | #define UNROLL 16 /* 1, 4, 8, 16 */ 176 | static void _addmul1(register gf *restrict dst, const register gf *restrict src, gf c, size_t sz) { 177 | USE_GF_MULC; 178 | const gf *lim = &dst[sz - UNROLL + 1]; 179 | 180 | GF_MULC0(c); 181 | 182 | #if (UNROLL > 1) /* unrolling by 8/16 is quite effective on the pentium */ 183 | for (; dst < lim; dst += UNROLL, src += UNROLL) { 184 | GF_ADDMULC(dst[0], src[0]); 185 | GF_ADDMULC(dst[1], src[1]); 186 | GF_ADDMULC(dst[2], src[2]); 187 | GF_ADDMULC(dst[3], src[3]); 188 | #if (UNROLL > 4) 189 | GF_ADDMULC(dst[4], src[4]); 190 | GF_ADDMULC(dst[5], src[5]); 191 | GF_ADDMULC(dst[6], src[6]); 192 | GF_ADDMULC(dst[7], src[7]); 193 | #endif 194 | #if (UNROLL > 8) 195 | GF_ADDMULC(dst[8], src[8]); 196 | GF_ADDMULC(dst[9], src[9]); 197 | GF_ADDMULC(dst[10], src[10]); 198 | GF_ADDMULC(dst[11], src[11]); 199 | GF_ADDMULC(dst[12], src[12]); 200 | GF_ADDMULC(dst[13], src[13]); 201 | GF_ADDMULC(dst[14], src[14]); 202 | GF_ADDMULC(dst[15], src[15]); 203 | #endif 204 | } 205 | #endif 206 | lim += UNROLL - 1; 207 | for (; dst < lim; dst++, src++) /* final components */ 208 | GF_ADDMULC(*dst, *src); 209 | } 210 | 211 | /* 212 | * computes C = AB where A is n*k, B is k*m, C is n*m 213 | */ 214 | static void _matmul(gf *a, gf *b, gf *c, unsigned n, unsigned k, unsigned m) { 215 | unsigned row, col, i; 216 | 217 | for (row = 0; row < n; row++) { 218 | for (col = 0; col < m; col++) { 219 | gf *pa = &a[row * k]; 220 | gf *pb = &b[col]; 221 | gf acc = 0; 222 | for (i = 0; i < k; i++, pa++, pb += m) 223 | acc ^= gf_mul(*pa, *pb); 224 | c[row * m + col] = acc; 225 | } 226 | } 227 | } 228 | 229 | /* 230 | * _invert_mat() takes a matrix and produces its inverse 231 | * k is the size of the matrix. 232 | * (Gauss-Jordan, adapted from Numerical Recipes in C) 233 | * Return non-zero if singular. 234 | */ 235 | static void _invert_mat(gf *src, size_t k) { 236 | gf c; 237 | size_t irow = 0; 238 | size_t icol = 0; 239 | 240 | unsigned *indxc = (unsigned *)malloc(k * sizeof(unsigned)); 241 | unsigned *indxr = (unsigned *)malloc(k * sizeof(unsigned)); 242 | unsigned *ipiv = (unsigned *)malloc(k * sizeof(unsigned)); 243 | gf *id_row = NEW_GF_MATRIX(1, k); 244 | 245 | memset(id_row, '\0', k * sizeof(gf)); 246 | /* 247 | * ipiv marks elements already used as pivots. 248 | */ 249 | for (size_t i = 0; i < k; i++) 250 | ipiv[i] = 0; 251 | 252 | for (size_t col = 0; col < k; col++) { 253 | gf *pivot_row; 254 | /* 255 | * Zeroing column 'col', look for a non-zero element. 256 | * First try on the diagonal, if it fails, look elsewhere. 
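_addmul1(), _matmul() and _invert_mat() all rely on GF(2^8) conventions: addition and subtraction are both XOR (hence the ^= everywhere), and multiplication is table-driven via gf_exp/gf_log. The sketch below recomputes one product the slow, bit-by-bit way; reading `Pp = "101110001"` as coefficients of x^0..x^8 gives the reduction polynomial x^8 + x^4 + x^3 + x^2 + 1 (0x11D), which is my inference from generate_gf(), not something the file states:

```cpp
// gf256_demo.cpp - the field conventions behind the table-driven code in fec.c.
#include <cstdint>
#include <cstdio>

// Carry-less multiply followed by reduction modulo 0x11D (assumed primitive polynomial).
static uint8_t gf256_mul_slow(uint8_t a, uint8_t b) {
    uint16_t acc = 0;
    for (int i = 0; i < 8; i++)
        if (b & (1 << i))
            acc ^= (uint16_t)(a << i);
    for (int bit = 15; bit >= 8; bit--)
        if (acc & (1 << bit))
            acc ^= (uint16_t)(0x11D << (bit - 8));
    return (uint8_t)acc;
}

int main() {
    uint8_t a = 0x57, b = 0x83;
    // In GF(2^8), a + b == a - b == a ^ b, which is why addmul() is pure XOR.
    std::printf("a ^ b = 0x%02x\n", (unsigned)(a ^ b));
    std::printf("a * b = 0x%02x\n", (unsigned)gf256_mul_slow(a, b));
    return 0;
}
```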
257 | */ 258 | if (ipiv[col] != 1 && src[col * k + col] != 0) { 259 | irow = col; 260 | icol = col; 261 | goto found_piv; 262 | } 263 | for (size_t row = 0; row < k; row++) { 264 | if (ipiv[row] != 1) { 265 | for (size_t ix = 0; ix < k; ix++) { 266 | if (ipiv[ix] == 0) { 267 | if (src[row * k + ix] != 0) { 268 | irow = row; 269 | icol = ix; 270 | goto found_piv; 271 | } 272 | } else 273 | assert(ipiv[ix] <= 1); 274 | } 275 | } 276 | } 277 | found_piv: 278 | ++(ipiv[icol]); 279 | /* 280 | * swap rows irow and icol, so afterwards the diagonal 281 | * element will be correct. Rarely done, not worth 282 | * optimizing. 283 | */ 284 | if (irow != icol) 285 | for (size_t ix = 0; ix < k; ix++) 286 | SWAP(src[irow * k + ix], src[icol * k + ix], gf); 287 | indxr[col] = irow; 288 | indxc[col] = icol; 289 | pivot_row = &src[icol * k]; 290 | c = pivot_row[icol]; 291 | assert(c != 0); 292 | if (c != 1) { /* otherwhise this is a NOP */ 293 | /* 294 | * this is done often , but optimizing is not so 295 | * fruitful, at least in the obvious ways (unrolling) 296 | */ 297 | c = inverse[c]; 298 | pivot_row[icol] = 1; 299 | for (size_t ix = 0; ix < k; ix++) 300 | pivot_row[ix] = gf_mul(c, pivot_row[ix]); 301 | } 302 | /* 303 | * from all rows, remove multiples of the selected row 304 | * to zero the relevant entry (in fact, the entry is not zero 305 | * because we know it must be zero). 306 | * (Here, if we know that the pivot_row is the identity, 307 | * we can optimize the addmul). 308 | */ 309 | id_row[icol] = 1; 310 | if (memcmp(pivot_row, id_row, k * sizeof(gf)) != 0) { 311 | gf *p = src; 312 | for (size_t ix = 0; ix < k; ix++, p += k) { 313 | if (ix != icol) { 314 | c = p[icol]; 315 | p[icol] = 0; 316 | addmul(p, pivot_row, c, k); 317 | } 318 | } 319 | } 320 | id_row[icol] = 0; 321 | } /* done all columns */ 322 | for (size_t col = k; col > 0; col--) 323 | if (indxr[col - 1] != indxc[col - 1]) 324 | for (size_t row = 0; row < k; row++) 325 | SWAP(src[row * k + indxr[col - 1]], src[row * k + indxc[col - 1]], gf); 326 | free(indxc); 327 | free(indxr); 328 | free(ipiv); 329 | free(id_row); 330 | } 331 | 332 | /* 333 | * fast code for inverting a vandermonde matrix. 334 | * 335 | * NOTE: It assumes that the matrix is not singular and _IS_ a vandermonde 336 | * matrix. Only uses the second column of the matrix, containing the p_i's. 337 | * 338 | * Algorithm borrowed from "Numerical recipes in C" -- sec.2.8, but largely 339 | * revised for my purposes. 340 | * p = coefficients of the matrix (p_i) 341 | * q = values of the polynomial (known) 342 | */ 343 | void _invert_vdm(gf *src, unsigned k) { 344 | unsigned i, j, row, col; 345 | gf *b, *c, *p; 346 | gf t, xx; 347 | 348 | if (k == 1) /* degenerate case, matrix must be p^0 = 1 */ 349 | return; 350 | /* 351 | * c holds the coefficient of P(x) = Prod (x - p_i), i=0..k-1 352 | * b holds the coefficient for the matrix inversion 353 | */ 354 | c = NEW_GF_MATRIX(1, k); 355 | b = NEW_GF_MATRIX(1, k); 356 | 357 | p = NEW_GF_MATRIX(1, k); 358 | 359 | for (j = 1, i = 0; i < k; i++, j += k) { 360 | c[i] = 0; 361 | p[i] = src[j]; /* p[i] */ 362 | } 363 | /* 364 | * construct coeffs. recursively. We know c[k] = 1 (implicit) 365 | * and start P_0 = x - p_0, then at each stage multiply by 366 | * x - p_i generating P_i = x P_{i-1} - p_i P_{i-1} 367 | * After k steps we are done. 
368 | */ 369 | c[k - 1] = p[0]; /* really -p(0), but x = -x in GF(2^m) */ 370 | for (i = 1; i < k; i++) { 371 | gf p_i = p[i]; /* see above comment */ 372 | for (j = k - 1 - (i - 1); j < k - 1; j++) 373 | c[j] ^= gf_mul(p_i, c[j + 1]); 374 | c[k - 1] ^= p_i; 375 | } 376 | 377 | for (row = 0; row < k; row++) { 378 | /* 379 | * synthetic division etc. 380 | */ 381 | xx = p[row]; 382 | t = 1; 383 | b[k - 1] = 1; /* this is in fact c[k] */ 384 | for (i = k - 1; i > 0; i--) { 385 | b[i - 1] = c[i] ^ gf_mul(xx, b[i]); 386 | t = gf_mul(xx, t) ^ b[i - 1]; 387 | } 388 | for (col = 0; col < k; col++) 389 | src[col * k + row] = gf_mul(inverse[t], b[col]); 390 | } 391 | free(c); 392 | free(b); 393 | free(p); 394 | return; 395 | } 396 | 397 | static int fec_initialized = 0; 398 | static void init_fec(void) { 399 | generate_gf(); 400 | _init_mul_table(); 401 | fec_initialized = 1; 402 | } 403 | 404 | /* 405 | * This section contains the proper FEC encoding/decoding routines. 406 | * The encoding matrix is computed starting with a Vandermonde matrix, 407 | * and then transforming it into a systematic matrix. 408 | */ 409 | 410 | #define FEC_MAGIC 0xFECC0DEC 411 | 412 | void fec_free(fec_t *p) { 413 | assert(p != NULL && p->magic == (((FEC_MAGIC ^ p->k) ^ p->n) ^ (unsigned long)(p->enc_matrix))); 414 | free(p->enc_matrix); 415 | free(p); 416 | } 417 | 418 | fec_t *fec_new(unsigned short k, unsigned short n) { 419 | unsigned row, col; 420 | gf *p, *tmp_m; 421 | 422 | fec_t *retval; 423 | 424 | assert(k >= 1); 425 | assert(n >= 1); 426 | assert(n <= 256); 427 | assert(k <= n); 428 | 429 | if (fec_initialized == 0) 430 | init_fec(); 431 | 432 | retval = (fec_t *)malloc(sizeof(fec_t)); 433 | retval->k = k; 434 | retval->n = n; 435 | retval->enc_matrix = NEW_GF_MATRIX(n, k); 436 | retval->magic = ((FEC_MAGIC ^ k) ^ n) ^ (unsigned long)(retval->enc_matrix); 437 | tmp_m = NEW_GF_MATRIX(n, k); 438 | /* 439 | * fill the matrix with powers of field elements, starting from 0. 440 | * The first row is special, cannot be computed with exp. table. 441 | */ 442 | tmp_m[0] = 1; 443 | for (col = 1; col < k; col++) 444 | tmp_m[col] = 0; 445 | for (p = tmp_m + k, row = 0; row < n - 1; row++, p += k) 446 | for (col = 0; col < k; col++) 447 | p[col] = gf_exp[modnn(row * col)]; 448 | 449 | /* 450 | * quick code to build systematic matrix: invert the top 451 | * k*k vandermonde matrix, multiply right the bottom n-k rows 452 | * by the inverse, and construct the identity matrix at the top. 453 | */ 454 | _invert_vdm(tmp_m, k); /* much faster than _invert_mat */ 455 | _matmul(tmp_m + k * k, tmp_m, retval->enc_matrix + k * k, n - k, k, k); 456 | /* 457 | * the upper matrix is I so do not bother with a slow multiply 458 | */ 459 | memset(retval->enc_matrix, '\0', k * k * sizeof(gf)); 460 | for (p = retval->enc_matrix, col = 0; col < k; col++, p += k + 1) 461 | *p = 1; 462 | free(tmp_m); 463 | 464 | return retval; 465 | } 466 | 467 | /* To make sure that we stay within cache in the inner loops of fec_encode(). (It would 468 | probably help to also do this for fec_decode(). */ 469 | #ifndef STRIDE 470 | #define STRIDE 8192 471 | #endif 472 | 473 | void fec_encode(const fec_t *code, const gf **src, gf **fecs, size_t sz) { 474 | unsigned char i, j; 475 | size_t k; 476 | unsigned fecnum; 477 | const gf *p; 478 | 479 | for (k = 0; k < sz; k += STRIDE) { 480 | size_t stride = ((sz - k) < STRIDE) ? 
(sz - k) : STRIDE; 481 | for (i = 0; i < (code->n - code->k); i++) { 482 | fecnum = i + code->k; 483 | memset(fecs[i] + k, 0, stride); 484 | p = &(code->enc_matrix[fecnum * code->k]); 485 | for (j = 0; j < code->k; j++) 486 | addmul(fecs[i] + k, src[j] + k, p[j], stride); 487 | } 488 | } 489 | } 490 | 491 | /** 492 | * Build decode matrix into some memory space. 493 | * 494 | * @param matrix a space allocated for a k by k matrix 495 | */ 496 | void build_decode_matrix_into_space( 497 | const fec_t *restrict const code, const unsigned *const restrict index, const unsigned k, 498 | gf *restrict const matrix) { 499 | unsigned char i; 500 | gf *p; 501 | for (i = 0, p = matrix; i < k; i++, p += k) { 502 | if (index[i] < k) { 503 | memset(p, 0, k); 504 | p[i] = 1; 505 | } else { 506 | memcpy(p, &(code->enc_matrix[index[i] * code->k]), k); 507 | } 508 | } 509 | _invert_mat(matrix, k); 510 | } 511 | 512 | void fec_decode(const fec_t *code, const gf **inpkts, gf **outpkts, const unsigned *index, size_t sz) { 513 | gf *m_dec = (gf *)alloca(code->k * code->k); 514 | unsigned char outix = 0; 515 | unsigned char row = 0; 516 | unsigned char col = 0; 517 | build_decode_matrix_into_space(code, index, code->k, m_dec); 518 | 519 | for (row = 0; row < code->k; row++) { 520 | assert((index[row] >= code->k) || (index[row] == row)); /* If the block whose number is i is present, then it is 521 | required to be in the i'th element. */ 522 | if (index[row] >= code->k) { 523 | memset(outpkts[outix], 0, sz); 524 | for (col = 0; col < code->k; col++) 525 | addmul(outpkts[outix], inpkts[col], m_dec[row * code->k + col], sz); 526 | outix++; 527 | } 528 | } 529 | } 530 | 531 | /** 532 | * zfec -- fast forward error correction library with Python interface 533 | * 534 | * Copyright (C) 2007-2010 Zooko Wilcox-O'Hearn 535 | * Author: Zooko Wilcox-O'Hearn 536 | * 537 | * This file is part of zfec. 538 | * 539 | * See README.rst for licensing information. 540 | */ 541 | 542 | /* 543 | * This work is derived from the "fec" software by Luigi Rizzo, et al., the 544 | * copyright notice and licence terms of which are included below for reference. 545 | * fec.c -- forward error correction based on Vandermonde matrices 980624 (C) 546 | * 1997-98 Luigi Rizzo (luigi@iet.unipi.it) 547 | * 548 | * Portions derived from code by Phil Karn (karn@ka9q.ampr.org), 549 | * Robert Morelos-Zaragoza (robert@spectra.eng.hawaii.edu) and Hari 550 | * Thirumoorthy (harit@spectra.eng.hawaii.edu), Aug 1995 551 | * 552 | * Modifications by Dan Rubenstein (see Modifications.txt for 553 | * their description. 554 | * Modifications (C) 1998 Dan Rubenstein (drubenst@cs.umass.edu) 555 | * 556 | * Redistribution and use in source and binary forms, with or without 557 | * modification, are permitted provided that the following conditions 558 | * are met: 559 | * 560 | * 1. Redistributions of source code must retain the above copyright 561 | * notice, this list of conditions and the following disclaimer. 562 | * 2. Redistributions in binary form must reproduce the above 563 | * copyright notice, this list of conditions and the following 564 | * disclaimer in the documentation and/or other materials 565 | * provided with the distribution. 566 | * 567 | * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND 568 | * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, 569 | * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A 570 | * PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHORS 571 | * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, 572 | * OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 573 | * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, 574 | * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 575 | * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR 576 | * TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT 577 | * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY 578 | * OF SUCH DAMAGE. 579 | */ 580 | -------------------------------------------------------------------------------- /src/wifi/fec.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | /** 4 | * zfec -- fast forward error correction library with Python interface 5 | * https://tahoe-lafs.org/trac/zfec/ 6 | 7 | This package implements an "erasure code", or "forward error correction code". 8 | You may use this package under the GNU General Public License, version 2 or, at your option, any later version. 9 | */ 10 | 11 | #include <stddef.h> 12 | 13 | typedef unsigned char gf; 14 | 15 | typedef struct { 16 | unsigned long magic; 17 | unsigned short k, n; /* parameters of the code */ 18 | gf *enc_matrix; 19 | } fec_t; 20 | 21 | #if defined(_MSC_VER) 22 | // actually, some of the flavors (i.e. Enterprise) do support restrict 23 | // #define restrict __restrict 24 | #define restrict 25 | #endif 26 | 27 | /** 28 | * @param k the number of blocks required to reconstruct 29 | * @param m the total number of blocks created 30 | */ 31 | fec_t *fec_new(unsigned short k, unsigned short m); 32 | void fec_free(fec_t *p); 33 | 34 | /** 35 | * @param src the "primary blocks", i.e. the k chunks of the input data 36 | * @param fecs buffers into which the secondary (check) blocks will be written; this 37 | * variant always produces all n - k check blocks (block numbers k .. n - 1), so fecs 38 | * must hold n - k writable buffers of at least sz bytes each (upstream zfec selects 39 | * individual check blocks via block_nums/num_block_nums parameters instead) 40 | * @param sz size of a packet in bytes 41 | */ 42 | void fec_encode(const fec_t *code, const gf **src, gf **fecs, size_t sz); 43 | 44 | /** 45 | * @param inpkts an array of packets (size k); if a primary block, i, is present then it must be at index i. Secondary 46 | * blocks can appear anywhere. 47 | * @param outpkts an array of buffers into which the reconstructed output packets will be written (only packets which 48 | * are not present in the inpkts input will be reconstructed and written to outpkts) 49 | * @param index an array of the blocknums of the packets in inpkts 50 | * @param sz size of a packet in bytes 51 | */ 52 | void fec_decode(const fec_t *code, const gf **inpkts, gf **outpkts, const unsigned *index, size_t sz); 53 | 54 | #if defined(_MSC_VER) 55 | #define alloca _alloca 56 | #else 57 | #ifdef __GNUC__ 58 | #ifndef alloca 59 | #define alloca(x) __builtin_alloca(x) 60 | #endif 61 | #else 62 | #include <alloca.h> 63 | #endif 64 | #endif 65 | 66 | /** 67 | * zfec -- fast forward error correction library with Python interface 68 | * 69 | * Copyright (C) 2007-2008 Allmydata, Inc. 70 | * Author: Zooko Wilcox-O'Hearn 71 | * 72 | * This file is part of zfec. 73 | * 74 | * See README.rst for licensing information. 75 | */ 76 | 77 | /* 78 | * Much of this work is derived from the "fec" software by Luigi Rizzo, et 79 | * al., the copyright notice and licence terms of which are included below 80 | * for reference. 
81 | * 82 | * fec.h -- forward error correction based on Vandermonde matrices 83 | * 980614 84 | * (C) 1997-98 Luigi Rizzo (luigi@iet.unipi.it) 85 | * 86 | * Portions derived from code by Phil Karn (karn@ka9q.ampr.org), 87 | * Robert Morelos-Zaragoza (robert@spectra.eng.hawaii.edu) and Hari 88 | * Thirumoorthy (harit@spectra.eng.hawaii.edu), Aug 1995 89 | * 90 | * Modifications by Dan Rubenstein (see Modifications.txt for 91 | * their description. 92 | * Modifications (C) 1998 Dan Rubenstein (drubenst@cs.umass.edu) 93 | * 94 | * Redistribution and use in source and binary forms, with or without 95 | * modification, are permitted provided that the following conditions 96 | * are met: 97 | 98 | * 1. Redistributions of source code must retain the above copyright 99 | * notice, this list of conditions and the following disclaimer. 100 | * 2. Redistributions in binary form must reproduce the above 101 | * copyright notice, this list of conditions and the following 102 | * disclaimer in the documentation and/or other materials 103 | * provided with the distribution. 104 | * 105 | * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND 106 | * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, 107 | * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A 108 | * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS 109 | * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, 110 | * OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 111 | * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, 112 | * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 113 | * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR 114 | * TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT 115 | * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY 116 | * OF SUCH DAMAGE. 117 | */ 118 | --------------------------------------------------------------------------------
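For orientation, here is a minimal usage sketch of the fec.h API above. It is not part of the repository: the values K = 4, N = 6, the 1024-byte block size, the block contents, and the `#include "fec.h"` path are illustrative assumptions, and error handling is omitted. It encodes K data blocks into N blocks, drops one data block, and reconstructs it from a check block, respecting the rule that a surviving primary block i must sit at index i of the decode input.

```c
/*
 * Minimal usage sketch for fec_new()/fec_encode()/fec_decode()/fec_free().
 * Illustrative only: K, N, SZ and the block contents are arbitrary example
 * values, and malloc() results are not checked for brevity.
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "fec.h"

enum { K = 4, N = 6, SZ = 1024 }; /* K-of-N code, SZ-byte blocks */

int main(void) {
    fec_t *code = fec_new(K, N);

    /* K primary blocks filled with recognizable data */
    gf *data[K];
    for (int i = 0; i < K; i++) {
        data[i] = malloc(SZ);
        memset(data[i], 'A' + i, SZ);
    }

    /* fec_encode() writes all N - K check blocks (block numbers K .. N - 1) */
    gf *fecs[N - K];
    for (int i = 0; i < N - K; i++)
        fecs[i] = malloc(SZ);
    fec_encode(code, (const gf **)data, fecs, SZ);

    /* Pretend primary block 1 was lost. Surviving primary blocks stay at
     * their own index; check block number 4 (fecs[0]) fills the hole. */
    const gf *inpkts[K] = { data[0], fecs[0], data[2], data[3] };
    unsigned index[K] = { 0, 4, 2, 3 };

    gf *recovered = malloc(SZ);
    gf *outpkts[1] = { recovered }; /* one missing block -> one output buffer */
    fec_decode(code, inpkts, outpkts, index, SZ);

    printf("block 1 recovered: %s\n", memcmp(recovered, data[1], SZ) ? "no" : "yes");

    free(recovered);
    for (int i = 0; i < N - K; i++) free(fecs[i]);
    for (int i = 0; i < K; i++) free(data[i]);
    fec_free(code);
    return 0;
}
```

Note that fec_decode() writes only the missing primary blocks, in ascending block-number order, so outpkts needs exactly one buffer per lost data block.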