├── .clang-format ├── .github └── workflows │ └── cmake-multi-platform.yml ├── .gitignore ├── .gitmodules ├── CMakeLists.txt ├── LICENSE ├── README.md ├── assets ├── COPYRIGHT.md ├── CaptureImage.svg ├── FPS.svg ├── NotoSansSC-Regular.ttf ├── RecordVideo.svg ├── Refresh.svg ├── gs.key ├── logo.ico ├── logo.png ├── openipc-logo-white.svg ├── translations.csv └── weights │ └── pairlie_180x320.onnx ├── aviateur.desktop ├── build-appimage.sh ├── set_windows_icon.ps1 ├── src ├── feature │ ├── night_image_enhancement.cpp │ ├── night_image_enhancement.h │ ├── video_stabilizer.cpp │ └── video_stabilizer.h ├── gui │ ├── control_panel.cpp │ ├── control_panel.h │ ├── player_rect.cpp │ ├── player_rect.h │ ├── settings_tab.cpp │ ├── settings_tab.h │ ├── tip_label.cpp │ └── tip_label.h ├── gui_interface.h ├── main.cpp ├── player │ ├── GifEncoder.cpp │ ├── GifEncoder.h │ ├── JpegEncoder.cpp │ ├── JpegEncoder.h │ ├── Mp4Encoder.cpp │ ├── Mp4Encoder.h │ ├── RealTimePlayer.cpp │ ├── RealTimePlayer.h │ ├── YuvRenderer.cpp │ ├── YuvRenderer.h │ ├── ffmpegDecode.cpp │ ├── ffmpegDecode.h │ └── ffmpegInclude.h └── wifi │ ├── Rtp.h │ ├── RxFrame.h │ ├── WfbDefine.h │ ├── WfbProcessor.cpp │ ├── WfbProcessor.h │ ├── WfbReceiver.cpp │ ├── WfbReceiver.h │ ├── fec.c │ └── fec.h ├── test_multicast.ps1 └── tutorials ├── interface.png ├── latency_test.jpg ├── vcpkg.jpg ├── zadig1.jpg └── zadig2.jpg /.clang-format: -------------------------------------------------------------------------------- 1 | --- 2 | Language: Cpp 3 | # BasedOnStyle: Google 4 | AccessModifierOffset: -4 5 | AlignAfterOpenBracket: Align 6 | AlignConsecutiveMacros: None 7 | AlignConsecutiveAssignments: None 8 | AlignConsecutiveBitFields: None 9 | AlignConsecutiveDeclarations: None 10 | AlignEscapedNewlines: Left 11 | AlignOperands: Align 12 | AlignTrailingComments: true 13 | AllowAllArgumentsOnNextLine: false 14 | AllowAllParametersOfDeclarationOnNextLine: false 15 | AllowShortEnumsOnASingleLine: false 16 | AllowShortBlocksOnASingleLine: Never 17 | AllowShortCaseLabelsOnASingleLine: false 18 | AllowShortFunctionsOnASingleLine: Empty 19 | AllowShortLambdasOnASingleLine: All 20 | AllowShortIfStatementsOnASingleLine: WithoutElse 21 | AllowShortLoopsOnASingleLine: true 22 | AlwaysBreakAfterDefinitionReturnType: None 23 | AlwaysBreakAfterReturnType: None 24 | AlwaysBreakBeforeMultilineStrings: true 25 | AlwaysBreakTemplateDeclarations: Yes 26 | AttributeMacros: 27 | - __capability 28 | BinPackArguments: false 29 | BinPackParameters: false 30 | BraceWrapping: 31 | AfterCaseLabel: false 32 | AfterClass: false 33 | AfterControlStatement: Never 34 | AfterEnum: false 35 | AfterFunction: false 36 | AfterNamespace: false 37 | AfterObjCDeclaration: false 38 | AfterStruct: false 39 | AfterUnion: false 40 | AfterExternBlock: false 41 | BeforeCatch: false 42 | BeforeElse: false 43 | BeforeLambdaBody: false 44 | BeforeWhile: false 45 | IndentBraces: false 46 | SplitEmptyFunction: true 47 | SplitEmptyRecord: true 48 | SplitEmptyNamespace: true 49 | BreakBeforeBinaryOperators: None 50 | BreakBeforeConceptDeclarations: true 51 | BreakBeforeBraces: Attach 52 | BreakBeforeInheritanceComma: false 53 | BreakInheritanceList: BeforeColon 54 | BreakBeforeTernaryOperators: true 55 | BreakConstructorInitializersBeforeComma: false 56 | BreakConstructorInitializers: BeforeColon 57 | BreakAfterJavaFieldAnnotations: false 58 | BreakStringLiterals: true 59 | ColumnLimit: 120 60 | CommentPragmas: '^ IWYU pragma:' 61 | CompactNamespaces: false 62 | ConstructorInitializerIndentWidth: 4 63 
| ContinuationIndentWidth: 4 64 | Cpp11BracedListStyle: true 65 | DeriveLineEnding: true 66 | DerivePointerAlignment: true 67 | DisableFormat: false 68 | EmptyLineAfterAccessModifier: Never 69 | EmptyLineBeforeAccessModifier: LogicalBlock 70 | ExperimentalAutoDetectBinPacking: false 71 | ConstructorInitializerAllOnOneLineOrOnePerLine: false 72 | AllowAllConstructorInitializersOnNextLine: true 73 | FixNamespaceComments: true 74 | ForEachMacros: 75 | - foreach 76 | - Q_FOREACH 77 | - BOOST_FOREACH 78 | IncludeBlocks: Regroup 79 | IncludeCategories: 80 | - Regex: '^' 81 | Priority: 2 82 | SortPriority: 0 83 | CaseSensitive: false 84 | - Regex: '^<.*\.h>' 85 | Priority: 1 86 | SortPriority: 0 87 | CaseSensitive: false 88 | - Regex: '^<.*' 89 | Priority: 2 90 | SortPriority: 0 91 | CaseSensitive: false 92 | - Regex: '.*' 93 | Priority: 3 94 | SortPriority: 0 95 | CaseSensitive: false 96 | IncludeIsMainRegex: '([-_](test|unittest))?$' 97 | IncludeIsMainSourceRegex: '' 98 | IndentAccessModifiers: false 99 | IndentCaseLabels: true 100 | IndentCaseBlocks: false 101 | IndentGotoLabels: true 102 | IndentPPDirectives: BeforeHash 103 | IndentExternBlock: AfterExternBlock 104 | IndentWidth: 4 105 | IndentWrappedFunctionNames: false 106 | InsertTrailingCommas: None 107 | JavaScriptQuotes: Leave 108 | JavaScriptWrapImports: true 109 | KeepEmptyLinesAtTheStartOfBlocks: false 110 | MacroBlockBegin: '' 111 | MacroBlockEnd: '' 112 | MaxEmptyLinesToKeep: 1 113 | NamespaceIndentation: None 114 | ObjCBinPackProtocolList: Never 115 | ObjCBlockIndentWidth: 2 116 | ObjCBreakBeforeNestedBlockParam: true 117 | ObjCSpaceAfterProperty: false 118 | ObjCSpaceBeforeProtocolList: true 119 | PenaltyBreakAssignment: 2 120 | PenaltyBreakBeforeFirstCallParameter: 1 121 | PenaltyBreakComment: 300 122 | PenaltyBreakFirstLessLess: 120 123 | PenaltyBreakString: 1000 124 | PenaltyBreakTemplateDeclaration: 10 125 | PenaltyExcessCharacter: 1000000 126 | PenaltyReturnTypeOnItsOwnLine: 200 127 | PenaltyIndentedWhitespace: 0 128 | PointerAlignment: Left 129 | PPIndentWidth: -1 130 | RawStringFormats: 131 | - Language: Cpp 132 | Delimiters: 133 | - cc 134 | - CC 135 | - cpp 136 | - Cpp 137 | - CPP 138 | - 'c++' 139 | - 'C++' 140 | CanonicalDelimiter: '' 141 | BasedOnStyle: google 142 | - Language: TextProto 143 | Delimiters: 144 | - pb 145 | - PB 146 | - proto 147 | - PROTO 148 | EnclosingFunctions: 149 | - EqualsProto 150 | - EquivToProto 151 | - PARSE_PARTIAL_TEXT_PROTO 152 | - PARSE_TEST_PROTO 153 | - PARSE_TEXT_PROTO 154 | - ParseTextOrDie 155 | - ParseTextProtoOrDie 156 | - ParseTestProto 157 | - ParsePartialTestProto 158 | CanonicalDelimiter: pb 159 | BasedOnStyle: google 160 | ReflowComments: true 161 | ShortNamespaceLines: 1 162 | SortIncludes: CaseSensitive 163 | SortJavaStaticImport: Before 164 | SortUsingDeclarations: true 165 | SpaceAfterCStyleCast: false 166 | SpaceAfterLogicalNot: false 167 | SpaceAfterTemplateKeyword: true 168 | SpaceBeforeAssignmentOperators: true 169 | SpaceBeforeCaseColon: false 170 | SpaceBeforeCpp11BracedList: false 171 | SpaceBeforeCtorInitializerColon: true 172 | SpaceBeforeInheritanceColon: true 173 | SpaceBeforeParens: ControlStatements 174 | SpaceAroundPointerQualifiers: Default 175 | SpaceBeforeRangeBasedForLoopColon: true 176 | SpaceInEmptyBlock: false 177 | SpaceInEmptyParentheses: false 178 | SpacesBeforeTrailingComments: 1 179 | SpacesInAngles: Never 180 | SpacesInConditionalStatement: false 181 | SpacesInContainerLiterals: true 182 | SpacesInCStyleCastParentheses: false 183 | 
SpacesInLineCommentPrefix: 184 | Minimum: 1 185 | Maximum: -1 186 | SpacesInParentheses: false 187 | SpacesInSquareBrackets: false 188 | SpaceBeforeSquareBrackets: false 189 | BitFieldColonSpacing: Both 190 | Standard: Auto 191 | StatementAttributeLikeMacros: 192 | - Q_EMIT 193 | StatementMacros: 194 | - Q_UNUSED 195 | - QT_REQUIRE_VERSION 196 | TabWidth: 4 197 | UseCRLF: false 198 | UseTab: Never 199 | WhitespaceSensitiveMacros: 200 | - STRINGIZE 201 | - PP_STRINGIZE 202 | - BOOST_PP_STRINGIZE 203 | - NS_SWIFT_NAME 204 | - CF_SWIFT_NAME 205 | ... 206 | -------------------------------------------------------------------------------- /.github/workflows/cmake-multi-platform.yml: -------------------------------------------------------------------------------- 1 | # This starter workflow is for a CMake project running on multiple platforms. There is a different starter workflow if you just want a single platform. 2 | # See: https://github.com/actions/starter-workflows/blob/main/ci/cmake-single-platform.yml 3 | name: CMake on multiple platforms 4 | 5 | on: 6 | push: 7 | branches: [ "main" ] 8 | pull_request: 9 | branches: [ "main" ] 10 | 11 | jobs: 12 | build: 13 | runs-on: ${{ matrix.os }} 14 | 15 | strategy: 16 | # Set fail-fast to false to ensure that feedback is delivered for all matrix combinations. Consider changing this to true when your workflow is stable. 17 | fail-fast: false 18 | 19 | # Set up a matrix to run the following 3 configurations: 20 | # 1. 21 | # 2. 22 | # 3. 23 | # 24 | # To add more build types (Release, Debug, RelWithDebInfo, etc.) customize the build_type list. 25 | matrix: 26 | os: [ubuntu-latest, windows-2025] 27 | build_type: [Release] 28 | c_compiler: [gcc, clang, cl] 29 | include: 30 | - os: windows-2025 31 | c_compiler: cl 32 | cpp_compiler: cl 33 | - os: ubuntu-latest 34 | c_compiler: gcc 35 | cpp_compiler: g++ 36 | - os: ubuntu-latest 37 | c_compiler: clang 38 | cpp_compiler: clang++ 39 | exclude: 40 | - os: windows-2025 41 | c_compiler: gcc 42 | - os: windows-2025 43 | c_compiler: clang 44 | - os: ubuntu-latest 45 | c_compiler: cl 46 | 47 | steps: 48 | - uses: actions/checkout@v4 49 | 50 | - name: Set reusable strings 51 | # Turn repeated input strings (such as the build output directory) into step outputs. These step outputs can be used throughout the workflow file. 52 | id: strings 53 | shell: bash 54 | run: | 55 | echo "build-output-dir=${{ github.workspace }}/build" >> "$GITHUB_OUTPUT" 56 | 57 | - name: Install dependency libraries (Windows) 58 | id: vars 59 | if: runner.os == 'Windows' 60 | run: | 61 | git submodule init 62 | git submodule update 63 | git clone https://github.com/Microsoft/vcpkg.git 64 | cd vcpkg 65 | .\bootstrap-vcpkg.bat 66 | .\vcpkg integrate install 67 | .\vcpkg install libusb ffmpeg libsodium opencv 68 | echo ("VCPKG_ROOT=" + "$PWD") >> $env:GITHUB_ENV 69 | 70 | - name: Install dependency libraries (Linux) 71 | if: runner.os == 'Linux' 72 | run: | 73 | git submodule init 74 | git submodule update 75 | sudo apt install libusb-1.0-0-dev ffmpeg libsodium-dev libopencv-dev xorg-dev 76 | 77 | - name: Configure CMake (Windows) 78 | if: runner.os == 'Windows' 79 | # Configure CMake in a 'build' subdirectory. `CMAKE_BUILD_TYPE` is only required if you are using a single-configuration generator such as make. 
80 | # See https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html?highlight=cmake_build_type 81 | run: > 82 | cmake -B ${{ steps.strings.outputs.build-output-dir }} 83 | -DCMAKE_CXX_COMPILER=${{ matrix.cpp_compiler }} 84 | -DCMAKE_C_COMPILER=${{ matrix.c_compiler }} 85 | -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} 86 | -S ${{ github.workspace }} 87 | 88 | - name: Configure CMake (Linux) 89 | if: runner.os == 'Linux' 90 | # Configure CMake in a 'build' subdirectory. `CMAKE_BUILD_TYPE` is only required if you are using a single-configuration generator such as make. 91 | # See https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html?highlight=cmake_build_type 92 | run: > 93 | cmake -B ${{ steps.strings.outputs.build-output-dir }} 94 | -DCMAKE_CXX_COMPILER=${{ matrix.cpp_compiler }} 95 | -DCMAKE_C_COMPILER=${{ matrix.c_compiler }} 96 | -DCMAKE_BUILD_TYPE=${{ matrix.build_type }} 97 | -S ${{ github.workspace }} 98 | 99 | - name: Build 100 | # Build your program with the given configuration. Note that --config is needed because the default Windows generator is a multi-config generator (Visual Studio generator). 101 | run: cmake --build ${{ steps.strings.outputs.build-output-dir }} --config ${{ matrix.build_type }} 102 | 103 | - name: Test 104 | working-directory: ${{ steps.strings.outputs.build-output-dir }} 105 | # Execute tests defined by the CMake configuration. Note that --build-config is needed because the default Windows generator is a multi-config generator (Visual Studio generator). 106 | # See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail 107 | run: ctest --build-config ${{ matrix.build_type }} 108 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Compiled Object files 2 | **/.DS_Store 3 | *.slo 4 | *.lo 5 | *.o 6 | *.obj 7 | 8 | # Precompiled Headers 9 | *.gch 10 | *.pch 11 | 12 | # Compiled Dynamic libraries 13 | *.so 14 | *.dylib 15 | *.dll 16 | 17 | # Fortran module files 18 | *.mod 19 | *.smod 20 | 21 | # Compiled Static libraries 22 | *.lai 23 | *.la 24 | *.a 25 | *.lib 26 | 27 | # Executables 28 | *.exe 29 | *.out 30 | *.app 31 | 32 | **/cmake-build-debug 33 | **/CMakeCache.txt 34 | **/cmake_install.cmake 35 | **/install_manifest.txt 36 | **/CMakeFiles/ 37 | **/CTestTestfile.cmake 38 | **/Makefile 39 | **/*.cbp 40 | **/CMakeScripts 41 | **/compile_commands.json 42 | 43 | include/divisible/* 44 | 45 | 46 | ## Local 47 | 48 | .idea/*.xml 49 | 50 | build/**/* 51 | 52 | include/* 53 | lib/* 54 | bin/* 55 | test/test_runner -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "3rd/json"] 2 | path = 3rd/json 3 | url = https://github.com/nlohmann/json.git 4 | [submodule "3rd/devourer"] 5 | path = 3rd/devourer 6 | url = https://github.com/OpenIPC/devourer.git 7 | [submodule "3rd/mINI"] 8 | path = 3rd/mINI 9 | url = https://github.com/metayeti/mINI.git 10 | [submodule "3rd/revector"] 11 | path = 3rd/revector 12 | url = https://github.com/floppyhammer/revector.git 13 | [submodule "3rd/SDL"] 14 | path = 3rd/SDL 15 | url = https://github.com/libsdl-org/SDL.git 16 | -------------------------------------------------------------------------------- /CMakeLists.txt: -------------------------------------------------------------------------------- 1 | cmake_minimum_required(VERSION 
3.20) 2 | 3 | # This should go before project declaration. 4 | if (WIN32) 5 | message("[aviateur] Using vcpkg in <$ENV{VCPKG_ROOT}>, which should be the vcpkg dir in which you have installed the dependency libs.") 6 | set(CMAKE_TOOLCHAIN_FILE "$ENV{VCPKG_ROOT}/scripts/buildsystems/vcpkg.cmake") 7 | endif () 8 | 9 | project(aviateur 10 | VERSION 0.1 11 | LANGUAGES C CXX) 12 | 13 | set(CMAKE_CXX_STANDARD 20) 14 | 15 | set(CMAKE_INCLUDE_CURRENT_DIR ON) 16 | 17 | set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/bin") 18 | 19 | find_package(PkgConfig REQUIRED) 20 | 21 | if (WIN32) 22 | pkg_check_modules(LIBUSB REQUIRED IMPORTED_TARGET libusb-1.0) 23 | 24 | find_package(FFmpeg REQUIRED) 25 | find_package(unofficial-sodium CONFIG REQUIRED) 26 | find_package(OpenCV REQUIRED) 27 | else () 28 | pkg_check_modules(LIBAV REQUIRED IMPORTED_TARGET 29 | libavformat 30 | libavcodec 31 | libswresample 32 | libswscale 33 | libavutil 34 | ) 35 | pkg_check_modules(LIBSODIUM REQUIRED IMPORTED_TARGET libsodium) 36 | find_package(OpenCV REQUIRED) 37 | endif () 38 | 39 | file(GLOB_RECURSE SRC_LIST 40 | src/*.cpp 41 | src/*.c 42 | src/*.h 43 | ) 44 | 45 | file(COPY assets DESTINATION ${CMAKE_BINARY_DIR}/bin) 46 | 47 | add_executable(${PROJECT_NAME} ${SRC_LIST}) 48 | 49 | if (WIN32) 50 | string(APPEND CMAKE_CXX_FLAGS " /utf-8") 51 | endif () 52 | 53 | add_subdirectory(3rd/devourer) 54 | target_include_directories(${PROJECT_NAME} PRIVATE "3rd/devourer/src" "3rd/devourer/hal") 55 | 56 | add_subdirectory(3rd/revector) 57 | target_include_directories(${PROJECT_NAME} PRIVATE "3rd/revector/src") 58 | 59 | add_subdirectory(3rd/json) 60 | target_include_directories(${PROJECT_NAME} PRIVATE "3rd/json/include") 61 | 62 | add_subdirectory(3rd/mINI) 63 | target_include_directories(${PROJECT_NAME} PRIVATE "3rd/mINI/src") 64 | 65 | add_subdirectory(3rd/SDL) 66 | target_include_directories(${PROJECT_NAME} PRIVATE "3rd/SDL/include") 67 | 68 | file(COPY assets DESTINATION ${CMAKE_BINARY_DIR}) 69 | 70 | if (WIN32) 71 | target_link_libraries(${PROJECT_NAME} 72 | ${FFMPEG_LIBRARIES} 73 | PkgConfig::LIBUSB 74 | unofficial-sodium::sodium 75 | ${OpenCV_LIBS} 76 | WiFiDriver 77 | revector 78 | SDL3::SDL3-static 79 | ) 80 | else () 81 | target_link_libraries(${PROJECT_NAME} 82 | PkgConfig::LIBAV 83 | PkgConfig::LIBSODIUM 84 | ${OpenCV_LIBS} 85 | WiFiDriver 86 | revector 87 | SDL3::SDL3-static 88 | ) 89 | endif () 90 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Aviateur 2 | 3 |

4 | 5 | Aviateur logo 6 | 7 |

8 | 9 | OpenIPC FPV ground station for Windows & Linux. Forked from [fpv4win](https://github.com/OpenIPC/fpv4win). 10 | 11 | ![](tutorials/interface.png) 12 | 13 | > [!NOTE] 14 | > No AdaptiveLink support. 15 | 16 | > [!NOTE] 17 | > Only the RTL8812AU Wi-Fi adapter is supported. 18 | 19 | ### Usage 20 | 21 | 1. (Windows only) Download [Zadig](https://zadig.akeo.ie/). 22 | 2. (Windows only) Install the libusb driver for your adapter. 23 | Go to *Options* → *List All Devices*. 24 | ![](tutorials/zadig1.jpg) 25 | Select your adapter and install the driver. Remember the USB ID; you will need it shortly. 26 | ![](tutorials/zadig2.jpg) 27 | 28 | 3. Select the adapter with the previously obtained USB ID. 29 | 4. Select your drone channel. 30 | 5. Select your WFB key. 31 | 6. *Start* & fly! 32 | 33 | ### Common run issues 34 | 35 | * If the application crashes at startup on Windows, install the [Microsoft Visual C++ Redistributable](https://learn.microsoft.com/en-us/cpp/windows/latest-supported-vc-redist?view=msvc-170#latest-microsoft-visual-c-redistributable-version) first. 36 | 37 | ### Latency test 38 | 39 | ![](tutorials/latency_test.jpg) 40 | 41 | ### TODOs 42 | 43 | - Ground-side OSD 44 | 45 | ### How to build on Windows 46 | 47 | 1. Install vcpkg in a directory of your choice. 48 | ```powershell 49 | git clone https://github.com/microsoft/vcpkg.git 50 | cd vcpkg 51 | .\bootstrap-vcpkg.bat 52 | ``` 53 | 54 | 2. Install the dependencies. 55 | ```powershell 56 | .\vcpkg integrate install 57 | .\vcpkg install libusb ffmpeg libsodium opencv 58 | ``` 59 | 60 | 3. Add `VCPKG_ROOT` to your environment variables. (Set the value to your vcpkg path.) 61 | ![](tutorials/vcpkg.jpg) 62 | 63 | 4. Clone the third-party library sources. 64 | ```powershell 65 | git submodule init 66 | git submodule update 67 | ``` 68 | 69 | 5. Open as a CMake project and build. 70 | 71 | ### How to build on Linux (Ubuntu 24.04+) 72 | 73 | 1. Install the dependencies. 74 | ```bash 75 | git submodule init 76 | git submodule update 77 | sudo apt install libusb-1.0-0-dev ffmpeg libsodium-dev libopencv-dev xorg-dev 78 | ``` 79 | 80 | 2. Open as a CMake project and build. 81 | 82 | ### Common build issues 83 | 84 | On Windows: 85 | 86 | ``` 87 | CMake Error at C:/Program Files/Microsoft Visual Studio/2022/Community/Common7/IDE/CommonExtensions/Microsoft/CMake/CMake/share/cmake-3.29/Modules/FindPackageHandleStandardArgs.cmake:230 (message): ... 88 | ``` 89 | 90 | This happens because the vcpkg pre-installed by the Visual Studio installer overrides the `VCPKG_ROOT` environment variable. 91 | To fix this, find `set(CMAKE_TOOLCHAIN_FILE "$ENV{VCPKG_ROOT}/scripts/buildsystems/vcpkg.cmake")` in CMakeLists.txt and 92 | replace `$ENV{VCPKG_ROOT}` with the path to the vcpkg you cloned previously. 93 | -------------------------------------------------------------------------------- /assets/COPYRIGHT.md: -------------------------------------------------------------------------------- 1 | Some icons are from [Godot](https://github.com/godotengine/godot). 
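(Referring to the "Common build issues" section of the README above.) A minimal sketch of the described workaround, assuming vcpkg was cloned to `C:/dev/vcpkg`; the path is only a placeholder for wherever you cloned it:

```cmake
# CMakeLists.txt: hard-code the toolchain file if the Visual Studio-bundled vcpkg
# keeps overriding the VCPKG_ROOT environment variable.
# (Replace C:/dev/vcpkg with your own clone path.)
if (WIN32)
    set(CMAKE_TOOLCHAIN_FILE "C:/dev/vcpkg/scripts/buildsystems/vcpkg.cmake")
endif ()
```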
2 | -------------------------------------------------------------------------------- /assets/CaptureImage.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /assets/FPS.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /assets/NotoSansSC-Regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenIPC/aviateur/f2ced7f7c1cd13c556f3ff9f0216a9d83fd9cd76/assets/NotoSansSC-Regular.ttf -------------------------------------------------------------------------------- /assets/RecordVideo.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /assets/Refresh.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /assets/gs.key: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenIPC/aviateur/f2ced7f7c1cd13c556f3ff9f0216a9d83fd9cd76/assets/gs.key -------------------------------------------------------------------------------- /assets/logo.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenIPC/aviateur/f2ced7f7c1cd13c556f3ff9f0216a9d83fd9cd76/assets/logo.ico -------------------------------------------------------------------------------- /assets/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenIPC/aviateur/f2ced7f7c1cd13c556f3ff9f0216a9d83fd9cd76/assets/logo.png -------------------------------------------------------------------------------- /assets/openipc-logo-white.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | -------------------------------------------------------------------------------- /assets/translations.csv: -------------------------------------------------------------------------------- 1 | tag,en,zh,ru 2 | lang,Language,语言,Язык 3 | fullscreen,Fullscreen,全屏,Полноэкранный 4 | bit rate,Bit rate,码率,Битрейт 5 | display fps,Display FPS,显示帧率,Отображение FPS 6 | hw decoding,Hardware decoding,硬件解码,аппаратное декодирование 7 | wi-fi adapter,Wi-Fi adapter,网卡,Wi-Fi адаптер 8 | capture frame,Capture frame,截图,кадр захвата 9 | player control,Player control,播放控制,контроль игрока 10 | record mp4,Record MP4,录制 MP4,Запись MP4 11 | recording,Recording,正在录制,Запись 12 | stop recording,Stop Recording,停止录制,Остановить запись 13 | channel,Channel,频道,Канал 14 | channel width,Channel width,频宽,Ширина канала 15 | key,Key,密钥,Ключ 16 | settings,Settings,设置,Настройки 17 | video stab,Video stabilization,视频增稳,Стабилизация видео 18 | sw decoding,Software decoding,软件解码,программное декодирование 19 | start,Start,开始,начинать 20 | stop,Stop,停止,остановить 21 | open,Open,打开,Открыть 22 | close,Close,关闭,Закрывать 23 | streaming,Streaming,串流,Потоковое 24 | on,On,开,вкл 25 | off,Off,关,выкл 26 | invalid usb msg,Invalid USB device!,无效的 USB 设备!,Недопустимое USB-устройство! 27 | wi-fi stopped msg, Wi-Fi stopped!,无线已停止!,Wi-Fi остановлен! 
28 | device,Device,设备,Устройство 29 | capture fail,Failed to capture frame!,截图失败!,Не удалось захватить кадр 30 | frame saved,Frame saved to: ,截图保存至:,Кадр сохранен в: 31 | record fail,Recording failed!,录制失败!,Запись не удалась 32 | save record fail,Failed to save the record file!,保存录制文件失败!,Не удалось сохранить файл записи! 33 | video saved,Video saved to: ,视频保存至:,Видео сохранено в: 34 | video stab warning,Video stabilization is experimental!,视频增稳是实验性功能!,Стабилизация видео экспериментальная! 35 | open capture folder,Open capture folder,打开录制文件夹,Открыть папку захвата 36 | open appdata folder,Open user data folder,打开用户数据文件夹,Открыть папку с данными пользователя 37 | open crash dump folder,Open crash dump folder,打开崩溃数据文件夹,Открыть папку аварийного дампа 38 | show console,Show console,显示控制台,Показать консоль 39 | hw decoder error,Hardware decoder fails!\nTry use software decoding!,硬件解码错误!请尝试软件解码!,Аппаратный декодер не работает! попробуйте использовать программное декодирование! 40 | signal lost,Signal lost!,信号丢失!,Сигнал потерян! 41 | low light enhancement simple,Low light enhancement (simple),低光增强(简单),Улучшение при слабом освещении (простое) 42 | low light enhancement dnn,Low light enhancement (DNN),低光增强(DNN),Улучшение при слабом освещении (DNN) 43 | copy version num,Copy version number,复制版本号,Скопировать номер версии 44 | control panel,Control panel,控制面板,Панель управления -------------------------------------------------------------------------------- /assets/weights/pairlie_180x320.onnx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenIPC/aviateur/f2ced7f7c1cd13c556f3ff9f0216a9d83fd9cd76/assets/weights/pairlie_180x320.onnx -------------------------------------------------------------------------------- /aviateur.desktop: -------------------------------------------------------------------------------- 1 | [Desktop Entry] 2 | Name=Aviateur 3 | Comment=OpenIPC FPV Ground Station 4 | Exec=aviateur 5 | Icon=aviateur 6 | Terminal=false 7 | Type=Application 8 | Categories=Utility; 9 | -------------------------------------------------------------------------------- /build-appimage.sh: -------------------------------------------------------------------------------- 1 | mkdir appimage-build 2 | cd appimage-build 3 | 4 | cmake .. 
-DCMAKE_INSTALL_PREFIX=/usr 5 | 6 | make -j$(nproc) 7 | 8 | make install DESTDIR=AppDir 9 | 10 | # In case some basic directory structure is not created 11 | ./linuxdeploy-x86_64.AppImage --appdir AppDir 12 | 13 | cp ./bin/assets/logo.png ./AppDir/usr/share/icons/hicolor/128x128/apps/aviateur.png 14 | cp -r ./bin/assets ./AppDir 15 | cp ./bin/aviateur ./AppDir/usr/bin/ 16 | 17 | wget https://github.com/linuxdeploy/linuxdeploy/releases/download/continuous/linuxdeploy-x86_64.AppImage 18 | 19 | # Make it executable 20 | chmod +x linuxdeploy*.AppImage 21 | 22 | ./linuxdeploy-x86_64.AppImage --appdir AppDir --output appimage -i ../assets/logo.png -d ../aviateur.desktop 23 | -------------------------------------------------------------------------------- /set_windows_icon.ps1: -------------------------------------------------------------------------------- 1 | ./rcedit-x64.exe "cmake-build-release/bin/aviateur.exe" --set-icon "assets/logo.ico" -------------------------------------------------------------------------------- /src/feature/night_image_enhancement.cpp: -------------------------------------------------------------------------------- 1 | #include "night_image_enhancement.h" 2 | 3 | #include 4 | #include 5 | #include 6 | #include 7 | 8 | PairLIE::PairLIE(const std::string& modelPath, float exposure) { 9 | this->net = cv::dnn::readNet(modelPath); 10 | 11 | size_t pos = modelPath.rfind("_"); 12 | size_t pos_ = modelPath.rfind("."); 13 | int len = pos_ - pos - 1; 14 | std::string hxw = modelPath.substr(pos + 1, len); 15 | 16 | pos = hxw.rfind("x"); 17 | std::string h = hxw.substr(0, pos); 18 | len = hxw.length() - pos; 19 | std::string w = hxw.substr(pos + 1, len); 20 | this->inpHeight = std::stoi(h); 21 | this->inpWidth = std::stoi(w); 22 | cv::Mat one = cv::Mat_(1, 1) << exposure; 23 | this->exposure_ = cv::dnn::blobFromImage(one); 24 | } 25 | 26 | cv::Mat PairLIE::detect(const cv::Mat& grayImg) { 27 | auto srcImg = cv::Mat(grayImg.size(), CV_8UC3); 28 | cv::cvtColor(grayImg, srcImg, cv::COLOR_GRAY2BGR); 29 | 30 | const int srch = srcImg.rows; 31 | const int srcw = srcImg.cols; 32 | cv::Mat blob = cv::dnn::blobFromImage(srcImg, 33 | 1 / 255.0, 34 | cv::Size(this->inpWidth, this->inpHeight), 35 | cv::Scalar(0, 0, 0), 36 | true, 37 | false); 38 | 39 | this->net.setInput(blob, "input"); 40 | this->net.setInput(this->exposure_, 41 | "exposure"); //// For multi-input OpenCV DNN usage, see https://github.com/opencv/opencv/issues/19304 42 | std::vector outs; 43 | #ifdef _WIN32 44 | net.enableWinograd(false); //// Required if using OpenCV 4.7 45 | #endif 46 | this->net.forward(outs, this->net.getUnconnectedOutLayersNames()); 47 | 48 | float* pdata = (float*)outs[0].data; 49 | const int out_h = outs[0].size[2]; 50 | const int out_w = outs[0].size[3]; 51 | const int channel_step = out_h * out_w; 52 | 53 | cv::Mat rmat(out_h, out_w, CV_32FC1, pdata); 54 | cv::Mat gmat(out_h, out_w, CV_32FC1, pdata + channel_step); 55 | cv::Mat bmat(out_h, out_w, CV_32FC1, pdata + 2 * channel_step); 56 | 57 | rmat *= 255.f; 58 | gmat *= 255.f; 59 | bmat *= 255.f; 60 | 61 | /// output_image = np.clip(output_image, 0, 255) 62 | rmat.setTo(0, rmat < 0); 63 | rmat.setTo(255, rmat > 255); 64 | gmat.setTo(0, gmat < 0); 65 | gmat.setTo(255, gmat > 255); 66 | bmat.setTo(0, bmat < 0); 67 | bmat.setTo(255, bmat > 255); 68 | 69 | std::vector channel_mats(3); 70 | channel_mats[0] = bmat; 71 | channel_mats[1] = gmat; 72 | channel_mats[2] = rmat; 73 | 74 | cv::Mat dstImg; 75 | merge(channel_mats, dstImg); 76 | dstImg.convertTo(dstImg, CV_8UC3); 77 | 
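    // The network output is still at the model's fixed input resolution (inpHeight x inpWidth),
    // so resize it back to the source frame size and convert back to single-channel grey to
    // match the format the caller passed in.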
cv::resize(dstImg, dstImg, cv::Size(srcw, srch)); 78 | 79 | auto finalImg = cv::Mat(dstImg.size(), CV_8UC1); 80 | cv::cvtColor(dstImg, finalImg, cv::COLOR_BGR2GRAY); 81 | 82 | return finalImg; 83 | } 84 | -------------------------------------------------------------------------------- /src/feature/night_image_enhancement.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | 5 | class PairLIE { 6 | public: 7 | PairLIE(const std::string& modelPath, float exposure = 0.5); 8 | cv::Mat detect(const cv::Mat& grayImg); 9 | 10 | private: 11 | int inpWidth; 12 | int inpHeight; 13 | cv::Mat exposure_; 14 | cv::dnn::Net net; 15 | }; 16 | -------------------------------------------------------------------------------- /src/feature/video_stabilizer.cpp: -------------------------------------------------------------------------------- 1 | /* 2 | Thanks to Nghia Ho for his excellent code. 3 | I modified the smoothing step to use a simple Kalman filter, 4 | so it can process live video streams. 5 | Modified by chen jia. 6 | Email: chenjia2013@foxmail.com 7 | */ 8 | 9 | #include "video_stabilizer.h" 10 | 11 | #include 12 | 13 | #include 14 | #include 15 | #include 16 | 17 | using namespace std; 18 | using namespace cv; 19 | 20 | // This video stabilization smooths the global trajectory using a sliding average window 21 | 22 | // 1. Get previous to current frame transformation (dx, dy, da) for all frames 23 | // 2. Accumulate the transformations to get the image trajectory 24 | // 3. Smooth out the trajectory using an averaging window 25 | // 4. Generate new set of previous to current transform, such that the trajectory ends up being the same as the smoothed 26 | // trajectory 27 | // 5. Apply the new transformation to the video 28 | 29 | double pstd = 4e-3; // can be changed 30 | double cstd = 0.25; // can be changed 31 | Trajectory Q(pstd, pstd, pstd); // process noise covariance 32 | Trajectory R(cstd, cstd, cstd); // measurement noise covariance 33 | 34 | cv::Mat VideoStabilizer::stabilize(cv::Mat prev, cv::Mat cur_grey) { 35 | auto timestamp = revector::Timestamp("Aviateur"); 36 | 37 | prev_grey = prev; 38 | 39 | Mat xform = Mat::zeros(2, 3, CV_64F); 40 | xform.at(0, 0) = 1; 41 | xform.at(1, 1) = 1; 42 | 43 | // Get features from the previous frame. 44 | vector prev_corners; 45 | 46 | if (1) { 47 | goodFeaturesToTrack(prev_grey, prev_corners, 200, 0.01, 30); 48 | if (prev_corners.empty()) { 49 | return xform; 50 | } 51 | } else { 52 | std::vector key_points; 53 | FAST(prev_grey, key_points, 10); 54 | prev_corners.resize(key_points.size()); 55 | for (int i = 0; i < prev_corners.size(); i++) { 56 | prev_corners[i] = Point2f(key_points[i].pt.x, key_points[i].pt.y); 57 | } 58 | } 59 | 60 | timestamp.record("goodFeaturesToTrack"); 61 | 62 | vector status; 63 | vector err; 64 | vector cur_corners; 65 | calcOpticalFlowPyrLK(prev_grey, cur_grey, prev_corners, cur_corners, status, err); 66 | 67 | timestamp.record("calcOpticalFlowPyrLK"); 68 | 69 | vector prev_corners2, cur_corners2; 70 | prev_corners2.reserve(prev_corners.size()); 71 | cur_corners2.reserve(cur_corners.size()); 72 | 73 | // Weed out bad matches 74 | for (size_t i = 0; i < status.size(); i++) { 75 | if (status[i]) { 76 | prev_corners2.push_back(prev_corners[i]); 77 | cur_corners2.push_back(cur_corners[i]); 78 | } 79 | } 80 | 81 | // Step 1 - Get previous to current frame transformation 82 | // Rigid transform, translation + rotation only, no scaling/shearing. 
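    // Note: estimateAffinePartial2D actually fits a 4-DOF similarity transform
    // (rotation, uniform scale, translation) of the form:
    //   [ s*cos(a)  -s*sin(a)  dx ]
    //   [ s*sin(a)   s*cos(a)  dy ]
    // The translation (dx, dy) is read from the last column below, and the rotation
    // angle from atan2(m10, m00).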
83 | xform = estimateAffinePartial2D(prev_corners2, cur_corners2); 84 | 85 | timestamp.record("estimateAffinePartial2D"); 86 | #ifndef NDEBUG 87 | timestamp.print(); 88 | #endif 89 | 90 | // In rare cases no transform is found. We'll just use the last known good transform. 91 | if (xform.data == nullptr) { 92 | last_xform.copyTo(xform); 93 | } 94 | 95 | xform.copyTo(last_xform); 96 | 97 | // Decompose transform 98 | double dx = xform.at(0, 2); 99 | double dy = xform.at(1, 2); 100 | double da = atan2(xform.at(1, 0), xform.at(0, 0)); 101 | 102 | if (k == 1) { 103 | // Initial guesses 104 | X = Trajectory(0, 0, 0); // Initial estimate, set 0 105 | P = Trajectory(1, 1, 1); // Error variance, set 1 106 | 107 | x = dx; 108 | y = dy; 109 | a = da; 110 | 111 | // Reset debug data files 112 | #ifndef NDEBUG 113 | out_trajectory = std::ofstream("trajectory.txt"); 114 | out_smoothed_trajectory = std::ofstream("smoothed_trajectory.txt"); 115 | #endif 116 | } else { 117 | // Accumulated frame to frame transform 118 | x += dx; 119 | y += dy; 120 | a += da; 121 | 122 | // Actual measurement 123 | auto z = Trajectory(x, y, a); 124 | 125 | Trajectory X_; // priori estimate 126 | Trajectory P_; // priori estimate error covariance 127 | 128 | // Time update (prediction) 129 | X_ = X; // X_(k) = X(k-1); 130 | P_ = P + Q; // P_(k) = P(k-1)+Q; 131 | 132 | // Measurement update (correction) 133 | Trajectory K = P_ / (P_ + R); // gain, K(k) = P_(k)/( P_(k)+R ) 134 | X = X_ + K * (z - X_); // z-X_ is residual, X(k) = X_(k)+K(k)*(z(k)-X_(k)) 135 | P = (Trajectory(1, 1, 1) - K) * P_; // P(k) = (1-K(k))*P_(k); 136 | } 137 | 138 | #ifndef NDEBUG 139 | out_trajectory << k << " " << x << " " << y << " " << a << endl; 140 | out_smoothed_trajectory << k << " " << X.x << " " << X.y << " " << X.a << endl; 141 | #endif 142 | 143 | // Target - current 144 | double diff_x = X.x - x; 145 | double diff_y = X.y - y; 146 | double diff_a = X.a - a; 147 | 148 | dx += diff_x; 149 | dy += diff_y; 150 | da += diff_a; 151 | 152 | xform.at(0, 0) = cos(da); 153 | xform.at(0, 1) = -sin(da); 154 | xform.at(1, 0) = sin(da); 155 | xform.at(1, 1) = cos(da); 156 | 157 | xform.at(0, 2) = dx; 158 | xform.at(1, 2) = dy; 159 | 160 | cur_grey.copyTo(prev_grey); 161 | 162 | k++; 163 | 164 | return xform; 165 | } 166 | -------------------------------------------------------------------------------- /src/feature/video_stabilizer.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | #include 5 | 6 | // In pixels. Crops the border to reduce the black borders from stabilisation being too noticeable. 
7 | constexpr int HORIZONTAL_BORDER_CROP = 100; 8 | 9 | struct Trajectory { 10 | Trajectory() = default; 11 | 12 | Trajectory(double _x, double _y, double _a) { 13 | x = _x; 14 | y = _y; 15 | a = _a; 16 | } 17 | 18 | friend Trajectory operator+(const Trajectory &c1, const Trajectory &c2) { 19 | return Trajectory(c1.x + c2.x, c1.y + c2.y, c1.a + c2.a); 20 | } 21 | 22 | friend Trajectory operator-(const Trajectory &c1, const Trajectory &c2) { 23 | return Trajectory(c1.x - c2.x, c1.y - c2.y, c1.a - c2.a); 24 | } 25 | 26 | friend Trajectory operator*(const Trajectory &c1, const Trajectory &c2) { 27 | return Trajectory(c1.x * c2.x, c1.y * c2.y, c1.a * c2.a); 28 | } 29 | 30 | friend Trajectory operator/(const Trajectory &c1, const Trajectory &c2) { 31 | return Trajectory(c1.x / c2.x, c1.y / c2.y, c1.a / c2.a); 32 | } 33 | 34 | Trajectory operator=(const Trajectory &rx) { 35 | x = rx.x; 36 | y = rx.y; 37 | a = rx.a; 38 | return Trajectory(x, y, a); 39 | } 40 | 41 | double x = 0; 42 | double y = 0; 43 | double a = 0; // angle 44 | }; 45 | 46 | class VideoStabilizer { 47 | public: 48 | VideoStabilizer() = default; 49 | 50 | cv::Mat stabilize(cv::Mat prev, cv::Mat cur_grey); 51 | 52 | private: 53 | cv::Mat prev_grey; 54 | 55 | cv::Mat last_xform; 56 | 57 | int k = 1; 58 | 59 | double a = 0; 60 | double x = 0; 61 | double y = 0; 62 | 63 | // Step 3 - Smooth out the trajectory using an averaging window 64 | Trajectory X; // posteriori state estimate 65 | Trajectory P; // posteriori estimate error covariance 66 | 67 | // Debug data 68 | std::ofstream out_trajectory; 69 | std::ofstream out_smoothed_trajectory; 70 | }; 71 | -------------------------------------------------------------------------------- /src/gui/control_panel.cpp: -------------------------------------------------------------------------------- 1 | #include "control_panel.h" 2 | 3 | #include 4 | 5 | #include "settings_tab.h" 6 | 7 | void ControlPanel::update_dongle_list() { 8 | auto menu = dongle_menu_button_->get_popup_menu().lock(); 9 | 10 | devices_ = GuiInterface::GetDeviceList(); 11 | 12 | menu->clear_items(); 13 | 14 | bool previous_device_exists = false; 15 | for (const auto &d : devices_) { 16 | if (net_card_name == d.display_name) { 17 | previous_device_exists = true; 18 | selected_net_card = d; 19 | } 20 | menu->create_item(d.display_name); 21 | } 22 | 23 | if (!previous_device_exists) { 24 | net_card_name = ""; 25 | selected_net_card = {}; 26 | } 27 | } 28 | 29 | void ControlPanel::update_adapter_start_button_looking(bool start_status) const { 30 | tab_container_->set_tab_disabled(!start_status); 31 | 32 | if (!start_status) { 33 | play_button_->theme_normal.bg_color = RED; 34 | play_button_->theme_hovered.bg_color = RED; 35 | play_button_->theme_pressed.bg_color = RED; 36 | play_button_->set_text(FTR("stop") + " (F5)"); 37 | } else { 38 | play_button_->theme_normal.bg_color = GREEN; 39 | play_button_->theme_hovered.bg_color = GREEN; 40 | play_button_->theme_pressed.bg_color = GREEN; 41 | play_button_->set_text(FTR("start") + " (F5)"); 42 | } 43 | } 44 | 45 | void ControlPanel::update_url_start_button_looking(bool start_status) const { 46 | tab_container_->set_tab_disabled(!start_status); 47 | 48 | if (!start_status) { 49 | play_url_button_->theme_normal.bg_color = RED; 50 | play_url_button_->theme_hovered.bg_color = RED; 51 | play_url_button_->theme_pressed.bg_color = RED; 52 | play_url_button_->set_text(FTR("close") + " (F5)"); 53 | } else { 54 | play_url_button_->theme_normal.bg_color = GREEN; 55 | 
play_url_button_->theme_hovered.bg_color = GREEN; 56 | play_url_button_->theme_pressed.bg_color = GREEN; 57 | play_url_button_->set_text(FTR("start") + " (F5)"); 58 | } 59 | } 60 | 61 | void ControlPanel::custom_ready() { 62 | auto &ini = GuiInterface::Instance().ini_; 63 | net_card_name = ini[CONFIG_ADAPTER][ADAPTER_DEVICE]; 64 | channel = std::stoi(ini[CONFIG_ADAPTER][ADAPTER_CHANNEL]); 65 | channelWidthMode = std::stoi(ini[CONFIG_ADAPTER][ADAPTER_CHANNEL_WIDTH_MODE]); 66 | keyPath = ini[CONFIG_ADAPTER][ADAPTER_CHANNEL_KEY]; 67 | codec = ini[CONFIG_ADAPTER][ADAPTER_CHANNEL_CODEC]; 68 | 69 | auto default_theme = revector::DefaultResource::get_singleton()->get_default_theme(); 70 | theme_bg = std::make_optional(default_theme->panel.styles["background"]); 71 | theme_bg.value().corner_radius = 0; 72 | theme_bg.value().border_width = 0; 73 | theme_bg->border_width = 0; 74 | 75 | set_anchor_flag(revector::AnchorFlag::RightWide); 76 | 77 | tab_container_ = std::make_shared(); 78 | add_child(tab_container_); 79 | tab_container_->set_anchor_flag(revector::AnchorFlag::FullRect); 80 | 81 | // Wi-Fi adapter tab 82 | { 83 | auto margin_container = std::make_shared(); 84 | margin_container->set_margin_all(8); 85 | tab_container_->add_child(margin_container); 86 | tab_container_->set_tab_title(0, FTR("wi-fi adapter")); 87 | 88 | auto vbox_container = std::make_shared(); 89 | vbox_container->set_separation(8); 90 | margin_container->add_child(vbox_container); 91 | 92 | { 93 | auto hbox_container = std::make_shared(); 94 | hbox_container->set_separation(8); 95 | vbox_container->add_child(hbox_container); 96 | 97 | auto label = std::make_shared(); 98 | label->set_text(FTR("device")); 99 | hbox_container->add_child(label); 100 | 101 | dongle_menu_button_ = std::make_shared(); 102 | 103 | dongle_menu_button_->container_sizing.expand_h = true; 104 | dongle_menu_button_->container_sizing.flag_h = revector::ContainerSizingFlag::Fill; 105 | hbox_container->add_child(dongle_menu_button_); 106 | 107 | // Do this before setting dongle button text. 
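            // update_dongle_list() repopulates the popup menu and keeps the previously
            // selected adapter if it is still present; otherwise net_card_name is cleared,
            // so the button text set just below stays in sync with the actual device list.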
108 | update_dongle_list(); 109 | dongle_menu_button_->set_text(net_card_name); 110 | 111 | auto callback = [this](uint32_t) { net_card_name = dongle_menu_button_->get_selected_item_text(); }; 112 | dongle_menu_button_->connect_signal("item_selected", callback); 113 | 114 | refresh_dongle_button_ = std::make_shared(); 115 | auto icon = std::make_shared(revector::get_asset_dir("Refresh.svg")); 116 | refresh_dongle_button_->set_icon_normal(icon); 117 | refresh_dongle_button_->set_text(""); 118 | hbox_container->add_child(refresh_dongle_button_); 119 | 120 | auto callback2 = [this]() { update_dongle_list(); }; 121 | refresh_dongle_button_->connect_signal("pressed", callback2); 122 | } 123 | 124 | { 125 | auto hbox_container = std::make_shared(); 126 | vbox_container->add_child(hbox_container); 127 | 128 | auto label = std::make_shared(); 129 | label->set_text(FTR("channel")); 130 | hbox_container->add_child(label); 131 | 132 | channel_button_ = std::make_shared(); 133 | channel_button_->container_sizing.expand_h = true; 134 | channel_button_->container_sizing.flag_h = revector::ContainerSizingFlag::Fill; 135 | hbox_container->add_child(channel_button_); 136 | 137 | { 138 | auto channel_menu = channel_button_->get_popup_menu(); 139 | 140 | auto callback = [this](uint32_t) { channel = std::stoi(channel_button_->get_selected_item_text()); }; 141 | channel_button_->connect_signal("item_selected", callback); 142 | 143 | uint32_t selected = 0; 144 | for (auto c : CHANNELS) { 145 | channel_menu.lock()->create_item(std::to_string(c)); 146 | if (std::to_string(channel) == std::to_string(c)) { 147 | selected = channel_menu.lock()->get_item_count() - 1; 148 | } 149 | } 150 | 151 | channel_button_->select_item(selected); 152 | } 153 | } 154 | 155 | { 156 | auto hbox_container = std::make_shared(); 157 | vbox_container->add_child(hbox_container); 158 | 159 | auto label = std::make_shared(); 160 | label->set_text(FTR("channel width")); 161 | hbox_container->add_child(label); 162 | 163 | channel_width_button_ = std::make_shared(); 164 | channel_width_button_->container_sizing.expand_h = true; 165 | channel_width_button_->container_sizing.flag_h = revector::ContainerSizingFlag::Fill; 166 | hbox_container->add_child(channel_width_button_); 167 | 168 | { 169 | auto channel_width_menu = channel_width_button_->get_popup_menu(); 170 | 171 | auto callback = [this](uint32_t) { 172 | auto selected = channel_width_button_->get_selected_item_index(); 173 | if (selected.has_value()) { 174 | channelWidthMode = selected.value(); 175 | } 176 | }; 177 | channel_width_button_->connect_signal("item_selected", callback); 178 | 179 | uint32_t selected = 0; 180 | for (auto width : CHANNEL_WIDTHS) { 181 | channel_width_menu.lock()->create_item(width); 182 | int current_index = channel_width_menu.lock()->get_item_count() - 1; 183 | if (channelWidthMode == current_index) { 184 | selected = current_index; 185 | } 186 | } 187 | channel_width_button_->select_item(selected); 188 | } 189 | } 190 | 191 | { 192 | auto hbox_container = std::make_shared(); 193 | vbox_container->add_child(hbox_container); 194 | 195 | auto label = std::make_shared(); 196 | label->set_text(FTR("key")); 197 | hbox_container->add_child(label); 198 | 199 | auto text_edit = std::make_shared(); 200 | text_edit->set_editable(false); 201 | text_edit->set_text(std::filesystem::path(keyPath).filename().string()); 202 | text_edit->container_sizing.expand_h = true; 203 | text_edit->container_sizing.flag_h = revector::ContainerSizingFlag::Fill; 204 | 
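            // The line edit is read-only and shows only the key file name; the full path is
            // kept in keyPath (loaded from the ini) and updated by the file dialog below.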
hbox_container->add_child(text_edit); 205 | 206 | auto file_dialog = std::make_shared(); 207 | add_child(file_dialog); 208 | 209 | if (!keyPath.empty()) { 210 | auto defaultKeyPath = std::filesystem::absolute(keyPath).string(); 211 | file_dialog->set_default_path(defaultKeyPath); 212 | } 213 | 214 | auto select_button = std::make_shared(); 215 | select_button->set_text(FTR("open")); 216 | 217 | std::weak_ptr file_dialog_weak = file_dialog; 218 | std::weak_ptr text_edit_weak = text_edit; 219 | auto callback = [this, file_dialog_weak, text_edit_weak] { 220 | auto path = file_dialog_weak.lock()->show(); 221 | if (path.has_value()) { 222 | std::filesystem::path p(path.value()); 223 | text_edit_weak.lock()->set_text(p.filename().string()); 224 | keyPath = path.value(); 225 | } 226 | }; 227 | select_button->connect_signal("pressed", callback); 228 | hbox_container->add_child(select_button); 229 | } 230 | 231 | { 232 | play_button_ = std::make_shared(); 233 | play_button_->container_sizing.expand_h = true; 234 | play_button_->container_sizing.flag_h = revector::ContainerSizingFlag::Fill; 235 | update_adapter_start_button_looking(true); 236 | 237 | auto callback1 = [this] { 238 | bool start = play_button_->get_text() == FTR("start") + " (F5)"; 239 | 240 | if (start) { 241 | std::optional target_device_id; 242 | for (auto &d : devices_) { 243 | if (net_card_name == d.display_name) { 244 | target_device_id = d; 245 | } 246 | } 247 | 248 | if (target_device_id.has_value()) { 249 | bool res = 250 | GuiInterface::Start(target_device_id.value(), channel, channelWidthMode, keyPath, codec); 251 | if (!res) { 252 | start = false; 253 | } 254 | } else { 255 | start = false; 256 | } 257 | } else { 258 | GuiInterface::Stop(); 259 | } 260 | 261 | update_adapter_start_button_looking(!start); 262 | }; 263 | play_button_->connect_signal("pressed", callback1); 264 | vbox_container->add_child(play_button_); 265 | } 266 | } 267 | 268 | // Local tab 269 | { 270 | auto margin_container = std::make_shared(); 271 | margin_container->set_margin_all(8); 272 | tab_container_->add_child(margin_container); 273 | tab_container_->set_tab_title(1, FTR("streaming")); 274 | 275 | auto vbox_container = std::make_shared(); 276 | vbox_container->set_separation(8); 277 | margin_container->add_child(vbox_container); 278 | 279 | auto hbox_container = std::make_shared(); 280 | vbox_container->add_child(hbox_container); 281 | 282 | auto label = std::make_shared(); 283 | label->set_text("URL:"); 284 | hbox_container->add_child(label); 285 | 286 | url_edit_ = std::make_shared(); 287 | url_edit_->set_editable(true); 288 | url_edit_->set_text(GuiInterface::Instance().ini_[CONFIG_STREAMING][CONFIG_STREAMING_URL]); 289 | url_edit_->container_sizing.expand_h = true; 290 | url_edit_->container_sizing.flag_h = revector::ContainerSizingFlag::Fill; 291 | hbox_container->add_child(url_edit_); 292 | 293 | { 294 | play_url_button_ = std::make_shared(); 295 | play_url_button_->container_sizing.expand_h = true; 296 | play_url_button_->container_sizing.flag_h = revector::ContainerSizingFlag::Fill; 297 | update_url_start_button_looking(true); 298 | 299 | auto callback1 = [this] { 300 | bool start = play_url_button_->get_text() == FTR("start") + " (F5)"; 301 | 302 | if (start) { 303 | GuiInterface::Instance().EmitRtpStream(url_edit_->get_text()); 304 | GuiInterface::Instance().ini_[CONFIG_STREAMING][CONFIG_STREAMING_URL] = url_edit_->get_text(); 305 | } else { 306 | GuiInterface::Instance().EmitUrlStreamShouldStop(); 307 | } 308 | 309 | 
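                // Pass !start so the button flips to the opposite state: red Close after
                // starting the stream, green Start after stopping it.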
update_url_start_button_looking(!start); 310 | }; 311 | 312 | play_url_button_->connect_signal("pressed", callback1); 313 | vbox_container->add_child(play_url_button_); 314 | } 315 | } 316 | 317 | // Settings tab 318 | { 319 | auto margin_container = std::make_shared(); 320 | tab_container_->add_child(margin_container); 321 | tab_container_->set_tab_title(2, FTR("settings")); 322 | } 323 | } 324 | 325 | void ControlPanel::custom_input(revector::InputEvent &event) { 326 | auto input_server = revector::InputServer::get_singleton(); 327 | 328 | if (event.type == revector::InputEventType::Key) { 329 | auto key_args = event.args.key; 330 | 331 | if (key_args.key == revector::KeyCode::F5) { 332 | if (key_args.pressed) { 333 | if (tab_container_->get_current_tab().has_value()) { 334 | if (tab_container_->get_current_tab().value() == 0) { 335 | play_button_->press(); 336 | } else { 337 | play_url_button_->press(); 338 | } 339 | } 340 | } 341 | } 342 | } 343 | } 344 | -------------------------------------------------------------------------------- /src/gui/control_panel.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include "../gui_interface.h" 4 | #include "app.h" 5 | 6 | class ControlPanel : public revector::Container { 7 | public: 8 | std::shared_ptr dongle_menu_button_; 9 | std::shared_ptr channel_button_; 10 | std::shared_ptr channel_width_button_; 11 | std::shared_ptr refresh_dongle_button_; 12 | 13 | std::string net_card_name; 14 | std::optional selected_net_card; 15 | uint32_t channel = 0; 16 | uint32_t channelWidthMode = 0; 17 | std::string keyPath; 18 | std::string codec; 19 | 20 | std::shared_ptr play_button_; 21 | 22 | std::shared_ptr play_url_button_; 23 | std::shared_ptr url_edit_; 24 | 25 | std::shared_ptr tab_container_; 26 | 27 | std::vector devices_; 28 | 29 | void update_dongle_list(); 30 | 31 | void update_adapter_start_button_looking(bool start_status) const; 32 | 33 | void update_url_start_button_looking(bool start_status) const; 34 | 35 | void custom_ready() override; 36 | 37 | void custom_input(revector::InputEvent &event) override; 38 | }; 39 | -------------------------------------------------------------------------------- /src/gui/player_rect.cpp: -------------------------------------------------------------------------------- 1 | #include "player_rect.h" 2 | 3 | #include "../gui_interface.h" 4 | 5 | class SignalBar : public revector::ProgressBar { 6 | void custom_update(double dt) override { 7 | if (value < 0.333 * max_value) { 8 | theme_progress->bg_color = RED; 9 | } 10 | if (value > 0.333 * max_value && value < 0.6667 * max_value) { 11 | theme_progress->bg_color = YELLOW; 12 | } 13 | if (value > 0.6667 * max_value) { 14 | theme_progress->bg_color = GREEN; 15 | } 16 | } 17 | }; 18 | 19 | void PlayerRect::show_red_tip(std::string tip) { 20 | tip_label_->set_text_style(revector::TextStyle{RED}); 21 | tip_label_->show_tip(tip); 22 | } 23 | 24 | void PlayerRect::show_green_tip(std::string tip) { 25 | tip_label_->set_text_style(revector::TextStyle{GREEN}); 26 | tip_label_->show_tip(tip); 27 | } 28 | 29 | void PlayerRect::custom_input(revector::InputEvent &event) { 30 | auto input_server = revector::InputServer::get_singleton(); 31 | 32 | if (event.type == revector::InputEventType::Key) { 33 | auto key_args = event.args.key; 34 | 35 | if (key_args.key == revector::KeyCode::F11) { 36 | if (key_args.pressed) { 37 | fullscreen_button_->press(); 38 | } 39 | } 40 | 41 | if (playing_ && key_args.key == 
revector::KeyCode::F10) { 42 | if (key_args.pressed) { 43 | record_button_->press(); 44 | } 45 | } 46 | } 47 | } 48 | 49 | void PlayerRect::custom_ready() { 50 | auto onRtpStream = [this](std::string sdp_file) { 51 | playing_file_ = sdp_file; 52 | start_playing(sdp_file); 53 | }; 54 | GuiInterface::Instance().rtpStreamCallbacks.emplace_back(onRtpStream); 55 | 56 | collapse_panel_ = std::make_shared(); 57 | collapse_panel_->set_title(FTR("player control")); 58 | collapse_panel_->set_collapse(true); 59 | collapse_panel_->set_color(revector::ColorU(84, 138, 247)); 60 | collapse_panel_->set_anchor_flag(revector::AnchorFlag::TopRight); 61 | collapse_panel_->set_visibility(false); 62 | add_child(collapse_panel_); 63 | 64 | auto vbox = std::make_shared(); 65 | collapse_panel_->add_child(vbox); 66 | 67 | logo_ = std::make_shared(revector::get_asset_dir("openipc-logo-white.svg")); 68 | texture = logo_; 69 | 70 | auto render_server = revector::RenderServer::get_singleton(); 71 | player_ = std::make_shared(render_server->device_, render_server->queue_); 72 | 73 | render_image_ = std::make_shared(Pathfinder::Vec2I{1920, 1080}); 74 | 75 | set_stretch_mode(StretchMode::KeepAspectCentered); 76 | 77 | tip_label_ = std::make_shared(); 78 | tip_label_->set_anchor_flag(revector::AnchorFlag::Center); 79 | tip_label_->set_visibility(false); 80 | tip_label_->set_text_style(revector::TextStyle{revector::ColorU::red()}); 81 | add_child(tip_label_); 82 | 83 | hud_container_ = std::make_shared(); 84 | add_child(hud_container_); 85 | hud_container_->set_size({0, 48}); 86 | revector::StyleBox box; 87 | box.bg_color = revector::ColorU(27, 27, 27, 27); 88 | box.border_width = 0; 89 | box.corner_radius = 0; 90 | hud_container_->set_theme_bg(box); 91 | hud_container_->set_anchor_flag(revector::AnchorFlag::BottomWide); 92 | hud_container_->set_visibility(false); 93 | hud_container_->set_separation(16); 94 | 95 | { 96 | video_info_label_ = std::make_shared(); 97 | hud_container_->add_child(video_info_label_); 98 | video_info_label_->set_text_style(revector::TextStyle{revector::ColorU::white()}); 99 | video_info_label_->set_text(""); 100 | 101 | auto onFpsUpdate = [this](uint32_t width, uint32_t height, float fps) { 102 | std::stringstream ss; 103 | ss << width << "x" << height << "@" << int(round(fps)); 104 | video_info_label_->set_text(ss.str()); 105 | }; 106 | GuiInterface::Instance().decoderReadyCallbacks.emplace_back(onFpsUpdate); 107 | } 108 | 109 | bitrate_label_ = std::make_shared(); 110 | hud_container_->add_child(bitrate_label_); 111 | bitrate_label_->set_text(FTR("bit rate") + ": 0 bps"); 112 | bitrate_label_->set_text_style(revector::TextStyle{revector::ColorU::white()}); 113 | 114 | { 115 | display_fps_label_ = std::make_shared(); 116 | hud_container_->add_child(display_fps_label_); 117 | display_fps_label_->set_text_style(revector::TextStyle{revector::ColorU::white()}); 118 | display_fps_label_->set_text(FTR("display fps") + ":"); 119 | } 120 | 121 | hw_status_label_ = std::make_shared(); 122 | hud_container_->add_child(hw_status_label_); 123 | hw_status_label_->set_text_style(revector::TextStyle{revector::ColorU::white()}); 124 | 125 | auto rssi_label = std::make_shared(); 126 | hud_container_->add_child(rssi_label); 127 | rssi_label->set_text("RSSI"); 128 | rssi_label->set_text_style(revector::TextStyle{revector::ColorU::white()}); 129 | 130 | rssi_bar_ = std::make_shared(); 131 | hud_container_->add_child(rssi_bar_); 132 | rssi_bar_->set_lerp_enabled(true); 133 | rssi_bar_->set_custom_minimum_size({64, 
16}); 134 | rssi_bar_->set_label_visibility(false); 135 | rssi_bar_->container_sizing.expand_v = false; 136 | rssi_bar_->container_sizing.flag_v = revector::ContainerSizingFlag::ShrinkCenter; 137 | 138 | auto snr_label = std::make_shared(); 139 | hud_container_->add_child(snr_label); 140 | snr_label->set_text("SNR"); 141 | snr_label->set_text_style(revector::TextStyle{revector::ColorU::white()}); 142 | 143 | snr_bar_ = std::make_shared(); 144 | hud_container_->add_child(snr_bar_); 145 | snr_bar_->set_lerp_enabled(true); 146 | snr_bar_->set_custom_minimum_size({64, 16}); 147 | snr_bar_->set_label_visibility(false); 148 | snr_bar_->container_sizing.expand_v = false; 149 | snr_bar_->container_sizing.flag_v = revector::ContainerSizingFlag::ShrinkCenter; 150 | 151 | rx_status_update_timer = std::make_shared(); 152 | add_child(rx_status_update_timer); 153 | 154 | auto callback = [this] { 155 | rssi_bar_->set_value( 156 | (GuiInterface::Instance().rx_status_.rssi[0] + GuiInterface::Instance().rx_status_.rssi[1]) / 2); 157 | snr_bar_->set_value((GuiInterface::Instance().rx_status_.snr[0] + GuiInterface::Instance().rx_status_.snr[1]) / 158 | 2); 159 | 160 | rx_status_update_timer->start_timer(0.1); 161 | }; 162 | rx_status_update_timer->connect_signal("timeout", callback); 163 | rx_status_update_timer->start_timer(0.1); 164 | 165 | record_status_label_ = std::make_shared(); 166 | hud_container_->add_child(record_status_label_); 167 | record_status_label_->container_sizing.expand_h = true; 168 | record_status_label_->container_sizing.flag_h = revector::ContainerSizingFlag::ShrinkEnd; 169 | record_status_label_->set_text(""); 170 | record_status_label_->set_text_style(revector::TextStyle{revector::ColorU::white()}); 171 | 172 | auto capture_button = std::make_shared(); 173 | vbox->add_child(capture_button); 174 | capture_button->set_text(FTR("capture frame")); 175 | auto icon = std::make_shared(revector::get_asset_dir("CaptureImage.svg")); 176 | capture_button->set_icon_normal(icon); 177 | auto capture_callback = [this] { 178 | auto output_file = player_->captureJpeg(); 179 | if (output_file.empty()) { 180 | show_red_tip(FTR("capture fail")); 181 | } else { 182 | show_green_tip(FTR("frame saved") + output_file); 183 | } 184 | }; 185 | capture_button->connect_signal("pressed", capture_callback); 186 | 187 | record_button_ = std::make_shared(); 188 | vbox->add_child(record_button_); 189 | auto icon2 = std::make_shared(revector::get_asset_dir("RecordVideo.svg")); 190 | record_button_->set_icon_normal(icon2); 191 | record_button_->set_text(FTR("record mp4") + " (F10)"); 192 | 193 | auto record_button_raw = record_button_.get(); 194 | auto record_callback = [record_button_raw, this] { 195 | if (!is_recording) { 196 | is_recording = player_->startRecord(); 197 | 198 | if (is_recording) { 199 | record_button_raw->set_text(FTR("stop recording") + " (F10)"); 200 | 201 | record_start_time = std::chrono::steady_clock::now(); 202 | 203 | record_status_label_->set_text(FTR("recording") + ": 00:00"); 204 | } else { 205 | record_status_label_->set_text(""); 206 | show_red_tip(FTR("record fail")); 207 | } 208 | } else { 209 | is_recording = false; 210 | 211 | auto output_file = player_->stopRecord(); 212 | 213 | record_button_raw->set_text(FTR("record mp4") + " (F10)"); 214 | record_status_label_->set_text(""); 215 | 216 | if (output_file.empty()) { 217 | show_red_tip(FTR("save record fail")); 218 | } else { 219 | show_green_tip(FTR("video saved") + output_file); 220 | } 221 | } 222 | }; 223 | 
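    // One handler toggles recording: the first press starts an MP4 recording, the next press
    // stops it and saves the file, driven by the is_recording flag.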
record_button_->connect_signal("pressed", record_callback); 224 | 225 | { 226 | video_stabilization_button_ = std::make_shared(); 227 | video_stabilization_button_->set_text(FTR("video stab")); 228 | vbox->add_child(video_stabilization_button_); 229 | 230 | auto callback = [this](bool toggled) { 231 | player_->yuvRenderer_->mStabilize = toggled; 232 | if (toggled) { 233 | show_red_tip(FTR("video stab warning")); 234 | } 235 | }; 236 | video_stabilization_button_->connect_signal("toggled", callback); 237 | } 238 | 239 | { 240 | low_light_enhancement_button_simple_ = std::make_shared(); 241 | low_light_enhancement_button_simple_->set_text(FTR("low light enhancement simple")); 242 | vbox->add_child(low_light_enhancement_button_simple_); 243 | 244 | auto callback = [this](bool toggled) { 245 | player_->yuvRenderer_->mLowLightEnhancementSimple = toggled; 246 | if (toggled) { 247 | if (low_light_enhancement_button_advanced_->get_pressed()) { 248 | low_light_enhancement_button_advanced_->press(); 249 | } 250 | } 251 | }; 252 | low_light_enhancement_button_simple_->connect_signal("toggled", callback); 253 | } 254 | 255 | { 256 | low_light_enhancement_button_advanced_ = std::make_shared(); 257 | low_light_enhancement_button_advanced_->set_text(FTR("low light enhancement dnn")); 258 | vbox->add_child(low_light_enhancement_button_advanced_); 259 | 260 | auto callback = [this](bool toggled) { 261 | player_->yuvRenderer_->mLowLightEnhancementAdvanced = toggled; 262 | if (toggled) { 263 | if (low_light_enhancement_button_simple_->get_pressed()) { 264 | low_light_enhancement_button_simple_->press(); 265 | } 266 | } 267 | }; 268 | low_light_enhancement_button_advanced_->connect_signal("toggled", callback); 269 | } 270 | 271 | { 272 | auto button = std::make_shared(); 273 | button->set_text(FTR("sw decoding")); 274 | vbox->add_child(button); 275 | 276 | auto callback = [this](bool toggled) { 277 | force_software_decoding = toggled; 278 | if (playing_) { 279 | player_->stop(); 280 | player_->play(playing_file_, force_software_decoding); 281 | } 282 | }; 283 | button->connect_signal("toggled", callback); 284 | } 285 | 286 | auto onBitrateUpdate = [this](uint64_t bitrate) { 287 | std::string text = FTR("bit rate") + ": "; 288 | if (bitrate > 1024 * 1024) { 289 | text += std::format("{:.1f}", bitrate / 1024.0 / 1024.0) + " Mbps"; 290 | } else if (bitrate > 1024) { 291 | text += std::format("{:.1f}", bitrate / 1024.0) + " Kbps"; 292 | } else { 293 | text += std::format("{:d}", bitrate) + " bps"; 294 | } 295 | bitrate_label_->set_text(text); 296 | }; 297 | GuiInterface::Instance().bitrateUpdateCallbacks.emplace_back(onBitrateUpdate); 298 | 299 | auto onTipUpdate = [this](std::string msg) { show_red_tip(msg); }; 300 | GuiInterface::Instance().tipCallbacks.emplace_back(onTipUpdate); 301 | 302 | auto onUrlStreamShouldStop = [this]() { stop_playing(); }; 303 | GuiInterface::Instance().urlStreamShouldStopCallbacks.emplace_back(onUrlStreamShouldStop); 304 | } 305 | 306 | void PlayerRect::custom_update(double dt) { 307 | player_->update(dt); 308 | 309 | hw_status_label_->set_text(FTR("hw decoding") + ": " + 310 | std::string(player_->isHardwareAccelerated() ? 
FTR("on") : FTR("off"))); 311 | 312 | display_fps_label_->set_text(FTR("display fps") + ": " + 313 | std::to_string(revector::Engine::get_singleton()->get_fps_int())); 314 | 315 | if (is_recording) { 316 | std::chrono::duration duration = 317 | std::chrono::steady_clock::now() - record_start_time; 318 | 319 | int total_seconds = duration.count(); 320 | int hours = total_seconds / 3600; 321 | int minutes = (total_seconds % 3600) / 60; 322 | int seconds = total_seconds % 60; 323 | 324 | std::ostringstream ss; 325 | ss << FTR("recording") << ": "; 326 | if (hours > 0) { 327 | ss << hours << ":"; 328 | } 329 | ss << std::setw(2) << std::setfill('0') << minutes << ":"; 330 | ss << std::setw(2) << std::setfill('0') << seconds; 331 | 332 | record_status_label_->set_text(ss.str()); 333 | } 334 | } 335 | 336 | void PlayerRect::custom_draw() { 337 | if (!playing_) { 338 | return; 339 | } 340 | auto render_image = (revector::RenderImage *)texture.get(); 341 | player_->yuvRenderer_->render(render_image->get_texture(), video_stabilization_button_->get_pressed()); 342 | } 343 | 344 | void PlayerRect::start_playing(const std::string &url) { 345 | playing_ = true; 346 | player_->play(url, force_software_decoding); 347 | texture = render_image_; 348 | 349 | collapse_panel_->set_visibility(true); 350 | hud_container_->set_visibility(true); 351 | } 352 | 353 | void PlayerRect::stop_playing() { 354 | playing_ = false; 355 | 356 | if (is_recording) { 357 | record_button_->press(); 358 | } 359 | 360 | // Fix crash in WfbReceiver destructor. 361 | if (player_) { 362 | player_->stop(); 363 | } 364 | texture = logo_; 365 | 366 | collapse_panel_->set_visibility(false); 367 | hud_container_->set_visibility(false); 368 | } 369 | -------------------------------------------------------------------------------- /src/gui/player_rect.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include "../player/RealTimePlayer.h" 4 | #include "app.h" 5 | #include "tip_label.h" 6 | 7 | class SignalBar; 8 | 9 | class PlayerRect : public revector::TextureRect { 10 | public: 11 | std::shared_ptr player_; 12 | std::string playing_file_; 13 | bool playing_ = false; 14 | 15 | bool force_software_decoding = false; 16 | 17 | std::shared_ptr logo_; 18 | std::shared_ptr render_image_; 19 | 20 | std::shared_ptr tip_label_; 21 | 22 | bool is_recording = false; 23 | 24 | std::chrono::time_point record_start_time; 25 | 26 | std::shared_ptr rx_status_update_timer; 27 | 28 | std::shared_ptr collapse_panel_; 29 | 30 | std::shared_ptr hud_container_; 31 | 32 | std::shared_ptr record_status_label_; 33 | 34 | std::shared_ptr bitrate_label_; 35 | 36 | std::shared_ptr hw_status_label_; 37 | 38 | std::shared_ptr rx_status_label_; 39 | std::shared_ptr rssi_bar_; 40 | std::shared_ptr snr_bar_; 41 | 42 | std::shared_ptr video_info_label_; 43 | 44 | std::shared_ptr display_fps_label_; 45 | 46 | std::shared_ptr video_stabilization_button_; 47 | 48 | std::shared_ptr low_light_enhancement_button_simple_; 49 | std::shared_ptr low_light_enhancement_button_advanced_; 50 | 51 | std::shared_ptr top_control_container; 52 | std::shared_ptr fullscreen_button_; 53 | 54 | std::shared_ptr record_button_; 55 | 56 | // Record when the signal had been lost. 
57 | std::chrono::time_point signal_lost_time_; 58 | 59 | void show_red_tip(std::string tip); 60 | 61 | void show_green_tip(std::string tip); 62 | 63 | void custom_input(revector::InputEvent &event) override; 64 | 65 | void custom_ready() override; 66 | 67 | void custom_update(double dt) override; 68 | 69 | void custom_draw() override; 70 | 71 | void start_playing(const std::string &url); 72 | 73 | void stop_playing(); 74 | }; 75 | -------------------------------------------------------------------------------- /src/gui/settings_tab.cpp: -------------------------------------------------------------------------------- 1 | #include "settings_tab.h" 2 | 3 | const std::string AVIATEUR_VERSION = "0.1.0"; 4 | 5 | void open_explorer(const std::string& dir) { 6 | #ifdef _WIN32 7 | ShellExecuteA(NULL, "open", dir.c_str(), NULL, NULL, SW_SHOWDEFAULT); 8 | #else 9 | std::string cmd = "xdg-open \"" + dir + "\""; 10 | system(cmd.c_str()); 11 | #endif 12 | } 13 | 14 | void SettingsContainer::custom_ready() { 15 | set_margin_all(8); 16 | 17 | auto vbox_container = std::make_shared(); 18 | vbox_container->set_separation(8); 19 | add_child(vbox_container); 20 | 21 | { 22 | auto hbox_container = std::make_shared(); 23 | hbox_container->set_separation(8); 24 | vbox_container->add_child(hbox_container); 25 | 26 | auto label = std::make_shared(); 27 | label->set_text(FTR("lang") + ":"); 28 | hbox_container->add_child(label); 29 | 30 | auto lang_menu_button = std::make_shared(); 31 | 32 | lang_menu_button->container_sizing.expand_h = true; 33 | lang_menu_button->container_sizing.flag_h = revector::ContainerSizingFlag::Fill; 34 | hbox_container->add_child(lang_menu_button); 35 | 36 | if (GuiInterface::Instance().locale_ == "en") { 37 | lang_menu_button->set_text("English"); 38 | } 39 | if (GuiInterface::Instance().locale_ == "zh") { 40 | lang_menu_button->set_text("中文"); 41 | } 42 | if (GuiInterface::Instance().locale_ == "ru") { 43 | lang_menu_button->set_text("Русский"); 44 | } 45 | 46 | auto menu = lang_menu_button->get_popup_menu().lock(); 47 | 48 | menu->create_item("English"); 49 | menu->create_item("中文"); 50 | menu->create_item("Русский"); 51 | 52 | auto callback = [this](uint32_t item_index) { 53 | GuiInterface::Instance().set_locale("en"); 54 | 55 | if (item_index == 1) { 56 | GuiInterface::Instance().set_locale("zh"); 57 | } 58 | if (item_index == 2) { 59 | GuiInterface::Instance().set_locale("ru"); 60 | } 61 | }; 62 | lang_menu_button->connect_signal("item_selected", callback); 63 | } 64 | 65 | // #ifdef _WIN32 66 | 67 | { 68 | auto open_capture_folder_button = std::make_shared(); 69 | 70 | open_capture_folder_button->container_sizing.expand_h = true; 71 | open_capture_folder_button->container_sizing.flag_h = revector::ContainerSizingFlag::Fill; 72 | vbox_container->add_child(open_capture_folder_button); 73 | open_capture_folder_button->set_text(FTR("open capture folder")); 74 | 75 | auto callback = [this]() { open_explorer(GuiInterface::GetCaptureDir()); }; 76 | open_capture_folder_button->connect_signal("pressed", callback); 77 | } 78 | 79 | { 80 | auto open_appdata_button = std::make_shared(); 81 | 82 | open_appdata_button->container_sizing.expand_h = true; 83 | open_appdata_button->container_sizing.flag_h = revector::ContainerSizingFlag::Fill; 84 | vbox_container->add_child(open_appdata_button); 85 | open_appdata_button->set_text(FTR("open appdata folder")); 86 | 87 | auto callback = [this]() { open_explorer(GuiInterface::GetAppDataDir()); }; 88 | open_appdata_button->connect_signal("pressed", 
callback); 89 | } 90 | 91 | #ifdef _WIN32 92 | { 93 | auto open_crash_dumps_button = std::make_shared(); 94 | 95 | open_crash_dumps_button->container_sizing.expand_h = true; 96 | open_crash_dumps_button->container_sizing.flag_h = revector::ContainerSizingFlag::Fill; 97 | vbox_container->add_child(open_crash_dumps_button); 98 | open_crash_dumps_button->set_text(FTR("open crash dump folder")); 99 | 100 | auto callback = [this] { 101 | auto dir = GuiInterface::GetAppDataDir(); 102 | auto path = std::filesystem::path(dir).parent_path().parent_path().parent_path(); 103 | auto appdata_local = path.string() + "\\Local"; 104 | auto dumps_dir = appdata_local + "\\CrashDumps"; 105 | 106 | if (std::filesystem::exists(dumps_dir)) { 107 | open_explorer(dumps_dir); 108 | } 109 | }; 110 | open_crash_dumps_button->connect_signal("pressed", callback); 111 | } 112 | 113 | { 114 | auto show_console_btn = std::make_shared(); 115 | 116 | show_console_btn->container_sizing.expand_h = true; 117 | show_console_btn->container_sizing.flag_h = revector::ContainerSizingFlag::Fill; 118 | vbox_container->add_child(show_console_btn); 119 | show_console_btn->set_text(FTR("show console")); 120 | show_console_btn->set_toggle_mode(true); 121 | 122 | auto callback = [this](bool toggled) { 123 | if (toggled) { 124 | ShowWindow(GetConsoleWindow(), SW_RESTORE); 125 | } else { 126 | ShowWindow(GetConsoleWindow(), SW_HIDE); 127 | } 128 | }; 129 | show_console_btn->connect_signal("toggled", callback); 130 | } 131 | #endif 132 | 133 | { 134 | auto version_label = std::make_shared(); 135 | 136 | version_label->container_sizing.expand_h = true; 137 | version_label->container_sizing.flag_h = revector::ContainerSizingFlag::Fill; 138 | vbox_container->add_child(version_label); 139 | version_label->set_text("Version " + AVIATEUR_VERSION); 140 | } 141 | } 142 | -------------------------------------------------------------------------------- /src/gui/settings_tab.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include "../gui_interface.h" 4 | #include "app.h" 5 | 6 | class SettingsContainer : public revector::MarginContainer { 7 | void custom_ready() override; 8 | }; 9 | -------------------------------------------------------------------------------- /src/gui/tip_label.cpp: -------------------------------------------------------------------------------- 1 | #include "tip_label.h" 2 | 3 | void TipLabel::custom_ready() { 4 | set_font_size(48); 5 | 6 | auto style_box = revector::StyleBox(); 7 | style_box.bg_color = revector::ColorU(50, 50, 50, 200); 8 | style_box.corner_radius = 8; 9 | theme_background = style_box; 10 | 11 | set_text_style(revector::TextStyle{revector::ColorU(201, 79, 79)}); 12 | 13 | display_timer = std::make_shared(); 14 | fade_timer = std::make_shared(); 15 | 16 | add_child(display_timer); 17 | add_child(fade_timer); 18 | 19 | auto callback = [this] { fade_timer->start_timer(fade_time); }; 20 | display_timer->connect_signal("timeout", callback); 21 | 22 | auto callback2 = [this] { set_visibility(false); }; 23 | fade_timer->connect_signal("timeout", callback2); 24 | } 25 | 26 | void TipLabel::custom_update(double dt) { 27 | if (!fade_timer->is_stopped()) { 28 | alpha = fade_timer->get_remaining_time() / fade_time; 29 | } 30 | } 31 | 32 | void TipLabel::show_tip(const std::string& tip) { 33 | if (!display_timer->is_stopped()) { 34 | display_timer->stop(); 35 | } 36 | if (!fade_timer->is_stopped()) { 37 | fade_timer->stop(); 38 | } 39 | set_text(tip); 40 | 
set_visibility(true); 41 | alpha = 1; 42 | display_timer->start_timer(display_time); 43 | } 44 | -------------------------------------------------------------------------------- /src/gui/tip_label.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include "app.h" 4 | 5 | class TipLabel : public revector::Label { 6 | public: 7 | float display_time = 1.5; 8 | float fade_time = 0.5; 9 | 10 | std::shared_ptr display_timer; 11 | std::shared_ptr fade_timer; 12 | 13 | void custom_ready() override; 14 | 15 | void custom_update(double dt) override; 16 | 17 | void show_tip(const std::string& tip); 18 | }; 19 | -------------------------------------------------------------------------------- /src/gui_interface.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | #include 5 | #include 6 | 7 | #include 8 | #include 9 | #include 10 | #include 11 | 12 | #ifdef __linux__ 13 | #include 14 | #include 15 | #endif 16 | 17 | #include "app.h" 18 | #include "wifi/WfbReceiver.h" 19 | 20 | #define CONFIG_FILE "config.ini" 21 | 22 | #define CONFIG_ADAPTER "adapter" 23 | #define ADAPTER_DEVICE "pid_vid" 24 | #define ADAPTER_CHANNEL "channel" 25 | #define ADAPTER_CHANNEL_WIDTH_MODE "channel_width_mode" 26 | #define ADAPTER_CHANNEL_KEY "key" 27 | #define ADAPTER_CHANNEL_CODEC "codec" 28 | 29 | #define CONFIG_STREAMING "streaming" 30 | #define CONFIG_STREAMING_URL "url" 31 | 32 | #define CONFIG_GUI "gui" 33 | #define CONFIG_GUI_LANG "language" 34 | 35 | #define DEFAULT_PORT 52356 36 | 37 | constexpr auto LOGGER_MODULE = "Aviateur"; 38 | 39 | const revector::ColorU GREEN = revector::ColorU(78, 135, 82); 40 | const revector::ColorU RED = revector::ColorU(201, 79, 79); 41 | const revector::ColorU YELLOW = revector::ColorU(255, 201, 14); 42 | 43 | /// Channels. 44 | constexpr std::array CHANNELS{ 45 | 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 32, 36, 40, 44, 48, 52, 56, 60, 64, 46 | 68, 96, 100, 104, 108, 112, 116, 120, 124, 128, 132, 136, 140, 144, 149, 153, 157, 161, 165, 169, 173, 177, 47 | }; 48 | 49 | /// Channel widths. 50 | constexpr std::array CHANNEL_WIDTHS{ 51 | "20", 52 | "40", 53 | }; 54 | 55 | enum class LogLevel { 56 | Info, 57 | Debug, 58 | Warn, 59 | Error, 60 | }; 61 | 62 | struct RxStatus { 63 | uint8_t rssi[2]; 64 | int8_t snr[2]; 65 | }; 66 | 67 | class GuiInterface { 68 | public: 69 | static GuiInterface &Instance() { 70 | static GuiInterface interface; 71 | return interface; 72 | } 73 | 74 | explicit GuiInterface() { 75 | #ifdef _WIN32 76 | ShowWindow(GetConsoleWindow(), SW_HIDE); // SW_RESTORE to bring back 77 | 78 | // Windows crash dump 79 | SetUnhandledExceptionFilter(UnhandledExceptionFilter); 80 | #endif 81 | 82 | // Redirect standard output to a file 83 | // DO NOT USE WHEN DEPLOYING, AS IT WILL CRASH THE APP ON USER PCs. 
84 | // freopen((GetAppDataDir() + std::string("last_run_log.txt")).c_str(), "w", stdout); 85 | 86 | // Set up loggers 87 | { 88 | // revector::Logger::set_default_level(revector::Logger::Level::Info); 89 | revector::Logger::set_module_level("revector", revector::Logger::Level::Info); 90 | revector::Logger::set_module_level(LOGGER_MODULE, revector::Logger::Level::Info); 91 | 92 | auto logCallback = [](LogLevel level, std::string msg) { 93 | switch (level) { 94 | case LogLevel::Info: { 95 | revector::Logger::info(msg, LOGGER_MODULE); 96 | } break; 97 | case LogLevel::Debug: { 98 | revector::Logger::debug(msg, LOGGER_MODULE); 99 | } break; 100 | case LogLevel::Warn: { 101 | revector::Logger::warn(msg, LOGGER_MODULE); 102 | } break; 103 | case LogLevel::Error: { 104 | revector::Logger::error(msg, LOGGER_MODULE); 105 | } break; 106 | default:; 107 | } 108 | }; 109 | logCallbacks.emplace_back(logCallback); 110 | } 111 | 112 | auto dir = GetAppDataDir(); 113 | 114 | // Load config. 115 | mINI::INIFile file(dir + CONFIG_FILE); 116 | bool readSuccess = file.read(ini_); 117 | 118 | if (!readSuccess) { 119 | ini_[CONFIG_ADAPTER][ADAPTER_DEVICE] = ""; 120 | ini_[CONFIG_ADAPTER][ADAPTER_CHANNEL] = "161"; 121 | ini_[CONFIG_ADAPTER][ADAPTER_CHANNEL_WIDTH_MODE] = "0"; 122 | ini_[CONFIG_ADAPTER][ADAPTER_CHANNEL_KEY] = ""; 123 | ini_[CONFIG_ADAPTER][ADAPTER_CHANNEL_CODEC] = "AUTO"; 124 | 125 | ini_[CONFIG_STREAMING][CONFIG_STREAMING_URL] = "udp://239.0.0.1:1234"; 126 | 127 | ini_[CONFIG_GUI][CONFIG_GUI_LANG] = "en"; 128 | } else { 129 | set_locale(ini_[CONFIG_GUI][CONFIG_GUI_LANG]); 130 | } 131 | } 132 | 133 | ~GuiInterface() { 134 | SaveConfig(); 135 | } 136 | 137 | static std::vector GetDeviceList() { 138 | return WfbReceiver::GetDeviceList(); 139 | } 140 | 141 | static std::string GetAppDataDir() { 142 | #ifdef _WIN32 143 | auto dir = std::string(getenv("APPDATA")) + "\\Aviateur\\"; 144 | #elif defined(__linux__) 145 | passwd *pw = getpwuid(getuid()); 146 | const char *home_dir = pw->pw_dir; 147 | auto dir = std::string(home_dir) + "/aviateur/"; 148 | #endif 149 | return dir; 150 | } 151 | 152 | static std::string GetCaptureDir() { 153 | #ifdef _WIN32 154 | auto dir = std::string(getenv("USERPROFILE")) + R"(\Videos\Aviateur Captures\)"; 155 | #else 156 | passwd *pw = getpwuid(getuid()); 157 | const char *home_dir = pw->pw_dir; 158 | auto dir = std::string(home_dir) + "/Pictures/Aviateur Captures/"; 159 | #endif 160 | return dir; 161 | } 162 | 163 | static bool SaveConfig() { 164 | // For clearing obsolete entries. 
165 | // Instance().ini_.clear(); 166 | 167 | Instance().ini_[CONFIG_GUI][CONFIG_GUI_LANG] = Instance().locale_; 168 | 169 | auto dir = GetAppDataDir(); 170 | 171 | try { 172 | if (!std::filesystem::exists(dir)) { 173 | std::filesystem::create_directories(dir); 174 | } 175 | } catch (const std::exception &e) { 176 | std::cerr << e.what() << std::endl; 177 | } 178 | 179 | mINI::INIFile file(dir + std::string(CONFIG_FILE)); 180 | bool writeSuccess = file.write(Instance().ini_, true); 181 | 182 | return writeSuccess; 183 | } 184 | 185 | static bool Start(const DeviceId &deviceId, 186 | int channel, 187 | int channelWidthMode, 188 | std::string keyPath, 189 | const std::string &codec) { 190 | Instance().ini_[CONFIG_ADAPTER][ADAPTER_DEVICE] = deviceId.display_name; 191 | Instance().ini_[CONFIG_ADAPTER][ADAPTER_CHANNEL] = std::to_string(channel); 192 | Instance().ini_[CONFIG_ADAPTER][ADAPTER_CHANNEL_WIDTH_MODE] = std::to_string(channelWidthMode); 193 | Instance().ini_[CONFIG_ADAPTER][ADAPTER_CHANNEL_KEY] = keyPath; 194 | Instance().ini_[CONFIG_ADAPTER][ADAPTER_CHANNEL_CODEC] = codec; 195 | 196 | // Set port. 197 | Instance().playerPort = GetFreePort(DEFAULT_PORT); 198 | Instance().PutLog(LogLevel::Info, "Using port: {}", Instance().playerPort); 199 | 200 | Instance().playerCodec = codec; 201 | 202 | // If no custom key provided by the user, use the default key. 203 | if (keyPath.empty()) { 204 | keyPath = revector::get_asset_dir("gs.key"); 205 | Instance().PutLog(LogLevel::Info, "Using GS key: {}", keyPath); 206 | } 207 | return WfbReceiver::Instance().Start(deviceId, channel, channelWidthMode, keyPath); 208 | } 209 | 210 | static bool Stop() { 211 | WfbReceiver::Instance().Stop(); 212 | return true; 213 | } 214 | 215 | static void BuildSdp(const std::string &filePath, const std::string &codec, int payloadType, int port) { 216 | auto absolutePath = std::filesystem::absolute(filePath); 217 | std::string dirPath = absolutePath.parent_path().string(); 218 | 219 | try { 220 | if (!std::filesystem::exists(dirPath)) { 221 | std::filesystem::create_directories(dirPath); 222 | } 223 | } catch (const std::exception &e) { 224 | std::cerr << e.what() << std::endl; 225 | } 226 | 227 | std::ofstream sdpFos(filePath); 228 | sdpFos << "v=0\n"; 229 | sdpFos << "o=- 0 0 IN IP4 127.0.0.1\n"; 230 | sdpFos << "s=No Name\n"; 231 | sdpFos << "c=IN IP4 127.0.0.1\n"; 232 | sdpFos << "t=0 0\n"; 233 | sdpFos << "m=video " << port << " RTP/AVP " << payloadType << "\n"; 234 | sdpFos << "a=rtpmap:" << payloadType << " " << codec << "/90000\n"; 235 | sdpFos.flush(); 236 | sdpFos.close(); 237 | 238 | Instance().PutLog( 239 | LogLevel::Debug, 240 | "Build SDP: Codec:" + codec + " PT:" + std::to_string(payloadType) + " Port:" + std::to_string(port)); 241 | } 242 | 243 | template 244 | void PutLog(LogLevel level, const std::string_view message, Args... format_items) { 245 | std::string str = std::vformat(message, std::make_format_args(format_items...)); 246 | EmitLog(level, str); 247 | } 248 | 249 | int NotifyRtpStream(int pt, uint16_t ssrc) { 250 | // Get free port. 
251 | std::string sdpFile = "sdp/sdp" + std::to_string(playerPort) + ".sdp"; 252 | 253 | BuildSdp(sdpFile, playerCodec, pt, playerPort); 254 | 255 | EmitRtpStream(sdpFile); 256 | 257 | return Instance().playerPort; 258 | } 259 | 260 | void UpdateCount() { 261 | EmitWifiFrameCountUpdated(wifiFrameCount_); 262 | EmitWfbFrameCountUpdated(wfbFrameCount_); 263 | EmitRtpPktCountUpdated(rtpPktCount_); 264 | } 265 | 266 | long long GetWfbFrameCount() const { 267 | return wfbFrameCount_; 268 | } 269 | long long GetRtpPktCount() const { 270 | return rtpPktCount_; 271 | } 272 | long long GetWifiFrameCount() const { 273 | return wifiFrameCount_; 274 | } 275 | 276 | int GetPlayerPort() const { 277 | return playerPort; 278 | } 279 | std::string GetPlayerCodec() const { 280 | return playerCodec; 281 | } 282 | 283 | static int GetFreePort(int start_port) { 284 | #ifdef _WIN32 285 | // Declare some variables 286 | WSADATA wsaData; 287 | 288 | int free_port = 0; 289 | 290 | int iResult = 0; // used to return function results 291 | 292 | // the listening socket to be created 293 | SOCKET soc = INVALID_SOCKET; 294 | 295 | //---------------------- 296 | // Initialize Winsock 297 | iResult = WSAStartup(MAKEWORD(2, 2), &wsaData); 298 | if (iResult != NO_ERROR) { 299 | wprintf(L"Error at WSAStartup()\n"); 300 | return 0; 301 | } 302 | 303 | // Create a SOCKET for listening for incoming connection requests 304 | soc = socket(AF_INET, SOCK_DGRAM, 0); 305 | if (soc == INVALID_SOCKET) { 306 | wprintf(L"socket function failed with error: %u\n", WSAGetLastError()); 307 | WSACleanup(); 308 | return 0; 309 | } 310 | 311 | for (int port = start_port; port < start_port + 200; ++port) { 312 | // The sockaddr_in structure specifies the address family, 313 | // IP address, and port for the socket that is being bound. 314 | sockaddr_in sin; 315 | sin.sin_family = AF_INET; 316 | sin.sin_addr.s_addr = inet_addr("0.0.0.0"); 317 | sin.sin_port = htons(port); 318 | 319 | // Bind the socket. 320 | iResult = bind(soc, (sockaddr *)&sin, sizeof(sin)); 321 | if (iResult == SOCKET_ERROR) { 322 | Instance().PutLog(LogLevel::Info, "bind failed with error {}", WSAGetLastError()); 323 | } else { 324 | free_port = port; 325 | break; 326 | } 327 | } 328 | 329 | closesocket(soc); 330 | WSACleanup(); 331 | 332 | return free_port; 333 | #else 334 | return start_port; 335 | #endif 336 | } 337 | 338 | void set_locale(std::string locale) { 339 | locale_ = locale; 340 | revector::TranslationServer::get_singleton()->set_locale(locale_); 341 | } 342 | 343 | mINI::INIStructure ini_; 344 | 345 | std::string locale_ = "en"; 346 | 347 | long long wfbFrameCount_ = 0; 348 | long long wifiFrameCount_ = 0; 349 | long long rtpPktCount_ = 0; 350 | int playerPort = 0; 351 | std::string playerCodec; 352 | 353 | bool config_file_exists = true; 354 | 355 | RxStatus rx_status_{}; 356 | 357 | // Signals. 
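// These callback vectors act as lightweight multicast signals: the Emit* helpers
// below simply iterate each list and invoke every registered handler, catching
// std::bad_any_cast so a handler registered with a mismatched signature is skipped
// (or logged) rather than crashing the emitter.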
358 | std::vector> logCallbacks; 359 | std::vector> tipCallbacks; 360 | std::vector> wifiStopCallbacks; 361 | std::vector> wifiFrameCountCallbacks; 362 | std::vector> wfbFrameCountCallbacks; 363 | std::vector> rtpPktCountCallbacks; 364 | std::vector> rtpStreamCallbacks; 365 | std::vector> bitrateUpdateCallbacks; 366 | std::vector> decoderReadyCallbacks; 367 | 368 | std::vector> urlStreamShouldStopCallbacks; 369 | 370 | void EmitLog(LogLevel level, std::string msg) { 371 | for (auto &callback : logCallbacks) { 372 | try { 373 | callback.operator()(std::move(level), std::move(msg)); 374 | } catch (std::bad_any_cast &) { 375 | } 376 | } 377 | } 378 | 379 | void ShowTip(std::string msg) { 380 | for (auto &callback : tipCallbacks) { 381 | try { 382 | callback.operator()(std::move(msg)); 383 | } catch (std::bad_any_cast &) { 384 | } 385 | } 386 | } 387 | 388 | void EmitWifiStopped() { 389 | for (auto &callback : wifiStopCallbacks) { 390 | try { 391 | callback(); 392 | } catch (std::bad_any_cast &) { 393 | Instance().PutLog(LogLevel::Error, "Mismatched signal argument types!"); 394 | } 395 | } 396 | } 397 | 398 | void EmitWifiFrameCountUpdated(long long count) { 399 | for (auto &callback : wifiFrameCountCallbacks) { 400 | try { 401 | callback.operator()(std::move(count)); 402 | } catch (std::bad_any_cast &) { 403 | Instance().PutLog(LogLevel::Error, "Mismatched signal argument types!"); 404 | } 405 | } 406 | } 407 | 408 | void EmitWfbFrameCountUpdated(long long count) { 409 | for (auto &callback : wfbFrameCountCallbacks) { 410 | try { 411 | callback.operator()(std::move(count)); 412 | } catch (std::bad_any_cast &) { 413 | Instance().PutLog(LogLevel::Error, "Mismatched signal argument types!"); 414 | } 415 | } 416 | } 417 | 418 | void EmitRtpPktCountUpdated(long long count) { 419 | for (auto &callback : rtpPktCountCallbacks) { 420 | try { 421 | callback.operator()(std::move(count)); 422 | } catch (std::bad_any_cast &) { 423 | Instance().PutLog(LogLevel::Error, "Mismatched signal argument types!"); 424 | } 425 | } 426 | } 427 | 428 | void EmitRtpStream(std::string sdp) { 429 | for (auto &callback : rtpStreamCallbacks) { 430 | try { 431 | callback.operator()(std::move(sdp)); 432 | } catch (std::bad_any_cast &) { 433 | Instance().PutLog(LogLevel::Error, "Mismatched signal argument types!"); 434 | } 435 | } 436 | } 437 | 438 | void EmitBitrateUpdate(uint64_t bitrate) { 439 | for (auto &callback : bitrateUpdateCallbacks) { 440 | try { 441 | callback.operator()(std::move(bitrate)); 442 | } catch (std::bad_any_cast &) { 443 | Instance().PutLog(LogLevel::Error, "Mismatched signal argument types!"); 444 | } 445 | } 446 | } 447 | 448 | void EmitDecoderReady(uint32_t width, uint32_t height, float videoFps) { 449 | for (auto &callback : decoderReadyCallbacks) { 450 | try { 451 | callback.operator()(std::move(width), 452 | std::move(height), 453 | std::move(videoFps)); 454 | } catch (std::bad_any_cast &) { 455 | Instance().PutLog(LogLevel::Error, "Mismatched signal argument types!"); 456 | } 457 | } 458 | } 459 | 460 | void EmitUrlStreamShouldStop() { 461 | for (auto &callback : urlStreamShouldStopCallbacks) { 462 | try { 463 | callback.operator()<>(); 464 | } catch (std::bad_any_cast &) { 465 | Instance().PutLog(LogLevel::Error, "Mismatched signal argument types!"); 466 | } 467 | } 468 | } 469 | }; 470 | -------------------------------------------------------------------------------- /src/main.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | 
#include 4 | 5 | #include "app.h" 6 | #include "gui/control_panel.h" 7 | #include "gui/player_rect.h" 8 | #include "gui_interface.h" 9 | #include "wifi/WfbReceiver.h" 10 | 11 | static revector::App* app; 12 | 13 | int main() { 14 | GuiInterface::Instance().PutLog(LogLevel::Info, "App started"); 15 | 16 | app = new revector::App({1280, 720}); 17 | app->set_window_title("Aviateur - OpenIPC FPV Ground Station"); 18 | 19 | GuiInterface::Instance().PutLog(LogLevel::Info, "revector app created"); 20 | 21 | revector::TranslationServer::get_singleton()->load_translations(revector::get_asset_dir("translations.csv")); 22 | 23 | auto font = std::make_shared(revector::get_asset_dir("NotoSansSC-Regular.ttf")); 24 | revector::DefaultResource::get_singleton()->set_default_font(font); 25 | 26 | // Initialize the default libusb context. 27 | int rc = libusb_init(nullptr); 28 | 29 | auto hbox_container = std::make_shared(); 30 | hbox_container->set_separation(2); 31 | hbox_container->set_anchor_flag(revector::AnchorFlag::FullRect); 32 | app->get_tree_root()->add_child(hbox_container); 33 | 34 | auto player_rect = std::make_shared(); 35 | player_rect->container_sizing.expand_h = true; 36 | player_rect->container_sizing.expand_v = true; 37 | player_rect->container_sizing.flag_h = revector::ContainerSizingFlag::Fill; 38 | player_rect->container_sizing.flag_v = revector::ContainerSizingFlag::Fill; 39 | hbox_container->add_child(player_rect); 40 | 41 | auto control_panel = std::make_shared(); 42 | control_panel->set_custom_minimum_size({280, 0}); 43 | control_panel->container_sizing.expand_v = true; 44 | control_panel->container_sizing.flag_v = revector::ContainerSizingFlag::Fill; 45 | hbox_container->add_child(control_panel); 46 | 47 | std::weak_ptr control_panel_weak = control_panel; 48 | std::weak_ptr player_rect_weak = player_rect; 49 | 50 | auto onWifiStop = [control_panel_weak, player_rect_weak] { 51 | if (!control_panel_weak.expired() && !player_rect_weak.expired()) { 52 | player_rect_weak.lock()->stop_playing(); 53 | player_rect_weak.lock()->show_red_tip(FTR("wi-fi stopped msg")); 54 | control_panel_weak.lock()->update_adapter_start_button_looking(true); 55 | } 56 | }; 57 | GuiInterface::Instance().wifiStopCallbacks.emplace_back(onWifiStop); 58 | 59 | { 60 | player_rect->top_control_container = std::make_shared(); 61 | player_rect->top_control_container->set_anchor_flag(revector::AnchorFlag::TopLeft); 62 | player_rect->add_child(player_rect->top_control_container); 63 | 64 | player_rect->fullscreen_button_ = std::make_shared(); 65 | player_rect->top_control_container->add_child(player_rect->fullscreen_button_); 66 | player_rect->fullscreen_button_->set_text(FTR("fullscreen") + " (F11)"); 67 | 68 | auto callback = [control_panel_weak](bool toggled) { 69 | if (!control_panel_weak.expired()) { 70 | app->set_fullscreen(toggled); 71 | } 72 | }; 73 | player_rect->fullscreen_button_->connect_signal("toggled", callback); 74 | 75 | auto control_panel_button = std::make_shared(); 76 | player_rect->top_control_container->add_child(control_panel_button); 77 | control_panel_button->set_text(FTR("control panel")); 78 | control_panel_button->press(); 79 | 80 | auto callback2 = [control_panel_weak](bool toggled) { 81 | if (!control_panel_weak.expired()) { 82 | control_panel_weak.lock()->set_visibility(toggled); 83 | } 84 | }; 85 | control_panel_button->connect_signal("toggled", callback2); 86 | } 87 | 88 | GuiInterface::Instance().PutLog(LogLevel::Info, "Entering app main loop"); 89 | 90 | app->main_loop(); 91 | 92 | // 
Quit app.
93 |     delete app;
94 |     app = nullptr;
95 |
96 |     libusb_exit(nullptr);
97 |
98 |     return EXIT_SUCCESS;
99 | }
100 |
--------------------------------------------------------------------------------
/src/player/GifEncoder.cpp:
--------------------------------------------------------------------------------
1 | //
2 | // Created by liangzhuohua on 2022/4/22.
3 | //
4 |
5 | #include "GifEncoder.h"
6 |
7 | #include <chrono>
8 |
9 | bool GifEncoder::open(int width, int height, AVPixelFormat pixelFormat, int frameRate, const std::string &outputPath) {
10 |     _formatCtx = std::shared_ptr<AVFormatContext>(avformat_alloc_context(), &avformat_free_context);
11 |
12 |     _formatCtx->oformat = av_guess_format("gif", nullptr, nullptr);
13 |
14 |     AVStream *pAVStream = avformat_new_stream(_formatCtx.get(), nullptr);
15 |     if (pAVStream == nullptr) {
16 |         return false;
17 |     }
18 |
19 |     const AVCodec *pCodec = avcodec_find_encoder(_formatCtx->oformat->video_codec);
20 |     if (!pCodec) {
21 |         return false;
22 |     }
23 |
24 |     _codecCtx = std::shared_ptr<AVCodecContext>(avcodec_alloc_context3(pCodec),
25 |                                                 [](AVCodecContext *ctx) { avcodec_free_context(&ctx); });
26 |     _frameRate = frameRate;
27 |     _codecCtx->codec_id = _formatCtx->oformat->video_codec;
28 |     _codecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
29 |     _codecCtx->pix_fmt = AV_PIX_FMT_RGB8;
30 |     _codecCtx->width = 640;
31 |     _codecCtx->height = (int)(640.0 * height / width);
32 |     _codecCtx->time_base = AVRational{1, frameRate};
33 |
34 |     // Create a color space converter when the source format differs from the GIF pixel format.
35 |     if (_codecCtx->pix_fmt != pixelFormat) {
36 |         // Color converter.
37 |         _imgConvertCtx = sws_getCachedContext(_imgConvertCtx,
38 |                                               width,
39 |                                               height,
40 |                                               pixelFormat,
41 |                                               _codecCtx->width,
42 |                                               _codecCtx->height,
43 |                                               _codecCtx->pix_fmt,
44 |                                               SWS_BICUBIC,
45 |                                               nullptr,
46 |                                               nullptr,
47 |                                               nullptr);
48 |         if (!_imgConvertCtx) {
49 |             return false;
50 |         }
51 |     }
52 |
53 |     if (avcodec_open2(_codecCtx.get(), pCodec, nullptr) < 0) {
54 |         return false;
55 |     }
56 |
57 |     avcodec_parameters_from_context(pAVStream->codecpar, _codecCtx.get());
58 |
59 |     if (avio_open(&_formatCtx->pb, outputPath.c_str(), AVIO_FLAG_READ_WRITE) < 0) {
60 |         return false;
61 |     }
62 |
63 |     if (avformat_write_header(_formatCtx.get(), nullptr) < 0) {
64 |         return false;
65 |     }
66 |     _opened = true;
67 |
68 |     _saveFilePath = outputPath;
69 |
70 |     return true;
71 | }
72 |
73 | bool GifEncoder::encodeFrame(const std::shared_ptr<AVFrame> &frame) {
74 |     if (!_opened) {
75 |         return false;
76 |     }
77 |
78 |     std::lock_guard<std::mutex> lck(_encodeMtx);
79 |
80 |     // Convert format.
81 |     if (_codecCtx->pix_fmt != frame->format) {
82 |         // Allocate a temporary frame.
83 |         if (!_tmpFrame) {
84 |             _tmpFrame = std::shared_ptr<AVFrame>(av_frame_alloc(), [](AVFrame *f) { av_frame_free(&f); });
85 |             if (!_tmpFrame) {
86 |                 return false;
87 |             }
88 |             _tmpFrame->width = _codecCtx->width;
89 |             _tmpFrame->height = _codecCtx->height;
90 |             _tmpFrame->format = _codecCtx->pix_fmt;
91 |             int size = av_image_get_buffer_size(_codecCtx->pix_fmt, _codecCtx->width, _codecCtx->height, 1);
92 |             _buff.resize(size);
93 |             int ret = av_image_fill_arrays(_tmpFrame->data,
94 |                                            _tmpFrame->linesize,
95 |                                            _buff.data(),
96 |                                            _codecCtx->pix_fmt,
97 |                                            _codecCtx->width,
98 |                                            _codecCtx->height,
99 |                                            1);
100 |             if (ret < 0) {
101 |                 return false;
102 |             }
103 |         }
104 |         // Convert to the color format and size required by the GIF encoder.
105 |         int h = sws_scale(_imgConvertCtx,
106 |                           frame->data,
107 |                           frame->linesize,
108 |                           0,
109 |                           frame->height,
110 |                           _tmpFrame->data,
111 |                           _tmpFrame->linesize);
112 |         if (h != _codecCtx->height) {
113 |             return false;
114 |         }
115 |     }
116 |
117 |     // Packet size.
118 |     int size = _codecCtx->width * _codecCtx->height;
119 |
120 |     // Allocate a packet.
121 |
122 |     std::shared_ptr<AVPacket> pkt =
123 |         std::shared_ptr<AVPacket>(av_packet_alloc(), [](AVPacket *pkt) { av_packet_free(&pkt); });
124 |     av_new_packet(pkt.get(), size);
125 |
126 |     // Record the time of this encode.
127 |     _lastEncodeTime =
128 |         std::chrono::duration_cast<std::chrono::milliseconds>(std::chrono::system_clock::now().time_since_epoch())
129 |             .count();
130 |
131 |     // Send the frame to the encoder context.
132 |     int ret = avcodec_send_frame(_codecCtx.get(), _tmpFrame.get());
133 |     if (ret < 0) {
134 |         return false;
135 |     }
136 |
137 |     // Fetch the encoded packet.
138 |     avcodec_receive_packet(_codecCtx.get(), pkt.get());
139 |
140 |     // Write it to the output file.
141 |     av_write_frame(_formatCtx.get(), pkt.get());
142 |
143 |     return true;
144 | }
145 |
146 | std::string GifEncoder::close() {
147 |     std::lock_guard<std::mutex> lck(_encodeMtx);
148 |
149 |     if (!_opened) {
150 |         return "";
151 |     }
152 |
153 |     if (_formatCtx) {
154 |         av_write_trailer(_formatCtx.get());
155 |     }
156 |
157 |     if (_codecCtx) {
158 |         _codecCtx.reset();
159 |     }
160 |
161 |     // Close file.
162 |     avio_close(_formatCtx->pb);
163 |     _opened = false;
164 |
165 |     return _saveFilePath;
166 | }
167 |
168 | GifEncoder::~GifEncoder() {
169 |     close();
170 | }
171 |
172 | bool GifEncoder::isOpened() {
173 |     std::lock_guard<std::mutex> lck(_encodeMtx);
174 |     return _opened;
175 | }
176 |
--------------------------------------------------------------------------------
/src/player/GifEncoder.h:
--------------------------------------------------------------------------------
1 | //
2 | // Created by liangzhuohua on 2022/4/22.
3 | //
4 |
5 | #pragma once
6 |
7 | #include <memory>
8 | #include <mutex>
9 | #include <string>
10 | #include <vector>
11 |
12 | #include "ffmpegInclude.h"
13 |
14 | class GifEncoder {
15 | public:
16 |     ~GifEncoder();
17 |
18 |     bool open(int width, int height, AVPixelFormat pixelFormat, int frameRate, const std::string &outputPath);
19 |
20 |     bool encodeFrame(const std::shared_ptr<AVFrame> &frame);
21 |
22 |     std::string close();
23 |
24 |     int getFrameRate() const {
25 |         return _frameRate;
26 |     }
27 |
28 |     uint64_t getLastEncodeTime() const {
29 |         return _lastEncodeTime;
30 |     }
31 |
32 |     bool isOpened();
33 |
34 |     std::string _saveFilePath;
35 |
36 | protected:
37 |     std::mutex _encodeMtx;
38 |
39 |     std::shared_ptr<AVFormatContext> _formatCtx;
40 |
41 |     std::shared_ptr<AVCodecContext> _codecCtx;
42 |     // Color space converter.
43 |     SwsContext *_imgConvertCtx{};
44 |     // Temporary frame used for color conversion.
45 |     std::shared_ptr<AVFrame> _tmpFrame;
46 |     std::vector<uint8_t> _buff;
47 |
48 |     uint64_t _lastEncodeTime = 0;
49 |
50 |     int _frameRate = 0;
51 |
52 |     volatile bool _opened = false;
53 | };
54 |
--------------------------------------------------------------------------------
/src/player/JpegEncoder.cpp:
--------------------------------------------------------------------------------
1 | //
2 | // Created by liangzhuohua on 2022/2/28.
3 | // 4 | 5 | #include "JpegEncoder.h" 6 | 7 | #include 8 | inline bool convertToYUV420P(const std::shared_ptr &frame, std::shared_ptr &yuvFrame) { 9 | int width = frame->width; 10 | int height = frame->height; 11 | 12 | // Allocate YUV frame 13 | yuvFrame = std::shared_ptr(av_frame_alloc(), [](AVFrame *f) { av_frame_free(&f); }); 14 | if (!yuvFrame) { 15 | return false; 16 | } 17 | yuvFrame->format = AV_PIX_FMT_YUVJ420P; 18 | yuvFrame->width = width; 19 | yuvFrame->height = height; 20 | 21 | // Allocate buffer for YUV frame 22 | int ret = av_frame_get_buffer(yuvFrame.get(), 32); 23 | if (ret < 0) { 24 | return false; 25 | } 26 | 27 | // Convert RGB to YUV420P 28 | struct SwsContext *sws_ctx = sws_getContext(width, 29 | height, 30 | static_cast(frame->format), 31 | width, 32 | height, 33 | AV_PIX_FMT_YUVJ420P, 34 | 0, 35 | nullptr, 36 | nullptr, 37 | nullptr); 38 | if (!sws_ctx) { 39 | return false; 40 | } 41 | 42 | // Perform RGB to YUV conversion 43 | ret = sws_scale(sws_ctx, frame->data, frame->linesize, 0, height, yuvFrame->data, yuvFrame->linesize); 44 | if (ret <= 0) { 45 | sws_freeContext(sws_ctx); 46 | return false; 47 | } 48 | 49 | // Cleanup 50 | sws_freeContext(sws_ctx); 51 | 52 | return true; 53 | } 54 | 55 | bool JpegEncoder::encodeJpeg(const std::string &outFilePath, const std::shared_ptr &frame) { 56 | if (!(frame && frame->height && frame->width && frame->linesize[0])) { 57 | return false; 58 | } 59 | 60 | std::shared_ptr pFormatCtx = 61 | std::shared_ptr(avformat_alloc_context(), &avformat_free_context); 62 | 63 | pFormatCtx->oformat = av_guess_format("mjpeg", nullptr, nullptr); 64 | 65 | if (avio_open(&pFormatCtx->pb, outFilePath.c_str(), AVIO_FLAG_READ_WRITE) < 0) { 66 | return false; 67 | } 68 | 69 | AVStream *pAVStream = avformat_new_stream(pFormatCtx.get(), nullptr); 70 | if (pAVStream == nullptr) { 71 | return false; 72 | } 73 | 74 | const AVCodec *pCodec = avcodec_find_encoder(pFormatCtx->oformat->video_codec); 75 | if (!pCodec) { 76 | return false; 77 | } 78 | 79 | std::shared_ptr codecCtx = 80 | std::shared_ptr(avcodec_alloc_context3(pCodec), 81 | [](AVCodecContext *ctx) { avcodec_free_context(&ctx); }); 82 | codecCtx->codec_id = pFormatCtx->oformat->video_codec; 83 | codecCtx->codec_type = AVMEDIA_TYPE_VIDEO; 84 | codecCtx->pix_fmt = static_cast(frame->format); 85 | codecCtx->width = frame->width; 86 | codecCtx->height = frame->height; 87 | codecCtx->time_base = AVRational{1, 25}; 88 | 89 | // Convert frame to YUV420P if it's not already in that format 90 | std::shared_ptr yuvFrame; 91 | if (frame->format != AV_PIX_FMT_YUVJ420P && frame->format != AV_PIX_FMT_YUV420P) { 92 | if (!convertToYUV420P(frame, yuvFrame)) { 93 | return false; 94 | } 95 | codecCtx->pix_fmt = AV_PIX_FMT_YUVJ420P; 96 | } else { 97 | yuvFrame = frame; // If already YUV420P, use as is 98 | } 99 | 100 | if (avcodec_open2(codecCtx.get(), pCodec, nullptr) < 0) { 101 | return false; 102 | } 103 | 104 | avcodec_parameters_from_context(pAVStream->codecpar, codecCtx.get()); 105 | 106 | avformat_write_header(pFormatCtx.get(), nullptr); 107 | int y_size = codecCtx->width * codecCtx->height; 108 | 109 | // Resize packet 110 | std::shared_ptr pkt = 111 | std::shared_ptr(av_packet_alloc(), [](AVPacket *pkt) { av_packet_free(&pkt); }); 112 | av_new_packet(pkt.get(), y_size); 113 | 114 | int ret = avcodec_send_frame(codecCtx.get(), yuvFrame.get()); 115 | if (ret < 0) { 116 | return false; 117 | } 118 | 119 | avcodec_receive_packet(codecCtx.get(), pkt.get()); 120 | 121 | av_write_frame(pFormatCtx.get(), 
pkt.get());
122 |
123 |     av_write_trailer(pFormatCtx.get());
124 |
125 |     avio_close(pFormatCtx->pb);
126 |
127 |     return true;
128 | }
129 |
--------------------------------------------------------------------------------
/src/player/JpegEncoder.h:
--------------------------------------------------------------------------------
1 | //
2 | // Created by liangzhuohua on 2022/2/28.
3 | //
4 |
5 | #pragma once
6 |
7 | #include <memory>
8 | #include <string>
9 |
10 | #include "ffmpegInclude.h"
11 |
12 | class JpegEncoder {
13 | public:
14 |     static bool encodeJpeg(const std::string &outFilePath, const std::shared_ptr<AVFrame> &frame);
15 | };
16 |
--------------------------------------------------------------------------------
/src/player/Mp4Encoder.cpp:
--------------------------------------------------------------------------------
1 | //
2 | // Created by liangzhuohua on 2022/3/1.
3 | //
4 |
5 | #include "Mp4Encoder.h"
6 |
7 | Mp4Encoder::Mp4Encoder(const std::string &saveFilePath) {
8 |     formatCtx_ = std::shared_ptr<AVFormatContext>(avformat_alloc_context(), &avformat_free_context);
9 |
10 |     formatCtx_->oformat = av_guess_format("mov", nullptr, nullptr);
11 |
12 |     saveFilePath_ = saveFilePath;
13 | }
14 |
15 | Mp4Encoder::~Mp4Encoder() {
16 |     if (isOpen_) {
17 |         stop();
18 |     }
19 | }
20 |
21 | void Mp4Encoder::addTrack(AVStream *stream) {
22 |     AVStream *os = avformat_new_stream(formatCtx_.get(), nullptr);
23 |     if (!os) {
24 |         return;
25 |     }
26 |     int ret = avcodec_parameters_copy(os->codecpar, stream->codecpar);
27 |     if (ret < 0) {
28 |         return;
29 |     }
30 |     os->codecpar->codec_tag = 0;
31 |     if (stream->codecpar->codec_type == AVMEDIA_TYPE_AUDIO) {
32 |         audioIndex = os->index;
33 |         originAudioTimeBase_ = stream->time_base;
34 |     } else if (stream->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
35 |         videoIndex = os->index;
36 |         originVideoTimeBase_ = stream->time_base;
37 |     }
38 | }
39 |
40 | bool Mp4Encoder::start() {
41 |     // Initialize the output I/O context.
42 |     if (avio_open(&formatCtx_->pb, saveFilePath_.c_str(), AVIO_FLAG_READ_WRITE) < 0) {
43 |         return false;
44 |     }
45 |     // Write the output stream header.
46 |     AVDictionary *opts = nullptr;
47 |     av_dict_set(&opts, "movflags", "frag_keyframe+empty_moov", 0);
48 |     int ret = avformat_write_header(formatCtx_.get(), &opts);
49 |     if (ret < 0) {
50 |         return false;
51 |     }
52 |     isOpen_ = true;
53 |     return true;
54 | }
55 |
56 | void Mp4Encoder::writePacket(const std::shared_ptr<AVPacket> &pkt, bool isVideo) {
57 |     if (!isOpen_) {
58 |         return;
59 |     }
60 | #ifdef I_FRAME_FIRST
61 |     // Ignore audio until the first video keyframe has been written.
62 |     if (videoIndex >= 0 && !writtenKeyFrame_ && !isVideo) {
63 |         return;
64 |     }
65 |     // Skip non-keyframes so that the recording starts with a keyframe.
66 |     if (!writtenKeyFrame_ && !(pkt->flags & AV_PKT_FLAG_KEY)) {
67 |         return;
68 |     }
69 |     writtenKeyFrame_ = true;
70 | #endif
71 |     if (isVideo) {
72 |         pkt->stream_index = videoIndex;
73 |         av_packet_rescale_ts(pkt.get(), originVideoTimeBase_, formatCtx_->streams[videoIndex]->time_base);
74 |     } else {
75 |         pkt->stream_index = audioIndex;
76 |         av_packet_rescale_ts(pkt.get(), originAudioTimeBase_, formatCtx_->streams[audioIndex]->time_base);
77 |     }
78 |     pkt->pos = -1;
79 |     av_write_frame(formatCtx_.get(), pkt.get());
80 | }
81 |
82 | void Mp4Encoder::stop() {
83 |     isOpen_ = false;
84 |
85 |     av_write_trailer(formatCtx_.get());
86 |
87 |     avio_close(formatCtx_->pb);
88 | }
89 |
--------------------------------------------------------------------------------
/src/player/Mp4Encoder.h:
--------------------------------------------------------------------------------
1 | //
2 | // Created by liangzhuohua on 2022/3/1.
3 | // 4 | 5 | #pragma once 6 | 7 | #include "ffmpegInclude.h" 8 | #include 9 | #include 10 | 11 | class Mp4Encoder { 12 | public: 13 | explicit Mp4Encoder(const std::string &saveFilePath); 14 | ~Mp4Encoder(); 15 | 16 | bool start(); 17 | 18 | void stop(); 19 | 20 | void addTrack(AVStream *stream); 21 | 22 | void writePacket(const std::shared_ptr &pkt, bool isVideo); 23 | 24 | int videoIndex = -1; 25 | int audioIndex = -1; 26 | 27 | std::string saveFilePath_; 28 | 29 | private: 30 | // 是否已经初始化 31 | bool isOpen_ = false; 32 | // 编码上下文 33 | std::shared_ptr formatCtx_; 34 | // 原始视频流时间基 35 | AVRational originVideoTimeBase_ {}; 36 | // 原始音频流时间基 37 | AVRational originAudioTimeBase_ {}; 38 | // 已经写入关键帧 39 | bool writtenKeyFrame_ = false; 40 | }; 41 | -------------------------------------------------------------------------------- /src/player/RealTimePlayer.cpp: -------------------------------------------------------------------------------- 1 | #include "RealTimePlayer.h" 2 | 3 | #include 4 | #include 5 | 6 | #include 7 | #include 8 | 9 | #include "../gui_interface.h" 10 | #include "JpegEncoder.h" 11 | 12 | // GIF默认帧率 13 | #define DEFAULT_GIF_FRAMERATE 10 14 | 15 | RealTimePlayer::RealTimePlayer(std::shared_ptr device, std::shared_ptr queue) { 16 | yuvRenderer_ = std::make_shared(device, queue); 17 | yuvRenderer_->init(); 18 | 19 | // If the decoder fails, try to replay. 20 | connectionLostCallbacks.push_back([this] { 21 | stop(); 22 | play(url, forceSoftwareDecoding_); 23 | }); 24 | 25 | SDL_Init(SDL_INIT_AUDIO); 26 | } 27 | 28 | void RealTimePlayer::update(float dt) { 29 | if (playStop) { 30 | return; 31 | } 32 | 33 | if (infoChanged_) { 34 | yuvRenderer_->updateTextureInfo(videoWidth_, videoHeight_, videoFormat_); 35 | infoChanged_ = false; 36 | } 37 | 38 | std::shared_ptr frame = getFrame(); 39 | if (frame && frame->linesize[0]) { 40 | yuvRenderer_->updateTextureData(frame); 41 | } 42 | } 43 | 44 | std::shared_ptr RealTimePlayer::getFrame() { 45 | std::lock_guard lck(mtx); 46 | 47 | // No frame in the queue 48 | if (videoFrameQueue.empty()) { 49 | return nullptr; 50 | } 51 | 52 | // Get a frame from the queue 53 | std::shared_ptr frame = videoFrameQueue.front(); 54 | 55 | // Remove the frame from the queue. 
56 | videoFrameQueue.pop(); 57 | 58 | lastFrame_ = frame; 59 | 60 | return frame; 61 | } 62 | 63 | void RealTimePlayer::onVideoInfoReady(int width, int height, int format) { 64 | if (videoWidth_ != width) { 65 | videoWidth_ = width; 66 | makeInfoDirty(true); 67 | } 68 | if (videoHeight_ != height) { 69 | videoHeight_ = height; 70 | makeInfoDirty(true); 71 | } 72 | if (videoFormat_ != format) { 73 | videoFormat_ = format; 74 | makeInfoDirty(true); 75 | } 76 | } 77 | 78 | void RealTimePlayer::play(const std::string &playUrl, bool forceSoftwareDecoding) { 79 | playStop = false; 80 | 81 | if (analysisThread.joinable()) { 82 | analysisThread.join(); 83 | } 84 | 85 | url = playUrl; 86 | 87 | decoder = std::make_shared(); 88 | 89 | analysisThread = std::thread([this, forceSoftwareDecoding] { 90 | bool ok = decoder->OpenInput(url, forceSoftwareDecoding); 91 | if (!ok) { 92 | GuiInterface::Instance().PutLog(LogLevel::Error, "Loading URL failed"); 93 | return; 94 | } 95 | 96 | GuiInterface::Instance().EmitDecoderReady(decoder->GetWidth(), decoder->GetHeight(), decoder->GetFps()); 97 | 98 | if (!isMuted && decoder->HasAudio()) { 99 | enableAudio(); 100 | } 101 | 102 | if (decoder->HasVideo()) { 103 | onVideoInfoReady(decoder->GetWidth(), decoder->GetHeight(), decoder->GetVideoFrameFormat()); 104 | } 105 | 106 | // Bitrate callback. 107 | decoder->bitrateUpdateCallback = [](uint64_t bitrate) { GuiInterface::Instance().EmitBitrateUpdate(bitrate); }; 108 | 109 | hwEnabled = decoder->hwDecoderEnabled; 110 | 111 | decodeThread = std::thread([this] { 112 | while (!playStop) { 113 | try { 114 | // Getting frame. 115 | auto frame = decoder->GetNextFrame(); 116 | if (!frame) { 117 | continue; 118 | } 119 | 120 | // Push frame to the buffer queue. 121 | std::lock_guard lck(mtx); 122 | if (videoFrameQueue.size() > 10) { 123 | videoFrameQueue.pop(); 124 | } 125 | videoFrameQueue.push(frame); 126 | } 127 | // Decoder error. 128 | catch (const SendPacketException &e) { 129 | GuiInterface::Instance().PutLog(LogLevel::Error, e.what()); 130 | GuiInterface::Instance().ShowTip(FTR("hw decoder error")); 131 | } 132 | // Read frame error, mostly due to a lost signal. 133 | catch (const ReadFrameException &e) { 134 | GuiInterface::Instance().PutLog(LogLevel::Error, e.what()); 135 | GuiInterface::Instance().ShowTip(FTR("signal lost")); 136 | } 137 | // Break on other unknown errors. 138 | catch (const std::exception &e) { 139 | GuiInterface::Instance().PutLog(LogLevel::Error, e.what()); 140 | break; 141 | } 142 | } 143 | }); 144 | 145 | // Start decode thread. 146 | decodeThread.detach(); 147 | }); 148 | 149 | // Start analysis thread. 
150 | analysisThread.detach(); 151 | } 152 | 153 | void RealTimePlayer::stop() { 154 | playStop = true; 155 | 156 | if (decoder && decoder->pFormatCtx) { 157 | decoder->pFormatCtx->interrupt_callback.callback = [](void *) { return 1; }; 158 | } 159 | 160 | if (analysisThread.joinable()) { 161 | analysisThread.join(); 162 | } 163 | 164 | if (decodeThread.joinable()) { 165 | decodeThread.join(); 166 | } 167 | 168 | { 169 | std::lock_guard lck(mtx); 170 | videoFrameQueue = std::queue>(); 171 | } 172 | 173 | if (decoder) { 174 | decoder->CloseInput(); 175 | decoder.reset(); 176 | } 177 | 178 | disableAudio(); 179 | } 180 | 181 | void RealTimePlayer::setMuted(bool muted) { 182 | if (!decoder->HasAudio()) { 183 | return; 184 | } 185 | 186 | if (!muted && decoder) { 187 | decoder->ClearAudioBuff(); 188 | 189 | if (!enableAudio()) { 190 | return; 191 | } 192 | } else { 193 | disableAudio(); 194 | } 195 | 196 | isMuted = muted; 197 | // emit onMutedChanged(muted); 198 | } 199 | 200 | RealTimePlayer::~RealTimePlayer() { 201 | stop(); 202 | 203 | SDL_Quit(); 204 | } 205 | 206 | std::string RealTimePlayer::captureJpeg() { 207 | if (!lastFrame_) { 208 | return ""; 209 | } 210 | 211 | auto dir = GuiInterface::GetCaptureDir(); 212 | 213 | try { 214 | if (!std::filesystem::exists(dir)) { 215 | std::filesystem::create_directories(dir); 216 | } 217 | } catch (const std::exception &e) { 218 | std::cerr << e.what() << std::endl; 219 | } 220 | 221 | std::stringstream filePath; 222 | filePath << dir; 223 | filePath << std::chrono::duration_cast( 224 | std::chrono::system_clock::now().time_since_epoch()) 225 | .count() 226 | << ".jpg"; 227 | 228 | std::ofstream outfile(filePath.str()); 229 | outfile.close(); 230 | 231 | auto ok = JpegEncoder::encodeJpeg(filePath.str(), lastFrame_); 232 | 233 | return ok ? std::string(filePath.str()) : ""; 234 | } 235 | 236 | bool RealTimePlayer::startRecord() { 237 | if (playStop && !lastFrame_) { 238 | return false; 239 | } 240 | 241 | auto dir = GuiInterface::GetCaptureDir(); 242 | 243 | try { 244 | if (!std::filesystem::exists(dir)) { 245 | std::filesystem::create_directories(dir); 246 | } 247 | } catch (const std::exception &e) { 248 | std::cerr << e.what() << std::endl; 249 | } 250 | 251 | std::stringstream filePath; 252 | filePath << dir; 253 | filePath << std::chrono::duration_cast( 254 | std::chrono::system_clock::now().time_since_epoch()) 255 | .count() 256 | << ".mp4"; 257 | 258 | std::ofstream outfile(filePath.str()); 259 | outfile.close(); 260 | 261 | mp4Encoder_ = std::make_shared(filePath.str()); 262 | 263 | // Audio track not handled for now. 264 | if (decoder->HasAudio()) { 265 | mp4Encoder_->addTrack(decoder->pFormatCtx->streams[decoder->audioStreamIndex]); 266 | } 267 | 268 | // Add video track. 
269 | if (decoder->HasVideo()) { 270 | mp4Encoder_->addTrack(decoder->pFormatCtx->streams[decoder->videoStreamIndex]); 271 | } 272 | 273 | if (!mp4Encoder_->start()) { 274 | return false; 275 | } 276 | 277 | // 设置获得NALU回调 278 | decoder->gotPktCallback = [this](const std::shared_ptr &packet) { 279 | // 输入编码器 280 | mp4Encoder_->writePacket(packet, packet->stream_index == decoder->videoStreamIndex); 281 | }; 282 | 283 | return true; 284 | } 285 | 286 | std::string RealTimePlayer::stopRecord() const { 287 | if (!mp4Encoder_) { 288 | return {}; 289 | } 290 | mp4Encoder_->stop(); 291 | decoder->gotPktCallback = nullptr; 292 | 293 | return mp4Encoder_->saveFilePath_; 294 | } 295 | 296 | int RealTimePlayer::getVideoWidth() const { 297 | if (!decoder) { 298 | return 0; 299 | } 300 | return decoder->width; 301 | } 302 | 303 | int RealTimePlayer::getVideoHeight() const { 304 | if (!decoder) { 305 | return 0; 306 | } 307 | return decoder->height; 308 | } 309 | 310 | void RealTimePlayer::forceSoftwareDecoding(bool force) { 311 | forceSoftwareDecoding_ = force; 312 | } 313 | 314 | bool RealTimePlayer::isHardwareAccelerated() const { 315 | return hwEnabled; 316 | } 317 | 318 | std::shared_ptr RealTimePlayer::getDecoder() const { 319 | return decoder; 320 | } 321 | 322 | void RealTimePlayer::emitConnectionLost() { 323 | for (auto &callback : connectionLostCallbacks) { 324 | try { 325 | callback(); 326 | } catch (std::bad_any_cast &) { 327 | abort(); 328 | } 329 | } 330 | } 331 | 332 | void SDLCALL audio_callback(void *userdata, SDL_AudioStream *stream, int additional_amount, int total_amount) { 333 | if (additional_amount > 0) { 334 | Uint8 *data = SDL_stack_alloc(Uint8, additional_amount); 335 | if (data) { 336 | auto *player = static_cast(userdata); 337 | player->getDecoder()->ReadAudioBuff(data, additional_amount); 338 | 339 | SDL_PutAudioStreamData(stream, data, additional_amount); 340 | SDL_stack_free(data); 341 | } 342 | } 343 | } 344 | 345 | bool RealTimePlayer::enableAudio() { 346 | if (!decoder->HasAudio()) { 347 | return false; 348 | } 349 | 350 | const SDL_AudioSpec spec = {SDL_AUDIO_S16, decoder->GetAudioChannelCount(), decoder->GetAudioSampleRate()}; 351 | stream = SDL_OpenAudioDeviceStream(SDL_AUDIO_DEVICE_DEFAULT_PLAYBACK, &spec, audio_callback, this); 352 | SDL_ResumeAudioDevice(SDL_GetAudioStreamDevice(stream)); 353 | 354 | return true; 355 | } 356 | 357 | void RealTimePlayer::disableAudio() { 358 | if (stream) { 359 | SDL_CloseAudioDevice(SDL_GetAudioStreamDevice(stream)); 360 | stream = nullptr; 361 | } 362 | } 363 | 364 | bool RealTimePlayer::hasAudio() const { 365 | if (!decoder) { 366 | return false; 367 | } 368 | 369 | return decoder->HasAudio(); 370 | } 371 | 372 | bool RealTimePlayer::startGifRecord() { 373 | if (playStop) { 374 | return false; 375 | } 376 | 377 | if (!(decoder && decoder->HasVideo())) { 378 | return false; 379 | } 380 | 381 | std::stringstream gif_file_path; 382 | gif_file_path << "recording/"; 383 | gif_file_path << std::chrono::duration_cast( 384 | std::chrono::system_clock::now().time_since_epoch()) 385 | .count() 386 | << ".gif"; 387 | 388 | gifEncoder_ = std::make_shared(); 389 | 390 | if (!gifEncoder_->open(decoder->width, 391 | decoder->height, 392 | decoder->GetVideoFrameFormat(), 393 | DEFAULT_GIF_FRAMERATE, 394 | gif_file_path.str())) { 395 | return false; 396 | } 397 | 398 | // 设置获得解码帧回调 399 | decoder->gotFrameCallback = [this](const std::shared_ptr &frame) { 400 | if (!gifEncoder_) { 401 | return; 402 | } 403 | if (!gifEncoder_->isOpened()) { 404 | 
return; 405 | } 406 | // 根据GIF帧率跳帧 407 | uint64_t now = 408 | std::chrono::duration_cast(std::chrono::system_clock::now().time_since_epoch()) 409 | .count(); 410 | if (gifEncoder_->getLastEncodeTime() + 1000 / gifEncoder_->getFrameRate() > now) { 411 | return; 412 | } 413 | 414 | gifEncoder_->encodeFrame(frame); 415 | }; 416 | 417 | return true; 418 | } 419 | 420 | std::string RealTimePlayer::stopGifRecord() const { 421 | decoder->gotFrameCallback = nullptr; 422 | if (!gifEncoder_) { 423 | return ""; 424 | } 425 | gifEncoder_->close(); 426 | return gifEncoder_->_saveFilePath; 427 | } 428 | -------------------------------------------------------------------------------- /src/player/RealTimePlayer.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | 5 | #include 6 | #include 7 | #include 8 | 9 | #include "GifEncoder.h" 10 | #include "Mp4Encoder.h" 11 | #include "YuvRenderer.h" 12 | #include "ffmpegDecode.h" 13 | 14 | struct SDL_AudioStream; 15 | 16 | class RealTimePlayer { 17 | public: 18 | RealTimePlayer(std::shared_ptr device, std::shared_ptr queue); 19 | ~RealTimePlayer(); 20 | void update(float dt); 21 | 22 | std::shared_ptr getFrame(); 23 | 24 | bool infoDirty() const { 25 | return infoChanged_; 26 | } 27 | void makeInfoDirty(bool dirty) { 28 | infoChanged_ = dirty; 29 | } 30 | int videoWidth() const { 31 | return videoWidth_; 32 | } 33 | int videoHeight() const { 34 | return videoHeight_; 35 | } 36 | int videoFormat() const { 37 | return videoFormat_; 38 | } 39 | bool getMuted() const { 40 | return isMuted; 41 | } 42 | // 播放 43 | void play(const std::string &playUrl, bool forceSoftwareDecoding); 44 | // 停止 45 | void stop(); 46 | // 静音 47 | void setMuted(bool muted = false); 48 | // 截图 49 | std::string captureJpeg(); 50 | 51 | // Record MP4 52 | bool startRecord(); 53 | std::string stopRecord() const; 54 | 55 | // Record GIF 56 | bool startGifRecord(); 57 | std::string stopGifRecord() const; 58 | 59 | // 获取视频宽度 60 | int getVideoWidth() const; 61 | // 获取视频高度 62 | int getVideoHeight() const; 63 | 64 | void forceSoftwareDecoding(bool force); 65 | 66 | bool isHardwareAccelerated() const; 67 | 68 | std::shared_ptr getDecoder() const; 69 | 70 | // Signals 71 | 72 | std::vector> connectionLostCallbacks; 73 | void emitConnectionLost(); 74 | 75 | // void gotRecordVol(double vol); 76 | revector::AnyCallable gotRecordVolume; 77 | 78 | // void onBitrate(long bitrate); 79 | revector::AnyCallable onBitrateUpdate; 80 | 81 | // void onMutedChanged(bool muted); 82 | revector::AnyCallable onMutedChanged; 83 | 84 | // void onHasAudio(bool has); 85 | revector::AnyCallable onHasAudio; 86 | 87 | protected: 88 | std::shared_ptr decoder; 89 | 90 | // Play file URL 91 | std::string url; 92 | 93 | volatile bool playStop = true; 94 | 95 | volatile bool isMuted = true; 96 | 97 | SDL_AudioStream *stream{}; 98 | 99 | std::queue> videoFrameQueue; 100 | 101 | std::mutex mtx; 102 | 103 | std::thread decodeThread; 104 | 105 | std::thread analysisThread; 106 | 107 | // 最后输出的帧 108 | std::shared_ptr lastFrame_; 109 | // 视频是否ready 110 | void onVideoInfoReady(int width, int height, int format); 111 | 112 | bool enableAudio(); 113 | 114 | void disableAudio(); 115 | 116 | std::shared_ptr mp4Encoder_; 117 | 118 | std::shared_ptr gifEncoder_; 119 | 120 | bool hasAudio() const; 121 | 122 | bool forceSoftwareDecoding_ = false; 123 | bool hwEnabled = false; 124 | 125 | public: 126 | std::shared_ptr yuvRenderer_; 127 | int videoWidth_{}; 128 | int videoHeight_{}; 
129 | int videoFormat_{}; 130 | bool infoChanged_ = false; 131 | }; 132 | -------------------------------------------------------------------------------- /src/player/YuvRenderer.cpp: -------------------------------------------------------------------------------- 1 | #include "YuvRenderer.h" 2 | 3 | #include "libavutil/pixfmt.h" 4 | #include "resources/resource.h" 5 | 6 | std::string vertCode = R"(#version 310 es 7 | 8 | layout(std140) uniform bUniform0 { 9 | mat4 xform; 10 | int pixFmt; 11 | int pad0; 12 | int pad1; 13 | int pad2; 14 | }; 15 | 16 | layout(location = 0) in vec2 aPos; 17 | layout(location = 1) in vec2 aUV; 18 | 19 | out vec2 v_texCoord; 20 | 21 | void main() { 22 | gl_Position = xform * vec4(aPos, 1.0f, 1.0f); 23 | v_texCoord = aUV; 24 | } 25 | )"; 26 | 27 | std::string fragCode = 28 | R"(#version 310 es 29 | 30 | #ifdef GL_ES 31 | precision highp float; 32 | precision highp sampler2D; 33 | #endif 34 | 35 | out vec4 oFragColor; 36 | 37 | in vec2 v_texCoord; 38 | 39 | uniform sampler2D tex_y; 40 | uniform sampler2D tex_u; 41 | uniform sampler2D tex_v; 42 | 43 | layout(std140) uniform bUniform0 { 44 | mat4 xform; 45 | int pixFmt; 46 | int pad0; 47 | int pad1; 48 | int pad2; 49 | }; 50 | 51 | void main() { 52 | vec3 yuv; 53 | vec3 rgb; 54 | if (pixFmt == 0 || pixFmt == 12) { 55 | //yuv420p 56 | yuv.x = texture(tex_y, v_texCoord).r; 57 | yuv.y = texture(tex_u, v_texCoord).r - 0.5; 58 | yuv.z = texture(tex_v, v_texCoord).r - 0.5; 59 | rgb = mat3( 1.0, 1.0, 1.0, 60 | 0.0, -0.3455, 1.779, 61 | 1.4075, -0.7169, 0.0) * yuv; 62 | } else if( pixFmt == 23 ){ 63 | // NV12 64 | yuv.x = texture(tex_y, v_texCoord).r; 65 | yuv.y = texture(tex_u, v_texCoord).r - 0.5; 66 | yuv.z = texture(tex_u, v_texCoord).g - 0.5; 67 | rgb = mat3( 1.0, 1.0, 1.0, 68 | 0.0, -0.3455, 1.779, 69 | 1.4075, -0.7169, 0.0) * yuv; 70 | 71 | } else { 72 | //YUV444P 73 | yuv.x = texture(tex_y, v_texCoord).r; 74 | yuv.y = texture(tex_u, v_texCoord).r - 0.5; 75 | yuv.z = texture(tex_v, v_texCoord).r - 0.5; 76 | 77 | rgb.x = clamp( yuv.x + 1.402 *yuv.z, 0.0, 1.0); 78 | rgb.y = clamp( yuv.x - 0.34414 * yuv.y - 0.71414 * yuv.z, 0.0, 1.0); 79 | rgb.z = clamp( yuv.x + 1.772 * yuv.y, 0.0, 1.0); 80 | } 81 | 82 | oFragColor = vec4(rgb, 1.0); 83 | } 84 | )"; 85 | 86 | struct FragUniformBlock { 87 | Pathfinder::Mat4 xform; 88 | int pixFmt; 89 | int pad0; 90 | int pad1; 91 | int pad2; 92 | }; 93 | 94 | YuvRenderer::YuvRenderer(std::shared_ptr device, std::shared_ptr queue) { 95 | mDevice = device; 96 | mQueue = queue; 97 | } 98 | 99 | void YuvRenderer::init() { 100 | mRenderPass = mDevice->create_render_pass(Pathfinder::TextureFormat::Rgba8Unorm, 101 | Pathfinder::AttachmentLoadOp::Clear, 102 | "yuv render pass"); 103 | 104 | initPipeline(); 105 | initGeometry(); 106 | } 107 | 108 | void YuvRenderer::initGeometry() { 109 | // Set up vertex data (and buffer(s)) and configure vertex attributes. 110 | float vertices[] = { 111 | // Positions, UVs. 
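        // Added note (illustrative, not part of the original source): GLSL mat3 constructors
        // are column-major, so the YUV420P / NV12 branch of the fragment shader above
        // expands, per pixel and with U and V already centered around zero, to the
        // BT.601-style equations
        //
        //   R = Y + 1.4075 * V
        //   G = Y - 0.3455 * U - 0.7169 * V
        //   B = Y + 1.7790 * U
        //
        // which is the same conversion the YUV444P branch writes out explicitly with
        // slightly different coefficient rounding (1.402 / 0.34414 / 0.71414 / 1.772).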
112 | -1.0, -1.0, 0.0, 0.0, // 0 113 | 1.0, -1.0, 1.0, 0.0, // 1 114 | 1.0, 1.0, 1.0, 1.0, // 2 115 | -1.0, -1.0, 0.0, 0.0, // 3 116 | 1.0, 1.0, 1.0, 1.0, // 4 117 | -1.0, 1.0, 0.0, 1.0 // 5 118 | }; 119 | 120 | mVertexBuffer = mDevice->create_buffer( 121 | {Pathfinder::BufferType::Vertex, sizeof(vertices), Pathfinder::MemoryProperty::DeviceLocal}, 122 | "yuv renderer vertex buffer"); 123 | 124 | auto encoder = mDevice->create_command_encoder("upload yuv vertex buffer"); 125 | encoder->write_buffer(mVertexBuffer, 0, sizeof(vertices), vertices); 126 | mQueue->submit_and_wait(encoder); 127 | } 128 | 129 | void YuvRenderer::initPipeline() { 130 | const auto vert_source = std::vector(vertCode.begin(), vertCode.end()); 131 | const auto frag_source = std::vector(fragCode.begin(), fragCode.end()); 132 | 133 | std::vector attribute_descriptions; 134 | 135 | uint32_t stride = 4 * sizeof(float); 136 | 137 | attribute_descriptions.push_back({0, 2, Pathfinder::DataType::f32, stride, 0, Pathfinder::VertexInputRate::Vertex}); 138 | 139 | attribute_descriptions.push_back( 140 | {0, 2, Pathfinder::DataType::f32, stride, 2 * sizeof(float), Pathfinder::VertexInputRate::Vertex}); 141 | 142 | Pathfinder::BlendState blend_state{}; 143 | blend_state.enabled = false; 144 | 145 | mUniformBuffer = mDevice->create_buffer( 146 | {Pathfinder::BufferType::Uniform, sizeof(FragUniformBlock), Pathfinder::MemoryProperty::HostVisibleAndCoherent}, 147 | "yuv renderer uniform buffer"); 148 | 149 | mDescriptorSet = mDevice->create_descriptor_set(); 150 | mDescriptorSet->add_or_update({ 151 | Pathfinder::Descriptor::uniform(0, Pathfinder::ShaderStage::VertexAndFragment, "bUniform0", mUniformBuffer), 152 | Pathfinder::Descriptor::sampled(1, Pathfinder::ShaderStage::Fragment, "tex_y"), 153 | Pathfinder::Descriptor::sampled(2, Pathfinder::ShaderStage::Fragment, "tex_u"), 154 | Pathfinder::Descriptor::sampled(3, Pathfinder::ShaderStage::Fragment, "tex_v"), 155 | }); 156 | 157 | Pathfinder::SamplerDescriptor sampler_desc{}; 158 | sampler_desc.mag_filter = Pathfinder::SamplerFilter::Nearest; 159 | sampler_desc.min_filter = Pathfinder::SamplerFilter::Nearest; 160 | sampler_desc.address_mode_u = Pathfinder::SamplerAddressMode::ClampToEdge; 161 | sampler_desc.address_mode_v = Pathfinder::SamplerAddressMode::ClampToEdge; 162 | 163 | mSampler = mDevice->create_sampler(sampler_desc); 164 | 165 | mPipeline = mDevice->create_render_pipeline( 166 | mDevice->create_shader_module(vert_source, Pathfinder::ShaderStage::Vertex, "yuv vert"), 167 | mDevice->create_shader_module(frag_source, Pathfinder::ShaderStage::Fragment, "yuv frag"), 168 | attribute_descriptions, 169 | blend_state, 170 | mDescriptorSet, 171 | Pathfinder::TextureFormat::Rgba8Unorm, 172 | "yuv pipeline"); 173 | } 174 | 175 | void YuvRenderer::updateTextureInfo(int width, int height, int format) { 176 | if (width == 0 || height == 0) { 177 | return; 178 | } 179 | 180 | mPixFmt = format; 181 | 182 | mTexY = mDevice->create_texture({{width, height}, Pathfinder::TextureFormat::R8}, "y texture"); 183 | 184 | if (format == AV_PIX_FMT_YUV420P || format == AV_PIX_FMT_YUVJ420P) { 185 | mTexU = mDevice->create_texture({{width / 2, height / 2}, Pathfinder::TextureFormat::R8}, "u texture"); 186 | 187 | mTexV = mDevice->create_texture({{width / 2, height / 2}, Pathfinder::TextureFormat::R8}, "v texture"); 188 | } else if (format == AV_PIX_FMT_NV12) { 189 | mTexU = mDevice->create_texture({{width / 2, height / 2}, Pathfinder::TextureFormat::Rg8}, "u texture"); 190 | 191 | // V is not used for 
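    // Added note (illustrative, not part of the original source): NV12 keeps chroma in a
    // single interleaved half-resolution plane (U0 V0 U1 V1 ...), which is why it is
    // uploaded into one Rg8 texture and the shader reads U from .r and V from .g of tex_u.
    // The small "dummy v texture" below exists only so the descriptor set always has
    // something bound to tex_v. Planar YUV420P instead uses two separate half-size R8
    // planes, and YUV444P uses full-size planes.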
NV12. 192 | if (mTexV == nullptr) { 193 | mTexV = mDevice->create_texture({{2, 2}, Pathfinder::TextureFormat::R8}, "dummy v texture"); 194 | } 195 | } 196 | // yuv444p 197 | else { 198 | mTexU = mDevice->create_texture({{width, height}, Pathfinder::TextureFormat::R8}, "u texture"); 199 | 200 | mTexV = mDevice->create_texture({{width, height}, Pathfinder::TextureFormat::R8}, "v texture"); 201 | } 202 | mTextureAllocated = true; 203 | } 204 | 205 | void YuvRenderer::updateTextureData(const std::shared_ptr& curFrameData) { 206 | if (mTexY == nullptr) { 207 | return; 208 | } 209 | 210 | auto encoder = mDevice->create_command_encoder("upload yuv data"); 211 | 212 | if (mStabilize) { 213 | cv::Mat frameY = cv::Mat(mTexY->get_size().y, mTexY->get_size().x, CV_8UC1, curFrameData->data[0]); 214 | 215 | if (mPreviousFrame.has_value()) { 216 | auto stabXform = mStabilizer.stabilize(mPreviousFrame.value(), frameY); 217 | 218 | mStabXform = Pathfinder::Mat3(1); 219 | mStabXform.v[0] = stabXform.at(0, 0); 220 | mStabXform.v[3] = stabXform.at(0, 1); 221 | mStabXform.v[1] = stabXform.at(1, 0); 222 | mStabXform.v[4] = stabXform.at(1, 1); 223 | mStabXform.v[6] = stabXform.at(0, 2) / mTexY->get_size().x; 224 | mStabXform.v[7] = stabXform.at(1, 2) / mTexY->get_size().y; 225 | 226 | mStabXform = 227 | mStabXform.scale(Pathfinder::Vec2F(1.0f + (float)HORIZONTAL_BORDER_CROP / mTexY->get_size().x)); 228 | } 229 | 230 | mPreviousFrame = frameY.clone(); 231 | 232 | if (!mPrevFrameData) { 233 | mPrevFrameData = curFrameData; 234 | } 235 | 236 | // Keep the cv frame alive until we call `submit_and_wait` 237 | cv::Mat enhancedFrameY; 238 | 239 | if (mPrevFrameData->linesize[0]) { 240 | const void* texYData = mPrevFrameData->data[0]; 241 | 242 | if (mLowLightEnhancementSimple) { 243 | enhancedFrameY = cv::Mat(mTexY->get_size().y, mTexY->get_size().x, CV_8UC1, mPrevFrameData->data[0]); 244 | cv::equalizeHist(enhancedFrameY, enhancedFrameY); 245 | texYData = enhancedFrameY.data; 246 | } 247 | 248 | encoder->write_texture(mTexY, {}, texYData); 249 | } 250 | if (mPrevFrameData->linesize[1]) { 251 | encoder->write_texture(mTexU, {}, mPrevFrameData->data[1]); 252 | } 253 | if (mPrevFrameData->linesize[2] && mPixFmt != AV_PIX_FMT_NV12) { 254 | encoder->write_texture(mTexV, {}, mPrevFrameData->data[2]); 255 | } 256 | 257 | mQueue->submit_and_wait(encoder); 258 | 259 | // Do this after submitting. 
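        // Added note (illustrative, not part of the original source): mStabilizer.stabilize()
        // returns a 2x3 affine transform in pixel units; the assignments above pack it into
        // the column-major Pathfinder::Mat3 consumed as `xform` by the vertex shader:
        //
        //   | a  b  tx/W |        v[0]=a  v[3]=b  v[6]=tx/W
        //   | c  d  ty/H |   ->   v[1]=c  v[4]=d  v[7]=ty/H
        //   | 0  0  1    |
        //
        // Dividing the translation by the texture size converts pixels into normalized
        // coordinates, and the final uniform scale of 1 + HORIZONTAL_BORDER_CROP / width
        // zooms in slightly so the borders revealed by the compensation stay off-screen.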
260 | mPrevFrameData = curFrameData; 261 | } else { 262 | if (mPreviousFrame.has_value()) { 263 | mPreviousFrame.reset(); 264 | } 265 | if (mPrevFrameData) { 266 | mPrevFrameData.reset(); 267 | } 268 | 269 | mStabXform = Pathfinder::Mat3(1); 270 | 271 | // Keep the cv frame alive until we call `submit_and_wait` 272 | cv::Mat enhancedFrameY; 273 | 274 | if (curFrameData->linesize[0]) { 275 | const void* texYData = curFrameData->data[0]; 276 | 277 | if (mLowLightEnhancementSimple) { 278 | cv::Mat originalFrameY = 279 | cv::Mat(mTexY->get_size().y, mTexY->get_size().x, CV_8UC1, curFrameData->data[0]); 280 | 281 | cv::equalizeHist(originalFrameY, enhancedFrameY); 282 | 283 | texYData = enhancedFrameY.data; 284 | } else if (mLowLightEnhancementAdvanced) { 285 | if (!mNet.has_value()) { 286 | mNet = PairLIE(revector::get_asset_dir("weights/pairlie_180x320.onnx")); 287 | } 288 | 289 | cv::Mat originalFrameY = 290 | cv::Mat(mTexY->get_size().y, mTexY->get_size().x, CV_8UC1, curFrameData->data[0]); 291 | 292 | enhancedFrameY = mNet->detect(originalFrameY); 293 | 294 | texYData = enhancedFrameY.data; 295 | } 296 | encoder->write_texture(mTexY, {}, texYData); 297 | } 298 | if (curFrameData->linesize[1]) { 299 | encoder->write_texture(mTexU, {}, curFrameData->data[1]); 300 | } 301 | if (curFrameData->linesize[2] && mPixFmt != AV_PIX_FMT_NV12) { 302 | encoder->write_texture(mTexV, {}, curFrameData->data[2]); 303 | } 304 | 305 | mQueue->submit_and_wait(encoder); 306 | } 307 | } 308 | 309 | void YuvRenderer::render(const std::shared_ptr& outputTex, bool stabilize) { 310 | if (!mTextureAllocated) { 311 | return; 312 | } 313 | if (mNeedClear) { 314 | mNeedClear = false; 315 | return; 316 | } 317 | 318 | auto encoder = mDevice->create_command_encoder("render yuv"); 319 | 320 | // Update uniform buffers. 321 | { 322 | FragUniformBlock uniform = {Pathfinder::Mat4::from_mat3(mStabXform), mPixFmt}; 323 | 324 | // We don't need to preserve the data until the upload commands are implemented because 325 | // these uniform buffers are host-visible/coherent. 326 | encoder->write_buffer(mUniformBuffer, 0, sizeof(FragUniformBlock), &uniform); 327 | } 328 | 329 | // Update descriptor set. 
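    // Added note (illustrative, not part of the original source): FragUniformBlock mirrors
    // the std140 block `bUniform0` declared in both shaders - a mat4 (64 bytes) followed by
    // four ints (16 bytes). The three pad ints after pixFmt exist only to keep the CPU-side
    // struct and the GPU-side block the same size, so the single write_buffer() above
    // uploads a bit-exact copy. A cheap compile-time guard for that assumption, provided
    // Pathfinder::Mat4 is 16 tightly packed floats, would be:
    //
    //   static_assert(sizeof(FragUniformBlock) == 80, "must match the std140 layout of bUniform0");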
330 | mDescriptorSet->add_or_update({ 331 | Pathfinder::Descriptor::sampled(1, Pathfinder::ShaderStage::Fragment, "tex_y", mTexY, mSampler), 332 | Pathfinder::Descriptor::sampled(2, Pathfinder::ShaderStage::Fragment, "tex_u", mTexU, mSampler), 333 | Pathfinder::Descriptor::sampled(3, Pathfinder::ShaderStage::Fragment, "tex_v", mTexV, mSampler), 334 | }); 335 | 336 | encoder->begin_render_pass(mRenderPass, outputTex, Pathfinder::ColorF::black()); 337 | 338 | encoder->set_viewport({{0, 0}, outputTex->get_size()}); 339 | 340 | encoder->bind_render_pipeline(mPipeline); 341 | 342 | encoder->bind_vertex_buffers({mVertexBuffer}); 343 | 344 | encoder->bind_descriptor_set(mDescriptorSet); 345 | 346 | encoder->draw(0, 6); 347 | 348 | encoder->end_render_pass(); 349 | 350 | mQueue->submit_and_wait(encoder); 351 | } 352 | 353 | void YuvRenderer::clear() { 354 | mNeedClear = true; 355 | } 356 | -------------------------------------------------------------------------------- /src/player/YuvRenderer.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | 11 | #include 12 | #include 13 | #include 14 | 15 | #include "../feature/video_stabilizer.h" 16 | #include "libavutil/frame.h" 17 | #include "src/feature/night_image_enhancement.h" 18 | 19 | namespace cv { 20 | class Mat; 21 | } 22 | 23 | class YuvRenderer { 24 | public: 25 | YuvRenderer(std::shared_ptr device, std::shared_ptr queue); 26 | ~YuvRenderer() = default; 27 | void init(); 28 | void render(const std::shared_ptr& outputTex, bool stabilize); 29 | void updateTextureInfo(int width, int height, int format); 30 | void updateTextureData(const std::shared_ptr& data); 31 | void clear(); 32 | 33 | bool mStabilize = false; 34 | 35 | bool mLowLightEnhancementSimple = false; 36 | 37 | bool mLowLightEnhancementAdvanced = false; 38 | std::optional mNet; 39 | 40 | Pathfinder::Mat3 mStabXform; 41 | 42 | protected: 43 | void initPipeline(); 44 | void initGeometry(); 45 | 46 | private: 47 | std::shared_ptr mPipeline; 48 | std::shared_ptr mQueue; 49 | std::shared_ptr mRenderPass; 50 | std::shared_ptr mTexY; 51 | std::shared_ptr mTexU; 52 | std::shared_ptr mTexV; 53 | std::shared_ptr mPrevFrameData; 54 | std::shared_ptr mDescriptorSet; 55 | std::shared_ptr mSampler; 56 | std::shared_ptr mVertexBuffer; 57 | std::shared_ptr mUniformBuffer; 58 | 59 | std::optional mPreviousFrame; 60 | 61 | int mPixFmt = 0; 62 | bool mTextureAllocated = false; 63 | 64 | VideoStabilizer mStabilizer; 65 | 66 | bool mNeedClear = false; 67 | 68 | std::shared_ptr mDevice; 69 | 70 | volatile bool mInited = false; 71 | }; 72 | -------------------------------------------------------------------------------- /src/player/ffmpegDecode.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | #include 4 | #include 5 | #include 6 | #include 7 | 8 | #include "ffmpegInclude.h" 9 | 10 | class ReadFrameException : public std::runtime_error { 11 | public: 12 | ReadFrameException(std::string msg) : runtime_error(msg.c_str()) {} 13 | }; 14 | 15 | class SendPacketException : public std::runtime_error { 16 | public: 17 | SendPacketException(std::string msg) : runtime_error(msg.c_str()) {} 18 | }; 19 | 20 | class FfmpegDecoder { 21 | friend class RealTimePlayer; 22 | 23 | public: 24 | FfmpegDecoder() = default; 25 | 26 | ~FfmpegDecoder() { 27 | CloseInput(); 28 | 29 | swrCtx.reset(); 30 | hwFrame.reset(); 31 | } 32 
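    // Illustrative usage sketch (not part of the original header): RealTimePlayer drives
    // this decoder roughly as follows - open the source, pull decoded frames on a worker
    // thread, and close the input when playback stops. GetNextFrame() hands back NV12
    // frames when hardware decoding is active (see GetVideoFrameFormat() below), otherwise
    // the codec's native pixel format.
    //
    //   FfmpegDecoder dec;
    //   std::string url = "udp://127.0.0.1:5600";   // hypothetical stream URL
    //   if (dec.OpenInput(url, /*forceSoftwareDecoding=*/false)) {
    //       while (auto frame = dec.GetNextFrame()) {
    //           // hand the frame to YuvRenderer / Mp4Encoder / GifEncoder
    //       }
    //       dec.CloseInput();
    //   }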
| 33 | bool OpenInput(std::string &inputFile, bool forceSoftwareDecoding); 34 | 35 | bool CloseInput(); 36 | 37 | std::shared_ptr GetNextFrame(); 38 | 39 | int GetWidth() const { 40 | return width; 41 | } 42 | 43 | int GetHeight() const { 44 | return height; 45 | } 46 | 47 | float GetFps() const { 48 | return videoFps; 49 | } 50 | 51 | bool HasAudio() const { 52 | return hasAudioStream; 53 | } 54 | 55 | bool HasVideo() const { 56 | return hasVideoStream; 57 | } 58 | 59 | size_t ReadAudioBuff(uint8_t *aSample, size_t aSize); 60 | 61 | void ClearAudioBuff(); 62 | 63 | int GetAudioSampleRate() const { 64 | return pAudioCodecCtx->sample_rate; 65 | } 66 | 67 | int GetAudioChannelCount() const { 68 | return pAudioCodecCtx->ch_layout.nb_channels; 69 | } 70 | 71 | AVSampleFormat GetAudioSampleFormat() const { 72 | return AV_SAMPLE_FMT_S16; 73 | } 74 | 75 | AVPixelFormat GetVideoFrameFormat() const { 76 | if (hwDecoderEnabled) { 77 | return AV_PIX_FMT_NV12; 78 | } 79 | return pVideoCodecCtx->pix_fmt; 80 | } 81 | 82 | int GetAudioFrameSamples() const { 83 | return pAudioCodecCtx->sample_rate * 2 / 25; 84 | } 85 | 86 | private: 87 | bool OpenVideo(); 88 | 89 | bool OpenAudio(); 90 | 91 | void CloseVideo(); 92 | 93 | void CloseAudio(); 94 | 95 | int DecodeAudio(const AVPacket *av_pkt, uint8_t *pOutBuffer, size_t nOutBufferSize); 96 | 97 | bool DecodeVideo(const AVPacket *av_pkt, std::shared_ptr &pOutFrame); 98 | 99 | void writeAudioBuff(uint8_t *aSample, size_t aSize); 100 | 101 | std::function &packet)> gotPktCallback; 102 | 103 | std::function &frame)> gotFrameCallback; 104 | 105 | bool initHwDecoder(AVCodecContext *ctx, enum AVHWDeviceType type); 106 | 107 | std::chrono::time_point startTime; 108 | 109 | AVFormatContext *pFormatCtx = nullptr; 110 | 111 | AVCodecContext *pVideoCodecCtx = nullptr; 112 | 113 | AVCodecContext *pAudioCodecCtx = nullptr; 114 | 115 | // ffmpeg 音频样本格式转换 116 | std::shared_ptr swrCtx; 117 | 118 | int videoStreamIndex = -1; 119 | 120 | int audioStreamIndex = -1; 121 | 122 | volatile bool sourceIsOpened = false; 123 | 124 | float videoFps = 0; 125 | 126 | double videoBaseTime = 0; 127 | 128 | double audioBaseTime = 0; 129 | 130 | std::mutex _releaseLock; 131 | 132 | bool hasVideoStream{}; 133 | 134 | bool hasAudioStream{}; 135 | 136 | int width{}; 137 | 138 | int height{}; 139 | 140 | void emitBitrateUpdate(uint64_t pBitrate) { 141 | bitrateUpdateCallback(pBitrate); 142 | } 143 | 144 | volatile uint64_t bytesSecond = 0; 145 | uint64_t bitrate = 0; 146 | uint64_t lastCountBitrateTime = 0; 147 | std::function bitrateUpdateCallback; 148 | 149 | // Audio buffer 150 | std::mutex abBuffMtx; 151 | AVFifo* audioFifoBuffer{}; 152 | 153 | // Hardware decoding 154 | AVHWDeviceType hwDecoderType; 155 | // If a hardware decoder is being used. 
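    // Illustrative sketch (not part of the original header): ReadAudioBuff() and
    // writeAudioBuff() form a producer/consumer pair over `audioFifoBuffer`, guarded by
    // `abBuffMtx` - the decode path pushes resampled S16 samples in, and the SDL audio
    // callback drains them. Assuming a byte-sized FIFO element, the read side is roughly
    // (the actual implementation lives in ffmpegDecode.cpp and may differ):
    //
    //   std::lock_guard<std::mutex> lock(abBuffMtx);
    //   size_t n = std::min<size_t>(aSize, av_fifo_can_read(audioFifoBuffer));
    //   if (n) av_fifo_read(audioFifoBuffer, aSample, n);   // drain up to n bytes
    //   return n;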
156 | bool hwDecoderEnabled = false; 157 | AVPixelFormat hwPixFmt; 158 | AVBufferRef *hwDeviceCtx = nullptr; 159 | volatile bool dropCurrentVideoFrame = false; 160 | std::shared_ptr hwFrame; 161 | }; 162 | -------------------------------------------------------------------------------- /src/player/ffmpegInclude.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Include ffmpeg files 3 | */ 4 | 5 | #pragma once 6 | 7 | #ifndef __STDC_CONSTANT_MACROS 8 | #define __STDC_CONSTANT_MACROS 9 | #endif 10 | 11 | extern "C" { 12 | #include 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | } 19 | -------------------------------------------------------------------------------- /src/wifi/Rtp.h: -------------------------------------------------------------------------------- 1 | // 2 | // Created by liangzhuohua on 2024/6/13. 3 | // 4 | 5 | #ifndef FPV_WFB_RTP_H 6 | #define FPV_WFB_RTP_H 7 | 8 | #if defined(_WIN32) 9 | #pragma pack(push, 1) 10 | #else 11 | #include 12 | #endif // defined(_WIN32) 13 | 14 | class RtpHeader { 15 | public: 16 | #if defined(__BYTE_ORDER) && __BYTE_ORDER == __BIG_ENDIAN || defined(_WIN32) && REG_DWORD == REG_DWORD_BIG_ENDIAN 17 | // 版本号,固定为2 18 | uint32_t version : 2; 19 | // padding 20 | uint32_t padding : 1; 21 | // 扩展 22 | uint32_t ext : 1; 23 | // csrc 24 | uint32_t csrc : 4; 25 | // mark 26 | uint32_t mark : 1; 27 | // 负载类型 28 | uint32_t pt : 7; 29 | #else 30 | // csrc 31 | uint32_t csrc : 4; 32 | // 扩展 33 | uint32_t ext : 1; 34 | // padding 35 | uint32_t padding : 1; 36 | // 版本号,固定为2 37 | uint32_t version : 2; 38 | // 负载类型 39 | uint32_t pt : 7; 40 | // mark 41 | uint32_t mark : 1; 42 | #endif 43 | // 序列号 44 | uint32_t seq : 16; 45 | // 时间戳 46 | uint32_t stamp; 47 | // ssrc 48 | uint32_t ssrc; 49 | // 负载,如果有csrc和ext,前面为 4 * csrc + (4 + 4 * ext_len) 50 | uint8_t payload; 51 | 52 | public: 53 | #define AV_RB16(x) ((((const uint8_t *)(x))[0] << 8) | ((const uint8_t *)(x))[1]) 54 | 55 | size_t getCsrcSize() const { 56 | // 每个csrc占用4字节 57 | return csrc << 2; 58 | } 59 | 60 | uint8_t *getCsrcData() { 61 | if (!csrc) { 62 | return nullptr; 63 | } 64 | return &payload; 65 | } 66 | 67 | size_t getExtSize() const { 68 | // rtp有ext 69 | if (!ext) { 70 | return 0; 71 | } 72 | auto ext_ptr = &payload + getCsrcSize(); 73 | // uint16_t reserved = AV_RB16(ext_ptr); 74 | // 每个ext占用4字节 75 | return AV_RB16(ext_ptr + 2) << 2; 76 | } 77 | 78 | uint16_t getExtReserved() const { 79 | // rtp有ext 80 | if (!ext) { 81 | return 0; 82 | } 83 | auto ext_ptr = &payload + getCsrcSize(); 84 | return AV_RB16(ext_ptr); 85 | } 86 | 87 | uint8_t *getExtData() { 88 | if (!ext) { 89 | return nullptr; 90 | } 91 | auto ext_ptr = &payload + getCsrcSize(); 92 | // 多出的4个字节分别为reserved、ext_len 93 | return ext_ptr + 4; 94 | } 95 | 96 | size_t getPayloadOffset() const { 97 | // 有ext时,还需要忽略reserved、ext_len 4个字节 98 | return getCsrcSize() + (ext ? 
(4 + getExtSize()) : 0); 99 | } 100 | 101 | uint8_t *getPayloadData() { 102 | return &payload + getPayloadOffset(); 103 | } 104 | 105 | size_t getPaddingSize(size_t rtp_size) const { 106 | if (!padding) { 107 | return 0; 108 | } 109 | auto end = (uint8_t *)this + rtp_size - 1; 110 | return *end; 111 | } 112 | 113 | ssize_t getPayloadSize(size_t rtp_size) const { 114 | auto invalid_size = getPayloadOffset() + getPaddingSize(rtp_size); 115 | return (ssize_t)rtp_size - invalid_size - 12; 116 | } 117 | 118 | std::string dumpString(size_t rtp_size) const { 119 | std::stringstream printer; 120 | printer << "version:" << (int)version << "\r\n"; 121 | printer << "padding:" << getPaddingSize(rtp_size) << "\r\n"; 122 | printer << "ext:" << getExtSize() << "\r\n"; 123 | printer << "csrc:" << getCsrcSize() << "\r\n"; 124 | printer << "mark:" << (int)mark << "\r\n"; 125 | printer << "pt:" << (int)pt << "\r\n"; 126 | printer << "seq:" << ntohs(seq) << "\r\n"; 127 | printer << "stamp:" << ntohl(stamp) << "\r\n"; 128 | printer << "ssrc:" << ntohl(ssrc) << "\r\n"; 129 | printer << "rtp size:" << rtp_size << "\r\n"; 130 | printer << "payload offset:" << getPayloadOffset() << "\r\n"; 131 | printer << "payload size:" << getPayloadSize(rtp_size) << "\r\n"; 132 | return printer.str(); 133 | } 134 | 135 | /////////////////////////////////////////////////////////////////////// 136 | } PACKED; 137 | 138 | #if defined(_WIN32) 139 | #pragma pack(pop) 140 | #endif // defined(_WIN32) 141 | 142 | #endif // FPV_WFB_RTP_H 143 | -------------------------------------------------------------------------------- /src/wifi/RxFrame.h: -------------------------------------------------------------------------------- 1 | // 2 | // Created by gaeta on 2024-03-31. 3 | // 4 | 5 | #pragma once 6 | 7 | #include 8 | #include 9 | #include 10 | 11 | enum class RadioPort { /* define your RadioPort enum */ 12 | }; 13 | 14 | class RxFrame { 15 | private: 16 | std::span _data; 17 | static constexpr std::array _dataHeader = {uint8_t(0x08), 18 | uint8_t(0x01)}; // Frame control value for QoS Data 19 | 20 | public: 21 | RxFrame(const std::span &data) : _data(data) { 22 | DataAsMemory = _data; 23 | } 24 | 25 | std::span DataAsMemory; // useless in c++ 26 | 27 | std::span ControlField() const { 28 | return {_data.data(), 2}; 29 | } 30 | std::span Duration() const { 31 | return {_data.data() + 2, 2}; 32 | } 33 | std::span MacAp() const { 34 | return {_data.data() + 4, 6}; 35 | } // receiverAddress 36 | std::span MacSrcUniqueIdPart() const { 37 | return {_data.data() + 10, 1}; 38 | } // transmitterAddress 39 | std::span MacSrcNoncePart1() const { 40 | return {_data.data() + 11, 4}; 41 | } 42 | std::span MacSrcRadioPort() const { 43 | return {_data.data() + 15, 1}; 44 | } 45 | std::span MacDstUniqueIdPart() const { 46 | return {_data.data() + 16, 1}; 47 | } // destinationAddress 48 | std::span MacDstNoncePart2() const { 49 | return {_data.data() + 17, 4}; 50 | } 51 | std::span MacDstRadioPort() const { 52 | return {_data.data() + 21, 1}; 53 | } 54 | std::span SequenceControl() const { 55 | return {_data.data() + 22, 2}; 56 | } 57 | std::span PayloadSpan() const { 58 | return {_data.data() + 24, _data.size() - 28}; 59 | } 60 | std::span GetNonce() const { 61 | std::array data; 62 | std::copy(_data.begin() + 11, _data.begin() + 15, data.begin()); 63 | std::copy(_data.begin() + 17, _data.begin() + 21, data.begin() + 4); 64 | return {data.data(), data.size()}; 65 | } 66 | 67 | // RadioPort get_valid_radio_port() const { 68 | // return 
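    // Added note (commentary not present in the original source): GetNonce() above copies
    // the two 4-byte nonce halves (bytes 11-14 and 17-20 of the frame) into a local
    // std::array and then returns a std::span over it, so the span dangles as soon as the
    // function returns. A safer shape is to return the bytes by value, for example:
    //
    //   std::array<uint8_t, 8> GetNonceBytes() const {
    //       std::array<uint8_t, 8> nonce{};
    //       std::copy(_data.begin() + 11, _data.begin() + 15, nonce.begin());
    //       std::copy(_data.begin() + 17, _data.begin() + 21, nonce.begin() + 4);
    //       return nonce;
    //   }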
RadioPort::Fromuint8_t(_data[15]); 69 | // } 70 | 71 | bool IsValidWfbFrame() const { 72 | if (_data.empty()) return false; 73 | if (!IsDataFrame()) return false; 74 | if (PayloadSpan().empty()) return false; 75 | if (!HasValidAirGndId()) return false; 76 | if (!HasValidRadioPort()) return false; 77 | // TODO: add `frame.PayloadSpan().size() > RAW_WIFI_FRAME_MAX_PAYLOAD_SIZE` 78 | return true; 79 | } 80 | 81 | uint8_t GetValidAirGndId() const { 82 | return _data[10]; 83 | } 84 | 85 | bool MatchesChannelID(const uint8_t *channel_id) const { 86 | // 0x57, 0x42, 0xaa, 0xbb, 0xcc, 0xdd, // last four bytes are replaced by channel_id (x2) 87 | return _data[10] == 0x57 && _data[11] == 0x42 && _data[12] == channel_id[0] && _data[13] == channel_id[1] && 88 | _data[14] == channel_id[2] && _data[15] == channel_id[3] && _data[16] == 0x57 && _data[17] == 0x42 && 89 | _data[18] == channel_id[0] && _data[19] == channel_id[1] && _data[20] == channel_id[2] && 90 | _data[21] == channel_id[3]; 91 | } 92 | 93 | private: 94 | bool IsDataFrame() const { 95 | return _data.size() >= 2 && _data[0] == _dataHeader[0] && _data[1] == _dataHeader[1]; 96 | } 97 | 98 | bool HasValidAirGndId() const { 99 | return _data.size() >= 18 && _data[10] == _data[16]; 100 | } 101 | 102 | bool HasValidRadioPort() const { 103 | return _data.size() >= 22 && _data[15] == _data[21]; 104 | } 105 | }; 106 | 107 | class WifiFrame { 108 | public: 109 | WifiFrame(const std::span &rawData) { 110 | // Frame Control (2 bytes) 111 | frameControl = (rawData[1] << 8) | rawData[0]; 112 | 113 | // Duration/ID (2 bytes) 114 | durationID = (rawData[3] << 8) | rawData[2]; 115 | 116 | // Receiver Address (6 bytes) 117 | receiverAddress.assign(rawData.begin() + 4, rawData.begin() + 10); 118 | 119 | // Transmitter Address (6 bytes) 120 | transmitterAddress.assign(rawData.begin() + 10, rawData.begin() + 16); 121 | 122 | // Destination Address (6 bytes) 123 | destinationAddress.assign(rawData.begin() + 16, rawData.begin() + 22); 124 | 125 | // Source Address (6 bytes) 126 | // sourceAddress.assign(rawData.begin() + 22, rawData.begin() + 28); 127 | 128 | // Sequence Control (2 bytes) 129 | sequenceControl = (rawData[22] << 8) | rawData[22]; 130 | 131 | // Frame Body (variable length) 132 | // For simplicity, let's assume the body starts at byte 30 133 | // frameBody.assign(rawData.begin() + 30, rawData.end() - 4); 134 | // 135 | // // Frame Check Sequence (4 bytes, assuming little-endian) 136 | // frameCheckSequence = (rawData[rawData.size() - 1] << 24) | 137 | // (rawData[rawData.size() - 2] << 16) | 138 | // (rawData[rawData.size() - 3] << 8) | 139 | // rawData[rawData.size() - 4]; 140 | } 141 | uint16_t frameControl; 142 | uint16_t durationID; 143 | std::vector receiverAddress; 144 | std::vector transmitterAddress; 145 | std::vector destinationAddress; 146 | std::vector sourceAddress; 147 | uint16_t sequenceControl; 148 | std::vector frameBody; 149 | uint32_t frameCheckSequence; 150 | }; 151 | -------------------------------------------------------------------------------- /src/wifi/WfbDefine.h: -------------------------------------------------------------------------------- 1 | // 2 | // Created by Talus on 2024/6/12. 
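// Added note (illustrative, not part of the original header): the channel_id matched by
// RxFrame::MatchesChannelID() and used by the Aggregator is packed as
// (link_id << 8) + radio_port and converted to big-endian before being compared against
// bytes 12-15 and 18-21 of the frame. Worked example with the defaults in WfbReceiver.cpp:
//
//   link_id    = 7669206 (0x7505D6, derived from link_domain "default")
//   radio_port = 0
//   channel_id = (0x7505D6 << 8) + 0 = 0x7505D600
//   bytes on the air (big-endian): 0x75 0x05 0xD6 0x00
//
// so a matching frame carries the address bytes 57:42:75:05:D6:00, where 0x57 0x42 ("WB")
// is the fixed marker from ieee80211_header below.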
3 | // 4 | 5 | #pragma once 6 | 7 | #include 8 | #include 9 | 10 | #include 11 | #include 12 | #include 13 | 14 | #undef min 15 | #undef max 16 | 17 | #ifdef _WIN32 18 | 19 | inline uint32_t htobe32(uint32_t host_32bits) { 20 | // 检查主机字节序是否为小端模式 21 | uint16_t test = 0x1; 22 | bool is_little_endian = *((uint8_t *)&test) == 0x1; 23 | 24 | if (is_little_endian) { 25 | // 如果是小端字节序,则转换为大端字节序 26 | return ((host_32bits & 0x000000FF) << 24) | ((host_32bits & 0x0000FF00) << 8) | 27 | ((host_32bits & 0x00FF0000) >> 8) | ((host_32bits & 0xFF000000) >> 24); 28 | } else { 29 | // 如果已经是大端字节序,则直接返回 30 | return host_32bits; 31 | } 32 | } 33 | 34 | inline uint64_t be64toh(uint64_t big_endian_64bits) { 35 | // 如果本地字节序是小端,需要进行转换 36 | #if defined(_WIN32) || defined(_WIN64) 37 | // 如果是 Windows 平台 38 | return _byteswap_uint64(big_endian_64bits); 39 | #else 40 | // 如果是其他平台,假设是大端或者已经有对应的函数实现 41 | return big_endian_64bits; 42 | #endif 43 | } 44 | 45 | // 定义 be32toh 函数,将大端 32 位整数转换为主机字节顺序 46 | inline uint32_t be32toh(uint32_t big_endian_32bits) { 47 | // 如果本地字节序是小端,需要进行转换 48 | #if defined(_WIN32) || defined(_WIN64) 49 | // 如果是 Windows 平台,使用 _byteswap_ulong 函数 50 | return _byteswap_ulong(big_endian_32bits); 51 | #else 52 | // 如果是其他平台,假设是大端或者已经有对应的函数实现 53 | return big_endian_32bits; 54 | #endif 55 | } 56 | 57 | // 定义 be16toh 函数,将大端 16 位整数转换为主机字节顺序 58 | inline uint16_t be16toh(uint16_t big_endian_16bits) { 59 | // 如果本地字节序是小端,需要进行转换 60 | #if defined(_WIN32) || defined(_WIN64) 61 | // 如果是 Windows 平台,使用 _byteswap_ushort 函数 62 | return _byteswap_ushort(big_endian_16bits); 63 | #else 64 | // 如果是其他平台,假设是大端或者已经有对应的函数实现 65 | return big_endian_16bits; 66 | #endif 67 | } 68 | 69 | #endif 70 | 71 | static uint8_t ieee80211_header[] = { 72 | 0x08, 0x01, 0x00, 0x00, // data frame, not protected, from STA to DS via an AP, duration not set 73 | 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // receiver is broadcast 74 | 0x57, 0x42, 0xaa, 0xbb, 0xcc, 0xdd, // last four bytes will be replaced by channel_id 75 | 0x57, 0x42, 0xaa, 0xbb, 0xcc, 0xdd, // last four bytes will be replaced by channel_id 76 | 0x00, 0x00, // (seq_num << 4) + fragment_num 77 | }; 78 | 79 | #define IEEE80211_RADIOTAP_MCS_HAVE_BW 0x01 80 | #define IEEE80211_RADIOTAP_MCS_HAVE_MCS 0x02 81 | #define IEEE80211_RADIOTAP_MCS_HAVE_GI 0x04 82 | #define IEEE80211_RADIOTAP_MCS_HAVE_FMT 0x08 83 | 84 | #define IEEE80211_RADIOTAP_MCS_BW_20 0 85 | #define IEEE80211_RADIOTAP_MCS_BW_40 1 86 | #define IEEE80211_RADIOTAP_MCS_BW_20L 2 87 | #define IEEE80211_RADIOTAP_MCS_BW_20U 3 88 | #define IEEE80211_RADIOTAP_MCS_SGI 0x04 89 | #define IEEE80211_RADIOTAP_MCS_FMT_GF 0x08 90 | 91 | #define IEEE80211_RADIOTAP_MCS_HAVE_FEC 0x10 92 | #define IEEE80211_RADIOTAP_MCS_HAVE_STBC 0x20 93 | #define IEEE80211_RADIOTAP_MCS_FEC_LDPC 0x10 94 | #define IEEE80211_RADIOTAP_MCS_STBC_MASK 0x60 95 | #define IEEE80211_RADIOTAP_MCS_STBC_1 1 96 | #define IEEE80211_RADIOTAP_MCS_STBC_2 2 97 | #define IEEE80211_RADIOTAP_MCS_STBC_3 3 98 | #define IEEE80211_RADIOTAP_MCS_STBC_SHIFT 5 99 | 100 | #define MCS_KNOWN \ 101 | (IEEE80211_RADIOTAP_MCS_HAVE_MCS | IEEE80211_RADIOTAP_MCS_HAVE_BW | IEEE80211_RADIOTAP_MCS_HAVE_GI | \ 102 | IEEE80211_RADIOTAP_MCS_HAVE_STBC | IEEE80211_RADIOTAP_MCS_HAVE_FEC) 103 | 104 | static uint8_t radiotap_header[] __attribute__((unused)) = { 105 | 0x00, 106 | 0x00, // <-- radiotap version 107 | 0x0d, 108 | 0x00, // <- radiotap header length 109 | 0x00, 110 | 0x80, 111 | 0x08, 112 | 0x00, // <-- radiotap present flags: RADIOTAP_TX_FLAGS + RADIOTAP_MCS 113 | 0x08, 114 | 0x00, // RADIOTAP_F_TX_NOACK 115 
| MCS_KNOWN, 116 | 0x00, 117 | 0x00 // bitmap, flags, mcs_index 118 | }; 119 | 120 | typedef struct { 121 | uint64_t block_idx; 122 | uint8_t **fragments; 123 | uint8_t *fragment_map; 124 | uint8_t fragment_to_send_idx; 125 | uint8_t has_fragments; 126 | } rx_ring_item_t; 127 | 128 | static inline int modN(int x, int base) { 129 | return (base + (x % base)) % base; 130 | } 131 | 132 | class antennaItem { 133 | public: 134 | antennaItem(void) : count_all(0), rssi_sum(0), rssi_min(0), rssi_max(0) {} 135 | 136 | void log_rssi(int8_t rssi) { 137 | if (count_all == 0) { 138 | rssi_min = rssi; 139 | rssi_max = rssi; 140 | } else { 141 | rssi_min = std::min(rssi, rssi_min); 142 | rssi_max = std::max(rssi, rssi_max); 143 | } 144 | rssi_sum += rssi; 145 | count_all += 1; 146 | } 147 | 148 | int32_t count_all; 149 | int32_t rssi_sum; 150 | int8_t rssi_min; 151 | int8_t rssi_max; 152 | }; 153 | 154 | typedef std::unordered_map antenna_stat_t; 155 | 156 | #define RX_RING_SIZE 40 157 | 158 | #pragma pack(push, 1) 159 | typedef struct { 160 | uint8_t packet_type; 161 | uint8_t session_nonce[crypto_box_NONCEBYTES]; // random data 162 | } wsession_hdr_t; 163 | #pragma pack(pop) 164 | 165 | #pragma pack(push, 1) 166 | typedef struct { 167 | uint64_t epoch; // Drop session packets from old epoch 168 | uint32_t channel_id; // (link_id << 8) + port_number 169 | uint8_t fec_type; // Now only supported type is WFB_FEC_VDM_RS 170 | uint8_t k; // FEC k 171 | uint8_t n; // FEC n 172 | uint8_t session_key[crypto_aead_chacha20poly1305_KEYBYTES]; 173 | } wsession_data_t; 174 | #pragma pack(pop) 175 | 176 | // Data packet. Embed FEC-encoded data 177 | #pragma pack(push, 1) 178 | typedef struct { 179 | uint8_t packet_type; 180 | uint64_t data_nonce; // big endian, data_nonce = (block_idx << 8) + fragment_idx 181 | } wblock_hdr_t; 182 | #pragma pack(pop) 183 | 184 | // Plain data packet after FEC decode 185 | #pragma pack(push, 1) 186 | typedef struct { 187 | uint8_t flags; 188 | uint16_t packet_size; // big endian 189 | } wpacket_hdr_t; 190 | #pragma pack(pop) 191 | 192 | #define MAX_PAYLOAD_SIZE \ 193 | (MAX_PACKET_SIZE - sizeof(radiotap_header) - sizeof(ieee80211_header) - sizeof(wblock_hdr_t) - \ 194 | crypto_aead_chacha20poly1305_ABYTES - sizeof(wpacket_hdr_t)) 195 | #define MAX_FEC_PAYLOAD \ 196 | (MAX_PACKET_SIZE - sizeof(radiotap_header) - sizeof(ieee80211_header) - sizeof(wblock_hdr_t) - \ 197 | crypto_aead_chacha20poly1305_ABYTES) 198 | #define MAX_PACKET_SIZE 1510 199 | #define MAX_FORWARDER_PACKET_SIZE (MAX_PACKET_SIZE - sizeof(radiotap_header) - sizeof(ieee80211_header)) 200 | 201 | #define BLOCK_IDX_MASK ((1LLU << 56) - 1) 202 | #define MAX_BLOCK_IDX ((1LLU << 55) - 1) 203 | 204 | // packet types 205 | #define WFB_PACKET_DATA 0x1 206 | #define WFB_PACKET_KEY 0x2 207 | 208 | // FEC types 209 | #define WFB_FEC_VDM_RS 0x1 // Reed-Solomon on Vandermonde matrix 210 | 211 | // packet flags 212 | #define WFB_PACKET_FEC_ONLY 0x1 213 | 214 | #define SESSION_KEY_ANNOUNCE_MSEC 1000 215 | #define RX_ANT_MAX 4 216 | -------------------------------------------------------------------------------- /src/wifi/WfbProcessor.cpp: -------------------------------------------------------------------------------- 1 | // 2 | // Created by Talus on 2024/6/12. 
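// Added overview (illustrative, not part of the original source): the Aggregator below
// handles the two packet types defined in WfbDefine.h:
//
//   WFB_PACKET_KEY  - wsession_hdr_t followed by a crypto_box-sealed wsession_data_t that
//                     carries the epoch, channel_id, FEC parameters (k, n) and session key.
//   WFB_PACKET_DATA - wblock_hdr_t followed by ChaCha20-Poly1305 ciphertext; the 64-bit
//                     data_nonce packs the position as (block_idx << 8) | fragment_idx,
//                     e.g. block 1000, fragment 3 gives nonce 1000 * 256 + 3 = 256003.
//
// Fragments are slotted into rx_ring by block, and missing data fragments of a block are
// reconstructed with Reed-Solomon (fec_decode) once any k of its n fragments have arrived.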
3 | // 4 | 5 | #include "WfbProcessor.h" 6 | 7 | #include 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | 14 | Aggregator::Aggregator(const std::string &keypair, uint64_t epoch, uint32_t channel_id, const DataCB &cb) 15 | : fec_p(NULL), fec_k(-1), fec_n(-1), seq(0), rx_ring_front(0), rx_ring_alloc(0), last_known_block((uint64_t)-1), 16 | epoch(epoch), channel_id(channel_id), count_p_all(0), count_p_dec_err(0), count_p_dec_ok(0), 17 | count_p_fec_recovered(0), count_p_lost(0), count_p_bad(0), count_p_override(0), dcb(cb) { 18 | memset(session_key, '\0', sizeof(session_key)); 19 | 20 | FILE *fp; 21 | if ((fp = fopen(keypair.c_str(), "rb")) == NULL) { 22 | throw std::runtime_error(std::format("Unable to open {}: {}", keypair.c_str(), strerror(errno))); 23 | } 24 | if (fread(rx_secretkey, crypto_box_SECRETKEYBYTES, 1, fp) != 1) { 25 | fclose(fp); 26 | throw std::runtime_error(std::format("Unable to read rx secret key: {}", strerror(errno))); 27 | } 28 | if (fread(tx_publickey, crypto_box_PUBLICKEYBYTES, 1, fp) != 1) { 29 | fclose(fp); 30 | throw std::runtime_error(std::format("Unable to read tx public key: {}", strerror(errno))); 31 | } 32 | fclose(fp); 33 | } 34 | 35 | Aggregator::~Aggregator() { 36 | if (fec_p != NULL) { 37 | deinit_fec(); 38 | } 39 | } 40 | 41 | void Aggregator::init_fec(int k, int n) { 42 | fec_k = k; 43 | fec_n = n; 44 | fec_p = fec_new(fec_k, fec_n); 45 | 46 | rx_ring_front = 0; 47 | rx_ring_alloc = 0; 48 | last_known_block = (uint64_t)-1; 49 | seq = 0; 50 | 51 | for (int ring_idx = 0; ring_idx < RX_RING_SIZE; ring_idx++) { 52 | rx_ring[ring_idx].block_idx = 0; 53 | rx_ring[ring_idx].fragment_to_send_idx = 0; 54 | rx_ring[ring_idx].has_fragments = 0; 55 | rx_ring[ring_idx].fragments = new uint8_t *[fec_n]; 56 | for (int i = 0; i < fec_n; i++) { 57 | rx_ring[ring_idx].fragments[i] = new uint8_t[MAX_FEC_PAYLOAD]; 58 | } 59 | rx_ring[ring_idx].fragment_map = new uint8_t[fec_n]; 60 | memset(rx_ring[ring_idx].fragment_map, '\0', fec_n * sizeof(uint8_t)); 61 | } 62 | } 63 | 64 | void Aggregator::deinit_fec() { 65 | for (int ring_idx = 0; ring_idx < RX_RING_SIZE; ring_idx++) { 66 | delete[] rx_ring[ring_idx].fragment_map; 67 | for (int i = 0; i < fec_n; i++) { 68 | delete[] rx_ring[ring_idx].fragments[i]; 69 | } 70 | delete[] rx_ring[ring_idx].fragments; 71 | } 72 | 73 | fec_free(fec_p); 74 | fec_p = NULL; 75 | fec_k = -1; 76 | fec_n = -1; 77 | } 78 | 79 | int Aggregator::rx_ring_push() { 80 | if (rx_ring_alloc < RX_RING_SIZE) { 81 | int idx = modN(rx_ring_front + rx_ring_alloc, RX_RING_SIZE); 82 | rx_ring_alloc += 1; 83 | return idx; 84 | } 85 | 86 | /* 87 | Ring overflow. This means that there are more unfinished blocks than ring size 88 | Possible solutions: 89 | 1. Increase ring size. Do this if you have large variance of packet travel time throught WiFi card or network 90 | stack. Some cards can do this due to packet reordering inside, diffent chipset and/or firmware or your RX hosts 91 | have different CPU power. 92 | 2. Reduce packet injection speed or try to unify RX hardware. 
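       Added note (not part of the original comment): the ring itself is a fixed array of
       RX_RING_SIZE slots addressed circularly - the live region starts at rx_ring_front and
       spans rx_ring_alloc entries, so entry i of that region lives in slot
       modN(rx_ring_front + i, RX_RING_SIZE). For example, with RX_RING_SIZE = 40,
       rx_ring_front = 38 and rx_ring_alloc = 3, the live slots are 38, 39 and 0, and the
       next rx_ring_push() would hand out slot 1.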
93 | */ 94 | 95 | #if 0 96 | fprintf(stderr, "Override block 0x%" PRIx64 " flush %d fragments\n", rx_ring[rx_ring_front].block_idx, rx_ring[rx_ring_front].has_fragments); 97 | #endif 98 | 99 | count_p_override += 1; 100 | 101 | for (int f_idx = rx_ring[rx_ring_front].fragment_to_send_idx; f_idx < fec_k; f_idx++) { 102 | if (rx_ring[rx_ring_front].fragment_map[f_idx]) { 103 | send_packet(rx_ring_front, f_idx); 104 | } 105 | } 106 | 107 | // override last item in ring 108 | int ring_idx = rx_ring_front; 109 | rx_ring_front = modN(rx_ring_front + 1, RX_RING_SIZE); 110 | return ring_idx; 111 | } 112 | 113 | int Aggregator::get_block_ring_idx(uint64_t block_idx) { 114 | // check if block is already in the ring 115 | for (int i = rx_ring_front, c = rx_ring_alloc; c > 0; i = modN(i + 1, RX_RING_SIZE), c--) { 116 | if (rx_ring[i].block_idx == block_idx) return i; 117 | } 118 | 119 | // check if block is already known and not in the ring then it is already processed 120 | if (last_known_block != (uint64_t)-1 && block_idx <= last_known_block) { 121 | return -1; 122 | } 123 | 124 | int new_blocks = 125 | (int)std::min(last_known_block != (uint64_t)-1 ? block_idx - last_known_block : 1, (uint64_t)RX_RING_SIZE); 126 | assert(new_blocks > 0); 127 | 128 | last_known_block = block_idx; 129 | int ring_idx = -1; 130 | 131 | for (int i = 0; i < new_blocks; i++) { 132 | ring_idx = rx_ring_push(); 133 | rx_ring[ring_idx].block_idx = block_idx + i + 1 - new_blocks; 134 | rx_ring[ring_idx].fragment_to_send_idx = 0; 135 | rx_ring[ring_idx].has_fragments = 0; 136 | memset(rx_ring[ring_idx].fragment_map, '\0', fec_n * sizeof(uint8_t)); 137 | } 138 | return ring_idx; 139 | } 140 | 141 | void Aggregator::process_packet(const uint8_t *buf, 142 | size_t size, 143 | uint8_t wlan_idx, 144 | const uint8_t *antenna, 145 | const int8_t *rssi) { 146 | wsession_data_t new_session_data; 147 | count_p_all += 1; 148 | 149 | if (size == 0) return; 150 | 151 | if (size > MAX_FORWARDER_PACKET_SIZE) { 152 | fprintf(stderr, "Long packet (fec payload)\n"); 153 | count_p_bad += 1; 154 | return; 155 | } 156 | 157 | switch (buf[0]) { 158 | case WFB_PACKET_DATA: 159 | if (size < sizeof(wblock_hdr_t) + sizeof(wpacket_hdr_t)) { 160 | fprintf(stderr, "Short packet (fec header)\n"); 161 | count_p_bad += 1; 162 | return; 163 | } 164 | break; 165 | 166 | case WFB_PACKET_KEY: 167 | if (size != sizeof(wsession_hdr_t) + sizeof(wsession_data_t) + crypto_box_MACBYTES) { 168 | fprintf(stderr, "Invalid session key packet\n"); 169 | count_p_bad += 1; 170 | return; 171 | } 172 | 173 | if (crypto_box_open_easy((uint8_t *)&new_session_data, 174 | buf + sizeof(wsession_hdr_t), 175 | sizeof(wsession_data_t) + crypto_box_MACBYTES, 176 | ((wsession_hdr_t *)buf)->session_nonce, 177 | tx_publickey, 178 | rx_secretkey) != 0) { 179 | fprintf(stderr, "Unable to decrypt session key\n"); 180 | count_p_dec_err += 1; 181 | return; 182 | } 183 | 184 | if (be64toh(new_session_data.epoch) < epoch) { 185 | fprintf(stderr, 186 | "Session epoch doesn't match: %" PRIu64 " < %" PRIu64 "\n", 187 | be64toh(new_session_data.epoch), 188 | epoch); 189 | count_p_dec_err += 1; 190 | return; 191 | } 192 | 193 | if (be32toh(new_session_data.channel_id) != channel_id) { 194 | fprintf(stderr, 195 | "Session channel_id doesn't match: %d != %d\n", 196 | be32toh(new_session_data.channel_id), 197 | channel_id); 198 | count_p_dec_err += 1; 199 | return; 200 | } 201 | 202 | if (new_session_data.fec_type != WFB_FEC_VDM_RS) { 203 | fprintf(stderr, "Unsupported FEC codec type: %d\n", 
new_session_data.fec_type); 204 | count_p_dec_err += 1; 205 | return; 206 | } 207 | 208 | if (new_session_data.n < 1) { 209 | fprintf(stderr, "Invalid FEC N: %d\n", new_session_data.n); 210 | count_p_dec_err += 1; 211 | return; 212 | } 213 | 214 | if (new_session_data.k < 1 || new_session_data.k > new_session_data.n) { 215 | fprintf(stderr, "Invalid FEC K: %d\n", new_session_data.k); 216 | count_p_dec_err += 1; 217 | return; 218 | } 219 | 220 | count_p_dec_ok += 1; 221 | 222 | if (memcmp(session_key, new_session_data.session_key, sizeof(session_key)) != 0) { 223 | epoch = be64toh(new_session_data.epoch); 224 | memcpy(session_key, new_session_data.session_key, sizeof(session_key)); 225 | 226 | if (fec_p != NULL) { 227 | deinit_fec(); 228 | } 229 | 230 | init_fec(new_session_data.k, new_session_data.n); 231 | 232 | fflush(stdout); 233 | } 234 | return; 235 | 236 | default: 237 | fprintf(stderr, "Unknown packet type 0x%x\n", buf[0]); 238 | count_p_bad += 1; 239 | return; 240 | } 241 | 242 | uint8_t decrypted[MAX_FEC_PAYLOAD]; 243 | long long unsigned int decrypted_len; 244 | wblock_hdr_t *block_hdr = (wblock_hdr_t *)buf; 245 | 246 | if (crypto_aead_chacha20poly1305_decrypt(decrypted, 247 | &decrypted_len, 248 | NULL, 249 | buf + sizeof(wblock_hdr_t), 250 | size - sizeof(wblock_hdr_t), 251 | buf, 252 | sizeof(wblock_hdr_t), 253 | (uint8_t *)(&(block_hdr->data_nonce)), 254 | session_key) != 0) { 255 | fprintf(stderr, "Unable to decrypt packet #0x%" PRIx64 "\n", be64toh(block_hdr->data_nonce)); 256 | count_p_dec_err += 1; 257 | return; 258 | } 259 | 260 | count_p_dec_ok += 1; 261 | 262 | assert(decrypted_len <= MAX_FEC_PAYLOAD); 263 | 264 | uint64_t block_idx = be64toh(block_hdr->data_nonce) >> 8; 265 | uint8_t fragment_idx = (uint8_t)(be64toh(block_hdr->data_nonce) & 0xff); 266 | 267 | // Should never happen due to generating new session key on tx side 268 | if (block_idx > MAX_BLOCK_IDX) { 269 | fprintf(stderr, "block_idx overflow\n"); 270 | count_p_bad += 1; 271 | return; 272 | } 273 | 274 | if (fragment_idx >= fec_n) { 275 | fprintf(stderr, "Invalid fragment_idx: %d\n", fragment_idx); 276 | count_p_bad += 1; 277 | return; 278 | } 279 | 280 | int ring_idx = get_block_ring_idx(block_idx); 281 | 282 | // ignore already processed blocks 283 | if (ring_idx < 0) return; 284 | 285 | rx_ring_item_t *p = &rx_ring[ring_idx]; 286 | 287 | // ignore already processed fragments 288 | if (p->fragment_map[fragment_idx]) return; 289 | 290 | memset(p->fragments[fragment_idx], '\0', MAX_FEC_PAYLOAD); 291 | memcpy(p->fragments[fragment_idx], decrypted, decrypted_len); 292 | 293 | p->fragment_map[fragment_idx] = 1; 294 | p->has_fragments += 1; 295 | 296 | // Check if we use current (oldest) block 297 | // then we can optimize and don't wait for all K fragments 298 | // and send packets if there are no gaps in fragments from the beginning of this block 299 | if (ring_idx == rx_ring_front) { 300 | // check if any packets without gaps 301 | while (p->fragment_to_send_idx < fec_k && p->fragment_map[p->fragment_to_send_idx]) { 302 | send_packet(ring_idx, p->fragment_to_send_idx); 303 | p->fragment_to_send_idx += 1; 304 | } 305 | 306 | // remove block if full 307 | if (p->fragment_to_send_idx == fec_k) { 308 | rx_ring_front = modN(rx_ring_front + 1, RX_RING_SIZE); 309 | rx_ring_alloc -= 1; 310 | assert(rx_ring_alloc >= 0); 311 | return; 312 | } 313 | } 314 | 315 | // 1. This is not the oldest block but with sufficient number of fragments (K) to decode 316 | // 2. 
This is the oldest block but with gaps and total number of fragments is K 317 | if (p->fragment_to_send_idx < fec_k && p->has_fragments == fec_k) { 318 | // send all queued packets in all unfinished blocks before and remove them 319 | int nrm = modN(ring_idx - rx_ring_front, RX_RING_SIZE); 320 | 321 | while (nrm > 0) { 322 | for (int f_idx = rx_ring[rx_ring_front].fragment_to_send_idx; f_idx < fec_k; f_idx++) { 323 | if (rx_ring[rx_ring_front].fragment_map[f_idx]) { 324 | send_packet(rx_ring_front, f_idx); 325 | } 326 | } 327 | rx_ring_front = modN(rx_ring_front + 1, RX_RING_SIZE); 328 | rx_ring_alloc -= 1; 329 | nrm -= 1; 330 | } 331 | 332 | assert(rx_ring_alloc > 0); 333 | assert(ring_idx == rx_ring_front); 334 | 335 | // Search for missed data fragments and apply FEC only if needed 336 | for (int f_idx = p->fragment_to_send_idx; f_idx < fec_k; f_idx++) { 337 | if (!p->fragment_map[f_idx]) { 338 | // Recover missed fragments using FEC 339 | apply_fec(ring_idx); 340 | 341 | // Count total number of recovered fragments 342 | for (; f_idx < fec_k; f_idx++) { 343 | if (!p->fragment_map[f_idx]) { 344 | count_p_fec_recovered += 1; 345 | } 346 | } 347 | break; 348 | } 349 | } 350 | 351 | while (p->fragment_to_send_idx < fec_k) { 352 | send_packet(ring_idx, p->fragment_to_send_idx); 353 | p->fragment_to_send_idx += 1; 354 | } 355 | 356 | // remove block 357 | rx_ring_front = modN(rx_ring_front + 1, RX_RING_SIZE); 358 | rx_ring_alloc -= 1; 359 | assert(rx_ring_alloc >= 0); 360 | } 361 | } 362 | 363 | void Aggregator::send_packet(int ring_idx, int fragment_idx) { 364 | wpacket_hdr_t *packet_hdr = (wpacket_hdr_t *)(rx_ring[ring_idx].fragments[fragment_idx]); 365 | uint8_t *payload = (rx_ring[ring_idx].fragments[fragment_idx]) + sizeof(wpacket_hdr_t); 366 | uint8_t flags = packet_hdr->flags; 367 | uint16_t packet_size = be16toh(packet_hdr->packet_size); 368 | uint32_t packet_seq = rx_ring[ring_idx].block_idx * fec_k + fragment_idx; 369 | 370 | if (packet_seq > seq + 1 && seq > 0) { 371 | count_p_lost += (packet_seq - seq - 1); 372 | } 373 | 374 | seq = packet_seq; 375 | 376 | if (packet_size > MAX_PAYLOAD_SIZE) { 377 | fprintf(stderr, "Corrupted packet %u\n", seq); 378 | count_p_bad += 1; 379 | } else if (!(flags & WFB_PACKET_FEC_ONLY)) { 380 | // WfbReceiver::handleRtp 381 | if (dcb) { 382 | dcb(payload, packet_size); 383 | } 384 | } 385 | } 386 | 387 | void Aggregator::apply_fec(int ring_idx) { 388 | assert(fec_k >= 1); 389 | assert(fec_n >= 1); 390 | assert(fec_k <= fec_n); 391 | assert(fec_p != nullptr); 392 | 393 | // 动态分配内存 394 | unsigned *index = new unsigned[fec_k]; 395 | uint8_t **in_blocks = new uint8_t *[fec_k]; 396 | uint8_t **out_blocks = new uint8_t *[fec_n - fec_k]; 397 | int j = fec_k; 398 | int ob_idx = 0; 399 | 400 | for (int i = 0; i < fec_k; i++) { 401 | if (rx_ring[ring_idx].fragment_map[i]) { 402 | in_blocks[i] = rx_ring[ring_idx].fragments[i]; 403 | index[i] = i; 404 | } else { 405 | for (; j < fec_n; j++) { 406 | if (rx_ring[ring_idx].fragment_map[j]) { 407 | in_blocks[i] = rx_ring[ring_idx].fragments[j]; 408 | out_blocks[ob_idx++] = rx_ring[ring_idx].fragments[i]; 409 | index[i] = j; 410 | j++; 411 | break; 412 | } 413 | } 414 | } 415 | } 416 | 417 | fec_decode(fec_p, (const uint8_t **)in_blocks, out_blocks, index, MAX_FEC_PAYLOAD); 418 | 419 | // 释放动态分配的内存 420 | delete[] index; 421 | delete[] in_blocks; 422 | delete[] out_blocks; 423 | } 424 | -------------------------------------------------------------------------------- /src/wifi/WfbProcessor.h: 
-------------------------------------------------------------------------------- 1 | // 2 | // Created by Talus on 2024/6/12. 3 | // 4 | 5 | #pragma once 6 | 7 | #include 8 | 9 | #include "WfbDefine.h" 10 | 11 | extern "C" { 12 | #include "fec.h" 13 | } 14 | 15 | class BaseAggregator { 16 | public: 17 | virtual ~BaseAggregator() = default; 18 | 19 | virtual void process_packet(const uint8_t *buf, 20 | size_t size, 21 | uint8_t wlan_idx, 22 | const uint8_t *antenna, 23 | const int8_t *rssi) = 0; 24 | }; 25 | 26 | class Aggregator : public BaseAggregator { 27 | public: 28 | using DataCB = std::function; 29 | Aggregator(const std::string &keypair, uint64_t epoch, uint32_t channel_id, const DataCB &cb = nullptr); 30 | ~Aggregator() override; 31 | void process_packet(const uint8_t *buf, 32 | size_t size, 33 | uint8_t wlan_idx, 34 | const uint8_t *antenna, 35 | const int8_t *rssi) override; 36 | 37 | private: 38 | void init_fec(int k, int n); 39 | void deinit_fec(); 40 | void send_packet(int ring_idx, int fragment_idx); 41 | void apply_fec(int ring_idx); 42 | int get_block_ring_idx(uint64_t block_idx); 43 | int rx_ring_push(); 44 | fec_t *fec_p; 45 | int fec_k; // RS number of primary fragments in block 46 | int fec_n; // RS total number of fragments in block 47 | int sockfd; 48 | uint32_t seq; 49 | rx_ring_item_t rx_ring[RX_RING_SIZE]; 50 | int rx_ring_front; // current packet 51 | int rx_ring_alloc; // number of allocated entries 52 | uint64_t last_known_block; // id of last known block 53 | uint64_t epoch; // current epoch 54 | const uint32_t channel_id; // (link_id << 8) + port_number 55 | 56 | // rx->tx keypair 57 | uint8_t rx_secretkey[crypto_box_SECRETKEYBYTES]; 58 | uint8_t tx_publickey[crypto_box_PUBLICKEYBYTES]; 59 | uint8_t session_key[crypto_aead_chacha20poly1305_KEYBYTES]; 60 | 61 | antenna_stat_t antenna_stat; 62 | uint32_t count_p_all; 63 | uint32_t count_p_dec_err; 64 | uint32_t count_p_dec_ok; 65 | uint32_t count_p_fec_recovered; 66 | uint32_t count_p_lost; 67 | uint32_t count_p_bad; 68 | uint32_t count_p_override; 69 | // on data output 70 | DataCB dcb; 71 | }; 72 | -------------------------------------------------------------------------------- /src/wifi/WfbReceiver.cpp: -------------------------------------------------------------------------------- 1 | // 2 | // Created by Talus on 2024/6/10. 
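// Added overview (illustrative, not part of the original source): the receive path
// implemented in this file is
//
//   RTL8812 USB adapter (WiFiDriver / Rtl8812aDevice)
//     -> handle80211Frame(): RxFrame validity and channel-id checks
//     -> Aggregator::process_packet(): decryption and FEC reassembly
//     -> handleRtp(): the recovered RTP payload is forwarded over UDP to
//        127.0.0.1:playerPort, where the FFmpeg-based RealTimePlayer picks it up.
//
// The first RTP packet is also used to auto-detect H.264 vs H.265 when playerCodec is
// "AUTO": isH264() keys off NAL unit types 24 (STAP-A) and 28 (FU-A), which are specific
// to H.264 RTP packetization.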
3 | // 4 | 5 | #include "WfbReceiver.h" 6 | 7 | #include 8 | #include 9 | #include 10 | #include 11 | 12 | #include "../gui_interface.h" 13 | #include "Rtp.h" 14 | #include "RxFrame.h" 15 | #include "WfbProcessor.h" 16 | #include "WiFiDriver.h" 17 | #include "logger.h" 18 | 19 | #pragma comment(lib, "ws2_32.lib") 20 | 21 | std::vector WfbReceiver::GetDeviceList() { 22 | std::vector list; 23 | 24 | // Initialize libusb 25 | libusb_context *find_ctx; 26 | libusb_init(&find_ctx); 27 | 28 | // Get a list of USB devices 29 | libusb_device **devs; 30 | ssize_t count = libusb_get_device_list(find_ctx, &devs); 31 | if (count < 0) { 32 | return list; 33 | } 34 | 35 | // Iterate over devices 36 | for (ssize_t i = 0; i < count; ++i) { 37 | libusb_device *dev = devs[i]; 38 | 39 | libusb_device_descriptor desc{}; 40 | if (libusb_get_device_descriptor(dev, &desc) == 0) { 41 | // Check if the device is using libusb driver 42 | if (desc.bDeviceClass == LIBUSB_CLASS_PER_INTERFACE) { 43 | uint8_t bus_num = libusb_get_bus_number(dev); 44 | uint8_t port_num = libusb_get_port_number(dev); 45 | 46 | std::stringstream ss; 47 | ss << std::setw(4) << std::setfill('0') << std::hex << desc.idVendor << ":"; 48 | ss << std::setw(4) << std::setfill('0') << std::hex << desc.idProduct; 49 | ss << std::dec << " [" << (int)bus_num << ":" << (int)port_num << "]"; 50 | 51 | DeviceId dev_id = { 52 | .vendor_id = desc.idVendor, 53 | .product_id = desc.idProduct, 54 | .display_name = ss.str(), 55 | .bus_num = bus_num, 56 | .port_num = port_num, 57 | }; 58 | 59 | list.push_back(dev_id); 60 | } 61 | } 62 | } 63 | 64 | // std::sort(list.begin(), list.end(), [](std::string &a, std::string &b) { 65 | // static std::vector specialStrings = {"0b05:17d2", "0bda:8812", "0bda:881a"}; 66 | // auto itA = std::find(specialStrings.begin(), specialStrings.end(), a); 67 | // auto itB = std::find(specialStrings.begin(), specialStrings.end(), b); 68 | // if (itA != specialStrings.end() && itB == specialStrings.end()) { 69 | // return true; 70 | // } 71 | // if (itB != specialStrings.end() && itA == specialStrings.end()) { 72 | // return false; 73 | // } 74 | // return a < b; 75 | // }); 76 | 77 | // Free the list of devices 78 | libusb_free_device_list(devs, 1); 79 | 80 | // Deinitialize libusb 81 | libusb_exit(find_ctx); 82 | 83 | return list; 84 | } 85 | 86 | bool WfbReceiver::Start(const DeviceId &deviceId, uint8_t channel, int channelWidthMode, const std::string &kPath) { 87 | GuiInterface::Instance().wifiFrameCount_ = 0; 88 | GuiInterface::Instance().wfbFrameCount_ = 0; 89 | GuiInterface::Instance().rtpPktCount_ = 0; 90 | GuiInterface::Instance().UpdateCount(); 91 | 92 | keyPath = kPath; 93 | 94 | if (usbThread) { 95 | return false; 96 | } 97 | 98 | auto logger = std::make_shared(); 99 | 100 | int rc = libusb_init(&ctx); 101 | if (rc < 0) { 102 | GuiInterface::Instance().PutLog(LogLevel::Error, "Failed to initialize libusb"); 103 | return false; 104 | } 105 | 106 | libusb_set_option(ctx, LIBUSB_OPTION_LOG_LEVEL, LIBUSB_LOG_LEVEL_ERROR); 107 | 108 | // Get a list of USB devices 109 | libusb_device **devs; 110 | ssize_t count = libusb_get_device_list(ctx, &devs); 111 | if (count < 0) { 112 | return false; 113 | } 114 | 115 | libusb_device *target_dev{}; 116 | 117 | // Iterate over devices 118 | for (ssize_t i = 0; i < count; ++i) { 119 | libusb_device *dev = devs[i]; 120 | libusb_device_descriptor desc{}; 121 | if (libusb_get_device_descriptor(dev, &desc) == 0) { 122 | // Check if the device is using libusb driver 123 | if (desc.bDeviceClass 
== LIBUSB_CLASS_PER_INTERFACE) { 124 | int bus_num = libusb_get_bus_number(dev); 125 | int port_num = libusb_get_port_number(dev); 126 | 127 | if (desc.idVendor == deviceId.vendor_id && desc.idProduct == deviceId.product_id && 128 | bus_num == deviceId.bus_num && port_num == deviceId.port_num) { 129 | target_dev = dev; 130 | } 131 | } 132 | } 133 | } 134 | 135 | if (!target_dev) { 136 | GuiInterface::Instance().PutLog(LogLevel::Error, "Invalid device ID!"); 137 | // Free the list of devices 138 | libusb_free_device_list(devs, 1); 139 | libusb_exit(ctx); 140 | ctx = nullptr; 141 | return false; 142 | } 143 | 144 | // This cannot handle multiple devices with the same vendor_id and product_id. 145 | // devHandle = libusb_open_device_with_vid_pid(ctx, wifiDeviceVid, wifiDevicePid); 146 | libusb_open(target_dev, &devHandle); 147 | 148 | // Free the list of devices 149 | libusb_free_device_list(devs, 1); 150 | 151 | if (devHandle == nullptr) { 152 | libusb_exit(ctx); 153 | ctx = nullptr; 154 | 155 | GuiInterface::Instance().PutLog(LogLevel::Error, 156 | "Cannot open device {:04x}:{:04x} at [{:}:{:}]", 157 | deviceId.vendor_id, 158 | deviceId.product_id, 159 | deviceId.bus_num, 160 | deviceId.port_num); 161 | GuiInterface::Instance().ShowTip(FTR("invalid usb msg")); 162 | return false; 163 | } 164 | 165 | // Check if the kernel driver attached 166 | if (libusb_kernel_driver_active(devHandle, 0)) { 167 | // Detach driver 168 | rc = libusb_detach_kernel_driver(devHandle, 0); 169 | } 170 | 171 | rc = libusb_claim_interface(devHandle, 0); 172 | if (rc < 0) { 173 | libusb_close(devHandle); 174 | devHandle = nullptr; 175 | 176 | libusb_exit(ctx); 177 | ctx = nullptr; 178 | 179 | GuiInterface::Instance().PutLog(LogLevel::Error, "Failed to claim interface"); 180 | return false; 181 | } 182 | 183 | usbThread = std::make_shared([=, this]() { 184 | WiFiDriver wifi_driver{logger}; 185 | try { 186 | rtlDevice = wifi_driver.CreateRtlDevice(devHandle); 187 | rtlDevice->Init( 188 | [](const Packet &p) { 189 | Instance().handle80211Frame(p); 190 | GuiInterface::Instance().UpdateCount(); 191 | }, 192 | SelectedChannel{ 193 | .Channel = channel, 194 | .ChannelOffset = 0, 195 | .ChannelWidth = static_cast(channelWidthMode), 196 | }); 197 | } catch (const std::runtime_error &e) { 198 | GuiInterface::Instance().PutLog(LogLevel::Error, e.what()); 199 | } catch (...) 
{ 200 | } 201 | 202 | auto rc1 = libusb_release_interface(devHandle, 0); 203 | if (rc1 < 0) { 204 | GuiInterface::Instance().PutLog(LogLevel::Error, "Failed to release interface"); 205 | } 206 | 207 | GuiInterface::Instance().PutLog(LogLevel::Info, "USB thread stopped"); 208 | 209 | libusb_close(devHandle); 210 | libusb_exit(ctx); 211 | 212 | devHandle = nullptr; 213 | ctx = nullptr; 214 | 215 | usbThread.reset(); 216 | 217 | GuiInterface::Instance().EmitWifiStopped(); 218 | }); 219 | usbThread->detach(); 220 | 221 | return true; 222 | } 223 | 224 | void WfbReceiver::handle80211Frame(const Packet &packet) { 225 | GuiInterface::Instance().wifiFrameCount_++; 226 | GuiInterface::Instance().UpdateCount(); 227 | 228 | RxFrame frame(packet.Data); 229 | if (!frame.IsValidWfbFrame()) { 230 | return; 231 | } 232 | 233 | GuiInterface::Instance().wfbFrameCount_++; 234 | GuiInterface::Instance().UpdateCount(); 235 | 236 | static int8_t rssi[2] = {1, 1}; 237 | static uint8_t antenna[4] = {1, 1, 1, 1}; 238 | 239 | memcpy(GuiInterface::Instance().rx_status_.rssi, packet.RxAtrib.rssi, sizeof(int8_t) * 2); 240 | memcpy(GuiInterface::Instance().rx_status_.snr, packet.RxAtrib.snr, sizeof(int8_t) * 2); 241 | 242 | static uint32_t link_id = 7669206; // sha1 hash of link_domain="default" 243 | static uint8_t video_radio_port = 0; 244 | static uint64_t epoch = 0; 245 | 246 | static uint32_t video_channel_id_f = (link_id << 8) + video_radio_port; 247 | static uint32_t video_channel_id_be = htobe32(video_channel_id_f); 248 | 249 | static auto *video_channel_id_be8 = reinterpret_cast(&video_channel_id_be); 250 | 251 | static std::mutex agg_mutex; 252 | static std::unique_ptr video_aggregator = std::make_unique( 253 | keyPath.c_str(), 254 | epoch, 255 | video_channel_id_f, 256 | [](uint8_t *payload, uint16_t packet_size) { Instance().handleRtp(payload, packet_size); }); 257 | 258 | std::lock_guard lock(agg_mutex); 259 | if (frame.MatchesChannelID(video_channel_id_be8)) { 260 | video_aggregator->process_packet(packet.Data.data() + sizeof(ieee80211_header), 261 | packet.Data.size() - sizeof(ieee80211_header) - 4, 262 | 0, 263 | antenna, 264 | rssi); 265 | } else { 266 | int a = 1; 267 | } 268 | } 269 | 270 | #ifdef __linux__ 271 | #define INVALID_SOCKET (-1) 272 | #endif 273 | 274 | static int socketFd = INVALID_SOCKET; 275 | static volatile bool playing = false; 276 | 277 | #define GET_H264_NAL_UNIT_TYPE(buffer_ptr) (buffer_ptr[0] & 0x1F) 278 | 279 | inline bool isH264(const uint8_t *data) { 280 | auto h264NalType = GET_H264_NAL_UNIT_TYPE(data); 281 | return h264NalType == 24 || h264NalType == 28; 282 | } 283 | 284 | void WfbReceiver::handleRtp(uint8_t *payload, uint16_t packet_size) { 285 | GuiInterface::Instance().rtpPktCount_++; 286 | GuiInterface::Instance().UpdateCount(); 287 | 288 | if (rtlDevice->should_stop) { 289 | return; 290 | } 291 | if (packet_size < 12) { 292 | return; 293 | } 294 | 295 | sockaddr_in serverAddr{}; 296 | serverAddr.sin_family = AF_INET; 297 | serverAddr.sin_port = htons(GuiInterface::Instance().playerPort); 298 | serverAddr.sin_addr.s_addr = inet_addr("127.0.0.1"); 299 | 300 | auto *header = (RtpHeader *)payload; 301 | 302 | if (!playing) { 303 | playing = true; 304 | if (GuiInterface::Instance().playerCodec == "AUTO") { 305 | // Check H264 or h265 306 | if (isH264(header->getPayloadData())) { 307 | GuiInterface::Instance().playerCodec = "H264"; 308 | } else { 309 | GuiInterface::Instance().playerCodec = "H265"; 310 | } 311 | GuiInterface::Instance().PutLog(LogLevel::Debug, "Check codec 
" + GuiInterface::Instance().playerCodec); 312 | } 313 | GuiInterface::Instance().NotifyRtpStream(header->pt, ntohl(header->ssrc)); 314 | } 315 | 316 | // Send payload via socket. 317 | sendto(socketFd, 318 | reinterpret_cast(payload), 319 | packet_size, 320 | 0, 321 | (sockaddr *)&serverAddr, 322 | sizeof(serverAddr)); 323 | } 324 | 325 | void WfbReceiver::Stop() const { 326 | playing = false; 327 | if (rtlDevice) { 328 | rtlDevice->should_stop = true; 329 | } 330 | } 331 | 332 | WfbReceiver::WfbReceiver() { 333 | #ifdef _WIN32 334 | WSADATA wsaData; 335 | if (WSAStartup(MAKEWORD(2, 2), &wsaData) != 0) { 336 | GuiInterface::Instance().PutLog(LogLevel::Error, "WSAStartup failed"); 337 | return; 338 | } 339 | #endif 340 | 341 | socketFd = socket(AF_INET, SOCK_DGRAM, 0); 342 | } 343 | 344 | WfbReceiver::~WfbReceiver() { 345 | #ifdef _WIN32 346 | closesocket(socketFd); 347 | socketFd = INVALID_SOCKET; 348 | WSACleanup(); 349 | #endif 350 | 351 | Stop(); 352 | } 353 | -------------------------------------------------------------------------------- /src/wifi/WfbReceiver.h: -------------------------------------------------------------------------------- 1 | // 2 | // Created by Talus on 2024/6/10. 3 | // 4 | 5 | #pragma once 6 | 7 | #ifdef _WIN32 8 | #include 9 | #else 10 | #include 11 | #endif 12 | #include 13 | #include 14 | #include 15 | 16 | #include "FrameParser.h" 17 | #include "Rtl8812aDevice.h" 18 | 19 | struct DeviceId { 20 | uint16_t vendor_id; 21 | uint16_t product_id; 22 | std::string display_name; 23 | uint8_t bus_num; 24 | uint8_t port_num; 25 | }; 26 | 27 | /// Receive packets from an adapter. 28 | class WfbReceiver { 29 | public: 30 | WfbReceiver(); 31 | ~WfbReceiver(); 32 | 33 | static WfbReceiver &Instance() { 34 | static WfbReceiver wfb_receiver; 35 | return wfb_receiver; 36 | } 37 | 38 | static std::vector GetDeviceList(); 39 | 40 | bool Start(const DeviceId &deviceId, uint8_t channel, int channelWidth, const std::string &keyPath); 41 | void Stop() const; 42 | 43 | /// Process a 802.11 frame 44 | void handle80211Frame(const Packet &pkt); 45 | 46 | /// Send a RTP payload via socket. 47 | void handleRtp(uint8_t *payload, uint16_t packet_size); 48 | 49 | protected: 50 | libusb_context *ctx{}; 51 | libusb_device_handle *devHandle{}; 52 | std::shared_ptr usbThread; 53 | std::unique_ptr rtlDevice; 54 | std::string keyPath; 55 | }; 56 | -------------------------------------------------------------------------------- /src/wifi/fec.h: -------------------------------------------------------------------------------- 1 | #pragma once 2 | 3 | /** 4 | * zfec -- fast forward error correction library with Python interface 5 | * https://tahoe-lafs.org/trac/zfec/ 6 | 7 | This package implements an "erasure code", or "forward error correction code". 8 | You may use this package under the GNU General Public License, version 2 or, at your option, any later version. 9 | */ 10 | 11 | #include 12 | 13 | typedef unsigned char gf; 14 | 15 | typedef struct { 16 | unsigned long magic; 17 | unsigned short k, n; /* parameters of the code */ 18 | gf *enc_matrix; 19 | } fec_t; 20 | 21 | #if defined(__clang__) 22 | #define restrict __restrict 23 | #elif defined(_MSC_VER) 24 | // actually, some of the flavors (i.e. 
25 | // #define restrict __restrict
26 | #define restrict
27 | #endif
28 | 
29 | /**
30 |  * @param k the number of blocks required to reconstruct
31 |  * @param m the total number of blocks created
32 |  */
33 | fec_t *fec_new(unsigned short k, unsigned short m);
34 | void fec_free(fec_t *p);
35 | 
36 | /**
37 |  * Produce the secondary (check) blocks for a set of primary blocks.
38 |  * @param code the codec created by fec_new()
39 |  * @param src the "primary blocks", i.e. the k chunks of the input data
40 |  * @param fecs buffers into which the check blocks (the ids >= k) will be written; all m - k of them are produced
41 |  * and stored into the buffers of the fecs parameter
42 |  * @param sz size of a packet in bytes
43 |  */
44 | void fec_encode(const fec_t *code, const gf **src, gf **fecs, size_t sz);
45 | 
46 | /**
47 |  * @param inpkts an array of packets (size k); if a primary block, i, is present then it must be at index i. Secondary
48 |  * blocks can appear anywhere.
49 |  * @param outpkts an array of buffers into which the reconstructed output packets will be written (only packets which
50 |  * are not present in the inpkts input will be reconstructed and written to outpkts)
51 |  * @param index an array of the blocknums of the packets in inpkts
52 |  * @param sz size of a packet in bytes
53 |  */
54 | void fec_decode(const fec_t *code, const gf **inpkts, gf **outpkts, const unsigned *index, size_t sz);
55 | 
56 | #if defined(_MSC_VER)
57 | #define alloca _alloca
58 | #else
59 | #ifdef __GNUC__
60 | #ifndef alloca
61 | #define alloca(x) __builtin_alloca(x)
62 | #endif
63 | #else
64 | #include <alloca.h>
65 | #endif
66 | #endif
67 | 
68 | /**
69 |  * zfec -- fast forward error correction library with Python interface
70 |  *
71 |  * Copyright (C) 2007-2008 Allmydata, Inc.
72 |  * Author: Zooko Wilcox-O'Hearn
73 |  *
74 |  * This file is part of zfec.
75 |  *
76 |  * See README.rst for licensing information.
77 |  */
78 | 
79 | /*
80 |  * Much of this work is derived from the "fec" software by Luigi Rizzo, et
81 |  * al., the copyright notice and licence terms of which are included below
82 |  * for reference.
83 |  *
84 |  * fec.h -- forward error correction based on Vandermonde matrices
85 |  * 980614
86 |  * (C) 1997-98 Luigi Rizzo (luigi@iet.unipi.it)
87 |  *
88 |  * Portions derived from code by Phil Karn (karn@ka9q.ampr.org),
89 |  * Robert Morelos-Zaragoza (robert@spectra.eng.hawaii.edu) and Hari
90 |  * Thirumoorthy (harit@spectra.eng.hawaii.edu), Aug 1995
91 |  *
92 |  * Modifications by Dan Rubenstein (see Modifications.txt for
93 |  * their description.
94 |  * Modifications (C) 1998 Dan Rubenstein (drubenst@cs.umass.edu)
95 |  *
96 |  * Redistribution and use in source and binary forms, with or without
97 |  * modification, are permitted provided that the following conditions
98 |  * are met:
99 | 
100 |  * 1. Redistributions of source code must retain the above copyright
101 |  *    notice, this list of conditions and the following disclaimer.
102 |  * 2. Redistributions in binary form must reproduce the above
103 |  *    copyright notice, this list of conditions and the following
104 |  *    disclaimer in the documentation and/or other materials
105 |  *    provided with the distribution.
106 |  *
107 |  * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ``AS IS'' AND
108 |  * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
109 |  * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
110 |  * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
111 |  * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
112 |  * OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
113 |  * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
114 |  * OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
115 |  * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
116 |  * TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
117 |  * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
118 |  * OF SUCH DAMAGE.
119 |  */
120 | 
--------------------------------------------------------------------------------
/test_multicast.ps1:
--------------------------------------------------------------------------------
1 | ./ffmpeg -re -i test.mp4 -c copy -f mpegts udp://239.0.0.1:1234
--------------------------------------------------------------------------------
/tutorials/interface.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenIPC/aviateur/f2ced7f7c1cd13c556f3ff9f0216a9d83fd9cd76/tutorials/interface.png
--------------------------------------------------------------------------------
/tutorials/latency_test.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenIPC/aviateur/f2ced7f7c1cd13c556f3ff9f0216a9d83fd9cd76/tutorials/latency_test.jpg
--------------------------------------------------------------------------------
/tutorials/vcpkg.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenIPC/aviateur/f2ced7f7c1cd13c556f3ff9f0216a9d83fd9cd76/tutorials/vcpkg.jpg
--------------------------------------------------------------------------------
/tutorials/zadig1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenIPC/aviateur/f2ced7f7c1cd13c556f3ff9f0216a9d83fd9cd76/tutorials/zadig1.jpg
--------------------------------------------------------------------------------
/tutorials/zadig2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/OpenIPC/aviateur/f2ced7f7c1cd13c556f3ff9f0216a9d83fd9cd76/tutorials/zadig2.jpg
--------------------------------------------------------------------------------
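
Note: the following is an illustrative sketch, not a file from the repository. It shows how the fec_new()/fec_encode()/fec_decode() interface documented in src/wifi/fec.h above might be exercised in isolation; the k/m values, block size, and block contents are arbitrary assumptions made for this example. In the receiver itself, this API is presumably driven by the aggregation code in src/wifi/WfbProcessor.cpp rather than called directly like this.

    /* Illustrative sketch only -- not part of the repository.
     * 2 primary blocks (k = 2), 3 total blocks (m = 3), 8-byte packets. */
    #include <stdio.h>
    #include "fec.h"

    int main(void) {
        enum { K = 2, M = 3, SZ = 8 };

        gf block0[SZ] = "AAAAAAA"; /* primary block 0 */
        gf block1[SZ] = "BBBBBBB"; /* primary block 1 */
        gf check0[SZ] = {0};       /* buffer for the single check block (id 2) */

        const gf *src[K] = {block0, block1};
        gf *fecs[M - K] = {check0};

        fec_t *code = fec_new(K, M);
        fec_encode(code, src, fecs, SZ); /* fills check0 from the primary blocks */

        /* Pretend primary block 1 was lost: decode from block 0 plus the check
         * block. A primary block i must sit at slot i; the check block fills the
         * gap, and `index` records which block number each slot actually holds. */
        gf recovered[SZ] = {0};
        const gf *inpkts[K] = {block0, check0};
        gf *outpkts[1] = {recovered};
        unsigned index[K] = {0, 2};

        fec_decode(code, inpkts, outpkts, index, SZ);
        printf("recovered block 1: %s\n", (const char *)recovered);

        fec_free(code);
        return 0;
    }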