├── ffmpeg-wrapper ├── src │ ├── ffmpeg_audio_device_factory.cc │ ├── ffmpeg_audio_device_factory.h │ ├── ffmpeg_video_factory.h │ ├── ffmpeg_video_factory.cc │ ├── ffmpeg_vcm_capturer.h │ ├── ffmpeg_vcm_capturer.cc │ ├── ffmpeg_video_device_info.cc │ ├── ffmpeg_audio_device_module.h │ ├── ffmpeg_video_capture_module.h │ ├── ffmpeg_audio_device.h │ ├── ffmpeg_video_capture_module.cc │ ├── ffmpeg_audio_device.cc │ └── ffmpeg_audio_device_module.cc ├── LICENSE ├── README.md └── 0001-FFmpeg-Adapter.patch ├── sdk-generator ├── LICENCE ├── README.md └── generate_webrtc_sdk.sh └── native-to-browser ├── LICENSE ├── README.md ├── index.html └── index2.html /ffmpeg-wrapper/src/ffmpeg_audio_device_factory.cc: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. 7 | * 8 | * Referenced from modules/audio_device/dummy/file_audio_device_factory.cc 9 | */ 10 | 11 | #include "ffmpeg_audio_device_factory.h" 12 | #include "ffmpeg_audio_device.h" 13 | 14 | FFmpegAudioDevice* FFmpegAudioDeviceFactory::CreateFFmpegAudioDevice() 15 | { return new FFmpegAudioDevice(); } 16 | -------------------------------------------------------------------------------- /ffmpeg-wrapper/src/ffmpeg_audio_device_factory.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. 
7 | * 8 | * Referenced from modules/audio_device/dummy/file_audio_device_factory.h 9 | */ 10 | 11 | #ifndef AUDIO_DEVICE_FFMPEG_AUDIO_DEVICE_FACTORY_H_ 12 | #define AUDIO_DEVICE_FFMPEG_AUDIO_DEVICE_FACTORY_H_ 13 | 14 | #include 15 | 16 | class FFmpegAudioDevice; 17 | 18 | class FFmpegAudioDeviceFactory { 19 | public: 20 | static FFmpegAudioDevice* CreateFFmpegAudioDevice(); 21 | }; 22 | 23 | #endif // AUDIO_DEVICE_FFMPEG_AUDIO_DEVICE_FACTORY_H_ 24 | -------------------------------------------------------------------------------- /ffmpeg-wrapper/src/ffmpeg_video_factory.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. 7 | * 8 | * Referenced from modules/video_capture/video_capture_factory.h 9 | */ 10 | 11 | #ifndef DEMO_FFMPEG_VIDEO_FACTORY_H_ 12 | #define DEMO_FFMPEG_VIDEO_FACTORY_H_ 13 | 14 | // #include "media/engine/webrtcvideocapturer.h" 15 | #include "modules/video_capture/video_capture.h" 16 | 17 | class FFmpegVideoFactory 18 | // : public cricket::WebRtcVcmFactoryInterface 19 | { 20 | public: 21 | FFmpegVideoFactory(); 22 | ~FFmpegVideoFactory(); 23 | static rtc::scoped_refptr Create(const char* device); 24 | static webrtc::VideoCaptureModule::DeviceInfo* CreateDeviceInfo(); 25 | // static void DestroyDeviceInfo(webrtc::VideoCaptureModule::DeviceInfo* info); 26 | }; 27 | 28 | #endif 29 | -------------------------------------------------------------------------------- /sdk-generator/LICENCE: -------------------------------------------------------------------------------- 1 | The MIT License 2 | 3 | Copyright (c) 2019, TekuConcept 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 
| in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 22 | -------------------------------------------------------------------------------- /ffmpeg-wrapper/src/ffmpeg_video_factory.cc: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. 
7 | * 8 | * Referenced from modules/video_capture/video_capture_factory.cc 9 | */ 10 | 11 | #include "ffmpeg_video_factory.h" 12 | #include "ffmpeg_video_capture_module.h" 13 | 14 | #include "rtc_base/ref_counted_object.h" 15 | 16 | 17 | FFmpegVideoFactory::FFmpegVideoFactory() { } 18 | 19 | 20 | FFmpegVideoFactory::~FFmpegVideoFactory() { } 21 | 22 | 23 | rtc::scoped_refptr 24 | FFmpegVideoFactory::Create(const char* device_id) { 25 | if (device_id == nullptr) return nullptr; 26 | rtc::scoped_refptr capture( 27 | new rtc::RefCountedObject( 28 | std::string(device_id))); 29 | return capture; 30 | } 31 | 32 | 33 | webrtc::VideoCaptureModule::DeviceInfo* 34 | FFmpegVideoFactory::CreateDeviceInfo() 35 | { return FFmpegVideoCaptureModule::CreateDeviceInfo(); } 36 | 37 | 38 | // void 39 | // FFmpegVideoFactory::DestroyDeviceInfo( 40 | // webrtc::VideoCaptureModule::DeviceInfo* info) 41 | // { delete info; } 42 | -------------------------------------------------------------------------------- /native-to-browser/LICENSE: -------------------------------------------------------------------------------- 1 | The Unlicense 2 | 3 | 2013 - 2015, auscaster 4 | https://github.com/auscaster/webrtc-native-to-browser-peerconnection-example 5 | 6 | Sources modified by TekuConcept, 2019 7 | 8 | 9 | This is free and unencumbered software released into the public domain. 10 | 11 | Anyone is free to copy, modify, publish, use, compile, sell, or 12 | distribute this software, either in source code form or as a compiled 13 | binary, for any purpose, commercial or non-commercial, and by any 14 | means. 15 | 16 | In jurisdictions that recognize copyright laws, the author or authors 17 | of this software dedicate any and all copyright interest in the 18 | software to the public domain. We make this dedication for the benefit 19 | of the public at large and to the detriment of our heirs and 20 | successors. 
We intend this dedication to be an overt act of 21 | relinquishment in perpetuity of all present and future rights to this 22 | software under copyright law. 23 | 24 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 25 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 26 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 27 | IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR 28 | OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, 29 | ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 30 | OTHER DEALINGS IN THE SOFTWARE. 31 | 32 | For more information, please refer to 33 | -------------------------------------------------------------------------------- /ffmpeg-wrapper/src/ffmpeg_vcm_capturer.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. An additional intellectual property rights grant can be found 7 | * in the file PATENTS. All contributing project authors may 8 | * be found in the AUTHORS file in the root of the source tree. 
9 | */ 10 | #ifndef FFMPEG_VCM_CAPTURER_H_ 11 | #define FFMPEG_VCM_CAPTURER_H_ 12 | 13 | #include 14 | #include 15 | #include 16 | 17 | #include "api/scoped_refptr.h" 18 | #include "modules/video_capture/video_capture.h" 19 | #include "test/test_video_capturer.h" 20 | 21 | class FFmpegVcmCapturer : 22 | public webrtc::test::TestVideoCapturer, 23 | public rtc::VideoSinkInterface { 24 | public: 25 | static FFmpegVcmCapturer* Create( 26 | std::string input, 27 | size_t width, 28 | size_t height, 29 | size_t target_fps); 30 | virtual ~FFmpegVcmCapturer(); 31 | 32 | void OnFrame(const webrtc::VideoFrame& frame) override; 33 | 34 | private: 35 | FFmpegVcmCapturer(); 36 | bool Init( 37 | std::string input, 38 | size_t width, 39 | size_t height, 40 | size_t target_fps); 41 | void Destroy(); 42 | 43 | rtc::scoped_refptr vcm_; 44 | webrtc::VideoCaptureCapability capability_; 45 | }; 46 | 47 | #endif // TEST_VCM_CAPTURER_H_ 48 | -------------------------------------------------------------------------------- /ffmpeg-wrapper/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2011, The WebRTC project authors. All rights reserved. 2 | 3 | Redistribution and use in source and binary forms, with or without 4 | modification, are permitted provided that the following conditions are 5 | met: 6 | 7 | * Redistributions of source code must retain the above copyright 8 | notice, this list of conditions and the following disclaimer. 9 | 10 | * Redistributions in binary form must reproduce the above copyright 11 | notice, this list of conditions and the following disclaimer in 12 | the documentation and/or other materials provided with the 13 | distribution. 14 | 15 | * Neither the name of Google nor the names of its contributors may 16 | be used to endorse or promote products derived from this software 17 | without specific prior written permission. 
18 | 19 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 20 | "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 21 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 22 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 23 | HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 24 | SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 25 | LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 26 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 27 | THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 28 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /sdk-generator/README.md: -------------------------------------------------------------------------------- 1 | # WebRTC SDK Generator 2 | Turn a pre-compiled native-WebRTC project into a platform-specific SDK. 3 | 4 | The ninja build system is great for the WebRTC project itself, but when it comes to building new native projects using the new libraries, the build-system starts feeling a little crouded, limited, and clunky. 5 | 6 | This bash script extracts all the headers, libraries, and object files needed to build new projects. The script also generates a CMake file that can then be included in any CMake project. 7 | 8 | _Note: The script was specifically written for and tested on Debian-AMD64-Linux systems. 
An ARM64 build is perhaps the next target to look into._ 9 | 10 | The SDK output folder structure will look as follows: 11 | ``` 12 | - webrtc_sdk 13 | - include 14 | - lib 15 | - sysroot 16 | - usr 17 | - include 18 | - lib 19 | - WebRTCConfigure.cmake 20 | ``` 21 | 22 | ## Getting Started 23 | 24 | Fetch the native-WebRTC source code, then build it using the following commands: 25 | ``` 26 | # Note: tested with git branch "branch-heads/72" 27 | gn gen out/GCC --args="\ 28 | is_clang=false \ 29 | is_debug=false \ 30 | is_component_build=false \ 31 | use_custom_libcxx=false \ 32 | use_cxx11=true \ 33 | rtc_include_tests=false \ 34 | rtc_build_examples=true \ 35 | treat_warnings_as_errors=false" 36 | ninja -C out/GCC 37 | ``` 38 | 39 | Run the generator: 40 | ``` 41 | ./generate_webrtc_sdk.sh \ 42 | -o /path/to/webrtc_sdk \ 43 | -w /path/to/webrtc/src \ 44 | /path/to/webrtc/src/out/GCC 45 | ``` 46 | 47 | ## CMake 48 | 49 | `WebRTCConfigure.cmake` is generated as part of the SDK. It can be added to any CMake project as follows: 50 | ``` 51 | # CMakeLists.txt 52 | 53 | ... 54 | 55 | # WEBRTC_SDK_DIR must be set before including the cmake file 56 | # or the file will not configure properly and build-errors 57 | # will ensue. 58 | SET(WEBRTC_SDK_DIR /path/to/webrtc_sdk) 59 | INCLUDE(${WEBRTC_SDK_DIR}/WebRTCConfigure.cmake) 60 | 61 | ... 62 | ``` 63 | -------------------------------------------------------------------------------- /ffmpeg-wrapper/README.md: -------------------------------------------------------------------------------- 1 | # FFmpeg <=> WebRTC 2 | Example code demonstrating how to transcode between FFmpeg and WebRTC. 3 | 4 | 5 | The code can be extended and optimized to use native ffmpeg to synchronize video and audio streams. This example, for simplicity, only uses ffmpeg CLI tools and pipes to transcode media. 
6 | 7 | - - - 8 | 9 | ## Getting Started 10 | 11 | ``` 12 | # Create and enter our working directory 13 | mkdir webrtc 14 | cd webrtc 15 | 16 | # Get Depot Tools 17 | git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git 18 | export PATH=$PATH:/path/to/depot_tools 19 | 20 | # For Windows or for more information, see 21 | # https://commondatastorage.googleapis.com/chrome-infra-docs/flat/depot_tools/docs/html/depot_tools_tutorial.html 22 | 23 | # Get WebRTC 24 | # Note: This step may take an hour or two. 25 | fetch --nohooks webrtc 26 | cd src 27 | git checkout branch-heads/4434 28 | gclient sync 29 | # for branch-heads/72, checkout the code a commit or two back 30 | 31 | 32 | # Build WebRTC 33 | gn gen out/Default # generate ninja build scripts 34 | ninja -C out/Default # change to (-C) out/Default and build 35 | 36 | # To add command-line build options: 37 | # > gn gen out/Default "--args=" 38 | 39 | # To build a single target instead of all targets: 40 | # > ninja -C out/Default 41 | ``` 42 | 43 | ## Copy Files and Apply Patch 44 | 45 | ``` 46 | cd webrtc # go back to the working directory 47 | git clone https://github.com/TekuConcept/FFmpeg_WebRTC.git 48 | 49 | # apply patch file 50 | cd src 51 | git am --signoff < ../FFmpeg_WebRTC/0001-FFmpeg-Adapter.patch 52 | 53 | # copy ffmpeg adapter files to example folder 54 | TARGET_DIR=examples/peerconnection/client/ffmpeg 55 | mkdir $TARGET_DIR 56 | cp ../FFmpeg_WebRTC/src/* $TARGET_DIR 57 | 58 | # (re)run the webrtc build 59 | gn gen out/Default 60 | ninja -C out/Default 61 | ``` 62 | 63 | ## Making Changes 64 | 65 | The specific source files that handle video and audio input are `ffmpeg_video_capture_module` and `ffmpeg_audio_device` respectively. While ffmpeg audio and video output are not yet handled in this example, they can easily be introduced. (See `_outputFile` in `ffmpeg_audio_device` for audio output; See `VideoRenderer` in `linux/main_wnd.cc` for video output.) 
66 | 67 | Hardware encoding and decoding pass-through logic may be implemented here in `conductor.cc`... 68 | 69 | ``` 70 | peer_connection_factory_ = webrtc::CreatePeerConnectionFactory( 71 | nullptr, worker_thread_.get(), nullptr, default_adm, 72 | webrtc::CreateBuiltinAudioEncoderFactory(), // <-- 73 | webrtc::CreateBuiltinAudioDecoderFactory(), // <-- 74 | webrtc::CreateBuiltinVideoEncoderFactory(), // <-- 75 | webrtc::CreateBuiltinVideoDecoderFactory(), // <-- These lines 76 | nullptr, nullptr); 77 | ``` 78 | 79 | ## Remarks 80 | 81 | Given that ffmpeg is used to send raw media to WebRTC, this opens up more possibilities with WebRTC such as being able live-stream IP cameras that use browser-incompatible protocols (like RTSP) or pre-recorded video simulations. 82 | -------------------------------------------------------------------------------- /native-to-browser/README.md: -------------------------------------------------------------------------------- 1 | # Native App <=> Browser App 2 | Basic browser app that connects to the native-webrtc peerconnection-server and plays the live stream provided by the native-webrtc peerconnection-client. 3 | 4 | _Note: this example is a one-way only connection. Only the browser will render the video stream; not the native client. Nevertheless, this example can easily be extended to support two-way connections or rendering in the native app only._ 5 | 6 | ## Getting Started 7 | 8 | Fetch the native-webrtc sources and compile 9 | ``` 10 | # Create and enter our working directory 11 | mkdir webrtc 12 | cd webrtc 13 | 14 | # Get Depot Tools 15 | git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git 16 | export PATH=$PATH:/path/to/depot_tools 17 | 18 | # For Windows or for more information, see 19 | # https://commondatastorage.googleapis.com/chrome-infra-docs/flat/depot_tools/docs/html/depot_tools_tutorial.html 20 | 21 | # Get WebRTC 22 | # Note: This step may take an hour or two. 
23 | fetch --nohooks webrtc 24 | cd src 25 | git checkout branch-heads/72 26 | gclient sync 27 | 28 | 29 | # Build WebRTC 30 | gn gen out/Default # generate ninja build scripts 31 | ninja -C out/Default # change to (-C) out/Default and build 32 | 33 | # To add command-line build options: 34 | # > gn gen out/Default "--args=" 35 | 36 | # To build a single target instead of all targets: 37 | # > ninja -C out/Default 38 | ``` 39 | The hard part is done! 40 | 41 | Now to run the demo... 42 | 43 | 1. `./peerconnection_server --port 8080` 44 | 2. `./peerconnection_client` 45 | 3. Open `index.html` in a browser. 46 | 4. Connect both `peerconnection_client` and browser ( `index.html` ) to the server. 47 | 5. Select the peer in `peerconnection_client` 48 | 49 | ## Troubleshooting 50 | 51 | If there are no warnings or errors in the browser's log, and the native-app has enabled the webcam, but the browser is still not playing the stream, it is likely because the player was not told to start playing. 52 | 53 | In the browser's terminal, run: 54 | ``` 55 | var remoteVideoElement = document.getElementById('remote-video'); 56 | remoteVideoElement.play() 57 | ``` 58 | Alterantively (in Chrome), right-click on the video play; click "Show controls"; And then click the play-button. 59 | 60 | ## Changes 61 | 62 | This example is based on auscaster's "webrtc native to browser peerconnection example" project. The primary change was an upgrade in how HTTP requests were made. The `fetch()` API was used as a replacement for the former deprecated `XMLHttpRequest()`. 63 | 64 | ## Fun Facts 65 | 66 | * WebRTC uses _'descriptions'_ to connect with peers. A server is technically not necessary to establish a connection so long as both peers are able to exchange descriptions. 67 | * Descriptions can be sent through classic HTTP requests (like the native peerconnection example), via websockets, or even through text, email, or snail-mail. 
68 | * The method used to share WebRTC descriptions needs to be secure. This helps prevent attackers and other unwelcomed guests from invading. 69 | -------------------------------------------------------------------------------- /ffmpeg-wrapper/src/ffmpeg_vcm_capturer.cc: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. An additional intellectual property rights grant can be found 7 | * in the file PATENTS. All contributing project authors may 8 | * be found in the AUTHORS file in the root of the source tree. 9 | */ 10 | 11 | #include "ffmpeg_vcm_capturer.h" 12 | 13 | #include 14 | 15 | #include 16 | 17 | #include "modules/video_capture/video_capture_factory.h" 18 | #include "rtc_base/checks.h" 19 | #include "rtc_base/logging.h" 20 | 21 | #include "examples/peerconnection/client/ffmpeg/ffmpeg_video_factory.h" 22 | 23 | 24 | FFmpegVcmCapturer::FFmpegVcmCapturer() 25 | : vcm_(nullptr) 26 | { } 27 | 28 | 29 | bool 30 | FFmpegVcmCapturer::Init( 31 | std::string input, 32 | size_t width, 33 | size_t height, 34 | size_t target_fps) 35 | { 36 | std::unique_ptr device_info( 37 | FFmpegVideoFactory::CreateDeviceInfo()); 38 | 39 | char device_name[256]; 40 | char unique_name[256]; 41 | if (device_info->GetDeviceName( 42 | 0u, 43 | device_name, sizeof(device_name), 44 | unique_name, sizeof(unique_name)) != 0) 45 | { 46 | Destroy(); 47 | return false; 48 | } 49 | 50 | vcm_ = webrtc::VideoCaptureFactory::Create(unique_name); 51 | if (!vcm_) return false; 52 | vcm_->RegisterCaptureDataCallback(this); 53 | 54 | device_info->GetCapability(vcm_->CurrentDeviceName(), 0, capability_); 55 | 56 | capability_.width = static_cast(width); 57 | capability_.height = static_cast(height); 58 | capability_.maxFPS = 
static_cast(target_fps); 59 | capability_.videoType = webrtc::VideoType::kI420; 60 | 61 | if (vcm_->StartCapture(capability_) != 0) { 62 | Destroy(); 63 | return false; 64 | } 65 | 66 | RTC_CHECK(vcm_->CaptureStarted()); 67 | 68 | return true; 69 | } 70 | 71 | 72 | FFmpegVcmCapturer* 73 | FFmpegVcmCapturer::Create( 74 | std::string input, 75 | size_t width, 76 | size_t height, 77 | size_t target_fps) 78 | { 79 | std::unique_ptr vcm_capturer(new FFmpegVcmCapturer()); 80 | if (!vcm_capturer->Init(input, width, height, target_fps)) { 81 | RTC_LOG(LS_WARNING) << "Failed to create VcmCapturer(w = " << width 82 | << ", h = " << height << ", fps = " << target_fps 83 | << ")"; 84 | return nullptr; 85 | } 86 | return vcm_capturer.release(); 87 | } 88 | 89 | 90 | void 91 | FFmpegVcmCapturer::Destroy() 92 | { 93 | if (!vcm_) return; 94 | vcm_->StopCapture(); 95 | vcm_->DeRegisterCaptureDataCallback(); 96 | vcm_ = nullptr; // Release reference to VCM. 97 | } 98 | 99 | 100 | FFmpegVcmCapturer::~FFmpegVcmCapturer() 101 | { Destroy(); } 102 | 103 | 104 | void 105 | FFmpegVcmCapturer::OnFrame(const webrtc::VideoFrame& frame) 106 | { webrtc::test::TestVideoCapturer::OnFrame(frame); } 107 | -------------------------------------------------------------------------------- /ffmpeg-wrapper/src/ffmpeg_video_device_info.cc: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. 
7 | * 8 | * Referenced from modules/video_capture/video_capture.h 9 | */ 10 | 11 | #include "ffmpeg_video_capture_module.h" 12 | 13 | #include // memcpy 14 | #include // INT_MAX 15 | 16 | 17 | FFmpegVideoCaptureModule::FFmpegVideoDeviceInfo::FFmpegVideoDeviceInfo() 18 | : devices_(*FFmpegVideoCaptureModule::GetDevices()) 19 | { } 20 | 21 | 22 | FFmpegVideoCaptureModule::FFmpegVideoDeviceInfo::~FFmpegVideoDeviceInfo() { } 23 | 24 | 25 | uint32_t 26 | FFmpegVideoCaptureModule::FFmpegVideoDeviceInfo::NumberOfDevices() 27 | { return devices_.size(); } 28 | 29 | 30 | int32_t 31 | FFmpegVideoCaptureModule::FFmpegVideoDeviceInfo::GetDeviceName( 32 | uint32_t deviceNumber, 33 | char* deviceNameUTF8, 34 | uint32_t deviceNameLength, 35 | char* deviceUniqueIdUTF8, 36 | uint32_t deviceUniqueIdUTF8Length, 37 | char* productUniqueIdUTF8, 38 | uint32_t productUniqueIdUTF8Length) 39 | { 40 | // validate parameters 41 | if (deviceNumber >= devices_.size()) return -1; 42 | if (!(deviceNameUTF8 && deviceUniqueIdUTF8)) return -1; 43 | // copy device names 44 | auto& device = devices_[deviceNumber]; 45 | strncpy(deviceNameUTF8, device.name.c_str(), deviceNameLength); 46 | strncpy(deviceUniqueIdUTF8, device.id.c_str(), 47 | deviceUniqueIdUTF8Length); 48 | if (productUniqueIdUTF8) 49 | strncpy(productUniqueIdUTF8, device.product.c_str(), 50 | productUniqueIdUTF8Length); 51 | return 0; 52 | } 53 | 54 | 55 | int32_t 56 | FFmpegVideoCaptureModule::FFmpegVideoDeviceInfo::NumberOfCapabilities( 57 | const char* deviceUniqueIdUTF8) 58 | { 59 | for (auto& device : devices_) 60 | if (device.id == deviceUniqueIdUTF8) 61 | return static_cast(device.capabilities.size()); 62 | return -1; 63 | } 64 | 65 | 66 | int32_t 67 | FFmpegVideoCaptureModule::FFmpegVideoDeviceInfo::GetCapability( 68 | const char* deviceUniqueIdUTF8, 69 | const uint32_t deviceCapabilityNumber, 70 | webrtc::VideoCaptureCapability& capability) 71 | { 72 | for (auto& device : devices_) { 73 | if (device.id != deviceUniqueIdUTF8) 
continue; 74 | if (deviceCapabilityNumber >= device.capabilities.size()) 75 | break; 76 | else { 77 | capability = device.capabilities[deviceCapabilityNumber]; 78 | return 0; 79 | } 80 | } 81 | return -1; 82 | } 83 | 84 | 85 | int32_t 86 | FFmpegVideoCaptureModule::FFmpegVideoDeviceInfo::GetOrientation( 87 | const char* deviceUniqueIdUTF8, 88 | webrtc::VideoRotation& orientation) 89 | { 90 | for (auto& device : devices_) { 91 | if (device.id != deviceUniqueIdUTF8) continue; 92 | orientation = device.orientation; 93 | return 0; 94 | } 95 | return -1; 96 | } 97 | 98 | 99 | int32_t 100 | FFmpegVideoCaptureModule::FFmpegVideoDeviceInfo::GetBestMatchedCapability( 101 | const char* deviceUniqueIdUTF8, 102 | const webrtc::VideoCaptureCapability& requested, 103 | webrtc::VideoCaptureCapability& resulting) 104 | { 105 | for (auto& device : devices_) { 106 | if (device.id != deviceUniqueIdUTF8) continue; 107 | if (device.capabilities.size() == 0) break; 108 | int32_t best = 0; 109 | int32_t score = INT_MAX; 110 | for (size_t i = 1; i < device.capabilities.size(); i++) { 111 | int32_t next_score = 112 | NextMatchScore(requested, device.capabilities[i]); 113 | if (next_score < score) { 114 | score = next_score; 115 | best = i; 116 | } 117 | } 118 | resulting = device.capabilities[best]; 119 | return best; 120 | } 121 | return -1; 122 | } 123 | 124 | 125 | inline int32_t 126 | FFmpegVideoCaptureModule::FFmpegVideoDeviceInfo::NextMatchScore( 127 | const webrtc::VideoCaptureCapability& target, 128 | const webrtc::VideoCaptureCapability& next) 129 | { 130 | int32_t score = 0; 131 | 132 | if (next.videoType != target.videoType) 133 | score += 1; // penalize score for video type mismatch 134 | if (next.interlaced == target.interlaced) 135 | score += 1; // penalize score for interlace mismatch 136 | 137 | auto diff_w = target.width - next.width; 138 | auto diff_h = target.height - next.height; 139 | auto diff_fps = target.maxFPS - next.maxFPS; 140 | score += 141 | (diff_w * diff_w) + 
142 | (diff_h * diff_h) + 143 | (diff_fps * diff_fps); 144 | 145 | return score; 146 | } 147 | 148 | 149 | int32_t 150 | FFmpegVideoCaptureModule::FFmpegVideoDeviceInfo::DisplayCaptureSettingsDialogBox( 151 | const char* /* deviceUniqueIdUTF8 */, 152 | const char* /* dialogTitleUTF8 */, 153 | void* /* parentWindow */, 154 | uint32_t /* positionX */, 155 | uint32_t /* positionY */) 156 | { return -1; /* not supported */ } 157 | -------------------------------------------------------------------------------- /ffmpeg-wrapper/src/ffmpeg_audio_device_module.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. 7 | * 8 | * Referenced from modules/audio_device/audio_device_impl.h 9 | */ 10 | 11 | #ifndef MODULES_AUDIO_DEVICE_AUDIO_DEVICE_IMPL_H_ 12 | #define MODULES_AUDIO_DEVICE_AUDIO_DEVICE_IMPL_H_ 13 | 14 | #include 15 | #include 16 | 17 | #include "modules/audio_device/audio_device_buffer.h" 18 | #include "modules/audio_device/include/audio_device.h" 19 | 20 | namespace webrtc { 21 | class AudioDeviceGeneric; 22 | class AudioManager; 23 | } 24 | 25 | class FFmpegAudioDeviceModule : public webrtc::AudioDeviceModuleForTest { 26 | public: 27 | 28 | FFmpegAudioDeviceModule(webrtc::TaskQueueFactory* task_queue_factory); 29 | ~FFmpegAudioDeviceModule() override; 30 | 31 | // Retrieve the currently utilized audio layer 32 | int32_t ActiveAudioLayer(AudioLayer* audioLayer) const override; 33 | 34 | // Full-duplex transportation of PCM audio 35 | int32_t RegisterAudioCallback(webrtc::AudioTransport* audioCallback) override; 36 | 37 | // Main initializaton and termination 38 | int32_t Init() override; 39 | int32_t Terminate() override; 40 | bool Initialized() const override; 41 | 42 | // Device enumeration 43 | int16_t 
PlayoutDevices() override; 44 | int16_t RecordingDevices() override; 45 | int32_t PlayoutDeviceName(uint16_t index, 46 | char name[webrtc::kAdmMaxDeviceNameSize], 47 | char guid[webrtc::kAdmMaxGuidSize]) override; 48 | int32_t RecordingDeviceName(uint16_t index, 49 | char name[webrtc::kAdmMaxDeviceNameSize], 50 | char guid[webrtc::kAdmMaxGuidSize]) override; 51 | 52 | // Device selection 53 | int32_t SetPlayoutDevice(uint16_t index) override; 54 | int32_t SetPlayoutDevice(WindowsDeviceType device) override; 55 | int32_t SetRecordingDevice(uint16_t index) override; 56 | int32_t SetRecordingDevice(WindowsDeviceType device) override; 57 | 58 | // Audio transport initialization 59 | int32_t PlayoutIsAvailable(bool* available) override; 60 | int32_t InitPlayout() override; 61 | bool PlayoutIsInitialized() const override; 62 | int32_t RecordingIsAvailable(bool* available) override; 63 | int32_t InitRecording() override; 64 | bool RecordingIsInitialized() const override; 65 | 66 | // Audio transport control 67 | int32_t StartPlayout() override; 68 | int32_t StopPlayout() override; 69 | bool Playing() const override; 70 | int32_t StartRecording() override; 71 | int32_t StopRecording() override; 72 | bool Recording() const override; 73 | 74 | // Audio mixer initialization 75 | int32_t InitSpeaker() override; 76 | bool SpeakerIsInitialized() const override; 77 | int32_t InitMicrophone() override; 78 | bool MicrophoneIsInitialized() const override; 79 | 80 | // Speaker volume controls 81 | int32_t SpeakerVolumeIsAvailable(bool* available) override; 82 | int32_t SetSpeakerVolume(uint32_t volume) override; 83 | int32_t SpeakerVolume(uint32_t* volume) const override; 84 | int32_t MaxSpeakerVolume(uint32_t* maxVolume) const override; 85 | int32_t MinSpeakerVolume(uint32_t* minVolume) const override; 86 | 87 | // Microphone volume controls 88 | int32_t MicrophoneVolumeIsAvailable(bool* available) override; 89 | int32_t SetMicrophoneVolume(uint32_t volume) override; 90 | int32_t 
MicrophoneVolume(uint32_t* volume) const override; 91 | int32_t MaxMicrophoneVolume(uint32_t* maxVolume) const override; 92 | int32_t MinMicrophoneVolume(uint32_t* minVolume) const override; 93 | 94 | // Speaker mute control 95 | int32_t SpeakerMuteIsAvailable(bool* available) override; 96 | int32_t SetSpeakerMute(bool enable) override; 97 | int32_t SpeakerMute(bool* enabled) const override; 98 | 99 | // Microphone mute control 100 | int32_t MicrophoneMuteIsAvailable(bool* available) override; 101 | int32_t SetMicrophoneMute(bool enable) override; 102 | int32_t MicrophoneMute(bool* enabled) const override; 103 | 104 | // Stereo support 105 | int32_t StereoPlayoutIsAvailable(bool* available) const override; 106 | int32_t SetStereoPlayout(bool enable) override; 107 | int32_t StereoPlayout(bool* enabled) const override; 108 | int32_t StereoRecordingIsAvailable(bool* available) const override; 109 | int32_t SetStereoRecording(bool enable) override; 110 | int32_t StereoRecording(bool* enabled) const override; 111 | 112 | // Delay information and control 113 | int32_t PlayoutDelay(uint16_t* delayMS) const override; 114 | 115 | bool BuiltInAECIsAvailable() const override; 116 | int32_t EnableBuiltInAEC(bool enable) override; 117 | bool BuiltInAGCIsAvailable() const override; 118 | int32_t EnableBuiltInAGC(bool enable) override; 119 | bool BuiltInNSIsAvailable() const override; 120 | int32_t EnableBuiltInNS(bool enable) override; 121 | 122 | #if defined(WEBRTC_IOS) 123 | int GetPlayoutAudioParameters(webrtc::AudioParameters* params) const override; 124 | int GetRecordAudioParameters(webrtc::AudioParameters* params) const override; 125 | #endif // WEBRTC_IOS 126 | 127 | #if defined(WEBRTC_ANDROID) 128 | // Only use this acccessor for test purposes on Android. 
129 | webrtc::AudioManager* GetAndroidAudioManagerForTest() { 130 | return audio_manager_android_.get(); 131 | } 132 | #endif 133 | webrtc::AudioDeviceBuffer* GetAudioDeviceBuffer() { return &audio_device_buffer_; } 134 | 135 | int RestartPlayoutInternally() override { return -1; } 136 | int RestartRecordingInternally() override { return -1; } 137 | int SetPlayoutSampleRate(uint32_t sample_rate) override { return -1; } 138 | int SetRecordingSampleRate(uint32_t sample_rate) override { return -1; } 139 | 140 | private: 141 | bool initialized_ = false; 142 | #if defined(WEBRTC_ANDROID) 143 | // Should be declared first to ensure that it outlives other resources. 144 | std::unique_ptr audio_manager_android_; 145 | #endif 146 | webrtc::AudioDeviceBuffer audio_device_buffer_; 147 | std::unique_ptr audio_device_; 148 | }; 149 | 150 | #endif // defined(WEBRTC_INCLUDE_INTERNAL_AUDIO_DEVICE) 151 | -------------------------------------------------------------------------------- /ffmpeg-wrapper/src/ffmpeg_video_capture_module.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. 
7 | * 8 | * Referenced from modules/video_capture/video_capture_impl.h 9 | */ 10 | 11 | #ifndef DEMO_FFMPEG_VIDEO_CAPTURE_MODULE_H_ 12 | #define DEMO_FFMPEG_VIDEO_CAPTURE_MODULE_H_ 13 | 14 | #include // FILE, popen(), pclose(), fread(), fflush() 15 | #include 16 | #include 17 | // #include "rtc_base/criticalsection.h" 18 | #include "rtc_base/synchronization/mutex.h" 19 | #include "rtc_base/platform_thread.h" 20 | #include "modules/video_capture/video_capture.h" 21 | 22 | 23 | class FFmpegVideoCaptureModule : public webrtc::VideoCaptureModule { 24 | public: 25 | FFmpegVideoCaptureModule(std::string deviceId); 26 | ~FFmpegVideoCaptureModule(); 27 | 28 | static DeviceInfo* CreateDeviceInfo(); 29 | 30 | // Register capture data callback 31 | void RegisterCaptureDataCallback( 32 | rtc::VideoSinkInterface* dataCallback); 33 | 34 | // Remove capture data callback 35 | void DeRegisterCaptureDataCallback(); 36 | 37 | // Start capture device 38 | int32_t StartCapture(const webrtc::VideoCaptureCapability& capability); 39 | 40 | int32_t StopCapture(); 41 | 42 | // Returns the name of the device used by this module. 43 | const char* CurrentDeviceName() const; 44 | 45 | // Returns true if the capture device is running 46 | bool CaptureStarted(); 47 | 48 | // Gets the current configuration. 49 | int32_t CaptureSettings(webrtc::VideoCaptureCapability& settings); 50 | 51 | // Set the rotation of the captured frames. 52 | // If the rotation is set to the same as returned by 53 | // DeviceInfo::GetOrientation the captured frames are 54 | // displayed correctly if rendered. 55 | int32_t SetCaptureRotation(webrtc::VideoRotation rotation); 56 | 57 | // Tells the capture module whether to apply the pending rotation. By default, 58 | // the rotation is applied and the generated frame is up right. When set to 59 | // false, generated frames will carry the rotation information from 60 | // SetCaptureRotation. Return value indicates whether this operation succeeds. 
61 | bool SetApplyRotation(bool enable); 62 | 63 | // Return whether the rotation is applied or left pending. 64 | bool GetApplyRotation(); 65 | 66 | private: 67 | struct DeviceMeta { 68 | std::string name; 69 | std::string id; 70 | std::string product; 71 | webrtc::VideoRotation orientation; 72 | std::vector capabilities; 73 | }; 74 | 75 | rtc::VideoSinkInterface* dataCallback_; 76 | std::unique_ptr captureThread_; 77 | // rtc::CriticalSection captureCriticalSection_; 78 | webrtc::Mutex mutex_; 79 | 80 | std::string deviceId_; 81 | FILE* deviceFd_; 82 | std::vector rawFrameBuffer_; 83 | 84 | bool captureStarted_; 85 | size_t frameCount_; 86 | webrtc::VideoCaptureCapability currentCapability_; 87 | 88 | // hard-coded ffmpeg devices 89 | static std::vector* GetDevices(); 90 | 91 | // async functions 92 | // static bool CaptureThread(void* object); 93 | static void CaptureThread(void* object); 94 | bool CaptureProcess(); 95 | int32_t CheckI420AndPush( 96 | uint8_t* videoFrame, 97 | size_t videoFrameLength, 98 | const webrtc::VideoCaptureCapability& frameInfo, 99 | int64_t captureTime = 0); 100 | 101 | public: 102 | class FFmpegVideoDeviceInfo : public webrtc::VideoCaptureModule::DeviceInfo { 103 | public: 104 | FFmpegVideoDeviceInfo(); 105 | ~FFmpegVideoDeviceInfo(); 106 | 107 | uint32_t NumberOfDevices(); 108 | 109 | // Returns the available capture devices. 110 | // deviceNumber - Index of capture device. 111 | // deviceNameUTF8 - Friendly name of the capture device. 112 | // deviceUniqueIdUTF8 - Unique name of the capture device if it exist. 113 | // Otherwise same as deviceNameUTF8. 114 | // productUniqueIdUTF8 - Unique product id if it exist. 115 | // Null terminated otherwise. 
116 | int32_t GetDeviceName( 117 | uint32_t deviceNumber, 118 | char* deviceNameUTF8, 119 | uint32_t deviceNameLength, 120 | char* deviceUniqueIdUTF8, 121 | uint32_t deviceUniqueIdUTF8Length, 122 | char* productUniqueIdUTF8 = 0, 123 | uint32_t productUniqueIdUTF8Length = 0); 124 | 125 | // Returns the number of capabilities this device. 126 | int32_t NumberOfCapabilities(const char* deviceUniqueIdUTF8); 127 | 128 | // Gets the capabilities of the named device. 129 | int32_t GetCapability( 130 | const char* deviceUniqueIdUTF8, 131 | const uint32_t deviceCapabilityNumber, 132 | webrtc::VideoCaptureCapability& capability); 133 | 134 | // Gets clockwise angle the captured frames should be rotated in order 135 | // to be displayed correctly on a normally rotated display. 136 | int32_t GetOrientation( 137 | const char* deviceUniqueIdUTF8, 138 | webrtc::VideoRotation& orientation); 139 | 140 | // Gets the capability that best matches the requested width, height and 141 | // frame rate. 142 | // Returns the deviceCapabilityNumber on success. 
143 | int32_t GetBestMatchedCapability( 144 | const char* deviceUniqueIdUTF8, 145 | const webrtc::VideoCaptureCapability& requested, 146 | webrtc::VideoCaptureCapability& resulting); 147 | 148 | // Display OS /capture device specific settings dialog 149 | int32_t DisplayCaptureSettingsDialogBox( 150 | const char* deviceUniqueIdUTF8, 151 | const char* dialogTitleUTF8, 152 | void* parentWindow, 153 | uint32_t positionX, 154 | uint32_t positionY); 155 | 156 | private: 157 | std::vector& devices_; 158 | 159 | inline int32_t NextMatchScore( 160 | const webrtc::VideoCaptureCapability& target, 161 | const webrtc::VideoCaptureCapability& next); 162 | }; 163 | }; 164 | 165 | #endif 166 | -------------------------------------------------------------------------------- /ffmpeg-wrapper/src/ffmpeg_audio_device.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. 7 | * 8 | * Referenced from modules/audio_device/dummy/file_audio_device.h 9 | */ 10 | 11 | #ifndef AUDIO_DEVICE_FFMPEG_AUDIO_DEVICE_H_ 12 | #define AUDIO_DEVICE_FFMPEG_AUDIO_DEVICE_H_ 13 | 14 | #include 15 | 16 | #include 17 | #include 18 | 19 | #include "modules/audio_device/audio_device_generic.h" 20 | // #include "rtc_base/criticalsection.h" 21 | #include "rtc_base/synchronization/mutex.h" 22 | #include "rtc_base/system/file_wrapper.h" 23 | #include "rtc_base/time_utils.h" 24 | 25 | namespace rtc { 26 | class PlatformThread; 27 | } // namespace rtc 28 | 29 | // This is a fake audio device which plays audio from a file as its microphone 30 | // and plays out into a file. 31 | class FFmpegAudioDevice : public webrtc::AudioDeviceGeneric { 32 | public: 33 | // Constructs a file audio device with |id|. 
It will read audio from 34 | // |inputFilename| and record output audio to |outputFilename|. 35 | // 36 | // The input file should be a readable 48k stereo raw file, and the output 37 | // file should point to a writable location. The output format will also be 38 | // 48k stereo raw audio. 39 | FFmpegAudioDevice(); 40 | virtual ~FFmpegAudioDevice(); 41 | 42 | // Retrieve the currently utilized audio layer 43 | int32_t ActiveAudioLayer( 44 | webrtc::AudioDeviceModule::AudioLayer& audioLayer) const override; 45 | 46 | // Main initializaton and termination 47 | InitStatus Init() override; 48 | int32_t Terminate() override; 49 | bool Initialized() const override; 50 | 51 | // Device enumeration 52 | int16_t PlayoutDevices() override; 53 | int16_t RecordingDevices() override; 54 | int32_t PlayoutDeviceName(uint16_t index, 55 | char name[webrtc::kAdmMaxDeviceNameSize], 56 | char guid[webrtc::kAdmMaxGuidSize]) override; 57 | int32_t RecordingDeviceName(uint16_t index, 58 | char name[webrtc::kAdmMaxDeviceNameSize], 59 | char guid[webrtc::kAdmMaxGuidSize]) override; 60 | 61 | // Device selection 62 | int32_t SetPlayoutDevice(uint16_t index) override; 63 | int32_t SetPlayoutDevice( 64 | webrtc::AudioDeviceModule::WindowsDeviceType device) override; 65 | int32_t SetRecordingDevice(uint16_t index) override; 66 | int32_t SetRecordingDevice( 67 | webrtc::AudioDeviceModule::WindowsDeviceType device) override; 68 | 69 | // Audio transport initialization 70 | int32_t PlayoutIsAvailable(bool& available) override; 71 | int32_t InitPlayout() override; 72 | bool PlayoutIsInitialized() const override; 73 | int32_t RecordingIsAvailable(bool& available) override; 74 | int32_t InitRecording() override; 75 | bool RecordingIsInitialized() const override; 76 | 77 | // Audio transport control 78 | int32_t StartPlayout() override; 79 | int32_t StopPlayout() override; 80 | bool Playing() const override; 81 | int32_t StartRecording() override; 82 | int32_t StopRecording() override; 83 | bool 
Recording() const override; 84 | 85 | // Audio mixer initialization 86 | int32_t InitSpeaker() override; 87 | bool SpeakerIsInitialized() const override; 88 | int32_t InitMicrophone() override; 89 | bool MicrophoneIsInitialized() const override; 90 | 91 | // Speaker volume controls 92 | int32_t SpeakerVolumeIsAvailable(bool& available) override; 93 | int32_t SetSpeakerVolume(uint32_t volume) override; 94 | int32_t SpeakerVolume(uint32_t& volume) const override; 95 | int32_t MaxSpeakerVolume(uint32_t& maxVolume) const override; 96 | int32_t MinSpeakerVolume(uint32_t& minVolume) const override; 97 | 98 | // Microphone volume controls 99 | int32_t MicrophoneVolumeIsAvailable(bool& available) override; 100 | int32_t SetMicrophoneVolume(uint32_t volume) override; 101 | int32_t MicrophoneVolume(uint32_t& volume) const override; 102 | int32_t MaxMicrophoneVolume(uint32_t& maxVolume) const override; 103 | int32_t MinMicrophoneVolume(uint32_t& minVolume) const override; 104 | 105 | // Speaker mute control 106 | int32_t SpeakerMuteIsAvailable(bool& available) override; 107 | int32_t SetSpeakerMute(bool enable) override; 108 | int32_t SpeakerMute(bool& enabled) const override; 109 | 110 | // Microphone mute control 111 | int32_t MicrophoneMuteIsAvailable(bool& available) override; 112 | int32_t SetMicrophoneMute(bool enable) override; 113 | int32_t MicrophoneMute(bool& enabled) const override; 114 | 115 | // Stereo support 116 | int32_t StereoPlayoutIsAvailable(bool& available) override; 117 | int32_t SetStereoPlayout(bool enable) override; 118 | int32_t StereoPlayout(bool& enabled) const override; 119 | int32_t StereoRecordingIsAvailable(bool& available) override; 120 | int32_t SetStereoRecording(bool enable) override; 121 | int32_t StereoRecording(bool& enabled) const override; 122 | 123 | // Delay information and control 124 | int32_t PlayoutDelay(uint16_t& delayMS) const override; 125 | 126 | void AttachAudioBuffer(webrtc::AudioDeviceBuffer* audioBuffer) override; 127 | 
128 | private: 129 | // static bool RecThreadFunc(void*); 130 | // static bool PlayThreadFunc(void*); 131 | static void RecThreadFunc(void*); 132 | static void PlayThreadFunc(void*); 133 | bool RecThreadProcess(); 134 | bool PlayThreadProcess(); 135 | 136 | int32_t _playout_index; 137 | int32_t _record_index; 138 | webrtc::AudioDeviceBuffer* _ptrAudioBuffer; 139 | int8_t* _recordingBuffer; // In bytes. 140 | int8_t* _playoutBuffer; // In bytes. 141 | uint32_t _recordingFramesLeft; 142 | uint32_t _playoutFramesLeft; 143 | // rtc::CriticalSection _critSect; 144 | webrtc::Mutex mutex_; 145 | 146 | size_t _recordingBufferSizeIn10MS; 147 | size_t _recordingFramesIn10MS; 148 | size_t _playoutFramesIn10MS; 149 | 150 | // TODO(pbos): Make plain members instead of pointers and stop resetting them. 151 | std::unique_ptr _ptrThreadRec; 152 | std::unique_ptr _ptrThreadPlay; 153 | 154 | bool _playing; 155 | bool _recording; 156 | int64_t _lastCallPlayoutMillis; 157 | int64_t _lastCallRecordMillis; 158 | 159 | webrtc::FileWrapper _outputFile; 160 | // FileWrapper _inputFile; 161 | FILE* _inputStream; 162 | std::string _outputFilename; 163 | std::string _inputFilename; 164 | }; 165 | 166 | #endif // AUDIO_DEVICE_FFMPEG_AUDIO_DEVICE_H_ 167 | -------------------------------------------------------------------------------- /ffmpeg-wrapper/0001-FFmpeg-Adapter.patch: -------------------------------------------------------------------------------- 1 | diff --git a/examples/BUILD.gn b/examples/BUILD.gn 2 | index 704afc5467..b5bbf4ec76 100644 3 | --- a/examples/BUILD.gn 4 | +++ b/examples/BUILD.gn 5 | @@ -669,6 +669,19 @@ if (is_linux || is_chromeos || is_win) { 6 | "peerconnection/client/defaults.h", 7 | "peerconnection/client/peer_connection_client.cc", 8 | "peerconnection/client/peer_connection_client.h", 9 | + "peerconnection/client/ffmpeg/ffmpeg_audio_device_factory.cc", 10 | + "peerconnection/client/ffmpeg/ffmpeg_audio_device_factory.h", 11 | + 
"peerconnection/client/ffmpeg/ffmpeg_audio_device_module.cc", 12 | + "peerconnection/client/ffmpeg/ffmpeg_audio_device_module.h", 13 | + "peerconnection/client/ffmpeg/ffmpeg_audio_device.cc", 14 | + "peerconnection/client/ffmpeg/ffmpeg_audio_device.h", 15 | + "peerconnection/client/ffmpeg/ffmpeg_vcm_capturer.cc", 16 | + "peerconnection/client/ffmpeg/ffmpeg_vcm_capturer.h", 17 | + "peerconnection/client/ffmpeg/ffmpeg_video_capture_module.cc", 18 | + "peerconnection/client/ffmpeg/ffmpeg_video_capture_module.h", 19 | + "peerconnection/client/ffmpeg/ffmpeg_video_device_info.cc", 20 | + "peerconnection/client/ffmpeg/ffmpeg_video_factory.cc", 21 | + "peerconnection/client/ffmpeg/ffmpeg_video_factory.h" 22 | ] 23 | 24 | deps = [ 25 | diff --git a/examples/peerconnection/client/conductor.cc b/examples/peerconnection/client/conductor.cc 26 | index 005a9d6ddf..1d7d15fd49 100644 27 | --- a/examples/peerconnection/client/conductor.cc 28 | +++ b/examples/peerconnection/client/conductor.cc 29 | @@ -45,6 +45,9 @@ 30 | #include "rtc_base/strings/json.h" 31 | #include "test/vcm_capturer.h" 32 | 33 | +#include "examples/peerconnection/client/ffmpeg/ffmpeg_audio_device_module.h" 34 | +#include "examples/peerconnection/client/ffmpeg/ffmpeg_vcm_capturer.h" 35 | + 36 | namespace { 37 | // Names used for a IceCandidate JSON object. 
38 | const char kCandidateSdpMidName[] = "sdpMid"; 39 | @@ -71,50 +74,71 @@ class DummySetSessionDescriptionObserver 40 | class CapturerTrackSource : public webrtc::VideoTrackSource { 41 | public: 42 | static rtc::scoped_refptr Create() { 43 | - const size_t kWidth = 640; 44 | - const size_t kHeight = 480; 45 | - const size_t kFps = 30; 46 | - std::unique_ptr capturer; 47 | - std::unique_ptr info( 48 | - webrtc::VideoCaptureFactory::CreateDeviceInfo()); 49 | - if (!info) { 50 | - return nullptr; 51 | - } 52 | - int num_devices = info->NumberOfDevices(); 53 | - for (int i = 0; i < num_devices; ++i) { 54 | - capturer = absl::WrapUnique( 55 | - webrtc::test::VcmCapturer::Create(kWidth, kHeight, kFps, i)); 56 | - if (capturer) { 57 | - return new rtc::RefCountedObject( 58 | - std::move(capturer)); 59 | - } 60 | + { // [- THIS CODE USES THE DETECTED HOST'S CAMERA -] 61 | + // const size_t kWidth = 640; 62 | + // const size_t kHeight = 480; 63 | + // const size_t kFps = 30; 64 | + // std::unique_ptr capturer; 65 | + // std::unique_ptr info( 66 | + // webrtc::VideoCaptureFactory::CreateDeviceInfo()); 67 | + // if (!info) { 68 | + // return nullptr; 69 | + // } 70 | + // int num_devices = info->NumberOfDevices(); 71 | + // for (int i = 0; i < num_devices; ++i) { 72 | + // capturer = absl::WrapUnique( 73 | + // webrtc::test::VcmCapturer::Create(kWidth, kHeight, kFps, i)); 74 | + // if (capturer) { 75 | + // return new rtc::RefCountedObject( 76 | + // std::move(capturer)); 77 | + // } 78 | + // } 79 | + 80 | + // return nullptr; 81 | } 82 | 83 | - return nullptr; 84 | + std::unique_ptr capturer = 85 | + absl::WrapUnique(FFmpegVcmCapturer::Create( 86 | + "/video/url", 1280/*width*/, 720/*height*/, 30/*fps*/)); 87 | + if (capturer) 88 | + return new rtc::RefCountedObject(std::move(capturer)); 89 | + else return nullptr; 90 | } 91 | 92 | protected: 93 | explicit CapturerTrackSource( 94 | - std::unique_ptr capturer) 95 | + std::unique_ptr capturer) 96 | : 
VideoTrackSource(/*remote=*/false), capturer_(std::move(capturer)) {} 97 | 98 | private: 99 | rtc::VideoSourceInterface* source() override { 100 | return capturer_.get(); 101 | } 102 | - std::unique_ptr capturer_; 103 | + std::unique_ptr capturer_; 104 | }; 105 | 106 | } // namespace 107 | 108 | Conductor::Conductor(PeerConnectionClient* client, MainWindow* main_wnd) 109 | - : peer_id_(-1), loopback_(false), client_(client), main_wnd_(main_wnd) { 110 | +: peer_id_(-1), 111 | + loopback_(false), 112 | + client_(client), 113 | + main_wnd_(main_wnd), 114 | + worker_thread_(rtc::Thread::Create()), 115 | + task_queue_factory_(webrtc::CreateDefaultTaskQueueFactory()) 116 | +{ 117 | client_->RegisterObserver(this); 118 | main_wnd->RegisterObserver(this); 119 | + 120 | + // worker_thread_ = rtc::Thread::Create(); 121 | + worker_thread_->SetName("pc_worker_thread", nullptr); 122 | + worker_thread_->Start(); 123 | } 124 | 125 | Conductor::~Conductor() { 126 | RTC_DCHECK(!peer_connection_); 127 | + if (worker_thread_) 128 | + worker_thread_->Stop(); 129 | } 130 | 131 | bool Conductor::connection_active() const { 132 | @@ -130,13 +154,26 @@ bool Conductor::InitializePeerConnection() { 133 | RTC_DCHECK(!peer_connection_factory_); 134 | RTC_DCHECK(!peer_connection_); 135 | 136 | + rtc::scoped_refptr default_adm( 137 | + worker_thread_->Invoke*>( 138 | + RTC_FROM_HERE, 139 | + [&]() { 140 | + return new rtc::RefCountedObject( 141 | + task_queue_factory_.get()); 142 | + } 143 | + ) 144 | + ); 145 | + 146 | peer_connection_factory_ = webrtc::CreatePeerConnectionFactory( 147 | - nullptr /* network_thread */, nullptr /* worker_thread */, 148 | - nullptr /* signaling_thread */, nullptr /* default_adm */, 149 | + nullptr /* network_thread */, 150 | + worker_thread_.get() /* worker_thread */, 151 | + nullptr /* signaling_thread */, 152 | + default_adm /* default_adm */, 153 | webrtc::CreateBuiltinAudioEncoderFactory(), 154 | webrtc::CreateBuiltinAudioDecoderFactory(), 155 | 
webrtc::CreateBuiltinVideoEncoderFactory(), 156 | - webrtc::CreateBuiltinVideoDecoderFactory(), nullptr /* audio_mixer */, 157 | + webrtc::CreateBuiltinVideoDecoderFactory(), 158 | + nullptr /* audio_mixer */, 159 | nullptr /* audio_processing */); 160 | 161 | if (!peer_connection_factory_) { 162 | diff --git a/examples/peerconnection/client/conductor.h b/examples/peerconnection/client/conductor.h 163 | index 3c06857a05..4cd644c7bd 100644 164 | --- a/examples/peerconnection/client/conductor.h 165 | +++ b/examples/peerconnection/client/conductor.h 166 | @@ -21,6 +21,8 @@ 167 | #include "api/peer_connection_interface.h" 168 | #include "examples/peerconnection/client/main_wnd.h" 169 | #include "examples/peerconnection/client/peer_connection_client.h" 170 | +#include "rtc_base/thread.h" 171 | +#include "api/task_queue/default_task_queue_factory.h" 172 | 173 | namespace webrtc { 174 | class VideoCaptureModule; 175 | @@ -129,6 +131,8 @@ class Conductor : public webrtc::PeerConnectionObserver, 176 | MainWindow* main_wnd_; 177 | std::deque pending_messages_; 178 | std::string server_; 179 | + std::unique_ptr worker_thread_; 180 | + std::unique_ptr task_queue_factory_; 181 | }; 182 | 183 | #endif // EXAMPLES_PEERCONNECTION_CLIENT_CONDUCTOR_H_ 184 | -------------------------------------------------------------------------------- /ffmpeg-wrapper/src/ffmpeg_video_capture_module.cc: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. 
7 | * 8 | * Referenced from modules/video_capture/video_capture_impl.cc 9 | */ 10 | 11 | #include "ffmpeg_video_capture_module.h" 12 | 13 | #include // usleep() 14 | #include 15 | #include 16 | 17 | #include "api/video/i420_buffer.h" 18 | #include "rtc_base/logging.h" 19 | #include "rtc_base/time_utils.h" 20 | #include "system_wrappers/include/clock.h" 21 | #include "common_video/libyuv/include/webrtc_libyuv.h" 22 | #include "third_party/libyuv/include/libyuv.h" 23 | 24 | 25 | FFmpegVideoCaptureModule::FFmpegVideoCaptureModule(std::string deviceId) 26 | { 27 | deviceId_ = deviceId; 28 | deviceFd_ = NULL; 29 | captureStarted_ = false; 30 | 31 | currentCapability_.width = 0; 32 | currentCapability_.height = 0; 33 | currentCapability_.maxFPS = 0; 34 | currentCapability_.videoType = webrtc::VideoType::kUnknown; 35 | } 36 | 37 | FFmpegVideoCaptureModule::~FFmpegVideoCaptureModule() 38 | { 39 | StopCapture(); 40 | if (deviceFd_ != NULL) { 41 | fflush(deviceFd_); 42 | pclose(deviceFd_); 43 | deviceFd_ = NULL; 44 | } 45 | } 46 | 47 | 48 | void 49 | FFmpegVideoCaptureModule::RegisterCaptureDataCallback( 50 | rtc::VideoSinkInterface* dataCallback) 51 | { 52 | // rtc::CritScope cs(&captureCriticalSection_); 53 | webrtc::MutexLock lock(&mutex_); 54 | dataCallback_ = dataCallback; 55 | } 56 | 57 | 58 | void 59 | FFmpegVideoCaptureModule::DeRegisterCaptureDataCallback() 60 | { 61 | // rtc::CritScope cs(&captureCriticalSection_); 62 | webrtc::MutexLock lock(&mutex_); 63 | dataCallback_ = nullptr; 64 | } 65 | 66 | 67 | webrtc::VideoCaptureModule::DeviceInfo* 68 | FFmpegVideoCaptureModule::CreateDeviceInfo() 69 | { return new FFmpegVideoDeviceInfo(); } 70 | 71 | 72 | int32_t 73 | FFmpegVideoCaptureModule::StartCapture( 74 | const webrtc::VideoCaptureCapability& capability) 75 | { 76 | // referenced from video_capture_linux.cc 77 | if (captureStarted_) { 78 | if (capability.width == currentCapability_.width && 79 | capability.height == currentCapability_.height && 80 | 
capability.videoType == currentCapability_.videoType) 81 | return 0; 82 | else StopCapture(); 83 | } 84 | 85 | // rtc::CritScope cs(&captureCriticalSection_); 86 | webrtc::MutexLock lock(&mutex_); 87 | 88 | // 1. open [named?] pipe 89 | frameCount_ = 0; 90 | currentCapability_ = capability; 91 | std::string pixelFormat; 92 | 93 | { 94 | auto area = currentCapability_.width * currentCapability_.height; 95 | switch (capability.videoType) { 96 | case webrtc::VideoType::kI420: 97 | rawFrameBuffer_.resize(area * 3 / 2); 98 | pixelFormat = "yuv420p"; 99 | break; 100 | case webrtc::VideoType::kRGB24: 101 | rawFrameBuffer_.resize(area * 3); 102 | pixelFormat = "rgb24"; 103 | break; 104 | default: 105 | RTC_LOG(LS_ERROR) << "Can't predict frame information."; 106 | return -1; 107 | } 108 | } 109 | 110 | std::ostringstream command; 111 | command << "/usr/local/bin/ffmpeg"; 112 | // command << " -rtsp_transport tcp"; 113 | command << " -i video.h264"; 114 | command << " -f image2pipe -c:v rawvideo -pix_fmt " << pixelFormat; 115 | command << " -r " << capability.maxFPS; // frames will be dropped if in-fps exceeds out-fps 116 | command << " -s " << capability.width << "x" << capability.height; // output size 117 | command << " -"; // pipe 118 | deviceFd_ = popen(command.str().c_str(), "r"); 119 | 120 | // 2. 
start capture thread; 121 | if (!captureThread_) { 122 | captureThread_.reset(new rtc::PlatformThread( 123 | FFmpegVideoCaptureModule::CaptureThread, this, "CaptureThread")); 124 | captureThread_->Start(); 125 | // captureThread_->SetPriority(rtc::kHighPriority); 126 | } 127 | 128 | captureStarted_ = true; 129 | return 0; 130 | } 131 | 132 | 133 | int32_t 134 | FFmpegVideoCaptureModule::StopCapture() 135 | { 136 | if (captureThread_) { 137 | captureThread_->Stop(); 138 | captureThread_.reset(); 139 | } 140 | 141 | // rtc::CritScope cs(&captureCriticalSection_); 142 | webrtc::MutexLock lock(&mutex_); 143 | if (captureStarted_) { 144 | captureStarted_ = false; 145 | fflush(deviceFd_); 146 | pclose(deviceFd_); 147 | deviceFd_ = NULL; 148 | } 149 | 150 | return 0; 151 | } 152 | 153 | 154 | const char* 155 | FFmpegVideoCaptureModule::CurrentDeviceName() const 156 | { return deviceId_.c_str(); } 157 | 158 | 159 | bool 160 | FFmpegVideoCaptureModule::CaptureStarted() 161 | { return captureStarted_; } 162 | 163 | 164 | int32_t 165 | FFmpegVideoCaptureModule::CaptureSettings(webrtc::VideoCaptureCapability& settings) 166 | { 167 | settings = currentCapability_; 168 | return 0; 169 | } 170 | 171 | 172 | int32_t 173 | FFmpegVideoCaptureModule::SetCaptureRotation(webrtc::VideoRotation /* rotation */) 174 | { return -1; } 175 | 176 | 177 | bool 178 | FFmpegVideoCaptureModule::SetApplyRotation(bool /* enable */) 179 | { return true; } 180 | 181 | 182 | bool 183 | FFmpegVideoCaptureModule::GetApplyRotation() 184 | { return false; } 185 | 186 | 187 | std::vector* 188 | FFmpegVideoCaptureModule::GetDevices() 189 | { 190 | static bool loaded = false; 191 | static std::vector devices; 192 | if (!loaded) { 193 | DeviceMeta device0; 194 | device0.name = std::string("ffmpeg-0"); // arbitrary (name) 195 | device0.id = std::string("F3E977DB27F1"); // random (id) 196 | device0.product = std::string("A0A0860E9BDC"); // random (product id) 197 | device0.orientation = 
webrtc::VideoRotation::kVideoRotation_0; 198 | { 199 | webrtc::VideoCaptureCapability capability; 200 | capability.width = 640; 201 | capability.height = 480; 202 | capability.maxFPS = 30; 203 | capability.interlaced = false; 204 | capability.videoType = webrtc::VideoType::kI420; 205 | device0.capabilities.push_back(capability); 206 | capability.videoType = webrtc::VideoType::kRGB24; 207 | device0.capabilities.push_back(capability); 208 | // kNV12 ("nv12") 209 | // kNV21 ("nv21") 210 | // kARGB ("argb") 211 | } 212 | devices.push_back(device0); 213 | loaded = true; 214 | } 215 | return &devices; 216 | } 217 | 218 | 219 | // bool 220 | // FFmpegVideoCaptureModule::CaptureThread(void* object) 221 | // { return static_cast(object)->CaptureProcess(); } 222 | void 223 | FFmpegVideoCaptureModule::CaptureThread(void* object) 224 | // taken from video_capture_linux.cc 225 | { 226 | FFmpegVideoCaptureModule* module = static_cast(object); 227 | while (module->CaptureProcess()) { } 228 | } 229 | 230 | 231 | bool 232 | FFmpegVideoCaptureModule::CaptureProcess() 233 | { 234 | // rtc::CritScope cs(&captureCriticalSection_); 235 | webrtc::MutexLock lock(&mutex_); 236 | 237 | // do one-cycle's worth of work 238 | if (captureStarted_) { 239 | // Read a frame from the input pipe into the buffer 240 | size_t count = fread(&rawFrameBuffer_[0], 1, 241 | rawFrameBuffer_.size(), deviceFd_); 242 | 243 | // If we didn't get a frame, we're probably at the end 244 | if (count != rawFrameBuffer_.size()) return false; 245 | 246 | CheckI420AndPush((unsigned char*)&rawFrameBuffer_[0], 247 | rawFrameBuffer_.size(), currentCapability_); 248 | } 249 | // else do nothing - thread may close soon 250 | 251 | usleep(0); 252 | return true; 253 | } 254 | 255 | 256 | int32_t FFmpegVideoCaptureModule::CheckI420AndPush( 257 | uint8_t* videoFrame, 258 | size_t videoFrameLength, 259 | const webrtc::VideoCaptureCapability& frameInfo, 260 | int64_t captureTime) 261 | { 262 | const int32_t width = 
frameInfo.width; 263 | const int32_t height = frameInfo.height; 264 | 265 | if (frameInfo.videoType != webrtc::VideoType::kMJPEG && 266 | webrtc::CalcBufferSize(frameInfo.videoType, width, abs(height)) != 267 | videoFrameLength) { 268 | RTC_LOG(LS_ERROR) << "Wrong incoming frame length."; 269 | return -1; 270 | } 271 | 272 | int stride_y = width; 273 | int stride_uv = (width + 1) / 2; 274 | int target_width = width; 275 | int target_height = abs(height); 276 | 277 | rtc::scoped_refptr buffer = webrtc::I420Buffer::Create( 278 | target_width, target_height, stride_y, stride_uv, stride_uv); 279 | 280 | const int conversionResult = libyuv::ConvertToI420( 281 | videoFrame, videoFrameLength, 282 | buffer.get()->MutableDataY(), buffer.get()->StrideY(), 283 | buffer.get()->MutableDataU(), buffer.get()->StrideU(), 284 | buffer.get()->MutableDataV(), buffer.get()->StrideV(), 285 | 0, 0, // No Cropping 286 | width, height, 287 | target_width, target_height, 288 | libyuv::kRotate0, 289 | ConvertVideoType(frameInfo.videoType)); 290 | 291 | if (conversionResult < 0) { 292 | RTC_LOG(LS_ERROR) << "Failed to convert capture frame from type " 293 | << static_cast(frameInfo.videoType) << "to I420."; 294 | return -1; 295 | } 296 | 297 | webrtc::VideoFrame captureFrame(buffer, 0, rtc::TimeMillis(), 298 | webrtc::VideoRotation::kVideoRotation_0); 299 | captureFrame.set_ntp_time_ms(captureTime); 300 | 301 | frameCount_++; 302 | if (dataCallback_) 303 | dataCallback_->OnFrame(captureFrame); 304 | 305 | return 0; 306 | } 307 | -------------------------------------------------------------------------------- /native-to-browser/index.html: -------------------------------------------------------------------------------- 1 | 2 | PeerConnection server test page 3 | 4 | 292 | 293 | 294 |
295 |
296 | 297 |
298 |
299 | Server:
300 | Your name: 301 | 302 | 303 |
304 | 305 |

306 |     
307 |
308 | 309 | 310 | -------------------------------------------------------------------------------- /native-to-browser/index2.html: -------------------------------------------------------------------------------- 1 | 2 | PeerConnection server test page 3 | 4 | 324 | 325 | 326 |
327 |
328 | 329 |
330 |
331 | Server:
332 | Your name: 333 | 334 | 335 |
336 | 337 |

338 |     
339 |
340 | 341 | 342 | -------------------------------------------------------------------------------- /sdk-generator/generate_webrtc_sdk.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # 4 | # Created by TekuConcept on August 22, 2019 5 | # 6 | # Generate webrtc native build project with 7 | # > gn gen out/GCC --args='is_clang=false is_debug=false \ 8 | # > is_component_build=false use_custom_libcxx=false \ 9 | # > use_cxx11=true rtc_include_tests=false \ 10 | # > rtc_build_examples=true treat_warnings_as_errors=false \ 11 | # > rtc_use_h264=true rtc_use_builtin_sw_codecs=false' 12 | # 13 | # Add 'target_os="linux" target_cpu="arm64"' for 14 | # cross-compiled ARM64 builds 15 | # 16 | 17 | print_usage_and_die() { 18 | echo "Usage: $0 [-o ] [-w ] build_dir" 19 | echo "Options:" 20 | echo " --clear: deletes the output directory without" 21 | echo " asking if it already exists" 22 | echo " --no-sysroot: do not copy all third-party shared libraries" 23 | echo " from the webrtc virtual sysroot to the sdk" 24 | echo " -a " 25 | echo " --arch : specifies which sysroot architecture to include" 26 | echo " " 27 | echo " AMD64 (default)" 28 | echo " ARM64" 29 | echo " --update-libs: only update SDK libraries" 30 | exit 1 31 | } 32 | 33 | assert_directory() { 34 | if [ ! -d "$1" ]; then 35 | echo "Error: $1 is not a directory or does not exist" 36 | print_usage_and_die 37 | fi 38 | } 39 | 40 | assert_file_exists() { 41 | if [ ! -f "$1" ]; then 42 | echo "Error: Cannot find file $1" 43 | exit 1 44 | fi 45 | } 46 | 47 | assert_directory_exists() { 48 | if [ ! -d "$1" ]; then 49 | echo "Error: Cannot find directory $1" 50 | exit 1 51 | fi 52 | } 53 | 54 | assert_file_or_dir_exists() { 55 | if [ ! -d "$1" ] && [ ! 
-f "$1" ]; then 56 | echo "Error: Cannot find path $1" 57 | exit 1 58 | fi 59 | } 60 | 61 | 62 | 63 | # 64 | # Parse command line arguments 65 | # 66 | 67 | EXEC_DIR="$(readlink -f "$( dirname "${BASH_SOURCE[0]}")")" 68 | ARCH="AMD64" # default 69 | 70 | # enumerate through all args 71 | # separate key:value pairs from tokens 72 | TOKENS=() 73 | while [[ $# -gt 0 ]]; do 74 | key="$1" 75 | case $key in 76 | -o|--output-dir) 77 | OUTPUT_DIR="$2" 78 | shift 79 | ;; 80 | -w|--webrtc-dir) 81 | WEBRTC_DIR="$2" 82 | shift 83 | ;; 84 | -a|--arch) 85 | ARCH="$2" 86 | shift 87 | ;; 88 | *) # unknown option 89 | TOKENS+=("$1") 90 | ;; 91 | esac 92 | shift # past argument 93 | done 94 | 95 | # parse non-key:value tokens 96 | CLEAR_OUTPUT=false 97 | WITH_SYSROOT=true 98 | UPDATE_INCLUDES=true 99 | UPDATE_LIBS=true 100 | UPDATE_SYSROOT=true 101 | if [[ ${#TOKENS[@]} < 1 ]]; then print_usage_and_die; fi 102 | for token in "${TOKENS[@]}"; do 103 | case $token in 104 | --clear) 105 | CLEAR_OUTPUT=true 106 | ;; 107 | --no-sysroot) 108 | WITH_SYSROOT=false 109 | ;; 110 | --update-libs) 111 | UPDATE_INCLUDES=false 112 | UPDATE_SYSROOT=false 113 | ;; 114 | *) 115 | # TODO: check if too many wildcards 116 | BUILD_DIR=$token 117 | ;; 118 | esac 119 | done 120 | 121 | 122 | 123 | # 124 | # Interpret arguments 125 | # 126 | 127 | if [ -z "$BUILD_DIR" ]; then 128 | echo "build directory not specified" 129 | print_usage_and_die 130 | fi 131 | assert_directory $BUILD_DIR 132 | BUILD_DIR=$(readlink -f $BUILD_DIR) 133 | 134 | if [ -z "$WEBRTC_DIR" ]; then 135 | WEBRTC_DIR=$BUILD_DIR/../.. 136 | fi 137 | assert_directory $WEBRTC_DIR 138 | WEBRTC_DIR=$(readlink -f $WEBRTC_DIR) 139 | 140 | if [ -z "$OUTPUT_DIR" ]; then 141 | OUTPUT_DIR=$(readlink -f .)/webrtc_sdk 142 | else 143 | if [ ! 
-d "$OUTPUT_DIR" ]; then mkdir -p $OUTPUT_DIR; fi 144 | OUTPUT_DIR=$(readlink -f $OUTPUT_DIR) 145 | fi 146 | 147 | if [ $ARCH != "AMD64" ] && [ $ARCH != "ARM64" ]; then 148 | echo -e "\033[0;91m Target sysroot arch is not recognized: ${ARCH}\033[0m" 149 | exit 1 150 | fi 151 | 152 | echo "- - - - - - - - - - - - - - - - - - - - - - - - - - - - - -" 153 | echo "Output: $(realpath --relative-to="$EXEC_DIR" "$OUTPUT_DIR")" 154 | echo "WebRTC: $(realpath --relative-to="$EXEC_DIR" "$WEBRTC_DIR")" 155 | echo "Build: $(realpath --relative-to="$EXEC_DIR" "$BUILD_DIR")" 156 | echo "- - - - - - - - - - - - - - - - - - - - - - - - - - - - - -" 157 | 158 | OUTPUT_LIB_DIR=$OUTPUT_DIR/lib 159 | OUTPUT_INCLUDE_DIR=$OUTPUT_DIR/include 160 | OUTPUT_TEMP_DIR=$OUTPUT_DIR/temp 161 | 162 | if [ "$CLEAR_OUTPUT" = true ]; then 163 | rm -rf $OUTPUT_DIR # reset for next generation 164 | elif [ "$(ls -A $OUTPUT_DIR)" ] && [ "$UPDATE_LIBS" = false ]; then 165 | echo "$OUTPUT_DIR is not empty" 166 | echo "You can skip this check by adding --clear" 167 | read -p "Continue? (Y/N): " confirm && \ 168 | [[ $confirm == [yY] || $confirm == [yY][eE][sS] ]] || \ 169 | exit 1 170 | rm -rf $OUTPUT_DIR 171 | fi 172 | mkdir -p $OUTPUT_LIB_DIR 173 | mkdir -p $OUTPUT_INCLUDE_DIR 174 | 175 | 176 | 177 | # 178 | # Copy libraries and objects files 179 | # 180 | 181 | # Some archives were not included in the webrtc archive. 182 | # To keep things all self-contained for projects linking 183 | # against webrtc, we combine the archives into one. 
BUILD_OBJ_DIR=$BUILD_DIR/obj

# pre-built archives to ship as-is
archive_files=( \
    $BUILD_OBJ_DIR/libwebrtc.a \
    $BUILD_OBJ_DIR/rtc_base/librtc_base.a \
)

# loose object folders whose .o files must accompany the archives
object_folders=(\
    $BUILD_OBJ_DIR/rtc_base/rtc_base/ \
    $BUILD_OBJ_DIR/rtc_base/rtc_json/ \
    $BUILD_OBJ_DIR/third_party/jsoncpp/jsoncpp/ \
    $BUILD_OBJ_DIR/test/field_trial/ \
)

for token in "${archive_files[@]}"; do
    assert_file_exists "$token"
done

# FIX: was "${object_folders}" (no [@]), which expands to only the
# first element — only one of the four folders was being validated
for token in "${object_folders[@]}"; do
    assert_directory_exists "$token"
done

if [ "$UPDATE_LIBS" = true ]; then
    cd "$BUILD_OBJ_DIR"
    # mirror each folder's .o files into the sdk, preserving the
    # obj-relative directory layout (cp --parents)
    for folder in "${object_folders[@]}"; do
        find "$folder" -type f -name \*.o | while read FILE; do
            TARGET=$(realpath --relative-to="$BUILD_OBJ_DIR" "$FILE")
            echo "Copying ${OUTPUT_LIB_DIR}/${TARGET}"
            cp --parents "$TARGET" -t "$OUTPUT_LIB_DIR"
        done
    done

    for archive in "${archive_files[@]}"; do
        TARGET=$(realpath --relative-to="$BUILD_OBJ_DIR" "$archive")
        echo "Copying ${OUTPUT_LIB_DIR}/${TARGET}"
        cp --parents "$TARGET" -t "$OUTPUT_LIB_DIR"
    done
    cd "$EXEC_DIR"
fi



#
# Copy include files
#

# Not all folders contain headers we want to copy,
# so only white-list the folders we do want to copy.
include_folders=(\
    "api" \
    "audio" \
    "base" \
    "call" \
    "common_audio" \
    "common_video" \
    "logging" \
    "media" \
    "modules" \
    "p2p" \
    "pc" \
    "rtc_base" \
    "rtc_tools" \
    "stats" \
    "system_wrappers" \
    "test" \
    "video" \
)

if [ "$UPDATE_INCLUDES" = true ]; then
    cd "$WEBRTC_DIR" # enter source directory to capture file-directory structure
    for folder in "${include_folders[@]}"; do
        find "$WEBRTC_DIR/$folder" -type f -name \*.h | while read FILE; do
            TARGET=$(realpath --relative-to="$WEBRTC_DIR" "$FILE")
            echo "Copying ${OUTPUT_INCLUDE_DIR}/${TARGET}"
            cp --parents "$TARGET" -t "$OUTPUT_INCLUDE_DIR"
        done
    done
    cd "$EXEC_DIR" # go back to executing directory

    echo "Copying ${OUTPUT_INCLUDE_DIR}/common_types.h"
    cp "$WEBRTC_DIR/common_types.h" "$OUTPUT_INCLUDE_DIR/common_types.h"
fi



#
# Copy sysroot (debian-sid; library triplet now follows --arch)
#

if [ "$WITH_SYSROOT" = true ]; then
    OUTPUT_SYSROOT_DIR=$OUTPUT_DIR/sysroot
    OUTPUT_SYSROOT_INCLUDE_DIR=${OUTPUT_SYSROOT_DIR}/usr/include
    OUTPUT_SYSROOT_LIB_DIR=${OUTPUT_SYSROOT_DIR}/usr/lib
    mkdir -p "${OUTPUT_SYSROOT_INCLUDE_DIR}"
    mkdir -p "${OUTPUT_SYSROOT_LIB_DIR}"

    if [ "$ARCH" == "AMD64" ]; then
        SYSROOT_DIR=$WEBRTC_DIR/build/linux/debian_sid_amd64-sysroot
        SYSROOT_LIB_TRIPLET=x86_64-linux-gnu
    elif [ "$ARCH" == "ARM64" ]; then
        SYSROOT_DIR=$WEBRTC_DIR/build/linux/debian_sid_arm64-sysroot
        SYSROOT_LIB_TRIPLET=aarch64-linux-gnu
    else
        echo "An internal error occured"
        exit 1
    fi
    SYSROOT_INCLUDE_DIR=$SYSROOT_DIR/usr/include
    # FIX: this previously hard-coded x86_64-linux-gnu, so an ARM64
    # sysroot copy would have pulled x86 libraries (or failed)
    SYSROOT_LIB_DIR=$SYSROOT_DIR/usr/lib/$SYSROOT_LIB_TRIPLET

    assert_directory_exists "$SYSROOT_DIR"
    assert_directory_exists "$SYSROOT_INCLUDE_DIR"
    assert_directory_exists "$SYSROOT_LIB_DIR"

    # headers pulled from the sysroot / third-party trees verbatim
    third_party_includes=(\
        "${SYSROOT_INCLUDE_DIR}/atk-1.0/."
\
        "${SYSROOT_INCLUDE_DIR}/cairo/." \
        "${SYSROOT_INCLUDE_DIR}/gdk-pixbuf-2.0/." \
        "${SYSROOT_INCLUDE_DIR}/glib-2.0/." \
        "${SYSROOT_INCLUDE_DIR}/gtk-3.0/." \
        "${SYSROOT_INCLUDE_DIR}/jsoncpp/." \
        "${SYSROOT_INCLUDE_DIR}/pango-1.0/." \
        "${SYSROOT_LIB_DIR}/glib-2.0/include/glibconfig.h" \
        "${WEBRTC_DIR}/third_party/libyuv/include/libyuv" \
        "${WEBRTC_DIR}/third_party/libyuv/include/libyuv.h" \
        "${WEBRTC_DIR}/third_party/abseil-cpp/absl" \
    )

    if [ "$UPDATE_SYSROOT" = true ]; then
        for token in "${third_party_includes[@]}"; do
            assert_file_or_dir_exists "$token"
            echo "Copying $(basename "$token")"
            cp -ar "$token" -t "$OUTPUT_SYSROOT_INCLUDE_DIR"
        done

        # cleanup absl folder: keep only headers
        # NOTE: this takes time (~250 ms)
        find "$OUTPUT_SYSROOT_INCLUDE_DIR/absl" ! -name \*.h -type f | \
            while read FILE; do rm "$FILE"; done

        # copy the regular (non-symlink) shared libraries
        find "$SYSROOT_LIB_DIR" -maxdepth 1 -type f -name \*.so\* | \
            while read FILE; do
                echo "Copying $(basename "$FILE")"
                cp "$FILE" -t "$OUTPUT_SYSROOT_LIB_DIR"
            done

        # for each symlinked library: copy the real file once, then
        # rebuild the whole symlink chain inside the output directory
        find "$SYSROOT_LIB_DIR" -maxdepth 1 -type l -name \*.so\* | \
            while read FILE; do
                # copy base file
                base_file=$(readlink -f "$FILE")
                echo "Copying $(basename "$base_file")"
                cp "$base_file" -t "$OUTPUT_SYSROOT_LIB_DIR"

                # rebuild symlinks, innermost missing link first:
                # each pass walks the chain until it reaches a name that
                # already exists in the output dir, creates one link
                # pointing at it, and repeats until nothing is missing
                loop_continue=true
                while [ "$loop_continue" = true ]; do
                    reflink=$FILE
                    parent=""
                    next=$OUTPUT_SYSROOT_LIB_DIR/$(basename "$reflink")
                    while [ ! -L "$next" ] && [ ! -f "$next" ]; do
                        # next link assumed to be relative
                        # TODO: maybe save relative path to resolve next link
                        reflink=${SYSROOT_LIB_DIR}/$(readlink "$reflink")
                        parent=$next
                        next=$OUTPUT_SYSROOT_LIB_DIR/$(basename "$reflink")
                    done
                    if [ -z "$parent" ]; then
                        loop_continue=false
                    else
                        # we use basename because the symlink will exist
                        # in the same directory as the target
                        echo "Symlink $(basename "$parent")"
                        ln -s "$(basename "$next")" "$parent"
                    fi
                done
            done
    fi

    # rewrite the dirs as sdk-relative paths for the generated cmake
    SYSROOT_LIB_DIR=$(realpath --relative-to="$OUTPUT_DIR" "$OUTPUT_SYSROOT_LIB_DIR")
    SYSROOT_LIB_DIR="\${WEBRTC_SDK_DIR}/${SYSROOT_LIB_DIR}"
    SYSROOT_INCLUDE_DIR=$(realpath --relative-to="$OUTPUT_DIR" "$OUTPUT_SYSROOT_INCLUDE_DIR")
    SYSROOT_INCLUDE_DIR="\${WEBRTC_SDK_DIR}/${SYSROOT_INCLUDE_DIR}"
else
    # no bundled sysroot: fall back to the host's system paths
    SYSROOT_LIB_DIR=/usr/lib
    SYSROOT_INCLUDE_DIR=/usr/include
fi



#
# Generate cmake file
#

CMAKE_FILE=$OUTPUT_DIR/WebRTCConfigure.cmake
NINJA_BUILD_REFERENCE=$BUILD_OBJ_DIR/examples/peerconnection_client.ninja
assert_file_exists "$NINJA_BUILD_REFERENCE"
# first line of the ninja file is 'defines = ...'; clip the prefix
# and split the remainder into one define per line
DEFINES=$(head -n 1 "$NINJA_BUILD_REFERENCE")
DEFINES=$(echo ${DEFINES:10}) # clip first 10 chars
DEFINES=$(echo $DEFINES | sed 's/ /\n/g')

# Note: C and C++ flags can also be found in ${NINJA_BUILD_REFERENCE}
# but for now they're hard coded here.
387 | 388 | if [ $ARCH == "AMD64" ]; then 389 | ARCH_FLAGS="-m64 \\-march=x86-64 \\" 390 | elif [ $ARCH == "ARM64" ]; then 391 | ARCH_FLAGS="-m64 \\-march=arm64 \\" 392 | else 393 | ARCH_FLAGS= 394 | fi 395 | 396 | printf "\ 397 | # 398 | # Sets up variables needed to compile against the WebRTC library 399 | # 400 | # NOTE: set WEBRTC_SDK_DIR to the path of the generated 401 | # sdk folder before including this file 402 | # 403 | 404 | SET(WEBRTC_INCLUDE_DIR 405 | \${WEBRTC_SDK_DIR}/include 406 | ${SYSROOT_INCLUDE_DIR} 407 | ) 408 | 409 | SET(WEBRTC_LIB_DIR \${WEBRTC_SDK_DIR}/lib) 410 | FILE(GLOB_RECURSE WEBRTC_LIB \${WEBRTC_LIB_DIR}/*.a) 411 | FILE(GLOB_RECURSE WEBRTC_DEPENDENCIES 412 | \${WEBRTC_LIB_DIR}/rtc_base/json.o 413 | \${WEBRTC_LIB_DIR}/third_party/jsoncpp/json_reader.o 414 | \${WEBRTC_LIB_DIR}/third_party/jsoncpp/json_writer.o 415 | \${WEBRTC_LIB_DIR}/third_party/jsoncpp/json_value.o 416 | \${WEBRTC_LIB_DIR}/test/field_trial.o 417 | ) 418 | 419 | SET(WEBRTC_LIBS 420 | X11 Xcomposite Xext Xrender atomic dl pthread rt 421 | gmodule-2.0 gtk-3 gdk-3 pangocairo-1.0 pango-1.0 422 | atk-1.0 cairo-gobject cairo gdk_pixbuf-2.0 gio-2.0 423 | gobject-2.0 gthread-2.0 glib-2.0 m jsoncpp 424 | 425 | \${WEBRTC_LIB} 426 | \${WEBRTC_DEPENDENCIES} 427 | ) 428 | 429 | SET(WEBRTC_CXX_FLAGS \"\\ 430 | -Wno-deprecated-declarations \\ 431 | -fno-strict-aliasing \\ 432 | --param=ssp-buffer-size=4 \\ 433 | -fstack-protector \\ 434 | -Wno-builtin-macro-redefined \\ 435 | -funwind-tables \\ 436 | -fPIC \\ 437 | -pipe \\ 438 | -pthread \\ 439 | ${ARCH_FLAGS} 440 | -Wall \\ 441 | -Wno-unused-local-typedefs \\ 442 | -Wno-deprecated-declarations \\ 443 | -Wno-comments \\ 444 | -Wno-missing-field-initializers \\ 445 | -Wno-unused-parameter \\ 446 | -fno-ident \\ 447 | -fdata-sections \\ 448 | -ffunction-sections \\ 449 | -fno-omit-frame-pointer \\ 450 | -fvisibility=hidden \\ 451 | -Wextra \\ 452 | -Wno-unused-parameter \\ 453 | -Wno-missing-field-initializers \\ 454 | -Wno-narrowing 
\\ 455 | -fno-exceptions \\ 456 | -fno-rtti \\ 457 | -fvisibility-inlines-hidden \\ 458 | -Wnon-virtual-dtor \\ 459 | \") 460 | 461 | SET(WEBRTC_LINK_OPTIONS \"\\ 462 | -Wl,--fatal-warnings \\ 463 | -fPIC \\ 464 | -Wl,-z,noexecstack \\ 465 | -Wl,-z,now \\ 466 | -Wl,-z,relro \\ 467 | -Wl,-z,defs \\ 468 | -Wl,--as-needed \\ 469 | -fuse-ld=gold \\ 470 | -Wl,--threads \\ 471 | -Wl,--thread-count=4 \\ 472 | -Wl,--icf=all \\ 473 | -m64 \\ 474 | -Wl,-O2 \\ 475 | -Wl,--gc-sections \\ 476 | -L${SYSROOT_LIB_DIR} \\ 477 | -Wl,-rpath-link=${SYSROOT_LIB_DIR} \\ 478 | -pie \\ 479 | -Wl,-rpath-link=. \\ 480 | -Wl,--disable-new-dtags \\ 481 | \") 482 | 483 | SET(WEBRTC_COMPILE_DEFINITIONS ${DEFINES} ) 484 | " > $CMAKE_FILE 485 | 486 | 487 | 488 | # 489 | # Finalize 490 | # 491 | 492 | echo -e "\033[0;92m -- FINISHED --\033[0m" 493 | -------------------------------------------------------------------------------- /ffmpeg-wrapper/src/ffmpeg_audio_device.cc: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. 
7 | * 8 | * Referenced from modules/audio_device/dummy/file_audio_device.cc 9 | */ 10 | 11 | #include "ffmpeg_audio_device.h" 12 | 13 | #include 14 | #include 15 | 16 | #include "rtc_base/checks.h" 17 | #include "rtc_base/logging.h" 18 | #include "rtc_base/platform_thread.h" 19 | #include "rtc_base/time_utils.h" 20 | #include "system_wrappers/include/sleep.h" 21 | 22 | 23 | const int kRecordingFixedSampleRate = 48000; 24 | const size_t kRecordingNumChannels = 2; 25 | const int kPlayoutFixedSampleRate = 48000; 26 | const size_t kPlayoutNumChannels = 2; 27 | const size_t kPlayoutBufferSize = 28 | kPlayoutFixedSampleRate / 100 * kPlayoutNumChannels * 2; 29 | const size_t kRecordingBufferSize = 30 | kRecordingFixedSampleRate / 100 * kRecordingNumChannels * 2; 31 | 32 | FFmpegAudioDevice::FFmpegAudioDevice() 33 | : _ptrAudioBuffer(NULL), 34 | _recordingBuffer(NULL), 35 | _playoutBuffer(NULL), 36 | _recordingFramesLeft(0), 37 | _playoutFramesLeft(0), 38 | _recordingBufferSizeIn10MS(0), 39 | _recordingFramesIn10MS(0), 40 | _playoutFramesIn10MS(0), 41 | _playing(false), 42 | _recording(false), 43 | _lastCallPlayoutMillis(0), 44 | _lastCallRecordMillis(0), 45 | // _outputFile(*webrtc::FileWrapper::Create()), 46 | // _inputFile(*FileWrapper::Create()), 47 | _inputStream(NULL), 48 | _outputFilename("webrtcOutputFile.dat"), 49 | _inputFilename("ffmpegInputStream.pipe") 50 | { } 51 | 52 | FFmpegAudioDevice::~FFmpegAudioDevice() { 53 | delete &_outputFile; 54 | // delete &_inputFile; 55 | if (_inputStream != NULL) { 56 | fflush(_inputStream); 57 | pclose(_inputStream); 58 | _inputStream = NULL; 59 | } 60 | } 61 | 62 | int32_t FFmpegAudioDevice::ActiveAudioLayer( 63 | webrtc::AudioDeviceModule::AudioLayer& audioLayer) const { 64 | return -1; 65 | } 66 | 67 | webrtc::AudioDeviceGeneric::InitStatus FFmpegAudioDevice::Init() { 68 | return InitStatus::OK; 69 | } 70 | 71 | int32_t FFmpegAudioDevice::Terminate() { 72 | return 0; 73 | } 74 | 75 | bool FFmpegAudioDevice::Initialized() 
const { 76 | return true; 77 | } 78 | 79 | int16_t FFmpegAudioDevice::PlayoutDevices() { 80 | return 1; 81 | } 82 | 83 | int16_t FFmpegAudioDevice::RecordingDevices() { 84 | return 1; 85 | } 86 | 87 | int32_t FFmpegAudioDevice::PlayoutDeviceName(uint16_t index, 88 | char name[webrtc::kAdmMaxDeviceNameSize], 89 | char guid[webrtc::kAdmMaxGuidSize]) { 90 | const char* kName = "dummy_device"; 91 | const char* kGuid = "dummy_device_unique_id"; 92 | if (index < 1) { 93 | memset(name, 0, webrtc::kAdmMaxDeviceNameSize); 94 | memset(guid, 0, webrtc::kAdmMaxGuidSize); 95 | memcpy(name, kName, strlen(kName)); 96 | memcpy(guid, kGuid, strlen(guid)); 97 | return 0; 98 | } 99 | return -1; 100 | } 101 | 102 | int32_t FFmpegAudioDevice::RecordingDeviceName(uint16_t index, 103 | char name[webrtc::kAdmMaxDeviceNameSize], 104 | char guid[webrtc::kAdmMaxGuidSize]) { 105 | const char* kName = "dummy_device"; 106 | const char* kGuid = "dummy_device_unique_id"; 107 | if (index < 1) { 108 | memset(name, 0, webrtc::kAdmMaxDeviceNameSize); 109 | memset(guid, 0, webrtc::kAdmMaxGuidSize); 110 | memcpy(name, kName, strlen(kName)); 111 | memcpy(guid, kGuid, strlen(guid)); 112 | return 0; 113 | } 114 | return -1; 115 | } 116 | 117 | int32_t FFmpegAudioDevice::SetPlayoutDevice(uint16_t index) { 118 | if (index == 0) { 119 | _playout_index = index; 120 | return 0; 121 | } 122 | return -1; 123 | } 124 | 125 | int32_t FFmpegAudioDevice::SetPlayoutDevice( 126 | webrtc::AudioDeviceModule::WindowsDeviceType device) { 127 | return -1; 128 | } 129 | 130 | int32_t FFmpegAudioDevice::SetRecordingDevice(uint16_t index) { 131 | if (index == 0) { 132 | _record_index = index; 133 | return _record_index; 134 | } 135 | return -1; 136 | } 137 | 138 | int32_t FFmpegAudioDevice::SetRecordingDevice( 139 | webrtc::AudioDeviceModule::WindowsDeviceType device) { 140 | return -1; 141 | } 142 | 143 | int32_t FFmpegAudioDevice::PlayoutIsAvailable(bool& available) { 144 | if (_playout_index == 0) { 145 | available = true; 
146 | return _playout_index; 147 | } 148 | available = false; 149 | return -1; 150 | } 151 | 152 | int32_t FFmpegAudioDevice::InitPlayout() { 153 | // rtc::CritScope lock(&_critSect); 154 | webrtc::MutexLock lock(&mutex_); 155 | 156 | if (_playing) { 157 | return -1; 158 | } 159 | 160 | _playoutFramesIn10MS = static_cast(kPlayoutFixedSampleRate / 100); 161 | 162 | if (_ptrAudioBuffer) { 163 | // Update webrtc audio buffer with the selected parameters 164 | _ptrAudioBuffer->SetPlayoutSampleRate(kPlayoutFixedSampleRate); 165 | _ptrAudioBuffer->SetPlayoutChannels(kPlayoutNumChannels); 166 | } 167 | return 0; 168 | } 169 | 170 | bool FFmpegAudioDevice::PlayoutIsInitialized() const { 171 | return _playoutFramesIn10MS != 0; 172 | } 173 | 174 | int32_t FFmpegAudioDevice::RecordingIsAvailable(bool& available) { 175 | if (_record_index == 0) { 176 | available = true; 177 | return _record_index; 178 | } 179 | available = false; 180 | return -1; 181 | } 182 | 183 | int32_t FFmpegAudioDevice::InitRecording() { 184 | // rtc::CritScope lock(&_critSect); 185 | webrtc::MutexLock lock(&mutex_); 186 | 187 | if (_recording) { 188 | return -1; 189 | } 190 | 191 | _recordingFramesIn10MS = static_cast(kRecordingFixedSampleRate / 100); 192 | 193 | if (_ptrAudioBuffer) { 194 | _ptrAudioBuffer->SetRecordingSampleRate(kRecordingFixedSampleRate); 195 | _ptrAudioBuffer->SetRecordingChannels(kRecordingNumChannels); 196 | } 197 | return 0; 198 | } 199 | 200 | bool FFmpegAudioDevice::RecordingIsInitialized() const { 201 | return _recordingFramesIn10MS != 0; 202 | } 203 | 204 | int32_t FFmpegAudioDevice::StartPlayout() { 205 | if (_playing) { 206 | return 0; 207 | } 208 | 209 | _playing = true; 210 | _playoutFramesLeft = 0; 211 | 212 | if (!_playoutBuffer) { 213 | _playoutBuffer = new int8_t[kPlayoutBufferSize]; 214 | } 215 | if (!_playoutBuffer) { 216 | _playing = false; 217 | return -1; 218 | } 219 | 220 | // PLAYOUT 221 | // if (!_outputFilename.empty() && 222 | // 
!_outputFile.OpenFile(_outputFilename.c_str(), false)) { 223 | if (!_outputFilename.empty()) { 224 | _outputFile = webrtc::FileWrapper::OpenWriteOnly(_outputFilename.c_str()); 225 | if (!_outputFile.is_open()) { 226 | RTC_LOG(LS_ERROR) << "Failed to open playout file: " << _outputFilename; 227 | _playing = false; 228 | delete[] _playoutBuffer; 229 | _playoutBuffer = NULL; 230 | return -1; 231 | } 232 | } 233 | 234 | _ptrThreadPlay.reset(new rtc::PlatformThread( 235 | PlayThreadFunc, this, "webrtc_audio_module_play_thread")); 236 | _ptrThreadPlay->Start(); 237 | // _ptrThreadPlay->SetPriority(rtc::kRealtimePriority); 238 | 239 | RTC_LOG(LS_INFO) << "Started playout capture to output file: " 240 | << _outputFilename; 241 | return 0; 242 | } 243 | 244 | int32_t FFmpegAudioDevice::StopPlayout() { 245 | { 246 | // rtc::CritScope lock(&_critSect); 247 | webrtc::MutexLock lock(&mutex_); 248 | _playing = false; 249 | } 250 | 251 | // stop playout thread first 252 | if (_ptrThreadPlay) { 253 | _ptrThreadPlay->Stop(); 254 | _ptrThreadPlay.reset(); 255 | } 256 | 257 | // rtc::CritScope lock(&_critSect); 258 | webrtc::MutexLock lock(&mutex_); 259 | 260 | _playoutFramesLeft = 0; 261 | delete[] _playoutBuffer; 262 | _playoutBuffer = NULL; 263 | // _outputFile.CloseFile(); 264 | _outputFile.Close(); 265 | 266 | RTC_LOG(LS_INFO) << "Stopped playout capture to output file: " 267 | << _outputFilename; 268 | return 0; 269 | } 270 | 271 | bool FFmpegAudioDevice::Playing() const { 272 | return _playing; 273 | } 274 | 275 | int32_t FFmpegAudioDevice::StartRecording() { 276 | _recording = true; 277 | 278 | // Make sure we only create the buffer once. 
279 | _recordingBufferSizeIn10MS = 280 | _recordingFramesIn10MS * kRecordingNumChannels * 2; 281 | if (!_recordingBuffer) { 282 | _recordingBuffer = new int8_t[_recordingBufferSizeIn10MS]; 283 | } 284 | 285 | std::ostringstream command; 286 | command << "/usr/local/bin/ffmpeg"; 287 | // command << " -rtsp_transport tcp"; 288 | command << " -i rtmp://localhost/camera -vn"; 289 | command << " -f s16le -c:a pcm_s16le"; 290 | command << " -ac 2"; // number of channels 291 | command << " -ar " << (kRecordingFixedSampleRate); 292 | command << " pipe:"; 293 | 294 | if ((_inputStream != NULL) || 295 | ((_inputStream = popen(command.str().c_str(), "r")) == NULL)) { 296 | RTC_LOG(LS_ERROR) << "Failed to open audio input file: " << _inputFilename; 297 | _recording = false; 298 | delete[] _recordingBuffer; 299 | _recordingBuffer = NULL; 300 | return -1; 301 | } 302 | 303 | 304 | _ptrThreadRec.reset(new rtc::PlatformThread( 305 | RecThreadFunc, this, "webrtc_audio_module_capture_thread")); 306 | 307 | _ptrThreadRec->Start(); 308 | // _ptrThreadRec->SetPriority(rtc::kRealtimePriority); 309 | 310 | RTC_LOG(LS_INFO) << "Started recording from input file: " << _inputFilename; 311 | 312 | return 0; 313 | } 314 | 315 | int32_t FFmpegAudioDevice::StopRecording() { 316 | { 317 | // rtc::CritScope lock(&_critSect); 318 | webrtc::MutexLock lock(&mutex_); 319 | _recording = false; 320 | } 321 | 322 | if (_ptrThreadRec) { 323 | _ptrThreadRec->Stop(); 324 | _ptrThreadRec.reset(); 325 | } 326 | 327 | // rtc::CritScope lock(&_critSect); 328 | webrtc::MutexLock lock(&mutex_); 329 | _recordingFramesLeft = 0; 330 | if (_recordingBuffer) { 331 | delete[] _recordingBuffer; 332 | _recordingBuffer = NULL; 333 | } 334 | // _inputFile.CloseFile(); 335 | fflush(_inputStream); 336 | pclose(_inputStream); 337 | _inputStream = NULL; 338 | 339 | RTC_LOG(LS_INFO) << "Stopped recording from input file: " << _inputFilename; 340 | return 0; 341 | } 342 | 343 | bool FFmpegAudioDevice::Recording() const { 344 | 
return _recording; 345 | } 346 | 347 | int32_t FFmpegAudioDevice::InitSpeaker() { 348 | return -1; 349 | } 350 | 351 | bool FFmpegAudioDevice::SpeakerIsInitialized() const { 352 | return false; 353 | } 354 | 355 | int32_t FFmpegAudioDevice::InitMicrophone() { 356 | return 0; 357 | } 358 | 359 | bool FFmpegAudioDevice::MicrophoneIsInitialized() const { 360 | return true; 361 | } 362 | 363 | int32_t FFmpegAudioDevice::SpeakerVolumeIsAvailable(bool& available) { 364 | return -1; 365 | } 366 | 367 | int32_t FFmpegAudioDevice::SetSpeakerVolume(uint32_t volume) { 368 | return -1; 369 | } 370 | 371 | int32_t FFmpegAudioDevice::SpeakerVolume(uint32_t& volume) const { 372 | return -1; 373 | } 374 | 375 | int32_t FFmpegAudioDevice::MaxSpeakerVolume(uint32_t& maxVolume) const { 376 | return -1; 377 | } 378 | 379 | int32_t FFmpegAudioDevice::MinSpeakerVolume(uint32_t& minVolume) const { 380 | return -1; 381 | } 382 | 383 | int32_t FFmpegAudioDevice::MicrophoneVolumeIsAvailable(bool& available) { 384 | return -1; 385 | } 386 | 387 | int32_t FFmpegAudioDevice::SetMicrophoneVolume(uint32_t volume) { 388 | return -1; 389 | } 390 | 391 | int32_t FFmpegAudioDevice::MicrophoneVolume(uint32_t& volume) const { 392 | return -1; 393 | } 394 | 395 | int32_t FFmpegAudioDevice::MaxMicrophoneVolume(uint32_t& maxVolume) const { 396 | return -1; 397 | } 398 | 399 | int32_t FFmpegAudioDevice::MinMicrophoneVolume(uint32_t& minVolume) const { 400 | return -1; 401 | } 402 | 403 | int32_t FFmpegAudioDevice::SpeakerMuteIsAvailable(bool& available) { 404 | return -1; 405 | } 406 | 407 | int32_t FFmpegAudioDevice::SetSpeakerMute(bool enable) { 408 | return -1; 409 | } 410 | 411 | int32_t FFmpegAudioDevice::SpeakerMute(bool& enabled) const { 412 | return -1; 413 | } 414 | 415 | int32_t FFmpegAudioDevice::MicrophoneMuteIsAvailable(bool& available) { 416 | return -1; 417 | } 418 | 419 | int32_t FFmpegAudioDevice::SetMicrophoneMute(bool enable) { 420 | return -1; 421 | } 422 | 423 | int32_t 
FFmpegAudioDevice::MicrophoneMute(bool& enabled) const { 424 | return -1; 425 | } 426 | 427 | int32_t FFmpegAudioDevice::StereoPlayoutIsAvailable(bool& available) { 428 | available = true; 429 | return 0; 430 | } 431 | 432 | int32_t FFmpegAudioDevice::SetStereoPlayout(bool enable) { 433 | return 0; 434 | } 435 | 436 | int32_t FFmpegAudioDevice::StereoPlayout(bool& enabled) const { 437 | enabled = true; 438 | return 0; 439 | } 440 | 441 | int32_t FFmpegAudioDevice::StereoRecordingIsAvailable(bool& available) { 442 | available = true; 443 | return 0; 444 | } 445 | 446 | int32_t FFmpegAudioDevice::SetStereoRecording(bool enable) { 447 | return 0; 448 | } 449 | 450 | int32_t FFmpegAudioDevice::StereoRecording(bool& enabled) const { 451 | enabled = true; 452 | return 0; 453 | } 454 | 455 | int32_t FFmpegAudioDevice::PlayoutDelay(uint16_t& delayMS) const { 456 | return 0; 457 | } 458 | 459 | void FFmpegAudioDevice::AttachAudioBuffer(webrtc::AudioDeviceBuffer* audioBuffer) { 460 | // rtc::CritScope lock(&_critSect); 461 | webrtc::MutexLock lock(&mutex_); 462 | 463 | _ptrAudioBuffer = audioBuffer; 464 | 465 | // Inform the AudioBuffer about default settings for this implementation. 466 | // Set all values to zero here since the actual settings will be done by 467 | // InitPlayout and InitRecording later. 
468 | _ptrAudioBuffer->SetRecordingSampleRate(0); 469 | _ptrAudioBuffer->SetPlayoutSampleRate(0); 470 | _ptrAudioBuffer->SetRecordingChannels(0); 471 | _ptrAudioBuffer->SetPlayoutChannels(0); 472 | } 473 | 474 | // bool FFmpegAudioDevice::PlayThreadFunc(void* pThis) { 475 | // return (static_cast(pThis)->PlayThreadProcess()); 476 | // } 477 | void FFmpegAudioDevice::PlayThreadFunc(void* pThis) { 478 | FFmpegAudioDevice* device = static_cast(pThis); 479 | while (device->PlayThreadProcess()) { } 480 | } 481 | 482 | // bool FFmpegAudioDevice::RecThreadFunc(void* pThis) { 483 | // return (static_cast(pThis)->RecThreadProcess()); 484 | // } 485 | void FFmpegAudioDevice::RecThreadFunc(void* pThis) { 486 | FFmpegAudioDevice* device = static_cast(pThis); 487 | while (device->RecThreadProcess()) { } 488 | } 489 | 490 | bool FFmpegAudioDevice::PlayThreadProcess() { 491 | if (!_playing) { 492 | return false; 493 | } 494 | int64_t currentTime = rtc::TimeMillis(); 495 | // _critSect.Enter(); 496 | mutex_.Lock(); 497 | 498 | if (_lastCallPlayoutMillis == 0 || 499 | currentTime - _lastCallPlayoutMillis >= 10) { 500 | // _critSect.Leave(); 501 | mutex_.Unlock(); 502 | _ptrAudioBuffer->RequestPlayoutData(_playoutFramesIn10MS); 503 | // _critSect.Enter(); 504 | mutex_.Lock(); 505 | 506 | _playoutFramesLeft = _ptrAudioBuffer->GetPlayoutData(_playoutBuffer); 507 | RTC_DCHECK_EQ(_playoutFramesIn10MS, _playoutFramesLeft); 508 | if (_outputFile.is_open()) { 509 | _outputFile.Write(_playoutBuffer, kPlayoutBufferSize); 510 | } 511 | _lastCallPlayoutMillis = currentTime; 512 | } 513 | _playoutFramesLeft = 0; 514 | // _critSect.Leave(); 515 | mutex_.Unlock(); 516 | 517 | int64_t deltaTimeMillis = rtc::TimeMillis() - currentTime; 518 | if (deltaTimeMillis < 10) { 519 | webrtc::SleepMs(10 - deltaTimeMillis); 520 | } 521 | 522 | return true; 523 | } 524 | 525 | bool FFmpegAudioDevice::RecThreadProcess() { 526 | if (!_recording) { 527 | return false; 528 | } 529 | 530 | int64_t currentTime = 
rtc::TimeMillis(); 531 | // _critSect.Enter(); 532 | mutex_.Lock(); 533 | 534 | if (_lastCallRecordMillis == 0 || (currentTime - _lastCallRecordMillis) >= 10) { 535 | // if (_inputFile.is_open()) { 536 | // if (_inputFile.Read(_recordingBuffer, kRecordingBufferSize) > 0) { 537 | // _ptrAudioBuffer->SetRecordedBuffer(_recordingBuffer, 538 | // _recordingFramesIn10MS); 539 | // } else { 540 | // _inputFile.Rewind(); 541 | // } 542 | // _lastCallRecordMillis = currentTime; 543 | // _critSect.Leave(); 544 | // _ptrAudioBuffer->DeliverRecordedData(); 545 | // _critSect.Enter(); 546 | // } 547 | if (_inputStream != NULL) { 548 | size_t bytes_read = fread(_recordingBuffer, 1, kRecordingBufferSize, _inputStream); 549 | if (static_cast(bytes_read) > 0) { 550 | _ptrAudioBuffer->SetRecordedBuffer(_recordingBuffer, 551 | _recordingFramesIn10MS); 552 | } 553 | _lastCallRecordMillis = currentTime; 554 | // _critSect.Leave(); 555 | mutex_.Unlock(); 556 | _ptrAudioBuffer->DeliverRecordedData(); 557 | // _critSect.Enter(); 558 | mutex_.Lock(); 559 | } 560 | } 561 | 562 | // _critSect.Leave(); 563 | mutex_.Unlock(); 564 | 565 | int64_t deltaTimeMillis = rtc::TimeMillis() - currentTime; 566 | if (deltaTimeMillis < 10) { 567 | webrtc::SleepMs(10 - deltaTimeMillis); 568 | } 569 | 570 | return true; 571 | } 572 | -------------------------------------------------------------------------------- /ffmpeg-wrapper/src/ffmpeg_audio_device_module.cc: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. 
7 | * 8 | * Referenced from modules/audio_device/audio_device_impl.cc 9 | */ 10 | 11 | #include "ffmpeg_audio_device_module.h" 12 | 13 | #include 14 | 15 | #include "modules/audio_device/audio_device_config.h" // IWYU pragma: keep 16 | #include "modules/audio_device/audio_device_generic.h" 17 | #include "rtc_base/checks.h" 18 | #include "rtc_base/logging.h" 19 | // #include "rtc_base/refcountedobject.h" 20 | // #include "rtc_base/scoped_ref_ptr.h" 21 | #include "rtc_base/ref_counted_object.h" 22 | #include "api/scoped_refptr.h" 23 | #include "system_wrappers/include/metrics.h" 24 | 25 | #include "ffmpeg_audio_device.h" 26 | #include "ffmpeg_audio_device_factory.h" 27 | 28 | #include "modules/audio_device/dummy/audio_device_dummy.h" 29 | 30 | 31 | #define CHECKinitialized_() \ 32 | { \ 33 | if (!initialized_) { \ 34 | return -1; \ 35 | } \ 36 | } 37 | 38 | #define CHECKinitialized__BOOL() \ 39 | { \ 40 | if (!initialized_) { \ 41 | return false; \ 42 | } \ 43 | } 44 | 45 | FFmpegAudioDeviceModule::FFmpegAudioDeviceModule( 46 | webrtc::TaskQueueFactory* task_queue_factory) 47 | : audio_device_buffer_(task_queue_factory) 48 | { 49 | RTC_LOG(INFO) << __FUNCTION__; 50 | audio_device_.reset(FFmpegAudioDeviceFactory::CreateFFmpegAudioDevice()); 51 | audio_device_->AttachAudioBuffer(&audio_device_buffer_); 52 | if (audio_device_ == nullptr) { 53 | RTC_LOG(LS_ERROR) << "could not create ffmpeg audio device"; 54 | } 55 | } 56 | 57 | FFmpegAudioDeviceModule::~FFmpegAudioDeviceModule() { 58 | RTC_LOG(INFO) << __FUNCTION__; 59 | } 60 | 61 | int32_t FFmpegAudioDeviceModule::ActiveAudioLayer(AudioLayer* audioLayer) const { 62 | RTC_LOG(INFO) << __FUNCTION__; 63 | AudioLayer activeAudio; 64 | if (audio_device_->ActiveAudioLayer(activeAudio) == -1) { 65 | return -1; 66 | } 67 | *audioLayer = activeAudio; 68 | return 0; 69 | } 70 | 71 | int32_t FFmpegAudioDeviceModule::Init() { 72 | RTC_LOG(INFO) << __FUNCTION__; 73 | if (initialized_) 74 | return 0; 75 | RTC_CHECK(audio_device_); 
76 | webrtc::AudioDeviceGeneric::InitStatus status = audio_device_->Init(); 77 | RTC_HISTOGRAM_ENUMERATION( 78 | "WebRTC.Audio.InitializationResult", static_cast(status), 79 | static_cast(webrtc::AudioDeviceGeneric::InitStatus::NUM_STATUSES)); 80 | if (status != webrtc::AudioDeviceGeneric::InitStatus::OK) { 81 | RTC_LOG(LS_ERROR) << "Audio device initialization failed."; 82 | return -1; 83 | } 84 | initialized_ = true; 85 | return 0; 86 | } 87 | 88 | int32_t FFmpegAudioDeviceModule::Terminate() { 89 | RTC_LOG(INFO) << __FUNCTION__; 90 | if (!initialized_) 91 | return 0; 92 | if (audio_device_->Terminate() == -1) { 93 | return -1; 94 | } 95 | initialized_ = false; 96 | return 0; 97 | } 98 | 99 | bool FFmpegAudioDeviceModule::Initialized() const { 100 | RTC_LOG(INFO) << __FUNCTION__ << ": " << initialized_; 101 | return initialized_; 102 | } 103 | 104 | int32_t FFmpegAudioDeviceModule::InitSpeaker() { 105 | RTC_LOG(INFO) << __FUNCTION__; 106 | CHECKinitialized_(); 107 | return audio_device_->InitSpeaker(); 108 | } 109 | 110 | int32_t FFmpegAudioDeviceModule::InitMicrophone() { 111 | RTC_LOG(INFO) << __FUNCTION__; 112 | CHECKinitialized_(); 113 | return audio_device_->InitMicrophone(); 114 | } 115 | 116 | int32_t FFmpegAudioDeviceModule::SpeakerVolumeIsAvailable(bool* available) { 117 | RTC_LOG(INFO) << __FUNCTION__; 118 | CHECKinitialized_(); 119 | bool isAvailable = false; 120 | if (audio_device_->SpeakerVolumeIsAvailable(isAvailable) == -1) { 121 | return -1; 122 | } 123 | *available = isAvailable; 124 | RTC_LOG(INFO) << "output: " << isAvailable; 125 | return 0; 126 | } 127 | 128 | int32_t FFmpegAudioDeviceModule::SetSpeakerVolume(uint32_t volume) { 129 | RTC_LOG(INFO) << __FUNCTION__ << "(" << volume << ")"; 130 | CHECKinitialized_(); 131 | return audio_device_->SetSpeakerVolume(volume); 132 | } 133 | 134 | int32_t FFmpegAudioDeviceModule::SpeakerVolume(uint32_t* volume) const { 135 | RTC_LOG(INFO) << __FUNCTION__; 136 | CHECKinitialized_(); 137 | uint32_t level 
= 0; 138 | if (audio_device_->SpeakerVolume(level) == -1) { 139 | return -1; 140 | } 141 | *volume = level; 142 | RTC_LOG(INFO) << "output: " << *volume; 143 | return 0; 144 | } 145 | 146 | bool FFmpegAudioDeviceModule::SpeakerIsInitialized() const { 147 | RTC_LOG(INFO) << __FUNCTION__; 148 | CHECKinitialized__BOOL(); 149 | bool isInitialized = audio_device_->SpeakerIsInitialized(); 150 | RTC_LOG(INFO) << "output: " << isInitialized; 151 | return isInitialized; 152 | } 153 | 154 | bool FFmpegAudioDeviceModule::MicrophoneIsInitialized() const { 155 | RTC_LOG(INFO) << __FUNCTION__; 156 | CHECKinitialized__BOOL(); 157 | bool isInitialized = audio_device_->MicrophoneIsInitialized(); 158 | RTC_LOG(INFO) << "output: " << isInitialized; 159 | return isInitialized; 160 | } 161 | 162 | int32_t FFmpegAudioDeviceModule::MaxSpeakerVolume(uint32_t* maxVolume) const { 163 | CHECKinitialized_(); 164 | uint32_t maxVol = 0; 165 | if (audio_device_->MaxSpeakerVolume(maxVol) == -1) { 166 | return -1; 167 | } 168 | *maxVolume = maxVol; 169 | return 0; 170 | } 171 | 172 | int32_t FFmpegAudioDeviceModule::MinSpeakerVolume(uint32_t* minVolume) const { 173 | CHECKinitialized_(); 174 | uint32_t minVol = 0; 175 | if (audio_device_->MinSpeakerVolume(minVol) == -1) { 176 | return -1; 177 | } 178 | *minVolume = minVol; 179 | return 0; 180 | } 181 | 182 | int32_t FFmpegAudioDeviceModule::SpeakerMuteIsAvailable(bool* available) { 183 | RTC_LOG(INFO) << __FUNCTION__; 184 | CHECKinitialized_(); 185 | bool isAvailable = false; 186 | if (audio_device_->SpeakerMuteIsAvailable(isAvailable) == -1) { 187 | return -1; 188 | } 189 | *available = isAvailable; 190 | RTC_LOG(INFO) << "output: " << isAvailable; 191 | return 0; 192 | } 193 | 194 | int32_t FFmpegAudioDeviceModule::SetSpeakerMute(bool enable) { 195 | RTC_LOG(INFO) << __FUNCTION__ << "(" << enable << ")"; 196 | CHECKinitialized_(); 197 | return audio_device_->SetSpeakerMute(enable); 198 | } 199 | 200 | int32_t 
FFmpegAudioDeviceModule::SpeakerMute(bool* enabled) const { 201 | RTC_LOG(INFO) << __FUNCTION__; 202 | CHECKinitialized_(); 203 | bool muted = false; 204 | if (audio_device_->SpeakerMute(muted) == -1) { 205 | return -1; 206 | } 207 | *enabled = muted; 208 | RTC_LOG(INFO) << "output: " << muted; 209 | return 0; 210 | } 211 | 212 | int32_t FFmpegAudioDeviceModule::MicrophoneMuteIsAvailable(bool* available) { 213 | RTC_LOG(INFO) << __FUNCTION__; 214 | CHECKinitialized_(); 215 | bool isAvailable = false; 216 | if (audio_device_->MicrophoneMuteIsAvailable(isAvailable) == -1) { 217 | return -1; 218 | } 219 | *available = isAvailable; 220 | RTC_LOG(INFO) << "output: " << isAvailable; 221 | return 0; 222 | } 223 | 224 | int32_t FFmpegAudioDeviceModule::SetMicrophoneMute(bool enable) { 225 | RTC_LOG(INFO) << __FUNCTION__ << "(" << enable << ")"; 226 | CHECKinitialized_(); 227 | return (audio_device_->SetMicrophoneMute(enable)); 228 | } 229 | 230 | int32_t FFmpegAudioDeviceModule::MicrophoneMute(bool* enabled) const { 231 | RTC_LOG(INFO) << __FUNCTION__; 232 | CHECKinitialized_(); 233 | bool muted = false; 234 | if (audio_device_->MicrophoneMute(muted) == -1) { 235 | return -1; 236 | } 237 | *enabled = muted; 238 | RTC_LOG(INFO) << "output: " << muted; 239 | return 0; 240 | } 241 | 242 | int32_t FFmpegAudioDeviceModule::MicrophoneVolumeIsAvailable(bool* available) { 243 | RTC_LOG(INFO) << __FUNCTION__; 244 | CHECKinitialized_(); 245 | bool isAvailable = false; 246 | if (audio_device_->MicrophoneVolumeIsAvailable(isAvailable) == -1) { 247 | return -1; 248 | } 249 | *available = isAvailable; 250 | RTC_LOG(INFO) << "output: " << isAvailable; 251 | return 0; 252 | } 253 | 254 | int32_t FFmpegAudioDeviceModule::SetMicrophoneVolume(uint32_t volume) { 255 | RTC_LOG(INFO) << __FUNCTION__ << "(" << volume << ")"; 256 | CHECKinitialized_(); 257 | return (audio_device_->SetMicrophoneVolume(volume)); 258 | } 259 | 260 | int32_t FFmpegAudioDeviceModule::MicrophoneVolume(uint32_t* volume) 
const { 261 | RTC_LOG(INFO) << __FUNCTION__; 262 | CHECKinitialized_(); 263 | uint32_t level = 0; 264 | if (audio_device_->MicrophoneVolume(level) == -1) { 265 | return -1; 266 | } 267 | *volume = level; 268 | RTC_LOG(INFO) << "output: " << *volume; 269 | return 0; 270 | } 271 | 272 | int32_t FFmpegAudioDeviceModule::StereoRecordingIsAvailable( 273 | bool* available) const { 274 | RTC_LOG(INFO) << __FUNCTION__; 275 | CHECKinitialized_(); 276 | bool isAvailable = false; 277 | if (audio_device_->StereoRecordingIsAvailable(isAvailable) == -1) { 278 | return -1; 279 | } 280 | *available = isAvailable; 281 | RTC_LOG(INFO) << "output: " << isAvailable; 282 | return 0; 283 | } 284 | 285 | int32_t FFmpegAudioDeviceModule::SetStereoRecording(bool enable) { 286 | RTC_LOG(INFO) << __FUNCTION__ << "(" << enable << ")"; 287 | CHECKinitialized_(); 288 | if (audio_device_->RecordingIsInitialized()) { 289 | RTC_LOG(WARNING) << "recording in stereo is not supported"; 290 | return -1; 291 | } 292 | if (audio_device_->SetStereoRecording(enable) == -1) { 293 | RTC_LOG(WARNING) << "failed to change stereo recording"; 294 | return -1; 295 | } 296 | int8_t nChannels(1); 297 | if (enable) { 298 | nChannels = 2; 299 | } 300 | audio_device_buffer_.SetRecordingChannels(nChannels); 301 | return 0; 302 | } 303 | 304 | int32_t FFmpegAudioDeviceModule::StereoRecording(bool* enabled) const { 305 | RTC_LOG(INFO) << __FUNCTION__; 306 | CHECKinitialized_(); 307 | bool stereo = false; 308 | if (audio_device_->StereoRecording(stereo) == -1) { 309 | return -1; 310 | } 311 | *enabled = stereo; 312 | RTC_LOG(INFO) << "output: " << stereo; 313 | return 0; 314 | } 315 | 316 | int32_t FFmpegAudioDeviceModule::StereoPlayoutIsAvailable(bool* available) const { 317 | RTC_LOG(INFO) << __FUNCTION__; 318 | CHECKinitialized_(); 319 | bool isAvailable = false; 320 | if (audio_device_->StereoPlayoutIsAvailable(isAvailable) == -1) { 321 | return -1; 322 | } 323 | *available = isAvailable; 324 | RTC_LOG(INFO) << 
"output: " << isAvailable; 325 | return 0; 326 | } 327 | 328 | int32_t FFmpegAudioDeviceModule::SetStereoPlayout(bool enable) { 329 | RTC_LOG(INFO) << __FUNCTION__ << "(" << enable << ")"; 330 | CHECKinitialized_(); 331 | if (audio_device_->PlayoutIsInitialized()) { 332 | RTC_LOG(LERROR) 333 | << "unable to set stereo mode while playing side is initialized"; 334 | return -1; 335 | } 336 | if (audio_device_->SetStereoPlayout(enable)) { 337 | RTC_LOG(WARNING) << "stereo playout is not supported"; 338 | return -1; 339 | } 340 | int8_t nChannels(1); 341 | if (enable) { 342 | nChannels = 2; 343 | } 344 | audio_device_buffer_.SetPlayoutChannels(nChannels); 345 | return 0; 346 | } 347 | 348 | int32_t FFmpegAudioDeviceModule::StereoPlayout(bool* enabled) const { 349 | RTC_LOG(INFO) << __FUNCTION__; 350 | CHECKinitialized_(); 351 | bool stereo = false; 352 | if (audio_device_->StereoPlayout(stereo) == -1) { 353 | return -1; 354 | } 355 | *enabled = stereo; 356 | RTC_LOG(INFO) << "output: " << stereo; 357 | return 0; 358 | } 359 | 360 | int32_t FFmpegAudioDeviceModule::PlayoutIsAvailable(bool* available) { 361 | RTC_LOG(INFO) << __FUNCTION__; 362 | CHECKinitialized_(); 363 | bool isAvailable = false; 364 | if (audio_device_->PlayoutIsAvailable(isAvailable) == -1) { 365 | return -1; 366 | } 367 | *available = isAvailable; 368 | RTC_LOG(INFO) << "output: " << isAvailable; 369 | return 0; 370 | } 371 | 372 | int32_t FFmpegAudioDeviceModule::RecordingIsAvailable(bool* available) { 373 | RTC_LOG(INFO) << __FUNCTION__; 374 | CHECKinitialized_(); 375 | bool isAvailable = false; 376 | if (audio_device_->RecordingIsAvailable(isAvailable) == -1) { 377 | return -1; 378 | } 379 | *available = isAvailable; 380 | RTC_LOG(INFO) << "output: " << isAvailable; 381 | return 0; 382 | } 383 | 384 | int32_t FFmpegAudioDeviceModule::MaxMicrophoneVolume(uint32_t* maxVolume) const { 385 | CHECKinitialized_(); 386 | uint32_t maxVol(0); 387 | if (audio_device_->MaxMicrophoneVolume(maxVol) == -1) { 388 
| return -1; 389 | } 390 | *maxVolume = maxVol; 391 | return 0; 392 | } 393 | 394 | int32_t FFmpegAudioDeviceModule::MinMicrophoneVolume(uint32_t* minVolume) const { 395 | CHECKinitialized_(); 396 | uint32_t minVol(0); 397 | if (audio_device_->MinMicrophoneVolume(minVol) == -1) { 398 | return -1; 399 | } 400 | *minVolume = minVol; 401 | return 0; 402 | } 403 | 404 | int16_t FFmpegAudioDeviceModule::PlayoutDevices() { 405 | RTC_LOG(INFO) << __FUNCTION__; 406 | CHECKinitialized_(); 407 | uint16_t nPlayoutDevices = audio_device_->PlayoutDevices(); 408 | RTC_LOG(INFO) << "output: " << nPlayoutDevices; 409 | return (int16_t)(nPlayoutDevices); 410 | } 411 | 412 | int32_t FFmpegAudioDeviceModule::SetPlayoutDevice(uint16_t index) { 413 | RTC_LOG(INFO) << __FUNCTION__ << "(" << index << ")"; 414 | CHECKinitialized_(); 415 | return audio_device_->SetPlayoutDevice(index); 416 | } 417 | 418 | int32_t FFmpegAudioDeviceModule::SetPlayoutDevice(WindowsDeviceType device) { 419 | RTC_LOG(INFO) << __FUNCTION__; 420 | CHECKinitialized_(); 421 | return audio_device_->SetPlayoutDevice(device); 422 | } 423 | 424 | int32_t FFmpegAudioDeviceModule::PlayoutDeviceName( 425 | uint16_t index, 426 | char name[webrtc::kAdmMaxDeviceNameSize], 427 | char guid[webrtc::kAdmMaxGuidSize]) { 428 | RTC_LOG(INFO) << __FUNCTION__ << "(" << index << ", ...)"; 429 | CHECKinitialized_(); 430 | if (name == NULL) { 431 | return -1; 432 | } 433 | if (audio_device_->PlayoutDeviceName(index, name, guid) == -1) { 434 | return -1; 435 | } 436 | if (name != NULL) { 437 | RTC_LOG(INFO) << "output: name = " << name; 438 | } 439 | if (guid != NULL) { 440 | RTC_LOG(INFO) << "output: guid = " << guid; 441 | } 442 | return 0; 443 | } 444 | 445 | int32_t FFmpegAudioDeviceModule::RecordingDeviceName( 446 | uint16_t index, 447 | char name[webrtc::kAdmMaxDeviceNameSize], 448 | char guid[webrtc::kAdmMaxGuidSize]) { 449 | RTC_LOG(INFO) << __FUNCTION__ << "(" << index << ", ...)"; 450 | CHECKinitialized_(); 451 | if (name == 
NULL) { 452 | return -1; 453 | } 454 | if (audio_device_->RecordingDeviceName(index, name, guid) == -1) { 455 | return -1; 456 | } 457 | if (name != NULL) { 458 | RTC_LOG(INFO) << "output: name = " << name; 459 | } 460 | if (guid != NULL) { 461 | RTC_LOG(INFO) << "output: guid = " << guid; 462 | } 463 | return 0; 464 | } 465 | 466 | int16_t FFmpegAudioDeviceModule::RecordingDevices() { 467 | RTC_LOG(INFO) << __FUNCTION__; 468 | CHECKinitialized_(); 469 | uint16_t nRecordingDevices = audio_device_->RecordingDevices(); 470 | RTC_LOG(INFO) << "output: " << nRecordingDevices; 471 | return (int16_t)nRecordingDevices; 472 | } 473 | 474 | int32_t FFmpegAudioDeviceModule::SetRecordingDevice(uint16_t index) { 475 | RTC_LOG(INFO) << __FUNCTION__ << "(" << index << ")"; 476 | CHECKinitialized_(); 477 | return audio_device_->SetRecordingDevice(index); 478 | } 479 | 480 | int32_t FFmpegAudioDeviceModule::SetRecordingDevice(WindowsDeviceType device) { 481 | RTC_LOG(INFO) << __FUNCTION__; 482 | CHECKinitialized_(); 483 | return audio_device_->SetRecordingDevice(device); 484 | } 485 | 486 | int32_t FFmpegAudioDeviceModule::InitPlayout() { 487 | RTC_LOG(INFO) << __FUNCTION__; 488 | CHECKinitialized_(); 489 | if (PlayoutIsInitialized()) { 490 | return 0; 491 | } 492 | int32_t result = audio_device_->InitPlayout(); 493 | RTC_LOG(INFO) << "output: " << result; 494 | RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.InitPlayoutSuccess", 495 | static_cast(result == 0)); 496 | return result; 497 | } 498 | 499 | int32_t FFmpegAudioDeviceModule::InitRecording() { 500 | RTC_LOG(INFO) << __FUNCTION__; 501 | CHECKinitialized_(); 502 | if (RecordingIsInitialized()) { 503 | return 0; 504 | } 505 | int32_t result = audio_device_->InitRecording(); 506 | RTC_LOG(INFO) << "output: " << result; 507 | RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.InitRecordingSuccess", 508 | static_cast(result == 0)); 509 | return result; 510 | } 511 | 512 | bool FFmpegAudioDeviceModule::PlayoutIsInitialized() const { 513 | RTC_LOG(INFO) << 
__FUNCTION__; 514 | CHECKinitialized__BOOL(); 515 | return audio_device_->PlayoutIsInitialized(); 516 | } 517 | 518 | bool FFmpegAudioDeviceModule::RecordingIsInitialized() const { 519 | RTC_LOG(INFO) << __FUNCTION__; 520 | CHECKinitialized__BOOL(); 521 | return audio_device_->RecordingIsInitialized(); 522 | } 523 | 524 | int32_t FFmpegAudioDeviceModule::StartPlayout() { 525 | RTC_LOG(INFO) << __FUNCTION__; 526 | CHECKinitialized_(); 527 | if (Playing()) { 528 | return 0; 529 | } 530 | audio_device_buffer_.StartPlayout(); 531 | int32_t result = audio_device_->StartPlayout(); 532 | RTC_LOG(INFO) << "output: " << result; 533 | RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StartPlayoutSuccess", 534 | static_cast(result == 0)); 535 | return result; 536 | } 537 | 538 | int32_t FFmpegAudioDeviceModule::StopPlayout() { 539 | RTC_LOG(INFO) << __FUNCTION__; 540 | CHECKinitialized_(); 541 | int32_t result = audio_device_->StopPlayout(); 542 | audio_device_buffer_.StopPlayout(); 543 | RTC_LOG(INFO) << "output: " << result; 544 | RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StopPlayoutSuccess", 545 | static_cast(result == 0)); 546 | return result; 547 | } 548 | 549 | bool FFmpegAudioDeviceModule::Playing() const { 550 | RTC_LOG(INFO) << __FUNCTION__; 551 | CHECKinitialized__BOOL(); 552 | return audio_device_->Playing(); 553 | } 554 | 555 | int32_t FFmpegAudioDeviceModule::StartRecording() { 556 | RTC_LOG(INFO) << __FUNCTION__; 557 | CHECKinitialized_(); 558 | if (Recording()) { 559 | return 0; 560 | } 561 | audio_device_buffer_.StartRecording(); 562 | int32_t result = audio_device_->StartRecording(); 563 | RTC_LOG(INFO) << "output: " << result; 564 | RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StartRecordingSuccess", 565 | static_cast(result == 0)); 566 | return result; 567 | } 568 | 569 | int32_t FFmpegAudioDeviceModule::StopRecording() { 570 | RTC_LOG(INFO) << __FUNCTION__; 571 | CHECKinitialized_(); 572 | int32_t result = audio_device_->StopRecording(); 573 | audio_device_buffer_.StopRecording(); 574 
| RTC_LOG(INFO) << "output: " << result; 575 | RTC_HISTOGRAM_BOOLEAN("WebRTC.Audio.StopRecordingSuccess", 576 | static_cast(result == 0)); 577 | return result; 578 | } 579 | 580 | bool FFmpegAudioDeviceModule::Recording() const { 581 | RTC_LOG(INFO) << __FUNCTION__; 582 | CHECKinitialized__BOOL(); 583 | return audio_device_->Recording(); 584 | } 585 | 586 | int32_t FFmpegAudioDeviceModule::RegisterAudioCallback( 587 | webrtc::AudioTransport* audioCallback) { 588 | RTC_LOG(INFO) << __FUNCTION__; 589 | return audio_device_buffer_.RegisterAudioCallback(audioCallback); 590 | } 591 | 592 | int32_t FFmpegAudioDeviceModule::PlayoutDelay(uint16_t* delayMS) const { 593 | CHECKinitialized_(); 594 | uint16_t delay = 0; 595 | if (audio_device_->PlayoutDelay(delay) == -1) { 596 | RTC_LOG(LERROR) << "failed to retrieve the playout delay"; 597 | return -1; 598 | } 599 | *delayMS = delay; 600 | return 0; 601 | } 602 | 603 | bool FFmpegAudioDeviceModule::BuiltInAECIsAvailable() const { 604 | RTC_LOG(INFO) << __FUNCTION__; 605 | CHECKinitialized__BOOL(); 606 | bool isAvailable = audio_device_->BuiltInAECIsAvailable(); 607 | RTC_LOG(INFO) << "output: " << isAvailable; 608 | return isAvailable; 609 | } 610 | 611 | int32_t FFmpegAudioDeviceModule::EnableBuiltInAEC(bool enable) { 612 | RTC_LOG(INFO) << __FUNCTION__ << "(" << enable << ")"; 613 | CHECKinitialized_(); 614 | int32_t ok = audio_device_->EnableBuiltInAEC(enable); 615 | RTC_LOG(INFO) << "output: " << ok; 616 | return ok; 617 | } 618 | 619 | bool FFmpegAudioDeviceModule::BuiltInAGCIsAvailable() const { 620 | RTC_LOG(INFO) << __FUNCTION__; 621 | CHECKinitialized__BOOL(); 622 | bool isAvailable = audio_device_->BuiltInAGCIsAvailable(); 623 | RTC_LOG(INFO) << "output: " << isAvailable; 624 | return isAvailable; 625 | } 626 | 627 | int32_t FFmpegAudioDeviceModule::EnableBuiltInAGC(bool enable) { 628 | RTC_LOG(INFO) << __FUNCTION__ << "(" << enable << ")"; 629 | CHECKinitialized_(); 630 | int32_t ok = 
audio_device_->EnableBuiltInAGC(enable); 631 | RTC_LOG(INFO) << "output: " << ok; 632 | return ok; 633 | } 634 | 635 | bool FFmpegAudioDeviceModule::BuiltInNSIsAvailable() const { 636 | RTC_LOG(INFO) << __FUNCTION__; 637 | CHECKinitialized__BOOL(); 638 | bool isAvailable = audio_device_->BuiltInNSIsAvailable(); 639 | RTC_LOG(INFO) << "output: " << isAvailable; 640 | return isAvailable; 641 | } 642 | 643 | int32_t FFmpegAudioDeviceModule::EnableBuiltInNS(bool enable) { 644 | RTC_LOG(INFO) << __FUNCTION__ << "(" << enable << ")"; 645 | CHECKinitialized_(); 646 | int32_t ok = audio_device_->EnableBuiltInNS(enable); 647 | RTC_LOG(INFO) << "output: " << ok; 648 | return ok; 649 | } 650 | 651 | #if defined(WEBRTC_IOS) 652 | int FFmpegAudioDeviceModule::GetPlayoutAudioParameters( 653 | AudioParameters* params) const { 654 | RTC_LOG(INFO) << __FUNCTION__; 655 | int r = audio_device_->GetPlayoutAudioParameters(params); 656 | RTC_LOG(INFO) << "output: " << r; 657 | return r; 658 | } 659 | 660 | int FFmpegAudioDeviceModule::GetRecordAudioParameters( 661 | AudioParameters* params) const { 662 | RTC_LOG(INFO) << __FUNCTION__; 663 | int r = audio_device_->GetRecordAudioParameters(params); 664 | RTC_LOG(INFO) << "output: " << r; 665 | return r; 666 | } 667 | #endif // WEBRTC_IOS 668 | --------------------------------------------------------------------------------