├── .gitignore ├── Cargo.toml ├── LICENSE ├── README.md ├── cap ├── Cargo.toml └── src │ ├── audio.rs │ ├── capturer.rs │ ├── capturer │ └── config.rs │ ├── decoder.rs │ ├── error.rs │ ├── h264.rs │ ├── lib.rs │ ├── sharable.rs │ ├── utils.rs │ └── video.rs ├── platform_utils ├── Cargo.toml ├── build.rs └── src │ ├── lib.rs │ └── macos │ ├── idle_time.rs │ ├── mod.rs │ └── version.rs ├── renderer └── screen │ ├── cmds.rs │ ├── config.rs │ ├── mod.rs │ ├── renderer.rs │ ├── utils.rs │ └── window.rs ├── rtc.rs ├── rtc ├── Cargo.toml ├── audio.rs ├── audio_decoder.rs ├── audio_input.rs ├── capturer.rs ├── commands.rs ├── echo_cancel.rs ├── engine.rs ├── error.rs ├── ice │ ├── conn.rs │ ├── gatherer.rs │ ├── mod.rs │ └── utils.rs ├── jitter.rs ├── net │ ├── bytes_pool.rs │ └── mod.rs ├── peer │ ├── channel.rs │ └── mod.rs ├── peer2.rs ├── peer2 │ └── utils.rs ├── peer_queue.rs ├── peer_state.rs ├── player.rs ├── processor.rs ├── remote_control.rs ├── resampler.rs ├── sdp.rs ├── signal.rs └── utils.rs ├── tauri_commands ├── devices_list.rs ├── sound_status.rs └── sound_tools.rs └── video ├── Cargo.toml ├── build.rs ├── ffi.h └── src ├── bindings ├── vpx_ffi.h └── yuv_ffi.h ├── lib.rs ├── vpx.rs └── vpxcodec.rs /.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 7 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 8 | Cargo.lock 9 | 10 | # These are backup files generated by rustfmt 11 | **/*.rs.bk 12 | 13 | # MSVC Windows builds of rustc generate these, which store debugging information 14 | *.pdb 15 | 16 | # RustRover 17 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 18 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 19 
| # and can be added to the global gitignore or merged into this file. For a more nuclear 20 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 21 | #.idea/ -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = [ 3 | "rtc", 4 | "video", 5 | "compressor", 6 | "cap", 7 | "platform_utils" 8 | ] 9 | resolver = "2" 10 | rust-version = "1.75" 11 | edition = "2021" 12 | 13 | [workspace.package] 14 | edition = "2021" 15 | authors = ["Noor Chat"] 16 | rust-version = "1.75" 17 | 18 | [workspace.dependencies] 19 | serde = { version = "1", features = ["derive"] } 20 | log = "0.4" 21 | serde_json = "1" 22 | thiserror = "1" 23 | url = "2" 24 | 25 | [profile.dev] 26 | # Make compilation faster on macOS 27 | split-debuginfo = "unpacked" 28 | opt-level = 0 29 | debug = 0 30 | strip = "none" 31 | lto = false 32 | codegen-units = 256 33 | incremental = true 34 | 35 | # Set the settings for build scripts and proc-macros. 36 | [profile.dev.build-override] 37 | opt-level = 3 38 | 39 | # Set the default for dependencies, except workspace members. 
40 | [profile.dev.package."*"] 41 | opt-level = 3 42 | incremental = false 43 | 44 | [profile.release] 45 | panic = "abort" # Strip expensive panic clean-up logic 46 | codegen-units = 1 # Compile crates one after another so the compiler can optimize better 47 | lto = true 48 | opt-level = "s" 49 | strip = true # Remove debug symbols 50 | 51 | # Optimize for speed 52 | [profile.release.package.webrtc-audio-processing] 53 | opt-level = 3 54 | 55 | [profile.release.package.str0m] 56 | opt-level = 3 57 | 58 | [profile.release.package.cap] 59 | opt-level = 3 60 | 61 | [profile.release.package.cpal] 62 | opt-level = 3 63 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2025 Inline 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # An implementation of WebRTC client stack built in Rust 2 | 3 | > **Warning**: This will not run or compile. This is stripped away from the noor.to application, and is only for educational purposes. 4 | 5 | ## What is this? 6 | 7 | When we started building [Noor](https://noor.to) in [Tauri](https://tauri.app/), we didn't want to use the WebKit WebRTC implementation. We wanted more control over the voice and screen sharing to achieve lower latency, higher quality, better performance, and a less-resource-intensive call experience. So we built this from scratch and learnt Rust and a whole lot about WebRTC and media processing while doing it. 8 | 9 | It would've been super helpful if I had an easy-to-read open source implementation for a client stack to learn from. Since most of the WebRTC projects are for server side implementations, I'm sharing this to help other developers building cool client WebRTC applications without relying on libWebRTC if they don't need the whole thing. 10 | 11 | ## What exactly is implemented here? 12 | 13 | This implements voice chat (cross-platform, built primarily for macOS but tested on Linux) and screen sharing (macOS-only). Specifically, it supports: 14 | 15 | - WebRTC peer to peer connection 16 | - WebRTC signaling and reconnection 17 | - TURN / STUN / ICE 18 | - Audio capture 19 | - Audio processing 20 | - Audio echo and noise cancellation 21 | - Audio encoding and decoding 22 | - Audio resampling 23 | - Audio jitter buffer 24 | - Audio playback 25 | - Screen capture 26 | - Hardware accelerated encoding and decoding 27 | - GPU rendering on a CALayer (macOS) 28 | 29 | I didn't get to finish the screen sharing part the way I wanted to, but it can be used as a starting point to get a basic idea of what an implementation would look like. 
30 | 31 | ## Code guide 32 | 33 | The file names are self-explanatory. However here's a few starting points: 34 | 35 | - [engine.rs](./rtc/engine.rs) - The main entry point for the RTC engine that orchestrates the call across multiple peers. 36 | - [jitter.rs](./rtc/jitter.rs) - The audio jitter buffer. 37 | - [processor.rs](./rtc/processor.rs) - The audio processing pipeline. 38 | - [audio_decoder.rs](./rtc/audio_decoder.rs) - The audio decoding. 39 | - [audio_input.rs](./rtc/audio_input.rs) - The audio capture and processing. 40 | - [resampler.rs](./rtc/resampler.rs) - The audio resampling. 41 | - [player.rs](./rtc/player.rs) - The audio playback. 42 | - [capturer.rs](./rtc/capturer.rs) - The screen capture and processing (was not finished) 43 | - [gatherer.rs](./rtc/ice/gatherer.rs) - The ICE gatherer. 44 | 45 | ## Thanks to... 46 | 47 | We used amazing open-source libraries to build this: 48 | 49 | - [str0m](https://github.com/algesten/str0m) 50 | - [cidre](https://github.com/yury/cidre/) 51 | - [webrtc-audio-processing](https://crates.io/crates/webrtc-audio-processing) 52 | - [webrtc-rs](https://github.com/webrtc-rs/webrtc) 53 | - [tokio](https://crates.io/crates/tokio) 54 | - [cpal](https://crates.io/crates/cpal) 55 | - [ringbuf](https://crates.io/crates/ringbuf) 56 | - [stun-client](https://github.com/yoshd/stun-client) 57 | - [audio_thread_priority](https://crates.io/crates/audio_thread_priority) 58 | - [opus](https://crates.io/crates/opus) 59 | - [enigo](https://crates.io/crates/enigo) 60 | - [icrate](https://crates.io/crates/icrate) 61 | - [objc2](https://crates.io/crates/objc2) 62 | - [core-graphics](https://crates.io/crates/core-graphics) 63 | - [objc-foundation](https://crates.io/crates/objc-foundation) 64 | - [flume](https://crates.io/crates/flume) 65 | - [bytes](https://crates.io/crates/bytes) 66 | 67 | And many other crates for building the application itself, mainly, [Tauri](https://tauri.app/). 
68 | 69 | While developing this, we contributed back to some of the crates mentioned above. One of which that I really enjoyed was adding support for keyboard noise cancellation through the libWebRTC's AEC to the [webrtc-audio-processing](https://crates.io/crates/webrtc-audio-processing) crate after a lot of reading through the WebRTC codebase for the first time I was looking at it. 70 | 71 | # LICENSE 72 | 73 | This code is licensed under the MIT license. See the LICENSE file for more details. 74 | -------------------------------------------------------------------------------- /cap/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "cap" 3 | version = "0.0.1" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | cidre = { git = "https://github.com/yury/cidre", rev = "36948a64bf3a4c2a4cfdc6387f419fe98b7c01d7", default-features = false, features = [ 8 | "private", 9 | 10 | "blocks", 11 | "async", 12 | 13 | "app", 14 | "am", 15 | "at", 16 | # "ca", 17 | "ci", 18 | "cm", 19 | "cmio", 20 | "ct", 21 | # "av", 22 | "cf", 23 | "cg", 24 | "iio", 25 | "ns", 26 | "vt", 27 | "sc", 28 | "mtl", 29 | "mlc", 30 | "cv", 31 | "objc", 32 | "av", 33 | "ca", 34 | # "mps", 35 | # "mpsg", 36 | "dispatch", 37 | "simd", 38 | "cat", 39 | "vn", 40 | "sn", 41 | "ui", 42 | "io", 43 | "nw", 44 | "core_motion", 45 | # "wk", 46 | ] } 47 | flume = "0.11" 48 | bytes = "1.1" 49 | serde = { version = "1.0", features = ["derive"] } 50 | tokio = { version = "1", features = ["full", "rt"] } 51 | futures = { version = "0.3" } 52 | core-graphics = { version = "0.23" } 53 | sysinfo = "0.30" 54 | anyhow = "1" 55 | thiserror = "1" 56 | -------------------------------------------------------------------------------- /cap/src/audio.rs: -------------------------------------------------------------------------------- 1 | use std::collections::VecDeque; 2 | 3 | use cidre::cat::{AudioFormat, AudioFormatFlags}; 4 | use cidre::{arc, at, cm, os}; 5 | 6 | 
pub fn default_converter() -> at::AudioConverterRef { 7 | let output_asbd = at::audio::StreamBasicDesc { 8 | //sample_rate: 32_000.0, 9 | // sample_rate: 44_100.0, 10 | sample_rate: 48_000.0, 11 | format: AudioFormat::MPEG4_AAC, 12 | format_flags: Default::default(), 13 | // format_flags: AudioFormatFlags(MPEG4ObjectID::AAC_LC.0 as _), 14 | bytes_per_packet: 0, 15 | frames_per_packet: 1024, 16 | bytes_per_frame: 0, 17 | channels_per_frame: 2, 18 | bits_per_channel: 0, 19 | reserved: 0, 20 | }; 21 | let input_asbd = at::audio::StreamBasicDesc { 22 | //sample_rate: 32_000.0, 23 | // sample_rate: 44_100.0, 24 | sample_rate: 48_000.0, 25 | format: AudioFormat::LINEAR_PCM, 26 | //format_flags: AudioFormatFlags(41), 27 | format_flags: AudioFormatFlags::IS_FLOAT 28 | | AudioFormatFlags::IS_PACKED 29 | | AudioFormatFlags::IS_NON_INTERLEAVED, 30 | bytes_per_packet: 4, 31 | frames_per_packet: 1, 32 | bytes_per_frame: 4, 33 | channels_per_frame: 2, 34 | bits_per_channel: 32, 35 | reserved: 0, 36 | }; 37 | at::AudioConverterRef::with_formats(&input_asbd, &output_asbd).unwrap() 38 | } 39 | 40 | pub fn configured_converter(input_asbd: &at::audio::StreamBasicDesc) -> at::AudioConverterRef { 41 | // https://www.youtube.com/watch?v=yArrLvMYng8 42 | let output_asbd = at::audio::StreamBasicDesc { 43 | //sample_rate: 32_000.0, 44 | // sample_rate: 44_100.0, 45 | sample_rate: 48_000.0, 46 | format: AudioFormat::MPEG4_AAC_HE, 47 | //format_flags: AudioFormatFlags(MPEG4ObjectID::AAC_LC.0 as _), 48 | format_flags: AudioFormatFlags(0), 49 | bytes_per_packet: 0, 50 | frames_per_packet: 1024, 51 | bytes_per_frame: 0, 52 | channels_per_frame: 2, 53 | bits_per_channel: 0, 54 | reserved: 0, 55 | }; 56 | 57 | at::AudioConverterRef::with_formats(input_asbd, &output_asbd).unwrap() 58 | } 59 | 60 | pub struct AudioQueue { 61 | pub queue: VecDeque>, 62 | pub last_buffer_offset: i32, 63 | pub input_asbd: at::audio::StreamBasicDesc, 64 | } 65 | 66 | impl AudioQueue { 67 | #[inline] 68 | pub fn 
enque(&mut self, sbuf: &cm::SampleBuf) { 69 | self.queue.push_back(sbuf.retained()) 70 | } 71 | 72 | #[inline] 73 | pub fn is_ready(&self) -> bool { 74 | self.queue.len() > 2 75 | } 76 | 77 | pub fn fill_audio_buffer(&mut self, list: &mut at::audio::BufList<2>) -> Result<(), os::Status> { 78 | let mut left = 1024i32; 79 | let mut offset: i32 = self.last_buffer_offset; 80 | let mut out_offset = 0; 81 | let mut cursor = list.cursor(); 82 | while let Some(b) = self.queue.pop_front() { 83 | let samples = b.num_samples() as i32; 84 | let count = i32::min(samples - offset, left); 85 | b.copy_pcm_data_into_audio_buf_list( 86 | offset, 87 | count, 88 | cursor.offset(out_offset, count as _, &self.input_asbd), 89 | )?; 90 | left -= count; 91 | offset += count; 92 | out_offset += count as usize; 93 | if offset < samples { 94 | self.last_buffer_offset = offset; 95 | self.queue.push_front(b); 96 | break; 97 | } else { 98 | offset = 0; 99 | } 100 | if left == 0 { 101 | break; 102 | } 103 | } 104 | Ok(()) 105 | } 106 | } 107 | 108 | pub extern "C" fn convert_audio( 109 | _converter: &at::AudioConverter, 110 | _io_number_data_packets: &mut u32, 111 | io_data: &mut at::audio::BufList, 112 | _out_data_packet_descriptions: *mut *mut at::audio::StreamPacketDesc, 113 | in_user_data: *mut AudioQueue, 114 | ) -> os::Status { 115 | let q: &mut AudioQueue = unsafe { &mut *in_user_data }; 116 | 117 | match q.fill_audio_buffer(unsafe { std::mem::transmute(io_data) }) { 118 | Ok(()) => os::Status(0), 119 | Err(status) => status, 120 | } 121 | 122 | //let frames = i32::min(*io_number_data_packets as i32, buf.num_samples() as _); 123 | //buf.copy_pcm_data_into_audio_buffer_list(0, frames, io_data) 124 | } 125 | -------------------------------------------------------------------------------- /cap/src/capturer.rs: -------------------------------------------------------------------------------- 1 | pub mod config; 2 | 3 | use crate::error::Error; 4 | use crate::utils; 5 | 6 | use crate::audio::*; 
use crate::video;
use bytes::Bytes;

use cidre::cf;

use cidre::dispatch::Queue;
use cidre::sc::Stream;
use cidre::{
    arc, at,
    cat::{AudioFormat, AudioFormatFlags},
    cm, define_obj_type, dispatch, ns, objc,
    sc::{self, stream::Output, stream::OutputImpl},
    vt::{
        self,
        compression_properties::{keys, profile_level},
    },
};
use tokio::runtime::Handle;

use self::config::CapturerConfig;

/// Raw VideoToolbox encoder-specification keys not yet exposed by cidre.
pub mod specs {
    use cidre::cf;

    #[link(name = "VideoToolbox", kind = "framework")]
    extern "C" {
        static kVTVideoEncoderSpecification_EnableLowLatencyRateControl: &'static cf::String;
    }

    /// Encoder-specification key that enables VideoToolbox's low-latency
    /// rate-control mode (WWDC21 session 10158).
    #[doc(alias = "kVTVideoEncoderSpecification_EnableLowLatencyRateControl")]
    #[inline]
    pub fn enable_low_latency_rate_control() -> &'static cf::String {
        unsafe { kVTVideoEncoderSpecification_EnableLowLatencyRateControl }
    }
}

// NOTE(review): several generic parameters below were stripped by the text
// extraction (e.g. `session: arc::R,` / `Option>`); restore from upstream.
//
// Per-stream state shared with the SCStream output delegate: counts frames,
// owns the VT compression session and the (currently unused) audio path.
#[repr(C)]
struct FrameCounterInner {
    video_counter: usize,
    audio_counter: usize,
    audio_queue: AudioQueue,
    session: arc::R,
    audio_converter: at::AudioConverterRef,

    // Set by `ScreenCapturer::request_keyframe`; consumed (and cleared) by the
    // next `handle_video` call to force an IDR frame.
    keyframe_requested: bool,
}

impl FrameCounterInner {
    pub fn _video_counter(&self) -> usize {
        self.video_counter
    }

    /// Tears down the VT compression session.
    pub fn invalidate(&mut self) {
        self.session.invalidate();
    }

    // Audio path is disabled for now; the commented-out code sketches
    // AAC conversion of captured audio via `convert_audio`.
    fn handle_audio(&mut self, sample_buf: &mut cm::SampleBuf) {
        // if self.audio_counter == 0 {
        //   let format_desc = sample_buf.format_desc().unwrap();
        //   let sbd = format_desc.stream_basic_desc().unwrap();
        //   println!("{:?}", sbd);
        //   self.audio_converter = configured_converter(sbd);
        // }

        // self.audio_queue.enque(sample_buf);

        // if self.audio_queue.is_ready() {
        //   let mut data = [0u8; 2000];
        //   let buffer = at::AudioBuf {
        //     number_channels: 1,
        //     data_bytes_size: data.len() as _,
        //     data: data.as_mut_ptr(),
        //   };
        //   let buffers = [buffer];
        //   let mut buf = at::audio::BufList {
        //     number_buffers: buffers.len() as _,
        //     buffers,
        //   };

        //   let mut size = 1u32;

        //   self
        //     .audio_converter
        //     .fill_complex_buf(convert_audio, &mut self.audio_queue, &mut size, &mut buf)
        //     .unwrap();

        //   // println!("size {}", buf.buffers[0].data_bytes_size,);
        // }

        // self.audio_counter += 1;
    }

    /// Feeds one captured screen frame to the VT encoder, forcing a keyframe
    /// when one was requested. Encoded output arrives via `video::callback`.
    fn handle_video(&mut self, sample_buf: &mut cm::SampleBuf) {
        let Some(img) = sample_buf.image_buf() else {
            return;
        };
        self.video_counter += 1;
        let pts = sample_buf.pts();
        let dur = sample_buf.duration();

        let mut flags = None;
        let mut has_props = false;
        // let mut frame_properties = cf::DictionaryMut::with_capacity(1);
        // NOTE(review): this force-keyframe dictionary is built on every frame
        // even when no keyframe is pending, and `has_props` duplicates
        // `frame_properties.is_some()` — candidates for cleanup.
        let d = cf::DictionaryOf::with_keys_values(
            &[vt::compression_properties::frame_keys::force_key_frame()],
            &[cf::Boolean::value_true().as_type_ref()],
        );

        let frame_properties = if self.keyframe_requested {
            has_props = true;

            // add prop
            // frame_properties(
            //   vt::compression_properties::frame_keys::force_key_frame(),
            //   cf::Boolean::value_true().as_type_ref(),
            // );

            // reset for next frame
            self.keyframe_requested = false;

            Some(d.as_ref())
        } else {
            None
        };

        let res = self.session.encode_frame(
            img,
            pts,
            dur,
            if has_props { frame_properties } else { None },
            std::ptr::null_mut(),
            &mut flags,
        );

        if res.is_err() {
            println!("err {:?}", res);
        }
    }
}

// Objective-C delegate class wrapping `FrameCounterInner` so it can be
// registered as an SCStream output.
define_obj_type!(FrameCounter + OutputImpl, FrameCounterInner, FRAME_COUNTER);

impl Output for FrameCounter {}

#[objc::add_methods]
impl OutputImpl for FrameCounter {
    /// SCStream output callback: dispatches sample buffers to the video or
    /// audio handler by output type.
    extern "C" fn impl_stream_did_output_sample_buf(
        &mut self,
        _cmd: Option<&cidre::objc::Sel>,
        _stream: &sc::Stream,
        sample_buffer: &mut cm::SampleBuf,
        kind: sc::OutputType,
    ) {
        if kind == sc::OutputType::Screen {
            self.inner_mut().handle_video(sample_buffer)
        } else if kind == sc::OutputType::Audio {
            self.inner_mut().handle_audio(sample_buffer);
        }
    }
}

/// CAPTURE
///
/// macOS screen capturer: ScreenCaptureKit capture feeding a hardware H.264
/// VideoToolbox encoder; encoded frames are delivered through `sender`.
pub struct ScreenCapturer {
    stream: Option>,
    queue: Option>,
    delegate: Option>,
    // session: Option>,

    // Something to send frames to the app
    sender: flume::Sender,

    // Configure
    config: CapturerConfig,
    // Capture this display; falls back to the first display when `None`.
    display_id: Option,

    session_props: arc::R,

    // In bytes
    current_bitrate: i32,
}

/// One encoded frame: bytes plus its presentation time in seconds.
pub struct CapturerOutput {
    pub data: Bytes,
    pub seconds: f64,
}

impl ScreenCapturer {
    /// Creates an idle capturer; call `start` to begin capturing.
    pub fn new(sender: flume::Sender) -> Self {
        ScreenCapturer {
            stream: None,
            queue: None,
            delegate: None,
            // session: None,
            sender,
            config: CapturerConfig::default(),
            display_id: None,

            session_props: cf::DictionaryMut::with_capacity(10),
            current_bitrate: 0,
        }
    }

    /// Asks the encoder to emit a keyframe on the next captured frame
    /// (e.g. for a newly joined viewer). No-op before `start`.
    pub fn request_keyframe(&mut self) {
        let Some(delegate) = self.delegate.as_mut() else {
            return;
        };
        let inner = delegate.inner_mut();
        inner.keyframe_requested = true;
        // session.encode_frame(image_buffer, pts, duration, frame_properties, source_frame_ref_con, info_flags_out)
    }

    /// Mutable access to the capture configuration; takes effect on `start`.
    pub fn config(&mut self) -> &mut CapturerConfig {
        &mut self.config
    }

    /// Selects which display to capture (CGDirectDisplayID).
    pub fn set_display(&mut self, display_id: u32) {
        self.display_id = Some(display_id);
    }

    /// Starts capture: checks support/permission, configures the SCStream and
    /// VT compression session from `config`, and begins streaming frames.
    /// Returns the initial encoder bitrate on success.
    pub async fn start(&mut self) -> anyhow::Result {
        // Is supported?
        if !utils::is_supported() {
            return Err(Error::NotSupported.into());
        }

        if !utils::has_permission() {
            return Err(Error::PermissionDenied.into());
        }

        let queue = dispatch::Queue::serial_with_ar_pool();

        // display
        let content = sc::ShareableContent::current().await.expect("content");
        let display = if let Some(id) = self.display_id {
            content.displays().iter().find(|d| d.display_id() == id)
        } else {
            content.displays().first()
        }
        .expect("had no display");

        let fps = self.config.effective_fps();
        let scale_factor = self.config.resolution_as_scale_factor();
        let width = (display.width() as f32 * scale_factor).floor() as u32;
        let height = (display.height() as f32 * scale_factor).floor() as u32;

        // filter: whole display, excluding no windows
        let windows = ns::Array::new();
        let filter = sc::ContentFilter::with_display_excluding_windows(display, &windows);
        println!("capture frame size {width}x{height}");

        // screen
        let mut cfg = sc::StreamCfg::new();
        cfg.set_minimum_frame_interval(cm::Time::new(1, fps));
        cfg.set_width(width as usize); // * 2
        cfg.set_height(height as usize); // * 2
        cfg.set_shows_cursor(true);

        // audio
        // cfg.set_captures_audio(true);
        // cfg.set_excludes_current_process_audio(true);

        // stream
        let stream = sc::Stream::new(&filter, &cfg);

        // video compression; `input` is leaked into the session as the
        // callback refcon (see Box::into_raw below).
        let input = Box::new(video::RecordContext::new(self.sender.clone()));

        let memory_pool = cm::MemPool::new();
        let memory_pool_allocator = memory_pool.pool_allocator();

        // Encoder specifications
        let mut specs = cf::DictionaryMut::with_capacity(10);

        // Enable low latency mode
        // ref: https://developer.apple.com/videos/play/wwdc2021/10158/#
        specs.insert(
            specs::enable_low_latency_rate_control(),
            cf::Boolean::value_true(),
        );

        let mut session = vt::CompressionSession::new(
            width,
            height,
            cm::VideoCodec::H264,
            Some(&specs),
            None,
            Some(memory_pool_allocator),
            Some(video::callback),
            Box::into_raw(input) as _,
        )
        .unwrap();

        let initial_bit_rate = self.config.initial_bitrate(width, height);
        dbg!(initial_bit_rate);
        let suggested_max_qp = self.config.suggested_max_qp();
        dbg!(suggested_max_qp);

        self.current_bitrate = initial_bit_rate;

        let average_bit_rate = cf::Number::from_i32(initial_bit_rate);
        let bool_true = cf::Boolean::value_true();
        let expected_fr = cf::Number::from_i32(fps);
        let base_layer_bit_rate_fraction = cf::Number::from_f64(0.6);

        // QP is from 1-51 where lower indicates better quality at the expense
        // of bitrate and higher indicates lower quality; the cap comes from
        // the configured resolution/priority (CapturerConfig::suggested_max_qp).
        let max_qp = cf::Number::from_i32(suggested_max_qp);
        // Keyframe at least every 2 seconds, by frame count and by duration.
        let max_key_frame_interval = cf::Number::from_i32(fps * 2);
        let max_key_frame_interval_duration = cf::Number::from_f64(2f64);

        let props = self.session_props.as_mut();
        props.insert(keys::real_time(), bool_true);
        // props.insert(keys::allow_frame_reordering(), bool_false);
        props.insert(keys::max_key_frame_interval(), &max_key_frame_interval);
        props.insert(
            keys::max_key_frame_interval_duration(),
            &max_key_frame_interval_duration,
        );
        // "avarage" [sic] — spelling comes from the cidre API.
        props.insert(keys::avarage_bit_rate(), &average_bit_rate);
        props.insert(keys::expected_frame_rate(), &expected_fr);
        // props.insert(keys::max_frame_delay_count(), &frame_delay_count);
        props.insert(keys::max_allowed_frame_qp(), &max_qp);
        props.insert(
            keys::base_layer_bit_rate_fraction(),
            &base_layer_bit_rate_fraction,
        );
        props.insert(
            keys::profile_level(),
            profile_level::h264::constrained_high_auto_level(),
        );

        session.set_props(props).unwrap();
        session.prepare().unwrap();

        // inner core: PCM format the (disabled) audio path would receive
        let input_asbd = at::audio::StreamBasicDesc {
            sample_rate: 48_000.0,
            format: AudioFormat::LINEAR_PCM,
            format_flags: AudioFormatFlags::IS_FLOAT
                | AudioFormatFlags::IS_PACKED
                | AudioFormatFlags::IS_NON_INTERLEAVED,
            bytes_per_packet: 4,
            frames_per_packet: 1,
            bytes_per_frame: 4,
            channels_per_frame: 2,
            bits_per_channel: 32,
            reserved: 0,
        };

        let inner = FrameCounterInner {
            video_counter: 0,
            audio_counter: 0,
            audio_queue: AudioQueue {
                queue: Default::default(),
                last_buffer_offset: 0,
                input_asbd,
            },
            session,
            audio_converter: default_converter(),
            keyframe_requested: false,
        };

        // delegate
        let delegate = FrameCounter::with(inner);

        stream
            .add_stream_output(delegate.as_ref(), sc::OutputType::Screen, Some(&queue))
            .unwrap();
        // TODO: audio
        // stream
        //   .add_stream_output(delegate.as_ref(), sc::OutputType::Audio, Some(&queue))
        //   .unwrap();

        if let Ok(_) = stream.start().await {
            self.queue = Some(queue);
            self.stream = Some(stream);
            self.delegate = Some(delegate);
        } else {
            // Failed to start screen
            println!("failed to start screen");
            return Err(Error::UnknownError.into());
        }

        Ok(initial_bit_rate)
    }

    /// Set bitrate for the video encoder
    ///
    /// Applies `bitrate` minus a 150 kbps safety margin to the live session.
    /// Panics if called before `start`.
    /// NOTE(review): no lower clamp — a `bitrate` under 150_000 goes negative;
    /// confirm callers guarantee a sane minimum.
    pub fn set_bitrate(&mut self, bitrate: i32) {
        self.current_bitrate = bitrate - 150_000; // slightly lower
        println!("setting bitrate to {:?}", &self.current_bitrate);
        let average_bit_rate = cf::Number::from_i32(self.current_bitrate);

        self
            .delegate
            .as_mut()
            .expect("to have delegate")
            .inner_mut()
            .session
            .set_prop(keys::avarage_bit_rate(), Some(&average_bit_rate))
            .expect("to set props");
    }

    /// Stops the stream, releases the dispatch queue, and invalidates the
    /// compression session. Safe to call when not started.
    pub async fn stop(&mut self) {
        if let Some(stream) = self.stream.take() {
            let _ = stream.stop().await;
            let _ = stream.autoreleased();
        }

        if let Some(q) = self.queue.take() {
            // q.suspend();
            let _ = q.autoreleased();
        }

        if let Some(mut delegate) = self.delegate.take() {
            // Tear down the session
            delegate.inner_mut().invalidate();
        }
    }
}

impl Drop for ScreenCapturer {
    // NOTE(review): blocks on the async `stop()` from inside `drop` while
    // entering the current tokio runtime; this panics outside a runtime and
    // can deadlock if dropped on a runtime worker thread — confirm drop sites.
    fn drop(&mut self) {
        let handle = Handle::current();
        let enter = handle.enter();
        futures::executor::block_on(self.stop());
        drop(enter);
        // self.stop().await;
    }
}
--------------------------------------------------------------------------------
/cap/src/capturer/config.rs:
--------------------------------------------------------------------------------
// let's provide 3 detail levels, balance, text, and motion to adjust QP
//
// Capture tuning: resolution + priority drive fps, max QP and initial bitrate.
pub struct CapturerConfig {
    // Explicit fps override; when `None`, `effective_fps` derives one.
    fps: Option,
    priority: Priority,
    resolution: Resolution,
    // captures_audio: bool,
}

/// What the encoder should favor: smooth motion, still-image quality,
/// or a balance of both.
pub enum Priority {
    Balanced,
    Motion,
    Quality,
}

/// Capture scale relative to the display's nominal (1x) size.
pub enum Resolution {
    Best,
    Nominal,
    Low,
}

impl CapturerConfig {
    /// Default: no fps override, balanced priority, nominal (1x) resolution.
    pub fn default() -> Self {
        Self {
            fps: None,
            priority: Priority::Balanced,
            resolution: Resolution::Nominal,
            // captures_audio: false,
        }
    }

    /// Scale factor applied to the display's point size: 2x / 1x / 0.5x.
    pub fn resolution_as_scale_factor(&self) -> f32 {
        match &self.resolution {
            Resolution::Best => 2.0,
            Resolution::Nominal => 1.0,
            Resolution::Low => 0.5,
        }
    }

    pub fn resolution(&self) -> &Resolution {
        &self.resolution
    }

    pub fn set_resolution(&mut self, resolution: Resolution) {
        self.resolution = resolution;
    }

    pub fn priority(&self) -> &Priority {
        &self.priority
    }
51 | pub fn set_priority(&mut self, priority: Priority) { 52 | self.priority = priority; 53 | } 54 | 55 | pub fn effective_fps(&self) -> i32 { 56 | if let Some(fps) = self.fps { 57 | return fps; 58 | } 59 | 60 | match (&self.resolution, &self.priority) { 61 | (&Resolution::Best, &Priority::Motion) => 60, 62 | (&Resolution::Nominal, &Priority::Motion) => 30, 63 | (&Resolution::Low, &Priority::Motion) => 24, 64 | 65 | (&Resolution::Best, &Priority::Quality) => 30, 66 | (&Resolution::Nominal, &Priority::Quality) => 20, 67 | (&Resolution::Low, &Priority::Quality) => 15, 68 | 69 | (&Resolution::Best, &Priority::Balanced) => 60, 70 | (&Resolution::Nominal, &Priority::Balanced) => 24, 71 | (&Resolution::Low, &Priority::Balanced) => 20, 72 | } 73 | } 74 | 75 | // QP is from 1-51 which lower indicates better quality at the expense of bitrate 76 | // and higher indicates lower quality. 77 | pub fn suggested_max_qp(&self) -> i32 { 78 | match (&self.resolution, &self.priority) { 79 | (&Resolution::Best, &Priority::Motion) => 40, 80 | (&Resolution::Nominal, &Priority::Motion) => 40, 81 | (&Resolution::Low, &Priority::Motion) => 51, 82 | 83 | (&Resolution::Best, &Priority::Quality) => 20, 84 | (&Resolution::Nominal, &Priority::Quality) => 45, 85 | (&Resolution::Low, &Priority::Quality) => 51, 86 | 87 | (&Resolution::Best, &Priority::Balanced) => 30, 88 | (&Resolution::Nominal, &Priority::Balanced) => 40, 89 | (&Resolution::Low, &Priority::Balanced) => 40, 90 | } 91 | } 92 | 93 | /// Provide your encoder width and height to get desired initial bitrate 94 | /// This will later be overriden with estimates from network. 95 | pub fn initial_bitrate(&self, width: u32, height: u32) -> i32 { 96 | // above 30 is high fps (i.e. 
60) 97 | let is_high_fps = self.effective_fps() > 30; 98 | 99 | // 1440x900 = 2mbps 100 | let base = ((width as f64 * height as f64) * 1.6) as i32; 101 | if is_high_fps { 102 | (base as f32 * 1.5) as i32 103 | } else { 104 | base 105 | } 106 | } 107 | 108 | pub fn fps(&self) -> Option { 109 | self.fps 110 | } 111 | 112 | pub fn set_fps(&mut self, fps: Option) { 113 | self.fps = fps; 114 | } 115 | } 116 | -------------------------------------------------------------------------------- /cap/src/decoder.rs: -------------------------------------------------------------------------------- 1 | use std::ffi::c_void; 2 | 3 | use cidre::arc::Release; 4 | use cidre::arc::R; 5 | pub use cidre::cf; 6 | use cidre::cf::Allocator; 7 | pub use cidre::cm; 8 | use cidre::cm::BlockBuf; 9 | use cidre::cm::MemPool; 10 | use cidre::cm::SampleBuf; 11 | use cidre::cm::VideoFormatDesc; 12 | use cidre::cv; 13 | use cidre::cv::PixelFormat; 14 | use cidre::os; 15 | use cidre::vt; 16 | pub use cidre::vt::decompression::properties::keys; 17 | pub use cidre::vt::decompression::properties::video_decoder_specification; 18 | pub use cidre::vt::decompression::property_keys; 19 | pub use cidre::vt::decompression::session::Session; 20 | use cidre::vt::DecodeInfoFlags; 21 | 22 | pub struct Decoder { 23 | memory_pool: R, 24 | allocator: R, 25 | format_desc: Option>, 26 | session: Option>, 27 | sender: flume::Sender, 28 | 29 | sps_size: usize, 30 | pps_size: usize, 31 | } 32 | 33 | extern "C" { 34 | fn CMSampleBufferGetSampleAttachmentsArray( 35 | sbuf: &SampleBuf, 36 | create_if_necessary: bool, 37 | ) -> Option<&mut cf::ArrayOf>; 38 | } 39 | 40 | impl Decoder { 41 | pub fn new(sender: flume::Sender) -> Self { 42 | let memory_pool = MemPool::new(); 43 | let allocator = memory_pool.allocator().expect("to have allocator"); 44 | let allocator = allocator.retained(); 45 | Self { 46 | memory_pool, 47 | allocator, 48 | format_desc: None, 49 | session: None, 50 | sender, 51 | 52 | sps_size: 0, 53 | pps_size: 
  /// Decode one Annex-B H.264 frame (must begin with a 4-byte 00 00 00 01
  /// start code) by converting it to AVCC and submitting it to the
  /// VideoToolbox decompression session. SPS/PPS frames (NALU 7/8) update
  /// the cached format description and may (re)create the session; IDR
  /// (NALU 5) and non-IDR (NALU 1) slices are decoded.
  ///
  /// NOTE(review): indexes like `frame[start_code_index + 4]` and the
  /// fixed scan windows below will panic on short or malformed input —
  /// confirm upstream always delivers well-formed frames.
  pub fn decode(&mut self, frame: &[u8]) {
    // todo: process frame properly

    // Ref
    // https://stackoverflow.com/questions/29525000/how-to-use-videotoolbox-to-decompress-h-264-video-stream
    // todo: what is frame size
    let frame_size = frame.len();

    let mut data: Option<Vec<u8>> = None;
    let mut pps: Option<Vec<u8>> = None;
    let mut sps: Option<Vec<u8>> = None;

    let start_code_index = 0;
    let mut second_start_code_index = 0;
    let mut third_start_code_index = 0;

    let mut block_length = 0;

    let mut sample_buffer: Option<R<SampleBuf>> = None;
    let mut block_buffer: Option<R<BlockBuf>> = None;

    // NALU type is the low 5 bits of the byte after the 4-byte start code.
    let mut nalu_type = frame[start_code_index + 4] & 0x1F;

    println!("~~~~~~~ Received NALU Type \"{:?}\" ~~~~~~~~", nalu_type);

    // Cannot decode anything until we have seen an SPS (type 7) at least once.
    if nalu_type != 7 && self.format_desc.is_none() {
      println!("Video error: Frame is not an I Frame and format description is null");
      return;
    }

    // SPS: find the next start code to learn where the SPS ends.
    // NOTE(review): the scan is limited to 40 bytes past the start code —
    // a longer SPS would leave `second_start_code_index` at 0; confirm the
    // encoder's SPS always fits this window.
    if nalu_type == 7 {
      for i in (start_code_index + 4)..(start_code_index + 40) {
        if frame[i] == 0x00 && frame[i + 1] == 0x00 && frame[i + 2] == 0x00 && frame[i + 3] == 0x01
        {
          second_start_code_index = i;
          // sps_size includes the leading 4-byte start code.
          self.sps_size = second_start_code_index;
          break;
        }
      }

      // Type of the NALU following the SPS (expected: 8 = PPS).
      nalu_type = frame[second_start_code_index + 4] & 0x1F;
      println!("~~~~~~~ Received NALU Type \"{:?}\" ~~~~~~~~", nalu_type);
    }

    // PPS: locate its end, copy SPS/PPS payloads (start codes stripped) and
    // build a new format description from the parameter sets.
    if nalu_type == 8 {
      // NOTE(review): 30-byte scan window; same caveat as the SPS scan above.
      for i in (self.sps_size + 4)..(self.sps_size + 30) {
        if frame[i] == 0x00 && frame[i + 1] == 0x00 && frame[i + 2] == 0x00 && frame[i + 3] == 0x01
        {
          third_start_code_index = i;
          self.pps_size = third_start_code_index - self.sps_size;
          break;
        }
      }

      // Payload sizes exclude the 4-byte start codes.
      sps = Some(vec![0; self.sps_size - 4]); // - 4
      pps = Some(vec![0; self.pps_size - 4]);

      sps
        .as_mut()
        .unwrap()
        .copy_from_slice(&frame[4..self.sps_size]); // - 4

      let pps_start = self.sps_size + 4;
      let pps_end = pps_start + self.pps_size - 4;
      pps
        .as_mut()
        .unwrap()
        .copy_from_slice(&frame[pps_start..pps_end]);

      let parameter_set_pointers = [
        sps.as_mut().unwrap().as_ptr(), // as_mut_ptr
        pps.as_mut().unwrap().as_ptr(), // as_mut_ptr
      ];
      let parameter_set_sizes = [self.sps_size - 4, self.pps_size - 4];

      // 4 = NAL unit length-header size used by the resulting AVCC format.
      let new_format_desc =
        VideoFormatDesc::with_h264_param_sets(&parameter_set_pointers, &parameter_set_sizes, 4)
          .expect("to have desc");

      let mut format_same = false;

      if self.format_desc.is_some() {
        format_same = self.format_desc.as_ref().unwrap().equal(&new_format_desc);
        // Release the existing format description
        // Save format desc
        self.format_desc = Some(new_format_desc);

        // Format changed mid-stream: the old session is stale.
        if !format_same {
          println!("creating new session");
          self.create_session();
        }
      } else {
        self.format_desc = Some(new_format_desc);
        self.create_session();
      }

      // Type of the NALU following the PPS (expected: 5 = IDR slice).
      nalu_type = frame[third_start_code_index + 4] & 0x1F;
      println!("~~~~~~~ Received NALU Type \"{:?}\" ~~~~~~~~", nalu_type);
    }

    // status == noErr &&
    if self.session.is_none() {
      self.create_session();
    }

    // IDR slice: strip the SPS+PPS prefix, then rewrite the start code as a
    // big-endian AVCC length header and copy into a CMBlockBuffer.
    if nalu_type == 5 {
      let offset = self.sps_size + self.pps_size;
      block_length = frame_size - offset;
      data = Some(vec![0; block_length]);
      data
        .as_mut()
        .unwrap()
        .copy_from_slice(&frame[offset..offset + block_length]);

      // Replace the 4-byte start code with the NAL length (big-endian).
      let data_length32 = (block_length as i32 - 4_i32).to_be();
      data.as_mut().unwrap()[0..4].copy_from_slice(&data_length32.to_ne_bytes());

      // Create block buffer
      let mut block = BlockBuf::with_mem_block(block_length, Some(&self.allocator))
        .expect("to create block buff");
      block
        .as_mut_slice()
        .unwrap()
        .copy_from_slice(data.as_ref().unwrap());
      block_buffer = Some(block);
    }

    // Non-IDR slice: whole frame, same start-code -> length rewrite.
    if nalu_type == 1 {
      block_length = frame_size;
      data = Some(vec![0; block_length]);
      data
        .as_mut()
        .unwrap()
        .copy_from_slice(&frame[0..block_length]);

      let data_length32 = (block_length as i32 - 4_i32).to_be();
      data.as_mut().unwrap()[0..4].copy_from_slice(&data_length32.to_ne_bytes());

      // Create block buffer
      let mut block = BlockBuf::with_mem_block(block_length, Some(&self.allocator))
        .expect("to create block buff");
      block
        .as_mut_slice()
        .unwrap()
        .copy_from_slice(data.as_ref().unwrap());
      block_buffer = Some(block);
    }

    let sample_size = block_length;

    // Wrap the block buffer in a CMSampleBuffer (1 sample of `sample_size`
    // bytes, no timing info) for the decompression session.
    let mut sample_buffer = unsafe {
      cm::SampleBuf::create_in(
        Some(&self.allocator),
        Some(block_buffer.as_ref().unwrap()),
        true,
        None,
        std::ptr::null(),
        Some(self.format_desc.as_ref().unwrap()),
        1,
        0,
        std::ptr::null(),
        1,
        &sample_size,
        &mut sample_buffer,
      )
      .to_result_unchecked(sample_buffer)
    }
    .expect("to have sample buffer");

    let attachments =
      unsafe { CMSampleBufferGetSampleAttachmentsArray(&mut sample_buffer, true) }.unwrap();

    // Ask the decoder to display this sample immediately (no reordering delay).
    let dict = &mut attachments[0];
    dict.insert(
      cm::sample_buffer::attachment_keys::display_immediately(),
      cf::Boolean::value_true(),
    );

    let Some(ref session) = self.session else {
      return;
    };

    // NOTE(review): these `retained()` results are dropped immediately, so
    // the retain is paired with a release and has no lasting effect — if
    // the intent was to keep the buffers alive for the async decode
    // callback, this does not achieve it; verify.
    block_buffer.as_ref().unwrap().retained();
    sample_buffer.retained();
    session
      .decode(&sample_buffer, vt::DecodeFrameFlags::_1X_REAL_TIME_PLAYBACK)
      .expect("to decode");
  }
self.sender.clone(), 326 | }; 327 | let record = vt::DecompressionOutputCbRecord::new(ctx, callback); 328 | 329 | let mut destination_attributes = cf::DictionaryMut::with_capacity(2); 330 | destination_attributes.insert( 331 | cv::pixel_buffer_keys::pixel_format(), 332 | PixelFormat::_32_ARGB.to_cf_number().as_type_ref(), 333 | ); 334 | 335 | let session = Session::new( 336 | format_desc, 337 | Some(&specs), 338 | Some(&destination_attributes), 339 | Some(&record), 340 | ) 341 | .expect("to create session"); 342 | 343 | self.session = session.into(); 344 | } 345 | } 346 | 347 | impl Drop for Decoder { 348 | fn drop(&mut self) { 349 | self.memory_pool.invalidate(); 350 | unsafe { self.allocator.release() }; 351 | if let Some(mut s) = self.session.take() { 352 | s.invalidate() 353 | } 354 | } 355 | } 356 | 357 | // --------------------- 358 | // Output callback 359 | // --------------------- 360 | extern "C" fn callback( 361 | ctx: *mut OutputContext, 362 | _: *mut c_void, 363 | status: os::Status, 364 | flags: DecodeInfoFlags, 365 | buffer: Option<&cv::ImageBuf>, 366 | _pts: cm::Time, 367 | _duration: cm::Time, 368 | ) { 369 | if status.is_err() || buffer.is_none() { 370 | println!("status {:?} Flags: {:#b}", status, flags); 371 | return; 372 | } 373 | unsafe { 374 | let ctx = ctx.as_mut().unwrap_unchecked(); 375 | ctx.handle_image_buf(buffer.unwrap_unchecked()); 376 | } 377 | } 378 | 379 | // ----------------------- 380 | // Output Context 381 | // ----------------------- 382 | 383 | pub struct DecoderOutput { 384 | pub image_buf: R, 385 | } 386 | 387 | unsafe impl Send for DecoderOutput {} 388 | unsafe impl Sync for DecoderOutput {} 389 | unsafe impl Send for Decoder {} 390 | 391 | pub struct OutputContext { 392 | pub sender: flume::Sender, 393 | } 394 | 395 | impl OutputContext { 396 | pub fn new(sender: flume::Sender) -> Self { 397 | Self { sender } 398 | } 399 | 400 | pub fn handle_image_buf(&mut self, buffer: &cv::ImageBuf) { 401 | let _ = 
self.sender.try_send(DecoderOutput { 402 | image_buf: buffer.retained(), 403 | }); 404 | } 405 | } 406 | -------------------------------------------------------------------------------- /cap/src/error.rs: -------------------------------------------------------------------------------- 1 | #[derive(thiserror::Error, Debug)] 2 | pub enum Error { 3 | #[error("No permission")] 4 | PermissionDenied, 5 | 6 | #[error("Screen share requires at least macOS 12.3")] 7 | NotSupported, 8 | 9 | #[error("Unknown error occurred")] 10 | UnknownError, 11 | } 12 | -------------------------------------------------------------------------------- /cap/src/h264.rs: -------------------------------------------------------------------------------- 1 | use cidre::cm; 2 | 3 | pub const N_START_CODE_LENGTH: usize = 4; 4 | pub const N_START_CODE: [u8; 4] = [0x00, 0x00, 0x00, 0x01]; 5 | 6 | /// Convert AVCC frame to elem stream frame 7 | pub fn to_elem_stream(sample_buff: &cm::SampleBuf, format_bytes: &[u8]) -> Vec { 8 | // https://stackoverflow.com/questions/28396622/extracting-h264-from-cmblockbuffer 9 | // TODO: use MemoryPool 10 | // elementary_stream 11 | let mut out = Vec::with_capacity(5_000); 12 | 13 | if let Some(data_buffer) = sample_buff.data_buf() { 14 | let is_i_frame = sample_buff.is_key_frame(); 15 | 16 | if is_i_frame { 17 | out.extend_from_slice(format_bytes); 18 | } 19 | 20 | let block_buffer_length = data_buffer.data_len(); 21 | 22 | let buffer_data = data_buffer.as_slice().expect("data ptr"); 23 | 24 | // Loop through all the NAL units in the block buffer 25 | // and write them to the elementary stream with 26 | // start codes instead of AVCC length headers 27 | let mut buffer_offset: usize = 0; 28 | let avcc_header_length: usize = 4; 29 | 30 | while buffer_offset < (block_buffer_length - avcc_header_length) { 31 | // Read the NAL unit length 32 | let nal_unit_length: u32 = u32::from_be_bytes( 33 | buffer_data[buffer_offset..buffer_offset + avcc_header_length] 34 | 
.try_into() 35 | .unwrap(), 36 | ); 37 | 38 | if nal_unit_length > 0 { 39 | // Write start code to the elementary stream 40 | out.extend_from_slice(&N_START_CODE[0..N_START_CODE_LENGTH]); 41 | 42 | // Write the NAL unit without the AVCC length header to the elementary stream 43 | out.extend_from_slice( 44 | &buffer_data[buffer_offset + avcc_header_length 45 | ..buffer_offset + avcc_header_length + nal_unit_length as usize], 46 | ); 47 | 48 | // Move to the next NAL unit in the block buffer 49 | buffer_offset += avcc_header_length + nal_unit_length as usize; 50 | } 51 | } 52 | } 53 | 54 | out 55 | } 56 | -------------------------------------------------------------------------------- /cap/src/lib.rs: -------------------------------------------------------------------------------- 1 | mod audio; 2 | pub mod capturer; 3 | pub mod decoder; 4 | mod error; 5 | mod h264; 6 | pub mod sharable; 7 | mod utils; 8 | mod video; 9 | 10 | pub use error::Error; 11 | pub use utils::has_permission; 12 | pub use utils::is_supported; 13 | 14 | pub use cidre; 15 | pub use cidre::arc::R; 16 | pub use cidre::cf; 17 | pub use cidre::ci; 18 | pub use cidre::cm; 19 | pub use cidre::cv; 20 | pub use cidre::ns::Id; 21 | pub use cidre::objc; 22 | pub use cidre::sc; 23 | pub use cidre::vt; 24 | -------------------------------------------------------------------------------- /cap/src/sharable.rs: -------------------------------------------------------------------------------- 1 | use cidre::{ 2 | arc::{self, Retain}, 3 | ns, 4 | sc::Display, 5 | }; 6 | use serde::Serialize; 7 | 8 | pub async fn get_sharable_contents() -> anyhow::Result, crate::error::Error> { 9 | if !crate::has_permission() { 10 | return Err(crate::error::Error::PermissionDenied); 11 | } 12 | 13 | // display 14 | let content = cidre::sc::ShareableContent::current() 15 | .await 16 | .expect("content"); 17 | 18 | let primary_id: u32 = cidre::cg::main_display_id(); 19 | let displays: Vec = content 20 | .displays() 21 | .iter() 22 | 
.map(|display| { 23 | let is_primary = display.display_id() == primary_id; 24 | 25 | SharableItem { 26 | id: display.display_id().into(), 27 | kind: SharableKind::Display, 28 | title: format!( 29 | "{} ({w}x{h})", 30 | if is_primary { 31 | "Primary Display" 32 | } else { 33 | "Display" 34 | }, 35 | w = display.width(), 36 | h = display.height() 37 | ), 38 | } 39 | }) 40 | .collect(); 41 | 42 | Ok(displays) 43 | } 44 | 45 | pub async fn get_display_by_id(display_id: u32) -> Option> { 46 | // display 47 | let content = cidre::sc::ShareableContent::current() 48 | .await 49 | .expect("content"); 50 | 51 | let display = content 52 | .displays() 53 | .iter() 54 | .find(|d| d.display_id() == display_id) 55 | .map(|d| d.retained()); 56 | 57 | display 58 | } 59 | 60 | pub async fn get_displays() -> arc::R> { 61 | cidre::sc::ShareableContent::current() 62 | .await 63 | .expect("content") 64 | .displays() 65 | .retained() 66 | } 67 | 68 | #[derive(Debug, Serialize)] 69 | pub enum SharableKind { 70 | Window, 71 | Display, 72 | App, 73 | } 74 | 75 | #[derive(Debug, Serialize)] 76 | pub struct SharableItem { 77 | pub kind: SharableKind, 78 | pub title: String, 79 | 80 | /// id for display and window 81 | pub id: i64, 82 | } 83 | 84 | unsafe impl Send for SharableItem {} 85 | 86 | // impl From for SharableItem { 87 | // fn from(app: RunningApp) -> Self { 88 | // Self { 89 | // id: app.process_id().into(), 90 | // kind: SharableKind::App, 91 | // title: app.app_name().to_string(), 92 | // } 93 | // } 94 | // } 95 | 96 | // impl From<&Display> for SharableItem { 97 | // fn from(display: &Display) -> Self { 98 | // let primary_id: u32 = cidre::cg::main_display_id().into(); 99 | // let is_primary = display.display_id() == primary_id; 100 | 101 | // Self { 102 | // id: display.display_id().into(), 103 | // kind: SharableKind::Display, 104 | // title: format!( 105 | // "{} ({w}x{h})", 106 | // if is_primary { 107 | // "Primary Display" 108 | // } else { 109 | // "Display" 110 | // }, 
111 | // w = display.width(), 112 | // h = display.height() 113 | // ), 114 | // } 115 | // } 116 | // } 117 | 118 | // impl From for SharableItem { 119 | // fn from(window: Window) -> Self { 120 | // let title = window 121 | // .title() 122 | // .map(|t| t.to_string()) 123 | // .unwrap_or(String::from("_")); 124 | // Self { 125 | // id: window.id().into(), 126 | // kind: SharableKind::Window, 127 | // title: format!( 128 | // "{title} ({app})", 129 | // app = window 130 | // .owning_app() 131 | // .map(|app| app.app_name().to_string()) 132 | // .unwrap_or(String::from("_")) 133 | // ), 134 | // } 135 | // } 136 | // } 137 | -------------------------------------------------------------------------------- /cap/src/utils.rs: -------------------------------------------------------------------------------- 1 | use core_graphics::access::ScreenCaptureAccess; 2 | 3 | pub fn has_permission() -> bool { 4 | let access = ScreenCaptureAccess; 5 | access.request() 6 | } 7 | 8 | pub fn is_supported() -> bool { 9 | let os_version = sysinfo::System::os_version() 10 | .expect("Failed to get macOS version") 11 | .as_bytes() 12 | .to_vec(); 13 | 14 | let min_version: Vec = "12.3\n".as_bytes().to_vec(); 15 | 16 | os_version >= min_version 17 | } 18 | -------------------------------------------------------------------------------- /cap/src/video.rs: -------------------------------------------------------------------------------- 1 | use bytes::Bytes; 2 | use cidre::{arc, cm, os, vt::EncodeInfoFlags}; 3 | use std::ffi::c_void; 4 | 5 | use crate::{ 6 | capturer::CapturerOutput, 7 | h264::{self, N_START_CODE}, 8 | }; 9 | 10 | pub struct RecordContext { 11 | pub frames_count: usize, 12 | pub format_desc: Option>, 13 | pub sender: flume::Sender, 14 | 15 | pub cached_format_bytes: Vec, 16 | pub video_start_time: Option, 17 | } 18 | 19 | impl RecordContext { 20 | pub fn new(sender: flume::Sender) -> Self { 21 | Self { 22 | frames_count: 0, 23 | format_desc: None, 24 | sender, 25 | 
  /// Handle one encoded H.264 sample from the encoder: stamp it with a
  /// stream-relative timestamp, refresh the cached SPS/PPS prefix when the
  /// format description changes, convert AVCC -> Annex-B elementary stream,
  /// and forward the bytes to `self.sender`.
  pub fn handle_sample_buffer(&mut self, buffer: &cm::SampleBuf) {
    // Seconds since the first frame; the first buffer establishes t = 0.
    let pts = buffer.pts();
    let seconds: f64;
    if let Some(ref start) = self.video_start_time {
      let diff = pts.sub(*start);
      seconds = diff.as_secs();
    } else {
      self.video_start_time = Some(pts);
      seconds = 0.0;
    };

    let desc_changed;

    // Cache format description; `desc_changed` records whether this sample
    // carries a different description than the last one we saw.
    let desc = buffer.format_desc().expect("to have desc");
    match self.format_desc {
      None => {
        self.format_desc = Some(desc.retained());
        desc_changed = true;
      }
      Some(ref prev_desc) => {
        if desc.equal(prev_desc.as_type_ref()) {
          desc_changed = false;
        } else {
          // Encoder renegotiated (e.g. a resolution change): re-cache.
          self.format_desc = Some(desc.retained());
          desc_changed = true;
        }
      }
    }

    // Rebuild the Annex-B parameter-set prefix (start code + SPS,
    // start code + PPS, ...) whenever the format changes; `to_elem_stream`
    // prepends it to every key frame.
    if desc_changed {
      self.cached_format_bytes.clear();
      let (num_params, _) = desc
        .h264_params_count_and_header_len()
        .expect("to get params count");
      // write each param-set to elementary stream
      for i in 0..num_params {
        let param = desc.h264_param_set_at(i).expect("to get param");
        self.cached_format_bytes.extend_from_slice(&N_START_CODE);
        self.cached_format_bytes.extend_from_slice(param);
      }
    }

    let out = h264::to_elem_stream(buffer, &self.cached_format_bytes);

    // Prevent sending empty frame that crashes decoder.
    if !out.is_empty() {
      let _ = self.sender.try_send(CapturerOutput {
        data: Bytes::from(out),
        seconds,
      });
    }

    self.frames_count += 1;
  }
EncodeInfoFlags, 97 | buffer: Option<&cm::SampleBuf>, 98 | ) { 99 | if status.is_err() || buffer.is_none() { 100 | println!("status {:?} Flags: {:#b}", status, flags); 101 | return; 102 | } 103 | 104 | unsafe { 105 | let ctx = ctx.as_mut().unwrap_unchecked(); 106 | let buffer = buffer.unwrap_unchecked(); 107 | ctx.handle_sample_buffer(buffer); 108 | } 109 | } 110 | -------------------------------------------------------------------------------- /platform_utils/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "platform_utils" 3 | version = "0.1.0" 4 | edition = "2021" 5 | bitcode = true 6 | 7 | [lib] 8 | # or [bin] 9 | crate-type = ["rlib", "cdylib"] 10 | 11 | [dependencies] 12 | core-graphics = { version = "0.23" } 13 | 14 | [target."cfg(target_os = \"macos\")".dependencies] 15 | objc = "0.2" 16 | cocoa = "0.25" 17 | objc-foundation = "0.1" 18 | objc_id = "0.1" 19 | core-foundation = "0.9.4" 20 | -------------------------------------------------------------------------------- /platform_utils/build.rs: -------------------------------------------------------------------------------- 1 | fn main() { 2 | println!("cargo:rustc-link-lib=framework=CoreGraphics"); 3 | } 4 | -------------------------------------------------------------------------------- /platform_utils/src/lib.rs: -------------------------------------------------------------------------------- 1 | extern crate core_graphics; 2 | 3 | mod macos; 4 | 5 | pub fn get_idle_time() -> f64 { 6 | #[cfg(target_os = "macos")] 7 | return macos::idle_time::get_idle_time(); 8 | 9 | #[cfg(target_os = "windows")] 10 | return 0.0; 11 | 12 | #[cfg(target_os = "linux")] 13 | return 0.0; 14 | } 15 | 16 | pub fn key_pressed() -> bool { 17 | #[cfg(target_os = "macos")] 18 | return macos::key_pressed(); 19 | 20 | #[cfg(target_os = "windows")] 21 | return false; 22 | 23 | #[cfg(target_os = "linux")] 24 | return false; 25 | } 26 | 
-------------------------------------------------------------------------------- /platform_utils/src/macos/idle_time.rs: -------------------------------------------------------------------------------- 1 | use core_foundation::{ 2 | base::{CFAllocator, CFRelease}, 3 | dictionary::CFDictionaryRef, 4 | number::CFNumberRef, 5 | string::CFStringRef, 6 | }; 7 | use std::os::raw::{c_char, c_int, c_void}; 8 | 9 | #[link(name = "IOKit", kind = "framework")] 10 | #[link(name = "CoreFoundation", kind = "framework")] 11 | extern "C" { 12 | fn IOServiceGetMatchingServices( 13 | masterPort: i32, 14 | matching: CFDictionaryRef, 15 | existing: *mut io_iterator_t, 16 | ) -> i32; 17 | fn IOServiceMatching(name: *const i8) -> CFDictionaryRef; 18 | fn IOIteratorNext(iterator: io_iterator_t) -> io_registry_entry_t; 19 | fn IORegistryEntryCreateCFProperties( 20 | entry: io_registry_entry_t, 21 | properties: *mut CFDictionaryRef, 22 | allocator: CFAllocatorRef, 23 | options: IOOptionBits, 24 | ) -> i32; 25 | fn IOObjectRelease(object: io_object_t); 26 | 27 | fn CFDictionaryGetValue(theDict: CFDictionaryRef, key: *const c_void) -> *const c_void; 28 | 29 | fn CFNumberGetValue( 30 | number: CFNumberRef, 31 | theType: CFNumberType, 32 | valuePtr: *mut c_void, 33 | ) -> Boolean; 34 | 35 | fn CFStringCreateWithCString( 36 | alloc: CFAllocatorRef, 37 | cStr: *const c_char, 38 | encoding: CFStringEncoding, 39 | ) -> CFStringRef; 40 | } 41 | 42 | type io_iterator_t = *mut c_void; 43 | type io_registry_entry_t = *mut c_void; 44 | type io_object_t = *mut c_void; 45 | type CFAllocatorRef = *const c_void; 46 | type IOOptionBits = u32; 47 | type Boolean = u8; 48 | type CFNumberType = u32; 49 | type CFStringEncoding = u32; 50 | 51 | const KERN_SUCCESS: i32 = 0; 52 | const kIOMasterPortDefault: i32 = 0; 53 | const kCFNumberSInt64Type: CFNumberType = 4; 54 | const kCFStringEncodingUTF8: CFStringEncoding = 0x08000100; 55 | 56 | pub fn get_idle_time() -> f64 { 57 | let mut idle_seconds = -1.0; 58 | let 
mut iter: io_iterator_t = std::ptr::null_mut(); 59 | 60 | unsafe { 61 | let matching = IOServiceMatching(b"IOHIDSystem\0".as_ptr() as *const i8); 62 | if IOServiceGetMatchingServices(kIOMasterPortDefault, matching, &mut iter) == KERN_SUCCESS { 63 | let entry = IOIteratorNext(iter); 64 | if !entry.is_null() { 65 | let mut dict: CFDictionaryRef = std::ptr::null_mut(); 66 | if IORegistryEntryCreateCFProperties(entry, &mut dict, std::ptr::null(), 0) 67 | == KERN_SUCCESS 68 | { 69 | let idle_time_key = CFStringCreateWithCString( 70 | std::ptr::null(), 71 | b"HIDIdleTime\0".as_ptr() as *const c_char, 72 | kCFStringEncodingUTF8, 73 | ); 74 | let value = CFDictionaryGetValue(dict, idle_time_key as *const c_void); 75 | if !value.is_null() { 76 | let number = value as CFNumberRef; 77 | let mut nanoseconds: i64 = 0; 78 | if CFNumberGetValue( 79 | number, 80 | kCFNumberSInt64Type, 81 | &mut nanoseconds as *mut _ as *mut c_void, 82 | ) != 0 83 | { 84 | idle_seconds = nanoseconds as f64 / 1_000_000_000.0; 85 | } 86 | } 87 | CFRelease(idle_time_key as *const c_void); 88 | CFRelease(dict as *mut c_void); 89 | } 90 | IOObjectRelease(entry as *mut c_void); 91 | } 92 | IOObjectRelease(iter as *mut c_void); 93 | } 94 | } 95 | 96 | idle_seconds 97 | } 98 | -------------------------------------------------------------------------------- /platform_utils/src/macos/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod idle_time; 2 | pub mod version; 3 | 4 | use core_graphics::event_source::CGEventSourceStateID; 5 | 6 | #[link(name = "CoreGraphics", kind = "framework")] 7 | extern "C" { 8 | fn CGEventSourceKeyState(source: CGEventSourceStateID, allowedKeyCode: CGKeyCode) -> bool; 9 | } 10 | 11 | type CGKeyCode = u16; 12 | 13 | pub fn key_pressed() -> bool { 14 | let mut pressed = false; 15 | for key_code in 0..=0x5D { 16 | let key_state = 17 | unsafe { CGEventSourceKeyState(CGEventSourceStateID::HIDSystemState, key_code) }; 18 | 19 | pressed |= 
key_state; 20 | 21 | if pressed { 22 | break; 23 | } 24 | } 25 | 26 | return pressed; 27 | } 28 | 29 | #[cfg(test)] 30 | mod test { 31 | use crate::macos; 32 | 33 | #[test] 34 | fn key_pressed() { 35 | let _ = macos::key_pressed(); 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /platform_utils/src/macos/version.rs: -------------------------------------------------------------------------------- 1 | use cocoa::base::id; 2 | use cocoa::foundation::NSOperatingSystemVersion; 3 | 4 | use objc::class; 5 | use objc::msg_send; 6 | use objc::{sel, sel_impl}; 7 | 8 | pub fn macos_version() -> (u64, u64, u64) { 9 | unsafe { 10 | let process_info: id = msg_send![class!(NSProcessInfo), processInfo]; 11 | let version: NSOperatingSystemVersion = msg_send![process_info, operatingSystemVersion]; 12 | ( 13 | version.majorVersion, 14 | version.minorVersion, 15 | version.patchVersion, 16 | ) 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /renderer/screen/cmds.rs: -------------------------------------------------------------------------------- 1 | use cap::sharable::get_displays; 2 | use serde::{Deserialize, Serialize}; 3 | use tauri::Manager; 4 | use tauri::State; 5 | 6 | use crate::rtc::engine::Engine; 7 | use crate::rtc::peer::channel::StartScreenOpts; 8 | use crate::rtc::peer2::DataChannelEvent; 9 | use crate::rtc::utils::UserId; 10 | 11 | #[tauri::command] 12 | pub async fn get_sharable_contents() -> Result, ()> { 13 | match cap::sharable::get_sharable_contents().await { 14 | Err(error) => { 15 | error!("Error getting sharable contents: {:?}", error); 16 | Err(()) 17 | } 18 | Ok(items) => Ok(items), 19 | } 20 | } 21 | 22 | #[tauri::command] 23 | pub async fn rtc_start_screen( 24 | app_handle: tauri::AppHandle, 25 | engine: State<'_, Engine>, 26 | display_id: Option, 27 | ) -> Result<(), ()> { 28 | // if no display, pick the one with mouse cursor in it 29 | if 
display_id.is_none() { 30 | let displays = get_displays().await; 31 | let cursor_pos = app_handle.cursor_position().map_err(|_| ())?; 32 | let display = displays.iter().find(|display| { 33 | let frame = display.frame(); 34 | // check if cursor_pos is within the display rect 35 | return frame.origin.x <= cursor_pos.x 36 | && cursor_pos.x <= frame.origin.x + frame.size.width 37 | && frame.origin.y <= cursor_pos.y 38 | && cursor_pos.y <= frame.origin.y + frame.size.height; 39 | }); 40 | if let Some(display) = display { 41 | engine.start_screen(StartScreenOpts { 42 | display_id: display.display_id().into(), 43 | }); 44 | return Ok(()); // return early 45 | } else { 46 | error!("No display found for mouse position: {:?}", cursor_pos); 47 | } 48 | } 49 | 50 | engine.start_screen(StartScreenOpts { display_id }); 51 | Ok(()) 52 | } 53 | 54 | #[tauri::command] 55 | pub async fn rtc_stop_screen(engine: State<'_, Engine>) -> Result<(), ()> { 56 | engine.stop_screen(); 57 | Ok(()) 58 | } 59 | 60 | #[tauri::command] 61 | pub fn rtc_open_screen_window(engine: State<'_, Engine>) { 62 | engine.open_screen_window(); 63 | } 64 | #[tauri::command] 65 | pub fn rtc_open_screen_window_for(engine: State<'_, Engine>, user_id: String) { 66 | engine.open_screen_window_for(UserId(user_id)); 67 | } 68 | 69 | #[derive(Clone, Debug, Serialize, Deserialize)] 70 | pub struct VideoSizePayload { 71 | pub width: f64, 72 | pub height: f64, 73 | } 74 | 75 | pub fn emit_video_size(window: &tauri::WebviewWindow, payload: VideoSizePayload) { 76 | let _ = window.emit("set_video_size", payload); 77 | } 78 | 79 | #[derive(Clone, Serialize, Deserialize)] 80 | pub struct OverlayDataChannelEvent { 81 | pub id: String, 82 | pub event: DataChannelEvent, 83 | } 84 | 85 | /// Used for mouse move sending to overlay 86 | pub fn emit_data_event(window: &tauri::WebviewWindow, payload: OverlayDataChannelEvent) { 87 | let _ = window.emit("data_channel_event", payload); 88 | } 89 | 
-------------------------------------------------------------------------------- /renderer/screen/config.rs: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/inline-chat/webrtc-client-rust/c16c1e1268b4877d4e056a5ce34f594fef480318/renderer/screen/config.rs -------------------------------------------------------------------------------- /renderer/screen/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod cmds; 2 | pub mod renderer; 3 | pub mod utils; 4 | -------------------------------------------------------------------------------- /renderer/screen/renderer.rs: -------------------------------------------------------------------------------- 1 | use crate::rtc::utils::UserId; 2 | use crate::window_ext::WindowExt; 3 | use cap::decoder::Decoder; 4 | use cap::decoder::DecoderOutput; 5 | use cocoa::quartzcore::transaction; 6 | use raw_window_handle::HasWindowHandle; 7 | use raw_window_handle::RawWindowHandle; 8 | use std::sync::Arc; 9 | use str0m::media::MediaData; 10 | use tauri::window::EffectsBuilder; 11 | use tauri::AppHandle; 12 | use tauri::LogicalSize; 13 | use tauri::Manager; 14 | use tauri::PhysicalPosition; 15 | use tauri::PhysicalSize; 16 | use tauri::WebviewUrl; 17 | use tauri::WebviewWindowBuilder; 18 | 19 | /// Managing screen window and render 20 | pub struct ScreenManager { 21 | command_sender: flume::Sender, 22 | } 23 | 24 | /// Manage video renderers 25 | impl ScreenManager { 26 | pub fn new(app: &tauri::AppHandle) -> Self { 27 | let mut run_loop = ScreenWindowRunLoop::new(app.clone()); 28 | let command_sender = run_loop.sender().clone(); 29 | 30 | tokio::spawn(async move { 31 | run_loop.run().await; 32 | }); 33 | 34 | Self { command_sender } 35 | } 36 | 37 | pub fn add_media_data(&mut self, user_id: UserId, media_data: Arc) { 38 | let _ = self 39 | .command_sender 40 | .try_send(ScreenCommand::MediaData(user_id, media_data)); 41 | } 42 | 
43 | /// Create viewer window and show it 44 | pub fn create_window(&self) { 45 | let _ = self.command_sender.try_send(ScreenCommand::CreateWindow); 46 | } 47 | 48 | /// Switch viewing screen 49 | pub fn set_active_screen(&self, user_id: UserId) { 50 | let _ = self 51 | .command_sender 52 | .try_send(ScreenCommand::SetActiveUser(user_id)); 53 | } 54 | } 55 | 56 | impl Drop for ScreenManager { 57 | fn drop(&mut self) { 58 | let _ = self.command_sender.try_send(ScreenCommand::Destroy); 59 | } 60 | } 61 | 62 | use cocoa::quartzcore::CALayer; 63 | 64 | use cocoa::{ 65 | appkit::NSView, 66 | base::nil, 67 | foundation::{NSPoint, NSRect, NSSize}, 68 | }; 69 | use objc::{msg_send, runtime::Object, sel, sel_impl}; 70 | use std::ffi::c_void; 71 | 72 | use super::cmds::emit_video_size; 73 | use super::cmds::VideoSizePayload; 74 | 75 | const TOOLBAR_HEIGHT: f64 = 38.0; 76 | 77 | pub fn add_overlay_to_window( 78 | window: &tauri::WebviewWindow, 79 | initial_size: &PhysicalSize, 80 | ) -> ScreenOverlayContext { 81 | let RawWindowHandle::AppKit(handle) = window.window_handle().unwrap().as_raw() else { 82 | unreachable!("only runs on macos") 83 | }; 84 | 85 | unsafe { 86 | // let ns_window = handle.ns_window as *mut Object; 87 | // let content_view: *mut Object = msg_send![ns_window, contentView]; 88 | // let ns_window = handle.ns_window as *mut Object; 89 | let content_view: *mut Object = handle.ns_view.as_ptr() as *mut Object; 90 | 91 | // Make a new layer 92 | let lay = CALayer::new(); 93 | lay.remove_all_animation(); 94 | let lay_id = lay.id(); 95 | 96 | // Make a new view 97 | let new_view = NSView::alloc(nil).initWithFrame_(NSRect::new( 98 | NSPoint::new(0.0, 0.0), 99 | NSSize::new( 100 | initial_size.width as f64, 101 | initial_size.height as f64 - TOOLBAR_HEIGHT, 102 | ), 103 | )); 104 | new_view.setWantsLayer(cocoa::base::YES); 105 | 106 | // Set layer 107 | new_view.setLayer(lay_id); 108 | 
/// Commands accepted by `ScreenWindowRunLoop`.
pub enum ScreenCommand {
  CreateWindow,
  WindowClosed,
  SetActiveUser(UserId),
  ContextCreated(Arc<ScreenOverlayContext>),
  MediaData(UserId, Arc<MediaData>),
  Destroy,
}

/// Event loop that owns the viewer window, its H.264 decoder and the native
/// render context.
pub struct ScreenWindowRunLoop {
  handle: AppHandle,
  active_ctx: Option<Arc<ScreenOverlayContext>>,
  command_receiver: flume::Receiver<ScreenCommand>,
  command_sender: flume::Sender<ScreenCommand>,

  decoder: Option<Decoder>,
  decoder_sender: flume::Sender<DecoderOutput>,
  decoder_receiver: flume::Receiver<DecoderOutput>,

  // state
  active_user_id: Option<UserId>,
  /// Last observed (width, height) of the decoded stream, if any.
  /// assumes `display_size()` reports pixel dimensions as f64 — TODO confirm.
  size: Option<(f64, f64)>,

  frame_no: usize,
}

impl ScreenWindowRunLoop {
  pub fn new(handle: AppHandle) -> Self {
    let (command_sender, command_receiver) = flume::bounded(1000);
    let (decoder_sender, decoder_receiver) = flume::bounded::<DecoderOutput>(1000);

    Self {
      handle,
      active_ctx: None,
      command_receiver,
      command_sender,
      decoder: None,
      decoder_sender,
      decoder_receiver,
      active_user_id: None,
      size: None,
      frame_no: 0,
    }
  }

  /// Get command sender to control the run loop
  pub fn sender(&self) -> &flume::Sender<ScreenCommand> {
    &self.command_sender
  }

  /// Run the main loop inside its task.
  pub async fn run(&mut self) {
    loop {
      tokio::select! {
        // Handle player commands.
        Ok(command) = self.command_receiver.recv_async() => {
          if matches!(command, ScreenCommand::Destroy) {
            self.teardown(true);
            // End operation and run loop.
            break;
          }
          self.handle_command(command).await;
        }

        // Render event (decoder sends its output here).
        Ok(frame) = self.decoder_receiver.recv_async() => {
          self.render(frame);
        }
      }
    }

    info!("Screen window run loop ended.")
  }

  /// Render one decoded frame onto the active context. The first frame
  /// resizes the window to match the stream's aspect ratio; afterwards the
  /// video size is re-emitted to the webview every 10th frame.
  fn render(&mut self, frame: DecoderOutput) {
    let Some(ctx) = self.active_ctx.as_ref().cloned() else {
      return;
    };

    self.frame_no += 1;

    let display = frame.image_buf.display_size();
    if self.size.is_none() {
      // First frame: fixed width, height derived from the aspect ratio,
      // plus room for the toolbar.
      let width: f64 = 900.0;
      let ratio = display.width / display.height;
      let _ = ctx
        .window
        .set_size(LogicalSize::new(width, (width / ratio) + TOOLBAR_HEIGHT));

      self.size = Some((display.width, display.height));
    }

    // Emit periodically.
    // todo: optimize to emit only on size change
    if let Some((width, height)) = self.size {
      if self.frame_no % 10 == 0 {
        emit_video_size(&ctx.window, VideoSizePayload { width, height });
      }
    }

    // Hand the pixel buffer to the CALayer.
    ctx.render(frame.image_buf);
  }
Create decoder 257 | let decoder = Decoder::new(self.decoder_sender.clone()); 258 | self.decoder = Some(decoder); 259 | 260 | // Drop previous context 261 | self.create_window(); 262 | } 263 | 264 | ScreenCommand::ContextCreated(ctx) => { 265 | self.active_ctx = Some(ctx); 266 | } 267 | 268 | ScreenCommand::SetActiveUser(user_id) => { 269 | // recreate decoder 270 | self.active_user_id = Some(user_id); 271 | } 272 | 273 | ScreenCommand::MediaData(user_id, media_data) => { 274 | let Some(active_user_id) = self.active_user_id.as_ref() else { 275 | // no active user id 276 | return; 277 | }; 278 | 279 | if active_user_id != &user_id { 280 | // not currently viewing 281 | return; 282 | } 283 | 284 | self 285 | .decoder 286 | .as_mut() 287 | .map(|decoder| decoder.decode(media_data.data.as_slice())); 288 | // response will come through our decoder_receiver; 289 | } 290 | 291 | ScreenCommand::Destroy => { 292 | // handled above 293 | } 294 | 295 | ScreenCommand::WindowClosed => { 296 | self.teardown(false); 297 | } 298 | } 299 | } 300 | 301 | fn teardown(&mut self, destroy_window: bool) { 302 | // Drop context 303 | let _ = self.decoder.take(); 304 | let _ = self.active_user_id.take(); 305 | if let Some(ctx) = self.active_ctx.take() { 306 | if destroy_window { 307 | let _ = ctx.window.destroy(); 308 | } 309 | // let win = ctx.window.clone(); 310 | // let _ = win.close(); 311 | // drop(ctx); 312 | } 313 | } 314 | 315 | /// Must run on main thread 316 | fn create_window(&mut self) { 317 | let size = PhysicalSize::new(900, 506 + TOOLBAR_HEIGHT as u32); 318 | 319 | if let Some(existing) = self.handle.get_webview_window("screen") { 320 | let _ = existing.show(); 321 | let _ = existing.set_focus(); 322 | return; 323 | } 324 | 325 | // Create window 326 | let window = WebviewWindowBuilder::new( 327 | &self.handle, 328 | "screen", 329 | WebviewUrl::App("screen.html".into()), 330 | ) 331 | .inner_size(size.width as f64, size.height as f64) 332 | 
.title_bar_style(tauri::TitleBarStyle::Overlay) 333 | .effects( 334 | EffectsBuilder::new() 335 | .effect(tauri::window::Effect::HeaderView) 336 | .state(tauri::window::EffectState::FollowsWindowActiveState) 337 | .radius(9.0) 338 | .build(), 339 | ) 340 | .build() 341 | .expect("to build screen window"); 342 | 343 | // Show window 344 | // window.show().expect("to show screen window"); 345 | info!("created window"); 346 | 347 | let window_ = window.clone(); 348 | let _window__ = window.clone(); 349 | let sender__ = self.command_sender.clone(); 350 | 351 | let _ = window.run_on_main_thread(move || { 352 | // ... 353 | // add overlay 354 | let ctx = Arc::new(add_overlay_to_window(&window_, &size)); 355 | let weak_ctx = Arc::downgrade(&ctx); 356 | 357 | // save on app_handle 358 | let _ = sender__.try_send(ScreenCommand::ContextCreated(ctx)); 359 | 360 | // style window 361 | #[cfg(target_os = "macos")] 362 | window_.set_transparent_titlebar(); 363 | #[cfg(target_os = "macos")] 364 | window_.set_background(0.15, 0.19, 0.24); 365 | 366 | let window__ = window_.clone(); 367 | window_.on_window_event(move |event| match event { 368 | tauri::WindowEvent::Resized(_size) => { 369 | // Using `size` from the event broke on tauri beta 1 370 | let size = window__.outer_size().expect("to have inner size"); 371 | println!("resized {:?}", &size); 372 | if let Some(ctx) = weak_ctx.upgrade() { 373 | let new_size = 374 | size.to_logical::(window__.scale_factor().expect("to have scale factor")); 375 | ctx.set_size(&new_size); 376 | } 377 | } 378 | 379 | tauri::WindowEvent::ScaleFactorChanged { scale_factor, .. } => { 380 | if let Some(ctx) = weak_ctx.upgrade() { 381 | // Using `inner_size` broke on tauri beta 1 382 | let new_outer_size = window__.outer_size().expect("to have inner size"); 383 | let new_size = new_outer_size.to_logical::(*scale_factor); 384 | ctx.set_size(&new_size); 385 | } 386 | } 387 | 388 | tauri::WindowEvent::CloseRequested { .. 
} => { 389 | let _ = sender__.try_send(ScreenCommand::WindowClosed); 390 | } 391 | 392 | _ => {} 393 | }); 394 | }); 395 | } 396 | } 397 | 398 | impl Drop for ScreenWindowRunLoop { 399 | fn drop(&mut self) { 400 | self.teardown(true); 401 | } 402 | } 403 | 404 | pub struct ScreenOverlayContext { 405 | window: tauri::WebviewWindow, 406 | // ns_window: *mut Object, 407 | ns_view: *mut Object, 408 | pub layer: CALayer, 409 | // ns_window: Id, 410 | // ns_view: Id, 411 | // pub layer: CALayer, 412 | } 413 | 414 | impl ScreenOverlayContext { 415 | pub fn new( 416 | window: tauri::WebviewWindow, 417 | // ns_window: *mut Object, 418 | ns_view: *mut Object, 419 | layer: CALayer, 420 | // ns_window: Id, 421 | // ns_view: Id, 422 | // layer: CALayer, 423 | ) -> Self { 424 | // ns_view.retain(); 425 | // ns_window.retain(); 426 | Self { 427 | window, 428 | // ns_window: ns_window, 429 | ns_view: ns_view, 430 | layer, 431 | } 432 | } 433 | 434 | /// Must be run on main thread 435 | pub fn set_size(&self, size: &LogicalSize) { 436 | print!("set_size"); 437 | let size = NSSize::new(size.width as f64, size.height as f64 - TOOLBAR_HEIGHT); 438 | unsafe { self.ns_view.setFrameSize(size) }; 439 | } 440 | 441 | /// Must be run on main thread 442 | pub fn set_position(&self, _point: &PhysicalPosition) {} 443 | 444 | pub fn render(&self, buf: cap::R) { 445 | print!("render"); 446 | transaction::begin(); 447 | transaction::set_disable_actions(true); 448 | 449 | // do render 450 | unsafe { 451 | let buf_obj = buf.as_type_ptr().cast_mut() as *mut _ as *mut objc::runtime::Object; 452 | self.layer.set_contents(buf_obj); 453 | } 454 | // done with render 455 | 456 | transaction::commit(); 457 | } 458 | 459 | // todo: frameSize 460 | // todo: frameOrigin 461 | } 462 | 463 | impl Drop for ScreenOverlayContext { 464 | fn drop(&mut self) { 465 | unsafe { 466 | // todo: release 467 | } 468 | } 469 | } 470 | 471 | unsafe impl Send for ScreenOverlayContext {} 472 | unsafe impl Sync for 
ScreenOverlayContext {} 473 | -------------------------------------------------------------------------------- /renderer/screen/utils.rs: -------------------------------------------------------------------------------- 1 | // pub fn has_permission() -> bool { 2 | // let access = ScreenCaptureAccess::default(); 3 | // access.request() 4 | // } 5 | 6 | use std::process::Command; 7 | 8 | pub fn is_supported() -> bool { 9 | let min_version: Vec = "12.3\n".as_bytes().to_vec(); 10 | let output = Command::new("sw_vers") 11 | .arg("-productVersion") 12 | .output() 13 | .expect("Failed to execute sw_vers command"); 14 | 15 | let os_version = output.stdout; 16 | 17 | os_version >= min_version 18 | } 19 | -------------------------------------------------------------------------------- /renderer/screen/window.rs: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/inline-chat/webrtc-client-rust/c16c1e1268b4877d4e056a5ce34f594fef480318/renderer/screen/window.rs -------------------------------------------------------------------------------- /rtc.rs: -------------------------------------------------------------------------------- 1 | mod audio; 2 | mod audio_decoder; 3 | mod audio_input; 4 | mod capturer; 5 | pub mod commands; 6 | mod echo_cancel; 7 | pub mod engine; 8 | pub mod error; 9 | mod ice; 10 | mod jitter; 11 | pub mod net; 12 | pub mod peer; 13 | pub mod peer2; 14 | mod peer_queue; 15 | mod peer_state; 16 | mod player; 17 | mod processor; 18 | mod remote_control; 19 | mod resampler; 20 | pub mod sdp; 21 | mod signal; 22 | pub mod utils; 23 | -------------------------------------------------------------------------------- /rtc/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "rtc" 3 | version = "0.2.75" 4 | description = "RTC module for Noor" 5 | authors = ["Noor Chat"] 6 | edition = "2021" 7 | rust-version = "1.75" 8 | 9 | [dependencies] 
10 | # Core dependencies from workspace 11 | serde = { version = "1", features = ["derive"] } 12 | log = { version = "0.4", features = ["release_max_level_debug"] } 13 | serde_json = "1" 14 | thiserror = "1" 15 | 16 | # RTC-specific dependencies 17 | tokio = { version = "1", features = ["full", "rt"] } 18 | futures = "0.3" 19 | futures-util = "0.3" 20 | anyhow = "1" 21 | bytes = "1" 22 | async-trait = "0.1.67" 23 | 24 | # Audio processing 25 | cpal = { git = "https://github.com/morajabi/cpal", rev = "6728e272c08460396ef6b3b27da54ed020fa26ce" } 26 | ringbuf = "0.3" 27 | rubato = "0.14" 28 | audio_thread_priority = "0.30" 29 | webrtc-audio-processing = { git = "https://github.com/morajabi/webrtc-audio-processing", rev = "f2d3926957770653add3791dc4d5a3dba715e24c", features = [ 30 | "bundled", 31 | "derive_serde", 32 | ] } 33 | opus = { git = "https://github.com/morajabi/opus-rs", branch = "add-complexity" } 34 | 35 | # WebRTC dependencies 36 | webrtc = { git = "https://github.com/webrtc-rs/webrtc", rev = "23d2e6088d761b700842eab4f02d492bc2a12119" } 37 | str0m = { git = "https://github.com/algesten/str0m", rev = "0e9c9225d796df1b275340d3a909c77321a959fc" } 38 | stun-client = { git = "https://github.com/yoshd/stun-client", rev = "4ccfc3a9944dac8bfbe52d0030f0fc31b7fbbb14" } 39 | 40 | [target."cfg(target_os = \"macos\")".dependencies] 41 | objc = "0.2" 42 | cocoa = "0.25" 43 | core-graphics = "0.23" 44 | objc2 = { version = "0.4.1", features = ["relax-void-encoding"] } 45 | icrate = { version = "0.0.4", features = [ 46 | "apple", 47 | "Foundation", 48 | "Foundation_all", 49 | "Foundation_NSString", 50 | "AppKit", 51 | "AppKit_NSApplication", 52 | "AppKit_NSWindow", 53 | "AppKit_NSResponder", 54 | "CoreAnimation", 55 | "CoreAnimation_CALayer", 56 | ] } 57 | objc-foundation = "0.1.1" 58 | objc_id = "0.1.1" 59 | 60 | [profile.dev] 61 | split-debuginfo = "unpacked" 62 | opt-level = 0 63 | debug = 0 64 | strip = "none" 65 | lto = false 66 | codegen-units = 256 67 | incremental 
= true 68 | 69 | [profile.release] 70 | panic = "abort" 71 | codegen-units = 1 72 | lto = true 73 | opt-level = "s" 74 | strip = true 75 | 76 | # Optimize performance-critical dependencies 77 | [profile.release.package.webrtc-audio-processing] 78 | opt-level = 3 79 | 80 | [profile.release.package.str0m] 81 | opt-level = 3 82 | 83 | [profile.release.package.cpal] 84 | opt-level = 3 85 | -------------------------------------------------------------------------------- /rtc/audio.rs: -------------------------------------------------------------------------------- 1 | use std::fmt::Display; 2 | 3 | use cpal::{ 4 | traits::{DeviceTrait, HostTrait}, 5 | Device, DevicesError, SampleRate, SupportedBufferSize, 6 | }; 7 | 8 | use crate::store::DEFAULT_DEVICE; 9 | 10 | pub fn get_default_output_device() -> Result { 11 | #[cfg(any( 12 | not(any(target_os = "linux", target_os = "dragonfly", target_os = "freebsd")), 13 | not(feature = "jack") 14 | ))] 15 | let host = cpal::default_host(); 16 | 17 | // Set up the input device and stream with the default input config. 
18 | let device = host 19 | .default_output_device() 20 | .expect("failed to find output device"); 21 | 22 | Ok(device) 23 | } 24 | 25 | /// used in decoder thread 26 | pub fn get_output_device() -> cpal::StreamConfig { 27 | let sample_rate: u32 = 48_000; 28 | let channels: u16 = 2; 29 | 30 | let output_device = get_default_output_device().expect("to have output device"); 31 | let mut supported_configs = output_device 32 | .supported_output_configs() 33 | .expect("to have output device"); 34 | let default_config = output_device 35 | .default_output_config() 36 | .expect("to have default output config for sample rate"); 37 | let supports_sample_rate = &supported_configs 38 | .find(|config| { 39 | config.max_sample_rate() >= SampleRate(sample_rate) 40 | && config.min_sample_rate() <= SampleRate(sample_rate) 41 | }) 42 | .is_some(); 43 | 44 | let is_sample_rate_native_default = default_config.sample_rate().0 == sample_rate; 45 | 46 | let sample_rate = if *supports_sample_rate { 47 | SampleRate(sample_rate) 48 | } else { 49 | default_config.sample_rate() 50 | }; 51 | info!( 52 | "output device > default > buffer size {:#?}", 53 | default_config.buffer_size() 54 | ); 55 | info!( 56 | "output device > default > sample rate {:#?}", 57 | default_config.sample_rate() 58 | ); 59 | 60 | info!("Output device sample rate {}", sample_rate.0); 61 | 62 | // 240 * 48/44.1*2 63 | let _ideal_buff_size = 240; // for final output after resampling to be close to 480; 64 | let _ideal_buff_size = 480; // for better sync with echo cancel 65 | // min or 512 or default if None 66 | // *48/44.1*2 = 960 = 10ms 67 | let ideal_buff_size = if is_sample_rate_native_default { 68 | 480 69 | } else { 70 | 441 71 | }; 72 | let min_supported_buffer_size: Option = match output_device 73 | .default_output_config() 74 | .expect("to have output config") 75 | .buffer_size() 76 | { 77 | SupportedBufferSize::Range { min, max } => { 78 | if *min <= ideal_buff_size && ideal_buff_size <= *max { 79 | 
Some(ideal_buff_size) 80 | } else { 81 | Some(min.to_owned()) 82 | } 83 | } 84 | SupportedBufferSize::Unknown => None, 85 | }; 86 | 87 | cpal::StreamConfig { 88 | buffer_size: if let Some(buffer_size) = min_supported_buffer_size { 89 | cpal::BufferSize::Fixed(buffer_size) 90 | } else { 91 | // default can be super large, so we only fallback to it if Unknown 92 | cpal::BufferSize::Default 93 | }, 94 | 95 | channels, 96 | sample_rate, 97 | } 98 | } 99 | 100 | pub fn get_default_input_device() -> Result { 101 | #[cfg(any( 102 | not(any(target_os = "linux", target_os = "dragonfly", target_os = "freebsd")), 103 | not(feature = "jack") 104 | ))] 105 | let host = cpal::default_host(); 106 | 107 | // Set up the input device and stream with the default input config. 108 | let device = host 109 | .default_input_device() 110 | .expect("failed to find default input device"); 111 | 112 | Ok(device) 113 | } 114 | 115 | pub fn get_input_device_by_name(device_name: String) -> Result { 116 | #[cfg(any( 117 | not(any(target_os = "linux", target_os = "dragonfly", target_os = "freebsd")), 118 | not(feature = "jack") 119 | ))] 120 | let host = cpal::default_host(); 121 | // dbg!(&device_name); 122 | 123 | // Set up the input device and stream with the default input config. 
124 | let device = host 125 | .input_devices() 126 | .expect("failed to find input devices") 127 | .find(|device| { 128 | let name = device.name().unwrap_or("Audio Device".to_string()); 129 | dbg!(&name); 130 | name == device_name 131 | }); 132 | 133 | // If picked device not found, pick default 134 | if let Some(preferred) = device { 135 | Ok(preferred) 136 | } else { 137 | warn!( 138 | "Could not use preferred input device {} falling back to default", 139 | &device_name 140 | ); 141 | get_default_input_device() 142 | } 143 | } 144 | 145 | use thiserror::Error; 146 | 147 | #[derive(Debug, Error)] 148 | pub enum AudioError { 149 | #[error(transparent)] 150 | DeviceError(#[from] cpal::DevicesError), 151 | ConfigError(#[from] cpal::DefaultStreamConfigError), 152 | SupportedStreamConfigError(#[from] cpal::SupportedStreamConfigsError), 153 | // #[error("directory already exists")] 154 | // DirectoryExists, 155 | } 156 | 157 | use std::fmt; 158 | 159 | impl Display for AudioError { 160 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 161 | write!(f, "Audio error") 162 | } 163 | } 164 | 165 | /// used in encoder thread 166 | pub fn get_input_device( 167 | preferred_sample_rate: u32, 168 | preferred_mic: Option, 169 | buffer_size: u32, 170 | ) -> Result<(cpal::Device, cpal::StreamConfig), AudioError> { 171 | let input_device = if preferred_mic == Some(DEFAULT_DEVICE.to_string()) || preferred_mic.is_none() 172 | { 173 | get_default_input_device()? 174 | } else if let Some(preferred_mic) = preferred_mic { 175 | get_input_device_by_name(preferred_mic)? 
176 | } else { 177 | unreachable!("preferred_mic should be Some or None") 178 | }; 179 | 180 | let default_config = input_device.default_input_config()?; 181 | let mut supported_configs = input_device.supported_input_configs()?; 182 | let supports_track_sample_rate = &supported_configs 183 | .find(|config| { 184 | config.max_sample_rate() >= SampleRate(preferred_sample_rate) 185 | && config.min_sample_rate() <= SampleRate(preferred_sample_rate) 186 | }) 187 | .is_some(); 188 | 189 | // min or 512 or default if None 190 | let min_supported_buffer_size: Option = match input_device 191 | .default_input_config() 192 | .expect("to have input config") 193 | .buffer_size() 194 | { 195 | SupportedBufferSize::Range { min, max } => { 196 | // prev 512 197 | // 960 / 4 = 240 to fill a whole 198 | if *min <= buffer_size && buffer_size <= *max { 199 | Some(buffer_size) 200 | } else { 201 | Some(min.to_owned()) 202 | } 203 | } 204 | SupportedBufferSize::Unknown => None, 205 | }; 206 | 207 | let config = cpal::StreamConfig { 208 | // todo: must check with supported output config 209 | buffer_size: if let Some(buffer_size) = min_supported_buffer_size { 210 | cpal::BufferSize::Fixed(buffer_size) 211 | } else { 212 | // default can be super large, so we only fallback to it if Unknown 213 | cpal::BufferSize::Default 214 | }, 215 | 216 | // Never set this to fixed 2 , it causes left channel to be filled 217 | channels: 1, 218 | // channels: default_config.channels(), 219 | // channels: 1, 220 | // sample_rate: SampleRate(24000), 221 | // sample_rate: SampleRate(44100), 222 | sample_rate: if *supports_track_sample_rate { 223 | SampleRate(preferred_sample_rate) 224 | } else { 225 | default_config.sample_rate() 226 | }, 227 | }; 228 | 229 | Ok((input_device, config)) 230 | } 231 | 232 | pub fn opus_channels(channels: u16) -> opus::Channels { 233 | if channels == 1 { 234 | opus::Channels::Mono 235 | } else { 236 | opus::Channels::Stereo 237 | } 238 | } 239 | 240 | pub fn cpal_err_fn(err: 
cpal::StreamError) { 241 | eprintln!("an error occurred on stream: {}", err); 242 | } 243 | 244 | pub fn get_opus_samples_count(sample_rate: u32, channels: u16, duration: u32) -> usize { 245 | (sample_rate as usize / 1000 * duration /* ms */ as usize) * channels as usize 246 | } 247 | -------------------------------------------------------------------------------- /rtc/audio_decoder.rs: -------------------------------------------------------------------------------- 1 | use crate::{debugger::Debugger, rtc::audio::get_opus_samples_count}; 2 | use std::sync::Arc; 3 | use str0m::media::MediaData; 4 | 5 | use super::{ 6 | jitter::{self, JitterBuffer, MAX_10MS_SAMPLE_COUNT}, 7 | peer::channel::AudioMedia, 8 | resampler::{self, Resampler, ResamplerConfig}, 9 | utils::UserId, 10 | }; 11 | 12 | /// Track decoder 13 | pub struct TrackDecoder { 14 | jitter_buffer: Arc, 15 | resampler: Resampler, 16 | task: tokio::task::JoinHandle<()>, 17 | close_sender: flume::Sender<()>, 18 | jitter_output_buffer: Vec, 19 | } 20 | 21 | impl Drop for TrackDecoder { 22 | fn drop(&mut self) { 23 | let _ = self.close_sender.try_send(()); 24 | self.task.abort(); 25 | } 26 | } 27 | 28 | impl TrackDecoder { 29 | pub async fn new(user_id: UserId, debugger: Debugger, media: AudioMedia) -> Self { 30 | let jitter_buffer = JitterBuffer::new(jitter::JitterBufConfig { 31 | sample_rate: media.clock_rate, 32 | channels: media.channels, 33 | debugger, 34 | user_id, 35 | }); 36 | let resampler = Resampler::new(ResamplerConfig { 37 | input_sample_rate: media.clock_rate, 38 | channels: media.channels, 39 | output_sample_rate: 48_000, 40 | chunk: resampler::Chunk::TenMs, 41 | }); 42 | // We return slices of this after processing 43 | // let output_buffer = Vec::with_capacity(MAX_10MS_SAMPLE_COUNT * 5); 44 | let _output_buffer = vec![0.0f32; MAX_10MS_SAMPLE_COUNT * 2]; 45 | 46 | let mut interval_20ms = tokio::time::interval(std::time::Duration::from_millis(20)); 47 | 
interval_20ms.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip); 48 | let jitter_buffer = Arc::new(jitter_buffer); 49 | let jitter_buffer_ = jitter_buffer.clone(); 50 | let jitter_output_buffer = jitter_buffer.allocate_output_buffer(); 51 | let (close_sender, close_recv) = flume::bounded::<()>(1); 52 | 53 | // no need to be in same thread as player 54 | // let task = tokio::task::spawn(async move { 55 | let task = tokio::task::spawn_local(async move { 56 | loop { 57 | tokio::select! { 58 | _ = interval_20ms.tick() => { 59 | jitter_buffer_.tick(); 60 | }, 61 | _ = close_recv.recv_async() => { 62 | break; 63 | } 64 | } 65 | } 66 | }); 67 | 68 | Self { 69 | jitter_buffer, 70 | 71 | resampler, 72 | task, 73 | close_sender, 74 | jitter_output_buffer, 75 | } 76 | } 77 | 78 | pub fn insert_packet(&self, media_data: Arc) { 79 | self.jitter_buffer.insert_packet(media_data); 80 | } 81 | // pub fn decoder_tick(&self) { 82 | // self.jitter_buffer.tick(); 83 | // } 84 | 85 | /// Outputs 10ms of streo interleaved audio at 48khz 86 | pub fn get_audio<'a>(&'a mut self, dest: &mut [f32]) -> usize { 87 | let samples = self 88 | .jitter_buffer 89 | .get_audio(self.jitter_output_buffer.as_mut_slice()); 90 | let output = &self.jitter_output_buffer[0..samples]; 91 | 92 | // RESAMPLE TO 48khz 93 | let output = self.resampler.process(output); 94 | 95 | // todo : optimize 96 | let expected = get_opus_samples_count(48_000, 2, 10); 97 | assert!( 98 | output.len() == expected, 99 | "track output frame size not correct. 
expected: {} actual: {}", 100 | expected, 101 | output.len() 102 | ); 103 | 104 | // self.output_buffer[0..output.len()] 105 | // .as_mut() 106 | // .copy_from_slice(output); 107 | 108 | // &self.output_buffer[0..output.len()] 109 | 110 | dest[0..output.len()].as_mut().copy_from_slice(output); 111 | 112 | output.len() 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /rtc/capturer.rs: -------------------------------------------------------------------------------- 1 | use std::time::Instant; 2 | 3 | use super::{engine::EngineSender, peer::channel::StartScreenOpts}; 4 | use crate::rtc::{peer::channel::PeerChannelCommand, peer2::BweCfg}; 5 | use cap::capturer::{CapturerOutput, ScreenCapturer}; 6 | use flume::{Receiver, Sender}; 7 | use str0m::{ 8 | bwe::Bitrate, 9 | media::{MediaKind, MediaTime}, 10 | }; 11 | use tauri::AppHandle; 12 | 13 | pub enum RunLoopEvent { 14 | Start(StartScreenOpts), 15 | Stop, 16 | SetBitrate(Bitrate), 17 | RequestKeyFrame, 18 | Destroy, 19 | } 20 | 21 | pub struct CaptureController { 22 | run_loop_sender: Sender, 23 | } 24 | 25 | impl CaptureController { 26 | pub fn new(_engine_sender: EngineSender, run_loop_sender: Sender) -> Self { 27 | Self { run_loop_sender } 28 | } 29 | 30 | pub async fn stop(&self) { 31 | let _ = self.run_loop_sender.try_send(RunLoopEvent::Stop); 32 | } 33 | 34 | pub async fn start(&self, opts: StartScreenOpts) { 35 | let _ = self.run_loop_sender.try_send(RunLoopEvent::Start(opts)); 36 | } 37 | pub async fn set_bitrate(&self, bitrate: Bitrate) { 38 | let _ = self 39 | .run_loop_sender 40 | .try_send(RunLoopEvent::SetBitrate(bitrate)); 41 | } 42 | 43 | pub async fn request_key_frame(&self) { 44 | let _ = self.run_loop_sender.try_send(RunLoopEvent::RequestKeyFrame); 45 | } 46 | } 47 | 48 | impl Drop for CaptureController { 49 | fn drop(&mut self) { 50 | let _ = self.run_loop_sender.try_send(RunLoopEvent::Destroy); 51 | } 52 | } 53 | 54 | pub struct CapturerRunLoop { 
55 | reveiver: Receiver, 56 | close_receiver: Receiver<()>, 57 | close_sender: Sender<()>, 58 | engine_sender: EngineSender, 59 | frame_sender: Sender, 60 | frame_receiver: Receiver, 61 | capturer: Option, 62 | running: bool, 63 | start: Instant, 64 | } 65 | 66 | impl CapturerRunLoop { 67 | pub fn new( 68 | run_loop_reveiver: Receiver, 69 | _run_loop_sender: Sender, 70 | engine_sender: EngineSender, 71 | _app_handle: AppHandle, 72 | ) -> Self { 73 | let (close_sender, close_receiver) = flume::bounded::<()>(2); 74 | 75 | // This receives frames from our Capturer 76 | let (frame_sender, frame_receiver) = flume::bounded::(100); 77 | 78 | Self { 79 | engine_sender, 80 | reveiver: run_loop_reveiver, 81 | close_receiver, 82 | close_sender, 83 | capturer: None, 84 | running: false, 85 | start: Instant::now(), 86 | frame_sender, 87 | frame_receiver, 88 | } 89 | } 90 | 91 | pub async fn run(&mut self) { 92 | loop { 93 | tokio::select! { 94 | Ok(event) = self.reveiver.recv_async() => { 95 | match event { 96 | RunLoopEvent::Destroy => { 97 | let _ = self.close_sender.try_send(()); 98 | break; 99 | } 100 | _ => {} 101 | } 102 | info!("handling capturer event"); 103 | self.handle_command(event).await; 104 | } 105 | 106 | // When an encoded frame arrives 107 | Ok(frame) = self.frame_receiver.recv_async() => { 108 | self.tick(frame).await; 109 | } 110 | 111 | _ = self.close_receiver.recv_async() => { 112 | break; 113 | } 114 | } 115 | } 116 | 117 | info!("RTC screen capturer run loop ended"); 118 | } 119 | 120 | async fn handle_command(&mut self, event: RunLoopEvent) { 121 | match event { 122 | RunLoopEvent::Start(opts) => { 123 | self.start_capture(opts).await; 124 | } 125 | 126 | RunLoopEvent::SetBitrate(bitrate) => { 127 | self 128 | .capturer 129 | .as_mut() 130 | .map(|cap| cap.set_bitrate(bitrate.as_u64() as i32)); 131 | } 132 | 133 | RunLoopEvent::Stop => { 134 | self.stop_capture().await; 135 | } 136 | 137 | RunLoopEvent::RequestKeyFrame => { 138 | 
self.capturer.as_mut().map(|cap| cap.request_keyframe()); 139 | } 140 | 141 | _ => {} 142 | } 143 | } 144 | 145 | /// Send a frame to engine 146 | async fn tick(&mut self, frame: CapturerOutput) { 147 | if !self.running { 148 | return; 149 | } 150 | 151 | let rtp_time = MediaTime::from_seconds(frame.seconds); 152 | let _ = self.engine_sender.try_send(PeerChannelCommand::SendMedia { 153 | kind: MediaKind::Video, 154 | data: frame.data, 155 | rtp_time, 156 | extra: None, 157 | }); 158 | } 159 | 160 | async fn start_capture(&mut self, opts: StartScreenOpts) { 161 | info!("starting capture"); 162 | 163 | let mut capturer = ScreenCapturer::new(self.frame_sender.clone()); 164 | 165 | // let mut config = capturer.config(); 166 | if let Some(id) = opts.display_id { 167 | capturer.set_display(id); 168 | } 169 | 170 | let Ok(desired_bitrate) = capturer.start().await else { 171 | return; 172 | }; 173 | 174 | // Send the desired bitrate to the engine to set on BWE for peers 175 | self.send_to_engine(PeerChannelCommand::ConfigBwe(BweCfg::desired( 176 | Bitrate::bps(desired_bitrate as u64), 177 | ))); 178 | 179 | self.capturer = Some(capturer); 180 | self.start = Instant::now(); 181 | self.running = true; 182 | } 183 | 184 | async fn stop_capture(&mut self) { 185 | info!("stopping capture"); 186 | 187 | self.running = false; 188 | 189 | // stop capturing 190 | if let Some(mut cap) = self.capturer.take() { 191 | cap.stop().await; 192 | } 193 | } 194 | 195 | #[allow(dead_code)] 196 | fn send_to_engine(&mut self, command: PeerChannelCommand) { 197 | let _ = self.engine_sender.try_send(command); 198 | } 199 | } 200 | -------------------------------------------------------------------------------- /rtc/commands.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use crate::system::NoSleepManager; 4 | 5 | use super::{ 6 | engine::Engine, 7 | peer::channel::Signal, 8 | peer2::utils::{parse_candidate, parse_sdp}, 9 | 
utils::{CallId, UserId}, 10 | }; 11 | use serde::{Deserialize, Serialize}; 12 | use tauri::Manager; 13 | use tauri::{State, WebviewWindow}; 14 | 15 | #[derive(Serialize, Clone, Debug, PartialEq)] 16 | pub enum ConnectionState { 17 | #[serde(rename = "connected")] 18 | Connected, 19 | #[serde(rename = "connecting")] 20 | Connecting, 21 | #[serde(rename = "disconnected")] 22 | Disconnected, 23 | } 24 | 25 | #[derive(Serialize, Clone)] 26 | #[serde(rename_all = "camelCase")] 27 | pub struct PeerStateChangeEventPayload { 28 | pub user_id: UserId, 29 | pub call_id: CallId, 30 | pub state: ConnectionState, 31 | } 32 | 33 | #[derive(Serialize, Clone)] 34 | #[serde(rename_all = "camelCase")] 35 | struct SendSignalPayload { 36 | user_id: UserId, 37 | call_id: CallId, 38 | signal: String, 39 | } 40 | 41 | pub fn send_signal_to_win( 42 | window: &WebviewWindow, 43 | user_id: UserId, 44 | call_id: CallId, 45 | signal: Arc, 46 | ) { 47 | window 48 | .emit( 49 | "rtc_send_signal", 50 | SendSignalPayload { 51 | user_id, 52 | call_id, 53 | signal: signal.to_json(), 54 | }, 55 | ) 56 | .expect("Failed to send signal"); 57 | } 58 | 59 | /// rtc_join 60 | #[tauri::command] 61 | pub async fn rtc_join( 62 | engine: State<'_, Engine>, 63 | no_sleep: State<'_, NoSleepManager>, 64 | call_id: String, 65 | mic_enabled: bool, 66 | ) -> Result<(), ()> { 67 | engine.join_room(CallId(call_id), mic_enabled); 68 | no_sleep.enable(); 69 | Ok(()) 70 | } 71 | 72 | /// rtc_leave 73 | #[tauri::command] 74 | pub async fn rtc_leave( 75 | engine: State<'_, Engine>, 76 | no_sleep: State<'_, NoSleepManager>, 77 | ) -> Result<(), ()> { 78 | engine.leave_room(); 79 | no_sleep.disable(); 80 | Ok(()) 81 | } 82 | 83 | /// rtc_enable_mic 84 | #[tauri::command] 85 | pub fn rtc_enable_mic(engine: State<'_, Engine>) { 86 | engine.toggle_mic(true); 87 | } 88 | 89 | /// rtc_disable_mic 90 | #[tauri::command] 91 | pub fn rtc_disable_mic(engine: State<'_, Engine>) { 92 | engine.toggle_mic(false); 93 | } 94 | 95 | 
/// rtc_send_data_string — forward a UTF-8 data-channel payload to a peer.
#[tauri::command]
pub async fn rtc_send_data_string(
    engine: State<'_, Engine>,
    user_id: String,
    data: String,
) -> Result<(), ()> {
    // `user_id.into()` converts the raw string into the UserId newtype.
    engine.send_data_string(user_id.into(), data);
    Ok(())
}

/// rtc_set_signal (called from window)
/// --------------
/// Payload carries exactly one of `sdp` or `candidate`; both are raw JSON
/// strings produced by the browser-side signalling code.
#[derive(Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct SetSignalPayload {
    user_id: String,
    call_id: String,
    sdp: Option<String>,
    candidate: Option<String>,
}

/// Apply an incoming remote signal (SDP offer/answer or ICE candidate).
/// Returns `Err(())` when the payload contains neither an SDP nor a candidate.
#[tauri::command]
pub async fn rtc_set_signal(
    engine: State<'_, Engine>,
    payload: SetSignalPayload,
) -> Result<(), ()> {
    let user_id = UserId(payload.user_id);
    let call_id = CallId(payload.call_id);
    // SDP takes precedence; a payload with both fields is treated as SDP.
    let signal = if let Some(sdp) = payload.sdp {
        Signal::Sdp(parse_sdp(sdp.as_str()))
    } else if let Some(candidate_string) = payload.candidate {
        Signal::Candidate(parse_candidate(candidate_string.as_str()))
    } else {
        warn!("got empty signal");
        return Err(());
    };

    engine.set_incoming_signal(user_id, call_id, signal);
    Ok(())
}

/// Add Peer (called from window)
#[derive(Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct AddPeerPayload {
    user_id: String,
    call_id: String,
    /// true when the local side should create the initial SDP offer
    initial_offerer: bool,
}

/// Register a new peer for the given call and start negotiation.
#[tauri::command]
pub async fn rtc_add_peer(engine: State<'_, Engine>, payload: AddPeerPayload) -> Result<(), ()> {
    let user_id = UserId(payload.user_id);
    let call_id = CallId(payload.call_id);
    let initial_offerer = payload.initial_offerer;

    engine.add_peer(user_id, call_id, initial_offerer);
    Ok(())
}

/// Remove Peer (called from window)
#[derive(Deserialize, Clone)]
#[serde(rename_all = "camelCase")]
pub struct RemovePeerPayload {
    user_id: String,
    call_id: String,
}
#[tauri::command]
| pub async fn rtc_remove_peer( 162 | engine: State<'_, Engine>, 163 | payload: RemovePeerPayload, 164 | ) -> Result<(), ()> { 165 | let user_id = UserId(payload.user_id); 166 | let call_id = CallId(payload.call_id); 167 | engine.remove_peer(user_id, call_id); 168 | Ok(()) 169 | } 170 | 171 | #[tauri::command] 172 | pub fn rtc_set_echo_cancel(payload: bool) -> Result<(), ()> { 173 | error!("setting echo cancel no longer supported. {}", payload); 174 | Ok(()) 175 | } 176 | -------------------------------------------------------------------------------- /rtc/echo_cancel.rs: -------------------------------------------------------------------------------- 1 | use webrtc_audio_processing::Stats; 2 | 3 | use super::processor::AudioEchoProcessor; 4 | 5 | pub struct EncoderProcess { 6 | processor: AudioEchoProcessor, 7 | channels: u16, 8 | sample_rate: u32, 9 | } 10 | 11 | impl EncoderProcess { 12 | pub fn new(processor: AudioEchoProcessor, channels: u16, sample_rate: u32) -> Self { 13 | Self { 14 | processor, 15 | sample_rate, 16 | channels, 17 | } 18 | } 19 | 20 | pub fn frame_size(&self) -> usize { 21 | self.processor.num_samples_per_frame() * self.channels as usize 22 | } 23 | 24 | pub fn set_output_will_be_muted(&self, muted: bool) { 25 | self.processor.set_output_will_be_muted(muted); 26 | } 27 | 28 | pub fn stats(&self) -> Stats { 29 | self.processor.get_stats() 30 | } 31 | 32 | pub fn get_echo_processor(&self) -> &AudioEchoProcessor { 33 | &self.processor 34 | } 35 | pub fn get_echo_processor_mut(&mut self) -> &mut AudioEchoProcessor { 36 | &mut self.processor 37 | } 38 | 39 | pub fn preallocate_buffer(&self) -> Vec { 40 | let one_ms = (self.sample_rate as usize / 1000) * self.channels as usize; 41 | // allocate 80ms buffer (random number) 42 | vec![0.0_f32; one_ms * 100] 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /rtc/error.rs: -------------------------------------------------------------------------------- 
1 | use str0m::{error::IceError, RtcError}; 2 | use thiserror::Error; 3 | // #[error("Invalid header (expected {expected:?}, got {found:?})")] 4 | // InvalidHeader { 5 | // expected: String, 6 | // found: String, 7 | // }, 8 | // #[error("Missing attribute: {0}")] 9 | // MissingAttribute(String), 10 | 11 | #[derive(Error, Debug)] 12 | pub enum EngineError { 13 | /// ICE errors 14 | #[error("No network interface found for connection")] 15 | IceNoNetworkInterface, 16 | #[error("No stun found for interface")] 17 | IceStunForInterface, 18 | 19 | /// ICE agent errors. 20 | #[error("{0}")] 21 | Ice(#[from] IceError), 22 | 23 | /// ICE agent errors. 24 | #[error("{0}")] 25 | Rtc(#[from] RtcError), 26 | 27 | /// STUN error from webrtc-rs 28 | #[error("WebRTC utility error: {0}")] 29 | WebRtcUtil(#[from] webrtc::util::Error), 30 | 31 | /// STUN error from webrtc-rs 32 | #[error("STUN error: {0}")] 33 | Stun(#[from] webrtc::stun::Error), 34 | 35 | /// TURN error from webrtc-rs 36 | #[error("TURN error: {0}")] 37 | Turn(#[from] webrtc::turn::Error), 38 | 39 | /// Other IO errors. 40 | #[error("{0}")] 41 | Io(#[from] std::io::Error), 42 | // #[error("{0}")] 43 | // Other(String), 44 | } 45 | -------------------------------------------------------------------------------- /rtc/ice/conn.rs: -------------------------------------------------------------------------------- 1 | use async_std::net::UdpSocket; 2 | use async_trait::async_trait; 3 | use std::net::SocketAddr; 4 | 5 | use webrtc::util::Result; 6 | 7 | pub struct UdpConn(pub UdpSocket); 8 | 9 | #[async_trait] 10 | impl webrtc::util::Conn for UdpConn { 11 | async fn connect(&self, addr: SocketAddr) -> Result<()> { 12 | Ok(self.0.connect(addr).await?) 13 | } 14 | 15 | async fn recv(&self, buf: &mut [u8]) -> Result { 16 | Ok(self.0.recv(buf).await?) 17 | } 18 | 19 | async fn recv_from(&self, buf: &mut [u8]) -> Result<(usize, SocketAddr)> { 20 | Ok(self.0.recv_from(buf).await?) 
21 | } 22 | 23 | async fn send(&self, buf: &[u8]) -> Result { 24 | Ok(self.0.send(buf).await?) 25 | } 26 | 27 | async fn send_to(&self, buf: &[u8], target: SocketAddr) -> Result { 28 | Ok(self.0.send_to(buf, target).await?) 29 | } 30 | 31 | fn local_addr(&self) -> Result { 32 | Ok(self.0.local_addr()?) 33 | } 34 | 35 | fn remote_addr(&self) -> Option { 36 | None 37 | } 38 | 39 | async fn close(&self) -> Result<()> { 40 | Ok(()) 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /rtc/ice/gatherer.rs: -------------------------------------------------------------------------------- 1 | // goals for this 2 | // 1. gather host candidates 3 | // 2. for each, get srflx remote ip 4 | // 3. get turn remote allocation and relay packets 5 | // 4. repeat 6 | 7 | use crate::rtc::error::EngineError; 8 | use crate::rtc::ice::utils::stun_binding; 9 | use crate::rtc::net::bytes_pool::BytesPool; 10 | use crate::rtc::peer2::RunLoopEvent; 11 | use async_std::net::UdpSocket; 12 | use flume::Receiver; 13 | use std::collections::HashMap; 14 | use std::net::{IpAddr, SocketAddr}; 15 | 16 | use std::sync::Arc; 17 | use str0m::net::Transmit; 18 | use str0m::Candidate; 19 | use tokio::time::sleep; 20 | use webrtc::turn::client::Client; 21 | use webrtc::util::Conn; 22 | 23 | use super::conn::UdpConn; 24 | use super::utils::socket_recv_from; 25 | 26 | pub struct IceGatherer { 27 | peer_event_sender: flume::Sender, 28 | /// awaiting for poll 29 | receiver: Receiver, 30 | sender: flume::Sender, 31 | sockets: HashMap, 32 | // sockets: HashMap>, 33 | /// to close socket listeners 34 | socket_signals: HashMap>, 35 | local_mapped: HashMap, 36 | turn_clients: HashMap, 37 | ice_policy: IcePolicy, 38 | } 39 | 40 | #[derive(Clone)] 41 | pub(super) enum Socket { 42 | Conn(Arc), 43 | Raw(Arc), 44 | } 45 | 46 | // Compat layer between turn and stun impl 47 | impl Socket { 48 | pub fn local_addr(&self) -> Result { 49 | match &self { 50 | Self::Conn(inner) => 
inner.local_addr().map_err(EngineError::from), 51 | Self::Raw(inner) => inner.local_addr().map_err(EngineError::from), 52 | } 53 | } 54 | 55 | pub async fn send_to(&self, buf: &[u8], target: SocketAddr) -> Result { 56 | match &self { 57 | Self::Conn(inner) => inner.send_to(buf, target).await.map_err(EngineError::from), 58 | Self::Raw(inner) => inner.send_to(buf, target).await.map_err(EngineError::from), 59 | } 60 | } 61 | 62 | pub async fn recv_from(&self, buf: &mut [u8]) -> Result<(usize, SocketAddr), EngineError> { 63 | match &self { 64 | Self::Conn(inner) => inner.recv_from(buf).await.map_err(EngineError::from), 65 | Self::Raw(inner) => inner.recv_from(buf).await.map_err(EngineError::from), 66 | } 67 | } 68 | } 69 | 70 | pub struct Gathered { 71 | pub local: Candidate, 72 | pub remote: Candidate, 73 | 74 | // pub(self) socket: Arc, 75 | pub(self) socket: Socket, 76 | pub(self) local_socket_addr: SocketAddr, 77 | pub(self) turn_client: Option, 78 | } 79 | 80 | impl Drop for IceGatherer { 81 | fn drop(&mut self) { 82 | warn!("closing ice gatherer"); 83 | // close sockets 84 | for (_, signal) in self.socket_signals.iter() { 85 | let _ = signal.try_send(()); 86 | } 87 | 88 | // close turn clients 89 | for (_, client) in self.turn_clients.iter() { 90 | let client_ = client.clone(); 91 | tokio::spawn(async move { 92 | // to avoid crash for last packets 93 | sleep(std::time::Duration::from_secs(1)).await; 94 | client_.close().await.expect("to close turn"); 95 | info!("closed turn."); 96 | }); 97 | } 98 | } 99 | } 100 | #[derive(Debug, PartialEq, Clone)] 101 | pub enum IcePolicy { 102 | Relay = 0, 103 | All = 1, 104 | Local = 2, 105 | } 106 | 107 | impl IceGatherer { 108 | pub fn new(peer_event_sender: flume::Sender, ice_policy: IcePolicy) -> Self { 109 | // channel to pass candidates from threads 110 | let (sender, receiver) = flume::bounded::(500); 111 | 112 | info!("ice transport policy {:#?}", &ice_policy); 113 | 114 | Self { 115 | receiver, 116 | sender, 117 | 
peer_event_sender, 118 | sockets: HashMap::with_capacity(10), 119 | socket_signals: HashMap::with_capacity(10), 120 | local_mapped: HashMap::with_capacity(10), 121 | turn_clients: HashMap::with_capacity(5), 122 | ice_policy, 123 | } 124 | } 125 | 126 | pub fn gather(&self) { 127 | let sender_ = self.sender.clone(); 128 | let ice_policy = self.ice_policy.clone(); 129 | 130 | info!("Gathering ICE candidates..."); 131 | 132 | // Start gathering 133 | tokio::spawn(async move { 134 | // find network interfaces 135 | for addr in super::utils::find_host_addresses().expect("to find host interfaces") { 136 | if matches!(ice_policy, IcePolicy::Local | IcePolicy::All) { 137 | // UDP 138 | // add host candidates 139 | let addr_ = addr; 140 | let sender = sender_.clone(); 141 | tokio::spawn(async move { 142 | if let Ok(gathered) = Self::make_host(addr_, "udp").await { 143 | let _ = sender.try_send(gathered); 144 | } else { 145 | warn!("failed to gather host for udp address {}", &addr_); 146 | } 147 | }); 148 | 149 | // // TCP 150 | // let addr_ = addr; 151 | // let sender = sender_.clone(); 152 | // tokio::spawn(async move { 153 | // if let Ok(gathered) = Self::make_host(addr_, "tcp").await { 154 | // let _ = sender.try_send(gathered); 155 | // } else { 156 | // warn!("failed to gather host for tcp address {}", &addr_); 157 | // } 158 | // }); 159 | 160 | // Add srflx first 161 | // STUN addresses 162 | let stun_servers = [ 163 | "stun.l.google.com:19302".to_string(), 164 | ]; 165 | 166 | for stun_server in stun_servers { 167 | let addr_ = addr; 168 | let sender = sender_.clone(); 169 | tokio::spawn(async move { 170 | if let Ok(gathered) = Self::make_srflx(addr_, stun_server).await { 171 | let _ = sender.try_send(gathered); 172 | } else { 173 | warn!("failed to gather srflx for address {}", &addr_); 174 | } 175 | }); 176 | } 177 | } 178 | 179 | if matches!(ice_policy, IcePolicy::Relay | IcePolicy::All) { 180 | // add turn 181 | let addr_ = addr; 182 | let sender = 
sender_.clone(); 183 | tokio::spawn(async move { 184 | // add TURN with a delay 185 | if let Ok(gathered) = Self::make_turn(addr_).await { 186 | // sleep(std::time::Duration::from_millis(150)).await; 187 | let _ = sender.try_send(gathered); 188 | } else { 189 | warn!("failed to gather relayed for address {}", &addr_); 190 | } 191 | }); 192 | } 193 | } 194 | }); 195 | } 196 | 197 | /// Get available candidates waiting to be added to agent 198 | pub async fn poll_candidate(&mut self) -> Option { 199 | // listen on channel 200 | match self.receiver.recv_async().await { 201 | Ok(result) => { 202 | // start listening to packets 203 | self.start_listening(&result); 204 | 205 | // save ref turn client to close later 206 | if let Some(client) = result.turn_client.as_ref() { 207 | self.turn_clients.insert( 208 | result.socket.local_addr().expect("to get addr"), 209 | client.clone(), 210 | ); 211 | } 212 | 213 | info!("Gathered candidate {:?}", &result.remote); 214 | 215 | result.into() 216 | } 217 | Err(_) => None, 218 | } 219 | } 220 | 221 | /// Send outgoing packets through the socket 222 | pub async fn send_to(&self, transmit: Transmit) { 223 | // let socket_addr = &transmit.source; 224 | let socket_addr = if let Some(s) = self.local_mapped.get(&transmit.source) { 225 | s 226 | } else { 227 | &transmit.source 228 | }; 229 | 230 | if let Some(socket) = self.sockets.get(socket_addr) { 231 | let socket_ = socket.clone(); 232 | tokio::spawn(async move { 233 | if let Err(error) = socket_ 234 | // if let Err(error) = socket 235 | .send_to(&transmit.contents, transmit.destination) 236 | .await 237 | { 238 | debug!("failed to send {}", error); 239 | } 240 | }); 241 | 242 | // I tried this and this caused weird delay 243 | // if let Err(error) = socket 244 | // .send_to(&*transmit.contents, transmit.destination) 245 | // .await 246 | // { 247 | // error!("failed to send {}", error); 248 | // } 249 | } else { 250 | warn!("socket to transmit with not found {:#?}", transmit); 251 | 
} 252 | } 253 | 254 | // ------------------------- 255 | // --- end of public API --- 256 | // ------------------------- 257 | async fn make_host( 258 | addr: IpAddr, 259 | proto: impl TryInto, 260 | ) -> Result { 261 | let socket = UdpSocket::bind(format!("{addr}:0")).await?; 262 | let local_addr = socket.local_addr().expect("a local socket adddress"); 263 | 264 | let candidate = Candidate::host(local_addr, proto)?; 265 | 266 | Ok(Gathered { 267 | local: candidate.clone(), 268 | remote: candidate, 269 | socket: Socket::Raw(Arc::new(socket)), 270 | local_socket_addr: local_addr, 271 | turn_client: None, 272 | }) 273 | } 274 | 275 | async fn make_turn(addr: IpAddr) -> Result { 276 | use webrtc::turn::client; 277 | 278 | let local_socket = UdpSocket::bind(format!("{addr}:0")).await?; 279 | let _local_addr = local_socket.local_addr()?; 280 | 281 | // Your TURN server 282 | let turn_server_addr = "0.0.0.0:3478".to_string(); 283 | 284 | let cfg = client::ClientConfig { 285 | stun_serv_addr: String::new(), 286 | turn_serv_addr: turn_server_addr, 287 | username: "your usermame".to_string(), 288 | password: "your password".to_string(), 289 | realm: "your realm".to_string(), 290 | software: String::new(), 291 | rto_in_ms: 0, 292 | conn: Arc::new(UdpConn(local_socket)), 293 | vnet: None, 294 | }; 295 | 296 | let client = client::Client::new(cfg).await?; 297 | 298 | client.listen().await?; 299 | 300 | // Allocate a relay socket on the TURN server. On success, it 301 | // will return a net.PacketConn which represents the remote 302 | // socket. 303 | let relay_conn = client.allocate().await?; 304 | 305 | // Send BindingRequest to learn our external IP 306 | // let mapped_addr = client.send_binding_request().await?; 307 | 308 | let relay_addr = relay_conn.local_addr()?; 309 | // The relayConn's local address is actually the transport 310 | // address assigned on the TURN server. 
311 | info!("relayed-address={}", &relay_addr); 312 | 313 | // let local = Candidate::host(local_addr.clone())?; 314 | // let remote = Candidate::relayed(relay_addr, local_addr, "udp".to_string())?; 315 | // let local = Candidate::host(local_addr.clone())?; 316 | let remote = Candidate::relayed(relay_addr, "udp")?; 317 | 318 | Ok(Gathered { 319 | local: remote.clone(), 320 | remote, 321 | socket: Socket::Conn(Arc::new(relay_conn)), 322 | 323 | // could be different 324 | local_socket_addr: relay_addr, 325 | turn_client: Some(client), 326 | }) 327 | } 328 | 329 | async fn make_srflx(addr: IpAddr, stun_server: String) -> Result { 330 | let local_socket = UdpSocket::bind(format!("{addr}:0")).await?; 331 | let local_addr = local_socket.local_addr().expect("a local socket adddress"); 332 | 333 | debug!("STUN server connecting to: {stun_server}"); 334 | let socket = Arc::new(local_socket); 335 | let public_addr = match stun_binding(socket.clone()).await { 336 | Ok(public_addr) => { 337 | info!("got srflx candidate {:#?}", &public_addr); 338 | public_addr 339 | } 340 | Err(error) => { 341 | warn!("failed to gather SRFLX candidates {:#?}", error); 342 | return Err(EngineError::IceStunForInterface); 343 | } 344 | }; 345 | 346 | // SRFLX candidates are added as Host locally 347 | let local = Candidate::host(local_addr.clone(), str0m::net::Protocol::Udp)?; 348 | let remote = Candidate::server_reflexive(public_addr, local_addr, str0m::net::Protocol::Udp)?; 349 | 350 | Ok(Gathered { 351 | local, 352 | remote, 353 | socket: Socket::Raw(socket), 354 | 355 | // could be different 356 | local_socket_addr: local_addr, 357 | turn_client: None, 358 | }) 359 | } 360 | 361 | /// Start listening on a socket, add it to hashmap 362 | fn start_listening(&mut self, gathered: &Gathered) { 363 | let socket_addr = gathered.socket.local_addr().expect("to get local addr"); 364 | self.sockets.insert(socket_addr, gathered.socket.clone()); 365 | 366 | self 367 | .local_mapped 368 | 
.insert(gathered.local_socket_addr, socket_addr); 369 | 370 | let (signal, closed) = flume::bounded::<()>(1); 371 | self.socket_signals.insert(socket_addr, signal); 372 | 373 | let socket_addr_ = socket_addr; 374 | let socket_ = gathered.socket.clone(); 375 | let peer_runloop_sender = self.peer_event_sender.clone(); 376 | 377 | // Recv packets 378 | tokio::spawn(async move { 379 | let mut bytes_pool = BytesPool::new(20, 2000); 380 | 381 | loop { 382 | tokio::select! { 383 | Ok((source, bytes)) = socket_recv_from(&socket_, &mut bytes_pool) => { 384 | let _ = peer_runloop_sender.try_send(RunLoopEvent::NetworkInput { 385 | bytes, 386 | source, 387 | destination: socket_addr_, 388 | }); 389 | } 390 | 391 | _ = closed.recv_async() => { 392 | break; 393 | } 394 | } 395 | } 396 | debug!("socket loop task closed {}", &socket_addr_); 397 | }); 398 | } 399 | } 400 | -------------------------------------------------------------------------------- /rtc/ice/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod conn; 2 | pub mod gatherer; 3 | pub mod utils; 4 | -------------------------------------------------------------------------------- /rtc/ice/utils.rs: -------------------------------------------------------------------------------- 1 | use crate::rtc::{error::EngineError, net::bytes_pool::BytesPool}; 2 | use anyhow::anyhow; 3 | use async_std::net::UdpSocket; 4 | use bytes::Bytes; 5 | use std::{ 6 | net::{IpAddr, SocketAddr}, 7 | sync::Arc, 8 | }; 9 | use systemstat::{Platform, System}; 10 | 11 | use super::gatherer::Socket; 12 | 13 | pub fn find_host_addresses() -> Result, EngineError> { 14 | let system = System::new(); 15 | let networks = system.networks()?; 16 | 17 | let mut hosts = Vec::with_capacity(3); 18 | for net in networks.values() { 19 | for n in &net.addrs { 20 | if let systemstat::IpAddr::V4(v) = n.addr { 21 | if !v.is_loopback() && !v.is_link_local() && !v.is_broadcast() { 22 | hosts.push(IpAddr::V4(v)); 23 | } 
24 | } 25 | } 26 | } 27 | 28 | info!("networks found: {:#?}", &hosts); 29 | 30 | if hosts.is_empty() { 31 | Err(EngineError::IceNoNetworkInterface) 32 | } else { 33 | Ok(hosts) 34 | } 35 | } 36 | 37 | /// Given socket, fills the buffer 38 | pub async fn socket_recv_from( 39 | socket: &Socket, 40 | // socket: &UdpSocket, 41 | bytes_pool: &mut BytesPool, 42 | // buf: &mut Vec, 43 | ) -> Result<(SocketAddr, Bytes), EngineError> { 44 | // buf.resize(2000, 0); 45 | let mut bytes = bytes_pool.get_bytes_mut(); 46 | 47 | match socket.recv_from(bytes.as_mut()).await { 48 | Ok((n, source)) => { 49 | // buf.truncate(n); 50 | // let contents = buf.as_slice().try_into().expect("failed to get buf"); 51 | // let bytes = Bytes::copy_from_slice(contents); 52 | // return (source, bytes.into()).into(); 53 | 54 | // new way 55 | bytes.truncate(n); 56 | let bytes = bytes.freeze(); 57 | Ok((source, bytes)) 58 | } 59 | 60 | Err(e) => match e { 61 | // taken from str0m example 62 | // webrtc::util::Error::ErrTimeout => , 63 | // ErrorKind::WouldBlock | ErrorKind::TimedOut => None, 64 | _ => Err(EngineError::from(e)), 65 | }, 66 | } 67 | } 68 | 69 | pub async fn stun_binding(socket: Arc) -> Result { 70 | let mut client = stun_client::Client::from_socket(socket, None); 71 | let res = client 72 | .binding_request("stun.l.google.com:19302", None) 73 | .await?; 74 | 75 | let class = res.get_class(); 76 | match class { 77 | stun_client::Class::SuccessResponse => { 78 | let xor_mapped_addr = stun_client::Attribute::get_xor_mapped_address(&res); 79 | 80 | Ok(xor_mapped_addr.expect("to get ip from stun response")) 81 | } 82 | _ => Err(anyhow!(format!( 83 | "failed to request stun. 
class: {:?}", 84 | class 85 | ))), 86 | } 87 | } 88 | -------------------------------------------------------------------------------- /rtc/net/bytes_pool.rs: -------------------------------------------------------------------------------- 1 | use bytes::BytesMut; 2 | 3 | pub struct BytesPool { 4 | pool_size: usize, 5 | bytes_capacity: usize, 6 | reserve: Vec, 7 | } 8 | 9 | impl BytesPool { 10 | pub fn new(pool_size: usize, bytes_capacity: usize) -> Self { 11 | Self { 12 | reserve: vec![BytesMut::zeroed(bytes_capacity); pool_size], 13 | pool_size, 14 | bytes_capacity, 15 | } 16 | } 17 | 18 | pub fn get_bytes_mut(&mut self) -> BytesMut { 19 | if let Some(bytes) = self.reserve.pop() { 20 | bytes 21 | } else { 22 | // re-allocate 23 | for _ in 0..=self.pool_size { 24 | self.reserve.push(BytesMut::zeroed(self.bytes_capacity)); 25 | } 26 | 27 | BytesMut::zeroed(self.bytes_capacity) 28 | } 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /rtc/net/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod bytes_pool; 2 | -------------------------------------------------------------------------------- /rtc/peer/channel.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use bytes::Bytes; 4 | use serde::{Deserialize, Serialize}; 5 | use str0m::{ 6 | bwe::Bitrate, 7 | media::{MediaData, MediaKind, MediaTime}, 8 | Candidate, IceConnectionState, 9 | }; 10 | 11 | use crate::rtc::{ 12 | commands::ConnectionState, 13 | peer2::{ 14 | utils::{CandidateJsonStr, SdpJson, SdpType}, 15 | BweCfg, MediaExtra, MediaType, PeerId, 16 | }, 17 | utils::{CallId, UserId}, 18 | }; 19 | 20 | #[derive(Debug)] 21 | pub enum Sdp { 22 | Offer(str0m::change::SdpOffer), 23 | Answer(str0m::change::SdpAnswer), 24 | } 25 | 26 | pub type PoolBytesVec = Bytes; 27 | 28 | #[derive(Clone, Copy)] 29 | pub struct StartScreenOpts { 30 | pub display_id: 
Option, 31 | } 32 | 33 | pub enum PeerChannelCommand { 34 | // action 35 | AddPeer { 36 | user_id: UserId, 37 | call_id: CallId, 38 | initial_offerer: bool, 39 | // remote_offer: Option, 40 | }, 41 | RemovePeer { 42 | user_id: UserId, 43 | call_id: CallId, 44 | }, 45 | Join(CallId, bool), 46 | Leave, 47 | ToggleScreen, 48 | StartScreen(StartScreenOpts), 49 | ScreenKeyFrameRequest, 50 | StopScreen, 51 | OpenScreenWindow, 52 | OpenScreenWindowFor(UserId), 53 | SendDataString(UserId, String), 54 | 55 | ConfigBwe(BweCfg), 56 | SetScreenBitrate(Bitrate), 57 | 58 | SendSignal { 59 | user_id: UserId, 60 | signal: Signal, 61 | }, 62 | SetIncomingSignal { 63 | user_id: UserId, 64 | call_id: CallId, 65 | signal: Signal, 66 | }, 67 | 68 | SendAudio { 69 | data: Bytes, 70 | duration: MediaTime, 71 | }, 72 | SendMedia { 73 | kind: MediaKind, 74 | data: PoolBytesVec, 75 | rtp_time: MediaTime, 76 | extra: Option, 77 | }, 78 | 79 | /// deprectated 80 | EnableMic, 81 | /// deprectated 82 | DisableMic, 83 | 84 | ChangeMic(String), 85 | PeerStateChange { 86 | user_id: UserId, 87 | peer_id: PeerId, 88 | state: ConnectionState, 89 | ice_state: Option, 90 | }, 91 | 92 | // incoming 93 | MediaAdded { 94 | user_id: UserId, 95 | media: AudioMedia, 96 | }, 97 | IncomingMediaData { 98 | user_id: UserId, 99 | data: Arc, 100 | media_type: MediaType, 101 | }, 102 | 103 | // -------------------- 104 | // internal 105 | // -------------------- 106 | 107 | // emitted when we detect that a peer was disconnected, but we will re-evaluate this in engine to check if it's still valid or not 108 | // we set multiple timers to check if the peer is still connected 109 | MaybeReconnect { 110 | user_id: UserId, 111 | peer_id: PeerId, 112 | call_id: CallId, 113 | case: ReconnectCase, 114 | }, 115 | } 116 | 117 | #[derive(Debug)] 118 | pub struct AudioMedia { 119 | pub clock_rate: u32, 120 | pub channels: u16, 121 | } 122 | 123 | #[derive(Debug)] 124 | pub enum ReconnectCase { 125 | DidNotGetFirstSignal, 126 | 
InitialFailure, 127 | TryIceRestart, 128 | Disconnect, 129 | } 130 | 131 | #[derive(Debug)] 132 | pub enum Signal { 133 | Sdp(Sdp), 134 | Candidate(Candidate), 135 | } 136 | 137 | #[derive(Serialize, Deserialize)] 138 | struct JsonCandidate { 139 | #[serde(rename = "type")] 140 | _type: String, 141 | candidate: String, 142 | } 143 | 144 | #[derive(Serialize, Deserialize)] 145 | struct JsonSdp { 146 | #[serde(rename = "type")] 147 | _type: String, 148 | sdp: SdpJson, 149 | // sdp: String, 150 | } 151 | 152 | impl Signal { 153 | pub fn is_sdp(&self) -> bool { 154 | matches!(self, Self::Sdp(_)) 155 | } 156 | 157 | pub fn is_candidate(&self) -> bool { 158 | matches!(self, Self::Candidate(_)) 159 | } 160 | 161 | pub fn is_sdp_offer(&self) -> bool { 162 | matches!(self, Self::Sdp(Sdp::Offer(_))) 163 | } 164 | 165 | pub fn is_sdp_answer(&self) -> bool { 166 | matches!(self, Self::Sdp(Sdp::Answer(_))) 167 | } 168 | 169 | // consume it 170 | pub fn to_json(&self) -> String { 171 | // The type is `serde_json::Value` 172 | match *self { 173 | Self::Sdp(ref sdp) => serde_json::to_string(&JsonSdp { 174 | _type: "sdp".into(), 175 | sdp: match sdp { 176 | Sdp::Answer(answer) => SdpJson { 177 | sdp_type: SdpType::Answer, 178 | sdp_string: answer.to_sdp_string(), 179 | }, 180 | Sdp::Offer(offer) => SdpJson { 181 | sdp_type: SdpType::Offer, 182 | sdp_string: offer.to_sdp_string(), 183 | }, 184 | }, 185 | // sdp: match sdp { 186 | // Sdp::Answer(answer) => answer.to_sdp_string(), 187 | // Sdp::Offer(offer) => offer.to_sdp_string(), 188 | // }, 189 | }) 190 | .expect("Failed to parse SDP string"), 191 | Self::Candidate(ref candidate) => serde_json::to_string(&JsonCandidate { 192 | _type: "candidate".into(), 193 | candidate: serde_json::to_string(&CandidateJsonStr { 194 | candidate: candidate.to_string().replace("a=", ""), 195 | username_fragment: None, 196 | // username_fragment: candidate.ufrag().map(|u| u.to_string()), 197 | // todo 198 | sdp_m_line_index: Some(0), 199 | sdp_mid: 
None, 200 | }) 201 | .expect("to make json candidate"), 202 | }) 203 | .expect("Failed to parse candidate string"), 204 | } 205 | } 206 | } 207 | -------------------------------------------------------------------------------- /rtc/peer/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod channel; 2 | -------------------------------------------------------------------------------- /rtc/peer2/utils.rs: -------------------------------------------------------------------------------- 1 | use crate::rtc::peer::channel::Sdp; 2 | use serde::{Deserialize, Serialize}; 3 | use str0m::change::{SdpAnswer, SdpOffer}; 4 | use str0m::Candidate; 5 | 6 | #[derive(Serialize, Deserialize)] 7 | #[serde(rename_all = "camelCase")] 8 | pub struct CandidateJsonStr { 9 | pub candidate: String, 10 | pub username_fragment: Option, 11 | pub sdp_mid: Option, 12 | pub sdp_m_line_index: Option, 13 | } 14 | 15 | pub fn parse_candidate(raw: &str) -> Candidate { 16 | // let v = serde_json::to_string(raw).expect("to json for debug"); 17 | // let raw = v.as_str(); 18 | debug!("candidate raw {}", raw); 19 | // it's json 20 | if !raw.contains('{') { 21 | unimplemented!("cannot parse candidate that is not JSON"); 22 | }; 23 | 24 | // is json string 25 | let raw = if raw.starts_with('\"') { 26 | serde_json::from_str::(raw).expect("to parse candidate as string first") 27 | } else { 28 | raw.to_string() 29 | }; 30 | 31 | let candidate: Candidate = serde_json::from_str(raw.as_str()).expect("to have valid candidate"); 32 | 33 | debug!( 34 | "candidate parsed. 
\nog: {} \nparsed: {}", 35 | raw, 36 | &candidate.to_string() 37 | ); 38 | candidate 39 | } 40 | 41 | #[derive(Debug, Serialize, Deserialize)] 42 | pub enum SdpType { 43 | #[serde(rename = "offer")] 44 | Offer, 45 | #[serde(rename = "answer")] 46 | Answer, 47 | } 48 | 49 | #[derive(Debug, Serialize, Deserialize)] 50 | pub struct SdpJson { 51 | #[serde(rename = "type")] 52 | pub sdp_type: SdpType, 53 | #[serde(rename = "sdp")] 54 | pub sdp_string: String, 55 | } 56 | 57 | // remote_descr.type = "offer"; 58 | // remote_descr.sdp = msg 59 | pub fn parse_sdp(raw: &str) -> Sdp { 60 | let parsed = serde_json::from_str::(raw).expect("to parse json sdp"); 61 | 62 | info!("parsing remote sdp: {}", parsed.sdp_string); 63 | match parsed.sdp_type { 64 | SdpType::Answer => Sdp::Answer( 65 | SdpAnswer::from_sdp_string(parsed.sdp_string.as_str()).expect("to get sdp answer"), 66 | ), 67 | SdpType::Offer => { 68 | Sdp::Offer(SdpOffer::from_sdp_string(parsed.sdp_string.as_str()).expect("to get sdp offer")) 69 | } 70 | } 71 | } 72 | 73 | // pub fn create_addr(_network: NetworkType, ip: IpAddr, port: u16) -> SocketAddr { 74 | // /*if network.is_tcp(){ 75 | // return &net.TCPAddr{IP: ip, Port: port} 76 | // default: 77 | // return &net.UDPAddr{IP: ip, Port: port} 78 | // }*/ 79 | // SocketAddr::new(ip, port) 80 | // } 81 | 82 | // /// Initiates a stun requests to `server_addr` using conn, reads the response and returns the 83 | // /// `XORMappedAddress` returned by the stun server. 84 | // /// Adapted from stun v0.2. 
85 | // pub async fn get_xormapped_addr( 86 | // conn: &Arc, 87 | // server_addr: SocketAddr, 88 | // deadline: Duration, 89 | // ) -> Result { 90 | // let resp = stun_request(conn, server_addr, deadline).await?; 91 | // let mut addr = XorMappedAddress::default(); 92 | // addr.get_from(&resp)?; 93 | // Ok(addr) 94 | // } 95 | 96 | // const MAX_MESSAGE_SIZE: usize = 1280; 97 | 98 | // pub async fn stun_request( 99 | // conn: &Arc, 100 | // server_addr: SocketAddr, 101 | // deadline: Duration, 102 | // ) -> Result { 103 | // let mut request = Message::new(); 104 | // request.build(&[Box::new(BINDING_REQUEST), Box::new(TransactionId::new())])?; 105 | 106 | // conn.send_to(&request.raw, server_addr).await?; 107 | // let mut bs = vec![0_u8; MAX_MESSAGE_SIZE]; 108 | // let (n, _) = if deadline > Duration::from_secs(0) { 109 | // match tokio::time::timeout(deadline, conn.recv_from(&mut bs)).await { 110 | // Ok(result) => match result { 111 | // Ok((n, addr)) => (n, addr), 112 | // Err(err) => return Err(Error::Other(err.to_string())), 113 | // }, 114 | // Err(err) => return Err(Error::Other(err.to_string())), 115 | // } 116 | // } else { 117 | // conn.recv_from(&mut bs).await? 
118 | // }; 119 | 120 | // let mut res = Message::new(); 121 | // res.raw = bs[..n].to_vec(); 122 | // res.decode()?; 123 | 124 | // Ok(res) 125 | // } 126 | -------------------------------------------------------------------------------- /rtc/peer_queue.rs: -------------------------------------------------------------------------------- 1 | use super::utils::UserId; 2 | use std::collections::VecDeque; 3 | 4 | pub struct PeerInit { 5 | pub user_id: UserId, 6 | pub initial_offerer: bool, 7 | } 8 | 9 | pub struct PeerQueue { 10 | queue: VecDeque, 11 | } 12 | 13 | impl PeerQueue { 14 | pub fn init() -> Self { 15 | Self { 16 | queue: VecDeque::new(), 17 | } 18 | } 19 | 20 | #[allow(dead_code)] 21 | pub fn add(&mut self, user_id: &UserId, initial_offerer: bool) { 22 | self.queue.push_back(PeerInit { 23 | user_id: user_id.to_owned(), 24 | initial_offerer, 25 | }); 26 | } 27 | 28 | /// Get next peer in queue 29 | #[allow(dead_code)] 30 | pub fn next(&mut self) -> Option { 31 | self.queue.pop_front() 32 | } 33 | 34 | /// Get all peers 35 | #[allow(dead_code)] 36 | pub fn drain(&mut self) -> VecDeque { 37 | self.queue.drain(..).collect::>() 38 | } 39 | 40 | /// Reset 41 | pub fn clear(&mut self) { 42 | self.queue.clear(); 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /rtc/peer_state.rs: -------------------------------------------------------------------------------- 1 | use super::commands::ConnectionState; 2 | use str0m::IceConnectionState; 3 | 4 | /// Used in engine 5 | pub struct PeerState { 6 | initial_offerer: bool, 7 | got_first_offer: bool, 8 | got_first_answer: bool, 9 | connection_state: ConnectionState, 10 | ice_connection_state: IceConnectionState, 11 | // whether it received one "connected" ice connection state 12 | established_once: bool, 13 | } 14 | 15 | impl PeerState { 16 | pub fn new(initial_offerer: bool) -> Self { 17 | Self { 18 | initial_offerer, 19 | got_first_offer: false, 20 | got_first_answer: 
false, 21 | connection_state: ConnectionState::Connecting, 22 | ice_connection_state: IceConnectionState::New, 23 | established_once: false, 24 | } 25 | } 26 | 27 | pub fn got_first_offer(&self) -> bool { 28 | self.got_first_offer 29 | } 30 | 31 | pub fn set_got_first_offer(&mut self) { 32 | self.got_first_offer = true; 33 | } 34 | 35 | pub fn got_first_answer(&self) -> bool { 36 | self.got_first_answer 37 | } 38 | 39 | pub fn set_got_first_answer(&mut self) { 40 | self.got_first_answer = true; 41 | } 42 | 43 | pub fn is_initial_offerer(&self) -> bool { 44 | self.initial_offerer 45 | } 46 | 47 | pub fn connection_state(&self) -> &ConnectionState { 48 | &self.connection_state 49 | } 50 | 51 | pub fn set_connection_state(&mut self, state: ConnectionState) { 52 | self.connection_state = state; 53 | } 54 | 55 | pub fn ice_connection_state(&self) -> &IceConnectionState { 56 | &self.ice_connection_state 57 | } 58 | 59 | pub fn set_ice_connection_state(&mut self, state: IceConnectionState) { 60 | if state == IceConnectionState::Connected { 61 | self.established_once = true; 62 | } 63 | 64 | self.ice_connection_state = state; 65 | } 66 | 67 | pub fn has_connected_once(&self) -> bool { 68 | self.established_once 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /rtc/player.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | collections::HashMap, 3 | sync::{atomic::AtomicU64, Arc}, 4 | time::{Duration, Instant}, 5 | }; 6 | 7 | use audio_thread_priority::{ 8 | demote_current_thread_from_real_time, promote_current_thread_to_real_time, 9 | }; 10 | use cap::cv::current_host_time; 11 | use cpal::{ 12 | traits::{DeviceTrait, StreamTrait}, 13 | BufferSize, StreamConfig, 14 | }; 15 | use flume::{Receiver, Sender}; 16 | use ringbuf::{HeapConsumer, HeapProducer, HeapRb}; 17 | use str0m::media::MediaData; 18 | 19 | use crate::{ 20 | debugger::{Debugger, StatKind}, 21 | 
rtc::audio::{cpal_err_fn, get_default_output_device, get_output_device}, 22 | }; 23 | 24 | use super::{ 25 | audio_decoder::TrackDecoder, 26 | jitter::MAX_10MS_SAMPLE_COUNT, 27 | peer::channel::AudioMedia, 28 | processor::AudioEchoProcessor, 29 | resampler::{self, Resampler, ResamplerConfig}, 30 | utils::UserId, 31 | }; 32 | 33 | pub struct PlayerOptions { 34 | pub echo_processor: AudioEchoProcessor, 35 | pub preferred_device: Option, 36 | pub debugger: Debugger, 37 | } 38 | 39 | /// Mix, resample and play audio (one instance) 40 | pub struct Player { 41 | echo_processor: AudioEchoProcessor, 42 | command_receiver: Receiver, 43 | debugger: Debugger, 44 | // device_close_sender: Sender, 45 | controller: PlayerController, 46 | producer: HeapProducer, 47 | #[allow(unused)] 48 | resampler: Resampler, 49 | mix_buffer: Vec, 50 | tracks: HashMap, 51 | #[allow(unused)] 52 | volume: f32, 53 | config: OutputConfig, 54 | consumer: Option>, 55 | chunks_buffer: Vec>, 56 | channels: i32, 57 | frame_size: usize, 58 | device_latency: Arc, 59 | } 60 | 61 | pub enum PlayerCommand { 62 | AddMedia(UserId, AudioMedia), 63 | RemoveMedia(UserId), 64 | MediaData(UserId, Arc), 65 | // ChangeDevice(String), 66 | // SetVolume(f32), 67 | Stop, 68 | } 69 | 70 | #[derive(Clone)] 71 | pub struct PlayerController { 72 | player_sender: Sender, 73 | } 74 | impl PlayerController { 75 | pub fn new(player_sender: Sender) -> Self { 76 | Self { player_sender } 77 | } 78 | 79 | pub fn add_media(&self, user_id: UserId, media: AudioMedia) { 80 | let _ = self 81 | .player_sender 82 | .try_send(PlayerCommand::AddMedia(user_id, media)); 83 | } 84 | 85 | pub fn add_media_data(&self, user_id: UserId, media_data: Arc) { 86 | let _ = self 87 | .player_sender 88 | .try_send(PlayerCommand::MediaData(user_id, media_data)); 89 | } 90 | 91 | pub fn remove_media(&self, user_id: UserId) { 92 | let _ = self 93 | .player_sender 94 | .try_send(PlayerCommand::RemoveMedia(user_id)); 95 | } 96 | 97 | #[allow(unused)] 98 | pub 
fn change_device(&self) { 99 | todo!(); 100 | } 101 | 102 | #[allow(unused)] 103 | pub fn set_volume(&self) { 104 | todo!(); 105 | } 106 | 107 | pub fn stop(&self) { 108 | let _ = self.player_sender.try_send(PlayerCommand::Stop); 109 | } 110 | } 111 | 112 | impl Drop for PlayerController { 113 | fn drop(&mut self) { 114 | self.stop(); 115 | info!("Player controller dropped"); 116 | } 117 | } 118 | 119 | impl Player { 120 | pub fn new( 121 | PlayerOptions { 122 | echo_processor, 123 | debugger, 124 | .. 125 | }: PlayerOptions, 126 | ) -> Self { 127 | let (command_sender, command_receiver) = flume::bounded::(10); 128 | let config = Self::get_output_device(); 129 | debug!("Player config: {:?}", &config.3); 130 | let (producer, consumer) = Self::create_ringbuffer(config.3).split(); 131 | 132 | // TODO: we can have another signal to change device in the middle of playing 133 | 134 | let device_sample_rate = config.1; 135 | let device_channels = config.2; 136 | let resampler = Resampler::new(ResamplerConfig { 137 | input_sample_rate: 48_000, // Hard coded from track decoders 138 | output_sample_rate: device_sample_rate, 139 | channels: device_channels, 140 | chunk: resampler::Chunk::TenMs, 141 | }); 142 | 143 | // hard coded from track decoder 144 | let channels = 2; 145 | let frame_size = echo_processor.num_samples_per_frame() * channels as usize; 146 | 147 | Self { 148 | controller: PlayerController::new(command_sender), 149 | echo_processor, 150 | debugger, 151 | command_receiver, 152 | consumer: Some(consumer), 153 | producer, 154 | resampler, 155 | mix_buffer: vec![0.0; MAX_10MS_SAMPLE_COUNT], 156 | tracks: HashMap::new(), 157 | volume: 1.0, 158 | channels, 159 | frame_size, 160 | config, 161 | chunks_buffer: vec![vec![0.0; MAX_10MS_SAMPLE_COUNT]; 20], 162 | device_latency: Arc::new(AtomicU64::new(0)), 163 | } 164 | } 165 | 166 | pub fn get_controller(&self) -> PlayerController { 167 | self.controller.clone() 168 | } 169 | 170 | fn real_buffer_size(&self, 
buffer_size: u32) -> u32 { 171 | (buffer_size.max(512) as f32 * 48.0 / 44.1 * 2_f32).ceil() as u32 172 | } 173 | 174 | pub async fn run(&mut self) -> Result<(), anyhow::Error> { 175 | let (close_sender, close_receiver) = flume::bounded::<()>(1); 176 | 177 | let _buffer_size = self.real_buffer_size(self.config.3.to_owned()); 178 | 179 | // ... on a thread that will compute audio and has to be real-time: buffer_size 0 will auto select from sample rate 180 | let prio_handle = match promote_current_thread_to_real_time(0, 48_000) { 181 | Ok(h) => Some(h), 182 | Err(e) => { 183 | eprintln!("Error promoting player thread to real-time: {}", e); 184 | None 185 | } 186 | }; 187 | 188 | let processor = self.echo_processor.clone(); 189 | // keep here to have stream play 190 | let (_, __, ___, stream) = Self::start_output_device( 191 | self.consumer.take().expect("to have consumer"), 192 | &self.config, 193 | processor, 194 | self.device_latency.clone(), 195 | ); 196 | 197 | // NEW 198 | let mut interval = tokio::time::interval(Duration::from_millis(10)); // we operate in 10ms chunks 199 | interval.set_missed_tick_behavior(tokio::time::MissedTickBehavior::Skip); // Skip because it's only skipped if more than 5ms 200 | 201 | loop { 202 | tokio::select! 
{ 203 | // handle player commands 204 | Ok(command) = self.command_receiver.recv_async() => { 205 | self.handle_command(command, close_sender.clone()).await; 206 | } 207 | 208 | // every 10ms play audio 209 | _ = interval.tick() => { 210 | self.audio_tick().await; 211 | } 212 | 213 | // when player should end its operations 214 | _ = close_receiver.recv_async() => { 215 | break; 216 | } 217 | } 218 | } 219 | 220 | drop(stream); 221 | 222 | if let Some(handle) = prio_handle { 223 | let _ = demote_current_thread_from_real_time(handle); 224 | } 225 | 226 | info!("player loop ended."); 227 | Ok(()) 228 | } 229 | 230 | async fn handle_command(&mut self, command: PlayerCommand, close_sender: Sender<()>) { 231 | match command { 232 | PlayerCommand::AddMedia(user_id, media) => { 233 | let track_decoder = 234 | TrackDecoder::new(user_id.to_owned(), self.debugger.clone(), media).await; 235 | // add to the list of remote tracks so we poll it in the run loop 236 | self.tracks.insert(user_id, track_decoder); 237 | } 238 | PlayerCommand::MediaData(user_id, data) => { 239 | if let Some(track) = self.tracks.get_mut(&user_id) { 240 | let v = data.ext_vals.voice_activity.clone().unwrap_or(false); 241 | //These can add delay to audio look 242 | // self.debugger.stat( 243 | // StatKind::VoiceActivity, 244 | // if v { "true".into() } else { "false".into() }, 245 | // Some(&user_id), 246 | // ); 247 | 248 | track.insert_packet(data); 249 | } else { 250 | debug!("no track to add packet"); 251 | } 252 | } 253 | PlayerCommand::RemoveMedia(user_id) => { 254 | self.tracks.remove(&user_id); 255 | debug!("PlayerCommand::RemoveMedia") 256 | } 257 | PlayerCommand::Stop => { 258 | let _ = close_sender.try_send(()); 259 | } 260 | } 261 | } 262 | 263 | async fn audio_tick(&mut self) { 264 | // get track decoder audios 265 | // get audio from all decoder tasks 266 | 267 | // Remove this allocation 268 | if self.chunks_buffer.len() < self.tracks.len() { 269 | panic!("does not support +20"); 270 | } 
271 | 272 | if self.tracks.is_empty() { 273 | return; 274 | } 275 | 276 | if self.producer.is_full() { 277 | return; 278 | } 279 | 280 | let active_tracks_len = self.tracks.len(); 281 | let _processing_start_instant = Instant::now(); 282 | 283 | // fill out the buffer 284 | for (index, (_, track)) in self.tracks.iter_mut().enumerate() { 285 | let buffer = self 286 | .chunks_buffer 287 | .get_mut(index) 288 | .expect("to have chunks buffer"); 289 | // buffer.clear(); 290 | let size = track.get_audio(buffer); 291 | // probably fix audio quality 292 | buffer.truncate(size); 293 | } 294 | 295 | ////// MIX 296 | self.mix_buffer.clear(); 297 | let chunk_size = self.chunks_buffer[0].len(); // each audio track chunk size 298 | 299 | // do not loop over old chunks left over 300 | let active_chunks_buffers = &self.chunks_buffer[0..active_tracks_len]; 301 | 302 | // safety check 303 | for chunk in active_chunks_buffers.iter() { 304 | assert!( 305 | chunk.len() == chunk_size, 306 | "player track output size is not equal to other chunks" 307 | ); 308 | } 309 | 310 | for i in 0..chunk_size { 311 | // go over each sample and add them together 312 | let mut sample: f32 = 0.0; 313 | for chunk in active_chunks_buffers.iter() { 314 | sample += chunk[i]; 315 | } 316 | // NOTE(review): the old note claimed clipping was removed, but each mixed sample IS hard-clamped to [-1.0, 1.0] just below — confirm the echo processor tolerates the clamped stream 317 | // todo: maybe batch a slice to lower volume?
318 | sample = sample.max(-1.0).min(1.0); 319 | self.mix_buffer.push(sample); 320 | } 321 | 322 | assert!( 323 | self.mix_buffer.len() == chunk_size, 324 | "mix buffer size is not equal to chunk size before mix" 325 | ); 326 | 327 | //// PROCESS 328 | 329 | // Now process the in 480 sample frames and push to ringbuffer 330 | let samples = &mut self.mix_buffer[..]; 331 | let _full_frame_len = samples.len(); 332 | let _sample_rate = 48_000; 333 | let _channels = 2; 334 | let decoder_output_exact = samples.chunks_exact_mut(self.frame_size); 335 | 336 | // First, let's calculate stream delay by estimating when these frames 337 | // that we're processing will be played by the audio hardware 338 | // we use 960 here because we process 960 samples (2 frames) every 10ms 339 | // 960 is equivalent to 10ms of 48khz stereo audio 340 | 341 | // let buffer_delay_us = 342 | // ((1e6 * buffer_samples as f64 / sample_rate as f64 / channels as f64) + 0.5) as u64; 343 | // let processing_delay_us = processing_start_instant.elapsed().as_micros() as u64; 344 | // let device_latency_us = self.device_latency.load(Ordering::Relaxed); 345 | // // let device_latency_us = 0; // don't use this unreliable value for now. 
346 | // let render_delay_us = processing_delay_us + device_latency_us + buffer_delay_us; 347 | // dbg!(buffer_delay_us); 348 | // dbg!(processing_delay_us); 349 | // dbg!(device_latency_us); 350 | // self.echo_processor.set_playback_delay_us(render_delay_us); 351 | 352 | for frame in decoder_output_exact { 353 | // check 354 | if self.producer.free_len() < frame.len() { 355 | error!("producer buffer is full, ignoring"); 356 | continue; 357 | } 358 | 359 | trace!("processing render frame"); 360 | // process 361 | self.echo_processor.process_render_frame(frame); 362 | 363 | // play 364 | self.producer.push_slice(frame); 365 | 366 | // log 367 | // let debugger = self.debugger.clone(); 368 | // let len = self.producer.len(); 369 | // tokio::spawn(async move { 370 | // debugger.stat(StatKind::PlayerBufferSamples, len.into(), None); 371 | // debugger.stat(StatKind::PlayerDecodedFrameLen, full_frame_len.into(), None); 372 | // }); 373 | } 374 | } 375 | 376 | fn get_output_device() -> OutputConfig { 377 | // Config 378 | let output_config = get_output_device(); 379 | info!("output config {:?}", output_config); 380 | 381 | let channels = output_config.channels; 382 | assert!(channels == 2, "output device must be stereo"); 383 | let sample_rate = output_config.sample_rate.0; 384 | let buffer_size = if let BufferSize::Fixed(h) = output_config.buffer_size { 385 | h 386 | } else { 387 | warn!("failed to get fixed buffer size"); 388 | 512 // todo: make better 389 | }; 390 | 391 | OutputConfig(output_config, sample_rate, channels, buffer_size) 392 | } 393 | 394 | fn start_output_device( 395 | mut consumer: HeapConsumer, 396 | config: &OutputConfig, 397 | _processor: AudioEchoProcessor, 398 | _device_latency: Arc, 399 | ) -> (u16, u32, Sender, cpal::Stream) { 400 | let OutputConfig(output_config, sample_rate, channels, _) = config; 401 | let (close_sender, _) = flume::bounded::(1); 402 | let sample_rate_ = *sample_rate; 403 | let channels_ = *channels; 404 | 405 | // consumer 
406 | let output_data_fn = move |data: &mut [f32], info: &cpal::OutputCallbackInfo| { 407 | let count = consumer.pop_slice(data); 408 | 409 | trace!("output device data {}", data.len()); 410 | 411 | // fill the rest with 0.0 412 | for i in count..data.len() { 413 | data[i] = 0.0; 414 | } 415 | 416 | // Delay calc after play 417 | let render_time_ns = info.timestamp().callback.as_nanos(); 418 | let now_time_ns = host_time_to_stream_instant(current_host_time()).as_nanos(); 419 | let device_latency_us = 1e-3 * (render_time_ns - now_time_ns) as f64; 420 | let buffer_samples = consumer.len(); 421 | let buffer_delay_us = 422 | ((1e6 * buffer_samples as f64 / sample_rate_ as f64 / channels_ as f64) + 0.5) as f64; 423 | let render_latency_ms = (1e-3 * (device_latency_us + buffer_delay_us) + 0.5) as u64; 424 | 425 | // maybe move this to a tokio task. 426 | _processor.set_playback_delay_ms(render_latency_ms); 427 | }; 428 | 429 | debug!("output config={:#?}", &output_config); 430 | 431 | // output_config_clone 432 | let output_stream = get_default_output_device() 433 | .expect("Could not get default output default device") 434 | .build_output_stream(output_config, output_data_fn, cpal_err_fn) 435 | .expect("failed to create output stream"); 436 | 437 | output_stream 438 | .play() 439 | .expect("failed to play in output device"); 440 | 441 | (*channels, *sample_rate, close_sender, output_stream) 442 | } 443 | 444 | fn create_ringbuffer(buffer_size: u32) -> HeapRb { 445 | // let max_output_device_readsize = buffer_size as usize * 3; 446 | // buffer_size can be zero 447 | let _actual_buffer_size = buffer_size as f32 * 48.0 / 44.1 * 2_f32; 448 | // let max_output_device_readsize = (buffer_size as usize * 3).max(480); 449 | let _max_output_device_readsize = 960_f32 * 3_f32; // 2 times of decoded frame + 1 time of output buffer size 450 | // let max_output_device_readsize = 960_f32 + actual_buffer_size * 4_f32; // 2 times of decoded frame + 1 time of output buffer size 451 | let 
max_output_device_readsize = 960_f32 * 4_f32; // increased because I increased buffer size to 480 452 | 453 | #[cfg(target_os = "linux")] 454 | let max_output_device_readsize = buffer_size as f32 * 18_f32; // 240 * 18 455 | info!("max_output_device_readsize {}", &max_output_device_readsize); 456 | 457 | HeapRb::new(max_output_device_readsize.ceil() as usize) 458 | } 459 | } 460 | 461 | impl Drop for Player { 462 | fn drop(&mut self) { 463 | info!("player dropped"); 464 | } 465 | } 466 | 467 | struct OutputConfig(StreamConfig, u32, u16, u32); 468 | 469 | #[cfg(test)] 470 | mod tests { 471 | 472 | #[test] 473 | fn trying_to_write_test() { 474 | assert!(true); 475 | } 476 | } 477 | 478 | pub fn host_time_to_stream_instant(m_host_time: u64) -> cpal::StreamInstant { 479 | let info = cap::cidre::mach::TimeBaseInfo::new(); 480 | let nanos = m_host_time * info.numer as u64 / info.denom as u64; 481 | let secs = nanos / 1_000_000_000; 482 | let subsec_nanos = nanos - secs * 1_000_000_000; 483 | 484 | cpal::StreamInstant::new(secs as i64, subsec_nanos as u32) 485 | } 486 | -------------------------------------------------------------------------------- /rtc/processor.rs: -------------------------------------------------------------------------------- 1 | use crate::debugger::{Debugger, StatKind}; 2 | use std::{ 3 | sync::{atomic::AtomicU64, Arc}, 4 | time::Instant, 5 | }; 6 | use webrtc_audio_processing::{ 7 | EchoCancellationSuppressionLevel, Error, InitializationConfig, NoiseSuppressionLevel, Processor, 8 | NUM_SAMPLES_PER_FRAME, 9 | }; 10 | 11 | #[derive(Clone)] 12 | pub struct AudioEchoProcessor { 13 | inner: Processor, 14 | noise_cancel_level: NoiseSuppressionLevel, 15 | echo_cancel_level: EchoCancellationSuppressionLevel, 16 | playback_delay: Arc, 17 | debugger: Debugger, 18 | } 19 | 20 | // Notes: 21 | // Process at 48khz and stereo 22 | 23 | impl AudioEchoProcessor { 24 | pub fn new(debugger: Debugger, echo_cancel: String, noise_cancel: String) -> Self { 25 | let 
echo_cancel_level = match echo_cancel.as_str() { 26 | "lowest" => EchoCancellationSuppressionLevel::Lowest, 27 | "lower" => EchoCancellationSuppressionLevel::Lower, 28 | "low" => EchoCancellationSuppressionLevel::Low, 29 | "moderate" => EchoCancellationSuppressionLevel::Moderate, 30 | "high" => EchoCancellationSuppressionLevel::High, 31 | _ => EchoCancellationSuppressionLevel::Moderate, 32 | }; 33 | 34 | let noise_cancel_level = if noise_cancel == "moderate" { 35 | NoiseSuppressionLevel::Moderate 36 | } else if noise_cancel == "high" { 37 | NoiseSuppressionLevel::High 38 | } else if noise_cancel == "very_high" { 39 | NoiseSuppressionLevel::VeryHigh 40 | } else { 41 | NoiseSuppressionLevel::VeryHigh 42 | }; 43 | 44 | debug!("processor noise_cancel_level {:#?}", &noise_cancel_level); 45 | let mut processor = Self::create_processor(2, 2).expect("to create processor"); 46 | 47 | // Initial config 48 | AudioEchoProcessor::set_config_static( 49 | &mut processor, 50 | None, 51 | noise_cancel_level, 52 | echo_cancel_level, 53 | ); 54 | 55 | let (_playback_delay_ms_sender, _playback_delay_ms_recv) = 56 | flume::unbounded::<(usize, Instant)>(); 57 | 58 | Self { 59 | inner: processor, 60 | debugger, 61 | noise_cancel_level, 62 | echo_cancel_level, 63 | playback_delay: Arc::new(AtomicU64::new(0)), 64 | } 65 | } 66 | 67 | fn create_processor( 68 | num_capture_channels: i32, 69 | num_render_channels: i32, 70 | ) -> Result { 71 | let processor = Processor::new(&InitializationConfig { 72 | num_capture_channels: num_capture_channels, 73 | num_render_channels: num_render_channels, 74 | ..Default::default() 75 | })?; 76 | 77 | Ok(processor) 78 | } 79 | 80 | fn set_config_static( 81 | processor: &mut Processor, 82 | stream_delay_ms: Option, 83 | noise_cancel_level: NoiseSuppressionLevel, 84 | echo_cancel_level: EchoCancellationSuppressionLevel, 85 | ) { 86 | // High pass filter is a prerequisite to running echo cancellation. 
87 | let config = webrtc_audio_processing::Config { 88 | echo_cancellation: Some(webrtc_audio_processing::EchoCancellation { 89 | suppression_level: echo_cancel_level, 90 | stream_delay_ms: stream_delay_ms, 91 | enable_delay_agnostic: false, 92 | enable_extended_filter: false, 93 | }), 94 | 95 | enable_transient_suppressor: true, 96 | enable_high_pass_filter: true, 97 | 98 | noise_suppression: Some(webrtc_audio_processing::NoiseSuppression { 99 | suppression_level: noise_cancel_level, 100 | }), 101 | 102 | gain_control: None, 103 | // gain_control: Some(webrtc_audio_processing::GainControl { 104 | // compression_gain_db: 12, 105 | // mode: webrtc_audio_processing::GainControlMode::AdaptiveDigital, 106 | // target_level_dbfs: 18, 107 | // enable_limiter: true, 108 | // }), 109 | voice_detection: Some(webrtc_audio_processing::VoiceDetection { 110 | // FIXME: calculate this based on key pressed 111 | detection_likelihood: webrtc_audio_processing::VoiceDetectionLikelihood::Low, 112 | }), 113 | 114 | ..webrtc_audio_processing::Config::default() 115 | }; 116 | 117 | processor.set_config(config); 118 | } 119 | 120 | pub fn num_samples_per_frame(&self) -> usize { 121 | NUM_SAMPLES_PER_FRAME as usize 122 | } 123 | 124 | pub fn set_playback_delay_ms(&self, render_delay_ms: u64) { 125 | self 126 | .playback_delay 127 | .store(render_delay_ms, std::sync::atomic::Ordering::Relaxed); 128 | // dbg!(render_delay_us); 129 | // let now = Instant::now(); 130 | // let mut pbd = self.playback_delay.lock().expect("get playback delay"); 131 | // *pbd = (render_delay_us, now); 132 | } 133 | 134 | /// Attempt to calculate fresh delay if new estimate is available for renderer size 135 | /// otherwise return None and the processor will use its last used value 136 | pub fn update_current_stream_delay(&mut self, _capture_delay_ms: u64) { 137 | // Sets the delay in ms between process_render_frame() receiving a far-end frame 138 | // and process_capture_frame() receiving a near-end frame 
containing the corresponding echo. 139 | 140 | let stream_delay_ms = { 141 | // get 142 | let render_delay_ms = { 143 | self 144 | .playback_delay 145 | .load(std::sync::atomic::Ordering::Relaxed) 146 | }; 147 | let total_delay_ms = render_delay_ms + render_delay_ms; 148 | Some(total_delay_ms) 149 | }; 150 | 151 | // Avoid any unneccessary clone in the audio loop 152 | // #[cfg(debug_assertions)] 153 | { 154 | let stream_delay_ms_ = stream_delay_ms.clone(); 155 | let debugger = self.debugger.clone(); 156 | tokio::spawn(async move { 157 | debugger.stat(StatKind::StreamDelayMs, stream_delay_ms_.into(), None); 158 | }); 159 | } 160 | 161 | if let Some(stream_delay_ms) = stream_delay_ms { 162 | Self::set_config_static( 163 | &mut self.inner, 164 | Some(stream_delay_ms as i32), 165 | self.noise_cancel_level, 166 | self.echo_cancel_level, 167 | ) 168 | // self.inner.set_stream_delay_ms(stream_delay_ms as usize); 169 | } 170 | } 171 | 172 | pub fn set_output_will_be_muted(&self, muted: bool) { 173 | self.inner.set_output_will_be_muted(muted); 174 | } 175 | 176 | pub fn set_stream_key_pressed(&self, key_pressed: bool) { 177 | self.inner.set_stream_key_pressed(key_pressed); 178 | } 179 | 180 | pub fn get_stats(&self) -> webrtc_audio_processing::Stats { 181 | self.inner.get_stats() 182 | } 183 | 184 | pub fn process_capture_frame( 185 | &mut self, 186 | frame: &mut [f32], 187 | capture_delay_ms: u64, 188 | ) -> Result<(), Error> { 189 | // should call conditionally? 
190 | self.update_current_stream_delay(capture_delay_ms); 191 | self.set_stream_key_pressed(platform_utils::key_pressed()); 192 | self.inner.process_capture_frame(frame) 193 | } 194 | 195 | pub fn process_render_frame(&mut self, frame: &mut [f32]) { 196 | self 197 | .inner 198 | .process_render_frame(frame) 199 | .expect("to process render frame"); 200 | } 201 | } 202 | 203 | // V2 204 | 205 | // use std::{ 206 | // sync::{Arc, Mutex}, 207 | // time::Instant, 208 | // }; 209 | // use webrtc_audio_processing::{ 210 | // EchoCanceller, Error, GainController, GainControllerMode, NoiseSuppression, 211 | // NoiseSuppressionLevel, Pipeline, Processor, 212 | // }; 213 | 214 | // use crate::{debugger::Debugger, macos}; 215 | 216 | // use super::audio::create_processor; 217 | 218 | // #[derive(Clone)] 219 | // pub struct AudioEchoProcessor { 220 | // inner: Processor, 221 | // noise_cancel_level: NoiseSuppressionLevel, 222 | // playback_delay: Arc>, 223 | // debugger: Debugger, 224 | // } 225 | 226 | // // Process at 48khz and streo 227 | // impl AudioEchoProcessor { 228 | // pub fn new(debugger: Debugger, _echo_cancel: String, noise_cancel: String) -> Self { 229 | // let noise_cancel_level = if noise_cancel == "moderate" { 230 | // NoiseSuppressionLevel::Moderate 231 | // } else if noise_cancel == "high" { 232 | // NoiseSuppressionLevel::High 233 | // } else if noise_cancel == "very_high" { 234 | // NoiseSuppressionLevel::VeryHigh 235 | // } else { 236 | // NoiseSuppressionLevel::VeryHigh 237 | // }; 238 | 239 | // debug!("processor noise_cancel_level {:#?}", &noise_cancel_level); 240 | // let mut processor = create_processor(2, 2, noise_cancel_level).expect("to create processor"); 241 | 242 | // // Initial config 243 | // AudioEchoProcessor::set_config_static(&mut processor, None, noise_cancel_level); 244 | 245 | // let (playback_delay_ms_sender, playback_delay_ms_recv) = flume::unbounded::<(usize, Instant)>(); 246 | 247 | // Self { 248 | // inner: processor, 249 | // 
debugger, 250 | // noise_cancel_level, 251 | // playback_delay: Arc::new(Mutex::new((0, Instant::now()))), 252 | // } 253 | // } 254 | 255 | // // pub fn get_processor(&self) -> Processor { 256 | // // self.inner.clone() 257 | // // } 258 | 259 | // fn set_config_static( 260 | // processor: &mut Processor, 261 | // _stream_delay_ms: Option, 262 | // noise_cancel_level: NoiseSuppressionLevel, 263 | // ) { 264 | // // High pass filter is a prerequisite to running echo cancellation. 265 | // let config = webrtc_audio_processing::Config { 266 | // echo_canceller: EchoCanceller::Full { 267 | // enforce_high_pass_filtering: false, 268 | // } 269 | // .into(), 270 | 271 | // reporting: webrtc_audio_processing::ReportingConfig { 272 | // enable_voice_detection: true, 273 | // enable_residual_echo_detector: false, 274 | // enable_level_estimation: false, 275 | // }, 276 | 277 | // // for keyboard 278 | // enable_transient_suppression: true, 279 | // high_pass_filter: Some(webrtc_audio_processing::HighPassFilter { 280 | // apply_in_full_band: true, 281 | // }), 282 | 283 | // // pre_amplifier: Some(webrtc_audio_processing::PreAmplifier { 284 | // // ..Default::default() 285 | // // }), 286 | // noise_suppression: Some(NoiseSuppression { 287 | // level: noise_cancel_level, // low was lower than libwebrtc 288 | // analyze_linear_aec_output: false, 289 | // }), 290 | 291 | // gain_controller: Some(GainController { 292 | // mode: GainControllerMode::AdaptiveDigital, 293 | // target_level_dbfs: 7, // was 3 294 | // compression_gain_db: 12, 295 | // enable_limiter: true, 296 | // ..Default::default() 297 | // }), 298 | // pipeline: Pipeline { 299 | // maximum_internal_processing_rate: 300 | // webrtc_audio_processing::PipelineProcessingRate::Max48000Hz, 301 | // multi_channel_capture: true, 302 | // multi_channel_render: true, 303 | // }, 304 | 305 | // ..webrtc_audio_processing::Config::default() 306 | // }; 307 | 308 | // processor.set_config(config); 309 | // } 310 | 311 | // 
pub fn num_samples_per_frame(&self) -> usize { 312 | // self.inner.num_samples_per_frame() 313 | // } 314 | 315 | // pub fn set_playback_delay_ms(&self, playback_delay_ms: usize) { 316 | // let now = Instant::now(); 317 | // let mut pbd = self.playback_delay.lock().expect("get playback delay"); 318 | // *pbd = (playback_delay_ms, now); 319 | // } 320 | 321 | // /// Attempt to calculate fresh delay if new estimate is available for renderer size 322 | // /// otherwise return None and the processor will use its last used value 323 | // pub fn update_current_stream_delay(&mut self, capture_delay_ms: usize) { 324 | // // Sets the delay in ms between process_render_frame() receiving a far-end frame 325 | // // and process_capture_frame() receiving a near-end frame containing the corresponding echo. 326 | 327 | // let stream_delay_ms = { 328 | // // get 329 | // let val = { self.playback_delay.lock().expect("to get pbd").clone() }; 330 | 331 | // // if 0 return None 332 | 333 | // let playback_delay_ms = val.0; 334 | // //+ Instant::now().saturating_duration_since(val.1).as_millis() as usize; 335 | // let ms = (playback_delay_ms + capture_delay_ms) as i32; 336 | 337 | // Some(ms) 338 | // }; 339 | 340 | // // Avoid any unneccessary clone in the audio loop 341 | // // #[cfg(debug_assertions)] 342 | // // { 343 | // // let stream_delay_ms_ = stream_delay_ms.clone(); 344 | // // let debugger = self.debugger.clone(); 345 | // // tokio::spawn(async move { 346 | // // debugger.stat(StatKind::StreamDelayMs, stream_delay_ms_.into(), None); 347 | // // }); 348 | // // } 349 | 350 | // if let Some(stream_delay_ms) = stream_delay_ms { 351 | // self.inner.set_stream_delay_ms(stream_delay_ms as usize); 352 | // } 353 | // } 354 | 355 | // pub fn set_output_will_be_muted(&self, muted: bool) { 356 | // self.inner.set_output_will_be_muted(muted); 357 | // } 358 | 359 | // pub fn set_stream_key_pressed(&self, key_pressed: bool) { 360 | // self.inner.set_stream_key_pressed(key_pressed); 
361 | // } 362 | 363 | // pub fn get_stats(&self) -> webrtc_audio_processing::Stats { 364 | // self.inner.get_stats() 365 | // } 366 | 367 | // pub fn process_capture_frame( 368 | // &mut self, 369 | // frame: &mut [f32], 370 | // caputre_delay: usize, 371 | // ) -> Result<(), Error> { 372 | // // should call conditionally? 373 | // self.update_current_stream_delay(caputre_delay); 374 | 375 | // self.set_stream_key_pressed(platform_utils::key_pressed()); 376 | // self.inner.process_capture_frame(frame) 377 | // } 378 | 379 | // pub fn process_render_frame(&mut self, frame: &mut [f32]) { 380 | // self 381 | // .inner 382 | // .process_render_frame(frame) 383 | // .expect("to process render frame"); 384 | // } 385 | // } 386 | 387 | // pub fn create_processor( 388 | // num_capture_channels: i32, 389 | // num_render_channels: i32, 390 | // _noise_level: NoiseSuppressionLevel, 391 | // ) -> Result { 392 | // let processor = Processor::new(&InitializationConfig { 393 | // sample_rate_hz: 48_000, 394 | // num_capture_channels: num_capture_channels as usize, 395 | // num_render_channels: num_render_channels as usize, 396 | // })?; 397 | 398 | // Ok(processor) 399 | // } 400 | -------------------------------------------------------------------------------- /rtc/remote_control.rs: -------------------------------------------------------------------------------- 1 | use crate::screen::cmds::{emit_data_event, OverlayDataChannelEvent}; 2 | use crate::window::create_or_get_overlay_window; 3 | 4 | use super::peer::channel::StartScreenOpts; 5 | use super::{peer2::DataChannelEvent, utils::UserId}; 6 | 7 | use std::sync::{Arc, Mutex}; 8 | use tauri::{ 9 | AppHandle, LogicalPosition, LogicalSize, Manager, PhysicalPosition, PhysicalSize, WebviewWindow, 10 | }; 11 | 12 | type Event = DataChannelEvent; 13 | 14 | pub enum Command { 15 | // CreateOverlay, 16 | Event(UserId, Event), 17 | Start(StartScreenOpts), 18 | Stop, 19 | 20 | Destroy, 21 | } 22 | 23 | #[derive(Clone)] 24 | pub struct 
RemoteControl { 25 | sender: flume::Sender, 26 | } 27 | 28 | impl RemoteControl { 29 | pub fn new(app_handle: &AppHandle) -> Self { 30 | let (sender, receiver) = flume::bounded(1_000); 31 | let mut run_loop = RemoteControlRunLoop::new(receiver, &app_handle); 32 | 33 | tokio::spawn(async move { 34 | run_loop.run().await; 35 | error!("remote control run loop exited"); 36 | }); 37 | 38 | Self { sender } 39 | } 40 | 41 | pub fn event(&self, user_id: UserId, event: Event) { 42 | let _ = self.sender.try_send(Command::Event(user_id, event)); 43 | } 44 | 45 | pub fn start(&self, opts: StartScreenOpts) { 46 | let _ = self.sender.try_send(Command::Start(opts)); 47 | } 48 | 49 | pub fn stop(&self) { 50 | let _ = self.sender.try_send(Command::Stop); 51 | } 52 | 53 | pub fn destroy(&self) { 54 | let _ = self.sender.try_send(Command::Destroy); 55 | } 56 | } 57 | 58 | impl Drop for RemoteControl { 59 | fn drop(&mut self) { 60 | self.destroy(); 61 | } 62 | } 63 | 64 | enum Controller { 65 | Us, 66 | Peer(UserId), 67 | } 68 | 69 | pub struct RemoteControlRunLoop { 70 | overlay: Option, 71 | 72 | /// Who is in control 73 | controller: Controller, 74 | 75 | /// Command receiver 76 | receiver: flume::Receiver, 77 | 78 | app_handle: AppHandle, 79 | 80 | display_size: Option>, 81 | display_position: Option>, 82 | // enigo: Arc>, 83 | } 84 | 85 | impl Drop for RemoteControlRunLoop { 86 | fn drop(&mut self) { 87 | self.handle_stop() 88 | } 89 | } 90 | 91 | impl RemoteControlRunLoop { 92 | pub fn new(receiver: flume::Receiver, app_handle: &AppHandle) -> Self { 93 | // let enigo = Enigo::new(); 94 | Self { 95 | overlay: None, 96 | display_size: None, 97 | display_position: None, 98 | controller: Controller::Us, 99 | receiver, 100 | app_handle: app_handle.clone(), 101 | // enigo: Arc::new(Mutex::new(enigo)), 102 | } 103 | } 104 | 105 | pub async fn run(&mut self) { 106 | while let Ok(command) = self.receiver.recv_async().await { 107 | match command { 108 | Command::Event(user_id, event) => { 
109 | self.handle_event(user_id, event); 110 | } 111 | 112 | Command::Start(opts) => { 113 | self.handle_start(opts).await; 114 | } 115 | 116 | Command::Stop => { 117 | self.handle_stop(); 118 | } 119 | 120 | Command::Destroy => { 121 | self.handle_stop(); 122 | break; 123 | } 124 | } 125 | } 126 | } 127 | 128 | async fn handle_start(&mut self, opts: StartScreenOpts) { 129 | let Some(display_id) = opts.display_id else { 130 | return; 131 | }; 132 | 133 | // find display rect 134 | let Some(display) = cap::sharable::get_display_by_id(display_id).await else { 135 | println!("failed to get display "); 136 | return; 137 | }; 138 | 139 | println!("got display"); 140 | 141 | // Save display frame 142 | let frame = display.frame(); 143 | self.display_size = Some(PhysicalSize { 144 | width: frame.size.width, 145 | height: frame.size.height, 146 | }); 147 | self.display_position = Some(PhysicalPosition { 148 | x: frame.origin.x, 149 | y: frame.origin.y, 150 | }); 151 | 152 | // Create overlay 153 | let Ok(overlay) = create_or_get_overlay_window(&self.app_handle) else { 154 | error!("failed to create overlay"); 155 | return; 156 | }; 157 | self.overlay = Some(overlay.clone()); 158 | 159 | let size = LogicalSize { 160 | width: frame.size.width, 161 | height: frame.size.height, 162 | }; 163 | let position = LogicalPosition { 164 | x: frame.origin.x, 165 | y: frame.origin.y, 166 | }; 167 | 168 | /// Manage overlay size 169 | if let Some(overlay_frame) = self.app_handle.try_state::() { 170 | // Update values 171 | let _ = overlay_frame.set_size(size.clone()); 172 | let _ = overlay_frame.set_position(position.clone()); 173 | } else { 174 | // Save for first time 175 | let overlay_frame = OverlayFrame::new(size, position); 176 | let _ = self.app_handle.manage(overlay_frame); 177 | }; 178 | 179 | // set size and then show 180 | let _ = overlay.set_size(size.clone()); 181 | let _ = overlay.set_position(position.clone()); 182 | let _ = overlay.show(); 183 | 184 | let overlay_ = 
overlay.clone(); 185 | let handle_ = self.app_handle.clone(); 186 | 187 | overlay.on_window_event(move |event| match event { 188 | tauri::WindowEvent::Moved(position_) => { 189 | info!("moved {:#?}", position_); 190 | let Some(overlay_frame) = handle_.try_state::() else { 191 | return; 192 | }; 193 | let _ = overlay_.set_position(overlay_frame.get_position()); 194 | } 195 | 196 | tauri::WindowEvent::Resized(size_) => { 197 | info!("resized {:#?}", size_); 198 | let Some(overlay_frame) = handle_.try_state::() else { 199 | return; 200 | }; 201 | let _ = overlay_.set_size(overlay_frame.get_size()); 202 | } 203 | 204 | tauri::WindowEvent::Focused(focused) => { 205 | info!("focused {:#?}", focused); 206 | } 207 | _ => {} 208 | }); 209 | } 210 | 211 | fn handle_stop(&mut self) { 212 | // close overlay 213 | if let Some(overlay) = self.overlay.take() { 214 | let _ = overlay.destroy(); 215 | } 216 | 217 | // clean up 218 | self.display_size.take(); 219 | self.display_position.take(); 220 | } 221 | 222 | fn handle_event(&mut self, user_id: UserId, event: Event) { 223 | // let Some(display_size) = self.display_size else { 224 | // return; 225 | // }; 226 | // let Some(display_position) = self.display_position else { 227 | // return; 228 | // }; 229 | // let display_width = display_size.width; 230 | // let display_height = display_size.height; 231 | 232 | info!("got remote event {}", &user_id); 233 | 234 | // I run events here 235 | match event { 236 | DataChannelEvent::MouseMove { x: _, y: _ } => { 237 | // let my_x = (x * display_width) as i32; 238 | // let my_y = (y * display_height) as i32; 239 | 240 | if let Some(ref overlay) = self.overlay { 241 | emit_data_event( 242 | overlay, 243 | OverlayDataChannelEvent { 244 | id: user_id.0, 245 | event, 246 | }, 247 | ); 248 | } 249 | } 250 | DataChannelEvent::MouseDown { 251 | x: _, 252 | y: _, 253 | button: _, 254 | } => { 255 | // let my_x = (display_position.x + (x * display_width)) as i32; 256 | // let my_y = 
(display_position.y + (y * display_height)) as i32; 257 | 258 | // let enigo = Enigo::new(); 259 | // self.enigo.mouse_move_to(my_x, my_y); 260 | // self.enigo.mouse_click(MouseButton::Left); 261 | // // move back 262 | // self.enigo.mouse_move_to(my_x, my_y); 263 | } 264 | 265 | // Unrelated 266 | _ => {} 267 | } 268 | } 269 | } 270 | 271 | struct OverlayFrame { 272 | size: Arc>>, 273 | position: Arc>>, 274 | } 275 | 276 | impl OverlayFrame { 277 | pub fn new(size: LogicalSize, position: LogicalPosition) -> Self { 278 | Self { 279 | size: Arc::new(Mutex::new(size)), 280 | position: Arc::new(Mutex::new(position)), 281 | } 282 | } 283 | 284 | pub fn set_size(&self, input_size: LogicalSize) { 285 | if let Ok(mut size) = self.size.lock() { 286 | *size = input_size; 287 | } 288 | } 289 | 290 | pub fn set_position(&self, input_position: LogicalPosition) { 291 | if let Ok(mut position) = self.position.lock() { 292 | *position = input_position; 293 | } 294 | } 295 | 296 | pub fn get_position(&self) -> LogicalPosition { 297 | if let Ok(pos) = self.position.lock() { 298 | pos.clone() 299 | } else { 300 | LogicalPosition::new(0.0, 0.0) 301 | } 302 | } 303 | 304 | pub fn get_size(&self) -> LogicalSize { 305 | if let Ok(size) = self.size.lock() { 306 | size.clone() 307 | } else { 308 | LogicalSize::new(1.0, 1.0) 309 | } 310 | } 311 | } 312 | 313 | // struct Remote { 314 | // enigo: Enigo, 315 | // } 316 | 317 | // unsafe impl Send for Remote {} 318 | // unsafe impl Sync for Remote {} 319 | 320 | // impl Remote { 321 | // pub fn new() -> Self { 322 | // Self { 323 | // enigo: Enigo::default(), 324 | // } 325 | // } 326 | // } 327 | -------------------------------------------------------------------------------- /rtc/resampler.rs: -------------------------------------------------------------------------------- 1 | use rubato::{FftFixedIn, VecResampler}; 2 | 3 | use crate::rtc::audio::get_opus_samples_count; 4 | 5 | #[derive(Debug, Clone, Copy, PartialEq, Eq)] 6 | pub enum Chunk 
{ 7 | TenMs, 8 | #[allow(unused)] 9 | FiveMs, 10 | TwoAndHalfMs, 11 | } 12 | 13 | pub struct ResamplerConfig { 14 | pub input_sample_rate: u32, 15 | pub output_sample_rate: u32, 16 | pub channels: u16, 17 | pub chunk: Chunk, 18 | } 19 | 20 | pub struct Resampler { 21 | resampler: FftFixedIn, 22 | // resampler: FftFixedInOut, 23 | needs_resampling: bool, 24 | resampler_in_buffer: Vec>, 25 | resampler_out_buffer: Vec>, 26 | output_buffer: Vec, 27 | /// Interleaved output for a given frame (may consist of multiple 10ms chunks appeneded in a single vec) 28 | chunk_size: usize, 29 | channels: u16, 30 | } 31 | 32 | impl Resampler { 33 | pub fn new(config: ResamplerConfig) -> Self { 34 | info!( 35 | "input_sample_rate {} output_sample_rate {}", 36 | &config.input_sample_rate, &config.output_sample_rate, 37 | ); 38 | 39 | let samples_count_10ms = get_opus_samples_count(config.input_sample_rate, config.channels, 10); 40 | // 10 ms 41 | let chunk_size = if config.chunk == Chunk::TenMs { 42 | samples_count_10ms 43 | } else if config.chunk == Chunk::TwoAndHalfMs { 44 | samples_count_10ms / 4 45 | } else { 46 | samples_count_10ms / 2 47 | } / config.channels as usize; 48 | info!("resampler: input_sample_rate={}", config.input_sample_rate); 49 | info!("resampler: samples_count_10ms={}", samples_count_10ms); 50 | info!("resampler: chunk_size={}", chunk_size); 51 | // 52 | // let resampler = FftFixedOut::::new( 53 | let resampler = FftFixedIn::::new( 54 | config.input_sample_rate as usize, 55 | config.output_sample_rate as usize, 56 | // get 10ms 57 | chunk_size, 58 | 2, 59 | config.channels as usize, 60 | ) 61 | .expect("Failed to create resampler"); 62 | 63 | let needs_resampling = config.input_sample_rate != config.output_sample_rate; 64 | 65 | // Pre-allocate 66 | let resampler_in_buffer: Vec> = resampler.input_buffer_allocate(true); 67 | let resampler_out_buffer: Vec> = resampler.output_buffer_allocate(true); 68 | // max 120ms per spec 69 | let output_buffer: Vec = 
Vec::with_capacity(samples_count_10ms * 12); 70 | 71 | Self { 72 | resampler, 73 | needs_resampling, 74 | resampler_in_buffer, 75 | resampler_out_buffer, 76 | output_buffer, 77 | chunk_size, 78 | channels: config.channels, 79 | } 80 | } 81 | 82 | /// Process in 10 ms chunks 83 | pub fn process<'a, 'b>(&'a mut self, samples: &'b [f32]) -> &'a [f32] { 84 | if self.needs_resampling { 85 | // cleanup previous frame 86 | self.output_buffer.clear(); 87 | 88 | // we will furthur split this per channels before the process 89 | let chunk_size_before_split = self.chunk_size * self.channels as usize; 90 | // info!( 91 | // "resampler.process: samples={} channels={} chunks={}", 92 | // &samples.len(), 93 | // self.channels, 94 | // &samples.len() / chunk_size_before_split 95 | // ); 96 | 97 | // Resample = do synchronous resampling 98 | let in_buffer = self.resampler_in_buffer.as_mut(); 99 | let mut processed_samples = 0; 100 | 101 | for chunk in samples.chunks(chunk_size_before_split) { 102 | // convert to non-interleaved 103 | let _ = Self::process_for_resampler(chunk, self.channels, in_buffer); 104 | 105 | // info!("chunk len={}", chunk.len()); 106 | 107 | // process chunk 108 | self 109 | .resampler 110 | .process_into_buffer(in_buffer, self.resampler_out_buffer.as_mut_slice(), None) 111 | .expect("resampler failed to process"); 112 | 113 | // Insert into output buffer 114 | let samples_per_channel = self.resampler_out_buffer[0].len(); 115 | 116 | trace!( 117 | "resampler.process: left={:?} right={:?}", 118 | &self.resampler_out_buffer[0][0..40], 119 | &self.resampler_out_buffer[1][0..40] 120 | ); 121 | 122 | assert!( 123 | self.resampler_out_buffer[0].len() == self.resampler_out_buffer[1].len(), 124 | "resampler output buffer is not the same size" 125 | ); 126 | assert!(self.channels == 2, "channels is not 2"); 127 | 128 | // dbg!(samples_per_channel); 129 | // the order is important 130 | for sample_index in 0..samples_per_channel { 131 | for channel_index in 
0..self.channels as usize { 132 | processed_samples += 1; 133 | self 134 | .output_buffer 135 | .push(self.resampler_out_buffer[channel_index][sample_index]); 136 | } 137 | } 138 | } 139 | 140 | // output all chunks as a whole 141 | &self.output_buffer[0..processed_samples] 142 | } else { 143 | // no need to touch 144 | self.output_buffer.clear(); 145 | self.output_buffer.extend_from_slice(samples); 146 | &self.output_buffer[0..samples.len()] 147 | // return samples; 148 | } 149 | } 150 | 151 | /// Converts interleaved samples to non-interleaved, 152 | /// output how many noninterleaved vecs should be used 153 | fn process_for_resampler( 154 | input_samples: &[f32], 155 | channels: u16, 156 | processed_buffer: &mut Vec>, 157 | ) -> usize { 158 | processed_buffer[0].clear(); 159 | processed_buffer[1].clear(); 160 | 161 | if channels == 2 { 162 | for (_i, pair) in input_samples[..].chunks_exact(2).enumerate() { 163 | processed_buffer[0].push(pair[0]); 164 | processed_buffer[1].push(pair[1]); 165 | } 166 | } else { 167 | for sample in input_samples { 168 | processed_buffer[0].push(*sample); 169 | // added by mo to fix the single channel bug 170 | processed_buffer[1].push(*sample); 171 | // if a channel sub vec length = 0, it is marked as inactive by resampler 172 | } 173 | } 174 | 175 | input_samples.len() / channels as usize 176 | } 177 | } 178 | -------------------------------------------------------------------------------- /rtc/sdp.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | 3 | use super::peer::channel::Signal; 4 | 5 | #[derive(Serialize, Deserialize, Clone, Debug, PartialEq)] 6 | #[serde(tag = "type")] 7 | #[serde(rename_all = "lowercase")] 8 | pub enum SignalJson { 9 | Sdp { sdp: String }, 10 | 11 | Candidate { candidate: String }, 12 | } 13 | 14 | pub fn parse_signal_json(_signal_json: String) -> Signal { 15 | todo!(); 16 | } 17 | 18 | // mod tests { 19 | 20 | // #[test] 21 | 
// fn test_parse_sdp() { 22 | // let parsed = serde_json::from_str::("{\"type\":\"sdp\", \"sdp\": \"\"}").expect(""); 23 | // assert_eq!( 24 | // parsed, 25 | // SignalJson::Sdp { 26 | // sdp: String::from("") 27 | // } 28 | // ); 29 | // } 30 | // } 31 | -------------------------------------------------------------------------------- /rtc/signal.rs: -------------------------------------------------------------------------------- 1 | use super::{ 2 | peer::channel::{Sdp, Signal}, 3 | utils::UserId, 4 | }; 5 | use std::collections::{HashMap, VecDeque}; 6 | 7 | pub struct PendingSignals { 8 | signals: HashMap>, 9 | } 10 | 11 | impl PendingSignals { 12 | pub fn init() -> Self { 13 | Self { 14 | signals: HashMap::new(), 15 | } 16 | } 17 | 18 | pub fn add(&mut self, user_id: &UserId, signal: Signal) { 19 | if let Some(user_signals) = self.signals.get_mut(user_id) { 20 | // only add if candidate 21 | let should_add = match signal { 22 | Signal::Candidate(_) => true, 23 | _ => false, 24 | }; 25 | 26 | if should_add { 27 | user_signals.push_back(signal); 28 | } else { 29 | warn!( 30 | "did not add signal to pending queue despite peer not being present 1 {:?}", 31 | signal 32 | ); 33 | } 34 | } else { 35 | // only add if sdp 36 | let should_add = match signal { 37 | Signal::Sdp(_) => true, 38 | _ => false, 39 | }; 40 | 41 | if should_add { 42 | self 43 | .signals 44 | .insert(user_id.to_owned(), VecDeque::from([signal])); 45 | } else { 46 | warn!( 47 | "did not add signal to pending queue despite peer not being present 2 {:?}", 48 | signal 49 | ); 50 | } 51 | } 52 | } 53 | 54 | fn should_return(&self, user_id: &UserId, initial_offerer: bool, is_drain_all: bool) -> bool { 55 | if let Some(user_signals) = self.signals.get(user_id) { 56 | let signal = user_signals.front(); 57 | 58 | // check first signle 59 | match signal { 60 | // only return if it's useful and doesn't cause error 61 | Some(Signal::Sdp(ref sdp)) => match &sdp { 62 | Sdp::Offer(_) => !initial_offerer, 63 | 64 
| // DO NOT SET ANSWER WHEN WE JUST SENT OFFER 65 | Sdp::Answer(_) => false, 66 | // RTCSdpType::Answer => initial_offerer, 67 | }, 68 | // candidates are safe because we've already checked at the front of deque 69 | Some(Signal::Candidate(_)) => { 70 | // only allow if we're not draining all otherwise this is the first one and we should discard the whole thing 71 | !is_drain_all 72 | } 73 | _ => false, 74 | } 75 | } else { 76 | false 77 | } 78 | } 79 | 80 | /// Check if should return and return all signals at once 81 | pub fn drain(&mut self, user_id: &UserId, initial_offerer: bool) -> Option> { 82 | let should_return = self.should_return(user_id, initial_offerer, true); 83 | if self.signals.get_mut(user_id).is_some() { 84 | // it's not in a good shape, clean it and return None 85 | if !should_return { 86 | self.remove(user_id); 87 | None 88 | } else { 89 | self.remove(user_id) 90 | // Some(user_signals.to_owned()) 91 | } 92 | } else { 93 | self.remove(user_id); 94 | None 95 | } 96 | } 97 | 98 | /// Get all offers for user and removes all (no checks) 99 | pub fn remove(&mut self, user_id: &UserId) -> Option> { 100 | self.signals.remove(user_id) 101 | } 102 | 103 | /// Get all offers and removes all 104 | pub fn reset(&mut self) { 105 | self.signals.clear(); 106 | } 107 | } 108 | -------------------------------------------------------------------------------- /rtc/utils.rs: -------------------------------------------------------------------------------- 1 | use std::fmt; 2 | 3 | use serde::Serialize; 4 | 5 | #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize)] 6 | pub struct UserId(pub String); 7 | 8 | impl fmt::Display for UserId { 9 | #[inline] 10 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 11 | fmt::Display::fmt(&*self.0, f) 12 | } 13 | } 14 | 15 | impl From for UserId { 16 | fn from(user_id: String) -> Self { 17 | UserId(user_id) 18 | } 19 | } 20 | 21 | #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize)] 
22 | pub struct CallId(pub String); 23 | impl fmt::Display for CallId { 24 | #[inline] 25 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 26 | fmt::Display::fmt(&*self.0, f) 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /tauri_commands/devices_list.rs: -------------------------------------------------------------------------------- 1 | extern crate anyhow; 2 | extern crate cpal; 3 | use cpal::traits::{DeviceTrait, HostTrait}; 4 | use serde::{Deserialize, Serialize}; 5 | 6 | fn enumerate() -> Result, anyhow::Error> { 7 | // let available_hosts = cpal::available_hosts(); 8 | let mut audio_devices = Vec::new(); 9 | 10 | #[cfg(any( 11 | not(any(target_os = "linux", target_os = "dragonfly", target_os = "freebsd")), 12 | not(feature = "jack") 13 | ))] 14 | let hosts = cpal::available_hosts(); 15 | if hosts.is_empty() { 16 | return Ok(audio_devices); 17 | } 18 | let host = cpal::default_host(); 19 | 20 | // for host_id in available_hosts { 21 | // println!("{}", host_id.name()); 22 | // let host = cpal::host_from_id(host_id)?; 23 | 24 | let default_in = host 25 | .default_input_device() 26 | .map(|e| e.name().unwrap_or("Audio Device".to_string())); 27 | 28 | let default_out = host 29 | .default_output_device() 30 | .map(|e| e.name().unwrap_or("Audio Device".to_string())); 31 | 32 | let default_in_name: String = match default_in.as_deref() { 33 | Some(name) => name.to_owned(), 34 | None => "".to_string(), 35 | }; 36 | 37 | let default_out_name: String = match default_out.as_deref() { 38 | Some(name) => name.to_owned(), 39 | None => "".to_string(), 40 | }; 41 | 42 | if let Ok(devices) = host.devices() { 43 | for (_device_index, device) in devices.enumerate() { 44 | let is_default_device: bool; 45 | 46 | if default_in_name == device.name().unwrap_or("Audio Device".to_string()) { 47 | is_default_device = true; 48 | } else if default_out_name == device.name().unwrap_or("Audio Device".to_string()) { 49 | 
is_default_device = true; 50 | } else { 51 | is_default_device = false; 52 | } 53 | 54 | audio_devices.push(AudioDevice { 55 | kind: get_device_type(&device), 56 | name: device.name().unwrap_or("Audio Device".to_string()), 57 | default: is_default_device, 58 | }); 59 | } 60 | } 61 | 62 | Ok(audio_devices) 63 | } 64 | 65 | fn get_device_type(device: &cpal::Device) -> AudioDeviceType { 66 | let input_configs = match device.supported_input_configs() { 67 | Ok(f) => f.collect(), 68 | Err(e) => { 69 | println!(" Error getting supported input configs: {:?}", e); 70 | Vec::new() 71 | } 72 | }; 73 | if !input_configs.is_empty() { 74 | AudioDeviceType::Input 75 | } else { 76 | AudioDeviceType::Output 77 | } 78 | } 79 | 80 | #[derive(Serialize, Deserialize)] 81 | enum AudioDeviceType { 82 | Output, 83 | Input, 84 | } 85 | 86 | #[derive(Serialize, Deserialize)] 87 | pub struct AudioDevice { 88 | kind: AudioDeviceType, 89 | name: String, 90 | default: bool, 91 | } 92 | 93 | #[tauri::command] 94 | pub async fn devices_list() -> Result, ()> { 95 | if let Ok(result) = enumerate() { 96 | Ok(result) 97 | } else { 98 | println!("failed to get device list"); 99 | Ok(vec![]) 100 | } 101 | } 102 | 103 | use async_process::Command; 104 | 105 | #[tauri::command] 106 | pub async fn get_speaker_volume() -> Result { 107 | let output = Command::new("osascript") 108 | .arg("-e") 109 | .arg("set outputVolume to output volume of (get volume settings)") 110 | .output() 111 | .await 112 | .expect("failed to run osascript"); 113 | 114 | let volume = String::from_utf8(output.stdout).unwrap(); 115 | 116 | Ok(volume.to_string()) 117 | } 118 | 119 | #[tauri::command] 120 | pub async fn get_is_speaker_muted() -> Result { 121 | let get_sound_code = r#" 122 | -- Check if Sound is Muted and Output 0 or 1 123 | tell application "System Events" 124 | set isMuted to output muted of (get volume settings) 125 | if isMuted then 126 | set mutedStatus to 1 -- 1 indicates muted 127 | else 128 | set mutedStatus to 
0 -- 0 indicates not muted 129 | end if 130 | end tell 131 | 132 | return mutedStatus 133 | "#; 134 | 135 | let output = Command::new("osascript") 136 | .arg("-e") 137 | .arg(get_sound_code) 138 | .output() 139 | .await 140 | .expect("failed to run osascript"); 141 | 142 | let muted = String::from_utf8(output.stdout).unwrap(); 143 | 144 | Ok(muted.to_string()) 145 | } 146 | -------------------------------------------------------------------------------- /tauri_commands/sound_status.rs: -------------------------------------------------------------------------------- 1 | extern crate anyhow; 2 | extern crate cpal; 3 | 4 | 5 | 6 | use async_process::Command; 7 | 8 | #[tauri::command] 9 | pub async fn get_is_music_playing() -> Result { 10 | let playing_music_state = r#" 11 | on is_running(appName) 12 | tell application "System Events" to (name of processes) contains appName 13 | end is_running 14 | 15 | 16 | set QTRunning to is_running("QuickTime Player") 17 | set MusicRunning to is_running("Music") 18 | set SpotifyRunning to is_running("Spotify") 19 | set TVRunning to is_running("TV") 20 | 21 | if QTRunning then 22 | tell application "QuickTime Player" to set isQTplaying to (playing of documents contains true) 23 | else 24 | set isQTplaying to false 25 | end if 26 | 27 | if MusicRunning then 28 | tell application "Music" to set isMusicPlaying to (player state is playing) 29 | else 30 | set isMusicPlaying to false 31 | end if 32 | 33 | if SpotifyRunning then 34 | tell application "Spotify" to set isSpotifyPlaying to (player state is playing) 35 | else 36 | set isSpotifyPlaying to false 37 | end if 38 | 39 | if TVRunning then 40 | tell application "TV" to set isTVPlaying to (player state is playing) 41 | else 42 | set isTVPlaying to false 43 | end if 44 | 45 | if isMusicPlaying or isSpotifyPlaying or isQTplaying or isTVPlaying then 46 | return "1" 47 | else 48 | return "0" 49 | end if 50 | "#; 51 | 52 | let output = Command::new("osascript") 53 | .arg("-e") 54 | 
.arg(playing_music_state) 55 | .output() 56 | .await 57 | .expect("failed to run osascript"); 58 | 59 | let playing = String::from_utf8(output.stdout).unwrap(); 60 | 61 | Ok(playing.to_string()) 62 | } 63 | -------------------------------------------------------------------------------- /tauri_commands/sound_tools.rs: -------------------------------------------------------------------------------- 1 | use cap::{ 2 | cf, 3 | cidre::{ 4 | arc, 5 | core_audio::{ 6 | AudioObjId, AudioObjPropAddr, AudioObjPropElement, AudioObjPropScope, AudioObjPropSelector, 7 | }, 8 | }, 9 | }; 10 | 11 | #[tauri::command] 12 | pub fn get_default_speaker() -> Result { 13 | let def_addr = AudioObjPropAddr { 14 | selector: AudioObjPropSelector::HARDWARE_DEFAULT_OUTPUT_DEVICE, 15 | scope: AudioObjPropScope::GLOBAL, 16 | element: AudioObjPropElement::MAIN, 17 | }; 18 | 19 | let device: AudioObjId = AudioObjId::SYS_OBJECT.prop(&def_addr).unwrap(); 20 | let name_addr = AudioObjPropAddr { 21 | selector: AudioObjPropSelector::NAME, 22 | scope: AudioObjPropScope::GLOBAL, 23 | element: AudioObjPropElement::MAIN, 24 | }; 25 | 26 | let name: arc::R = device.cf_prop(&name_addr).unwrap(); 27 | let device_name = name.to_string(); 28 | 29 | Ok(device_name) 30 | } 31 | 32 | // #[cfg(test)] 33 | // mod test { 34 | // use crate::macos; 35 | 36 | // #[test] 37 | // fn key_pressed() { 38 | // let hi = macos::get_default_speaker(); 39 | // } 40 | // } 41 | -------------------------------------------------------------------------------- /video/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "video" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [dependencies] 9 | thiserror = "1" 10 | 11 | [build-dependencies] 12 | pkg-config = "0.3.5" 13 | target_build_utils = "0.3" 14 | bindgen = "0.65" 15 | 
-------------------------------------------------------------------------------- /video/build.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | env, fs, 3 | path::{Path, PathBuf}, 4 | println, 5 | }; 6 | 7 | #[cfg(all(target_os = "linux", feature = "linux-pkg-config"))] 8 | fn link_pkg_config(name: &str) -> Vec { 9 | // sometimes an override is needed 10 | let pc_name = match name { 11 | "libvpx" => "vpx", 12 | _ => name, 13 | }; 14 | let lib = pkg_config::probe_library(pc_name) 15 | .expect(format!( 16 | "unable to find '{pc_name}' development headers with pkg-config (feature linux-pkg-config is enabled). 17 | try installing '{pc_name}-dev' from your system package manager.").as_str()); 18 | 19 | lib.include_paths 20 | } 21 | #[cfg(not(all(target_os = "linux", feature = "linux-pkg-config")))] 22 | fn link_pkg_config(_name: &str) -> Vec { 23 | unimplemented!() 24 | } 25 | 26 | /// Link vcpkg package. 27 | fn link_vcpkg(mut path: PathBuf, name: &str) -> PathBuf { 28 | let target_os = std::env::var("CARGO_CFG_TARGET_OS").unwrap(); 29 | let mut target_arch = std::env::var("CARGO_CFG_TARGET_ARCH").unwrap(); 30 | if target_arch == "x86_64" { 31 | target_arch = "x64".to_owned(); 32 | } else if target_arch == "x86" { 33 | target_arch = "x86".to_owned(); 34 | } else if target_arch == "aarch64" { 35 | target_arch = "arm64".to_owned(); 36 | } else { 37 | target_arch = "arm".to_owned(); 38 | } 39 | let mut target = if target_os == "macos" { 40 | if target_arch == "x64" { 41 | "x64-osx".to_owned() 42 | } else if target_arch == "arm64" { 43 | "arm64-osx".to_owned() 44 | } else { 45 | format!("{}-{}", target_arch, target_os) 46 | } 47 | } else if target_os == "windows" { 48 | "x64-windows-static".to_owned() 49 | } else { 50 | format!("{}-{}", target_arch, target_os) 51 | }; 52 | if target_arch == "x86" { 53 | target = target.replace("x64", "x86"); 54 | } 55 | println!("cargo:info={}", target); 56 | path.push("installed"); 57 
| path.push(target); 58 | println!( 59 | "{}", 60 | format!( 61 | "cargo:rustc-link-lib=static={}", 62 | name.trim_start_matches("lib") 63 | ) 64 | ); 65 | println!( 66 | "{}", 67 | format!( 68 | "cargo:rustc-link-search={}", 69 | path.join("lib").to_str().unwrap() 70 | ) 71 | ); 72 | let include = path.join("include"); 73 | println!("{}", format!("cargo:include={}", include.to_str().unwrap())); 74 | include 75 | } 76 | 77 | /// Link homebrew package(for Mac M1). 78 | fn link_homebrew_m1(name: &str) -> PathBuf { 79 | let target_os = std::env::var("CARGO_CFG_TARGET_OS").unwrap(); 80 | let target_arch = std::env::var("CARGO_CFG_TARGET_ARCH").unwrap(); 81 | if target_os != "macos" || target_arch != "aarch64" { 82 | panic!("Couldn't find VCPKG_ROOT, also can't fallback to homebrew because it's only for macos aarch64."); 83 | } 84 | let mut path = PathBuf::from("/opt/homebrew/Cellar"); 85 | path.push(name); 86 | let entries = if let Ok(dir) = std::fs::read_dir(&path) { 87 | dir 88 | } else { 89 | panic!( 90 | "Could not find package in {}. Make sure your homebrew and package {} are all installed.", 91 | path.to_str().unwrap(), 92 | &name 93 | ); 94 | }; 95 | let mut directories = entries 96 | .into_iter() 97 | .filter(|x| x.is_ok()) 98 | .map(|x| x.unwrap().path()) 99 | .filter(|x| x.is_dir()) 100 | .collect::>(); 101 | // Find the newest version. 102 | directories.sort_unstable(); 103 | if directories.is_empty() { 104 | panic!( 105 | "There's no installed version of {} in /opt/homebrew/Cellar", 106 | name 107 | ); 108 | } 109 | path.push(directories.pop().unwrap()); 110 | // Link the library. 111 | println!( 112 | "{}", 113 | format!( 114 | "cargo:rustc-link-lib=static={}", 115 | name.trim_start_matches("lib") 116 | ) 117 | ); 118 | // Add the library path. 119 | println!( 120 | "{}", 121 | format!( 122 | "cargo:rustc-link-search={}", 123 | path.join("lib").to_str().unwrap() 124 | ) 125 | ); 126 | // Add the include path. 
127 | let include = path.join("include"); 128 | println!("{}", format!("cargo:include={}", include.to_str().unwrap())); 129 | include 130 | } 131 | 132 | /// Find package. By default, it will try to find vcpkg first, then homebrew(currently only for Mac M1). 133 | /// If building for linux and feature "linux-pkg-config" is enabled, will try to use pkg-config 134 | /// unless check fails (e.g. NO_PKG_CONFIG_libyuv=1) 135 | fn find_package(name: &str) -> Vec { 136 | let no_pkg_config_var_name = format!("NO_PKG_CONFIG_{name}"); 137 | println!("cargo:rerun-if-env-changed={no_pkg_config_var_name}"); 138 | if cfg!(all(target_os = "linux", feature = "linux-pkg-config")) 139 | && std::env::var(no_pkg_config_var_name).as_deref() != Ok("1") 140 | { 141 | link_pkg_config(name) 142 | } else if let Ok(vcpkg_root) = std::env::var("VCPKG_ROOT") { 143 | vec![link_vcpkg(vcpkg_root.into(), name)] 144 | } else { 145 | // Try using homebrew 146 | vec![link_homebrew_m1(name)] 147 | } 148 | } 149 | 150 | fn generate_bindings( 151 | ffi_header: &Path, 152 | include_paths: &[PathBuf], 153 | ffi_rs: &Path, 154 | exact_file: &Path, 155 | regex: &str, 156 | ) { 157 | let mut b = bindgen::builder() 158 | .header(ffi_header.to_str().unwrap()) 159 | .allowlist_type(regex) 160 | .allowlist_var(regex) 161 | .allowlist_function(regex) 162 | .rustified_enum(regex) 163 | .trust_clang_mangling(false) 164 | .layout_tests(false) // breaks 32/64-bit compat 165 | .generate_comments(false); // comments have prefix /*!\ 166 | 167 | for dir in include_paths { 168 | b = b.clang_arg(format!("-I{}", dir.display())); 169 | } 170 | 171 | b.generate().unwrap().write_to_file(ffi_rs).unwrap(); 172 | fs::copy(ffi_rs, exact_file).ok(); // ignore failure 173 | } 174 | 175 | fn gen_vcpkg_package(package: &str, ffi_header: &str, generated: &str, regex: &str) { 176 | let includes = find_package(package); 177 | let src_dir = env::var_os("CARGO_MANIFEST_DIR").unwrap(); 178 | let src_dir = Path::new(&src_dir); 179 | let 
out_dir = env::var_os("OUT_DIR").unwrap(); 180 | let out_dir = Path::new(&out_dir); 181 | 182 | let ffi_header = src_dir.join("src").join("bindings").join(ffi_header); 183 | println!("rerun-if-changed={}", ffi_header.display()); 184 | for dir in &includes { 185 | println!("rerun-if-changed={}", dir.display()); 186 | } 187 | 188 | let ffi_rs = out_dir.join(generated); 189 | let exact_file = src_dir.join("generated").join(generated); 190 | generate_bindings(&ffi_header, &includes, &ffi_rs, &exact_file, regex); 191 | } 192 | 193 | fn main() { 194 | // note: all link symbol names in x86 (32-bit) are prefixed wth "_". 195 | // run "rustup show" to show current default toolchain, if it is stable-x86-pc-windows-msvc, 196 | // please install x64 toolchain by "rustup toolchain install stable-x86_64-pc-windows-msvc", 197 | // then set x64 to default by "rustup default stable-x86_64-pc-windows-msvc" 198 | let target = target_build_utils::TargetInfo::new(); 199 | if target.unwrap().target_pointer_width() != "64" { 200 | // panic!("Only support 64bit system"); 201 | } 202 | env::remove_var("CARGO_CFG_TARGET_FEATURE"); 203 | env::set_var("CARGO_CFG_TARGET_FEATURE", "crt-static"); 204 | 205 | find_package("libyuv"); 206 | gen_vcpkg_package("libvpx", "vpx_ffi.h", "vpx_ffi.rs", "^[vV].*"); 207 | // gen_vcpkg_package("aom", "aom_ffi.h", "aom_ffi.rs", "^(aom|AOM|OBU|AV1).*"); 208 | gen_vcpkg_package("libyuv", "yuv_ffi.h", "yuv_ffi.rs", ".*"); 209 | 210 | // there is problem with cfg(target_os) in build.rs, so use our workaround 211 | let target_os = std::env::var("CARGO_CFG_TARGET_OS").unwrap(); 212 | if target_os == "ios" { 213 | // nothing 214 | } else if target_os == "android" { 215 | println!("cargo:rustc-cfg=android"); 216 | } else if cfg!(windows) { 217 | // The first choice is Windows because DXGI is amazing. 218 | println!("cargo:rustc-cfg=dxgi"); 219 | } else if cfg!(target_os = "macos") { 220 | // Quartz is second because macOS is the (annoying) exception. 
221 | println!("cargo:rustc-cfg=quartz"); 222 | } else if cfg!(unix) { 223 | // On UNIX we pray that X11 (with XCB) is available. 224 | println!("cargo:rustc-cfg=x11"); 225 | } 226 | } 227 | -------------------------------------------------------------------------------- /video/ffi.h: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include -------------------------------------------------------------------------------- /video/src/bindings/vpx_ffi.h: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include -------------------------------------------------------------------------------- /video/src/bindings/yuv_ffi.h: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | #include 5 | #include 6 | #include -------------------------------------------------------------------------------- /video/src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod vpx; 2 | pub mod vpxcodec; 3 | -------------------------------------------------------------------------------- /video/src/vpx.rs: -------------------------------------------------------------------------------- 1 | #![allow(non_camel_case_types)] 2 | #![allow(non_snake_case)] 3 | #![allow(non_upper_case_globals)] 4 | #![allow(improper_ctypes)] 5 | 6 | // VP9 7 | #[repr(i32)] 8 | pub enum AQ_MODE { 9 | NO_AQ = 0, 10 | VARIANCE_AQ = 1, 11 | COMPLEXITY_AQ = 2, 12 | CYCLIC_REFRESH_AQ = 3, 13 | EQUATOR360_AQ = 4, 14 | // AQ based on lookahead temporal 15 | // variance (only valid for altref frames) 16 | LOOKAHEAD_AQ = 5, 17 | } 18 | 19 | // Back compat 20 | pub use vpx_codec_err_t::*; 21 | 22 | include!(concat!(env!("OUT_DIR"), 
"/vpx_ffi.rs")); 23 | -------------------------------------------------------------------------------- /video/src/vpxcodec.rs: -------------------------------------------------------------------------------- 1 | //! Rust interface to libvpx encoder 2 | //! 3 | //! This crate provides a Rust API to use 4 | //! [libvpx](https://en.wikipedia.org/wiki/Libvpx) for encoding images. 5 | //! 6 | //! It it based entirely on code from [srs](https://crates.io/crates/srs). 7 | //! Compared to the original `srs`, this code has been simplified for use as a 8 | //! library and updated to add support for both the VP8 codec and (optionally) 9 | //! the VP9 codec. 10 | //! 11 | //! # Optional features 12 | //! 13 | //! Compile with the cargo feature `vp9` to enable support for the VP9 codec. 14 | //! 15 | //! # Example 16 | //! 17 | //! An example of using `vpx-encode` can be found in the [`record-screen`]() 18 | //! program. The source code for `record-screen` is in the [vpx-encode git 19 | //! repository](). 20 | //! 21 | //! # Contributing 22 | //! 23 | //! All contributions are appreciated. 
24 | 25 | // vpx_sys is provided by the `env-libvpx-sys` crate 26 | 27 | #![cfg_attr( 28 | feature = "backtrace", 29 | feature(error_generic_member_access, provide_any) 30 | )] 31 | 32 | use std::{ 33 | mem::MaybeUninit, 34 | os::raw::{c_int, c_uint, c_ulong}, 35 | }; 36 | 37 | #[cfg(feature = "backtrace")] 38 | use std::backtrace::Backtrace; 39 | use std::{ptr, slice}; 40 | 41 | use thiserror::Error; 42 | 43 | use super::vpx as vpx_sys; 44 | 45 | use vpx_sys::vp8e_enc_control_id::*; 46 | use vpx_sys::vpx_codec_cx_pkt_kind::VPX_CODEC_CX_FRAME_PKT; 47 | use vpx_sys::*; 48 | 49 | #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] 50 | pub enum VideoCodecId { 51 | VP8, 52 | VP9, 53 | } 54 | pub struct Encoder { 55 | ctx: vpx_codec_ctx_t, 56 | width: usize, 57 | height: usize, 58 | } 59 | 60 | #[derive(Debug, Error)] 61 | #[error("VPX encode error: {msg}")] 62 | pub struct Error { 63 | msg: String, 64 | // #[cfg(feature = "backtrace")] 65 | // #[backtrace] 66 | // backtrace: Backtrace, 67 | } 68 | 69 | impl From for Error { 70 | fn from(msg: String) -> Self { 71 | Self { 72 | msg, 73 | #[cfg(feature = "backtrace")] 74 | backtrace: Backtrace::capture(), 75 | } 76 | } 77 | } 78 | 79 | pub type Result = std::result::Result; 80 | 81 | macro_rules! call_vpx { 82 | ($x:expr) => {{ 83 | let result = unsafe { $x }; // original expression 84 | let result_int = unsafe { std::mem::transmute::<_, i32>(result) }; 85 | // if result != VPX_CODEC_OK { 86 | if result_int != 0 { 87 | return Err(Error::from(format!( 88 | "Function call failed (error code {}).", 89 | result_int 90 | ))); 91 | } 92 | result 93 | }}; 94 | } 95 | 96 | macro_rules! 
call_vpx_ptr { 97 | ($x:expr) => {{ 98 | let result = unsafe { $x }; // original expression 99 | if result.is_null() { 100 | return Err(Error::from("Bad pointer.".to_string())); 101 | } 102 | result 103 | }}; 104 | } 105 | 106 | impl Encoder { 107 | pub fn new(config: Config) -> Result { 108 | let i = match config.codec { 109 | VideoCodecId::VP8 => call_vpx_ptr!(vpx_codec_vp8_cx()), 110 | VideoCodecId::VP9 => call_vpx_ptr!(vpx_codec_vp9_cx()), 111 | }; 112 | 113 | if config.width % 2 != 0 { 114 | return Err(Error::from("Width must be divisible by 2".to_string())); 115 | } 116 | if config.height % 2 != 0 { 117 | return Err(Error::from("Height must be divisible by 2".to_string())); 118 | } 119 | 120 | let c = MaybeUninit::zeroed(); 121 | let mut c = unsafe { c.assume_init() }; 122 | call_vpx!(vpx_codec_enc_config_default(i, &mut c, 0)); 123 | 124 | c.g_w = config.width; 125 | c.g_h = config.height; 126 | c.g_timebase.num = config.timebase[0]; 127 | c.g_timebase.den = config.timebase[1]; 128 | c.rc_target_bitrate = config.bitrate; 129 | 130 | c.rc_undershoot_pct = 95; 131 | // When the data buffer falls below this percentage of fullness, a dropped frame is indicated. Set the threshold to zero (0) to disable this feature. 132 | // In dynamic scenes, low bitrate gets low fps while high bitrate gets high fps. 133 | c.rc_dropframe_thresh = 25; 134 | c.g_error_resilient = VPX_ERROR_RESILIENT_DEFAULT; 135 | // https://developers.google.com/media/vp9/bitrate-modes/ 136 | // Constant Bitrate mode (CBR) is recommended for live streaming with VP9. 
137 | c.rc_end_usage = vpx_rc_mode::VPX_CBR; 138 | 139 | c.g_threads = 8; 140 | c.g_error_resilient = VPX_ERROR_RESILIENT_DEFAULT; 141 | 142 | let ctx = MaybeUninit::zeroed(); 143 | let mut ctx = unsafe { ctx.assume_init() }; 144 | 145 | match config.codec { 146 | VideoCodecId::VP8 => { 147 | call_vpx!(vpx_codec_enc_init_ver( 148 | &mut ctx, 149 | i, 150 | &c, 151 | 0, 152 | vpx_sys::VPX_ENCODER_ABI_VERSION as i32 153 | )); 154 | } 155 | VideoCodecId::VP9 => { 156 | call_vpx!(vpx_codec_enc_init_ver( 157 | &mut ctx, 158 | i, 159 | &c, 160 | 0, 161 | vpx_sys::VPX_ENCODER_ABI_VERSION as i32 162 | )); 163 | // set encoder internal speed settings 164 | call_vpx!(vpx_codec_control_( 165 | &mut ctx, 166 | VP8E_SET_CPUUSED as _, 167 | 6 as c_int 168 | )); 169 | // set row level multi-threading 170 | call_vpx!(vpx_codec_control_( 171 | &mut ctx, 172 | VP9E_SET_ROW_MT as _, 173 | 1 as c_int 174 | )); 175 | } 176 | }; 177 | 178 | Ok(Self { 179 | ctx, 180 | width: config.width as usize, 181 | height: config.height as usize, 182 | }) 183 | } 184 | 185 | pub fn encode(&mut self, pts: i64, data: &[u8]) -> Result { 186 | assert!(2 * data.len() >= 3 * self.width * self.height); 187 | 188 | let image = MaybeUninit::zeroed(); 189 | let mut image = unsafe { image.assume_init() }; 190 | 191 | call_vpx_ptr!(vpx_img_wrap( 192 | &mut image, 193 | vpx_img_fmt::VPX_IMG_FMT_I420, 194 | self.width as _, 195 | self.height as _, 196 | 1, 197 | data.as_ptr() as _, 198 | )); 199 | 200 | call_vpx!(vpx_codec_encode( 201 | &mut self.ctx, 202 | &image, 203 | pts, 204 | 1, // Duration 205 | 0, // Flags 206 | vpx_sys::VPX_DL_REALTIME as c_ulong, 207 | )); 208 | 209 | Ok(Packets { 210 | ctx: &mut self.ctx, 211 | iter: ptr::null(), 212 | }) 213 | } 214 | 215 | pub fn finish(mut self) -> Result { 216 | call_vpx!(vpx_codec_encode( 217 | &mut self.ctx, 218 | ptr::null(), 219 | -1, // PTS 220 | 1, // Duration 221 | 0, // Flags 222 | vpx_sys::VPX_DL_REALTIME as c_ulong, 223 | )); 224 | 225 | Ok(Finish { 226 | 
enc: self, 227 | iter: ptr::null(), 228 | }) 229 | } 230 | } 231 | 232 | impl Drop for Encoder { 233 | fn drop(&mut self) { 234 | unsafe { 235 | let result = vpx_codec_destroy(&mut self.ctx); 236 | if result != vpx_sys::VPX_CODEC_OK { 237 | eprintln!("failed to destroy vpx codec: {result:?}"); 238 | } 239 | } 240 | } 241 | } 242 | 243 | #[derive(Clone, Copy, Debug)] 244 | pub struct Frame<'a> { 245 | /// Compressed data. 246 | pub data: &'a [u8], 247 | /// Whether the frame is a keyframe. 248 | pub key: bool, 249 | /// Presentation timestamp (in timebase units). 250 | pub pts: i64, 251 | } 252 | 253 | #[derive(Clone, Copy, Debug)] 254 | pub struct Config { 255 | /// The width (in pixels). 256 | pub width: c_uint, 257 | /// The height (in pixels). 258 | pub height: c_uint, 259 | /// The timebase numerator and denominator (in seconds). 260 | pub timebase: [c_int; 2], 261 | /// The target bitrate (in kilobits per second). 262 | pub bitrate: c_uint, 263 | /// The codec 264 | pub codec: VideoCodecId, 265 | } 266 | 267 | pub struct Packets<'a> { 268 | ctx: &'a mut vpx_codec_ctx_t, 269 | iter: vpx_codec_iter_t, 270 | } 271 | 272 | impl<'a> Iterator for Packets<'a> { 273 | type Item = Frame<'a>; 274 | fn next(&mut self) -> Option { 275 | loop { 276 | unsafe { 277 | let pkt = vpx_codec_get_cx_data(self.ctx, &mut self.iter); 278 | if pkt.is_null() { 279 | return None; 280 | } else if (*pkt).kind == VPX_CODEC_CX_FRAME_PKT { 281 | let f = &(*pkt).data.frame; 282 | return Some(Frame { 283 | data: slice::from_raw_parts(f.buf as _, f.sz as usize), 284 | key: (f.flags & VPX_FRAME_IS_KEY) != 0, 285 | pts: f.pts, 286 | }); 287 | } else { 288 | // Ignore the packet. 
289 | } 290 | } 291 | } 292 | } 293 | } 294 | 295 | pub struct Finish { 296 | enc: Encoder, 297 | iter: vpx_codec_iter_t, 298 | } 299 | 300 | impl Finish { 301 | pub fn next(&mut self) -> Result> { 302 | let mut tmp = Packets { 303 | ctx: &mut self.enc.ctx, 304 | iter: self.iter, 305 | }; 306 | 307 | if let Some(packet) = tmp.next() { 308 | self.iter = tmp.iter; 309 | Ok(Some(packet)) 310 | } else { 311 | call_vpx!(vpx_codec_encode( 312 | tmp.ctx, 313 | ptr::null(), 314 | -1, // PTS 315 | 1, // Duration 316 | 0, // Flags 317 | vpx_sys::VPX_DL_REALTIME as c_ulong, 318 | )); 319 | 320 | tmp.iter = ptr::null(); 321 | if let Some(packet) = tmp.next() { 322 | self.iter = tmp.iter; 323 | Ok(Some(packet)) 324 | } else { 325 | Ok(None) 326 | } 327 | } 328 | } 329 | } 330 | --------------------------------------------------------------------------------