├── callme-cli ├── .gitignore ├── README.md ├── Cargo.toml └── src │ └── main.rs ├── callme ├── .gitignore ├── src │ ├── net.rs │ ├── codec.rs │ ├── rtc │ │ ├── track.rs │ │ ├── protocol_handler.rs │ │ ├── rtp_receiver.rs │ │ └── rtp_sender.rs │ ├── rtc.rs │ ├── lib.rs │ ├── audio.rs │ ├── audio │ │ ├── processor.rs │ │ ├── device.rs │ │ ├── playback.rs │ │ └── capture.rs │ └── codec │ │ └── opus.rs ├── Cargo.toml └── examples │ ├── echo.rs │ └── playback-record.rs ├── .gitignore ├── callme-egui ├── .gitignore ├── manifest.yaml ├── .cargo │ └── config.toml ├── android-vars.sh ├── src │ ├── main.rs │ ├── lib.rs │ └── app.rs ├── Cargo.toml ├── README.md └── build-ios.py ├── Makefile.toml ├── LICENSE-MIT ├── Cargo.toml ├── README.md ├── .github └── workflows │ ├── ci.yml │ └── release.yml └── LICENSE-APACHE /callme-cli/.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | -------------------------------------------------------------------------------- /callme/.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | SANDBOX 2 | /target 3 | -------------------------------------------------------------------------------- /callme-egui/.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | -------------------------------------------------------------------------------- /callme-egui/manifest.yaml: -------------------------------------------------------------------------------- 1 | android: 2 | manifest: 3 | uses_permission: 4 | - name: android.permission.INTERNET 5 | - name: android.permission.ACCESS_NETWORK_STATE 6 | - name: android.permission.CHANGE_NETWORK_STATE 7 | - name: android.permission.RECORD_AUDIO 8 | 
-------------------------------------------------------------------------------- /callme-egui/.cargo/config.toml: -------------------------------------------------------------------------------- 1 | # we specify the profile here, because it is the only way to define 2 | # different settings for a single crate in a workspace. 3 | [profile.release] 4 | strip = true # Automatically strip symbols from the binary. 5 | opt-level = "z" # Optimize for size. 6 | lto = true 7 | codegen-units = 1 8 | -------------------------------------------------------------------------------- /callme-egui/android-vars.sh: -------------------------------------------------------------------------------- 1 | # setup 2 | # you never know which tools needs which of those, so we define them all... 3 | export ANDROID_HOME=$HOME/Android/Sdk 4 | export ANDROID_NDK=$ANDROID_HOME/ndk/28.0.12674087 5 | export ANDROID_NDK_ROOT=$ANDROID_NDK 6 | export JAVA_HOME=/opt/android-studio/jbr 7 | export TOOLCHAIN="${ANDROID_NDK}/toolchains/llvm/prebuilt/linux-x86_64" 8 | export PATH=$ANDROID_HOME/platform-tools:$TOOLCHAIN/bin:$PATH 9 | 10 | export CARGO_APK_RELEASE_KEYSTORE_PASSWORD="android" 11 | export CARGO_APK_RELEASE_KEYSTORE=$HOME/.android/debug.keystore 12 | -------------------------------------------------------------------------------- /callme-egui/src/main.rs: -------------------------------------------------------------------------------- 1 | #![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] // hide console window on Windows in release 2 | #![allow(rustdoc::missing_crate_level_docs)] // it's an example 3 | 4 | use callme_egui::app::App; 5 | use eframe::NativeOptions; 6 | 7 | fn main() -> Result<(), eframe::Error> { 8 | tracing_subscriber::fmt::init(); 9 | let mut options = NativeOptions::default(); 10 | options.viewport = options 11 | .viewport 12 | .with_title("Callme") 13 | .with_resizable(true) 14 | .with_inner_size([500., 600.]); 15 | App::run(options) 16 | } 17 | 
-------------------------------------------------------------------------------- /callme/src/net.rs: -------------------------------------------------------------------------------- 1 | use std::str::FromStr; 2 | 3 | use anyhow::{bail, Context, Result}; 4 | use iroh::{Endpoint, NodeAddr, SecretKey}; 5 | pub use iroh_roq::ALPN; 6 | 7 | use crate::rtc::RtcConnection; 8 | 9 | pub async fn bind_endpoint() -> Result { 10 | let secret_key = match std::env::var("IROH_SECRET") { 11 | Ok(secret) => { 12 | SecretKey::from_str(&secret).expect("failed to parse secret key from IROH_SECRET") 13 | } 14 | Err(_) => SecretKey::generate(&mut rand::rngs::OsRng), 15 | }; 16 | Endpoint::builder() 17 | .secret_key(secret_key) 18 | .discovery_n0() 19 | .alpns(vec![ALPN.to_vec()]) 20 | .bind() 21 | .await 22 | } 23 | -------------------------------------------------------------------------------- /callme-cli/README.md: -------------------------------------------------------------------------------- 1 | # callme-cli 2 | 3 | A command-line interface to make calls with `callme`. 4 | 5 | ## Usage 6 | 7 | ``` 8 | cargo run --release 9 | ``` 10 | 11 | On Linux, you need ALSA and DBUS development headers: 12 | ``` 13 | apt-get install libasound2-dev libdbus-1-dev 14 | ``` 15 | 16 | The crate includes a C dependency for echo cancellation (`webrtc-audio-processing`) that needs C build tools to be installed. 17 | On macOS these can be installed with homebrew: 18 | ``` 19 | brew install automake libtool 20 | ``` 21 | 22 | On Windows, or if the build fails, you can disable the audio processing entirely. You should only use callme with headphones then. 
23 | ``` 24 | cargo run --release --no-default-features 25 | ``` 26 | -------------------------------------------------------------------------------- /Makefile.toml: -------------------------------------------------------------------------------- 1 | # Use cargo-make to run tasks here: https://crates.io/crates/cargo-make 2 | 3 | [tasks.format] 4 | workspace = false 5 | command = "cargo" 6 | args = [ 7 | "fmt", 8 | "--all", 9 | "--", 10 | "--config", 11 | "unstable_features=true", 12 | "--config", 13 | "imports_granularity=Crate,group_imports=StdExternalCrate,reorder_imports=true,format_code_in_doc_comments=true", 14 | ] 15 | 16 | [tasks.format-check] 17 | workspace = false 18 | command = "cargo" 19 | args = [ 20 | "fmt", 21 | "--all", 22 | "--check", 23 | "--", 24 | "--config", 25 | "unstable_features=true", 26 | "--config", 27 | "imports_granularity=Crate,group_imports=StdExternalCrate,reorder_imports=true,format_code_in_doc_comments=true", 28 | ] 29 | -------------------------------------------------------------------------------- /callme-cli/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "callme-cli" 3 | version = "0.1.0" 4 | edition = "2021" 5 | repository = { workspace = true } 6 | license = { workspace = true } 7 | authors = { workspace = true } 8 | description = "make voicecalls over iroh (cli app)" 9 | 10 | [package.metadata.wix] 11 | upgrade-guid = "5EE47959-3AF5-4C59-BB50-C27106F2EF0D" 12 | path-guid = "A8E48474-C88A-430F-904A-019A704E797A" 13 | license = false 14 | eula = false 15 | 16 | [dependencies] 17 | anyhow = "1.0.96" 18 | clap = { version = "4.5.31", features = ["derive"] } 19 | callme = { version = "0.1.0", path = "../callme", default-features = false } 20 | tokio = { version = "1.43.0", features = ["macros", "rt-multi-thread", "rt"] } 21 | tracing = "0.1.41" 22 | tracing-subscriber = "0.3.19" 23 | iroh = { version = "0.33.0", default-features = false } 24 | dialoguer = 
"0.11.0" 25 | n0-future = "0.1.2" 26 | 27 | [features] 28 | default = ["audio-processing"] 29 | audio-processing = ["callme/audio-processing"] 30 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | Copyright 2023 N0, INC. 2 | 3 | Permission is hereby granted, free of charge, to any 4 | person obtaining a copy of this software and associated 5 | documentation files (the "Software"), to deal in the 6 | Software without restriction, including without 7 | limitation the rights to use, copy, modify, merge, 8 | publish, distribute, sublicense, and/or sell copies of 9 | the Software, and to permit persons to whom the Software 10 | is furnished to do so, subject to the following 11 | conditions: 12 | 13 | The above copyright notice and this permission notice 14 | shall be included in all copies or substantial portions 15 | of the Software. 16 | 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 18 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 19 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 20 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT 21 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 22 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 23 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 24 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 25 | DEALINGS IN THE SOFTWARE. 
26 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | 2 | [workspace] 3 | resolver = "2" 4 | members = ["callme", "callme-cli", "callme-egui"] 5 | 6 | [workspace.package] 7 | authors = ["Frando ", "n0 team"] 8 | repository = "https://github.com/n0-computer/callme.git" 9 | license = "MIT OR Apache-2.0" 10 | 11 | [patch.crates-io] 12 | # make receive flows close on connection close: https://github.com/n0-computer/iroh-roq/pull/4 13 | iroh-roq = { git = "https://github.com/n0-computer/iroh-roq.git", branch = "fix/close-flows" } 14 | 15 | # fix android device names: https://github.com/RustAudio/cpal/pull/959 16 | cpal = { git = "https://github.com/Frando/cpal.git", branch = "feat/android-device-name" } 17 | 18 | # fix android build: https://github.com/tonarino/webrtc-audio-processing/pull/47 19 | # replace failure with anyhow: https://github.com/tonarino/webrtc-audio-processing/pull/48 20 | webrtc-audio-processing = { git = "https://github.com/Frando/webrtc-audio-processing.git", branch = "callme-dev" } 21 | webrtc-audio-processing-sys = { git = "https://github.com/Frando/webrtc-audio-processing.git", branch = "callme-dev" } 22 | 23 | # this is needed for compiling to wasm 24 | # webrtc-util = { git = "https://github.com/Frando/webrtc.git", branch = "fix/util-feature-flags" } 25 | -------------------------------------------------------------------------------- /callme-egui/src/lib.rs: -------------------------------------------------------------------------------- 1 | #[cfg(target_os = "android")] 2 | use egui_winit::winit; 3 | 4 | pub mod app; 5 | 6 | #[cfg(target_os = "android")] 7 | #[no_mangle] 8 | fn android_main(app: winit::platform::android::activity::AndroidApp) { 9 | use eframe::{NativeOptions, Renderer}; 10 | 11 | std::env::set_var("RUST_BACKTRACE", "1"); 12 | std::env::set_var("RUST_LOG", "warn,callme=debug"); 13 | 14 | 
tracing_subscriber::fmt::init(); 15 | 16 | // this would setup a android logging contxt 17 | // however then we get duplicate logs in the default adb output 18 | // because that displays stdout/stderr already. 19 | // use tracing_subscriber::{layer::SubscriberExt, EnvFilter}; 20 | // let subscriber = tracing_subscriber::fmt() 21 | // .with_env_filter(EnvFilter::new("warn,callme=debug")) 22 | // .pretty() 23 | // .finish(); 24 | // let subscriber = { 25 | // let android_layer = tracing_android::layer("callme").unwrap(); 26 | // subscriber.with(android_layer) 27 | // }; 28 | // tracing::subscriber::set_global_default(subscriber).expect("Unable to set global subscriber"); 29 | 30 | let options = NativeOptions { 31 | android_app: Some(app), 32 | renderer: Renderer::Wgpu, 33 | ..Default::default() 34 | }; 35 | self::app::App::run(options).unwrap(); 36 | } 37 | -------------------------------------------------------------------------------- /callme/src/codec.rs: -------------------------------------------------------------------------------- 1 | use self::opus::OpusChannels; 2 | use crate::rtc::TrackKind; 3 | 4 | pub mod opus; 5 | 6 | #[derive(Debug, Clone, Copy, Eq, PartialEq)] 7 | #[non_exhaustive] 8 | pub enum Codec { 9 | Opus { channels: OpusChannels }, 10 | } 11 | 12 | impl Codec { 13 | /// We use the "dynamic" identifiers 96-127 in a "static" way here 14 | /// to skip SDP. 
15 | /// 16 | /// See https://en.wikipedia.org/wiki/RTP_payload_formats 17 | pub fn rtp_payload_type(&self) -> u8 { 18 | match self { 19 | Codec::Opus { 20 | channels: OpusChannels::Mono, 21 | } => 96, 22 | Codec::Opus { 23 | channels: OpusChannels::Stereo, 24 | } => 97, 25 | } 26 | } 27 | 28 | pub fn try_from_rtp_payload_type(payload_type: u8) -> Option { 29 | match payload_type { 30 | 96 => Some(Codec::Opus { 31 | channels: OpusChannels::Mono, 32 | }), 33 | 97 => Some(Codec::Opus { 34 | channels: OpusChannels::Stereo, 35 | }), 36 | _ => None, 37 | } 38 | } 39 | 40 | pub fn sample_rate(&self) -> u32 { 41 | match self { 42 | Codec::Opus { .. } => self::opus::OPUS_SAMPLE_RATE, 43 | } 44 | } 45 | 46 | pub fn kind(&self) -> TrackKind { 47 | match self { 48 | Codec::Opus { .. } => TrackKind::Audio, 49 | } 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # callme 2 | 3 | Audio calls with Iroh! 4 | 5 | `callme` is an experimental library and tool that uses [iroh-roq](https://github.com/dignifiedquire/iroh-roq) to transfer Opus-encoded audio between devices. It uses [cpal](https://github.com/RustAudio/cpal) for cross-platform access to the device's audio interfaces. It includes optional audio processing with echo cancellation, and should run on most platforms. 6 | 7 | ## Crates 8 | 9 | See the READMEs of the individual crates for usage instructions. 10 | 11 | * **[callme](callme)** is the main Rust library used by all other crates in the workspace. 12 | * **[callme-cli](callme-cli)** is a basic command-line tool to make audio calls. 13 | * **[callme-egui](callme-egui)** is a GUI for callme. It runs on desktop (Linux, macOS, Windows) and Android. iOS support is currently untested, but should work. See the [README](callme-egui/README.md) for detailed instructions. 14 | 15 | ## License 16 | 17 | Copyright 2024 N0, INC. 
18 | 19 | This project is licensed under either of 20 | 21 | * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or 22 | http://www.apache.org/licenses/LICENSE-2.0) 23 | * MIT license ([LICENSE-MIT](LICENSE-MIT) or 24 | http://opensource.org/licenses/MIT) 25 | 26 | at your option. 27 | 28 | ## Contribution 29 | 30 | Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in this project by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. 31 | 32 | -------------------------------------------------------------------------------- /callme/src/rtc/track.rs: -------------------------------------------------------------------------------- 1 | use bytes::Bytes; 2 | use tokio::sync::broadcast; 3 | 4 | use crate::codec::Codec; 5 | 6 | #[derive(Debug, Clone, Copy)] 7 | pub enum TrackKind { 8 | Audio, 9 | Video, 10 | } 11 | 12 | #[derive(Debug)] 13 | pub struct MediaTrack { 14 | pub(crate) receiver: broadcast::Receiver, 15 | pub(crate) codec: Codec, 16 | pub(crate) kind: TrackKind, 17 | } 18 | 19 | impl Clone for MediaTrack { 20 | fn clone(&self) -> Self { 21 | Self { 22 | receiver: self.receiver.resubscribe(), 23 | codec: self.codec, 24 | kind: self.kind, 25 | } 26 | } 27 | } 28 | 29 | impl MediaTrack { 30 | pub fn new(receiver: broadcast::Receiver, codec: Codec, kind: TrackKind) -> Self { 31 | Self { 32 | receiver, 33 | codec, 34 | kind, 35 | } 36 | } 37 | pub async fn recv(&mut self) -> Result { 38 | self.receiver.recv().await 39 | } 40 | 41 | pub fn try_recv(&mut self) -> Result { 42 | self.receiver.try_recv() 43 | } 44 | 45 | pub fn kind(&self) -> TrackKind { 46 | self.kind 47 | } 48 | 49 | pub fn codec(&self) -> Codec { 50 | self.codec 51 | } 52 | } 53 | 54 | #[derive(Debug, Clone)] 55 | pub struct MediaFrame { 56 | pub payload: Bytes, 57 | pub sample_count: Option, 58 | pub skipped_frames: Option, 59 | pub skipped_samples: Option, 60 | } 61 | 
-------------------------------------------------------------------------------- /callme/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "callme" 3 | version = "0.1.0" 4 | edition = "2021" 5 | repository = { workspace = true } 6 | license = { workspace = true } 7 | authors = { workspace = true } 8 | 9 | [dependencies] 10 | anyhow = "1.0.96" 11 | async-channel = "2.3.1" 12 | audio_thread_priority = "0.33.0" 13 | bytes = "1.10.0" 14 | cpal = { version = "0.15.3" } 15 | dasp_sample = "0.11.0" 16 | derive_more = { version = "2.0.1", features = ["debug", "deref"] } 17 | fixed-resample = "0.6.1" 18 | futures-concurrency = "7.6.3" 19 | iroh = { version = "0.33.0", default-features = false } 20 | iroh-roq = "0.1.0" 21 | n0-future = "0.1.2" 22 | # opus = { package = "magnum-opus", version = "0.3" } 23 | opus = { git = "https://github.com/DCNick3/opus-rs.git", branch = "unsafe-libopus", default-features = false, features = [ 24 | "unsafe-libopus-backend", 25 | ] } 26 | rand = "0.8" 27 | ringbuf = "0.4.7" 28 | serde = { version = "1.0.218", features = ["derive"] } 29 | spin_sleep = "1.3.0" 30 | tokio = { version = "1.43.0", features = ["sync"] } 31 | tokio-util = { version = "0.7.13", default-features = false } 32 | tracing = "0.1.41" 33 | webrtc-audio-processing = { version = "0.4.0", optional = true, features = [ 34 | "bundled", 35 | "derive_serde", 36 | ] } 37 | webrtc-media = "0.9.0" 38 | 39 | [target.'cfg(target_os = "android")'.dependencies] 40 | cpal = { version = "0.15.3", features = ["oboe-shared-stdcxx"] } 41 | 42 | [target.'cfg(target_family = "wasm")'.dependencies] 43 | cpal = { version = "0.15.3", features = ["wasm-bindgen"] } 44 | 45 | [dev-dependencies] 46 | clap = { version = "4.5.31", features = ["derive"] } 47 | hound = "3.5.1" 48 | testresult = "0.4.1" 49 | tokio = { version = "1.43.0", features = ["macros", "rt-multi-thread"] } 50 | tracing-subscriber = "0.3.19" 51 | tracing-test = 
"0.2.5" 52 | 53 | [features] 54 | default = ["audio-processing"] 55 | audio-processing = ["webrtc-audio-processing"] 56 | -------------------------------------------------------------------------------- /callme/src/rtc/protocol_handler.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use iroh::{protocol::ProtocolHandler, Endpoint, NodeAddr}; 3 | use iroh_roq::ALPN; 4 | use n0_future::{boxed::BoxFuture, FutureExt}; 5 | use tokio_util::sync::CancellationToken; 6 | use tracing::debug; 7 | 8 | use super::RtcConnection; 9 | 10 | #[derive(Debug, Clone)] 11 | pub struct RtcProtocol { 12 | shutdown_token: CancellationToken, 13 | endpoint: Endpoint, 14 | sender: async_channel::Sender, 15 | receiver: async_channel::Receiver, 16 | } 17 | 18 | impl ProtocolHandler for RtcProtocol { 19 | fn accept(&self, connecting: iroh::endpoint::Connecting) -> BoxFuture> { 20 | let sender = self.sender.clone(); 21 | async move { 22 | debug!("ProtocolHandler::accept: connecting"); 23 | let conn = connecting.await?; 24 | debug!("ProtocolHandler::accept: conn"); 25 | let conn = RtcConnection::new(conn); 26 | sender.send(conn).await?; 27 | Ok(()) 28 | } 29 | .boxed() 30 | } 31 | 32 | fn shutdown(&self) -> BoxFuture<()> { 33 | self.shutdown_token.cancel(); 34 | async move {}.boxed() 35 | } 36 | } 37 | 38 | impl RtcProtocol { 39 | pub const ALPN: &[u8] = ALPN; 40 | pub fn new(endpoint: Endpoint) -> Self { 41 | let (sender, receiver) = async_channel::bounded(16); 42 | Self { 43 | sender, 44 | receiver, 45 | endpoint, 46 | shutdown_token: CancellationToken::new(), 47 | } 48 | } 49 | 50 | pub async fn accept(&self) -> Result> { 51 | tokio::select! 
{ 52 | _ = self.shutdown_token.cancelled() => { 53 | Ok(None) 54 | } 55 | conn = self.receiver.recv() => { 56 | let conn = conn?; 57 | Ok(Some(conn)) 58 | } 59 | } 60 | } 61 | 62 | pub async fn connect(&self, node_addr: impl Into) -> Result { 63 | let conn = self.endpoint.connect(node_addr, ALPN).await?; 64 | Ok(RtcConnection::new(conn)) 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /callme-egui/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "callme-egui" 3 | version = "0.1.0" 4 | edition = "2021" 5 | repository = { workspace = true } 6 | license = { workspace = true } 7 | authors = { workspace = true } 8 | description = "make voicecalls over iroh (gui app)" 9 | 10 | [lib] 11 | crate-type = ["lib", "cdylib"] 12 | 13 | [dependencies] 14 | anyhow = "1.0.96" 15 | async-channel = "2.3.1" 16 | egui = { version = "0.31.0", default-features = false, features = [ 17 | "color-hex", 18 | ] } 19 | iroh = { version = "0.33.0", default-features = false } 20 | callme = { version = "0.1.0", path = "../callme", default-features = false } 21 | n0-future = "0.1.2" 22 | tokio = { version = "1.43.0", features = ["rt", "rt-multi-thread", "sync"] } 23 | tracing = "0.1.41" 24 | tracing-subscriber = { version = "0.3.19", features = ["env-filter", "std"] } 25 | strum = { version = "0.27.1", features = ["derive"] } 26 | 27 | [target.'cfg(target_os = "android")'.dependencies] 28 | android_clipboard = "0.1.0" 29 | tracing-android = "0.2" 30 | eframe = { version = "0.31", default-features = false, features = [ 31 | "wgpu", 32 | "android-native-activity", 33 | "default_fonts", 34 | ] } 35 | egui-winit = { version = "0.31", default-features = false, features = [ 36 | "clipboard", 37 | "links", 38 | ] } 39 | 40 | [target.'cfg(target_os = "ios")'.dependencies] 41 | eframe = { version = "0.29", default-features = false, features = [ 42 | "accesskit", 43 | "default_fonts", 44 
| "wgpu", 45 | ] } 46 | 47 | [target.'cfg(not(target_os = "android"))'.dependencies] 48 | eframe = { version = "0.31", default-features = false, features = [ 49 | "default_fonts", 50 | "accesskit", 51 | "x11", 52 | "wayland", 53 | "glow", 54 | ] } 55 | arboard = { version = "3.4.1", features = ["wl-clipboard-rs"] } 56 | 57 | [features] 58 | default = ["audio-processing"] 59 | audio-processing = ["callme/audio-processing"] 60 | 61 | [[package.metadata.android.uses_permission]] 62 | name = "android.permission.INTERNET" 63 | [[package.metadata.android.uses_permission]] 64 | name = "android.permission.ACCESS_NETWORK_STATE" 65 | [[package.metadata.android.uses_permission]] 66 | name = "android.permission.CHANGE_NETWORK_STATE" 67 | [[package.metadata.android.uses_permission]] 68 | name = "android.permission.RECORD_AUDIO" 69 | 70 | [package.metadata.bundle] 71 | name = "callme" 72 | identifier = "computer.n0.callme" 73 | -------------------------------------------------------------------------------- /callme/src/rtc/rtp_receiver.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use iroh_roq::{ 3 | rtp::{self, codecs::opus::OpusPacket, packetizer::Depacketizer}, 4 | ReceiveFlow, 5 | }; 6 | use tokio::sync::{broadcast, oneshot}; 7 | use tracing::{trace, warn}; 8 | use webrtc_media::io::sample_builder::SampleBuilder; 9 | 10 | use crate::{codec::Codec, rtc::MediaFrame}; 11 | 12 | pub(crate) struct RtpMediaTrackReceiver { 13 | pub(crate) recv_flow: ReceiveFlow, 14 | pub(crate) track_sender: broadcast::Sender, 15 | pub(crate) init_tx: Option>>, 16 | } 17 | 18 | impl RtpMediaTrackReceiver { 19 | pub async fn run(mut self) { 20 | if let Err(err) = self.run_inner().await { 21 | let id: u64 = self.recv_flow.flow_id().into(); 22 | warn!(%id, "rtp receive flow failed: {err}"); 23 | if let Some(tx) = self.init_tx.take() { 24 | tx.send(Err(err)).ok(); 25 | } 26 | } 27 | } 28 | 29 | async fn run_inner(&mut self) -> 
Result<()> { 30 | let first_packet = self.recv_flow.read_rtp().await?; 31 | let codec = Codec::try_from_rtp_payload_type(first_packet.header.payload_type) 32 | .ok_or_else(|| anyhow::anyhow!("unsupported codec type"))?; 33 | if let Some(tx) = self.init_tx.take() { 34 | tx.send(Ok(codec)).ok(); 35 | } 36 | match codec { 37 | Codec::Opus { .. } => { 38 | self.run_loop(OpusPacket, codec.sample_rate(), first_packet) 39 | .await 40 | } 41 | } 42 | } 43 | 44 | async fn run_loop( 45 | &mut self, 46 | depacketizer: T, 47 | sample_rate: u32, 48 | first_packet: rtp::packet::Packet, 49 | ) -> Result<()> { 50 | let mut sample_builder = SampleBuilder::new(16, depacketizer, sample_rate); 51 | let mut packet = first_packet; 52 | loop { 53 | trace!( 54 | "recv packet len {} seq {} ts {}", 55 | packet.payload.len(), 56 | packet.header.sequence_number, 57 | packet.header.timestamp, 58 | ); 59 | sample_builder.push(packet); 60 | if let Some(frame) = sample_builder.pop() { 61 | let webrtc_media::Sample { 62 | data, 63 | duration, 64 | prev_dropped_packets, 65 | timestamp: _, 66 | packet_timestamp: _, 67 | prev_padding_packets: _, 68 | } = frame; 69 | let frame = MediaFrame { 70 | payload: data, 71 | sample_count: Some((sample_rate as f32 / duration.as_secs_f32()) as u32), 72 | skipped_frames: Some(prev_dropped_packets as u32), 73 | skipped_samples: None, 74 | }; 75 | self.track_sender.send(frame)?; 76 | } 77 | 78 | packet = self.recv_flow.read_rtp().await?; 79 | } 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /callme/src/rtc/rtp_sender.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{anyhow, Result}; 2 | use iroh_roq::{ 3 | rtp, 4 | rtp::{ 5 | codecs::opus::OpusPayloader, 6 | packetizer::{new_packetizer, Packetizer}, 7 | sequence::Sequencer, 8 | }, 9 | SendFlow, 10 | }; 11 | use tokio::sync::broadcast::error::RecvError; 12 | use tracing::trace; 13 | 14 | use super::{MediaFrame, 
MediaTrack}; 15 | use crate::codec::Codec; 16 | 17 | #[derive(Debug)] 18 | pub(crate) struct RtpMediaTrackSender { 19 | pub(crate) track: MediaTrack, 20 | pub(crate) send_flow: SendFlow, 21 | } 22 | 23 | pub(crate) const MTU: usize = 1100; 24 | 25 | pub(crate) const CLOCK_RATE: u32 = crate::audio::SAMPLE_RATE.0; 26 | 27 | impl RtpMediaTrackSender { 28 | pub(crate) async fn run(mut self) -> Result<()> { 29 | let ssrc = 0; 30 | let sequencer: Box = 31 | Box::new(rtp::sequence::new_random_sequencer()); 32 | let payloader = match self.track.codec() { 33 | Codec::Opus { .. } => Box::new(OpusPayloader), 34 | }; 35 | let payload_type = self.track.codec().rtp_payload_type(); 36 | let mut packetizer = new_packetizer( 37 | MTU, 38 | payload_type, 39 | ssrc, 40 | payloader, 41 | sequencer.clone(), 42 | CLOCK_RATE, 43 | ); 44 | loop { 45 | let frame = match self.track.recv().await { 46 | Ok(frame) => frame, 47 | Err(RecvError::Lagged(n)) => { 48 | // increase sequence number for frames skipped due to lagging 49 | for _ in 0..n { 50 | sequencer.next_sequence_number(); 51 | } 52 | continue; 53 | } 54 | Err(RecvError::Closed) => { 55 | break; 56 | } 57 | }; 58 | let MediaFrame { 59 | payload, 60 | sample_count, 61 | skipped_frames, 62 | skipped_samples, 63 | } = frame; 64 | // increase sequence number for frames skipped at source 65 | if let Some(skipped_frames) = skipped_frames { 66 | for _ in 0..skipped_frames { 67 | sequencer.next_sequence_number(); 68 | } 69 | } 70 | // increase timestamp for skipped samples 71 | // TODO: should also do that for skipped frames? 
72 | if let Some(skipped_samples) = skipped_samples { 73 | packetizer.skip_samples(skipped_samples); 74 | } 75 | 76 | let sample_count = sample_count 77 | .ok_or_else(|| anyhow!("received media track frame without sample count"))?; 78 | let packets = packetizer.packetize(&payload, sample_count)?; 79 | for packet in packets { 80 | trace!( 81 | "send packet len {} seq {} ts {}", 82 | packet.payload.len(), 83 | packet.header.sequence_number, 84 | packet.header.timestamp, 85 | ); 86 | self.send_flow.send_rtp(&packet)?; 87 | } 88 | } 89 | Ok(()) 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /callme-egui/README.md: -------------------------------------------------------------------------------- 1 | # callme-egui 2 | 3 | A very WIP user interface for callme. 4 | 5 | ## Run on desktop (Linux / macOS / Windows) 6 | 7 | ``` 8 | cargo run --release 9 | ``` 10 | 11 | On Linux, you need ALSA and DBUS development headers: 12 | ``` 13 | apt-get install libasound2-dev libdbus-1-dev libtool automake 14 | ``` 15 | 16 | The crate includes a C dependency for echo cancellation (`webrtc-audio-processing`) that needs C build tools to be installed. 17 | On macOS these can be installed with homebrew: 18 | ``` 19 | brew install automake libtool 20 | ``` 21 | 22 | On Windows, or if the build fails, you can disable the audio processing entirely. You should only use callme with headphones then. 23 | ``` 24 | cargo run --release --no-default-features 25 | ``` 26 | 27 | ## Run on Android 28 | 29 | Prerequisites: Android SDK and NDK must be installed, through Android studio. 30 | See e.g. the [Dioxus guide](https://dioxuslabs.com/learn/0.6/guides/mobile/#android) on what you need to do. 31 | 32 | Also install [`cargo apk`](https://github.com/rust-mobile/xbuild), a Rust build tool for mobile. 33 | 34 | ``` 35 | cargo binstall cargo-apk 36 | ``` 37 | 38 | Now you need to set some environment variables for things to work. 
It is a bit of a pain to get this working. 39 | For me, I'm using these here, the paths might need to be adapted to your system. 40 | 41 | ```sh 42 | export ANDROID_HOME=$HOME/Android/Sdk 43 | export ANDROID_NDK=$ANDROID_HOME/ndk/28.0.12674087 44 | export ANDROID_NDK_ROOT=$ANDROID_NDK 45 | export JAVA_HOME=/opt/android-studio/jbr 46 | export TOOLCHAIN="${ANDROID_NDK}/toolchains/llvm/prebuilt/linux-x86_64" 47 | export PATH=$ANDROID_HOME/platform-tools:$TOOLCHAIN/bin:$PATH 48 | 49 | export CARGO_APK_RELEASE_KEYSTORE_PASSWORD="android" 50 | export CARGO_APK_RELEASE_KEYSTORE=$HOME/.android/debug.keystore 51 | ``` 52 | You can put those into a file and then do `source android-vars.sh` in your terminal before following the rest of the guide. 53 | Note: You should only do android builds in this terminal then - e.g. Wasm builds might fail with the changed `PATH`. 54 | 55 | Now, on your phone, go to *Settings* -> *System* -> *Developer settings* and enable *Wireless debugging* and click on *Pair device with pairing code* 56 | Make sure your computer and phone are in the same WIFI. 57 | 58 | Now run 59 | ``` 60 | adb pair IP:Port 61 | ``` 62 | with IP and port as printed on the pairing screen on the phone. 63 | and afterwards 64 | ``` 65 | adb connect IP:Port 66 | ``` 67 | with the IP and port as printed on the Wireless debugging screen. 68 | 69 | And now, finally: 70 | 71 | ``` 72 | cargo apk run --device IP:Port --target aarch64-linux-android --lib --release 73 | ``` 74 | 75 | This should now run the GUI directly on your phone! 76 | 77 | ## Run on iOS 78 | 79 | *Note: This is untested. 
Instructions taken from [here](https://github.com/emilk/eframe_template/pull/152)* 80 | 81 | #### Prerequisites 82 | 83 | * Install xcode 84 | * Accept license `sudo xcodebuild -license` 85 | * Install cargo-bundle `cargo install cargo-bundle` 86 | * Install the required target: 87 | * `rustup target add aarch64-apple-ios` for modern iOS devices 88 | * `rustup target add aarch64-apple-ios-sim` for simulator on modern machines 89 | * `rustup target add x86_64-apple-ios` for old iOS devices or simulator on old machines 90 | * Install python 3.11 or newer 91 | * Run the build script `./build-ios.py` - it will print a help text with supported options 92 | 93 | #### Run in simulator 94 | 95 | `./build-ios.py run --sim` 96 | 97 | #### Run on device 98 | 99 | `./build-ios.py run` 100 | -------------------------------------------------------------------------------- /callme/examples/echo.rs: -------------------------------------------------------------------------------- 1 | use std::time::Duration; 2 | 3 | use anyhow::Result; 4 | use callme::net::bind_endpoint; 5 | use clap::Parser; 6 | use futures_concurrency::future::TryJoin; 7 | use iroh::{endpoint::Connection, NodeId}; 8 | use iroh_roq::{Session, VarInt, ALPN}; 9 | use n0_future::TryFutureExt; 10 | use tracing::{info, trace, warn}; 11 | 12 | #[derive(Debug, Parser)] 13 | struct Args { 14 | #[clap(subcommand)] 15 | command: Command, 16 | #[clap(short, long)] 17 | delay: Option, 18 | } 19 | 20 | #[derive(Debug, Parser)] 21 | enum Command { 22 | Connect { node_id: NodeId }, 23 | Accept, 24 | } 25 | 26 | #[tokio::main] 27 | async fn main() -> Result<()> { 28 | tracing_subscriber::fmt::init(); 29 | let args = Args::parse(); 30 | let endpoint = bind_endpoint().await?; 31 | println!("node id: {}", endpoint.node_id()); 32 | 33 | let opts = Opts { 34 | delay: Duration::from_millis(args.delay.unwrap_or(200)), 35 | }; 36 | match args.command { 37 | Command::Connect { node_id } => { 38 | let conn = endpoint.connect(node_id, 
ALPN).await?; 39 | if let Err(err) = handle_connection(conn, opts).await { 40 | warn!("conn terminated with error {err:?}"); 41 | } 42 | } 43 | Command::Accept => { 44 | while let Some(incoming) = endpoint.accept().await { 45 | let Ok(mut connecting) = incoming.accept() else { 46 | continue; 47 | }; 48 | let opts = opts.clone(); 49 | tokio::task::spawn(async move { 50 | if connecting.alpn().await.ok().as_deref() != Some(ALPN) { 51 | return; 52 | } 53 | let Ok(conn) = connecting.await else { 54 | return; 55 | }; 56 | if let Err(err) = handle_connection(conn, opts).await { 57 | warn!("conn terminated with error {err:?}"); 58 | } 59 | }); 60 | } 61 | } 62 | } 63 | Ok(()) 64 | } 65 | 66 | #[derive(Debug, Clone)] 67 | struct Opts { 68 | delay: Duration, 69 | } 70 | 71 | async fn handle_connection(conn: Connection, opts: Opts) -> Result<()> { 72 | info!("new connection with {}", conn.remote_node_id()?); 73 | 74 | let (tx, mut rx) = tokio::sync::mpsc::channel(64); 75 | 76 | let flow_id = VarInt::from_u32(0); 77 | let session = Session::new(conn); 78 | let send_flow = session.new_send_flow(flow_id).await.unwrap(); 79 | let mut recv_flow = session.new_receive_flow(flow_id).await.unwrap(); 80 | 81 | let recv_fut = async move { 82 | loop { 83 | let packet = match recv_flow.read_rtp().await { 84 | Ok(packet) => packet, 85 | Err(err) => break anyhow::Result::<(), _>::Err(err), 86 | }; 87 | trace!( 88 | "recv packet len {} seq {} ts {}", 89 | packet.payload.len(), 90 | packet.header.sequence_number, 91 | packet.header.timestamp, 92 | ); 93 | let tx = tx.clone(); 94 | tokio::task::spawn(async move { 95 | tokio::time::sleep(opts.delay).await; 96 | tx.try_send(packet).ok(); 97 | }); 98 | } 99 | }; 100 | 101 | let send_fut = async move { 102 | while let Some(packet) = rx.recv().await { 103 | trace!( 104 | "send packet len {} seq {} ts {}", 105 | packet.payload.len(), 106 | packet.header.sequence_number, 107 | packet.header.timestamp, 108 | ); 109 | send_flow.send_rtp(&packet)?; 110 
| } 111 | anyhow::Ok(()) 112 | }; 113 | let send_fut = send_fut.map_err(|err| err.context("rtp sender")); 114 | let recv_fut = recv_fut.map_err(|err| err.context("rtp receiver")); 115 | (send_fut, recv_fut).try_join().await?; 116 | Ok(()) 117 | } 118 | -------------------------------------------------------------------------------- /callme/src/rtc.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | collections::HashMap, 3 | future::Future, 4 | sync::{ 5 | atomic::{AtomicU32, AtomicU64, Ordering}, 6 | Arc, 7 | }, 8 | }; 9 | 10 | use anyhow::{anyhow, Result}; 11 | use bytes::Bytes; 12 | use iroh::{endpoint::Connection, protocol::ProtocolHandler}; 13 | use iroh_roq::{ 14 | rtp::{self, codecs::opus::OpusPayloader, packetizer::Packetizer}, 15 | Session, VarInt, 16 | }; 17 | use n0_future::{task, Stream}; 18 | use tokio::sync::{broadcast, oneshot}; 19 | use tracing::{info, warn}; 20 | use webrtc_media::io::sample_builder::SampleBuilder; 21 | 22 | pub use self::{ 23 | protocol_handler::RtcProtocol, 24 | track::{MediaFrame, MediaTrack, TrackKind}, 25 | }; 26 | use self::{rtp_receiver::RtpMediaTrackReceiver, rtp_sender::RtpMediaTrackSender}; 27 | use crate::audio::AudioContext; 28 | 29 | mod protocol_handler; 30 | mod rtp_receiver; 31 | mod rtp_sender; 32 | mod track; 33 | 34 | #[derive(Debug, Clone)] 35 | pub struct RtcConnection { 36 | conn: Connection, 37 | session: Session, 38 | next_recv_flow_id: Arc, 39 | next_send_flow_id: Arc, 40 | } 41 | 42 | impl RtcConnection { 43 | pub fn new(conn: Connection) -> Self { 44 | let session = Session::new(conn.clone()); 45 | Self { 46 | conn, 47 | session, 48 | next_recv_flow_id: Default::default(), 49 | next_send_flow_id: Default::default(), 50 | } 51 | } 52 | 53 | pub fn transport(&self) -> &Connection { 54 | &self.conn 55 | } 56 | 57 | pub async fn send_track(&self, track: MediaTrack) -> Result<()> { 58 | let flow_id = self.next_send_flow_id.fetch_add(1, Ordering::SeqCst); 59 | 
let send_flow = self.session.new_send_flow(flow_id.into()).await?; 60 | let sender = RtpMediaTrackSender { send_flow, track }; 61 | task::spawn(async move { 62 | if let Err(err) = sender.run().await { 63 | warn!(flow_id, "send flow failed: {err}"); 64 | } 65 | }); 66 | Ok(()) 67 | } 68 | 69 | pub async fn recv_track(&self) -> Result> { 70 | let flow_id = self.next_recv_flow_id.fetch_add(1, Ordering::SeqCst); 71 | let recv_flow = self.session.new_receive_flow(flow_id.into()).await?; 72 | let (track_sender, track_receiver) = broadcast::channel(12); 73 | let (init_tx, init_rx) = oneshot::channel(); 74 | let receiver = RtpMediaTrackReceiver { 75 | recv_flow, 76 | track_sender, 77 | init_tx: Some(init_tx), 78 | }; 79 | task::spawn(async move { 80 | receiver.run().await; 81 | info!("rtp receiver closed"); 82 | }); 83 | let closed = self.transport().closed(); 84 | let codec = tokio::select! { 85 | res = init_rx => res??, 86 | err = closed => { 87 | match err { 88 | iroh::endpoint::ConnectionError::LocallyClosed => return Ok(None), 89 | err => return Err(err.into()) 90 | } 91 | } 92 | }; 93 | let track = MediaTrack { 94 | receiver: track_receiver, 95 | codec, 96 | kind: codec.kind(), 97 | }; 98 | Ok(Some(track)) 99 | } 100 | } 101 | 102 | pub async fn handle_connection_with_audio_context( 103 | audio_ctx: AudioContext, 104 | conn: RtcConnection, 105 | ) -> Result<()> { 106 | let capture_track = audio_ctx.capture_track().await?; 107 | conn.send_track(capture_track).await?; 108 | info!("added capture track to rtc connection"); 109 | while let Some(remote_track) = conn.recv_track().await? 
{ 110 | info!( 111 | "new remote track: {:?} {:?}", 112 | remote_track.kind(), 113 | remote_track.codec() 114 | ); 115 | match remote_track.kind() { 116 | TrackKind::Audio => { 117 | audio_ctx.play_track(remote_track).await?; 118 | } 119 | TrackKind::Video => unimplemented!(), 120 | } 121 | } 122 | Ok(()) 123 | } 124 | -------------------------------------------------------------------------------- /callme/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![allow(unused_imports)] 2 | 3 | pub mod audio; 4 | pub mod codec; 5 | pub mod net; 6 | pub mod rtc; 7 | 8 | pub use cpal; 9 | pub use iroh::NodeId; 10 | 11 | #[cfg(test)] 12 | mod tests { 13 | use std::{ 14 | ops::ControlFlow, 15 | time::{Duration, Instant}, 16 | }; 17 | 18 | use futures_concurrency::future::{Join, TryJoin}; 19 | use iroh::protocol::Router; 20 | use testresult::TestResult; 21 | use tokio::sync::{mpsc, oneshot}; 22 | 23 | use crate::{ 24 | audio::{AudioSink, AudioSource, ENGINE_FORMAT}, 25 | codec::opus::{MediaTrackOpusDecoder, MediaTrackOpusEncoder}, 26 | net::bind_endpoint, 27 | rtc::{MediaTrack, RtcProtocol}, 28 | }; 29 | 30 | async fn build() -> TestResult<(Router, RtcProtocol)> { 31 | let endpoint = bind_endpoint().await?; 32 | let proto = RtcProtocol::new(endpoint.clone()); 33 | let router = Router::builder(endpoint) 34 | .accept(RtcProtocol::ALPN, proto.clone()) 35 | .spawn() 36 | .await?; 37 | Ok((router, proto)) 38 | } 39 | 40 | #[tracing_test::traced_test] 41 | #[tokio::test] 42 | async fn smoke() -> TestResult { 43 | let (router1, rtc1) = build().await?; 44 | let (router2, rtc2) = build().await?; 45 | let addr1 = router1.endpoint().node_addr().await?; 46 | 47 | let (conn1, conn2) = (rtc2.connect(addr1), rtc1.accept()).try_join().await?; 48 | 49 | let conn2 = conn2.unwrap(); 50 | 51 | let (mut node1, track1) = MediaTrackOpusEncoder::new(4, ENGINE_FORMAT)?; 52 | conn1.send_track(track1.clone()).await?; 53 | 54 | let sample_count = 
ENGINE_FORMAT.sample_count(Duration::from_millis(20)); 55 | // start sending audio at node1 56 | let (abort_tx, mut abort_rx) = mpsc::channel(1); 57 | let send_task = tokio::task::spawn(async move { 58 | println!("loop start"); 59 | let fut = async move { 60 | loop { 61 | #[allow(clippy::question_mark)] 62 | if let Err(err) = node1.tick(&vec![0.5; sample_count]) { 63 | return Err(err); 64 | } 65 | tokio::time::sleep(Duration::from_millis(20)).await; 66 | } 67 | }; 68 | tokio::select! { 69 | x = abort_rx.recv() => x.unwrap(), 70 | x = fut => x.unwrap(), 71 | } 72 | println!("loop end"); 73 | conn1.transport().close(1u32.into(), b"bye"); 74 | tokio::time::sleep(Duration::from_millis(20)).await; 75 | anyhow::Ok(()) 76 | }); 77 | let track2 = conn2.recv_track().await?.unwrap(); 78 | 79 | assert_eq!(track1.codec(), track2.codec()); 80 | 81 | let mut decoder = MediaTrackOpusDecoder::new(track2)?; 82 | let mut out = vec![0.; sample_count]; 83 | // we need to wait a bit likely. 84 | let start = Instant::now(); 85 | // wait for some audio to arrive. 86 | let expected = sample_count * 3; 87 | let mut total = 0; 88 | 'outer: loop { 89 | let n = loop { 90 | if start.elapsed() > Duration::from_secs(2) { 91 | panic!("timeout"); 92 | } 93 | tokio::time::sleep(Duration::from_millis(10)).await; 94 | match decoder.tick(&mut out)? { 95 | ControlFlow::Continue(0) => continue, 96 | ControlFlow::Continue(n) => break n, 97 | // this signals end of track, triggered when the connection closes. 
98 | ControlFlow::Break(()) => break 'outer, 99 | } 100 | }; 101 | assert!(out[..n].iter().any(|s| *s != 0.)); 102 | out.fill(0.); 103 | total += n; 104 | if total >= expected { 105 | abort_tx.try_send(()).ok(); 106 | } 107 | println!("received {n} audio frames, total {total}"); 108 | } 109 | assert_eq!(total, expected); 110 | send_task.await??; 111 | router1.shutdown().await?; 112 | router2.shutdown().await?; 113 | Ok(()) 114 | } 115 | } 116 | -------------------------------------------------------------------------------- /callme/src/audio.rs: -------------------------------------------------------------------------------- 1 | use std::time::Duration; 2 | 3 | use anyhow::Result; 4 | use bytes::Bytes; 5 | use cpal::{ChannelCount, SampleRate}; 6 | 7 | use self::{ 8 | capture::AudioCapture, device::list_devices, playback::AudioPlayback, 9 | ringbuf_pipe::ringbuf_pipe, 10 | }; 11 | pub use self::{ 12 | capture::AudioSink, 13 | device::{AudioConfig, Devices, Direction}, 14 | playback::AudioSource, 15 | }; 16 | use crate::rtc::MediaTrack; 17 | 18 | #[cfg(feature = "audio-processing")] 19 | mod processor; 20 | #[cfg(feature = "audio-processing")] 21 | pub use processor::WebrtcAudioProcessor; 22 | 23 | #[cfg(not(feature = "audio-processing"))] 24 | #[derive(Debug, Clone)] 25 | pub struct WebrtcAudioProcessor; 26 | 27 | mod capture; 28 | mod device; 29 | mod playback; 30 | 31 | pub const SAMPLE_RATE: SampleRate = SampleRate(48_000); 32 | pub const ENGINE_FORMAT: AudioFormat = AudioFormat::new(SAMPLE_RATE, 2); 33 | 34 | const DURATION_10MS: Duration = Duration::from_millis(10); 35 | const DURATION_20MS: Duration = Duration::from_millis(20); 36 | 37 | #[derive(Debug, Clone)] 38 | pub struct AudioContext { 39 | playback: AudioPlayback, 40 | capture: AudioCapture, 41 | } 42 | 43 | impl AudioContext { 44 | pub async fn list_devices() -> Result { 45 | tokio::task::spawn_blocking(list_devices).await? 
46 | } 47 | 48 | pub fn list_devices_sync() -> Result { 49 | list_devices() 50 | } 51 | 52 | /// Create a new [`AudioContext`]. 53 | pub async fn new(config: AudioConfig) -> Result { 54 | let host = cpal::default_host(); 55 | 56 | #[cfg(feature = "audio-processing")] 57 | let processor = WebrtcAudioProcessor::new(config.processing_enabled)?; 58 | #[cfg(not(feature = "audio-processing"))] 59 | let processor = WebrtcAudioProcessor; 60 | 61 | let capture = 62 | AudioCapture::build(&host, config.input_device.as_deref(), processor.clone()).await?; 63 | let playback = 64 | AudioPlayback::build(&host, config.output_device.as_deref(), processor.clone()).await?; 65 | Ok(Self { playback, capture }) 66 | } 67 | 68 | pub async fn capture_track(&self) -> Result { 69 | self.capture.create_opus_track().await 70 | } 71 | 72 | pub async fn play_track(&self, track: MediaTrack) -> Result<()> { 73 | self.playback.add_track(track).await?; 74 | Ok(()) 75 | } 76 | 77 | pub async fn feedback_encoded(&self) -> Result<()> { 78 | let track = self.capture_track().await?; 79 | self.play_track(track).await?; 80 | Ok(()) 81 | } 82 | 83 | pub async fn feedback_raw(&self) -> Result<()> { 84 | let buffer_size = ENGINE_FORMAT.sample_count(DURATION_20MS * 16); 85 | let (sink, source) = ringbuf_pipe(buffer_size); 86 | self.capture.add_sink(sink).await?; 87 | self.playback.add_source(source).await?; 88 | Ok(()) 89 | } 90 | } 91 | 92 | mod ringbuf_pipe { 93 | use std::ops::ControlFlow; 94 | 95 | use anyhow::Result; 96 | use ringbuf::{ 97 | traits::{Consumer as _, Observer, Producer as _, Split}, 98 | HeapCons as Consumer, HeapProd as Producer, 99 | }; 100 | use tracing::warn; 101 | 102 | use super::{AudioSink, AudioSource}; 103 | 104 | pub struct RingbufSink(Producer); 105 | pub struct RingbufSource(Consumer); 106 | 107 | pub fn ringbuf_pipe(buffer_size: usize) -> (RingbufSink, RingbufSource) { 108 | let (producer, consumer) = ringbuf::HeapRb::::new(buffer_size).split(); 109 | (RingbufSink(producer), 
RingbufSource(consumer)) 110 | } 111 | 112 | impl AudioSink for RingbufSink { 113 | fn tick(&mut self, buf: &[f32]) -> Result> { 114 | let len = self.0.push_slice(buf); 115 | if len < buf.len() { 116 | warn!("ringbuf sink xrun: failed to send {}", buf.len() - len); 117 | } 118 | Ok(ControlFlow::Continue(())) 119 | } 120 | } 121 | 122 | impl AudioSource for RingbufSource { 123 | fn tick(&mut self, buf: &mut [f32]) -> Result> { 124 | let len = self.0.pop_slice(buf); 125 | if len < buf.len() { 126 | warn!("ringbuf source xrun: failed to recv {}", buf.len() - len); 127 | } 128 | Ok(ControlFlow::Continue(len)) 129 | } 130 | } 131 | } 132 | 133 | #[derive(Debug, Clone, Copy)] 134 | pub struct AudioFormat { 135 | pub sample_rate: SampleRate, 136 | pub channel_count: ChannelCount, 137 | } 138 | 139 | impl AudioFormat { 140 | pub const fn new(sample_rate: SampleRate, channel_count: ChannelCount) -> Self { 141 | Self { 142 | sample_rate, 143 | channel_count, 144 | } 145 | } 146 | pub const fn new2(sample_rate: u32, channel_count: u16) -> Self { 147 | Self { 148 | sample_rate: SampleRate(sample_rate), 149 | channel_count, 150 | } 151 | } 152 | 153 | pub fn duration_from_sample_count(&self, sample_count: usize) -> Duration { 154 | Duration::from_secs_f32( 155 | (sample_count as f32 / self.channel_count as f32) / self.sample_rate.0 as f32, 156 | ) 157 | } 158 | 159 | pub const fn block_count(&self, duration: Duration) -> usize { 160 | (self.sample_rate.0 as usize / 1000) * duration.as_millis() as usize 161 | } 162 | 163 | pub const fn sample_count(&self, duration: Duration) -> usize { 164 | self.block_count(duration) * self.channel_count as usize 165 | } 166 | } 167 | -------------------------------------------------------------------------------- /callme-cli/src/main.rs: -------------------------------------------------------------------------------- 1 | use callme::{ 2 | audio::{AudioConfig, AudioContext}, 3 | net, 4 | rtc::{handle_connection_with_audio_context, 
RtcConnection, RtcProtocol}, 5 | NodeId, 6 | }; 7 | use clap::Parser; 8 | use dialoguer::Confirm; 9 | use iroh::protocol::Router; 10 | use tokio::task::JoinSet; 11 | use tracing::{error, info, warn}; 12 | 13 | #[derive(Parser, Debug)] 14 | #[command(about = "Call me iroh", long_about = None)] 15 | struct Args { 16 | /// The audio input device to use. 17 | #[arg(short, long)] 18 | input_device: Option, 19 | /// The audio output device to use. 20 | #[arg(short, long)] 21 | output_device: Option, 22 | /// If set, audio processing and echo cancellation will be disabled. 23 | #[arg(long)] 24 | disable_processing: bool, 25 | #[clap(subcommand)] 26 | command: Command, 27 | } 28 | 29 | #[derive(Debug, Parser)] 30 | enum Command { 31 | /// Accept calls from remote nodes. 32 | Accept { 33 | /// Accept more than one call. 34 | #[clap(long)] 35 | many: bool, 36 | /// Auto-accept calls without confirmation. 37 | #[clap(long)] 38 | auto: bool, 39 | }, 40 | /// Make calls to remote nodes. 41 | Connect { node_id: Vec }, 42 | /// Create a debug feedback loop through an in-memory channel. 
43 | Feedback { mode: Option }, 44 | /// List the available audio devices 45 | ListDevices, 46 | } 47 | 48 | #[derive(Debug, Clone, clap::ValueEnum, Default)] 49 | enum FeedbackMode { 50 | #[default] 51 | Raw, 52 | Encoded, 53 | } 54 | 55 | #[tokio::main(flavor = "multi_thread")] 56 | async fn main() -> anyhow::Result<()> { 57 | tracing_subscriber::fmt::init(); 58 | let args = Args::parse(); 59 | let audio_config = AudioConfig { 60 | input_device: args.input_device, 61 | output_device: args.output_device, 62 | processing_enabled: !args.disable_processing, 63 | }; 64 | let mut endpoint_shutdown = None; 65 | let fut = async { 66 | match args.command { 67 | Command::Accept { many, auto } => { 68 | let endpoint = net::bind_endpoint().await?; 69 | let proto = RtcProtocol::new(endpoint.clone()); 70 | let _router = Router::builder(endpoint.clone()) 71 | .accept(RtcProtocol::ALPN, proto.clone()) 72 | .spawn() 73 | .await?; 74 | 75 | endpoint_shutdown = Some(endpoint.clone()); 76 | println!("our node id:\n{}", endpoint.node_id()); 77 | 78 | let audio_ctx = AudioContext::new(audio_config).await?; 79 | 80 | while let Some(conn) = proto.accept().await? { 81 | if !many { 82 | handle_connection(audio_ctx, conn).await; 83 | break; 84 | } else { 85 | let peer = conn.transport().remote_node_id()?.fmt_short(); 86 | let accept = 87 | auto || confirm(format!("Incoming call from {peer}. 
Accept?")).await; 88 | if accept { 89 | n0_future::task::spawn(handle_connection(audio_ctx.clone(), conn)); 90 | } else { 91 | info!("reject connection from {peer}"); 92 | conn.transport().close(0u32.into(), b"bye"); 93 | } 94 | } 95 | } 96 | } 97 | Command::Connect { node_id } => { 98 | let endpoint = net::bind_endpoint().await?; 99 | endpoint_shutdown = Some(endpoint.clone()); 100 | 101 | let proto = RtcProtocol::new(endpoint); 102 | let audio_ctx = AudioContext::new(audio_config).await?; 103 | 104 | let mut join_set = JoinSet::new(); 105 | 106 | for node_id in node_id { 107 | info!("connecting to {}", node_id.fmt_short()); 108 | let audio_ctx = audio_ctx.clone(); 109 | let proto = proto.clone(); 110 | join_set.spawn(async move { 111 | let fut = async { 112 | let conn = proto.connect(node_id).await?; 113 | info!("established connection to {}", node_id.fmt_short()); 114 | handle_connection(audio_ctx, conn).await; 115 | anyhow::Ok(()) 116 | }; 117 | (node_id, fut.await) 118 | }); 119 | } 120 | 121 | while let Some(res) = join_set.join_next().await { 122 | let (node_id, res) = res.expect("task panicked"); 123 | if let Err(err) = res { 124 | warn!("failed to connect to {}: {err:?}", node_id.fmt_short()) 125 | } 126 | } 127 | } 128 | Command::Feedback { mode } => { 129 | let ctx = AudioContext::new(audio_config).await?; 130 | let mode = mode.unwrap_or_default(); 131 | println!("start feedback loop for 5 seconds (mode {mode:?}"); 132 | match mode { 133 | FeedbackMode::Raw => ctx.feedback_raw().await?, 134 | FeedbackMode::Encoded => ctx.feedback_encoded().await?, 135 | } 136 | tokio::time::sleep(std::time::Duration::from_secs(5)).await; 137 | println!("closing"); 138 | } 139 | Command::ListDevices => { 140 | let devices = AudioContext::list_devices().await?; 141 | println!("{devices:?}"); 142 | } 143 | } 144 | anyhow::Ok(()) 145 | }; 146 | 147 | tokio::select! 
{ 148 | res = fut => res?, 149 | _ = tokio::signal::ctrl_c() => { 150 | tracing::info!("shutting down"); 151 | if let Some(endpoint) = endpoint_shutdown { 152 | endpoint.close().await; 153 | } 154 | } 155 | } 156 | Ok(()) 157 | } 158 | 159 | async fn handle_connection(audio_ctx: AudioContext, conn: RtcConnection) { 160 | let peer = conn.transport().remote_node_id().unwrap().fmt_short(); 161 | if let Err(err) = handle_connection_with_audio_context(audio_ctx, conn).await { 162 | error!("connection from {peer} closed with error: {err:?}",) 163 | } else { 164 | info!("connection from {peer} closed") 165 | } 166 | } 167 | 168 | async fn confirm(msg: String) -> bool { 169 | tokio::task::spawn_blocking(move || Confirm::new().with_prompt(msg).interact().unwrap()) 170 | .await 171 | .unwrap() 172 | } 173 | -------------------------------------------------------------------------------- /callme/src/audio/processor.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | sync::{ 3 | atomic::{AtomicBool, AtomicU64, AtomicUsize, Ordering}, 4 | Arc, Mutex, 5 | }, 6 | time::Duration, 7 | }; 8 | 9 | use anyhow::Result; 10 | use dasp_sample::ToSample; 11 | use tracing::{debug, info}; 12 | use webrtc_audio_processing::{ 13 | Config, EchoCancellation, EchoCancellationSuppressionLevel, InitializationConfig, 14 | NoiseSuppression, NoiseSuppressionLevel, 15 | }; 16 | 17 | #[derive(Clone, Debug)] 18 | pub struct WebrtcAudioProcessor(Arc); 19 | 20 | #[derive(derive_more::Debug)] 21 | struct Inner { 22 | #[debug("Processor")] 23 | inner: Mutex>, 24 | config: Mutex, 25 | capture_delay: AtomicU64, 26 | playback_delay: AtomicU64, 27 | enabled: AtomicBool, 28 | capture_channels: AtomicUsize, 29 | playback_channels: AtomicUsize, 30 | } 31 | 32 | impl WebrtcAudioProcessor { 33 | pub fn new(enabled: bool) -> Result { 34 | let suppression_level = EchoCancellationSuppressionLevel::Moderate; 35 | // High pass filter is a prerequisite to running echo 
cancellation. 36 | let config = Config { 37 | echo_cancellation: Some(EchoCancellation { 38 | suppression_level, 39 | // stream_delay_ms: Some(20), 40 | stream_delay_ms: None, 41 | enable_delay_agnostic: true, 42 | enable_extended_filter: true, 43 | }), 44 | enable_high_pass_filter: true, 45 | // noise_suppression: Some(NoiseSuppression { 46 | // suppression_level: NoiseSuppressionLevel::High, 47 | // }), 48 | ..Config::default() 49 | }; 50 | // processor.set_config(config.clone()); 51 | info!("init audio processor (enabled={enabled})"); 52 | Ok(Self(Arc::new(Inner { 53 | inner: Mutex::new(None), 54 | config: Mutex::new(config), 55 | capture_delay: Default::default(), 56 | playback_delay: Default::default(), 57 | enabled: AtomicBool::new(enabled), 58 | capture_channels: Default::default(), 59 | playback_channels: Default::default(), 60 | }))) 61 | } 62 | 63 | pub fn is_enabled(&self) -> bool { 64 | self.0.enabled.load(Ordering::SeqCst) 65 | } 66 | 67 | pub fn set_enabled(&self, enabled: bool) { 68 | let _prev = self.0.enabled.swap(enabled, Ordering::SeqCst); 69 | } 70 | 71 | pub fn init_capture(&self, channels: usize) -> Result<()> { 72 | self.0.capture_channels.store(channels, Ordering::SeqCst); 73 | if self.0.playback_channels.load(Ordering::SeqCst) > 0 { 74 | self.init()?; 75 | } 76 | Ok(()) 77 | } 78 | 79 | pub fn init_playback(&self, channels: usize) -> Result<()> { 80 | self.0.playback_channels.store(channels, Ordering::SeqCst); 81 | if self.0.capture_channels.load(Ordering::SeqCst) > 0 { 82 | self.init()?; 83 | } 84 | Ok(()) 85 | } 86 | 87 | fn init(&self) -> Result<()> { 88 | let playback_channels = self.0.playback_channels.load(Ordering::SeqCst); 89 | let capture_channels = self.0.capture_channels.load(Ordering::SeqCst); // fix: was loading playback_channels, initializing the processor with the wrong capture channel count 90 | let mut processor = webrtc_audio_processing::Processor::new(&InitializationConfig { 91 | num_capture_channels: capture_channels as i32, 92 | num_render_channels: playback_channels as i32, 93 | ..InitializationConfig::default() 94 |
})?; 95 | processor.set_config(self.0.config.lock().unwrap().clone()); 96 | *self.0.inner.lock().unwrap() = Some(processor); 97 | Ok(()) 98 | } 99 | 100 | /// Processes and modifies the audio frame from a capture device by applying 101 | /// signal processing as specified in the config. `frame` should hold an 102 | /// interleaved f32 audio frame, with [`NUM_SAMPLES_PER_FRAME`] samples. 103 | // webrtc-audio-processing expects a 10ms chunk for each process call. 104 | pub fn process_capture_frame( 105 | &self, 106 | frame: &mut [f32], 107 | ) -> Result<(), webrtc_audio_processing::Error> { 108 | if !self.is_enabled() { 109 | return Ok(()); 110 | } 111 | if let Some(processor) = self.0.inner.lock().unwrap().as_mut() { 112 | processor.process_capture_frame(frame) 113 | } else { 114 | Ok(()) 115 | } 116 | } 117 | /// Processes and optionally modifies the audio frame from a playback device. 118 | /// `frame` should hold an interleaved `f32` audio frame, with 119 | /// [`NUM_SAMPLES_PER_FRAME`] samples. 
120 | pub fn process_render_frame( 121 | &self, 122 | frame: &mut [f32], 123 | ) -> Result<(), webrtc_audio_processing::Error> { 124 | if !self.is_enabled() { 125 | return Ok(()); 126 | } 127 | if let Some(processor) = self.0.inner.lock().unwrap().as_mut() { 128 | processor.process_render_frame(frame) 129 | } else { 130 | Ok(()) 131 | } 132 | } 133 | 134 | pub fn set_capture_delay(&self, stream_delay: Duration) { 135 | let new_val = stream_delay.as_millis() as u64; 136 | if let Ok(old_val) = 137 | self.0 138 | .capture_delay 139 | .fetch_update(Ordering::SeqCst, Ordering::SeqCst, |val| { 140 | if new_val.abs_diff(val) > 1 { 141 | Some(new_val) 142 | } else { 143 | None 144 | } 145 | }) 146 | { 147 | debug!("changing capture delay from {old_val} to {new_val}"); 148 | self.update_stream_delay(); 149 | } 150 | } 151 | 152 | pub fn set_playback_delay(&self, stream_delay: Duration) { 153 | let new_val = stream_delay.as_millis() as u64; 154 | if let Ok(old_val) = 155 | self.0 156 | .playback_delay 157 | .fetch_update(Ordering::SeqCst, Ordering::SeqCst, |val| { 158 | if new_val.abs_diff(val) > 1 { 159 | Some(new_val) 160 | } else { 161 | None 162 | } 163 | }) 164 | { 165 | debug!("changing playback delay from {old_val} to {new_val}"); 166 | self.update_stream_delay(); 167 | } 168 | } 169 | 170 | fn update_stream_delay(&self) { 171 | let playback = self.0.playback_delay.load(Ordering::Relaxed); 172 | let capture = self.0.capture_delay.load(Ordering::Relaxed); 173 | let total = playback + capture; 174 | let mut config = self.0.config.lock().unwrap(); 175 | config.echo_cancellation.as_mut().unwrap().stream_delay_ms = Some(total as i32); 176 | if let Some(processor) = self.0.inner.lock().unwrap().as_mut() { 177 | processor.set_config(config.clone()); 178 | } 179 | } 180 | } 181 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: 
Rust 2 | 3 | on: 4 | push: 5 | branches: ["main"] 6 | pull_request: 7 | branches: ["main"] 8 | 9 | env: 10 | CARGO_TERM_COLOR: always 11 | RUST_BACKTRACE: 1 12 | RUSTFLAGS: -Dwarnings 13 | RUSTDOCFLAGS: -Dwarnings 14 | 15 | jobs: 16 | build_and_test_linux: 17 | runs-on: ubuntu-latest 18 | timeout-minutes: 30 19 | name: "Build and test (Linux)" 20 | steps: 21 | - name: Checkout 22 | uses: actions/checkout@v4 23 | - name: Update apt 24 | run: sudo apt update 25 | - name: Install dependencies 26 | run: sudo apt-get install libasound2-dev libdbus-1-dev 27 | - name: Install stable 28 | uses: dtolnay/rust-toolchain@stable 29 | with: 30 | components: clippy 31 | 32 | - name: Install cargo-nextest 33 | uses: taiki-e/install-action@v2 34 | with: 35 | tool: nextest 36 | 37 | - name: build 38 | run: | 39 | cargo build 40 | 41 | - name: build tests 42 | run: | 43 | cargo nextest run --workspace --lib --bins --tests --no-run 44 | 45 | - name: run tests 46 | run: | 47 | cargo nextest run --workspace --lib --bins --tests --no-fail-fast 48 | env: 49 | RUST_LOG: ${{ runner.debug && 'TRACE' || 'DEBUG'}} 50 | 51 | - name: run doctests 52 | env: 53 | RUST_LOG: ${{ runner.debug && 'TRACE' || 'DEBUG'}} 54 | run: | 55 | cargo test --workspace --all-features --doc 56 | 57 | build_and_test_macos: 58 | runs-on: macOS-latest 59 | timeout-minutes: 30 60 | name: "Build and test (macOS)" 61 | steps: 62 | - name: Checkout 63 | uses: actions/checkout@v4 64 | - name: Set up Homebrew 65 | id: set-up-homebrew 66 | uses: Homebrew/actions/setup-homebrew@master 67 | - name: Install dependencies 68 | run: brew install automake libtool 69 | 70 | - name: Install stable 71 | uses: dtolnay/rust-toolchain@stable 72 | with: 73 | components: clippy 74 | 75 | - name: Install cargo-nextest 76 | uses: taiki-e/install-action@v2 77 | with: 78 | tool: nextest 79 | 80 | - name: build 81 | run: | 82 | cargo build 83 | 84 | - name: build tests 85 | run: | 86 | cargo nextest run --workspace --lib --bins --tests --no-run 
87 | 88 | - name: run tests 89 | run: | 90 | cargo nextest run --workspace --lib --bins --tests --no-fail-fast 91 | env: 92 | RUST_LOG: ${{ runner.debug && 'TRACE' || 'DEBUG'}} 93 | 94 | - name: run doctests 95 | env: 96 | RUST_LOG: ${{ runner.debug && 'TRACE' || 'DEBUG'}} 97 | run: | 98 | cargo test --workspace --all-features --doc 99 | 100 | build_and_test_windows: 101 | timeout-minutes: 30 102 | name: "Build and test (Windows)" 103 | runs-on: windows-latest 104 | steps: 105 | - name: Checkout 106 | uses: actions/checkout@v4 107 | with: 108 | ref: ${{ inputs.git-ref }} 109 | 110 | - name: Install Rust stable 111 | uses: dtolnay/rust-toolchain@stable 112 | with: 113 | components: clippy 114 | 115 | - name: Install cargo-nextest 116 | shell: powershell 117 | run: | 118 | $tmp = New-TemporaryFile | Rename-Item -NewName { $_ -replace 'tmp$', 'zip' } -PassThru 119 | Invoke-WebRequest -OutFile $tmp https://get.nexte.st/latest/windows 120 | $outputDir = if ($Env:CARGO_HOME) { Join-Path $Env:CARGO_HOME "bin" } else { "~/.cargo/bin" } 121 | $tmp | Expand-Archive -DestinationPath $outputDir -Force 122 | $tmp | Remove-Item 123 | 124 | - name: build 125 | run: | 126 | cargo build --no-default-features 127 | 128 | - name: build tests 129 | run: | 130 | cargo nextest run --workspace --lib --bins --tests --no-run --no-default-features 131 | 132 | - name: run tests 133 | run: | 134 | cargo nextest run --workspace --lib --bins --tests --no-fail-fast --no-default-features 135 | env: 136 | RUST_LOG: ${{ runner.debug && 'TRACE' || 'DEBUG'}} 137 | 138 | build_android: 139 | name: "Build Android APK" 140 | runs-on: ubuntu-latest 141 | steps: 142 | - uses: actions/checkout@v4 143 | with: 144 | submodules: true 145 | - uses: dtolnay/rust-toolchain@stable 146 | with: 147 | targets: aarch64-linux-android 148 | - uses: Swatinem/rust-cache@v2 149 | - uses: actions/setup-java@v2 150 | with: 151 | distribution: "temurin" 152 | java-version: "17" 153 | - uses: android-actions/setup-android@v3 
154 | with: 155 | packages: "platforms;android-30" 156 | - uses: nttld/setup-ndk@v1 157 | id: setup-ndk 158 | with: 159 | ndk-version: r28 160 | 161 | - uses: taiki-e/cache-cargo-install-action@v2 162 | with: 163 | tool: cargo-apk 164 | 165 | - name: Build apk 166 | run: | 167 | keytool -genkey -v -keystore /tmp/debug.keystore -storepass android -alias androiddebugkey -keypass android -keyalg RSA -keysize 2048 -validity 10000 -dname "C=US, O=Android, CN=Android Debug" 168 | cargo apk build -p callme-egui --target aarch64-linux-android --lib 169 | env: 170 | ANDROID_NDK_HOME: ${{ steps.setup-ndk.outputs.ndk-path }} 171 | ANDROID_NDK_ROOT: ${{ steps.setup-ndk.outputs.ndk-path }} 172 | ANDROID_NDK: ${{ steps.setup-ndk.outputs.ndk-path }} 173 | TOOLCHAIN: ${{ steps.setup-ndk.outputs.ndk-path }}/toolchains/llvm/prebuilt/linux-x86_64 174 | CARGO_APK_RELEASE_KEYSTORE_PASSWORD: android 175 | CARGO_APK_RELEASE_KEYSTORE: /tmp/debug.keystore 176 | 177 | check_fmt: 178 | timeout-minutes: 30 179 | name: Check fmt 180 | runs-on: ubuntu-latest 181 | steps: 182 | - uses: actions/checkout@v4 183 | 184 | - uses: dtolnay/rust-toolchain@stable 185 | with: 186 | components: rustfmt 187 | 188 | - name: fmt 189 | run: cargo fmt --all -- --check 190 | 191 | # check_docs: 192 | # timeout-minutes: 30 193 | # name: Check docs 194 | # runs-on: ubuntu-latest 195 | # steps: 196 | # - uses: actions/checkout@v4 197 | # - uses: dtolnay/rust-toolchain@master 198 | # with: 199 | # toolchain: nightly-2024-05-02 200 | # - name: Install sccache 201 | # uses: mozilla-actions/sccache-action@v0.0.5 202 | 203 | # - name: Docs 204 | # run: cargo doc --workspace --all-features --no-deps --document-private-items 205 | # env: 206 | # RUSTDOCFLAGS: --cfg docsrs 207 | 208 | clippy_check: 209 | name: Check clippy 210 | timeout-minutes: 30 211 | runs-on: ubuntu-latest 212 | steps: 213 | - uses: actions/checkout@v4 214 | - name: Update apt 215 | run: sudo apt update 216 | - name: Install dependencies 217 | run: sudo
apt-get install libasound2-dev libdbus-1-dev 218 | - uses: dtolnay/rust-toolchain@stable 219 | with: 220 | components: clippy 221 | - name: Install sccache 222 | uses: mozilla-actions/sccache-action@v0.0.5 223 | 224 | - name: clippy check (all features) 225 | run: cargo clippy --workspace --all-features --all-targets --bins --tests --benches 226 | 227 | - name: clippy check (no features) 228 | run: cargo clippy --workspace --no-default-features --lib --bins --tests 229 | 230 | - name: clippy check (default features) 231 | run: cargo clippy --workspace --all-targets 232 | -------------------------------------------------------------------------------- /callme/examples/playback-record.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | path::PathBuf, 3 | time::{Duration, Instant}, 4 | }; 5 | 6 | use anyhow::{bail, Context, Result}; 7 | use callme::{ 8 | codec::{ 9 | opus::{OpusChannels, OpusEncoder, OPUS_SAMPLE_RATE}, 10 | Codec, 11 | }, 12 | net::bind_endpoint, 13 | rtc::{MediaFrame, MediaTrack, RtcConnection, RtcProtocol, TrackKind}, 14 | }; 15 | use clap::Parser; 16 | use cpal::Sample; 17 | use hound::{WavReader, WavWriter}; 18 | use iroh::protocol::Router; 19 | use tokio::sync::broadcast; 20 | use tracing::{debug, info, warn}; 21 | 22 | #[derive(Debug, Parser, Clone)] 23 | struct Args { 24 | #[clap(short, long)] 25 | playback_file: Option, 26 | #[clap(short, long)] 27 | record_dir: Option, 28 | } 29 | 30 | #[tokio::main] 31 | async fn main() -> Result<()> { 32 | tracing_subscriber::fmt::init(); 33 | let args = Args::parse(); 34 | let endpoint = bind_endpoint().await?; 35 | println!("node id: {}", endpoint.node_id()); 36 | 37 | let rtc = RtcProtocol::new(endpoint.clone()); 38 | let _router = Router::builder(endpoint) 39 | .accept(RtcProtocol::ALPN, rtc.clone()) 40 | .spawn() 41 | .await?; 42 | 43 | while let Some(conn) = rtc.accept().await? 
{ 44 | info!("accepted"); 45 | let remote_node = conn.transport().remote_node_id()?; 46 | let now = Instant::now(); 47 | let args = args.clone(); 48 | info!(?remote_node, "connection established"); 49 | tokio::task::spawn(async move { 50 | if let Err(err) = handle_connection(conn, args).await { 51 | let elapsed = now.elapsed(); 52 | info!(?remote_node, ?err, "connection closed after {elapsed:?}",); 53 | } 54 | }); 55 | } 56 | 57 | Ok(()) 58 | } 59 | 60 | async fn handle_connection(conn: RtcConnection, args: Args) -> Result<()> { 61 | if let Some(file_path) = args.playback_file { 62 | let (sender, receiver) = broadcast::channel(2); 63 | let track = MediaTrack::new( 64 | receiver, 65 | Codec::Opus { 66 | channels: OpusChannels::Mono, 67 | }, 68 | TrackKind::Audio, 69 | ); 70 | std::thread::spawn({ 71 | move || { 72 | if let Err(err) = stream_wav(file_path, sender) { 73 | tracing::error!("stream thread failed: {err:?}"); 74 | } else { 75 | tracing::info!("stream thread closed"); 76 | } 77 | } 78 | }); 79 | conn.send_track(track).await?; 80 | } 81 | // let file_track = build_file_track(file).await?; 82 | // conn.send_track(file_track).await?; 83 | let mut id = 0; 84 | while let Some(mut track) = conn.recv_track().await? 
{ 85 | info!("incoming track"); 86 | if let Some(dir) = &args.record_dir { 87 | tokio::fs::create_dir_all(&dir).await?; 88 | let node_id = conn.transport().remote_node_id()?.fmt_short(); 89 | let suffix = id; 90 | let file_name = format!("{node_id}-{suffix}.wav"); 91 | let file_path = dir.join(&file_name); 92 | tokio::task::spawn(async move { 93 | if let Err(err) = record_wav(file_path, track).await { 94 | warn!("failed to record {file_name}: {err:?}"); 95 | } else { 96 | info!("recorded {file_name}"); 97 | } 98 | }); 99 | } else { 100 | info!("skip track"); 101 | #[allow(clippy::redundant_pattern_matching)] 102 | tokio::task::spawn(async move { while let Ok(_) = track.recv().await {} }); 103 | } 104 | id += 1; 105 | } 106 | Ok(()) 107 | } 108 | 109 | async fn record_wav(file_path: PathBuf, mut track: MediaTrack) -> Result<()> { 110 | let channels = match track.codec() { 111 | Codec::Opus { channels } => channels, 112 | _ => bail!("only opus tracks are supported"), 113 | }; 114 | info!("start recording {file_path:?} with {channels:?}"); 115 | let mut decoder = opus::Decoder::new(48_000, channels.into())?; 116 | let mut buf = vec![0f32; 960 * channels as usize]; 117 | let file = std::fs::File::create(file_path)?; 118 | let spec = hound::WavSpec { 119 | channels: channels as u16, 120 | sample_rate: 48000, 121 | bits_per_sample: 16, 122 | sample_format: hound::SampleFormat::Int, 123 | }; 124 | let mut writer = WavWriter::new(file, spec)?; 125 | while let Ok(frame) = track.recv().await { 126 | let MediaFrame { 127 | payload, 128 | skipped_frames, 129 | .. 
130 | } = frame; 131 | for _ in 0..skipped_frames.unwrap_or(0) { 132 | let block_count = decoder.decode_float(&[], &mut buf, false)?; 133 | let sample_count = block_count * channels as usize; 134 | for sample in &buf[..sample_count] { 135 | let sample: i16 = sample.to_sample(); 136 | writer.write_sample(sample)?; 137 | } 138 | } 139 | let block_count = decoder.decode_float(&payload, &mut buf, false)?; 140 | let sample_count = block_count * channels as usize; 141 | for sample in &buf[..sample_count] { 142 | let sample: i16 = sample.to_sample(); 143 | writer.write_sample(sample)?; 144 | } 145 | writer.flush()?; 146 | } 147 | writer.finalize()?; 148 | info!("finalized!"); 149 | 150 | Ok(()) 151 | } 152 | 153 | fn stream_wav(file_path: PathBuf, sender: broadcast::Sender) -> Result<()> { 154 | 'outer: loop { 155 | let file = std::fs::File::open(&file_path)?; 156 | let mut reader = WavReader::new(&file)?; 157 | let channels = match reader.spec().channels { 158 | 1 => OpusChannels::Mono, 159 | 2 => OpusChannels::Stereo, 160 | n => bail!( 161 | "wav file has unsupported channel count of {}: must be mono or stereo", 162 | n 163 | ), 164 | }; 165 | if reader.spec().sample_rate != OPUS_SAMPLE_RATE { 166 | bail!( 167 | "wav file has invalid sample rate: must be {}", 168 | OPUS_SAMPLE_RATE 169 | ) 170 | } 171 | let mut encoder = OpusEncoder::new(channels); 172 | info!("wav info: {:?}", reader.spec()); 173 | let start = Instant::now(); 174 | let time_per_sample = Duration::from_secs(1) / 48_000; 175 | for (i, sample) in reader.samples::().enumerate() { 176 | let sample = sample.with_context(|| format!("failed to read sample {i}"))?; 177 | let sample: f32 = sample.to_sample(); 178 | if let Some((payload, sample_count)) = encoder.push_sample(sample) { 179 | let frame = MediaFrame { 180 | payload, 181 | sample_count: Some(sample_count), 182 | skipped_frames: None, 183 | skipped_samples: None, 184 | }; 185 | if let Err(_err) = sender.send(frame) { 186 | tracing::debug!("encoder 
skipped frame: failed to forward to track"); 187 | if sender.receiver_count() == 0 { 188 | tracing::warn!("track dropped, stop encoder"); 189 | break 'outer; 190 | } 191 | } else { 192 | tracing::trace!("opus encoder: sent {sample_count}"); 193 | } 194 | let music_time = time_per_sample * i as u32 / channels as u32; 195 | let actual_time = start.elapsed(); 196 | let sleep_time = music_time.saturating_sub(actual_time); /* BUGFIX: `Duration` subtraction panics on underflow; encoding can lag the ideal playback clock, so clamp to zero — sleep(0) returns immediately */ 197 | debug!("sleep {sleep_time:?}"); 198 | std::thread::sleep(sleep_time); 199 | } 200 | } 201 | } 202 | Ok(()) 203 | } 204 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: "release" 2 | 3 | on: 4 | push: 5 | tags: 6 | - "v*" 7 | workflow_dispatch: 8 | inputs: 9 | release_version: 10 | description: "Release version" 11 | required: true 12 | default: "" 13 | create_release: 14 | description: "Create release" 15 | required: true 16 | default: "true" 17 | upload_artifacts: 18 | description: "Upload artifacts" 19 | required: true 20 | default: "true" 21 | 22 | env: 23 | BIN_NAME: callme-egui 24 | 25 | jobs: 26 | create-release: 27 | name: create-release 28 | runs-on: ubuntu-latest 29 | outputs: 30 | upload_url: ${{ steps.release.outputs.upload_url }} 31 | release_version: ${{ env.RELEASE_VERSION }} 32 | steps: 33 | - name: Get the release version from the tag (push) 34 | shell: bash 35 | if: env.RELEASE_VERSION == '' && github.event_name == 'push' 36 | run: | 37 | # See: https://github.community/t5/GitHub-Actions/How-to-get-just-the-tag-name/m-p/32167/highlight/true#M1027 38 | echo "RELEASE_VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV 39 | echo "version is: ${{ env.RELEASE_VERSION }}" 40 | - name: Get the release version from the tag (dispatch) 41 | shell: bash 42 | if: github.event_name == 'workflow_dispatch' 43 | run: | 44 | echo "RELEASE_VERSION=${{ github.event.inputs.release_version }}" >> $GITHUB_ENV 45
| echo "version is: ${{ env.RELEASE_VERSION }}" 46 | - name: Checkout repository 47 | uses: actions/checkout@v4 48 | with: 49 | fetch-depth: 1 50 | - name: Create GitHub release 51 | id: release 52 | if: github.event.inputs.create_release == 'true' || github.event_name == 'push' 53 | uses: actions/create-release@v1 54 | env: 55 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 56 | with: 57 | tag_name: ${{ env.RELEASE_VERSION }} 58 | release_name: ${{ env.RELEASE_VERSION }} 59 | 60 | build-release: 61 | name: build-release 62 | needs: create-release 63 | runs-on: ${{ matrix.runner }} 64 | strategy: 65 | matrix: 66 | name: 67 | [ 68 | ubuntu-latest, 69 | ubuntu-arm-latest, 70 | macOS-arm-latest, 71 | macOS-latest, 72 | windows-latest, 73 | ] 74 | rust: [stable] 75 | include: 76 | - name: ubuntu-arm-latest 77 | os: ubuntu-latest 78 | target: linux-aarch64 79 | cargo_targets: "aarch64-unknown-linux-gnu" 80 | runner: ubuntu-22.04-arm 81 | - name: ubuntu-latest 82 | os: ubuntu-latest 83 | target: linux-x86_64 84 | cargo_targets: "x86_64-unknown-linux-gnu" 85 | runner: ubuntu-20.04 86 | - name: macOS-latest 87 | os: macOS-latest 88 | target: mac-x86_64 89 | cargo_targets: "x86_64-apple-darwin" 90 | runner: macos-13 91 | - name: macOS-arm-latest 92 | os: macOS-latest 93 | target: mac-aarch64 94 | cargo_targets: "aarch64-apple-darwin" 95 | runner: macos-latest 96 | - name: windows-latest 97 | os: windows-latest 98 | target: windows-x86_64 99 | cargo_targets: "x86_64-pc-windows-msvc" 100 | runner: windows-latest 101 | steps: 102 | - name: Checkout repository 103 | uses: actions/checkout@v4 104 | with: 105 | fetch-depth: 1 106 | - name: Install Rust 107 | uses: dtolnay/rust-toolchain@stable 108 | with: 109 | toolchain: ${{ matrix.rust }} 110 | targets: ${{ matrix.cargo_targets }} 111 | 112 | - name: Install dependencies (linux) 113 | if: matrix.os == 'ubuntu-latest' 114 | run: | 115 | sudo apt-get update 116 | sudo apt-get install libasound2-dev libdbus-1-dev musl-tools -y 117 | 
118 | - name: Set up Homebrew (macOS) 119 | if: matrix.os == 'macOS-latest' 120 | uses: Homebrew/actions/setup-homebrew@master 121 | 122 | - name: Install dependencies (macOS) 123 | if: matrix.os == 'macOS-latest' 124 | run: brew install automake libtool 125 | 126 | - name: Build release binary 127 | shell: bash 128 | run: | 129 | if [ "${{ matrix.name }}" = "ubuntu-arm-latest" ]; then 130 | export CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER=aarch64-linux-gnu-gcc # BUGFIX: var previously said ..._MUSL_LINKER, but the matrix target is aarch64-unknown-linux-gnu, so the musl-named var was dead config 131 | export CC=aarch64-linux-gnu-gcc 132 | fi 133 | if [ "${{ matrix.os }}" = "windows-latest" ]; then 134 | cargo build -p callme-egui --no-default-features --verbose --release --target ${{ matrix.cargo_targets }} 135 | else 136 | cargo build -p callme-egui --verbose --release --target ${{ matrix.cargo_targets }} 137 | fi 138 | 139 | - name: Build archive 140 | shell: bash 141 | run: | 142 | staging="${{ env.BIN_NAME }}-${{ needs.create-release.outputs.release_version }}-${{ matrix.target }}" 143 | mkdir -p "$staging" 144 | if [ "${{ matrix.os }}" = "windows-latest" ]; then 145 | cp "target/${{ matrix.cargo_targets }}/release/${{ env.BIN_NAME }}.exe" "$staging/" 146 | cd "$staging" 147 | 7z a "../$staging.zip" . 148 | echo "ASSET=$staging.zip" >> $GITHUB_ENV 149 | else 150 | cp "target/${{ matrix.cargo_targets }}/release/${{ env.BIN_NAME }}" "$staging/" 151 | tar czf "$staging.tar.gz" -C "$staging" .
152 | echo "ASSET=$staging.tar.gz" >> $GITHUB_ENV 153 | fi 154 | - name: Upload release archive 155 | uses: actions/upload-release-asset@v1.0.2 156 | if: github.event.inputs.upload_artifacts == 'true' || github.event_name == 'push' 157 | env: 158 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 159 | with: 160 | upload_url: ${{ needs.create-release.outputs.upload_url }} 161 | asset_path: ${{ env.ASSET }} 162 | asset_name: ${{ env.ASSET }} 163 | asset_content_type: application/octet-stream 164 | 165 | build-android: 166 | name: build-android 167 | needs: create-release 168 | runs-on: ubuntu-latest 169 | steps: 170 | - uses: actions/checkout@v4 171 | with: 172 | submodules: true 173 | - uses: dtolnay/rust-toolchain@stable 174 | with: 175 | targets: aarch64-linux-android 176 | - uses: Swatinem/rust-cache@v2 177 | - uses: actions/setup-java@v2 178 | with: 179 | distribution: "temurin" 180 | java-version: "17" 181 | - uses: android-actions/setup-android@v3 182 | with: 183 | packages: "platforms;android-30" 184 | - uses: nttld/setup-ndk@v1 185 | id: setup-ndk 186 | with: 187 | ndk-version: r28 188 | 189 | - uses: taiki-e/cache-cargo-install-action@v2 190 | with: 191 | tool: cargo-apk 192 | 193 | - name: Build apk 194 | run: | 195 | keytool -genkey -v -keystore /tmp/debug.keystore -storepass android -alias androiddebugkey -keypass android -keyalg RSA -keysize 2048 -validity 10000 -dname "C=US, O=Android, CN=Android Debug" 196 | cargo apk build -p callme-egui --target aarch64-linux-android --lib --release 197 | env: 198 | ANDROID_NDK_HOME: ${{ steps.setup-ndk.outputs.ndk-path }} 199 | ANDROID_NDK_ROOT: ${{ steps.setup-ndk.outputs.ndk-path }} 200 | ANDROID_NDK: ${{ steps.setup-ndk.outputs.ndk-path }} 201 | TOOLCHAIN: ${{ steps.setup-ndk.outputs.ndk-path }}/toolchains/llvm/prebuilt/linux-x86_64 202 | CARGO_APK_RELEASE_KEYSTORE_PASSWORD: android 203 | CARGO_APK_RELEASE_KEYSTORE: /tmp/debug.keystore 204 | 205 | - name: Rename apk 206 | shell: bash 207 | run: | 208 | filename="${{ 
env.BIN_NAME }}-${{ needs.create-release.outputs.release_version }}-android-aarch64.apk" 209 | mv target/release/apk/callme-egui.apk "$filename" 210 | echo "ASSET=$filename" >> $GITHUB_ENV 211 | 212 | - name: Upload release archive 213 | uses: actions/upload-release-asset@v1.0.2 214 | if: github.event.inputs.upload_artifacts == 'true' || github.event_name == 'push' 215 | env: 216 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 217 | with: 218 | upload_url: ${{ needs.create-release.outputs.upload_url }} 219 | asset_path: ${{ env.ASSET }} 220 | asset_name: ${{ env.ASSET }} 221 | asset_content_type: application/octet-stream 222 | -------------------------------------------------------------------------------- /callme-egui/build-ios.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import argparse 4 | import subprocess 5 | import tomllib 6 | import os 7 | import shutil 8 | import zipfile 9 | import tempfile 10 | import json 11 | 12 | def parse_cargo_toml(): 13 | with open('Cargo.toml', 'rb') as f: 14 | cargo_toml = tomllib.load(f) 15 | app_name = cargo_toml['package']['name'] 16 | app_id = cargo_toml['package']['metadata']['bundle']['identifier'] 17 | return app_name, app_id 18 | 19 | def ipa(args): 20 | print("Releasing the build...") 21 | args.release = True 22 | build(args) 23 | 24 | app_name, _ = parse_cargo_toml() 25 | build_type = 'release' 26 | target = get_target(args) 27 | cargo_target_dir = os.getenv('CARGO_TARGET_DIR') 28 | 29 | if cargo_target_dir: 30 | base_target_dir = cargo_target_dir 31 | else: 32 | base_target_dir = 'target' 33 | app_path = os.path.join(base_target_dir, target, build_type, 'bundle', 'ios', f'{app_name}.app') 34 | temp_dir = tempfile.mkdtemp() 35 | 36 | payload_dir = os.path.join(temp_dir, "Payload") 37 | os.makedirs(payload_dir) 38 | shutil.copytree(app_path, os.path.join(payload_dir, f'{app_name}.app')) 39 | 40 | ipa_path = f'{app_name}.ipa' 41 | with 
zipfile.ZipFile(ipa_path, 'w', zipfile.ZIP_DEFLATED) as ipa_file: 42 | for root, dirs, files in os.walk(payload_dir): 43 | for file in files: 44 | file_path = os.path.join(root, file) 45 | ipa_file.write(file_path, os.path.relpath(file_path, os.path.dirname(payload_dir))) 46 | 47 | shutil.rmtree(payload_dir) 48 | print(f"Created {ipa_path}") 49 | 50 | def post_process_info_plist(plist_path, ipad): 51 | try: 52 | with open('insert.plist', 'r') as insert_file: 53 | insert_content = insert_file.read() 54 | except: 55 | insert_content = "" 56 | with open(plist_path, 'r') as plist_file: 57 | plist_content = plist_file.read() 58 | 59 | if ipad: 60 | ipad_content = """ 61 | UIDeviceFamily 62 | 63 | 1 64 | 2 65 | 66 | UISupportedInterfaceOrientations 67 | 68 | UIInterfaceOrientationPortrait 69 | UIInterfaceOrientationLandscapeLeft 70 | UIInterfaceOrientationLandscapeRight 71 | 72 | UISupportedInterfaceOrientations~ipad 73 | 74 | UIInterfaceOrientationPortrait 75 | UIInterfaceOrientationPortraitUpsideDown 76 | UIInterfaceOrientationLandscapeLeft 77 | UIInterfaceOrientationLandscapeRight 78 | 79 | """ 80 | insert_content += ipad_content 81 | 82 | modified_content = plist_content.replace('', f'{insert_content}\n') 83 | 84 | with open(plist_path, 'w') as plist_file: 85 | plist_file.write(modified_content) 86 | 87 | def get_target(args): 88 | target = 'aarch64-apple-ios' 89 | if args.x86: 90 | target = 'x86_64-apple-ios' 91 | elif args.sim: 92 | target = 'aarch64-apple-ios-sim' 93 | if args.target: 94 | target = args.target 95 | return target 96 | 97 | def build(args): 98 | target = get_target(args) 99 | command = ['cargo', 'bundle', '--target', target] 100 | if args.release: 101 | command.append('--release') 102 | 103 | print(f"Running command: {' '.join(command)}") 104 | subprocess.run(command, check=True) 105 | app_name, _ = parse_cargo_toml() 106 | build_type = 'release' if args.release else 'debug' 107 | cargo_target_dir = os.getenv('CARGO_TARGET_DIR') 108 | if 
cargo_target_dir: 109 | base_target_dir = cargo_target_dir 110 | else: 111 | base_target_dir = 'target' 112 | plist_path = os.path.join(base_target_dir, target, build_type, 'bundle', 'ios', f'{app_name}.app', 'Info.plist') 113 | post_process_info_plist(plist_path, args.ipad) 114 | 115 | def get_booted_device(): 116 | result = subprocess.run(['xcrun', 'simctl', 'list', 'devices', '--json'], capture_output=True, text=True, check=True) 117 | devices = json.loads(result.stdout) 118 | for runtime in devices['devices']: 119 | for dev in devices['devices'][runtime]: 120 | if dev['state'] == 'Booted': 121 | return dev['udid'] 122 | return None 123 | 124 | def boot_device(device): 125 | print(f"Booting device {device}...") 126 | subprocess.run(['xcrun', 'simctl', 'boot', device], check=True) 127 | print(f"Device {device} booted.") 128 | 129 | def get_newest_iphone_udid(): 130 | result = subprocess.run(['xcrun', 'simctl', 'list', 'devices', '--json'], capture_output=True, text=True, check=True) 131 | devices = json.loads(result.stdout) 132 | 133 | for runtime in devices['devices']: 134 | for dev in devices['devices'][runtime]: 135 | if 'iPhone' in dev['name'] and dev['isAvailable'] and 'SE' not in dev['name']: 136 | return dev['udid'] 137 | 138 | raise Exception("No available iPhone simulators found") 139 | 140 | def run_build(args): 141 | app_name, app_id = parse_cargo_toml() 142 | build_type = 'release' if args.release else 'debug' 143 | target = get_target(args) 144 | 145 | cargo_target_dir = os.getenv('CARGO_TARGET_DIR') 146 | if cargo_target_dir: 147 | base_target_dir = cargo_target_dir 148 | else: 149 | base_target_dir = 'target' 150 | app_path = os.path.join(base_target_dir, target, build_type, 'bundle', 'ios', f'{app_name}.app') 151 | 152 | if args.device == "booted": 153 | if not get_booted_device(): 154 | specific_device_udid = get_newest_iphone_udid() 155 | boot_device(specific_device_udid) 156 | args.device = specific_device_udid 157 | 158 | install_command = [ 
159 | 'xcrun', 'simctl', 'install', args.device, app_path 160 | ] 161 | 162 | launch_command = ['xcrun', 'simctl', 'launch', '--console', args.device, app_id] 163 | 164 | print(f"Running command: {' '.join(install_command)}") 165 | subprocess.run(install_command, check=True) 166 | 167 | print(f"Running command: {' '.join(launch_command)}") 168 | subprocess.run(launch_command, check=True) 169 | 170 | def run(args): 171 | print("Running the build process...") 172 | build(args) 173 | print("Running the build...") 174 | run_build(args) 175 | 176 | def main(): 177 | parser = argparse.ArgumentParser(description='A script with build, run, run-build, and release subcommands.') 178 | subparsers = parser.add_subparsers(dest='command', required=True) 179 | 180 | build_parser = subparsers.add_parser('build', help='Build the project') 181 | build_parser.add_argument('--x86', action='store_true', help='Use x86 target') 182 | build_parser.add_argument('--sim', action='store_true', help='Use simulator target') 183 | build_parser.add_argument('--target', type=str, help='Specify custom target') 184 | build_parser.add_argument('--release', '-r', action='store_true', help='Build for release') 185 | build_parser.add_argument('--ipad', action='store_true', help='Include iPad-specific Info.plist entries') 186 | build_parser.set_defaults(func=build) 187 | 188 | run_parser = subparsers.add_parser('run', help='Build and run the project') 189 | run_parser.add_argument('--x86', action='store_true', help='Use x86 target') 190 | run_parser.add_argument('--sim', action='store_true', help='Use simulator target') 191 | run_parser.add_argument('--target', type=str, help='Specify custom target') 192 | run_parser.add_argument('--release', '-r', action='store_true', help='Build for release') 193 | run_parser.add_argument('--ipad', action='store_true', help='Include iPad-specific Info.plist entries') 194 | run_parser.add_argument('--device', type=str, default='booted', help='Specify the target device') 
195 | 196 | run_parser.set_defaults(func=run) 197 | 198 | run_build_parser = subparsers.add_parser('run-build', help='Runs already built project') 199 | run_build_parser.add_argument('--x86', action='store_true', help='Use x86 target') 200 | run_build_parser.add_argument('--sim', action='store_true', help='Use simulator target') 201 | run_build_parser.add_argument('--target', type=str, help='Specify custom target') 202 | run_build_parser.add_argument('--release', '-r', action='store_true', help='Build for release') 203 | run_build_parser.add_argument('--device', type=str, default='booted', help='Specify the target device') 204 | run_build_parser.add_argument('--ipad', action='store_true', help='Include iPad-specific Info.plist entries') 205 | 206 | run_build_parser.set_defaults(func=run_build) 207 | 208 | release_parser = subparsers.add_parser('ipa', help='Creates a ipa') 209 | release_parser.add_argument('--x86', action='store_true', help='Use x86 target') 210 | release_parser.add_argument('--sim', action='store_true', help='Use simulator target') 211 | release_parser.add_argument('--target', type=str, help='Specify custom target') 212 | release_parser.add_argument('--release', '-r', action='store_true', help='Build for release') 213 | release_parser.add_argument('--ipad', action='store_true', help='Include iPad-specific Info.plist entries') 214 | release_parser.set_defaults(func=ipa) 215 | 216 | args = parser.parse_args() 217 | args.func(args) 218 | 219 | if __name__ == '__main__': 220 | main() 221 | -------------------------------------------------------------------------------- /callme/src/audio/device.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{Context, Result}; 2 | use cpal::{ 3 | traits::{DeviceTrait, HostTrait}, 4 | BufferSize, Device, Host, SampleFormat, StreamConfig, 5 | SupportedBufferSize::{Range, Unknown}, 6 | SupportedStreamConfig, SupportedStreamConfigRange, 7 | }; 8 | use tracing::{debug, 
info}; 9 | 10 | use super::{AudioFormat, SAMPLE_RATE}; 11 | use crate::audio::{DURATION_10MS, DURATION_20MS, ENGINE_FORMAT}; 12 | 13 | #[derive(Debug, Clone)] 14 | pub struct AudioConfig { 15 | /// The input device to use. 16 | pub input_device: Option, 17 | /// The output device to use. 18 | pub output_device: Option, 19 | /// If true, audio processing with echo cancellation is enabled. 20 | pub processing_enabled: bool, 21 | } 22 | 23 | impl Default for AudioConfig { 24 | fn default() -> Self { 25 | #[cfg(not(target_arch = "wasm32"))] 26 | let input_device = std::env::var("CALLME_INPUT_DEVICE").ok(); 27 | #[cfg(target_arch = "wasm32")] 28 | let input_device = None; 29 | 30 | #[cfg(not(target_arch = "wasm32"))] 31 | let output_device = std::env::var("CALLME_OUTPUT_DEVICE").ok(); 32 | #[cfg(target_arch = "wasm32")] 33 | let output_device = None; 34 | 35 | Self { 36 | input_device, 37 | output_device, 38 | processing_enabled: true, 39 | } 40 | } 41 | } 42 | 43 | #[derive(Debug, Clone, Copy)] 44 | pub enum Direction { 45 | Capture, 46 | Playback, 47 | } 48 | 49 | pub fn list_devices() -> Result { 50 | let host = cpal::default_host(); 51 | let input = host 52 | .input_devices()? 53 | .filter_map(|x| x.name().ok()) 54 | .collect(); 55 | let output = host 56 | .output_devices()? 57 | .filter_map(|x| x.name().ok()) 58 | .collect(); 59 | Ok(Devices { input, output }) 60 | } 61 | 62 | #[derive(Debug, Default)] 63 | pub struct Devices { 64 | pub input: Vec, 65 | pub output: Vec, 66 | } 67 | 68 | pub fn find_device(host: &cpal::Host, direction: Direction, name: Option<&str>) -> Result { 69 | let iter = || match direction { 70 | Direction::Capture => host.input_devices(), 71 | Direction::Playback => host.output_devices(), 72 | }; 73 | let default = || { 74 | // On linux, prefer the `pipewire` device, if available. 
75 | #[cfg(target_os = "linux")] 76 | if let Some(device) = iter()?.find(|x| x.name().ok().as_deref() == Some("pipewire")) { 77 | return anyhow::Ok(Some(device)); 78 | }; 79 | 80 | let default_device = match direction { 81 | Direction::Capture => host.default_input_device(), 82 | Direction::Playback => host.default_output_device(), 83 | }; 84 | 85 | let default_device = match default_device { 86 | Some(device) => Some(device), 87 | None => iter()?.next(), 88 | }; 89 | anyhow::Ok(default_device) 90 | }; 91 | 92 | let device = match &name { 93 | Some(device) => iter()?.find(|x| x.name().map(|y| &y == device).unwrap_or(false)), 94 | None => default()?, 95 | }; 96 | device.with_context(|| { 97 | format!( 98 | "could not find input audio device `{}`", 99 | name.unwrap_or("default") 100 | ) 101 | }) 102 | } 103 | 104 | #[derive(Debug)] 105 | pub struct StreamConfigWithFormat { 106 | pub sample_format: SampleFormat, 107 | pub config: StreamConfig, 108 | } 109 | 110 | impl StreamConfigWithFormat { 111 | fn new(config: SupportedStreamConfig, ideal_buffer_size: u32) -> Self { 112 | let sample_format = config.sample_format(); 113 | let buffer_size = match config.buffer_size() { 114 | Range { min, max } => BufferSize::Fixed(ideal_buffer_size.clamp(*min, *max)), 115 | Unknown => BufferSize::Default, 116 | }; 117 | let config = StreamConfig { 118 | channels: config.channels(), 119 | sample_rate: config.sample_rate(), 120 | buffer_size, 121 | }; 122 | Self { 123 | sample_format, 124 | config, 125 | } 126 | } 127 | 128 | pub fn audio_format(&self) -> AudioFormat { 129 | AudioFormat { 130 | sample_rate: self.config.sample_rate, 131 | channel_count: self.config.channels, 132 | } 133 | } 134 | } 135 | 136 | pub fn find_input_stream_config( 137 | device: &Device, 138 | format: &AudioFormat, 139 | ) -> Result { 140 | let d = device.name().unwrap(); 141 | debug!("find capture stream config for device {d} and format {format:?}"); 142 | let mut supported_configs: Vec<_> = device 143 | 
.supported_input_configs() 144 | .with_context(|| format!("failed to get supported stream configs for audio device `{d}`"))? 145 | .collect(); 146 | 147 | let config = if !supported_configs.is_empty() { 148 | supported_configs.sort_by(|a, b| cmp_stream_format(format, a, b).reverse()); 149 | let config_range = supported_configs[0]; 150 | debug!("selected capture stream config range: {config_range:?}"); 151 | config_range 152 | .try_with_sample_rate(format.sample_rate) 153 | .unwrap_or_else(|| config_range.with_max_sample_rate()) 154 | } else { 155 | info!("no supported configs available, use default input config"); 156 | device.default_input_config().with_context(|| { 157 | format!("failed to get default stream config for audio device `{d}`") 158 | })? 159 | }; 160 | 161 | let ideal_buffer_size = format.sample_count(DURATION_20MS) as u32; 162 | info!("selected capture stream config: {config:?}"); 163 | Ok(StreamConfigWithFormat::new(config, ideal_buffer_size)) 164 | } 165 | 166 | pub fn find_output_stream_config( 167 | device: &Device, 168 | format: &AudioFormat, 169 | ) -> Result { 170 | let d = device.name().unwrap(); 171 | debug!("find playback stream config for device {d} and format {format:?}"); 172 | let mut supported_configs: Vec<_> = device 173 | .supported_output_configs() 174 | .with_context(|| format!("failed to get supported stream configs for audio device `{d}`"))? 
175 | .collect(); 176 | 177 | let config = if !supported_configs.is_empty() { 178 | supported_configs.sort_by(|a, b| cmp_stream_format(format, a, b).reverse()); 179 | let config_range = supported_configs[0]; 180 | debug!("selected playback stream config range: {config_range:?}"); 181 | config_range 182 | .try_with_sample_rate(format.sample_rate) 183 | .unwrap_or_else(|| config_range.with_max_sample_rate()) 184 | } else { 185 | info!("no supported configs available, use default output config"); 186 | device.default_output_config().with_context(|| { 187 | format!("failed to get default stream config for audio device `{d}`") 188 | })? 189 | }; 190 | info!("selected playback stream config: {config:?}"); 191 | 192 | let ideal_buffer_size = format.sample_count(DURATION_20MS) as u32; 193 | Ok(StreamConfigWithFormat::new(config, ideal_buffer_size)) 194 | } 195 | 196 | fn cmp_stream_format( 197 | format: &AudioFormat, 198 | a: &SupportedStreamConfigRange, 199 | b: &SupportedStreamConfigRange, 200 | ) -> std::cmp::Ordering { 201 | use cpal::SupportedBufferSize::{Range, Unknown}; 202 | use std::cmp::Ordering::{Equal, Greater, Less}; 203 | 204 | let is_perfect = |x: &SupportedStreamConfigRange| { 205 | x.channels() == format.channel_count 206 | && x.sample_format() == SampleFormat::F32 207 | && x.try_with_sample_rate(format.sample_rate).is_some() 208 | }; 209 | // check if one of the configs is our desired config. 210 | let a_is_perfect = is_perfect(a); 211 | let b_is_perfect = is_perfect(b); 212 | let cmp = a_is_perfect.cmp(&b_is_perfect); 213 | // if only one supports the desired config, use that. 214 | if cmp != Equal { 215 | return cmp; 216 | } 217 | // if both support the desired config, use the one with the smaller buffer size. 218 | if a_is_perfect { 219 | return match (a.buffer_size(), b.buffer_size()) { 220 | (Range { min: a, .. }, Range { min: b, .. }) => a.cmp(b).reverse(), 221 | (Range { .. }, _) => Greater, 222 | (Unknown, Range { .. 
}) => Less, 223 | (Unknown, Unknown) => Equal, 224 | }; 225 | } 226 | 227 | // if none, support the desired config, first look for the correct channel count, then for the 228 | // desired sample format, then for the desired sample rate. 229 | 230 | // first: get a config with the correct number of channels. 231 | let cmp_channel_count = 232 | (a.channels() == format.channel_count).cmp(&(b.channels() == format.channel_count)); 233 | if cmp_channel_count != Equal { 234 | return cmp_channel_count; 235 | } 236 | 237 | // second: get the desired sample format, or one of the "good ones" 238 | let cmp_sample_format = 239 | (a.sample_format() == SampleFormat::F32).cmp(&(b.sample_format() == SampleFormat::F32)); 240 | if cmp_sample_format != Equal { 241 | return cmp_sample_format; 242 | } 243 | let cmp_sample_format = 244 | (a.sample_format() == SampleFormat::I16).cmp(&(b.sample_format() == SampleFormat::I16)); 245 | if cmp_sample_format != Equal { 246 | return cmp_sample_format; 247 | } 248 | let cmp_sample_format = 249 | (a.sample_format() == SampleFormat::U16).cmp(&(b.sample_format() == SampleFormat::U16)); 250 | if cmp_sample_format != Equal { 251 | return cmp_sample_format; 252 | } 253 | 254 | // third: get the desired sample rate 255 | let cmp_sample_rate = (a.try_with_sample_rate(format.sample_rate).is_some()) 256 | .cmp(&(b.try_with_sample_rate(format.sample_rate).is_some())); 257 | if cmp_sample_rate != Equal { 258 | return cmp_sample_rate; 259 | } 260 | 261 | // forth: support the smaller buffer size 262 | match (a.buffer_size(), b.buffer_size()) { 263 | (Range { min: a, .. }, Range { min: b, .. }) => a.cmp(b).reverse(), 264 | (Range { .. }, _) => Greater, 265 | (Unknown, Range { .. 
}) => Less, 266 | (Unknown, Unknown) => Equal, 267 | } 268 | } 269 | -------------------------------------------------------------------------------- /callme/src/codec/opus.rs: -------------------------------------------------------------------------------- 1 | use std::{ops::ControlFlow, time::Duration}; 2 | 3 | use anyhow::{bail, Result}; 4 | use bytes::{Bytes, BytesMut}; 5 | use ringbuf::{ 6 | traits::{Consumer as _, Observer, Producer as _, Split}, 7 | HeapCons as Consumer, HeapProd as Producer, 8 | }; 9 | use tokio::sync::broadcast::{self, error::TryRecvError}; 10 | use tracing::{debug, info, trace}; 11 | 12 | use super::Codec; 13 | use crate::{ 14 | audio::{AudioFormat, AudioSink, AudioSource, SAMPLE_RATE}, 15 | rtc::{MediaFrame, MediaTrack, TrackKind}, 16 | }; 17 | 18 | pub const OPUS_SAMPLE_RATE: u32 = 48_000; 19 | pub const OPUS_STREAM_PARAMS: AudioFormat = AudioFormat::new2(OPUS_SAMPLE_RATE, 2); 20 | 21 | const DURATION_20MS: Duration = Duration::from_millis(20); 22 | 23 | #[derive(Debug, Clone, Copy, Eq, PartialEq)] 24 | pub enum OpusChannels { 25 | Mono = 1, 26 | Stereo = 2, 27 | } 28 | 29 | impl From for ::opus::Channels { 30 | fn from(value: OpusChannels) -> Self { 31 | match value { 32 | OpusChannels::Mono => ::opus::Channels::Mono, 33 | OpusChannels::Stereo => ::opus::Channels::Stereo, 34 | } 35 | } 36 | } 37 | 38 | pub struct MediaTrackOpusDecoder { 39 | track: MediaTrack, 40 | decoder: opus::Decoder, 41 | audio_buf: Vec, 42 | decode_buf: Vec, 43 | underflows: usize, 44 | remaining_silence_ticks: usize, 45 | audio_format: AudioFormat, 46 | } 47 | 48 | impl MediaTrackOpusDecoder { 49 | pub fn new(track: MediaTrack) -> Result { 50 | let channel_count = match track.codec() { 51 | Codec::Opus { channels } => channels, 52 | }; 53 | let audio_format = AudioFormat::new2(OPUS_SAMPLE_RATE, channel_count as u16); 54 | let decoder = 55 | opus::Decoder::new(OPUS_STREAM_PARAMS.sample_rate.0, channel_count.into()).unwrap(); 56 | let buffer_size = 
audio_format.sample_count(DURATION_20MS); /* one 20ms frame at the track's rate/channels */ 57 | let decode_buf = vec![0.; buffer_size]; 58 | let audio_buf = vec![]; 59 | Ok(Self { 60 | track, 61 | decoder, 62 | audio_buf, 63 | decode_buf, 64 | underflows: 0, 65 | remaining_silence_ticks: 0, 66 | audio_format, 67 | }) 68 | } 69 | /* Decode one opus packet into audio_buf; tick() passes an empty slice for lost/skipped frames (relies on opus loss concealment -- NOTE(review): confirm decode_float(&[]) semantics). Returns the number of samples appended. */ 70 | pub fn decode(&mut self, buf: &[u8]) -> Result { 71 | let block_count = self 72 | .decoder 73 | .decode_float(buf, &mut self.decode_buf, false)?; 74 | let sample_count = block_count * self.audio_format.channel_count as usize; 75 | let decoded = &self.decode_buf[..sample_count]; 76 | // we need to upscale to two channels, AudioSource tick always expects stereo. 77 | match self.audio_format.channel_count { 78 | 1 => self.audio_buf.extend(decoded.iter().flat_map(|s| [s, s])), 79 | 2 => self.audio_buf.extend(decoded), 80 | _ => unreachable!(), 81 | } 82 | Ok(sample_count) 83 | } 84 | /* Read-only view of the decoded samples not yet consumed. */ 85 | pub fn peek(&self) -> &[f32] { 86 | &self.audio_buf 87 | } 88 | /* Consume the first n samples, shifting the remainder to the front. Panics if n exceeds the buffered length. */ 89 | pub fn advance(&mut self, n: usize) { 90 | if n > self.audio_buf.len() { 91 | panic!("requested advance further than buffer length"); 92 | } 93 | self.audio_buf.copy_within(n.., 0); 94 | self.audio_buf.truncate(self.audio_buf.len() - n); 95 | } 96 | } 97 | /* Mixer callback: drain the track channel, decode everything available, then fill `buf`; Break means the track sender is gone. */ 98 | impl AudioSource for MediaTrackOpusDecoder { 99 | fn tick(&mut self, buf: &mut [f32]) -> Result> { 100 | // decode everything that is ready to recv'd on the track channel. 101 | loop { 102 | let (skipped_frames, payload) = match self.track.try_recv() { 103 | Ok(frame) => { 104 | let MediaFrame { 105 | payload, 106 | skipped_frames, 107 | ..
108 | } = frame; 109 | trace!("opus decoder: mediatrack recv frame"); 110 | (skipped_frames, Some(payload)) 111 | } 112 | Err(TryRecvError::Empty) => { 113 | trace!("opus decoder: mediatrack recv empty"); 114 | break; 115 | } 116 | Err(TryRecvError::Lagged(count)) => { 117 | trace!("opus decoder: mediatrack recv lagged {count}"); 118 | (Some(count as u32), None) /* a lagged broadcast receiver missed `count` frames: treat them as skipped */ 119 | } 120 | Err(TryRecvError::Closed) => { 121 | info!("stop opus to audio loop: media track sender dropped"); 122 | return Ok(ControlFlow::Break(())); 123 | } 124 | }; 125 | if let Some(skipped_count) = skipped_frames { 126 | for _ in 0..skipped_count { 127 | let sample_count = self.decode(&[])?; 128 | trace!( 129 | "decoder: {sample_count} samples from skipped frames, now at {}", 130 | self.audio_buf.len() 131 | ); 132 | } 133 | } 134 | if let Some(payload) = payload { 135 | let sample_count = self.decode(&payload)?; 136 | trace!( 137 | "decoder: {sample_count} samples from payload, now at {}", 138 | self.audio_buf.len() 139 | ); 140 | } 141 | } 142 | 143 | // TODO: right now a very hacky way to add some latency if we don't get enough packets.
144 | if self.remaining_silence_ticks > 0 { 145 | self.remaining_silence_ticks -= 1; 146 | return Ok(ControlFlow::Continue(0)); /* still emitting silence to build up latency */ 147 | } else if self.audio_buf.len() < buf.len() { 148 | self.underflows += 1; 149 | if self.underflows > 2 { 150 | self.remaining_silence_ticks = 4; 151 | tracing::debug!("increase silence"); 152 | self.underflows = 0; 153 | } 154 | return Ok(ControlFlow::Continue(0)); 155 | } 156 | 157 | // TODO: a very hacky way to decrease latency if we buffered too much 158 | if self 159 | .audio_format 160 | .duration_from_sample_count(self.audio_buf.len()) 161 | > Duration::from_secs(1) 162 | { 163 | self.advance(self.audio_format.sample_count(Duration::from_millis(500))); 164 | } 165 | 166 | let count = buf.len().min(self.audio_buf.len()); /* NOTE(review): the early return above guarantees count == buf.len() here; copy_from_slice below panics on length mismatch, so buf[..count] would be the defensive form */ 167 | buf.copy_from_slice(&self.audio_buf[..count]); 168 | self.advance(count); 169 | 170 | Ok(ControlFlow::Continue(count)) 171 | } 172 | } 173 | /* Encodes incoming f32 samples to opus frames and broadcasts them to a MediaTrack. */ 174 | pub struct MediaTrackOpusEncoder { 175 | sender: broadcast::Sender, 176 | encoder: OpusEncoder, 177 | } 178 | 179 | impl MediaTrackOpusEncoder { /* Returns the encoder sink together with the MediaTrack receiving its encoded frames. Only mono and stereo input is supported; sample rate must match OPUS_SAMPLE_RATE. */ 180 | pub fn new(track_channel_cap: usize, audio_format: AudioFormat) -> Result<(Self, MediaTrack)> { 181 | debug_assert_eq!(audio_format.sample_rate.0, OPUS_SAMPLE_RATE); 182 | let (sender, receiver) = broadcast::channel(track_channel_cap); 183 | let channels = match audio_format.channel_count { 184 | 1 => OpusChannels::Mono, 185 | 2 => OpusChannels::Stereo, 186 | _ => bail!("unsupported channel count"), 187 | }; 188 | let track = MediaTrack::new(receiver, Codec::Opus { channels }, TrackKind::Audio); 189 | let encoder = MediaTrackOpusEncoder { 190 | sender, 191 | encoder: OpusEncoder::new(channels), 192 | }; 193 | Ok((encoder, track)) 194 | } 195 | } 196 | /* AudioSink impl: encode captured samples and send resulting frames to the track channel. */ 197 | impl AudioSink for MediaTrackOpusEncoder { 198 | fn tick(&mut self, buf: &[f32]) -> Result> { 199 | for (payload, sample_count) in self.encoder.push_slice(buf) { 200 | let payload_len = payload.len(); 201 | let frame = MediaFrame { 202 | payload, 203 |
sample_count: Some(sample_count), 204 | skipped_frames: None, 205 | skipped_samples: None, 206 | }; 207 | match self.sender.send(frame) { 208 | Err(_) => { 209 | info!("closing encoder loop: track receiver closed."); 210 | return Ok(ControlFlow::Break(())); 211 | } 212 | Ok(_) => { 213 | trace!("sent opus {sample_count}S {payload_len}B") 214 | } 215 | } 216 | } 217 | Ok(ControlFlow::Continue(())) 218 | } 219 | } 220 | 221 | pub struct OpusEncoder { 222 | encoder: opus::Encoder, 223 | samples: Vec, 224 | out_buf: BytesMut, 225 | samples_per_frame: usize, 226 | } 227 | 228 | impl OpusEncoder { 229 | pub fn new(channels: OpusChannels) -> Self { 230 | let format = AudioFormat::new2(OPUS_SAMPLE_RATE, channels as u16); 231 | let mut encoder = 232 | opus::Encoder::new(OPUS_SAMPLE_RATE, channels.into(), opus::Application::Voip).unwrap(); 233 | debug!( 234 | "initialized opus encoder: channels {} bitrate {:?} bandwidth {:?}", 235 | channels as u16, 236 | encoder.get_bitrate().unwrap(), 237 | encoder.get_bandwidth() 238 | ); 239 | let mut out_buf = BytesMut::new(); 240 | let samples_per_frame = format.sample_count(DURATION_20MS); 241 | out_buf.resize(samples_per_frame, 0); 242 | let samples = Vec::new(); 243 | Self { 244 | encoder, 245 | out_buf, 246 | samples, 247 | samples_per_frame, 248 | } 249 | } 250 | 251 | pub fn pop_from_consumer<'a>( 252 | &'a mut self, 253 | consumer: &'a mut Consumer, 254 | ) -> impl Iterator + 'a { 255 | std::iter::from_fn(|| { 256 | for sample in consumer.pop_iter() { 257 | if let Some((payload, sample_count)) = self.push_sample(sample) { 258 | return Some((payload, sample_count)); 259 | } 260 | } 261 | None 262 | }) 263 | } 264 | 265 | pub fn push_slice<'a>( 266 | &'a mut self, 267 | samples: &'a [f32], 268 | ) -> impl Iterator + 'a { 269 | let mut iter = samples.iter(); 270 | std::iter::from_fn(move || { 271 | for sample in iter.by_ref() { 272 | if let Some((payload, sample_count)) = self.push_sample(*sample) { 273 | return Some((payload, 
sample_count)); 274 | } 275 | } 276 | None 277 | }) 278 | } 279 | 280 | pub fn push_sample(&mut self, sample: f32) -> Option<(Bytes, u32)> { 281 | self.samples.push(sample); 282 | if self.samples.len() >= self.samples_per_frame { 283 | let sample_count = self.samples.len() as u32; 284 | let size = self 285 | .encoder 286 | .encode_float(&self.samples, &mut self.out_buf) 287 | .expect("failed to encode"); 288 | self.samples.clear(); 289 | let encoded = self.out_buf.split_to(size).freeze(); 290 | self.out_buf.resize(self.samples_per_frame, 0); 291 | Some((encoded, sample_count)) 292 | } else { 293 | None 294 | } 295 | } 296 | } 297 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 
25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. 
You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. 
(Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [2023] [N0, INC] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /callme/src/audio/playback.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | num::NonZeroUsize, 3 | ops::ControlFlow, 4 | sync::{atomic::AtomicBool, Arc, Mutex}, 5 | time::{Duration, Instant}, 6 | }; 7 | 8 | use anyhow::{anyhow, bail, Result}; 9 | use cpal::{ 10 | traits::{DeviceTrait, StreamTrait}, 11 | Device, Sample, SampleFormat, 12 | }; 13 | use fixed_resample::{FixedResampler, ResampleQuality}; 14 | use ringbuf::{ 15 | traits::{Consumer as _, Observer as _, Producer as _, Split}, 16 | HeapCons as Consumer, HeapProd as Producer, 17 | }; 18 | use tokio::sync::{broadcast, mpsc, oneshot}; 19 | use tracing::{debug, error, info, trace, trace_span, warn, Level}; 20 | 21 | use super::{ 22 | device::{find_device, find_output_stream_config, Direction, StreamConfigWithFormat}, 23 | AudioFormat, WebrtcAudioProcessor, DURATION_10MS, DURATION_20MS, ENGINE_FORMAT, SAMPLE_RATE, 24 | }; 25 | use 
crate::{ 26 | codec::opus::MediaTrackOpusDecoder, 27 | rtc::{MediaFrame, MediaTrack}, 28 | }; 29 | 30 | pub trait AudioSource: Send + 'static { 31 | fn tick(&mut self, buf: &mut [f32]) -> Result>; 32 | } 33 | 34 | #[derive(derive_more::Debug, Clone)] 35 | pub struct AudioPlayback { 36 | source_sender: mpsc::Sender>, 37 | } 38 | 39 | impl AudioPlayback { 40 | pub async fn build( 41 | host: &cpal::Host, 42 | device: Option<&str>, 43 | processor: WebrtcAudioProcessor, 44 | ) -> Result { 45 | let device = find_device(host, Direction::Playback, device)?; 46 | let stream_config = find_output_stream_config(&device, &ENGINE_FORMAT)?; 47 | 48 | let buffer_size = ENGINE_FORMAT.sample_count(DURATION_20MS) * 32; 49 | let (producer, consumer) = ringbuf::HeapRb::::new(buffer_size).split(); 50 | 51 | let (source_sender, source_receiver) = mpsc::channel(16); 52 | let (init_tx, init_rx) = oneshot::channel(); 53 | 54 | std::thread::spawn(move || { 55 | if let Err(err) = audio_thread_priority::promote_current_thread_to_real_time( 56 | buffer_size as u32, 57 | ENGINE_FORMAT.sample_rate.0, 58 | ) { 59 | warn!("failed to set playback thread to realtime priority: {err:?}"); 60 | } 61 | let stream = match start_playback_stream(&device, &stream_config, processor, consumer) { 62 | Ok(stream) => { 63 | init_tx.send(Ok(())).unwrap(); 64 | stream 65 | } 66 | Err(err) => { 67 | init_tx.send(Err(err)).unwrap(); 68 | return; 69 | } 70 | }; 71 | playback_loop(producer, source_receiver); 72 | drop(stream); 73 | }); 74 | 75 | init_rx.await??; 76 | Ok(Self { source_sender }) 77 | } 78 | 79 | pub async fn add_track(&self, track: MediaTrack) -> Result<()> { 80 | let decoder = MediaTrackOpusDecoder::new(track)?; 81 | self.add_source(decoder).await 82 | } 83 | 84 | pub async fn add_source(&self, source: impl AudioSource) -> Result<()> { 85 | self.source_sender 86 | .send(Box::new(source)) 87 | .await 88 | .map_err(|_| anyhow!("failed to add audio source: playback loop dead"))?; 89 | Ok(()) 90 | } 91 | } 
92 | 93 | fn playback_loop( 94 | mut producer: Producer, 95 | mut source_receiver: mpsc::Receiver>, 96 | ) { 97 | let span = tracing::span!(Level::TRACE, "playback-loop"); 98 | let _guard = span.enter(); 99 | info!("playback loop start"); 100 | 101 | let tick_duration = DURATION_20MS; 102 | let buffer_size = ENGINE_FORMAT.sample_count(tick_duration); 103 | let mut work_buf = vec![0.; buffer_size]; 104 | let mut out_buf = vec![0.; buffer_size]; 105 | let mut sources: Vec> = vec![]; 106 | 107 | // todo: do we want this? 108 | let initial_latency = ENGINE_FORMAT.sample_count(DURATION_20MS); 109 | let initial_silence = vec![0.; initial_latency]; 110 | let n = producer.push_slice(&initial_silence); 111 | debug_assert_eq!(n, initial_silence.len()); 112 | 113 | let mut tick = 0; 114 | loop { 115 | let start = Instant::now(); 116 | 117 | // pull incoming sources 118 | loop { 119 | match source_receiver.try_recv() { 120 | Ok(source) => { 121 | info!("add new track to decoder"); 122 | sources.push(source); 123 | } 124 | Err(mpsc::error::TryRecvError::Empty) => break, 125 | Err(mpsc::error::TryRecvError::Disconnected) => { 126 | info!("stop playback mixer loop: channel closed"); 127 | return; 128 | } 129 | } 130 | } 131 | 132 | out_buf.fill(0.); 133 | sources.retain_mut(|source| match source.tick(&mut work_buf) { 134 | Ok(ControlFlow::Continue(count)) => { 135 | for i in 0..count { 136 | out_buf[i] += work_buf[i]; 137 | } 138 | if count < work_buf.len() { 139 | debug!( 140 | "audio source xrun: missing {} of {}", 141 | work_buf.len() - count, 142 | work_buf.len() 143 | ); 144 | } 145 | true 146 | } 147 | Ok(ControlFlow::Break(())) => { 148 | debug!("remove decoder: closed"); 149 | false 150 | } 151 | Err(err) => { 152 | warn!("remove decoder: failed {err:?}"); 153 | false 154 | } 155 | }); 156 | 157 | let len = producer.push_slice(&out_buf[..]); 158 | if len < out_buf.len() { 159 | warn!( 160 | "xrun: failed to push {} of {}", 161 | out_buf.len() - len, 162 | out_buf.len() 
163 | ); 164 | } 165 | 166 | trace!("tick {tick} took {:?} pushed {len}", start.elapsed()); 167 | if start.elapsed() > tick_duration { 168 | warn!( 169 | "playback thread tick exceeded interval (took {:?})", 170 | start.elapsed() 171 | ); 172 | } else { 173 | let sleep_time = tick_duration.saturating_sub(start.elapsed()); 174 | spin_sleep::sleep(sleep_time); 175 | } 176 | tick += 1; 177 | } 178 | } 179 | 180 | fn start_playback_stream( 181 | device: &Device, 182 | stream_config: &StreamConfigWithFormat, 183 | processor: WebrtcAudioProcessor, 184 | consumer: Consumer, 185 | ) -> Result { 186 | let config = &stream_config.config; 187 | let format = stream_config.audio_format(); 188 | #[cfg(feature = "audio-processing")] 189 | processor.init_playback(config.channels as usize)?; 190 | let resampler = FixedResampler::new( 191 | NonZeroUsize::new(format.channel_count as usize).unwrap(), 192 | SAMPLE_RATE.0, 193 | format.sample_rate.0, 194 | ResampleQuality::High, 195 | true, 196 | ); 197 | let state = PlaybackState { 198 | consumer, 199 | format, 200 | processor, 201 | resampler, 202 | }; 203 | let stream = match stream_config.sample_format { 204 | SampleFormat::I8 => build_playback_stream::(device, config, state), 205 | SampleFormat::I16 => build_playback_stream::(device, config, state), 206 | SampleFormat::I32 => build_playback_stream::(device, config, state), 207 | SampleFormat::F32 => build_playback_stream::(device, config, state), 208 | sample_format => { 209 | tracing::error!("Unsupported sample format '{sample_format}'"); 210 | Err(cpal::BuildStreamError::StreamConfigNotSupported) 211 | } 212 | }?; 213 | info!( 214 | "start playback stream on {} with {format:?}", 215 | device.name()? 
216 | ); 217 | stream.play()?; 218 | Ok(stream) 219 | } 220 | 221 | struct PlaybackState { 222 | format: AudioFormat, 223 | resampler: FixedResampler, 224 | #[allow(unused)] 225 | processor: WebrtcAudioProcessor, 226 | consumer: Consumer, 227 | } 228 | 229 | fn build_playback_stream + cpal::SizedSample + Default>( 230 | device: &cpal::Device, 231 | config: &cpal::StreamConfig, 232 | mut state: PlaybackState, 233 | ) -> Result { 234 | let frame_size = state.format.sample_count(DURATION_10MS); 235 | let mut unprocessed: Vec = Vec::with_capacity(frame_size); 236 | let mut processed: Vec = Vec::with_capacity(frame_size); 237 | let mut resampled: Vec = Vec::with_capacity(frame_size); 238 | let mut tick = 0; 239 | let mut last_warning = Instant::now(); 240 | let mut underflows = 0; 241 | let span = trace_span!("playback-cb"); 242 | 243 | device.build_output_stream::( 244 | config, 245 | move |data: &mut [S], info: &_| { 246 | let _guard = span.enter(); 247 | let delay = { 248 | let output_delay = info 249 | .timestamp() 250 | .callback 251 | .duration_since(&info.timestamp().playback) 252 | .unwrap_or_default(); 253 | let resampler_delay = Duration::from_secs_f32(state.resampler.output_delay() as f32 / state.format.sample_rate.0 as f32); 254 | output_delay + resampler_delay 255 | }; 256 | 257 | if tick % 100 == 0 { 258 | trace!("callback tick {tick} len={} delay={delay:?}", data.len()); 259 | } 260 | 261 | 262 | #[cfg(feature = "audio-processing")] 263 | state.processor.set_playback_delay(delay); 264 | 265 | // pop from channel 266 | unprocessed.extend(state.consumer.pop_iter()); 267 | 268 | // process 269 | let mut chunks = unprocessed.chunks_exact_mut(frame_size); 270 | for chunk in &mut chunks { 271 | #[cfg(feature = "audio-processing")] 272 | state.processor.process_render_frame(chunk).unwrap(); 273 | processed.extend_from_slice(chunk); 274 | } 275 | // cleanup 276 | let remainder_len = chunks.into_remainder().len(); 277 | let end = unprocessed.len() - remainder_len; 
278 | unprocessed.copy_within(end.., 0); 279 | unprocessed.truncate(remainder_len); 280 | 281 | // resample 282 | state.resampler.process_interleaved(&processed, |samples|{ 283 | resampled.extend_from_slice(samples); 284 | } , None, false); 285 | processed.clear(); 286 | 287 | 288 | // copy to out 289 | let out_len = resampled.len().min(data.len()); 290 | let remaining = resampled.len() - out_len; 291 | for (i, sample) in data[..out_len].iter_mut().enumerate() { 292 | *sample = resampled[i].to_sample() 293 | } 294 | resampled.copy_within(out_len.., 0); 295 | resampled.truncate(remaining); 296 | 297 | // trace!("out_len {out_len} resampled_remaining {} processed_remaining {}", resampled.len(), processed.len()); 298 | if out_len < data.len() { 299 | let now = Instant::now(); 300 | if now.duration_since(last_warning) > Duration::from_secs(1) { 301 | warn!( 302 | "[tick {tick}] playback xrun: {} of {} samples missing (buffered {}) (+ {} previous)", 303 | data.len() - out_len, 304 | data.len(), 305 | unprocessed.len() + state.consumer.occupied_len(), 306 | underflows 307 | ); 308 | underflows += 1; 309 | last_warning = now; 310 | } 311 | } 312 | tick += 1; 313 | }, 314 | |err| { 315 | error!("an error occurred on output stream: {}", err); 316 | }, 317 | None, 318 | ) 319 | } 320 | -------------------------------------------------------------------------------- /callme/src/audio/capture.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | cmp::Ordering, 3 | num::NonZeroUsize, 4 | ops::ControlFlow, 5 | sync::{ 6 | atomic::{AtomicBool, AtomicU64}, 7 | Arc, 8 | }, 9 | time::{Duration, Instant}, 10 | }; 11 | 12 | use anyhow::{anyhow, bail, Context, Result}; 13 | use bytes::{Bytes, BytesMut}; 14 | use cpal::{ 15 | traits::{DeviceTrait, StreamTrait}, 16 | Device, SampleFormat, 17 | }; 18 | use dasp_sample::ToSample; 19 | use fixed_resample::{FixedResampler, ResampleQuality}; 20 | use ringbuf::{ 21 | traits::{Consumer as _, 
Observer, Producer as _, Split}, 22 | HeapCons as Consumer, HeapProd as Producer, 23 | }; 24 | use tokio::sync::{broadcast, mpsc, oneshot}; 25 | use tracing::{debug, error, info, span, trace, trace_span, warn, Level}; 26 | 27 | use super::{ 28 | device::{find_device, find_input_stream_config, Direction, StreamConfigWithFormat}, 29 | AudioFormat, WebrtcAudioProcessor, DURATION_10MS, DURATION_20MS, ENGINE_FORMAT, SAMPLE_RATE, 30 | }; 31 | use crate::{ 32 | codec::opus::MediaTrackOpusEncoder, 33 | rtc::{MediaFrame, MediaTrack, TrackKind}, 34 | }; 35 | /* Sink for captured audio: tick receives buffers of f32 samples; returning ControlFlow::Break signals the sink is finished. */ 36 | pub trait AudioSink: Send + 'static { 37 | fn tick(&mut self, buf: &[f32]) -> Result>; 38 | } 39 | /* Cloneable handle to the capture side; new sinks are registered with the audio thread over a channel. */ 40 | #[derive(Debug, Clone)] 41 | pub struct AudioCapture { 42 | sink_sender: mpsc::Sender>, 43 | } 44 | 45 | impl AudioCapture { /* Open the capture device, spawn the capture thread (promoted to realtime priority), and start the input stream. */ 46 | pub async fn build( 47 | host: &cpal::Host, 48 | device: Option<&str>, 49 | processor: WebrtcAudioProcessor, 50 | ) -> Result { 51 | let device = find_device(host, Direction::Capture, device)?; 52 | 53 | // find a config for the capture stream. note that the returned config may not 54 | // match the format. the passed format is a hint as to which stream config 55 | // to prefer if there are multiple. if no matching format is found, the 56 | // device's default stream config is used. 57 | let stream_config = find_input_stream_config(&device, &ENGINE_FORMAT)?; 58 | 59 | let buffer_size = ENGINE_FORMAT.sample_count(DURATION_20MS) * 16; 60 | let (producer, consumer) = ringbuf::HeapRb::::new(buffer_size).split(); 61 | 62 | // a channel to pass new sinks to the audio thread.
63 | let (sink_sender, sink_receiver) = mpsc::channel(16); 64 | 65 | let (init_tx, init_rx) = oneshot::channel(); 66 | std::thread::spawn(move || { 67 | if let Err(err) = audio_thread_priority::promote_current_thread_to_real_time( 68 | buffer_size as u32, 69 | ENGINE_FORMAT.sample_rate.0, 70 | ) { 71 | warn!("failed to set capture thread to realtime priority: {err:?}"); 72 | } 73 | 74 | let stream = match start_capture_stream(&device, &stream_config, producer, processor) { 75 | Ok(stream) => { 76 | init_tx.send(Ok(())).unwrap(); 77 | stream 78 | } 79 | Err(err) => { 80 | let err = err.context("failed to start capture stream"); 81 | init_tx.send(Err(err)).unwrap(); 82 | return; 83 | } 84 | }; 85 | capture_loop(consumer, sink_receiver); 86 | drop(stream); 87 | }); 88 | init_rx.await??; 89 | let handle = AudioCapture { sink_sender }; 90 | Ok(handle) 91 | } 92 | 93 | pub async fn add_sink(&self, sink: impl AudioSink) -> Result<()> { 94 | self.sink_sender 95 | .send(Box::new(sink)) 96 | .await 97 | .map_err(|_| anyhow!("failed to add captue sink: capture loop dead")) 98 | } 99 | 100 | pub async fn create_opus_track(&self) -> Result { 101 | let (encoder, track) = MediaTrackOpusEncoder::new(16, ENGINE_FORMAT)?; 102 | self.add_sink(encoder).await?; 103 | Ok(track) 104 | } 105 | } 106 | 107 | fn start_capture_stream( 108 | device: &Device, 109 | stream_config: &StreamConfigWithFormat, 110 | producer: Producer, 111 | processor: WebrtcAudioProcessor, 112 | ) -> Result { 113 | let d = device.name()?; 114 | let config = &stream_config.config; 115 | 116 | #[cfg(feature = "audio-processing")] 117 | processor.init_capture(config.channels as usize)?; 118 | 119 | let capture_format = stream_config.audio_format(); 120 | 121 | let resampler = FixedResampler::new( 122 | NonZeroUsize::new(ENGINE_FORMAT.channel_count as usize).unwrap(), 123 | capture_format.sample_rate.0, 124 | ENGINE_FORMAT.sample_rate.0, 125 | ResampleQuality::High, 126 | true, 127 | ); 128 | let state = CaptureState { 
129 | format: capture_format, 130 | producer, 131 | processor: processor.clone(), 132 | resampler, 133 | }; 134 | let stream = match stream_config.sample_format { 135 | SampleFormat::I8 => build_capture_stream::(device, config, state), 136 | SampleFormat::I16 => build_capture_stream::(device, config, state), 137 | SampleFormat::I32 => build_capture_stream::(device, config, state), 138 | SampleFormat::F32 => build_capture_stream::(device, config, state), 139 | sample_format => { 140 | tracing::error!("Unsupported sample format '{sample_format}'"); 141 | Err(cpal::BuildStreamError::StreamConfigNotSupported) 142 | } 143 | } 144 | .with_context(|| format!("failed to build capture stream on {d} with {capture_format:?}"))?; 145 | info!("starting capture stream on {d} with {capture_format:?}"); 146 | stream.play()?; 147 | Ok(stream) 148 | } 149 | 150 | struct CaptureState { 151 | format: AudioFormat, 152 | producer: Producer, 153 | #[allow(unused)] 154 | processor: WebrtcAudioProcessor, 155 | resampler: FixedResampler, 156 | } 157 | 158 | fn build_capture_stream + cpal::SizedSample + Default>( 159 | device: &cpal::Device, 160 | config: &cpal::StreamConfig, 161 | mut state: CaptureState, 162 | ) -> Result { 163 | let mut tick = 0; 164 | let span = trace_span!("capture-cb"); 165 | 166 | // if we change this, code in here needs to change, so let's assert it 167 | debug_assert_eq!(ENGINE_FORMAT.channel_count, 2); 168 | debug_assert!(matches!(state.format.channel_count, 1 | 2)); 169 | 170 | // this needs to be at 10ms = 480 samples per channel, otherwise 171 | // the WebrtcAudioProcessor panics. 
172 | let processor_chunk_size = ENGINE_FORMAT.sample_count(DURATION_10MS); 173 | let mut resampled_buf: Vec = Vec::with_capacity(processor_chunk_size); 174 | 175 | // this will grow as needed and contains samples directly from the input buf 176 | // (before resampling) but with channels adjusted 177 | let mut input_buf: Vec = Vec::with_capacity(processor_chunk_size); 178 | 179 | device.build_input_stream::( 180 | config, 181 | move |data: &[S], info: &_| { 182 | let _guard = span.enter(); 183 | let start = Instant::now(); 184 | let max_tick_time = state.format.duration_from_sample_count(data.len()); 185 | 186 | let delay = { 187 | let capture_delay = info 188 | .timestamp() 189 | .callback 190 | .duration_since(&info.timestamp().capture) 191 | .unwrap_or_default(); 192 | let resampler_delay = Duration::from_secs_f32( 193 | state.resampler.output_delay() as f32 / ENGINE_FORMAT.sample_rate.0 as f32, 194 | ); 195 | capture_delay + resampler_delay 196 | }; 197 | 198 | // adjust sample format and channel count. 199 | // we convert to ENGINE_FORMAT here which always has two channels (asserted above). 200 | if state.format.channel_count == 1 { 201 | input_buf.extend( 202 | data.iter() 203 | .map(|s| s.to_sample()) 204 | .flat_map(|s| [s, s].into_iter()), 205 | ); 206 | } else if state.format.channel_count == 2 { 207 | input_buf.extend(data.iter().map(|s| s.to_sample())); 208 | } else { 209 | // checked above. 
210 | unreachable!() 211 | }; 212 | 213 | // resample 214 | state.resampler.process_interleaved( 215 | &input_buf[..], 216 | |samples| { 217 | resampled_buf.extend(samples); 218 | }, 219 | None, 220 | false, 221 | ); 222 | input_buf.clear(); 223 | 224 | // update capture delay in processor 225 | #[cfg(feature = "audio-processing")] 226 | state.processor.set_capture_delay(delay); 227 | 228 | // process, and push processed chunks to the producer 229 | let mut chunks = resampled_buf.chunks_exact_mut(processor_chunk_size); 230 | let mut pushed = 0; 231 | for chunk in &mut chunks { 232 | #[cfg(feature = "audio-processing")] 233 | state.processor.process_capture_frame(chunk).unwrap(); 234 | 235 | let n = state.producer.push_slice(chunk); 236 | pushed += n; 237 | 238 | if n < chunk.len() { 239 | warn!( 240 | "record xrun: failed to push out {} of {}", 241 | chunk.len() - n, 242 | chunk.len() 243 | ); 244 | break; 245 | } 246 | } 247 | 248 | // cleanup: we need to keep the unprocessed samples that are still in the resampled buf 249 | let remainder_len = chunks.into_remainder().len(); 250 | let end = resampled_buf.len() - remainder_len; 251 | resampled_buf.copy_within(end.., 0); 252 | resampled_buf.truncate(remainder_len); 253 | 254 | trace!( 255 | "tick {tick}: delay={:?} available={:?} time={:?} / get {} push {} samples", 256 | delay, 257 | max_tick_time, 258 | start.elapsed(), 259 | data.len(), 260 | pushed 261 | ); 262 | tick += 1; 263 | }, 264 | |err| { 265 | error!("an error occurred on output stream: {}", err); 266 | }, 267 | None, 268 | ) 269 | } 270 | 271 | fn capture_loop( 272 | mut consumer: Consumer, 273 | mut sink_receiver: mpsc::Receiver>, 274 | ) { 275 | let span = tracing::span!(Level::TRACE, "capture-loop"); 276 | let _guard = span.enter(); 277 | info!("capture loop start"); 278 | 279 | let tick_duration = DURATION_20MS; 280 | let samples_per_tick = ENGINE_FORMAT.sample_count(tick_duration); 281 | let mut buf = vec![0.; samples_per_tick]; 282 | let mut 
sinks = vec![]; 283 | 284 | let mut tick = 0; 285 | loop { 286 | let start = Instant::now(); 287 | 288 | // poll incoming sources 289 | loop { 290 | match sink_receiver.try_recv() { 291 | Ok(sink) => { 292 | info!("new sink added to capture loop"); 293 | sinks.push(sink); 294 | } 295 | Err(mpsc::error::TryRecvError::Empty) => break, 296 | Err(mpsc::error::TryRecvError::Disconnected) => { 297 | info!("stop playback mixer loop: channel closed"); 298 | return; 299 | } 300 | } 301 | } 302 | let count = consumer.pop_slice(&mut buf); 303 | 304 | sinks.retain_mut(|sink| match sink.tick(&buf[..count]) { 305 | Ok(ControlFlow::Continue(())) => true, 306 | Ok(ControlFlow::Break(())) => { 307 | debug!("remove decoder: closed"); 308 | false 309 | } 310 | Err(err) => { 311 | warn!("remove decoder: failed {err:?}"); 312 | false 313 | } 314 | }); 315 | trace!("tick {tick} took {:?} pulled {count}", start.elapsed()); 316 | if start.elapsed() > tick_duration { 317 | warn!( 318 | "capture thread tick exceeded interval (took {:?})", 319 | start.elapsed() 320 | ); 321 | } else { 322 | let sleep_time = tick_duration.saturating_sub(start.elapsed()); 323 | spin_sleep::sleep(sleep_time); 324 | } 325 | tick += 1; 326 | } 327 | } 328 | -------------------------------------------------------------------------------- /callme-egui/src/app.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::BTreeMap, str::FromStr}; 2 | 3 | use anyhow::{anyhow, Context, Result}; 4 | use async_channel::{Receiver, Sender}; 5 | use callme::{ 6 | audio::{AudioConfig, AudioContext}, 7 | rtc::{MediaTrack, RtcConnection, RtcProtocol, TrackKind}, 8 | }; 9 | use eframe::NativeOptions; 10 | use egui::{Color32, RichText, Ui}; 11 | use iroh::{protocol::Router, Endpoint, KeyParsingError, NodeId}; 12 | use tokio::task::JoinSet; 13 | use tracing::{info, warn}; 14 | 15 | const DEFAULT: &str = ""; 16 | 17 | pub struct App { 18 | is_first_update: bool, 19 | state: 
AppState,
}

/// Which top-level screen the UI is showing.
enum UiSection {
    Config,
    Main,
}

struct AppState {
    section: UiSection,
    // `None` until the user pasted something; keeps the parse result so the
    // UI can render an error for invalid input.
    // NOTE(review): generics restored — extraction stripped `<...>` spans;
    // `KeyParsingError` matches the import and `NodeId::from_str` below.
    remote_node_id: Option<Result<NodeId, KeyParsingError>>,
    worker: WorkerHandle,
    our_node_id: Option<NodeId>,
    devices: callme::audio::Devices,
    audio_config: UiAudioConfig,
    calls: BTreeMap<NodeId, CallState>,
}

/// Audio device selection as edited on the config screen.
struct UiAudioConfig {
    selected_input: String,
    selected_output: String,
    processing_enabled: bool,
}

impl From<&UiAudioConfig> for AudioConfig {
    fn from(value: &UiAudioConfig) -> Self {
        // the DEFAULT sentinel maps to `None`, i.e. use the system default
        let input_device = if value.selected_input == DEFAULT {
            None
        } else {
            Some(value.selected_input.to_string())
        };
        let output_device = if value.selected_output == DEFAULT {
            None
        } else {
            Some(value.selected_output.to_string())
        };
        AudioConfig {
            input_device,
            output_device,
            processing_enabled: value.processing_enabled,
        }
    }
}

impl Default for UiAudioConfig {
    fn default() -> Self {
        Self {
            selected_input: DEFAULT.to_string(),
            selected_output: DEFAULT.to_string(),
            processing_enabled: true,
        }
    }
}

impl eframe::App for App {
    fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) {
        if self.is_first_update {
            self.is_first_update = false;
            ctx.set_zoom_factor(1.5);
            // give the worker a way to request repaints from its own thread
            let ctx = ctx.clone();
            let callback = Box::new(move || ctx.request_repaint());
            self.state.cmd(Command::SetUpdateCallback { callback });
        }
        // on android, add some space at the top.
        #[cfg(target_os = "android")]
        egui::TopBottomPanel::top("my_panel")
            .min_height(40.)
            .show(ctx, |_ui| {});

        self.state.update(ctx);
    }
}

impl App {
    /// Spawn the worker thread, enumerate audio devices, and start eframe.
    pub fn run(options: NativeOptions) -> Result<(), eframe::Error> {
        let handle = Worker::spawn();
        let devices =
            callme::audio::AudioContext::list_devices_sync().expect("failed to list audio devices");
        let state = AppState {
            section: UiSection::Config,
            remote_node_id: Default::default(),
            worker: handle,
            our_node_id: None,
            devices,
            audio_config: Default::default(),
            calls: Default::default(),
        };

        let app = App {
            state,
            is_first_update: true,
        };
        eframe::run_native("callme", options, Box::new(|_cc| Ok(Box::new(app))))
    }
}
impl AppState {
    // Drain pending worker events, then draw the active screen.
    fn update(&mut self, ctx: &egui::Context) {
        while let Ok(event) = self.worker.event_rx.try_recv() {
            match event {
                Event::EndpointBound(node_id) => {
                    self.our_node_id = Some(node_id);
                }
                Event::SetCallState(node_id, call_state) => {
                    // an aborted call disappears from the list entirely
                    if matches!(call_state, CallState::Aborted) {
                        self.calls.remove(&node_id);
                    } else {
                        self.calls.insert(node_id, call_state);
                    }
                }
            }
        }

        egui::CentralPanel::default().show(ctx, |ui| match self.section {
            UiSection::Config => self.ui_section_config(ui),
            UiSection::Main => self.ui_section_call(ui),
        });
    }

    // Snapshot the UI selection as a callme AudioConfig.
    fn audio_config(&self) -> AudioConfig {
        (&self.audio_config).into()
    }

    // Main screen: place calls, show our id, list active calls.
    fn ui_section_call(&mut self, ui: &mut Ui) {
        ui.heading("Call a remote node");
        ui.vertical(|ui| {
            ui.horizontal(|ui| {
                if ui
                    .button("📋 Paste node id")
                    .on_hover_text("Click to paste")
                    .clicked()
                {
                    #[cfg(not(target_os = "android"))]
                    let pasted = {
                        arboard::Clipboard::new()
                            .expect("failed to access clipboard")
                            .get_text()
                            .expect("failed to get text from clipboard")
                    };

                    #[cfg(target_os = "android")]
                    let pasted = {
                        android_clipboard::get_text().expect("failed to get text from clipboard")
                    };

                    // store the parse result; errors are rendered below
                    let node_id = NodeId::from_str(&pasted);
                    self.remote_node_id = Some(node_id);
                }
            });
            if let Some(node_id) = self.remote_node_id.as_ref() {
                ui.horizontal(|ui| match node_id {
                    Ok(node_id) => {
                        if ui.button("Call").clicked() {
                            self.cmd(Command::Call { node_id: *node_id });
                        }
                        ui.label(fmt_node_id(&node_id.fmt_short()));
                    }
                    Err(err) => {
                        ui.label(fmt_error(&format!("Invalid node id: {err}")));
                    }
                });
            }
        });

        ui.add_space(8.);
        ui.heading("Accept calls");
        if let Some(node_id) = &self.our_node_id {
            ui.horizontal(|ui| {
                ui.label("Our node id:".to_string());
                ui.label(fmt_node_id(&node_id.fmt_short()));
                if ui
                    .button("📋 Copy")
                    .on_hover_text("Click to copy")
                    .clicked()
                {
                    // clipboard access differs per platform
                    #[cfg(not(target_os = "android"))]
                    {
                        if let Err(err) = arboard::Clipboard::new()
                            .expect("failed to get clipboard")
                            .set_text(node_id.to_string())
                        {
                            warn!("failed to copy text to clipboard: {err}");
                        }
                    }
                    #[cfg(target_os = "android")]
                    if let Err(err) = android_clipboard::set_text(node_id.to_string()) {
                        warn!("failed to copy text to clipboard: {err}");
                    }
                }
            });
        }

        ui.add_space(8.);
        ui.heading("Active calls");
        ui.vertical(|ui| {
            for (node_id, state) in &self.calls {
                let node_id = *node_id;
                ui.horizontal(|ui| {
                    ui.label(fmt_node_id(&node_id.fmt_short()));
                    ui.label(format!("{}", state));
                    // incoming calls offer accept/decline, others only drop
                    if matches!(state, CallState::Incoming) {
                        if ui.button("Accept").clicked() {
                            self.cmd(Command::HandleIncoming {
                                node_id,
                                accept: true,
                            });
                        }
                        if ui.button("Decline").clicked() {
                            self.cmd(Command::HandleIncoming {
                                node_id,
                                accept: false,
                            });
                        }
                    } else if ui.button("Drop").clicked() {
                        self.cmd(Command::Abort { node_id });
                    }
                });
            }
        });
    }

    // Send a command to the worker thread; the worker owns all networking.
    fn cmd(&self, command: Command) {
        self.worker
            .command_tx
            .send_blocking(command)
            .expect("worker thread is dead");
    }

    // Config screen: pick capture/playback devices, then start the audio engine.
    fn ui_section_config(&mut self, ui: &mut Ui) {
        ui.heading("Audio config");
        ui.vertical(|ui| {
            egui::ComboBox::from_label("Capture device")
                .selected_text(&self.audio_config.selected_input)
                .show_ui(ui, |ui| {
                    if ui
                        .selectable_label(self.audio_config.selected_input == DEFAULT, DEFAULT)
                        .clicked()
                    {
                        self.audio_config.selected_input = DEFAULT.to_string();
                    }
                    for device in &self.devices.input {
                        if ui
                            .selectable_label(&self.audio_config.selected_input == device, device)
                            .clicked()
                        {
                            self.audio_config.selected_input = device.to_string()
                        }
                    }
                });

            egui::ComboBox::from_label("Playback device")
                .selected_text(&self.audio_config.selected_output)
                .show_ui(ui, |ui| {
                    if ui
                        .selectable_label(self.audio_config.selected_output == DEFAULT, DEFAULT)
                        .clicked()
                    {
                        self.audio_config.selected_output = DEFAULT.to_string();
                    }
                    for device in &self.devices.output {
                        if ui
                            .selectable_label(&self.audio_config.selected_output == device, device)
                            .clicked()
                        {
                            self.audio_config.selected_output = device.to_string()
                        }
                    }
                });

            #[cfg(feature = "audio-processing")]
            ui.checkbox(
                &mut self.audio_config.processing_enabled,
                "Enable echo cancellation",
            );

            if ui.button("Save & start").clicked() {
                let audio_config = self.audio_config();
                self.cmd(Command::SetAudioConfig { audio_config });
                self.section = UiSection::Main;
            }
        });
    }
}

fn
fmt_node_id(text: &str) -> RichText {
    let text = format!("{text}…");
    egui::RichText::new(text)
        .underline()
        .family(egui::FontFamily::Monospace)
}

fn fmt_error(text: &str) -> RichText {
    egui::RichText::new(text).color(Color32::LIGHT_RED)
}

/// Events emitted by the worker towards the UI.
enum Event {
    EndpointBound(NodeId),
    SetCallState(NodeId, CallState),
}

#[derive(strum::Display)]
enum CallState {
    Incoming,
    Calling,
    Active,
    Aborted,
}

/// Worker-side record of a call's lifecycle.
enum CallInfo {
    Calling,
    Incoming(RtcConnection),
    Active(RtcConnection),
}

// NOTE(review): trait object restored — extraction stripped `<...>`. The
// callback is `move || ctx.request_repaint()` and is invoked from the worker
// thread, so `Send` bounds are required; confirm exact bounds against the
// original source.
type UpdateCallback = Box<dyn Fn() + Send + Sync + 'static>;

enum Command {
    SetUpdateCallback { callback: UpdateCallback },
    SetAudioConfig { audio_config: AudioConfig },
    Call { node_id: NodeId },
    HandleIncoming { node_id: NodeId, accept: bool },
    Abort { node_id: NodeId },
}

/// Networking worker; owns the iroh endpoint and all active calls.
struct Worker {
    // NOTE(review): channel/map generics restored — extraction stripped `<...>`.
    command_rx: Receiver<Command>,
    event_tx: Sender<Event>,
    active_calls: BTreeMap<NodeId, CallInfo>,
    update_callback: Option<UpdateCallback>,
    endpoint: Endpoint,
    handler: RtcProtocol,
    call_tasks: JoinSet<(NodeId, Result<()>)>,
    connect_tasks: JoinSet<(NodeId, Result<(RtcConnection, MediaTrack)>)>,
    _router: Router,
    audio_context: Option<AudioContext>,
}

/// UI-side handle to the worker thread.
struct WorkerHandle {
    command_tx: Sender<Command>,
    event_rx: Receiver<Event>,
}

impl Worker {
    /// Spawn a dedicated thread running a tokio runtime for the worker.
    pub fn spawn() -> WorkerHandle {
        let (command_tx, command_rx) = async_channel::bounded(16);
        let (event_tx, event_rx) = async_channel::bounded(16);
        let handle = WorkerHandle {
            event_rx,
            command_tx,
        };
        std::thread::spawn(move || {
            let rt = tokio::runtime::Builder::new_multi_thread()
                .enable_all()
                .build()
                .expect("failed to start tokio runtime");
            rt.block_on(async move {
                let mut worker = Worker::start(event_tx, command_rx)
                    .await
                    .expect("worker failed to start");
377 | if let Err(err) = worker.run().await { 378 | warn!("worker stopped with error: {err:?}"); 379 | } 380 | }); 381 | }); 382 | handle 383 | } 384 | 385 | async fn emit(&self, event: Event) -> Result<()> { 386 | self.event_tx.send(event).await?; 387 | if let Some(callback) = &self.update_callback { 388 | callback(); 389 | } 390 | Ok(()) 391 | } 392 | 393 | async fn start( 394 | event_tx: async_channel::Sender, 395 | command_rx: async_channel::Receiver, 396 | ) -> Result { 397 | let endpoint = callme::net::bind_endpoint().await?; 398 | let handler = RtcProtocol::new(endpoint.clone()); 399 | let _router = Router::builder(endpoint.clone()) 400 | .accept(RtcProtocol::ALPN, handler.clone()) 401 | .spawn() 402 | .await?; 403 | Ok(Self { 404 | command_rx, 405 | event_tx, 406 | active_calls: Default::default(), 407 | call_tasks: JoinSet::new(), 408 | connect_tasks: JoinSet::new(), 409 | endpoint, 410 | handler, 411 | _router, 412 | audio_context: None, 413 | update_callback: None, 414 | }) 415 | } 416 | 417 | async fn run(&mut self) -> Result<()> { 418 | self.emit(Event::EndpointBound(self.endpoint.node_id())) 419 | .await?; 420 | loop { 421 | tokio::select! { 422 | command = self.command_rx.recv() => { 423 | let command = command?; 424 | if let Err(err) = self.handle_command(command).await { 425 | warn!("command failed: {err}"); 426 | } 427 | } 428 | conn = self.handler.accept() => { 429 | let Some(conn) = conn? 
                    else {
                        break;
                    };
                    self.handle_incoming(conn).await?;
                }
                // a call task finished: the connection is gone either way
                Some(res) = self.call_tasks.join_next(), if !self.call_tasks.is_empty() => {
                    let (node_id, res) = res.expect("connection task panicked");
                    if let Err(err) = res {
                        warn!("connection with {} closed: {err:?}", node_id.fmt_short());
                    } else {
                        info!("connection with {} closed", node_id.fmt_short());
                    }
                    self.active_calls.remove(&node_id);
                    self.emit(Event::SetCallState(node_id, CallState::Aborted))
                        .await?;
                }
                Some(res) = self.connect_tasks.join_next(), if !self.connect_tasks.is_empty() => {
                    let (node_id, res) = res.expect("connect task panicked");
                    self.handle_connected(node_id, res).await?;
                }
            }
        }
        Ok(())
    }

    // Register an unanswered incoming call and notify the UI.
    async fn handle_incoming(&mut self, conn: RtcConnection) -> Result<()> {
        let node_id = conn.transport().remote_node_id()?;
        info!("incoming connection from {}", node_id.fmt_short());
        self.active_calls.insert(node_id, CallInfo::Incoming(conn));
        self.emit(Event::SetCallState(node_id, CallState::Incoming))
            .await?;
        Ok(())
    }

    // Outcome of an outgoing connect attempt: either promote to an active
    // call or drop it and tell the UI.
    async fn handle_connected(
        &mut self,
        node_id: NodeId,
        conn: Result<(RtcConnection, MediaTrack)>,
    ) -> Result<()> {
        match conn {
            Ok((conn, track)) => {
                self.accept_from_connect(conn, track).await?;
            }
            Err(err) => {
                warn!("connection to {} failed: {err:?}", node_id);
                self.active_calls.remove(&node_id);
                self.emit(Event::SetCallState(node_id, CallState::Aborted))
                    .await?;
            }
        }
        Ok(())
    }

    // Start media exchange for a call we initiated (first remote track
    // already received).
    async fn accept_from_connect(&mut self, conn: RtcConnection, track: MediaTrack) -> Result<()> {
        let node_id = conn.transport().remote_node_id()?;
        self.active_calls
            .insert(node_id, CallInfo::Active(conn.clone()));
        self.emit(Event::SetCallState(node_id, CallState::Active))
            .await?;
        let audio_context = self
            .audio_context
            .clone()
            .context("missing audio context")?;
        self.call_tasks.spawn(async move {
            info!("starting connection with {}", node_id.fmt_short());
            let fut = async {
                // play what we already received, then send our capture track
                audio_context.play_track(track).await?;
                let capture_track = audio_context.capture_track().await?;
                conn.send_track(capture_track).await?;
                // drain further tracks until the connection closes
                #[allow(clippy::redundant_pattern_matching)]
                while let Some(_) = conn.recv_track().await? {}
                anyhow::Ok(())
            };
            let res = fut.await;
            info!("connection with {} closed: {:?}", node_id.fmt_short(), res);
            (node_id, res)
        });
        Ok(())
    }

    // Start media exchange for an incoming call the user accepted in the UI.
    async fn accept_from_accept(&mut self, conn: RtcConnection) -> Result<()> {
        let node_id = conn.transport().remote_node_id()?;
        self.active_calls
            .insert(node_id, CallInfo::Active(conn.clone()));
        self.emit(Event::SetCallState(node_id, CallState::Active))
            .await?;
        let audio_context = self
            .audio_context
            .clone()
            .context("missing audio context")?;
        self.call_tasks.spawn(async move {
            info!("starting connection with {}", node_id.fmt_short());
            let fut = async {
                let capture_track = audio_context.capture_track().await?;
                conn.send_track(capture_track).await?;
                info!("added capture track to rtc connection");
                while let Some(remote_track) = conn.recv_track().await?
                {
                    info!(
                        "new remote track: {:?} {:?}",
                        remote_track.kind(),
                        remote_track.codec()
                    );
                    match remote_track.kind() {
                        TrackKind::Audio => {
                            audio_context.play_track(remote_track).await?;
                        }
                        TrackKind::Video => unimplemented!(),
                    }
                }
                anyhow::Ok(())
            };
            let res = fut.await;
            info!("connection with {} closed: {:?}", node_id.fmt_short(), res);
            (node_id, res)
        });
        Ok(())
    }

    // Dispatch a single command received from the UI thread.
    async fn handle_command(&mut self, command: Command) -> Result<()> {
        match command {
            Command::SetUpdateCallback { callback } => {
                self.update_callback = Some(callback);
            }
            Command::SetAudioConfig { audio_config } => {
                let audio_context = AudioContext::new(audio_config).await?;
                self.audio_context = Some(audio_context);
            }
            Command::Call { node_id } => {
                // ignore duplicate call attempts to the same node
                if self.active_calls.contains_key(&node_id) {
                    return Ok(());
                }
                self.active_calls.insert(node_id, CallInfo::Calling);
                self.emit(Event::SetCallState(node_id, CallState::Calling))
                    .await?;

                let handler = self.handler.clone();
                self.connect_tasks.spawn(async move {
                    let fut = async {
                        let conn = handler.connect(node_id).await?;
                        let track = conn.recv_track().await?.ok_or_else(|| {
                            anyhow!("connection closed without receiving a single track")
                        })?;
                        anyhow::Ok((conn, track))
                    };
                    (node_id, fut.await)
                });
            }
            Command::HandleIncoming { node_id, accept } => {
                let Some(CallInfo::Incoming(conn)) = self.active_calls.remove(&node_id) else {
                    return Ok(());
                };
                if accept {
                    self.accept_from_accept(conn).await?;
                } else {
                    conn.transport().close(0u32.into(), b"bye");
                    self.emit(Event::SetCallState(node_id, CallState::Aborted))
                        .await?;
                }
            }
            Command::Abort { node_id } => {
                if let Some(state) =
                    self.active_calls.remove(&node_id) {
                    // close the underlying transport (if any), then tell the UI
                    match state {
                        CallInfo::Calling => {}
                        CallInfo::Active(conn) => {
                            conn.transport().close(0u32.into(), b"bye");
                        }
                        CallInfo::Incoming(conn) => {
                            conn.transport().close(0u32.into(), b"bye");
                        }
                    }
                    self.emit(Event::SetCallState(node_id, CallState::Aborted))
                        .await?;
                }
            }
        }
        Ok(())
    }
}