├── .cargo └── config.toml ├── .github ├── fix-typos.yml ├── pull_request_template.md └── workflows │ └── cargo-test.yml ├── .gitignore ├── Cargo.toml ├── Dockerfile.aarch64-android ├── LICENSE ├── README.md ├── docs └── runes-quickstart.md ├── extensions ├── readme.md ├── warp-blink-wrtc │ ├── Cargo.toml │ ├── README.md │ ├── docs │ │ ├── Blink Controller.drawio.png │ │ └── Gossip Listener.drawio.png │ └── src │ │ ├── blink_impl │ │ ├── blink_controller.rs │ │ ├── data │ │ │ └── mod.rs │ │ ├── gossipsub_listener.rs │ │ ├── gossipsub_sender.rs │ │ ├── mod.rs │ │ ├── readme.md │ │ ├── signaling.rs │ │ └── store.rs │ │ ├── host_media │ │ ├── audio │ │ │ ├── mod.rs │ │ │ ├── sink │ │ │ │ ├── decoder_task.rs │ │ │ │ ├── mod.rs │ │ │ │ └── receiver_task.rs │ │ │ ├── source │ │ │ │ ├── encoder_task.rs │ │ │ │ ├── mod.rs │ │ │ │ └── sender_task.rs │ │ │ └── utils │ │ │ │ ├── audio_buf.rs │ │ │ │ ├── audio_device_config_impl.rs │ │ │ │ ├── automute.rs │ │ │ │ ├── codec_config.rs │ │ │ │ ├── framer_output.rs │ │ │ │ ├── loudness.rs │ │ │ │ ├── mod.rs │ │ │ │ ├── resampler.rs │ │ │ │ └── speech.rs │ │ ├── default_controller.rs │ │ ├── loopback │ │ │ ├── mod.rs │ │ │ ├── receiver.rs │ │ │ └── sender.rs │ │ ├── loopback_controller.rs │ │ ├── mod.rs │ │ └── mp4_logger │ │ │ ├── loggers │ │ │ ├── dummy.rs │ │ │ ├── mod.rs │ │ │ └── opus.rs │ │ │ └── mod.rs │ │ ├── lib.rs │ │ ├── notify_wrapper.rs │ │ ├── rtp_logger.rs │ │ └── simple_webrtc │ │ ├── events.rs │ │ ├── mod.rs │ │ └── time_of_flight │ │ └── mod.rs └── warp-ipfs │ ├── Cargo.toml │ ├── examples │ ├── README.md │ ├── identity-interface.rs │ ├── ipfs-example.rs │ ├── ipfs-friends.rs │ ├── ipfs-identity.rs │ ├── ipfs-persistent.rs │ ├── messenger.rs │ ├── wasm-ipfs-friends │ │ ├── Cargo.toml │ │ ├── README.md │ │ ├── src │ │ │ └── lib.rs │ │ └── static │ │ │ └── index.html │ ├── wasm-ipfs-identity │ │ ├── Cargo.toml │ │ ├── readme.md │ │ ├── src │ │ │ └── lib.rs │ │ └── static │ │ │ └── index.html │ └── wasm-ipfs-storage │ │ ├── Cargo.toml │ │ ├── readme.md │ │ ├── src │ │ └── lib.rs │ │ └── static │ │ └── index.html │ ├── shuttle │ ├── Cargo.toml │ └── src │ │ └── main.rs │ ├── src │ ├── behaviour │ │ ├── mod.rs │ │ ├── phonebook.rs │ │ └── phonebook │ │ │ └── handler.rs │ ├── config.rs │ ├── lib.rs │ ├── shuttle │ │ ├── gateway │ │ │ └── mod.rs │ │ ├── identity.rs │ │ ├── identity │ │ │ └── protocol.rs │ │ ├── message.rs │ │ ├── message │ │ │ └── protocol.rs │ │ ├── mod.rs │ │ ├── server.rs │ │ ├── store.rs │ │ ├── store │ │ │ ├── identity.rs │ │ │ ├── messages.rs │ │ │ └── root.rs │ │ └── subscription_stream.rs │ ├── store │ │ ├── community.rs │ │ ├── conversation.rs │ │ ├── conversation │ │ │ ├── message.rs │ │ │ └── reference.rs │ │ ├── discovery.rs │ │ ├── document.rs │ │ ├── document │ │ │ ├── cache.rs │ │ │ ├── files.rs │ │ │ ├── identity.rs │ │ │ ├── image_dag.rs │ │ │ └── root.rs │ │ ├── event_subscription.rs │ │ ├── files.rs │ │ ├── identity.rs │ │ ├── keystore.rs │ │ ├── message.rs │ │ ├── message │ │ │ ├── attachment.rs │ │ │ ├── community_task.rs │ │ │ └── task.rs │ │ ├── mod.rs │ │ ├── payload.rs │ │ ├── phonebook.rs │ │ └── queue.rs │ ├── thumbnail.rs │ └── utils.rs │ └── tests │ ├── accounts.rs │ ├── common.rs │ ├── community.rs │ ├── direct.rs │ ├── files.rs │ ├── friends.rs │ └── group.rs ├── tools ├── audio-codec-repl │ ├── Cargo.toml │ └── src │ │ ├── encode.rs │ │ ├── feedback.rs │ │ ├── loudness.rs │ │ ├── main.rs │ │ ├── packetizer.rs │ │ ├── play.rs │ │ └── record.rs ├── blink-repl │ ├── Cargo.toml │ └── src │ │ ├── 
logger.rs │ │ └── main.rs ├── fs │ ├── Cargo.toml │ ├── README.md │ └── src │ │ └── lib.rs ├── inspect │ ├── Cargo.toml │ └── src │ │ └── main.rs ├── opencv-test │ ├── Cargo.toml │ ├── README.md │ └── src │ │ ├── encode │ │ ├── aom.rs │ │ ├── h264.rs │ │ ├── mod.rs │ │ ├── rav1e.rs │ │ └── x264.rs │ │ ├── lib.rs │ │ ├── main.rs │ │ └── utils │ │ ├── mod.rs │ │ ├── rgb.rs │ │ └── yuv.rs ├── relay-server │ ├── Cargo.toml │ └── src │ │ ├── config.rs │ │ └── main.rs └── video-codec-cli │ ├── Cargo.toml │ └── src │ ├── encode │ ├── aom.rs │ └── mod.rs │ ├── lib.rs │ ├── main.rs │ └── utils │ ├── mod.rs │ ├── rgb.rs │ └── yuv.rs └── warp ├── Cargo.toml └── src ├── blink ├── audio_config.rs ├── call_state.rs └── mod.rs ├── constellation ├── directory.rs ├── file.rs ├── item.rs └── mod.rs ├── crypto ├── cipher.rs ├── hash.rs ├── keypair.rs ├── mod.rs └── multihash.rs ├── data └── mod.rs ├── error.rs ├── lib.rs ├── module.rs ├── multipass ├── generator.rs ├── identity.rs └── mod.rs ├── raygun ├── community.rs ├── group.rs └── mod.rs ├── tesseract └── mod.rs ├── warp.rs └── warp └── dummy.rs /.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [profile.dev] 2 | debug = 0 3 | strip = "debuginfo" 4 | 5 | -------------------------------------------------------------------------------- /.github/fix-typos.yml: -------------------------------------------------------------------------------- 1 | name: Automatically fix typos 2 | on: 3 | push: 4 | branches: 5 | - main 6 | 7 | jobs: 8 | build: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v3 12 | with: 13 | ref: main 14 | - uses: sobolevn/misspell-fixer-action@master 15 | - uses: peter-evans/create-pull-request@v4.2.0 16 | env: 17 | ACTIONS_ALLOW_UNSECURE_COMMANDS: 'true' 18 | with: 19 | token: ${{ secrets.GITHUB_TOKEN }} 20 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | 4 | 5 | **What this PR does** 📖 6 | 7 | **Which issue(s) this PR fixes** 🔨 8 | 9 | 10 | 11 | **Special notes for reviewers** 🗒️ 12 | 13 | **Additional comments** 🎤 14 | -------------------------------------------------------------------------------- /.github/workflows/cargo-test.yml: -------------------------------------------------------------------------------- 1 | name: Lint and Tests 2 | on: [push] 3 | jobs: 4 | lint: 5 | name: Lint 6 | runs-on: ubuntu-latest 7 | env: 8 | RUSTFLAGS: -D warnings 9 | steps: 10 | - uses: actions/checkout@v4 11 | 12 | - name: Install dependencies 13 | run: | 14 | sudo apt-get update 15 | sudo apt-get install -y build-essential pkg-config libssl-dev 16 | sudo apt-get install -y librust-alsa-sys-dev libxdo-dev llvm-dev cmake libudev-dev 17 | 18 | - uses: dtolnay/rust-toolchain@master 19 | with: 20 | toolchain: stable 21 | components: clippy, rustfmt 22 | - uses: Swatinem/rust-cache@v2 23 | - name: Check formatting 24 | run: cargo fmt -- --check 25 | - name: Catch common mistakes and unwrap calls 26 | run: cargo clippy -- -D warnings 27 | 28 | test-native: 29 | name: Test (Native) 30 | runs-on: ubuntu-latest 31 | env: 32 | RUSTFLAGS: -D warnings 33 | RUST_BACKTRACE: full 34 | steps: 35 | - uses: actions/checkout@v4 36 | 37 | - name: Install dependencies 38 | run: | 39 | sudo apt-get update 40 | sudo apt-get install -y build-essential pkg-config libssl-dev 41 | sudo apt-get install -y librust-alsa-sys-dev libxdo-dev llvm-dev cmake 
libudev-dev 42 | 43 | - uses: dtolnay/rust-toolchain@master 44 | with: 45 | toolchain: stable 46 | - uses: Swatinem/rust-cache@v2 47 | - run: cargo test 48 | 49 | test-wasm: 50 | name: Check (WASM) 51 | runs-on: ubuntu-latest 52 | env: 53 | RUSTFLAGS: -D warnings 54 | RUST_BACKTRACE: full 55 | steps: 56 | - uses: actions/checkout@v4 57 | 58 | - name: Install dependencies 59 | run: | 60 | sudo apt-get update 61 | sudo apt-get install -y build-essential pkg-config libssl-dev 62 | sudo apt-get install -y librust-alsa-sys-dev libxdo-dev llvm-dev cmake libudev-dev 63 | 64 | - uses: dtolnay/rust-toolchain@master 65 | with: 66 | toolchain: stable 67 | target: wasm32-unknown-unknown 68 | - uses: Swatinem/rust-cache@v2 69 | - run: cargo check -p warp-ipfs -p warp --target wasm32-unknown-unknown -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | /target/ 4 | 5 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 6 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 7 | Cargo.lock 8 | 9 | # These are backup files generated by rustfmt 10 | **/*.rs.bk 11 | 12 | # C/C++ Headers 13 | **/*.h 14 | 15 | /c/bin 16 | 17 | # Misc files generated by system or editor 18 | **/.DS_Store 19 | .idea 20 | .vscode 21 | 22 | /data 23 | 24 | .bash_history 25 | 26 | *.csv 27 | 28 | # Built wasm files 29 | built-wasm/ -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace.package] 2 | version = "0.1.0" 3 | edition = "2021" 4 | license = "MIT" 5 | rust-version = "1.83" 6 | repository = "https://github.com/Satellite-im/Warp" 7 | 8 | 9 | [workspace] 10 | members = [ 11 | "extensions/*", 12 | "warp", 13 | "tools/*", 14 | "extensions/warp-ipfs/shuttle", 15 | "extensions/warp-ipfs/examples/wasm-ipfs-identity", 16 | "extensions/warp-ipfs/examples/wasm-ipfs-friends", 17 | "extensions/warp-ipfs/examples/wasm-ipfs-storage", 18 | ] 19 | exclude = ["deprecated/*", "tools/opencv-test", "tools/video-codec-cli", "extensions/warp-blink-wrtc", "tools/blink-repl", "tools/audio-codec-repl"] 20 | 21 | resolver = "2" 22 | 23 | [workspace.dependencies] 24 | 25 | # Async and futures crates 26 | futures = { version = "0.3", default-features = false, features = ["std"] } 27 | futures-timer = "3.0.3" 28 | futures-timeout = "0.1.0" 29 | async-trait = { version = "0.1" } 30 | async-stream = "0.3" 31 | async-broadcast = "0.5" 32 | pollable-map = "0.1.0-alpha.1" 33 | tokio = { version = "1", features = [ 34 | "macros", 35 | "fs", 36 | "net", 37 | "rt-multi-thread", 38 | "sync", 39 | "time", 40 | ] } 41 | tokio-util = { version = "0.7" } 42 | tokio-stream = { version = "0.1" } 43 | 44 | # Crypto crates 45 | ed25519-dalek = { version = "1", default-features = false } 46 | sha2 = { version = "0.10" } 47 | hmac = { version = "0.12.0", default-features = false } 48 | digest = { version = "0.10" } 49 | aes-gcm = { version = "0.10" } 50 | zeroize = "1" 51 | rand = { version = "0.8" } 52 | multihash = { version = "0.18" } 53 | did-key = { git = "https://github.com/Satellite-im/did-key.rs", branch = "backport-patch-v0" } 54 | tiny-bip39 = "1.0" 55 | 56 | # Error handling crates 57 | anyhow = { version = "1" } 58 | thiserror = "1.0" 59 | 60 | # Sync crates 
61 | parking_lot = { version = "0.12" } 62 | once_cell = "1.16" 63 | 64 | # Time crate 65 | chrono = { version = "~0.4.27", default-features = false, features = [ 66 | "serde", 67 | "wasmbind", 68 | "now", 69 | ] } 70 | 71 | # Encoding and Serializing Crates 72 | serde = { version = "1.0", features = ["derive", "rc"] } 73 | serde_json = { version = "1.0" } 74 | serde_cbor = "0.11.2" 75 | cbor4ii = { version = "0.3.2", features = ["serde1", "use_std"] } 76 | serde_yaml = "0.9" 77 | toml = "0.5" 78 | bs58 = "0.4" 79 | hex = "0.4" 80 | ipld-core = { version = "0.4.1" } 81 | bytes = { version = "1", features = ["serde"] } 82 | bincode = "1" 83 | image = { version = "0.25.2", default-features = false, features = [ 84 | "default-formats", 85 | ] } 86 | mediatype = { version = "0.19", features = ["serde"] } 87 | 88 | # Misc 89 | dyn-clone = "1.0" 90 | uuid = { version = "1", features = ["serde", "v4"] } 91 | derive_more = "0.99" 92 | paste = "1.0" 93 | tracing = { version = "0.1" } 94 | either = "1" 95 | void = "1" 96 | indexmap = { version = "2.4.0", features = ["serde"] } 97 | 98 | # ipfs dependency 99 | rust-ipfs = "0.14.0" 100 | 101 | # wasm crates 102 | wasm-bindgen = "0.2" 103 | gloo = "0.7" 104 | web-sys = "0.3" 105 | js-sys = "0.3" 106 | console_error_panic_hook = "0.1.7" 107 | wasm-streams = "0.4" 108 | wasm-bindgen-futures = "0.4" 109 | serde-wasm-bindgen = "0.4" 110 | send_wrapper = "0.6.0" 111 | tracing-wasm = "0.2.0" 112 | 113 | # Blink related crates 114 | # av-data is needed to use libaom. need to ensure that Warp and libaom use the same version of av-data 115 | av-data = "*" 116 | libaom = { git = "https://github.com/Satellite-im/aom-rs", branch = "feat/windows-build" } 117 | mp4 = { git = "https://github.com/satellite-im/mp4-rust", rev = "9abb40d9a7690c3d5012a9f259f4b22adab06ec3" } 118 | eye = { git = "https://github.com/raymanfx/eye-rs.git", rev = "24324eb629dd73f349d0b4678cb0dd4dc5d75f1c" } 119 | opus = { git = "https://github.com/Satellite-im/opus-rs", rev = "893b9f7e7e0cd00d13a64533967c6d2d6b1cb044" } 120 | 121 | warp = { path = "./warp" } 122 | warp-ipfs = { path = "./extensions/warp-ipfs" } 123 | warp-blink-wrtc = { path = "./extensions/warp-blink-wrtc" } 124 | -------------------------------------------------------------------------------- /Dockerfile.aarch64-android: -------------------------------------------------------------------------------- 1 | FROM runmymind/docker-android-sdk:ubuntu-standalone-20230711 2 | # the above docker file can be found here: https://github.com/mindrunner/docker-android-sdk/blob/main/ubuntu/standalone/Dockerfile 3 | 4 | RUN apt update && apt install -y \ 5 | build-essential \ 6 | pkg-config \ 7 | git \ 8 | ssh \ 9 | curl \ 10 | openssl \ 11 | libssl-dev \ 12 | wget \ 13 | gettext \ 14 | autoconf \ 15 | automake \ 16 | libtool \ 17 | zip 18 | RUN apt install -y g++-aarch64-linux-gnu libc6-dev-arm64-cross 19 | RUN apt install -y libgtk-3-dev libwebkit2gtk-4.1-dev libayatana-appindicator3-dev librsvg2-dev librust-alsa-sys-dev 20 | 21 | WORKDIR /root 22 | RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | bash -s -- -y 23 | # pretty sure these next 2 lines aren't needed 24 | # RUN . $HOME/.cargo/env && rustup target add aarch64-unknown-linux-gnu 25 | # RUN . $HOME/.cargo/env && rustup toolchain install stable-aarch64-unknown-linux-gnu 26 | RUN . $HOME/.cargo/env && rustup target add aarch64-linux-android 27 | RUN . 
$HOME/.cargo/env && cargo install --git https://github.com/tauri-apps/cargo-mobile2 28 | 29 | WORKDIR /root/cmake 30 | RUN wget https://github.com/Kitware/CMake/releases/download/v3.23.1/cmake-3.23.1.tar.gz 31 | RUN tar -xzvf cmake-3.23.1.tar.gz 32 | RUN cd cmake-3.23.1 && ./configure 33 | RUN cd cmake-3.23.1 && make -j$(nproc --ignore=2) 34 | RUN cd cmake-3.23.1 && make install 35 | 36 | # saving this for reference. the android-sdk docker container already has everything in /opt/android-sdk-linux 37 | # WORKDIR /root/android 38 | # RUN wget https://dl.google.com/android/repository/android-ndk-r25c-linux.zip 39 | # RUN unzip android-ndk-r25c-linux.zip 40 | 41 | ENV MY_ANDROID_NDK_PATH=/opt/android-sdk-linux/ndk-bundle 42 | ENV MY_SYSROOT=$MY_ANDROID_NDK_PATH/toolchains/llvm/prebuilt/linux-x86_64 43 | 44 | ENV TOOLCHAIN_BIN_PATH=$MY_SYSROOT/bin 45 | ENV TOOLCHAIN_PREFIX=$TOOLCHAIN_BIN_PATH/aarch64-linux-android30- 46 | ENV LLVM_TOOLCHAIN_PREFIX=$MY_SYSROOT/bin/llvm- 47 | 48 | # a lot of these environment variables were copied from cross-rs https://github.com/cross-rs/cross 49 | # cargo seems to append the target name to AR, CC, etc. 50 | # other code uses CC, LD, etc. Those are defined after. 51 | # some variables that cross-rs used may not be needed, like TMPDIR. 52 | ENV CARGO_TARGET_AARCH64_LINUX_ANDROID_LINKER="$TOOLCHAIN_PREFIX"clang \ 53 | AR_aarch64_linux_android="$LLVM_TOOLCHAIN_PREFIX"ar \ 54 | AS_aarch64_linux_android="$LLVM_TOOLCHAIN_PREFIX"as \ 55 | CC_aarch64_linux_android="$TOOLCHAIN_PREFIX"clang \ 56 | CXX_aarch64_linux_android="$TOOLCHAIN_PREFIX"clang++ \ 57 | LD_aarch64_linux_android=$TOOLCHAIN_BIN_PATH/ld \ 58 | NM_aarch64_linux_android="$LLVM_TOOLCHAIN_PREFIX"-nm \ 59 | OBJCOPY_aarch64_linux_android="$LLVM_TOOLCHAIN_PREFIX"objcopy \ 60 | OBJDUMP_aarch64_linux_android="$LLVM_TOOLCHAIN_PREFIX"objdump \ 61 | RANLIB_aarch64_linux_android="$LLVM_TOOLCHAIN_PREFIX"ranlib \ 62 | READELF_aarch64_linux_android="$LLVM_TOOLCHAIN_PREFIX"readelf \ 63 | SIZE_aarch64_linux_android="$LLVM_TOOLCHAIN_PREFIX"size \ 64 | STRINGS_aarch64_linux_android="$LLVM_TOOLCHAIN_PREFIX"strings \ 65 | STRIP_aarch64_linux_android="$LLVM_TOOLCHAIN_PREFIX"strip \ 66 | CMAKE_TOOLCHAIN_FILE_aarch64_linux_android=$MY_ANDROID_NDK_PATH/build/cmake/android.toolchain.cmake \ 67 | BINDGEN_EXTRA_CLANG_ARGS_aarch64_linux_android="--sysroot=$MY_SYSROOT" \ 68 | DEP_Z_INCLUDE="$MY_SYSROOT/usr/include"/ \ 69 | RUST_TEST_THREADS=1 \ 70 | TMPDIR=/tmp/ \ 71 | ANDROID_DATA=/ \ 72 | ANDROID_DNS_MODE=local \ 73 | ANDROID_ROOT=/system \ 74 | CROSS_CMAKE_SYSTEM_NAME=Android \ 75 | CROSS_CMAKE_SYSTEM_PROCESSOR=aarch64 \ 76 | CROSS_CMAKE_CRT=android \ 77 | CROSS_CMAKE_OBJECT_FLAGS="-DANDROID -ffunction-sections -fdata-sections -fPIC" \ 78 | PKG_CONFIG_SYSROOT_DIR=/ 79 | 80 | ENV CC=$CC_aarch64_linux_android \ 81 | CXX=$CXX_aarch64_linux_android \ 82 | LD=$LD_aarch64_linux_android \ 83 | AR=$AR_aarch64_linux_android 84 | 85 | # for ring library: https://github.com/briansmith/ring/blob/main/BUILDING.md. 86 | # one of their dependencies seems to use non-standard environment variables for CC and AR. 
87 | ENV TARGET_CC=$CC_aarch64_linux_android TARGET_AR=$AR_aarch64_linux_android 88 | 89 | # cross compile opus 90 | WORKDIR /root 91 | RUN git clone https://github.com/xiph/opus.git 92 | WORKDIR /root/opus 93 | RUN git checkout 7b05f44f4baadf34d8d1073f4ff69f1806d5cdb4 94 | RUN ./autogen.sh 95 | RUN ./configure --host=aarch64-linux-android 96 | RUN make -j$(nproc --ignore=2) 97 | 98 | # for audiopus_sys 99 | ENV OPUS_STATIC=true OPUS_LIB_DIR=/root/opus/.libs 100 | 101 | # for tauri mobile 102 | ENV NDK_HOME=/opt/android-sdk-linux/ndk-bundle SDK_HOME=/opt/android-sdk-linux 103 | 104 | WORKDIR /root/build 105 | 106 | CMD ["bash"] 107 | 108 | 109 | # docker build . -t aarch64-builder -f Dockerfile.aarch64-android 110 | # docker run --rm -ti -v `pwd`:/root/build aarch64-builder 111 | # cargo android apk build aarch64 112 | # cargo build --target aarch64-linux-android --verbose 113 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 Satellite 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Warp 2 | 3 | Interface Driven Distributed Data Service 4 | 5 | ### Overview 6 | 7 | Warp can run as a single binary, providing an interface into the core technologies that run 8 | Satellite. This allows us to avoid rewriting the same tech over and over when developing for 9 | different platforms. Warp will work on most phones, tablets, computers, and consoles. 10 | 11 | It provides abstractions to many different modules which are required to run Satellite. These 12 | modules include Messaging, Caching, File Sharing & Storage, RTC connections, and more. Because we 13 | focus on building these modules as interfaces first and then allow implementation layers to be built 14 | on top of these, we can easily change the core technologies with no extra development required on 15 | the "front-end" stacks. This means we can jump from multiple blockchains or decentralized solutions 16 | without affecting the front-end application. 17 | 18 | Additionally, libraries to interface with Warp (will) exist in JavaScript (TypeScript), Java, 19 | Python, and more. 
So you can quickly develop your platforms and integrations on top of the Satellite 20 | tech stack. Lastly, a REST API service can be enabled for Warp. However, it should never be exposed 21 | outside of localhost. 22 | 23 | ### Build Requirement 24 | 25 | #### Windows 26 | 27 | ***TBD*** 28 | 29 | #### Linux 30 | 31 | **Ubuntu WSL (Maybe also Ubuntu + Debian)** 32 | | Dep | Install Command | 33 | |------|------------------------------------------------------------------| 34 | | Rust | `curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs \| sh` | 35 | | Build Essentials | `sudo apt install build-essential` | 36 | | CMake | `sudo apt install cmake` | 37 | | LLVM libs & headers | `sudo apt install llvm-dev` | 38 | | udev libs & headers | `sudo apt install libudev-dev` | 39 | 40 | **Fedora 38** 41 | | Dep | Install Command | 42 | |------|------------------------------------------------------------------| 43 | | Rust | `curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs \| sh` | 44 | | Build Essentials | `sudo dnf groupinstall "Development Tools" "Development Libraries"` | 45 | | CMake | `sudo dnf install cmake` | 46 | | LLVM libs & headers | `sudo dnf install llvm-devel` | 47 | | udev libs & headers | `sudo dnf install libudev-devel` | 48 | 49 | #### Mac 50 | 51 | | Dep | Install Command | 52 | |------|------------------------------------------------------------------| 53 | | Rust | `curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs \| sh` | 54 | | CMake | `brew install cmake` | 55 | 56 | ## Usage 57 | 58 | [See warp-ipfs examples](./extensions/warp-ipfs/examples/README.md) 59 | 60 | ## Docs 61 | 62 | http://warp.satellite.im/ 63 | -------------------------------------------------------------------------------- /docs/runes-quickstart.md: -------------------------------------------------------------------------------- 1 | How to create and use runes on our own testnet: 2 | ---------------------------------------------------- 3 | 1 - Install `bitcoind` (pre-compiled) or build from source: https://github.com/bitcoin/bitcoin 4 | 5 | 2 - Start a node and join the network 6 | ``` 7 | bitcoind -regtest -txindex -addnode= 8 | ``` 9 | 3 - In another terminal, install `ord` (pre-compiled) or build from source: https://github.com/ordinals/ord 10 | 11 | 4 - Start ord indexer 12 | ``` 13 | ord --regtest --index-runes server 14 | ``` 15 | 5 - In another terminal, run these commands to create a wallet, produce a receiving address, and mine 101 blocks. (Mined coins need 100 blocks before they can be used. Mining difficulty is disabled, so blocks are mined instantly) This will get you coins to pay for fees in the next steps. Alternatively, you could ask someone to send you some coins. 16 | ``` 17 | ord --regtest wallet create 18 | ord --regtest wallet receive 19 | bitcoin-cli -regtest generatetoaddress 101
20 | ord --regtest wallet balance 21 | ``` 22 | 23 | 6 - Create the `etch.yaml` and `icon.svg` files in your current directory 24 | 25 | `etch.yaml` 26 | ``` 27 | mode: separate-outputs 28 | etching: 29 | rune: HOUSE.IS.ON.FIRE 30 | symbol: '🔥' 31 | divisibility: 0 32 | premine: 1000 33 | supply: 1000000 34 | terms: 35 | amount: 1000 36 | cap: 999 37 | inscriptions: 38 | - file: icon.svg 39 | ``` 40 | 41 | `icon.svg` 42 | ``` 43 | 44 | 45 | 57 | 58 | 59 | 60 | ``` 61 | 62 | 7 - Create rune and mine 6 blocks (to prevent front-running, creation is done using commit-reveal, requiring at least 6 blocks before submitting the reveal transaction) 63 | ``` 64 | ord --regtest wallet batch --fee-rate 1 --batch etch.yaml 65 | bitcoin-cli -regtest generatetoaddress 6
66 | ``` 67 | 68 | 8 - This `etch.yaml` is configured to have a premine of 1000 units, which should be in your wallet. This config also has an open-mint feature (defined under `terms`) where anyone can mint 1000 units to themselves. The config limits this open-mint to a maximum of 999 times. Use the open-mint and mine 1 block: 69 | ``` 70 | ord --regtest wallet mint --fee-rate 1 --rune <rune> 71 | bitcoin-cli -regtest generatetoaddress 1 <address>
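# note: each successful open mint issues terms.amount = 1000 units and can happen at most cap = 999 times,
# so supply = premine + (amount × cap) = 1000 + (1000 × 999) = 1,000,000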
72 | ``` 73 | 74 | 9 - Transfer runes and mine 1 block: 75 | ``` 76 | ord --regtest wallet send --fee-rate 1 <address> <amount>:<rune> 77 | bitcoin-cli -regtest generatetoaddress 1 <address>
78 | ``` 79 | 80 | 10 - View runes in the ord indexer: `localhost/runes` 81 | 82 | 83 | 11 - Some unusual properties of runes: 84 | - You may opt to have the identifier (which must be unique) be randomly assigned, or specifically choose it. In this case it is `HOUSE.IS.ON.FIRE`. The dots are just for readability and don't count as characters in the identifier, so `HOUSEISONFIRE` is the same thing. 85 | - The identifier must be 13-26 characters long. The minimum length requirement is gradually relaxed over the next 4 years, at which point even 1-character identifiers will become available. 86 | - The `symbol` is just 1 character, does not need to be unique, and can be any unicode symbol, including emojis. 87 | -------------------------------------------------------------------------------- /extensions/readme.md: -------------------------------------------------------------------------------- 1 | # Plugins 2 | 3 | These are implementations built on Warp modules. Some of the plugins are examples that show how the modules can be used. 4 | 5 | ## warp-pd-stretto 6 | 7 | Pocket Dimension implementation with [Stretto](https://github.com/al8n/stretto), a high-performance, thread-safe memory 8 | cache written in Rust. This allows for in-memory caching and should be used for development purposes. 9 | 10 | ## warp-pd-flatfile 11 | 12 | Flatfile implementation for Pocket Dimension to cache data to disk. 13 | 14 | ## warp-fs-memory 15 | 16 | In-memory filesystem implementation. **This extension is designed for development purposes only and should not be used in production** 17 | 18 | ## warp-fs-storj 19 | 20 | Implementation of [StorJ](https://storj.io) for Constellation, a decentralized cloud storage provider. This extension utilizes StorJ's S3-compatible API. 21 | 22 | ### Note 23 | 24 | This extension requires access and secret keys from StorJ to operate. You can get them by [signing up](https://us1.storj.io/signup). 25 | 26 | ## warp-fs-ipfs 27 | 28 | Implementation of [IPFS](https://ipfs.io/) for Constellation, a peer-to-peer protocol for storing and accessing various kinds of data. This extension connects to an IPFS node via its HTTP API. 29 | 30 | ### Note 31 | 32 | For this extension to work, one would need to have an IPFS node installed or connect to an IPFS node via HTTP.
33 | 34 | ## warp-mp-solana 35 | 36 | **TODO** 37 | 38 | ## warp-rg-ipfs 39 | 40 | **TODO** -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "warp-blink-wrtc" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [lib] 7 | crate-type = ["cdylib", "rlib", "staticlib"] 8 | 9 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 10 | 11 | [dependencies] 12 | anyhow = { workspace = true } 13 | async-trait = { workspace = true } 14 | async-stream = { workspace = true } 15 | bytes = { workspace = true } 16 | # the mp4 crate uses byteorder 17 | byteorder = "*" 18 | bs58 = { workspace = true } 19 | derive_more = { workspace = true } 20 | futures = { workspace = true } 21 | rust-ipfs = { workspace = true } 22 | libipld = { workspace = true } 23 | log = "0.4.17" 24 | mp4 = { workspace = true } 25 | once_cell = { workspace = true } 26 | opus = { workspace = true } 27 | uuid = { workspace = true } 28 | rand = { workspace = true } 29 | ringbuf = "0.3" 30 | serde = { workspace = true, features = ["derive"] } 31 | serde_cbor = { workspace = true } 32 | tokio = { workspace = true } 33 | warp.workspace = true 34 | webrtc = "0.6.0" 35 | rayon = "1.8" 36 | parking_lot.workspace = true 37 | 38 | # media 39 | cpal = "0.15.0" 40 | 41 | [build-dependencies] 42 | cbindgen = "0.23" 43 | 44 | [features] 45 | build-header = [] 46 | loopback = [] 47 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/README.md: -------------------------------------------------------------------------------- 1 | # warp-blink-wrtc 2 | 3 | ## Introduction 4 | This extension is an implementation of the [Blink trait](https://github.com/Satellite-im/Warp/blob/main/warp/src/blink/mod.rs), providing teleconferencing capabilities. There main modules are: 5 | - [simple-webrtc](https://github.com/Satellite-im/Warp/blob/main/extensions/warp-blink-wrtc/src/simple_webrtc/mod.rs): built on top of the [webrtc-rs](https://github.com/webrtc-rs/webrtc) crate, manages multiple concurrent peer connections and their associated media streams. 6 | - [host-media](https://github.com/Satellite-im/Warp/blob/main/extensions/warp-blink-wrtc/src/host_media): deals with audio I/O using the following modules: 7 | - [audio/sink](https://github.com/Satellite-im/Warp/tree/main/extensions/warp-blink-wrtc/src/host_media/audio/sink): reads RTP packets from a stream, decodes them, and feeds them to the output device 8 | - [audio/source](https://github.com/Satellite-im/Warp/tree/main/extensions/warp-blink-wrtc/src/host_media/audio/source): reads audio from an input device, encodes it, splits the opus frames into RTP packets, and writes them to a stream. 9 | - [mp4_logger](https://github.com/Satellite-im/Warp/tree/main/extensions/warp-blink-wrtc/src/host_media/mp4_logger): writes opus packets to an mp4 file, using a different track for each concurrent audio stream. 10 | - [loopback](https://github.com/Satellite-im/Warp/tree/main/extensions/warp-blink-wrtc/src/host_media/loopback): exists for testing purposes; used with the `loopback_controller`. 
11 | - [blink-impl](https://github.com/Satellite-im/Warp/tree/main/extensions/warp-blink-wrtc/src/blink_impl): implements the `Blink` trait, providing a unified API for the following: 12 | - selecting audio I/O devices 13 | - initiating audio calls with one or more peers 14 | - answering/declining calls 15 | - mute/unmute self 16 | - record call 17 | 18 | ## blink-impl 19 | 20 | ### Background 21 | Peers are discovered using the IPFS network. WebRTC signaling is performed using gossip (exposed by the IPFS crate). 22 | 23 | 24 | ### Overview 25 | The core of the `blink-impl` module is a `BlinkController` which manages external commands and signals from WebRTC and gossipsub. 26 | Diagrams are provided for the more complicated modules. 27 | 28 | #### BlinkController Diagram 29 | 30 | ![blink-impl's Blink Controller](docs/Blink%20Controller.drawio.png) 31 | 32 | #### GossipListener Diagram 33 | 34 | ![blink-impl's Gossip Listener](docs/Gossip%20Listener.drawio.png) 35 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/docs/Blink Controller.drawio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Satellite-im/Warp/aca31b55d130536c579b0e69d897d0da1a5299a9/extensions/warp-blink-wrtc/docs/Blink Controller.drawio.png -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/docs/Gossip Listener.drawio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Satellite-im/Warp/aca31b55d130536c579b0e69d897d0da1a5299a9/extensions/warp-blink-wrtc/docs/Gossip Listener.drawio.png -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/blink_impl/data/mod.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use uuid::Uuid; 3 | use warp::{ 4 | blink::{CallInfo, CallState, ParticipantState}, 5 | crypto::DID, 6 | }; 7 | 8 | #[derive(Clone, Debug, Eq, PartialEq)] 9 | pub struct CallData { 10 | pub info: CallInfo, 11 | pub state: CallState, 12 | } 13 | 14 | impl CallData { 15 | pub fn new(info: CallInfo, state: CallState) -> Self { 16 | Self { info, state } 17 | } 18 | 19 | pub fn get_info(&self) -> CallInfo { 20 | self.info.clone() 21 | } 22 | 23 | pub fn get_state(&self) -> CallState { 24 | self.state.clone() 25 | } 26 | 27 | pub fn get_participant_state(&self, id: &DID) -> Option { 28 | self.state.participants_joined.get(id).cloned() 29 | } 30 | } 31 | 32 | pub struct CallDataMap { 33 | pub own_id: DID, 34 | pub active_call: Option, 35 | pub map: HashMap, 36 | } 37 | 38 | impl CallDataMap { 39 | pub fn new(own_id: DID) -> Self { 40 | Self { 41 | own_id, 42 | active_call: None, 43 | map: HashMap::default(), 44 | } 45 | } 46 | pub fn add_call(&mut self, info: CallInfo, sender: &DID) { 47 | let call_id = info.call_id(); 48 | if self.map.contains_key(&call_id) { 49 | log::warn!("tried to add a call for which a key already exists"); 50 | return; 51 | } 52 | 53 | let mut state = CallState::new(self.own_id.clone()); 54 | state.add_participant(sender, ParticipantState::default()); 55 | state.add_participant(&self.own_id, ParticipantState::default()); 56 | self.map.insert(call_id, CallData::new(info, state)); 57 | } 58 | 59 | pub fn get_pending_calls(&self) -> Vec { 60 | self.map.values().map(|x| x.get_info()).collect() 61 | } 62 | 63 | pub fn 
is_active_call(&self, call_id: Uuid) -> bool { 64 | self.active_call 65 | .as_ref() 66 | .map(|x| x == &call_id) 67 | .unwrap_or_default() 68 | } 69 | 70 | pub fn get_mut(&mut self, call_id: Uuid) -> Option<&mut CallData> { 71 | self.map.get_mut(&call_id) 72 | } 73 | 74 | pub fn get_active_mut(&mut self) -> Option<&mut CallData> { 75 | match self.active_call { 76 | None => None, 77 | Some(call_id) => self.map.get_mut(&call_id), 78 | } 79 | } 80 | 81 | pub fn get_active(&self) -> Option<&CallData> { 82 | match self.active_call { 83 | None => None, 84 | Some(call_id) => self.map.get(&call_id), 85 | } 86 | } 87 | 88 | pub fn set_active(&mut self, call_id: Uuid) { 89 | self.active_call.replace(call_id); 90 | } 91 | } 92 | 93 | impl CallDataMap { 94 | pub fn add_participant( 95 | &mut self, 96 | call_id: Uuid, 97 | peer_id: &DID, 98 | participant_state: ParticipantState, 99 | ) { 100 | if let Some(data) = self.map.get_mut(&call_id) { 101 | if data.info.contains_participant(peer_id) { 102 | data.state.add_participant(peer_id, participant_state); 103 | } 104 | } 105 | } 106 | 107 | pub fn call_empty(&self, call_id: Uuid) -> bool { 108 | self.map 109 | .get(&call_id) 110 | .map(|data| data.state.participants_joined.is_empty()) 111 | .unwrap_or(true) 112 | } 113 | 114 | pub fn contains_participant(&self, call_id: Uuid, peer_id: &DID) -> bool { 115 | self.map 116 | .get(&call_id) 117 | .map(|data| data.info.contains_participant(peer_id)) 118 | .unwrap_or_default() 119 | } 120 | 121 | pub fn get_call_info(&self, id: Uuid) -> Option { 122 | self.map.get(&id).map(|x| x.get_info()) 123 | } 124 | 125 | fn get_call_data(&self, call_id: Uuid) -> Option { 126 | self.map.get(&call_id).cloned() 127 | } 128 | 129 | pub fn get_own_state(&self) -> Option { 130 | self.get_active() 131 | .and_then(|data| data.get_participant_state(&self.own_id)) 132 | } 133 | 134 | pub fn get_participant_state(&self, call_id: Uuid, peer_id: &DID) -> Option { 135 | self.get_call_data(call_id) 136 | .and_then(|cd| cd.get_participant_state(peer_id)) 137 | } 138 | 139 | pub fn insert(&mut self, id: Uuid, data: CallData) { 140 | self.map.insert(id, data); 141 | } 142 | 143 | pub fn leave_call(&mut self, call_id: Uuid) { 144 | if self.is_active_call(call_id) { 145 | self.active_call.take(); 146 | } 147 | if let Some(data) = self.map.get_mut(&call_id) { 148 | data.state.reset_self(); 149 | } 150 | } 151 | 152 | pub fn remove_call(&mut self, call_id: Uuid) { 153 | self.map.remove(&call_id); 154 | } 155 | 156 | pub fn remove_participant(&mut self, call_id: Uuid, peer_id: &DID) { 157 | if let Some(data) = self.map.get_mut(&call_id) { 158 | if data.info.contains_participant(peer_id) { 159 | data.state.remove_participant(peer_id); 160 | } 161 | } 162 | } 163 | } 164 | 165 | impl CallDataMap { 166 | pub fn set_muted(&mut self, call_id: Uuid, participant: &DID, value: bool) { 167 | if let Some(data) = self.map.get_mut(&call_id) { 168 | data.state.set_muted(participant, value); 169 | } 170 | } 171 | 172 | pub fn set_deafened(&mut self, call_id: Uuid, participant: &DID, value: bool) { 173 | if let Some(data) = self.map.get_mut(&call_id) { 174 | data.state.set_deafened(participant, value); 175 | } 176 | } 177 | 178 | pub fn set_recording(&mut self, call_id: Uuid, participant: &DID, value: bool) { 179 | if let Some(data) = self.map.get_mut(&call_id) { 180 | data.state.set_recording(participant, value); 181 | } 182 | } 183 | } 184 | -------------------------------------------------------------------------------- 
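A minimal usage sketch (not part of the repository) showing how the `CallDataMap` above might be driven by a call controller. The helper names `handle_incoming_offer`, `answer_call`, and `hang_up` are hypothetical, `CallDataMap` is assumed to be in scope from this module, and `CallInfo`/`ParticipantState` come from `warp::blink` as in the file above:

```rust
use uuid::Uuid;
use warp::{
    blink::{CallInfo, ParticipantState},
    crypto::DID,
};

// `map` would be owned by the blink controller; `info` and `sender` would come from an incoming Offer signal.
fn handle_incoming_offer(map: &mut CallDataMap, info: CallInfo, sender: &DID) -> Uuid {
    let call_id = info.call_id();
    // Registers the call and marks both the sender and ourselves as joined participants.
    map.add_call(info, sender);
    call_id
}

fn answer_call(map: &mut CallDataMap, call_id: Uuid, peer: &DID) {
    // Make this the active call and start tracking the peer's state
    // (add_participant only takes effect if the peer is listed in the call's info).
    map.set_active(call_id);
    map.add_participant(call_id, peer, ParticipantState::default());
}

fn hang_up(map: &mut CallDataMap, call_id: Uuid) {
    // Resets our own participant state and clears the active call if it pointed at this one.
    map.leave_call(call_id);
    // Drop the entry entirely once no participants remain joined.
    if map.call_empty(call_id) {
        map.remove_call(call_id);
    }
}
```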
/extensions/warp-blink-wrtc/src/blink_impl/readme.md: -------------------------------------------------------------------------------- 1 | ### BlinkImpl spawns three long running tasks 2 | - `GossipSubListener`: receives messages via IPFS and forwards them to the `BlinkController` 3 | - `GossipSubSender`: contains the user's full DID - both the public and private key - and is responsible for sending and decoding messages. GossipSubSender also can provide a clone of the DID (which returns just the public key) upon request. 4 | - `BlinkController`: contains the instance of `SimpleWebrtc`. receives all the gossipsub messages, webrtc events, and user commands (invoked by BlinkImpl) 5 | 6 | ### when BlinkImpl offers a call 7 | - an Offer signal is sent (and retried) 8 | - the sender subscribes to a gossip channel specific to that call 9 | - all recipients subscribe to that gossip channel too, even if they don't join the call (this allows them to detect when all the other participants have left the call - in this case the call would be considered terminated). 10 | - The sender automatically joins the call. 11 | 12 | ### when someone joins the call 13 | - they subscribe to another gossip channel, using the call id and their DID. This one is for receiving webrtc specific signals (SDP and ICE mostly). 14 | - they broadcast an `Announce` signal on the call-wide channel periodically. 15 | - they track all the `Announce` and `Leave` signals. 16 | - they periodically go through a list of all participants who joined the call but to whom they aren't yet connected. they compare DIDs and based off of that, one of the peers will initiate a webrtc connection via the `Dial` signal. 17 | 18 | ### in response to a dial signal 19 | - the other side automatically accepts and proceeds with the webrtc connection process. -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/blink_impl/signaling.rs: -------------------------------------------------------------------------------- 1 | use derive_more::Display; 2 | 3 | use serde::{Deserialize, Serialize}; 4 | use uuid::Uuid; 5 | use warp::{ 6 | blink::{CallInfo, ParticipantState}, 7 | crypto::DID, 8 | }; 9 | use webrtc::{ 10 | ice_transport::ice_candidate::RTCIceCandidate, 11 | peer_connection::sdp::session_description::RTCSessionDescription, 12 | }; 13 | #[derive(Clone)] 14 | pub enum GossipSubSignal { 15 | Peer { 16 | sender: DID, 17 | call_id: Uuid, 18 | signal: Box, 19 | }, 20 | Call { 21 | sender: DID, 22 | call_id: Uuid, 23 | signal: CallSignal, 24 | }, 25 | Initiation { 26 | sender: DID, 27 | signal: InitiationSignal, 28 | }, 29 | } 30 | 31 | #[derive(Serialize, Deserialize, Display, Clone)] 32 | pub enum PeerSignal { 33 | #[display(fmt = "Ice")] 34 | Ice(RTCIceCandidate), 35 | // sent after receiving the dial signal 36 | #[display(fmt = "Sdp")] 37 | Sdp(RTCSessionDescription), 38 | // sent first 39 | #[display(fmt = "Dial")] 40 | Dial(RTCSessionDescription), 41 | } 42 | 43 | // this is used for webrtc signaling. 44 | // it is somewhat redundant but for now i'll leave it in. 
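// CallSignal below is published on the call-wide gossipsub topic ("telecon2/{call_id}", see
// ipfs_routes::call_signal_route at the bottom of this file), while PeerSignal uses the per-peer
// route "telecon2/{call_id}/{peer}" and InitiationSignal uses "offer_call2/{peer}".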
45 | #[derive(Serialize, Deserialize, Display, Clone)] 46 | pub enum CallSignal { 47 | #[display(fmt = "Announce")] 48 | Announce { participant_state: ParticipantState }, 49 | #[display(fmt = "Leave")] 50 | Leave, 51 | } 52 | 53 | #[derive(Serialize, Deserialize, Display, Clone)] 54 | pub enum InitiationSignal { 55 | /// invite a peer to join a call 56 | #[display(fmt = "Offer")] 57 | Offer { call_info: CallInfo }, 58 | } 59 | 60 | pub mod ipfs_routes { 61 | use uuid::Uuid; 62 | use warp::crypto::DID; 63 | 64 | const TELECON_BROADCAST: &str = "telecon2"; 65 | const OFFER_CALL: &str = "offer_call2"; 66 | /// subscribe/unsubscribe per-call 67 | /// CallSignal 68 | pub fn call_signal_route(call_id: &Uuid) -> String { 69 | format!("{TELECON_BROADCAST}/{call_id}") 70 | } 71 | 72 | /// subscribe/unsubscribe per-call 73 | /// PeerSignal 74 | pub fn peer_signal_route(peer: &DID, call_id: &Uuid) -> String { 75 | format!("{TELECON_BROADCAST}/{call_id}/{peer}") 76 | } 77 | 78 | /// subscribe to this when initializing Blink 79 | /// InitiationSignal 80 | pub fn call_initiation_route(peer: &DID) -> String { 81 | format!("{OFFER_CALL}/{peer}") 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/blink_impl/store.rs: -------------------------------------------------------------------------------- 1 | use std::fmt::Display; 2 | 3 | use ipfs::{libp2p, Ipfs}; 4 | use rust_ipfs as ipfs; 5 | use serde::de::DeserializeOwned; 6 | use serde::Serialize; 7 | 8 | type Result = std::result::Result; 9 | 10 | use warp::{ 11 | crypto::{ 12 | cipher::Cipher, 13 | did_key::{Generate, ECDH}, 14 | zeroize::Zeroizing, 15 | DIDKey, Ed25519KeyPair, KeyMaterial, DID, 16 | }, 17 | error::Error, 18 | }; 19 | 20 | pub trait PeerIdExt { 21 | fn to_did(&self) -> std::result::Result; 22 | } 23 | 24 | impl PeerIdExt for ipfs::PeerId { 25 | fn to_did(&self) -> std::result::Result { 26 | let multihash = self.as_ref(); 27 | if multihash.code() != 0 { 28 | anyhow::bail!("PeerId does not contain inline public key"); 29 | } 30 | let public_key = ipfs::PublicKey::try_decode_protobuf(multihash.digest())?; 31 | libp2p_pub_to_did(&public_key) 32 | } 33 | } 34 | 35 | // uses asymmetric encryption 36 | pub async fn send_signal_ecdh( 37 | ipfs: &Ipfs, 38 | own_did: &DID, 39 | dest: &DID, 40 | signal: T, 41 | topic: String, 42 | ) -> anyhow::Result<()> { 43 | let serialized = serde_cbor::to_vec(&signal)?; 44 | let encrypted = ecdh_encrypt(own_did, dest, serialized)?; 45 | ipfs.pubsub_publish(topic, encrypted).await?; 46 | Ok(()) 47 | } 48 | 49 | // uses symmetric encryption 50 | pub async fn send_signal_aes( 51 | ipfs: &Ipfs, 52 | key: &[u8], 53 | signal: T, 54 | topic: String, 55 | ) -> anyhow::Result<()> { 56 | let serialized = serde_cbor::to_vec(&signal)?; 57 | let msg = Cipher::direct_encrypt(&serialized, key)?; 58 | ipfs.pubsub_publish(topic, msg).await?; 59 | Ok(()) 60 | } 61 | 62 | pub fn decode_gossipsub_msg_ecdh( 63 | own_did: &DID, 64 | sender: &DID, 65 | msg: &libp2p::gossipsub::Message, 66 | ) -> anyhow::Result { 67 | let bytes = ecdh_decrypt(own_did, sender, &msg.data)?; 68 | let data: T = serde_cbor::from_slice(&bytes)?; 69 | Ok(data) 70 | } 71 | 72 | pub fn decode_gossipsub_msg_aes( 73 | key: &[u8], 74 | msg: &libp2p::gossipsub::Message, 75 | ) -> anyhow::Result { 76 | let decrypted = Cipher::direct_decrypt(&msg.data, key)?; 77 | let data: T = serde_cbor::from_slice(&decrypted)?; 78 | Ok(data) 79 | } 80 | 81 | pub fn ecdh_encrypt>(own_did: &DID, 
recipient: &DID, data: K) -> Result> { 82 | let prikey = Ed25519KeyPair::from_secret_key(&own_did.private_key_bytes()).get_x25519(); 83 | let did_pubkey = recipient.public_key_bytes(); 84 | 85 | let pubkey = Ed25519KeyPair::from_public_key(&did_pubkey).get_x25519(); 86 | let prik = Zeroizing::new(prikey.key_exchange(&pubkey)); 87 | let data = Cipher::direct_encrypt(data.as_ref(), &prik)?; 88 | 89 | Ok(data) 90 | } 91 | 92 | pub fn ecdh_decrypt>(own_did: &DID, sender: &DID, data: K) -> Result> { 93 | let prikey = Ed25519KeyPair::from_secret_key(&own_did.private_key_bytes()).get_x25519(); 94 | let did_pubkey = sender.public_key_bytes(); 95 | 96 | let pubkey = Ed25519KeyPair::from_public_key(&did_pubkey).get_x25519(); 97 | let prik = Zeroizing::new(prikey.key_exchange(&pubkey)); 98 | let data = Cipher::direct_decrypt(data.as_ref(), &prik)?; 99 | 100 | Ok(data) 101 | } 102 | 103 | fn _did_to_libp2p_pub(public_key: &DID) -> anyhow::Result { 104 | rust_ipfs::libp2p::identity::ed25519::PublicKey::try_from_bytes(&public_key.public_key_bytes()) 105 | .map(rust_ipfs::libp2p::identity::PublicKey::from) 106 | .map_err(anyhow::Error::from) 107 | } 108 | 109 | fn libp2p_pub_to_did(public_key: &rust_ipfs::libp2p::identity::PublicKey) -> anyhow::Result { 110 | let pk = match public_key.clone().try_into_ed25519() { 111 | Ok(pk) => { 112 | let did: DIDKey = Ed25519KeyPair::from_public_key(&pk.to_bytes()).into(); 113 | did.into() 114 | } 115 | _ => anyhow::bail!(warp::error::Error::PublicKeyInvalid), 116 | }; 117 | Ok(pk) 118 | } 119 | 120 | #[cfg(test)] 121 | mod test { 122 | use rand::rngs::OsRng; 123 | use warp::crypto::{aes_gcm::Aes256Gcm, did_key::generate, digest::KeyInit}; 124 | 125 | use super::*; 126 | 127 | #[test] 128 | fn ecdh_test1() -> anyhow::Result<()> { 129 | let own_did: DID = generate::(Some(b"seed")).into(); 130 | let recipient_did: DID = generate::(Some(b"another seed")).into(); 131 | 132 | let to_encrypt = b"test message to encrypt"; 133 | let encrypted = ecdh_encrypt(&own_did, &recipient_did, to_encrypt)?; 134 | 135 | assert!(encrypted != to_encrypt); 136 | 137 | let decrypted = ecdh_decrypt(&recipient_did, &own_did, &encrypted)?; 138 | assert!(decrypted != encrypted); 139 | assert!(decrypted == to_encrypt); 140 | Ok(()) 141 | } 142 | 143 | #[test] 144 | fn aes_test1() -> anyhow::Result<()> { 145 | let key: Vec = Aes256Gcm::generate_key(&mut OsRng).as_slice().into(); 146 | let to_encrypt = b"test message to encrypt"; 147 | let encrypted = Cipher::direct_encrypt(to_encrypt, &key)?; 148 | 149 | assert!(encrypted != to_encrypt); 150 | 151 | let decrypted = Cipher::direct_decrypt(encrypted.as_ref(), &key)?; 152 | assert!(decrypted != encrypted); 153 | assert!(decrypted == to_encrypt); 154 | Ok(()) 155 | } 156 | } 157 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/host_media/audio/mod.rs: -------------------------------------------------------------------------------- 1 | use std::{mem::MaybeUninit, sync::Arc}; 2 | 3 | use ringbuf::{Consumer, Producer, SharedRb}; 4 | 5 | pub mod sink; 6 | pub mod source; 7 | pub mod utils; 8 | 9 | pub const OPUS_SAMPLES: usize = 480; 10 | pub type AudioConsumer = Consumer>>>>; 11 | pub type AudioProducer = Producer>>>>; 12 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/host_media/audio/sink/decoder_task.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | sync::{ 3 | 
atomic::{AtomicBool, Ordering}, 4 | Arc, 5 | }, 6 | time::Duration, 7 | }; 8 | 9 | use rayon::prelude::*; 10 | use tokio::sync::mpsc::UnboundedReceiver; 11 | use warp::crypto::DID; 12 | use webrtc::media::Sample; 13 | 14 | use crate::host_media::audio::{AudioProducer, OPUS_SAMPLES}; 15 | 16 | pub enum Cmd { 17 | AddTrack { 18 | decoder: opus::Decoder, 19 | peer_id: DID, 20 | packet_rx: UnboundedReceiver, 21 | producer: AudioProducer, 22 | }, 23 | RemoveTrack { 24 | peer_id: DID, 25 | }, 26 | 27 | // these last two are for changing the output device. the number of channels could change 28 | // and either way a new cpal stream will be created 29 | PauseAll { 30 | new_num_channels: usize, 31 | }, 32 | ReplaceSampleTx { 33 | peer_id: DID, 34 | producer: AudioProducer, 35 | }, 36 | SetAudioMultiplier { 37 | peer_id: DID, 38 | audio_multiplier: f32, 39 | }, 40 | } 41 | 42 | struct Entry { 43 | decoder: opus::Decoder, 44 | peer_id: DID, 45 | audio_multiplier: f32, 46 | packet_rx: UnboundedReceiver, 47 | producer: AudioProducer, 48 | paused: bool, 49 | } 50 | 51 | pub struct Args { 52 | pub cmd_rx: UnboundedReceiver, 53 | pub should_quit: Arc, 54 | pub num_channels: usize, 55 | } 56 | 57 | pub fn run(args: Args) { 58 | let Args { 59 | mut cmd_rx, 60 | should_quit, 61 | mut num_channels, 62 | } = args; 63 | 64 | let mut connections: Vec = vec![]; 65 | while !should_quit.load(Ordering::Relaxed) { 66 | let mut remaining_tries = 50_u32; 67 | while let Ok(cmd) = cmd_rx.try_recv() { 68 | match cmd { 69 | Cmd::AddTrack { 70 | decoder, 71 | peer_id, 72 | packet_rx, 73 | producer, 74 | } => { 75 | connections.retain(|x| x.peer_id != peer_id); 76 | 77 | connections.push(Entry { 78 | decoder, 79 | peer_id, 80 | packet_rx, 81 | producer, 82 | paused: false, 83 | audio_multiplier: 1.0_f32, 84 | }); 85 | } 86 | Cmd::RemoveTrack { peer_id } => { 87 | connections.retain(|x| x.peer_id != peer_id); 88 | } 89 | Cmd::PauseAll { new_num_channels } => { 90 | for peer in connections.iter_mut() { 91 | peer.paused = true; 92 | } 93 | num_channels = new_num_channels; 94 | } 95 | Cmd::ReplaceSampleTx { peer_id, producer } => { 96 | if let Some(peer) = connections.iter_mut().find(|x| x.peer_id == peer_id) { 97 | peer.producer = producer; 98 | peer.paused = false; 99 | } 100 | } 101 | Cmd::SetAudioMultiplier { 102 | peer_id, 103 | audio_multiplier, 104 | } => { 105 | if let Some(entry) = connections.iter_mut().find(|x| x.peer_id == peer_id) { 106 | entry.audio_multiplier = audio_multiplier; 107 | } 108 | } 109 | } 110 | remaining_tries -= 1; 111 | if remaining_tries == 0 { 112 | break; 113 | } 114 | } 115 | 116 | let packets_decoded: u16 = connections 117 | .par_iter_mut() 118 | .map(|entry| { 119 | let mut ran_once = false; 120 | while let Ok(sample) = entry.packet_rx.try_recv() { 121 | ran_once = true; 122 | 123 | if entry.paused { 124 | continue; 125 | } 126 | 127 | // 10ms 128 | let mut decoder_output_buf = vec![0_f32; OPUS_SAMPLES]; 129 | match entry 130 | .decoder 131 | .decode_float(&sample.data, &mut decoder_output_buf, false) 132 | { 133 | Ok(size) => { 134 | let mut buf2 = vec![0_f32; size * num_channels]; 135 | let it1 = buf2.chunks_exact_mut(num_channels); 136 | let it2 = decoder_output_buf.iter().take(size); 137 | for (chunk, val) in std::iter::zip(it1, it2) { 138 | chunk.fill(*val * entry.audio_multiplier); 139 | } 140 | 141 | for sample in buf2.drain(..) 
{ 142 | let _ = entry.producer.push(sample); 143 | } 144 | } 145 | Err(e) => { 146 | log::error!("decode error: {e}"); 147 | } 148 | } 149 | } 150 | if ran_once { 151 | 1_u16 152 | } else { 153 | 0_u16 154 | } 155 | }) 156 | .sum(); 157 | 158 | if packets_decoded == 0 { 159 | std::thread::sleep(Duration::from_millis(5)); 160 | } 161 | } 162 | } 163 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/host_media/audio/sink/receiver_task.rs: -------------------------------------------------------------------------------- 1 | use std::sync::{ 2 | atomic::{self, AtomicBool, Ordering}, 3 | Arc, 4 | }; 5 | 6 | use tokio::{ 7 | sync::{ 8 | broadcast, 9 | mpsc::{UnboundedReceiver, UnboundedSender}, 10 | Notify, 11 | }, 12 | time::Instant, 13 | }; 14 | use warp::{blink::BlinkEventKind, crypto::DID}; 15 | use webrtc::{ 16 | media::{io::sample_builder::SampleBuilder, Sample}, 17 | track::track_remote::TrackRemote, 18 | util::Unmarshal, 19 | }; 20 | 21 | use crate::host_media::{ 22 | audio::utils::SpeechDetector, audio_utils::automute, mp4_logger::Mp4LoggerInstance, 23 | }; 24 | 25 | pub struct Args { 26 | pub track: Arc, 27 | pub mp4_logger: Box, 28 | pub peer_id: DID, 29 | pub should_quit: Arc, 30 | pub silenced: Arc, 31 | pub packet_tx: UnboundedSender, 32 | pub cmd_ch: UnboundedReceiver, 33 | pub ui_event_ch: broadcast::Sender, 34 | } 35 | 36 | pub enum Cmd { 37 | SetMp4Logger { logger: Box }, 38 | } 39 | 40 | pub async fn run(args: Args) { 41 | let Args { 42 | track, 43 | mut mp4_logger, 44 | should_quit, 45 | silenced, 46 | packet_tx, 47 | ui_event_ch, 48 | mut cmd_ch, 49 | peer_id, 50 | } = args; 51 | 52 | let mut b = [0u8; 2880 * 4]; 53 | let mut speech_detector = SpeechDetector::new(10, 100); 54 | let mut log_decode_error_once = false; 55 | 56 | let mut sample_builder = { 57 | let max_late = 512; 58 | let depacketizer = webrtc::rtp::codecs::opus::OpusPacket; 59 | SampleBuilder::new(max_late, depacketizer, 48000) 60 | }; 61 | 62 | let automute_tx = automute::AUDIO_CMD_CH.tx.clone(); 63 | 64 | loop { 65 | let (siz, _attr) = tokio::select! { 66 | x = track.read(&mut b) => match x { 67 | Ok(y) => y, 68 | Err(e) => { 69 | log::debug!("audio receiver task for peer {peer_id} terminated by error: {e}"); 70 | break; 71 | } 72 | }, 73 | opt = cmd_ch.recv() => match opt { 74 | Some(cmd) => match cmd { 75 | Cmd::SetMp4Logger { logger } => { 76 | mp4_logger = logger; 77 | continue; 78 | } 79 | }, 80 | None => { 81 | log::debug!("receiver task terminated: cmd channel closed"); 82 | break; 83 | } 84 | }, 85 | _ = should_quit.notified() => { 86 | log::debug!("audio receiver task for peer {peer_id} terminated by notify"); 87 | break; 88 | } 89 | }; 90 | 91 | // get RTP packet 92 | let mut buf = &b[..siz]; 93 | let rtp_packet = match webrtc::rtp::packet::Packet::unmarshal(&mut buf) { 94 | Ok(r) => r, 95 | Err(e) => { 96 | if !log_decode_error_once { 97 | log_decode_error_once = true; 98 | // this only happens if a packet is "short" 99 | log::error!("unmarshall rtp packet failed for peer {peer_id}: {}", e); 100 | } 101 | continue; 102 | } 103 | }; 104 | 105 | if !silenced.load(atomic::Ordering::Relaxed) { 106 | mp4_logger.log(rtp_packet.payload.clone()); 107 | } 108 | 109 | // if let Some(logger) = logger.as_ref() { 110 | // logger.log(rtp_packet.header.clone(), task_start_time.elapsed().as_millis()); 111 | // } 112 | 113 | if let Some(extension) = rtp_packet.header.extensions.first() { 114 | // don't yet have the MediaEngine exposed. 
for now since there's only one extension being used, this way seems to be good enough 115 | // copies extension::audio_level_extension::AudioLevelExtension from the webrtc-rs crate 116 | // todo: use this: 117 | // .media_engine 118 | // .get_header_extension_id(RTCRtpHeaderExtensionCapability { 119 | // uri: ::sdp::extmap::SDES_MID_URI.to_owned(), 120 | // }) 121 | // followed by this: header.get_extension(extension_id) 122 | let audio_level = extension.payload.first().map(|x| x & 0x7F).unwrap_or(0); 123 | if speech_detector.should_emit_event(audio_level) { 124 | let _ = ui_event_ch.send(BlinkEventKind::ParticipantSpeaking { 125 | peer_id: peer_id.clone(), 126 | }); 127 | 128 | let _ = automute_tx.send(automute::Cmd::MuteAt(Instant::now())); 129 | } 130 | } 131 | 132 | // saved for later. automute codepath 133 | // let mut sample_created = false; 134 | 135 | // turn RTP packets into samples via SampleBuilder.push 136 | sample_builder.push(rtp_packet); 137 | 138 | // if silenced, discard all samples 139 | if silenced.load(Ordering::Relaxed) { 140 | while sample_builder.pop().is_some() {} 141 | continue; 142 | } 143 | 144 | while let Some(media_sample) = sample_builder.pop() { 145 | let _ = packet_tx.send(media_sample); 146 | } 147 | } 148 | } 149 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/host_media/audio/source/encoder_task.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | sync::{ 3 | atomic::{AtomicBool, Ordering}, 4 | Arc, 5 | }, 6 | time::Duration, 7 | }; 8 | 9 | use crate::host_media::audio::AudioConsumer; 10 | 11 | use super::super::utils::{FramerOutput, SpeechDetector}; 12 | 13 | use tokio::sync::mpsc::UnboundedSender; 14 | 15 | pub struct Args { 16 | pub encoder: opus::Encoder, 17 | pub consumer: AudioConsumer, 18 | pub tx: UnboundedSender, 19 | pub should_quit: Arc, 20 | pub num_samples: usize, 21 | } 22 | 23 | pub fn run(args: Args) { 24 | let Args { 25 | mut encoder, 26 | mut consumer, 27 | tx, 28 | should_quit, 29 | num_samples, 30 | } = args; 31 | 32 | // speech_detector should emit at most 1 event per second 33 | let _speech_detector = SpeechDetector::new(10, 100); 34 | let mut opus_out = vec![0_u8; num_samples * 4]; 35 | let mut buf = Vec::new(); 36 | buf.reserve(480); 37 | 38 | while !should_quit.load(Ordering::Relaxed) { 39 | while let Some(sample) = consumer.pop() { 40 | buf.push(sample); 41 | if buf.len() == 480 { 42 | break; 43 | } 44 | } 45 | if buf.len() < 480 { 46 | std::thread::sleep(Duration::from_millis(5)); 47 | continue; 48 | } 49 | 50 | // calculate rms of frame 51 | let rms = f32::sqrt(buf.iter().map(|x| x * x).sum::() / buf.len() as f32); 52 | let loudness = match rms * 1000.0 { 53 | x if x >= 127.0 => 127, 54 | x => x as u8, 55 | }; 56 | 57 | // encode and send off to the network bound task 58 | match encoder.encode_float(buf.as_mut_slice(), opus_out.as_mut_slice()) { 59 | Ok(size) => { 60 | let slice = opus_out.as_slice(); 61 | let bytes = bytes::Bytes::copy_from_slice(&slice[0..size]); 62 | 63 | let _ = tx.send(FramerOutput { bytes, loudness }); 64 | } 65 | Err(e) => { 66 | log::error!("OpusPacketizer failed to encode: {}", e); 67 | } 68 | } 69 | 70 | buf.clear(); 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/host_media/audio/source/sender_task.rs: -------------------------------------------------------------------------------- 1 | use 
std::sync::Arc; 2 | 3 | use crate::host_media::{ 4 | audio::utils::{FramerOutput, SpeechDetector}, 5 | mp4_logger::Mp4LoggerInstance, 6 | }; 7 | 8 | use rand::Rng; 9 | use tokio::sync::{broadcast, mpsc::UnboundedReceiver, Notify}; 10 | use warp::blink::BlinkEventKind; 11 | use webrtc::{ 12 | rtp::{self, extension::audio_level_extension::AudioLevelExtension, packetizer::Packetizer}, 13 | track::track_local::track_local_static_rtp::TrackLocalStaticRTP, 14 | }; 15 | 16 | pub struct Args { 17 | pub track: Arc, 18 | pub mp4_logger: Box, 19 | pub ui_event_ch: broadcast::Sender, 20 | pub rx: UnboundedReceiver, 21 | pub cmd_ch: UnboundedReceiver, 22 | pub notify: Arc, 23 | pub num_samples: usize, 24 | } 25 | 26 | pub enum Cmd { 27 | SetMp4Logger { logger: Box }, 28 | } 29 | 30 | pub async fn run(args: Args) { 31 | let Args { 32 | track, 33 | mut mp4_logger, 34 | ui_event_ch, 35 | mut rx, 36 | mut cmd_ch, 37 | notify, 38 | num_samples, 39 | } = args; 40 | 41 | let mut packetizer = { 42 | // create the ssrc for the RTP packets. ssrc serves to uniquely identify the sender 43 | let mut rng = rand::thread_rng(); 44 | let ssrc: u32 = rng.gen(); 45 | let opus = Box::new(rtp::codecs::opus::OpusPayloader {}); 46 | let seq = Box::new(rtp::sequence::new_random_sequencer()); 47 | rtp::packetizer::new_packetizer( 48 | // frame size is number of samples 49 | // 12 is for the header, though there may be an additional 4*csrc bytes in the header. 50 | (1024) + 12, 51 | // payload type means nothing 52 | // https://en.wikipedia.org/wiki/RTP_payload_formats 53 | // todo: use an enum for this 54 | 98, 55 | // randomly generated and uniquely identifies the source 56 | ssrc, 57 | opus, 58 | seq, 59 | 48000, 60 | ) 61 | }; 62 | 63 | // speech_detector should emit at most 1 event per second 64 | let mut speech_detector = SpeechDetector::new(10, 100); 65 | 66 | loop { 67 | let frame: FramerOutput = tokio::select! 
{ 68 | opt = cmd_ch.recv() => match opt { 69 | Some(cmd) => match cmd { 70 | Cmd::SetMp4Logger { logger } => { 71 | mp4_logger = logger; 72 | continue; 73 | } 74 | }, 75 | None => { 76 | log::debug!("sender task terminated: cmd channel closed"); 77 | break; 78 | } 79 | }, 80 | _ = notify.notified() => { 81 | log::debug!("sender task terminated via notify"); 82 | break; 83 | }, 84 | opt = rx.recv() => match opt { 85 | Some(r) => r, 86 | None => { 87 | log::debug!("sender task terminated: channel closed"); 88 | break; 89 | } 90 | } 91 | }; 92 | 93 | if speech_detector.should_emit_event(frame.loudness) { 94 | let _ = ui_event_ch.send(BlinkEventKind::SelfSpeaking); 95 | } 96 | 97 | let packets = match packetizer.packetize(&frame.bytes, num_samples as _).await { 98 | Ok(r) => r, 99 | Err(e) => { 100 | log::error!("failed to packetize for opus: {}", e); 101 | continue; 102 | } 103 | }; 104 | 105 | for packet in &packets { 106 | mp4_logger.log(packet.payload.clone()); 107 | if let Err(e) = track 108 | .write_rtp_with_extensions( 109 | packet, 110 | &[rtp::extension::HeaderExtension::AudioLevel( 111 | AudioLevelExtension { 112 | level: frame.loudness, 113 | voice: false, 114 | }, 115 | )], 116 | ) 117 | .await 118 | { 119 | log::error!("failed to send RTP packet: {}", e); 120 | } 121 | } 122 | } 123 | } 124 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/host_media/audio/utils/audio_buf.rs: -------------------------------------------------------------------------------- 1 | pub struct AudioBuf { 2 | samples: Vec, 3 | frame_size: usize, 4 | } 5 | 6 | impl AudioBuf { 7 | pub fn new(frame_size: usize) -> Self { 8 | let samples = Vec::with_capacity(frame_size); 9 | Self { 10 | samples, 11 | frame_size, 12 | } 13 | } 14 | 15 | pub fn insert(&mut self, buf: &[f32]) { 16 | self.samples.extend_from_slice(buf); 17 | } 18 | 19 | pub fn get_frame(&mut self) -> Option> { 20 | if self.samples.len() < self.frame_size { 21 | return None; 22 | } 23 | 24 | let mut r = vec![0_f32; self.frame_size]; 25 | r.copy_from_slice(&self.samples[0..self.frame_size]); 26 | let remaining = self.samples.len() - self.frame_size; 27 | let mut new_samples = vec![0_f32; remaining]; 28 | new_samples.copy_from_slice(&self.samples[self.frame_size..]); 29 | self.samples = new_samples; 30 | self.samples.reserve(self.frame_size); 31 | 32 | Some(r) 33 | } 34 | 35 | pub fn copy_to_slice(&mut self, slice: &mut [f32]) { 36 | if self.samples.len() < slice.len() { 37 | slice.fill(0_f32); 38 | return; 39 | } 40 | slice.copy_from_slice(&self.samples[0..slice.len()]); 41 | let mut samples2 = vec![0_f32; self.samples.len() - slice.len()]; 42 | samples2.copy_from_slice(&self.samples[slice.len()..]); 43 | samples2.reserve(self.frame_size); 44 | self.samples = samples2; 45 | } 46 | 47 | pub fn frame_size(&self) -> usize { 48 | self.frame_size 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/host_media/audio/utils/automute.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{bail, Result}; 2 | use once_cell::sync::Lazy; 3 | use std::{ 4 | sync::{ 5 | atomic::{AtomicBool, Ordering}, 6 | Arc, 7 | }, 8 | time::Duration, 9 | }; 10 | use tokio::{ 11 | sync::mpsc::{self}, 12 | time::Instant, 13 | }; 14 | 15 | // tells the automute module how much longer to delay before unmuting 16 | pub struct AudioMuteChannels { 17 | pub tx: mpsc::UnboundedSender, 18 | pub rx: 
Arc>>, 19 | } 20 | pub static AUDIO_CMD_CH: Lazy = Lazy::new(|| { 21 | let (tx, rx) = tokio::sync::mpsc::unbounded_channel(); 22 | AudioMuteChannels { 23 | tx, 24 | rx: Arc::new(tokio::sync::Mutex::new(rx)), 25 | } 26 | }); 27 | 28 | pub static SHOULD_MUTE: Lazy = Lazy::new(|| AtomicBool::new(false)); 29 | 30 | pub enum Cmd { 31 | Quit, 32 | MuteAt(Instant), 33 | Disable, 34 | Enable, 35 | } 36 | 37 | pub fn start() { 38 | tokio::spawn(async move { 39 | if let Err(e) = run().await { 40 | log::error!("automute error: {e}"); 41 | } 42 | }); 43 | } 44 | 45 | pub fn stop() { 46 | let tx = AUDIO_CMD_CH.tx.clone(); 47 | let _ = tx.send(Cmd::Quit); 48 | } 49 | 50 | async fn run() -> Result<()> { 51 | log::debug!("starting automute"); 52 | let rx = AUDIO_CMD_CH.rx.clone(); 53 | let mut rx = match rx.try_lock() { 54 | Ok(r) => r, 55 | Err(e) => bail!("mutex not available: {e}"), 56 | }; 57 | 58 | let (tx2, mut rx2) = tokio::sync::mpsc::unbounded_channel::(); 59 | tokio::spawn(async move { 60 | log::debug!("starting automute helper"); 61 | let mut unmute_time: Option = None; 62 | let mut timer = tokio::time::interval_at( 63 | Instant::now() + Duration::from_millis(100), 64 | Duration::from_millis(100), 65 | ); 66 | loop { 67 | tokio::select! { 68 | _ = timer.tick() => { 69 | if SHOULD_MUTE.load(Ordering::Relaxed) 70 | && unmute_time 71 | .as_ref() 72 | .map(|x| Instant::now() > *x) 73 | .unwrap_or_default() 74 | { 75 | SHOULD_MUTE.store(false, Ordering::Relaxed); 76 | } 77 | }, 78 | res = rx2.recv() => match res { 79 | Some(instant) => { 80 | let now = Instant::now(); 81 | let future = instant + Duration::from_millis(1000); 82 | if now >= future { 83 | continue; 84 | } 85 | 86 | if unmute_time.map(|x| future > x).unwrap_or(true) { 87 | unmute_time.replace(future); 88 | if !SHOULD_MUTE.load(Ordering::Relaxed) { 89 | SHOULD_MUTE.store(true, Ordering::Relaxed); 90 | } 91 | } 92 | }, 93 | None => { 94 | log::debug!("automute task terminated - cmd channel closed"); 95 | break; 96 | } 97 | } 98 | } 99 | } 100 | SHOULD_MUTE.store(false, Ordering::Relaxed); 101 | log::debug!("terminating automute helper"); 102 | }); 103 | 104 | let mut enabled = true; 105 | while let Some(cmd) = rx.recv().await { 106 | match cmd { 107 | Cmd::Quit => { 108 | log::debug!("quitting automute"); 109 | SHOULD_MUTE.store(false, Ordering::Relaxed); 110 | break; 111 | } 112 | Cmd::MuteAt(instant) => { 113 | if enabled { 114 | let _ = tx2.send(instant); 115 | } 116 | } 117 | Cmd::Disable => { 118 | enabled = false; 119 | } 120 | Cmd::Enable => enabled = true, 121 | } 122 | } 123 | Ok(()) 124 | } 125 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/host_media/audio/utils/codec_config.rs: -------------------------------------------------------------------------------- 1 | use warp::blink::MimeType; 2 | // for webrtc, the number of audio channels is hardcoded to 1. 
3 | #[derive(Debug, Clone)] 4 | pub struct AudioCodec { 5 | pub mime: MimeType, 6 | pub sample_rate: AudioSampleRate, 7 | } 8 | 9 | #[derive(Clone)] 10 | pub struct AudioHardwareConfig { 11 | pub sample_rate: AudioSampleRate, 12 | pub channels: u16, 13 | } 14 | 15 | impl AudioHardwareConfig { 16 | pub fn sample_rate(&self) -> u32 { 17 | self.sample_rate.to_u32() 18 | } 19 | pub fn channels(&self) -> u16 { 20 | self.channels 21 | } 22 | } 23 | 24 | impl AudioCodec { 25 | pub fn mime_type(&self) -> String { 26 | self.mime.to_string() 27 | } 28 | pub fn sample_rate(&self) -> u32 { 29 | self.sample_rate.to_u32() 30 | } 31 | pub fn frame_size(&self) -> usize { 32 | self.sample_rate.frame_size() 33 | } 34 | } 35 | 36 | impl Default for AudioCodec { 37 | fn default() -> Self { 38 | Self { 39 | mime: MimeType::OPUS, 40 | sample_rate: AudioSampleRate::High, 41 | } 42 | } 43 | } 44 | 45 | #[derive(Clone, Debug)] 46 | pub enum AudioSampleRate { 47 | Low, 48 | Medium, 49 | High, 50 | } 51 | 52 | impl AudioSampleRate { 53 | pub fn to_u32(&self) -> u32 { 54 | match self { 55 | AudioSampleRate::Low => 8000, 56 | AudioSampleRate::Medium => 24000, 57 | AudioSampleRate::High => 48000, 58 | } 59 | } 60 | 61 | // this seems backwards. i'd think a greater sample rate would require a larger buffer but for some reason, 62 | // 48kHz seems to work best with the lowest sample rate. 63 | pub fn frame_size(&self) -> usize { 64 | match self { 65 | AudioSampleRate::Low => 480, 66 | AudioSampleRate::Medium => 480, 67 | AudioSampleRate::High => 480, 68 | } 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/host_media/audio/utils/framer_output.rs: -------------------------------------------------------------------------------- 1 | use bytes::Bytes; 2 | 3 | pub struct FramerOutput { 4 | pub bytes: Bytes, 5 | pub loudness: u8, 6 | } 7 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/host_media/audio/utils/loudness.rs: -------------------------------------------------------------------------------- 1 | /// calculates loudness using root mean square. 2 | /// is basically a moving average filter. has a delay (in samples) equal to the buffer size 3 | pub struct Calculator { 4 | buf: Vec, 5 | ss: f32, 6 | idx: usize, 7 | // equals 1/buf.size(). 
multiplication is faster than division 8 | normalizer: f32, 9 | } 10 | 11 | impl Calculator { 12 | pub fn new(buf_size: usize) -> Self { 13 | let mut buf = Vec::new(); 14 | buf.resize(buf_size, 0.0); 15 | Self { 16 | buf, 17 | ss: 0.0, 18 | idx: 0, 19 | normalizer: 1.0 / buf_size as f32, 20 | } 21 | } 22 | pub fn insert(&mut self, sample: f32) { 23 | let sq = sample.powf(2.0); 24 | self.ss += sq; 25 | self.ss -= self.buf[self.idx]; 26 | self.buf[self.idx] = sq; 27 | self.idx = (self.idx + 1) % self.buf.len(); 28 | } 29 | 30 | pub fn get_rms(&self) -> f32 { 31 | f32::sqrt(self.ss * self.normalizer) 32 | } 33 | 34 | pub fn reset(&mut self) { 35 | let mut buf = Vec::new(); 36 | buf.resize(self.buf.len(), 0.0); 37 | self.buf = buf; 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/host_media/audio/utils/mod.rs: -------------------------------------------------------------------------------- 1 | mod audio_buf; 2 | mod audio_device_config_impl; 3 | pub mod automute; 4 | mod codec_config; 5 | mod framer_output; 6 | mod loudness; 7 | mod resampler; 8 | mod speech; 9 | 10 | #[allow(unused_imports)] 11 | pub use audio_buf::*; 12 | pub use audio_device_config_impl::*; 13 | #[allow(unused_imports)] 14 | pub use codec_config::*; 15 | pub use framer_output::*; 16 | #[allow(unused_imports)] 17 | pub use loudness::Calculator as LoudnessCalculator; 18 | #[allow(unused_imports)] 19 | pub use resampler::*; 20 | pub use speech::Detector as SpeechDetector; 21 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/host_media/audio/utils/resampler.rs: -------------------------------------------------------------------------------- 1 | use std::{mem::MaybeUninit, sync::Arc}; 2 | 3 | pub type AudioSampleProducer = 4 | ringbuf::Producer>>>>; 5 | pub type AudioSampleConsumer = 6 | ringbuf::Consumer>>>>; 7 | 8 | pub enum ResamplerConfig { 9 | None, 10 | DownSample(u32), 11 | UpSample(u32), 12 | } 13 | 14 | pub struct Resampler { 15 | config: ResamplerConfig, 16 | down_sample_count: u32, 17 | } 18 | 19 | impl Resampler { 20 | pub fn new(config: ResamplerConfig) -> Self { 21 | Self { 22 | config, 23 | down_sample_count: 0, 24 | } 25 | } 26 | pub fn process(&mut self, sample: f32, out: &mut Vec) { 27 | match self.config { 28 | ResamplerConfig::None => out.push(sample), 29 | ResamplerConfig::DownSample(x) => { 30 | self.down_sample_count += 1; 31 | if self.down_sample_count == x { 32 | self.down_sample_count = 0; 33 | out.push(sample); 34 | } 35 | } 36 | ResamplerConfig::UpSample(x) => { 37 | out.push(sample); 38 | for _ in 1..x { 39 | out.push(0.0); 40 | } 41 | } 42 | } 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/host_media/audio/utils/speech.rs: -------------------------------------------------------------------------------- 1 | /// processes the loudness level from RTP packets. This value is a u8. 2 | /// it is created by taking the output of loudness::Calculator (a float), multiplying it by 1000, and casting it as a u8, saturating at 127. 3 | /// it seems that values >= 10 could be speech. 4 | /// 5 | /// each RTP packet has a frame size which spans some timeframe, usually 10 or 20 milliseconds. 6 | /// delay is measured in frames. 
7 | pub struct Detector { 8 | min_delay_between_events: usize, 9 | remaining_delay: usize, 10 | speech_threshold: u8, 11 | is_speaking: bool, 12 | } 13 | 14 | impl Detector { 15 | pub fn new(speech_threshold: u8, min_delay: usize) -> Self { 16 | Self { 17 | min_delay_between_events: min_delay, 18 | remaining_delay: 0, 19 | speech_threshold, 20 | is_speaking: false, 21 | } 22 | } 23 | 24 | pub fn should_emit_event(&mut self, sample: u8) -> bool { 25 | if self.remaining_delay > 0 { 26 | self.remaining_delay -= 1; 27 | return false; 28 | } 29 | 30 | self.is_speaking = if sample >= self.speech_threshold { 31 | self.remaining_delay = self.min_delay_between_events; 32 | true 33 | } else { 34 | false 35 | }; 36 | self.is_speaking 37 | } 38 | 39 | pub fn is_speaking(&self) -> bool { 40 | self.is_speaking 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/host_media/loopback/mod.rs: -------------------------------------------------------------------------------- 1 | use std::{collections::HashMap, sync::Arc}; 2 | 3 | use tokio::sync::{mpsc, Notify}; 4 | use warp::crypto::DID; 5 | use webrtc::{ 6 | rtp::packet::Packet, 7 | track::{track_local::track_local_static_rtp::TrackLocalStaticRTP, track_remote::TrackRemote}, 8 | }; 9 | 10 | mod receiver; 11 | mod sender; 12 | 13 | struct ReceiverTask { 14 | should_quit: Arc, 15 | } 16 | 17 | impl Drop for ReceiverTask { 18 | fn drop(&mut self) { 19 | self.should_quit.notify_waiters(); 20 | } 21 | } 22 | 23 | pub struct LoopbackController { 24 | quit_sender_task: Arc, 25 | sample_tx: mpsc::UnboundedSender<(u8, Packet)>, 26 | sender_cmd_ch: mpsc::UnboundedSender, 27 | receiver_tasks: HashMap, 28 | } 29 | 30 | impl Drop for LoopbackController { 31 | fn drop(&mut self) { 32 | self.quit_sender_task.notify_waiters(); 33 | } 34 | } 35 | 36 | impl LoopbackController { 37 | pub fn new() -> Self { 38 | log::debug!("adding source track"); 39 | let quit_sender_task = Arc::new(Notify::new()); 40 | let (sample_tx, sample_rx) = mpsc::unbounded_channel(); 41 | let (sender_tx, sender_rx) = mpsc::unbounded_channel(); 42 | 43 | let should_quit = quit_sender_task.clone(); 44 | tokio::task::spawn(async { 45 | sender::run(sender::Args { 46 | should_quit, 47 | cmd_rx: sender_rx, 48 | sample_rx, 49 | }) 50 | .await; 51 | log::debug!("quitting source track"); 52 | }); 53 | 54 | Self { 55 | quit_sender_task, 56 | sample_tx, 57 | sender_cmd_ch: sender_tx, 58 | receiver_tasks: HashMap::new(), 59 | } 60 | } 61 | 62 | pub fn add_track(&mut self, peer_id: DID, track: Arc) { 63 | log::debug!("adding sink track"); 64 | let should_quit = Arc::new(Notify::new()); 65 | let ch = self.sample_tx.clone(); 66 | 67 | let task = ReceiverTask { 68 | should_quit: should_quit.clone(), 69 | }; 70 | self.receiver_tasks.insert(peer_id.clone(), task); 71 | 72 | tokio::spawn(async move { 73 | receiver::run(receiver::Args { 74 | should_quit, 75 | track, 76 | ch, 77 | }) 78 | .await; 79 | log::debug!("quitting sink track for peer_id {}", peer_id); 80 | }); 81 | } 82 | 83 | pub fn remove_track(&mut self, peer_id: DID) { 84 | self.receiver_tasks.remove(&peer_id); 85 | } 86 | 87 | pub fn set_source_track(&self, track: Arc) { 88 | let _ = self 89 | .sender_cmd_ch 90 | .send(sender::Cmd::SetSourceTrack { track }); 91 | } 92 | 93 | pub fn remove_audio_source_track(&self) { 94 | let _ = self.sender_cmd_ch.send(sender::Cmd::RemoveSourceTrack); 95 | } 96 | } 97 | 
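
For reference, the numbers flowing through the loudness pipeline above (the RMS computation in `encoder_task.rs`, `loudness.rs`, and `speech.rs`) can be reproduced in a few lines of plain Rust. This is a minimal standalone sketch, not code from the crate; the 480-sample frame, the ×1000 scaling saturated at 127, and the ~10 "could be speech" figure are all taken from the comments and code above.

```
// Standalone sketch: turn one 480-sample f32 frame into the u8 loudness value
// that speech::Detector consumes (RMS of the frame, scaled by 1000, saturated at 127).
fn frame_loudness(frame: &[f32]) -> u8 {
    let rms = (frame.iter().map(|x| x * x).sum::<f32>() / frame.len() as f32).sqrt();
    match rms * 1000.0 {
        x if x >= 127.0 => 127,
        x => x as u8,
    }
}

fn main() {
    let silence = vec![0.0_f32; 480];
    // a quiet 0.05-amplitude tone: RMS ≈ 0.035, so loudness ≈ 35 — above the ~10 threshold
    let tone: Vec<f32> = (0..480).map(|i| (i as f32 * 0.1).sin() * 0.05).collect();
    println!("silence -> loudness {}", frame_loudness(&silence)); // prints 0
    println!("tone    -> loudness {}", frame_loudness(&tone));    // prints ~35
}
```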
-------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/host_media/loopback/receiver.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use rand::Rng; 4 | use tokio::sync::{mpsc, Notify}; 5 | use webrtc::{ 6 | media::io::sample_builder::SampleBuilder, 7 | rtp::{self, packet::Packet, packetizer::Packetizer}, 8 | track::track_remote::TrackRemote, 9 | util::Unmarshal, 10 | }; 11 | 12 | pub struct Args { 13 | pub should_quit: Arc, 14 | pub track: Arc, 15 | pub ch: mpsc::UnboundedSender<(u8, Packet)>, 16 | } 17 | 18 | pub async fn run(args: Args) { 19 | let Args { 20 | should_quit, 21 | track, 22 | ch, 23 | } = args; 24 | 25 | let mut b = [0u8; 2880 * 4]; 26 | let mut sample_builder = { 27 | let max_late = 512; 28 | let depacketizer = webrtc::rtp::codecs::opus::OpusPacket; 29 | SampleBuilder::new(max_late, depacketizer, 48000) 30 | }; 31 | let mut log_decode_error_once = false; 32 | 33 | let mut packetizer = { 34 | // create the ssrc for the RTP packets. ssrc serves to uniquely identify the sender 35 | let mut rng = rand::thread_rng(); 36 | let ssrc: u32 = rng.gen(); 37 | let opus = Box::new(rtp::codecs::opus::OpusPayloader {}); 38 | let seq = Box::new(rtp::sequence::new_random_sequencer()); 39 | rtp::packetizer::new_packetizer( 40 | // frame size is number of samples 41 | // 12 is for the header, though there may be an additional 4*csrc bytes in the header. 42 | (1024) + 12, 43 | // payload type means nothing 44 | // https://en.wikipedia.org/wiki/RTP_payload_formats 45 | // todo: use an enum for this 46 | 98, 47 | // randomly generated and uniquely identifies the source 48 | ssrc, 49 | opus, 50 | seq, 51 | 48000, 52 | ) 53 | }; 54 | 55 | let mut packet_queue = Vec::new(); 56 | let mut loudness = None; 57 | 58 | loop { 59 | let (siz, _attr) = tokio::select! { 60 | _ = should_quit.notified() => { 61 | log::debug!("loopback receiver task terminated via notify"); 62 | break; 63 | }, 64 | opt = track.read(&mut b) => match opt { 65 | Ok(x) => x, 66 | Err(e) => { 67 | log::error!("loopback receiver encountered error when reading from track: {e}"); 68 | break; 69 | } 70 | } 71 | }; 72 | 73 | // get RTP packet 74 | let mut buf = &b[..siz]; 75 | let rtp_packet = match webrtc::rtp::packet::Packet::unmarshal(&mut buf) { 76 | Ok(r) => r, 77 | Err(e) => { 78 | if !log_decode_error_once { 79 | log_decode_error_once = true; 80 | // this only happens if a packet is "short" 81 | log::error!("unmarshall rtp packet failed: {e}"); 82 | } 83 | continue; 84 | } 85 | }; 86 | 87 | if loudness.is_none() { 88 | if let Some(extension) = rtp_packet.header.extensions.first() { 89 | loudness.replace(extension.payload.first().map(|x| x & 0x7f).unwrap_or(0_u8)); 90 | } 91 | } 92 | 93 | sample_builder.push(rtp_packet); 94 | while let Some(sample) = sample_builder.pop() { 95 | let mut packets = match packetizer.packetize(&sample.data, 480).await { 96 | Ok(r) => r, 97 | Err(e) => { 98 | log::error!("failed to packetize: {e}"); 99 | continue; 100 | } 101 | }; 102 | for packet in packets.drain(..) { 103 | packet_queue.push(packet); 104 | } 105 | } 106 | 107 | // 10ms * 1000 = 10 seconds 108 | if packet_queue.len() >= 1000 { 109 | log::debug!("collected 10 seconds of voice. replaying it now"); 110 | for packet in packet_queue.drain(..) 
{ 111 | let _ = ch.send((loudness.unwrap_or_default(), packet)); 112 | } 113 | 114 | loudness.take(); 115 | } 116 | } 117 | } 118 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/host_media/loopback/sender.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use tokio::sync::{mpsc, Notify}; 4 | use webrtc::{ 5 | rtp::{self, extension::audio_level_extension::AudioLevelExtension, packet::Packet}, 6 | track::track_local::track_local_static_rtp::TrackLocalStaticRTP, 7 | }; 8 | 9 | pub enum Cmd { 10 | SetSourceTrack { track: Arc }, 11 | RemoveSourceTrack, 12 | } 13 | 14 | pub struct Args { 15 | pub should_quit: Arc, 16 | pub cmd_rx: mpsc::UnboundedReceiver, 17 | pub sample_rx: mpsc::UnboundedReceiver<(u8, Packet)>, 18 | } 19 | 20 | pub async fn run(args: Args) { 21 | let Args { 22 | should_quit, 23 | mut cmd_rx, 24 | mut sample_rx, 25 | } = args; 26 | 27 | let mut source_track: Option> = None; 28 | 29 | loop { 30 | tokio::select! { 31 | _ = should_quit.notified() => { 32 | log::debug!("loopback sender terminated by notify"); 33 | break; 34 | }, 35 | opt = cmd_rx.recv() => match opt { 36 | Some(cmd) => match cmd { 37 | Cmd::SetSourceTrack { track } => { 38 | log::debug!("Cmd::SetSourceTrack"); 39 | source_track.replace(track); 40 | }, 41 | Cmd::RemoveSourceTrack => { 42 | source_track.take(); 43 | } 44 | } 45 | None => { 46 | log::debug!("loopback sender cmd channel closed. terminating"); 47 | break; 48 | } 49 | }, 50 | opt = sample_rx.recv() => match opt { 51 | Some((loudness, packet)) => { 52 | if let Some(track) = source_track.as_mut() { 53 | let _ = track .write_rtp_with_extensions( 54 | &packet, 55 | &[rtp::extension::HeaderExtension::AudioLevel( 56 | AudioLevelExtension { 57 | level: loudness, 58 | voice: false, 59 | }, 60 | )], 61 | ).await; 62 | } else { 63 | log::warn!("source track missing"); 64 | } 65 | } 66 | None => { 67 | log::debug!("loopback sender sample channel closed. terminating"); 68 | break; 69 | } 70 | } 71 | } 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/host_media/loopback_controller.rs: -------------------------------------------------------------------------------- 1 | //! CPAL is used for audio IO. cpal has a stream which isn't Send or Sync, making it difficult to use in an abstraction. 2 | //! To circumvent this, the collection of SinkTracks and the host's SourceTrack are static variables. Mutating static variables 3 | //! is `unsafe`. However, it should not be dangerous due to the RwLock. 4 | //! 
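//! (In this loopback build the guard is the module-level `LOCK`, a tokio `Mutex<()>`, which the functions below take before touching the static `DATA`.)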
5 | 6 | use cpal::traits::DeviceTrait; 7 | use once_cell::sync::Lazy; 8 | use std::sync::Arc; 9 | use tokio::sync::{broadcast, Mutex}; 10 | use warp::blink::BlinkEventKind; 11 | use warp::crypto::DID; 12 | use warp::error::Error; 13 | use webrtc::track::track_local::track_local_static_rtp::TrackLocalStaticRTP; 14 | use webrtc::track::track_remote::TrackRemote; 15 | 16 | use super::audio::utils::AudioDeviceConfigImpl; 17 | use super::mp4_logger::Mp4LoggerConfig; 18 | use super::{loopback, mp4_logger}; 19 | 20 | struct Data { 21 | controller: loopback::LoopbackController, 22 | } 23 | 24 | static LOCK: Lazy> = Lazy::new(|| Mutex::new(())); 25 | static mut DATA: Lazy = Lazy::new(|| Data { 26 | controller: loopback::LoopbackController::new(), 27 | }); 28 | 29 | pub const AUDIO_SOURCE_ID: &str = "audio-input"; 30 | 31 | pub async fn get_input_device_name() -> Option { 32 | None 33 | } 34 | 35 | pub async fn get_output_device_name() -> Option { 36 | None 37 | } 38 | 39 | pub async fn reset() { 40 | let _lock = LOCK.lock().await; 41 | unsafe { 42 | DATA.controller = loopback::LoopbackController::new(); 43 | } 44 | mp4_logger::deinit(); 45 | } 46 | 47 | pub async fn has_audio_source() -> bool { 48 | let _lock = LOCK.lock().await; 49 | false 50 | } 51 | 52 | // turns a track, device, and codec into a SourceTrack, which reads and packetizes audio input. 53 | // webrtc should remove the old media source before this is called. 54 | // use AUDIO_SOURCE_ID 55 | pub async fn create_audio_source_track( 56 | _own_id: &DID, 57 | _ui_event_ch: broadcast::Sender, 58 | track: Arc, 59 | ) -> Result<(), Error> { 60 | let _lock = LOCK.lock().await; 61 | 62 | unsafe { DATA.controller.set_source_track(track) } 63 | 64 | Ok(()) 65 | } 66 | 67 | pub async fn remove_audio_source_track() -> anyhow::Result<()> { 68 | let _lock = LOCK.lock().await; 69 | unsafe { 70 | DATA.controller.remove_audio_source_track(); 71 | } 72 | Ok(()) 73 | } 74 | 75 | pub async fn create_audio_sink_track( 76 | peer_id: DID, 77 | _ui_event_ch: broadcast::Sender, 78 | track: Arc, 79 | ) -> anyhow::Result<()> { 80 | let _lock = LOCK.lock().await; 81 | 82 | unsafe { 83 | DATA.controller.add_track(peer_id, track); 84 | } 85 | 86 | Ok(()) 87 | } 88 | 89 | pub async fn change_audio_input( 90 | _own_id: &DID, 91 | _device: cpal::Device, 92 | _ui_event_ch: broadcast::Sender, 93 | ) -> anyhow::Result<()> { 94 | let _lock = LOCK.lock().await; 95 | 96 | Ok(()) 97 | } 98 | 99 | pub async fn change_audio_output(_device: cpal::Device) -> anyhow::Result<()> { 100 | let _lock = LOCK.lock().await; 101 | 102 | Ok(()) 103 | } 104 | 105 | pub async fn get_audio_device_config() -> AudioDeviceConfigImpl { 106 | let _lock = LOCK.lock().await; 107 | AudioDeviceConfigImpl::new(None, None) 108 | } 109 | 110 | pub async fn remove_sink_track(peer_id: DID) -> anyhow::Result<()> { 111 | let _lock = LOCK.lock().await; 112 | unsafe { 113 | DATA.controller.remove_track(peer_id); 114 | } 115 | Ok(()) 116 | } 117 | 118 | pub async fn mute_self() -> anyhow::Result<()> { 119 | let _lock = LOCK.lock().await; 120 | 121 | Ok(()) 122 | } 123 | 124 | pub async fn unmute_self() -> anyhow::Result<()> { 125 | let _lock = LOCK.lock().await; 126 | 127 | Ok(()) 128 | } 129 | 130 | pub async fn deafen() { 131 | let _lock = LOCK.lock().await; 132 | } 133 | 134 | pub async fn undeafen() { 135 | let _lock = LOCK.lock().await; 136 | } 137 | 138 | // the source and sink tracks will use mp4_logger::get_instance() regardless of whether init_recording is called. 
139 | // but that instance (when uninitialized) won't do anything. 140 | // when the user issues the command to begin recording, mp4_logger needs to be initialized and 141 | // the source and sink tracks need to be told to get a new instance of mp4_logger. 142 | pub async fn init_recording(_config: Mp4LoggerConfig) -> anyhow::Result<()> { 143 | let _lock = LOCK.lock().await; 144 | Ok(()) 145 | } 146 | 147 | pub async fn pause_recording() { 148 | let _lock = LOCK.lock().await; 149 | } 150 | 151 | pub async fn resume_recording() { 152 | let _lock = LOCK.lock().await; 153 | } 154 | 155 | pub async fn set_peer_audio_gain(_peer_id: DID, _audio_multiplier: f32) -> anyhow::Result<()> { 156 | let _lock = LOCK.lock().await; 157 | 158 | Ok(()) 159 | } 160 | 161 | fn get_min_source_channels(input_device: &cpal::Device) -> anyhow::Result { 162 | let min_channels = input_device 163 | .supported_input_configs()? 164 | .fold(None, |acc: Option, x| match acc { 165 | None => Some(x.channels()), 166 | Some(y) => Some(std::cmp::min(x.channels(), y)), 167 | }); 168 | let channels = min_channels.ok_or(anyhow::anyhow!( 169 | "unsupported audio input device - no input configuration available" 170 | ))?; 171 | Ok(channels) 172 | } 173 | 174 | fn get_min_sink_channels(output_device: &cpal::Device) -> anyhow::Result { 175 | let min_channels = 176 | output_device 177 | .supported_output_configs()? 178 | .fold(None, |acc: Option, x| match acc { 179 | None => Some(x.channels()), 180 | Some(y) => Some(std::cmp::min(x.channels(), y)), 181 | }); 182 | let channels = min_channels.ok_or(anyhow::anyhow!( 183 | "unsupported audio output device. no output configuration available" 184 | ))?; 185 | Ok(channels) 186 | } 187 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/host_media/mod.rs: -------------------------------------------------------------------------------- 1 | mod audio; 2 | pub mod default_controller; 3 | mod loopback; 4 | pub mod loopback_controller; 5 | mod mp4_logger; 6 | 7 | pub use audio::utils as audio_utils; 8 | pub use mp4_logger::Mp4LoggerConfig; 9 | 10 | #[cfg(not(feature = "loopback"))] 11 | pub use default_controller as controller; 12 | #[cfg(feature = "loopback")] 13 | pub use loopback_controller as controller; 14 | 15 | pub const AUDIO_SOURCE_ID: &str = "audio"; 16 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/host_media/mp4_logger/loggers/dummy.rs: -------------------------------------------------------------------------------- 1 | use crate::host_media::mp4_logger::Mp4LoggerInstance; 2 | 3 | pub struct Logger {} 4 | 5 | impl Mp4LoggerInstance for Logger { 6 | fn log(&mut self, _bytes: bytes::Bytes) {} 7 | } 8 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/host_media/mp4_logger/loggers/mod.rs: -------------------------------------------------------------------------------- 1 | use tokio::sync::mpsc::Sender; 2 | 3 | use super::{Mp4Fragment, Mp4LoggerInstance}; 4 | 5 | mod dummy; 6 | pub use dummy::Logger as DummyLogger; 7 | mod opus; 8 | 9 | pub(crate) fn get_opus_logger( 10 | tx: Sender, 11 | track_id: u32, 12 | ) -> Box { 13 | Box::new(opus::Opus::new(tx, track_id)) 14 | } 15 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/host_media/mp4_logger/loggers/opus.rs: 
-------------------------------------------------------------------------------- 1 | use std::time::{SystemTime, UNIX_EPOCH}; 2 | 3 | use bytes::Bytes; 4 | use mp4::{TfdtBox, TfhdBox, TrafBox, TrunBox}; 5 | use tokio::sync::mpsc::Sender; 6 | 7 | use crate::host_media::mp4_logger::{Mp4Fragment, Mp4LoggerInstance}; 8 | 9 | // an opus frame (10ms) is about 65 bytes. only want 50-100 of them 10 | const MAX_FRAME_SIZE: usize = 1024 * 10; 11 | 12 | pub struct Opus { 13 | track_id: u32, 14 | tx: Sender, 15 | 16 | sample_buffer: [u8; MAX_FRAME_SIZE], 17 | sample_lengths: Vec, 18 | sample_buffer_len: usize, 19 | 20 | fragment_start_time: u32, 21 | elapsed_time: u32, 22 | } 23 | 24 | impl Opus { 25 | pub(crate) fn new(tx: Sender, track_id: u32) -> Self { 26 | Self { 27 | tx, 28 | track_id, 29 | 30 | sample_buffer: [0; MAX_FRAME_SIZE], 31 | sample_lengths: vec![], 32 | 33 | sample_buffer_len: 0, 34 | fragment_start_time: 0, 35 | elapsed_time: 0, 36 | } 37 | } 38 | } 39 | // todo: use num samples written to increment the timestamp unless rtp_start_time is too far ahead... 40 | impl Mp4LoggerInstance for Opus { 41 | fn log(&mut self, bytes: bytes::Bytes) { 42 | if self.sample_buffer.len() - self.sample_buffer_len < bytes.len() { 43 | self.make_fragment(); 44 | 45 | // don't return - still need to log this sample 46 | } 47 | 48 | // todo: check sample_time - previous_sample_time 49 | 50 | self.sample_lengths.push(bytes.len() as u32); 51 | self.sample_buffer[self.sample_buffer_len..(self.sample_buffer_len + bytes.len())] 52 | .copy_from_slice(&bytes.slice(..)); 53 | self.sample_buffer_len += bytes.len(); 54 | 55 | if self.sample_lengths.len() >= 100 { 56 | self.make_fragment(); 57 | } 58 | } 59 | } 60 | 61 | impl Opus { 62 | fn make_fragment(&mut self) { 63 | let fragment_start_time = self.fragment_start_time; 64 | let num_samples_in_trun = self.sample_lengths.len() as u32; 65 | // create a traf and push to moof.trafs for each track fragment 66 | let traf = TrafBox { 67 | // track fragment header 68 | // size is 9 + header_size 69 | tfhd: TfhdBox { 70 | version: 0, 71 | // 0x020000: default-base-is-moof is 1 and base-data-offset-present is 0 72 | // memory addresses are relative to the start of this box 73 | // 74 | // 0x10: sample size is present 75 | flags: 0x020000, //| 0x10, 76 | track_id: self.track_id, 77 | //default_sample_size: Some(1), 78 | ..Default::default() 79 | }, 80 | // track fragment decode time 81 | // size is 9 + header_size 82 | tfdt: Some(TfdtBox { 83 | version: 0, 84 | flags: 0, 85 | base_media_decode_time: fragment_start_time as u64, 86 | }), 87 | // track fragment run 88 | // size is 13 + sample_length + header_size 89 | trun: Some(TrunBox { 90 | version: 0, 91 | // data-offset-present, sample-size-present 92 | flags: 1 | 0x200, 93 | sample_count: num_samples_in_trun, 94 | // warning: this needs to be changed after the moof box is declared 95 | data_offset: Some(0), 96 | sample_sizes: self.sample_lengths.clone(), 97 | ..Default::default() 98 | }), 99 | }; 100 | 101 | let mdat: Bytes = Bytes::copy_from_slice(&self.sample_buffer[0..self.sample_buffer_len]); 102 | 103 | self.sample_buffer_len = 0; 104 | self.sample_lengths.clear(); 105 | self.fragment_start_time += num_samples_in_trun; 106 | 107 | let _ = self.tx.try_send(Mp4Fragment { 108 | traf, 109 | mdat, 110 | system_time_ms: SystemTime::now() 111 | .duration_since(UNIX_EPOCH) 112 | .unwrap_or_default() 113 | .as_millis(), 114 | }); 115 | } 116 | } 117 | 
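
A quick sanity check of the fragment sizing described in the comments of `opus.rs` above; the ≈65-byte / 10 ms frame figure is the comment's own estimate, not a measured value.

```
// Back-of-the-envelope check of the mp4 Opus logger's fragment sizing.
fn main() {
    let frame_bytes = 65_usize;     // rough size of one 10 ms Opus frame (per the comment)
    let frames_per_fragment = 100;  // log() forces make_fragment() after 100 sample lengths
    let buffer_bytes = 1024 * 10;   // MAX_FRAME_SIZE

    println!("bytes per fragment ≈ {}", frame_bytes * frames_per_fragment); // ≈ 6.5 KB, fits the 10 KB buffer
    println!("audio per fragment ≈ {} ms", 10 * frames_per_fragment);       // ≈ 1 s of audio at 10 ms frames
    println!("frames to fill buffer ≈ {}", buffer_bytes / frame_bytes);     // ≈ 157, so the 100-frame cap triggers first
}
```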
-------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/lib.rs: -------------------------------------------------------------------------------- 1 | //! A Blink implementation relying on Mozilla's WebRTC library (hence the name warp-blink-wrtc) 2 | //! 3 | //! The init() function must be called prior to using the Blink implementation. 4 | //! the deinit() function must be called to ensure all threads are cleaned up properly. 5 | //! 6 | //! init() returns a BlinkImpl struct, which as the name suggests, implements Blink. 7 | //! All data used by the implementation is contained in two static variables: IPFS and BLINK_DATA. 8 | //! 9 | 10 | #![allow(dead_code)] 11 | 12 | // mod rtp_logger; 13 | mod blink_impl; 14 | mod host_media; 15 | mod notify_wrapper; 16 | mod simple_webrtc; 17 | 18 | pub use blink_impl::*; 19 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/notify_wrapper.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | use tokio::sync::Notify; 3 | 4 | pub struct NotifyWrapper { 5 | pub notify: Arc, 6 | } 7 | 8 | impl Drop for NotifyWrapper { 9 | fn drop(&mut self) { 10 | self.notify.notify_waiters(); 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /extensions/warp-blink-wrtc/src/simple_webrtc/events.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use futures::stream::BoxStream; 4 | use warp::crypto::DID; 5 | use webrtc::{ 6 | ice_transport::ice_candidate::RTCIceCandidate, 7 | peer_connection::sdp::session_description::RTCSessionDescription, 8 | track::track_remote::TrackRemote, 9 | }; 10 | 11 | pub struct WebRtcEventStream(pub BoxStream<'static, EmittedEvents>); 12 | 13 | impl core::ops::Deref for WebRtcEventStream { 14 | type Target = BoxStream<'static, EmittedEvents>; 15 | fn deref(&self) -> &Self::Target { 16 | &self.0 17 | } 18 | } 19 | 20 | impl core::ops::DerefMut for WebRtcEventStream { 21 | fn deref_mut(&mut self) -> &mut Self::Target { 22 | &mut self.0 23 | } 24 | } 25 | 26 | #[derive(Clone, derive_more::Display)] 27 | pub enum EmittedEvents { 28 | #[display(fmt = "Ice")] 29 | Ice { 30 | dest: DID, 31 | candidate: Box, 32 | }, 33 | #[display(fmt = "Connected")] 34 | Connected { peer: DID }, 35 | #[display(fmt = "Disconnected")] 36 | Disconnected { peer: DID }, 37 | #[display(fmt = "ConnectionFailed")] 38 | ConnectionFailed { peer: DID }, 39 | #[display(fmt = "ConnectionClosed")] 40 | ConnectionClosed { peer: DID }, 41 | /// emitted in response to accept_call. the sdp should be sent to dest 42 | #[display(fmt = "Sdp")] 43 | Sdp { 44 | dest: DID, 45 | sdp: Box, 46 | }, 47 | /// emitted in response to `Dial` 48 | #[display(fmt = "CallInitiated")] 49 | CallInitiated { 50 | dest: DID, 51 | sdp: Box, 52 | }, 53 | 54 | /// a peer added a track. 
The calling application is responsible for reading from the track 55 | /// and processing the output 56 | #[display(fmt = "TrackAdded")] 57 | TrackAdded { peer: DID, track: Arc }, 58 | 59 | #[display(fmt = "AudioDegradation")] 60 | AudioDegradation { peer: DID }, 61 | } 62 | 63 | // needed because RTcDAtaChannel doesn't implement Debug 64 | impl std::fmt::Debug for EmittedEvents { 65 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 66 | write!(f, "{}", self) 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /extensions/warp-ipfs/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "warp-ipfs" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [lib] 7 | crate-type = ["cdylib", "rlib", "staticlib"] 8 | 9 | [dependencies] 10 | warp.workspace = true 11 | 12 | futures-timeout.workspace = true 13 | futures-finally = "0.1.0-alpha.2" 14 | cbor4ii.workspace = true 15 | rust-ipfs = { workspace = true, features = ["experimental_stream"] } 16 | ipld-core.workspace = true 17 | uuid = { workspace = true, features = ["serde", "v4"] } 18 | futures.workspace = true 19 | async-trait.workspace = true 20 | async-stream.workspace = true 21 | anyhow.workspace = true 22 | serde.workspace = true 23 | serde_json.workspace = true 24 | either = { workspace = true, features = ["serde"] } 25 | bs58.workspace = true 26 | parking_lot.workspace = true 27 | 28 | tracing.workspace = true 29 | 30 | async-recursion = "1" 31 | 32 | chrono = { workspace = true } 33 | indexmap.workspace = true 34 | 35 | void.workspace = true 36 | 37 | image = { workspace = true } 38 | derive_more.workspace = true 39 | mediatype.workspace = true 40 | 41 | async-rt = "0.1.4" 42 | 43 | bincode.workspace = true 44 | bytes.workspace = true 45 | 46 | fs = { path = "../../tools/fs", default-features = false } 47 | 48 | tokio-util = { workspace = true } 49 | tokio-stream = { workspace = true } 50 | 51 | web-time = "1.1.0" 52 | 53 | base64 = "0.21" 54 | 55 | pollable-map.workspace = true 56 | 57 | [target.'cfg(not(target_arch = "wasm32"))'.dependencies] 58 | tokio = { workspace = true } 59 | futures-timer = { workspace = true } 60 | 61 | [target.'cfg(target_arch = "wasm32")'.dependencies] 62 | tokio = { version = "1", default-features = false, features = ["sync"] } 63 | futures-timer = { workspace = true, features = ["wasm-bindgen"] } 64 | wasm-bindgen-futures = { version = "0.4" } 65 | wasm-bindgen.workspace = true 66 | js-sys.workspace = true 67 | 68 | [dev-dependencies] 69 | derive_more.workspace = true 70 | strum = "0.25" 71 | strum_macros = "0.25" 72 | fdlimit = "0.3" 73 | rustyline-async = "0.4" 74 | r3bl_terminal_async = "0.5.6" 75 | comfy-table = "7.1" 76 | tracing-subscriber = { version = "0.3", features = ["env-filter"] } 77 | tracing-appender = "0.2" 78 | 79 | clap = { version = "4.0", features = ["derive"] } 80 | 81 | rpassword = "7.3" 82 | 83 | # crates for examples 84 | tiny_file_server = "0.1.5" 85 | 86 | [target.'cfg(target_arch = "wasm32")'.dev-dependencies] 87 | wasm-bindgen-test = "0.3.42" 88 | 89 | [features] 90 | default = [] 91 | build-header = [] 92 | -------------------------------------------------------------------------------- /extensions/warp-ipfs/examples/README.md: -------------------------------------------------------------------------------- 1 | # Examples 2 | 3 | Example usages of warp-ipfs 4 | 5 | ## Using from Rust (desktop) 6 | 7 | CLI for interacting with Multipass (identity): 8 | 
``` 9 | cargo run --example identity-interface 10 | ``` 11 | 12 | Basic ipfs setup example: 13 | ``` 14 | cargo run --example ipfs-example 15 | ``` 16 | 17 | Basic friends example: 18 | ``` 19 | cargo run --example ipfs-friends 20 | ``` 21 | 22 | Basic identity example: 23 | ``` 24 | cargo run --example ipfs-identity 25 | ``` 26 | 27 | CLI for interacting with Constellation (file management): 28 | ``` 29 | cargo run --example ipfs-persisent 30 | ``` 31 | 32 | CLI messenger example: 33 | ``` 34 | cargo run --example messenger 35 | ``` 36 | 37 | ## Using from Rust (WASM) 38 | 39 | [wasm-ipfs-friends](./wasm-ipfs-friends/README.md) 40 | 41 | [wasm-ipfs-identity](./wasm-ipfs-identity/README.md) 42 | 43 | [wasm-ipfs-storage](./wasm-ipfs-storage/README.md) 44 | 45 | ## Using from Javascript 46 | 47 | Serves web files that contain examples of javascript calling into wasm built from `warp-ipfs`: 48 | ``` 49 | cargo run --example from-js 50 | ``` -------------------------------------------------------------------------------- /extensions/warp-ipfs/examples/ipfs-example.rs: -------------------------------------------------------------------------------- 1 | use warp::constellation::Constellation; 2 | use warp::multipass::{LocalIdentity, MultiPass}; 3 | use warp_ipfs::WarpIpfsBuilder; 4 | 5 | #[tokio::main] 6 | async fn main() -> anyhow::Result<()> { 7 | let mut instance = WarpIpfsBuilder::default().await; 8 | 9 | instance 10 | .tesseract() 11 | .unlock(b"this is my totally secured password that should nnever be embedded in code")?; 12 | 13 | instance.create_identity(None, None).await?; 14 | 15 | instance 16 | .put_buffer("readme.txt", &b"Hello, World!"[..]) 17 | .await?; 18 | 19 | let buffer = instance.get_buffer("readme.txt").await?; 20 | let data = String::from_utf8_lossy(&buffer); 21 | println!("readme.txt: {data}"); 22 | 23 | Ok(()) 24 | } 25 | -------------------------------------------------------------------------------- /extensions/warp-ipfs/examples/ipfs-identity.rs: -------------------------------------------------------------------------------- 1 | use warp::multipass::identity::IdentityUpdate; 2 | use warp::multipass::{LocalIdentity, MultiPass}; 3 | use warp::tesseract::Tesseract; 4 | use warp_ipfs::WarpIpfsBuilder; 5 | 6 | async fn update_name(account: &mut M, name: &str) -> anyhow::Result<()> { 7 | account 8 | .update_identity(IdentityUpdate::Username(name.to_string())) 9 | .await?; 10 | let ident = account.identity().await?; 11 | println!(); 12 | println!("Updated Identity: {}", serde_json::to_string(&ident)?); 13 | Ok(()) 14 | } 15 | 16 | async fn update_status(account: &mut M, status: &str) -> anyhow::Result<()> { 17 | account 18 | .update_identity(IdentityUpdate::StatusMessage(Some(status.to_string()))) 19 | .await?; 20 | let ident = account.identity().await?; 21 | println!(); 22 | println!("Updated Identity: {}", serde_json::to_string(&ident)?); 23 | Ok(()) 24 | } 25 | 26 | #[tokio::main] 27 | async fn main() -> anyhow::Result<()> { 28 | let tesseract = Tesseract::default(); 29 | tesseract.unlock(b"super duper pass")?; 30 | 31 | let mut identity = WarpIpfsBuilder::default().set_tesseract(tesseract).await; 32 | 33 | identity 34 | .tesseract() 35 | .unlock(b"this is my totally secured password that should nnever be embedded in code")?; 36 | 37 | let profile = identity.create_identity(None, None).await?; 38 | 39 | let ident = profile.identity(); 40 | 41 | println!("Current Identity: {}", serde_json::to_string(&ident)?); 42 | 43 | update_name(&mut identity, 
&warp::multipass::generator::generate_name()).await?; 44 | update_status(&mut identity, "New status message").await?; 45 | 46 | Ok(()) 47 | } 48 | -------------------------------------------------------------------------------- /extensions/warp-ipfs/examples/wasm-ipfs-friends/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "wasm-ipfs-friends" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [lib] 7 | crate-type = ["cdylib"] 8 | 9 | [dependencies] 10 | futures = "0.3" 11 | wasm-bindgen = "0.2.90" 12 | wasm-bindgen-futures = "0.4.42" 13 | web-sys = { version = "0.3", features = ['Document', 'Element', 'HtmlElement', 'Node', 'Response', 'Window', "console"] } 14 | js-sys = "0.3.69" 15 | console_error_panic_hook = "0.1.7" 16 | serde_json.workspace = true 17 | serde.workspace = true 18 | warp.workspace = true 19 | warp-ipfs.workspace = true 20 | tracing-wasm = "0.2.1" 21 | futures-timeout = "0.1" 22 | tokio = { version = "1", default-features = false, features = ["sync"] } -------------------------------------------------------------------------------- /extensions/warp-ipfs/examples/wasm-ipfs-friends/README.md: -------------------------------------------------------------------------------- 1 | # wasm-ipfs-friends 2 | 3 | To build do the following: 4 | 5 | 1. Install wasm32-unknown-unknown target by doing `rustup target add wasm32-unknown-unknown` 6 | 2. Install wasm-pack by doing `cargo install wasm-pack` 7 | 3. Run `wasm-pack build --target web --out-dir static` 8 | 4. Use a web server to serve the content from `static` directory. E.g with python install you can do `python3 -m http.server -d ./static` -------------------------------------------------------------------------------- /extensions/warp-ipfs/examples/wasm-ipfs-friends/static/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | Friends Example 4 | 5 | 6 | 11 | 12 | -------------------------------------------------------------------------------- /extensions/warp-ipfs/examples/wasm-ipfs-identity/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "wasm-ipfs-identity" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [lib] 9 | crate-type = ["cdylib"] 10 | 11 | [dependencies] 12 | futures = "0.3" 13 | wasm-bindgen = "0.2.90" 14 | wasm-bindgen-futures = "0.4.42" 15 | web-sys = { version = "0.3", features = ['Document', 'Element', 'HtmlElement', 'Node', 'Response', 'Window', "console"] } 16 | js-sys = "0.3.69" 17 | console_error_panic_hook.workspace = true 18 | serde_json.workspace = true 19 | serde.workspace = true 20 | warp.workspace = true 21 | warp-ipfs.workspace = true 22 | tracing-wasm = "0.2.1" -------------------------------------------------------------------------------- /extensions/warp-ipfs/examples/wasm-ipfs-identity/readme.md: -------------------------------------------------------------------------------- 1 | # wasm-ipfs-identity 2 | 3 | To build do the following: 4 | 5 | 1. Install wasm32-unknown-unknown target by doing `rustup target add wasm32-unknown-unknown` 6 | 2. Install wasm-pack by doing `cargo install wasm-pack` 7 | 3. Run `wasm-pack build --target web --out-dir static` 8 | 4. Use a web server to serve the content from `static` directory. 
E.g with python install you can do `python3 -m http.server -d ./static` -------------------------------------------------------------------------------- /extensions/warp-ipfs/examples/wasm-ipfs-identity/src/lib.rs: -------------------------------------------------------------------------------- 1 | use warp::error::Error; 2 | use warp::multipass::identity::IdentityUpdate; 3 | use warp::multipass::MultiPass; 4 | use warp::tesseract::Tesseract; 5 | use warp_ipfs::config::Config; 6 | use warp_ipfs::WarpIpfsBuilder; 7 | use wasm_bindgen::prelude::*; 8 | 9 | macro_rules! web_log { 10 | ( $( $t:tt )* ) => { 11 | web_sys::console::log_1(&format!( $( $t )* ).into()); 12 | } 13 | } 14 | 15 | async fn update_name(account: &mut M, name: &str) -> Result<(), Error> { 16 | account 17 | .update_identity(IdentityUpdate::Username(name.to_string())) 18 | .await?; 19 | let ident = account.identity().await?; 20 | web_log!("Updated Identity: {}", serde_json::to_string(&ident)?); 21 | Ok(()) 22 | } 23 | 24 | async fn update_status(account: &mut M, status: &str) -> Result<(), Error> { 25 | account 26 | .update_identity(IdentityUpdate::StatusMessage(Some(status.to_string()))) 27 | .await?; 28 | let ident = account.identity().await?; 29 | web_log!("Updated Identity: {}", serde_json::to_string(&ident)?); 30 | Ok(()) 31 | } 32 | 33 | #[wasm_bindgen] 34 | pub async fn run() -> Result<(), JsError> { 35 | tracing_wasm::set_as_global_default(); 36 | std::panic::set_hook(Box::new(console_error_panic_hook::hook)); 37 | let tesseract = Tesseract::default(); 38 | tesseract.unlock(b"super duper pass")?; 39 | 40 | let mut instance = WarpIpfsBuilder::default() 41 | .set_config(Config::minimal_testing()) 42 | .set_tesseract(tesseract) 43 | .await; 44 | 45 | let profile = instance.create_identity(None, None).await?; 46 | 47 | let ident = profile.identity(); 48 | 49 | web_log!("Current Identity: {}", serde_json::to_string(&ident)?); 50 | 51 | update_name(&mut instance, &warp::multipass::generator::generate_name()).await?; 52 | update_status(&mut instance, "New status message").await?; 53 | Ok(()) 54 | } 55 | -------------------------------------------------------------------------------- /extensions/warp-ipfs/examples/wasm-ipfs-identity/static/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | Identity Example 4 | 5 | 6 | 11 | 12 | -------------------------------------------------------------------------------- /extensions/warp-ipfs/examples/wasm-ipfs-storage/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "wasm-ipfs-storage" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [lib] 9 | crate-type = ["cdylib"] 10 | 11 | [dependencies] 12 | futures = "0.3" 13 | wasm-bindgen = "0.2.90" 14 | wasm-bindgen-futures = "0.4.42" 15 | web-sys = { version = "0.3", features = ['Document', 'Element', 'HtmlElement', 'Node', 'Response', 'Window', "console"] } 16 | js-sys = "0.3.69" 17 | console_error_panic_hook.workspace = true 18 | serde_json.workspace = true 19 | serde.workspace = true 20 | warp.workspace = true 21 | warp-ipfs.workspace = true 22 | tracing-wasm = "0.2.1" 23 | base64 = "0.22.1" -------------------------------------------------------------------------------- /extensions/warp-ipfs/examples/wasm-ipfs-storage/readme.md: -------------------------------------------------------------------------------- 1 | # 
wasm-ipfs-storage 2 | 3 | To build do the following: 4 | 5 | 1. Install wasm32-unknown-unknown target by doing `rustup target add wasm32-unknown-unknown` 6 | 2. Install wasm-pack by doing `cargo install wasm-pack` 7 | 3. Run `wasm-pack build --target web --out-dir static` 8 | 4. Use a web server to serve the content from `static` directory. E.g with python install you can do `python3 -m http.server -d ./static` -------------------------------------------------------------------------------- /extensions/warp-ipfs/examples/wasm-ipfs-storage/static/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Constellation Example 5 | 6 | 7 | 8 | 13 | 14 | 15 | -------------------------------------------------------------------------------- /extensions/warp-ipfs/shuttle/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "shuttle" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | warp-ipfs = { path = "../" } 8 | bytes.workspace = true 9 | cbor4ii.workspace = true 10 | rust-ipfs = { workspace = true, features = ["webrtc_transport", "experimental_stream"] } 11 | ipld-core.workspace = true 12 | uuid = { workspace = true, features = ["serde", "v4"] } 13 | futures.workspace = true 14 | async-trait.workspace = true 15 | async-stream.workspace = true 16 | anyhow.workspace = true 17 | serde.workspace = true 18 | serde_json.workspace = true 19 | either = { workspace = true, features = ["serde"] } 20 | 21 | tracing = "0.1" 22 | tracing-subscriber = { version = "0.3", features = ["env-filter"] } 23 | tracing-appender = "0.2" 24 | 25 | chrono = { workspace = true } 26 | 27 | void.workspace = true 28 | 29 | clap = { version = "4.4", features = ["derive"] } 30 | zeroize.workspace = true 31 | dotenv = "0.15" 32 | base64 = "0.21" 33 | 34 | bs58.workspace = true 35 | 36 | tokio = { workspace = true } 37 | tokio-util = { workspace = true, features = ["full"] } 38 | tokio-stream = { workspace = true, features = ["net"] } 39 | 40 | -------------------------------------------------------------------------------- /extensions/warp-ipfs/src/behaviour/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod phonebook; 2 | 3 | use libp2p::swarm::NetworkBehaviour; 4 | use rust_ipfs::libp2p; 5 | 6 | #[derive(NetworkBehaviour)] 7 | #[behaviour(prelude = "libp2p::swarm::derive_prelude", to_swarm = "void::Void")] 8 | pub struct Behaviour { 9 | pub phonebook: phonebook::Behaviour, 10 | } 11 | -------------------------------------------------------------------------------- /extensions/warp-ipfs/src/behaviour/phonebook/handler.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | collections::VecDeque, 3 | task::{Context, Poll}, 4 | }; 5 | 6 | use rust_ipfs::libp2p::{ 7 | core::upgrade::DeniedUpgrade, 8 | swarm::{ 9 | handler::ConnectionEvent, ConnectionHandler, ConnectionHandlerEvent, SubstreamProtocol, 10 | }, 11 | }; 12 | use void::Void; 13 | 14 | #[allow(clippy::type_complexity)] 15 | #[allow(deprecated)] 16 | #[derive(Default, Debug)] 17 | pub struct Handler { 18 | events: VecDeque< 19 | ConnectionHandlerEvent< 20 | ::OutboundProtocol, 21 | ::OutboundOpenInfo, 22 | ::ToBehaviour, 23 | >, 24 | >, 25 | reserve: bool, 26 | } 27 | 28 | impl Handler { 29 | pub fn new(reserve: bool) -> Self { 30 | Self { 31 | reserve, 32 | ..Default::default() 33 | } 34 | } 35 | } 36 | 37 | #[derive(Debug, Clone)] 38 | 
pub enum In { 39 | Reserve, 40 | Release, 41 | } 42 | 43 | #[derive(Debug, Clone)] 44 | pub enum Out { 45 | Reserved, 46 | Released, 47 | } 48 | 49 | impl ConnectionHandler for Handler { 50 | type FromBehaviour = In; 51 | type ToBehaviour = Out; 52 | type InboundProtocol = DeniedUpgrade; 53 | type OutboundProtocol = DeniedUpgrade; 54 | type InboundOpenInfo = (); 55 | type OutboundOpenInfo = Void; 56 | 57 | fn listen_protocol(&self) -> SubstreamProtocol { 58 | SubstreamProtocol::new(DeniedUpgrade, ()) 59 | } 60 | 61 | fn connection_keep_alive(&self) -> bool { 62 | self.reserve 63 | } 64 | 65 | fn on_behaviour_event(&mut self, event: Self::FromBehaviour) { 66 | match event { 67 | In::Reserve => { 68 | if self.reserve { 69 | return; 70 | } 71 | self.reserve = true; 72 | self.events 73 | .push_back(ConnectionHandlerEvent::NotifyBehaviour(Out::Reserved)); 74 | } 75 | In::Release => { 76 | if !self.reserve { 77 | return; 78 | } 79 | self.reserve = false; 80 | self.events 81 | .push_back(ConnectionHandlerEvent::NotifyBehaviour(Out::Released)); 82 | } 83 | } 84 | } 85 | 86 | fn on_connection_event( 87 | &mut self, 88 | _: ConnectionEvent< 89 | Self::InboundProtocol, 90 | Self::OutboundProtocol, 91 | Self::InboundOpenInfo, 92 | Self::OutboundOpenInfo, 93 | >, 94 | ) { 95 | } 96 | 97 | fn poll( 98 | &mut self, 99 | _: &mut Context<'_>, 100 | ) -> Poll< 101 | ConnectionHandlerEvent, 102 | > { 103 | if let Some(event) = self.events.pop_front() { 104 | return Poll::Ready(event); 105 | } 106 | Poll::Pending 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /extensions/warp-ipfs/src/shuttle/gateway/mod.rs: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /extensions/warp-ipfs/src/shuttle/identity.rs: -------------------------------------------------------------------------------- 1 | use ipld_core::cid::Cid; 2 | use serde::{Deserialize, Serialize}; 3 | 4 | use crate::store::document::identity::IdentityDocument; 5 | 6 | pub mod protocol; 7 | 8 | #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq, Hash)] 9 | pub struct IdentityDag { 10 | pub identity: IdentityDocument, 11 | #[serde(skip_serializing_if = "Option::is_none")] 12 | pub package: Option, 13 | #[serde(skip_serializing_if = "Option::is_none")] 14 | pub mailbox: Option, 15 | } 16 | -------------------------------------------------------------------------------- /extensions/warp-ipfs/src/shuttle/message.rs: -------------------------------------------------------------------------------- 1 | pub mod protocol; 2 | -------------------------------------------------------------------------------- /extensions/warp-ipfs/src/shuttle/message/protocol.rs: -------------------------------------------------------------------------------- 1 | use std::collections::BTreeMap; 2 | 3 | use ipld_core::cid::Cid; 4 | use rust_ipfs::Keypair; 5 | use serde::{de::DeserializeOwned, Deserialize, Serialize}; 6 | use uuid::Uuid; 7 | use warp::crypto::DID; 8 | 9 | use crate::store::payload::{PayloadBuilder, PayloadMessage}; 10 | 11 | pub fn payload_message_construct( 12 | keypair: &Keypair, 13 | cosigner: Option<&Keypair>, 14 | message: T, 15 | ) -> Result, anyhow::Error> { 16 | let mut payload = PayloadBuilder::new(keypair, message); 17 | if let Some(cosigner) = cosigner { 18 | payload = payload.cosign(cosigner); 19 | } 20 | let payload = payload.build()?; 21 | Ok(payload) 22 | } 23 | 24 | 
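// Illustrative usage sketch (not part of the original module, and assuming the bounds shown
// above): sign one of the requests defined below with a node keypair and no cosigner.
// All names used here are already in scope in this file.
#[allow(dead_code)]
fn example_sign_fetch_mailbox(keypair: &Keypair) -> Result<(), anyhow::Error> {
    let request = Request::FetchMailBox {
        conversation_id: Uuid::new_v4(),
    };
    // payload_message_construct signs (and optionally cosigns) the message
    let _payload = payload_message_construct(keypair, None, request)?;
    Ok(())
}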
#[derive(Clone, Debug, Serialize, Deserialize)] 25 | pub enum ConversationType { 26 | Direct, 27 | Group, 28 | } 29 | 30 | #[derive(Clone, Debug, Serialize, Deserialize)] 31 | #[serde(rename_all = "snake_case")] 32 | pub enum Request { 33 | RegisterConversation(RegisterConversation), 34 | MessageUpdate(MessageUpdate), 35 | FetchMailBox { conversation_id: Uuid }, 36 | } 37 | 38 | #[derive(Clone, Debug, Serialize, Deserialize)] 39 | pub struct RegisterConversation { 40 | pub owner: DID, 41 | pub conversation_id: Uuid, 42 | pub conversation_type: ConversationType, 43 | pub conversation_document: Cid, 44 | } 45 | 46 | #[derive(Clone, Debug, Serialize, Deserialize)] 47 | #[serde(rename_all = "snake_case")] 48 | pub enum MessageUpdate { 49 | Insert { 50 | conversation_id: Uuid, 51 | message_id: Uuid, 52 | recipients: Vec, 53 | message_cid: Cid, 54 | }, 55 | Delivered { 56 | conversation_id: Uuid, 57 | message_id: Uuid, 58 | }, 59 | Remove { 60 | conversation_id: Uuid, 61 | message_id: Uuid, 62 | }, 63 | } 64 | 65 | impl From for Request { 66 | fn from(request: RegisterConversation) -> Self { 67 | Self::RegisterConversation(request) 68 | } 69 | } 70 | 71 | impl From for Request { 72 | fn from(request: MessageUpdate) -> Self { 73 | Self::MessageUpdate(request) 74 | } 75 | } 76 | 77 | #[derive(Clone, Debug, Serialize, Deserialize)] 78 | #[serde(rename_all = "snake_case")] 79 | pub enum Response { 80 | Ack, 81 | Mailbox { 82 | conversation_id: Uuid, 83 | content: BTreeMap, 84 | }, 85 | Error(String), 86 | } 87 | -------------------------------------------------------------------------------- /extensions/warp-ipfs/src/shuttle/mod.rs: -------------------------------------------------------------------------------- 1 | use rust_ipfs::PeerId; 2 | 3 | pub mod gateway; 4 | pub mod identity; 5 | pub mod message; 6 | 7 | #[cfg(not(target_arch = "wasm32"))] 8 | pub mod server; 9 | #[cfg(not(target_arch = "wasm32"))] 10 | pub mod store; 11 | #[cfg(not(target_arch = "wasm32"))] 12 | pub mod subscription_stream; 13 | 14 | pub enum ShuttleNodeQuorum { 15 | Primary, 16 | Seconary, 17 | Select(PeerId), 18 | All, 19 | } 20 | -------------------------------------------------------------------------------- /extensions/warp-ipfs/src/shuttle/store.rs: -------------------------------------------------------------------------------- 1 | pub mod identity; 2 | pub mod messages; 3 | pub mod root; 4 | -------------------------------------------------------------------------------- /extensions/warp-ipfs/src/shuttle/store/root.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use futures::TryFutureExt; 4 | use ipld_core::cid::Cid; 5 | use rust_ipfs::Ipfs; 6 | use serde::{Deserialize, Serialize}; 7 | use tokio::sync::RwLock; 8 | use warp::error::Error; 9 | 10 | const ROOT_KEY: &str = "root"; 11 | 12 | #[derive(Default, Serialize, Deserialize, Clone, Copy, Debug)] 13 | pub struct Root { 14 | #[serde(skip_serializing_if = "Option::is_none")] 15 | pub users: Option, 16 | #[serde(skip_serializing_if = "Option::is_none")] 17 | pub mailbox: Option, 18 | #[serde(skip_serializing_if = "Option::is_none")] 19 | pub conversation_mailbox: Option, 20 | } 21 | 22 | #[derive(Debug)] 23 | struct RootInner { 24 | root: Root, 25 | cid: Option, 26 | } 27 | 28 | #[derive(Debug, Clone)] 29 | pub struct RootStorage { 30 | ipfs: Ipfs, 31 | inner: Arc>, 32 | } 33 | 34 | impl RootStorage { 35 | pub async fn new(ipfs: &Ipfs) -> Self { 36 | let root_cid = ipfs 37 | .repo() 38 | 
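// look up any previously persisted root CID: `save` (further down in this file)
// stores it as a UTF-8 CID string under ROOT_KEY in the repo's datastore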
.data_store() 39 | .get(ROOT_KEY.as_bytes()) 40 | .await 41 | .unwrap_or_default() 42 | .map(|bytes| String::from_utf8_lossy(&bytes).to_string()) 43 | .and_then(|cid_str| cid_str.parse().ok()); 44 | 45 | // let root_cid = ipfs 46 | // .ipns() 47 | // .resolve(&IpfsPath::from(peer_id)) 48 | // .await 49 | // .map(|path| path.root().cid().copied()) 50 | // .ok() 51 | // .flatten(); 52 | 53 | let root = futures::future::ready(root_cid.ok_or(anyhow::anyhow!("error"))) 54 | .and_then(|cid| async move { ipfs.get_dag(cid).local().deserialized::().await }) 55 | .await 56 | .unwrap_or_default(); 57 | 58 | let inner = RootInner { 59 | root, 60 | cid: root_cid, 61 | }; 62 | 63 | Self { 64 | ipfs: ipfs.clone(), 65 | inner: Arc::new(RwLock::new(inner)), 66 | } 67 | } 68 | 69 | pub async fn set_user_documents(&self, cid: Cid) -> Result<(), Error> { 70 | let inner = &mut *self.inner.write().await; 71 | inner.set_user_documents(&self.ipfs, cid).await 72 | } 73 | 74 | pub async fn set_mailbox(&self, cid: Cid) -> Result<(), Error> { 75 | let inner = &mut *self.inner.write().await; 76 | inner.set_mailbox(&self.ipfs, cid).await 77 | } 78 | 79 | pub async fn set_conversation_mailbox(&self, cid: Cid) -> Result<(), Error> { 80 | let inner: &mut RootInner = &mut *self.inner.write().await; 81 | inner.set_conversation_mailbox(&self.ipfs, cid).await 82 | } 83 | 84 | pub async fn get_root(&self) -> Root { 85 | let inner = &*self.inner.read().await; 86 | inner.root 87 | } 88 | } 89 | 90 | impl RootInner { 91 | async fn set_user_documents(&mut self, ipfs: &Ipfs, cid: Cid) -> Result<(), Error> { 92 | self.root.users.replace(cid); 93 | tracing::debug!(%cid, "package set"); 94 | self.save(ipfs).await?; 95 | Ok(()) 96 | } 97 | 98 | async fn set_mailbox(&mut self, ipfs: &Ipfs, cid: Cid) -> Result<(), Error> { 99 | self.root.mailbox.replace(cid); 100 | tracing::debug!(%cid, "mailbox set"); 101 | self.save(ipfs).await?; 102 | //TODO: Broadcast root document to nodes 103 | Ok(()) 104 | } 105 | 106 | async fn set_conversation_mailbox(&mut self, ipfs: &Ipfs, cid: Cid) -> Result<(), Error> { 107 | self.root.conversation_mailbox.replace(cid); 108 | tracing::debug!(%cid, "conversation mailbox set"); 109 | self.save(ipfs).await?; 110 | //TODO: Broadcast root document to nodes 111 | Ok(()) 112 | } 113 | 114 | async fn save(&mut self, ipfs: &Ipfs) -> std::io::Result<()> { 115 | let cid = ipfs 116 | .put_dag(self.root) 117 | .pin(false) 118 | .await 119 | .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e))?; 120 | 121 | tracing::info!(cid = %cid, "storing root"); 122 | 123 | let old_cid = self.cid.replace(cid); 124 | 125 | if let Some(old_cid) = old_cid { 126 | if old_cid != cid && ipfs.is_pinned(&old_cid).await.unwrap_or_default() { 127 | tracing::debug!(cid = %old_cid, "unpinning root block"); 128 | _ = ipfs.remove_pin(old_cid).await; 129 | } 130 | } 131 | 132 | let cid_str = cid.to_string(); 133 | 134 | if let Err(e) = ipfs 135 | .repo() 136 | .data_store() 137 | .put(ROOT_KEY.as_bytes(), cid_str.as_bytes()) 138 | .await 139 | { 140 | tracing::error!(error = %e, "unable to store root cid"); 141 | } 142 | 143 | tracing::info!(cid = %cid, "root is stored"); 144 | Ok(()) 145 | } 146 | } 147 | -------------------------------------------------------------------------------- /extensions/warp-ipfs/src/shuttle/subscription_stream.rs: -------------------------------------------------------------------------------- 1 | use futures::SinkExt; 2 | use futures::StreamExt; 3 | use pollable_map::stream::StreamMap; 4 | use 
rust_ipfs::{Ipfs, SubscriptionStream}; 5 | 6 | use super::store::{identity::IdentityStorage, messages::MessageStorage}; 7 | use crate::store::topics::{ConversationTopic, PeerTopic}; 8 | #[derive(Clone)] 9 | pub struct Subscriptions { 10 | tx: futures::channel::mpsc::Sender, 11 | } 12 | 13 | impl Subscriptions { 14 | pub fn new(ipfs: &Ipfs, identity: &IdentityStorage, message: &MessageStorage) -> Self { 15 | let (tx, rx) = futures::channel::mpsc::channel(1); 16 | 17 | let mut task = SubscriptionTask { 18 | ipfs: ipfs.clone(), 19 | select_stream: StreamMap::default(), 20 | rx, 21 | }; 22 | 23 | let identity = identity.clone(); 24 | let message = message.clone(); 25 | tokio::spawn(async move { 26 | { 27 | let mut list = identity.list().await; 28 | let mut conversations = message.list_conversations().await.boxed(); 29 | 30 | while let Some(id) = list.next().await { 31 | _ = task.subscribe(id.did.inbox()).await; 32 | _ = task.subscribe(id.did.messaging()).await; 33 | } 34 | 35 | while let Some(id) = conversations.next().await { 36 | _ = task.subscribe(id.base()).await; 37 | _ = task.subscribe(id.event_topic()).await; 38 | } 39 | } 40 | 41 | task.run().await 42 | }); 43 | 44 | Self { tx } 45 | } 46 | 47 | pub async fn subscribe(&mut self, topic: String) -> anyhow::Result<()> { 48 | let (tx, rx) = futures::channel::oneshot::channel(); 49 | 50 | _ = self 51 | .tx 52 | .send(SubscriptionCommand::Subscribe { 53 | topic, 54 | response: tx, 55 | }) 56 | .await; 57 | 58 | rx.await? 59 | } 60 | 61 | pub async fn unsubscribe(&mut self, topic: String) -> anyhow::Result<()> { 62 | let (tx, rx) = futures::channel::oneshot::channel(); 63 | 64 | _ = self 65 | .tx 66 | .send(SubscriptionCommand::Unsubscribe { 67 | topic, 68 | response: tx, 69 | }) 70 | .await; 71 | 72 | rx.await? 73 | } 74 | } 75 | 76 | struct SubscriptionTask { 77 | ipfs: Ipfs, 78 | select_stream: StreamMap, 79 | rx: futures::channel::mpsc::Receiver, 80 | } 81 | 82 | impl SubscriptionTask { 83 | async fn run(&mut self) { 84 | loop { 85 | tokio::select! 
{ 86 | //Poll all streams so the internal channels can be flushed out without 87 | //stopping those subscribed streams 88 | _ = self.select_stream.next() => {}, 89 | Some(command) = self.rx.next() => { 90 | match command { 91 | SubscriptionCommand::Subscribe { topic, response } => { 92 | _ = response.send(self.subscribe(topic).await); 93 | }, 94 | SubscriptionCommand::Unsubscribe { topic, response } => { 95 | _ = response.send(self.unsubscribe(topic).await); 96 | }, 97 | } 98 | } 99 | } 100 | } 101 | } 102 | 103 | async fn subscribe(&mut self, topic: String) -> Result<(), anyhow::Error> { 104 | let stream = self.ipfs.pubsub_subscribe(topic.clone()).await?; 105 | self.select_stream.insert(topic, stream); 106 | Ok(()) 107 | } 108 | 109 | async fn unsubscribe(&mut self, topic: String) -> Result<(), anyhow::Error> { 110 | self.ipfs.pubsub_unsubscribe(&topic).await?; 111 | self.select_stream.remove(&topic); 112 | Ok(()) 113 | } 114 | } 115 | 116 | enum SubscriptionCommand { 117 | Subscribe { 118 | topic: String, 119 | response: futures::channel::oneshot::Sender>, 120 | }, 121 | Unsubscribe { 122 | topic: String, 123 | response: futures::channel::oneshot::Sender>, 124 | }, 125 | } 126 | -------------------------------------------------------------------------------- /extensions/warp-ipfs/src/store/document/image_dag.rs: -------------------------------------------------------------------------------- 1 | use futures::StreamExt; 2 | use ipld_core::cid::Cid; 3 | use rust_ipfs::{Ipfs, PeerId}; 4 | use serde::{Deserialize, Serialize}; 5 | use warp::{constellation::file::FileType, error::Error, multipass::identity::IdentityImage}; 6 | 7 | #[derive(Deserialize, Serialize, Debug, Clone)] 8 | pub struct ImageDag { 9 | pub link: Cid, 10 | pub size: u64, 11 | pub mime: FileType, 12 | } 13 | 14 | #[tracing::instrument(skip(ipfs, opt))] 15 | pub async fn store_photo( 16 | ipfs: &Ipfs, 17 | opt: impl Into, 18 | file_type: FileType, 19 | limit: Option, 20 | ) -> Result { 21 | let mut stream = ipfs.add_unixfs(opt); 22 | 23 | let (cid, size) = loop { 24 | let status = stream.next().await.ok_or(Error::Other)?; 25 | 26 | match status { 27 | rust_ipfs::unixfs::UnixfsStatus::ProgressStatus { written, .. } => { 28 | if let Some(limit) = limit { 29 | if written > limit { 30 | return Err(Error::InvalidLength { 31 | context: "photo".into(), 32 | current: written, 33 | minimum: Some(1), 34 | maximum: Some(limit), 35 | }); 36 | } 37 | } 38 | tracing::trace!("{written} bytes written"); 39 | } 40 | rust_ipfs::unixfs::UnixfsStatus::CompletedStatus { path, written, .. } => { 41 | tracing::debug!("Image is written with {written} bytes - stored at {path}"); 42 | let cid = path.root().cid().copied().ok_or(Error::Other)?; 43 | break (cid, written); 44 | } 45 | rust_ipfs::unixfs::UnixfsStatus::FailedStatus { written, error, .. 
} => { 46 | tracing::error!( 47 | "Error uploading picture with {written} bytes written with error: {error}" 48 | ); 49 | 50 | return Err(error.into()); 51 | } 52 | } 53 | }; 54 | 55 | let dag = ImageDag { 56 | link: cid, 57 | size: size as _, 58 | mime: file_type, 59 | }; 60 | 61 | let cid = ipfs.put_dag(dag).pin(true).await?; 62 | 63 | Ok(cid) 64 | } 65 | 66 | #[tracing::instrument(skip(ipfs))] 67 | pub async fn get_image( 68 | ipfs: &Ipfs, 69 | cid: Cid, 70 | peers: &[PeerId], 71 | local: bool, 72 | limit: Option, 73 | ) -> Result { 74 | let dag: ImageDag = ipfs.get_dag(cid).set_local(local).deserialized().await?; 75 | 76 | if matches!(limit, Some(size) if dag.size > size as _ ) { 77 | return Err(Error::InvalidLength { 78 | context: "image".into(), 79 | current: dag.size as _, 80 | minimum: None, 81 | maximum: limit, 82 | }); 83 | } 84 | 85 | let size = limit.unwrap_or(dag.size as _); 86 | 87 | let image = ipfs 88 | .cat_unixfs(dag.link) 89 | // we apply the limit in the event the stream of bytes exceeds the explicit limit 90 | // which may be the define sized if `limit` is `Some(_)` or no greater 91 | // than ImageDag::size, which by usage a limit is usually set as we set a hard limit for 92 | // profile images and banners to 2MB 93 | .max_length(size) 94 | .providers(peers) 95 | .set_local(local) 96 | .await 97 | .map_err(anyhow::Error::from)?; 98 | 99 | let mut id_img = IdentityImage::default(); 100 | 101 | id_img.set_data(image); 102 | id_img.set_image_type(dag.mime); 103 | 104 | Ok(id_img) 105 | } 106 | -------------------------------------------------------------------------------- /extensions/warp-ipfs/src/store/phonebook.rs: -------------------------------------------------------------------------------- 1 | use futures::channel::oneshot; 2 | use futures::SinkExt; 3 | 4 | use warp::crypto::DID; 5 | use warp::error::Error; 6 | 7 | use crate::behaviour::phonebook::PhoneBookCommand; 8 | 9 | use super::discovery::Discovery; 10 | use super::DidExt; 11 | 12 | /// Used to handle friends connectivity status 13 | #[derive(Clone)] 14 | pub struct PhoneBook { 15 | discovery: Discovery, 16 | pb_tx: futures::channel::mpsc::Sender, 17 | } 18 | 19 | impl PhoneBook { 20 | pub fn new( 21 | discovery: Discovery, 22 | pb_tx: futures::channel::mpsc::Sender, 23 | ) -> Self { 24 | PhoneBook { discovery, pb_tx } 25 | } 26 | 27 | pub async fn add_friend_list(&self, list: &[DID]) -> Result<(), Error> { 28 | for friend in list { 29 | self.add_friend(friend).await?; 30 | } 31 | Ok(()) 32 | } 33 | 34 | pub async fn add_friend(&self, did: &DID) -> Result<(), Error> { 35 | if !self.discovery.contains(did).await { 36 | self.discovery.insert(did).await?; 37 | } 38 | 39 | let peer_id = did.to_peer_id()?; 40 | 41 | let (tx, rx) = oneshot::channel(); 42 | 43 | let _ = self 44 | .pb_tx 45 | .clone() 46 | .send(PhoneBookCommand::AddEntry { 47 | peer_id, 48 | response: tx, 49 | }) 50 | .await; 51 | 52 | rx.await.map_err(|_| Error::Other)? 53 | } 54 | 55 | pub async fn remove_friend(&self, did: &DID) -> Result<(), Error> { 56 | let peer_id = did.to_peer_id()?; 57 | 58 | let (tx, rx) = oneshot::channel(); 59 | 60 | let _ = self 61 | .pb_tx 62 | .clone() 63 | .send(PhoneBookCommand::RemoveEntry { 64 | peer_id, 65 | response: tx, 66 | }) 67 | .await; 68 | 69 | rx.await.map_err(|_| Error::Other)? 
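// as in add_friend above, the oneshot only fails if the phonebook behaviour
// dropped the response sender, which is surfaced as a generic Error::Other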
70 | } 71 | } 72 | -------------------------------------------------------------------------------- /tools/audio-codec-repl/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "audio-codec-repl" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [dependencies] 9 | bs1770 = "1.0.0" 10 | clap = { version = "4.0", features = ["derive"] } 11 | cpal = "0.15.0" 12 | log = { version = "0.4.17", features = ["std"]} 13 | mp4 = { workspace = true } 14 | opus = { workspace = true } 15 | ringbuf = "0.3.3" 16 | simple_logger = "4.1.0" 17 | webrtc = "0.6.0" 18 | 19 | anyhow.workspace = true 20 | bytes.workspace = true 21 | once_cell.workspace = true 22 | rand.workspace = true 23 | serde.workspace = true 24 | tokio = { workspace = true } 25 | -------------------------------------------------------------------------------- /tools/audio-codec-repl/src/feedback.rs: -------------------------------------------------------------------------------- 1 | use cpal::{ 2 | traits::{DeviceTrait, HostTrait, StreamTrait}, 3 | SampleRate, 4 | }; 5 | use ringbuf::HeapRb; 6 | 7 | use crate::{err_fn, StaticArgs}; 8 | 9 | // taken from here: https://github.com/RustAudio/cpal/blob/master/examples/feedback.rs 10 | pub async fn feedback(args: StaticArgs) -> anyhow::Result<()> { 11 | let host = cpal::default_host(); 12 | let latency = 1000.0; 13 | 14 | // Find devices. 15 | let input_device = host.default_input_device().unwrap(); 16 | 17 | let output_device = host.default_output_device().unwrap(); 18 | 19 | println!("Using input device: \"{}\"", input_device.name()?); 20 | println!("Using output device: \"{}\"", output_device.name()?); 21 | 22 | // We'll try and use the same configuration between streams to keep it simple. 23 | let config: cpal::StreamConfig = cpal::StreamConfig { 24 | channels: 1, 25 | sample_rate: SampleRate(args.sample_rate), 26 | buffer_size: cpal::BufferSize::Default, 27 | }; 28 | 29 | // Create a delay in case the input and output devices aren't synced. 30 | let latency_frames = (latency / 1_000.0) * config.sample_rate.0 as f32; 31 | let latency_samples = latency_frames as usize * config.channels as usize; 32 | 33 | // The buffer to share samples 34 | let ring = HeapRb::::new(latency_samples * 2); 35 | let (mut producer, mut consumer) = ring.split(); 36 | 37 | // Fill the samples with 0.0 equal to the length of the delay. 38 | for _ in 0..latency_samples { 39 | // The ring buffer has twice as much space as necessary to add latency here, 40 | // so this should never fail 41 | producer.push(0.0).unwrap(); 42 | } 43 | 44 | let input_data_fn = move |data: &[f32], _: &cpal::InputCallbackInfo| { 45 | let mut output_fell_behind = false; 46 | for &sample in data { 47 | if producer.push(sample).is_err() { 48 | output_fell_behind = true; 49 | } 50 | } 51 | if output_fell_behind { 52 | eprintln!("output stream fell behind: try increasing latency"); 53 | } 54 | }; 55 | 56 | let output_data_fn = move |data: &mut [f32], _: &cpal::OutputCallbackInfo| { 57 | let mut input_fell_behind = false; 58 | for sample in data { 59 | *sample = match consumer.pop() { 60 | Some(s) => s, 61 | None => { 62 | input_fell_behind = true; 63 | 0.0 64 | } 65 | }; 66 | } 67 | if input_fell_behind { 68 | eprintln!("input stream fell behind: try increasing latency"); 69 | } 70 | }; 71 | 72 | // Build streams. 
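// (each callback above owns one half of the ring buffer; once the streams are
// playing, cpal invokes the callbacks on its own audio threads to move samples)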
73 | println!( 74 | "Attempting to build both streams with f32 samples and `{:?}`.", 75 | config 76 | ); 77 | let input_stream = input_device.build_input_stream(&config, input_data_fn, err_fn, None)?; 78 | let output_stream = output_device.build_output_stream(&config, output_data_fn, err_fn, None)?; 79 | println!("Successfully built streams."); 80 | 81 | // Play the streams. 82 | println!( 83 | "Starting the input and output streams with `{}` milliseconds of latency.", 84 | latency 85 | ); 86 | input_stream.play()?; 87 | output_stream.play()?; 88 | 89 | // Run for 3 seconds before closing. 90 | println!("Playing for 3 seconds... "); 91 | tokio::time::sleep(std::time::Duration::from_secs(3)).await; 92 | drop(input_stream); 93 | drop(output_stream); 94 | println!("Done!"); 95 | Ok(()) 96 | } 97 | -------------------------------------------------------------------------------- /tools/audio-codec-repl/src/loudness.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | fs::File, 3 | io::{Read, Write}, 4 | mem, 5 | ops::Div, 6 | slice, 7 | }; 8 | 9 | use crate::StaticArgs; 10 | 11 | pub fn calculate_loudness_bs177( 12 | args: StaticArgs, 13 | input_file_name: &str, 14 | output_file_name: &str, 15 | ) -> anyhow::Result<()> { 16 | let mut input_file = File::open(input_file_name)?; 17 | let mut output_file = File::create(output_file_name)?; 18 | let mut loudness_meter = bs1770::ChannelLoudnessMeter::new(args.sample_rate); 19 | let header = "loudness\n"; 20 | let _ = output_file.write(header.as_bytes())?; 21 | let mut buf = [0_f32; 48000]; 22 | let bp: *mut u8 = buf.as_mut_ptr() as _; 23 | let bs: &mut [u8] = unsafe { slice::from_raw_parts_mut(bp, mem::size_of::() * buf.len()) }; 24 | while let Ok(len) = input_file.read(bs) { 25 | if len == 0 { 26 | break; 27 | } 28 | let num_samples = len / mem::size_of::(); 29 | loudness_meter.push(buf[0..num_samples].iter().cloned()); 30 | let loudness = loudness_meter.as_100ms_windows().inner.iter().last(); 31 | if loudness.is_some() { 32 | for sample in loudness_meter.as_100ms_windows().inner { 33 | let s = format!("{}\n", sample.loudness_lkfs()); 34 | let _ = output_file.write(s.as_bytes())?; 35 | } 36 | // reset the algorithm 37 | loudness_meter = bs1770::ChannelLoudnessMeter::new(args.sample_rate); 38 | } 39 | } 40 | 41 | println!("finished calculating loudness"); 42 | Ok(()) 43 | } 44 | 45 | pub fn calculate_loudness_rms(input_file_name: &str, output_file_name: &str) -> anyhow::Result<()> { 46 | let mut input_file = File::open(input_file_name)?; 47 | let mut output_file = File::create(output_file_name)?; 48 | let header = "loudness\n"; 49 | let _ = output_file.write(header.as_bytes())?; 50 | // 100 ms samples, for easy comparison with bs177 51 | let mut buf = [0_f32; 4800]; 52 | let bp: *mut u8 = buf.as_mut_ptr() as _; 53 | let bs: &mut [u8] = unsafe { slice::from_raw_parts_mut(bp, mem::size_of::() * buf.len()) }; 54 | while let Ok(len) = input_file.read(bs) { 55 | if len == 0 { 56 | break; 57 | } 58 | let num_samples = len / mem::size_of::(); 59 | let mut sum = 0_f32; 60 | for sample in buf[0..num_samples].iter() { 61 | let sq = sample * sample; 62 | sum += sq; 63 | } 64 | sum = sum.div(num_samples as f32); 65 | sum = f32::sqrt(sum); 66 | let s = format!("{}\n", sum); 67 | let _ = output_file.write(s.as_bytes())?; 68 | } 69 | 70 | println!("finished calculating loudness"); 71 | Ok(()) 72 | } 73 | 74 | // reminds me of C code 75 | struct LoudnessCalculator { 76 | buf: [f32; 4800], 77 | // sum of squares 78 | 
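// of the 4800 most recent squared samples held in `buf` (a ~100 ms window at
// 48 kHz), updated incrementally by `insert` below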
ss: f32, 79 | idx: usize, 80 | } 81 | 82 | impl LoudnessCalculator { 83 | fn new() -> Self { 84 | Self { 85 | buf: [0.0; 4800], 86 | ss: 0.0, 87 | idx: 0, 88 | } 89 | } 90 | fn insert(&mut self, sample: f32) { 91 | let sq = sample.powf(2.0); 92 | self.ss += sq; 93 | self.ss -= self.buf[self.idx]; 94 | self.buf[self.idx] = sq; 95 | self.idx = (self.idx + 1) % self.buf.len(); 96 | } 97 | 98 | fn get_rms(&self) -> f32 { 99 | f32::sqrt(self.ss.div(self.buf.len() as f32)) 100 | } 101 | } 102 | 103 | pub fn calculate_loudness_rms2( 104 | input_file_name: &str, 105 | output_file_name: &str, 106 | ) -> anyhow::Result<()> { 107 | let mut input_file = File::open(input_file_name)?; 108 | let mut output_file = File::create(output_file_name)?; 109 | let mut loudness_calculator = LoudnessCalculator::new(); 110 | let header = "loudness\n"; 111 | let _ = output_file.write(header.as_bytes())?; 112 | // 100 ms samples, for easy comparison with bs177 113 | let mut buf = [0_f32; 4800]; 114 | let bp: *mut u8 = buf.as_mut_ptr() as _; 115 | let bs: &mut [u8] = unsafe { slice::from_raw_parts_mut(bp, mem::size_of::<f32>() * buf.len()) }; 116 | while let Ok(len) = input_file.read(bs) { 117 | if len == 0 { 118 | break; 119 | } 120 | let num_samples = len / mem::size_of::<f32>(); 121 | for sample in buf[0..num_samples].iter() { 122 | loudness_calculator.insert(*sample); 123 | } 124 | let s = format!("{}\n", loudness_calculator.get_rms()); 125 | let _ = output_file.write(s.as_bytes())?; 126 | } 127 | 128 | println!("finished calculating loudness"); 129 | Ok(()) 130 | } 131 | -------------------------------------------------------------------------------- /tools/audio-codec-repl/src/packetizer.rs: -------------------------------------------------------------------------------- 1 | // opus::Encoder has separate functions for i16 and f32 2 | // want to use the same struct for both functions. will do some unsafe stuff to accomplish this.
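A minimal sketch of how this packetizer is typically driven, assuming 48 kHz mono input and a 10 ms frame (480 samples); the `encode_stream` helper name and the 1500-byte scratch buffer are illustrative choices for the sketch.

```rust
// Feed samples one at a time to the OpusPacketizer defined just below and
// collect an encoded packet whenever a full frame has accumulated.
fn encode_stream(samples: &[f32]) -> anyhow::Result<Vec<Vec<u8>>> {
    let mut packetizer = OpusPacketizer::init(480, 48000, opus::Channels::Mono)?;
    let mut scratch = vec![0u8; 1500]; // room for one encoded Opus packet
    let mut packets = Vec::new();
    for &sample in samples {
        // packetize_f32 returns Ok(None) until frame_size samples have been buffered.
        if let Some(len) = packetizer.packetize_f32(sample, &mut scratch)? {
            packets.push(scratch[..len].to_vec());
        }
    }
    Ok(packets)
}
```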
3 | pub struct OpusPacketizer { 4 | // encodes groups of samples (frames) 5 | encoder: opus::Encoder, 6 | float_samples: Vec, 7 | _int_samples: Vec, 8 | // number of samples in a frame 9 | frame_size: usize, 10 | } 11 | 12 | impl OpusPacketizer { 13 | pub fn init( 14 | frame_size: usize, 15 | sample_rate: u32, 16 | channels: opus::Channels, 17 | ) -> anyhow::Result { 18 | let encoder = 19 | opus::Encoder::new(sample_rate, channels, opus::Application::Voip).map_err(|e| { 20 | anyhow::anyhow!("{e}: sample_rate: {sample_rate}, channels: {channels:?}") 21 | })?; 22 | 23 | Ok(Self { 24 | encoder, 25 | float_samples: vec![], 26 | _int_samples: vec![], 27 | frame_size, 28 | }) 29 | } 30 | 31 | pub fn _packetize_i16(&mut self, sample: i16, out: &mut [u8]) -> anyhow::Result> { 32 | self._int_samples.push(sample); 33 | if self._int_samples.len() == self.frame_size { 34 | match self.encoder.encode(self._int_samples.as_slice(), out) { 35 | Ok(size) => { 36 | self._int_samples.clear(); 37 | Ok(Some(size)) 38 | } 39 | Err(e) => anyhow::bail!("failed to encode: {e}"), 40 | } 41 | } else { 42 | Ok(None) 43 | } 44 | } 45 | 46 | pub fn packetize_f32(&mut self, sample: f32, out: &mut [u8]) -> anyhow::Result> { 47 | self.float_samples.push(sample); 48 | if self.float_samples.len() == self.frame_size { 49 | match self 50 | .encoder 51 | .encode_float(self.float_samples.as_slice(), out) 52 | { 53 | Ok(size) => { 54 | self.float_samples.clear(); 55 | Ok(Some(size)) 56 | } 57 | Err(e) => anyhow::bail!("failed to encode: {e}"), 58 | } 59 | } else { 60 | Ok(None) 61 | } 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /tools/audio-codec-repl/src/play.rs: -------------------------------------------------------------------------------- 1 | use std::{fs::File, io::Read, time::Duration}; 2 | 3 | use cpal::{ 4 | traits::{DeviceTrait, HostTrait, StreamTrait}, 5 | SampleRate, 6 | }; 7 | 8 | use crate::{err_fn, StaticArgs, AUDIO_FILE_NAME}; 9 | 10 | static mut AUDIO_FILE: Option = None; 11 | 12 | pub async fn play_f32(args: StaticArgs, sample_rate: Option) -> anyhow::Result<()> { 13 | unsafe { 14 | AUDIO_FILE = Some(File::open(AUDIO_FILE_NAME.as_str())?); 15 | } 16 | let sample_rate = sample_rate.unwrap_or(args.sample_rate); 17 | let duration_secs = args.audio_duration_secs; 18 | let total_samples = sample_rate as usize * (duration_secs + 1); 19 | let mut decoded_samples: Vec = Vec::new(); 20 | decoded_samples.resize(total_samples, 0_f32); 21 | 22 | let config = cpal::StreamConfig { 23 | channels: args.channels, 24 | sample_rate: SampleRate(sample_rate), 25 | buffer_size: cpal::BufferSize::Default, 26 | }; 27 | 28 | let output_data_fn = move |data: &mut [f32], _: &cpal::OutputCallbackInfo| { 29 | for sample in data { 30 | let mut buf: [u8; 4] = [0; 4]; 31 | unsafe { 32 | if let Some(mut f) = AUDIO_FILE.as_ref() { 33 | match f.read(&mut buf) { 34 | Ok(size) => { 35 | if size == 0 { 36 | return; 37 | } 38 | assert_eq!(size, 4); 39 | } 40 | Err(e) => { 41 | log::error!("failed to read from file: {e}"); 42 | } 43 | } 44 | } 45 | } 46 | let p: *const f32 = buf.as_ptr() as _; 47 | unsafe { 48 | *sample = *p; 49 | } 50 | } 51 | }; 52 | let output_stream = cpal::default_host() 53 | .default_output_device() 54 | .ok_or(anyhow::anyhow!("no output device"))? 
55 | .build_output_stream(&config, output_data_fn, err_fn, None)?; 56 | 57 | output_stream.play()?; 58 | tokio::time::sleep(Duration::from_secs(duration_secs as u64)).await; 59 | println!("finished playing audio"); 60 | Ok(()) 61 | } 62 | -------------------------------------------------------------------------------- /tools/audio-codec-repl/src/record.rs: -------------------------------------------------------------------------------- 1 | use std::{fs::File, io::Write, mem, slice, time::Duration}; 2 | 3 | use cpal::{ 4 | traits::{DeviceTrait, HostTrait, StreamTrait}, 5 | SampleRate, 6 | }; 7 | 8 | use crate::{err_fn, StaticArgs, AUDIO_FILE_NAME}; 9 | 10 | // needs to be static for a callback 11 | static mut AUDIO_FILE: Option = None; 12 | 13 | pub async fn raw_f32(args: StaticArgs) -> anyhow::Result<()> { 14 | let duration_secs = args.audio_duration_secs; 15 | 16 | unsafe { 17 | AUDIO_FILE = Some(File::create(AUDIO_FILE_NAME.as_str())?); 18 | } 19 | let config = cpal::StreamConfig { 20 | channels: args.channels, 21 | sample_rate: SampleRate(args.sample_rate), 22 | buffer_size: cpal::BufferSize::Default, 23 | }; 24 | 25 | // batch audio samples into a Packetizer, encode them via packetize(), and write the bytes to a global variable. 26 | let input_data_fn = move |data: &[f32], _: &cpal::InputCallbackInfo| { 27 | for sample in data { 28 | let arr = [*sample]; 29 | let p: *const u8 = arr.as_ptr() as _; 30 | let bs: &[u8] = unsafe { slice::from_raw_parts(p, mem::size_of::()) }; 31 | unsafe { 32 | if let Some(mut f) = AUDIO_FILE.as_ref() { 33 | if let Err(e) = f.write(bs) { 34 | log::error!("failed to write bytes to file: {e}"); 35 | } 36 | } 37 | } 38 | } 39 | }; 40 | let input_stream = cpal::default_host() 41 | .default_input_device() 42 | .ok_or(anyhow::anyhow!("no input device"))? 
43 | .build_input_stream(&config, input_data_fn, err_fn, None) 44 | .map_err(|e| { 45 | anyhow::anyhow!( 46 | "failed to build input stream: {e}, {}, {}", 47 | file!(), 48 | line!() 49 | ) 50 | })?; 51 | 52 | input_stream.play()?; 53 | tokio::time::sleep(Duration::from_secs(duration_secs as u64)).await; 54 | input_stream.pause()?; 55 | unsafe { 56 | if let Some(f) = AUDIO_FILE.as_ref() { 57 | f.sync_all()?; 58 | } 59 | } 60 | println!("finished recording audio"); 61 | Ok(()) 62 | } 63 | -------------------------------------------------------------------------------- /tools/blink-repl/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "blink-repl" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [dependencies] 9 | fdlimit = "0.2" 10 | clap = { version = "4.0", features = ["derive"] } 11 | cpal = "0.15.0" 12 | log = { version = "0.4.17", features = ["std"]} 13 | warp = { path = "../../warp" } 14 | warp-blink-wrtc = { path = "../../extensions/warp-blink-wrtc" } 15 | warp-ipfs = { path = "../../extensions/warp-ipfs" } 16 | 17 | uuid = { workspace = true, features = ["serde", "v4"] } 18 | tokio = { workspace = true } 19 | 20 | rand= { workspace = true } 21 | 22 | 23 | anyhow.workspace = true 24 | serde.workspace = true 25 | futures.workspace = true 26 | once_cell.workspace = true 27 | 28 | -------------------------------------------------------------------------------- /tools/blink-repl/src/logger.rs: -------------------------------------------------------------------------------- 1 | use log::{LevelFilter, SetLoggerError}; 2 | 3 | pub struct Logger { 4 | max_level: LevelFilter, 5 | } 6 | 7 | pub fn init_with_level(level: LevelFilter) -> Result<(), SetLoggerError> { 8 | log::set_max_level(level); 9 | log::set_boxed_logger(Box::new(Logger::new(level)))?; 10 | Ok(()) 11 | } 12 | 13 | impl Logger { 14 | pub fn new(max_level: LevelFilter) -> Self { 15 | Self { max_level } 16 | } 17 | } 18 | 19 | impl log::Log for Logger { 20 | fn enabled(&self, metadata: &log::Metadata) -> bool { 21 | metadata.level() <= self.max_level 22 | } 23 | 24 | fn log(&self, record: &log::Record) { 25 | if !self.enabled(record.metadata()) { 26 | return; 27 | } 28 | 29 | let should_log = record 30 | .file() 31 | .map(|x| x.contains("blink-w")) 32 | .unwrap_or(false); 33 | if !should_log { 34 | return; 35 | } 36 | 37 | let msg = format!("{}", record.args()); 38 | println!("{msg}"); 39 | } 40 | 41 | fn flush(&self) {} 42 | } 43 | -------------------------------------------------------------------------------- /tools/fs/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "fs" 3 | version.workspace = true 4 | edition.workspace = true 5 | license.workspace = true 6 | rust-version.workspace = true 7 | repository.workspace = true 8 | 9 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 10 | 11 | [dependencies] 12 | 13 | 14 | [target.'cfg(not(target_arch = "wasm32"))'.dependencies] 15 | tokio = { workspace = true } 16 | 17 | [target.'cfg(target_arch = "wasm32")'.dependencies] 18 | gloo = "0.7" -------------------------------------------------------------------------------- /tools/fs/README.md: -------------------------------------------------------------------------------- 1 | # fs 2 | 3 | This crate provides functions to interact with the filesystem 
in a cross platform way. On `wasm32` targets it uses `LocalStorage`, otherwise it uses `tokio::fs` 4 | -------------------------------------------------------------------------------- /tools/fs/src/lib.rs: -------------------------------------------------------------------------------- 1 | use std::{io, path::Path}; 2 | 3 | #[cfg(target_arch = "wasm32")] 4 | use gloo::storage::{LocalStorage, Storage}; 5 | 6 | /// Read the contents of the file at path 7 | pub async fn read(path: impl AsRef) -> io::Result> { 8 | #[cfg(not(target_arch = "wasm32"))] 9 | return tokio::fs::read(path).await; 10 | 11 | #[cfg(target_arch = "wasm32")] 12 | { 13 | let path = path.as_ref().to_str().ok_or(io::ErrorKind::InvalidInput)?; 14 | match LocalStorage::get(&path) { 15 | Ok(ok) => Ok(ok), 16 | Err(e) => Err(io::Error::other(e)), 17 | } 18 | } 19 | } 20 | 21 | /// Write the contents of the file at path 22 | pub async fn write(path: impl AsRef, contents: impl AsRef<[u8]>) -> io::Result<()> { 23 | #[cfg(not(target_arch = "wasm32"))] 24 | return tokio::fs::write(path, contents).await; 25 | 26 | #[cfg(target_arch = "wasm32")] 27 | { 28 | let path = path.as_ref().to_str().ok_or(io::ErrorKind::InvalidInput)?; 29 | match LocalStorage::set(path, contents.as_ref().to_owned()) { 30 | Ok(_) => Ok(()), 31 | Err(e) => Err(io::Error::other(e)), 32 | } 33 | } 34 | } 35 | 36 | /// Create all directories in path 37 | pub async fn create_dir_all(path: impl AsRef) -> io::Result<()> { 38 | #[cfg(not(target_arch = "wasm32"))] 39 | return tokio::fs::create_dir_all(path).await; 40 | 41 | //Dirs don't need to be created in wasm since we are using the path as a key in LocalStorage 42 | #[cfg(target_arch = "wasm32")] 43 | { 44 | _ = path; 45 | Ok(()) 46 | } 47 | } 48 | 49 | /// Delete the file at path 50 | pub async fn remove_file(path: impl AsRef) -> io::Result<()> { 51 | #[cfg(not(target_arch = "wasm32"))] 52 | return tokio::fs::remove_file(path).await; 53 | 54 | #[cfg(target_arch = "wasm32")] 55 | { 56 | let path = path.as_ref().to_str().ok_or(io::ErrorKind::InvalidInput)?; 57 | LocalStorage::delete(path); 58 | Ok(()) 59 | } 60 | } 61 | 62 | /// Get the size of the file at path 63 | pub async fn file_size(path: impl AsRef) -> io::Result { 64 | #[cfg(not(target_arch = "wasm32"))] 65 | return Ok(tokio::fs::metadata(&path).await?.len() as usize); 66 | 67 | #[cfg(target_arch = "wasm32")] 68 | Ok(read(path).await?.len()) 69 | } 70 | -------------------------------------------------------------------------------- /tools/inspect/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "inspect" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [dependencies] 9 | fdlimit = "0.2" 10 | comfy-table = "6.1" 11 | clap = { version = "4.0", features = ["derive"] } 12 | rpassword = "7.2" 13 | warp = { path = "../../warp" } 14 | warp-ipfs = { path = "../../extensions/warp-ipfs" } 15 | 16 | uuid = { workspace = true, features = ["serde", "v4"] } 17 | tokio = { workspace = true } 18 | tokio-util = { workspace = true } 19 | tokio-stream = { workspace = true, features = ["net"] } 20 | 21 | futures.workspace = true 22 | async-trait.workspace = true 23 | async-stream.workspace = true 24 | anyhow.workspace = true 25 | serde.workspace = true 26 | serde_json.workspace = true 27 | -------------------------------------------------------------------------------- /tools/inspect/src/main.rs: 
-------------------------------------------------------------------------------- 1 | use std::path::{Path, PathBuf}; 2 | use std::time::Instant; 3 | 4 | use clap::Parser; 5 | use comfy_table::Table; 6 | use futures::StreamExt; 7 | 8 | use warp::crypto::zeroize::Zeroizing; 9 | use warp::multipass::identity::Identifier; 10 | use warp::multipass::{Friends, LocalIdentity, MultiPass}; 11 | use warp::raygun::RayGun; 12 | use warp::tesseract::Tesseract; 13 | use warp_ipfs::config::Discovery; 14 | use warp_ipfs::{WarpIpfsBuilder, WarpIpfsInstance}; 15 | 16 | #[derive(Debug, Parser)] 17 | #[clap(name = "inspect")] 18 | struct Opt { 19 | /// Path to directory 20 | #[clap(long)] 21 | path: PathBuf, 22 | 23 | /// Name of the tesseract keystore 24 | #[clap(long)] 25 | keystore: Option, 26 | 27 | /// Password to unlock keystore 28 | #[clap(long)] 29 | password: Option, 30 | } 31 | 32 | async fn setup>( 33 | path: P, 34 | keystore: Option, 35 | passphrase: Zeroizing, 36 | ) -> anyhow::Result { 37 | let path = path.as_ref(); 38 | let keystore_path = path.join(keystore.unwrap_or("tesseract_store".into())); 39 | 40 | let tesseract = Tesseract::from_file(keystore_path)?; 41 | tesseract.unlock(passphrase.as_bytes())?; 42 | 43 | let mut config = warp_ipfs::config::Config::production(path); 44 | config.store_setting_mut().discovery = Discovery::None; 45 | config.ipfs_setting_mut().mdns.enable = false; 46 | *config.enable_relay_mut() = false; 47 | 48 | let instance = WarpIpfsBuilder::default() 49 | .set_tesseract(tesseract) 50 | .set_config(config) 51 | .await; 52 | 53 | //validating that account exist 54 | _ = instance.identity().await?; 55 | Ok(instance) 56 | } 57 | 58 | #[tokio::main] 59 | async fn main() -> anyhow::Result<()> { 60 | println!("Utility inspector tool.. 
"); 61 | let opt = Opt::parse(); 62 | //Just in case 63 | if fdlimit::raise_fd_limit().is_none() { 64 | // 65 | } 66 | 67 | let password = Zeroizing::new(match opt.password { 68 | Some(password) => password, 69 | None => rpassword::prompt_password("Enter A Password: ")?, 70 | }); 71 | 72 | let start_time = Instant::now(); 73 | let instance = setup(&opt.path, opt.keystore.clone(), password).await?; 74 | let end_time = start_time.elapsed(); 75 | println!( 76 | "Took {}ms to load the account, messaging and filesystem", 77 | end_time.as_millis() 78 | ); 79 | 80 | let start_time = Instant::now(); 81 | let identity = instance.identity().await?; 82 | let end_time = start_time.elapsed(); 83 | println!("Took {}ms to load the own identity", end_time.as_millis()); 84 | 85 | println!("Username: {}#{}", identity.username(), identity.short_id()); 86 | 87 | let start_time = Instant::now(); 88 | let mut friends = instance.list_friends().await?; 89 | let end_time = start_time.elapsed(); 90 | println!("Took {}ms to load friends list", end_time.as_millis()); 91 | 92 | println!("Total Friends: {}", friends.len()); 93 | 94 | if !friends.is_empty() { 95 | let mut table = Table::new(); 96 | table.set_header(vec!["Username", "DID"]); 97 | 98 | let start_time = Instant::now(); 99 | let mut identites = instance.get_identity(Identifier::DIDList(friends.clone())); 100 | let end_time = start_time.elapsed(); 101 | println!("Took {}ms to load friends identities", end_time.as_millis()); 102 | 103 | while let Some(identity) = identites.next().await { 104 | table.add_row(vec![ 105 | format!("{}#{}", identity.username(), identity.short_id()), 106 | identity.did_key().to_string(), 107 | ]); 108 | if let Some(position) = friends.iter().position(|key| identity.did_key().eq(key)) { 109 | friends.remove(position); 110 | } 111 | } 112 | 113 | for did in friends { 114 | table.add_row(vec!["N/A".into(), did.to_string()]); 115 | } 116 | 117 | println!("{table}"); 118 | } 119 | 120 | let start_time = Instant::now(); 121 | let conversations = instance.list_conversations().await?; 122 | let end_time = start_time.elapsed(); 123 | println!( 124 | "Took {}ms to load list of conversations", 125 | end_time.as_millis() 126 | ); 127 | 128 | println!("Total Conversations: {}", conversations.len()); 129 | 130 | let mut table = Table::new(); 131 | table.set_header(vec!["ID", "Name", "Type", "Recipients", "# of Messages"]); 132 | for convo in conversations { 133 | let recipients = instance 134 | .get_identity(convo.recipients()) 135 | .map(|id| format!("{}#{}", id.username(), id.short_id())) 136 | .collect::>() 137 | .await; 138 | 139 | let count = instance.get_message_count(convo.id()).await?; 140 | 141 | table.add_row(vec![ 142 | convo.id().to_string(), 143 | convo.name().map(ToOwned::to_owned).unwrap_or_default(), 144 | convo.conversation_type().to_string(), 145 | recipients.join(", "), 146 | count.to_string(), 147 | ]); 148 | } 149 | 150 | println!("{table}"); 151 | 152 | Ok(()) 153 | } 154 | -------------------------------------------------------------------------------- /tools/opencv-test/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "opencv-test" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [dependencies] 9 | anyhow = { workspace = true } 10 | clap = { version = "4.0", features=["derive"] } 11 | tokio = { workspace = true, features=["full"] } 12 | 13 | opencv 
= { version = "0.82", features=["clang-runtime"] } 14 | av-data = { workspace = true } 15 | libaom = { workspace = true } 16 | 17 | openh264 = { git="https://github.com/sdwoodbury/openh264-rs", rev="36abe7e7349c890684457fabaf5d90cb4b716cec", optional = true } 18 | x264 = { version = "0.5", optional = true } 19 | rav1e = { version = "0.6", default-features = false, optional = true } 20 | 21 | [features] 22 | all = ["dep:openh264", "dep:x264", "dep:rav1e"] 23 | default = [] -------------------------------------------------------------------------------- /tools/opencv-test/README.md: -------------------------------------------------------------------------------- 1 | # build dependencies 2 | 3 | # testing all codecs 4 | `cargo build --package opencv-test -F all` 5 | 6 | ## Mac OS 7 | clang - comes with llvm. `brew install llvm`. symlink $(brew --prefix llvm)/lib/libclang.dylib to wherever is needed 8 | opencv 9 | 10 | ## Linux 11 | libopencv-dev 12 | libstdc++-12-dev 13 | clang 14 | libclang-dev 15 | libx264-dev 16 | libaom-dev 17 | 18 | ## video file extensions and codecs known to work with opencv 19 | - .avi / MJPG 20 | - .mkv / H264 21 | - .mp4 / avc1 22 | 23 | ## testing a .mov file 24 | ffmpeg -i output_%04d.png -------------------------------------------------------------------------------- /tools/opencv-test/src/encode/aom.rs: -------------------------------------------------------------------------------- 1 | use crate::{encode::Mode, utils::yuv::*}; 2 | 3 | use super::{Args, EncodingType}; 4 | use anyhow::{bail, Result}; 5 | use av_data::{frame::FrameType, timeinfo::TimeInfo}; 6 | use std::{ 7 | fs::OpenOptions, 8 | io::{BufWriter, Write}, 9 | sync::Arc, 10 | }; 11 | 12 | use libaom::encoder::*; 13 | 14 | use opencv::{ 15 | core::{Mat_AUTO_STEP, CV_32F}, 16 | prelude::*, 17 | videoio, 18 | }; 19 | 20 | pub fn encode_aom(args: Args) -> Result<()> { 21 | let color_scale = args.color_scale.unwrap_or(ColorScale::HdTv); 22 | let optimized_mode = args.mode.unwrap_or(Mode::Normal); 23 | let is_lossy = args 24 | .encoding_type 25 | .map(|t| matches!(t, EncodingType::Lossy)) 26 | .unwrap_or(true); 27 | let multiplier: usize = if is_lossy { 1 } else { 2 }; 28 | 29 | let cam = videoio::VideoCapture::from_file(&args.input, videoio::CAP_ANY)?; 30 | let opened = videoio::VideoCapture::is_opened(&cam)?; 31 | if !opened { 32 | panic!("Unable to open video file!"); 33 | } 34 | 35 | // https://docs.opencv.org/3.4/d4/d15/group__videoio__flags__base.html 36 | let frame_width = cam.get(3)? as u32; 37 | let frame_height = cam.get(4)? as u32; 38 | let _fps = cam.get(5)? as f32; 39 | 40 | let output_file = OpenOptions::new() 41 | .read(false) 42 | .write(true) 43 | .create(true) 44 | .truncate(true) 45 | .open(args.output)?; 46 | let mut writer = BufWriter::new(output_file); 47 | 48 | let mut encoder_config = match AV1EncoderConfig::new_with_usage(AomUsage::RealTime) { 49 | Ok(r) => r, 50 | Err(e) => bail!("failed to get Av1EncoderConfig: {e:?}"), 51 | }; 52 | encoder_config.g_h = frame_height * multiplier as u32; 53 | encoder_config.g_w = frame_width * multiplier as u32; 54 | let mut encoder = match encoder_config.get_encoder() { 55 | Ok(r) => r, 56 | Err(e) => bail!("failed to get Av1Encoder: {e:?}"), 57 | }; 58 | 59 | // this is for testing an optimized version 60 | let color_scale_idx = color_scale.to_idx(); 61 | let mut m = [ 62 | // these scales are for turning RGB to YUV. but the input is in BGR. 
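// each row gets reversed just below (m[i].reverse()) so the RGB coefficients
// line up with OpenCV's B,G,R channel order before being loaded into the 3x3 Mat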
63 | Y_SCALE[color_scale_idx], 64 | U_SCALE[color_scale_idx], 65 | V_SCALE[color_scale_idx], 66 | ]; 67 | m[0].reverse(); 68 | m[1].reverse(); 69 | m[2].reverse(); 70 | let p = m.as_ptr() as *mut std::ffi::c_void; 71 | let m = unsafe { Mat::new_rows_cols_with_data(3, 3, CV_32F, p, Mat_AUTO_STEP) } 72 | .expect("failed to make xform matrix"); 73 | 74 | let pixel_format = *av_data::pixel::formats::YUV420; 75 | let pixel_format = Arc::new(pixel_format); 76 | for (idx, mut frame) in crate::VideoFileIter::new(cam).enumerate() { 77 | println!("read new frame"); 78 | let sz = frame.size()?; 79 | let width = sz.width as usize; 80 | let height = sz.height as usize; 81 | if width == 0 { 82 | continue; 83 | } 84 | 85 | let yuv = match optimized_mode { 86 | Mode::Faster => bgr_to_yuv420_lossy_faster(frame, &m, width, height, color_scale), 87 | _ => { 88 | let p = frame.data_mut(); 89 | let len = width * height * 3; 90 | let s = std::ptr::slice_from_raw_parts(p, len as _); 91 | let s: &[u8] = unsafe { &*s }; 92 | 93 | if is_lossy { 94 | bgr_to_yuv420_lossy(s, width, height, color_scale) 95 | } else { 96 | bgr_to_yuv420(s, width, height, color_scale) 97 | } 98 | } 99 | }; 100 | 101 | let yuv_buf = YUV420Buf { 102 | data: yuv, 103 | width: width * multiplier, 104 | height: height * multiplier, 105 | }; 106 | 107 | let frame = av_data::frame::Frame { 108 | kind: av_data::frame::MediaKind::Video(av_data::frame::VideoInfo::new( 109 | yuv_buf.width, 110 | yuv_buf.height, 111 | false, 112 | FrameType::I, 113 | pixel_format.clone(), 114 | )), 115 | buf: Box::new(yuv_buf), 116 | t: TimeInfo { 117 | pts: Some(idx as i64 * 60), 118 | ..Default::default() 119 | }, 120 | }; 121 | 122 | println!("encoding"); 123 | if let Err(e) = encoder.encode(&frame) { 124 | bail!("encoding error: {e}"); 125 | } 126 | 127 | println!("calling get_packet"); 128 | while let Some(packet) = encoder.get_packet() { 129 | if let AOMPacket::Packet(p) = packet { 130 | let _ = writer.write(&p.data)?; 131 | } 132 | } 133 | } 134 | writer.flush()?; 135 | Ok(()) 136 | } 137 | -------------------------------------------------------------------------------- /tools/opencv-test/src/encode/h264.rs: -------------------------------------------------------------------------------- 1 | use super::Args; 2 | use anyhow::Result; 3 | 4 | use crate::utils::yuv::*; 5 | use opencv::{prelude::*, videoio}; 6 | use std::{ 7 | fs::OpenOptions, 8 | io::{BufWriter, Write}, 9 | }; 10 | 11 | pub fn encode_h264(args: Args) -> Result<()> { 12 | let color_scale = args.color_scale.unwrap_or(ColorScale::Full); 13 | let cam = videoio::VideoCapture::from_file(&args.input, videoio::CAP_ANY)?; 14 | let opened = videoio::VideoCapture::is_opened(&cam)?; 15 | if !opened { 16 | panic!("Unable to open video file!"); 17 | } 18 | 19 | // https://docs.opencv.org/3.4/d4/d15/group__videoio__flags__base.html 20 | let frame_width = cam.get(3)? as u32; 21 | let frame_height = cam.get(4)? as u32; 22 | let fps = cam.get(5)? 
as _; 23 | 24 | let output_file = OpenOptions::new() 25 | .read(false) 26 | .write(true) 27 | .create(true) 28 | .truncate(true) 29 | .open(args.output)?; 30 | let mut writer = BufWriter::new(output_file); 31 | 32 | let config = openh264::encoder::EncoderConfig::new(frame_width * 2, frame_height * 2) 33 | .max_frame_rate(fps); //.rate_control_mode(openh264::encoder::RateControlMode::Timestamp); 34 | 35 | let mut encoder = openh264::encoder::Encoder::with_config(config)?; 36 | 37 | for mut frame in crate::VideoFileIter::new(cam) { 38 | let sz = frame.size()?; 39 | let width = sz.width as usize; 40 | let height = sz.height as usize; 41 | if width == 0 { 42 | continue; 43 | } 44 | let p = frame.data_mut(); 45 | let len = width * height * 3; 46 | let s = std::ptr::slice_from_raw_parts(p, len as _); 47 | let s: &[u8] = unsafe { &*s }; 48 | 49 | let yuv = bgr_to_yuv420(s, width, height, color_scale); 50 | 51 | let yuv_buf = YUV420Buf { 52 | data: yuv, 53 | width: width * 2, 54 | height: height * 2, 55 | }; 56 | 57 | let encoded_stream = encoder.encode(&yuv_buf)?; 58 | encoded_stream.write(&mut writer)?; 59 | } 60 | writer.flush()?; 61 | Ok(()) 62 | } 63 | -------------------------------------------------------------------------------- /tools/opencv-test/src/encode/mod.rs: -------------------------------------------------------------------------------- 1 | mod aom; 2 | #[cfg(feature = "all")] 3 | mod h264; 4 | #[cfg(feature = "all")] 5 | mod rav1e; 6 | #[cfg(feature = "all")] 7 | mod x264; 8 | pub use crate::encode::aom::encode_aom; 9 | #[cfg(feature = "all")] 10 | pub use crate::encode::h264::encode_h264; 11 | #[cfg(feature = "all")] 12 | pub use crate::encode::rav1e::encode_rav1e; 13 | #[cfg(feature = "all")] 14 | pub use crate::encode::x264::encode_x264; 15 | use crate::utils::yuv::ColorScale; 16 | 17 | use clap::Parser; 18 | 19 | // transforms the input file to h264 20 | #[derive(Parser, Debug)] 21 | pub struct Args { 22 | /// an mp4 file generated by opencv 23 | pub input: String, 24 | /// name of the file to save 25 | pub output: String, 26 | /// The codec to use 27 | pub codec: CodecTypes, 28 | /// Optional parameter. defaults to Lossy. 29 | /// NotLossy doubles the size of each frame so that converting to YUV420 30 | /// doesn't lose any chromiance information. 31 | pub encoding_type: Option, 32 | ///specifies the RGB to YUV transformation matrix 33 | pub color_scale: Option, 34 | /// use optimized version. currently only works for aom in lossy mode. 
35 | /// if set, overrides encoding_type 36 | pub mode: Option, 37 | } 38 | 39 | #[derive(Debug, Clone, clap::ValueEnum)] 40 | pub enum CodecTypes { 41 | #[cfg(feature = "all")] 42 | /// OpenH264 43 | H264, 44 | #[cfg(feature = "all")] 45 | /// x264 46 | X264, 47 | #[cfg(feature = "all")] 48 | /// av1 (rav1e) 49 | RAV1E, 50 | /// av1 (aom) 51 | AOM, 52 | } 53 | 54 | #[derive(Debug, Clone, clap::ValueEnum)] 55 | pub enum EncodingType { 56 | /// convert from BGR24 to YUV420 57 | Lossy, 58 | /// expand BGR24 image before converting to YUV420 59 | NotLossy, 60 | } 61 | 62 | #[derive(Debug, Clone, Copy, clap::ValueEnum)] 63 | pub enum Mode { 64 | Normal, 65 | /// attempt to use opencv matrix multiplication 66 | /// to speed up the conversion from BGR to YUV 67 | Faster, 68 | } 69 | -------------------------------------------------------------------------------- /tools/opencv-test/src/encode/rav1e.rs: -------------------------------------------------------------------------------- 1 | use super::Args; 2 | use crate::utils::yuv::*; 3 | use anyhow::Result; 4 | use rav1e::{prelude::ChromaSampling, *}; 5 | use std::{ 6 | fs::OpenOptions, 7 | io::{BufWriter, Write}, 8 | }; 9 | 10 | use opencv::{prelude::*, videoio}; 11 | 12 | pub fn encode_rav1e(args: Args) -> Result<()> { 13 | let color_scale = args.color_scale.unwrap_or(ColorScale::Full); 14 | let cam = videoio::VideoCapture::from_file(&args.input, videoio::CAP_ANY)?; 15 | let opened = videoio::VideoCapture::is_opened(&cam)?; 16 | if !opened { 17 | panic!("Unable to open video file!"); 18 | } 19 | 20 | // https://docs.opencv.org/3.4/d4/d15/group__videoio__flags__base.html 21 | let frame_width = cam.get(3)? as usize; 22 | let frame_height = cam.get(4)? as usize; 23 | let _fps = cam.get(5)? as f32; 24 | 25 | let output_file = OpenOptions::new() 26 | .read(false) 27 | .write(true) 28 | .create(true) 29 | .truncate(true) 30 | .open(args.output)?; 31 | let mut writer = BufWriter::new(output_file); 32 | 33 | let enc = EncoderConfig { 34 | width: frame_width * 2, 35 | height: frame_height * 2, 36 | // todo: try using 444 37 | chroma_sampling: ChromaSampling::Cs420, 38 | ..Default::default() 39 | }; 40 | 41 | let cfg = Config::new().with_encoder_config(enc); 42 | let mut ctx: Context = cfg 43 | .new_context() 44 | .map_err(|e| anyhow::anyhow!(format!("couldn't make context: {e:?}")))?; 45 | 46 | for mut frame in crate::VideoFileIter::new(cam) { 47 | println!("read new frame"); 48 | let sz = frame.size()?; 49 | let width = sz.width as usize; 50 | let height = sz.height as usize; 51 | if width == 0 { 52 | continue; 53 | } 54 | let p = frame.data_mut(); 55 | let len = width * height * 3; 56 | let s = std::ptr::slice_from_raw_parts(p, len as _); 57 | let s: &[u8] = unsafe { &*s }; 58 | 59 | println!("converting format"); 60 | // note that width and height have doubled 61 | let yuv = bgr_to_yuv420(s, width, height, color_scale); 62 | println!("done converting"); 63 | 64 | // create a frame 65 | let mut f1 = ctx.new_frame(); 66 | let mut start = 0; 67 | let mut end = 4 * width * height; 68 | f1.planes[0].copy_from_raw_u8(&yuv[start..end], width * 2, 1); 69 | start = end; 70 | end = start + (width * height); 71 | f1.planes[1].copy_from_raw_u8(&yuv[start..end], width, 1); 72 | start = end; 73 | f1.planes[2].copy_from_raw_u8(&yuv[start..], width, 1); 74 | 75 | println!("sending frame"); 76 | ctx.send_frame(f1)?; 77 | 78 | loop { 79 | println!("requesting packet"); 80 | match ctx.receive_packet() { 81 | Ok(packet) => { 82 | println!("got packet"); 83 | let _ = 
writer.write(&packet.data)?; 84 | } 85 | Err(EncoderStatus::Encoded) => { 86 | println!("frame encoded"); 87 | } 88 | Err(e) => { 89 | println!("got err: {e:?}"); 90 | break; 91 | } 92 | } 93 | } 94 | } 95 | writer.flush()?; 96 | Ok(()) 97 | } 98 | -------------------------------------------------------------------------------- /tools/opencv-test/src/encode/x264.rs: -------------------------------------------------------------------------------- 1 | use super::Args; 2 | use anyhow::Result; 3 | 4 | use std::{ 5 | fs::OpenOptions, 6 | io::{BufWriter, Write}, 7 | }; 8 | 9 | use opencv::{prelude::*, videoio}; 10 | 11 | pub fn encode_x264(args: Args) -> Result<()> { 12 | let cam = videoio::VideoCapture::from_file(&args.input, videoio::CAP_ANY)?; 13 | let opened = videoio::VideoCapture::is_opened(&cam)?; 14 | if !opened { 15 | panic!("Unable to open video file!"); 16 | } 17 | 18 | // https://docs.opencv.org/3.4/d4/d15/group__videoio__flags__base.html 19 | let frame_width = cam.get(3)? as i32; 20 | let frame_height = cam.get(4)? as i32; 21 | let fps = cam.get(5)?; 22 | 23 | let output_file = OpenOptions::new() 24 | .read(false) 25 | .write(true) 26 | .create(true) 27 | .truncate(true) 28 | .open(args.output)?; 29 | let mut writer = BufWriter::new(output_file); 30 | let mut encoder = x264::Encoder::builder() 31 | .fps(fps as _, 1) 32 | .build(x264::Colorspace::BGR, frame_width as _, frame_height as _) 33 | .expect("failed to make builder"); 34 | let mut idx = 0; 35 | 36 | for mut frame in crate::VideoFileIter::new(cam) { 37 | let sz = frame.size()?; 38 | if sz.width > 0 { 39 | let p = frame.data_mut(); 40 | let len = sz.width * sz.height * 3; 41 | let s = std::ptr::slice_from_raw_parts(p, len as _); 42 | 43 | let img = x264::Image::bgr(sz.width, sz.height, unsafe { &*s }); 44 | let (data, _) = encoder 45 | .encode(fps as i64 * idx as i64, img) 46 | .expect("failed to encode frame"); 47 | idx += 1; 48 | let _ = writer.write(data.entirety())?; 49 | } 50 | } 51 | writer.flush()?; 52 | Ok(()) 53 | } 54 | -------------------------------------------------------------------------------- /tools/opencv-test/src/lib.rs: -------------------------------------------------------------------------------- 1 | use opencv::{ 2 | prelude::Mat, 3 | videoio::{VideoCapture, VideoCaptureTrait}, 4 | }; 5 | 6 | pub mod encode; 7 | pub mod utils; 8 | 9 | pub struct VideoFileIter { 10 | cam: VideoCapture, 11 | } 12 | 13 | impl VideoFileIter { 14 | pub fn new(cam: VideoCapture) -> Self { 15 | Self { cam } 16 | } 17 | } 18 | 19 | impl Iterator for VideoFileIter { 20 | type Item = opencv::prelude::Mat; 21 | 22 | fn next(&mut self) -> Option { 23 | let mut frame = Mat::default(); 24 | match self.cam.read(&mut frame) { 25 | Ok(b) => { 26 | if b { 27 | Some(frame) 28 | } else { 29 | None 30 | } 31 | } 32 | Err(e) => { 33 | println!("error reading file: {e}"); 34 | None 35 | } 36 | } 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /tools/opencv-test/src/main.rs: -------------------------------------------------------------------------------- 1 | use clap::Parser; 2 | use opencv::{highgui, prelude::*, videoio}; 3 | use opencv_test::encode::{encode_aom, CodecTypes}; 4 | #[cfg(feature = "all")] 5 | use opencv_test::encode::{encode_h264, encode_rav1e, encode_x264}; 6 | 7 | #[derive(Parser, Debug)] 8 | enum Command { 9 | /// play the specified video file 10 | Play(PlayArgs), 11 | /// capture video in the specified format and save at the specified file 12 | Capture(CaptureArgs), 13 | 
/// read a video file and re-encode using the specified codec 14 | Encode(opencv_test::encode::Args), 15 | } 16 | 17 | #[tokio::main] 18 | async fn main() -> anyhow::Result<()> { 19 | let cmd = Command::parse(); 20 | match cmd { 21 | Command::Encode(args) => match args.codec { 22 | #[cfg(feature = "all")] 23 | CodecTypes::H264 => encode_h264(args)?, 24 | #[cfg(feature = "all")] 25 | CodecTypes::X264 => encode_x264(args)?, 26 | #[cfg(feature = "all")] 27 | CodecTypes::RAV1E => encode_rav1e(args)?, 28 | CodecTypes::AOM => encode_aom(args)?, 29 | }, 30 | Command::Capture(args) => capture(args).await?, 31 | Command::Play(args) => play(args).await?, 32 | }; 33 | Ok(()) 34 | } 35 | 36 | // https://softron.zendesk.com/hc/en-us/articles/207695697-List-of-FourCC-codes-for-video-codecs 37 | #[derive(Parser, Debug)] 38 | struct CaptureArgs { 39 | /// name of the file to save 40 | /// try avi 41 | output: String, 42 | /// specifies the codec 43 | /// try MJPG 44 | fourcc: String, 45 | } 46 | 47 | async fn capture(args: CaptureArgs) -> anyhow::Result<()> { 48 | let fourcc = args.fourcc.as_bytes(); 49 | 50 | let window = "video capture"; 51 | highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?; 52 | let mut cam = videoio::VideoCapture::new(0, videoio::CAP_ANY)?; // 0 is the default camera 53 | let opened = videoio::VideoCapture::is_opened(&cam)?; 54 | if !opened { 55 | panic!("Unable to open default camera!"); 56 | } 57 | 58 | // https://docs.opencv.org/3.4/d4/d15/group__videoio__flags__base.html 59 | let frame_width = cam.get(3)? as i32; 60 | let frame_height = cam.get(4)? as i32; 61 | let fps = cam.get(5)?; 62 | 63 | let mut writer = videoio::VideoWriter::default()?; 64 | writer.open( 65 | &args.output, 66 | videoio::VideoWriter::fourcc( 67 | fourcc[0].into(), 68 | fourcc[1].into(), 69 | fourcc[2].into(), 70 | fourcc[3].into(), 71 | )?, 72 | fps, 73 | opencv::core::Size::new(frame_width, frame_height), 74 | true, 75 | )?; 76 | 77 | loop { 78 | let mut frame = Mat::default(); 79 | cam.read(&mut frame)?; 80 | if frame.size()?.width > 0 { 81 | highgui::imshow(window, &frame)?; 82 | writer.write(&frame)?; 83 | } 84 | 85 | let key = highgui::wait_key(10)?; 86 | if key > 0 && key != 255 { 87 | break; 88 | } 89 | } 90 | writer.release()?; 91 | 92 | //signal::ctrl_c().await?; 93 | Ok(()) 94 | } 95 | 96 | #[derive(Parser, Debug)] 97 | struct PlayArgs { 98 | /// the video file to play 99 | input: String, 100 | } 101 | 102 | async fn play(args: PlayArgs) -> anyhow::Result<()> { 103 | let window = "video playback"; 104 | highgui::named_window(window, highgui::WINDOW_AUTOSIZE)?; 105 | let mut cam = videoio::VideoCapture::from_file(&args.input, videoio::CAP_ANY)?; 106 | let opened = videoio::VideoCapture::is_opened(&cam)?; 107 | if !opened { 108 | panic!("Unable to read from file!"); 109 | } 110 | 111 | loop { 112 | let mut frame = Mat::default(); 113 | cam.read(&mut frame)?; 114 | if frame.size()?.width > 0 { 115 | highgui::imshow(window, &frame)?; 116 | } 117 | 118 | let key = highgui::wait_key(10)?; 119 | if key > 0 && key != 255 { 120 | break; 121 | } 122 | } 123 | 124 | Ok(()) 125 | } 126 | -------------------------------------------------------------------------------- /tools/opencv-test/src/utils/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod rgb; 2 | pub mod yuv; 3 | -------------------------------------------------------------------------------- /tools/opencv-test/src/utils/rgb.rs: 
-------------------------------------------------------------------------------- 1 | pub fn bgr_to_rgb(input: &[u8]) -> Vec<u8> { 2 | let mut v = Vec::new(); 3 | v.extend_from_slice(input); 4 | for chunk in v.chunks_exact_mut(3) { 5 | chunk.swap(0, 2); 6 | } 7 | v 8 | } 9 | 10 | pub struct RGBBuf { 11 | pub data: Vec<u8>, 12 | pub width: usize, 13 | pub height: usize, 14 | } 15 | 16 | impl av_data::frame::FrameBuffer for RGBBuf { 17 | fn linesize(&self, idx: usize) -> Result<usize, av_data::frame::FrameError> { 18 | match idx { 19 | 0 | 1 | 2 => Ok(self.width), 20 | _ => Err(av_data::frame::FrameError::InvalidIndex), 21 | } 22 | } 23 | 24 | fn count(&self) -> usize { 25 | 3 26 | } 27 | 28 | fn as_slice_inner(&self, idx: usize) -> Result<&[u8], av_data::frame::FrameError> { 29 | match idx { 30 | 0 | 1 | 2 => Ok(&self.data[0..]), 31 | _ => Err(av_data::frame::FrameError::InvalidIndex), 32 | } 33 | } 34 | 35 | fn as_mut_slice_inner(&mut self, idx: usize) -> Result<&mut [u8], av_data::frame::FrameError> { 36 | match idx { 37 | 0 | 1 | 2 => Ok(&mut self.data[0..]), 38 | _ => Err(av_data::frame::FrameError::InvalidIndex), 39 | } 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /tools/relay-server/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "relay-server" 3 | version.workspace = true 4 | edition.workspace = true 5 | license.workspace = true 6 | rust-version.workspace = true 7 | repository.workspace = true 8 | 9 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 10 | 11 | [dependencies] 12 | rust-ipfs = { workspace = true } 13 | tokio = { workspace = true } 14 | tokio-util = { workspace = true, features = ["full"] } 15 | tokio-stream = { workspace = true, features = ["net"] } 16 | futures.workspace = true 17 | async-trait.workspace = true 18 | async-stream.workspace = true 19 | anyhow.workspace = true 20 | serde.workspace = true 21 | serde_json.workspace = true 22 | void.workspace = true 23 | tracing.workspace = true 24 | toml.workspace = true 25 | tracing-subscriber = { version = "0.3", features = ["env-filter"] } 26 | clap = { version = "4.4", features = ["derive"] } 27 | zeroize.workspace = true 28 | dotenv = "0.15" 29 | base64 = "0.21" 30 | -------------------------------------------------------------------------------- /tools/relay-server/src/config.rs: -------------------------------------------------------------------------------- 1 | use std::{error::Error, path::Path}; 2 | 3 | use base64::{ 4 | alphabet::STANDARD, 5 | engine::{general_purpose::PAD, GeneralPurpose}, 6 | Engine, 7 | }; 8 | use rust_ipfs::{Keypair, PeerId}; 9 | use serde::Deserialize; 10 | use zeroize::Zeroizing; 11 | 12 | #[derive(Clone, Deserialize)] 13 | #[serde(rename_all = "PascalCase")] 14 | pub struct IpfsConfig { 15 | pub identity: Identity, 16 | } 17 | 18 | impl IpfsConfig { 19 | pub async fn load<P: AsRef<Path>>(path: P) -> Result<Self, Box<dyn Error>> { 20 | let file = tokio::fs::read(path).await?; 21 | let config = serde_json::from_slice(&file)?; 22 | Ok(config) 23 | } 24 | } 25 | 26 | #[derive(Deserialize, Clone)] 27 | #[serde(rename_all = "PascalCase")] 28 | pub struct Identity { 29 | #[serde(rename = "PeerID")] 30 | pub peer_id: PeerId, 31 | pub priv_key: String, 32 | } 33 | 34 | impl Identity { 35 | pub fn keypair(&self) -> Result<Keypair, Box<dyn Error>> { 36 | let engine = GeneralPurpose::new(&STANDARD, PAD); 37 | let keypair_bytes = Zeroizing::new(engine.decode(self.priv_key.as_bytes())?); 38 | let keypair =
Keypair::from_protobuf_encoding(&keypair_bytes)?; 39 | assert_eq!(self.peer_id, keypair.public().to_peer_id()); 40 | Ok(keypair) 41 | } 42 | } 43 | 44 | impl zeroize::Zeroize for IpfsConfig { 45 | fn zeroize(&mut self) { 46 | self.identity.priv_key.zeroize(); 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /tools/video-codec-cli/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "video-codec-cli" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [dependencies] 9 | anyhow = { workspace = true } 10 | clap = { version = "4.0", features=["derive"] } 11 | tokio = { workspace = true } 12 | 13 | av-data = { workspace = true } 14 | eye = { workspace = true } 15 | libaom = { workspace = true } 16 | -------------------------------------------------------------------------------- /tools/video-codec-cli/src/encode/aom.rs: -------------------------------------------------------------------------------- 1 | use crate::utils::yuv::*; 2 | 3 | use anyhow::{bail, Result}; 4 | use av_data::{frame::FrameType, timeinfo::TimeInfo}; 5 | use eye::{ 6 | colorconvert::Device, 7 | hal::{ 8 | format::PixelFormat, 9 | traits::{Context as _, Device as _, Stream as _}, 10 | PlatformContext, 11 | }, 12 | }; 13 | use std::{sync::mpsc, time::{Instant, Duration}}; 14 | use std::{ 15 | fs::OpenOptions, 16 | io::{BufWriter, Write}, 17 | sync::Arc, 18 | }; 19 | 20 | use libaom::encoder::*; 21 | 22 | pub fn encode_aom(output_file: &str) -> Result<()> { 23 | // Create a context 24 | let ctx = PlatformContext::all() 25 | .next() 26 | .ok_or(anyhow::anyhow!("No platform context available"))?; 27 | 28 | // Create a list of valid capture devices in the system. 29 | let dev_descrs = ctx.devices()?; 30 | 31 | // Print the supported formats for each device. 32 | let dev = ctx.open_device(&dev_descrs[0].uri)?; 33 | let dev = Device::new(dev)?; 34 | let stream_descr = dev 35 | .streams()? 
36 | .into_iter() 37 | .reduce(|s1, s2| { 38 | // Choose RGB with 8 bit depth 39 | if s1.pixfmt == PixelFormat::Rgb(24) && s2.pixfmt != PixelFormat::Rgb(24) { 40 | return s1; 41 | } 42 | 43 | // Strive for HD (1280 x 720) 44 | let distance = |width: u32, height: u32| { 45 | f32::sqrt(((640 - width as i32).pow(2) + (480 - height as i32).pow(2)) as f32) 46 | }; 47 | 48 | if distance(s1.width, s1.height) < distance(s2.width, s2.height) { 49 | s1 50 | } else { 51 | s2 52 | } 53 | }) 54 | .ok_or(anyhow::anyhow!("failed to get video stream"))?; 55 | 56 | if stream_descr.pixfmt != PixelFormat::Rgb(24) { 57 | bail!("No RGB3 streams available"); 58 | } 59 | 60 | println!("Selected stream:\n{:?}", stream_descr); 61 | 62 | // Start the stream 63 | let mut stream = dev.start_stream(&stream_descr)?; 64 | let (tx, rx) = mpsc::channel(); 65 | 66 | println!("starting stream with description: {stream_descr:?}"); 67 | 68 | std::thread::spawn(move || loop { 69 | let buf = stream.next().unwrap().unwrap(); 70 | if tx.send(buf.to_vec()).is_err() { 71 | return; 72 | } 73 | }); 74 | 75 | let color_scale = ColorScale::HdTv; 76 | let multiplier: usize = 1; 77 | 78 | let frame_width = stream_descr.width; 79 | let frame_height = stream_descr.height; 80 | let fps = 1000.0 / (stream_descr.interval.as_millis() as f64); 81 | 82 | let output_file = OpenOptions::new() 83 | .read(false) 84 | .write(true) 85 | .create(true) 86 | .truncate(true) 87 | .open(output_file)?; 88 | let mut writer = BufWriter::new(output_file); 89 | 90 | let mut encoder_config = match AV1EncoderConfig::new_with_usage(AomUsage::RealTime) { 91 | Ok(r) => r, 92 | Err(e) => bail!("failed to get Av1EncoderConfig: {e:?}"), 93 | }; 94 | encoder_config.g_h = frame_height * multiplier as u32; 95 | encoder_config.g_w = frame_width * multiplier as u32; 96 | let mut encoder = match encoder_config.get_encoder() { 97 | Ok(r) => r, 98 | Err(e) => bail!("failed to get Av1Encoder: {e:?}"), 99 | }; 100 | 101 | let pixel_format = *av_data::pixel::formats::YUV420; 102 | let pixel_format = Arc::new(pixel_format); 103 | 104 | // run the loop for about 10 seconds 105 | let start_time = Instant::now(); 106 | while let Ok(frame) = rx.recv() { 107 | // println!("read new frame"); 108 | 109 | let frame_time = Duration::from(Instant::now() - start_time); 110 | let frame_time_ms = frame_time.as_millis(); 111 | 112 | if frame_time_ms > 10000 { 113 | break; 114 | } 115 | 116 | let timestamp = frame_time_ms as f64 / fps; 117 | 118 | let yuv = { 119 | let p = frame.as_ptr(); 120 | let len = frame_width * frame_height * 3; 121 | let s = std::ptr::slice_from_raw_parts(p, len as _); 122 | let s: &[u8] = unsafe { &*s }; 123 | 124 | rgb_to_yuv420(s, frame_width as _, frame_height as _, color_scale) 125 | }; 126 | 127 | let yuv_buf = YUV420Buf { 128 | data: yuv, 129 | width: frame_width as usize * multiplier, 130 | height: frame_height as usize * multiplier, 131 | }; 132 | 133 | let frame = av_data::frame::Frame { 134 | kind: av_data::frame::MediaKind::Video(av_data::frame::VideoInfo::new( 135 | yuv_buf.width, 136 | yuv_buf.height, 137 | false, 138 | FrameType::I, 139 | pixel_format.clone(), 140 | )), 141 | buf: Box::new(yuv_buf), 142 | t: TimeInfo { 143 | pts: Some(timestamp as i64), 144 | ..Default::default() 145 | }, 146 | }; 147 | 148 | // println!("encoding"); 149 | if let Err(e) = encoder.encode(&frame) { 150 | bail!("encoding error: {e}"); 151 | } 152 | 153 | // println!("calling get_packet"); 154 | while let Some(packet) = encoder.get_packet() { 155 | if let AOMPacket::Packet(p) 
= packet { 156 | let _ = writer.write(&p.data)?; 157 | } 158 | } 159 | } 160 | writer.flush()?; 161 | Ok(()) 162 | } 163 | -------------------------------------------------------------------------------- /tools/video-codec-cli/src/encode/mod.rs: -------------------------------------------------------------------------------- 1 | mod aom; 2 | 3 | pub use aom::*; 4 | -------------------------------------------------------------------------------- /tools/video-codec-cli/src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod encode; 2 | pub mod utils; 3 | -------------------------------------------------------------------------------- /tools/video-codec-cli/src/main.rs: -------------------------------------------------------------------------------- 1 | // this is a gutted version of opencv-test. At this point the av1 codec has been chosen and all that's needed is to validate eye-rs. 2 | 3 | use clap::Parser; 4 | use video_codec_cli::encode::encode_aom; 5 | 6 | /// capture a video and encode it using av1 codec, saving the file at the specified path 7 | #[derive(Parser, Debug)] 8 | struct Command { 9 | output_file: String, 10 | } 11 | 12 | #[tokio::main] 13 | async fn main() -> anyhow::Result<()> { 14 | let cmd = Command::parse(); 15 | encode_aom(&cmd.output_file)?; 16 | Ok(()) 17 | } 18 | -------------------------------------------------------------------------------- /tools/video-codec-cli/src/utils/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod rgb; 2 | pub mod yuv; 3 | -------------------------------------------------------------------------------- /tools/video-codec-cli/src/utils/rgb.rs: -------------------------------------------------------------------------------- 1 | pub fn bgr_to_rgb(input: &[u8]) -> Vec<u8> { 2 | let mut v = Vec::new(); 3 | v.extend_from_slice(input); 4 | for chunk in v.chunks_exact_mut(3) { 5 | chunk.swap(0, 2); 6 | } 7 | v 8 | } 9 | 10 | pub struct RGBBuf { 11 | pub data: Vec<u8>, 12 | pub width: usize, 13 | pub height: usize, 14 | } 15 | 16 | impl av_data::frame::FrameBuffer for RGBBuf { 17 | fn linesize(&self, idx: usize) -> Result<usize, av_data::frame::FrameError> { 18 | match idx { 19 | 0 | 1 | 2 => Ok(self.width), 20 | _ => Err(av_data::frame::FrameError::InvalidIndex), 21 | } 22 | } 23 | 24 | fn count(&self) -> usize { 25 | 3 26 | } 27 | 28 | fn as_slice_inner(&self, idx: usize) -> Result<&[u8], av_data::frame::FrameError> { 29 | match idx { 30 | 0 | 1 | 2 => Ok(&self.data[0..]), 31 | _ => Err(av_data::frame::FrameError::InvalidIndex), 32 | } 33 | } 34 | 35 | fn as_mut_slice_inner(&mut self, idx: usize) -> Result<&mut [u8], av_data::frame::FrameError> { 36 | match idx { 37 | 0 | 1 | 2 => Ok(&mut self.data[0..]), 38 | _ => Err(av_data::frame::FrameError::InvalidIndex), 39 | } 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /warp/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "warp" 3 | version.workspace = true 4 | description = "TBD" 5 | # readme = "README.md" 6 | license.workspace = true 7 | edition.workspace = true 8 | repository.workspace = true 9 | 10 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 11 | [lib] 12 | crate-type = ["cdylib", "rlib", "staticlib"] 13 | 14 | 15 | [dependencies] 16 | 17 | # Async and futures crates 18 | futures.workspace = true 19 | async-trait.workspace = true 20 | async-stream.workspace = true 21
| async-broadcast.workspace = true 22 | 23 | # Crypto crates 24 | ed25519-dalek.workspace = true 25 | sha2.workspace = true 26 | hmac.workspace = true 27 | digest.workspace = true 28 | aes-gcm = { workspace = true, features = ["stream"] } 29 | zeroize.workspace = true 30 | rand.workspace = true 31 | multihash = { workspace = true, features = ["sha1"] } 32 | did-key.workspace = true 33 | tiny-bip39.workspace = true 34 | 35 | # Error handling crates 36 | anyhow.workspace = true 37 | thiserror.workspace = true 38 | 39 | # Sync crates 40 | parking_lot = { workspace = true, features = ["serde"] } 41 | 42 | # Time crate 43 | chrono = { workspace = true } 44 | 45 | # Encoding and Serializing Crates 46 | serde.workspace = true 47 | serde_cbor.workspace = true 48 | serde_json.workspace = true 49 | bincode.workspace = true 50 | bs58.workspace = true 51 | hex.workspace = true 52 | 53 | # Misc 54 | bytes.workspace = true 55 | dyn-clone.workspace = true 56 | uuid.workspace = true 57 | derive_more.workspace = true 58 | paste.workspace = true 59 | tracing = { workspace = true } 60 | mediatype.workspace = true 61 | send_wrapper.workspace = true 62 | indexmap.workspace = true 63 | 64 | [target.'cfg(not(target_arch = "wasm32"))'.dependencies] 65 | tokio = { workspace = true } 66 | 67 | [target.'cfg(target_arch = "wasm32")'.dependencies] 68 | tokio = { version = "1", default-features = false, features = ["sync"] } 69 | wasm-bindgen.workspace = true 70 | gloo.workspace = true 71 | web-sys.workspace = true 72 | js-sys.workspace = true 73 | console_error_panic_hook.workspace = true 74 | wasm-streams.workspace = true 75 | wasm-bindgen-futures.workspace = true 76 | serde-wasm-bindgen.workspace = true 77 | tracing-wasm.workspace = true 78 | 79 | [features] 80 | default = [] 81 | wasm_debug = [] 82 | 83 | # These are use for build.rs to install cbindgen and nightly toolchain to generate headers 84 | # Note this will change in the future once its fixed upstream 85 | build-header = [] 86 | force-install = [] 87 | 88 | [build-dependencies] 89 | cbindgen = "0.23" 90 | -------------------------------------------------------------------------------- /warp/src/blink/audio_config.rs: -------------------------------------------------------------------------------- 1 | use anyhow::Result; 2 | use dyn_clone::DynClone; 3 | use futures::channel::oneshot; 4 | use tokio::sync::mpsc; 5 | 6 | // Note: CPAL streams aren't Send and have to be run in an async context. It is expected that tokio::task::spawn_blocking may be used to test 7 | // the microphone and speaker. Because of this, the event channel can't be returned by the function but is instead sent over a oneshot channel. 
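// A rough usage sketch for the trait below (illustrative only; it assumes the oneshot carries an
// unbounded mpsc receiver of `AudioTestEvent`, and that `config` is a boxed `AudioDeviceConfig`):
//
//   let (tx, rx) = futures::channel::oneshot::channel();
//   let cfg = dyn_clone::clone_box(&*config);
//   tokio::task::spawn_blocking(move || cfg.test_speaker(tx));
//   let mut events = rx.await?;
//   while let Some(event) = events.recv().await {
//       if matches!(event, AudioTestEvent::Done) {
//           break;
//       }
//   }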
8 | pub trait AudioDeviceConfig: DynClone + Send + Sync { 9 | fn test_speaker( 10 | &self, 11 | rsp: oneshot::Sender<mpsc::UnboundedReceiver<AudioTestEvent>>, 12 | ) -> Result<()>; 13 | fn test_microphone( 14 | &self, 15 | rsp: oneshot::Sender<mpsc::UnboundedReceiver<AudioTestEvent>>, 16 | ) -> Result<()>; 17 | 18 | fn set_speaker(&mut self, device_name: &str); 19 | fn set_microphone(&mut self, device_name: &str); 20 | 21 | fn microphone_device_name(&self) -> Option<String>; 22 | fn speaker_device_name(&self) -> Option<String>; 23 | 24 | fn get_available_microphones(&self) -> Result<Vec<String>>; 25 | fn get_available_speakers(&self) -> Result<Vec<String>>; 26 | } 27 | 28 | #[derive(Clone, Debug)] 29 | pub enum AudioTestEvent { 30 | Output { loudness: u8 }, 31 | Input { loudness: u8 }, 32 | Done, 33 | } 34 | -------------------------------------------------------------------------------- /warp/src/blink/call_state.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | 3 | use serde::{Deserialize, Serialize}; 4 | 5 | use crate::crypto::DID; 6 | 7 | #[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)] 8 | pub struct CallState { 9 | pub own_id: DID, 10 | pub participants_joined: HashMap<DID, ParticipantState>, 11 | } 12 | 13 | #[derive(Default, Debug, Clone, Serialize, Deserialize, Eq, PartialEq)] 14 | pub struct ParticipantState { 15 | pub muted: bool, 16 | pub deafened: bool, 17 | pub recording: bool, 18 | } 19 | 20 | impl CallState { 21 | pub fn new(own_id: DID) -> Self { 22 | Self { 23 | own_id, 24 | participants_joined: HashMap::default(), 25 | } 26 | } 27 | pub fn add_participant(&mut self, id: &DID, state: ParticipantState) { 28 | self.participants_joined.insert(id.clone(), state); 29 | } 30 | 31 | pub fn is_call_empty(&self) -> bool { 32 | self.participants_joined.is_empty() 33 | } 34 | 35 | pub fn remove_participant(&mut self, id: &DID) { 36 | self.participants_joined.remove(id); 37 | } 38 | 39 | pub fn set_muted(&mut self, id: &DID, muted: bool) { 40 | if let Some(participant) = self.participants_joined.get_mut(id) { 41 | participant.muted = muted; 42 | } 43 | } 44 | 45 | pub fn set_deafened(&mut self, id: &DID, deafened: bool) { 46 | if let Some(participant) = self.participants_joined.get_mut(id) { 47 | participant.deafened = deafened; 48 | } 49 | } 50 | 51 | pub fn set_recording(&mut self, id: &DID, recording: bool) { 52 | if let Some(participant) = self.participants_joined.get_mut(id) { 53 | participant.recording = recording; 54 | } 55 | } 56 | 57 | pub fn set_self_muted(&mut self, muted: bool) { 58 | let own_id = self.own_id.clone(); 59 | self.set_muted(&own_id, muted); 60 | } 61 | 62 | pub fn set_self_deafened(&mut self, deafened: bool) { 63 | let own_id = self.own_id.clone(); 64 | self.set_deafened(&own_id, deafened); 65 | } 66 | 67 | pub fn set_self_recording(&mut self, recording: bool) { 68 | let own_id = self.own_id.clone(); 69 | self.set_recording(&own_id, recording); 70 | } 71 | 72 | pub fn reset_self(&mut self) { 73 | self.participants_joined.remove(&self.own_id); 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /warp/src/crypto/hash.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::result_large_err)] 2 | use digest::Digest; 3 | use sha2::Sha256; 4 | use std::io::Read; 5 | 6 | use crate::error::Error; 7 | 8 | #[allow(dead_code)] 9 | type Result<T> = std::result::Result<T, Error>; 10 | 11 | pub fn sha256_hash_stream(reader: &mut impl Read, salt: Option<&[u8]>) -> Result<Vec<u8>> { 12 | let mut hasher = Sha256::new(); 13 | std::io::copy(reader, &mut
hasher)?; 14 | if let Some(salt) = salt { 15 | hasher.update(salt); 16 | } 17 | Ok(hasher.finalize().to_vec()) 18 | } 19 | 20 | pub fn sha256_hash(data: &[u8], salt: Option<&[u8]>) -> Vec<u8> { 21 | let mut hasher = Sha256::new(); 22 | hasher.update(data); 23 | if let Some(salt) = salt { 24 | hasher.update(salt); 25 | } 26 | hasher.finalize().to_vec() 27 | } 28 | 29 | pub fn sha256_iter( 30 | iter: impl Iterator<Item = Option<Vec<u8>>>, 31 | salt: Option<&[u8]>, 32 | ) -> Vec<u8> { 33 | let mut hasher = Sha256::new(); 34 | for data in iter.flatten() { 35 | hasher.update(data); 36 | } 37 | if let Some(salt) = salt { 38 | hasher.update(salt); 39 | } 40 | hasher.finalize().to_vec() 41 | } 42 | 43 | #[cfg(test)] 44 | mod test { 45 | use crate::crypto::hash::*; 46 | 47 | #[test] 48 | fn sha256_test() -> anyhow::Result<()> { 49 | let hash = sha256_hash(b"Hello, World!", None); 50 | 51 | assert_eq!( 52 | hex::encode(hash), 53 | String::from("dffd6021bb2bd5b0af676290809ec3a53191dd81c7f70a4b28688a362182986f") 54 | ); 55 | Ok(()) 56 | } 57 | 58 | #[test] 59 | fn sha256_test_invalid_hash() -> anyhow::Result<()> { 60 | let hash = sha256_hash(b"Invalid hash", None); 61 | 62 | assert_ne!( 63 | hex::encode(hash), 64 | String::from("dffd6021bb2bd5b0af676290809ec3a53191dd81c7f70a4b28688a362182986f") 65 | ); 66 | Ok(()) 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /warp/src/crypto/keypair.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::result_large_err)] 2 | use crate::{error::Error, tesseract::Tesseract}; 3 | use bip39::{Language, Mnemonic, MnemonicType, Seed}; 4 | use derive_more::Display; 5 | use did_key::KeyMaterial; 6 | use ed25519_dalek::{Keypair, PublicKey, SecretKey, KEYPAIR_LENGTH, SECRET_KEY_LENGTH}; 7 | use hmac::{Hmac, Mac}; 8 | use sha2::Sha512; 9 | use zeroize::Zeroizing; 10 | 11 | use super::DID; 12 | 13 | const ED25519_BIP32_NAME: &str = "ed25519 seed"; 14 | type HmacSha512 = Hmac<Sha512>; 15 | 16 | #[derive(Clone, Display, Copy)] 17 | #[repr(C)] 18 | pub enum PhraseType { 19 | #[display(fmt = "standard")] 20 | Standard, 21 | #[display(fmt = "secure")] 22 | Secure, 23 | } 24 | 25 | impl Default for PhraseType { 26 | fn default() -> Self { 27 | Self::Standard 28 | } 29 | } 30 | 31 | pub fn generate_mnemonic_phrase(phrase: PhraseType) -> Mnemonic { 32 | let m_type = match phrase { 33 | PhraseType::Standard => MnemonicType::Words12, 34 | PhraseType::Secure => MnemonicType::Words24, 35 | }; 36 | Mnemonic::new(m_type, Language::English) 37 | } 38 | 39 | pub fn generate_keypair( 40 | phrase: PhraseType, 41 | passphrase: Option<&str>, 42 | ) -> Result<(String, DID), Error> { 43 | let mnemonic = generate_mnemonic_phrase(phrase); 44 | let did = did_from_mnemonic(mnemonic.phrase(), passphrase)?; 45 | Ok((mnemonic.into_phrase(), did)) 46 | } 47 | 48 | /// Generate DID from mnemonic phrase, extending compatibility 49 | pub fn did_from_mnemonic_with_chain( 50 | mnemonic: &str, 51 | passphrase: Option<&str>, 52 | ) -> Result<(DID, [u8; 32]), Error> { 53 | let mnemonic = Mnemonic::from_phrase(mnemonic, Language::English)?; 54 | let seed = Seed::new(&mnemonic, passphrase.unwrap_or_default()); 55 | let mut mac = HmacSha512::new_from_slice(ED25519_BIP32_NAME.as_ref()).unwrap(); 56 | mac.update(seed.as_bytes()); 57 | let bytes = mac.finalize().into_bytes(); 58 | let secret = SecretKey::from_bytes(&bytes[..32])?; 59 | // Note: This will allow extending to `ed25519-dalek-bip32` for path derivation, with this being the root of the
`ExtendedSecretKey` in the following format 60 | /* 61 | ExtendedSecretKey { 62 | depth: 0, 63 | child_index: ChildIndex::Normal(0), 64 | secret_key, 65 | chain_code 66 | } 67 | */ 68 | let mut chain_code = [0; 32]; 69 | chain_code.copy_from_slice(&bytes[32..]); 70 | Ok((secret.into(), chain_code)) 71 | } 72 | 73 | /// Generate DID from mnemonic phrase 74 | pub fn did_from_mnemonic(mnemonic: &str, passphrase: Option<&str>) -> Result<DID, Error> { 75 | did_from_mnemonic_with_chain(mnemonic, passphrase).map(|(did, _)| did) 76 | } 77 | 78 | pub fn mnemonic_into_tesseract( 79 | tesseract: &Tesseract, 80 | mnemonic: &str, 81 | passphrase: Option<&str>, 82 | save_mnemonic: bool, 83 | override_key: bool, 84 | ) -> Result<(), Error> { 85 | if !tesseract.is_unlock() { 86 | return Err(Error::TesseractLocked); 87 | } 88 | 89 | if tesseract.exist("keypair") && !override_key { 90 | return Err(Error::Any(anyhow::anyhow!("Keypair already exist"))); 91 | } 92 | 93 | let (did, chain) = did_from_mnemonic_with_chain(mnemonic, passphrase)?; 94 | 95 | let bytes = Zeroizing::new(did.as_ref().private_key_bytes()); 96 | let secret_key = SecretKey::from_bytes(&bytes)?; 97 | let public_key: PublicKey = (&secret_key).into(); 98 | let mut bytes: Zeroizing<[u8; KEYPAIR_LENGTH]> = Zeroizing::new([0u8; KEYPAIR_LENGTH]); 99 | 100 | bytes[..SECRET_KEY_LENGTH].copy_from_slice(secret_key.as_bytes()); 101 | bytes[SECRET_KEY_LENGTH..].copy_from_slice(public_key.as_bytes()); 102 | 103 | let kp = Keypair::from_bytes(&*bytes)?; 104 | 105 | let encoded = Zeroizing::new(bs58::encode(&kp.to_bytes()).into_string()); 106 | 107 | tesseract.set("keypair", &encoded)?; 108 | 109 | if save_mnemonic { 110 | let encoded_chain = Zeroizing::new(bs58::encode(&chain).into_string()); 111 | tesseract.set("chain", &encoded_chain)?; 112 | tesseract.set("mnemonic", mnemonic)?; 113 | } 114 | Ok(()) 115 | } 116 | 117 | #[cfg(test)] 118 | mod test { 119 | use super::did_from_mnemonic; 120 | 121 | const PHRASE: &str = 122 | "morning caution dose lab six actress pond humble pause enact virtual train"; 123 | 124 | #[test] 125 | fn generate_did_from_phrase() -> anyhow::Result<()> { 126 | let expected = "did:key:z6MksiU5wFcZHHSp4VvtQePW4zwUDNmGADqxfQi4TdcEvmjz"; 127 | let did = did_from_mnemonic(PHRASE, None)?; 128 | assert_eq!(did.to_string(), expected); 129 | Ok(()) 130 | } 131 | } 132 | -------------------------------------------------------------------------------- /warp/src/crypto/mod.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::result_large_err)] 2 | use std::{fmt::Display, str::FromStr}; 3 | 4 | pub use aes_gcm; 5 | pub use did_key::{self, DIDKey, Ed25519KeyPair, Fingerprint, KeyMaterial}; 6 | use did_key::{Generate, P256KeyPair, Secp256k1KeyPair, X25519KeyPair}; 7 | pub use digest; 8 | pub use ed25519_dalek; 9 | pub use rand; 10 | pub use sha2; 11 | pub use zeroize; 12 | 13 | pub mod cipher; 14 | pub mod hash; 15 | pub mod keypair; 16 | pub mod multihash; 17 | 18 | use serde::{Deserialize, Deserializer, Serialize}; 19 | 20 | use crate::error::Error; 21 | 22 | pub struct DID(DIDKey); 23 | 24 | impl FromStr for DID { 25 | type Err = Error; 26 | fn from_str(key: &str) -> Result<Self, Self::Err> { 27 | let key = match key.starts_with("did:key:") { 28 | true => key.to_string(), 29 | false => format!("did:key:{key}"), 30 | }; 31 | 32 | std::panic::catch_unwind(|| did_key::resolve(&key)) 33 | .map_err(|_| Error::PublicKeyInvalid) 34 | .and_then(|res| res.map(DID).map_err(|_| Error::PublicKeyInvalid)) 35 | } 36 | } 37 | 38 |
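// Note on `FromStr` above: both the full form ("did:key:z6Mk...") and the bare fingerprint
// ("z6Mk...") are accepted; the prefix is added automatically, and the panic that
// `did_key::resolve` raises on malformed keys is caught and mapped to `Error::PublicKeyInvalid`.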
impl core::hash::Hash for DID { 39 | fn hash<H: core::hash::Hasher>(&self, state: &mut H) { 40 | self.fingerprint().hash(state); 41 | } 42 | } 43 | 44 | impl AsRef<DIDKey> for DID { 45 | fn as_ref(&self) -> &DIDKey { 46 | &self.0 47 | } 48 | } 49 | 50 | impl std::fmt::Debug for DID { 51 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 52 | write!(f, "{self}") 53 | } 54 | } 55 | 56 | impl Eq for DID {} 57 | 58 | impl PartialEq for DID { 59 | fn eq(&self, other: &Self) -> bool { 60 | self.fingerprint() == other.fingerprint() 61 | } 62 | } 63 | 64 | // maybe have it clone the public key instead of checking for private key 65 | impl Clone for DID { 66 | fn clone(&self) -> Self { 67 | let public_bytes = self.0.public_key_bytes(); 68 | // let private_bytes = self.0.private_key_bytes(); 69 | // let pk = if private_bytes.is_empty() || private_bytes.len() != 32 { 70 | // None 71 | // } else { 72 | // Some(private_bytes.as_slice()) 73 | // }; 74 | let pk = None; 75 | let did = match self.0 { 76 | did_key::KeyPair::Ed25519(_) => { 77 | did_key::from_existing_key::<Ed25519KeyPair>(&public_bytes, pk) 78 | } 79 | did_key::KeyPair::X25519(_) => { 80 | did_key::from_existing_key::<X25519KeyPair>(&public_bytes, pk) 81 | } 82 | did_key::KeyPair::P256(_) => { 83 | did_key::from_existing_key::<P256KeyPair>(&public_bytes, pk) 84 | } 85 | did_key::KeyPair::Secp256k1(_) => { 86 | did_key::from_existing_key::<Secp256k1KeyPair>(&public_bytes, pk) 87 | } 88 | }; 89 | DID(did) 90 | } 91 | } 92 | 93 | impl Default for DID { 94 | fn default() -> Self { 95 | DID(did_key::generate::<Ed25519KeyPair>(None)) 96 | } 97 | } 98 | 99 | impl core::ops::Deref for DID { 100 | type Target = DIDKey; 101 | fn deref(&self) -> &Self::Target { 102 | &self.0 103 | } 104 | } 105 | 106 | impl Serialize for DID { 107 | fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error> 108 | where 109 | S: serde::Serializer, 110 | { 111 | serializer.serialize_str(&self.to_string()) 112 | } 113 | } 114 | 115 | impl<'d> Deserialize<'d> for DID { 116 | fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error> 117 | where 118 | D: Deserializer<'d>, 119 | { 120 | let did_str = <String as Deserialize>::deserialize(deserializer)?; 121 | DID::try_from(did_str).map_err(serde::de::Error::custom) 122 | } 123 | } 124 | 125 | impl Display for DID { 126 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 127 | write!(f, "did:key:{}", self.fingerprint()) 128 | } 129 | } 130 | 131 | impl TryFrom<String> for DID { 132 | type Error = Error; 133 | fn try_from(value: String) -> Result<Self, Self::Error> { 134 | DID::from_str(&value) 135 | } 136 | } 137 | 138 | impl From<DIDKey> for DID { 139 | fn from(did: DIDKey) -> Self { 140 | DID(did) 141 | } 142 | } 143 | 144 | impl From<ed25519_dalek::PublicKey> for DID { 145 | fn from(public_key: ed25519_dalek::PublicKey) -> Self { 146 | let did = Ed25519KeyPair::from_public_key(public_key.as_bytes()); 147 | DID(did.into()) 148 | } 149 | } 150 | 151 | impl From<ed25519_dalek::SecretKey> for DID { 152 | fn from(secret: ed25519_dalek::SecretKey) -> Self { 153 | let did: DIDKey = Ed25519KeyPair::from_secret_key(secret.as_bytes()).into(); 154 | did.into() 155 | } 156 | } 157 | 158 | pub fn generate<const N: usize>() -> [u8; N] { 159 | use rand::{rngs::OsRng, RngCore}; 160 | 161 | let mut buf = [0u8; N]; 162 | OsRng.fill_bytes(&mut buf); 163 | buf 164 | } 165 | -------------------------------------------------------------------------------- /warp/src/crypto/multihash.rs: -------------------------------------------------------------------------------- 1 | //TODO: Possibly reduce and simplify 2 | #![allow(clippy::result_large_err)] 3 | use multihash::{ 4 | derive::Multihash, Blake2b256, Blake2b512, Blake2s128, Blake2s256,
Blake3_256, Hasher, 5 | Identity256, Keccak224, Keccak256, Keccak384, Keccak512, MultihashDigest, Sha1, Sha2_256, 6 | Sha2_512, Sha3_224, Sha3_256, Sha3_384, Sha3_512, 7 | }; 8 | 9 | #[derive(Clone, Copy, Debug, Eq, Multihash, PartialEq)] 10 | #[mh(alloc_size = 64)] 11 | pub enum Code { 12 | #[mh(code = 0x00, hasher = Identity256)] 13 | Identity, 14 | #[mh(code = 0x11, hasher = Sha1)] 15 | Sha1, 16 | #[mh(code = 0x12, hasher = Sha2_256)] 17 | Sha2_256, 18 | #[mh(code = 0x13, hasher = Sha2_512)] 19 | Sha2_512, 20 | #[mh(code = 0x17, hasher = Sha3_224)] 21 | Sha3_224, 22 | #[mh(code = 0x16, hasher = Sha3_256)] 23 | Sha3_256, 24 | #[mh(code = 0x15, hasher = Sha3_384)] 25 | Sha3_384, 26 | #[mh(code = 0x14, hasher = Sha3_512)] 27 | Sha3_512, 28 | #[mh(code = 0x1a, hasher = Keccak224)] 29 | Keccak224, 30 | #[mh(code = 0x1b, hasher = Keccak256)] 31 | Keccak256, 32 | #[mh(code = 0x1c, hasher = Keccak384)] 33 | Keccak384, 34 | #[mh(code = 0x1d, hasher = Keccak512)] 35 | Keccak512, 36 | #[mh(code = 0xb220, hasher = Blake2b256)] 37 | Blake2b256, 38 | #[mh(code = 0xb240, hasher = Blake2b512)] 39 | Blake2b512, 40 | #[mh(code = 0xb250, hasher = Blake2s128)] 41 | Blake2s128, 42 | #[mh(code = 0xb260, hasher = Blake2s256)] 43 | Blake2s256, 44 | #[mh(code = 0x1e, hasher = Blake3_256)] 45 | Blake3_256, 46 | } 47 | 48 | macro_rules! create_hash_functions { 49 | ($code:ident) => { 50 | paste::item! { 51 | pub fn [<$code:lower _multihash_slice>](slice: &[u8]) -> Result<Vec<u8>, crate::error::Error> { 52 | multihash_slice::<$code>(Code::$code, slice).map_err(crate::error::Error::from) 53 | } 54 | 55 | pub fn [<$code:lower _multihash_file>]<P: AsRef<std::path::Path>>(file: P) -> Result<Vec<u8>, crate::error::Error> { 56 | multihash_file::<$code, _>(Code::$code, file).map_err(crate::error::Error::from) 57 | } 58 | } 59 | }; 60 | } 61 | 62 | pub fn multihash_slice<H: Hasher + Default>(code: Code, slice: &[u8]) -> anyhow::Result<Vec<u8>> { 63 | let mut hasher = H::default(); 64 | hasher.update(slice); 65 | let digest = code.wrap(hasher.finalize())?; 66 | Ok(digest.to_bytes()) 67 | } 68 | 69 | pub fn multihash_reader<H: Hasher + Default + std::io::Write>( 70 | code: Code, 71 | reader: &mut impl std::io::Read, 72 | ) -> anyhow::Result<Vec<u8>> { 73 | let mut hasher = H::default(); 74 | std::io::copy(reader, &mut hasher)?; 75 | let digest = code.wrap(hasher.finalize())?; 76 | Ok(digest.to_bytes()) 77 | } 78 | 79 | pub fn multihash_file<H: Hasher + Default, P: AsRef<std::path::Path>>( 80 | code: Code, 81 | file: P, 82 | ) -> anyhow::Result<Vec<u8>> { 83 | use std::io::Read; 84 | let mut hasher = H::default(); 85 | let mut reader = std::fs::File::open(file)?; 86 | loop { 87 | let mut buffer = [0u8; 512]; 88 | match reader.read(&mut buffer) { 89 | Ok(0) => break, 90 | Ok(size) => hasher.update(&buffer[..size]), 91 | Err(e) if e.kind() == std::io::ErrorKind::Interrupted => continue, 92 | Err(e) => return Err(anyhow::anyhow!(e)), 93 | } 94 | } 95 | let digest = code.wrap(hasher.finalize())?; 96 | Ok(digest.to_bytes()) 97 | } 98 | 99 | create_hash_functions!(Sha1); 100 | create_hash_functions!(Sha2_256); 101 | create_hash_functions!(Sha2_512); 102 | create_hash_functions!(Sha3_224); 103 | create_hash_functions!(Sha3_256); 104 | create_hash_functions!(Sha3_384); 105 | create_hash_functions!(Sha3_512); 106 | create_hash_functions!(Keccak224); 107 | create_hash_functions!(Keccak256); 108 | create_hash_functions!(Keccak384); 109 | create_hash_functions!(Keccak512); 110 | create_hash_functions!(Blake2b256); 111 | create_hash_functions!(Blake2b512); 112 | create_hash_functions!(Blake2s128); 113 | create_hash_functions!(Blake2s256); 114 | create_hash_functions!(Blake3_256); 115 | 116 | #[cfg(test)]
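// For reference, each `create_hash_functions!` invocation above expands (via `paste`) into a
// pair of convenience wrappers; e.g. `create_hash_functions!(Sha2_256)` produces roughly:
//
//   pub fn sha2_256_multihash_slice(slice: &[u8]) -> Result<Vec<u8>, crate::error::Error> { ... }
//   pub fn sha2_256_multihash_file<P: AsRef<std::path::Path>>(file: P) -> Result<Vec<u8>, crate::error::Error> { ... }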
117 | mod test { 118 | // 119 | use crate::crypto::multihash::*; 120 | #[test] 121 | fn sha1_multihash_test() -> anyhow::Result<()> { 122 | let hash = sha1_multihash_slice(b"Hello, World!")?; 123 | 124 | assert_eq!( 125 | bs58::encode(&hash).into_string(), 126 | String::from("5dqvXR93VnV1Grn96DBGdqEQAbJe1e") 127 | ); 128 | Ok(()) 129 | } 130 | 131 | #[test] 132 | fn sha2_256_multihash_test() -> anyhow::Result<()> { 133 | let hash = sha2_256_multihash_slice(b"Hello, World!")?; 134 | 135 | assert_eq!( 136 | bs58::encode(&hash).into_string(), 137 | String::from("QmdR1iHsUocy7pmRHBhNa9znM8eh8Mwqq5g5vcw8MDMXTt") 138 | ); 139 | Ok(()) 140 | } 141 | } 142 | -------------------------------------------------------------------------------- /warp/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![allow(clippy::result_large_err)] 2 | 3 | pub mod blink; 4 | pub mod constellation; 5 | pub mod crypto; 6 | pub mod data; 7 | pub mod error; 8 | pub mod module; 9 | pub mod multipass; 10 | pub mod raygun; 11 | pub mod tesseract; 12 | pub mod warp; 13 | 14 | /// Used to downcast a specific type from an extension to share to another 15 | pub trait SingleHandle { 16 | fn handle(&self) -> Result<Box<dyn core::any::Any>, error::Error> { 17 | Err(error::Error::Unimplemented) 18 | } 19 | } 20 | 21 | pub trait Extension { 22 | /// Returns an id of the extension. Should be the crate name (e.g., in a `warp-module-ext` format) 23 | fn id(&self) -> String; 24 | 25 | /// Returns the name of an extension 26 | fn name(&self) -> String; 27 | 28 | /// Returns the description of the extension 29 | fn description(&self) -> String { 30 | format!( 31 | "{} is an extension that is designed to be used for {}", 32 | self.name(), 33 | self.module() 34 | ) 35 | } 36 | 37 | /// Returns the module type the extension is meant to be used for 38 | fn module(&self) -> crate::module::Module; 39 | } 40 | -------------------------------------------------------------------------------- /warp/src/module.rs: -------------------------------------------------------------------------------- 1 | use derive_more::Display; 2 | use serde::{Deserialize, Serialize}; 3 | 4 | // 5 | /// `Messaging` - Allows direct and multi-user encrypted messaging, with ownership rights added so only 6 | /// the expected users can edit and delete messages. 7 | /// 8 | /// `FileSystem` - Facilitates the creation of files and folders within a central directory tree (Index). 9 | /// This index is managed internally; traversal of the directory as well as full listings, 10 | /// deletion, and creation are provided within this module, in addition to uploading files to the filesystem. 11 | /// 12 | /// `Accounts` - Creates a unique user account used to store core information about the user. 13 | /// This can include simple things like usernames and status messages, but may also 14 | /// include permissions, friends, and more. 15 | /// 16 | #[derive(Hash, Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Display)] 17 | #[serde(rename_all = "lowercase")] 18 | #[repr(C)] 19 | pub enum Module { 20 | /// Allows for direct and multi-user encrypted messaging with ownership 21 | #[display(fmt = "messaging")] 22 | Messaging, 23 | 24 | /// Facilitates the creation of files and directories within a central directory tree.
This tree, which is an index, 25 | /// is managed internally; traversal of the directory as well as full listings, deletion, and creation are provided within 26 | /// this module by an extension, in addition to uploading files to the filesystem. 27 | #[display(fmt = "filesystem")] 28 | FileSystem, 29 | 30 | /// Creates a unique user account used to store core information about the user, which can include usernames, status messages, permissions, etc. 31 | #[display(fmt = "accounts")] 32 | Accounts, 33 | 34 | /// Allows for storing of data for faster access at a later point in time. Additionally, it may allow for caching of frequently used (or accessed) data 35 | /// so that requests can be made faster. 36 | #[display(fmt = "cache")] 37 | Cache, 38 | 39 | /// Represents media such as audio/video calls 40 | #[display(fmt = "media")] 41 | Media, 42 | 43 | /// Unknown module. Should be used by default where a module cannot be identified for any specific reason. 44 | #[display(fmt = "unknown")] 45 | Unknown, 46 | } 47 | 48 | impl Default for Module { 49 | fn default() -> Self { 50 | Self::Unknown 51 | } 52 | } 53 | 54 | impl<A> From<A> for Module 55 | where 56 | A: AsRef<str>, 57 | { 58 | fn from(module: A) -> Self { 59 | match module.as_ref() { 60 | "messaging" => Module::Messaging, 61 | "filesystem" => Module::FileSystem, 62 | "accounts" => Module::Accounts, 63 | "cache" => Module::Cache, 64 | _ => Module::Unknown, 65 | } 66 | } 67 | } 68 | --------------------------------------------------------------------------------