├── .gitattributes ├── .github └── workflows │ └── ci.yml ├── .gitignore ├── README.md ├── core ├── Cargo.lock ├── Cargo.toml ├── cli │ ├── Cargo.toml │ └── src │ │ └── main.rs ├── lib │ ├── Cargo.toml │ └── src │ │ ├── audio_clip.rs │ │ ├── db.rs │ │ ├── ggml-base.en-q5_0.bin │ │ ├── internal_encoding.rs │ │ ├── language_processor.rs │ │ ├── lib.rs │ │ └── spectrum.rs └── napi │ ├── Cargo.toml │ ├── build.rs │ ├── package-lock.json │ ├── package.json │ └── src │ ├── js_logger.rs │ └── lib.rs ├── sound-ci-helpers-windows ├── devcon.exe ├── setup_sound.ps1 └── vbcable.cer └── ui ├── .eslintignore ├── .eslintrc.json ├── .postcssrc.json ├── .prettierignore ├── .proxyrc.js ├── forge.config.js ├── jest.config.js ├── package-lock.json ├── package.json ├── src ├── AudioView.tsx ├── ClipList.test.tsx ├── ClipList.tsx ├── CurrentClip.test.tsx ├── CurrentClip.tsx ├── RecordTab.test.tsx ├── RecordTab.tsx ├── Toaster.test.tsx ├── Toaster.tsx ├── UiMain.tsx ├── drag_and_drop.png ├── icons.js ├── index.css ├── index.html ├── main.js ├── preload.d.ts ├── preload.js ├── renderer.tsx └── test_setup.tsx ├── tailwind.config.js ├── test ├── __snapshots__ │ └── record_and_playback.test.tsx.snap ├── record_and_playback.test.tsx └── ui_state.test.tsx └── tsconfig.json /.gitattributes: -------------------------------------------------------------------------------- 1 | # https://github.com/actions/checkout/issues/135#issuecomment-613329879 2 | * text=auto 3 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - main 5 | pull_request: 6 | 7 | jobs: 8 | ci: 9 | runs-on: ${{ matrix.os }} 10 | timeout-minutes: 30 11 | 12 | strategy: 13 | fail-fast: false 14 | matrix: 15 | os: [macos-11, ubuntu-20.04, windows-2019] 16 | 17 | steps: 18 | - uses: actions/checkout@v2 19 | - uses: actions/cache@v2 20 | name: Cache (rust) 21 | with: 22 | path: | 23 | ~/.cargo/bin/ 24 | ~/.cargo/registry/index/ 25 | ~/.cargo/registry/cache/ 26 | ~/.cargo/git/db/ 27 | core/target/ 28 | key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} 29 | - name: Get npm cache directory 30 | id: npm-cache-dir 31 | run: | 32 | echo "::set-output name=dir::$(npm config get cache)" 33 | - uses: actions/cache@v2 34 | name: Cache (node) 35 | id: npm-cache # use this to check for `cache-hit` ==> if: steps.npm-cache.outputs.cache-hit != 'true' 36 | with: 37 | path: ${{ steps.npm-cache-dir.outputs.dir }} 38 | key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} 39 | restore-keys: | 40 | ${{ runner.os }}-node- 41 | - name: Set version (ui) 42 | run: npm version 0.0.1-${{ github.sha }} 43 | working-directory: ./ui 44 | - name: Install deps (ubuntu) 45 | run: | 46 | sudo apt-get install -y libasound2-dev libportaudio2 jackd2 libjack-jackd2-0 libjack-jackd2-dev 47 | if: matrix.os == 'ubuntu-20.04' 48 | - name: Install LLVM and Clang (windows) # required for bindgen to work, see https://github.com/rust-lang/rust-bindgen/issues/1797 49 | uses: KyleMayes/install-llvm-action@13d5d77cbf0bd7e35cb02a8f9ed4bb85bed3393b 50 | with: 51 | version: "12.0" 52 | directory: ${{ runner.temp }}/llvm 53 | if: matrix.os == 'windows-2019' 54 | - name: Set LIBCLANG_PATH (windows) 55 | run: echo "LIBCLANG_PATH=$((gcm clang).source -replace "clang.exe")" >> $env:GITHUB_ENV 56 | if: matrix.os == 'windows-2019' 57 | - name: Install deps (windows) 58 | run: | 59 | net start audiosrv 60 | 
Set-ItemProperty -Path 'HKLM:\SOFTWARE\Microsoft\PolicyManager\default\Privacy\LetAppsAccessMicrophone' -Name value -Value 1 61 | powershell sound-ci-helpers-windows/setup_sound.ps1 62 | 63 | Get-PnpDevice -Class AudioEndpoint | fl 64 | Get-ItemProperty -Path 'HKLM:\SOFTWARE\Microsoft\Windows\CurrentVersion\CapabilityAccessManager\ConsentStore\microphone' 65 | Get-ItemProperty -Path 'HKLM:\SOFTWARE\Microsoft\PolicyManager\default\Privacy\LetAppsAccessMicrophone' 66 | if: matrix.os == 'windows-2019' 67 | - name: test (core) 68 | run: cargo test --release 69 | working-directory: ./core 70 | - name: clippy (core) 71 | run: cargo clippy 72 | working-directory: ./core 73 | if: matrix.os == 'ubuntu-20.04' 74 | - name: install dependencies (ui) 75 | run: npm ci 76 | working-directory: ./ui 77 | - name: test (ui) 78 | run: npm test 79 | working-directory: ./ui 80 | if: matrix.os != 'ubuntu-20.04' 81 | - name: test (ui, ubuntu) 82 | run: | 83 | jackd -r -d dummy & 84 | xvfb-run npm run dev:core-and-jest-with-jack 85 | echo OK 86 | working-directory: ./ui 87 | if: matrix.os == 'ubuntu-20.04' 88 | - name: make (ui) 89 | if: github.event_name == 'pull_request' 90 | run: npm run prod:make 91 | working-directory: ./ui 92 | - uses: actions/upload-artifact@v2 93 | if: github.event_name == 'pull_request' 94 | with: 95 | name: Oxygen-${{matrix.os}} 96 | path: ui/out/make/**/*.zip 97 | - name: publish 98 | if: github.ref == 'refs/heads/main' 99 | run: npm run prod:publish 100 | working-directory: ./ui 101 | env: 102 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 103 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | core/target 2 | *.node 3 | core/napi/index.js 4 | core/napi/index.d.ts 5 | core/napi/node_modules 6 | ui/node_modules 7 | ui/.parcel-cache 8 | ui/dist 9 | ui/out 10 | oxygen.sqlite 11 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Oxygen Voice Journal 2 | 3 | Oxygen is a voice journal and audio analysis toolkit for people who want to 4 | change the way their voice comes across. Or rather, it will be. This is still 5 | in very early development. 6 | 7 | You can watch me code this at https://www.youtube.com/c/JocelynStericker 8 | 9 | ## Motivation 10 | 11 | Like many others, I couldn't stand my voice, but it wasn't until fairly recently 12 | that I learned just how flexible our vocal system is and took advantage of 13 | that. At the risk of being a little bold, with enough training, it's possible 14 | for most people to develop their voice to take on the character of another 15 | voice they like. The main limitation is that it can be physically impossible to 16 | develop a darker, more boomy voice than your vocal tract supports, but 17 | otherwise, whatever traits you wish your voice had, you can probably build 18 | them! 19 | 20 | Voice training feels a bit like a game. The "core loop" is recording a sample, 21 | analyzing it, both with my ear and with software, trying to change some aspect 22 | of it, and then repeating. Progress is slow, so it's also important to be able 23 | to look back weeks or months to see improvement and stay motivated.
Software 24 | can help visualize or measure the aspects we need to work on, but my current 25 | setup, without that software, is a bit of a Rube Goldberg machine, and I hope I 26 | can make training more accessible, organized, and addictive. 27 | 28 | ## Dependencies 29 | 30 | Building Oxygen requires [Rust](https://www.rust-lang.org/learn/get-started), [Node 16](https://nodejs.org/en/), and [CMake](https://cmake.org/download/) on all platforms. The Linux build also requires ALSA development files. 31 | 32 | ### Linux 33 | 34 | I recommend installing Rust using [rustup](https://www.rust-lang.org/learn/get-started). 35 | 36 | Oxygen is tested with Node 16. If your package manager contains Node 16, I recommend using that. Otherwise, see [Installing Node.js via package manager](https://nodejs.org/en/download/package-manager/). 37 | 38 | I recommend installing CMake via your package manager. 39 | 40 | In addition, on Linux, the ALSA development files are required. These are provided as part of the `libasound2-dev` package on Debian and Ubuntu distributions and `alsa-lib-devel` on Fedora. 41 | 42 | ### macOS 43 | 44 | I recommend installing Node and CMake using brew (`brew install node cmake`), and Rust using [rustup](https://www.rust-lang.org/learn/get-started). 45 | 46 | ### Windows 47 | 48 | I recommend installing all dependencies from the download links on their homepages. 49 | 50 | When installing Rust and Node on Windows, please follow the instructions in the respective installers to install associated build tools. 51 | 52 | ## Building the CLI 53 | 54 | This project uses Rust. Once Rust is installed, you can build and run the 55 | oxygen CLI with cargo: 56 | 57 | ``` 58 | cd ./core 59 | cargo run 60 | ``` 61 | 62 | ## Using the CLI 63 | 64 | You can run the CLI by running `cargo run` in `./core`. 65 | 66 | Oxygen stores clips in an "oxygen.sqlite" database in your platform's per-user data 67 | directory (an "oxygen.sqlite" file left in the current working directory by an older version is migrated there automatically). 68 | 69 | Oxygen supports the following commands: 70 | 71 | ``` 72 | cargo run -- record [name] 73 | Record an audio clip using the default input device until ctrl+c is pressed. 74 | If name is not specified, the current date and time will be used. 75 | 76 | cargo run -- list 77 | List all clips 78 | 79 | cargo run -- rename old_name new_name 80 | Rename the clip called old_name to new_name 81 | cargo run -- play name 82 | Play the clip with the given name 83 | 84 | cargo run -- delete name 85 | Delete the clip with the given name 86 | 87 | cargo run -- import path [name] 88 | Import the clip at the given path. If a name is not specified, the clip will be 89 | named after the path. 90 | 91 | cargo run -- export name path 92 | Export the clip with the given name to the given path. 93 | The path should end in ".wav". 94 | 95 | cargo run -- export-all folder 96 | Export all clips to the given folder. 97 | ``` 98 | 99 | ## Running the UI 100 | 101 | To run the UI: 102 | 103 | ``` 104 | cd ./ui 105 | npm install 106 | npm start 107 | ``` 108 | 109 | ## Tests 110 | 111 | To run UI and integration tests, as well as lints, run: 112 | 113 | ``` 114 | cd ./ui 115 | npm test 116 | ``` 117 | 118 | You can also run jest directly with: 119 | 120 | ``` 121 | cd ./ui 122 | npm run dev:build-core 123 | npm run jest 124 | ``` 125 | 126 | Or in watch mode with: 127 | 128 | ``` 129 | cd ./ui 130 | npm run dev:build-core 131 | npm run jest -- --watch
132 | ``` 133 | 134 | ## Building the app in release mode 135 | 136 | To build a package to `ui/out`: 137 | 138 | ``` 139 | cd ./ui 140 | npm run prod:package 141 | ``` 142 | 143 | To create the kind of asset that would get uploaded to GitHub releases: 144 | 145 | ``` 146 | cd ./ui 147 | npm run prod:make 148 | ``` 149 | 150 | `npm run prod:publish` would publish the app to GitHub releases, but it requires `GITHUB_TOKEN` to be set. We only publish releases from GitHub Actions. 151 | 152 | ## Contributing 153 | 154 | Bug fixes are very welcome! I do have a rough roadmap planned for this project, 155 | and am uploading development recordings, so reach out (e.g., in issues) before 156 | implementing any major features. 157 | -------------------------------------------------------------------------------- /core/Cargo.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | 3 | members = [ 4 | "lib", 5 | "cli", 6 | "napi" 7 | ] 8 | -------------------------------------------------------------------------------- /core/cli/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "oxygen-cli" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [features] 7 | jack = ["oxygen-core/jack"] 8 | 9 | [dependencies] 10 | oxygen-core = { path = "../lib" } 11 | chrono = "0.4.24" 12 | clap = { version = "4.2.7", features = ["derive"] } 13 | color-eyre = "0.6.2" 14 | ctrlc = "3.2.5" 15 | symphonia = {version = "0.5.2", features = ["isomp4", "mp3", "aac"] } 16 | hound = "3.5.0" 17 | log = "0.4.17" 18 | env_logger = "0.10.0" 19 | -------------------------------------------------------------------------------- /core/cli/src/main.rs: -------------------------------------------------------------------------------- 1 | use chrono::prelude::*; 2 | use clap::{Parser, Subcommand}; 3 | use color_eyre::eyre::{eyre, Result}; 4 | use oxygen_core::audio_clip::{AudioBackend, AudioClip}; 5 | use oxygen_core::db::Db; 6 | use oxygen_core::language_processor::LanguageProcessor; 7 | use std::{ffi::OsStr, path::Path, sync::mpsc::channel}; 8 | 9 | #[derive(Parser, Debug)] 10 | #[clap(name = "oxygen")] 11 | #[clap( 12 | about = "A voice journal and audio analysis toolkit for people who want to change the way their voice comes across." 13 | )] 14 | struct Cli { 15 | #[clap(subcommand)] 16 | command: Commands, 17 | 18 | #[cfg(feature = "jack")] 19 | #[clap(global = true, long)] 20 | /// On Linux, use the jack backend instead of the alsa backend. 21 | /// 22 | /// Note that this requires that the app was compiled with the "jack" feature 23 | /// (e.g., `cargo run --features=jack -- --jack`) 24 | jack: bool, 25 | } 26 | 27 | #[derive(Subcommand, Debug)] 28 | enum Commands { 29 | /// Record an audio clip using the default input device until ctrl+c is pressed. 30 | Record { 31 | /// The name of the clip to record. If not specified, the current date and time will be 32 | /// used. 33 | name: Option, 34 | }, 35 | /// List all clips. 36 | List {}, 37 | /// Play the clip with the given name. 38 | #[clap(arg_required_else_help = true)] 39 | Play { 40 | /// The name of the clip to play. 41 | name: String, 42 | }, 43 | /// Prints a transcript of the clip. 44 | #[clap(arg_required_else_help = true)] 45 | Transcribe { 46 | /// The name of the clip to transcribe. 47 | name: String, 48 | }, 49 | /// Rename a clip with the given name. 50 | #[clap(arg_required_else_help = true)] 51 | Rename { 52 | /// The old name. 
53 | old_name: String, 54 | 55 | /// The new name. 56 | new_name: String, 57 | }, 58 | /// Delete the clip with the given name. 59 | #[clap(arg_required_else_help = true)] 60 | Delete { 61 | /// The name of the clip to delete. 62 | name: String, 63 | }, 64 | /// Import the clip at the given path. If a name is not specified, the clip will be 65 | /// named after the path. 66 | #[clap(arg_required_else_help = true)] 67 | Import { 68 | /// The path to import. 69 | path: String, 70 | /// The name of the clip to import. 71 | name: Option, 72 | }, 73 | /// Export the clip with the given name to the given path, as a wav file. 74 | #[clap(arg_required_else_help = true)] 75 | Export { 76 | /// The name of the clip to export. 77 | name: String, 78 | /// The path to export to, ending in ".wav". 79 | path: String, 80 | }, 81 | #[clap(arg_required_else_help = true)] 82 | /// Export all clips to the given folder. 83 | ExportAll { folder: String }, 84 | } 85 | 86 | fn main() -> Result<()> { 87 | env_logger::init(); 88 | color_eyre::install()?; 89 | let args = Cli::parse(); 90 | let db = Db::open()?; 91 | 92 | #[cfg(feature = "jack")] 93 | let host = match args.jack { 94 | true => AudioBackend::Jack, 95 | false => AudioBackend::Default, 96 | }; 97 | 98 | #[cfg(not(feature = "jack"))] 99 | let host = AudioBackend::Default; 100 | 101 | match args.command { 102 | Commands::Record { name } => { 103 | let name = name.unwrap_or_else(|| Local::now().format("%Y-%m-%d %H:%M:%S").to_string()); 104 | if db.load(&name)?.is_some() { 105 | return Err(eyre!("There is already a clip named {}", name)); 106 | } 107 | let handle = AudioClip::record(host, name)?; 108 | 109 | let (tx, rx) = channel(); 110 | ctrlc::set_handler(move || tx.send(()).expect("Could not send signal on channel."))?; 111 | 112 | println!("Waiting for Ctrl-C..."); 113 | rx.recv()?; 114 | println!("Got it! Exiting..."); 115 | 116 | let mut clip = handle.stop(); 117 | 118 | eprintln!("Recorded {} samples", clip.samples.len()); 119 | db.save(&mut clip)?; 120 | } 121 | Commands::List {} => { 122 | println!("{:5} {:30} {:30}", "id", "name", "date"); 123 | for entry in db.list()? { 124 | println!( 125 | "{:5} {:30} {:30}", 126 | entry.id, 127 | entry.name, 128 | entry.date.with_timezone(&Local).format("%Y-%m-%d %H:%M:%S") 129 | ) 130 | } 131 | } 132 | Commands::Play { name } => { 133 | if let Some(clip) = db.load(&name)? { 134 | let handle = clip.play(host)?; 135 | let (done_tx, done_rx) = channel::<()>(); 136 | handle.connect_done(move || { 137 | done_tx.send(()).unwrap(); 138 | }); 139 | done_rx.recv()?; 140 | } else { 141 | return Err(eyre!("No such clip.")); 142 | } 143 | } 144 | Commands::Transcribe { name } => { 145 | let mut language_processor = LanguageProcessor::new()?; 146 | if let Some(clip) = db.load(&name)? { 147 | for segment in &language_processor.transcribe(&clip)? { 148 | println!( 149 | "{:10.3} - {:10.3} {:30}", 150 | (segment.0).0, 151 | (segment.0).1, 152 | segment.1 153 | ) 154 | } 155 | } else { 156 | return Err(eyre!("No such clip.")); 157 | } 158 | } 159 | Commands::Rename { old_name, new_name } => { 160 | db.rename(&old_name, &new_name)?; 161 | } 162 | Commands::Delete { name } => { 163 | db.delete(&name)?; 164 | } 165 | Commands::Import { name, path } => { 166 | let name = match name { 167 | Some(name) => name, 168 | None => Path::new(&path) 169 | .file_stem() 170 | .ok_or_else(|| eyre!("Invalid path: {}", path))? 171 | .to_str() 172 | .ok_or_else(|| eyre!("Path is not utf8"))? 
173 | .to_string(), 174 | }; 175 | if db.load(&name)?.is_some() { 176 | return Err(eyre!("There is already a clip named {}", name)); 177 | } 178 | let mut clip = AudioClip::import(name, path)?; 179 | db.save(&mut clip)?; 180 | } 181 | Commands::Export { name, path } => { 182 | if let Some(clip) = db.load(&name)? { 183 | clip.export(&path)? 184 | } else { 185 | return Err(eyre!("No such clip.")); 186 | } 187 | } 188 | Commands::ExportAll { folder } => { 189 | let path = Path::new(&folder); 190 | if !path.exists() { 191 | std::fs::create_dir(path)?; 192 | } 193 | let mut children = path.read_dir()?; 194 | if children.next().is_some() { 195 | return Err(eyre!("Expected {} to be empty.", folder)); 196 | } 197 | 198 | for entry in db.list()? { 199 | if let Some(clip) = db.load(&entry.name)? { 200 | let safe_name = Path::new(&entry.name) 201 | .file_name() 202 | .unwrap_or_else(|| OsStr::new("invalid")) 203 | .to_str() 204 | .ok_or_else(|| eyre!("Path is not valid utf8"))? 205 | .to_string(); 206 | let export_path = 207 | path.join(Path::new(&format!("{}_{}.wav", entry.id, safe_name))); 208 | let export_path = export_path 209 | .to_str() 210 | .ok_or_else(|| eyre!("Path is not utf8"))?; 211 | clip.export(export_path)?; 212 | } else { 213 | return Err(eyre!("{} was removed during export.", entry.name)); 214 | } 215 | } 216 | 217 | eprintln!("Exported to {}", folder); 218 | } 219 | } 220 | 221 | Ok(()) 222 | } 223 | -------------------------------------------------------------------------------- /core/lib/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "oxygen-core" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [features] 7 | jack = ["cpal/jack"] 8 | whisper_dummy = [] 9 | 10 | [dependencies] 11 | cpal = { version = "0.15.2", features = [] } 12 | color-eyre = "0.6.2" 13 | dasp = {version = "0.11.0", features = ["signal", "interpolate", "interpolate-linear"]} 14 | directories = "5.0.1" 15 | chrono = "0.4.24" 16 | audiopus = "0.3.0-rc.0" 17 | symphonia = {version = "0.5.2", features = ["isomp4", "mp3", "aac"] } 18 | hound = "3.5.0" 19 | log = "0.4.17" 20 | rusqlite = { version = "0.29.0", features = ["bundled"] } 21 | whisper-rs = {git = "https://github.com/tazz4843/whisper-rs", rev = "bf6d6fcf17c39e008d8280a174f135a95c517b4e"} 22 | realfft = "3.3.0" 23 | -------------------------------------------------------------------------------- /core/lib/src/audio_clip.rs: -------------------------------------------------------------------------------- 1 | use chrono::prelude::*; 2 | use color_eyre::eyre::{eyre, Result, WrapErr}; 3 | use cpal::traits::{DeviceTrait, HostTrait, StreamTrait}; 4 | use cpal::{Host, HostUnavailable, Sample, Stream}; 5 | use dasp::{interpolate::linear::Linear, signal, Signal}; 6 | use std::fs::File; 7 | use std::path::Path; 8 | use std::sync::{Arc, Mutex}; 9 | use symphonia::core::audio::SampleBuffer; 10 | use symphonia::core::codecs::DecoderOptions; 11 | use symphonia::core::errors::Error; 12 | use symphonia::core::formats::FormatOptions; 13 | use symphonia::core::io::MediaSourceStream; 14 | use symphonia::core::meta::MetadataOptions; 15 | use symphonia::core::probe::Hint; 16 | 17 | use crate::spectrum; 18 | 19 | pub struct RecordState { 20 | clip: AudioClip, 21 | } 22 | 23 | pub struct RecordHandle { 24 | stream: Stream, 25 | /// Option is only taken in "stop". 
26 | clip: Arc<Mutex<Option<RecordState>>>, 27 | } 28 | 29 | impl RecordHandle { 30 | pub fn stop(self) -> AudioClip { 31 | drop(self.stream); 32 | let clip = self.clip.lock().unwrap().take().unwrap().clip; 33 | log::info!("Recorded clip has {} samples", clip.samples.len()); 34 | clip 35 | } 36 | } 37 | 38 | type RecordStateHandle = Arc<Mutex<Option<RecordState>>>; 39 | 40 | struct PlaybackState { 41 | time: usize, 42 | samples: Vec<f32>, 43 | changed_cbs: Vec<Box<dyn Fn() + Send>>, 44 | changed_cbs_triggered_at: usize, 45 | done_cbs: Vec<Box<dyn Fn() + Send>>, 46 | sample_rate: usize, 47 | } 48 | 49 | type PlaybackStateHandle = Arc<Mutex<Option<PlaybackState>>>; 50 | 51 | pub struct PlayHandle { 52 | _stream: Stream, 53 | state: PlaybackStateHandle, 54 | } 55 | 56 | impl PlayHandle { 57 | pub fn connect_changed<F: Fn() + Send + 'static>(&self, f: F) { 58 | let mut state = self.state.lock().unwrap(); 59 | let state = state.as_mut().unwrap(); 60 | state.changed_cbs.push(Box::new(f)); 61 | } 62 | 63 | pub fn connect_done<F: Fn() + Send + 'static>(&self, f: F) { 64 | let mut state = self.state.lock().unwrap(); 65 | let state = state.as_mut().unwrap(); 66 | 67 | if state.time >= state.samples.len() { 68 | f(); 69 | } else { 70 | state.done_cbs.push(Box::new(f)); 71 | } 72 | } 73 | 74 | pub fn seek(&self, time: f64) { 75 | let mut state = self.state.lock().unwrap(); 76 | let state = state.as_mut().unwrap(); 77 | state.time = (time * state.sample_rate as f64) as usize; 78 | state.changed_cbs_triggered_at = 0; 79 | } 80 | } 81 | 82 | pub trait StreamHandle { 83 | fn sample_rate(&self) -> u32; 84 | fn samples(&self) -> usize; 85 | fn time(&self) -> f64; 86 | } 87 | 88 | impl StreamHandle for RecordHandle { 89 | fn sample_rate(&self) -> u32 { 90 | let mut state = self.clip.lock().unwrap(); 91 | let state = state.as_mut().unwrap(); 92 | 93 | state.clip.sample_rate 94 | } 95 | 96 | fn samples(&self) -> usize { 97 | let mut state = self.clip.lock().unwrap(); 98 | let state = state.as_mut().unwrap(); 99 | 100 | state.clip.samples.len() 101 | } 102 | 103 | fn time(&self) -> f64 { 104 | let mut state = self.clip.lock().unwrap(); 105 | let state = state.as_mut().unwrap(); 106 | 107 | (state.clip.samples.len() as f64) / (state.clip.sample_rate as f64) 108 | } 109 | } 110 | 111 | impl StreamHandle for PlayHandle { 112 | fn sample_rate(&self) -> u32 { 113 | let mut state = self.state.lock().unwrap(); 114 | let state = state.as_mut().unwrap(); 115 | 116 | state.sample_rate as u32 117 | } 118 | 119 | fn samples(&self) -> usize { 120 | let mut state = self.state.lock().unwrap(); 121 | let state = state.as_mut().unwrap(); 122 | 123 | state.samples.len() 124 | } 125 | 126 | fn time(&self) -> f64 { 127 | let mut state = self.state.lock().unwrap(); 128 | let state = state.as_mut().unwrap(); 129 | 130 | (state.time as f64) / (state.sample_rate as f64) 131 | } 132 | } 133 | 134 | pub trait ClipHandle { 135 | fn render_waveform(&self, range: (usize, usize), width: usize, height: usize) -> Vec<u8>; 136 | fn render_spectrogram( 137 | &self, 138 | range: (usize, usize), 139 | width: usize, 140 | height: usize, 141 | ) -> Result<Vec<u8>>; 142 | fn num_samples(&self) -> usize; 143 | fn sample_rate(&self) -> usize; 144 | } 145 | 146 | impl ClipHandle for RecordHandle { 147 | fn render_waveform(&self, range: (usize, usize), width: usize, height: usize) -> Vec<u8> { 148 | let mut state = self.clip.lock().unwrap(); 149 | let state = state.as_mut().unwrap(); 150 | 151 | state.clip.render_waveform(range, width, height) 152 | } 153 | 154 | fn render_spectrogram( 155 | &self, 156 | range: (usize, usize), 157 | width: usize, 158 | height: usize, 159 | ) -> Result<Vec<u8>> { 160 | let mut lock = self.clip.lock().unwrap(); 161 | let
state = lock.as_mut().unwrap(); 162 | let clip = state.clip.clone(); 163 | drop(lock); 164 | 165 | clip.render_spectrogram(range, width, height) 166 | } 167 | 168 | fn num_samples(&self) -> usize { 169 | let mut state = self.clip.lock().unwrap(); 170 | let state = state.as_mut().unwrap(); 171 | 172 | state.clip.samples.len() 173 | } 174 | 175 | fn sample_rate(&self) -> usize { 176 | let mut state = self.clip.lock().unwrap(); 177 | let state = state.as_mut().unwrap(); 178 | 179 | state.clip.sample_rate() 180 | } 181 | } 182 | 183 | impl ClipHandle for AudioClip { 184 | fn render_waveform(&self, range: (usize, usize), width: usize, height: usize) -> Vec<u8> { 185 | self.render_waveform(range, width, height) 186 | } 187 | 188 | fn render_spectrogram( 189 | &self, 190 | range: (usize, usize), 191 | width: usize, 192 | height: usize, 193 | ) -> Result<Vec<u8>> { 194 | self.render_spectrogram(range, width, height) 195 | } 196 | 197 | fn num_samples(&self) -> usize { 198 | self.samples.len() 199 | } 200 | 201 | fn sample_rate(&self) -> usize { 202 | self.sample_rate as usize 203 | } 204 | } 205 | 206 | /// Raw mono audio data. 207 | #[derive(Clone)] 208 | pub struct AudioClip { 209 | pub id: Option<usize>, 210 | pub name: String, 211 | pub date: DateTime<Utc>, 212 | pub samples: Vec<f32>, 213 | pub sample_rate: u32, 214 | } 215 | 216 | pub struct DisplayColumn { 217 | pub min: f32, 218 | pub max: f32, 219 | } 220 | 221 | #[derive(Clone, Copy, Default)] 222 | pub enum AudioBackend { 223 | #[default] 224 | Default, 225 | #[cfg(feature = "jack")] 226 | Jack, 227 | } 228 | 229 | impl AudioBackend { 230 | fn host(&self) -> Result<Host, HostUnavailable> { 231 | match self { 232 | AudioBackend::Default => Ok(cpal::default_host()), 233 | 234 | #[cfg(feature = "jack")] 235 | AudioBackend::Jack => cpal::host_from_id(cpal::HostId::Jack), 236 | } 237 | } 238 | } 239 | 240 | impl AudioClip { 241 | pub fn resample(&self, sample_rate: u32) -> AudioClip { 242 | if self.sample_rate == sample_rate { 243 | return self.clone(); 244 | } 245 | 246 | let mut signal = signal::from_iter(self.samples.iter().copied()); 247 | let a = signal.next(); 248 | let b = signal.next(); 249 | 250 | let linear = Linear::new(a, b); 251 | 252 | AudioClip { 253 | id: self.id, 254 | name: self.name.clone(), 255 | date: self.date, 256 | samples: signal 257 | .from_hz_to_hz(linear, self.sample_rate as f64, sample_rate as f64) 258 | .take(self.samples.len() * (sample_rate as usize) / (self.sample_rate as usize)) 259 | .collect(), 260 | sample_rate, 261 | } 262 | } 263 | 264 | pub fn record(host: AudioBackend, name: String) -> Result<RecordHandle> { 265 | let host = host.host().wrap_err("Could not open specified host")?; 266 | let device = host 267 | .default_input_device() 268 | .ok_or_else(|| eyre!("No input device"))?; 269 | log::info!("Input device: {}", device.name()?); 270 | let config = device.default_input_config()?; 271 | 272 | let clip = AudioClip { 273 | id: None, 274 | name, 275 | date: Utc::now(), 276 | samples: Vec::new(), 277 | sample_rate: config.sample_rate().0, 278 | }; 279 | let clip = Arc::new(Mutex::new(Some(RecordState { clip }))); 280 | let clip_2 = clip.clone(); 281 | 282 | log::info!("Begin recording..."); 283 | let err_fn = move |err| { 284 | log::error!("an error occurred on stream: {}", err); 285 | }; 286 | 287 | let channels = config.channels(); 288 | 289 | fn write_input_data<T>(input: &[T], channels: u16, writer: &RecordStateHandle) 290 | where 291 | T: cpal::Sample, 292 | f32: cpal::FromSample<T>, 293 | { 294 | if let Ok(mut guard) = writer.try_lock() { 295 | if let Some(state) =
guard.as_mut() { 296 | for frame in input.chunks(channels.into()) { 297 | state.clip.samples.push(f32::from_sample(frame[0])); 298 | } 299 | } 300 | } 301 | } 302 | 303 | let stream = match config.sample_format() { 304 | cpal::SampleFormat::F32 => device.build_input_stream( 305 | &config.into(), 306 | move |data, _: &_| write_input_data::<f32>(data, channels, &clip_2), 307 | err_fn, 308 | None, 309 | )?, 310 | cpal::SampleFormat::I16 => device.build_input_stream( 311 | &config.into(), 312 | move |data, _: &_| write_input_data::<i16>(data, channels, &clip_2), 313 | err_fn, 314 | None, 315 | )?, 316 | cpal::SampleFormat::U16 => device.build_input_stream( 317 | &config.into(), 318 | move |data, _: &_| write_input_data::<u16>(data, channels, &clip_2), 319 | err_fn, 320 | None, 321 | )?, 322 | format => { 323 | return Err(eyre!("Unknown sample format {:?}.", format)); 324 | } 325 | }; 326 | 327 | stream.play()?; 328 | 329 | Ok(RecordHandle { stream, clip }) 330 | } 331 | 332 | pub fn import(name: String, path: String) -> Result<AudioClip> { 333 | // Create a media source. Note that the MediaSource trait is automatically implemented for File, 334 | // among other types. 335 | let file = Box::new(File::open(Path::new(&path))?); 336 | 337 | let creation_time = file.metadata()?.created()?; 338 | 339 | // Create the media source stream using the boxed media source from above. 340 | let mss = MediaSourceStream::new(file, Default::default()); 341 | 342 | // Create a hint to help the format registry guess what format reader is appropriate. In this 343 | // example we'll leave it empty. 344 | let hint = Hint::new(); 345 | 346 | // Use the default options when reading and decoding. 347 | let format_opts: FormatOptions = Default::default(); 348 | let metadata_opts: MetadataOptions = Default::default(); 349 | let decoder_opts: DecoderOptions = Default::default(); 350 | 351 | // Probe the media source stream for a format. 352 | let probed = 353 | symphonia::default::get_probe().format(&hint, mss, &format_opts, &metadata_opts)?; 354 | 355 | // Get the format reader yielded by the probe operation. 356 | let mut format = probed.format; 357 | 358 | // Get the default track. 359 | let track = format 360 | .default_track() 361 | .ok_or_else(|| eyre!("No default track"))?; 362 | 363 | // Create a decoder for the track. 364 | let mut decoder = 365 | symphonia::default::get_codecs().make(&track.codec_params, &decoder_opts)?; 366 | 367 | // Store the track identifier, we'll use it to filter packets. 368 | let track_id = track.id; 369 | 370 | let mut sample_count = 0; 371 | let mut sample_buf = None; 372 | let channels = track 373 | .codec_params 374 | .channels 375 | .ok_or_else(|| eyre!("Unknown channel count"))?; 376 | 377 | let mut clip = AudioClip { 378 | id: None, 379 | name, 380 | date: DateTime::<Utc>::from(creation_time), 381 | samples: Vec::new(), 382 | sample_rate: track 383 | .codec_params 384 | .sample_rate 385 | .ok_or_else(|| eyre!("Unknown sample rate"))?, 386 | }; 387 | 388 | loop { 389 | // Get the next packet from the format reader. 390 | let packet = match format.next_packet() { 391 | Ok(packet_ok) => packet_ok, 392 | Err(Error::IoError(ref packet_err)) 393 | if packet_err.kind() == std::io::ErrorKind::UnexpectedEof => 394 | { 395 | break; 396 | } 397 | Err(packet_err) => { 398 | return Err(packet_err.into()); 399 | } 400 | }; 401 | 402 | // If the packet does not belong to the selected track, skip it.
403 | if packet.track_id() != track_id { 404 | continue; 405 | } 406 | 407 | // Decode the packet into audio samples, ignoring any decode errors. 408 | match decoder.decode(&packet) { 409 | Ok(audio_buf) => { 410 | // The decoded audio samples may now be accessed via the audio buffer if per-channel 411 | // slices of samples in their native decoded format are desired. Use-cases where 412 | // the samples need to be accessed in an interleaved order or converted into 413 | // another sample format, or a byte buffer is required, are covered by copying the 414 | // audio buffer into a sample buffer or raw sample buffer, respectively. In the 415 | // example below, we will copy the audio buffer into a sample buffer in an 416 | // interleaved order while also converting to a f32 sample format. 417 | 418 | // If this is the *first* decoded packet, create a sample buffer matching the 419 | // decoded audio buffer format. 420 | if sample_buf.is_none() { 421 | // Get the audio buffer specification. 422 | let spec = *audio_buf.spec(); 423 | 424 | // Get the capacity of the decoded buffer. Note: This is capacity, not length! 425 | let duration = audio_buf.capacity() as u64; 426 | 427 | // Create the f32 sample buffer. 428 | sample_buf = Some(SampleBuffer::<f32>::new(duration, spec)); 429 | } 430 | 431 | // Copy the decoded audio buffer into the sample buffer in an interleaved format. 432 | if let Some(buf) = &mut sample_buf { 433 | buf.copy_interleaved_ref(audio_buf); 434 | let mono: Vec<f32> = buf 435 | .samples() 436 | .iter() 437 | .step_by(channels.count()) 438 | .copied() 439 | .collect(); 440 | clip.samples.extend_from_slice(&mono); 441 | 442 | // The samples may now be accessed via the `samples()` function. 443 | sample_count += buf.samples().len(); 444 | log::info!("\rDecoded {} samples", sample_count); 445 | } 446 | } 447 | Err(Error::DecodeError(_)) => (), 448 | Err(_) => break, 449 | } 450 | } 451 | 452 | Ok(clip) 453 | } 454 | 455 | pub fn play(&self, host: AudioBackend) -> Result<PlayHandle> { 456 | let host = host.host().wrap_err("Could not open specified host")?; 457 | let device = host 458 | .default_output_device() 459 | .ok_or_else(|| eyre!("No output device"))?; 460 | log::info!("Output device: {}", device.name()?); 461 | let config = device.default_output_config()?; 462 | 463 | log::info!("Begin playback..."); 464 | 465 | let sample_rate = config.sample_rate().0; 466 | let state = PlaybackState { 467 | time: 0, 468 | samples: self.resample(sample_rate).samples, 469 | done_cbs: vec![], 470 | changed_cbs: vec![], 471 | changed_cbs_triggered_at: 0, 472 | sample_rate: sample_rate as usize, 473 | }; 474 | let state: PlaybackStateHandle = Arc::new(Mutex::new(Some(state))); 475 | let state_2 = state.clone(); 476 | let channels = config.channels(); 477 | 478 | let err_fn = move |err| { 479 | log::error!("an error occurred on stream: {}", err); 480 | }; 481 | 482 | fn write_output_data<T>(output: &mut [T], channels: u16, writer: &PlaybackStateHandle) 483 | where 484 | T: cpal::Sample + cpal::SizedSample + cpal::FromSample<f32>, 485 | { 486 | if let Ok(mut guard) = writer.try_lock() { 487 | if let Some(state) = guard.as_mut() { 488 | for frame in output.chunks_mut(channels.into()) { 489 | for sample in frame.iter_mut() { 490 | *sample = 491 | T::from_sample(*state.samples.get(state.time).unwrap_or(&0f32)); 492 | } 493 | state.time += 1; 494 | } 495 | if state.time >= state.samples.len() { 496 | for cb in &*state.done_cbs { 497 | cb(); 498 | } 499 | } 500 | if state.time >= state.changed_cbs_triggered_at + state.sample_rate
/ 100 { 501 | for cb in &*state.changed_cbs { 502 | cb(); 503 | } 504 | state.changed_cbs_triggered_at = state.time; 505 | } 506 | } 507 | } 508 | } 509 | 510 | let stream = match config.sample_format() { 511 | cpal::SampleFormat::F32 => device.build_output_stream( 512 | &config.into(), 513 | move |data, _: &_| write_output_data::<f32>(data, channels, &state), 514 | err_fn, 515 | None, 516 | )?, 517 | cpal::SampleFormat::I16 => device.build_output_stream( 518 | &config.into(), 519 | move |data, _: &_| write_output_data::<i16>(data, channels, &state), 520 | err_fn, 521 | None, 522 | )?, 523 | cpal::SampleFormat::U16 => device.build_output_stream( 524 | &config.into(), 525 | move |data, _: &_| write_output_data::<u16>(data, channels, &state), 526 | err_fn, 527 | None, 528 | )?, 529 | format => { 530 | return Err(eyre!("Unknown sample format {:?}.", format)); 531 | } 532 | }; 533 | 534 | stream.play()?; 535 | 536 | Ok(PlayHandle { 537 | _stream: stream, 538 | state: state_2, 539 | }) 540 | } 541 | 542 | pub fn export(&self, path: &str) -> Result<()> { 543 | if !path.ends_with(".wav") { 544 | return Err(eyre!("Expected {} to end in .wav", path)); 545 | } 546 | 547 | let spec = hound::WavSpec { 548 | channels: 2, 549 | sample_rate: self.sample_rate, 550 | bits_per_sample: 32, 551 | sample_format: hound::SampleFormat::Float, 552 | }; 553 | 554 | let mut writer = hound::WavWriter::create(path, spec)?; 555 | for sample in &self.samples { 556 | writer.write_sample(*sample)?; 557 | writer.write_sample(*sample)?; 558 | } 559 | 560 | writer.finalize()?; 561 | 562 | Ok(()) 563 | } 564 | 565 | pub fn render_waveform( 566 | &self, 567 | mut range: (usize, usize), 568 | width: usize, 569 | height: usize, 570 | ) -> Vec<u8> { 571 | range.1 = range.1.clamp(range.0, usize::MAX); 572 | 573 | let min_t = range.0 as f32; 574 | let max_t = range.1 as f32; 575 | let samples_per_pixel = (max_t - min_t) / (width as f32); 576 | 577 | let columns: Vec<DisplayColumn> = (0..width) 578 | .map(|pixel_i| { 579 | let mut min = 1.0f32; 580 | let mut max = -1.0f32; 581 | 582 | let start_sample = (min_t + samples_per_pixel * (pixel_i as f32)).floor() as usize; 583 | let end_sample = 584 | (min_t + samples_per_pixel * ((pixel_i + 1) as f32)).floor() as usize; 585 | 586 | let start_sample = start_sample.clamp(0, self.samples.len()); 587 | let end_sample = end_sample.clamp(start_sample, self.samples.len()); 588 | 589 | for sample in &self.samples[start_sample..end_sample] { 590 | min = min.min(*sample); 591 | max = max.max(*sample); 592 | } 593 | 594 | if min > max { 595 | min = 0.0; 596 | max = 0.0; 597 | } 598 | if min < -1.0 { 599 | min = -1.0; 600 | } 601 | if max > 1.0 { 602 | max = 1.0; 603 | } 604 | 605 | DisplayColumn { min, max } 606 | }) 607 | .collect(); 608 | 609 | let mut buffer = vec![0; width * height * 4]; 610 | 611 | for (x, column) in columns.iter().enumerate() { 612 | let min_y = ((height as f32) * (column.min + 1.0) / 2.0) 613 | .floor() 614 | .max(0.0) as usize; 615 | let max_y = 616 | (((height as f32) * (column.max + 1.0) / 2.0).ceil() as usize).min(height - 1); 617 | 618 | for y in min_y..=max_y { 619 | // purple-900 :) 620 | buffer[y * width * 4 + x * 4] = 88; 621 | buffer[y * width * 4 + x * 4 + 1] = 28; 622 | buffer[y * width * 4 + x * 4 + 2] = 135; 623 | buffer[y * width * 4 + x * 4 + 3] = 255; 624 | } 625 | } 626 | 627 | buffer 628 | } 629 | 630 | pub fn render_spectrogram( 631 | &self, 632 | range: (usize, usize), 633 | width: usize, 634 | height: usize, 635 | ) -> Result<Vec<u8>> { 636 | spectrum::render_spectrogram(self, range, width,
height) 637 | } 638 | 639 | pub fn num_samples(&self) -> usize { 640 | self.samples.len() 641 | } 642 | } 643 | 644 | #[cfg(test)] 645 | mod tests { 646 | use super::*; 647 | 648 | #[test] 649 | fn test_render_with_zero_samples() { 650 | let clip = AudioClip { 651 | id: Some(1), 652 | name: "Name".into(), 653 | date: Utc::now(), 654 | samples: vec![], 655 | sample_rate: 44100, 656 | }; 657 | assert_eq!(clip.render_waveform((0, 0), 100, 1).len(), 100 * 4); 658 | assert_eq!(clip.render_waveform((0, 0), 0, 1).len(), 0); 659 | assert_eq!(clip.render_waveform((100, 0), 0, 1).len(), 0); 660 | assert_eq!(clip.render_waveform((100, 200), 100, 1).len(), 100 * 4); 661 | assert_eq!(clip.render_waveform((100, 200), 100, 4).len(), 400 * 4); 662 | } 663 | } 664 | -------------------------------------------------------------------------------- /core/lib/src/db.rs: -------------------------------------------------------------------------------- 1 | use std::path::Path; 2 | 3 | use crate::audio_clip::AudioClip; 4 | use crate::internal_encoding::{decode_v0, decode_v1, encode_v1}; 5 | use chrono::prelude::*; 6 | use color_eyre::eyre::{eyre, Result}; 7 | use directories::ProjectDirs; 8 | use rusqlite::{params, types::Type, Connection}; 9 | 10 | pub struct Db(Connection); 11 | 12 | pub struct ClipMeta { 13 | pub id: usize, 14 | pub name: String, 15 | pub date: DateTime<Utc>, 16 | } 17 | impl Db { 18 | pub fn open() -> Result<Self> { 19 | let proj_dirs = ProjectDirs::from("ca", "nettek", "oxygen").ok_or_else(|| { 20 | eyre!("Could not find project directories (home directory could not be retrieved)") 21 | })?; 22 | let data_dir = proj_dirs.data_dir(); 23 | 24 | std::fs::create_dir_all(data_dir)?; 25 | let db_file_path = data_dir.join("oxygen.sqlite"); 26 | 27 | if Path::new("oxygen.sqlite").exists() && !db_file_path.exists() { 28 | log::info!("Migration: moving oxygen.sqlite to {:?}", db_file_path); 29 | std::fs::copy("oxygen.sqlite", &db_file_path)?; 30 | std::fs::remove_file("oxygen.sqlite")?; 31 | } 32 | 33 | let connection = Connection::open(db_file_path)?; 34 | Self::from_connection(connection) 35 | } 36 | 37 | pub fn in_memory() -> Result<Self> { 38 | let connection = Connection::open_in_memory()?; 39 | Self::from_connection(connection) 40 | } 41 | 42 | fn from_connection(connection: Connection) -> Result<Self> { 43 | let user_version: u32 = 44 | connection.query_row("SELECT user_version FROM pragma_user_version", [], |r| { 45 | r.get(0) 46 | })?; 47 | connection.pragma_update(None, "page_size", 8192)?; 48 | connection.pragma_update(None, "user_version", 2)?; 49 | 50 | if user_version < 1 { 51 | log::info!("Migration: init schema..."); 52 | connection.execute( 53 | " 54 | CREATE TABLE IF NOT EXISTS clips ( 55 | id INTEGER PRIMARY KEY, 56 | name TEXT NOT NULL UNIQUE, 57 | date TEXT NOT NULL, 58 | sample_rate INTEGER NOT NULL, 59 | samples BLOB NOT NULL 60 | ); 61 | ", 62 | [], 63 | )?; 64 | } 65 | 66 | if user_version < 2 { 67 | log::info!("Migration: updating schema to version 2..."); 68 | let mut stmt = 69 | connection.prepare("SELECT id, name, date, sample_rate, samples FROM clips")?; 70 | let clip_iter = stmt.query_map([], |row| { 71 | let date: String = row.get(2)?; 72 | let samples: Vec<u8> = row.get(4)?; 73 | 74 | Ok(AudioClip { 75 | id: Some(row.get(0)?), 76 | name: row.get(1)?, 77 | date: date.parse().map_err(|_| { 78 | rusqlite::Error::InvalidColumnType(2, "date".to_string(), Type::Text) 79 | })?, 80 | sample_rate: row.get(3)?, 81 | samples: decode_v0(&samples), 82 | }) 83 | })?; 84 | 85 | let clips: Vec<_> =
clip_iter.collect::<Result<_, _>>()?; 86 | for clip in &clips { 87 | let (sr, bytes) = encode_v1(clip)?; 88 | connection.execute( 89 | "INSERT OR REPLACE INTO clips (id, name, date, sample_rate, samples) VALUES (?1, ?2, ?3, ?4, ?5)", 90 | params![ 91 | clip.id, 92 | clip.name, 93 | clip.date.to_string(), 94 | sr, 95 | bytes, 96 | ], 97 | )?; 98 | } 99 | 100 | connection.execute("ALTER TABLE clips RENAME COLUMN samples TO opus", [])?; 101 | } 102 | 103 | Ok(Db(connection)) 104 | } 105 | 106 | pub fn save(&self, clip: &mut AudioClip) -> Result<()> { 107 | let (sr, bytes) = encode_v1(clip)?; 108 | 109 | self.0.execute( 110 | "INSERT OR REPLACE INTO clips (id, name, date, sample_rate, opus) VALUES (?1, ?2, ?3, ?4, ?5)", 111 | params![ 112 | clip.id, 113 | clip.name, 114 | clip.date.to_string(), 115 | sr, 116 | bytes, 117 | ], 118 | )?; 119 | 120 | if clip.id.is_none() { 121 | clip.id = Some(self.0.last_insert_rowid().try_into()?); 122 | } 123 | 124 | Ok(()) 125 | } 126 | 127 | pub fn load(&self, name: &str) -> Result<Option<AudioClip>> { 128 | let mut stmt = self 129 | .0 130 | .prepare("SELECT id, name, date, sample_rate, opus FROM clips WHERE name = ?1")?; 131 | let mut clip_iter = stmt.query_map([name], |row| { 132 | let date: String = row.get(2)?; 133 | let bytes: Vec<u8> = row.get(4)?; 134 | let sample_rate: u32 = row.get(3)?; 135 | let samples = decode_v1(sample_rate, &bytes).map_err(|_| { 136 | rusqlite::Error::InvalidColumnType(3, "opus".to_string(), Type::Blob) 137 | })?; 138 | 139 | Ok(AudioClip { 140 | id: Some(row.get(0)?), 141 | name: row.get(1)?, 142 | date: date.parse().map_err(|_| { 143 | rusqlite::Error::InvalidColumnType(2, "date".to_string(), Type::Text) 144 | })?, 145 | sample_rate, 146 | samples, 147 | }) 148 | })?; 149 | 150 | Ok(if let Some(clip) = clip_iter.next() { 151 | Some(clip?) 152 | } else { 153 | None 154 | }) 155 | } 156 | 157 | pub fn load_by_id(&self, id: usize) -> Result<Option<AudioClip>> { 158 | let mut stmt = self 159 | .0 160 | .prepare("SELECT id, name, date, sample_rate, opus FROM clips WHERE id = ?1")?; 161 | let mut clip_iter = stmt.query_map([id], |row| { 162 | let date: String = row.get(2)?; 163 | let bytes: Vec<u8> = row.get(4)?; 164 | let sample_rate: u32 = row.get(3)?; 165 | let samples = decode_v1(sample_rate, &bytes).map_err(|_| { 166 | rusqlite::Error::InvalidColumnType(3, "opus".to_string(), Type::Blob) 167 | })?; 168 | 169 | Ok(AudioClip { 170 | id: Some(row.get(0)?), 171 | name: row.get(1)?, 172 | date: date.parse().map_err(|_| { 173 | rusqlite::Error::InvalidColumnType(2, "date".to_string(), Type::Text) 174 | })?, 175 | sample_rate, 176 | samples, 177 | }) 178 | })?; 179 | 180 | Ok(if let Some(clip) = clip_iter.next() { 181 | Some(clip?) 182 | } else { 183 | None 184 | }) 185 | } 186 | 187 | pub fn list(&self) -> Result<Vec<ClipMeta>> { 188 | let mut stmt = self 189 | .0 190 | .prepare("SELECT id, name, date FROM clips ORDER BY date")?; 191 | let clip_iter = stmt.query_map([], |row| { 192 | let date: String = row.get(2)?; 193 | 194 | Ok(ClipMeta { 195 | id: row.get(0)?, 196 | name: row.get(1)?, 197 | date: date.parse().map_err(|_| { 198 | rusqlite::Error::InvalidColumnType(2, "date".to_string(), Type::Text) 199 | })?, 200 | }) 201 | })?; 202 | 203 | Ok(clip_iter.collect::<Result<_, _>>()?)
204 | } 205 | 206 | pub fn delete(&self, name: &str) -> Result<()> { 207 | self.0 208 | .execute("DELETE FROM clips WHERE name = ?1", [name])?; 209 | 210 | Ok(()) 211 | } 212 | 213 | pub fn delete_by_id(&self, id: usize) -> Result<()> { 214 | self.0.execute("DELETE FROM clips WHERE id = ?1", [id])?; 215 | 216 | Ok(()) 217 | } 218 | 219 | pub fn rename(&self, old_name: &str, new_name: &str) -> Result<()> { 220 | let rows_changed = self.0.execute( 221 | "UPDATE clips SET name = ?2 WHERE name = ?1", 222 | [old_name, new_name], 223 | )?; 224 | 225 | if rows_changed == 0 { 226 | return Err(eyre!("There is no clip named \"{}\"", old_name)); 227 | } 228 | 229 | Ok(()) 230 | } 231 | 232 | pub fn rename_by_id(&self, id: usize, new_name: &str) -> Result<()> { 233 | let rows_changed = self.0.execute( 234 | "UPDATE clips SET name = ?2 WHERE id = ?1", 235 | params![id, new_name], 236 | )?; 237 | 238 | if rows_changed == 0 { 239 | return Err(eyre!("There is no clip with ID {}", id)); 240 | } 241 | 242 | Ok(()) 243 | } 244 | } 245 | -------------------------------------------------------------------------------- /core/lib/src/ggml-base.en-q5_0.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jocelyn-stericker/oxygen/f2e01fa156ea4e0ce6ed3d5bb60a34ce36a9fe2f/core/lib/src/ggml-base.en-q5_0.bin -------------------------------------------------------------------------------- /core/lib/src/internal_encoding.rs: -------------------------------------------------------------------------------- 1 | use audiopus::{ 2 | coder::{Decoder, Encoder}, 3 | packet::Packet, 4 | Application, Bitrate, Channels, Error as OpusError, ErrorCode as OpusErrorCode, MutSignals, 5 | SampleRate, 6 | }; 7 | use color_eyre::{eyre::eyre, Result}; 8 | 9 | use crate::audio_clip::AudioClip; 10 | 11 | pub fn decode_v0(bytes: &[u8]) -> Vec<f32> { 12 | let mut samples = Vec::with_capacity(bytes.len() / 4); 13 | for chunk in bytes.chunks(4) { 14 | samples.push(f32::from_be_bytes([chunk[0], chunk[1], chunk[2], chunk[3]])); 15 | } 16 | samples 17 | } 18 | 19 | /// Encode a clip into a custom opus container. 20 | /// 21 | /// Format is: 22 | /// - 4 bytes, number of samples as a u32 in big endian 23 | /// - for each packet: 24 | /// - 2 bytes, number of bytes in packet as a u16 in big endian 25 | /// - the raw packet 26 | pub fn encode_v1(clip: &AudioClip) -> Result<(u32, Vec<u8>)> { 27 | let sample_rate: i32 = clip.sample_rate.try_into()?; 28 | let resampled: AudioClip; 29 | let (samples, sample_rate) = match SampleRate::try_from(sample_rate) { 30 | Ok(sample_rate) => (&clip.samples, sample_rate), 31 | Err(_) => { 32 | resampled = clip.resample(48000); 33 | (&resampled.samples, SampleRate::Hz48000) 34 | } 35 | }; 36 | let mut encoder = Encoder::new(sample_rate, Channels::Mono, Application::Audio)?; 37 | encoder.set_bitrate(Bitrate::BitsPerSecond(24000))?; 38 | 39 | let frame_size = (sample_rate as i32 / 1000 * 20) as usize; 40 | 41 | let mut output = vec![0u8; samples.len().max(128)]; 42 | let mut samples_i = 0; 43 | let mut output_i = 0; 44 | let mut end_buffer = vec![0f32; frame_size]; 45 | 46 | // Store number of samples.
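// For example, a 48 kHz clip with 960 samples (exactly one 20 ms frame) starts with the big-endian sample count bytes [0x00, 0x00, 0x03, 0xC0], followed by a single packet stored as a u16 big-endian length and then that many bytes of raw opus data.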
47 | { 48 | let samples: u32 = samples.len().try_into()?; 49 | let bytes = samples.to_be_bytes(); 50 | output[..4].clone_from_slice(&bytes[..4]); 51 | output_i += 4; 52 | } 53 | 54 | while samples_i < samples.len() { 55 | match encoder.encode_float( 56 | if samples_i + frame_size < samples.len() { 57 | &samples[samples_i..(samples_i + frame_size)] 58 | } else { 59 | end_buffer[..(samples.len() - samples_i)].clone_from_slice( 60 | &samples[samples_i..((samples.len() - samples_i) + samples_i)], 61 | ); 62 | 63 | &end_buffer 64 | }, 65 | &mut output[output_i + 2..], 66 | ) { 67 | Ok(pkt_len) => { 68 | samples_i += frame_size; 69 | let bytes = u16::try_from(pkt_len)?.to_be_bytes(); 70 | output[output_i] = bytes[0]; 71 | output[output_i + 1] = bytes[1]; 72 | output_i += pkt_len + 2; 73 | } 74 | Err(OpusError::Opus(OpusErrorCode::BufferTooSmall)) => { 75 | log::error!( 76 | "Needed to increase buffer size, opus is compressing less well than expected." 77 | ); 78 | output.resize(output.len() * 2, 0u8); 79 | } 80 | Err(e) => { 81 | return Err(eyre!(e)); 82 | } 83 | } 84 | } 85 | 86 | output.truncate(output_i); 87 | 88 | Ok((sample_rate as i32 as u32, output)) 89 | } 90 | 91 | pub fn decode_v1(sample_rate: u32, bytes: &[u8]) -> Result<Vec<f32>> { 92 | let sample_rate: i32 = sample_rate.try_into()?; 93 | let sample_rate = SampleRate::try_from(sample_rate)?; 94 | let mut decoder = Decoder::new(sample_rate, Channels::Mono)?; 95 | 96 | let frame_size = (sample_rate as i32 / 1000 * 20) as usize; 97 | 98 | let mut bytes_i = 0; 99 | if bytes.len() < 4 { 100 | return Err(eyre!("Invalid number of bytes in encoded data")); 101 | } 102 | let num_samples: usize = 103 | u32::from_be_bytes([bytes[0], bytes[1], bytes[2], bytes[3]]).try_into()?; 104 | bytes_i += 4; 105 | 106 | let mut samples = vec![0f32; num_samples + frame_size]; 107 | let mut samples_i = 0; 108 | 109 | while bytes_i < bytes.len() { 110 | let pkt_len: usize = match (bytes.get(bytes_i), bytes.get(bytes_i + 1)) { 111 | (Some(&a), Some(&b)) => u16::from_be_bytes([a, b]).into(), 112 | _ => { 113 | return Err(eyre!("Invalid encoding")); 114 | } 115 | }; 116 | bytes_i += 2; 117 | if bytes_i + pkt_len > bytes.len() { 118 | return Err(eyre!("Invalid encoding")); 119 | } 120 | if samples_i + frame_size > samples.len() { 121 | return Err(eyre!("Invalid encoding")); 122 | } 123 | 124 | let actual_frame_size = decoder.decode_float( 125 | Some(Packet::try_from(&bytes[bytes_i..bytes_i + pkt_len])?), 126 | MutSignals::try_from(&mut samples[samples_i..samples_i + frame_size])?, 127 | false, 128 | )?; 129 | 130 | if actual_frame_size != frame_size { 131 | return Err(eyre!("Invalid frame size")); 132 | } 133 | 134 | bytes_i += pkt_len; 135 | samples_i += actual_frame_size; 136 | } 137 | 138 | samples.truncate(samples_i); 139 | 140 | Ok(samples) 141 | } 142 | -------------------------------------------------------------------------------- /core/lib/src/language_processor.rs: -------------------------------------------------------------------------------- 1 | use crate::audio_clip::AudioClip; 2 | use color_eyre::eyre::{eyre, Result}; 3 | use std::sync::mpsc::{channel, Receiver, Sender}; 4 | use std::sync::{Arc, Mutex}; 5 | use whisper_rs::{FullParams, SamplingStrategy, WhisperContext, WhisperError}; 6 | 7 | #[cfg(not(feature = "whisper_dummy"))] 8 | const GGML_BASE_EN_Q5: &[u8] = include_bytes!("./ggml-base.en-q5_0.bin"); 9 | 10 | pub struct LanguageProcessor { 11 | whisper_context: Option<WhisperContext>, 12 | } 13 | 14 | pub type Segment = ((f64, f64), String); 15 | 16 | impl
LanguageProcessor { 17 | pub fn new() -> Result<Self> { 18 | Ok(LanguageProcessor { 19 | whisper_context: None, 20 | }) 21 | } 22 | 23 | #[cfg(not(feature = "whisper_dummy"))] 24 | fn whisper_context(&mut self) -> Result<&mut WhisperContext> { 25 | let ctx = &mut self.whisper_context; 26 | if let Some(ctx) = ctx { 27 | Ok(ctx) 28 | } else { 29 | Ok(ctx.insert(WhisperContext::new_from_buffer(GGML_BASE_EN_Q5)?)) 30 | } 31 | } 32 | 33 | /// Return a transcript of the audio using whisper.cpp 34 | #[cfg(not(feature = "whisper_dummy"))] 35 | pub fn transcribe(&mut self, clip: &AudioClip) -> Result<Vec<Segment>> { 36 | let mut state = self 37 | .whisper_context()? 38 | .create_state() 39 | .expect("failed to create state"); 40 | 41 | // create a params object 42 | // note that currently the only implemented strategy is Greedy, BeamSearch is a WIP 43 | // n_past defaults to 0 44 | let mut params = FullParams::new(SamplingStrategy::Greedy { best_of: 1 }); 45 | 46 | params.set_n_threads(4); 47 | params.set_token_timestamps(true); 48 | params.set_language(Some("en")); 49 | params.set_suppress_blank(false); 50 | params.set_suppress_non_speech_tokens(true); 51 | params.set_print_progress(false); 52 | params.set_translate(false); 53 | 54 | // we must convert to 16KHz mono f32 samples for the model 55 | let resampled = clip.resample(16000); 56 | 57 | // Run it! 58 | state.full(params, &resampled.samples[..])?; 59 | 60 | // fetch the results 61 | let num_segments = state 62 | .full_n_segments() 63 | .expect("failed to get number of segments"); 64 | 65 | let mut segments = Vec::with_capacity(num_segments as usize); 66 | for i in 0..num_segments { 67 | let segment = state.full_get_segment_text(i); 68 | let start_timestamp = state.full_get_segment_t0(i)?; 69 | let end_timestamp = state.full_get_segment_t1(i)?; 70 | let num_tokens = state.full_n_tokens(i)?; 71 | 72 | // whisper.cpp hallucinates. If this segment doesn't seem reliable, skip it. 73 | let mut total_prob = 0f32; 74 | for j in 0..num_tokens { 75 | total_prob += state.full_get_token_prob(i, j)?; 76 | } 77 | if total_prob / (num_tokens as f32) < 0.5 { 78 | continue; 79 | } 80 | 81 | match segment { 82 | Ok(segment) => { 83 | segments.push(( 84 | ( 85 | (start_timestamp as f64) * 10f64 / 1000f64, 86 | (end_timestamp as f64) * 10f64 / 1000f64, 87 | ), 88 | segment, 89 | )); 90 | } 91 | Err(WhisperError::InvalidUtf8 { .. }) => { 92 | // Whisper does not always give valid unicode... max_len=1 seems to 93 | // split in invalid ways.
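// When that happens, log it and skip this segment instead of failing the whole transcription.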
94 | log::warn!("Whisper gave invalid utf8"); 95 | } 96 | Err(err) => Err(err)?, 97 | } 98 | } 99 | 100 | Ok(segments) 101 | } 102 | 103 | #[cfg(feature = "whisper_dummy")] 104 | pub fn transcribe(&mut self, _clip: &AudioClip) -> Result<Vec<Segment>> { 105 | Ok(vec![]) 106 | } 107 | } 108 | 109 | enum Event { 110 | Transcribe(AudioClip, Sender<Result<Vec<Segment>>>), 111 | } 112 | 113 | fn event_queue(mut analyzer: LanguageProcessor, events: Receiver<Event>) { 114 | while let Ok(event) = events.recv() { 115 | let Event::Transcribe(clip, sender) = event; 116 | let result = analyzer.transcribe(&clip); 117 | sender.send(result).expect("failed to send result"); 118 | } 119 | } 120 | 121 | pub struct AsyncLanguageProcessor { 122 | events: Arc<Mutex<Sender<Event>>>, 123 | } 124 | 125 | impl AsyncLanguageProcessor { 126 | pub fn new() -> Result<Self> { 127 | let (sender, receiver) = channel(); 128 | std::thread::spawn(move || { 129 | let analyzer = LanguageProcessor::new().expect("failed to create analyzer"); 130 | event_queue(analyzer, receiver) 131 | }); 132 | 133 | Ok(AsyncLanguageProcessor { 134 | events: Arc::new(Mutex::new(sender)), 135 | }) 136 | } 137 | 138 | pub fn transcribe(&self, clip: AudioClip) -> Result<TranscriptionHandle> { 139 | let (sender, receiver) = channel(); 140 | 141 | let events = self.events.lock().unwrap(); 142 | events 143 | .send(Event::Transcribe(clip, sender)) 144 | .map_err(|e| eyre!("{:?}", e))?; 145 | 146 | Ok(TranscriptionHandle { receiver }) 147 | } 148 | } 149 | 150 | type TranscriptionResult = Result<Vec<Segment>>; 151 | 152 | pub struct TranscriptionHandle { 153 | receiver: Receiver<TranscriptionResult>, 154 | } 155 | 156 | impl TranscriptionHandle { 157 | pub fn resolve(self) -> TranscriptionResult { 158 | self.receiver.recv()? 159 | } 160 | } 161 | -------------------------------------------------------------------------------- /core/lib/src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod audio_clip; 2 | pub mod db; 3 | pub mod internal_encoding; 4 | pub mod language_processor; 5 | pub mod spectrum; 6 | 7 | #[cfg(feature = "napi")] 8 | pub mod napi; 9 | -------------------------------------------------------------------------------- /core/lib/src/spectrum.rs: -------------------------------------------------------------------------------- 1 | use crate::audio_clip::AudioClip; 2 | use color_eyre::eyre::Result; 3 | use realfft::RealFftPlanner; 4 | 5 | pub fn spectrogram(clip: &AudioClip, mut range: (usize, usize)) -> Result<Vec<Vec<f32>>> { 6 | let resampled = clip.resample(12000); 7 | let n_fft = 2048; 8 | let offset = 200; // 16ms 9 | 10 | let signal = resampled.samples; 11 | let mut fft = RealFftPlanner::<f32>::new(); 12 | let r2c = fft.plan_fft_forward(n_fft); 13 | let mut spectrums = Vec::new(); 14 | let mut start_i = (range.0 / n_fft) * n_fft; 15 | let mut chunk = vec![0f32; n_fft]; 16 | range.0 = range.0 * (resampled.sample_rate as usize) / (clip.sample_rate as usize); 17 | range.1 = range.1 * (resampled.sample_rate as usize) / (clip.sample_rate as usize); 18 | 19 | while start_i + n_fft < range.1 { 20 | for i in start_i..start_i + n_fft { 21 | chunk[i - start_i] = *signal.get(i).unwrap_or(&0f32); 22 | } 23 | 24 | // Hann window 25 | for (i, sample) in chunk.iter_mut().enumerate() { 26 | let x = (i as f32) / (n_fft as f32); 27 | *sample *= 0.5 * (1.0 - (2.0 * std::f32::consts::PI * x).cos()); 28 | } 29 | 30 | // fft 31 | let mut spectrum = r2c.make_output_vec(); 32 | r2c.process(&mut chunk, &mut spectrum)?; 33 | let mut spectrum = spectrum.to_vec(); 34 | spectrum.truncate(n_fft / 2); 35 | spectrums.push(spectrum.into_iter().map(|x|
x.re.abs()).collect()); 36 | 37 | start_i += offset; 38 | } 39 | Ok(spectrums) 40 | } 41 | 42 | pub fn render_spectrogram( 43 | clip: &AudioClip, 44 | range: (usize, usize), 45 | width: usize, 46 | height: usize, 47 | ) -> Result> { 48 | let spectrums = spectrogram(clip, range)?; 49 | 50 | let pixels_per_spectrum = (width as f32) / (spectrums.len() as f32); 51 | let mut buffer = vec![0; width * height * 4]; 52 | 53 | let min_freq = 1f32; 54 | let max_freq = 6000f32; 55 | 56 | for x in 0..width { 57 | let column = (x as f32) / pixels_per_spectrum; 58 | 59 | if let Some(spectrum) = spectrums.get(column as usize) { 60 | let mut prev_y = 0; 61 | let mut prev_num = 0f32; 62 | let mut prev_denom = 0f32; 63 | for (i, cell) in spectrum.iter().enumerate() { 64 | let frequency = (i as f32) * 12000f32 / 2048f32; 65 | let trans_min = 2595f32 * (1f32 + min_freq / 700f32).log10(); 66 | let trans_max = 2595f32 * (1f32 + max_freq / 700f32).log10(); 67 | let trans_val = 2595f32 * (1f32 + frequency / 700f32).log10(); 68 | let coord = 2f32 * (trans_val - trans_min) / (trans_max - trans_min) - 1f32; 69 | let this_y = ((coord + 1.0) / 2.0 * (height as f32)).round() as usize; 70 | 71 | if this_y > prev_y { 72 | while prev_y < this_y { 73 | if prev_denom > 0.0 { 74 | let color = COLORMAP[(255f32 * (prev_num / prev_denom)) as usize]; 75 | buffer[(height - prev_y - 1) * width * 4 + x * 4] = 76 | (color[0] * 255.0) as u8; 77 | buffer[(height - prev_y - 1) * width * 4 + x * 4 + 1] = 78 | (color[1] * 255.0) as u8; 79 | buffer[(height - prev_y - 1) * width * 4 + x * 4 + 2] = 80 | (color[2] * 255.0) as u8; 81 | buffer[(height - prev_y - 1) * width * 4 + x * 4 + 3] = 255; 82 | } 83 | 84 | prev_y += 1; 85 | } 86 | prev_num = 0f32; 87 | prev_denom = 0f32; 88 | } 89 | 90 | let gain = 20f32 * cell.log10(); 91 | let min_gain = -80f32; 92 | let max_gain = 20f32; 93 | let a = ((gain - min_gain) / (max_gain - min_gain)).clamp(0.0f32, 1.0f32); 94 | prev_num += a; 95 | prev_denom += 1f32; 96 | } 97 | } 98 | } 99 | 100 | Ok(buffer) 101 | } 102 | 103 | // acton colourmap https://github.com/tsipkens/cmap/blob/master/acton.m 104 | // https://doi.org/10.5281/zenodo.1243862 105 | const COLORMAP: [[f32; 3]; 256] = [ 106 | [0.180_626_91, 0.129_915_98, 0.300_243_7], 107 | [0.184_609_89, 0.133_361_37, 0.303_782_5], 108 | [0.188_588_17, 0.136_829_14, 0.307_329_5], 109 | [0.192_546_78, 0.140_323_33, 0.310_900_24], 110 | [0.196_547_94, 0.143_831_66, 0.314_443_02], 111 | [0.200_488_48, 0.147_340_52, 0.318_014_7], 112 | [0.204_514_6, 0.150_846_35, 0.321_580_5], 113 | [0.208_493_22, 0.154_369_01, 0.325_153_4], 114 | [0.212_499_16, 0.157_916_04, 0.328_751_9], 115 | [0.216_523_45, 0.161_487_7, 0.332_345_5], 116 | [0.220_543_46, 0.164_996_95, 0.335_927_7], 117 | [0.224_525_66, 0.168_579_07, 0.339_540_27], 118 | [0.228_599_47, 0.172_137_96, 0.343_142_84], 119 | [0.232_626_91, 0.175_705_7, 0.346_748_98], 120 | [0.236_700_42, 0.179_308_92, 0.350_370_14], 121 | [0.240_737_56, 0.182_887_2, 0.353_977_98], 122 | [0.244_814_38, 0.186_501_56, 0.357_621_64], 123 | [0.248_929_55, 0.190_103_96, 0.361_237_26], 124 | [0.253_030_4, 0.193_733_16, 0.364_873_5], 125 | [0.257_149_28, 0.197_343_93, 0.368_523_33], 126 | [0.261_278_1, 0.200_942_31, 0.372_155_3], 127 | [0.265_425_18, 0.204_605_01, 0.375_810_06], 128 | [0.269_603_2, 0.208_226_44, 0.379_461_47], 129 | [0.273_782_73, 0.211_880_56, 0.383_123_55], 130 | [0.277_978_54, 0.215_504_94, 0.386_769], 131 | [0.282_204_8, 0.219_154_91, 0.390_434_5], 132 | [0.286_418_4, 0.222_807_35, 0.394_097_27], 133 | [0.290_685_86, 
0.226_458_12, 0.397_768_26], 134 | [0.294_963_78, 0.230_076_77, 0.401_431_05], 135 | [0.299_282_22, 0.233_732_12, 0.405_094_5], 136 | [0.303_592_15, 0.237_416_46, 0.408_762_04], 137 | [0.307_941, 0.241_039_32, 0.412_435_7], 138 | [0.312_310_96, 0.244_674_95, 0.416_090_25], 139 | [0.316_729_13, 0.248_352_07, 0.419_755_43], 140 | [0.321_154_92, 0.251_982_96, 0.423_403_32], 141 | [0.325_598_1, 0.255_609_6, 0.427_068_9], 142 | [0.330_097_38, 0.259_251_15, 0.430_713_47], 143 | [0.334_616_42, 0.262_862_15, 0.434_348_9], 144 | [0.339_156_3, 0.266_473_5, 0.437_993_14], 145 | [0.343_714_65, 0.270_084_44, 0.441_615_97], 146 | [0.348_324_92, 0.273_672_7, 0.445_225_98], 147 | [0.352_962_37, 0.277_245_55, 0.448_835_9], 148 | [0.357_637_8, 0.280_787_05, 0.452_428_9], 149 | [0.362_328_1, 0.284_341_63, 0.455_993_98], 150 | [0.367_058_13, 0.287_868_1, 0.459_556_67], 151 | [0.371_818, 0.291_392_74, 0.463_085_47], 152 | [0.376_633_38, 0.294_860_87, 0.466_617_35], 153 | [0.381_461_62, 0.298_321_2, 0.470_118_88], 154 | [0.386_313_68, 0.301_752_5, 0.473_580_8], 155 | [0.391_226, 0.305_174_83, 0.477_034_8], 156 | [0.396_150_98, 0.308_549_23, 0.480_453_34], 157 | [0.401_118, 0.311_869_26, 0.483_837_87], 158 | [0.406_106_32, 0.315_186_6, 0.487_202_3], 159 | [0.411_120_8, 0.318_454_8, 0.490_523_64], 160 | [0.416_167_53, 0.321_674_02, 0.493_798_9], 161 | [0.421_235_92, 0.324_851_66, 0.497_038_96], 162 | [0.426_337_3, 0.327_991_9, 0.500_233], 163 | [0.431_472_8, 0.331_069_14, 0.503_383_3], 164 | [0.436_611_7, 0.334_121_4, 0.506_489_5], 165 | [0.441_778_33, 0.337_097_17, 0.509_546_5], 166 | [0.446_950_82, 0.340_021_46, 0.512_544_04], 167 | [0.452_153_6, 0.342_887_52, 0.515_484_45], 168 | [0.457_348_9, 0.345_671_98, 0.518_381], 169 | [0.462_560_6, 0.348_416_45, 0.521_200_36], 170 | [0.467_787_68, 0.351_091_03, 0.523_964_64], 171 | [0.473_007_02, 0.353_679_12, 0.526_669_14], 172 | [0.478_238_14, 0.356_220_7, 0.529_299_56], 173 | [0.483_458_46, 0.358_663_5, 0.531_857_6], 174 | [0.488_681_97, 0.361_046, 0.534_347], 175 | [0.493_892_16, 0.363_355_52, 0.536_776_7], 176 | [0.499_088_67, 0.365_588_72, 0.539_117_04], 177 | [0.504_283, 0.367_732_8, 0.541_384_4], 178 | [0.509_443_64, 0.369_805_4, 0.543_589_53], 179 | [0.514_595_3, 0.371_792_67, 0.545_715_6], 180 | [0.519_713_64, 0.373_712_33, 0.547_757_7], 181 | [0.524_818_5, 0.375_533_7, 0.549_728_33], 182 | [0.529_894_53, 0.377_288_37, 0.551_622_7], 183 | [0.534_937_2, 0.378_951_58, 0.553_430_56], 184 | [0.539_947_57, 0.380_543_92, 0.555_172_74], 185 | [0.544_933_14, 0.382_040_62, 0.556_844_06], 186 | [0.549_88, 0.383_479_54, 0.558_422_6], 187 | [0.554_790_2, 0.384_818_05, 0.559_949_34], 188 | [0.559_666_2, 0.386_089_5, 0.561_388_4], 189 | [0.564_499_5, 0.387_290_42, 0.562_767_7], 190 | [0.569_293_6, 0.388_424_75, 0.564_069_7], 191 | [0.574_053_9, 0.389_477_55, 0.565_293_2], 192 | [0.578_778_15, 0.390_462_7, 0.566_472_8], 193 | [0.583_457_65, 0.391_391_37, 0.567_578_85], 194 | [0.588_098_94, 0.392_240_26, 0.568_619_6], 195 | [0.592_717_3, 0.393_034_6, 0.569_605_5], 196 | [0.597_284_2, 0.393_772_24, 0.570_537_7], 197 | [0.601_813_5, 0.394_455_6, 0.571_418_3], 198 | [0.606_312_6, 0.395_091_1, 0.572_252], 199 | [0.610_785_9, 0.395_679_24, 0.573_036_2], 200 | [0.615_216_26, 0.396_213_56, 0.573_769_2], 201 | [0.619_633_8, 0.396_704_4, 0.574_462_83], 202 | [0.624_015_4, 0.397_167_98, 0.575_128_9], 203 | [0.628_373_15, 0.397_605_36, 0.575_766_8], 204 | [0.632_707_1, 0.398_012_04, 0.576_369_76], 205 | [0.637_028_34, 0.398_390_44, 0.576_941_13], 206 | [0.641_324_76, 0.398_748_5, 
0.577_489_55], 207 | [0.645_618_4, 0.399_093_87, 0.578_027_07], 208 | [0.649_908_24, 0.399_434_63, 0.578_558_9], 209 | [0.654_185_2, 0.399_779_02, 0.579_085_77], 210 | [0.658_464_25, 0.400_134_8, 0.579_609_2], 211 | [0.662_752_1, 0.400_503_75, 0.580_132_25], 212 | [0.667_036_24, 0.400_884_93, 0.580_662_85], 213 | [0.671_339_3, 0.401_281_24, 0.581_210_2], 214 | [0.675_657_75, 0.401_707_95, 0.581_780_1], 215 | [0.679_990_9, 0.402_184_72, 0.582_378_1], 216 | [0.684_352_16, 0.402_720_4, 0.583_011_15], 217 | [0.688_750_57, 0.403_316_26, 0.583_686_05], 218 | [0.693_169_7, 0.403_970_48, 0.584_410_1], 219 | [0.697_618_4, 0.404_696_7, 0.585_192_1], 220 | [0.702_108_74, 0.405_530_3, 0.586_029_9], 221 | [0.706_646_26, 0.406_447_65, 0.586_932_4], 222 | [0.711_215_56, 0.407_470_02, 0.587_915_06], 223 | [0.715_826_75, 0.408_614_52, 0.588_983_06], 224 | [0.720_468_5, 0.409_892_05, 0.590_144_63], 225 | [0.725_159_94, 0.411_292_02, 0.591_406_6], 226 | [0.729_876_4, 0.412_854_94, 0.592_771_65], 227 | [0.734_623_13, 0.414_551_2, 0.594_233_3], 228 | [0.739_394_6, 0.416_419_18, 0.595_813_4], 229 | [0.744_168_64, 0.418_443_9, 0.597_513_44], 230 | [0.748_950_3, 0.420_647_92, 0.599_331_5], 231 | [0.753_727_73, 0.423_021_44, 0.601_262_2], 232 | [0.758_483_95, 0.425_579_04, 0.603_322_45], 233 | [0.763_196_6, 0.428_302_35, 0.605_490_57], 234 | [0.767_860_7, 0.431_199_25, 0.607_790_4], 235 | [0.772_459_75, 0.434_245_88, 0.610_184_7], 236 | [0.776_976_5, 0.437_474_04, 0.612_689_3], 237 | [0.781_381_9, 0.440_823_2, 0.615_295_2], 238 | [0.785_670_1, 0.444_338_08, 0.617_993_35], 239 | [0.789_818_94, 0.447_950_5, 0.620_776], 240 | [0.793_821_2, 0.451_697_86, 0.623_623_97], 241 | [0.797_645_2, 0.455_528_23, 0.626_524_75], 242 | [0.801_296_35, 0.459_445_12, 0.629_480_06], 243 | [0.804_757_3, 0.463_415_32, 0.632_478_8], 244 | [0.808_016_24, 0.467_454_58, 0.635_497_87], 245 | [0.811_068_06, 0.471_521_7, 0.638_538_3], 246 | [0.813_906_5, 0.475_606_68, 0.641_580_34], 247 | [0.816_538_93, 0.479_697_17, 0.644_622_2], 248 | [0.818_947_1, 0.483_773_14, 0.647_655_67], 249 | [0.821_153_4, 0.487_855_55, 0.650_670_95], 250 | [0.823_149_5, 0.491_897_7, 0.653_657_1], 251 | [0.824_942_47, 0.495_898_07, 0.656_620_86], 252 | [0.826_546_6, 0.499_866_93, 0.659_540_7], 253 | [0.827_957_6, 0.503_800_45, 0.662_431_8], 254 | [0.829_202_3, 0.507_681_25, 0.665_272_53], 255 | [0.830_272_14, 0.511_500_66, 0.668_084_1], 256 | [0.831_190_35, 0.515_275_54, 0.670_847_4], 257 | [0.831_962_05, 0.519_007_27, 0.673_567_6], 258 | [0.832_595_17, 0.522_672_1, 0.676_250_2], 259 | [0.833_109_26, 0.526_298_3, 0.678_890_1], 260 | [0.833_513_26, 0.529_868_25, 0.681_492_4], 261 | [0.833_814, 0.533_389_1, 0.684_062_2], 262 | [0.834_021_2, 0.536_885_3, 0.686_602_4], 263 | [0.834_145_25, 0.540_322_3, 0.689_112_66], 264 | [0.834_195_6, 0.543_731_45, 0.691_582_9], 265 | [0.834_181_1, 0.547_106_27, 0.694_027_9], 266 | [0.834_109_6, 0.550_453_07, 0.696_460_84], 267 | [0.833_988_55, 0.553_768_9, 0.698_860_35], 268 | [0.833_824_75, 0.557_074_1, 0.701_248_35], 269 | [0.833_624_5, 0.560_345_6, 0.703_619_5], 270 | [0.833_393_9, 0.563_608_8, 0.705_975_6], 271 | [0.833_139_06, 0.566_852_5, 0.708_318_6], 272 | [0.832_866_25, 0.570_077_3, 0.710_651_46], 273 | [0.832_581_16, 0.573_310_6, 0.712_971_2], 274 | [0.832_287_9, 0.576_534_7, 0.715_295_1], 275 | [0.831_989_47, 0.579_747_56, 0.717_610_6], 276 | [0.831_688_64, 0.582_952_44, 0.719_917_83], 277 | [0.831_388_8, 0.586_172_6, 0.722_220_66], 278 | [0.831_093_97, 0.589_380_3, 0.724_534_1], 279 | [0.830_807_86, 0.592_613_1, 
0.726_840_9], 280 | [0.830_533_6, 0.595_830_7, 0.729_151_7], 281 | [0.830_274, 0.599_066_6, 0.731_464_27], 282 | [0.830_031_2, 0.602_303_7, 0.733_783_96], 283 | [0.829_807_46, 0.605_550_35, 0.736_101], 284 | [0.829_604_6, 0.608_823, 0.738_432_17], 285 | [0.829_424_4, 0.612_085_34, 0.740_763_8], 286 | [0.829_268_4, 0.615_368_7, 0.743_110_84], 287 | [0.829_138_3, 0.618_676_1, 0.745_453_36], 288 | [0.829_035_6, 0.621_976_5, 0.747_813_76], 289 | [0.828_961_73, 0.625_307_5, 0.750_172_3], 290 | [0.828_918_1, 0.628_653_05, 0.752_542_6], 291 | [0.828_905_8, 0.632_007_8, 0.754_931_15], 292 | [0.828_926, 0.635_377_94, 0.757_317_3], 293 | [0.828_979_55, 0.638_769_57, 0.759_713_77], 294 | [0.829_067_05, 0.642_177_7, 0.762_124_06], 295 | [0.829_189_24, 0.645_596_15, 0.764_546_45], 296 | [0.829_346_5, 0.649_043_8, 0.766_972_6], 297 | [0.829_539_36, 0.652_496, 0.769_410_9], 298 | [0.829_768_3, 0.655_978, 0.771_864_1], 299 | [0.830_034, 0.659_465_7, 0.774_318_4], 300 | [0.830_337_1, 0.662_980_6, 0.776_794_3], 301 | [0.830_678, 0.666_499_8, 0.779_268_5], 302 | [0.831_057_1, 0.670_054_1, 0.781_761_4], 303 | [0.831_473_77, 0.673_611_76, 0.784_261_7], 304 | [0.831_926_2, 0.677_196_15, 0.786_77], 305 | [0.832_413_2, 0.680_781_9, 0.789_285_96], 306 | [0.832_938_85, 0.684_395_73, 0.791_810_1], 307 | [0.833_506_35, 0.688_031_6, 0.794_350_15], 308 | [0.834_109_2, 0.691_677_03, 0.796_890_9], 309 | [0.834_744_6, 0.695_339_3, 0.799_452_4], 310 | [0.835_417_3, 0.699_013_65, 0.802_010_36], 311 | [0.836_126_5, 0.702_712_1, 0.804_585_93], 312 | [0.836_872, 0.706_423_3, 0.807_167_23], 313 | [0.837_653, 0.710_144_6, 0.809_754_4], 314 | [0.838_461_7, 0.713_881_1, 0.812_349_56], 315 | [0.839_310_6, 0.717_638_7, 0.814_954_6], 316 | [0.840_191_84, 0.721_397_16, 0.817_566_2], 317 | [0.841_100_75, 0.725_185_4, 0.820_185], 318 | [0.842_045_07, 0.728_973, 0.822_815_7], 319 | [0.843_015_9, 0.732_783_2, 0.825_446_4], 320 | [0.844_017_15, 0.736_596_76, 0.828_086_73], 321 | [0.845_051_47, 0.740_429_64, 0.830_737_7], 322 | [0.846_108_56, 0.744_271_34, 0.833_390_1], 323 | [0.847_196_2, 0.748_128_4, 0.836_048_7], 324 | [0.848_311_66, 0.751_991_87, 0.838_712_1], 325 | [0.849_455_2, 0.755_866_65, 0.841_385_66], 326 | [0.850_615, 0.759_748_46, 0.844_059_6], 327 | [0.851_805_75, 0.763_645_9, 0.846_739_9], 328 | [0.853_016_85, 0.767_550_3, 0.849_431_9], 329 | [0.854_248_8, 0.771_471_44, 0.852_124_3], 330 | [0.855_507_55, 0.775_390_4, 0.854_819_2], 331 | [0.856_778_44, 0.779_324, 0.857_516_7], 332 | [0.858_072_1, 0.783_265_35, 0.860_226_33], 333 | [0.859_386_8, 0.787_217_6, 0.862_937_15], 334 | [0.860_717, 0.791_171_43, 0.865_646_8], 335 | [0.862_067_94, 0.795_136_3, 0.868_370_3], 336 | [0.863_430_74, 0.799_111_6, 0.871_086_5], 337 | [0.864_804_15, 0.803_085_9, 0.873_811_2], 338 | [0.866_198_54, 0.807_075_7, 0.876_537_9], 339 | [0.867_604_14, 0.811_065_4, 0.879_271_15], 340 | [0.869_025_4, 0.815_062_64, 0.882_008_7], 341 | [0.870_448_8, 0.819_064_26, 0.884_748], 342 | [0.871_896_5, 0.823_078, 0.887_486_8], 343 | [0.873_344_66, 0.827_095_3, 0.890_233], 344 | [0.874_805_57, 0.831_117_9, 0.892_981], 345 | [0.876_275_1, 0.835_141_54, 0.895_732_76], 346 | [0.877_755_1, 0.839_172_84, 0.898_483_04], 347 | [0.879_238_25, 0.843_207_2, 0.901_239_93], 348 | [0.880_731_5, 0.847_246_47, 0.903_997_1], 349 | [0.882_229_8, 0.851_290_3, 0.906_760_3], 350 | [0.883_737_44, 0.855_346_26, 0.909_521_76], 351 | [0.885_244_2, 0.859_397, 0.912_287_06], 352 | [0.886_758_74, 0.863_457_7, 0.915_052_65], 353 | [0.888_274_2, 0.867_515_1, 0.917_826_7], 354 | [0.889_792_6, 
0.871_582_1, 0.920_591_7], 355 | [0.891_315, 0.875_649_4, 0.923_371], 356 | [0.892_84, 0.879_717_77, 0.926_144_2], 357 | [0.894_367_8, 0.883_796_9, 0.928_918_8], 358 | [0.895_892_56, 0.887_870_4, 0.931_698_8], 359 | [0.897_421_3, 0.891_954_3, 0.934_479], 360 | [0.898_946_05, 0.896_037_16, 0.937_266], 361 | [0.900_471_75, 0.900_123_1, 0.940_051_14], 362 | ]; 363 | -------------------------------------------------------------------------------- /core/napi/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "oxygen-napi" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [lib] 7 | crate-type = ["cdylib"] 8 | 9 | [features] 10 | jack = ["oxygen-core/jack"] 11 | whisper_dummy = ["oxygen-core/whisper_dummy"] 12 | 13 | [dependencies] 14 | oxygen-core = { path = "../lib" } 15 | napi = {version = "2.12.6", features = ["napi8"]} 16 | napi-derive = "2.12.5" 17 | chrono = "0.4.24" 18 | log = "0.4.17" 19 | lazy_static = "1.4.0" 20 | 21 | [build-dependencies] 22 | napi-build = "2.0.1" 23 | -------------------------------------------------------------------------------- /core/napi/build.rs: -------------------------------------------------------------------------------- 1 | fn main() { 2 | napi_build::setup(); 3 | } 4 | -------------------------------------------------------------------------------- /core/napi/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "oxygen-core", 3 | "version": "0.0.0", 4 | "lockfileVersion": 2, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "oxygen-core", 9 | "version": "0.0.0", 10 | "license": "MIT", 11 | "devDependencies": { 12 | "@napi-rs/cli": "^2.15.2" 13 | } 14 | }, 15 | "node_modules/@napi-rs/cli": { 16 | "version": "2.15.2", 17 | "resolved": "https://registry.npmjs.org/@napi-rs/cli/-/cli-2.15.2.tgz", 18 | "integrity": "sha512-80tBCtCnEhAmFtB9oPM0FL74uW7fAmtpeqjvERH7Q1z/aZzCAs/iNfE7U3ehpwg9Q07Ob2Eh/+1guyCdX/p24w==", 19 | "dev": true, 20 | "bin": { 21 | "napi": "scripts/index.js" 22 | }, 23 | "engines": { 24 | "node": ">= 10" 25 | }, 26 | "funding": { 27 | "type": "github", 28 | "url": "https://github.com/sponsors/Brooooooklyn" 29 | } 30 | } 31 | }, 32 | "dependencies": { 33 | "@napi-rs/cli": { 34 | "version": "2.15.2", 35 | "resolved": "https://registry.npmjs.org/@napi-rs/cli/-/cli-2.15.2.tgz", 36 | "integrity": "sha512-80tBCtCnEhAmFtB9oPM0FL74uW7fAmtpeqjvERH7Q1z/aZzCAs/iNfE7U3ehpwg9Q07Ob2Eh/+1guyCdX/p24w==", 37 | "dev": true 38 | } 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /core/napi/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "oxygen-core", 3 | "version": "0.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "repository": "", 7 | "license": "MIT", 8 | "napi": { 9 | "name": "oxygen-core", 10 | "triples": { 11 | "defaults": true, 12 | "additional": [ 13 | "aarch64-apple-darwin" 14 | ] 15 | } 16 | }, 17 | "files": [ 18 | "index.d.ts", 19 | "index.js", 20 | "*.node" 21 | ], 22 | "scripts": { 23 | "build": "napi build --platform --release", 24 | "build:debug": "napi build --platform", 25 | "build:with-jack": "napi build --platform --release --features jack", 26 | "build:with-whisper-dummy": "napi build --platform --features whisper_dummy", 27 | "build:debug-with-jack": "napi build --platform --features jack", 28 | "build:debug-with-whisper-dummy": "napi build --platform --features whisper_dummy" 
29 | }, 30 | "devDependencies": { 31 | "@napi-rs/cli": "^2.15.2" 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /core/napi/src/js_logger.rs: -------------------------------------------------------------------------------- 1 | use lazy_static::lazy_static; 2 | use log::{Level, Log, Metadata, Record}; 3 | use napi::{ 4 | threadsafe_function::{ 5 | ErrorStrategy, ThreadSafeCallContext, ThreadsafeFunction, ThreadsafeFunctionCallMode, 6 | }, 7 | Error, JsFunction, JsString, Result, 8 | }; 9 | use std::sync::Mutex; 10 | 11 | pub struct JsLogger(Mutex>>); 12 | 13 | lazy_static! { 14 | static ref LOGGER: JsLogger = JsLogger(Mutex::new(None)); 15 | } 16 | 17 | impl Log for JsLogger { 18 | fn enabled(&self, metadata: &Metadata) -> bool { 19 | metadata.level() <= log::max_level() 20 | } 21 | 22 | fn log(&self, record: &Record) { 23 | let logger = self.0.lock().unwrap(); 24 | if let Some(logger) = &*logger { 25 | if !self.enabled(record.metadata()) { 26 | return; 27 | } 28 | 29 | let level = match record.level() { 30 | Level::Error => "error", 31 | Level::Trace => "trace", 32 | Level::Warn => "warn", 33 | Level::Info => "info", 34 | Level::Debug => "debug", 35 | } 36 | .to_owned(); 37 | 38 | logger.call( 39 | (level, std::format!("{}", record.args())), 40 | ThreadsafeFunctionCallMode::NonBlocking, 41 | ); 42 | } 43 | } 44 | 45 | fn flush(&self) {} 46 | } 47 | 48 | impl JsLogger { 49 | pub fn set_logger(log_cb: JsFunction) -> Result<()> { 50 | let mut logger = LOGGER.0.lock().unwrap(); 51 | let should_init = logger.is_none(); 52 | *logger = Some(log_cb.create_threadsafe_function( 53 | 0, 54 | |ctx: ThreadSafeCallContext<(String, String)>| { 55 | Ok(vec![ 56 | ctx.env.create_string_from_std(ctx.value.0)?, 57 | ctx.env.create_string_from_std(ctx.value.1)?, 58 | ]) as Result> 59 | }, 60 | )?); 61 | 62 | if should_init { 63 | let logger: &'static JsLogger = &LOGGER; 64 | log::set_logger(logger).map_err(|e| Error::from_reason(format!("{:?}", e)))?; 65 | log::set_max_level(log::LevelFilter::Trace); 66 | } 67 | 68 | Ok(()) 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /core/napi/src/lib.rs: -------------------------------------------------------------------------------- 1 | use napi::bindgen_prelude::{AsyncTask, FromNapiValue, ToNapiValue}; 2 | 3 | use std::{ffi::OsStr, path::Path}; 4 | 5 | use chrono::prelude::*; 6 | use napi::{ 7 | bindgen_prelude::Buffer, 8 | threadsafe_function::{ErrorStrategy, ThreadsafeFunction, ThreadsafeFunctionCallMode}, 9 | Env, Error, JsDate, JsFunction, JsUnknown, Result, Task, 10 | }; 11 | use napi_derive::napi; 12 | use oxygen_core::audio_clip::{ 13 | AudioBackend, AudioClip, ClipHandle, PlayHandle, RecordHandle, StreamHandle, 14 | }; 15 | use oxygen_core::db::{ClipMeta, Db}; 16 | use oxygen_core::language_processor::{AsyncLanguageProcessor, Segment, TranscriptionHandle}; 17 | 18 | pub struct TranscriptionTask(Option); 19 | 20 | impl Task for TranscriptionTask { 21 | type Output = Vec; 22 | type JsValue = Vec; 23 | 24 | fn compute(&mut self) -> Result { 25 | let handle = self 26 | .0 27 | .take() 28 | .ok_or_else(|| Error::from_reason("no handle"))?; 29 | 30 | handle 31 | .resolve() 32 | .map_err(|e| Error::from_reason(format!("{:?}", e))) 33 | } 34 | 35 | fn resolve(&mut self, _env: Env, output: Vec) -> Result { 36 | Ok(output 37 | .into_iter() 38 | .map(|((t0, t1), segment)| JsSegment { t0, t1, segment }) 39 | .collect::>()) 40 | } 41 | } 42 | 43 | mod js_logger; 44 | use 
js_logger::JsLogger; 45 | 46 | enum Tab { 47 | Record { 48 | handle: Option, 49 | }, 50 | Play { 51 | audio_clip: AudioClip, 52 | handle: PlayHandle, 53 | }, 54 | Pause { 55 | audio_clip: AudioClip, 56 | time: f64, 57 | }, 58 | } 59 | 60 | impl Default for Tab { 61 | fn default() -> Self { 62 | Tab::Record { handle: None } 63 | } 64 | } 65 | 66 | #[napi] 67 | pub enum RenderMode { 68 | Waveform, 69 | Spectrogram, 70 | } 71 | 72 | #[napi] 73 | pub struct UiState { 74 | tab: Tab, 75 | db: Db, 76 | deleted_clip: Option, 77 | update_cb: ThreadsafeFunction<(), ErrorStrategy::Fatal>, 78 | host: AudioBackend, 79 | language_processor: AsyncLanguageProcessor, 80 | render_mode: RenderMode, 81 | } 82 | 83 | #[napi] 84 | pub struct JsSegment { 85 | pub t0: f64, 86 | pub t1: f64, 87 | pub segment: String, 88 | } 89 | 90 | #[napi] 91 | pub struct JsClipMeta(ClipMeta); 92 | 93 | #[napi] 94 | impl JsClipMeta { 95 | #[napi(getter)] 96 | pub fn get_id(&self) -> usize { 97 | self.0.id 98 | } 99 | 100 | #[napi(getter)] 101 | pub fn get_name(&self) -> &str { 102 | &self.0.name 103 | } 104 | 105 | #[napi(getter, ts_return_type = "Date")] 106 | pub fn get_date(&self, env: Env) -> Result { 107 | env.create_date(self.0.date.timestamp_millis() as f64) 108 | } 109 | } 110 | 111 | impl From for JsClipMeta { 112 | fn from(clip_meta: ClipMeta) -> Self { 113 | JsClipMeta(clip_meta) 114 | } 115 | } 116 | 117 | impl From<&AudioClip> for JsClipMeta { 118 | fn from(clip: &AudioClip) -> Self { 119 | JsClipMeta(ClipMeta { 120 | id: clip.id.unwrap_or(0), 121 | name: clip.name.clone(), 122 | date: clip.date, 123 | }) 124 | } 125 | } 126 | 127 | #[napi] 128 | impl UiState { 129 | #[napi(constructor)] 130 | pub fn new(update_cb: JsFunction, log_cb: JsFunction, in_memory: bool) -> Result { 131 | JsLogger::set_logger(log_cb)?; 132 | 133 | Ok(UiState { 134 | tab: Tab::Record { handle: None }, 135 | db: if in_memory { 136 | Db::in_memory() 137 | } else { 138 | Db::open() 139 | } 140 | .map_err(|e| Error::from_reason(format!("{:?}", e)))?, 141 | deleted_clip: None, 142 | update_cb: update_cb 143 | .create_threadsafe_function(0, |_ctx| Ok(vec![] as Vec))?, 144 | 145 | #[cfg(feature = "jack")] 146 | host: if std::env::var("OXYGEN_NAPI_USE_JACK").unwrap_or_default() == "1" { 147 | AudioBackend::Jack 148 | } else { 149 | AudioBackend::Default 150 | }, 151 | 152 | #[cfg(not(feature = "jack"))] 153 | host: AudioBackend::Default, 154 | 155 | language_processor: AsyncLanguageProcessor::new() 156 | .map_err(|e| Error::from_reason(format!("{:?}", e)))?, 157 | 158 | render_mode: RenderMode::Waveform, 159 | }) 160 | } 161 | 162 | #[napi] 163 | pub fn get_clips(&self) -> Result> { 164 | self.db 165 | .list() 166 | .map_err(|e| Error::from_reason(format!("{:?}", e))) 167 | .map(|clips| clips.into_iter().map(JsClipMeta::from).collect()) 168 | } 169 | 170 | #[napi(getter)] 171 | pub fn get_current_clip_id(&self) -> Option { 172 | match &self.tab { 173 | Tab::Record { .. } => None, 174 | Tab::Play { audio_clip, .. } | Tab::Pause { audio_clip, .. } => { 175 | Some(audio_clip.id.expect("Saved clips must have IDs")) 176 | } 177 | } 178 | } 179 | 180 | #[napi(getter)] 181 | pub fn get_current_clip(&self) -> Option { 182 | match &self.tab { 183 | Tab::Record { .. } => None, 184 | Tab::Play { audio_clip, .. } | Tab::Pause { audio_clip, .. 
} => { 185 | Some(JsClipMeta::from(audio_clip)) 186 | } 187 | } 188 | } 189 | 190 | #[napi(getter)] 191 | pub fn get_render_mode(&self) -> RenderMode { 192 | self.render_mode 193 | } 194 | 195 | #[napi] 196 | pub fn set_render_mode(&mut self, render_mode: RenderMode) { 197 | self.render_mode = render_mode; 198 | 199 | self.update_cb 200 | .call((), ThreadsafeFunctionCallMode::NonBlocking); 201 | } 202 | 203 | #[napi(getter)] 204 | pub fn get_record_tab_selected(&self) -> bool { 205 | matches!(&self.tab, Tab::Record { .. }) 206 | } 207 | 208 | #[napi] 209 | pub fn set_current_clip_id(&mut self, id: u32) -> Result<()> { 210 | if let Some(audio_clip) = self 211 | .db 212 | .load_by_id(id as usize) 213 | .map_err(|e| Error::from_reason(format!("{:?}", e)))? 214 | { 215 | self.tab = Tab::Pause { 216 | audio_clip, 217 | time: 0.0, 218 | }; 219 | self.update_cb 220 | .call((), ThreadsafeFunctionCallMode::NonBlocking); 221 | } 222 | Ok(()) 223 | } 224 | 225 | #[napi] 226 | pub fn set_current_tab_record(&mut self) { 227 | self.tab = Tab::Record { handle: None }; 228 | self.update_cb 229 | .call((), ThreadsafeFunctionCallMode::NonBlocking); 230 | } 231 | 232 | #[napi] 233 | pub fn play(&mut self, on_done: JsFunction) -> Result<()> { 234 | self.tab = match std::mem::take(&mut self.tab) { 235 | Tab::Pause { audio_clip, time } => { 236 | let new_handle = audio_clip 237 | .play(self.host) 238 | .map_err(|e| Error::from_reason(format!("{:?}", e)))?; 239 | 240 | let on_done: ThreadsafeFunction<(), ErrorStrategy::Fatal> = 241 | on_done.create_threadsafe_function(0, |_ctx| Ok(vec![] as Vec))?; 242 | new_handle.connect_done(move || { 243 | on_done.call((), ThreadsafeFunctionCallMode::NonBlocking); 244 | }); 245 | 246 | let update_cb = self.update_cb.clone(); 247 | new_handle.connect_changed(move || { 248 | update_cb.call((), ThreadsafeFunctionCallMode::NonBlocking); 249 | }); 250 | 251 | new_handle.seek(time); 252 | 253 | Tab::Play { 254 | audio_clip, 255 | handle: new_handle, 256 | } 257 | } 258 | tab => tab, 259 | }; 260 | 261 | self.update_cb 262 | .call((), ThreadsafeFunctionCallMode::NonBlocking); 263 | 264 | Ok(()) 265 | } 266 | 267 | #[napi] 268 | pub fn record(&mut self) -> Result<()> { 269 | if let Tab::Record { handle } = &mut self.tab { 270 | let name = Local::now().format("%Y-%m-%d %H:%M:%S").to_string(); 271 | let new_handle = AudioClip::record(self.host, name) 272 | .map_err(|e| Error::from_reason(format!("{:?}", e)))?; 273 | 274 | *handle = Some(new_handle); 275 | 276 | self.update_cb 277 | .call((), ThreadsafeFunctionCallMode::NonBlocking); 278 | } 279 | 280 | Ok(()) 281 | } 282 | 283 | #[napi] 284 | pub fn seek(&mut self, time: f64) -> Result<()> { 285 | match &mut self.tab { 286 | Tab::Play { handle, .. } => { 287 | handle.seek(time); 288 | } 289 | Tab::Pause { 290 | time: paused_time, .. 291 | } => { 292 | *paused_time = time; 293 | } 294 | Tab::Record { .. 
} => {} 295 | } 296 | 297 | self.update_cb 298 | .call((), ThreadsafeFunctionCallMode::NonBlocking); 299 | 300 | Ok(()) 301 | } 302 | 303 | #[napi] 304 | pub fn stop(&mut self) -> Result<()> { 305 | self.tab = match std::mem::take(&mut self.tab) { 306 | Tab::Record { mut handle } => { 307 | if let Some(handle) = handle.take() { 308 | let mut audio_clip = handle.stop(); 309 | self.db 310 | .save(&mut audio_clip) 311 | .map_err(|e| Error::from_reason(format!("{:?}", e)))?; 312 | 313 | Tab::Pause { 314 | audio_clip, 315 | time: 0.0, 316 | } 317 | } else { 318 | Tab::Record { handle: None } 319 | } 320 | } 321 | Tab::Play { audio_clip, handle } => Tab::Pause { 322 | audio_clip, 323 | time: handle.time(), 324 | }, 325 | Tab::Pause { audio_clip, time } => Tab::Pause { audio_clip, time }, 326 | }; 327 | 328 | self.update_cb 329 | .call((), ThreadsafeFunctionCallMode::NonBlocking); 330 | 331 | Ok(()) 332 | } 333 | 334 | #[napi] 335 | pub fn delete_current_clip(&mut self) -> Result<()> { 336 | let mut tab = Tab::Record { handle: None }; 337 | std::mem::swap(&mut tab, &mut self.tab); 338 | 339 | self.update_cb 340 | .call((), ThreadsafeFunctionCallMode::NonBlocking); 341 | 342 | if let Tab::Play { mut audio_clip, .. } | Tab::Pause { mut audio_clip, .. } = tab { 343 | if let Some(id) = audio_clip.id { 344 | self.db 345 | .delete_by_id(id) 346 | .map_err(|e| Error::from_reason(format!("{:?}", e)))?; 347 | audio_clip.id = None; 348 | self.deleted_clip = Some(audio_clip); 349 | } else { 350 | return Err(Error::from_reason("Clip is not saved to db")); 351 | } 352 | } else { 353 | return Err(Error::from_reason("No clip selected")); 354 | } 355 | 356 | Ok(()) 357 | } 358 | 359 | #[napi] 360 | pub fn undelete_current_clip(&mut self) -> Result<()> { 361 | if let Some(mut audio_clip) = self.deleted_clip.take() { 362 | self.db 363 | .save(&mut audio_clip) 364 | .map_err(|e| Error::from_reason(format!("{:?}", e)))?; 365 | 366 | self.tab = Tab::Pause { 367 | audio_clip, 368 | time: 0.0, // TODO: remember time? 369 | }; 370 | 371 | self.update_cb 372 | .call((), ThreadsafeFunctionCallMode::NonBlocking); 373 | } else { 374 | return Err(Error::from_reason("No clip to undelete")); 375 | } 376 | 377 | Ok(()) 378 | } 379 | 380 | #[napi] 381 | pub fn rename_current_clip(&mut self, new_name: String) -> Result<()> { 382 | let clip_id; 383 | 384 | if let Tab::Pause { 385 | audio_clip: AudioClip { id: Some(id), .. }, 386 | .. 387 | } 388 | | Tab::Play { 389 | audio_clip: AudioClip { id: Some(id), .. }, 390 | .. 391 | } = &mut self.tab 392 | { 393 | clip_id = *id; 394 | 395 | self.db 396 | .rename_by_id(*id, &new_name) 397 | .map_err(|e| Error::from_reason(format!("{:?}", e)))?; 398 | } else { 399 | return Err(Error::from_reason("No clip selected")); 400 | } 401 | 402 | self.set_current_clip_id(clip_id as u32)?; 403 | self.update_cb 404 | .call((), ThreadsafeFunctionCallMode::NonBlocking); 405 | 406 | Ok(()) 407 | } 408 | 409 | fn clip(&self) -> Option<&dyn ClipHandle> { 410 | match &self.tab { 411 | Tab::Record { 412 | handle: Some(handle), 413 | } => Some(handle as &dyn ClipHandle), 414 | Tab::Record { handle: None } => None, 415 | Tab::Play { audio_clip, .. } => Some(audio_clip), 416 | Tab::Pause { audio_clip, .. 
} => Some(audio_clip), 417 | } 418 | } 419 | 420 | #[napi] 421 | pub fn draw_current_clip(&mut self, width: u32, height: u32) -> Result> { 422 | let width = width as usize; 423 | let height = height as usize; 424 | 425 | let clip = match self.clip() { 426 | Some(clip) => clip, 427 | None => { 428 | return Ok(None); 429 | } 430 | }; 431 | 432 | if width == 0 || height == 0 { 433 | return Ok(Some(vec![].into())); 434 | } 435 | 436 | match self.render_mode { 437 | RenderMode::Waveform => Ok(Some( 438 | clip.render_waveform((self.x1_samples(), self.x2_samples()), width, height) 439 | .into(), 440 | )), 441 | RenderMode::Spectrogram => Ok(Some( 442 | clip.render_spectrogram((self.x1_samples(), self.x2_samples()), width, height) 443 | .map_err(|err| Error::from_reason(format!("{:?}", err)))? 444 | .into(), 445 | )), 446 | } 447 | } 448 | 449 | pub fn x1_samples(&self) -> usize { 450 | 0 451 | } 452 | 453 | pub fn x2_samples(&self) -> usize { 454 | self.clip() 455 | .map(|clip| clip.num_samples().max(clip.sample_rate() * 10)) 456 | .unwrap_or(0) 457 | } 458 | 459 | #[napi(getter)] 460 | pub fn get_time(&self) -> f64 { 461 | match &self.tab { 462 | Tab::Record { 463 | handle: Some(handle), 464 | } => handle.time(), 465 | Tab::Play { handle, .. } => handle.time(), 466 | Tab::Pause { time, .. } => *time, 467 | Tab::Record { handle: None } => 0f64, 468 | } 469 | } 470 | 471 | #[napi(getter)] 472 | pub fn get_duration(&self) -> f64 { 473 | match self.clip() { 474 | Some(clip) => (clip.num_samples() as f64) / (clip.sample_rate() as f64), 475 | None => 0f64, 476 | } 477 | } 478 | 479 | #[napi(getter)] 480 | pub fn get_time_start(&self) -> f32 { 481 | 0.0 482 | } 483 | 484 | #[napi(getter)] 485 | pub fn get_time_end(&self) -> f32 { 486 | self.clip() 487 | .map(|clip| { 488 | (clip.num_samples().max(clip.sample_rate() * 10) as f32) / clip.sample_rate() as f32 489 | }) 490 | .unwrap_or(0f32) 491 | } 492 | 493 | #[napi(ts_return_type = "Promise | null")] 494 | pub fn transcribe(&self) -> Result>> { 495 | let clip: &AudioClip = match &self.tab { 496 | Tab::Record { 497 | handle: Some(_handle), 498 | } => { 499 | // TODO: transcribe during recording 500 | return Ok(None); 501 | } 502 | Tab::Record { handle: None } => { 503 | return Ok(None); 504 | } 505 | Tab::Play { audio_clip, .. } => audio_clip as &AudioClip, 506 | Tab::Pause { audio_clip, .. } => audio_clip as &AudioClip, 507 | }; 508 | 509 | let clip = clip.clone(); 510 | 511 | Ok(Some(AsyncTask::new(TranscriptionTask(Some( 512 | self.language_processor 513 | .transcribe(clip) 514 | .map_err(|err| Error::from_reason(format!("{:?}", err)))?, 515 | ))))) 516 | } 517 | 518 | #[napi(getter)] 519 | pub fn get_streaming(&self) -> bool { 520 | match &self.tab { 521 | Tab::Record { handle } => handle.is_some(), 522 | Tab::Play { .. } => true, 523 | Tab::Pause { .. } => false, 524 | } 525 | } 526 | 527 | #[napi] 528 | pub fn import(&mut self, path: String) -> Result<()> { 529 | let name = Path::new(&path) 530 | .file_stem() 531 | .ok_or_else(|| Error::from_reason(format!("Invalid path: {}", path)))? 532 | .to_str() 533 | .ok_or_else(|| Error::from_reason("Path is not utf8"))? 534 | .to_string(); 535 | 536 | if self 537 | .db 538 | .load(&name) 539 | .map_err(|err| Error::from_reason(format!("{:?}", err)))? 
540 | .is_some() 541 | { 542 | return Err(Error::from_reason(format!( 543 | "There is already a clip named {}", 544 | name 545 | ))); 546 | } 547 | let mut audio_clip = AudioClip::import(name, path) 548 | .map_err(|err| Error::from_reason(format!("{:?}", err)))?; 549 | self.db 550 | .save(&mut audio_clip) 551 | .map_err(|err| Error::from_reason(format!("{:?}", err)))?; 552 | 553 | self.tab = Tab::Pause { 554 | audio_clip, 555 | time: 0f64, 556 | }; 557 | self.update_cb 558 | .call((), ThreadsafeFunctionCallMode::NonBlocking); 559 | 560 | Ok(()) 561 | } 562 | 563 | #[napi] 564 | pub fn export(&mut self, id: u32) -> Result { 565 | if let Some(clip) = self 566 | .db 567 | .load_by_id(id as usize) 568 | .map_err(|err| Error::from_reason(format!("{:?}", err)))? 569 | { 570 | let temp_dir = std::env::temp_dir(); 571 | let safe_name = Path::new(&clip.name) 572 | .file_name() 573 | .unwrap_or_else(|| OsStr::new("invalid")) 574 | .to_str() 575 | .ok_or_else(|| Error::from_reason("Path is not valid utf8"))? 576 | .to_string(); 577 | let filename = format!("{}_{}.wav", clip.id.unwrap_or(0), safe_name); 578 | let tmp_path = temp_dir.join(Path::new(&filename)); 579 | let tmp_path = tmp_path 580 | .to_str() 581 | .ok_or_else(|| Error::from_reason("Path is not utf8"))? 582 | .to_string(); 583 | 584 | clip.export(&tmp_path) 585 | .map_err(|err| Error::from_reason(format!("{}", err)))?; 586 | 587 | Ok(tmp_path) 588 | } else { 589 | Err(Error::from_reason(format!("No clip with ID {}", id))) 590 | } 591 | } 592 | } 593 | -------------------------------------------------------------------------------- /sound-ci-helpers-windows/devcon.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jocelyn-stericker/oxygen/f2e01fa156ea4e0ce6ed3d5bb60a34ce36a9fe2f/sound-ci-helpers-windows/devcon.exe -------------------------------------------------------------------------------- /sound-ci-helpers-windows/setup_sound.ps1: -------------------------------------------------------------------------------- 1 | # Source: https://github.com/LABSN/sound-ci-helpers/blob/master/windows/setup_sound.ps1 2 | 3 | # Someday if this breaks we can use: 4 | # https://github.com/duncanthrax/scream/tree/master/Install 5 | # But it signs using a SHA that is buggy with Windows 7 so don't use for now 6 | # https://support.microsoft.com/en-us/help/2921916/the-untrusted-publisher-dialog-box-appears-when-you-install-a-driver-i 7 | function DownloadVirtualSoundcard () { 8 | $webclient = New-Object System.Net.WebClient 9 | $url = "https://download.vb-audio.com/Download_CABLE/VBCABLE_Driver_Pack43.zip" 10 | $filepath = $pwd.Path + "\vbcable.zip" 11 | Write-Host "Downloading" $url 12 | $retry_attempts = 2 13 | for($i=0; $i -lt $retry_attempts; $i++){ 14 | try { 15 | $webclient.DownloadFile($url, $filepath) 16 | break 17 | } 18 | Catch [Exception]{ 19 | Start-Sleep 1 20 | } 21 | } 22 | if (Test-Path $filepath) { 23 | Write-Host "File saved at" $filepath 24 | } else { 25 | # Retry once to get the error message if any at the last try 26 | $webclient.DownloadFile($url, $filepath) 27 | } 28 | } 29 | 30 | function main () { 31 | Push-Location $PSScriptRoot 32 | DownloadVirtualSoundcard 33 | Expand-Archive -LiteralPath vbcable.zip -DestinationPath vbcable 34 | certutil -addstore "TrustedPublisher" vbcable.cer 35 | # PnPutil.exe -i -a vbcable/vbMmeCable64_win7.inf 36 | .\devcon.exe install vbcable\vbMmeCable64_win7.inf VBAudioVACWDM 37 | Pop-location 38 | } 39 | 40 | main 41 | 
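42 | # Optional post-install sanity check (an assumption, not part of the original helper: VB-CABLE endpoints usually contain "CABLE" in their friendly name, but names vary by driver version): 43 | #   Get-PnpDevice -Class AudioEndpoint | Where-Object { $_.FriendlyName -like "*CABLE*" }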
-------------------------------------------------------------------------------- /sound-ci-helpers-windows/vbcable.cer: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jocelyn-stericker/oxygen/f2e01fa156ea4e0ce6ed3d5bb60a34ce36a9fe2f/sound-ci-helpers-windows/vbcable.cer -------------------------------------------------------------------------------- /ui/.eslintignore: -------------------------------------------------------------------------------- 1 | .parcel-cache 2 | dist 3 | out 4 | -------------------------------------------------------------------------------- /ui/.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "root": true, 3 | "env": { 4 | "browser": true, 5 | "es2021": true, 6 | "jest": true 7 | }, 8 | "extends": [ 9 | "eslint:recommended", 10 | "plugin:react/recommended", 11 | "plugin:@typescript-eslint/recommended" 12 | ], 13 | "parser": "@typescript-eslint/parser", 14 | "parserOptions": { 15 | "ecmaFeatures": { 16 | "jsx": true 17 | }, 18 | "ecmaVersion": "latest", 19 | "sourceType": "module" 20 | }, 21 | "plugins": ["react", "@typescript-eslint", "react-hooks"], 22 | "rules": { 23 | "react-hooks/rules-of-hooks": "error", 24 | "react-hooks/exhaustive-deps": "warn", 25 | "@typescript-eslint/no-empty-function": "off", 26 | "no-inner-declarations": "off" 27 | }, 28 | "settings": { 29 | "react": { 30 | "version": "detect" 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /ui/.postcssrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "plugins": { 3 | "tailwindcss": {} 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /ui/.prettierignore: -------------------------------------------------------------------------------- 1 | .parcel-cache 2 | dist 3 | out 4 | -------------------------------------------------------------------------------- /ui/.proxyrc.js: -------------------------------------------------------------------------------- 1 | module.exports = function (app) { 2 | app.use(function (req, res, next) { 3 | // Prevent browser caching of all URLs (except /api and /ws because we proxy those) 4 | if ( 5 | !req.path || 6 | (!req.path.startsWith("/api") && !req.path.startsWith("/ws")) 7 | ) { 8 | res.setHeader("Cache-Control", "no-cache, no-store, must-revalidate"); 9 | } 10 | next(); 11 | }); 12 | }; 13 | -------------------------------------------------------------------------------- /ui/forge.config.js: -------------------------------------------------------------------------------- 1 | /* eslint-env node */ 2 | // @ts-check 3 | 4 | /** @type {import('@electron-forge/maker-zip').default['config']} */ 5 | module.exports = { 6 | packagerConfig: { 7 | derefSymlinks: true, 8 | ignore: [ 9 | "src/(?!(preload\\.js|main\\.js))", 10 | "node_modules/oxygen-core/target", 11 | "node_modules/oxygen-core/src", 12 | "node_modules/oxygen-core/build.rs", 13 | "node_modules/oxygen-core/Cargo.*", 14 | "node_modules/.package-lock.json", 15 | "oxygen\\.sqlite", 16 | ".postcssrc", 17 | ".parcel-cache", 18 | "tailwind.config.js", 19 | ], 20 | }, 21 | makers: [ 22 | { 23 | name: "@electron-forge/maker-zip", 24 | platforms: ["darwin", "win32", "linux"], 25 | }, 26 | ], 27 | publishers: [ 28 | { 29 | name: "@electron-forge/publisher-github", 30 | config: { 31 | repository: { 32 | owner: "emilyskidsister", 33 | name: "oxygen", 34 | 
}, 35 | prerelease: true, 36 | draft: true, 37 | }, 38 | }, 39 | ], 40 | }; 41 | -------------------------------------------------------------------------------- /ui/jest.config.js: -------------------------------------------------------------------------------- 1 | /* eslint-env node */ 2 | 3 | module.exports = { 4 | transform: { 5 | "^.+\\.(t|j)sx?$": ["@swc-node/jest"], 6 | }, 7 | modulePathIgnorePatterns: ["out"], 8 | runner: "@kayahr/jest-electron-runner", 9 | testEnvironment: "@kayahr/jest-electron-runner/environment", 10 | setupFiles: ["<rootDir>/src/test_setup.tsx"], 11 | }; 12 | -------------------------------------------------------------------------------- /ui/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "oxygen-ui", 3 | "productName": "Oxygen", 4 | "author": "Jocelyn Stericker", 5 | "description": "A voice journal and audio analysis toolkit for people who want to change the way their voice comes across.", 6 | "version": "0.0.1", 7 | "main": "src/main.js", 8 | "scripts": { 9 | "postinstall": "cd ../core/napi && npm ci", 10 | "start": "run-p --race dev:parcel dev:sync-electron", 11 | "dev:parcel": "parcel ./src/index.html", 12 | "dev:sync-electron": "run-s prod:build-core dev:wait-for-parcel dev:electron", 13 | "dev:build-core": "cd ../core/napi && npm run build:debug", 14 | "dev:build-core-with-jack": "cd ../core/napi && npm run build:debug-with-jack", 15 | "dev:build-core-with-whisper-dummy": "cd ../core/napi && npm run build:debug-with-whisper-dummy", 16 | "dev:wait-for-parcel": "wait-on http://localhost:1234", 17 | "dev:electron": "electron .", 18 | "dev:core-and-jest": "run-s dev:build-core jest", 19 | "dev:core-and-jest-with-jack": "cross-env OXYGEN_NAPI_USE_JACK=1 run-s dev:build-core-with-jack jest", 20 | "prod:make-deps": "run-p prod:parcel prod:build-core", 21 | "prod:parcel": "rimraf dist && parcel build ./src/index.html --public-url ./", 22 | "prod:build-core": "cd ../core/napi && npm run build", 23 | "prod:build-core-with-jack": "cd ../core/napi && npm run build:with-jack", 24 | "prod:build-core-with-whisper-dummy": "cd ../core/napi && npm run build:with-whisper-dummy", 25 | "prod:package": "rimraf out && npm run prod:make-deps && electron-forge package", 26 | "prod:make": "rimraf out && npm run prod:make-deps && electron-forge make", 27 | "prod:publish": "rimraf out && npm run prod:make-deps && electron-forge publish", 28 | "prod:core-and-jest": "run-s prod:build-core jest", 29 | "prod:core-and-jest-with-jack": "cross-env OXYGEN_NAPI_USE_JACK=1 run-s prod:build-core-with-jack jest", 30 | "prettier": "prettier --check .", 31 | "prettier:fix": "prettier --write .", 32 | "eslint": "eslint . --ext .js,.jsx,.ts,.tsx --max-warnings=0", 33 | "eslint:fix": "eslint --fix . 
--ext .js,.jsx,.ts,.tsx --max-warnings=0", 34 | "jest": "jest", 35 | "tsc": "tsc --noEmit", 36 | "test": "npm-run-all -p prettier eslint prod:build-core-with-whisper-dummy -p jest tsc", 37 | "test-with-jack": "npm-run-all -p prettier eslint prod:build-core-with-jack -p jest tsc" 38 | }, 39 | "devDependencies": { 40 | "@electron-forge/cli": "6.2.1", 41 | "@electron-forge/maker-zip": "^6.2.1", 42 | "@electron-forge/publisher-github": "6.2.1", 43 | "@kayahr/jest-electron-runner": "29.6.0", 44 | "@swc-node/jest": "1.6.6", 45 | "@testing-library/react": "14.0.0", 46 | "@types/jest": "29.5.3", 47 | "@types/node": "^20.4.2", 48 | "@types/react": "18.2.15", 49 | "@types/react-dom": "18.2.7", 50 | "@typescript-eslint/eslint-plugin": "6.0.0", 51 | "@typescript-eslint/parser": "6.0.0", 52 | "classnames": "2.3.2", 53 | "cross-env": "7.0.3", 54 | "electron": "25.3.0", 55 | "eslint": "8.45.0", 56 | "eslint-plugin-react": "7.32.2", 57 | "eslint-plugin-react-hooks": "4.6.0", 58 | "jest": "29.6.1", 59 | "npm-run-all": "4.1.5", 60 | "parcel": "2.9.3", 61 | "postcss": "8.4.26", 62 | "prettier": "3.0.0", 63 | "process": "^0.11.10", 64 | "react": "18.2.0", 65 | "react-dom": "18.2.0", 66 | "rimraf": "^5.0.1", 67 | "snapshot-diff": "0.10.0", 68 | "tailwindcss": "3.3.3", 69 | "typescript": "5.1.6", 70 | "wait-on": "7.0.1" 71 | }, 72 | "dependencies": { 73 | "oxygen-core": "file:../core/napi" 74 | }, 75 | "license": "ISC", 76 | "alias": { 77 | "oxygen-core": { 78 | "global": "oxygen" 79 | } 80 | }, 81 | "targets": { 82 | "main": false 83 | }, 84 | "config": { 85 | "forge": "./forge.config.js" 86 | }, 87 | "prettier": { 88 | "endOfLine": "auto" 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /ui/src/AudioView.tsx: -------------------------------------------------------------------------------- 1 | import React, { useCallback, useEffect, useRef, useState } from "react"; 2 | import cx from "classnames"; 3 | import { RenderMode, JsSegment } from "oxygen-core"; 4 | import { Spectrogram } from "./icons"; 5 | 6 | export default function AudioView({ 7 | drawCurrentClip, 8 | streaming, 9 | time, 10 | clipId, 11 | transcribe, 12 | onSeek, 13 | onSetRenderMode, 14 | renderMode, 15 | timeStart, 16 | timeEnd, 17 | }: { 18 | drawCurrentClip: (width: number, height: number) => Buffer | null; 19 | streaming?: boolean; 20 | time: number; 21 | clipId?: bigint | number; 22 | transcribe?: () => Promise; 23 | onSeek: (time: number) => void; 24 | onSetRenderMode: (renderMode: RenderMode) => void; 25 | renderMode: RenderMode; 26 | timeStart: number; 27 | timeEnd: number; 28 | }) { 29 | const canvas = useRef(null); 30 | const canvasContainer = useRef(null); 31 | const duration = timeEnd - timeStart; 32 | 33 | const redraw = useCallback(() => { 34 | const parent = canvas.current?.parentElement; 35 | if (!parent) { 36 | // called one last time on dismount, before the observer disconnects. 
37 | return; 38 | } 39 | 40 | const rect = parent.getBoundingClientRect(); 41 | canvas.current.width = rect.width * devicePixelRatio; 42 | canvas.current.height = rect.height * devicePixelRatio; 43 | canvas.current.style.width = `${rect.width * devicePixelRatio}px`; 44 | canvas.current.style.height = `${rect.height * devicePixelRatio}px`; 45 | canvas.current.style.transform = `scale(${1 / devicePixelRatio})`; 46 | canvas.current.style.transformOrigin = "top left"; 47 | 48 | const buffer = drawCurrentClip(canvas.current.width, canvas.current.height); 49 | 50 | const array = new Uint8ClampedArray(buffer); 51 | if (array.length > 0) { 52 | const image = new ImageData( 53 | array, 54 | canvas.current.width, 55 | canvas.current.height, 56 | ); 57 | const context = canvas.current.getContext("2d"); 58 | context.putImageData(image, 0, 0); 59 | } 60 | }, [drawCurrentClip]); 61 | 62 | const [transcription, setTranscription] = useState< 63 | Array<{ 64 | t0: number; 65 | t1: number; 66 | segment: string; 67 | }> 68 | >(null); 69 | 70 | useEffect(() => { 71 | let expired = false; 72 | setTranscription([]); 73 | 74 | (async () => { 75 | if (transcribe) { 76 | const transcription = await transcribe(); 77 | if (!expired) { 78 | setTranscription(transcription); 79 | } 80 | } 81 | })(); 82 | 83 | return () => { 84 | expired = true; 85 | }; 86 | }, [clipId, transcribe]); 87 | 88 | useEffect(() => { 89 | // ResizeObserver calls immediately on observe, so we need to work around that. 90 | const state = { didInit: false }; 91 | const observer = new ResizeObserver(() => { 92 | if (state.didInit) { 93 | redraw(); 94 | } else { 95 | state.didInit = true; 96 | } 97 | }); 98 | observer.observe(canvasContainer.current); 99 | return () => { 100 | observer.disconnect(); 101 | }; 102 | }, [redraw]); 103 | 104 | useEffect(() => { 105 | if (streaming) { 106 | const interval = setInterval(() => { 107 | redraw(); 108 | }, 100); 109 | return () => { 110 | clearInterval(interval); 111 | }; 112 | } 113 | }, [redraw, streaming]); 114 | 115 | useEffect(redraw, [redraw, clipId, renderMode]); 116 | 117 | return ( 118 | <> 119 |
120 | { 125 | const rect = ev.currentTarget.getBoundingClientRect(); 126 | onSeek( 127 | ((ev.clientX - rect.left) / rect.width) * (timeEnd - timeStart), 128 | ); 129 | }} 130 | /> 131 |
138 | { 145 | ev.preventDefault(); 146 | if (ev.target.checked) { 147 | onSetRenderMode(RenderMode.Spectrogram); 148 | } else { 149 | onSetRenderMode(RenderMode.Waveform); 150 | } 151 | }} 152 | id="toggle-spectrogram" 153 | > 154 | 165 |
166 |
167 | {transcription?.map((segment, i) => ( 168 | 182 | 183 | 190 | {segment.segment} 191 | 192 | 193 | 194 | ))} 195 |
196 | 197 | ); 198 | } 199 | -------------------------------------------------------------------------------- /ui/src/ClipList.test.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import { render, fireEvent } from "@testing-library/react"; 3 | 4 | import ClipList from "./ClipList"; 5 | 6 | describe("ClipList", () => { 7 | it("renders placeholder text when there are no clips", () => { 8 | const clipList = render( 9 | {}} 14 | onSetCurrentClipId={() => {}} 15 | onExport={jest.fn()} 16 | />, 17 | ); 18 | expect(clipList.getByTestId("cliplist-placeholder").textContent).toEqual( 19 | "Your clips will appear here.", 20 | ); 21 | }); 22 | 23 | it("renders clips, which can be selected", () => { 24 | const handleSetCurrentClipId = jest.fn(); 25 | 26 | const clipList = render( 27 | {}} 39 | onSetCurrentClipId={handleSetCurrentClipId} 40 | onExport={jest.fn()} 41 | />, 42 | ); 43 | expect(clipList.queryByTestId("cliplist-placeholder")).toEqual(null); 44 | 45 | { 46 | const record = clipList.getByTestId("record-item"); 47 | expect(record.classList).toContain("bg-purple-900"); // Selected 48 | expect(record.getAttribute("aria-selected")).toEqual("true"); 49 | 50 | const clip1 = clipList.getByTestId("clip-1"); 51 | expect(clip1.textContent).toEqual( 52 | "Clip 1Saturday, May 14, 2022 at 12:00:00 AM", 53 | ); 54 | expect(clip1.classList).not.toContain("bg-purple-900"); // Not selected. 55 | expect(clip1.getAttribute("aria-selected")).toEqual("false"); 56 | 57 | const clip2 = clipList.getByTestId("clip-2"); 58 | expect(clip2.textContent).toEqual( 59 | "Clip 2Friday, May 20, 2022 at 7:34:29 PM", 60 | ); 61 | expect(clip2.classList).not.toContain("bg-purple-900"); // Not selected. 62 | expect(clip2.getAttribute("aria-selected")).toEqual("false"); 63 | 64 | fireEvent.click(clip1); 65 | expect(handleSetCurrentClipId).toHaveBeenCalledWith(1); 66 | 67 | fireEvent.click(clip2); 68 | expect(handleSetCurrentClipId).toHaveBeenCalledWith(2); 69 | } 70 | 71 | clipList.rerender( 72 | {}} 84 | onSetCurrentClipId={handleSetCurrentClipId} 85 | onExport={jest.fn()} 86 | />, 87 | ); 88 | 89 | { 90 | const record = clipList.getByTestId("record-item"); 91 | expect(record.classList).not.toContain("bg-purple-900"); // Not selected 92 | expect(record.getAttribute("aria-selected")).toEqual("false"); 93 | 94 | const clip1 = clipList.getByTestId("clip-1"); 95 | expect(clip1.classList).not.toContain("bg-purple-900"); // Not selected. 96 | expect(clip1.getAttribute("aria-selected")).toEqual("false"); 97 | 98 | const clip2 = clipList.getByTestId("clip-2"); 99 | expect(clip2.classList).toContain("bg-purple-900"); // Selected. 
100 | expect(clip2.getAttribute("aria-selected")).toEqual("true"); 101 | } 102 | }); 103 | 104 | it("renders option for record tab, which can be selected", () => { 105 | const handleSetCurrentTabRecord = jest.fn(); 106 | 107 | const clipList = render( 108 | {}} 121 | onExport={jest.fn()} 122 | />, 123 | ); 124 | 125 | const record = clipList.getByTestId("record-item"); 126 | expect(record.classList).not.toContain("bg-purple-900"); // Not selected 127 | expect(record.getAttribute("aria-selected")).toEqual("false"); 128 | 129 | fireEvent.click(record); 130 | expect(handleSetCurrentTabRecord).toHaveBeenCalledTimes(1); 131 | }); 132 | 133 | it("can filter by name", () => { 134 | const handleSetCurrentTabRecord = jest.fn(); 135 | const handleSetCurrentClipId = jest.fn(); 136 | 137 | const clipList = render( 138 | , 154 | ); 155 | 156 | expect(clipList.getAllByTestId("record-item")).toHaveLength(1); 157 | expect( 158 | clipList.getAllByTestId(/clip-\d+/).map((c) => c.textContent), 159 | ).toEqual([ 160 | "Practice 1Saturday, May 14, 2022 at 12:00:00 AM", 161 | "Practice 2Sunday, May 15, 2022 at 12:00:00 AM", 162 | "Phone call 1Friday, May 20, 2022 at 7:34:29 PM", 163 | ]); 164 | 165 | const searchClips = clipList.getByRole("textbox", { name: "Search clips" }); 166 | fireEvent.change(searchClips, { target: { value: "Practice" } }); 167 | 168 | expect( 169 | clipList.getAllByTestId(/clip-\d+/).map((c) => c.textContent), 170 | ).toEqual([ 171 | "Practice 1Saturday, May 14, 2022 at 12:00:00 AM", 172 | "Practice 2Sunday, May 15, 2022 at 12:00:00 AM", 173 | ]); 174 | expect(clipList.queryAllByTestId("record-item")).toHaveLength(0); 175 | 176 | expect(handleSetCurrentClipId).not.toHaveBeenCalled(); 177 | }); 178 | }); 179 | -------------------------------------------------------------------------------- /ui/src/ClipList.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState } from "react"; 2 | import { JsClipMeta } from "oxygen-core"; 3 | import cx from "classnames"; 4 | 5 | import { Record } from "./icons"; 6 | 7 | export default function ClipList({ 8 | clips, 9 | recordTabSelected, 10 | currentClipId, 11 | onSetCurrentTabRecord, 12 | onSetCurrentClipId, 13 | onExport, 14 | }: { 15 | clips: Array; 16 | recordTabSelected: boolean; 17 | currentClipId: bigint | null; 18 | onSetCurrentTabRecord: () => void; 19 | onSetCurrentClipId: (clipId: number) => void; 20 | onExport: (clipId: number) => string; 21 | }) { 22 | if (!recordTabSelected && currentClipId == null) { 23 | throw new Error("Invalid state: a tab must be selected."); 24 | } 25 | 26 | if (!recordTabSelected && !clips.find((clip) => clip.id === currentClipId)) { 27 | throw new Error("Invalid state: no clip with the selected ID"); 28 | } 29 | 30 | if (recordTabSelected && currentClipId != null) { 31 | throw new Error( 32 | "Invalid state: the record tab is selected and there is a clip ID", 33 | ); 34 | } 35 | 36 | const [filter, setFilter] = useState(""); 37 | 38 | return ( 39 |
40 |
41 | { 46 | setFilter(ev.currentTarget.value); 47 | }} 48 | placeholder="Search clips" 49 | title="Search clips" 50 | autoFocus 51 | /> 52 |
53 |
    54 | {filter.trim() == "" && ( 55 |
  • { 65 | ev.preventDefault(); 66 | onSetCurrentTabRecord(); 67 | }} 68 | > 69 |

    74 | Record New Clip 75 |

    76 |
  • 77 | )} 78 | {clips 79 | .filter((clip) => filter === "" || clip.name.includes(filter)) 80 | .map((clip) => ( 81 |
  • { 92 | ev.preventDefault(); 93 | onSetCurrentClipId(Number(clip.id)); 94 | }} 95 | draggable={true} 96 | onDragStart={(ev) => { 97 | ev.preventDefault(); 98 | const tmpExport = onExport(Number(clip.id)); 99 | window.startDragOut(tmpExport); 100 | }} 101 | onDragEnd={(ev) => { 102 | ev.preventDefault(); 103 | console.log("end"); 104 | }} 105 | > 106 |

    110 | {clip.name} 111 |

    112 |
    113 |
    114 | {clip.date.toLocaleDateString(undefined, { 115 | dateStyle: "full", 116 | })}{" "} 117 | at {clip.date.toLocaleTimeString()} 118 |
    119 |
    120 |
  • 121 | ))} 122 | {clips.length === 0 && ( 123 |
    127 | Your clips will appear here. 128 |
    129 | )} 130 |
131 |
132 | ); 133 | } 134 | -------------------------------------------------------------------------------- /ui/src/CurrentClip.test.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import { RenderMode } from "oxygen-core"; 3 | import { render, fireEvent } from "@testing-library/react"; 4 | 5 | import CurrentClip from "./CurrentClip"; 6 | 7 | describe("CurrentClip", () => { 8 | it("renders playing state and can be stopped", () => { 9 | const handlePlay = jest.fn(); 10 | const handleStop = jest.fn(); 11 | const handleRename = jest.fn(); 12 | const handleDelete = jest.fn(); 13 | const handleSeek = jest.fn(); 14 | const handleTranscribe = null; 15 | const handleDrawCurrentClip = jest.fn((width, height) => { 16 | return Buffer.from(Array(width * height * 4).fill(0)); 17 | }); 18 | 19 | const currentClip = render( 20 | {}} 39 | />, 40 | ); 41 | 42 | expect(handleDrawCurrentClip).toHaveBeenCalledTimes(1); 43 | 44 | const stop = currentClip.getByTestId("current-clip-toggle-playback"); 45 | 46 | expect(stop.textContent).toEqual("Pause"); 47 | fireEvent.click(stop); 48 | expect(handleStop).toHaveBeenCalledTimes(1); 49 | expect(currentClip.getByTestId("current-clip-time").textContent).toEqual( 50 | "125.00", 51 | ); 52 | }); 53 | it("renders stopped state and can be played", () => { 54 | const handlePlay = jest.fn(); 55 | const handleStop = jest.fn(); 56 | const handleRename = jest.fn(); 57 | const handleDelete = jest.fn(); 58 | const handleSeek = jest.fn(); 59 | const handleTranscribe = null; 60 | const handleDrawCurrentClip = jest.fn((width, height) => { 61 | return Buffer.from(Array(width * height * 4).fill(0)); 62 | }); 63 | 64 | const currentClip = render( 65 | {}} 84 | />, 85 | ); 86 | 87 | expect(handleDrawCurrentClip).toHaveBeenCalledTimes(1); 88 | 89 | const play = currentClip.getByTestId("current-clip-toggle-playback"); 90 | 91 | expect(play.textContent).toEqual("Play"); 92 | fireEvent.click(play); 93 | expect(handlePlay).toHaveBeenCalledTimes(1); 94 | expect(currentClip.getByTestId("current-clip-time").textContent).toEqual( 95 | "125.00", 96 | ); 97 | }); 98 | it("can be deleted", () => { 99 | const handlePlay = jest.fn(); 100 | const handleStop = jest.fn(); 101 | const handleRename = jest.fn(); 102 | const handleDelete = jest.fn(); 103 | const handleSeek = jest.fn(); 104 | const handleTranscribe = null; 105 | const handleDrawCurrentClip = jest.fn((width, height) => { 106 | return Buffer.from(Array(width * height * 4).fill(0)); 107 | }); 108 | 109 | const currentClip = render( 110 | {}} 129 | />, 130 | ); 131 | 132 | expect(handleDrawCurrentClip).toHaveBeenCalledTimes(1); 133 | 134 | const deleteBtn = currentClip.getByTestId("current-clip-delete"); 135 | 136 | expect(deleteBtn.title).toEqual("Delete this clip"); 137 | fireEvent.click(deleteBtn); 138 | expect(handleDelete).toHaveBeenCalledTimes(1); 139 | }); 140 | it("can be renamed", () => { 141 | const handlePlay = jest.fn(); 142 | const handleStop = jest.fn(); 143 | const handleRename = jest.fn(); 144 | const handleDelete = jest.fn(); 145 | const handleSeek = jest.fn(); 146 | const handleTranscribe = null; 147 | const handleDrawCurrentClip = jest.fn((width, height) => { 148 | return Buffer.from(Array(width * height * 4).fill(0)); 149 | }); 150 | 151 | const currentClip = render( 152 | {}} 171 | />, 172 | ); 173 | 174 | expect(handleDrawCurrentClip).toHaveBeenCalledTimes(1); 175 | 176 | const clipName = currentClip.getByTestId("current-clip-name"); 177 | 
fireEvent.focus(clipName); 178 | fireEvent.change(clipName, { target: { value: "New clip name" } }); 179 | fireEvent.blur(clipName); 180 | 181 | expect(handleRename).toHaveBeenCalledWith("New clip name"); 182 | }); 183 | }); 184 | -------------------------------------------------------------------------------- /ui/src/CurrentClip.tsx: -------------------------------------------------------------------------------- 1 | import { JsClipMeta, JsSegment, RenderMode } from "oxygen-core"; 2 | import cx from "classnames"; 3 | import React, { useState, useEffect } from "react"; 4 | import { Pause, Play, Delete } from "./icons"; 5 | import AudioView from "./AudioView"; 6 | 7 | export default function CurrentClip({ 8 | clip, 9 | drawCurrentClip, 10 | transcribe, 11 | time, 12 | streaming, 13 | onPlay, 14 | onStop, 15 | onSeek, 16 | onRename, 17 | onDelete, 18 | onSetRenderMode, 19 | renderMode, 20 | timeStart, 21 | timeEnd, 22 | }: { 23 | clip: JsClipMeta; 24 | drawCurrentClip: (width: number, height: number) => Buffer | null; 25 | transcribe: () => Promise; 26 | time: number; 27 | streaming: boolean; 28 | onPlay: (cb: () => void) => void; 29 | onStop: () => void; 30 | onSeek: (time: number) => void; 31 | onRename: (name: string) => void; 32 | onDelete: () => void; 33 | onSetRenderMode: (renderMode: RenderMode) => void; 34 | renderMode: RenderMode; 35 | timeStart: number; 36 | timeEnd: number; 37 | }) { 38 | const [editedName, setEditedName] = useState(clip.name); 39 | useEffect(() => { 40 | setEditedName(clip.name); 41 | }, [clip.name]); 42 | 43 | return ( 44 |
45 |
46 | { 51 | setEditedName(ev.currentTarget.value); 52 | }} 53 | onBlur={() => { 54 | const name = editedName.trim(); 55 | if (name !== "") { 56 | onRename(name); 57 | } 58 | setEditedName(clip.name); 59 | }} 60 | /> 61 | 72 |
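{/* Rename commits on blur: the edited name is trimmed, onRename fires only for non-empty names, and the field then resets to the saved clip.name. */}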
73 | 84 |
85 |
89 | {time.toFixed(2).padStart(6, "0")} 90 |
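{/* time is in seconds; toFixed(2) plus padStart(6, "0") give a fixed-width readout, e.g. 9.5 -> "009.50" and 125 -> "125.00" (the value asserted in CurrentClip.test.tsx). */}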
91 |
92 | 125 |
126 |
127 |
128 |
129 | ); 130 | } 131 | -------------------------------------------------------------------------------- /ui/src/RecordTab.test.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import { render, fireEvent } from "@testing-library/react"; 3 | import { RenderMode } from "oxygen-core"; 4 | 5 | import RecordTab from "./RecordTab"; 6 | 7 | describe("RecordTab", () => { 8 | it("renders playing state and can be completed", () => { 9 | const handleRecord = jest.fn(); 10 | const handleStop = jest.fn(); 11 | const handleDrawCurrentClip = jest.fn((width, height) => { 12 | return Buffer.from(Array(width * height * 4).fill(0)); 13 | }); 14 | 15 | const recordTab = render( 16 | {}} 23 | timeStart={0} 24 | timeEnd={1} 25 | />, 26 | ); 27 | 28 | const toggle = recordTab.getByTestId("toggle-record"); 29 | fireEvent.click(toggle); 30 | 31 | expect(toggle.textContent).toEqual("Complete Recording"); 32 | expect(handleStop).toHaveBeenCalledTimes(1); 33 | expect(handleRecord).toHaveBeenCalledTimes(0); 34 | }); 35 | 36 | it("renders stopped state and can be started", () => { 37 | const handleRecord = jest.fn(); 38 | const handleStop = jest.fn(); 39 | const handleDrawCurrentClip = jest.fn((width, height) => { 40 | return Buffer.from(Array(width * height * 4).fill(0)); 41 | }); 42 | 43 | const recordTab = render( 44 | {}} 51 | timeStart={0} 52 | timeEnd={1} 53 | />, 54 | ); 55 | 56 | const toggle = recordTab.getByTestId("toggle-record"); 57 | fireEvent.click(toggle); 58 | 59 | expect(toggle.textContent).toEqual("Start Recording"); 60 | expect(handleStop).toHaveBeenCalledTimes(0); 61 | expect(handleRecord).toHaveBeenCalledTimes(1); 62 | }); 63 | }); 64 | -------------------------------------------------------------------------------- /ui/src/RecordTab.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import cx from "classnames"; 3 | import { RenderMode } from "oxygen-core"; 4 | 5 | import { Record, Stop } from "./icons"; 6 | import AudioView from "./AudioView"; 7 | 8 | export default function RecordTab({ 9 | drawCurrentClip, 10 | streaming, 11 | onRecord, 12 | onStop, 13 | onSetRenderMode, 14 | renderMode, 15 | timeStart, 16 | timeEnd, 17 | }: { 18 | drawCurrentClip: (width: number, height: number) => Buffer | null; 19 | streaming: boolean; 20 | onRecord: () => void; 21 | onStop: () => void; 22 | onSetRenderMode: (renderMode: RenderMode) => void; 23 | renderMode: RenderMode; 24 | timeStart: number; 25 | timeEnd: number; 26 | }) { 27 | return ( 28 |
29 | { 34 | console.warn("TODO: implement seek in record tab?"); 35 | }} 36 | onSetRenderMode={onSetRenderMode} 37 | renderMode={renderMode} 38 | timeStart={timeStart} 39 | timeEnd={timeEnd} 40 | /> 41 |
42 |
43 | 74 |
75 |
76 |
77 | ); 78 | } 79 | -------------------------------------------------------------------------------- /ui/src/Toaster.test.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import { render, act, fireEvent, within } from "@testing-library/react"; 3 | 4 | import Toaster, { ToasterInterface } from "./Toaster"; 5 | 6 | describe("Toaster", () => { 7 | it("renders error and info toasts, which can be dismissed", () => { 8 | const toasterRef = { current: null as ToasterInterface | null }; 9 | const toaster = render(); 10 | expect(toaster.asFragment()).toMatchInlineSnapshot(` 11 | 12 |
15 | 16 | `); 17 | 18 | act(() => { 19 | toasterRef.current.info("This is an info toast"); 20 | }); 21 | 22 | expect(toaster.asFragment()).toMatchInlineSnapshot(` 23 | 24 |
27 |
31 |
35 | This is an info toast 36 |
37 | 57 |
58 |
59 |
60 | `); 61 | 62 | expect( 63 | toaster 64 | .getAllByTestId(/^toast-\d+$/) 65 | .map((toast) => `${toast.className}: ${toast.textContent}`), 66 | ).toEqual([ 67 | "left-1/2 -translate-x-1/2 p-2 first-of-type:border-b-0 border-2 last-of-type:rounded-t-md flex text-blue-900 bg-blue-100 border-blue-300: This is an info toast", 68 | ]); 69 | 70 | act(() => { 71 | toasterRef.current.error("This is an error toast"); 72 | }); 73 | 74 | expect( 75 | toaster 76 | .getAllByTestId(/^toast-\d+$/) 77 | .map((toast) => `${toast.className}: ${toast.textContent}`), 78 | ).toEqual([ 79 | "left-1/2 -translate-x-1/2 p-2 first-of-type:border-b-0 border-2 last-of-type:rounded-t-md flex text-blue-900 bg-blue-100 border-blue-300: This is an info toast", 80 | "left-1/2 -translate-x-1/2 p-2 first-of-type:border-b-0 border-2 last-of-type:rounded-t-md flex text-red-900 bg-red-100 border-red-300: This is an error toast", 81 | ]); 82 | 83 | const dismissInfo = within( 84 | toaster 85 | .getAllByTestId(/^toast-label-\d+$/) 86 | .find((t) => t.textContent === "This is an info toast").parentElement, 87 | ).getByRole("button", { name: "Dismiss" }); 88 | 89 | fireEvent.click(dismissInfo); 90 | 91 | expect( 92 | toaster 93 | .getAllByTestId(/^toast-\d+$/) 94 | .map((toast) => `${toast.className}: ${toast.textContent}`), 95 | ).toEqual([ 96 | "left-1/2 -translate-x-1/2 p-2 first-of-type:border-b-0 border-2 last-of-type:rounded-t-md flex text-red-900 bg-red-100 border-red-300: This is an error toast", 97 | ]); 98 | 99 | const dismissError = within( 100 | toaster 101 | .getAllByTestId(/^toast-label-\d+$/) 102 | .find((t) => t.textContent === "This is an error toast").parentElement, 103 | ).getByRole("button", { name: "Dismiss" }); 104 | 105 | fireEvent.click(dismissError); 106 | 107 | expect( 108 | toaster 109 | .queryAllByTestId(/^toast-\d+$/) 110 | .map((toast) => `${toast.className}: ${toast.textContent}`), 111 | ).toEqual([]); 112 | }); 113 | 114 | it("dismisses toasts with the same uniqueKey", () => { 115 | const toasterRef = { current: null as ToasterInterface | null }; 116 | const toaster = render(); 117 | 118 | act(() => { 119 | toasterRef.current.info("info toast 1", null, "key1"); 120 | toasterRef.current.error("error toast 2", null, "key2"); 121 | toasterRef.current.info("info toast 3", null, null); 122 | }); 123 | 124 | expect( 125 | toaster.getAllByTestId(/^toast-\d+$/).map((toast) => toast.textContent), 126 | ).toEqual(["info toast 1", "error toast 2", "info toast 3"]); 127 | 128 | act(() => { 129 | toasterRef.current.error("replacement error toast 1", null, "key1"); 130 | toasterRef.current.info("replacement info toast 2", null, "key2"); 131 | toasterRef.current.info("info toast 4", null, null); 132 | }); 133 | 134 | expect( 135 | toaster.getAllByTestId(/^toast-\d+$/).map((toast) => toast.textContent), 136 | ).toEqual([ 137 | "info toast 3", 138 | "replacement error toast 1", 139 | "replacement info toast 2", 140 | "info toast 4", 141 | ]); 142 | }); 143 | 144 | it("renders and responds to toast actions", () => { 145 | const toasterRef = { current: null as ToasterInterface | null }; 146 | const toaster = render(); 147 | const handleCb = jest.fn(); 148 | 149 | act(() => { 150 | toasterRef.current.info("This is an info toast", { 151 | text: "Foobarify", 152 | cb: handleCb, 153 | }); 154 | }); 155 | 156 | expect(toaster.asFragment()).toMatchInlineSnapshot(` 157 | 158 |
161 |
165 |
169 | This is an info toast 170 |
171 | 177 | 197 |
198 |
199 |
200 | `); 201 | 202 | const action = within( 203 | toaster 204 | .getAllByTestId(/^toast-label-\d+$/) 205 | .find((t) => t.textContent === "This is an info toast").parentElement, 206 | ).getByRole("button", { name: "Foobarify" }); 207 | 208 | fireEvent.click(action); 209 | 210 | expect(handleCb).toHaveBeenCalledTimes(1); 211 | 212 | expect( 213 | toaster 214 | .queryAllByTestId(/^toast-\d+$/) 215 | .map((toast) => `${toast.className}: ${toast.textContent}`), 216 | ).toEqual([]); 217 | }); 218 | }); 219 | -------------------------------------------------------------------------------- /ui/src/Toaster.tsx: -------------------------------------------------------------------------------- 1 | import React, { 2 | forwardRef, 3 | useCallback, 4 | useEffect, 5 | useImperativeHandle, 6 | useRef, 7 | useState, 8 | } from "react"; 9 | import cx from "classnames"; 10 | import { Dismiss } from "./icons"; 11 | 12 | type ToastType = "error" | "info"; 13 | interface ToastAction { 14 | text: string; 15 | cb: () => void; 16 | } 17 | 18 | interface Toast { 19 | toastType: ToastType; 20 | id: number; 21 | text: string; 22 | uniqueKey?: string; 23 | action?: ToastAction; 24 | } 25 | 26 | export interface ToasterInterface { 27 | /** 28 | * Show an error toast. 29 | * 30 | * If uniqueKey is provided, any toasts with the same uniqueKey will be dismissed. 31 | */ 32 | error: (msg: string, action?: ToastAction, uniqueKey?: string) => void; 33 | 34 | /** 35 | * Show an info toast. 36 | * 37 | * If uniqueKey is provided, any toasts with the same uniqueKey will be dismissed. 38 | */ 39 | info: (msg: string, action?: ToastAction, uniqueKey?: string) => void; 40 | } 41 | 42 | const Toaster = forwardRef(function Toaster(_props, ref) { 43 | const [toasts, setToasts] = useState>([]); 44 | const nextId = useRef(0); 45 | 46 | const mounted = useRef(true); 47 | useEffect(() => { 48 | return () => { 49 | mounted.current = false; 50 | }; 51 | }, []); 52 | 53 | const addToast = useCallback( 54 | ( 55 | text: string, 56 | toastType: ToastType, 57 | action?: ToastAction, 58 | uniqueKey?: string, 59 | ) => { 60 | const id = nextId.current; 61 | setToasts((toasts) => [ 62 | ...toasts.filter( 63 | (toast) => !uniqueKey || toast.uniqueKey !== uniqueKey, 64 | ), 65 | { text, toastType, id, action, uniqueKey }, 66 | ]); 67 | nextId.current += 1; 68 | 69 | setTimeout(() => { 70 | setToasts((toasts) => toasts.filter((toast) => toast.id !== id)); 71 | }, 4000); 72 | }, 73 | [], 74 | ); 75 | 76 | useImperativeHandle( 77 | ref, 78 | () => ({ 79 | error: (msg: string, action?: ToastAction, uniqueKey?: string) => { 80 | addToast(msg, "error", action, uniqueKey); 81 | }, 82 | info: (msg: string, action?: ToastAction, uniqueKey?: string) => { 83 | addToast(msg, "info", action, uniqueKey); 84 | }, 85 | }), 86 | [addToast], 87 | ); 88 | 89 | return ( 90 |
91 | {toasts.map((toast) => ( 92 |
103 |
104 | {toast.text} 105 |
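{/* Toaster.test.tsx finds the toast wrapper and this label by data-testid, matching /^toast-\d+$/ and /^toast-label-\d+$/ (presumably toast-${toast.id} and toast-label-${toast.id}). */}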
106 | {toast.action && ( 107 | 121 | )} 122 | 123 | 135 |
136 | ))} 137 |
138 | ); 139 | }); 140 | 141 | export default Toaster; 142 | -------------------------------------------------------------------------------- /ui/src/UiMain.tsx: -------------------------------------------------------------------------------- 1 | import { RenderMode, UiState } from "oxygen-core"; 2 | import React, { useState, useCallback, useRef, useReducer } from "react"; 3 | import cx from "classnames"; 4 | 5 | import Toaster, { ToasterInterface } from "./Toaster"; 6 | import ClipList from "./ClipList"; 7 | import RecordTab from "./RecordTab"; 8 | import CurrentClip from "./CurrentClip"; 9 | 10 | function nativeLog(level: string, log: string) { 11 | if (level === "error") { 12 | console.error(log); 13 | } else if (level === "trace") { 14 | console.trace(log); 15 | } else if (level === "warn") { 16 | console.warn(log); 17 | } else if (level === "info") { 18 | console.info(log); 19 | } else if (level === "debug") { 20 | console.debug(log); 21 | } else { 22 | console.log(log, `(Note: unknown log level ${level}`); 23 | } 24 | } 25 | 26 | export default function Main({ inMemory }: { inMemory: boolean }) { 27 | // Hack to force a re-render when the state changes. 28 | const [, forceUpdate] = useReducer(() => ({}), []); 29 | const [uiState] = useState( 30 | () => new UiState(forceUpdate, nativeLog, inMemory), 31 | ); 32 | const toaster = useRef(null); 33 | 34 | const drawCurrentClip = useCallback( 35 | (width: number, height: number) => uiState.drawCurrentClip(width, height), 36 | [uiState], 37 | ); 38 | 39 | const transcribe = useCallback(() => uiState.transcribe(), [uiState]); 40 | 41 | const handlePlay = useCallback( 42 | (cb: () => void) => { 43 | uiState.play(cb); 44 | }, 45 | [uiState], 46 | ); 47 | 48 | const handleStop = useCallback(() => uiState.stop(), [uiState]); 49 | 50 | const handleSeek = useCallback( 51 | (time: number) => { 52 | uiState.seek(time); 53 | }, 54 | [uiState], 55 | ); 56 | 57 | const handleRename = useCallback( 58 | (name: string) => { 59 | try { 60 | const prevName = uiState.currentClip.name; 61 | if (name !== prevName) { 62 | uiState.renameCurrentClip(name); 63 | toaster.current.info(`Renamed "${prevName}" to "${name}"`); 64 | } 65 | } catch (err) { 66 | if (err instanceof Error) { 67 | // TODO: stable interface for error messages and/or tests 68 | if (err.message == "UNIQUE constraint failed: clips.name") { 69 | toaster.current.error("This name is taken by another clip."); 70 | } else { 71 | toaster.current.error( 72 | "Something went wrong when renaming this clip.", 73 | ); 74 | } 75 | } 76 | } 77 | }, 78 | [uiState], 79 | ); 80 | 81 | const handleDelete = useCallback(() => { 82 | const prevName = uiState.currentClip.name; 83 | uiState.deleteCurrentClip(); 84 | toaster.current.info( 85 | `Deleted "${prevName}"`, 86 | { 87 | text: "Undo", 88 | cb: () => { 89 | uiState.undeleteCurrentClip(); 90 | }, 91 | }, 92 | "undoDeleteCurrentClip", 93 | ); 94 | }, [uiState]); 95 | 96 | const handleSetRenderMode = useCallback( 97 | (renderMode: RenderMode) => { 98 | uiState.setRenderMode(renderMode); 99 | }, 100 | [uiState], 101 | ); 102 | 103 | const handleRecord = useCallback(() => { 104 | uiState.record(); 105 | }, [uiState]); 106 | 107 | const handleSetTabRecord = useCallback(() => { 108 | uiState.setCurrentTabRecord(); 109 | }, [uiState]); 110 | 111 | const handleSetCurrentClipId = useCallback( 112 | (clipId: number) => { 113 | uiState.setCurrentClipId(clipId); 114 | }, 115 | [uiState], 116 | ); 117 | 118 | const handleExport = useCallback( 119 | (clipId: number) => { 120 
| return uiState.export(clipId); 121 | }, 122 | [uiState], 123 | ); 124 | 125 | const [dragOver, setDragOver] = useState(false); 126 | 127 | return ( 128 |
{ 131 | ev.preventDefault(); 132 | if ( 133 | [...ev.dataTransfer.items].filter( 134 | (item) => item.type === "audio/wav" || item.type === "audio/mpeg", 135 | ).length > 0 136 | ) { 137 | setDragOver(true); 138 | } else { 139 | setDragOver("invalid"); 140 | } 141 | }} 142 | onDragLeave={(ev) => { 143 | ev.preventDefault(); 144 | setDragOver(false); 145 | }} 146 | onDrop={(ev) => { 147 | for (const item of ev.dataTransfer.items) { 148 | if (item.type === "audio/wav" || item.type === "audio/mpeg") { 149 | try { 150 | uiState.import(item.getAsFile().path); 151 | toaster.current.info(`Imported ${item.getAsFile().name}.`); 152 | } catch (err) { 153 | toaster.current.error( 154 | `Could not import ${item.getAsFile().name}: ${err.toString()}`, 155 | ); 156 | } 157 | } else { 158 | toaster.current.error( 159 | `Could not import ${ 160 | item.getAsFile().name 161 | } because the file type is unsupported.`, 162 | ); 163 | } 164 | } 165 | setDragOver(false); 166 | }} 167 | > 168 | 176 | {uiState.currentClipId != null && ( 177 | 193 | )} 194 | {uiState.recordTabSelected && ( 195 | 205 | )} 206 | {dragOver && ( 207 |
208 |
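{/* Drag-and-drop overlay: dragOver is true when at least one dragged item is audio/wav or audio/mpeg, "invalid" otherwise, and false once the drag leaves or the files are dropped. */}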
215 | {dragOver === true &&
Drop audio clips here.
} 216 | {dragOver === "invalid" && ( 217 |
This file type is not supported.
218 | )} 219 |
220 |
221 | )} 222 | 223 |
224 | ); 225 | } 226 | -------------------------------------------------------------------------------- /ui/src/drag_and_drop.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jocelyn-stericker/oxygen/f2e01fa156ea4e0ce6ed3d5bb60a34ce36a9fe2f/ui/src/drag_and_drop.png -------------------------------------------------------------------------------- /ui/src/icons.js: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | // From https://heroicons.com/ 4 | 5 | export const Record = () => ( 6 | 13 | 19 | 20 | ); 21 | 22 | export const Play = () => ( 23 | 30 | 36 | 42 | 43 | ); 44 | 45 | export const Pause = () => ( 46 | 53 | 59 | 60 | ); 61 | 62 | export const Stop = () => ( 63 | 70 | 76 | 82 | 83 | ); 84 | 85 | export const Delete = () => ( 86 | 94 | 99 | 100 | ); 101 | 102 | export const Dismiss = () => ( 103 | 111 | 116 | 117 | ); 118 | 119 | export const Spectrogram = () => ( 120 | 128 | 133 | 138 | 139 | ); 140 | -------------------------------------------------------------------------------- /ui/src/index.css: -------------------------------------------------------------------------------- 1 | @tailwind base; 2 | @tailwind components; 3 | @tailwind utilities; 4 | -------------------------------------------------------------------------------- /ui/src/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 9 | Oxygen 10 | 11 | 12 | 13 | 14 |
15 | 16 | 17 | -------------------------------------------------------------------------------- /ui/src/main.js: -------------------------------------------------------------------------------- 1 | /** 2 | * This is the minimal code that runs in Electron's main thread. 3 | * 4 | * It opens a browser window. In addition to the UI code, the native Rust code 5 | * runs in the renderer thread, not the main thread. 6 | */ 7 | 8 | /* eslint-env node */ 9 | /* eslint-disable @typescript-eslint/no-var-requires */ 10 | 11 | const { app, BrowserWindow, ipcMain } = require("electron"); 12 | const path = require("path"); 13 | 14 | const createWindow = () => { 15 | const win = new BrowserWindow({ 16 | width: 800, 17 | height: 600, 18 | webPreferences: { 19 | contextIsolation: false, 20 | nodeIntegration: true, 21 | preload: path.join(__dirname, "preload.js"), 22 | }, 23 | }); 24 | 25 | if (app.isPackaged) { 26 | win.loadFile("dist/index.html"); 27 | } else { 28 | win.loadURL("http://localhost:1234"); 29 | } 30 | }; 31 | 32 | ipcMain.on("ondragstart", (event, filePath) => { 33 | event.sender.startDrag({ 34 | file: filePath, 35 | icon: path.join(__dirname, "drag_and_drop.png"), 36 | }); 37 | }); 38 | 39 | app.on("web-contents-created", (_event, contents) => { 40 | contents.on("will-navigate", (event) => { 41 | // https://www.electronjs.org/docs/latest/tutorial/security#13-disable-or-limit-navigation 42 | event.preventDefault(); 43 | }); 44 | 45 | // https://www.electronjs.org/docs/latest/tutorial/security#14-disable-or-limit-creation-of-new-windows 46 | contents.setWindowOpenHandler(() => ({ action: "deny" })); 47 | }); 48 | 49 | app.on("window-all-closed", () => { 50 | if (process.platform !== "darwin") app.quit(); 51 | }); 52 | 53 | app.whenReady().then(() => { 54 | createWindow(); 55 | 56 | app.on("activate", () => { 57 | if (BrowserWindow.getAllWindows().length === 0) createWindow(); 58 | }); 59 | }); 60 | -------------------------------------------------------------------------------- /ui/src/preload.d.ts: -------------------------------------------------------------------------------- 1 | interface Window { 2 | oxygen: typeof import("oxygen-core"); 3 | startDragOut(tmpPath: string): void; 4 | } 5 | -------------------------------------------------------------------------------- /ui/src/preload.js: -------------------------------------------------------------------------------- 1 | /* eslint-env node, browser */ 2 | /* eslint-disable @typescript-eslint/no-var-requires */ 3 | 4 | const { ipcRenderer } = require("electron"); 5 | 6 | window.oxygen = require("oxygen-core"); 7 | window.startDragOut = (tmpPath) => { 8 | ipcRenderer.send("ondragstart", tmpPath); 9 | }; 10 | -------------------------------------------------------------------------------- /ui/src/renderer.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * This is the entrypoint for Electron's renderer thread. 
3 | */ 4 | 5 | import { createRoot } from "react-dom/client"; 6 | import React from "react"; 7 | import UiMain from "./UiMain"; 8 | 9 | const root = createRoot(document.getElementById("root")); 10 | root.render(); 11 | -------------------------------------------------------------------------------- /ui/src/test_setup.tsx: -------------------------------------------------------------------------------- 1 | const toLocaleString = Date.prototype.toLocaleString; 2 | const toLocaleDateString = Date.prototype.toLocaleDateString; 3 | const toLocaleTimeString = Date.prototype.toLocaleTimeString; 4 | Date.prototype.toLocaleString = function (locale?, options?) { 5 | return toLocaleString.call(this, locale || "en-US", { 6 | timeZone: "UTC", 7 | ...(options ?? {}), 8 | }); 9 | }; 10 | Date.prototype.toLocaleTimeString = function (locale?, options?) { 11 | return toLocaleTimeString.call(this, locale || "en-US", { 12 | timeZone: "UTC", 13 | ...(options ?? {}), 14 | }); 15 | }; 16 | Date.prototype.toLocaleDateString = function (locale?, options?) { 17 | return toLocaleDateString.call(this, locale || "en-US", { 18 | timeZone: "UTC", 19 | ...(options ?? {}), 20 | }); 21 | }; 22 | -------------------------------------------------------------------------------- /ui/tailwind.config.js: -------------------------------------------------------------------------------- 1 | /* eslint-env node */ 2 | 3 | module.exports = { 4 | content: ["./src/**/*.{html,js,tsx}"], 5 | theme: { 6 | extend: {}, 7 | }, 8 | plugins: [], 9 | }; 10 | -------------------------------------------------------------------------------- /ui/test/__snapshots__/record_and_playback.test.tsx.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`app [integration] can record and playback a clip: 1 initial 1`] = ` 4 | 5 |
8 |
11 |
14 |
17 | 24 |
25 |
    28 | 54 |
    58 | Your clips will appear here. 59 |
    60 |
61 |
62 |
65 |
68 | 75 |
79 | 86 | 110 |
111 |
114 |
117 |
120 | 143 |
146 |
147 |
148 |
151 |
152 |
153 | 154 | `; 155 | 156 | exports[`app [integration] can record and playback a clip: 1 recording 1`] = ` 157 | "Snapshot Diff: 158 | - First value 159 | + Second value 160 | 161 | @@ -70,10 +70,11 @@ 162 | width="736" 163 | /> 164 |
169 | 175 |
178 | 210 |
213 |
" 214 | `; 215 | 216 | exports[`app [integration] can record and playback a clip: 2 done recording 1`] = ` 217 | "Snapshot Diff: 218 | - First value 219 | + Second value 220 | 221 | @@ -21,12 +21,12 @@ 222 |
223 |
    226 | 242 | + 268 |
269 |
270 |
273 |
276 | + 281 | + 301 | +
302 | +
305 | 314 |
320 | 326 |
329 |
333 | + 000.00 334 | +
335 | +
338 | 372 |
375 | +
378 |
379 |
380 |
391 | 392 | Record New Clip 393 | 394 | 395 | + 421 | 422 |
423 |
" 426 | `; 427 | -------------------------------------------------------------------------------- /ui/test/record_and_playback.test.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import { act, fireEvent, render, within } from "@testing-library/react"; 3 | import diff from "snapshot-diff"; 4 | import postcss from "postcss"; 5 | import tailwind from "tailwindcss"; 6 | 7 | import UiMain from "../src/UiMain"; 8 | 9 | beforeAll(async () => { 10 | jest 11 | .spyOn(Date.prototype, "toLocaleDateString") 12 | .mockReturnValue("Mocked Date"); 13 | jest 14 | .spyOn(Date.prototype, "toLocaleTimeString") 15 | .mockReturnValue("Mocked Date"); 16 | 17 | const styleSheet = document.createElement("style"); 18 | styleSheet.innerText = ( 19 | await postcss([tailwind("tailwind.config.js")]).process( 20 | " @tailwind base; @tailwind components; @tailwind utilities;", 21 | { 22 | from: "../src/index.css", 23 | }, 24 | ) 25 | ).css; 26 | document.head.appendChild(styleSheet); 27 | }); 28 | 29 | describe("app [integration]", () => { 30 | it("can record and playback a clip", async () => { 31 | const app = render( 32 |
33 | 34 |
, 35 | ); 36 | const startRecording = app.getByRole("button", { 37 | name: "Start Recording", 38 | }); 39 | fireEvent.click(startRecording); 40 | 41 | let nextFragment = app.asFragment(); 42 | const initialFragment = nextFragment; 43 | expect(nextFragment).toMatchSnapshot("1 initial"); 44 | let prevFragment = nextFragment; 45 | 46 | const stopRecording = await app.findByRole("button", { 47 | name: "Complete Recording", 48 | }); 49 | nextFragment = app.asFragment(); 50 | expect(diff(prevFragment, nextFragment)).toMatchSnapshot("1 recording"); 51 | prevFragment = nextFragment; 52 | 53 | await act(async () => { 54 | await new Promise((res) => setTimeout(res, 200)); 55 | fireEvent.click(stopRecording); 56 | await new Promise((res) => setTimeout(res, 200)); 57 | }); 58 | 59 | const play = await app.findByRole("button", { name: "Play" }); 60 | // Rename the clip since it has a date in it. 61 | const clipName = app.getByTestId("current-clip-name") as HTMLInputElement; 62 | expect(clipName.value).toMatch(/20\d\d-\d\d-\d\d .*/); 63 | fireEvent.focus(clipName); 64 | fireEvent.change(clipName, { target: { value: "New clip name" } }); 65 | fireEvent.blur(clipName); 66 | 67 | const dismissRename = within( 68 | app.getByText(/Renamed.*to.*/).parentElement, 69 | ).getByRole("button", { name: "Dismiss" }); 70 | fireEvent.click(dismissRename); 71 | 72 | await app.findByDisplayValue("New clip name"); 73 | 74 | nextFragment = app.asFragment(); 75 | expect(diff(prevFragment, nextFragment)).toMatchSnapshot( 76 | "2 done recording", 77 | ); 78 | prevFragment = nextFragment; 79 | 80 | fireEvent.click(play); 81 | 82 | const pause = await app.findByRole("button", { name: "Pause" }); 83 | await app.findByRole("button", { name: "Pause" }); 84 | 85 | fireEvent.click(pause); 86 | 87 | fireEvent.click( 88 | app.getByRole("tab", { name: "Record New Clip", selected: false }), 89 | ); 90 | await app.findByRole("tab", { name: "Record New Clip", selected: true }); 91 | 92 | nextFragment = app.asFragment(); 93 | expect(diff(initialFragment, nextFragment)).toMatchSnapshot( 94 | "3 delta from initial", 95 | ); 96 | }); 97 | }); 98 | -------------------------------------------------------------------------------- /ui/test/ui_state.test.tsx: -------------------------------------------------------------------------------- 1 | import { UiState } from "oxygen-core"; 2 | 3 | describe("UiState", () => { 4 | it("can be created twice without crashing", () => { 5 | new UiState( 6 | () => {}, 7 | () => {}, 8 | true, 9 | ); 10 | new UiState( 11 | () => {}, 12 | () => {}, 13 | true, 14 | ); 15 | }); 16 | }); 17 | -------------------------------------------------------------------------------- /ui/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "exclude": ["dist", "out"], 3 | "compilerOptions": { 4 | /* Visit https://aka.ms/tsconfig.json to read more about this file */ 5 | 6 | /* Projects */ 7 | // "incremental": true, /* Enable incremental compilation */ 8 | // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ 9 | // "tsBuildInfoFile": "./", /* Specify the folder for .tsbuildinfo incremental compilation files. */ 10 | // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects */ 11 | // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. 
*/ 12 | // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ 13 | 14 | /* Language and Environment */ 15 | "target": "es2020" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */, 16 | // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ 17 | "jsx": "preserve" /* Specify what JSX code is generated. */, 18 | // "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */ 19 | // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ 20 | // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h' */ 21 | // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ 22 | // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using `jsx: react-jsx*`.` */ 23 | // "reactNamespace": "", /* Specify the object invoked for `createElement`. This only applies when targeting `react` JSX emit. */ 24 | // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ 25 | // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ 26 | 27 | /* Modules */ 28 | "module": "commonjs" /* Specify what module code is generated. */, 29 | // "rootDir": "./", /* Specify the root folder within your source files. */ 30 | // "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */ 31 | // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ 32 | // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ 33 | // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ 34 | // "typeRoots": [], /* Specify multiple folders that act like `./node_modules/@types`. */ 35 | // "types": [], /* Specify type package names to be included without being referenced in a source file. */ 36 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ 37 | // "resolveJsonModule": true, /* Enable importing .json files */ 38 | // "noResolve": true, /* Disallow `import`s, `require`s or ``s from expanding the number of files TypeScript should add to a project. */ 39 | 40 | /* JavaScript Support */ 41 | "allowJs": true /* Allow JavaScript files to be a part of your program. Use the `checkJS` option to get errors from these files. */, 42 | "checkJs": true /* Enable error reporting in type-checked JavaScript files. */, 43 | // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from `node_modules`. Only applicable with `allowJs`. */ 44 | 45 | /* Emit */ 46 | // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ 47 | // "declarationMap": true, /* Create sourcemaps for d.ts files. */ 48 | // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ 49 | // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ 50 | // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If `declaration` is true, also designates a file that bundles all .d.ts output. 
*/ 51 | // "outDir": "./", /* Specify an output folder for all emitted files. */ 52 | // "removeComments": true, /* Disable emitting comments. */ 53 | // "noEmit": true, /* Disable emitting files from a compilation. */ 54 | // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ 55 | // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types */ 56 | // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ 57 | // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ 58 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ 59 | // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ 60 | // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ 61 | // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ 62 | // "newLine": "crlf", /* Set the newline character for emitting files. */ 63 | // "stripInternal": true, /* Disable emitting declarations that have `@internal` in their JSDoc comments. */ 64 | // "noEmitHelpers": true, /* Disable generating custom helper functions like `__extends` in compiled output. */ 65 | // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ 66 | // "preserveConstEnums": true, /* Disable erasing `const enum` declarations in generated code. */ 67 | // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ 68 | // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ 69 | 70 | /* Interop Constraints */ 71 | // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ 72 | // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ 73 | "esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables `allowSyntheticDefaultImports` for type compatibility. */, 74 | // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ 75 | "forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */, 76 | 77 | /* Type Checking */ 78 | "strict": false /* Enable all strict type-checking options. */, 79 | // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied `any` type.. */ 80 | // "strictNullChecks": true, /* When type checking, take into account `null` and `undefined`. */ 81 | // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ 82 | // "strictBindCallApply": true, /* Check that the arguments for `bind`, `call`, and `apply` methods match the original function. */ 83 | // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ 84 | // "noImplicitThis": true, /* Enable error reporting when `this` is given the type `any`. */ 85 | // "useUnknownInCatchVariables": true, /* Type catch clause variables as 'unknown' instead of 'any'. 
*/ 86 | // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ 87 | // "noUnusedLocals": true, /* Enable error reporting when a local variables aren't read. */ 88 | // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read */ 89 | // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ 90 | // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ 91 | // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ 92 | // "noUncheckedIndexedAccess": true, /* Include 'undefined' in index signature results */ 93 | // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ 94 | // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type */ 95 | // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ 96 | // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ 97 | 98 | /* Completeness */ 99 | // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ 100 | "skipLibCheck": true /* Skip type checking all .d.ts files. */ 101 | } 102 | } 103 | --------------------------------------------------------------------------------