├── .gitignore ├── examples ├── react-hook-example │ ├── public │ │ └── mp3worker.js │ ├── vite.config.js │ ├── src │ │ ├── main.jsx │ │ └── App.jsx │ ├── index.html │ └── package.json ├── react-hook-countdown-example │ ├── public │ │ └── mp3worker.js │ ├── vite.config.js │ ├── src │ │ ├── main.jsx │ │ └── App.jsx │ ├── index.html │ └── package.json ├── minimal-example │ └── index.html ├── minimal-example-promises │ └── index.html ├── misc │ └── test-encoding-queue-size.html └── main-example │ └── index.html ├── .github └── GitHubAudioRecorderHeader.png ├── src ├── utils.js ├── Timer.js ├── mp3worker │ ├── mp3worker.js │ └── WorkerEncoder.js ├── react.js └── AudioRecorder.js ├── package.json ├── rollup.config.js ├── dist ├── audiorecorder.min.js ├── index.mjs ├── index.cjs ├── audiorecorder.js ├── audiorecorder.min.js.map ├── react.mjs └── react.cjs └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | dist/*LICENSE.txt 3 | -------------------------------------------------------------------------------- /examples/react-hook-example/public/mp3worker.js: -------------------------------------------------------------------------------- 1 | ../../../dist/mp3worker.js -------------------------------------------------------------------------------- /examples/react-hook-countdown-example/public/mp3worker.js: -------------------------------------------------------------------------------- 1 | ../../../dist/mp3worker.js -------------------------------------------------------------------------------- /.github/GitHubAudioRecorderHeader.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vocaroo/simple-audio-recorder/HEAD/.github/GitHubAudioRecorderHeader.png -------------------------------------------------------------------------------- /examples/react-hook-example/vite.config.js: 
-------------------------------------------------------------------------------- 1 | import { defineConfig } from 'vite' 2 | import react from '@vitejs/plugin-react' 3 | 4 | // https://vitejs.dev/config/ 5 | export default defineConfig({ 6 | plugins: [react()], 7 | }) 8 | -------------------------------------------------------------------------------- /examples/react-hook-countdown-example/vite.config.js: -------------------------------------------------------------------------------- 1 | import { defineConfig } from 'vite' 2 | import react from '@vitejs/plugin-react' 3 | 4 | // https://vitejs.dev/config/ 5 | export default defineConfig({ 6 | plugins: [react()], 7 | }) 8 | -------------------------------------------------------------------------------- /examples/react-hook-example/src/main.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react' 2 | import ReactDOM from 'react-dom/client' 3 | import App from './App.jsx' 4 | 5 | ReactDOM.createRoot(document.getElementById('root')).render( 6 | 7 | 8 | , 9 | ) 10 | -------------------------------------------------------------------------------- /examples/react-hook-countdown-example/src/main.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react' 2 | import ReactDOM from 'react-dom/client' 3 | import App from './App.jsx' 4 | 5 | ReactDOM.createRoot(document.getElementById('root')).render( 6 | 7 | 8 | , 9 | ) 10 | -------------------------------------------------------------------------------- /examples/react-hook-example/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | React Simple Audio Recorder Example 7 | 8 | 9 |
10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /examples/react-hook-countdown-example/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | React Simple Audio Recorder Example 7 | 8 | 9 |
10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /examples/react-hook-example/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "react-hook-example", 3 | "private": true, 4 | "version": "0.0.0", 5 | "type": "module", 6 | "scripts": { 7 | "dev": "vite", 8 | "build": "vite build", 9 | "preview": "vite preview" 10 | }, 11 | "dependencies": { 12 | "react": "^18.2.0", 13 | "react-dom": "^18.2.0" 14 | }, 15 | "devDependencies": { 16 | "@types/react": "^18.2.15", 17 | "@types/react-dom": "^18.2.7", 18 | "@vitejs/plugin-react": "^4.0.3", 19 | "vite": "^4.4.5" 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /examples/react-hook-countdown-example/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "react-hook-example", 3 | "private": true, 4 | "version": "0.0.0", 5 | "type": "module", 6 | "scripts": { 7 | "dev": "vite", 8 | "build": "vite build", 9 | "preview": "vite preview" 10 | }, 11 | "dependencies": { 12 | "react": "^18.2.0", 13 | "react-dom": "^18.2.0" 14 | }, 15 | "devDependencies": { 16 | "@types/react": "^18.2.15", 17 | "@types/react-dom": "^18.2.7", 18 | "@vitejs/plugin-react": "^4.0.3", 19 | "vite": "^4.4.5" 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /src/utils.js: -------------------------------------------------------------------------------- 1 | 2 | export function stopStream(stream) { 3 | if (stream.getTracks) { 4 | stream.getTracks().forEach(track => track.stop()); 5 | } else { 6 | stream.stop(); // Deprecated 7 | } 8 | } 9 | 10 | // https://stackoverflow.com/a/9039885 11 | export function detectIOS() { 12 | return [ 13 | 'iPad Simulator', 14 | 'iPhone Simulator', 15 | 'iPod Simulator', 16 | 'iPad', 17 | 'iPhone', 18 | 'iPod' 19 | ].includes(navigator.platform) 20 | // iPad on 
iOS 13 detection 21 | || (navigator.userAgent.includes("Mac") && "ontouchend" in document); 22 | } 23 | 24 | export function detectSafari() { 25 | return /^((?!chrome|android).)*safari/i.test(navigator.userAgent); 26 | } 27 | -------------------------------------------------------------------------------- /src/Timer.js: -------------------------------------------------------------------------------- 1 | 2 | export default class Timer { 3 | constructor() { 4 | this.reset(); 5 | } 6 | 7 | reset() { 8 | this.startTime = null; // May be modified when resuming, so not the true start time. 9 | this.stoppedTime = null; 10 | } 11 | 12 | start() { 13 | if (!this.startTime) { 14 | this.startTime = Date.now(); 15 | } 16 | 17 | if (this.stoppedTime) { 18 | // Skip time forward by the time length we were stopped 19 | this.startTime += Date.now() - this.stoppedTime; 20 | this.stoppedTime = null; 21 | } 22 | } 23 | 24 | resetAndStart() { 25 | this.reset(); 26 | this.start(); 27 | } 28 | 29 | stop() { 30 | if (!this.stoppedTime) { 31 | this.stoppedTime = Date.now(); 32 | } 33 | } 34 | 35 | getTime() { 36 | if (this.startTime) { 37 | if (this.stoppedTime) { 38 | return this.stoppedTime - this.startTime; 39 | } else { 40 | return Date.now() - this.startTime; 41 | } 42 | } else { 43 | return 0; 44 | } 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /examples/minimal-example/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 28 | 29 | 30 | 31 | 32 |
33 | 34 | 35 | -------------------------------------------------------------------------------- /examples/minimal-example-promises/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 33 | 34 | 35 | 36 | 37 |
38 | 39 | 40 | -------------------------------------------------------------------------------- /examples/react-hook-example/src/App.jsx: -------------------------------------------------------------------------------- 1 | import {SimpleAudioRecorder, useSimpleAudioRecorder} from "../../../src/react.js"; 2 | 3 | export default function App() { 4 | const recorder = useSimpleAudioRecorder({ 5 | workerUrl : "mp3worker.js", 6 | onDataAvailable : data => console.log("DATA AVAILABLE", data.length), 7 | onComplete : mp3Blob => console.log("RECORDING COMPLETE!", mp3Blob), 8 | onError : error => console.log("RECORDING ERROR!", error) 9 | }); 10 | 11 | const viewInitial = ( 12 | 13 | ); 14 | 15 | const viewRecording = ( 16 | <> 17 | 20 | 23 | 24 | ); 25 | 26 | const viewPaused = ( 27 | <> 28 | 31 | 32 | 33 | ); 34 | 35 | const viewError = ( 36 | <> 37 | {viewInitial} 38 |
Error occurred! {recorder.errorStr}
39 | 40 | ); 41 | 42 | return ( 43 |
44 | 50 | 51 |
52 | 53 | {recorder.mp3Urls.toReversed().map(url => 54 |
55 |
57 | )} 58 |
59 | ); 60 | } 61 | -------------------------------------------------------------------------------- /examples/misc/test-encoding-queue-size.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 13 | 14 | 15 | 16 | 17 | 48 | 49 | 50 | 51 | 52 |
53 | 54 | 55 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "simple-audio-recorder", 3 | "version": "1.3.0", 4 | "description": "Web audio recording library with encoding to mp3 and chunked output", 5 | "keywords": [ 6 | "audio", 7 | "sound", 8 | "recorder", 9 | "mp3", 10 | "encoder", 11 | "microphone", 12 | "react", 13 | "hook" 14 | ], 15 | "homepage": "https://github.com/vocaroo/simple-audio-recorder", 16 | "repository": { 17 | "type": "git", 18 | "url": "git+https://github.com/vocaroo/simple-audio-recorder.git" 19 | }, 20 | "type": "module", 21 | "main": "dist/index.cjs", 22 | "module": "dist/index.mjs", 23 | "exports": { 24 | ".": { 25 | "import": "./dist/index.mjs", 26 | "require": "./dist/index.cjs", 27 | "default": "./dist/index.mjs" 28 | }, 29 | "./react": { 30 | "import": "./dist/react.mjs", 31 | "require": "./dist/react.cjs" 32 | }, 33 | "./umd": "./dist/audiorecorder.min.js" 34 | }, 35 | "sideEffects": false, 36 | "files": [ 37 | "dist", 38 | "README.md" 39 | ], 40 | "scripts": { 41 | "build": "NODE_ENV=production rollup -c", 42 | "dev": "concurrently \"rollup -c -w\" \"live-server --open=/examples/main-example/\"" 43 | }, 44 | "license": "MIT", 45 | "devDependencies": { 46 | "@babel/core": "^7", 47 | "@babel/preset-env": "^7", 48 | "@rollup/plugin-babel": "^6", 49 | "@rollup/plugin-commonjs": "^25", 50 | "@rollup/plugin-node-resolve": "^15", 51 | "@rollup/plugin-replace": "^5", 52 | "@rollup/plugin-terser": "^0.4.4", 53 | "concurrently": "^7.6.0", 54 | "lamejstmp": "1.0.1", 55 | "live-server": "1.2.1", 56 | "rollup": "^4" 57 | }, 58 | "peerDependencies": { 59 | "react": ">=16.8.0" 60 | }, 61 | "peerDependenciesMeta": { 62 | "react": { 63 | "optional": true 64 | } 65 | } 66 | } 67 | -------------------------------------------------------------------------------- 
/examples/react-hook-countdown-example/src/App.jsx: -------------------------------------------------------------------------------- 1 | import {SimpleAudioRecorder, useSimpleAudioRecorder} from "../../../src/react.js"; 2 | 3 | export default function App() { 4 | const recorder = useSimpleAudioRecorder({ 5 | workerUrl : "mp3worker.js", 6 | onDataAvailable : data => console.log("DATA AVAILABLE", data.length), 7 | onComplete : mp3Blob => console.log("RECORDING COMPLETE!", mp3Blob), 8 | onError : error => console.log("RECORDING ERROR!", error), 9 | countdown : 3000 10 | }); 11 | 12 | const viewInitial = ( 13 | 14 | ); 15 | 16 | const viewCountdown = ( 17 | 20 | ); 21 | 22 | const viewRecording = ( 23 | <> 24 | 27 | 30 | 31 | ); 32 | 33 | const viewPaused = ( 34 | <> 35 | 38 | 39 | 40 | ); 41 | 42 | const viewError = ( 43 | <> 44 | {viewInitial} 45 |
Error occurred! {recorder.errorStr}
46 | 47 | ); 48 | 49 | return ( 50 |
51 | 58 | 59 |
60 | 61 | {recorder.mp3Urls.toReversed().map(url => 62 |
63 |
65 | )} 66 |
67 | ); 68 | } 69 | -------------------------------------------------------------------------------- /rollup.config.js: -------------------------------------------------------------------------------- 1 | import resolve from "@rollup/plugin-node-resolve"; 2 | import commonjs from "@rollup/plugin-commonjs"; 3 | import replace from "@rollup/plugin-replace"; 4 | import {babel} from "@rollup/plugin-babel"; 5 | import terser from "@rollup/plugin-terser"; 6 | 7 | const isProd = process.env.NODE_ENV === "production"; 8 | const extensions = [".js", ".mjs"]; 9 | 10 | const minify = terser({ 11 | format : {comments : false}, 12 | compress : { 13 | passes : 2, 14 | pure_getters : true, 15 | unsafe_math : false, 16 | }, 17 | mangle : {toplevel : true}, 18 | }); 19 | 20 | const basePlugins = [ 21 | resolve({extensions}), 22 | commonjs(), 23 | // keep builds SSR-safe and deterministic 24 | replace({ 25 | preventAssignment : true, 26 | values : { 27 | "process.env.NODE_ENV" : JSON.stringify("production"), 28 | }, 29 | }), 30 | babel({ 31 | babelHelpers : "bundled", 32 | presets : [["@babel/preset-env", {targets : ">0.5%, not dead"}]], 33 | extensions, 34 | exclude : /node_modules/, 35 | }), 36 | ]; 37 | 38 | // Core library (AudioRecorder) 39 | const coreInput = "src/AudioRecorder.js"; 40 | 41 | export default [ 42 | // Core: ESM + CJS for bundlers/SSR 43 | { 44 | input : coreInput, 45 | plugins : basePlugins, 46 | output : [ 47 | {file : "dist/index.mjs", format : "es", sourcemap : true}, 48 | {file : "dist/index.cjs", format : "cjs", exports : "default", sourcemap : true}, 49 | ], 50 | }, 51 | 52 | // Core: UMD for 7 | 8 | 9 | 126 | 127 |
128 | 129 | 130 | 131 | 132 |
133 |
autoGainControl:
134 |
noiseSuppression:
135 |
echoCancellation:
136 |
force using old script processor:
137 |
138 |
139 | 140 | 141 | -------------------------------------------------------------------------------- /src/react.js: -------------------------------------------------------------------------------- 1 | import {useRef, useState, useEffect} from "react"; 2 | import AudioRecorder from "./AudioRecorder.js"; 3 | 4 | export const RecorderStates = { 5 | INITIAL : 0, 6 | STARTING : 1, 7 | RECORDING : 2, 8 | PAUSED : 3, 9 | ENCODING : 4, 10 | COMPLETE : 5, 11 | ERROR : 6, 12 | COUNTDOWN : 7 13 | }; 14 | 15 | function useInterval(updateFunc, timeStep = 1000/60.0) { 16 | const intervalIdRef = useRef(null); 17 | 18 | useEffect(() => { 19 | intervalIdRef.current = setInterval(updateFunc, timeStep); 20 | 21 | return () => { 22 | intervalIdRef.current && clearInterval(intervalIdRef.current); 23 | }; 24 | }, []); 25 | } 26 | 27 | export function useSimpleAudioRecorder({ 28 | workerUrl, 29 | onDataAvailable, 30 | onComplete, 31 | onError, 32 | options, 33 | cleanup = false, 34 | timeUpdateStep = 111, 35 | countdown = 0 36 | } = {}) { 37 | const [recorderState, setRecorderState] = useState(RecorderStates.INITIAL); 38 | const [mp3Blobs, setMp3Blobs] = useState([]); 39 | const [mp3Urls, setMp3Urls] = useState([]); 40 | const [error, setError] = useState(null); 41 | const [time, setTime] = useState(0); 42 | const [countdownStartTime, setCountdownStartTime] = useState(null); 43 | const [countdownTimeLeft, setCountdownTimeLeft] = useState(0); 44 | 45 | const recorderStateRef = useRef(recorderState); 46 | const countdownStartTimeRef = useRef(0); 47 | 48 | const recorderRef = useRef(null); 49 | const audioDataRef = useRef(null); 50 | const countdownTimerRef = useRef(null); 51 | 52 | recorderStateRef.current = recorderState; 53 | countdownStartTimeRef.current = countdownStartTime; 54 | 55 | function clearCountdownTimeout() { 56 | if (countdownTimerRef.current != null) { 57 | clearTimeout(countdownTimerRef.current); 58 | countdownTimerRef.current = null; 59 | } 60 | } 61 | 62 | useEffect(() => 
{ 63 | if (workerUrl) { 64 | AudioRecorder.preload(workerUrl); 65 | } 66 | 67 | return () => { 68 | clearCountdownTimeout(); 69 | 70 | if (recorderRef.current) { 71 | recorderRef.current.ondataavailable = null; 72 | recorderRef.current.onstart = null; 73 | recorderRef.current.onstop = null; 74 | recorderRef.current.onerror = null; 75 | recorderRef.current.stop(); 76 | recorderRef.current = null; 77 | } 78 | 79 | if (cleanup) { 80 | mp3Urls.forEach(URL.revokeObjectURL); 81 | } 82 | }; 83 | }, []); 84 | 85 | useInterval(() => { 86 | recorderRef.current && setTime(recorderRef.current.time); 87 | 88 | if (recorderStateRef.current == RecorderStates.COUNTDOWN) { 89 | setCountdownTimeLeft(Math.max(0, countdown - (Date.now() - countdownStartTimeRef.current))); 90 | } 91 | }, timeUpdateStep); 92 | 93 | function start() { 94 | audioDataRef.current = []; 95 | recorderRef.current = new AudioRecorder({...options, streaming : true}); 96 | 97 | setRecorderState(RecorderStates.STARTING); 98 | setCountdownTimeLeft(countdown); 99 | 100 | recorderRef.current.ondataavailable = (data) => { 101 | audioDataRef.current.push(data); 102 | onDataAvailable && onDataAvailable(data); 103 | }; 104 | 105 | recorderRef.current.onstart = () => { 106 | if (countdown > 0) { 107 | setRecorderState(RecorderStates.COUNTDOWN); 108 | setCountdownStartTime(Date.now()); 109 | 110 | countdownTimerRef.current = setTimeout(() => { 111 | if (recorderStateRef.current == RecorderStates.COUNTDOWN) { 112 | recorderRef.current.resume(); 113 | setRecorderState(RecorderStates.RECORDING); 114 | setCountdownTimeLeft(0); 115 | } 116 | }, countdown); 117 | } else { 118 | setRecorderState(RecorderStates.RECORDING); 119 | } 120 | setError(null); 121 | }; 122 | 123 | recorderRef.current.onstop = () => { 124 | // Combine all the mp3 data chunks from the audioData array into a Blob 125 | const mp3Blob = new Blob(audioDataRef.current, {type : "audio/mpeg"}); 126 | const mp3Url = URL.createObjectURL(mp3Blob); 127 | 
setRecorderState(RecorderStates.COMPLETE); 128 | setMp3Blobs([...mp3Blobs, mp3Blob]); 129 | setMp3Urls([...mp3Urls, mp3Url]); 130 | onComplete && onComplete({mp3Blob, mp3Url}); 131 | }; 132 | 133 | recorderRef.current.onerror = (error) => { 134 | setRecorderState(RecorderStates.ERROR); 135 | setError(error); 136 | onError && onError(error); 137 | }; 138 | 139 | recorderRef.current.start(countdown > 0); 140 | } 141 | 142 | function stop() { 143 | clearCountdownTimeout(); 144 | 145 | if (recorderRef.current.getEncodingQueueSize() > 1000) { 146 | // If there's a fair amount of data left, we'll enter the ENCODING state. 147 | // (so a spinner or something could be shown) 148 | setRecorderState(RecorderStates.ENCODING); 149 | } 150 | 151 | recorderRef.current.stop(); 152 | } 153 | 154 | function pause() { 155 | if (recorderStateRef.current == RecorderStates.RECORDING) { 156 | recorderRef.current.pause(); 157 | setRecorderState(RecorderStates.PAUSED); 158 | } 159 | } 160 | 161 | function resume() { 162 | if (recorderStateRef.current == RecorderStates.PAUSED) { 163 | recorderRef.current.resume(); 164 | setRecorderState(RecorderStates.RECORDING); 165 | } 166 | } 167 | 168 | const props = {recorderState}; 169 | 170 | return { 171 | error, 172 | errorStr : error ? error.toString() : null, 173 | time, 174 | countdownTimeLeft, 175 | mp3Blobs, 176 | mp3Urls, 177 | mp3Blob : mp3Blobs.at(-1), 178 | mp3Url : mp3Urls.at(-1), 179 | start, 180 | stop, 181 | pause, 182 | resume, 183 | ...props, 184 | getProps : () => props 185 | }; 186 | } 187 | 188 | export function SimpleAudioRecorder({ 189 | recorderState, 190 | viewInitial, viewStarting, viewCountdown, viewRecording, viewPaused, viewEncoding, viewComplete, viewError 191 | }) { 192 | // Only viewInitial and viewRecording are required. 193 | // Others will default to one of viewInitial or viewRecording if not specified, for a simpler UI. 
194 | 195 | // if viewStarting is not set, we fallback first to viewCountdown, and then to viewRecording 196 | viewStarting = viewStarting ?? (viewCountdown ?? viewRecording); 197 | viewCountdown = viewCountdown ?? viewRecording; 198 | viewPaused = viewPaused ?? viewInitial; 199 | viewEncoding = viewEncoding ?? viewComplete; 200 | viewComplete = viewComplete ?? viewInitial; 201 | viewError = viewError ?? viewInitial; 202 | 203 | const stateMap = new Map(); 204 | stateMap.set(RecorderStates.INITIAL, viewInitial); 205 | stateMap.set(RecorderStates.STARTING, viewStarting); 206 | stateMap.set(RecorderStates.COUNTDOWN, viewCountdown); 207 | stateMap.set(RecorderStates.RECORDING, viewRecording); 208 | stateMap.set(RecorderStates.PAUSED, viewPaused); 209 | stateMap.set(RecorderStates.ENCODING, viewEncoding); 210 | stateMap.set(RecorderStates.COMPLETE, viewComplete); 211 | stateMap.set(RecorderStates.ERROR, viewError); 212 | 213 | return stateMap.get(recorderState) ?? RecorderStates.INITIAL; 214 | } 215 | 216 | export function preloadWorker(workerUrl) { 217 | AudioRecorder.preload(workerUrl); 218 | } 219 | -------------------------------------------------------------------------------- /dist/audiorecorder.min.js: -------------------------------------------------------------------------------- 1 | !function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):(t="undefined"!=typeof globalThis?globalThis:t||self).AudioRecorder=e()}(this,function(){"use strict";let t=null,e=0,o=[],s={};function i(t){e=t;for(let t of o)t();o=[]}function r(o){/^https?:\/\//.test(o)&&(o=URL.createObjectURL(new Blob([`importScripts("${o}");`],{type:"text/javascript"}))),t=new Worker(o),e=1,t.onmessage=t=>{switch(t.data.message){case"ready":i(2);break;case"encoded":t.data.jobId in s&&s[t.data.jobId].onencoded(t.data.srcBufLen);break;case"data":t.data.jobId in 
s&&s[t.data.jobId].ondataavailable(t.data.data);break;case"stopped":t.data.jobId in s&&s[t.data.jobId].onstopped()}},t.onerror=t=>{console.error("mp3worker error. Is the worker URL correct?"),i(3)}}class a{constructor(t){this.jobId=([1e7]+-1e3+-4e3+-8e3+-1e11).replace(/[018]/g,t=>(t^crypto.getRandomValues(new Uint8Array(1))[0]&15>>t/4).toString(16)),this.options=t,this.queuedData=0,s[this.jobId]={onencoded:t=>{this.queuedData-=t},ondataavailable:t=>{this.ondataavailable&&this.ondataavailable(t)},onstopped:()=>{delete s[this.jobId],this.onstopped&&this.onstopped()}}}static preload(t){0!=e&&3!=e||r(t)}static waitForWorker(t){return 2==e?Promise.resolve():(0!=e&&3!=e||r(t),new Promise((t,s)=>{o.push(()=>{if(2==e)t();else{let t=new Error("MP3 worker failed");t.name="WorkerError",s(t)}})}))}start(){t.postMessage({command:"start",jobId:this.jobId,options:this.options})}sendData(e){e&&e.length>0&&e[0].length>0&&(this.queuedData+=e[0].length,t.postMessage({command:"data",jobId:this.jobId,buffers:e}))}getQueuedDataLen(){return this.queuedData}stop(){t.postMessage({command:"stop",jobId:this.jobId})}}class n{constructor(){this.reset()}reset(){this.startTime=null,this.stoppedTime=null}start(){this.startTime||(this.startTime=Date.now()),this.stoppedTime&&(this.startTime+=Date.now()-this.stoppedTime,this.stoppedTime=null)}resetAndStart(){this.reset(),this.start()}stop(){this.stoppedTime||(this.stoppedTime=Date.now())}getTime(){return this.startTime?this.stoppedTime?this.stoppedTime-this.startTime:Date.now()-this.startTime:0}}function d(){return window.AudioContext||window.webkitAudioContext}const h={recordingGain:1,encoderBitRate:96,streaming:!1,streamBufferSize:5e4,forceScriptProcessor:!1,constraints:{channelCount:1,autoGainControl:!0,echoCancellation:!0,noiseSuppression:!0}};let u=null,c=null;return 
class{constructor(t){this.options={...h,...t},this.state=0,this.audioContext=null,this.encoder=null,this.encodedData=null,this.stopPromiseResolve=null,this.stopPromiseReject=null,this.timer=new n}static isRecordingSupported(){return d()&&navigator&&navigator.mediaDevices&&navigator.mediaDevices.getUserMedia}static preload(t){u=t,a.preload(u)}useAudioWorklet(){return window.AudioWorklet&&!(["iPad Simulator","iPhone Simulator","iPod Simulator","iPad","iPhone","iPod"].includes(navigator.platform)||navigator.userAgent.includes("Mac")&&"ontouchend"in document)&&!/^((?!chrome|android).)*safari/i.test(navigator.userAgent)&&!this.options.forceScriptProcessor}createAndStartEncoder(t){this.encoder=new a({originalSampleRate:this.audioContext.sampleRate,numberOfChannels:t,encoderBitRate:this.options.encoderBitRate,streamBufferSize:this.options.streamBufferSize}),this.encoder.ondataavailable=t=>{this.options.streaming?this.ondataavailable&&this.ondataavailable(t):this.encodedData.push(t)},this.encoder.onstopped=()=>{this.state=0;let t=this.options.streaming?void 0:new Blob(this.encodedData,{type:"audio/mpeg"});this.onstop&&this.onstop(t),this.stopPromiseResolve(t)},this.encoder.start()}createOutputNode(t){this.useAudioWorklet()?(console.log("Using AudioWorklet"),this.outputNode=new AudioWorkletNode(this.audioContext,"audio-output-processor",{numberOfOutputs:0}),this.outputNode.port.onmessage=t=>{let{data:e}=t;1==this.state&&this.encoder.sendData(e)}):(console.log("Using ScriptProcessorNode"),this.outputNode=this.audioContext.createScriptProcessor(4096,t,t),this.outputNode.connect(this.audioContext.destination),this.outputNode.onaudioprocess=t=>{if(1==this.state){let e=t.inputBuffer,o=[];for(let 
t=0;tt.stop()):t.stop(),this.stream=null),this.useAudioWorklet()?this.outputNode&&(this.outputNode.port.onmessage=null):this.outputNode&&(this.outputNode.onaudioprocess=null),this.outputNode&&this.outputNode.disconnect(),this.recordingGainNode&&this.recordingGainNode.disconnect(),this.sourceNode&&this.sourceNode.disconnect(),this.audioContext&&this.audioContext.close()}setRecordingGain(t){this.options.recordingGain=t,this.recordingGainNode&&this.recordingGainNode.gain.setTargetAtTime(t,this.audioContext.currentTime,.01)}get time(){return this.timer.getTime()}getEncodingQueueSize(){return this.encoder?this.encoder.getQueuedDataLen():0}stoppingCheck(){if(4==this.state)throw function(){let t=new Error("AudioRecorder start cancelled by call to stop");return t.name="CancelStartError",t}()}async __start(t){if(0!=this.state)throw new Error("Called start when not in stopped state");if(null==u)throw new Error("preload was not called on AudioRecorder");this.state=3,this.encodedData=[],this.stream=null;try{await a.waitForWorker(u),this.stoppingCheck();let e=!(Object.keys(this.options.constraints).length>0)||this.options.constraints;this.stream=await navigator.mediaDevices.getUserMedia({audio:e}),this.stoppingCheck();const o=d();this.audioContext=new o,this.useAudioWorklet()&&(await this.audioContext.audioWorklet.addModule(c||(c=URL.createObjectURL(new Blob(['\n\t\tclass AudioOutputProcessor extends AudioWorkletProcessor {\n\t\t\tprocess(inputs, outputs) {\n\t\t\t\tthis.port.postMessage(inputs[0]);\n\t\t\t\treturn true;\n\t\t\t}\n\t\t}\n\n\t\tregisterProcessor("audio-output-processor", AudioOutputProcessor);\n\t'],{type:"application/javascript"})),c),{credentials:"omit"}),this.stoppingCheck());let s=function(t){let e=t.getAudioTracks();if(e.length<1)throw new Error("No audio tracks in user media stream");let o=e[0].getSettings();return"channelCount"in 
o?o.channelCount:1}(this.stream);this.createAndStartEncoder(s),this.createAudioNodes(s),t?(this.timer.reset(),this.state=2):(this.timer.resetAndStart(),this.state=1),this.onstart&&this.onstart()}catch(t){let e=4==this.state;throw this.cleanupAudioNodes(),this.state=0,e&&this.stopPromiseReject(t),t}}async __stop(){if(this.timer.stop(),1==this.state||2==this.state)return this.state=4,this.cleanupAudioNodes(),this.encoder.stop(),new Promise((t,e)=>{this.stopPromiseResolve=t});if(3==this.state)return this.state=4,new Promise((t,e)=>{this.stopPromiseReject=e});throw new Error("Called stop when AudioRecorder was not started")}start(){let t=this.__start(arguments.length>0&&void 0!==arguments[0]&&arguments[0]);if(t.catch(t=>{"CancelStartError"!=t.name&&this.onerror&&this.onerror(t)}),!this.onerror)return t}stop(){let t=this.__stop();if(t.catch(t=>{"CancelStartError"==t.name?this.onstop&&this.onstop(this.options.streaming?void 0:null):this.onerror&&this.onerror(t)}),!this.onerror)return t}pause(){1==this.state&&(this.state=2,this.timer.stop())}resume(){2==this.state&&(this.state=1,this.timer.start())}}}); 2 | //# sourceMappingURL=audiorecorder.min.js.map 3 | -------------------------------------------------------------------------------- /src/AudioRecorder.js: -------------------------------------------------------------------------------- 1 | import WorkerEncoder from "./mp3worker/WorkerEncoder.js"; 2 | import Timer from "./Timer.js"; 3 | import {stopStream, detectIOS, detectSafari} from "./utils.js"; 4 | 5 | function getAudioContextCtor() { 6 | return window.AudioContext || window.webkitAudioContext; 7 | } 8 | // Don't use audio worklet on iOS or safari, fall back to ScriptProcessor. 9 | // There are issues with dropped incoming audio data after ~45 seconds. Thus, the resulting audio would be shorter and sped up / glitchy. 
// Curiously, these same issues are present if *not using* AudioWorklet on Chrome

// True when the browser can safely use AudioWorklet for capture.
// iOS and Safari are excluded: they drop incoming audio data after ~45 seconds,
// producing shortened / sped-up recordings (see the comment above).
function isAudioWorkletSupported() {
	return window.AudioWorklet && !detectIOS() && !detectSafari();
}

// Recorder lifecycle states (stored in AudioRecorder.state).
const states = {
	STOPPED : 0,
	RECORDING : 1,
	PAUSED : 2,
	STARTING : 3,
	STOPPING : 4
};

// Defaults merged under caller-supplied options in the AudioRecorder constructor.
const DEFAULT_OPTIONS = {
	recordingGain : 1,
	encoderBitRate : 96,
	streaming : false,
	streamBufferSize : 50000,
	forceScriptProcessor : false,
	constraints : {
		channelCount : 1,
		autoGainControl : true,
		echoCancellation : true,
		noiseSuppression : true
	}
};

// Module-level worker URL, set by AudioRecorder.preload().
let workerUrl = null;

// Error thrown/rejected when stop() interrupts a start() that is still in progress.
// Callers identify it by error.name == "CancelStartError".
function createCancelStartError() {
	let error = new Error("AudioRecorder start cancelled by call to stop");
	error.name = "CancelStartError";
	return error;
}

// Number of audio channels of the first audio track in a MediaStream.
// Falls back to 1 (mono) when the track settings do not report channelCount.
// Throws if the stream has no audio tracks at all.
function getNumberOfChannels(stream) {
	let audioTracks = stream.getAudioTracks();

	if (audioTracks.length < 1) {
		throw new Error("No audio tracks in user media stream");
	}

	let trackSettings = audioTracks[0].getSettings();
	return "channelCount" in trackSettings ? trackSettings.channelCount : 1;
}

// Worklet does nothing more than pass the data out, to be actually encoded by a regular Web Worker
// Previously this was rewritten to do the encoding within an AudioWorklet, and it was all very nice and clean
// but apparently doing anything that uses much CPU in a AudioWorklet will cause glitches in some browsers.
// So, it's best to do the encoding in a regular Web Worker.
60 | let AUDIO_OUTPUT_MODULE_URL = null; 61 | 62 | function getAudioOutputModuleUrl() { 63 | if (AUDIO_OUTPUT_MODULE_URL) { 64 | return AUDIO_OUTPUT_MODULE_URL; 65 | } 66 | 67 | AUDIO_OUTPUT_MODULE_URL = URL.createObjectURL(new Blob([` 68 | class AudioOutputProcessor extends AudioWorkletProcessor { 69 | process(inputs, outputs) { 70 | this.port.postMessage(inputs[0]); 71 | return true; 72 | } 73 | } 74 | 75 | registerProcessor("audio-output-processor", AudioOutputProcessor); 76 | `], {type : "application/javascript"})); 77 | 78 | return AUDIO_OUTPUT_MODULE_URL; 79 | } 80 | 81 | /* 82 | Callbacks: 83 | ondataavailable 84 | onstart - called when recording successfully started 85 | onstop - called when all data finished encoding and was output 86 | onerror - error starting recording 87 | */ 88 | export default class AudioRecorder { 89 | constructor(options) { 90 | this.options = { 91 | ...DEFAULT_OPTIONS, 92 | ...options 93 | }; 94 | 95 | this.state = states.STOPPED; 96 | this.audioContext = null; 97 | this.encoder = null; 98 | this.encodedData = null; 99 | this.stopPromiseResolve = null; 100 | this.stopPromiseReject = null; 101 | this.timer = new Timer(); 102 | } 103 | 104 | static isRecordingSupported() { 105 | return getAudioContextCtor() && navigator && navigator.mediaDevices && navigator.mediaDevices.getUserMedia; 106 | } 107 | 108 | static preload(_workerUrl) { 109 | workerUrl = _workerUrl; 110 | WorkerEncoder.preload(workerUrl); 111 | } 112 | 113 | // Will we use AudioWorklet? 
114 | useAudioWorklet() { 115 | return isAudioWorkletSupported() && !this.options.forceScriptProcessor; 116 | } 117 | 118 | createAndStartEncoder(numberOfChannels) { 119 | this.encoder = new WorkerEncoder({ 120 | originalSampleRate : this.audioContext.sampleRate, 121 | numberOfChannels : numberOfChannels, 122 | encoderBitRate : this.options.encoderBitRate, 123 | streamBufferSize : this.options.streamBufferSize 124 | }); 125 | 126 | this.encoder.ondataavailable = (data) => { 127 | if (this.options.streaming) { 128 | this.ondataavailable && this.ondataavailable(data); 129 | } else { 130 | this.encodedData.push(data); 131 | } 132 | }; 133 | 134 | this.encoder.onstopped = () => { 135 | this.state = states.STOPPED; 136 | let mp3Blob = this.options.streaming ? undefined : new Blob(this.encodedData, {type : "audio/mpeg"}); 137 | this.onstop && this.onstop(mp3Blob); 138 | this.stopPromiseResolve(mp3Blob); 139 | }; 140 | 141 | this.encoder.start(); 142 | } 143 | 144 | createOutputNode(numberOfChannels) { 145 | if (this.useAudioWorklet()) { 146 | console.log("Using AudioWorklet"); 147 | 148 | this.outputNode = new AudioWorkletNode(this.audioContext, "audio-output-processor", {numberOfOutputs : 0}); 149 | 150 | this.outputNode.port.onmessage = ({data}) => { 151 | if (this.state == states.RECORDING) { 152 | this.encoder.sendData(data); 153 | } 154 | }; 155 | } else { 156 | console.log("Using ScriptProcessorNode"); 157 | 158 | this.outputNode = this.audioContext.createScriptProcessor(4096, numberOfChannels, numberOfChannels); 159 | 160 | this.outputNode.connect(this.audioContext.destination); 161 | this.outputNode.onaudioprocess = (event) => { 162 | if (this.state == states.RECORDING) { 163 | let inputBuffer = event.inputBuffer; 164 | let buffers = []; 165 | 166 | for (let i = 0; i < inputBuffer.numberOfChannels; i ++) { 167 | buffers.push(inputBuffer.getChannelData(i)); 168 | } 169 | 170 | this.encoder.sendData(buffers); 171 | } 172 | }; 173 | } 174 | } 175 | 176 | 
createAudioNodes(numberOfChannels) { 177 | this.createOutputNode(numberOfChannels); 178 | 179 | this.recordingGainNode = this.audioContext.createGain(); 180 | this.setRecordingGain(this.options.recordingGain); 181 | this.recordingGainNode.connect(this.outputNode); 182 | 183 | this.sourceNode = this.audioContext.createMediaStreamSource(this.stream); 184 | this.sourceNode.connect(this.recordingGainNode); 185 | } 186 | 187 | cleanupAudioNodes() { 188 | if (this.stream) { 189 | stopStream(this.stream); 190 | this.stream = null; 191 | } 192 | 193 | if (this.useAudioWorklet()) { 194 | this.outputNode && (this.outputNode.port.onmessage = null); 195 | } else { 196 | this.outputNode && (this.outputNode.onaudioprocess = null); 197 | } 198 | 199 | this.outputNode && this.outputNode.disconnect(); 200 | this.recordingGainNode && this.recordingGainNode.disconnect(); 201 | this.sourceNode && this.sourceNode.disconnect(); 202 | this.audioContext && this.audioContext.close(); 203 | } 204 | 205 | setRecordingGain(gain) { 206 | this.options.recordingGain = gain; 207 | 208 | if (this.recordingGainNode) { 209 | this.recordingGainNode.gain.setTargetAtTime(gain, this.audioContext.currentTime, 0.01); 210 | } 211 | } 212 | 213 | get time() { 214 | return this.timer.getTime(); 215 | } 216 | 217 | // Get the amount of data left to be encoded. 218 | // Useful to estimate if STOPPING state (encoding still ongoing) will last a while. 219 | getEncodingQueueSize() { 220 | return this.encoder ? 
this.encoder.getQueuedDataLen() : 0; 221 | } 222 | 223 | // Called after every "await" in start(), to check that stop wasn't called 224 | // and we should abandon starting 225 | stoppingCheck() { 226 | if (this.state == states.STOPPING) { 227 | throw createCancelStartError(); 228 | } 229 | } 230 | 231 | async __start(paused) { 232 | if (this.state != states.STOPPED) { 233 | throw new Error("Called start when not in stopped state"); 234 | } 235 | 236 | if (workerUrl == null) { 237 | throw new Error("preload was not called on AudioRecorder"); 238 | } 239 | 240 | this.state = states.STARTING; 241 | this.encodedData = []; 242 | this.stream = null; 243 | 244 | try { 245 | await WorkerEncoder.waitForWorker(workerUrl); 246 | this.stoppingCheck(); 247 | 248 | // If a constraint is set, pass them, otherwise just pass true 249 | let constraints = Object.keys(this.options.constraints).length > 0 ? this.options.constraints : true; 250 | 251 | this.stream = await navigator.mediaDevices.getUserMedia({audio : constraints}); 252 | this.stoppingCheck(); 253 | 254 | const _AudioContext = getAudioContextCtor(); 255 | this.audioContext = new _AudioContext(); 256 | 257 | if (this.useAudioWorklet()) { 258 | await this.audioContext.audioWorklet.addModule(getAudioOutputModuleUrl(), {credentials : "omit"}); 259 | this.stoppingCheck(); 260 | } 261 | 262 | // Channel count must be gotten from the stream, as it might not have supported 263 | // the desired amount specified in the constraints 264 | let numberOfChannels = getNumberOfChannels(this.stream); 265 | 266 | // Successfully recording! 
267 | this.createAndStartEncoder(numberOfChannels); 268 | this.createAudioNodes(numberOfChannels); 269 | 270 | if (paused) { 271 | this.timer.reset(); 272 | this.state = states.PAUSED; 273 | } else { 274 | this.timer.resetAndStart(); 275 | this.state = states.RECORDING; 276 | } 277 | 278 | this.onstart && this.onstart(); 279 | } catch (error) { 280 | let startWasCancelled = this.state == states.STOPPING; 281 | this.cleanupAudioNodes(); 282 | 283 | // Reset so can attempt start again 284 | this.state = states.STOPPED; 285 | 286 | // Reject the stop promise now we have cleaned up and are in STOPPED state and ready to start() again 287 | if (startWasCancelled) { 288 | this.stopPromiseReject(error); 289 | } 290 | 291 | throw error; 292 | } 293 | } 294 | 295 | async __stop() { 296 | this.timer.stop(); 297 | 298 | if (this.state == states.RECORDING || this.state == states.PAUSED) { 299 | // Stop recording, but encoding may not have finished yet, 300 | // so we enter the stopping state. 301 | this.state = states.STOPPING; 302 | 303 | this.cleanupAudioNodes(); 304 | this.encoder.stop(); 305 | 306 | // Will be resolved later when encoding finishes 307 | return new Promise((resolve, reject) => { 308 | this.stopPromiseResolve = resolve; 309 | }); 310 | } else if (this.state == states.STARTING) { 311 | this.state = states.STOPPING; 312 | 313 | // Will be rejected later when start() has completely finished operation 314 | return new Promise((resolve, reject) => { 315 | this.stopPromiseReject = reject; 316 | }) 317 | } 318 | 319 | throw new Error("Called stop when AudioRecorder was not started"); 320 | } 321 | 322 | start(paused = false) { 323 | let promise = this.__start(paused); 324 | 325 | promise.catch(error => { 326 | // Don't send CancelStartError to onerror, as it's not *really* an error state 327 | // Only used as a promise rejection to indicate that starting did not succeed. 
328 | if (error.name != "CancelStartError") { 329 | this.onerror && this.onerror(error); 330 | } 331 | }); 332 | 333 | if (!this.onerror) { 334 | return promise; 335 | } 336 | } 337 | 338 | stop() { 339 | let promise = this.__stop(); 340 | 341 | promise.catch(error => { 342 | if (error.name == "CancelStartError") { 343 | // Stop was called before recording even started 344 | // Send a onstop event anyway to indicate that recording can be retried. 345 | this.onstop && this.onstop(this.options.streaming ? undefined : null); 346 | } else { 347 | this.onerror && this.onerror(error); 348 | } 349 | }); 350 | 351 | if (!this.onerror) { 352 | return promise; 353 | } 354 | } 355 | 356 | pause() { 357 | if (this.state == states.RECORDING) { 358 | this.state = states.PAUSED; 359 | this.timer.stop(); 360 | } 361 | } 362 | 363 | resume() { 364 | if (this.state == states.PAUSED) { 365 | this.state = states.RECORDING; 366 | this.timer.start(); 367 | } 368 | } 369 | } 370 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Simple Audio Recorder 2 | 3 | ![](https://raw.githubusercontent.com/bobbles911/simple-audio-recorder/master/.github/GitHubAudioRecorderHeader.png) 4 | 5 | A simple web audio recording library with encoding to MP3 (using [lamejs](https://github.com/zhuker/lamejs)) and optional streaming/chunked output. Made by [Vocaroo, the quick and easy online voice recorder](https://vocaroo.com)! 6 | 7 | **NOTE**: *This library is considered basically feature complete. However, it's definitely not abandoned and is used by thousands of Vocaroo users every day. If anything needs fixing, it will be done, but most issues such as occasional audio glitches are the fault of the web browsers rather than this library.* 8 | 9 | Now including both a vanilla-js version and an easy to use react hook and component! 
10 | 11 | ### Vanilla-js 12 | 13 | ```javascript 14 | import AudioRecorder from "simple-audio-recorder"; 15 | 16 | AudioRecorder.preload("mp3worker.js"); 17 | 18 | let recorder = new AudioRecorder(); 19 | 20 | recorder.start().then(() => { 21 | console.log("Recording started..."); 22 | }).catch(error => { 23 | console.log(error); 24 | }); 25 | 26 | recorder.stop().then(mp3Blob => { 27 | console.log("Recording stopped..."); 28 | 29 | const newAudio = document.createElement("audio"); 30 | newAudio.src = URL.createObjectURL(mp3Blob); 31 | newAudio.controls = true; 32 | document.body.append(newAudio); 33 | }).catch(error => { 34 | console.log(error); 35 | }); 36 | ``` 37 | 38 | ### React hook and component 39 | 40 | ```JSX 41 | import {SimpleAudioRecorder, useSimpleAudioRecorder} from "simple-audio-recorder/react"; 42 | 43 | export default function App() { 44 | const recorder = useSimpleAudioRecorder({workerUrl : "mp3worker.js"}); 45 | 46 | const viewInitial = ; 47 | const viewRecording = ; 48 | const viewError = (<>{viewInitial}
<div>Error occurred! {recorder.errorStr}</div>
); 49 | 50 | return ( 51 |
52 | 57 | 58 | {recorder.mp3Urls.map(url => 59 |
62 | ); 63 | } 64 | ``` 65 | 66 | ## Examples 67 | 68 | ### On codepen 69 | 70 | - [Minimal promise example](https://codepen.io/bobbles911/pen/JjBzPvm) 71 | - [Main example of all features](https://codepen.io/bobbles911/pen/rNrRBZd) 72 | - [React hook and component example](https://stackblitz.com/edit/react-61mepu?file=src%2FApp.js) 73 | 74 | ### Included in the project 75 | 76 | - [Minimal promise example](examples/minimal-example-promises) 77 | - [Main example of all features](examples/main-example) 78 | - [React hook and component example](examples/react-hook-example) 79 | 80 | To run the built in examples in the ./examples/ directory, start a dev server from the project root and then navigate to them. 81 | 82 | Or start developing with: 83 | 84 | ```bash 85 | npm install 86 | npm run dev 87 | ``` 88 | 89 | ## Usage 90 | 91 | ### Including 92 | 93 | ```javascript 94 | npm install simple-audio-recorder 95 | ``` 96 | 97 | ```javascript 98 | import AudioRecorder from "simple-audio-recorder"; 99 | ``` 100 | 101 | Alternatively, just use a script tag: 102 | ```javascript 103 | 104 | ``` 105 | Also, you must make sure that you distribute the web worker file "mp3worker.js" along with your application. 106 | 107 | ### Preload the MP3 encoder worker: 108 | 109 | ```javascript 110 | // This is a static method. 
111 | // You should preload the worker immediately on page load to enable recording to start quickly 112 | AudioRecorder.preload("./mp3worker.js"); 113 | ``` 114 | 115 | ### Create an audio recorder 116 | 117 | ```javascript 118 | let recorder = new AudioRecorder({ 119 | recordingGain : 1, // Initial recording volume 120 | encoderBitRate : 96, // MP3 encoding bit rate 121 | streaming : false, // Data will be returned in chunks (ondataavailable callback) as it is encoded, 122 | // rather than at the end as one large blob 123 | streamBufferSize : 50000, // Size of encoded mp3 data chunks returned by ondataavailable, if streaming is enabled 124 | constraints : { // Optional audio constraints, see https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia 125 | channelCount : 1, // Set to 2 to hint for stereo if it's available, or leave as 1 to force mono at all times 126 | autoGainControl : true, 127 | echoCancellation : true, 128 | noiseSuppression : true 129 | }, 130 | 131 | // Used for debugging only. Force using the older script processor instead of AudioWorklet. 132 | // forceScriptProcessor : true 133 | }); 134 | ``` 135 | 136 | ### Use promises to start and stop recording 137 | 138 | ```javascript 139 | recorder.start().then(() => { 140 | console.log("Recording started..."); 141 | }).catch(error => { 142 | console.log(error); 143 | }); 144 | 145 | recorder.stop().then(mp3Blob => { 146 | // Do something with the mp3 Blob! 147 | }).catch(error => { 148 | console.log(error); 149 | }); 150 | ``` 151 | 152 | ### Or use events 153 | 154 | ```javascript 155 | recorder.onstart = () => { 156 | console.log("Recording started..."); 157 | }; 158 | 159 | recorder.onstop = (mp3Blob) => { 160 | // Do something with the mp3 Blob! 
161 | // When using onstop, mp3Blob could in rare cases be null if nothing was recorded 162 | // (with the Promise API, that would be a stop() promise rejection) 163 | }; 164 | 165 | recorder.onerror = (error) => { 166 | console.log(error); 167 | }; 168 | 169 | // if onerror is set, start and stop won't return a promise 170 | recorder.start(); 171 | 172 | // later... 173 | recorder.stop(); 174 | ``` 175 | 176 | ### Handle encoded data chunks 177 | 178 | Want to receive encoded data chunks as they are produced? Useful for streaming uploads to a remote server. 179 | 180 | ```javascript 181 | let recorder = new AudioRecorder({ 182 | streaming : true, 183 | streamBufferSize : 50000 184 | }); 185 | 186 | let audioChunks = []; 187 | 188 | recorder.ondataavailable = (data) => { 189 | // 50 KB of MP3 data received! 190 | audioChunks.push(data); 191 | }; 192 | 193 | recorder.start(); 194 | 195 | // No mp3Blob will be received either with the promise API or via recorder.onstop if streaming is enabled. 196 | recorder.stop().then(() => { 197 | // ...do something with all the chunks that were received by ondataavailable 198 | let mp3Blob = new Blob(audioChunks, {type : "audio/mpeg"}); 199 | }); 200 | ``` 201 | 202 | ### Other functions/attributes 203 | 204 | ```javascript 205 | recorder.start(paused = false); // Supports starting in paused mode 206 | recorder.pause(); 207 | recorder.resume(); 208 | 209 | recorder.setRecordingGain(gain); // Change the volume while recording is in progress (0.0 to 1.0) 210 | 211 | recorder.time; // Access the current recorded duration in milliseconds. Time pauses when recording is paused. 212 | 213 | // Get the amount of data remaining to be encoded 214 | // Will only be much above zero on very slow systems as mp3 encoding is quite fast. 215 | // A large value indicates there might be a delay between calling stop() and getting the mp3Blob 216 | recorder.getEncodingQueueSize(); 217 | 218 | AudioRecorder.isRecordingSupported(); // Static method. 
Does this browser support getUserMedia? 219 | ``` 220 | 221 | ### Error handling 222 | 223 | Error handling can be done either via promises and catching errors, or via the onerror event handler if it is set. 224 | 225 | #### Errors 226 | 227 | These are named via the error.name property 228 | 229 | - **CancelStartError** - if stop() is called while start() has not completed (perhaps due to slow loading of the worker), then both the start and stop promises will reject with this error. However, if using the onerror event handler this error will **not** be given (as it's not _really_ an error, but a deliberate action of the user). In that case, the onstop event handler will receive null instead of an mp3 Blob. 230 | - **WorkerError** - there was some problem loading the worker, maybe the URL was incorrect or the internet broke 231 | - _getUserMedia errors_ - any error that [getUserMedia](https://developer.mozilla.org/en-US/docs/Web/API/MediaDevices/getUserMedia) can fail with, such as NotAllowedError or NotFoundError 232 | - _Miscellaneous unnamed errors_ - if you do something like calling start() while recording has already started, or forgetting to call preload() before creating an AudioRecorder, then you'll probably see some other errors. 233 | 234 | ### React hook and component 235 | 236 | Please see the [react hook and component example](examples/react-hook-example) for a working example of usage. 
237 | 238 | #### Importing 239 | 240 | ```javascript 241 | import { 242 | useSimpleAudioRecorder, 243 | SimpleAudioRecorder, 244 | preloadWorker, 245 | RecorderStates 246 | } from "simple-audio-recorder/react" 247 | ``` 248 | 249 | #### useSimpleAudioRecorder hook 250 | 251 | ```javascript 252 | const { 253 | error, // Any current error object, or null 254 | errorStr, // Error object as string, or null 255 | time, // Current recorded time in milliseconds 256 | countdownTimeLeft, // Time left of the countdown before recording will start, if one was set 257 | mp3Blobs, // List of all recordings as a blob 258 | mp3Urls, // List of all recordings as URLs (created with URL.createObjectURL) 259 | mp3Blob, // Single most recent recording blob 260 | mp3Url, // Single most recent recording URL 261 | start, stop, pause, resume, // Recording functions 262 | recorderState, // Current state of recorder (see RecorderStates) 263 | getProps // Function to get the props that can be passed to the SimpleAudioRecorder react component 264 | } = useSimpleAudioRecorder({ 265 | workerUrl, onDataAvailable, onComplete, onError, options, cleanup = false, timeUpdateStep = 111, countdown = 0 266 | }) 267 | ``` 268 | 269 | - **workerUrl** - URL of the mp3 encoder. Can alternatively be specified using preloadWorker() 270 | - **onDataAvailable** - optional callback to receive encoded data as it is created. 271 | - **onComplete** - optional callback, receives `{mp3Blob, mp3Url}` when recording and encoding is finished. 272 | - **onError** - optional callback, receives any error object. 273 | - **options** - see the documentation for AudioRecorder. 274 | - **cleanup** - if true, any mp3Urls created via URL.createObjectURL will be freed when unmounting. By default, this is false, and you may need to free them yourself if there is an excessive amount of recordings. 275 | - **timeUpdateStep** - how often in milliseconds the returned time will be updated. 
276 | - **countdown** - a countdown time in milliseconds until recording will actually start, running from after start() was called and microphone access has been granted. Defaults to zero. 277 | 278 | #### SimpleAudioRecorder component 279 | 280 | This is a very simple state machine component that shows a different view component depending on the current recorder state. 281 | 282 | ```javascript 283 | SimpleAudioRecorder({ 284 | // As returned by useSimpleAudioRecorder 285 | recorderState, 286 | // The components to display in each of the states. 287 | // Only viewInitial and viewRecording are absolutely required. 288 | viewInitial, viewStarting, viewCountdown, viewRecording, viewPaused, viewEncoding, viewComplete, viewError 289 | }) 290 | ``` 291 | 292 | - **viewInitial** - initial state of the recorder, you should show a "start recording" button that calls the `start` function from useSimpleAudioRecorder. 293 | - **viewStarting** - optional state, will show when recording is starting but has not yet started, for example while the user is responding to the microphone access prompt. 294 | - **viewCountdown** - optional, will show when in the countdown state if a greater than zero countdown time has been set. 295 | - **viewRecording** - required state, recording is in progress! You may want to show stop and pause buttons here that call the `stop` and `pause` functions. 296 | - **viewPaused** - required if the pause function is used. Show resume or stop buttons. 297 | - **viewEncoding** - optional. This may show in very rare cases when the user has a very slow device and mp3 encoding is still ongoing after recording has been stopped. 298 | - **viewComplete** - optional, shown after recording has completed successfully. Defaults to viewInitial. 299 | - **viewError** - optional, but highly recommended. Shown when there is a recording error. You can display the contents of the error object or errorStr from useSimpleAudioRecorder. 
300 | 301 | #### preloadWorker(workerUrl) 302 | 303 | Instead of passing a workerUrl to `useSimpleAudioRecorder`, it's better to call this function somewhere at the start of your app to preload the worker as soon as possible. 304 | 305 | #### RecorderStates 306 | 307 | An enumeration of possible recorder states. Used by the SimpleAudioRecorder component. 308 | 309 | ```javascript 310 | RecorderStates = { 311 | INITIAL, 312 | STARTING, 313 | RECORDING, 314 | PAUSED, 315 | ENCODING, 316 | COMPLETE, 317 | ERROR, 318 | COUNTDOWN 319 | } 320 | ``` 321 | 322 | ### Known issues 323 | 324 | #### iOS/Safari 325 | 326 | Simple Audio Recorder uses an AudioWorkletNode to extract the audio data, where supported, and falls back to using the deprecated ScriptProcessorNode on older browsers. However, there seem to be some occasional issues using AudioWorkletNode on iOS/Safari. After about 45 seconds, audio packets from the microphone start to get dropped, creating a recording that is shorter than expected with stuttering and glitches. So currently, the deprecated ScriptProcessorNode will always be used on iOS/Safari. 327 | 328 | AFAIK this is an unsolved issue, perhaps related to Safari's implementation of AudioWorklets and them not being given enough CPU priority. These issues only appear on some devices. Curiously, similar glitches have also been experienced when using the old ScriptProcessorNode on Chrome on other platforms. 329 | 330 | Chrome isn't any better on iOS either as they are forced to use Safari under the hood (somehow, [this feels rather familiar](https://en.wikipedia.org/wiki/United_States_v._Microsoft_Corp.)). 331 | 332 | ## Licenses 333 | SimpleAudioRecorder is mostly MIT licenced, but the worker is probably LGPL as it uses [lamejs](https://github.com/zhuker/lamejs). 
-------------------------------------------------------------------------------- /dist/index.mjs: -------------------------------------------------------------------------------- 1 | let workerStates = { 2 | INACTIVE: 0, 3 | LOADING: 1, 4 | READY: 2, 5 | ERROR: 3 6 | }; 7 | let worker = null; 8 | let workerState = workerStates.INACTIVE; 9 | let workerStateChangeCallbacks = []; 10 | let jobCallbacks = {}; 11 | function uuidv4() { 12 | // https://stackoverflow.com/a/2117523 13 | return ([1e7] + -1e3 + -4e3 + -8e3 + -1e11).replace(/[018]/g, c => (c ^ crypto.getRandomValues(new Uint8Array(1))[0] & 15 >> c / 4).toString(16)); 14 | } 15 | function notifyWorkerState(newState) { 16 | workerState = newState; 17 | for (let callback of workerStateChangeCallbacks) { 18 | callback(); 19 | } 20 | workerStateChangeCallbacks = []; 21 | } 22 | 23 | // This hack required to load worker from another domain (e.g. a CDN) 24 | // https://stackoverflow.com/a/62914052 25 | function getWorkerCrossDomainURL(url) { 26 | const content = `importScripts("${url}");`; 27 | return URL.createObjectURL(new Blob([content], { 28 | type: "text/javascript" 29 | })); 30 | } 31 | function loadWorker(workerUrl) { 32 | if (/^https?:\/\//.test(workerUrl)) { 33 | // Is it an absolute URL? Then consider it cross domain. 
34 | workerUrl = getWorkerCrossDomainURL(workerUrl); 35 | } 36 | worker = new Worker(workerUrl); 37 | workerState = workerStates.LOADING; 38 | worker.onmessage = event => { 39 | switch (event.data.message) { 40 | case "ready": 41 | notifyWorkerState(workerStates.READY); 42 | break; 43 | case "encoded": 44 | if (event.data.jobId in jobCallbacks) { 45 | jobCallbacks[event.data.jobId].onencoded(event.data.srcBufLen); 46 | } 47 | break; 48 | case "data": 49 | if (event.data.jobId in jobCallbacks) { 50 | jobCallbacks[event.data.jobId].ondataavailable(event.data.data); 51 | } 52 | break; 53 | case "stopped": 54 | if (event.data.jobId in jobCallbacks) { 55 | jobCallbacks[event.data.jobId].onstopped(); 56 | } 57 | break; 58 | } 59 | }; 60 | worker.onerror = event => { 61 | console.error("mp3worker error. Is the worker URL correct?"); 62 | notifyWorkerState(workerStates.ERROR); 63 | }; 64 | } 65 | 66 | // Callbacks: 67 | // - ondataavailable 68 | // - onstopped 69 | class WorkerEncoder { 70 | constructor(options) { 71 | this.jobId = uuidv4(); 72 | this.options = options; 73 | this.queuedData = 0; 74 | jobCallbacks[this.jobId] = { 75 | onencoded: srcBufLen => { 76 | this.queuedData -= srcBufLen; 77 | }, 78 | ondataavailable: data => { 79 | this.ondataavailable && this.ondataavailable(data); 80 | }, 81 | onstopped: () => { 82 | delete jobCallbacks[this.jobId]; // Clean up 83 | this.onstopped && this.onstopped(); 84 | } 85 | }; 86 | } 87 | static preload(workerUrl) { 88 | if (workerState == workerStates.INACTIVE || workerState == workerStates.ERROR) { 89 | loadWorker(workerUrl); 90 | } 91 | } 92 | static waitForWorker(workerUrl) { 93 | if (workerState == workerStates.READY) { 94 | return Promise.resolve(); 95 | } else { 96 | // Worker loading already failed, try again... 
97 | if (workerState == workerStates.INACTIVE || workerState == workerStates.ERROR) { 98 | loadWorker(workerUrl); 99 | } 100 | return new Promise((resolve, reject) => { 101 | workerStateChangeCallbacks.push(() => { 102 | if (workerState == workerStates.READY) { 103 | resolve(); 104 | } else { 105 | let error = new Error("MP3 worker failed"); 106 | error.name = "WorkerError"; 107 | reject(error); 108 | } 109 | }); 110 | }); 111 | } 112 | } 113 | start() { 114 | worker.postMessage({ 115 | command: "start", 116 | jobId: this.jobId, 117 | options: this.options 118 | }); 119 | } 120 | sendData(buffers) { 121 | // Check for an empty buffer 122 | if (buffers && buffers.length > 0 && buffers[0].length > 0) { 123 | this.queuedData += buffers[0].length; 124 | worker.postMessage({ 125 | command: "data", 126 | jobId: this.jobId, 127 | buffers: buffers 128 | }); 129 | } 130 | } 131 | 132 | // Amount of data that is not yet encoded. 133 | getQueuedDataLen() { 134 | return this.queuedData; 135 | } 136 | stop() { 137 | worker.postMessage({ 138 | command: "stop", 139 | jobId: this.jobId 140 | }); 141 | } 142 | } 143 | 144 | class Timer { 145 | constructor() { 146 | this.reset(); 147 | } 148 | reset() { 149 | this.startTime = null; // May be modified when resuming, so not the true start time. 
150 | this.stoppedTime = null; 151 | } 152 | start() { 153 | if (!this.startTime) { 154 | this.startTime = Date.now(); 155 | } 156 | if (this.stoppedTime) { 157 | // Skip time forward by the time length we were stopped 158 | this.startTime += Date.now() - this.stoppedTime; 159 | this.stoppedTime = null; 160 | } 161 | } 162 | resetAndStart() { 163 | this.reset(); 164 | this.start(); 165 | } 166 | stop() { 167 | if (!this.stoppedTime) { 168 | this.stoppedTime = Date.now(); 169 | } 170 | } 171 | getTime() { 172 | if (this.startTime) { 173 | if (this.stoppedTime) { 174 | return this.stoppedTime - this.startTime; 175 | } else { 176 | return Date.now() - this.startTime; 177 | } 178 | } else { 179 | return 0; 180 | } 181 | } 182 | } 183 | 184 | function stopStream(stream) { 185 | if (stream.getTracks) { 186 | stream.getTracks().forEach(track => track.stop()); 187 | } else { 188 | stream.stop(); // Deprecated 189 | } 190 | } 191 | 192 | // https://stackoverflow.com/a/9039885 193 | function detectIOS() { 194 | return ['iPad Simulator', 'iPhone Simulator', 'iPod Simulator', 'iPad', 'iPhone', 'iPod'].includes(navigator.platform) 195 | // iPad on iOS 13 detection 196 | || navigator.userAgent.includes("Mac") && "ontouchend" in document; 197 | } 198 | function detectSafari() { 199 | return /^((?!chrome|android).)*safari/i.test(navigator.userAgent); 200 | } 201 | 202 | function getAudioContextCtor() { 203 | return window.AudioContext || window.webkitAudioContext; 204 | } 205 | // Don't use audio worklet on iOS or safari, fall back to ScriptProcessor. 206 | // There are issues with dropped incoming audio data after ~45 seconds. Thus, the resulting audio would be shorter and sped up / glitchy. 
207 | // Curiously, these same issues are present if *not using* AudioWorklet on Chrome 208 | function isAudioWorkletSupported() { 209 | return window.AudioWorklet && !detectIOS() && !detectSafari(); 210 | } 211 | const states = { 212 | STOPPED: 0, 213 | RECORDING: 1, 214 | PAUSED: 2, 215 | STARTING: 3, 216 | STOPPING: 4 217 | }; 218 | const DEFAULT_OPTIONS = { 219 | recordingGain: 1, 220 | encoderBitRate: 96, 221 | streaming: false, 222 | streamBufferSize: 50000, 223 | forceScriptProcessor: false, 224 | constraints: { 225 | channelCount: 1, 226 | autoGainControl: true, 227 | echoCancellation: true, 228 | noiseSuppression: true 229 | } 230 | }; 231 | let workerUrl = null; 232 | function createCancelStartError() { 233 | let error = new Error("AudioRecorder start cancelled by call to stop"); 234 | error.name = "CancelStartError"; 235 | return error; 236 | } 237 | function getNumberOfChannels(stream) { 238 | let audioTracks = stream.getAudioTracks(); 239 | if (audioTracks.length < 1) { 240 | throw new Error("No audio tracks in user media stream"); 241 | } 242 | let trackSettings = audioTracks[0].getSettings(); 243 | return "channelCount" in trackSettings ? trackSettings.channelCount : 1; 244 | } 245 | 246 | // Worklet does nothing more than pass the data out, to be actually encoded by a regular Web Worker 247 | // Previously this was rewritten to do the encoding within an AudioWorklet, and it was all very nice and clean 248 | // but apparently doing anything that uses much CPU in a AudioWorklet will cause glitches in some browsers. 249 | // So, it's best to do the encoding in a regular Web Worker. 
250 | let AUDIO_OUTPUT_MODULE_URL = null; 251 | function getAudioOutputModuleUrl() { 252 | if (AUDIO_OUTPUT_MODULE_URL) { 253 | return AUDIO_OUTPUT_MODULE_URL; 254 | } 255 | AUDIO_OUTPUT_MODULE_URL = URL.createObjectURL(new Blob([` 256 | class AudioOutputProcessor extends AudioWorkletProcessor { 257 | process(inputs, outputs) { 258 | this.port.postMessage(inputs[0]); 259 | return true; 260 | } 261 | } 262 | 263 | registerProcessor("audio-output-processor", AudioOutputProcessor); 264 | `], { 265 | type: "application/javascript" 266 | })); 267 | return AUDIO_OUTPUT_MODULE_URL; 268 | } 269 | 270 | /* 271 | Callbacks: 272 | ondataavailable 273 | onstart - called when recording successfully started 274 | onstop - called when all data finished encoding and was output 275 | onerror - error starting recording 276 | */ 277 | class AudioRecorder { 278 | constructor(options) { 279 | this.options = { 280 | ...DEFAULT_OPTIONS, 281 | ...options 282 | }; 283 | this.state = states.STOPPED; 284 | this.audioContext = null; 285 | this.encoder = null; 286 | this.encodedData = null; 287 | this.stopPromiseResolve = null; 288 | this.stopPromiseReject = null; 289 | this.timer = new Timer(); 290 | } 291 | static isRecordingSupported() { 292 | return getAudioContextCtor() && navigator && navigator.mediaDevices && navigator.mediaDevices.getUserMedia; 293 | } 294 | static preload(_workerUrl) { 295 | workerUrl = _workerUrl; 296 | WorkerEncoder.preload(workerUrl); 297 | } 298 | 299 | // Will we use AudioWorklet? 
300 | useAudioWorklet() { 301 | return isAudioWorkletSupported() && !this.options.forceScriptProcessor; 302 | } 303 | createAndStartEncoder(numberOfChannels) { 304 | this.encoder = new WorkerEncoder({ 305 | originalSampleRate: this.audioContext.sampleRate, 306 | numberOfChannels: numberOfChannels, 307 | encoderBitRate: this.options.encoderBitRate, 308 | streamBufferSize: this.options.streamBufferSize 309 | }); 310 | this.encoder.ondataavailable = data => { 311 | if (this.options.streaming) { 312 | this.ondataavailable && this.ondataavailable(data); 313 | } else { 314 | this.encodedData.push(data); 315 | } 316 | }; 317 | this.encoder.onstopped = () => { 318 | this.state = states.STOPPED; 319 | let mp3Blob = this.options.streaming ? undefined : new Blob(this.encodedData, { 320 | type: "audio/mpeg" 321 | }); 322 | this.onstop && this.onstop(mp3Blob); 323 | this.stopPromiseResolve(mp3Blob); 324 | }; 325 | this.encoder.start(); 326 | } 327 | createOutputNode(numberOfChannels) { 328 | if (this.useAudioWorklet()) { 329 | console.log("Using AudioWorklet"); 330 | this.outputNode = new AudioWorkletNode(this.audioContext, "audio-output-processor", { 331 | numberOfOutputs: 0 332 | }); 333 | this.outputNode.port.onmessage = _ref => { 334 | let { 335 | data 336 | } = _ref; 337 | if (this.state == states.RECORDING) { 338 | this.encoder.sendData(data); 339 | } 340 | }; 341 | } else { 342 | console.log("Using ScriptProcessorNode"); 343 | this.outputNode = this.audioContext.createScriptProcessor(4096, numberOfChannels, numberOfChannels); 344 | this.outputNode.connect(this.audioContext.destination); 345 | this.outputNode.onaudioprocess = event => { 346 | if (this.state == states.RECORDING) { 347 | let inputBuffer = event.inputBuffer; 348 | let buffers = []; 349 | for (let i = 0; i < inputBuffer.numberOfChannels; i++) { 350 | buffers.push(inputBuffer.getChannelData(i)); 351 | } 352 | this.encoder.sendData(buffers); 353 | } 354 | }; 355 | } 356 | } 357 | 
createAudioNodes(numberOfChannels) { 358 | this.createOutputNode(numberOfChannels); 359 | this.recordingGainNode = this.audioContext.createGain(); 360 | this.setRecordingGain(this.options.recordingGain); 361 | this.recordingGainNode.connect(this.outputNode); 362 | this.sourceNode = this.audioContext.createMediaStreamSource(this.stream); 363 | this.sourceNode.connect(this.recordingGainNode); 364 | } 365 | cleanupAudioNodes() { 366 | if (this.stream) { 367 | stopStream(this.stream); 368 | this.stream = null; 369 | } 370 | if (this.useAudioWorklet()) { 371 | this.outputNode && (this.outputNode.port.onmessage = null); 372 | } else { 373 | this.outputNode && (this.outputNode.onaudioprocess = null); 374 | } 375 | this.outputNode && this.outputNode.disconnect(); 376 | this.recordingGainNode && this.recordingGainNode.disconnect(); 377 | this.sourceNode && this.sourceNode.disconnect(); 378 | this.audioContext && this.audioContext.close(); 379 | } 380 | setRecordingGain(gain) { 381 | this.options.recordingGain = gain; 382 | if (this.recordingGainNode) { 383 | this.recordingGainNode.gain.setTargetAtTime(gain, this.audioContext.currentTime, 0.01); 384 | } 385 | } 386 | get time() { 387 | return this.timer.getTime(); 388 | } 389 | 390 | // Get the amount of data left to be encoded. 391 | // Useful to estimate if STOPPING state (encoding still ongoing) will last a while. 392 | getEncodingQueueSize() { 393 | return this.encoder ? 
this.encoder.getQueuedDataLen() : 0; 394 | } 395 | 396 | // Called after every "await" in start(), to check that stop wasn't called 397 | // and we should abandon starting 398 | stoppingCheck() { 399 | if (this.state == states.STOPPING) { 400 | throw createCancelStartError(); 401 | } 402 | } 403 | async __start(paused) { 404 | if (this.state != states.STOPPED) { 405 | throw new Error("Called start when not in stopped state"); 406 | } 407 | if (workerUrl == null) { 408 | throw new Error("preload was not called on AudioRecorder"); 409 | } 410 | this.state = states.STARTING; 411 | this.encodedData = []; 412 | this.stream = null; 413 | try { 414 | await WorkerEncoder.waitForWorker(workerUrl); 415 | this.stoppingCheck(); 416 | 417 | // If a constraint is set, pass them, otherwise just pass true 418 | let constraints = Object.keys(this.options.constraints).length > 0 ? this.options.constraints : true; 419 | this.stream = await navigator.mediaDevices.getUserMedia({ 420 | audio: constraints 421 | }); 422 | this.stoppingCheck(); 423 | const _AudioContext = getAudioContextCtor(); 424 | this.audioContext = new _AudioContext(); 425 | if (this.useAudioWorklet()) { 426 | await this.audioContext.audioWorklet.addModule(getAudioOutputModuleUrl(), { 427 | credentials: "omit" 428 | }); 429 | this.stoppingCheck(); 430 | } 431 | 432 | // Channel count must be gotten from the stream, as it might not have supported 433 | // the desired amount specified in the constraints 434 | let numberOfChannels = getNumberOfChannels(this.stream); 435 | 436 | // Successfully recording! 
437 | this.createAndStartEncoder(numberOfChannels); 438 | this.createAudioNodes(numberOfChannels); 439 | if (paused) { 440 | this.timer.reset(); 441 | this.state = states.PAUSED; 442 | } else { 443 | this.timer.resetAndStart(); 444 | this.state = states.RECORDING; 445 | } 446 | this.onstart && this.onstart(); 447 | } catch (error) { 448 | let startWasCancelled = this.state == states.STOPPING; 449 | this.cleanupAudioNodes(); 450 | 451 | // Reset so can attempt start again 452 | this.state = states.STOPPED; 453 | 454 | // Reject the stop promise now we have cleaned up and are in STOPPED state and ready to start() again 455 | if (startWasCancelled) { 456 | this.stopPromiseReject(error); 457 | } 458 | throw error; 459 | } 460 | } 461 | async __stop() { 462 | this.timer.stop(); 463 | if (this.state == states.RECORDING || this.state == states.PAUSED) { 464 | // Stop recording, but encoding may not have finished yet, 465 | // so we enter the stopping state. 466 | this.state = states.STOPPING; 467 | this.cleanupAudioNodes(); 468 | this.encoder.stop(); 469 | 470 | // Will be resolved later when encoding finishes 471 | return new Promise((resolve, reject) => { 472 | this.stopPromiseResolve = resolve; 473 | }); 474 | } else if (this.state == states.STARTING) { 475 | this.state = states.STOPPING; 476 | 477 | // Will be rejected later when start() has completely finished operation 478 | return new Promise((resolve, reject) => { 479 | this.stopPromiseReject = reject; 480 | }); 481 | } 482 | throw new Error("Called stop when AudioRecorder was not started"); 483 | } 484 | start() { 485 | let paused = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false; 486 | let promise = this.__start(paused); 487 | promise.catch(error => { 488 | // Don't send CancelStartError to onerror, as it's not *really* an error state 489 | // Only used as a promise rejection to indicate that starting did not succeed. 
490 | if (error.name != "CancelStartError") { 491 | this.onerror && this.onerror(error); 492 | } 493 | }); 494 | if (!this.onerror) { 495 | return promise; 496 | } 497 | } 498 | stop() { 499 | let promise = this.__stop(); 500 | promise.catch(error => { 501 | if (error.name == "CancelStartError") { 502 | // Stop was called before recording even started 503 | // Send a onstop event anyway to indicate that recording can be retried. 504 | this.onstop && this.onstop(this.options.streaming ? undefined : null); 505 | } else { 506 | this.onerror && this.onerror(error); 507 | } 508 | }); 509 | if (!this.onerror) { 510 | return promise; 511 | } 512 | } 513 | pause() { 514 | if (this.state == states.RECORDING) { 515 | this.state = states.PAUSED; 516 | this.timer.stop(); 517 | } 518 | } 519 | resume() { 520 | if (this.state == states.PAUSED) { 521 | this.state = states.RECORDING; 522 | this.timer.start(); 523 | } 524 | } 525 | } 526 | 527 | export { AudioRecorder as default }; 528 | //# sourceMappingURL=index.mjs.map 529 | -------------------------------------------------------------------------------- /dist/index.cjs: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | let workerStates = { 4 | INACTIVE: 0, 5 | LOADING: 1, 6 | READY: 2, 7 | ERROR: 3 8 | }; 9 | let worker = null; 10 | let workerState = workerStates.INACTIVE; 11 | let workerStateChangeCallbacks = []; 12 | let jobCallbacks = {}; 13 | function uuidv4() { 14 | // https://stackoverflow.com/a/2117523 15 | return ([1e7] + -1e3 + -4e3 + -8e3 + -1e11).replace(/[018]/g, c => (c ^ crypto.getRandomValues(new Uint8Array(1))[0] & 15 >> c / 4).toString(16)); 16 | } 17 | function notifyWorkerState(newState) { 18 | workerState = newState; 19 | for (let callback of workerStateChangeCallbacks) { 20 | callback(); 21 | } 22 | workerStateChangeCallbacks = []; 23 | } 24 | 25 | // This hack required to load worker from another domain (e.g. 
a CDN) 26 | // https://stackoverflow.com/a/62914052 27 | function getWorkerCrossDomainURL(url) { 28 | const content = `importScripts("${url}");`; 29 | return URL.createObjectURL(new Blob([content], { 30 | type: "text/javascript" 31 | })); 32 | } 33 | function loadWorker(workerUrl) { 34 | if (/^https?:\/\//.test(workerUrl)) { 35 | // Is it an absolute URL? Then consider it cross domain. 36 | workerUrl = getWorkerCrossDomainURL(workerUrl); 37 | } 38 | worker = new Worker(workerUrl); 39 | workerState = workerStates.LOADING; 40 | worker.onmessage = event => { 41 | switch (event.data.message) { 42 | case "ready": 43 | notifyWorkerState(workerStates.READY); 44 | break; 45 | case "encoded": 46 | if (event.data.jobId in jobCallbacks) { 47 | jobCallbacks[event.data.jobId].onencoded(event.data.srcBufLen); 48 | } 49 | break; 50 | case "data": 51 | if (event.data.jobId in jobCallbacks) { 52 | jobCallbacks[event.data.jobId].ondataavailable(event.data.data); 53 | } 54 | break; 55 | case "stopped": 56 | if (event.data.jobId in jobCallbacks) { 57 | jobCallbacks[event.data.jobId].onstopped(); 58 | } 59 | break; 60 | } 61 | }; 62 | worker.onerror = event => { 63 | console.error("mp3worker error. 
Is the worker URL correct?"); 64 | notifyWorkerState(workerStates.ERROR); 65 | }; 66 | } 67 | 68 | // Callbacks: 69 | // - ondataavailable 70 | // - onstopped 71 | class WorkerEncoder { 72 | constructor(options) { 73 | this.jobId = uuidv4(); 74 | this.options = options; 75 | this.queuedData = 0; 76 | jobCallbacks[this.jobId] = { 77 | onencoded: srcBufLen => { 78 | this.queuedData -= srcBufLen; 79 | }, 80 | ondataavailable: data => { 81 | this.ondataavailable && this.ondataavailable(data); 82 | }, 83 | onstopped: () => { 84 | delete jobCallbacks[this.jobId]; // Clean up 85 | this.onstopped && this.onstopped(); 86 | } 87 | }; 88 | } 89 | static preload(workerUrl) { 90 | if (workerState == workerStates.INACTIVE || workerState == workerStates.ERROR) { 91 | loadWorker(workerUrl); 92 | } 93 | } 94 | static waitForWorker(workerUrl) { 95 | if (workerState == workerStates.READY) { 96 | return Promise.resolve(); 97 | } else { 98 | // Worker loading already failed, try again... 99 | if (workerState == workerStates.INACTIVE || workerState == workerStates.ERROR) { 100 | loadWorker(workerUrl); 101 | } 102 | return new Promise((resolve, reject) => { 103 | workerStateChangeCallbacks.push(() => { 104 | if (workerState == workerStates.READY) { 105 | resolve(); 106 | } else { 107 | let error = new Error("MP3 worker failed"); 108 | error.name = "WorkerError"; 109 | reject(error); 110 | } 111 | }); 112 | }); 113 | } 114 | } 115 | start() { 116 | worker.postMessage({ 117 | command: "start", 118 | jobId: this.jobId, 119 | options: this.options 120 | }); 121 | } 122 | sendData(buffers) { 123 | // Check for an empty buffer 124 | if (buffers && buffers.length > 0 && buffers[0].length > 0) { 125 | this.queuedData += buffers[0].length; 126 | worker.postMessage({ 127 | command: "data", 128 | jobId: this.jobId, 129 | buffers: buffers 130 | }); 131 | } 132 | } 133 | 134 | // Amount of data that is not yet encoded. 
135 | getQueuedDataLen() { 136 | return this.queuedData; 137 | } 138 | stop() { 139 | worker.postMessage({ 140 | command: "stop", 141 | jobId: this.jobId 142 | }); 143 | } 144 | } 145 | 146 | class Timer { 147 | constructor() { 148 | this.reset(); 149 | } 150 | reset() { 151 | this.startTime = null; // May be modified when resuming, so not the true start time. 152 | this.stoppedTime = null; 153 | } 154 | start() { 155 | if (!this.startTime) { 156 | this.startTime = Date.now(); 157 | } 158 | if (this.stoppedTime) { 159 | // Skip time forward by the time length we were stopped 160 | this.startTime += Date.now() - this.stoppedTime; 161 | this.stoppedTime = null; 162 | } 163 | } 164 | resetAndStart() { 165 | this.reset(); 166 | this.start(); 167 | } 168 | stop() { 169 | if (!this.stoppedTime) { 170 | this.stoppedTime = Date.now(); 171 | } 172 | } 173 | getTime() { 174 | if (this.startTime) { 175 | if (this.stoppedTime) { 176 | return this.stoppedTime - this.startTime; 177 | } else { 178 | return Date.now() - this.startTime; 179 | } 180 | } else { 181 | return 0; 182 | } 183 | } 184 | } 185 | 186 | function stopStream(stream) { 187 | if (stream.getTracks) { 188 | stream.getTracks().forEach(track => track.stop()); 189 | } else { 190 | stream.stop(); // Deprecated 191 | } 192 | } 193 | 194 | // https://stackoverflow.com/a/9039885 195 | function detectIOS() { 196 | return ['iPad Simulator', 'iPhone Simulator', 'iPod Simulator', 'iPad', 'iPhone', 'iPod'].includes(navigator.platform) 197 | // iPad on iOS 13 detection 198 | || navigator.userAgent.includes("Mac") && "ontouchend" in document; 199 | } 200 | function detectSafari() { 201 | return /^((?!chrome|android).)*safari/i.test(navigator.userAgent); 202 | } 203 | 204 | function getAudioContextCtor() { 205 | return window.AudioContext || window.webkitAudioContext; 206 | } 207 | // Don't use audio worklet on iOS or safari, fall back to ScriptProcessor. 
208 | // There are issues with dropped incoming audio data after ~45 seconds. Thus, the resulting audio would be shorter and sped up / glitchy. 209 | // Curiously, these same issues are present if *not using* AudioWorklet on Chrome 210 | function isAudioWorkletSupported() { 211 | return window.AudioWorklet && !detectIOS() && !detectSafari(); 212 | } 213 | const states = { 214 | STOPPED: 0, 215 | RECORDING: 1, 216 | PAUSED: 2, 217 | STARTING: 3, 218 | STOPPING: 4 219 | }; 220 | const DEFAULT_OPTIONS = { 221 | recordingGain: 1, 222 | encoderBitRate: 96, 223 | streaming: false, 224 | streamBufferSize: 50000, 225 | forceScriptProcessor: false, 226 | constraints: { 227 | channelCount: 1, 228 | autoGainControl: true, 229 | echoCancellation: true, 230 | noiseSuppression: true 231 | } 232 | }; 233 | let workerUrl = null; 234 | function createCancelStartError() { 235 | let error = new Error("AudioRecorder start cancelled by call to stop"); 236 | error.name = "CancelStartError"; 237 | return error; 238 | } 239 | function getNumberOfChannels(stream) { 240 | let audioTracks = stream.getAudioTracks(); 241 | if (audioTracks.length < 1) { 242 | throw new Error("No audio tracks in user media stream"); 243 | } 244 | let trackSettings = audioTracks[0].getSettings(); 245 | return "channelCount" in trackSettings ? trackSettings.channelCount : 1; 246 | } 247 | 248 | // Worklet does nothing more than pass the data out, to be actually encoded by a regular Web Worker 249 | // Previously this was rewritten to do the encoding within an AudioWorklet, and it was all very nice and clean 250 | // but apparently doing anything that uses much CPU in a AudioWorklet will cause glitches in some browsers. 251 | // So, it's best to do the encoding in a regular Web Worker. 
252 | let AUDIO_OUTPUT_MODULE_URL = null; 253 | function getAudioOutputModuleUrl() { 254 | if (AUDIO_OUTPUT_MODULE_URL) { 255 | return AUDIO_OUTPUT_MODULE_URL; 256 | } 257 | AUDIO_OUTPUT_MODULE_URL = URL.createObjectURL(new Blob([` 258 | class AudioOutputProcessor extends AudioWorkletProcessor { 259 | process(inputs, outputs) { 260 | this.port.postMessage(inputs[0]); 261 | return true; 262 | } 263 | } 264 | 265 | registerProcessor("audio-output-processor", AudioOutputProcessor); 266 | `], { 267 | type: "application/javascript" 268 | })); 269 | return AUDIO_OUTPUT_MODULE_URL; 270 | } 271 | 272 | /* 273 | Callbacks: 274 | ondataavailable 275 | onstart - called when recording successfully started 276 | onstop - called when all data finished encoding and was output 277 | onerror - error starting recording 278 | */ 279 | class AudioRecorder { 280 | constructor(options) { 281 | this.options = { 282 | ...DEFAULT_OPTIONS, 283 | ...options 284 | }; 285 | this.state = states.STOPPED; 286 | this.audioContext = null; 287 | this.encoder = null; 288 | this.encodedData = null; 289 | this.stopPromiseResolve = null; 290 | this.stopPromiseReject = null; 291 | this.timer = new Timer(); 292 | } 293 | static isRecordingSupported() { 294 | return getAudioContextCtor() && navigator && navigator.mediaDevices && navigator.mediaDevices.getUserMedia; 295 | } 296 | static preload(_workerUrl) { 297 | workerUrl = _workerUrl; 298 | WorkerEncoder.preload(workerUrl); 299 | } 300 | 301 | // Will we use AudioWorklet? 
302 | useAudioWorklet() { 303 | return isAudioWorkletSupported() && !this.options.forceScriptProcessor; 304 | } 305 | createAndStartEncoder(numberOfChannels) { 306 | this.encoder = new WorkerEncoder({ 307 | originalSampleRate: this.audioContext.sampleRate, 308 | numberOfChannels: numberOfChannels, 309 | encoderBitRate: this.options.encoderBitRate, 310 | streamBufferSize: this.options.streamBufferSize 311 | }); 312 | this.encoder.ondataavailable = data => { 313 | if (this.options.streaming) { 314 | this.ondataavailable && this.ondataavailable(data); 315 | } else { 316 | this.encodedData.push(data); 317 | } 318 | }; 319 | this.encoder.onstopped = () => { 320 | this.state = states.STOPPED; 321 | let mp3Blob = this.options.streaming ? undefined : new Blob(this.encodedData, { 322 | type: "audio/mpeg" 323 | }); 324 | this.onstop && this.onstop(mp3Blob); 325 | this.stopPromiseResolve(mp3Blob); 326 | }; 327 | this.encoder.start(); 328 | } 329 | createOutputNode(numberOfChannels) { 330 | if (this.useAudioWorklet()) { 331 | console.log("Using AudioWorklet"); 332 | this.outputNode = new AudioWorkletNode(this.audioContext, "audio-output-processor", { 333 | numberOfOutputs: 0 334 | }); 335 | this.outputNode.port.onmessage = _ref => { 336 | let { 337 | data 338 | } = _ref; 339 | if (this.state == states.RECORDING) { 340 | this.encoder.sendData(data); 341 | } 342 | }; 343 | } else { 344 | console.log("Using ScriptProcessorNode"); 345 | this.outputNode = this.audioContext.createScriptProcessor(4096, numberOfChannels, numberOfChannels); 346 | this.outputNode.connect(this.audioContext.destination); 347 | this.outputNode.onaudioprocess = event => { 348 | if (this.state == states.RECORDING) { 349 | let inputBuffer = event.inputBuffer; 350 | let buffers = []; 351 | for (let i = 0; i < inputBuffer.numberOfChannels; i++) { 352 | buffers.push(inputBuffer.getChannelData(i)); 353 | } 354 | this.encoder.sendData(buffers); 355 | } 356 | }; 357 | } 358 | } 359 | 
createAudioNodes(numberOfChannels) { 360 | this.createOutputNode(numberOfChannels); 361 | this.recordingGainNode = this.audioContext.createGain(); 362 | this.setRecordingGain(this.options.recordingGain); 363 | this.recordingGainNode.connect(this.outputNode); 364 | this.sourceNode = this.audioContext.createMediaStreamSource(this.stream); 365 | this.sourceNode.connect(this.recordingGainNode); 366 | } 367 | cleanupAudioNodes() { 368 | if (this.stream) { 369 | stopStream(this.stream); 370 | this.stream = null; 371 | } 372 | if (this.useAudioWorklet()) { 373 | this.outputNode && (this.outputNode.port.onmessage = null); 374 | } else { 375 | this.outputNode && (this.outputNode.onaudioprocess = null); 376 | } 377 | this.outputNode && this.outputNode.disconnect(); 378 | this.recordingGainNode && this.recordingGainNode.disconnect(); 379 | this.sourceNode && this.sourceNode.disconnect(); 380 | this.audioContext && this.audioContext.close(); 381 | } 382 | setRecordingGain(gain) { 383 | this.options.recordingGain = gain; 384 | if (this.recordingGainNode) { 385 | this.recordingGainNode.gain.setTargetAtTime(gain, this.audioContext.currentTime, 0.01); 386 | } 387 | } 388 | get time() { 389 | return this.timer.getTime(); 390 | } 391 | 392 | // Get the amount of data left to be encoded. 393 | // Useful to estimate if STOPPING state (encoding still ongoing) will last a while. 394 | getEncodingQueueSize() { 395 | return this.encoder ? 
this.encoder.getQueuedDataLen() : 0; 396 | } 397 | 398 | // Called after every "await" in start(), to check that stop wasn't called 399 | // and we should abandon starting 400 | stoppingCheck() { 401 | if (this.state == states.STOPPING) { 402 | throw createCancelStartError(); 403 | } 404 | } 405 | async __start(paused) { 406 | if (this.state != states.STOPPED) { 407 | throw new Error("Called start when not in stopped state"); 408 | } 409 | if (workerUrl == null) { 410 | throw new Error("preload was not called on AudioRecorder"); 411 | } 412 | this.state = states.STARTING; 413 | this.encodedData = []; 414 | this.stream = null; 415 | try { 416 | await WorkerEncoder.waitForWorker(workerUrl); 417 | this.stoppingCheck(); 418 | 419 | // If a constraint is set, pass them, otherwise just pass true 420 | let constraints = Object.keys(this.options.constraints).length > 0 ? this.options.constraints : true; 421 | this.stream = await navigator.mediaDevices.getUserMedia({ 422 | audio: constraints 423 | }); 424 | this.stoppingCheck(); 425 | const _AudioContext = getAudioContextCtor(); 426 | this.audioContext = new _AudioContext(); 427 | if (this.useAudioWorklet()) { 428 | await this.audioContext.audioWorklet.addModule(getAudioOutputModuleUrl(), { 429 | credentials: "omit" 430 | }); 431 | this.stoppingCheck(); 432 | } 433 | 434 | // Channel count must be gotten from the stream, as it might not have supported 435 | // the desired amount specified in the constraints 436 | let numberOfChannels = getNumberOfChannels(this.stream); 437 | 438 | // Successfully recording! 
439 | this.createAndStartEncoder(numberOfChannels); 440 | this.createAudioNodes(numberOfChannels); 441 | if (paused) { 442 | this.timer.reset(); 443 | this.state = states.PAUSED; 444 | } else { 445 | this.timer.resetAndStart(); 446 | this.state = states.RECORDING; 447 | } 448 | this.onstart && this.onstart(); 449 | } catch (error) { 450 | let startWasCancelled = this.state == states.STOPPING; 451 | this.cleanupAudioNodes(); 452 | 453 | // Reset so can attempt start again 454 | this.state = states.STOPPED; 455 | 456 | // Reject the stop promise now we have cleaned up and are in STOPPED state and ready to start() again 457 | if (startWasCancelled) { 458 | this.stopPromiseReject(error); 459 | } 460 | throw error; 461 | } 462 | } 463 | async __stop() { 464 | this.timer.stop(); 465 | if (this.state == states.RECORDING || this.state == states.PAUSED) { 466 | // Stop recording, but encoding may not have finished yet, 467 | // so we enter the stopping state. 468 | this.state = states.STOPPING; 469 | this.cleanupAudioNodes(); 470 | this.encoder.stop(); 471 | 472 | // Will be resolved later when encoding finishes 473 | return new Promise((resolve, reject) => { 474 | this.stopPromiseResolve = resolve; 475 | }); 476 | } else if (this.state == states.STARTING) { 477 | this.state = states.STOPPING; 478 | 479 | // Will be rejected later when start() has completely finished operation 480 | return new Promise((resolve, reject) => { 481 | this.stopPromiseReject = reject; 482 | }); 483 | } 484 | throw new Error("Called stop when AudioRecorder was not started"); 485 | } 486 | start() { 487 | let paused = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false; 488 | let promise = this.__start(paused); 489 | promise.catch(error => { 490 | // Don't send CancelStartError to onerror, as it's not *really* an error state 491 | // Only used as a promise rejection to indicate that starting did not succeed. 
492 | if (error.name != "CancelStartError") { 493 | this.onerror && this.onerror(error); 494 | } 495 | }); 496 | if (!this.onerror) { 497 | return promise; 498 | } 499 | } 500 | stop() { 501 | let promise = this.__stop(); 502 | promise.catch(error => { 503 | if (error.name == "CancelStartError") { 504 | // Stop was called before recording even started 505 | // Send a onstop event anyway to indicate that recording can be retried. 506 | this.onstop && this.onstop(this.options.streaming ? undefined : null); 507 | } else { 508 | this.onerror && this.onerror(error); 509 | } 510 | }); 511 | if (!this.onerror) { 512 | return promise; 513 | } 514 | } 515 | pause() { 516 | if (this.state == states.RECORDING) { 517 | this.state = states.PAUSED; 518 | this.timer.stop(); 519 | } 520 | } 521 | resume() { 522 | if (this.state == states.PAUSED) { 523 | this.state = states.RECORDING; 524 | this.timer.start(); 525 | } 526 | } 527 | } 528 | 529 | module.exports = AudioRecorder; 530 | //# sourceMappingURL=index.cjs.map 531 | -------------------------------------------------------------------------------- /dist/audiorecorder.js: -------------------------------------------------------------------------------- 1 | (function (global, factory) { 2 | typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() : 3 | typeof define === 'function' && define.amd ? define(factory) : 4 | (global = typeof globalThis !== 'undefined' ? 
globalThis : global || self, global.AudioRecorder = factory()); 5 | })(this, (function () { 'use strict'; 6 | 7 | let workerStates = { 8 | INACTIVE: 0, 9 | LOADING: 1, 10 | READY: 2, 11 | ERROR: 3 12 | }; 13 | let worker = null; 14 | let workerState = workerStates.INACTIVE; 15 | let workerStateChangeCallbacks = []; 16 | let jobCallbacks = {}; 17 | function uuidv4() { 18 | // https://stackoverflow.com/a/2117523 19 | return ([1e7] + -1e3 + -4e3 + -8e3 + -1e11).replace(/[018]/g, c => (c ^ crypto.getRandomValues(new Uint8Array(1))[0] & 15 >> c / 4).toString(16)); 20 | } 21 | function notifyWorkerState(newState) { 22 | workerState = newState; 23 | for (let callback of workerStateChangeCallbacks) { 24 | callback(); 25 | } 26 | workerStateChangeCallbacks = []; 27 | } 28 | 29 | // This hack required to load worker from another domain (e.g. a CDN) 30 | // https://stackoverflow.com/a/62914052 31 | function getWorkerCrossDomainURL(url) { 32 | const content = `importScripts("${url}");`; 33 | return URL.createObjectURL(new Blob([content], { 34 | type: "text/javascript" 35 | })); 36 | } 37 | function loadWorker(workerUrl) { 38 | if (/^https?:\/\//.test(workerUrl)) { 39 | // Is it an absolute URL? Then consider it cross domain. 
40 | workerUrl = getWorkerCrossDomainURL(workerUrl); 41 | } 42 | worker = new Worker(workerUrl); 43 | workerState = workerStates.LOADING; 44 | worker.onmessage = event => { 45 | switch (event.data.message) { 46 | case "ready": 47 | notifyWorkerState(workerStates.READY); 48 | break; 49 | case "encoded": 50 | if (event.data.jobId in jobCallbacks) { 51 | jobCallbacks[event.data.jobId].onencoded(event.data.srcBufLen); 52 | } 53 | break; 54 | case "data": 55 | if (event.data.jobId in jobCallbacks) { 56 | jobCallbacks[event.data.jobId].ondataavailable(event.data.data); 57 | } 58 | break; 59 | case "stopped": 60 | if (event.data.jobId in jobCallbacks) { 61 | jobCallbacks[event.data.jobId].onstopped(); 62 | } 63 | break; 64 | } 65 | }; 66 | worker.onerror = event => { 67 | console.error("mp3worker error. Is the worker URL correct?"); 68 | notifyWorkerState(workerStates.ERROR); 69 | }; 70 | } 71 | 72 | // Callbacks: 73 | // - ondataavailable 74 | // - onstopped 75 | class WorkerEncoder { 76 | constructor(options) { 77 | this.jobId = uuidv4(); 78 | this.options = options; 79 | this.queuedData = 0; 80 | jobCallbacks[this.jobId] = { 81 | onencoded: srcBufLen => { 82 | this.queuedData -= srcBufLen; 83 | }, 84 | ondataavailable: data => { 85 | this.ondataavailable && this.ondataavailable(data); 86 | }, 87 | onstopped: () => { 88 | delete jobCallbacks[this.jobId]; // Clean up 89 | this.onstopped && this.onstopped(); 90 | } 91 | }; 92 | } 93 | static preload(workerUrl) { 94 | if (workerState == workerStates.INACTIVE || workerState == workerStates.ERROR) { 95 | loadWorker(workerUrl); 96 | } 97 | } 98 | static waitForWorker(workerUrl) { 99 | if (workerState == workerStates.READY) { 100 | return Promise.resolve(); 101 | } else { 102 | // Worker loading already failed, try again... 
103 | if (workerState == workerStates.INACTIVE || workerState == workerStates.ERROR) { 104 | loadWorker(workerUrl); 105 | } 106 | return new Promise((resolve, reject) => { 107 | workerStateChangeCallbacks.push(() => { 108 | if (workerState == workerStates.READY) { 109 | resolve(); 110 | } else { 111 | let error = new Error("MP3 worker failed"); 112 | error.name = "WorkerError"; 113 | reject(error); 114 | } 115 | }); 116 | }); 117 | } 118 | } 119 | start() { 120 | worker.postMessage({ 121 | command: "start", 122 | jobId: this.jobId, 123 | options: this.options 124 | }); 125 | } 126 | sendData(buffers) { 127 | // Check for an empty buffer 128 | if (buffers && buffers.length > 0 && buffers[0].length > 0) { 129 | this.queuedData += buffers[0].length; 130 | worker.postMessage({ 131 | command: "data", 132 | jobId: this.jobId, 133 | buffers: buffers 134 | }); 135 | } 136 | } 137 | 138 | // Amount of data that is not yet encoded. 139 | getQueuedDataLen() { 140 | return this.queuedData; 141 | } 142 | stop() { 143 | worker.postMessage({ 144 | command: "stop", 145 | jobId: this.jobId 146 | }); 147 | } 148 | } 149 | 150 | class Timer { 151 | constructor() { 152 | this.reset(); 153 | } 154 | reset() { 155 | this.startTime = null; // May be modified when resuming, so not the true start time. 
156 | this.stoppedTime = null; 157 | } 158 | start() { 159 | if (!this.startTime) { 160 | this.startTime = Date.now(); 161 | } 162 | if (this.stoppedTime) { 163 | // Skip time forward by the time length we were stopped 164 | this.startTime += Date.now() - this.stoppedTime; 165 | this.stoppedTime = null; 166 | } 167 | } 168 | resetAndStart() { 169 | this.reset(); 170 | this.start(); 171 | } 172 | stop() { 173 | if (!this.stoppedTime) { 174 | this.stoppedTime = Date.now(); 175 | } 176 | } 177 | getTime() { 178 | if (this.startTime) { 179 | if (this.stoppedTime) { 180 | return this.stoppedTime - this.startTime; 181 | } else { 182 | return Date.now() - this.startTime; 183 | } 184 | } else { 185 | return 0; 186 | } 187 | } 188 | } 189 | 190 | function stopStream(stream) { 191 | if (stream.getTracks) { 192 | stream.getTracks().forEach(track => track.stop()); 193 | } else { 194 | stream.stop(); // Deprecated 195 | } 196 | } 197 | 198 | // https://stackoverflow.com/a/9039885 199 | function detectIOS() { 200 | return ['iPad Simulator', 'iPhone Simulator', 'iPod Simulator', 'iPad', 'iPhone', 'iPod'].includes(navigator.platform) 201 | // iPad on iOS 13 detection 202 | || navigator.userAgent.includes("Mac") && "ontouchend" in document; 203 | } 204 | function detectSafari() { 205 | return /^((?!chrome|android).)*safari/i.test(navigator.userAgent); 206 | } 207 | 208 | function getAudioContextCtor() { 209 | return window.AudioContext || window.webkitAudioContext; 210 | } 211 | // Don't use audio worklet on iOS or safari, fall back to ScriptProcessor. 212 | // There are issues with dropped incoming audio data after ~45 seconds. Thus, the resulting audio would be shorter and sped up / glitchy. 
213 | // Curiously, these same issues are present if *not using* AudioWorklet on Chrome 214 | function isAudioWorkletSupported() { 215 | return window.AudioWorklet && !detectIOS() && !detectSafari(); 216 | } 217 | const states = { 218 | STOPPED: 0, 219 | RECORDING: 1, 220 | PAUSED: 2, 221 | STARTING: 3, 222 | STOPPING: 4 223 | }; 224 | const DEFAULT_OPTIONS = { 225 | recordingGain: 1, 226 | encoderBitRate: 96, 227 | streaming: false, 228 | streamBufferSize: 50000, 229 | forceScriptProcessor: false, 230 | constraints: { 231 | channelCount: 1, 232 | autoGainControl: true, 233 | echoCancellation: true, 234 | noiseSuppression: true 235 | } 236 | }; 237 | let workerUrl = null; 238 | function createCancelStartError() { 239 | let error = new Error("AudioRecorder start cancelled by call to stop"); 240 | error.name = "CancelStartError"; 241 | return error; 242 | } 243 | function getNumberOfChannels(stream) { 244 | let audioTracks = stream.getAudioTracks(); 245 | if (audioTracks.length < 1) { 246 | throw new Error("No audio tracks in user media stream"); 247 | } 248 | let trackSettings = audioTracks[0].getSettings(); 249 | return "channelCount" in trackSettings ? trackSettings.channelCount : 1; 250 | } 251 | 252 | // Worklet does nothing more than pass the data out, to be actually encoded by a regular Web Worker 253 | // Previously this was rewritten to do the encoding within an AudioWorklet, and it was all very nice and clean 254 | // but apparently doing anything that uses much CPU in a AudioWorklet will cause glitches in some browsers. 255 | // So, it's best to do the encoding in a regular Web Worker. 
// Cached blob: URL for the inline AudioWorklet module (created once per page).
let AUDIO_OUTPUT_MODULE_URL = null;

// Lazily builds (and caches) an object URL containing the AudioWorkletProcessor
// source. The processor only forwards raw input buffers to the main thread via
// its message port; actual MP3 encoding happens in a regular Web Worker.
function getAudioOutputModuleUrl() {
  if (AUDIO_OUTPUT_MODULE_URL) {
    return AUDIO_OUTPUT_MODULE_URL;
  }
  AUDIO_OUTPUT_MODULE_URL = URL.createObjectURL(new Blob([`
		class AudioOutputProcessor extends AudioWorkletProcessor {
			process(inputs, outputs) {
				this.port.postMessage(inputs[0]);
				return true;
			}
		}

		registerProcessor("audio-output-processor", AudioOutputProcessor);
	`], {
    type: "application/javascript"
  }));
  return AUDIO_OUTPUT_MODULE_URL;
}

/*
Callbacks:
	ondataavailable
	onstart - called when recording successfully started
	onstop - called when all data finished encoding and was output
	onerror - error starting recording
*/
class AudioRecorder {
  // options are shallow-merged over DEFAULT_OPTIONS.
  constructor(options) {
    this.options = {
      ...DEFAULT_OPTIONS,
      ...options
    };
    this.state = states.STOPPED;
    this.audioContext = null;
    this.encoder = null;
    this.encodedData = null; // accumulated encoded chunks when not streaming
    this.stopPromiseResolve = null; // resolves the promise returned by stop()
    this.stopPromiseReject = null; // rejects stop()'s promise when it cancelled a start()
    this.timer = new Timer();
  }

  // True when the environment has the APIs needed to record at all.
  static isRecordingSupported() {
    return getAudioContextCtor() && navigator && navigator.mediaDevices && navigator.mediaDevices.getUserMedia;
  }

  // Must be called before start(): remembers the worker URL and begins loading it.
  static preload(_workerUrl) {
    workerUrl = _workerUrl;
    WorkerEncoder.preload(workerUrl);
  }

  // Will we use AudioWorklet?
  useAudioWorklet() {
    return isAudioWorkletSupported() && !this.options.forceScriptProcessor;
  }

  // Creates the worker-backed MP3 encoder and wires up its callbacks.
  createAndStartEncoder(numberOfChannels) {
    this.encoder = new WorkerEncoder({
      originalSampleRate: this.audioContext.sampleRate,
      numberOfChannels: numberOfChannels,
      encoderBitRate: this.options.encoderBitRate,
      streamBufferSize: this.options.streamBufferSize
    });
    this.encoder.ondataavailable = data => {
      if (this.options.streaming) {
        // Streaming mode: hand encoded chunks straight to the caller.
        this.ondataavailable && this.ondataavailable(data);
      } else {
        // Otherwise accumulate chunks until stop, then emit a single Blob.
        this.encodedData.push(data);
      }
    };
    this.encoder.onstopped = () => {
      // Encoder has flushed everything; recording is now fully stopped.
      // In streaming mode there is no final blob to hand over.
      this.state = states.STOPPED;
      let mp3Blob = this.options.streaming ? undefined : new Blob(this.encodedData, {
        type: "audio/mpeg"
      });
      this.onstop && this.onstop(mp3Blob);
      this.stopPromiseResolve(mp3Blob);
    };
    this.encoder.start();
  }

  // Creates the node that taps raw PCM out of the graph: an AudioWorkletNode,
  // or the deprecated ScriptProcessorNode as a fallback.
  createOutputNode(numberOfChannels) {
    if (this.useAudioWorklet()) {
      console.log("Using AudioWorklet");
      this.outputNode = new AudioWorkletNode(this.audioContext, "audio-output-processor", {
        numberOfOutputs: 0
      });
      this.outputNode.port.onmessage = _ref => {
        let {
          data
        } = _ref;
        // Drop incoming buffers unless actively recording (e.g. while paused).
        if (this.state == states.RECORDING) {
          this.encoder.sendData(data);
        }
      };
    } else {
      console.log("Using ScriptProcessorNode");
      this.outputNode = this.audioContext.createScriptProcessor(4096, numberOfChannels, numberOfChannels);
      // NOTE(review): connected to destination — presumably required for
      // onaudioprocess to keep firing in some browsers; confirm.
      this.outputNode.connect(this.audioContext.destination);
      this.outputNode.onaudioprocess = event => {
        if (this.state == states.RECORDING) {
          let inputBuffer = event.inputBuffer;
          let buffers = [];
          for (let i = 0; i < inputBuffer.numberOfChannels; i++) {
            buffers.push(inputBuffer.getChannelData(i));
          }
          this.encoder.sendData(buffers);
        }
      };
    }
  }

  // Builds the audio graph: mic source -> gain -> output tap.
  createAudioNodes(numberOfChannels) {
    this.createOutputNode(numberOfChannels);
    this.recordingGainNode = this.audioContext.createGain();
    this.setRecordingGain(this.options.recordingGain);
    this.recordingGainNode.connect(this.outputNode);
    this.sourceNode = this.audioContext.createMediaStreamSource(this.stream);
    this.sourceNode.connect(this.recordingGainNode);
  }

  // Tears down the stream and the audio graph. Safe to call when only
  // partially constructed — every reference is null-checked.
  cleanupAudioNodes() {
    if (this.stream) {
      stopStream(this.stream);
      this.stream = null;
    }
    if (this.useAudioWorklet()) {
      this.outputNode && (this.outputNode.port.onmessage = null);
    } else {
      this.outputNode && (this.outputNode.onaudioprocess = null);
    }
    this.outputNode && this.outputNode.disconnect();
    this.recordingGainNode && this.recordingGainNode.disconnect();
    this.sourceNode && this.sourceNode.disconnect();
    this.audioContext && this.audioContext.close();
  }

  // Sets (and remembers) the recording gain. Uses setTargetAtTime with a
  // 0.01s time constant so a live graph ramps smoothly instead of clicking.
  setRecordingGain(gain) {
    this.options.recordingGain = gain;
    if (this.recordingGainNode) {
      this.recordingGainNode.gain.setTargetAtTime(gain, this.audioContext.currentTime, 0.01);
    }
  }

  // Elapsed recording time in milliseconds (Timer is Date.now() based and
  // excludes paused periods).
  get time() {
    return this.timer.getTime();
  }

  // Get the amount of data left to be encoded.
  // Useful to estimate if STOPPING state (encoding still ongoing) will last a while.
  getEncodingQueueSize() {
    return this.encoder ? this.encoder.getQueuedDataLen() : 0;
  }

  // Called after every "await" in start(), to check that stop wasn't called
  // and we should abandon starting
  stoppingCheck() {
    if (this.state == states.STOPPING) {
      throw createCancelStartError();
    }
  }

  // Async start sequence: wait for the worker, acquire the mic, build the
  // context/graph, start the encoder. Throws CancelStartError if stop() was
  // called at any await point (see stoppingCheck).
  async __start(paused) {
    if (this.state != states.STOPPED) {
      throw new Error("Called start when not in stopped state");
    }
    if (workerUrl == null) {
      throw new Error("preload was not called on AudioRecorder");
    }
    this.state = states.STARTING;
    this.encodedData = [];
    this.stream = null;
    try {
      await WorkerEncoder.waitForWorker(workerUrl);
      this.stoppingCheck();

      // If a constraint is set, pass them, otherwise just pass true
      let constraints = Object.keys(this.options.constraints).length > 0 ? this.options.constraints : true;
      this.stream = await navigator.mediaDevices.getUserMedia({
        audio: constraints
      });
      this.stoppingCheck();
      const _AudioContext = getAudioContextCtor();
      this.audioContext = new _AudioContext();
      if (this.useAudioWorklet()) {
        await this.audioContext.audioWorklet.addModule(getAudioOutputModuleUrl(), {
          credentials: "omit"
        });
        this.stoppingCheck();
      }

      // Channel count must be gotten from the stream, as it might not have supported
      // the desired amount specified in the constraints
      let numberOfChannels = getNumberOfChannels(this.stream);

      // Successfully recording!
      this.createAndStartEncoder(numberOfChannels);
      this.createAudioNodes(numberOfChannels);
      if (paused) {
        // Start in paused state: timer zeroed but not running.
        this.timer.reset();
        this.state = states.PAUSED;
      } else {
        this.timer.resetAndStart();
        this.state = states.RECORDING;
      }
      this.onstart && this.onstart();
    } catch (error) {
      // STOPPING here means stop() cancelled us (error is a CancelStartError).
      let startWasCancelled = this.state == states.STOPPING;
      this.cleanupAudioNodes();

      // Reset so can attempt start again
      this.state = states.STOPPED;

      // Reject the stop promise now we have cleaned up and are in STOPPED state and ready to start() again
      if (startWasCancelled) {
        this.stopPromiseReject(error);
      }
      throw error;
    }
  }

  // Async stop: either finish an active recording (promise resolves when the
  // encoder flushes) or cancel an in-flight start() (promise rejects with
  // CancelStartError once start() has cleaned up).
  async __stop() {
    this.timer.stop();
    if (this.state == states.RECORDING || this.state == states.PAUSED) {
      // Stop recording, but encoding may not have finished yet,
      // so we enter the stopping state.
      this.state = states.STOPPING;
      this.cleanupAudioNodes();
      this.encoder.stop();

      // Will be resolved later when encoding finishes
      return new Promise((resolve, reject) => {
        this.stopPromiseResolve = resolve;
      });
    } else if (this.state == states.STARTING) {
      this.state = states.STOPPING;

      // Will be rejected later when start() has completely finished operation
      return new Promise((resolve, reject) => {
        this.stopPromiseReject = reject;
      });
    }
    throw new Error("Called stop when AudioRecorder was not started");
  }

  // Public start(paused = false). Returns the underlying promise only when no
  // onerror callback is set, so callers use either promises or callbacks.
  start() {
    let paused = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false;
    let promise = this.__start(paused);
    promise.catch(error => {
      // Don't send CancelStartError to onerror, as it's not *really* an error state
      // Only used as a promise rejection to indicate that starting did not succeed.
      if (error.name != "CancelStartError") {
        this.onerror && this.onerror(error);
      }
    });
    if (!this.onerror) {
      return promise;
    }
  }

  // Public stop(). Same promise-vs-callback convention as start().
  stop() {
    let promise = this.__stop();
    promise.catch(error => {
      if (error.name == "CancelStartError") {
        // Stop was called before recording even started
        // Send a onstop event anyway to indicate that recording can be retried.
        this.onstop && this.onstop(this.options.streaming ? undefined : null);
      } else {
        this.onerror && this.onerror(error);
      }
    });
    if (!this.onerror) {
      return promise;
    }
  }

  // Pause: graph stays alive; incoming audio is dropped while state != RECORDING.
  pause() {
    if (this.state == states.RECORDING) {
      this.state = states.PAUSED;
      this.timer.stop();
    }
  }

  // Resume from pause; timer skips over the paused interval.
  resume() {
    if (this.state == states.PAUSED) {
      this.state = states.RECORDING;
      this.timer.start();
    }
  }
}

return AudioRecorder;

}));
//# sourceMappingURL=audiorecorder.js.map
{\n\t\tcallback();\n\t}\n\n\tworkerStateChangeCallbacks = [];\n}\n\n// This hack required to load worker from another domain (e.g. a CDN)\n// https://stackoverflow.com/a/62914052\nfunction getWorkerCrossDomainURL(url) {\n\tconst content = `importScripts(\"${url}\");`;\n\treturn URL.createObjectURL(new Blob([content], {type : \"text/javascript\"}));\n}\n\nfunction loadWorker(workerUrl) {\n\tif (/^https?:\\/\\//.test(workerUrl)) { // Is it an absolute URL? Then consider it cross domain.\n\t\tworkerUrl = getWorkerCrossDomainURL(workerUrl);\n\t}\n\n\tworker = new Worker(workerUrl);\n\tworkerState = workerStates.LOADING;\n\n\tworker.onmessage = (event) => {\n\t\tswitch (event.data.message) {\n\t\t\tcase \"ready\":\n\t\t\t\tnotifyWorkerState(workerStates.READY);\n\t\t\t\tbreak;\n\t\t\tcase \"encoded\":\n\t\t\t\tif (event.data.jobId in jobCallbacks) {\n\t\t\t\t\tjobCallbacks[event.data.jobId].onencoded(event.data.srcBufLen);\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\tcase \"data\":\n\t\t\t\tif (event.data.jobId in jobCallbacks) {\n\t\t\t\t\tjobCallbacks[event.data.jobId].ondataavailable(event.data.data);\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t\tcase \"stopped\":\n\t\t\t\tif (event.data.jobId in jobCallbacks) {\n\t\t\t\t\tjobCallbacks[event.data.jobId].onstopped();\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t}\n\t};\n\n\tworker.onerror = (event) => {\n\t\tconsole.error(\"mp3worker error. 
Is the worker URL correct?\");\n\t\tnotifyWorkerState(workerStates.ERROR);\n\t};\n}\n\n// Callbacks:\n// - ondataavailable\n// - onstopped\nexport default class WorkerEncoder {\n\tconstructor(options) {\n\t\tthis.jobId = uuidv4();\n\t\tthis.options = options;\n\t\tthis.queuedData = 0;\n\n\t\tjobCallbacks[this.jobId] = {\n\t\t\tonencoded : (srcBufLen) => {\n\t\t\t\tthis.queuedData -= srcBufLen;\n\t\t\t},\n\t\t\tondataavailable : (data) => {\n\t\t\t\tthis.ondataavailable && this.ondataavailable(data);\n\t\t\t},\n\t\t\tonstopped : () => {\n\t\t\t\tdelete jobCallbacks[this.jobId]; // Clean up\n\t\t\t\tthis.onstopped && this.onstopped();\n\t\t\t}\n\t\t};\n\t}\n\n\tstatic preload(workerUrl) {\n\t\tif (workerState == workerStates.INACTIVE || workerState == workerStates.ERROR) {\n\t\t\tloadWorker(workerUrl);\n\t\t}\n\t}\n\n\tstatic waitForWorker(workerUrl) {\n\t\tif (workerState == workerStates.READY) {\n\t\t\treturn Promise.resolve();\n\t\t} else {\n\t\t\t// Worker loading already failed, try again...\n\t\t\tif (workerState == workerStates.INACTIVE || workerState == workerStates.ERROR) {\n\t\t\t\tloadWorker(workerUrl);\n\t\t\t}\n\n\t\t\treturn new Promise((resolve, reject) => {\n\t\t\t\tworkerStateChangeCallbacks.push(() => {\n\t\t\t\t\tif (workerState == workerStates.READY) {\n\t\t\t\t\t\tresolve();\n\t\t\t\t\t} else {\n\t\t\t\t\t\tlet error = new Error(\"MP3 worker failed\");\n\t\t\t\t\t\terror.name = \"WorkerError\";\n\t\t\t\t\t\treject(error);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t});\n\t\t}\n\t}\n\n\tstart() {\n\t\tworker.postMessage({\n\t\t\tcommand : \"start\",\n\t\t\tjobId : this.jobId,\n\t\t\toptions : this.options\n\t\t});\n\t}\n\n\tsendData(buffers) {\n\t\t// Check for an empty buffer\n\t\tif (buffers && buffers.length > 0 && buffers[0].length > 0) {\n\t\t\tthis.queuedData += buffers[0].length;\n\t\t\t\n\t\t\tworker.postMessage({\n\t\t\t\tcommand : \"data\",\n\t\t\t\tjobId : this.jobId,\n\t\t\t\tbuffers : buffers\n\t\t\t});\n\t\t}\n\t}\n\t\n\t// Amount of data that 
is not yet encoded.\n\tgetQueuedDataLen() {\n\t\treturn this.queuedData;\n\t}\n\n\tstop() {\n\t\tworker.postMessage({\n\t\t\tcommand : \"stop\",\n\t\t\tjobId : this.jobId\n\t\t});\n\t}\n}\n","\nexport default class Timer {\n\tconstructor() {\n\t\tthis.reset();\n\t}\n\n\treset() {\n\t\tthis.startTime = null; // May be modified when resuming, so not the true start time.\n\t\tthis.stoppedTime = null;\n\t}\n\n\tstart() {\n\t\tif (!this.startTime) {\n\t\t\tthis.startTime = Date.now();\n\t\t}\n\n\t\tif (this.stoppedTime) {\n\t\t\t// Skip time forward by the time length we were stopped\n\t\t\tthis.startTime += Date.now() - this.stoppedTime;\n\t\t\tthis.stoppedTime = null;\n\t\t}\n\t}\n\t\n\tresetAndStart() {\n\t\tthis.reset();\n\t\tthis.start();\n\t}\n\n\tstop() {\n\t\tif (!this.stoppedTime) {\n\t\t\tthis.stoppedTime = Date.now();\n\t\t}\n\t}\n\n\tgetTime() {\n\t\tif (this.startTime) {\n\t\t\tif (this.stoppedTime) {\n\t\t\t\treturn this.stoppedTime - this.startTime;\n\t\t\t} else {\n\t\t\t\treturn Date.now() - this.startTime;\n\t\t\t}\n\t\t} else {\n\t\t\treturn 0;\n\t\t}\n\t}\n}\n","import WorkerEncoder from \"./mp3worker/WorkerEncoder.js\";\nimport Timer from \"./Timer.js\";\nimport {stopStream, detectIOS, detectSafari} from \"./utils.js\";\n\nfunction getAudioContextCtor() {\n\treturn window.AudioContext || window.webkitAudioContext;\n}\n// Don't use audio worklet on iOS or safari, fall back to ScriptProcessor.\n// There are issues with dropped incoming audio data after ~45 seconds. 
Thus, the resulting audio would be shorter and sped up / glitchy.\n// Curiously, these same issues are present if *not using* AudioWorklet on Chrome\nfunction isAudioWorkletSupported() {\n\treturn window.AudioWorklet && !detectIOS() && !detectSafari();\n}\n\nconst states = {\n\tSTOPPED : 0,\n\tRECORDING : 1,\n\tPAUSED : 2,\n\tSTARTING : 3,\n\tSTOPPING : 4\n};\n\nconst DEFAULT_OPTIONS = {\n\trecordingGain : 1,\n\tencoderBitRate : 96,\n\tstreaming : false,\n\tstreamBufferSize : 50000,\n\tforceScriptProcessor : false,\n\tconstraints : {\n\t\tchannelCount : 1,\n\t\tautoGainControl : true,\n\t\techoCancellation : true,\n\t\tnoiseSuppression : true\n\t}\n};\n\nlet workerUrl = null;\n\nfunction createCancelStartError() {\n\tlet error = new Error(\"AudioRecorder start cancelled by call to stop\");\n\terror.name = \"CancelStartError\";\n\treturn error;\n}\n\nfunction getNumberOfChannels(stream) {\n\tlet audioTracks = stream.getAudioTracks();\n\t\n\tif (audioTracks.length < 1) {\n\t\tthrow new Error(\"No audio tracks in user media stream\");\n\t}\n\t\n\tlet trackSettings = audioTracks[0].getSettings();\n\treturn \"channelCount\" in trackSettings ? 
trackSettings.channelCount : 1;\n}\n\n// Worklet does nothing more than pass the data out, to be actually encoded by a regular Web Worker\n// Previously this was rewritten to do the encoding within an AudioWorklet, and it was all very nice and clean\n// but apparently doing anything that uses much CPU in a AudioWorklet will cause glitches in some browsers.\n// So, it's best to do the encoding in a regular Web Worker.\nlet AUDIO_OUTPUT_MODULE_URL = null;\n\nfunction getAudioOutputModuleUrl() {\n\tif (AUDIO_OUTPUT_MODULE_URL) {\n\t\treturn AUDIO_OUTPUT_MODULE_URL;\n\t}\n\n\tAUDIO_OUTPUT_MODULE_URL = URL.createObjectURL(new Blob([`\n\t\tclass AudioOutputProcessor extends AudioWorkletProcessor {\n\t\t\tprocess(inputs, outputs) {\n\t\t\t\tthis.port.postMessage(inputs[0]);\n\t\t\t\treturn true;\n\t\t\t}\n\t\t}\n\n\t\tregisterProcessor(\"audio-output-processor\", AudioOutputProcessor);\n\t`], {type : \"application/javascript\"}));\n\n\treturn AUDIO_OUTPUT_MODULE_URL;\n}\n\n/*\nCallbacks:\n\tondataavailable\n\tonstart - called when recording successfully started\n\tonstop - called when all data finished encoding and was output\n\tonerror - error starting recording\n*/\nexport default class AudioRecorder {\n\tconstructor(options) {\n\t\tthis.options = {\n\t\t\t...DEFAULT_OPTIONS,\n\t\t\t...options\n\t\t};\n\n\t\tthis.state = states.STOPPED;\n\t\tthis.audioContext = null;\n\t\tthis.encoder = null;\n\t\tthis.encodedData = null;\n\t\tthis.stopPromiseResolve = null;\n\t\tthis.stopPromiseReject = null;\n\t\tthis.timer = new Timer();\n\t}\n\t\n\tstatic isRecordingSupported() {\n\t\treturn getAudioContextCtor() && navigator && navigator.mediaDevices && navigator.mediaDevices.getUserMedia;\n\t}\n\t\n\tstatic preload(_workerUrl) {\n\t\tworkerUrl = _workerUrl;\n\t\tWorkerEncoder.preload(workerUrl);\n\t}\n\t\n\t// Will we use AudioWorklet?\n\tuseAudioWorklet() {\n\t\treturn isAudioWorkletSupported() && 
!this.options.forceScriptProcessor;\n\t}\n\t\n\tcreateAndStartEncoder(numberOfChannels) {\n\t\tthis.encoder = new WorkerEncoder({\n\t\t\toriginalSampleRate : this.audioContext.sampleRate,\n\t\t\tnumberOfChannels : numberOfChannels,\n\t\t\tencoderBitRate : this.options.encoderBitRate,\n\t\t\tstreamBufferSize : this.options.streamBufferSize\n\t\t});\n\n\t\tthis.encoder.ondataavailable = (data) => {\n\t\t\tif (this.options.streaming) {\n\t\t\t\tthis.ondataavailable && this.ondataavailable(data);\n\t\t\t} else {\n\t\t\t\tthis.encodedData.push(data);\n\t\t\t}\n\t\t};\n\n\t\tthis.encoder.onstopped = () => {\n\t\t\tthis.state = states.STOPPED;\n\t\t\tlet mp3Blob = this.options.streaming ? undefined : new Blob(this.encodedData, {type : \"audio/mpeg\"});\n\t\t\tthis.onstop && this.onstop(mp3Blob);\n\t\t\tthis.stopPromiseResolve(mp3Blob);\n\t\t};\n\n\t\tthis.encoder.start();\n\t}\n\n\tcreateOutputNode(numberOfChannels) {\t\t\n\t\tif (this.useAudioWorklet()) {\n\t\t\tconsole.log(\"Using AudioWorklet\");\n\n\t\t\tthis.outputNode = new AudioWorkletNode(this.audioContext, \"audio-output-processor\", {numberOfOutputs : 0});\n\n\t\t\tthis.outputNode.port.onmessage = ({data}) => {\n\t\t\t\tif (this.state == states.RECORDING) {\n\t\t\t\t\tthis.encoder.sendData(data);\n\t\t\t\t}\n\t\t\t};\n\t\t} else {\n\t\t\tconsole.log(\"Using ScriptProcessorNode\");\n\t\t\t\n\t\t\tthis.outputNode = this.audioContext.createScriptProcessor(4096, numberOfChannels, numberOfChannels);\n\n\t\t\tthis.outputNode.connect(this.audioContext.destination);\n\t\t\tthis.outputNode.onaudioprocess = (event) => {\n\t\t\t\tif (this.state == states.RECORDING) {\n\t\t\t\t\tlet inputBuffer = event.inputBuffer;\n\t\t\t\t\tlet buffers = [];\n\n\t\t\t\t\tfor (let i = 0; i < inputBuffer.numberOfChannels; i ++) {\n\t\t\t\t\t\tbuffers.push(inputBuffer.getChannelData(i));\n\t\t\t\t\t}\n\n\t\t\t\t\tthis.encoder.sendData(buffers);\n\t\t\t\t}\n\t\t\t};\n\t\t}\n\t}\n\t\n\tcreateAudioNodes(numberOfChannels) 
{\n\t\tthis.createOutputNode(numberOfChannels);\n\t\t\n\t\tthis.recordingGainNode = this.audioContext.createGain();\n\t\tthis.setRecordingGain(this.options.recordingGain);\n\t\tthis.recordingGainNode.connect(this.outputNode);\n\n\t\tthis.sourceNode = this.audioContext.createMediaStreamSource(this.stream);\n\t\tthis.sourceNode.connect(this.recordingGainNode);\n\t}\n\n\tcleanupAudioNodes() {\n\t\tif (this.stream) {\n\t\t\tstopStream(this.stream);\n\t\t\tthis.stream = null;\n\t\t}\n\t\t\n\t\tif (this.useAudioWorklet()) {\n\t\t\tthis.outputNode && (this.outputNode.port.onmessage = null);\n\t\t} else {\n\t\t\tthis.outputNode && (this.outputNode.onaudioprocess = null);\n\t\t}\n\t\t\n\t\tthis.outputNode && this.outputNode.disconnect();\n\t\tthis.recordingGainNode && this.recordingGainNode.disconnect();\n\t\tthis.sourceNode && this.sourceNode.disconnect();\n\t\tthis.audioContext && this.audioContext.close();\n\t}\n\n\tsetRecordingGain(gain) {\n\t\tthis.options.recordingGain = gain;\n\n\t\tif (this.recordingGainNode) {\n\t\t\tthis.recordingGainNode.gain.setTargetAtTime(gain, this.audioContext.currentTime, 0.01);\n\t\t}\n\t}\n\t\n\tget time() {\n\t\treturn this.timer.getTime();\n\t}\n\t\n\t// Get the amount of data left to be encoded.\n\t// Useful to estimate if STOPPING state (encoding still ongoing) will last a while.\n\tgetEncodingQueueSize() {\n\t\treturn this.encoder ? 
this.encoder.getQueuedDataLen() : 0;\n\t}\n\t\n\t// Called after every \"await\" in start(), to check that stop wasn't called\n\t// and we should abandon starting\n\tstoppingCheck() {\n\t\tif (this.state == states.STOPPING) {\n\t\t\tthrow createCancelStartError();\n\t\t}\n\t}\n\n\tasync __start(paused) {\n\t\tif (this.state != states.STOPPED) {\n\t\t\tthrow new Error(\"Called start when not in stopped state\");\n\t\t}\n\t\t\n\t\tif (workerUrl == null) {\n\t\t\tthrow new Error(\"preload was not called on AudioRecorder\");\n\t\t}\n\t\t\n\t\tthis.state = states.STARTING;\n\t\tthis.encodedData = [];\n\t\tthis.stream = null;\n\t\t\n\t\ttry {\n\t\t\tawait WorkerEncoder.waitForWorker(workerUrl);\n\t\t\tthis.stoppingCheck();\n\t\t\t\n\t\t\t// If a constraint is set, pass them, otherwise just pass true\n\t\t\tlet constraints = Object.keys(this.options.constraints).length > 0 ? this.options.constraints : true;\n\t\t\t\n\t\t\tthis.stream = await navigator.mediaDevices.getUserMedia({audio : constraints});\n\t\t\tthis.stoppingCheck();\n\t\t\t\n\t\t\tconst _AudioContext = getAudioContextCtor();\n\t\t\tthis.audioContext = new _AudioContext();\n\t\t\t\n\t\t\tif (this.useAudioWorklet()) {\n\t\t\t\tawait this.audioContext.audioWorklet.addModule(getAudioOutputModuleUrl(), {credentials : \"omit\"});\n\t\t\t\tthis.stoppingCheck();\n\t\t\t}\n\t\t\t\n\t\t\t// Channel count must be gotten from the stream, as it might not have supported\n\t\t\t// the desired amount specified in the constraints\n\t\t\tlet numberOfChannels = getNumberOfChannels(this.stream);\n\t\t\t\n\t\t\t// Successfully recording!\n\t\t\tthis.createAndStartEncoder(numberOfChannels);\n\t\t\tthis.createAudioNodes(numberOfChannels);\n\n\t\t\tif (paused) {\n\t\t\t\tthis.timer.reset();\n\t\t\t\tthis.state = states.PAUSED;\n\t\t\t} else {\n\t\t\t\tthis.timer.resetAndStart();\n\t\t\t\tthis.state = states.RECORDING;\n\t\t\t}\n\n\t\t\tthis.onstart && this.onstart();\n\t\t} catch (error) {\n\t\t\tlet startWasCancelled = this.state 
== states.STOPPING;\n\t\t\tthis.cleanupAudioNodes();\n\t\t\t\n\t\t\t// Reset so can attempt start again\n\t\t\tthis.state = states.STOPPED;\n\t\t\t\n\t\t\t// Reject the stop promise now we have cleaned up and are in STOPPED state and ready to start() again\n\t\t\tif (startWasCancelled) {\n\t\t\t\tthis.stopPromiseReject(error);\n\t\t\t}\n\t\t\t\n\t\t\tthrow error;\n\t\t}\n\t}\n\n\tasync __stop() {\n\t\tthis.timer.stop();\n\t\t\n\t\tif (this.state == states.RECORDING || this.state == states.PAUSED) {\n\t\t\t// Stop recording, but encoding may not have finished yet,\n\t\t\t// so we enter the stopping state.\n\t\t\tthis.state = states.STOPPING;\n\t\t\t\n\t\t\tthis.cleanupAudioNodes();\n\t\t\tthis.encoder.stop();\n\t\t\t\n\t\t\t// Will be resolved later when encoding finishes\n\t\t\treturn new Promise((resolve, reject) => {\n\t\t\t\tthis.stopPromiseResolve = resolve;\n\t\t\t});\n\t\t} else if (this.state == states.STARTING) {\n\t\t\tthis.state = states.STOPPING;\n\t\t\t\n\t\t\t// Will be rejected later when start() has completely finished operation\n\t\t\treturn new Promise((resolve, reject) => {\n\t\t\t\tthis.stopPromiseReject = reject;\n\t\t\t})\n\t\t}\n\t\t\n\t\tthrow new Error(\"Called stop when AudioRecorder was not started\");\n\t}\n\t\n\tstart(paused = false) {\n\t\tlet promise = this.__start(paused);\n\t\t\n\t\tpromise.catch(error => {\n\t\t\t// Don't send CancelStartError to onerror, as it's not *really* an error state\n\t\t\t// Only used as a promise rejection to indicate that starting did not succeed.\n\t\t\tif (error.name != \"CancelStartError\") {\n\t\t\t\tthis.onerror && this.onerror(error);\n\t\t\t}\n\t\t});\n\t\t\n\t\tif (!this.onerror) {\n\t\t\treturn promise;\n\t\t}\n\t}\n\t\n\tstop() {\n\t\tlet promise = this.__stop();\n\t\t\n\t\tpromise.catch(error => {\n\t\t\tif (error.name == \"CancelStartError\") {\n\t\t\t\t// Stop was called before recording even started\n\t\t\t\t// Send a onstop event anyway to indicate that recording can be 
retried.\n\t\t\t\tthis.onstop && this.onstop(this.options.streaming ? undefined : null);\n\t\t\t} else {\n\t\t\t\tthis.onerror && this.onerror(error);\n\t\t\t}\n\t\t});\n\t\t\n\t\tif (!this.onerror) {\n\t\t\treturn promise;\n\t\t}\n\t}\n\n\tpause() {\n\t\tif (this.state == states.RECORDING) {\n\t\t\tthis.state = states.PAUSED;\n\t\t\tthis.timer.stop();\n\t\t}\n\t}\n\n\tresume() {\n\t\tif (this.state == states.PAUSED) {\n\t\t\tthis.state = states.RECORDING;\n\t\t\tthis.timer.start();\n\t\t}\n\t}\n}\n","\nexport function stopStream(stream) {\n\tif (stream.getTracks) {\n\t\tstream.getTracks().forEach(track => track.stop());\n\t} else {\n\t\tstream.stop(); // Deprecated\n\t}\n}\n\n// https://stackoverflow.com/a/9039885\nexport function detectIOS() {\n\treturn [\n\t\t'iPad Simulator',\n\t\t'iPhone Simulator',\n\t\t'iPod Simulator',\n\t\t'iPad',\n\t\t'iPhone',\n\t\t'iPod'\n\t].includes(navigator.platform)\n\t\t// iPad on iOS 13 detection\n\t\t|| (navigator.userAgent.includes(\"Mac\") && \"ontouchend\" in document);\n}\n\nexport function detectSafari() {\n\treturn 
/^((?!chrome|android).)*safari/i.test(navigator.userAgent);\n}\n"],"names":["worker","workerState","workerStateChangeCallbacks","jobCallbacks","notifyWorkerState","newState","callback","loadWorker","workerUrl","test","URL","createObjectURL","Blob","type","Worker","onmessage","event","data","message","jobId","onencoded","srcBufLen","ondataavailable","onstopped","onerror","console","error","WorkerEncoder","constructor","options","this","replace","c","crypto","getRandomValues","Uint8Array","toString","queuedData","preload","waitForWorker","Promise","resolve","reject","push","Error","name","start","postMessage","command","sendData","buffers","length","getQueuedDataLen","stop","Timer","reset","startTime","stoppedTime","Date","now","resetAndStart","getTime","getAudioContextCtor","window","AudioContext","webkitAudioContext","DEFAULT_OPTIONS","recordingGain","encoderBitRate","streaming","streamBufferSize","forceScriptProcessor","constraints","channelCount","autoGainControl","echoCancellation","noiseSuppression","AUDIO_OUTPUT_MODULE_URL","state","audioContext","encoder","encodedData","stopPromiseResolve","stopPromiseReject","timer","isRecordingSupported","navigator","mediaDevices","getUserMedia","_workerUrl","useAudioWorklet","AudioWorklet","includes","platform","userAgent","document","createAndStartEncoder","numberOfChannels","originalSampleRate","sampleRate","mp3Blob","undefined","onstop","createOutputNode","log","outputNode","AudioWorkletNode","numberOfOutputs","port","_ref","createScriptProcessor","connect","destination","onaudioprocess","inputBuffer","i","getChannelData","createAudioNodes","recordingGainNode","createGain","setRecordingGain","sourceNode","createMediaStreamSource","stream","cleanupAudioNodes","getTracks","forEach","track","disconnect","close","gain","setTargetAtTime","currentTime","time","getEncodingQueueSize","stoppingCheck","createCancelStartError","__start","paused","Object","keys","audio","_AudioContext","audioWorklet","addModule","credentials","audio
Tracks","getAudioTracks","trackSettings","getSettings","getNumberOfChannels","onstart","startWasCancelled","__stop","promise","catch","pause","resume"],"mappings":"6OAAA,IAOIA,EAAS,KACTC,EAPQ,EAQRC,EAA6B,GAC7BC,EAAe,CAAA,EAQnB,SAASC,EAAkBC,GAC1BJ,EAAcI,EAEd,IAAK,IAAIC,KAAYJ,EACpBI,IAGDJ,EAA6B,EAC9B,CASA,SAASK,EAAWC,GACf,eAAeC,KAAKD,KACvBA,EALME,IAAIC,gBAAgB,IAAIC,KAAK,CADnB,kBAMoBJ,QALU,CAACK,KAAO,sBAQvDb,EAAS,IAAIc,OAAON,GACpBP,EAvCU,EAyCVD,EAAOe,UAAaC,IACnB,OAAQA,EAAMC,KAAKC,SAClB,IAAK,QACJd,EA3CK,GA4CL,MACD,IAAK,UACAY,EAAMC,KAAKE,SAAShB,GACvBA,EAAaa,EAAMC,KAAKE,OAAOC,UAAUJ,EAAMC,KAAKI,WAErD,MACD,IAAK,OACAL,EAAMC,KAAKE,SAAShB,GACvBA,EAAaa,EAAMC,KAAKE,OAAOG,gBAAgBN,EAAMC,KAAKA,MAE3D,MACD,IAAK,UACAD,EAAMC,KAAKE,SAAShB,GACvBA,EAAaa,EAAMC,KAAKE,OAAOI,cAMnCvB,EAAOwB,QAAWR,IACjBS,QAAQC,MAAM,+CACdtB,EAhEO,GAkET,CAKe,MAAMuB,EACpBC,WAAAA,CAAYC,GACXC,KAAKX,OAhEE,CAAC,MAAK,KAAK,UAAU,MAAOY,QAAQ,SAAUC,IACpDA,EAAIC,OAAOC,gBAAgB,IAAIC,WAAW,IAAI,GAAK,IAAMH,EAAI,GAAGI,SAAS,KAgE1EN,KAAKD,QAAUA,EACfC,KAAKO,WAAa,EAElBlC,EAAa2B,KAAKX,OAAS,CAC1BC,UAAaC,IACZS,KAAKO,YAAchB,GAEpBC,gBAAmBL,IAClBa,KAAKR,iBAAmBQ,KAAKR,gBAAgBL,IAE9CM,UAAY,YACJpB,EAAa2B,KAAKX,OACzBW,KAAKP,WAAaO,KAAKP,aAG1B,CAEA,cAAOe,CAAQ9B,GA9FJ,GA+FNP,GA5FG,GA4FqCA,GAC3CM,EAAWC,EAEb,CAEA,oBAAO+B,CAAc/B,GACpB,OAnGO,GAmGHP,EACIuC,QAAQC,WAtGN,GAyGLxC,GAtGE,GAsGsCA,GAC3CM,EAAWC,GAGL,IAAIgC,QAAQ,CAACC,EAASC,KAC5BxC,EAA2ByC,KAAK,KAC/B,GA7GI,GA6GA1C,EACHwC,QACM,CACN,IAAIf,EAAQ,IAAIkB,MAAM,qBACtBlB,EAAMmB,KAAO,cACbH,EAAOhB,EACR,MAIJ,CAEAoB,KAAAA,GACC9C,EAAO+C,YAAY,CAClBC,QAAU,QACV7B,MAAQW,KAAKX,MACbU,QAAUC,KAAKD,SAEjB,CAEAoB,QAAAA,CAASC,GAEJA,GAAWA,EAAQC,OAAS,GAAKD,EAAQ,GAAGC,OAAS,IACxDrB,KAAKO,YAAca,EAAQ,GAAGC,OAE9BnD,EAAO+C,YAAY,CAClBC,QAAU,OACV7B,MAAQW,KAAKX,MACb+B,QAAUA,IAGb,CAGAE,gBAAAA,GACC,OAAOtB,KAAKO,UACb,CAEAgB,IAAAA,GACCrD,EAAO+C,YAAY,CAClBC,QAAU,OACV7B,MAAQW,KAAKX,OAEf,EC1Jc,MAAMmC,EACpB1B,WAAAA,GACCE,KAAKyB,OACN,CAEAA,KAAAA,GACCzB,KAAK0B,UAAY,KACjB1B,KAAK2B,YAAc,IACpB,CAEAX,KAAAA,GACMhB,KAAK0B,YACT1B,KAAK0B,UAAYE,KAAKC,
OAGnB7B,KAAK2B,cAER3B,KAAK0B,WAAaE,KAAKC,MAAQ7B,KAAK2B,YACpC3B,KAAK2B,YAAc,KAErB,CAEAG,aAAAA,GACC9B,KAAKyB,QACLzB,KAAKgB,OACN,CAEAO,IAAAA,GACMvB,KAAK2B,cACT3B,KAAK2B,YAAcC,KAAKC,MAE1B,CAEAE,OAAAA,GACC,OAAI/B,KAAK0B,UACJ1B,KAAK2B,YACD3B,KAAK2B,YAAc3B,KAAK0B,UAExBE,KAAKC,MAAQ7B,KAAK0B,UAGnB,CAET,ECxCD,SAASM,IACR,OAAOC,OAAOC,cAAgBD,OAAOE,kBACtC,CAQA,MAQMC,EAAkB,CACvBC,cAAgB,EAChBC,eAAiB,GACjBC,WAAY,EACZC,iBAAmB,IACnBC,sBAAuB,EACvBC,YAAc,CACbC,aAAe,EACfC,iBAAkB,EAClBC,kBAAmB,EACnBC,kBAAmB,IAIrB,IAAIpE,EAAY,KAuBZqE,EAA0B,YA4Bf,MACdjD,WAAAA,CAAYC,GACXC,KAAKD,QAAU,IACXqC,KACArC,GAGJC,KAAKgD,MA/EI,EAgFThD,KAAKiD,aAAe,KACpBjD,KAAKkD,QAAU,KACflD,KAAKmD,YAAc,KACnBnD,KAAKoD,mBAAqB,KAC1BpD,KAAKqD,kBAAoB,KACzBrD,KAAKsD,MAAQ,IAAI9B,CAClB,CAEA,2BAAO+B,GACN,OAAOvB,KAAyBwB,WAAaA,UAAUC,cAAgBD,UAAUC,aAAaC,YAC/F,CAEA,cAAOlD,CAAQmD,GACdjF,EAAYiF,EACZ9D,EAAcW,QAAQ9B,EACvB,CAGAkF,eAAAA,GACC,OAvGM3B,OAAO4B,gBCAP,CACN,iBACA,mBACA,iBACA,OACA,SACA,QACCC,SAASN,UAAUO,WAEhBP,UAAUQ,UAAUF,SAAS,QAAU,eAAgBG,YAIrD,iCAAiCtF,KAAK6E,UAAUQ,aD0FjBhE,KAAKD,QAAQ0C,oBACnD,CAEAyB,qBAAAA,CAAsBC,GACrBnE,KAAKkD,QAAU,IAAIrD,EAAc,CAChCuE,mBAAqBpE,KAAKiD,aAAaoB,WACvCF,iBAAmBA,EACnB7B,eAAiBtC,KAAKD,QAAQuC,eAC9BE,iBAAmBxC,KAAKD,QAAQyC,mBAGjCxC,KAAKkD,QAAQ1D,gBAAmBL,IAC3Ba,KAAKD,QAAQwC,UAChBvC,KAAKR,iBAAmBQ,KAAKR,gBAAgBL,GAE7Ca,KAAKmD,YAAYtC,KAAK1B,IAIxBa,KAAKkD,QAAQzD,UAAY,KACxBO,KAAKgD,MAvHG,EAwHR,IAAIsB,EAAUtE,KAAKD,QAAQwC,eAAYgC,EAAY,IAAIzF,KAAKkB,KAAKmD,YAAa,CAACpE,KAAO,eACtFiB,KAAKwE,QAAUxE,KAAKwE,OAAOF,GAC3BtE,KAAKoD,mBAAmBkB,IAGzBtE,KAAKkD,QAAQlC,OACd,CAEAyD,gBAAAA,CAAiBN,GACZnE,KAAK4D,mBACRjE,QAAQ+E,IAAI,sBAEZ1E,KAAK2E,WAAa,IAAIC,iBAAiB5E,KAAKiD,aAAc,yBAA0B,CAAC4B,gBAAkB,IAEvG7E,KAAK2E,WAAWG,KAAK7F,UAAY8F,IAAY,IAAX5F,KAACA,GAAK4F,EArI9B,GAsIL/E,KAAKgD,OACRhD,KAAKkD,QAAQ/B,SAAShC,MAIxBQ,QAAQ+E,IAAI,6BAEZ1E,KAAK2E,WAAa3E,KAAKiD,aAAa+B,sBAAsB,KAAMb,EAAkBA,GAElFnE,KAAK2E,WAAWM,QAAQjF,KAAKiD,aAAaiC,aAC1ClF,KAAK2E,WAAWQ,eAAkBjG,IACjC,GAjJS,GAiJLc,KAAKgD,MAA2B,CACnC,IAAIoC,EAAclG,EAAMkG,YACpBhE,EAAU,GAEd,IAAK,IAAIiE,
EAAI,EAAGA,EAAID,EAAYjB,iBAAkBkB,IACjDjE,EAAQP,KAAKuE,EAAYE,eAAeD,IAGzCrF,KAAKkD,QAAQ/B,SAASC,EACvB,GAGH,CAEAmE,gBAAAA,CAAiBpB,GAChBnE,KAAKyE,iBAAiBN,GAEtBnE,KAAKwF,kBAAoBxF,KAAKiD,aAAawC,aAC3CzF,KAAK0F,iBAAiB1F,KAAKD,QAAQsC,eACnCrC,KAAKwF,kBAAkBP,QAAQjF,KAAK2E,YAEpC3E,KAAK2F,WAAa3F,KAAKiD,aAAa2C,wBAAwB5F,KAAK6F,QACjE7F,KAAK2F,WAAWV,QAAQjF,KAAKwF,kBAC9B,CAEAM,iBAAAA,GCzLM,IAAoBD,ED0LrB7F,KAAK6F,UC1LgBA,ED2Lb7F,KAAK6F,QC1LPE,UACVF,EAAOE,YAAYC,QAAQC,GAASA,EAAM1E,QAE1CsE,EAAOtE,ODwLNvB,KAAK6F,OAAS,MAGX7F,KAAK4D,kBACR5D,KAAK2E,aAAe3E,KAAK2E,WAAWG,KAAK7F,UAAY,MAErDe,KAAK2E,aAAe3E,KAAK2E,WAAWQ,eAAiB,MAGtDnF,KAAK2E,YAAc3E,KAAK2E,WAAWuB,aACnClG,KAAKwF,mBAAqBxF,KAAKwF,kBAAkBU,aACjDlG,KAAK2F,YAAc3F,KAAK2F,WAAWO,aACnClG,KAAKiD,cAAgBjD,KAAKiD,aAAakD,OACxC,CAEAT,gBAAAA,CAAiBU,GAChBpG,KAAKD,QAAQsC,cAAgB+D,EAEzBpG,KAAKwF,mBACRxF,KAAKwF,kBAAkBY,KAAKC,gBAAgBD,EAAMpG,KAAKiD,aAAaqD,YAAa,IAEnF,CAEA,QAAIC,GACH,OAAOvG,KAAKsD,MAAMvB,SACnB,CAIAyE,oBAAAA,GACC,OAAOxG,KAAKkD,QAAUlD,KAAKkD,QAAQ5B,mBAAqB,CACzD,CAIAmF,aAAAA,GACC,GA9MU,GA8MNzG,KAAKgD,MACR,MA5LH,WACC,IAAIpD,EAAQ,IAAIkB,MAAM,iDAEtB,OADAlB,EAAMmB,KAAO,mBACNnB,CACR,CAwLS8G,EAER,CAEA,aAAMC,CAAQC,GACb,GAxNS,GAwNL5G,KAAKgD,MACR,MAAM,IAAIlC,MAAM,0CAGjB,GAAiB,MAAbpC,EACH,MAAM,IAAIoC,MAAM,2CAGjBd,KAAKgD,MA7NK,EA8NVhD,KAAKmD,YAAc,GACnBnD,KAAK6F,OAAS,KAEd,UACOhG,EAAcY,cAAc/B,GAClCsB,KAAKyG,gBAGL,IAAI/D,IAAcmE,OAAOC,KAAK9G,KAAKD,QAAQ2C,aAAarB,OAAS,IAAIrB,KAAKD,QAAQ2C,YAElF1C,KAAK6F,aAAerC,UAAUC,aAAaC,aAAa,CAACqD,MAAQrE,IACjE1C,KAAKyG,gBAEL,MAAMO,EAAgBhF,IACtBhC,KAAKiD,aAAe,IAAI+D,EAEpBhH,KAAK4D,0BACF5D,KAAKiD,aAAagE,aAAaC,UAnMpCnE,IAIJA,EAA0BnE,IAAIC,gBAAgB,IAAIC,KAAK,CAAE,qQASrD,CAACC,KAAO,4BAELgE,GAoLsE,CAACoE,YAAc,SACzFnH,KAAKyG,iBAKN,IAAItC,EA3NP,SAA6B0B,GAC5B,IAAIuB,EAAcvB,EAAOwB,iBAEzB,GAAID,EAAY/F,OAAS,EACxB,MAAM,IAAIP,MAAM,wCAGjB,IAAIwG,EAAgBF,EAAY,GAAGG,cACnC,MAAO,iBAAkBD,EAAgBA,EAAc3E,aAAe,CACvE,CAkN0B6E,CAAoBxH,KAAK6F,QAGhD7F,KAAKkE,sBAAsBC,GAC3BnE,KAAKuF,iBAAiBpB,GAElByC,GACH5G,KAAKsD,MAAM7B,QACXzB,KAAKgD,MA9PC,IAgQNhD,KAAKsD,MAAM
xB,gBACX9B,KAAKgD,MAlQI,GAqQVhD,KAAKyH,SAAWzH,KAAKyH,SACtB,CAAE,MAAO7H,GACR,IAAI8H,EApQK,GAoQe1H,KAAKgD,MAW7B,MAVAhD,KAAK8F,oBAGL9F,KAAKgD,MA5QG,EA+QJ0E,GACH1H,KAAKqD,kBAAkBzD,GAGlBA,CACP,CACD,CAEA,YAAM+H,GAGL,GAFA3H,KAAKsD,MAAM/B,OAvRA,GAyRPvB,KAAKgD,OAxRD,GAwR8BhD,KAAKgD,MAS1C,OANAhD,KAAKgD,MAzRI,EA2RThD,KAAK8F,oBACL9F,KAAKkD,QAAQ3B,OAGN,IAAIb,QAAQ,CAACC,EAASC,KAC5BZ,KAAKoD,mBAAqBzC,IAErB,GAnSG,GAmSCX,KAAKgD,MAIf,OAHAhD,KAAKgD,MAnSI,EAsSF,IAAItC,QAAQ,CAACC,EAASC,KAC5BZ,KAAKqD,kBAAoBzC,IAI3B,MAAM,IAAIE,MAAM,iDACjB,CAEAE,KAAAA,GAAsB,IACjB4G,EAAU5H,KAAK2G,iEAUnB,GARAiB,EAAQC,MAAMjI,IAGK,oBAAdA,EAAMmB,MACTf,KAAKN,SAAWM,KAAKN,QAAQE,MAI1BI,KAAKN,QACT,OAAOkI,CAET,CAEArG,IAAAA,GACC,IAAIqG,EAAU5H,KAAK2H,SAYnB,GAVAC,EAAQC,MAAMjI,IACK,oBAAdA,EAAMmB,KAGTf,KAAKwE,QAAUxE,KAAKwE,OAAOxE,KAAKD,QAAQwC,eAAYgC,EAAY,MAEhEvE,KAAKN,SAAWM,KAAKN,QAAQE,MAI1BI,KAAKN,QACT,OAAOkI,CAET,CAEAE,KAAAA,GAnVY,GAoVP9H,KAAKgD,QACRhD,KAAKgD,MApVE,EAqVPhD,KAAKsD,MAAM/B,OAEb,CAEAwG,MAAAA,GAzVS,GA0VJ/H,KAAKgD,QACRhD,KAAKgD,MA5VK,EA6VVhD,KAAKsD,MAAMtC,QAEb"} -------------------------------------------------------------------------------- /dist/react.mjs: -------------------------------------------------------------------------------- 1 | import { useState, useRef, useEffect } from 'react'; 2 | 3 | let workerStates = { 4 | INACTIVE: 0, 5 | LOADING: 1, 6 | READY: 2, 7 | ERROR: 3 8 | }; 9 | let worker = null; 10 | let workerState = workerStates.INACTIVE; 11 | let workerStateChangeCallbacks = []; 12 | let jobCallbacks = {}; 13 | function uuidv4() { 14 | // https://stackoverflow.com/a/2117523 15 | return ([1e7] + -1e3 + -4e3 + -8e3 + -1e11).replace(/[018]/g, c => (c ^ crypto.getRandomValues(new Uint8Array(1))[0] & 15 >> c / 4).toString(16)); 16 | } 17 | function notifyWorkerState(newState) { 18 | workerState = newState; 19 | for (let callback of workerStateChangeCallbacks) { 20 | callback(); 21 | } 22 | workerStateChangeCallbacks = []; 23 | } 24 | 25 | // This hack required to load worker from another domain 
(e.g. a CDN) 26 | // https://stackoverflow.com/a/62914052 27 | function getWorkerCrossDomainURL(url) { 28 | const content = `importScripts("${url}");`; 29 | return URL.createObjectURL(new Blob([content], { 30 | type: "text/javascript" 31 | })); 32 | } 33 | function loadWorker(workerUrl) { 34 | if (/^https?:\/\//.test(workerUrl)) { 35 | // Is it an absolute URL? Then consider it cross domain. 36 | workerUrl = getWorkerCrossDomainURL(workerUrl); 37 | } 38 | worker = new Worker(workerUrl); 39 | workerState = workerStates.LOADING; 40 | worker.onmessage = event => { 41 | switch (event.data.message) { 42 | case "ready": 43 | notifyWorkerState(workerStates.READY); 44 | break; 45 | case "encoded": 46 | if (event.data.jobId in jobCallbacks) { 47 | jobCallbacks[event.data.jobId].onencoded(event.data.srcBufLen); 48 | } 49 | break; 50 | case "data": 51 | if (event.data.jobId in jobCallbacks) { 52 | jobCallbacks[event.data.jobId].ondataavailable(event.data.data); 53 | } 54 | break; 55 | case "stopped": 56 | if (event.data.jobId in jobCallbacks) { 57 | jobCallbacks[event.data.jobId].onstopped(); 58 | } 59 | break; 60 | } 61 | }; 62 | worker.onerror = event => { 63 | console.error("mp3worker error. 
Is the worker URL correct?"); 64 | notifyWorkerState(workerStates.ERROR); 65 | }; 66 | } 67 | 68 | // Callbacks: 69 | // - ondataavailable 70 | // - onstopped 71 | class WorkerEncoder { 72 | constructor(options) { 73 | this.jobId = uuidv4(); 74 | this.options = options; 75 | this.queuedData = 0; 76 | jobCallbacks[this.jobId] = { 77 | onencoded: srcBufLen => { 78 | this.queuedData -= srcBufLen; 79 | }, 80 | ondataavailable: data => { 81 | this.ondataavailable && this.ondataavailable(data); 82 | }, 83 | onstopped: () => { 84 | delete jobCallbacks[this.jobId]; // Clean up 85 | this.onstopped && this.onstopped(); 86 | } 87 | }; 88 | } 89 | static preload(workerUrl) { 90 | if (workerState == workerStates.INACTIVE || workerState == workerStates.ERROR) { 91 | loadWorker(workerUrl); 92 | } 93 | } 94 | static waitForWorker(workerUrl) { 95 | if (workerState == workerStates.READY) { 96 | return Promise.resolve(); 97 | } else { 98 | // Worker loading already failed, try again... 99 | if (workerState == workerStates.INACTIVE || workerState == workerStates.ERROR) { 100 | loadWorker(workerUrl); 101 | } 102 | return new Promise((resolve, reject) => { 103 | workerStateChangeCallbacks.push(() => { 104 | if (workerState == workerStates.READY) { 105 | resolve(); 106 | } else { 107 | let error = new Error("MP3 worker failed"); 108 | error.name = "WorkerError"; 109 | reject(error); 110 | } 111 | }); 112 | }); 113 | } 114 | } 115 | start() { 116 | worker.postMessage({ 117 | command: "start", 118 | jobId: this.jobId, 119 | options: this.options 120 | }); 121 | } 122 | sendData(buffers) { 123 | // Check for an empty buffer 124 | if (buffers && buffers.length > 0 && buffers[0].length > 0) { 125 | this.queuedData += buffers[0].length; 126 | worker.postMessage({ 127 | command: "data", 128 | jobId: this.jobId, 129 | buffers: buffers 130 | }); 131 | } 132 | } 133 | 134 | // Amount of data that is not yet encoded. 
135 | getQueuedDataLen() { 136 | return this.queuedData; 137 | } 138 | stop() { 139 | worker.postMessage({ 140 | command: "stop", 141 | jobId: this.jobId 142 | }); 143 | } 144 | } 145 | 146 | class Timer { 147 | constructor() { 148 | this.reset(); 149 | } 150 | reset() { 151 | this.startTime = null; // May be modified when resuming, so not the true start time. 152 | this.stoppedTime = null; 153 | } 154 | start() { 155 | if (!this.startTime) { 156 | this.startTime = Date.now(); 157 | } 158 | if (this.stoppedTime) { 159 | // Skip time forward by the time length we were stopped 160 | this.startTime += Date.now() - this.stoppedTime; 161 | this.stoppedTime = null; 162 | } 163 | } 164 | resetAndStart() { 165 | this.reset(); 166 | this.start(); 167 | } 168 | stop() { 169 | if (!this.stoppedTime) { 170 | this.stoppedTime = Date.now(); 171 | } 172 | } 173 | getTime() { 174 | if (this.startTime) { 175 | if (this.stoppedTime) { 176 | return this.stoppedTime - this.startTime; 177 | } else { 178 | return Date.now() - this.startTime; 179 | } 180 | } else { 181 | return 0; 182 | } 183 | } 184 | } 185 | 186 | function stopStream(stream) { 187 | if (stream.getTracks) { 188 | stream.getTracks().forEach(track => track.stop()); 189 | } else { 190 | stream.stop(); // Deprecated 191 | } 192 | } 193 | 194 | // https://stackoverflow.com/a/9039885 195 | function detectIOS() { 196 | return ['iPad Simulator', 'iPhone Simulator', 'iPod Simulator', 'iPad', 'iPhone', 'iPod'].includes(navigator.platform) 197 | // iPad on iOS 13 detection 198 | || navigator.userAgent.includes("Mac") && "ontouchend" in document; 199 | } 200 | function detectSafari() { 201 | return /^((?!chrome|android).)*safari/i.test(navigator.userAgent); 202 | } 203 | 204 | function getAudioContextCtor() { 205 | return window.AudioContext || window.webkitAudioContext; 206 | } 207 | // Don't use audio worklet on iOS or safari, fall back to ScriptProcessor. 
208 | // There are issues with dropped incoming audio data after ~45 seconds. Thus, the resulting audio would be shorter and sped up / glitchy. 209 | // Curiously, these same issues are present if *not using* AudioWorklet on Chrome 210 | function isAudioWorkletSupported() { 211 | return window.AudioWorklet && !detectIOS() && !detectSafari(); 212 | } 213 | const states = { 214 | STOPPED: 0, 215 | RECORDING: 1, 216 | PAUSED: 2, 217 | STARTING: 3, 218 | STOPPING: 4 219 | }; 220 | const DEFAULT_OPTIONS = { 221 | recordingGain: 1, 222 | encoderBitRate: 96, 223 | streaming: false, 224 | streamBufferSize: 50000, 225 | forceScriptProcessor: false, 226 | constraints: { 227 | channelCount: 1, 228 | autoGainControl: true, 229 | echoCancellation: true, 230 | noiseSuppression: true 231 | } 232 | }; 233 | let workerUrl = null; 234 | function createCancelStartError() { 235 | let error = new Error("AudioRecorder start cancelled by call to stop"); 236 | error.name = "CancelStartError"; 237 | return error; 238 | } 239 | function getNumberOfChannels(stream) { 240 | let audioTracks = stream.getAudioTracks(); 241 | if (audioTracks.length < 1) { 242 | throw new Error("No audio tracks in user media stream"); 243 | } 244 | let trackSettings = audioTracks[0].getSettings(); 245 | return "channelCount" in trackSettings ? trackSettings.channelCount : 1; 246 | } 247 | 248 | // Worklet does nothing more than pass the data out, to be actually encoded by a regular Web Worker 249 | // Previously this was rewritten to do the encoding within an AudioWorklet, and it was all very nice and clean 250 | // but apparently doing anything that uses much CPU in a AudioWorklet will cause glitches in some browsers. 251 | // So, it's best to do the encoding in a regular Web Worker. 
252 | let AUDIO_OUTPUT_MODULE_URL = null; 253 | function getAudioOutputModuleUrl() { 254 | if (AUDIO_OUTPUT_MODULE_URL) { 255 | return AUDIO_OUTPUT_MODULE_URL; 256 | } 257 | AUDIO_OUTPUT_MODULE_URL = URL.createObjectURL(new Blob([` 258 | class AudioOutputProcessor extends AudioWorkletProcessor { 259 | process(inputs, outputs) { 260 | this.port.postMessage(inputs[0]); 261 | return true; 262 | } 263 | } 264 | 265 | registerProcessor("audio-output-processor", AudioOutputProcessor); 266 | `], { 267 | type: "application/javascript" 268 | })); 269 | return AUDIO_OUTPUT_MODULE_URL; 270 | } 271 | 272 | /* 273 | Callbacks: 274 | ondataavailable 275 | onstart - called when recording successfully started 276 | onstop - called when all data finished encoding and was output 277 | onerror - error starting recording 278 | */ 279 | class AudioRecorder { 280 | constructor(options) { 281 | this.options = { 282 | ...DEFAULT_OPTIONS, 283 | ...options 284 | }; 285 | this.state = states.STOPPED; 286 | this.audioContext = null; 287 | this.encoder = null; 288 | this.encodedData = null; 289 | this.stopPromiseResolve = null; 290 | this.stopPromiseReject = null; 291 | this.timer = new Timer(); 292 | } 293 | static isRecordingSupported() { 294 | return getAudioContextCtor() && navigator && navigator.mediaDevices && navigator.mediaDevices.getUserMedia; 295 | } 296 | static preload(_workerUrl) { 297 | workerUrl = _workerUrl; 298 | WorkerEncoder.preload(workerUrl); 299 | } 300 | 301 | // Will we use AudioWorklet? 
302 | useAudioWorklet() { 303 | return isAudioWorkletSupported() && !this.options.forceScriptProcessor; 304 | } 305 | createAndStartEncoder(numberOfChannels) { 306 | this.encoder = new WorkerEncoder({ 307 | originalSampleRate: this.audioContext.sampleRate, 308 | numberOfChannels: numberOfChannels, 309 | encoderBitRate: this.options.encoderBitRate, 310 | streamBufferSize: this.options.streamBufferSize 311 | }); 312 | this.encoder.ondataavailable = data => { 313 | if (this.options.streaming) { 314 | this.ondataavailable && this.ondataavailable(data); 315 | } else { 316 | this.encodedData.push(data); 317 | } 318 | }; 319 | this.encoder.onstopped = () => { 320 | this.state = states.STOPPED; 321 | let mp3Blob = this.options.streaming ? undefined : new Blob(this.encodedData, { 322 | type: "audio/mpeg" 323 | }); 324 | this.onstop && this.onstop(mp3Blob); 325 | this.stopPromiseResolve(mp3Blob); 326 | }; 327 | this.encoder.start(); 328 | } 329 | createOutputNode(numberOfChannels) { 330 | if (this.useAudioWorklet()) { 331 | console.log("Using AudioWorklet"); 332 | this.outputNode = new AudioWorkletNode(this.audioContext, "audio-output-processor", { 333 | numberOfOutputs: 0 334 | }); 335 | this.outputNode.port.onmessage = _ref => { 336 | let { 337 | data 338 | } = _ref; 339 | if (this.state == states.RECORDING) { 340 | this.encoder.sendData(data); 341 | } 342 | }; 343 | } else { 344 | console.log("Using ScriptProcessorNode"); 345 | this.outputNode = this.audioContext.createScriptProcessor(4096, numberOfChannels, numberOfChannels); 346 | this.outputNode.connect(this.audioContext.destination); 347 | this.outputNode.onaudioprocess = event => { 348 | if (this.state == states.RECORDING) { 349 | let inputBuffer = event.inputBuffer; 350 | let buffers = []; 351 | for (let i = 0; i < inputBuffer.numberOfChannels; i++) { 352 | buffers.push(inputBuffer.getChannelData(i)); 353 | } 354 | this.encoder.sendData(buffers); 355 | } 356 | }; 357 | } 358 | } 359 | 
createAudioNodes(numberOfChannels) { 360 | this.createOutputNode(numberOfChannels); 361 | this.recordingGainNode = this.audioContext.createGain(); 362 | this.setRecordingGain(this.options.recordingGain); 363 | this.recordingGainNode.connect(this.outputNode); 364 | this.sourceNode = this.audioContext.createMediaStreamSource(this.stream); 365 | this.sourceNode.connect(this.recordingGainNode); 366 | } 367 | cleanupAudioNodes() { 368 | if (this.stream) { 369 | stopStream(this.stream); 370 | this.stream = null; 371 | } 372 | if (this.useAudioWorklet()) { 373 | this.outputNode && (this.outputNode.port.onmessage = null); 374 | } else { 375 | this.outputNode && (this.outputNode.onaudioprocess = null); 376 | } 377 | this.outputNode && this.outputNode.disconnect(); 378 | this.recordingGainNode && this.recordingGainNode.disconnect(); 379 | this.sourceNode && this.sourceNode.disconnect(); 380 | this.audioContext && this.audioContext.close(); 381 | } 382 | setRecordingGain(gain) { 383 | this.options.recordingGain = gain; 384 | if (this.recordingGainNode) { 385 | this.recordingGainNode.gain.setTargetAtTime(gain, this.audioContext.currentTime, 0.01); 386 | } 387 | } 388 | get time() { 389 | return this.timer.getTime(); 390 | } 391 | 392 | // Get the amount of data left to be encoded. 393 | // Useful to estimate if STOPPING state (encoding still ongoing) will last a while. 394 | getEncodingQueueSize() { 395 | return this.encoder ? 
this.encoder.getQueuedDataLen() : 0; 396 | } 397 | 398 | // Called after every "await" in start(), to check that stop wasn't called 399 | // and we should abandon starting 400 | stoppingCheck() { 401 | if (this.state == states.STOPPING) { 402 | throw createCancelStartError(); 403 | } 404 | } 405 | async __start(paused) { 406 | if (this.state != states.STOPPED) { 407 | throw new Error("Called start when not in stopped state"); 408 | } 409 | if (workerUrl == null) { 410 | throw new Error("preload was not called on AudioRecorder"); 411 | } 412 | this.state = states.STARTING; 413 | this.encodedData = []; 414 | this.stream = null; 415 | try { 416 | await WorkerEncoder.waitForWorker(workerUrl); 417 | this.stoppingCheck(); 418 | 419 | // If a constraint is set, pass them, otherwise just pass true 420 | let constraints = Object.keys(this.options.constraints).length > 0 ? this.options.constraints : true; 421 | this.stream = await navigator.mediaDevices.getUserMedia({ 422 | audio: constraints 423 | }); 424 | this.stoppingCheck(); 425 | const _AudioContext = getAudioContextCtor(); 426 | this.audioContext = new _AudioContext(); 427 | if (this.useAudioWorklet()) { 428 | await this.audioContext.audioWorklet.addModule(getAudioOutputModuleUrl(), { 429 | credentials: "omit" 430 | }); 431 | this.stoppingCheck(); 432 | } 433 | 434 | // Channel count must be gotten from the stream, as it might not have supported 435 | // the desired amount specified in the constraints 436 | let numberOfChannels = getNumberOfChannels(this.stream); 437 | 438 | // Successfully recording! 
439 | this.createAndStartEncoder(numberOfChannels); 440 | this.createAudioNodes(numberOfChannels); 441 | if (paused) { 442 | this.timer.reset(); 443 | this.state = states.PAUSED; 444 | } else { 445 | this.timer.resetAndStart(); 446 | this.state = states.RECORDING; 447 | } 448 | this.onstart && this.onstart(); 449 | } catch (error) { 450 | let startWasCancelled = this.state == states.STOPPING; 451 | this.cleanupAudioNodes(); 452 | 453 | // Reset so can attempt start again 454 | this.state = states.STOPPED; 455 | 456 | // Reject the stop promise now we have cleaned up and are in STOPPED state and ready to start() again 457 | if (startWasCancelled) { 458 | this.stopPromiseReject(error); 459 | } 460 | throw error; 461 | } 462 | } 463 | async __stop() { 464 | this.timer.stop(); 465 | if (this.state == states.RECORDING || this.state == states.PAUSED) { 466 | // Stop recording, but encoding may not have finished yet, 467 | // so we enter the stopping state. 468 | this.state = states.STOPPING; 469 | this.cleanupAudioNodes(); 470 | this.encoder.stop(); 471 | 472 | // Will be resolved later when encoding finishes 473 | return new Promise((resolve, reject) => { 474 | this.stopPromiseResolve = resolve; 475 | }); 476 | } else if (this.state == states.STARTING) { 477 | this.state = states.STOPPING; 478 | 479 | // Will be rejected later when start() has completely finished operation 480 | return new Promise((resolve, reject) => { 481 | this.stopPromiseReject = reject; 482 | }); 483 | } 484 | throw new Error("Called stop when AudioRecorder was not started"); 485 | } 486 | start() { 487 | let paused = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false; 488 | let promise = this.__start(paused); 489 | promise.catch(error => { 490 | // Don't send CancelStartError to onerror, as it's not *really* an error state 491 | // Only used as a promise rejection to indicate that starting did not succeed. 
492 | if (error.name != "CancelStartError") { 493 | this.onerror && this.onerror(error); 494 | } 495 | }); 496 | if (!this.onerror) { 497 | return promise; 498 | } 499 | } 500 | stop() { 501 | let promise = this.__stop(); 502 | promise.catch(error => { 503 | if (error.name == "CancelStartError") { 504 | // Stop was called before recording even started 505 | // Send a onstop event anyway to indicate that recording can be retried. 506 | this.onstop && this.onstop(this.options.streaming ? undefined : null); 507 | } else { 508 | this.onerror && this.onerror(error); 509 | } 510 | }); 511 | if (!this.onerror) { 512 | return promise; 513 | } 514 | } 515 | pause() { 516 | if (this.state == states.RECORDING) { 517 | this.state = states.PAUSED; 518 | this.timer.stop(); 519 | } 520 | } 521 | resume() { 522 | if (this.state == states.PAUSED) { 523 | this.state = states.RECORDING; 524 | this.timer.start(); 525 | } 526 | } 527 | } 528 | 529 | const RecorderStates = { 530 | INITIAL: 0, 531 | STARTING: 1, 532 | RECORDING: 2, 533 | PAUSED: 3, 534 | ENCODING: 4, 535 | COMPLETE: 5, 536 | ERROR: 6, 537 | COUNTDOWN: 7 538 | }; 539 | function useInterval(updateFunc) { 540 | let timeStep = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 1000 / 60.0; 541 | const intervalIdRef = useRef(null); 542 | useEffect(() => { 543 | intervalIdRef.current = setInterval(updateFunc, timeStep); 544 | return () => { 545 | intervalIdRef.current && clearInterval(intervalIdRef.current); 546 | }; 547 | }, []); 548 | } 549 | function useSimpleAudioRecorder() { 550 | let { 551 | workerUrl, 552 | onDataAvailable, 553 | onComplete, 554 | onError, 555 | options, 556 | cleanup = false, 557 | timeUpdateStep = 111, 558 | countdown = 0 559 | } = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : {}; 560 | const [recorderState, setRecorderState] = useState(RecorderStates.INITIAL); 561 | const [mp3Blobs, setMp3Blobs] = useState([]); 562 | const [mp3Urls, setMp3Urls] = useState([]); 563 | const [error, setError] = useState(null); 564 | const [time, setTime] = useState(0); 565 | const [countdownStartTime, setCountdownStartTime] = useState(null); 566 | const [countdownTimeLeft, setCountdownTimeLeft] = useState(0); 567 | const recorderStateRef = useRef(recorderState); 568 | const countdownStartTimeRef = useRef(0); 569 | const recorderRef = useRef(null); 570 | const audioDataRef = useRef(null); 571 | const countdownTimerRef = useRef(null); 572 | recorderStateRef.current = recorderState; 573 | countdownStartTimeRef.current = countdownStartTime; 574 | function clearCountdownTimeout() { 575 | if (countdownTimerRef.current != null) { 576 | clearTimeout(countdownTimerRef.current); 577 | countdownTimerRef.current = null; 578 | } 579 | } 580 | useEffect(() => { 581 | if (workerUrl) { 582 | AudioRecorder.preload(workerUrl); 583 | } 584 | return () => { 585 | clearCountdownTimeout(); 586 | if (recorderRef.current) { 587 | recorderRef.current.ondataavailable = null; 588 | recorderRef.current.onstart = null; 589 | recorderRef.current.onstop = null; 590 | recorderRef.current.onerror = null; 591 | recorderRef.current.stop(); 592 | recorderRef.current = null; 593 | } 594 | if (cleanup) { 595 | mp3Urls.forEach(URL.revokeObjectURL); 596 | } 597 | }; 598 | }, []); 599 | useInterval(() => { 600 | recorderRef.current && setTime(recorderRef.current.time); 601 | if (recorderStateRef.current == RecorderStates.COUNTDOWN) { 602 | setCountdownTimeLeft(Math.max(0, countdown - (Date.now() - countdownStartTimeRef.current))); 603 | } 604 | }, timeUpdateStep); 605 | function start() { 606 | audioDataRef.current = []; 607 | recorderRef.current = new AudioRecorder({ 608 | ...options, 609 | streaming: true 610 | }); 611 | setRecorderState(RecorderStates.STARTING); 612 | 
setCountdownTimeLeft(countdown); 613 | recorderRef.current.ondataavailable = data => { 614 | audioDataRef.current.push(data); 615 | onDataAvailable && onDataAvailable(data); 616 | }; 617 | recorderRef.current.onstart = () => { 618 | if (countdown > 0) { 619 | setRecorderState(RecorderStates.COUNTDOWN); 620 | setCountdownStartTime(Date.now()); 621 | countdownTimerRef.current = setTimeout(() => { 622 | if (recorderStateRef.current == RecorderStates.COUNTDOWN) { 623 | recorderRef.current.resume(); 624 | setRecorderState(RecorderStates.RECORDING); 625 | setCountdownTimeLeft(0); 626 | } 627 | }, countdown); 628 | } else { 629 | setRecorderState(RecorderStates.RECORDING); 630 | } 631 | setError(null); 632 | }; 633 | recorderRef.current.onstop = () => { 634 | // Combine all the mp3 data chunks from the audioData array into a Blob 635 | const mp3Blob = new Blob(audioDataRef.current, { 636 | type: "audio/mpeg" 637 | }); 638 | const mp3Url = URL.createObjectURL(mp3Blob); 639 | setRecorderState(RecorderStates.COMPLETE); 640 | setMp3Blobs([...mp3Blobs, mp3Blob]); 641 | setMp3Urls([...mp3Urls, mp3Url]); 642 | onComplete && onComplete({ 643 | mp3Blob, 644 | mp3Url 645 | }); 646 | }; 647 | recorderRef.current.onerror = error => { 648 | setRecorderState(RecorderStates.ERROR); 649 | setError(error); 650 | onError && onError(error); 651 | }; 652 | recorderRef.current.start(countdown > 0); 653 | } 654 | function stop() { 655 | clearCountdownTimeout(); 656 | if (recorderRef.current.getEncodingQueueSize() > 1000) { 657 | // If there's a fair amount of data left, we'll enter the ENCODING state. 
658 | // (so a spinner or something could be shown) 659 | setRecorderState(RecorderStates.ENCODING); 660 | } 661 | recorderRef.current.stop(); 662 | } 663 | function pause() { 664 | if (recorderStateRef.current == RecorderStates.RECORDING) { 665 | recorderRef.current.pause(); 666 | setRecorderState(RecorderStates.PAUSED); 667 | } 668 | } 669 | function resume() { 670 | if (recorderStateRef.current == RecorderStates.PAUSED) { 671 | recorderRef.current.resume(); 672 | setRecorderState(RecorderStates.RECORDING); 673 | } 674 | } 675 | const props = { 676 | recorderState 677 | }; 678 | return { 679 | error, 680 | errorStr: error ? error.toString() : null, 681 | time, 682 | countdownTimeLeft, 683 | mp3Blobs, 684 | mp3Urls, 685 | mp3Blob: mp3Blobs.at(-1), 686 | mp3Url: mp3Urls.at(-1), 687 | start, 688 | stop, 689 | pause, 690 | resume, 691 | ...props, 692 | getProps: () => props 693 | }; 694 | } 695 | function SimpleAudioRecorder(_ref) { 696 | let { 697 | recorderState, 698 | viewInitial, 699 | viewStarting, 700 | viewCountdown, 701 | viewRecording, 702 | viewPaused, 703 | viewEncoding, 704 | viewComplete, 705 | viewError 706 | } = _ref; 707 | // Only viewInitial and viewRecording are required. 708 | // Others will default to one of viewInitial or viewRecording if not specified, for a simpler UI. 709 | 710 | // if viewStarting is not set, we fallback first to viewCountdown, and then to viewRecording 711 | viewStarting = viewStarting ?? viewCountdown ?? viewRecording; 712 | viewCountdown = viewCountdown ?? viewRecording; 713 | viewPaused = viewPaused ?? viewInitial; 714 | viewEncoding = viewEncoding ?? viewComplete; 715 | viewComplete = viewComplete ?? viewInitial; 716 | viewError = viewError ?? 
viewInitial; 717 | const stateMap = new Map(); 718 | stateMap.set(RecorderStates.INITIAL, viewInitial); 719 | stateMap.set(RecorderStates.STARTING, viewStarting); 720 | stateMap.set(RecorderStates.COUNTDOWN, viewCountdown); 721 | stateMap.set(RecorderStates.RECORDING, viewRecording); 722 | stateMap.set(RecorderStates.PAUSED, viewPaused); 723 | stateMap.set(RecorderStates.ENCODING, viewEncoding); 724 | stateMap.set(RecorderStates.COMPLETE, viewComplete); 725 | stateMap.set(RecorderStates.ERROR, viewError); 726 | return stateMap.get(recorderState) ?? RecorderStates.INITIAL; 727 | } 728 | function preloadWorker(workerUrl) { 729 | AudioRecorder.preload(workerUrl); 730 | } 731 | 732 | export { RecorderStates, SimpleAudioRecorder, preloadWorker, useSimpleAudioRecorder }; 733 | //# sourceMappingURL=react.mjs.map 734 | -------------------------------------------------------------------------------- /dist/react.cjs: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var react = require('react'); 4 | 5 | let workerStates = { 6 | INACTIVE: 0, 7 | LOADING: 1, 8 | READY: 2, 9 | ERROR: 3 10 | }; 11 | let worker = null; 12 | let workerState = workerStates.INACTIVE; 13 | let workerStateChangeCallbacks = []; 14 | let jobCallbacks = {}; 15 | function uuidv4() { 16 | // https://stackoverflow.com/a/2117523 17 | return ([1e7] + -1e3 + -4e3 + -8e3 + -1e11).replace(/[018]/g, c => (c ^ crypto.getRandomValues(new Uint8Array(1))[0] & 15 >> c / 4).toString(16)); 18 | } 19 | function notifyWorkerState(newState) { 20 | workerState = newState; 21 | for (let callback of workerStateChangeCallbacks) { 22 | callback(); 23 | } 24 | workerStateChangeCallbacks = []; 25 | } 26 | 27 | // This hack required to load worker from another domain (e.g. 
a CDN) 28 | // https://stackoverflow.com/a/62914052 29 | function getWorkerCrossDomainURL(url) { 30 | const content = `importScripts("${url}");`; 31 | return URL.createObjectURL(new Blob([content], { 32 | type: "text/javascript" 33 | })); 34 | } 35 | function loadWorker(workerUrl) { 36 | if (/^https?:\/\//.test(workerUrl)) { 37 | // Is it an absolute URL? Then consider it cross domain. 38 | workerUrl = getWorkerCrossDomainURL(workerUrl); 39 | } 40 | worker = new Worker(workerUrl); 41 | workerState = workerStates.LOADING; 42 | worker.onmessage = event => { 43 | switch (event.data.message) { 44 | case "ready": 45 | notifyWorkerState(workerStates.READY); 46 | break; 47 | case "encoded": 48 | if (event.data.jobId in jobCallbacks) { 49 | jobCallbacks[event.data.jobId].onencoded(event.data.srcBufLen); 50 | } 51 | break; 52 | case "data": 53 | if (event.data.jobId in jobCallbacks) { 54 | jobCallbacks[event.data.jobId].ondataavailable(event.data.data); 55 | } 56 | break; 57 | case "stopped": 58 | if (event.data.jobId in jobCallbacks) { 59 | jobCallbacks[event.data.jobId].onstopped(); 60 | } 61 | break; 62 | } 63 | }; 64 | worker.onerror = event => { 65 | console.error("mp3worker error. 
Is the worker URL correct?"); 66 | notifyWorkerState(workerStates.ERROR); 67 | }; 68 | } 69 | 70 | // Callbacks: 71 | // - ondataavailable 72 | // - onstopped 73 | class WorkerEncoder { 74 | constructor(options) { 75 | this.jobId = uuidv4(); 76 | this.options = options; 77 | this.queuedData = 0; 78 | jobCallbacks[this.jobId] = { 79 | onencoded: srcBufLen => { 80 | this.queuedData -= srcBufLen; 81 | }, 82 | ondataavailable: data => { 83 | this.ondataavailable && this.ondataavailable(data); 84 | }, 85 | onstopped: () => { 86 | delete jobCallbacks[this.jobId]; // Clean up 87 | this.onstopped && this.onstopped(); 88 | } 89 | }; 90 | } 91 | static preload(workerUrl) { 92 | if (workerState == workerStates.INACTIVE || workerState == workerStates.ERROR) { 93 | loadWorker(workerUrl); 94 | } 95 | } 96 | static waitForWorker(workerUrl) { 97 | if (workerState == workerStates.READY) { 98 | return Promise.resolve(); 99 | } else { 100 | // Worker loading already failed, try again... 101 | if (workerState == workerStates.INACTIVE || workerState == workerStates.ERROR) { 102 | loadWorker(workerUrl); 103 | } 104 | return new Promise((resolve, reject) => { 105 | workerStateChangeCallbacks.push(() => { 106 | if (workerState == workerStates.READY) { 107 | resolve(); 108 | } else { 109 | let error = new Error("MP3 worker failed"); 110 | error.name = "WorkerError"; 111 | reject(error); 112 | } 113 | }); 114 | }); 115 | } 116 | } 117 | start() { 118 | worker.postMessage({ 119 | command: "start", 120 | jobId: this.jobId, 121 | options: this.options 122 | }); 123 | } 124 | sendData(buffers) { 125 | // Check for an empty buffer 126 | if (buffers && buffers.length > 0 && buffers[0].length > 0) { 127 | this.queuedData += buffers[0].length; 128 | worker.postMessage({ 129 | command: "data", 130 | jobId: this.jobId, 131 | buffers: buffers 132 | }); 133 | } 134 | } 135 | 136 | // Amount of data that is not yet encoded. 
137 | getQueuedDataLen() { 138 | return this.queuedData; 139 | } 140 | stop() { 141 | worker.postMessage({ 142 | command: "stop", 143 | jobId: this.jobId 144 | }); 145 | } 146 | } 147 | 148 | class Timer { 149 | constructor() { 150 | this.reset(); 151 | } 152 | reset() { 153 | this.startTime = null; // May be modified when resuming, so not the true start time. 154 | this.stoppedTime = null; 155 | } 156 | start() { 157 | if (!this.startTime) { 158 | this.startTime = Date.now(); 159 | } 160 | if (this.stoppedTime) { 161 | // Skip time forward by the time length we were stopped 162 | this.startTime += Date.now() - this.stoppedTime; 163 | this.stoppedTime = null; 164 | } 165 | } 166 | resetAndStart() { 167 | this.reset(); 168 | this.start(); 169 | } 170 | stop() { 171 | if (!this.stoppedTime) { 172 | this.stoppedTime = Date.now(); 173 | } 174 | } 175 | getTime() { 176 | if (this.startTime) { 177 | if (this.stoppedTime) { 178 | return this.stoppedTime - this.startTime; 179 | } else { 180 | return Date.now() - this.startTime; 181 | } 182 | } else { 183 | return 0; 184 | } 185 | } 186 | } 187 | 188 | function stopStream(stream) { 189 | if (stream.getTracks) { 190 | stream.getTracks().forEach(track => track.stop()); 191 | } else { 192 | stream.stop(); // Deprecated 193 | } 194 | } 195 | 196 | // https://stackoverflow.com/a/9039885 197 | function detectIOS() { 198 | return ['iPad Simulator', 'iPhone Simulator', 'iPod Simulator', 'iPad', 'iPhone', 'iPod'].includes(navigator.platform) 199 | // iPad on iOS 13 detection 200 | || navigator.userAgent.includes("Mac") && "ontouchend" in document; 201 | } 202 | function detectSafari() { 203 | return /^((?!chrome|android).)*safari/i.test(navigator.userAgent); 204 | } 205 | 206 | function getAudioContextCtor() { 207 | return window.AudioContext || window.webkitAudioContext; 208 | } 209 | // Don't use audio worklet on iOS or safari, fall back to ScriptProcessor. 
210 | // There are issues with dropped incoming audio data after ~45 seconds. Thus, the resulting audio would be shorter and sped up / glitchy. 211 | // Curiously, these same issues are present if *not using* AudioWorklet on Chrome 212 | function isAudioWorkletSupported() { 213 | return window.AudioWorklet && !detectIOS() && !detectSafari(); 214 | } 215 | const states = { 216 | STOPPED: 0, 217 | RECORDING: 1, 218 | PAUSED: 2, 219 | STARTING: 3, 220 | STOPPING: 4 221 | }; 222 | const DEFAULT_OPTIONS = { 223 | recordingGain: 1, 224 | encoderBitRate: 96, 225 | streaming: false, 226 | streamBufferSize: 50000, 227 | forceScriptProcessor: false, 228 | constraints: { 229 | channelCount: 1, 230 | autoGainControl: true, 231 | echoCancellation: true, 232 | noiseSuppression: true 233 | } 234 | }; 235 | let workerUrl = null; 236 | function createCancelStartError() { 237 | let error = new Error("AudioRecorder start cancelled by call to stop"); 238 | error.name = "CancelStartError"; 239 | return error; 240 | } 241 | function getNumberOfChannels(stream) { 242 | let audioTracks = stream.getAudioTracks(); 243 | if (audioTracks.length < 1) { 244 | throw new Error("No audio tracks in user media stream"); 245 | } 246 | let trackSettings = audioTracks[0].getSettings(); 247 | return "channelCount" in trackSettings ? trackSettings.channelCount : 1; 248 | } 249 | 250 | // Worklet does nothing more than pass the data out, to be actually encoded by a regular Web Worker 251 | // Previously this was rewritten to do the encoding within an AudioWorklet, and it was all very nice and clean 252 | // but apparently doing anything that uses much CPU in a AudioWorklet will cause glitches in some browsers. 253 | // So, it's best to do the encoding in a regular Web Worker. 
// Cached blob: URL for the inline AudioWorklet module below; created once on
// first use and reused for every recorder instance.
let AUDIO_OUTPUT_MODULE_URL = null;
function getAudioOutputModuleUrl() {
  if (AUDIO_OUTPUT_MODULE_URL) {
    return AUDIO_OUTPUT_MODULE_URL;
  }
  // The worklet is built from an inline source string so no extra file needs
  // to be served. The processor only forwards the raw input channel data out
  // of the audio rendering thread via its MessagePort; encoding happens in a
  // regular Web Worker (see comment above).
  AUDIO_OUTPUT_MODULE_URL = URL.createObjectURL(new Blob([`
class AudioOutputProcessor extends AudioWorkletProcessor {
	process(inputs, outputs) {
		this.port.postMessage(inputs[0]);
		return true;
	}
}

registerProcessor("audio-output-processor", AudioOutputProcessor);
`], {
    type: "application/javascript"
  }));
  return AUDIO_OUTPUT_MODULE_URL;
}

/*
Callbacks:
	ondataavailable - receives encoded mp3 chunks (streaming mode only)
	onstart - called when recording successfully started
	onstop - called when all data finished encoding and was output
	onerror - error starting recording
*/
class AudioRecorder {
  constructor(options) {
    // Shallow-merge user options over the defaults (later keys win).
    this.options = {
      ...DEFAULT_OPTIONS,
      ...options
    };
    this.state = states.STOPPED;
    this.audioContext = null;
    this.encoder = null;
    this.encodedData = null; // accumulated mp3 chunks when not streaming
    // Settled later: resolve when encoding finishes, reject if start() is cancelled.
    this.stopPromiseResolve = null;
    this.stopPromiseReject = null;
    this.timer = new Timer();
  }
  // True when the environment exposes the APIs recording needs.
  static isRecordingSupported() {
    return getAudioContextCtor() && navigator && navigator.mediaDevices && navigator.mediaDevices.getUserMedia;
  }
  // Must be called once before start(); remembers the worker script URL and
  // warms up the shared encoder worker.
  static preload(_workerUrl) {
    workerUrl = _workerUrl;
    WorkerEncoder.preload(workerUrl);
  }

  // Will we use AudioWorklet?
  useAudioWorklet() {
    return isAudioWorkletSupported() && !this.options.forceScriptProcessor;
  }
  // Create the worker-backed mp3 encoder and wire up its callbacks.
  createAndStartEncoder(numberOfChannels) {
    this.encoder = new WorkerEncoder({
      originalSampleRate: this.audioContext.sampleRate,
      numberOfChannels: numberOfChannels,
      encoderBitRate: this.options.encoderBitRate,
      streamBufferSize: this.options.streamBufferSize
    });
    this.encoder.ondataavailable = data => {
      if (this.options.streaming) {
        // Streaming mode: hand chunks straight to the caller.
        this.ondataavailable && this.ondataavailable(data);
      } else {
        // Non-streaming: accumulate chunks for the final Blob built in onstopped.
        this.encodedData.push(data);
      }
    };
    this.encoder.onstopped = () => {
      // All queued audio has been encoded; recording is fully finished.
      this.state = states.STOPPED;
      let mp3Blob = this.options.streaming ? undefined : new Blob(this.encodedData, {
        type: "audio/mpeg"
      });
      this.onstop && this.onstop(mp3Blob);
      this.stopPromiseResolve(mp3Blob);
    };
    this.encoder.start();
  }
  // Create the node that taps raw PCM out of the graph: AudioWorklet where
  // supported, otherwise the deprecated ScriptProcessorNode fallback.
  createOutputNode(numberOfChannels) {
    if (this.useAudioWorklet()) {
      console.log("Using AudioWorklet");
      this.outputNode = new AudioWorkletNode(this.audioContext, "audio-output-processor", {
        numberOfOutputs: 0
      });
      this.outputNode.port.onmessage = _ref => {
        let {
          data
        } = _ref;
        // Only forward audio while actively recording (drops data when paused/stopping).
        if (this.state == states.RECORDING) {
          this.encoder.sendData(data);
        }
      };
    } else {
      console.log("Using ScriptProcessorNode");
      this.outputNode = this.audioContext.createScriptProcessor(4096, numberOfChannels, numberOfChannels);
      // ScriptProcessorNode must be connected to the destination for onaudioprocess to fire.
      this.outputNode.connect(this.audioContext.destination);
      this.outputNode.onaudioprocess = event => {
        if (this.state == states.RECORDING) {
          let inputBuffer = event.inputBuffer;
          let buffers = [];
          for (let i = 0; i < inputBuffer.numberOfChannels; i++) {
            buffers.push(inputBuffer.getChannelData(i));
          }
          this.encoder.sendData(buffers);
        }
      };
    }
  }

  // Wire the graph: mic stream source -> gain -> output tap.
  createAudioNodes(numberOfChannels) {
    this.createOutputNode(numberOfChannels);
    this.recordingGainNode = this.audioContext.createGain();
    this.setRecordingGain(this.options.recordingGain);
    this.recordingGainNode.connect(this.outputNode);
    this.sourceNode = this.audioContext.createMediaStreamSource(this.stream);
    this.sourceNode.connect(this.recordingGainNode);
  }
  // Tear down the graph, the mic stream and the AudioContext. Safe to call
  // with partially-created state (every step is guarded), as happens when
  // start() fails midway.
  cleanupAudioNodes() {
    if (this.stream) {
      stopStream(this.stream);
      this.stream = null;
    }
    if (this.useAudioWorklet()) {
      this.outputNode && (this.outputNode.port.onmessage = null);
    } else {
      this.outputNode && (this.outputNode.onaudioprocess = null);
    }
    this.outputNode && this.outputNode.disconnect();
    this.recordingGainNode && this.recordingGainNode.disconnect();
    this.sourceNode && this.sourceNode.disconnect();
    this.audioContext && this.audioContext.close();
  }
  // Apply gain now (if recording) and remember it for future recordings; the
  // short time constant smooths the change to avoid clicks.
  setRecordingGain(gain) {
    this.options.recordingGain = gain;
    if (this.recordingGainNode) {
      this.recordingGainNode.gain.setTargetAtTime(gain, this.audioContext.currentTime, 0.01);
    }
  }
  // Elapsed recording time in milliseconds (paused periods excluded by Timer).
  get time() {
    return this.timer.getTime();
  }

  // Get the amount of data left to be encoded.
  // Useful to estimate if STOPPING state (encoding still ongoing) will last a while.
  getEncodingQueueSize() {
    return this.encoder ? this.encoder.getQueuedDataLen() : 0;
  }

  // Called after every "await" in start(), to check that stop wasn't called
  // and we should abandon starting
  stoppingCheck() {
    if (this.state == states.STOPPING) {
      throw createCancelStartError();
    }
  }
  async __start(paused) {
    if (this.state != states.STOPPED) {
      throw new Error("Called start when not in stopped state");
    }
    if (workerUrl == null) {
      throw new Error("preload was not called on AudioRecorder");
    }
    this.state = states.STARTING;
    this.encodedData = [];
    this.stream = null;
    try {
      await WorkerEncoder.waitForWorker(workerUrl);
      this.stoppingCheck();

      // If a constraint is set, pass them, otherwise just pass true
      let constraints = Object.keys(this.options.constraints).length > 0 ? this.options.constraints : true;
      this.stream = await navigator.mediaDevices.getUserMedia({
        audio: constraints
      });
      this.stoppingCheck();
      const _AudioContext = getAudioContextCtor();
      this.audioContext = new _AudioContext();
      if (this.useAudioWorklet()) {
        await this.audioContext.audioWorklet.addModule(getAudioOutputModuleUrl(), {
          credentials: "omit"
        });
        this.stoppingCheck();
      }

      // Channel count must be gotten from the stream, as it might not have supported
      // the desired amount specified in the constraints
      let numberOfChannels = getNumberOfChannels(this.stream);

      // Successfully recording!
      this.createAndStartEncoder(numberOfChannels);
      this.createAudioNodes(numberOfChannels);
      if (paused) {
        // Start in the PAUSED state (used e.g. for countdown recording).
        this.timer.reset();
        this.state = states.PAUSED;
      } else {
        this.timer.resetAndStart();
        this.state = states.RECORDING;
      }
      this.onstart && this.onstart();
    } catch (error) {
      // A CancelStartError thrown by stoppingCheck() also lands here.
      let startWasCancelled = this.state == states.STOPPING;
      this.cleanupAudioNodes();

      // Reset so can attempt start again
      this.state = states.STOPPED;

      // Reject the stop promise now we have cleaned up and are in STOPPED state and ready to start() again
      if (startWasCancelled) {
        this.stopPromiseReject(error);
      }
      throw error;
    }
  }
  async __stop() {
    this.timer.stop();
    if (this.state == states.RECORDING || this.state == states.PAUSED) {
      // Stop recording, but encoding may not have finished yet,
      // so we enter the stopping state.
      this.state = states.STOPPING;
      this.cleanupAudioNodes();
      this.encoder.stop();

      // Will be resolved later when encoding finishes
      return new Promise((resolve, reject) => {
        this.stopPromiseResolve = resolve;
      });
    } else if (this.state == states.STARTING) {
      // Startup still in flight; flag it for cancellation (see stoppingCheck).
      this.state = states.STOPPING;

      // Will be rejected later when start() has completely finished operation
      return new Promise((resolve, reject) => {
        this.stopPromiseReject = reject;
      });
    }
    throw new Error("Called stop when AudioRecorder was not started");
  }
  // Begin recording, optionally starting paused. Returns a promise only when
  // no onerror callback is set - callers choose either the promise-style or
  // the callback-style API, not both.
  start() {
    let paused = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : false;
    let promise = this.__start(paused);
    promise.catch(error => {
      // Don't send CancelStartError to onerror, as it's not *really* an error state
      // Only used as a promise rejection to indicate that starting did not succeed.
      if (error.name != "CancelStartError") {
        this.onerror && this.onerror(error);
      }
    });
    if (!this.onerror) {
      return promise;
    }
  }
  // Stop recording. Like start(), returns the promise only when no onerror
  // callback is set.
  stop() {
    let promise = this.__stop();
    promise.catch(error => {
      if (error.name == "CancelStartError") {
        // Stop was called before recording even started
        // Send a onstop event anyway to indicate that recording can be retried.
        this.onstop && this.onstop(this.options.streaming ? undefined : null);
      } else {
        this.onerror && this.onerror(error);
      }
    });
    if (!this.onerror) {
      return promise;
    }
  }
  // Pausing just stops forwarding audio to the encoder (see createOutputNode)
  // and freezes the timer; the graph keeps running.
  pause() {
    if (this.state == states.RECORDING) {
      this.state = states.PAUSED;
      this.timer.stop();
    }
  }
  resume() {
    if (this.state == states.PAUSED) {
      this.state = states.RECORDING;
      this.timer.start();
    }
  }
}

// States exposed by the useSimpleAudioRecorder hook - a superset of the
// recorder's own lifecycle, adding COUNTDOWN/ENCODING/COMPLETE/ERROR.
const RecorderStates = {
  INITIAL: 0,
  STARTING: 1,
  RECORDING: 2,
  PAUSED: 3,
  ENCODING: 4,
  COMPLETE: 5,
  ERROR: 6,
  COUNTDOWN: 7
};
// Run updateFunc on a fixed interval for the lifetime of the component.
// NOTE(review): the empty dependency array means the initial updateFunc and
// timeStep are captured for the component's whole life - confirm callers
// never pass values that change between renders.
function useInterval(updateFunc) {
  let timeStep = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 1000 / 60.0;
  const intervalIdRef = react.useRef(null);
  react.useEffect(() => {
    intervalIdRef.current = setInterval(updateFunc, timeStep);
    return () => {
      intervalIdRef.current && clearInterval(intervalIdRef.current);
    };
  }, []);
}
// React hook wrapping AudioRecorder: manages recorder lifecycle, countdown,
// elapsed time and the list of finished recordings.
function useSimpleAudioRecorder() {
  let {
    workerUrl,
    onDataAvailable,
    onComplete,
    onError,
    options,
    cleanup = false,
    timeUpdateStep = 111,
    countdown = 0
  } = arguments.length > 0 && arguments[0] !== undefined ?
arguments[0] : {};
  const [recorderState, setRecorderState] = react.useState(RecorderStates.INITIAL);
  const [mp3Blobs, setMp3Blobs] = react.useState([]);
  const [mp3Urls, setMp3Urls] = react.useState([]);
  const [error, setError] = react.useState(null);
  const [time, setTime] = react.useState(0);
  const [countdownStartTime, setCountdownStartTime] = react.useState(null);
  const [countdownTimeLeft, setCountdownTimeLeft] = react.useState(0);
  // Refs mirror the latest state so interval/timeout callbacks below don't
  // read stale values captured at render time.
  const recorderStateRef = react.useRef(recorderState);
  const countdownStartTimeRef = react.useRef(0);
  const recorderRef = react.useRef(null);
  const audioDataRef = react.useRef(null);
  const countdownTimerRef = react.useRef(null);
  recorderStateRef.current = recorderState;
  countdownStartTimeRef.current = countdownStartTime;
  function clearCountdownTimeout() {
    if (countdownTimerRef.current != null) {
      clearTimeout(countdownTimerRef.current);
      countdownTimerRef.current = null;
    }
  }
  react.useEffect(() => {
    if (workerUrl) {
      AudioRecorder.preload(workerUrl);
    }
    // Unmount cleanup: detach the callbacks first so the stop() below cannot
    // trigger state updates on an unmounted component.
    return () => {
      clearCountdownTimeout();
      if (recorderRef.current) {
        recorderRef.current.ondataavailable = null;
        recorderRef.current.onstart = null;
        recorderRef.current.onstop = null;
        recorderRef.current.onerror = null;
        recorderRef.current.stop();
        recorderRef.current = null;
      }
      if (cleanup) {
        // NOTE(review): with an empty dependency array this closure captures
        // the initial (empty) mp3Urls, so URLs created later may never be
        // revoked here - confirm whether that is intended.
        mp3Urls.forEach(URL.revokeObjectURL);
      }
    };
  }, []);
  // Drive the displayed elapsed time (and countdown remaining) every timeUpdateStep ms.
  useInterval(() => {
    recorderRef.current && setTime(recorderRef.current.time);
    if (recorderStateRef.current == RecorderStates.COUNTDOWN) {
      setCountdownTimeLeft(Math.max(0, countdown - (Date.now() - countdownStartTimeRef.current)));
    }
  }, timeUpdateStep);
  function start() {
    audioDataRef.current = [];
    // Streaming mode is forced on: chunks accumulate in audioDataRef and are
    // combined into a single Blob in onstop below.
    recorderRef.current = new AudioRecorder({
      ...options,
      streaming: true
    });
    setRecorderState(RecorderStates.STARTING);
    setCountdownTimeLeft(countdown);
    recorderRef.current.ondataavailable = data => {
      audioDataRef.current.push(data);
      onDataAvailable && onDataAvailable(data);
    };
    recorderRef.current.onstart = () => {
      if (countdown > 0) {
        // The recorder was started paused (see start(countdown > 0) below);
        // real recording begins when the countdown timer fires and resumes it.
        setRecorderState(RecorderStates.COUNTDOWN);
        setCountdownStartTime(Date.now());
        countdownTimerRef.current = setTimeout(() => {
          if (recorderStateRef.current == RecorderStates.COUNTDOWN) {
            recorderRef.current.resume();
            setRecorderState(RecorderStates.RECORDING);
            setCountdownTimeLeft(0);
          }
        }, countdown);
      } else {
        setRecorderState(RecorderStates.RECORDING);
      }
      setError(null);
    };
    recorderRef.current.onstop = () => {
      // Combine all the mp3 data chunks from the audioData array into a Blob
      const mp3Blob = new Blob(audioDataRef.current, {
        type: "audio/mpeg"
      });
      const mp3Url = URL.createObjectURL(mp3Blob);
      setRecorderState(RecorderStates.COMPLETE);
      // NOTE(review): these spreads capture mp3Blobs/mp3Urls from the render
      // that called start(); rapid successive recordings could lose entries -
      // functional setState updates would be safer. Confirm before changing.
      setMp3Blobs([...mp3Blobs, mp3Blob]);
      setMp3Urls([...mp3Urls, mp3Url]);
      onComplete && onComplete({
        mp3Blob,
        mp3Url
      });
    };
    recorderRef.current.onerror = error => {
      setRecorderState(RecorderStates.ERROR);
      setError(error);
      onError && onError(error);
    };
    // Start paused when a countdown is requested, so no audio is recorded
    // until the countdown elapses.
    recorderRef.current.start(countdown > 0);
  }
  function stop() {
    clearCountdownTimeout();
    if (recorderRef.current.getEncodingQueueSize() > 1000) {
      // If there's a fair amount of data left, we'll enter the ENCODING state.
      // (so a spinner or something could be shown)
      setRecorderState(RecorderStates.ENCODING);
    }
    recorderRef.current.stop();
  }
  function pause() {
    if (recorderStateRef.current == RecorderStates.RECORDING) {
      recorderRef.current.pause();
      setRecorderState(RecorderStates.PAUSED);
    }
  }
  function resume() {
    if (recorderStateRef.current == RecorderStates.PAUSED) {
      recorderRef.current.resume();
      setRecorderState(RecorderStates.RECORDING);
    }
  }
  const props = {
    recorderState
  };
  return {
    error,
    errorStr: error ? error.toString() : null,
    time,
    countdownTimeLeft,
    mp3Blobs,
    mp3Urls,
    mp3Blob: mp3Blobs.at(-1), // most recent recording, if any
    mp3Url: mp3Urls.at(-1),
    start,
    stop,
    pause,
    resume,
    ...props,
    getProps: () => props
  };
}
// Declarative view switcher: renders the view matching the current
// recorderState, with fallbacks for the optional views.
function SimpleAudioRecorder(_ref) {
  let {
    recorderState,
    viewInitial,
    viewStarting,
    viewCountdown,
    viewRecording,
    viewPaused,
    viewEncoding,
    viewComplete,
    viewError
  } = _ref;
  // Only viewInitial and viewRecording are required.
  // Others will default to one of viewInitial or viewRecording if not specified, for a simpler UI.

  // if viewStarting is not set, we fallback first to viewCountdown, and then to viewRecording
  viewStarting = viewStarting ?? viewCountdown ?? viewRecording;
  viewCountdown = viewCountdown ?? viewRecording;
  viewPaused = viewPaused ?? viewInitial;
  viewEncoding = viewEncoding ?? viewComplete;
  viewComplete = viewComplete ?? viewInitial;
  viewError = viewError ??
viewInitial; 719 | const stateMap = new Map(); 720 | stateMap.set(RecorderStates.INITIAL, viewInitial); 721 | stateMap.set(RecorderStates.STARTING, viewStarting); 722 | stateMap.set(RecorderStates.COUNTDOWN, viewCountdown); 723 | stateMap.set(RecorderStates.RECORDING, viewRecording); 724 | stateMap.set(RecorderStates.PAUSED, viewPaused); 725 | stateMap.set(RecorderStates.ENCODING, viewEncoding); 726 | stateMap.set(RecorderStates.COMPLETE, viewComplete); 727 | stateMap.set(RecorderStates.ERROR, viewError); 728 | return stateMap.get(recorderState) ?? RecorderStates.INITIAL; 729 | } 730 | function preloadWorker(workerUrl) { 731 | AudioRecorder.preload(workerUrl); 732 | } 733 | 734 | exports.RecorderStates = RecorderStates; 735 | exports.SimpleAudioRecorder = SimpleAudioRecorder; 736 | exports.preloadWorker = preloadWorker; 737 | exports.useSimpleAudioRecorder = useSimpleAudioRecorder; 738 | //# sourceMappingURL=react.cjs.map 739 | --------------------------------------------------------------------------------