├── src
│   ├── index.ts
│   ├── getBufferHeader.ts
│   ├── AudioStreamer.ts
│   ├── MediaBuffer.ts
│   └── AudioRecorder.ts
├── tsconfig.json
├── package.json
├── LICENSE
├── .gitignore
└── README.md

/src/index.ts:
--------------------------------------------------------------------------------
export * from "./AudioRecorder";
export * from "./AudioStreamer";

--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
{
  "compilerOptions": {
    "target": "es2016",
    "module": "commonjs",
    "declaration": true,
    "esModuleInterop": true,
    "forceConsistentCasingInFileNames": true,
    "strict": true,
    "skipLibCheck": true,
    "outDir": "dist"
  }
}

--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
{
  "name": "jnaudiostream",
  "version": "1.0.2",
  "description": "HTML5 audio streaming library for live streaming a microphone and receiving the stream",
  "main": "dist/index.js",
  "types": "dist/index.d.ts",
  "scripts": {
    "prepare": "tsc",
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/WoolDoughnut310/jnaudiostream.git"
  },
  "keywords": [
    "audio",
    "nodejs",
    "library",
    "html5",
    "stream",
    "microphone",
    "live"
  ],
  "author": "Joseph Nma",
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/WoolDoughnut310/jnaudiostream/issues"
  },
  "homepage": "https://github.com/WoolDoughnut310/jnaudiostream#readme",
  "devDependencies": {
    "typescript": "^4.8.4"
  }
}
--------------------------------------------------------------------------------
/src/getBufferHeader.ts:
--------------------------------------------------------------------------------
const BufferHeader: { [key: string]: any } = {
	"audio/webm;codecs=opus":
		"GkXfo59ChoEBQveBAULygQRC84EIQoKEd2VibUKHgQRChYECGFOAZwH/////////FUmpZpkq17GDD0JATYCGQ2hyb21lV0GGQ2hyb21lFlSua7+uvdeBAXPFh7o5nyc1kHqDgQKGhkFfT1BVU2Oik09wdXNIZWFkAQIAAIC7AAAAAADhjbWERzuAAJ+BAmJkgSAfQ7Z1Af/////////ngQCjjIEAAID/A//+//7//qM=",
};

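// getBufferHeader returns a predefined header Blob for the given mimeType, or
// false when none is available (an unknown type, or the WebM/Opus header on a
// browser that is not Chrome-based). The base64 string above is decoded once
// and then cached back into BufferHeader as a Blob.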
export default function getBufferHeader(type: string) {
	if (!("chrome" in window) && type === "audio/webm;codecs=opus") {
		// This header is only for Chrome-based browsers
		return false;
	}

	if (!(type in BufferHeader)) return false;

	let buffer = BufferHeader[type];

	if (buffer.constructor === Blob) return buffer;

	buffer = window.atob(buffer);

	var UInt = new Uint8Array(buffer.length);
	for (var i = 0; i < buffer.length; i++) UInt[i] = buffer.charCodeAt(i);

	return (BufferHeader[type] = new Blob([UInt]));
}

--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2022 J. Nma

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*

# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json

# Runtime data
pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage
*.lcov

# nyc test coverage
.nyc_output

# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Bower dependency directory (https://bower.io/)
bower_components

# node-waf configuration
.lock-wscript

# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules/
jspm_packages/

# TypeScript v1 declaration files
typings/

# TypeScript cache
*.tsbuildinfo

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# Yarn Integrity file
.yarn-integrity

# dotenv environment variables file
.env
.env.test

# parcel-bundler cache (https://parceljs.org/)
.cache

# Next.js build output
.next

# Nuxt.js build / generate output
.nuxt
dist

# Gatsby files
.cache/
# Comment in the public line in if your project uses Gatsby and *not* Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public

# vuepress build output
.vuepress/dist

# Serverless directories
.serverless/

# FuseBox cache
.fusebox/

# DynamoDB Local files
.dynamodb/

# TernJS port file
.tern-port

--------------------------------------------------------------------------------
/src/AudioStreamer.ts:
--------------------------------------------------------------------------------
import MediaBuffer from "./MediaBuffer";

export class AudioStreamer {
	chunksDuration: number;
	chunksSeconds: number;
	debug: boolean;
	playing: boolean;
	latency: number;
	mimeType?: string;
	onStop?: () => void;
	audioContext: AudioContext;
	audioElement: HTMLAudioElement;

	mediaBuffer?: ReturnType<typeof MediaBuffer>;

	constructor(chunksDuration?: number) {
		this.chunksDuration = chunksDuration ?? 1000;
		this.chunksSeconds = this.chunksDuration / 1000;
		this.audioContext = new AudioContext();

		this.debug = false;
		this.playing = false;
		this.latency = 0;
		this.audioElement = new Audio();
	}

	stop() {
		if (!this.mediaBuffer) return;
		this.playing = false;
		this.onStop?.();
	}

	setBufferHeader(packet: {
		mimeType: string;
		data: ArrayBuffer;
		startTime: number;
	}) {
		if (!packet.data) {
			return;
		}

		const arrayBuffer = packet.data;
		this.mimeType = packet.mimeType;

		this.mediaBuffer?.stop();

		this.mediaBuffer = MediaBuffer(
			arrayBuffer,
			this.mimeType,
			this.chunksDuration
		);

		this.audioElement.src = this.mediaBuffer.objectURL;
	}

	playStream() {
		this.playing = true;
	}

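	// receiveBuffer appends one stream packet to the underlying MediaBuffer and
	// updates this.latency with a rough estimate: the sender stamps each packet
	// with its seconds-within-a-100-second window (String(Date.now()).slice(-5, -3)),
	// and the difference from the receiver's clock, plus the AudioContext base
	// latency and one chunk duration, approximates the total delay.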
	receiveBuffer(packet: [ArrayBuffer, number]) {
		if (!this.playing || !this.mediaBuffer?.append) return;

		const arrayBuffer = packet[0];
		const streamingTime = packet[1];

		this.mediaBuffer.append(arrayBuffer);

		if (this.audioElement.paused) this.audioElement.play();

		this.latency =
			Number(String(Date.now()).slice(-5, -3)) -
			streamingTime +
			this.audioContext.baseLatency +
			this.chunksSeconds;
		if (this.debug) console.log("Total latency: " + this.latency);
	}
}

--------------------------------------------------------------------------------
/src/MediaBuffer.ts:
--------------------------------------------------------------------------------
const MediaBuffer = (
	bufferHeader: BufferSource,
	mimeType: string,
	chunksDuration?: number
) => {
	const source = new MediaSource();
	const objectURL = URL.createObjectURL(source);

	var removing = false;
	var totalTime = 0; // milliseconds
	var sourceBuffer: SourceBuffer | null = null;
	var buffers: ArrayBuffer[] = [];

	source.onsourceopen = function () {
		sourceBuffer = source.addSourceBuffer(mimeType);
		sourceBuffer.mode = "sequence";
		sourceBuffer.appendBuffer(bufferHeader);

		sourceBuffer.onerror = function (e) {
			console.error("SourceBuffer error:", e);
		};

		sourceBuffer.onupdateend = () => {
			if (removing) {
				removing = false;
				totalTime = 10000;

				// Drop the oldest 0 ~ 10 seconds
				sourceBuffer?.remove(0, 10);
				return;
			}

			if (!sourceBuffer?.updating && buffers.length !== 0) {
				const buffer = buffers.shift();
				if (buffer) {
					startAppending(buffer);
				}
			}
		};
	};

	const startAppending = (buffer: ArrayBuffer) => {
		sourceBuffer?.appendBuffer(buffer);
		totalTime += chunksDuration ?? 1000;
		// console.log(totalTime, buffer);
	};

	const append = (arrayBuffer: ArrayBuffer) => {
		if (sourceBuffer === null) return false;

		// After appending data, the SourceBuffer is temporarily unusable while it
		// works; during that time its 'updating' property is true, so it is
		// checked before accessing 'sourceBuffer.buffered'.
		if (!sourceBuffer.updating && sourceBuffer.buffered.length === 2)
			console.log("something wrong");

		if (totalTime >= 20000) removing = true;

		if (!sourceBuffer.updating) startAppending(arrayBuffer);
		else buffers.push(arrayBuffer);

		return totalTime / 1000;
	};

	const stop = function () {
		if (sourceBuffer?.updating) sourceBuffer?.abort();

		if (source.readyState === "open") source.endOfStream();
	};

	return { objectURL, append, stop };
};

export default MediaBuffer;

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# jnaudiostream

HTML5 audio streaming library for live streaming a microphone and receiving the stream, based on [sfmediastream](https://github.com/ScarletsFiction/SFMediaStream). The transmitted data is compressed (depending on the browser's media encoder) before being sent to the Node server, and the latency is configurable.

## Install with CDN link

You can download the minified js from this repository or use the CDN link

```html

```

## Install with NPM

```
npm i jnaudiostream
```

## Use the library

```javascript
// ES Modules
import { AudioRecorder, AudioStreamer } from "jnaudiostream";

// CommonJS
const { AudioRecorder, AudioStreamer } = require("jnaudiostream");

const recorder = new AudioRecorder(...);
const streamer = new AudioStreamer(...);
```

This is for web bundlers like Webpack or Browserify, and it can't be used as a library for Node.js. If you want to use this recorder for Node.js, it may be possible by using a headless browser like Puppeteer.
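Browser support for the recording and playback APIs varies, so you may want to probe the browser before constructing a recorder. The sketch below is not part of the library (jnaudiostream already runs a similar `MediaRecorder.isTypeSupported` / `MediaSource.isTypeSupported` check internally); it is only useful for showing your own "unsupported browser" message:

```js
// Minimal sketch: probe one mimeType before constructing the recorder.
const mimeType = "audio/webm;codecs=opus";

const canRecord =
	typeof MediaRecorder !== "undefined" &&
	MediaRecorder.isTypeSupported(mimeType);
const canPlay =
	typeof MediaSource !== "undefined" && MediaSource.isTypeSupported(mimeType);

if (!canRecord || !canPlay) {
	console.warn("Live audio streaming is not supported in this browser");
}
```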
## How to use

### AudioRecorder

This class is used for streaming the microphone to the server.

### Properties

| Property       | Details                                                      |
| -------------- | ------------------------------------------------------------ |
| debug          | Set to true to output debug messages to the browser console  |
| mediaRecorder  | Returns the `MediaRecorder` currently being used              |
| mediaStream    | Returns the `MediaStream` currently being used                |
| mediaGranted   | Returns true if the user granted access to the microphone     |
| recordingReady | Returns true if the recording is ready                        |
| recording      | Returns true if currently recording                           |

```js
// Example for accessing the properties
recorder.debug = true;
```

### Methods

| Function       | Arguments | Description                         |
| -------------- | --------- | ----------------------------------- |
| startRecording | `()`      | Start recording from the microphone |
| stopRecording  | `()`      | Stop recording from the microphone  |

#### Event Listeners

##### onReady

Callback when the library is ready for recording

```js
recorder.onReady = function (packet) {
	console.log("Header size: " + packet.data.size);
	mySocket.emit("bufferHeader", packet);
};
```

##### onBuffer

Callback when a recorded data buffer is ready to be sent

```js
recorder.onBuffer = function (packet) {
	console.log("Data", packet);
	mySocket.emit("stream", packet);
};
```

### Example

```js
const recorder = new AudioRecorder(
	{
		/* audio:{
			sampleRate: 44100,
			channelCount: 1,
			echoCancellation: false,
		} */
	},
	1000
); // 1sec

recorder.onReady = function (packet) {
	console.log("Recording started!");
	console.log("Header size: " + packet.data.size + " bytes");

	// Every new streamer must receive this header packet
	mySocket.emit("bufferHeader", packet);
};

recorder.onBuffer = function (packet) {
	console.log("Buffer sent: " + packet[0].size + " bytes");
	mySocket.emit("stream", packet);
};

recorder.startRecording();

setTimeout(function () {
	recorder.stopRecording();
}, 5000);
```
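The examples in this README emit and receive packets on a generic `mySocket` object; the transport itself is not part of the library. As a rough sketch only (assuming a socket.io server, which this repository does not ship), the relay between a recorder and its streamers could look like the following. The `lastBufferHeader` variable is illustrative; it exists because every new streamer must receive the header packet before any stream packets:

```js
// Sketch of a relay server (assumes socket.io v4; not part of jnaudiostream).
const { Server } = require("socket.io");

const io = new Server(3000, { cors: { origin: "*" } });
let lastBufferHeader = null;

io.on("connection", (socket) => {
	// Late joiners need the most recent header packet before any stream packets
	if (lastBufferHeader) socket.emit("bufferHeader", lastBufferHeader);

	socket.on("bufferHeader", (packet) => {
		lastBufferHeader = packet;
		socket.broadcast.emit("bufferHeader", packet);
	});

	socket.on("stream", (packet) => {
		socket.broadcast.emit("stream", packet);
	});
});
```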
### AudioStreamer

This class is used for buffering and playing a microphone stream received from the server.

```js
// The minimum duration for an audio chunk is ~100ms
var audioStreamer = new AudioStreamer(1000); // 1sec
```

### Properties

| Property | Details                                                      |
| -------- | ------------------------------------------------------------ |
| debug    | Set to true to output debug messages to the browser console  |
| playing  | Returns true if a stream is playing                          |
| latency  | Returns the current latency (in seconds)                     |
| mimeType | Returns the mimeType of the currently streamed media         |

```js
// Example for accessing the properties
audioStreamer.debug = true;
```

### Methods

| Function        | Arguments        | Description                                                                               |
| --------------- | ---------------- | ----------------------------------------------------------------------------------------- |
| playStream      | `()`             | Tell the library to automatically play any received buffer                                |
| setBufferHeader | `(bufferHeader)` | Receive the buffer header packet containing the mimeType and `ArrayBuffer` data           |
| receiveBuffer   | `(packetBuffer)` | Receive an `ArrayBuffer` chunk and play it when the previous buffer has finished playing  |
| stop            | `()`             | Stop playing any buffer                                                                   |

### Example

```js
var audioStreamer = new AudioStreamer(1000); // 1sec
audioStreamer.playStream();

// First thing that must be received
mySocket.on("bufferHeader", function (packet) {
	audioStreamer.setBufferHeader(packet);
});

mySocket.on("stream", function (packet) {
	console.log("Buffer received: " + packet[0].byteLength + " bytes");
	audioStreamer.receiveBuffer(packet);
});
```
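One practical note about the example above (an assumption about your page, not something the library enforces): browsers usually block audio playback until the user has interacted with the page, so it is safest to call `playStream()` from a user gesture. The `#listen` button below is hypothetical:

```js
// Hypothetical button; autoplay policies usually require a user gesture
// before audio can start playing.
document.querySelector("#listen").addEventListener("click", function () {
	audioStreamer.playStream();
});
```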
## Contribution

If you want to help `jnaudiostream`, please fork this project, make your edits in your own repository, then open a pull request here.

### Compile from source

After you download this repo, you need to install the devDependencies.

```
$ npm i
$ tsc -w
```

After you make changes in `/src`, they will automatically be compiled into `/dist/`. Make sure you clear your cache before experimenting.

## License

`jnaudiostream` is under the MIT license.

But don't forget to add a link to this repository.

--------------------------------------------------------------------------------
/src/AudioRecorder.ts:
--------------------------------------------------------------------------------
import getBufferHeader from "./getBufferHeader";

export interface Options {
	mediaStream?: MediaStream;
	element?: HTMLAudioElement;
	debug?: boolean;
	recorder?: MediaRecorderOptions;
	audio?: MediaTrackConstraints;
}

const codecsList = {
	webm: ["opus", "vorbis"],
	ogg: ["opus", "vorbis"], // This may not work on mobile
};

export class AudioRecorder {
	options: Options;
	latency: number;
	debug: boolean;
	mediaStream?: MediaStream;
	mediaRecorder?: MediaRecorder;
	recordingReady: boolean;
	mediaGranted: boolean;
	recording: boolean;
	onBuffer?: (info: [Blob, number]) => void;
	onReady?: (info: {
		mimeType: string;
		data: Blob;
		startTime: number;
	}) => void;
	onStop?: () => void;
	bufferHeader: Blob | null;
	afterStop: boolean;

	constructor(options?: Options, latency?: number) {
		options ??= {};

		this.options = options;
		if (!latency) latency = 1000;
		this.latency = latency;

		this.debug = options.debug ?? false;
		if (
			options.element &&
			options.element.srcObject instanceof MediaStream
		) {
			this.mediaStream = options.element.srcObject;
		}
		this.recordingReady = false;
		this.mediaGranted = false;
		this.recording = false;
		this.bufferHeader = null;

		this.afterStop = false;

		this.getSupportedMimeType();
	}

	getSupportedMimeType() {
		if (!this.options.recorder) this.options.recorder = {};

		if (
			this.options.recorder.mimeType &&
			!MediaRecorder.isTypeSupported(this.options.recorder.mimeType)
		) {
			console.log(
				"MediaRecorder doesn't support mimeType " +
					this.options.recorder.mimeType
			);
			this.options.recorder.mimeType = undefined;
		}

		if (!this.options.recorder?.mimeType) {
			let supportedMimeType: string | undefined = undefined;

			for (let format of Object.keys(codecsList) as Array<
				keyof typeof codecsList
			>) {
				let codecs = codecsList[format];
				let mimeType = "audio/" + format;

				for (let i = 0; i < codecs.length; i++) {
					let temp = mimeType + ";codecs=" + codecs[i];
					if (
						MediaRecorder.isTypeSupported(temp) &&
						MediaSource.isTypeSupported(temp)
					) {
						supportedMimeType = temp;
						break;
					}
				}

				if (
					!supportedMimeType &&
					MediaRecorder.isTypeSupported(mimeType) &&
					MediaSource.isTypeSupported(mimeType)
				)
					supportedMimeType = mimeType;

				// Stop at the first format that both MediaRecorder and
				// MediaSource can handle
				if (supportedMimeType) break;
			}

			this.options.recorder.mimeType = supportedMimeType;

			if (this.debug) console.log("mimeType: " + supportedMimeType);
		}
	}

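	// How the header packet is obtained:
	// 1. The recorder first starts with a short ~100ms timeslice.
	// 2. The first non-empty chunk is kept as `bufferHeader` (or replaced by a
	//    predefined header from getBufferHeader on Chrome-based browsers).
	// 3. onReady fires with that header packet.
	// 4. The recorder is then stopped and restarted with the requested latency,
	//    so every later chunk becomes a stream packet passed to onBuffer.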
	onMediaGranted(mediaStream: MediaStream) {
		console.log("onMediaGranted:", mediaStream);
		this.mediaGranted = true;

		this.bufferHeader = null;
		let bufferHeaderLength = 0;

		this.mediaRecorder = new MediaRecorder(
			mediaStream,
			this.options.recorder
		);

		if (this.debug) console.log("MediaRecorder obtained");
		this.mediaRecorder.onstart = () => {
			this.recording = true;
		};

		const headerLatency = 100;

		this.mediaRecorder.ondataavailable = (event) => {
			if (!this.options.recorder?.mimeType) {
				console.log("No mimeType available");
				return;
			}

			if (!this.mediaRecorder) return; // avoid type warnings

			if (bufferHeaderLength) {
				const streamingTime = Number(String(Date.now()).slice(-5, -3));
				this.onBuffer?.([event.data, streamingTime]);
				return;
			}

			// Return if the recording was stopped
			if (this.mediaRecorder.state !== "recording") return;

			if (event.data.size <= 1) return;

			// The audio buffer can contain some duration that causes noise,
			// which needs to be removed on the streamer side, because the
			// AudioBuffer can't be converted to an ArrayBuffer with the Web Audio API
			this.bufferHeader = event.data;

			var predefinedBuffer = getBufferHeader(
				this.mediaRecorder.mimeType
			) as Blob;
			if (predefinedBuffer) this.bufferHeader = predefinedBuffer;

			bufferHeaderLength = this.bufferHeader.size;

			if (bufferHeaderLength > 900 || bufferHeaderLength < 100)
				console.log(
					"%c[WARN] The buffer header length was more than 0.9KB or smaller than 0.1KB. This sometimes causes decode errors on the streamer side. Try to avoid any heavy CPU usage when using the recorder.",
					"color:yellow"
				);

			if (this.onReady)
				this.onReady({
					mimeType: this.options.recorder.mimeType,
					startTime: Date.now(),
					data: this.bufferHeader,
				});

			this.recordingReady = true;

			if (this.latency === headerLatency) return;

			// Record with the custom latency
			console.log("stopping");
			this.mediaRecorder.stop();
			setTimeout(() => {
				this.mediaRecorder?.start(this.latency);
			}, 10);
		};

		// Get first header
		this.mediaRecorder.start(headerLatency);
	}

	reAddTracks(mediaStream: MediaStream) {
		if (!this.mediaRecorder) return;

		var streams = mediaStream.getTracks();
		for (var i = 0; i < streams.length; i++)
			this.mediaRecorder.stream.addTrack(streams[i]);

		this.mediaRecorder.start(this.latency);
		this.recording = true;
	}

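	// startRecording has three paths: after a previous stopRecording it
	// re-acquires the microphone (unless a mediaStream was supplied) and
	// re-adds its tracks; on the first call it requests getUserMedia (or uses
	// options.mediaStream) and hands the stream to onMediaGranted; otherwise
	// it simply restarts the existing MediaRecorder.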
	async startRecording() {
		if (this.afterStop) {
			this.afterStop = false;

			if (!this.options.mediaStream) {
				const stream = await navigator.mediaDevices.getUserMedia({
					audio: this.options.audio ?? true,
				});
				this.reAddTracks(stream);
			}
			return;
		} else if (!this.mediaGranted || !this.mediaRecorder) {
			this.recordingReady = false;

			if (this.options.mediaStream) {
				this.onMediaGranted(this.options.mediaStream);
			} else {
				const stream = await navigator.mediaDevices.getUserMedia({
					audio: this.options.audio ?? true,
				});
				this.onMediaGranted(stream);
			}

			return false;
		}

		if (this.mediaRecorder.state !== "recording") {
			this.mediaRecorder.start(this.latency);
			this.recording = true;
		}

		return true;
	}

	stopRecording() {
		if (!this.recording || !this.mediaRecorder) {
			return;
		}

		this.recording = false;
		this.mediaRecorder.stop();

		if (!this.options.mediaStream) {
			// Turn off stream from microphone
			var streams = this.mediaRecorder.stream.getTracks();
			for (var i = 0; i < streams.length; i++) {
				streams[i].stop();
				this.mediaRecorder.stream.removeTrack(streams[i]);
			}
		}

		// this.mediaRecorder.ondataavailable = null;
		// this.mediaRecorder.onstart = null;

		this.bufferHeader = null;

		this.afterStop = true;

		if (this.onStop) this.onStop();
	}
}

--------------------------------------------------------------------------------