├── LICENSE
├── README.md
├── WebCodecsOpusRecorder.js
└── webcodecs-opus-recorder-mse-wav-player.html

/LICENSE:
--------------------------------------------------------------------------------
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
Version 2, December 2004

Copyright (C) 2004 Sam Hocevar <sam@hocevar.net>

Everyone is permitted to copy and distribute verbatim or modified
copies of this license document, and changing it is allowed as long
as the name is changed.

DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION

0. You just DO WHAT THE FUCK YOU WANT TO.
--------------------------------------------------------------------------------

/README.md:
--------------------------------------------------------------------------------
# WebCodecsOpusRecorder
WebCodecs Opus Recorder/Media Source Extensions Opus EncodedAudioChunk Player: records a live MediaStreamTrack to raw Opus packets with WebCodecs AudioEncoder, then plays the recording back either through Media Source Extensions (EncodedAudioChunks appended with appendEncodedChunks()) or by decoding to WAV with AudioDecoder.

[Example](https://guest271314.github.io/WebCodecsOpusRecorder/webcodecs-opus-recorder-mse-wav-player.html)

# License
This is FOSS. If you think a license is necessary, consider [WTFPLv2](http://www.wtfpl.net/about/) applicable.

wav-audio-encoder-js license: https://github.com/higuma/wav-audio-encoder-js/blob/master/LICENSE.txt
--------------------------------------------------------------------------------

/WebCodecsOpusRecorder.js:
--------------------------------------------------------------------------------
// Records a MediaStreamTrack to raw Opus packets. The saved file layout is:
// Uint32 JSON length | JSON metadata (decoderConfig + packet offsets) | concatenated Opus packets.
class WebCodecsOpusRecorder {
  constructor(track) {
    const processor = new MediaStreamTrackProcessor({
      track,
    });
    const metadata = {
      offsets: [], // Opus packet byte lengths, in encode order
    };
    const blob = new Blob();
    const config = {
      numberOfChannels: 1,
      sampleRate: 48000,
      codec: "opus",
    };
    this.isConfigured = false;
    Object.assign(this, {
      track,
      processor,
      metadata,
      blob,
      config,
    });
  }
  async start() {
    this.processor.readable
      .pipeTo(
        new WritableStream({
          write: async (frame) => {
            // Reconfigure the encoder from the first AudioData frame's actual parameters.
            if (!this.isConfigured) {
              this.config.numberOfChannels = frame.numberOfChannels;
              this.config.sampleRate = frame.sampleRate;
              console.log(
                await AudioEncoder.isConfigSupported(this.config),
                frame,
              );
              this.encoder.configure(this.config);
              this.isConfigured = true;
            }
            this.encoder.encode(frame);
          },
          close() {
            console.log("Processor closed");
          },
        }),
      )
      .catch(console.warn);
    let firstEncodedChunk = false;
    this.encoder = new AudioEncoder({
      error(e) {
        console.log(e);
      },
      output: async (chunk, { decoderConfig } = {}) => {
        if (decoderConfig) {
          // Store the Opus extradata as Base64 so it survives JSON.stringify().
          decoderConfig.description = btoa(
            String.fromCharCode(...new Uint8Array(decoderConfig.description)),
          );
          Object.assign(this.metadata, {
            decoderConfig,
          });
          console.log(this.metadata);
        }
        if (!firstEncodedChunk) {
          console.log(chunk, this.config);
          firstEncodedChunk = true;
        }
        const { byteLength } = chunk;
        this.metadata.offsets.push(byteLength);
        const ab = new ArrayBuffer(byteLength);
        chunk.copyTo(ab);
        this.blob = new Blob([this.blob, ab]);
      },
    });

    this.encoder.configure(this.config);
  }
  async stop() {
    this.track.stop();
    console.log(this.track);
    await this.encoder.flush();
    const json = JSON.stringify(this.metadata);
    console.log("stop", this.metadata);
    const length = Uint32Array.of(json.length); // JSON configuration length (ASCII, so characters === bytes)
    this.blob = new Blob([length, json, this.blob], {
      type: "application/octet-stream",
    });
    console.log(URL.createObjectURL(this.blob));
    try {
      const handle = await showSaveFilePicker({
        startIn: "music",
        suggestedName: `recording.opus.webcodecs`,
      });
      const writable = await handle.createWritable();
      await this.blob.stream().pipeTo(writable);
    } catch (e) {
      console.warn(e);
    }
  }
}
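
// Usage sketch (illustrative; assumes a microphone MediaStreamTrack from getUserMedia()
// and a user gesture for showSaveFilePicker() in stop()):
//
//   const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
//   const recorder = new WebCodecsOpusRecorder(stream.getAudioTracks()[0]);
//   await recorder.start();
//   // ...later, e.g. from a "Stop" button handler:
//   await recorder.stop(); // prompts to save "recording.opus.webcodecs"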
// Plays a recording produced by WebCodecsOpusRecorder, either by appending the stored
// EncodedAudioChunks to a Media Source Extensions SourceBuffer ("mediaSource") or by
// decoding them with AudioDecoder and writing the result to a WAV Blob ("wav").
class WebCodecsOpusPlayer {
  constructor(source, { type = "mediaSource" } = {}) {
    this.buffer = source;
    this.type = type;
    // Parse the header: Uint32 JSON length, JSON metadata, then the concatenated Opus packets.
    const view = new DataView(this.buffer);
    const length = view.getUint32(0, true);
    const json = new TextDecoder().decode(
      new Uint8Array(this.buffer).subarray(4, length + 4),
    );
    this.config = JSON.parse(json);
    console.log(this.config);
    this.data = new Uint8Array(this.buffer).subarray(length + 4);
    this.index = 0;
    this.timestamp = 0;
    this.duration = 60000; // chunk duration, in microseconds
    // Restore the Base64-encoded Opus extradata to an ArrayBuffer.
    this.config.decoderConfig.description = new Uint8Array(
      [...atob(this.config.decoderConfig.description)].map((s) =>
        s.charCodeAt(0)
      ),
    ).buffer;
  }
  async play() {
    this.audio = new Audio();
    this.audio.controls = true;
    const events = [
      "loadedmetadata",
      "loadeddata",
      "canplay",
      "canplaythrough",
      "play",
      "playing",
      "pause",
      "waiting",
      "progress",
      "seeking",
      "seeked",
      "ended",
      "stalled",
      "timeupdate",
    ];
    for (const event of events) {
      this.audio.addEventListener(event, async (e) => {
        if (this.type === "mediaSource") {
          if (this.ms.readyState === "open") {
            // Keep the SourceBuffer's timestampOffset in sync with playback.
            if (
              this.ms.activeSourceBuffers.length &&
              !this.ms.activeSourceBuffers[0].updating &&
              e.type === "timeupdate" &&
              this.audio.currentTime > 0
            ) {
              this.ms.activeSourceBuffers[0].timestampOffset =
                this.audio.currentTime;
            }
            if (e.type === "waiting" && this.audio.currentTime > 0) {
              console.log(
                e.type,
                this.audio.currentTime,
                this.ms.activeSourceBuffers[0].timestampOffset,
              );
              this.ms.activeSourceBuffers[0].timestampOffset = 0;
              // this.audio.currentTime = 0;
              // this.ms.endOfStream();
            }
            if (e.type === "ended") {
              // this.audio.currentTime = 0;
            }
            if (e.type === "loadedmetadata") {
              console.log(e.type);
              await this.audio.play();
            }
          }
        } else {
          if (this.type === "wav" && e.type !== "timeupdate") {
            console.log(e.type);
          }
        }
      });
    }
    document.body.appendChild(this.audio);
    if (this.type === "mediaSource") {
      this.ms = new MediaSource();
      this.ms.addEventListener("sourceopen", async (e) => {
        console.log(e.type);
        URL.revokeObjectURL(this.audio.src);
        // Media Source Extensions for WebCodecs: the SourceBuffer is created from an
        // audio decoder config and fed EncodedAudioChunks directly.
        const sourceBuffer = this.ms.addSourceBuffer({
          audioConfig: this.config.decoderConfig,
        });
        console.log(this.ms.activeSourceBuffers);
        sourceBuffer.onupdate = (e) => console.log(e.type);
        sourceBuffer.mode = "sequence";
        for (const offset of this.config.offsets) {
          const eac = new EncodedAudioChunk({
            type: "key",
            timestamp: this.timestamp,
            duration: !this.index ? 53500 : this.duration,
            data: this.data.subarray(this.index, this.index + offset),
          });
          await sourceBuffer.appendEncodedChunks(eac);
          this.timestamp += eac.duration;
          this.index += offset;
        }
      });
      this.audio.src = URL.createObjectURL(this.ms);
    } else {
      if (this.type === "wav") {
        const wav = new WavAudioEncoder({
          numberOfChannels: this.config.decoderConfig.numberOfChannels,
          sampleRate: this.config.decoderConfig.sampleRate,
        });
        const decoder = new AudioDecoder({
          error(e) {
            console.error(e);
          },
          async output(frame) {
            const size = frame.allocationSize({
              planeIndex: 0,
            });
            const chunk = new ArrayBuffer(size);
            frame.copyTo(chunk, {
              planeIndex: 0,
            });
            wav.write(chunk);
          },
        });
        console.log(
          await AudioDecoder.isConfigSupported(this.config.decoderConfig),
        );
        decoder.configure(this.config.decoderConfig);
        this.index = 0;
        this.timestamp = 0;
        this.duration = 60000;
        // Decode every stored packet, then encode the raw samples to a WAV Blob.
        for (const offset of this.config.offsets) {
          const eac = new EncodedAudioChunk({
            type: "key",
            timestamp: this.timestamp,
            duration: !this.index ? 53500 : this.duration,
            data: this.data.subarray(this.index, this.index + offset),
          });
          decoder.decode(eac);
          this.timestamp += eac.duration;
          this.index += offset;
        }
        await decoder.flush();
        const data = await wav.encode();
        this.audio.src = URL.createObjectURL(data);
      }
    }

    if (
      this.config.mediaSessionMetadata &&
      Object.values(this.config.mediaSessionMetadata).length
    ) {
      navigator.mediaSession.metadata = new MediaMetadata(
        this.config.mediaSessionMetadata,
      );
    }
  }
}
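
// Usage sketch (illustrative; assumes a recording saved by WebCodecsOpusRecorder above;
// the "mediaSource" path depends on the experimental Media Source Extensions for WebCodecs
// API — addSourceBuffer() with an audio decoder config and appendEncodedChunks() — which is
// typically gated behind an experimental flag in Chromium-based browsers, while the "wav"
// path only needs WebCodecs AudioDecoder):
//
//   const buffer = await (await fetch("./recording.opus.webcodecs")).arrayBuffer();
//   const player = new WebCodecsOpusPlayer(buffer, { type: "wav" }); // or { type: "mediaSource" }
//   await player.play(); // appends an <audio controls> element to document.body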
// Minimal 16-bit PCM WAV encoder, adapted from https://github.com/higuma/wav-audio-encoder-js
class WavAudioEncoder {
  constructor({ sampleRate, numberOfChannels }) {
    // Written chunks are queued on a ReadableStream and drained in encode().
    let controller;
    let readable = new ReadableStream({
      start(c) {
        return (controller = c);
      },
    });
    Object.assign(this, {
      sampleRate,
      numberOfChannels,
      numberOfSamples: 0,
      controller,
      readable,
    });
  }
  write(buffer) {
    let channels;
    // ArrayBuffer: interleaved samples, WebCodecs AudioData f32 format
    if (buffer instanceof ArrayBuffer) {
      const floats = new Float32Array(buffer);
      // Deinterleave
      channels = this.numberOfChannels === 2
        ? Object.values(
          Object.groupBy(floats, (_, i) => i % 2),
        )
        : [floats];
    }
    // Web Audio API AudioBuffer
    if (buffer instanceof AudioBuffer) {
      channels = Array.from(
        {
          length: buffer.numberOfChannels,
        },
        (_, i) => buffer.getChannelData(i),
      );
    }
    // Convert float samples to interleaved, clamped 16-bit PCM.
    const [{ length }] = channels;
    const ab = new ArrayBuffer(length * this.numberOfChannels * 2);
    const data = new DataView(ab);
    let offset = 0;
    for (let i = 0; i < length; i++) {
      for (let ch = 0; ch < this.numberOfChannels; ch++) {
        let x = channels[ch][i] * 0x7fff;
        data.setInt16(
          offset,
          x < 0 ? Math.max(x, -0x8000) : Math.min(x, 0x7fff),
          true,
        );
        offset += 2;
      }
    }
    this.controller.enqueue(new Uint8Array(ab));
    this.numberOfSamples += length;
  }
  setString(view, offset, str) {
    const len = str.length;
    for (let i = 0; i < len; i++) {
      view.setUint8(offset + i, str.charCodeAt(i));
    }
  }
  async encode() {
    const dataSize = this.numberOfChannels * this.numberOfSamples * 2;
    const buffer = new ArrayBuffer(44);
    const view = new DataView(buffer);
    this.setString(view, 0, "RIFF");
    view.setUint32(4, 36 + dataSize, true);
    this.setString(view, 8, "WAVE");
    this.setString(view, 12, "fmt ");
    view.setUint32(16, 16, true); // fmt chunk size
    view.setUint16(20, 1, true); // audio format: PCM
    view.setUint16(22, this.numberOfChannels, true);
    view.setUint32(24, this.sampleRate, true);
    view.setUint32(28, this.sampleRate * this.numberOfChannels * 2, true); // byte rate
    view.setUint16(32, this.numberOfChannels * 2, true); // block align
    view.setUint16(34, 16, true); // bits per sample
    this.setString(view, 36, "data");
    view.setUint32(40, dataSize, true);
    this.controller.close();
    return new Blob(
      [
        buffer,
        await new Response(this.readable).arrayBuffer(),
      ],
      {
        type: "audio/wav",
      },
    );
  }
}

export { WavAudioEncoder, WebCodecsOpusPlayer, WebCodecsOpusRecorder };
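
// Usage sketch for the standalone WAV encoder (illustrative; assumes a Web Audio API
// AudioBuffer, e.g. from decodeAudioData() or an OfflineAudioContext render):
//
//   const wav = new WavAudioEncoder({
//     sampleRate: audioBuffer.sampleRate,
//     numberOfChannels: audioBuffer.numberOfChannels,
//   });
//   wav.write(audioBuffer); // also accepts an ArrayBuffer of interleaved f32 samples
//   const blob = await wav.encode(); // Blob of type "audio/wav"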
--------------------------------------------------------------------------------

/webcodecs-opus-recorder-mse-wav-player.html:
--------------------------------------------------------------------------------
Document title: WebCodecs Opus Recorder/Media Source Extensions Opus EncodedAudioChunk, WAV Player
Page heading: WebCodecs Opus Recorder and Player
[The markup and inline script of this 155-line demo page were stripped in this dump; only the title and heading above are recoverable.]
--------------------------------------------------------------------------------
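
A minimal, hypothetical sketch of how the exported classes can be wired into a demo page (the original page's markup and script are not recoverable from this dump; the element IDs, labels, and flow below are assumptions based only on the module's API):

<button id="record">Record</button>
<button id="stop">Stop</button>
<input id="file" type="file" accept=".webcodecs">
<script type="module">
  import { WebCodecsOpusRecorder, WebCodecsOpusPlayer } from "./WebCodecsOpusRecorder.js";
  let recorder;
  // Record the first audio track from the microphone.
  document.querySelector("#record").onclick = async () => {
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    recorder = new WebCodecsOpusRecorder(stream.getAudioTracks()[0]);
    await recorder.start();
  };
  // Stop recording and prompt to save the .opus.webcodecs file.
  document.querySelector("#stop").onclick = () => recorder.stop();
  // Play back a previously saved recording selected from disk.
  document.querySelector("#file").onchange = async ({ target: { files: [file] } }) => {
    const player = new WebCodecsOpusPlayer(await file.arrayBuffer(), { type: "mediaSource" });
    await player.play();
  };
</script>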