├── .gitignore ├── webpack.config.js ├── package.json ├── demo ├── img │ └── github.svg ├── css │ └── styles.css └── index.html ├── LICENSE ├── readme.md ├── src └── audio-recorder.js └── dist └── audio-recorder.js /.gitignore: -------------------------------------------------------------------------------- 1 | .idea 2 | node_modules 3 | 4 | -------------------------------------------------------------------------------- /webpack.config.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | 3 | module.exports = { 4 | mode: 'production', 5 | entry: { 6 | 'audio-recorder': './src/audio-recorder.js' 7 | }, 8 | output: { 9 | path: path.join(__dirname, 'dist'), 10 | filename: '[name].js' 11 | }, 12 | module: { 13 | rules: [] 14 | } 15 | }; 16 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@dannymoerkerke/audio-recorder", 3 | "version": "1.1.1", 4 | "description": "Audio Recorder Web Component", 5 | "repository": { 6 | "type": "git", 7 | "url": "git+ssh://git@github.com:DannyMoerkerke/audio-recorder.git" 8 | }, 9 | "publishConfig": { 10 | "access": "public" 11 | }, 12 | "main": "src/audio-recorder.js", 13 | "files": [ 14 | "/src", 15 | "dist" 16 | ], 17 | "author": "Danny Moerkerke ", 18 | "license": "MIT", 19 | "homepage": "https://dannymoerkerke.github.io/audio-recorder/", 20 | "scripts": { 21 | "start": "ws -p 8080 --spa demo/index.html", 22 | "webpack": "rm -rf dist && webpack" 23 | }, 24 | "devDependencies": { 25 | "gh-pages": "^3.1.0", 26 | "local-web-server": "^2.6.1", 27 | "webpack": "^5.4.0", 28 | "webpack-cli": "^4.2.0" 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /demo/img/github.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | This software is licensed under the MIT License. 2 | 3 | Copyright (c) 2019 Danny Moerkerke and other contributors 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 
22 | -------------------------------------------------------------------------------- /readme.md: -------------------------------------------------------------------------------- 1 | # audio-recorder 2 | An audio recorder Web Component that records audio through the microphone of the user's device. 3 | 4 | It provides a graphic frequency analyzer and waveform view of the recorded audio and uses the MediaDevices API, 5 | Web Audio API and the MediaRecorder API. 6 | 7 | ## Installation 8 | ``` 9 | npm i @dannymoerkerke/audio-recorder 10 | ``` 11 | 12 | ## Usage 13 | Add a ` 17 | ``` 18 | 19 | or import it: 20 | 21 | ```javascript 22 | import './path/to/node_modules/@dannymoerkerke/src/audio-recorder.js'; 23 | ``` 24 | 25 | You can also import the bundled version which is in `dist` for use with Webpack: 26 | 27 | ```javascript 28 | import './path/to/node_modules/@dannymoerkerke/dist/audio-recorder.js'; 29 | ``` 30 | 31 | and add the tag to your page: 32 | 33 | ```html 34 | 35 | ``` 36 | 37 | ### Attributes 38 | - `bars`: number of bars in frequency analyzer, default: 20 39 | - `view`: `frequencies` or `waveform`, default: `frequencies` 40 | 41 | ### Styling 42 | - `--width`: width of the recorder, default: 600px 43 | - `--height`: height of the recorder, default: 300px 44 | - `--border`: border of the recorder, default: none 45 | - `--frequency-background-color`: background color of frequency analyzer, default: #ffffff 46 | - `--frequency-bars-color`: background color of frequency bars, default: #ff0000 47 | - `--waveform-background-color`: background color of waveform view, default: #ffffff 48 | - `--waveform-color`: color of waveform, default: #ff0000 49 | - `--waveform-progress-color`: color of waveform of part of file that has already played, default: #337ab7 50 | 51 | In addition, mixins can be applied using the `::part` pseudo element. 52 | 53 | Usage: 54 | 55 | ```css 56 | audio-recorder::part([selector]) { 57 | 58 | /** css rules **/ 59 | 60 | } 61 | 62 | ``` 63 | 64 | Available selectors: 65 | 66 | - `::part(button)`: styles the buttons except the volume buttons 67 | - `::part(volume-button)`: styles the volume buttons 68 | - `::part(slider)`: styles the volume slider 69 | - `::part(time)`: styles the elapsed and remaining time display 70 | 71 | The buttons and slider are part of the `material-webcomponents` library ([https://dannymoerkerke.github.io/material-webcomponents/](https://dannymoerkerke.github.io/material-webcomponents/)) 72 | 73 | The Custom CSS properties exposed by these components can be used as part of the styling through `::part`. 74 | 75 | For example, the `material-slider` component exposes the properties `--track-size`, `--track-color`, `--thumb-size` and 76 | `--thumb-color`. 
77 | 78 | These can be used in combination with `::part` to style the volume slider: 79 | 80 | ```css 81 | audio-recorder::part(slider) { 82 | --track-color: #000000; 83 | --track-size: 3px; 84 | --thumb-color: #000000; 85 | --thumb-size: 10px; 86 | } 87 | ``` 88 | 89 | ### Demo 90 | To run the demo, run `npm install` once and then `npm start` and view the demo on 91 | [http://localhost:8080/](http://localhost:8080/) 92 | -------------------------------------------------------------------------------- /demo/css/styles.css: -------------------------------------------------------------------------------- 1 | html, body { 2 | min-height: 100%; 3 | height: 100%; 4 | margin: 0; 5 | font-family: Verdana; 6 | } 7 | 8 | main { 9 | min-height: 100%; 10 | display: flex; 11 | flex-direction: column; 12 | align-items: center; 13 | } 14 | header, footer { 15 | display: flex; 16 | align-items: center; 17 | justify-content: center; 18 | min-height: 70px; 19 | background-color: #337ab7; 20 | padding: 10px; 21 | color: #ffffff; 22 | } 23 | 24 | #content { 25 | flex-grow: 1; 26 | background-color: #ffffff; 27 | padding: 10px; 28 | width: 85%; 29 | } 30 | .logo { 31 | display: flex; 32 | align-items: center; 33 | width: 95%; 34 | font-size: 1.7em; 35 | } 36 | .logo i { 37 | font-size: 2em; 38 | margin-right: 5px; 39 | } 40 | #github { 41 | display: flex; 42 | margin-left: auto; 43 | } 44 | #github img { 45 | max-width: 80%; 46 | } 47 | @media screen and (min-width: 1025px) { 48 | #content { 49 | width: 70%; 50 | } 51 | .logo { 52 | width: 72%; 53 | font-size: 3.5em; 54 | } 55 | .logo i { 56 | font-size: 2.5em; 57 | margin-right: 20px; 58 | } 59 | #github img { 60 | max-width: 100%; 61 | } 62 | } 63 | nav { 64 | display: flex; 65 | justify-content: space-between; 66 | width: 85%; 67 | position: relative; 68 | } 69 | 70 | @media screen and (min-width: 1025px) { 71 | nav { 72 | display: block; 73 | width: 70%; 74 | } 75 | } 76 | nav ul { 77 | display: none; 78 | list-style-type: none; 79 | height: 100%; 80 | margin: 0; 81 | padding: 0; 82 | position: absolute; 83 | } 84 | 85 | @media screen and (min-width: 1024px) { 86 | nav ul { 87 | display: flex; 88 | position: static; 89 | align-items: center; 90 | justify-content: space-between; 91 | text-transform: uppercase; 92 | } 93 | } 94 | nav li { 95 | display: block; 96 | padding: 5px; 97 | background-color: #ffffff; 98 | } 99 | @media screen and (min-width: 1024px) { 100 | nav li { 101 | padding: 0; 102 | background-color: transparent; 103 | } 104 | } 105 | nav a, 106 | nav a:visited { 107 | color: #000000; 108 | display: inline-block; 109 | text-decoration: none; 110 | border-bottom-style: solid; 111 | border-bottom-width: 2px; 112 | border-bottom-color: transparent; 113 | } 114 | @media screen and (min-width: 1024px) { 115 | nav a, 116 | nav a:visited { 117 | color: #ffffff; 118 | } 119 | } 120 | h1 { 121 | font-weight: normal; 122 | font-size: 3em; 123 | } 124 | 125 | h3 { 126 | font-weight: normal; 127 | font-size: 2em; 128 | } 129 | p { 130 | font-size: 0.9rem; 131 | line-height: 1.8rem; 132 | } 133 | pre { 134 | word-break: break-word; 135 | white-space: pre-wrap; 136 | overflow-wrap: break-word; 137 | background-color: #e5e5e5; 138 | display: inline-block; 139 | padding: 20px; 140 | } 141 | 142 | code { 143 | font-size: 1.1rem; 144 | word-break: break-all; 145 | } 146 | img { 147 | max-width: 100%; 148 | height: auto; 149 | } 150 | audio-recorder { 151 | width: 100%; 152 | --border: 1px solid #cccccc; 153 | --waveform-background-color: #cccccc; 154 | } 155 
| 156 | @media screen and (min-width: 1024px) { 157 | audio-recorder { 158 | width: 600px; 159 | } 160 | } 161 | 162 | audio-recorder::part(slider) { 163 | --track-color: #000000; 164 | --thumb-color: #000000; 165 | width: 100%; 166 | } 167 | 168 | #record-audio { 169 | --font-color: #ff0000; 170 | } 171 | 172 | .usage { 173 | padding: 0; 174 | } 175 | 176 | .usage li { 177 | list-style-type: none; 178 | display: flex; 179 | align-items: center; 180 | padding: 5px; 181 | } 182 | 183 | .usage li button { 184 | margin-right: 10px; 185 | pointer-events: none; 186 | border: none; 187 | padding: 6px 8px; 188 | } 189 | 190 | -------------------------------------------------------------------------------- /demo/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | audio-recorder 7 | 8 | 9 | 10 | 11 | 12 |
13 | 21 |
22 |
23 |
24 | 25 | 26 |

audio recorder

27 | 28 |

An audio recorder Web Component that records audio through the microphone of the user's device.

29 | 30 |

It provides a graphic frequency analyzer and waveform view of the recorded audio and uses the MediaDevices 31 | API, 32 | Web Audio API and the MediaRecorder API.

33 | 34 |
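As a rough orientation (a minimal sketch, not the component's actual code), the three APIs cooperate like this: the MediaDevices API provides the microphone stream, the Web Audio API analyses it for the frequency/waveform views, and the MediaRecorder API captures it into a recording.

```javascript
// Sketch only: how getUserMedia, an AnalyserNode and MediaRecorder fit together.
async function sketchCapture() {
  // MediaDevices API: ask for the microphone.
  const stream = await navigator.mediaDevices.getUserMedia({audio: true});

  // Web Audio API: route the stream through an analyser (used for the frequency bars / waveform).
  const context = new (window.AudioContext || window.webkitAudioContext)();
  const analyser = context.createAnalyser();
  context.createMediaStreamSource(stream).connect(analyser);

  // MediaRecorder API: record the same stream in 250ms chunks.
  const chunks = [];
  const recorder = new MediaRecorder(stream);
  recorder.addEventListener('dataavailable', ({data}) => chunks.push(data));
  recorder.addEventListener('stop', () => {
    const recording = new Blob(chunks, {type: recorder.mimeType});
    console.log(`recorded ${recording.size} bytes`);
  });
  recorder.start(250);
}
```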

Demo

35 | 36 | 37 | 38 |

To run the demo locally, run npm install once and then npm start and view the demo 39 | on 40 | http://localhost:8080/

41 | 42 | 43 |
44 |

Usage

45 |
    46 |
  • 47 | 50 | Start capturing microphone 51 |
  • 52 | 53 |
  • 54 | 57 | Play the recorded audio 58 |
  • 59 | 60 |
  • 61 | 64 | Start recording audio 65 |
  • 66 | 67 |
  • 68 | 71 | Save recorded audio 72 |
  • 73 | 74 |
  • 75 | 78 | Show frequency analyzer 79 |
  • 80 | 81 |
  • 82 | 85 | Show recorded audio as waveform 86 |
  • 87 |
88 |
89 | 90 | 91 |
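The buttons above call methods that exist on the element in src/audio-recorder.js (captureAudio, recordAudio, stopRecordAudio, playPause, showFrequencyAnalyzer, showWaveform). They are not documented as a public API, but as a sketch a recording could also be driven programmatically:

```javascript
// Sketch only: calls the same internal methods the demo buttons use (see src/audio-recorder.js).
// The component creates its AudioContext on the first mousedown on the document, so this must
// run after such a user gesture has happened.
const recorder = document.querySelector('audio-recorder');

recorder.captureAudio().then(() => {
  recorder.recordAudio();                              // start recording the captured stream
  setTimeout(() => recorder.stopRecordAudio(), 5000);  // stop after five seconds
});
```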

Installation

92 |
npm i @dannymoerkerke/audio-recorder
93 | 94 |

Usage

95 |

Add a <script> tag to your page:

96 | 97 | 98 |
<script type="module" src="path/to/node_modules/@dannymoerkerke/audio-recorder/src/audio-recorder.js"></script>
99 | 100 | 101 |

or import it:

102 | 103 |
import './path/to/node_modules/@dannymoerkerke/audio-recorder/src/audio-recorder.js';
104 | 105 |

You can also import the bundled version which is in dist for use with Webpack:

106 | 107 |
import './path/to/node_modules/@dannymoerkerke/audio-recorder/dist/audio-recorder.js';
108 | 109 |

and add the tag to your page:

110 | 111 |
<audio-recorder></audio-recorder>
112 | 113 |

Attributes

114 |
    115 |
  • bars: number of bars in frequency analyzer, default: 20
  • 116 |
  • view: frequencies or waveform, default: frequencies 117 |
  • 118 |
119 | 120 |
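For illustration (not part of the original demo markup), both attributes used together; inserting the tag as markup keeps the attributes in place before the element upgrades, which matters because bars is only read once in the constructor:

```javascript
// Sketch: an <audio-recorder> with 30 analyzer bars that starts in the waveform view.
document.body.insertAdjacentHTML(
  'beforeend',
  '<audio-recorder bars="30" view="waveform"></audio-recorder>'
);
```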

Styling

121 |
    122 |
  • --width: width of the recorder, default: 600px
  • 123 |
  • --height: height of the recorder, default: 300px
  • 124 |
  • --border: border of the recorder, default: none
  • 125 |
  • --frequency-background-color: background color of frequency analyzer, default: #ffffff
  • 126 |
  • --frequency-bars-color: background color of frequency bars, default: #ff0000
  • 127 |
  • --waveform-background-color: background color of waveform view, default: #ffffff
  • 128 |
  • --waveform-color: color of waveform, default: #ff0000
  • 129 |
  • --waveform-progress-color: color of waveform of part of file that has already played, 130 | default: #337ab7 131 |
  • 132 |
133 | 134 |
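These are ordinary CSS custom properties, so besides a stylesheet rule they can also be set on the element from JavaScript (the values below are arbitrary examples):

```javascript
// Sketch: custom properties set on the host element inherit into its shadow DOM.
const recorder = document.querySelector('audio-recorder');
recorder.style.setProperty('--width', '400px');
recorder.style.setProperty('--border', '1px solid #cccccc');
recorder.style.setProperty('--frequency-bars-color', '#337ab7');
```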

In addition, parts of the component can be styled using the ::part pseudo-element.

135 | 136 |

Usage:

137 | 138 | 139 |
140 | audio-recorder::part([selector]) {
141 | 
142 |   /** css rules **/
143 | 
144 | }
145 |         
146 | 147 | 148 |

Available selectors:

149 | 150 |
    151 |
  • ::part(button): styles the buttons except the volume buttons
  • 152 |
  • ::part(volume-button): styles the volume buttons
  • 153 |
  • ::part(time): styles the elapsed and remaining time display
  • 154 |
155 | 156 |

The custom CSS properties exposed by these components can be used as part of the styling through 157 | ::part.

158 | 159 |

For example, buttons can be styled like this:

160 | 161 |
162 | audio-recorder::part(button) {
163 |   color: #000000;
164 |   padding: 8px;
165 | }
166 |         
167 | 168 |
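The volume slider can be styled the same way through ::part(slider) (documented in the readme); the rule below mirrors the one in demo/css/styles.css, injected from JavaScript since ::part only works from a stylesheet:

```javascript
// Sketch: ::part(...) is a selector, so it has to live in a stylesheet rather than an inline style.
const style = document.createElement('style');
style.textContent = `
  audio-recorder::part(slider) {
    --track-color: #000000;
    --thumb-color: #000000;
    width: 100%;
  }
`;
document.head.appendChild(style);
```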

Events

169 |

Audio recorder dispatches a notallowed event when permission to access the device's microphone is 170 | denied:

171 | 172 |
173 | const recorder = document.querySelector('audio-recorder');
174 | 
175 | recorder.addEventListener('notallowed', e => {
176 | 
177 |   // prompt user to change device settings to allow access to microphone
178 | 
179 | });
180 |         
181 |
182 |
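Per the dispatchEvent call in src/audio-recorder.js, notallowed is a CustomEvent whose detail carries a human-readable message; a sketch of reading it:

```javascript
// Sketch: detail.message is set by the component when getUserMedia is rejected with NotAllowedError.
// `recorder` is the element queried in the example above.
recorder.addEventListener('notallowed', e => {
  console.warn(e.detail.message); // "Access to the device's microphone is not allowed"
});
```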
183 | 184 | 185 | 186 | 187 |

No access to media

188 |

Your device does not have permission to access the microphone. Please enable this in your device's 189 | settings.

190 |
191 | 192 |
193 |
194 | 195 | 196 | 204 | 205 | 206 | -------------------------------------------------------------------------------- /src/audio-recorder.js: -------------------------------------------------------------------------------- 1 | export class AudioRecorder extends HTMLElement { 2 | 3 | static get observedAttributes() { 4 | return ['view']; 5 | } 6 | 7 | constructor() { 8 | super(); 9 | 10 | const shadowRoot = this.attachShadow({mode: 'open'}); 11 | 12 | shadowRoot.innerHTML = ` 13 | 248 | 249 | 250 | 251 |
252 |
253 |
254 | 255 |
256 | 257 |
258 | 259 |
260 | 261 |
262 | 263 |
264 |
265 | 266 |
267 |
268 | 271 | 272 | 275 | 276 | 279 | 280 | 283 | 284 | 287 | 288 | 291 | 292 | 297 | 298 | 301 | 302 | 305 |
306 | 307 |
308 | 311 | 312 | 313 | 314 | 317 |
318 | 319 |
320 | / 321 |
322 |
323 |
324 | `; 325 | 326 | this.hours = 0; 327 | this.minutes = 0; 328 | this.seconds = 0; 329 | this.secs = 0; 330 | this.pauseTime = 0; 331 | this.audioBuffers = []; 332 | this.frequencies = false; 333 | this.state = 'idle'; 334 | this.view = this.getAttribute('view') || 'frequencies'; 335 | this.bars = parseInt(this.getAttribute('bars') || 20, 10); 336 | 337 | this.mediaElementSource = null; 338 | this.mediaStreamSource = null; 339 | 340 | this.nativeFileSystemSupported = 'showSaveFilePicker' in window; 341 | this.maxChunkLength = 524000; //1024000 * 50; 342 | this.canvas = this.shadowRoot.querySelector('#waveform'); 343 | this.canvasContext = this.canvas.getContext('2d'); 344 | this.progressCanvas = this.shadowRoot.querySelector('#progress'); 345 | this.progressCanvasContext = this.progressCanvas.getContext('2d'); 346 | this.audioContainer = this.shadowRoot.querySelector('#audio-container'); 347 | this.frequencyCanvas = this.shadowRoot.querySelector('#frequencies'); 348 | this.frequencyCanvasContext = this.frequencyCanvas.getContext('2d'); 349 | this.waveformContainer = this.shadowRoot.querySelector('#waveform-container'); 350 | this.progressContainer = this.shadowRoot.querySelector('#progress-container'); 351 | this.frequenciesContainer = this.shadowRoot.querySelector('#frequencies-container'); 352 | 353 | this.playButton = this.shadowRoot.querySelector('#play'); 354 | this.pauseButton = this.shadowRoot.querySelector('#pause'); 355 | this.elapsedTime = this.shadowRoot.querySelector('#elapsed-time'); 356 | this.totalTime = this.shadowRoot.querySelector('#total-time'); 357 | this.volume = this.shadowRoot.querySelector('#volume'); 358 | this.volumeMinButton = this.shadowRoot.querySelector('#volume-min'); 359 | this.volumeMaxButton = this.shadowRoot.querySelector('#volume-max'); 360 | this.input = this.shadowRoot.querySelector('audio'); 361 | this.freqButton = this.shadowRoot.querySelector('#frequencies-button'); 362 | this.waveformButton = this.shadowRoot.querySelector('#waveform-button'); 363 | this.captureAudioButton = this.shadowRoot.querySelector('#capture-audio'); 364 | this.stopCaptureAudioButton = this.shadowRoot.querySelector('#stop-capture-audio'); 365 | this.recordAudioButton = this.shadowRoot.querySelector('#record-audio'); 366 | this.stopRecordAudioButton = this.shadowRoot.querySelector('#stop-record-audio'); 367 | this.saveAudioLink = this.shadowRoot.querySelector('#save-audio-link'); 368 | 369 | if(this.nativeFileSystemSupported) { 370 | this.saveAudioLink.addEventListener('click', async () => this.saveFile(this.recording)); 371 | } 372 | } 373 | 374 | resizeCanvas({width, height}) { 375 | this.canvas.width = 0; 376 | this.canvas.height = 0; 377 | 378 | this.canvas.width = width; 379 | this.canvas.height = height; 380 | 381 | this.progressCanvas.width = width; 382 | this.progressCanvas.height = height; 383 | 384 | this.frequencyCanvas.width = width; 385 | this.frequencyCanvas.height = height; 386 | 387 | this.canvasWidth = this.canvas.width; 388 | this.canvasHeight = this.canvas.height; 389 | 390 | const mimeTypes = [ 391 | { 392 | type: 'audio/mpeg', 393 | ext: 'mp3' 394 | }, 395 | { 396 | type: 'audio/webm', 397 | ext: 'webm', 398 | }, 399 | { 400 | type: 'audio/mp4', 401 | ext: 'mp4' 402 | } 403 | ]; 404 | 405 | const isSupportedMimeType = ({type}) => MediaRecorder.isTypeSupported(type); 406 | const defaultMime = {type: 'audio/mpeg', ext: 'mp3'}; 407 | 408 | this.mimeType = 'isTypeSupported' in MediaRecorder ? 
mimeTypes.find(isSupportedMimeType) : defaultMime; 409 | } 410 | 411 | connectedCallback() { 412 | const hostStyle = getComputedStyle(this.shadowRoot.host); 413 | const waveformBackgroundColor = hostStyle.getPropertyValue('--waveform-background-color'); 414 | const waveformColor = hostStyle.getPropertyValue('--waveform-color'); 415 | const waveformProgressColor = hostStyle.getPropertyValue('--waveform-progress-color'); 416 | 417 | this.canvases = [ 418 | { 419 | element: this.canvas, 420 | context: this.canvasContext, 421 | fillStyle: waveformBackgroundColor, 422 | strokeStyle: waveformColor 423 | }, 424 | { 425 | element: this.progressCanvas, 426 | context: this.progressCanvasContext, 427 | fillStyle: waveformBackgroundColor, 428 | strokeStyle: waveformProgressColor 429 | } 430 | ]; 431 | 432 | this.frequenciesBackgroundColor = hostStyle.getPropertyValue('--frequency-background-color'); 433 | this.frequenciesBarsColor = hostStyle.getPropertyValue('--frequency-bars-color'); 434 | 435 | setTimeout(() => { 436 | const {width, height} = this.audioContainer.getBoundingClientRect(); 437 | this.resizeCanvas({width, height}); 438 | 439 | if('ResizeObserver' in window) { 440 | let observerStarted = true; 441 | 442 | const observer = new ResizeObserver(entries => { 443 | if(observerStarted) { 444 | observerStarted = false; 445 | return; 446 | } 447 | 448 | entries.forEach(({contentRect}) => { 449 | this.resizeCanvas(contentRect); 450 | 451 | if(this.view === 'waveform' && this.recording) { 452 | this.renderWaveform(this.recording); 453 | } 454 | if(this.view === 'frequencies' && this.analyser) { 455 | cancelAnimationFrame(this.frequencyAnimation); 456 | this.renderFrequencyAnalyzer(); 457 | } 458 | 459 | observerStarted = true; 460 | observer.observe(this.audioContainer); 461 | }); 462 | }); 463 | 464 | observer.observe(this.audioContainer); 465 | } 466 | }); 467 | 468 | this.showTotalTime(0); 469 | this.showElapsedTime(0); 470 | 471 | this.audioContainer.addEventListener('click', this.handleWaveformClick.bind(this)); 472 | this.playButton.addEventListener('click', this.playPause.bind(this)); 473 | this.pauseButton.addEventListener('click', this.playPause.bind(this)); 474 | this.volume.addEventListener('input', e => this.setVolume(e.target.value)); 475 | this.input.addEventListener('ended', this.stopAudio.bind(this)); 476 | this.freqButton.addEventListener('click', this.showFrequencyAnalyzer.bind(this)); 477 | this.waveformButton.addEventListener('click', this.showWaveform.bind(this)); 478 | this.captureAudioButton.addEventListener('click', this.captureAudio.bind(this)); 479 | this.stopCaptureAudioButton.addEventListener('click', this.stopCaptureAudio.bind(this)); 480 | this.recordAudioButton.addEventListener('click', this.recordAudio.bind(this)); 481 | this.stopRecordAudioButton.addEventListener('click', this.stopRecordAudio.bind(this)); 482 | this.volumeMinButton.addEventListener('click', e => { 483 | this.setVolume(0); 484 | this.volume.value = 0; 485 | }); 486 | 487 | this.volumeMaxButton.addEventListener('click', e => { 488 | this.setVolume(1); 489 | this.volume.value = 1; 490 | }); 491 | 492 | const init = () => { 493 | this.isWebKit = 'webkitAudioContext' in window; 494 | this.context = new (window.AudioContext || window.webkitAudioContext)(); 495 | this.output = this.context.destination; 496 | this.gainNode = this.context.createGain(); 497 | this.analyser = this.context.createAnalyser(); 498 | this.analyser.fftSize = 256; 499 | 500 | document.removeEventListener('mousedown', init); 501 | }; 
502 | 503 | document.addEventListener('mousedown', init); 504 | } 505 | 506 | getMediaElementSource(input) { 507 | if(!this.mediaElementSource) { 508 | this.mediaElementSource = this.context.createMediaElementSource(input); 509 | 510 | this.mediaElementSource.connect(this.analyser); 511 | this.mediaElementSource.connect(this.gainNode); 512 | } 513 | 514 | return this.mediaElementSource; 515 | } 516 | 517 | getMediaStreamSource(input) { 518 | if(!this.mediaStreamSource) { 519 | this.mediaStreamSource = this.context.createMediaStreamSource(input); 520 | 521 | this.mediaStreamSource.connect(this.analyser); 522 | this.mediaStreamSource.connect(this.gainNode); 523 | } 524 | 525 | return this.mediaStreamSource; 526 | } 527 | 528 | async initializeAudio(input) { 529 | this.curSource = input instanceof HTMLAudioElement ? this.getMediaElementSource(input) : this.getMediaStreamSource(input); 530 | 531 | this.curSource.connect(this.analyser); 532 | this.curSource.connect(this.gainNode); 533 | this.gainNode.connect(this.output); 534 | } 535 | 536 | async openFile(file) { 537 | await this.loadFile(file); 538 | await this.initializeAudio(this.input); 539 | } 540 | 541 | async loadFile(file) { 542 | const reader = new FileReader(); 543 | 544 | reader.onloadend = e => { 545 | this.src = e.target.result; 546 | 547 | if(!this.nativeFileSystemSupported) { 548 | this.saveAudioLink.download = `capture.${this.mimeType.ext}`; 549 | this.saveAudioLink.href = e.target.result; 550 | } 551 | }; 552 | 553 | reader.readAsDataURL(file); 554 | 555 | await this.renderWaveform(file); 556 | await this.initializeAudio(this.input); 557 | this.progressContainer.style.width = 0; 558 | 559 | this.view = 'waveform'; 560 | } 561 | 562 | async saveFile(file) { 563 | const ext = `.${file.type.split('/').pop()}`; 564 | const handle = await window.showSaveFilePicker({ 565 | types: [ 566 | { 567 | description: 'Audio file', 568 | accept: { 569 | [file.type]: ext 570 | } 571 | } 572 | ] 573 | }); 574 | 575 | const writable = await handle.createWritable(); 576 | await writable.write({type: 'write', data: file}); 577 | await writable.close(); 578 | } 579 | 580 | async captureAudio() { 581 | try { 582 | this.stream = await navigator.mediaDevices.getUserMedia({audio: true}); 583 | 584 | this.currentVolume = this.volume.value; 585 | this.setVolume(0); 586 | this.volume.disabled = true; 587 | 588 | await this.initializeAudio(this.stream); 589 | 590 | this.renderFrequencyAnalyzer(); 591 | this.view = 'frequencies'; 592 | this.state = 'capturing'; 593 | } 594 | catch(e) { 595 | if(e.name === 'NotAllowedError') { 596 | this.dispatchEvent(new CustomEvent('notallowed', { 597 | detail: {message: `Access to the device's microphone is not allowed`} 598 | })); 599 | } 600 | } 601 | } 602 | 603 | stopCaptureAudio() { 604 | if(this.stream) { 605 | this.stream.getTracks().map(track => track.stop()); 606 | this.mediaStreamSource = null; 607 | 608 | cancelAnimationFrame(this.frequencyAnimation); 609 | this.clearFrequenciesDisplay(); 610 | 611 | this.state = 'idle'; 612 | 613 | this.volume.disabled = false; 614 | this.setVolume(this.currentVolume); 615 | } 616 | } 617 | 618 | recordAudio() { 619 | const chunks = []; 620 | 621 | this.recorder = new MediaRecorder(this.stream); 622 | 623 | const options = {type: `${this.mimeType.type}`}; 624 | 625 | this.recorder.start(250); 626 | 627 | this.state = 'recording'; 628 | 629 | const handleStopRecording = async () => { 630 | this.recording = new Blob(chunks, options); 631 | 632 | this.stopCaptureAudio(); 633 | 
await this.loadFile(this.recording); 634 | }; 635 | 636 | const processChunk = ({data}) => { 637 | if(data !== undefined && data.size !== 0) { 638 | chunks.push(data); 639 | 640 | const recording = new Blob(chunks, options); 641 | this.renderWaveform(recording); 642 | } 643 | }; 644 | 645 | this.recorder.addEventListener('dataavailable', processChunk); 646 | this.recorder.addEventListener('stop', handleStopRecording); 647 | } 648 | 649 | stopRecordAudio() { 650 | this.recorder.stop(); 651 | } 652 | 653 | showFrequencyAnalyzer() { 654 | this.view = 'frequencies'; 655 | } 656 | 657 | showWaveform() { 658 | this.view = 'waveform'; 659 | } 660 | 661 | setVolume(value) { 662 | this.gainNode.gain.setValueAtTime(value, this.context.currentTime); 663 | } 664 | 665 | handleWaveformClick(e) { 666 | if(this.curSource) { 667 | this.state = 'idle'; 668 | this.input.pause(); 669 | 670 | cancelAnimationFrame(this.timerId); 671 | } 672 | 673 | this.progressContainer.style.width = `${e.offsetX}px`; 674 | this.input.currentTime = (e.offsetX / this.canvasWidth) * this.duration; 675 | this.showElapsedTime(this.input.currentTime); 676 | } 677 | 678 | stringToArrayBuffer(byteString) { 679 | return new Uint8Array(byteString.length).map((_, i) => byteString.codePointAt(i)); 680 | } 681 | 682 | getArrayBuffer(blob) { 683 | const reader = new FileReader(); 684 | reader.readAsArrayBuffer(blob); 685 | 686 | return new Promise((resolve, reject) => { 687 | reader.onerror = err => reject(err); 688 | reader.onloadend = e => resolve(e.target.result); 689 | }); 690 | } 691 | 692 | sliceAudio(buffer, start, end) { 693 | const chunk = start + end === buffer.byteLength ? buffer : buffer.slice(start, end); 694 | const blob = new Blob([new Uint8Array(chunk)]); 695 | const reader = new FileReader(); 696 | 697 | reader.readAsArrayBuffer(blob); 698 | 699 | return new Promise((resolve, reject) => { 700 | reader.onloadend = e => { 701 | const buffer = e.target.result; 702 | 703 | if(this.isWebKit) { 704 | this.context.decodeAudioData(buffer, decodedBuffer => resolve(decodedBuffer), (err) => reject(err)); 705 | } 706 | else { 707 | this.context.decodeAudioData(buffer) 708 | .then(decodedBuffer => resolve(decodedBuffer)) 709 | .catch((err) => reject(err)); 710 | } 711 | }; 712 | }); 713 | } 714 | 715 | async getAudioBuffers(buffer) { 716 | const bufferLength = buffer.byteLength; 717 | const chunkLength = bufferLength > this.maxChunkLength ? this.maxChunkLength : bufferLength; 718 | 719 | let start = 0; 720 | let end = start + chunkLength; 721 | const self = this; 722 | 723 | const slice = (buffer, start, end) => { 724 | async function* gen() { 725 | while(start < bufferLength) { 726 | const decodedBuffer = await self.sliceAudio(buffer, start, end); 727 | yield decodedBuffer; 728 | 729 | start += chunkLength; 730 | end = start + chunkLength > bufferLength ? bufferLength : start + chunkLength; 731 | } 732 | } 733 | 734 | return gen(); 735 | }; 736 | 737 | const audioBuffers = []; 738 | 739 | for await (const decodedBuffer of slice(buffer, start, end)) { 740 | audioBuffers.push(decodedBuffer); 741 | } 742 | 743 | return audioBuffers; 744 | } 745 | 746 | getNodesAfterOffset(nodes, offset) { 747 | let duration = 0; 748 | let skipped = 0; 749 | 750 | const remaining = nodes.filter(node => { 751 | duration += node.buffer.duration; 752 | skipped += duration < offset ? 
node.buffer.duration : 0; 753 | 754 | return duration > offset; 755 | }); 756 | 757 | return [remaining, offset - skipped]; 758 | } 759 | 760 | playPause() { 761 | 762 | const progress = () => { 763 | const diff = (this.input.currentTime); 764 | this.showElapsedTime(diff); 765 | const progressWidth = ((diff / this.duration) * this.canvasWidth); 766 | this.progressContainer.style.width = `${progressWidth}px`; 767 | 768 | this.timerId = requestAnimationFrame(progress); 769 | }; 770 | 771 | if(this.state === 'playing') { 772 | this.state = 'idle'; 773 | 774 | this.input.pause(); 775 | this.pauseTime = this.input.currentTime; 776 | this.clearFrequenciesDisplay(); 777 | 778 | cancelAnimationFrame(this.timerId); 779 | cancelAnimationFrame(this.frequencyAnimation); 780 | } 781 | else { 782 | this.state = 'playing'; 783 | this.input.play(); 784 | 785 | this.renderFrequencyAnalyzer(); 786 | requestAnimationFrame(progress); 787 | } 788 | } 789 | 790 | stopAudio() { 791 | this.state = 'idle'; 792 | this.clearFrequenciesDisplay(); 793 | 794 | cancelAnimationFrame(this.timerId); 795 | cancelAnimationFrame(this.frequencyAnimation); 796 | 797 | this.input.currentTime = 0; 798 | this.progressContainer.style.width = 0; 799 | this.showElapsedTime(0); 800 | } 801 | 802 | formatTime(secs) { 803 | const minute = 60; 804 | const hour = 3600; 805 | 806 | const hrs = Math.floor(secs / hour); 807 | const min = Math.floor((secs % hour) / minute); 808 | const sec = Math.floor(secs) % minute; 809 | 810 | const hours = hrs < 10 ? `0${hrs}` : hrs; 811 | const minutes = min < 10 ? `0${min}` : min; 812 | const seconds = sec < 10 ? `0${sec}` : sec; 813 | return hours !== '00' ? `${hours}:${minutes}:${seconds}` : `${minutes}:${seconds}`; 814 | } 815 | 816 | showElapsedTime(secs) { 817 | this.elapsedTime.innerHTML = this.formatTime(secs); 818 | } 819 | 820 | showTotalTime(secs) { 821 | this.totalTime.innerHTML = this.formatTime(secs); 822 | } 823 | 824 | renderFrequencyAnalyzer() { 825 | const bufferLength = this.analyser.frequencyBinCount; 826 | const dataArray = new Float32Array(bufferLength); 827 | const barWidth = (this.canvasWidth - (this.bars - 1)) / this.bars; 828 | 829 | this.frequencyCanvasContext.clearRect(0, 0, this.canvasWidth, this.canvasHeight); 830 | 831 | const draw = () => { 832 | this.frequencyAnimation = requestAnimationFrame(draw); 833 | this.analyser.getFloatFrequencyData(dataArray); 834 | this.frequencyCanvasContext.fillStyle = this.frequenciesBackgroundColor; 835 | this.frequencyCanvasContext.fillRect(0, 0, this.canvasWidth, this.canvasHeight); 836 | 837 | let x = 0; 838 | 839 | for (let i = 0; i < bufferLength; i++) { 840 | const barHeight = (dataArray[i] + 140) * 2; 841 | 842 | this.frequencyCanvasContext.fillStyle = this.frequenciesBarsColor; 843 | this.frequencyCanvasContext.fillRect(x, this.canvasHeight - barHeight * 0.75, barWidth, barHeight * 0.75); 844 | 845 | x += barWidth + 1; 846 | } 847 | }; 848 | 849 | draw(); 850 | } 851 | 852 | getWaveformData(buffers) { 853 | const dataArrays = buffers.map(buffer => buffer.getChannelData(0)); 854 | const totalLength = dataArrays.reduce((total, data) => total + data.length, 0); 855 | const channelData = new Float32Array(totalLength); 856 | 857 | let offset = 0; 858 | 859 | dataArrays.forEach(data => { 860 | channelData.set(data, offset); 861 | offset += data.length; 862 | }); 863 | 864 | return channelData; 865 | } 866 | 867 | async renderWaveform(file) { 868 | const buffer = await this.getArrayBuffer(file); 869 | const audioBuffers = await 
this.getAudioBuffers(buffer); 870 | 871 | this.audioBuffers = audioBuffers; 872 | this.duration = audioBuffers.reduce((total, buffer) => total + buffer.duration, 0); 873 | 874 | this.showTotalTime(this.duration); 875 | 876 | const channelData = this.getWaveformData(audioBuffers); 877 | const drawLines = 2000; 878 | const totallength = channelData.length; 879 | const eachBlock = Math.floor(totallength / drawLines); 880 | const lineGap = this.canvasWidth / drawLines; 881 | 882 | this.canvases.forEach(canvas => { 883 | canvas.context.save(); 884 | canvas.context.fillStyle = canvas.fillStyle; 885 | canvas.context.fillRect(0, 0, this.canvasWidth, this.canvasHeight); 886 | canvas.context.strokeStyle = canvas.strokeStyle; 887 | canvas.context.translate(0, this.canvasHeight / 2); 888 | canvas.context.lineWidth = 1; 889 | canvas.context.beginPath(); 890 | 891 | for(let i = 0; i <= drawLines; i++) { 892 | const audioBuffKey = Math.floor(eachBlock * i); 893 | const x = i * lineGap; 894 | const y = channelData[audioBuffKey] * this.canvasHeight * 0.8; 895 | 896 | canvas.context.moveTo(x, y); 897 | canvas.context.lineTo(x, (y * -1)); 898 | } 899 | 900 | canvas.context.stroke(); 901 | canvas.context.restore(); 902 | canvas.context.strokeStyle = canvas.strokeStyle; 903 | canvas.context.moveTo(0, this.canvasHeight / 2); 904 | canvas.context.lineTo(this.canvasWidth, this.canvasHeight / 2); 905 | canvas.context.stroke(); 906 | canvas.context.restore(); 907 | }); 908 | } 909 | 910 | get view() { 911 | return this.getAttribute('view'); 912 | } 913 | 914 | set view(value) { 915 | this.setAttribute('view', value); 916 | } 917 | 918 | get state() { 919 | return this.getAttribute('state'); 920 | } 921 | 922 | set state(value) { 923 | this.setAttribute('state', value); 924 | } 925 | 926 | get src() { 927 | return this.getAttribute('src'); 928 | } 929 | 930 | set src(value) { 931 | this.setAttribute('src', value); 932 | this.input.src = value; 933 | } 934 | 935 | clearWaveform() { 936 | this.canvases.forEach(canvas => canvas.context.clearRect(0, 0, canvas.element.width, canvas.element.height)); 937 | } 938 | 939 | clearFrequenciesDisplay() { 940 | this.frequencyCanvasContext.clearRect(0, 0, this.frequencyCanvas.width, this.frequencyCanvas.height); 941 | } 942 | } 943 | 944 | customElements.define('audio-recorder', AudioRecorder); 945 | -------------------------------------------------------------------------------- /dist/audio-recorder.js: -------------------------------------------------------------------------------- 1 | (()=>{"use strict";class t extends HTMLElement{static get observedAttributes(){return["label"]}constructor(){super(),this.attachShadow({mode:"open"}).innerHTML='\n \n \n \n ',this.button=this.shadowRoot.querySelector("button"),this.label=this.shadowRoot.querySelector("#label"),this.ripple=this.shadowRoot.querySelector(".ripple")}connectedCallback(){this.hasAttribute("label")?this.label.textContent=this.getAttribute("label"):this.label.style.display="none",this.button.addEventListener("click",(()=>{this.button.classList.add("active")})),this.ripple.addEventListener("animationend",(()=>{this.button.classList.remove("active")}))}attributeChangedCallback(t){"label"===t&&(this.hasAttribute("label")?this.label.textContent=this.getAttribute("label"):this.label.style.display="none")}get disabled(){return this.hasAttribute("disabled")}set 
disabled(t){this.button.disabled=t,t?this.setAttribute("disabled",""):this.removeAttribute("disabled")}}customElements.get("material-button")||customElements.define("material-button",t);class e extends HTMLElement{static get observedAttributes(){return["value","disabled"]}constructor(){super(),this.attachShadow({mode:"open"}).innerHTML='\n \n \n
\n
\n \n
\n \n
\n ',this.input=this.shadowRoot.querySelector("input[type=range]")}connectedCallback(){this.container=this.shadowRoot.querySelector("#container"),this.host=this.input.getRootNode().host,this.min=this.hasAttribute("min")?this.getAttribute("min"):1,this.input.min=this.min,this.max=this.hasAttribute("max")?this.getAttribute("max"):100,this.input.max=this.max,this.step=this.hasAttribute("step")?this.getAttribute("step"):1,this.input.step=this.step,this.input.value=this.hasAttribute("value")?this.getAttribute("value"):this.input.value,this.value=this.input.value,this.disabled=this.hasAttribute("disabled"),this.input.addEventListener("input",this.handleInput.bind(this));const t=/rgba\((\d{1,3}),\s?(\d{1,3}),\s?(\d{1,3}),\s?(\d|\d\.\d+)\)/,e=getComputedStyle(this.host),n=e.getPropertyValue("--thumb-color").trim()||e.getPropertyValue("--thumb-color").trim(),i=t.exec(n),o=/^#([A-Fa-f0-9]{3}){1,2}$/.test(n)?this.hexToRgbA(n):/rgb\((.+)\)/.test(n)?n.replace(/rgb\((.+)\)/,"rgba($1, 0.1)"):t.test(n)?`rgba(${i[1]}, ${i[2]}, ${i[3]}, 0.1)`:null;if(null===o)throw new Error("invalid color specified for --thumb color: "+n);this.host.style.setProperty("--thumb-color-light",o)}handleInput(t){this.value=t.target.value,this.dispatchEvent(new CustomEvent("change",{detail:{value:t.target.value}}))}hexToRgbA(t){let e=[...t.substring(1)];return 3===e.length&&(e=[e[0],e[0],e[1],e[1],e[2],e[2]]),e="0x"+e.join(""),`rgba(${[e>>16&255,e>>8&255,255&e].join(",")}, 0.1)`}set value(t){this.input.value=t,this.setAttribute("value",t)}get value(){return this.getAttribute("value")}set disabled(t){this.input.disabled=t,t?this.setAttribute("disabled",""):this.removeAttribute("disabled")}get disabled(){return this.hasAttribute("disabled")}attributeChangedCallback(t,e,n){"value"===t&&(this.input.value=n),"disabled"===t&&(this.input.disabled=this.hasAttribute("disabled"))}}customElements.get("material-slider")||customElements.define("material-slider",e);class n extends HTMLElement{static get observedAttributes(){return["view"]}constructor(){super(),this.attachShadow({mode:"open"}).innerHTML='\n \n \n \n \n
\n
\n
\n \n
\n \n
\n \n
\n \n
\n \n
\n
\n \n
\n
\n \n mic\n \n \n \n mic_off\n \n \n \n play_arrow\n \n \n \n pause\n \n \n \n fiber_manual_record\n \n \n \n stop\n \n \n \n \n save\n \n \n \n \n equalizer\n \n \n \n graphic_eq\n \n
\n \n
\n \n volume_off\n \n \n \n \n \n volume_up\n \n
\n \n
\n / \n
\n
\n
\n ',this.hours=0,this.minutes=0,this.seconds=0,this.secs=0,this.pauseTime=0,this.audioBuffers=[],this.frequencies=!1,this.state="idle",this.view=this.getAttribute("view")||"frequencies",this.bars=parseInt(this.getAttribute("bars")||20,10),this.mediaElementSource=null,this.mediaStreamSource=null,this.nativeFileSystemSupported="showSaveFilePicker"in window,this.maxChunkLength=524e3,this.canvas=this.shadowRoot.querySelector("#waveform"),this.canvasContext=this.canvas.getContext("2d"),this.progressCanvas=this.shadowRoot.querySelector("#progress"),this.progressCanvasContext=this.progressCanvas.getContext("2d"),this.audioContainer=this.shadowRoot.querySelector("#audio-container"),this.frequencyCanvas=this.shadowRoot.querySelector("#frequencies"),this.frequencyCanvasContext=this.frequencyCanvas.getContext("2d"),this.waveformContainer=this.shadowRoot.querySelector("#waveform-container"),this.progressContainer=this.shadowRoot.querySelector("#progress-container"),this.frequenciesContainer=this.shadowRoot.querySelector("#frequencies-container"),this.playButton=this.shadowRoot.querySelector("#play"),this.pauseButton=this.shadowRoot.querySelector("#pause"),this.elapsedTime=this.shadowRoot.querySelector("#elapsed-time"),this.totalTime=this.shadowRoot.querySelector("#total-time"),this.volume=this.shadowRoot.querySelector("#volume"),this.volumeMinButton=this.shadowRoot.querySelector("#volume-min"),this.volumeMaxButton=this.shadowRoot.querySelector("#volume-max"),this.input=this.shadowRoot.querySelector("audio"),this.freqButton=this.shadowRoot.querySelector("#frequencies-button"),this.waveformButton=this.shadowRoot.querySelector("#waveform-button"),this.captureAudioButton=this.shadowRoot.querySelector("#capture-audio"),this.stopCaptureAudioButton=this.shadowRoot.querySelector("#stop-capture-audio"),this.recordAudioButton=this.shadowRoot.querySelector("#record-audio"),this.stopRecordAudioButton=this.shadowRoot.querySelector("#stop-record-audio"),this.saveAudioLink=this.shadowRoot.querySelector("#save-audio-link"),this.nativeFileSystemSupported&&this.saveAudioLink.addEventListener("click",(async()=>this.saveFile(this.recording)))}resizeCanvas({width:t,height:e}){this.canvas.width=0,this.canvas.height=0,this.canvas.width=t,this.canvas.height=e,this.progressCanvas.width=t,this.progressCanvas.height=e,this.frequencyCanvas.width=t,this.frequencyCanvas.height=e,this.canvasWidth=this.canvas.width,this.canvasHeight=this.canvas.height,this.mimeType="isTypeSupported"in MediaRecorder?[{type:"audio/mpeg",ext:"mp3"},{type:"audio/webm",ext:"webm"},{type:"audio/mp4",ext:"mp4"}].find((({type:t})=>MediaRecorder.isTypeSupported(t))):{type:"audio/mpeg",ext:"mp3"}}connectedCallback(){const t=getComputedStyle(this.shadowRoot.host),e=t.getPropertyValue("--waveform-background-color"),n=t.getPropertyValue("--waveform-color"),i=t.getPropertyValue("--waveform-progress-color");this.canvases=[{element:this.canvas,context:this.canvasContext,fillStyle:e,strokeStyle:n},{element:this.progressCanvas,context:this.progressCanvasContext,fillStyle:e,strokeStyle:i}],this.frequenciesBackgroundColor=t.getPropertyValue("--frequency-background-color"),this.frequenciesBarsColor=t.getPropertyValue("--frequency-bars-color"),setTimeout((()=>{const{width:t,height:e}=this.audioContainer.getBoundingClientRect();if(this.resizeCanvas({width:t,height:e}),"ResizeObserver"in window){let t=!0;const e=new 
ResizeObserver((n=>{t?t=!1:n.forEach((({contentRect:n})=>{this.resizeCanvas(n),"waveform"===this.view&&this.recording&&this.renderWaveform(this.recording),"frequencies"===this.view&&this.analyser&&(cancelAnimationFrame(this.frequencyAnimation),this.renderFrequencyAnalyzer()),t=!0,e.observe(this.audioContainer)}))}));e.observe(this.audioContainer)}})),this.showTotalTime(0),this.showElapsedTime(0),this.audioContainer.addEventListener("click",this.handleWaveformClick.bind(this)),this.playButton.addEventListener("click",this.playPause.bind(this)),this.pauseButton.addEventListener("click",this.playPause.bind(this)),this.volume.addEventListener("change",(t=>this.setVolume(t.detail.value))),this.input.addEventListener("ended",this.stopAudio.bind(this)),this.freqButton.addEventListener("click",this.showFrequencyAnalyzer.bind(this)),this.waveformButton.addEventListener("click",this.showWaveform.bind(this)),this.captureAudioButton.addEventListener("click",this.captureAudio.bind(this)),this.stopCaptureAudioButton.addEventListener("click",this.stopCaptureAudio.bind(this)),this.recordAudioButton.addEventListener("click",this.recordAudio.bind(this)),this.stopRecordAudioButton.addEventListener("click",this.stopRecordAudio.bind(this)),this.volumeMinButton.addEventListener("click",(t=>{this.setVolume(0),this.volume.value=0})),this.volumeMaxButton.addEventListener("click",(t=>{this.setVolume(1),this.volume.value=1}));const o=()=>{this.isWebKit="webkitAudioContext"in window,this.context=new(window.AudioContext||window.webkitAudioContext),this.output=this.context.destination,this.gainNode=this.context.createGain(),this.analyser=this.context.createAnalyser(),this.analyser.fftSize=256,document.removeEventListener("mousedown",o)};document.addEventListener("mousedown",o)}getMediaElementSource(t){return this.mediaElementSource||(this.mediaElementSource=this.context.createMediaElementSource(t),this.mediaElementSource.connect(this.analyser),this.mediaElementSource.connect(this.gainNode)),this.mediaElementSource}getMediaStreamSource(t){return this.mediaStreamSource||(this.mediaStreamSource=this.context.createMediaStreamSource(t),this.mediaStreamSource.connect(this.analyser),this.mediaStreamSource.connect(this.gainNode)),this.mediaStreamSource}async initializeAudio(t){this.curSource=t instanceof HTMLAudioElement?this.getMediaElementSource(t):this.getMediaStreamSource(t),this.curSource.connect(this.analyser),this.curSource.connect(this.gainNode),this.gainNode.connect(this.output)}async openFile(t){await this.loadFile(t),await this.initializeAudio(this.input)}async loadFile(t){const e=new FileReader;e.onloadend=t=>{this.src=t.target.result,this.nativeFileSystemSupported||(this.saveAudioLink.download="capture."+this.mimeType.ext,this.saveAudioLink.href=t.target.result)},e.readAsDataURL(t),await this.renderWaveform(t),await this.initializeAudio(this.input),this.progressContainer.style.width=0,this.view="waveform"}async saveFile(t){const e="."+t.type.split("/").pop(),n=await window.showSaveFilePicker({types:[{description:"Audio file",accept:{[t.type]:e}}]}),i=await n.createWritable();await i.write({type:"write",data:t}),await i.close()}async captureAudio(){try{this.stream=await navigator.mediaDevices.getUserMedia({audio:!0}),this.currentVolume=this.volume.value,this.setVolume(0),this.volume.disabled=!0,await this.initializeAudio(this.stream),this.renderFrequencyAnalyzer(),this.view="frequencies",this.state="capturing"}catch(t){"NotAllowedError"===t.name&&this.dispatchEvent(new 
CustomEvent("notallowed",{detail:{message:"Access to the device's microphone is not allowed"}}))}}stopCaptureAudio(){this.stream&&(this.stream.getTracks().map((t=>t.stop())),this.mediaStreamSource=null,cancelAnimationFrame(this.frequencyAnimation),this.clearFrequenciesDisplay(),this.state="idle",this.volume.disabled=!1,this.setVolume(this.currentVolume))}recordAudio(){const t=[];this.recorder=new MediaRecorder(this.stream);const e={type:""+this.mimeType.type};this.recorder.start(250),this.state="recording",this.recorder.addEventListener("dataavailable",(({data:n})=>{if(void 0!==n&&0!==n.size){t.push(n);const i=new Blob(t,e);this.renderWaveform(i)}})),this.recorder.addEventListener("stop",(async()=>{this.recording=new Blob(t,e),this.stopCaptureAudio(),await this.loadFile(this.recording)}))}stopRecordAudio(){this.recorder.stop()}showFrequencyAnalyzer(){this.view="frequencies"}showWaveform(){this.view="waveform"}setVolume(t){this.gainNode.gain.setValueAtTime(t,this.context.currentTime)}handleWaveformClick(t){this.curSource&&(this.state="idle",this.input.pause(),cancelAnimationFrame(this.timerId)),this.progressContainer.style.width=t.offsetX+"px",this.input.currentTime=t.offsetX/this.canvasWidth*this.duration,this.showElapsedTime(this.input.currentTime)}stringToArrayBuffer(t){return new Uint8Array(t.length).map(((e,n)=>t.codePointAt(n)))}getArrayBuffer(t){const e=new FileReader;return e.readAsArrayBuffer(t),new Promise(((t,n)=>{e.onerror=t=>n(t),e.onloadend=e=>t(e.target.result)}))}sliceAudio(t,e,n){const i=e+n===t.byteLength?t:t.slice(e,n),o=new Blob([new Uint8Array(i)]),a=new FileReader;return a.readAsArrayBuffer(o),new Promise(((t,e)=>{a.onloadend=n=>{const i=n.target.result;this.isWebKit?this.context.decodeAudioData(i,(e=>t(e)),(t=>e(t))):this.context.decodeAudioData(i).then((e=>t(e))).catch((t=>e(t)))}}))}async getAudioBuffers(t){const e=t.byteLength,n=e>this.maxChunkLength?this.maxChunkLength:e;let i=0+n;const o=this,a=[];for await(const s of((t,i,a)=>async function*(){for(;ie?e:i+n}}())(t,0,i))a.push(s);return a}getNodesAfterOffset(t,e){let n=0,i=0;return[t.filter((t=>(n+=t.buffer.duration,i+=ne))),e-i]}playPause(){const t=()=>{const e=this.input.currentTime;this.showElapsedTime(e);const n=e/this.duration*this.canvasWidth;this.progressContainer.style.width=n+"px",this.timerId=requestAnimationFrame(t)};"playing"===this.state?(this.state="idle",this.input.pause(),this.pauseTime=this.input.currentTime,this.clearFrequenciesDisplay(),cancelAnimationFrame(this.timerId),cancelAnimationFrame(this.frequencyAnimation)):(this.state="playing",this.input.play(),this.renderFrequencyAnalyzer(),requestAnimationFrame(t))}stopAudio(){this.state="idle",this.clearFrequenciesDisplay(),cancelAnimationFrame(this.timerId),cancelAnimationFrame(this.frequencyAnimation),this.input.currentTime=0,this.progressContainer.style.width=0,this.showElapsedTime(0)}formatTime(t){const e=Math.floor(t/3600),n=Math.floor(t%3600/60),i=Math.floor(t)%60,o=e<10?"0"+e:e,a=n<10?"0"+n:n,s=i<10?"0"+i:i;return"00"!==o?`${o}:${a}:${s}`:`${a}:${s}`}showElapsedTime(t){this.elapsedTime.innerHTML=this.formatTime(t)}showTotalTime(t){this.totalTime.innerHTML=this.formatTime(t)}renderFrequencyAnalyzer(){const t=this.analyser.frequencyBinCount,e=new Float32Array(t),n=(this.canvasWidth-(this.bars-1))/this.bars;this.frequencyCanvasContext.clearRect(0,0,this.canvasWidth,this.canvasHeight);const 
i=()=>{this.frequencyAnimation=requestAnimationFrame(i),this.analyser.getFloatFrequencyData(e),this.frequencyCanvasContext.fillStyle=this.frequenciesBackgroundColor,this.frequencyCanvasContext.fillRect(0,0,this.canvasWidth,this.canvasHeight);let o=0;for(let i=0;it.getChannelData(0))),n=e.reduce(((t,e)=>t+e.length),0),i=new Float32Array(n);let o=0;return e.forEach((t=>{i.set(t,o),o+=t.length})),i}async renderWaveform(t){const e=await this.getArrayBuffer(t),n=await this.getAudioBuffers(e);this.audioBuffers=n,this.duration=n.reduce(((t,e)=>t+e.duration),0),this.showTotalTime(this.duration);const i=this.getWaveformData(n),o=2e3,a=i.length,s=Math.floor(a/o),r=this.canvasWidth/o;this.canvases.forEach((t=>{t.context.save(),t.context.fillStyle=t.fillStyle,t.context.fillRect(0,0,this.canvasWidth,this.canvasHeight),t.context.strokeStyle=t.strokeStyle,t.context.translate(0,this.canvasHeight/2),t.context.lineWidth=1,t.context.beginPath();for(let e=0;e<=o;e++){const n=Math.floor(s*e),o=e*r,a=i[n]*this.canvasHeight*.8;t.context.moveTo(o,a),t.context.lineTo(o,-1*a)}t.context.stroke(),t.context.restore(),t.context.strokeStyle=t.strokeStyle,t.context.moveTo(0,this.canvasHeight/2),t.context.lineTo(this.canvasWidth,this.canvasHeight/2),t.context.stroke(),t.context.restore()}))}get view(){return this.getAttribute("view")}set view(t){this.setAttribute("view",t)}get state(){return this.getAttribute("state")}set state(t){this.setAttribute("state",t)}get src(){return this.getAttribute("src")}set src(t){this.setAttribute("src",t),this.input.src=t}clearWaveform(){this.canvases.forEach((t=>t.context.clearRect(0,0,t.element.width,t.element.height)))}clearFrequenciesDisplay(){this.frequencyCanvasContext.clearRect(0,0,this.frequencyCanvas.width,this.frequencyCanvas.height)}}customElements.define("audio-recorder",n)})(); --------------------------------------------------------------------------------