├── src ├── react-app-env.d.ts ├── banner.png ├── index.css ├── reportWebVitals.ts ├── index.tsx ├── App.css ├── SliderComponent.tsx ├── App.tsx ├── fft.js └── AudioSync.js ├── img ├── banner.png └── averages.png ├── public ├── banner.png ├── favicon.ico ├── robots.txt ├── manifest.json └── index.html ├── .gitignore ├── tsconfig.json ├── .github └── workflows │ └── deploy.yaml ├── LICENSE ├── package.json └── README.md /src/react-app-env.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | -------------------------------------------------------------------------------- /img/banner.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/not-matt/AudioSync/HEAD/img/banner.png -------------------------------------------------------------------------------- /src/banner.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/not-matt/AudioSync/HEAD/src/banner.png -------------------------------------------------------------------------------- /img/averages.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/not-matt/AudioSync/HEAD/img/averages.png -------------------------------------------------------------------------------- /public/banner.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/not-matt/AudioSync/HEAD/public/banner.png -------------------------------------------------------------------------------- /public/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/not-matt/AudioSync/HEAD/public/favicon.ico -------------------------------------------------------------------------------- /public/robots.txt: -------------------------------------------------------------------------------- 1 | # https://www.robotstxt.org/robotstxt.html 2 | User-agent: * 3 | Disallow: 4 | -------------------------------------------------------------------------------- /src/index.css: -------------------------------------------------------------------------------- 1 | body { 2 | margin: 0; 3 | font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', 'Roboto', 'Oxygen', 4 | 'Ubuntu', 'Cantarell', 'Fira Sans', 'Droid Sans', 'Helvetica Neue', 5 | sans-serif; 6 | -webkit-font-smoothing: antialiased; 7 | -moz-osx-font-smoothing: grayscale; 8 | } 9 | 10 | code { 11 | font-family: source-code-pro, Menlo, Monaco, Consolas, 'Courier New', 12 | monospace; 13 | } 14 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.js 7 | 8 | # testing 9 | /coverage 10 | 11 | # production 12 | /build 13 | 14 | # misc 15 | .DS_Store 16 | .env.local 17 | .env.development.local 18 | .env.test.local 19 | .env.production.local 20 | 21 | npm-debug.log* 22 | yarn-debug.log* 23 | yarn-error.log* 24 | public/audio/* 25 | -------------------------------------------------------------------------------- /src/reportWebVitals.ts: -------------------------------------------------------------------------------- 1 | import { ReportHandler } from 'web-vitals'; 2 | 3 | const reportWebVitals = (onPerfEntry?: ReportHandler) => { 4 | if (onPerfEntry && onPerfEntry instanceof Function) { 5 | import('web-vitals').then(({ getCLS, getFID, getFCP, getLCP, getTTFB }) => { 6 | getCLS(onPerfEntry); 7 | getFID(onPerfEntry); 8 | getFCP(onPerfEntry); 9 | getLCP(onPerfEntry); 10 | getTTFB(onPerfEntry); 11 | }); 12 | } 13 | }; 14 | 15 | export default reportWebVitals; 16 | -------------------------------------------------------------------------------- /public/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "short_name": "React App", 3 | "name": "Create React App Sample", 4 | "icons": [ 5 | { 6 | "src": "favicon.ico", 7 | "sizes": "64x64 32x32 24x24 16x16", 8 | "type": "image/x-icon" 9 | }, 10 | { 11 | "src": "logo192.png", 12 | "type": "image/png", 13 | "sizes": "192x192" 14 | }, 15 | { 16 | "src": "logo512.png", 17 | "type": "image/png", 18 | "sizes": "512x512" 19 | } 20 | ], 21 | "start_url": ".", 22 | "display": "standalone", 23 | "theme_color": "#000000", 24 | "background_color": "#ffffff" 25 | } 26 | -------------------------------------------------------------------------------- /src/index.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import ReactDOM from 'react-dom/client'; 3 | import './index.css'; 4 | import App from './App'; 5 | import reportWebVitals from './reportWebVitals'; 6 | 7 | const root = ReactDOM.createRoot( 8 | document.getElementById('root') as HTMLElement 9 | ); 10 | root.render( 11 | 12 | 13 | 14 | ); 15 | 16 | // If you want to start measuring performance in your app, pass a function 17 | // to log results (for example: reportWebVitals(console.log)) 18 | // or send to an analytics endpoint. 
Learn more: https://bit.ly/CRA-vitals 19 | reportWebVitals() 20 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es5", 4 | "lib": [ 5 | "dom", 6 | "dom.iterable", 7 | "esnext" 8 | ], 9 | "allowJs": true, 10 | "skipLibCheck": true, 11 | "esModuleInterop": true, 12 | "allowSyntheticDefaultImports": true, 13 | "strict": true, 14 | "forceConsistentCasingInFileNames": true, 15 | "noFallthroughCasesInSwitch": true, 16 | "module": "esnext", 17 | "moduleResolution": "node", 18 | "resolveJsonModule": true, 19 | "isolatedModules": true, 20 | "noEmit": true, 21 | "jsx": "react-jsx" 22 | }, 23 | "include": [ 24 | "src" 25 | ] 26 | } 27 | -------------------------------------------------------------------------------- /.github/workflows/deploy.yaml: -------------------------------------------------------------------------------- 1 | name: Build and Deploy 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | 8 | permissions: 9 | contents: write 10 | 11 | jobs: 12 | build-deploy: 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - name: Checkout code 17 | uses: actions/checkout@v2 18 | 19 | - name: Set up Node.js 20 | uses: actions/setup-node@v2 21 | with: 22 | node-version: 14 23 | 24 | - name: Install dependencies 25 | run: npm install 26 | 27 | - name: Build 28 | run: npm ci && npm run build 29 | 30 | - name: Deploy to GitHub Pages 31 | uses: peaceiris/actions-gh-pages@v3 32 | with: 33 | personal_token: ${{ secrets.GITHUB_TOKEN }} 34 | publish_dir: ./build 35 | publish_branch: gh-pages -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Matthew Bowley 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /src/App.css: -------------------------------------------------------------------------------- 1 | .App { 2 | /* center everything on the page */ 3 | /* display: flex; */ 4 | flex-direction: column; 5 | align-items: center; 6 | justify-content: center; 7 | gap: 20px; 8 | text-align: center; 9 | /* padding: 20px; */ 10 | } 11 | 12 | .dropzone { 13 | flex: 1; 14 | display: flex; 15 | flex-direction: column; 16 | align-items: center; 17 | padding: 20px; 18 | margin: 0 auto; 19 | width: 30%; 20 | border-width: 2; 21 | border-radius: 2; 22 | border-color: #eeeeee; 23 | border-style: dashed; 24 | background-color: #fafafa; 25 | color: #bdbdbd; 26 | outline: none; 27 | transition: border .24s ease-in-out; 28 | } 29 | 30 | .App-logo { 31 | height: 40vmin; 32 | pointer-events: none; 33 | } 34 | 35 | @media (prefers-reduced-motion: no-preference) { 36 | .App-logo { 37 | animation: App-logo-spin infinite 20s linear; 38 | } 39 | } 40 | 41 | .App-header { 42 | background-color: #282c34; 43 | min-height: 100vh; 44 | display: flex; 45 | flex-direction: column; 46 | align-items: center; 47 | justify-content: center; 48 | font-size: calc(10px + 2vmin); 49 | color: white; 50 | } 51 | 52 | .App-link { 53 | color: #61dafb; 54 | } 55 | 56 | @keyframes App-logo-spin { 57 | from { 58 | transform: rotate(0deg); 59 | } 60 | 61 | to { 62 | transform: rotate(360deg); 63 | } 64 | } -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "audiosync", 3 | "version": "0.1.0", 4 | "private": true, 5 | "homepage": "https://not-matt.github.io/AudioSync", 6 | "dependencies": { 7 | "@emotion/react": "^11.11.0", 8 | "@emotion/styled": "^11.11.0", 9 | "@mui/material": "^5.13.1", 10 | "@testing-library/jest-dom": "^5.16.5", 11 | "@testing-library/react": "^13.4.0", 12 | "@testing-library/user-event": "^13.5.0", 13 | "@types/jest": "^27.5.2", 14 | "@types/node": "^16.18.31", 15 | "@types/react": "^18.2.6", 16 | "@types/react-dom": "^18.2.4", 17 | "console-browserify": "^1.2.0", 18 | "debounce": "^1.2.1", 19 | "iodash": "^0.0.1-security", 20 | "react": "18.2", 21 | "react-dom": "^18.2.0", 22 | "react-dropzone": "^14.2.3", 23 | "react-scripts": "5.0.1", 24 | "wavesurfer.js": "^6.6.3", 25 | "web-vitals": "^2.1.4" 26 | }, 27 | "scripts": { 28 | "start": "react-scripts start", 29 | "build": "react-scripts build" 30 | }, 31 | "eslintConfig": { 32 | "extends": [ 33 | "react-app", 34 | "react-app/jest" 35 | ] 36 | }, 37 | "browserslist": { 38 | "production": [ 39 | ">0.2%", 40 | "not dead", 41 | "not op_mini all" 42 | ], 43 | "development": [ 44 | "last 1 chrome version", 45 | "last 1 firefox version", 46 | "last 1 safari version" 47 | ] 48 | }, 49 | "devDependencies": { 50 | "@types/lodash": "^4.14.195", 51 | "@types/wavesurfer.js": "^6.0.6", 52 | "typescript": "<5.0.0" 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /src/SliderComponent.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState } from 'react'; 2 | import { Slider } from '@mui/material'; 3 | 4 | interface SliderComponentProps { 5 | windowSize: number; 6 | shortWindowSize: number; 7 | onSettingsChange: (property: string, value: number) => void; 8 | } 9 | 10 | const SliderComponent: React.FC = ({ 11 | windowSize, 12 | shortWindowSize, 13 | 
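  // called when a slider change is committed, with a (property, value) pair for each window size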
onSettingsChange, 14 | }) => { 15 | const [sliderValues, setSliderValues] = useState<[number, number]>([ 16 | shortWindowSize, 17 | windowSize, 18 | ]); 19 | 20 | const handleValuesChange = ( 21 | event: Event, 22 | newValues: number | number[] 23 | ): void => { 24 | setSliderValues(Array.isArray(newValues) ? (newValues as [number, number]) : [newValues, windowSize]); 25 | }; 26 | 27 | const handleValuesChangeEnd = (): void => { 28 | const [newSmallWindowSize, newWindowSize] = sliderValues; 29 | onSettingsChange('shortWindowSize', newSmallWindowSize); 30 | onSettingsChange('windowSize', Math.max(newSmallWindowSize + 1, newWindowSize)); 31 | }; 32 | 33 | const valueLabelFormat = (value: number): string => { 34 | if (value === shortWindowSize) { 35 | return `Short Rolling Average Window Size: ${value}`; 36 | } 37 | if (value === windowSize) { 38 | return `Long Rolling Average Window Size: ${value}`; 39 | } 40 | return `${value}`; 41 | }; 42 | 43 | return ( 44 |
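      {/* MUI range Slider: sliderValues tracks the live [short, long] thumb positions and handleValuesChangeEnd commits them */}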
45 | 55 |
56 | ); 57 | }; 58 | 59 | export default SliderComponent; -------------------------------------------------------------------------------- /public/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 12 | 13 | 17 | 18 | 27 | AudioSync 28 | 29 | 30 | 31 |
32 | 42 | 43 | 44 | -------------------------------------------------------------------------------- /src/App.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState, useEffect } from 'react'; 2 | import { Button, CircularProgress } from '@mui/material'; 3 | import WaveSurfer from 'wavesurfer.js'; 4 | import Dropzone from 'react-dropzone'; 5 | import { debounce } from 'lodash'; 6 | import AudioSyncPlugin from './AudioSync.js'; 7 | import SliderComponent from './SliderComponent'; 8 | import './App.css'; 9 | import bannerImage from './banner.png'; 10 | 11 | const App: React.FC = () => { 12 | const [file, setFile] = useState(null); 13 | const [isLoading, setIsLoading] = useState(false); 14 | const [wavesurfer, setWavesurfer] = useState(null); 15 | const [isPlaying, setIsPlaying] = useState(false); 16 | const [AudioSyncSettings, setAudioSyncSettings] = useState({ 17 | windowSize: 80, // Size of the rolling window and long moving average 18 | shortWindowSize: 5, // Size of the short moving average 19 | }); 20 | 21 | useEffect(() => { 22 | if (wavesurfer) { 23 | const AudioSyncInstance = wavesurfer["audiosync"]; 24 | debouncedUpdateSettings(AudioSyncInstance, AudioSyncSettings); 25 | } 26 | }, [AudioSyncSettings]); // eslint-disable-line react-hooks/exhaustive-deps 27 | 28 | const debouncedUpdateSettings = debounce((AudioSyncInstance: any, settings: any) => { 29 | AudioSyncInstance.updateSettings(settings); 30 | }, 1000); // Adjust the debounce delay as needed (e.g., 500ms) 31 | 32 | const updateSettings = (property: string, value: number) => { 33 | setAudioSyncSettings((prevState) => ({ 34 | ...prevState, 35 | [property]: value, 36 | })); 37 | }; 38 | 39 | const handleFileDrop = (files: string | any[]) => { 40 | if (!files || files.length === 0) { 41 | setFile(null); 42 | setIsLoading(false); 43 | return 44 | } 45 | setFile(files[0]); 46 | setIsLoading(true); 47 | setIsPlaying(false); 48 | const url = URL.createObjectURL(files[0]); 49 | if (wavesurfer) { 50 | wavesurfer.destroy(); 51 | } 52 | 53 | const options = { 54 | container: '#waveform', 55 | waveColor: '#f0921b', 56 | progressColor: '#fd544b', 57 | loaderColor: '#fd544b', 58 | cursorColor: 'black', 59 | minPxPerSec: 25, 60 | scrollParent: true, 61 | plugins: [ 62 | AudioSyncPlugin.create({ 63 | container: '#AudioSync', 64 | settings: AudioSyncSettings, 65 | labels: true, 66 | fftSamples: 512, 67 | }), 68 | ] 69 | }; 70 | 71 | const newWavesurfer = WaveSurfer.create(options); 72 | 73 | // Set the new wavesurfer instance 74 | setWavesurfer(newWavesurfer); 75 | 76 | // Load the audio file and update settings 77 | if (newWavesurfer) { 78 | // const AudioSyncInstance = newWavesurfer["audiosync"]; 79 | // AudioSyncInstance?.updateSettings(AudioSyncSettings); 80 | 81 | newWavesurfer.load(url); 82 | 83 | // Hide loading indicator 84 | newWavesurfer.on('ready', () => { 85 | setIsLoading(false); 86 | }); 87 | } 88 | }; 89 | 90 | const handlePlayPause = () => { 91 | if (!wavesurfer) { return; } 92 | wavesurfer.playPause(); 93 | setIsPlaying(!isPlaying); 94 | }; 95 | 96 | return ( 97 |
98 | {/* banner image centered */} 99 | logo 100 | {!file ? ( 101 | 102 | {({ getRootProps, getInputProps }) => ( 103 |
104 | 105 |

Drag and drop an audio file here, or click to browse

106 |
107 | )} 108 |
109 | ) : ( 110 |
111 |
112 | 115 | 120 |
121 |
122 | )} 123 | {isLoading && ( 124 |
125 | 126 |

Loading...

127 |
128 | )} 129 |
130 |
131 |
132 | ); 133 | }; 134 | 135 | export default App 136 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![AudioSync](./img/banner.png) 2 | ## Realtime Audio Segmentation 3 | 4 | The real-time audio segmentation algorithm described here is specifically developed to address the need for dynamic and coherent visual effects in audio reactive LED lighting systems. The goal is to create visually engaging displays that change and flow with the music. This algorithm segments the real-time audio into coherent sections and provides a signal when a change in the song occurs. 5 | 6 | ## Features 7 | 8 | - **Real-time**: The algorithm processes the audio in real-time, allowing it to respond to changes in the music as they occur. 9 | - **Coherent**: The algorithm segments the audio into coherent sections that are consistent with the music. Sometimes it might miss a section or detect a section that is not present, but overall, the sections are consistent enough with the music to create a pleasing visual effect. 10 | - **Lightweight**: The algorithm is lightweight and can be implemented on microcontrollers such as the ESP32. It is also highly optimized to minimize computational load. 11 | 12 | 13 |
14 | 15 | 16 | ![Build Status](https://img.shields.io/github/actions/workflow/status/not-matt/AudioSync/deploy.yaml?style=for-the-badge) 17 | 18 | 19 | [![Live Demo](https://img.shields.io/badge/Live%20Demo-Try%20it%20out!-blue?style=for-the-badge)](https://not-matt.github.io/AudioSync/) 20 | 21 |
22 | 23 | ## Algorithmic Outline 24 | 25 | 1. **Frame-based analysis**: The audio is processed frame by frame in real time. Each frame represents a short segment of the audio signal. The frame size is typically 512 samples, which corresponds to 11.6 ms at a sampling rate of 44.1 kHz. The frame size can be adjusted to fit existing system requirements with little detriment to the algorithm's performance. 26 | 2. **Feature extraction**: Calculate audio features for each frame that capture general characteristics of the sound. Effective features include Low-Frequency Content (LFC), Energy, and Zero-Crossing Rate (ZCR). LFC is computed from the frequency spectrum obtained through an FFT, while Energy and ZCR are calculated directly from the raw audio frame. These features are combined into a three-dimensional feature vector. 27 | 3. **Averaging**: Rolling averages compare the current sound to what has been heard over the last few seconds. A rolling window is maintained: for each frame, the newest feature vector is added to the front of the window and the oldest is discarded. Short and long averages are computed from the window; the short average represents the current sound, and the long average represents the recent past. To keep this cheap, each average is updated incrementally by adding the newest value and subtracting the oldest, rather than re-summing and dividing the entire window. See the image below for a visual representation of the window averaging process. 28 | ![Averaging](./img/averages.png) 29 | 4. **Change detection**: The Euclidean distance between the short and long averages measures how much the audio characteristics have changed. A large distance indicates a significant change in the sound. The distance is compared to a threshold to decide whether a change has occurred. 30 | 5. **Thresholding (WIP)**: The threshold is dynamically adjusted based on the average distance between the short and long averages, allowing the algorithm to adapt to different songs and environments. A cooldown period prevents the detector from firing too often and making the lights flicker when the music is very dynamic. 31 | 32 | ## Usage 33 | 34 | To use this algorithm in your project, follow these steps: 35 | 36 | 1. **Integration**: Integrate the algorithm into your system or firmware, considering the constraints of the target platform (e.g., an ESP32 microcontroller). 37 | 38 | 2. **Audio Input**: Ensure that the algorithm has access to a real-time audio feed, for example from a microphone or the system audio output. 39 | 40 | 3. **Configuration**: Adjust the parameters of the algorithm to your application's requirements: frame size, feature selection, averaging window sizes, threshold value, and event handling mechanism. The most important values are the rolling-average window sizes. The short average should capture the current sound and the long average the recent past; around 50 ms for the short window and 2-5 seconds for the long window are good starting points. You can experiment with different values in the live demo. Every song has different characteristics, so test with a range of music to find the best values for your application. 41 | 42 | 4. **Event Handling**: Integrate an event handling mechanism to respond to the detected audio sections or changes, as in the sketch below.
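To make this concrete, here is a minimal TypeScript sketch of steps 3 and 4, plus a simplified stand-in for step 5, driving an event callback. It is illustrative only: `ChangeDetector`, `FeatureVector`, `onChange`, and the fixed threshold are invented names and values rather than part of the AudioSync code, it assumes a feature vector has already been extracted for each frame, and for readability it recomputes the two averages every frame instead of using the incremental update described in step 3.

```ts
// Illustrative sketch only: names, defaults, and the fixed threshold are placeholders.
type FeatureVector = [lfc: number, energy: number, zcr: number];

class ChangeDetector {
  private history: FeatureVector[] = [];
  private cooldown = 0;

  constructor(
    private shortWindow = 5,      // roughly 50 ms worth of frames
    private longWindow = 80,      // a few seconds worth of frames
    private threshold = 0.05,     // squared-distance trigger level, tune per setup
    private cooldownFrames = 60,  // minimum number of frames between triggers
    private onChange: () => void = () => {},
  ) {}

  /** Feed one frame's feature vector; calls onChange when a section change is detected. */
  push(vector: FeatureVector): void {
    this.history.unshift(vector);
    if (this.history.length > this.longWindow) this.history.pop();
    if (this.cooldown > 0) this.cooldown--;
    if (this.history.length < this.longWindow) return; // wait until the window is full

    // Short average = the newest frames; long average = the rest of the window.
    const shortAvg = mean(this.history.slice(0, this.shortWindow));
    const longAvg = mean(this.history.slice(this.shortWindow));

    // Squared Euclidean distance between the two averages.
    const dist = shortAvg.reduce((sum, x, i) => sum + (x - longAvg[i]) ** 2, 0);

    if (dist > this.threshold && this.cooldown === 0) {
      this.cooldown = this.cooldownFrames;
      this.onChange();
    }
  }
}

function mean(vectors: FeatureVector[]): FeatureVector {
  const sum: FeatureVector = [0, 0, 0];
  for (const v of vectors) { sum[0] += v[0]; sum[1] += v[1]; sum[2] += v[2]; }
  return [sum[0] / vectors.length, sum[1] / vectors.length, sum[2] / vectors.length];
}
```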
For example, use the events triggered by the algorithm to change visual effects on an LED strip or perform other desired actions. 43 | 44 | ## License 45 | 46 | AudioSync is provided under the MIT License, allowing you to use, modify, and distribute it freely. However, please refer to the license file for the full terms and conditions. 47 | 48 | ## Contributing 49 | 50 | Contributions to enhance and improve AudioSync are welcome. If you encounter any issues, have suggestions for improvements, or would like to contribute new features, please feel free to submit an issue or a pull request to the project repository. Your contributions will help make the algorithm better and more robust. 51 | 52 | ## Contact 53 | 54 | For any questions, comments, or inquiries regarding the AudioSync algorithm, please open an issue or contact @not_matt on WLED or LedFx's Discord. -------------------------------------------------------------------------------- /src/fft.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable complexity, no-redeclare, no-var, one-var */ 2 | 3 | /** 4 | * Calculate FFT - Based on https://github.com/corbanbrook/dsp.js 5 | * 6 | * @param {Number} bufferSize Buffer size 7 | * @param {Number} sampleRate Sample rate 8 | * @param {Function} windowFunc Window function 9 | * @param {Number} alpha Alpha channel 10 | */ 11 | 12 | export default function FFT(bufferSize, sampleRate, windowFunc, alpha) { 13 | this.bufferSize = bufferSize; 14 | this.sampleRate = sampleRate; 15 | this.bandwidth = (2 / bufferSize) * (sampleRate / 2); 16 | 17 | this.sinTable = new Float32Array(bufferSize); 18 | this.cosTable = new Float32Array(bufferSize); 19 | this.windowValues = new Float32Array(bufferSize); 20 | this.reverseTable = new Uint32Array(bufferSize); 21 | 22 | this.peakBand = 0; 23 | this.peak = 0; 24 | 25 | var i; 26 | switch (windowFunc) { 27 | case 'bartlett': 28 | for (i = 0; i < bufferSize; i++) { 29 | this.windowValues[i] = 30 | (2 / (bufferSize - 1)) * 31 | ((bufferSize - 1) / 2 - Math.abs(i - (bufferSize - 1) / 2)); 32 | } 33 | break; 34 | case 'bartlettHann': 35 | for (i = 0; i < bufferSize; i++) { 36 | this.windowValues[i] = 37 | 0.62 - 38 | 0.48 * Math.abs(i / (bufferSize - 1) - 0.5) - 39 | 0.38 * Math.cos((Math.PI * 2 * i) / (bufferSize - 1)); 40 | } 41 | break; 42 | case 'blackman': 43 | alpha = alpha || 0.16; 44 | for (i = 0; i < bufferSize; i++) { 45 | this.windowValues[i] = 46 | (1 - alpha) / 2 - 47 | 0.5 * Math.cos((Math.PI * 2 * i) / (bufferSize - 1)) + 48 | (alpha / 2) * 49 | Math.cos((4 * Math.PI * i) / (bufferSize - 1)); 50 | } 51 | break; 52 | case 'cosine': 53 | for (i = 0; i < bufferSize; i++) { 54 | this.windowValues[i] = Math.cos( 55 | (Math.PI * i) / (bufferSize - 1) - Math.PI / 2 56 | ); 57 | } 58 | break; 59 | case 'gauss': 60 | alpha = alpha || 0.25; 61 | for (i = 0; i < bufferSize; i++) { 62 | this.windowValues[i] = Math.pow( 63 | Math.E, 64 | -0.5 * 65 | Math.pow( 66 | (i - (bufferSize - 1) / 2) / 67 | ((alpha * (bufferSize - 1)) / 2), 68 | 2 69 | ) 70 | ); 71 | } 72 | break; 73 | case 'hamming': 74 | for (i = 0; i < bufferSize; i++) { 75 | this.windowValues[i] = 76 | 0.54 - 77 | 0.46 * Math.cos((Math.PI * 2 * i) / (bufferSize - 1)); 78 | } 79 | break; 80 | case 'hann': 81 | case undefined: 82 | for (i = 0; i < bufferSize; i++) { 83 | this.windowValues[i] = 84 | 0.5 * (1 - Math.cos((Math.PI * 2 * i) / (bufferSize - 1))); 85 | } 86 | break; 87 | case 'lanczoz': 88 | for (i = 0; i < bufferSize; i++) { 89 | 
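                    // Lanczos (sinc) window: sin(pi*x) / (pi*x), where x = 2i/(N-1) - 1 runs from -1 to 1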
this.windowValues[i] = 90 | Math.sin(Math.PI * ((2 * i) / (bufferSize - 1) - 1)) / 91 | (Math.PI * ((2 * i) / (bufferSize - 1) - 1)); 92 | } 93 | break; 94 | case 'rectangular': 95 | for (i = 0; i < bufferSize; i++) { 96 | this.windowValues[i] = 1; 97 | } 98 | break; 99 | case 'triangular': 100 | for (i = 0; i < bufferSize; i++) { 101 | this.windowValues[i] = 102 | (2 / bufferSize) * 103 | (bufferSize / 2 - Math.abs(i - (bufferSize - 1) / 2)); 104 | } 105 | break; 106 | default: 107 | throw Error("No such window function '" + windowFunc + "'"); 108 | } 109 | 110 | var limit = 1; 111 | var bit = bufferSize >> 1; 112 | var i; 113 | 114 | while (limit < bufferSize) { 115 | for (i = 0; i < limit; i++) { 116 | this.reverseTable[i + limit] = this.reverseTable[i] + bit; 117 | } 118 | 119 | limit = limit << 1; 120 | bit = bit >> 1; 121 | } 122 | 123 | for (i = 0; i < bufferSize; i++) { 124 | this.sinTable[i] = Math.sin(-Math.PI / i); 125 | this.cosTable[i] = Math.cos(-Math.PI / i); 126 | } 127 | 128 | this.calculateSpectrum = function (buffer) { 129 | // Locally scope variables for speed up 130 | var bufferSize = this.bufferSize, 131 | cosTable = this.cosTable, 132 | sinTable = this.sinTable, 133 | reverseTable = this.reverseTable, 134 | real = new Float32Array(bufferSize), 135 | imag = new Float32Array(bufferSize), 136 | bSi = 2 / this.bufferSize, 137 | sqrt = Math.sqrt, 138 | rval, 139 | ival, 140 | mag, 141 | spectrum = new Float32Array(bufferSize / 2); 142 | 143 | var k = Math.floor(Math.log(bufferSize) / Math.LN2); 144 | 145 | if (Math.pow(2, k) !== bufferSize) { 146 | throw new Error('Invalid buffer size, must be a power of 2.'); 147 | } 148 | if (bufferSize !== buffer.length) { 149 | throw new Error('Supplied buffer is not the same size as defined FFT. 
FFT Size: ' + 150 | bufferSize + 151 | ' Buffer Size: ' + 152 | buffer.length); 153 | } 154 | 155 | var halfSize = 1, 156 | phaseShiftStepReal, 157 | phaseShiftStepImag, 158 | currentPhaseShiftReal, 159 | currentPhaseShiftImag, 160 | off, 161 | tr, 162 | ti, 163 | tmpReal; 164 | 165 | for (var i = 0; i < bufferSize; i++) { 166 | real[i] = 167 | buffer[reverseTable[i]] * this.windowValues[reverseTable[i]]; 168 | imag[i] = 0; 169 | } 170 | 171 | while (halfSize < bufferSize) { 172 | phaseShiftStepReal = cosTable[halfSize]; 173 | phaseShiftStepImag = sinTable[halfSize]; 174 | 175 | currentPhaseShiftReal = 1; 176 | currentPhaseShiftImag = 0; 177 | 178 | for (var fftStep = 0; fftStep < halfSize; fftStep++) { 179 | var i = fftStep; 180 | 181 | while (i < bufferSize) { 182 | off = i + halfSize; 183 | tr = 184 | currentPhaseShiftReal * real[off] - 185 | currentPhaseShiftImag * imag[off]; 186 | ti = 187 | currentPhaseShiftReal * imag[off] + 188 | currentPhaseShiftImag * real[off]; 189 | 190 | real[off] = real[i] - tr; 191 | imag[off] = imag[i] - ti; 192 | real[i] += tr; 193 | imag[i] += ti; 194 | 195 | i += halfSize << 1; 196 | } 197 | 198 | tmpReal = currentPhaseShiftReal; 199 | currentPhaseShiftReal = 200 | tmpReal * phaseShiftStepReal - 201 | currentPhaseShiftImag * phaseShiftStepImag; 202 | currentPhaseShiftImag = 203 | tmpReal * phaseShiftStepImag + 204 | currentPhaseShiftImag * phaseShiftStepReal; 205 | } 206 | 207 | halfSize = halfSize << 1; 208 | } 209 | 210 | for (var i = 0, N = bufferSize / 2; i < N; i++) { 211 | rval = real[i]; 212 | ival = imag[i]; 213 | mag = bSi * sqrt(rval * rval + ival * ival); 214 | 215 | if (mag > this.peak) { 216 | this.peakBand = i; 217 | this.peak = mag; 218 | } 219 | spectrum[i] = mag; 220 | } 221 | return spectrum; 222 | }; 223 | } 224 | -------------------------------------------------------------------------------- /src/AudioSync.js: -------------------------------------------------------------------------------- 1 | import FFT from './fft.js'; 2 | 3 | /* 4 | Based on Wavesurfer's spectrogram plugin 5 | https://github.com/wavesurfer-js/wavesurfer.js/tree/master/src/plugin/spectrogram 6 | */ 7 | 8 | export default class AudioSyncPlugin { 9 | /** 10 | * AudioSync plugin definition factory 11 | * 12 | * This function must be used to create a plugin definition which can be 13 | * used by wavesurfer to correctly instantiate the plugin. 14 | * 15 | * @param {AudioSyncPluginParams} params Parameters used to initialise the plugin 16 | * @return {PluginDefinition} An object representing the plugin. 17 | */ 18 | static create(params) { 19 | return { 20 | name: 'audiosync', 21 | deferInit: params && params.deferInit ? params.deferInit : false, 22 | params: params, 23 | staticProps: { 24 | FFT: FFT 25 | }, 26 | instance: AudioSyncPlugin 27 | }; 28 | } 29 | 30 | constructor(params, ws) { 31 | this.params = params; 32 | this.wavesurfer = ws; 33 | this.util = ws.util; 34 | 35 | this.frequenciesDataUrl = params.frequenciesDataUrl; 36 | this._onScroll = e => { 37 | this.updateScroll(e); 38 | }; 39 | this._onRender = () => { 40 | this.render(); 41 | }; 42 | this._onWrapperClick = e => { 43 | this._wrapperClickHandler(e); 44 | }; 45 | this._onReady = () => { 46 | const drawer = (this.drawer = ws.drawer); 47 | 48 | this.container = 49 | 'string' == typeof params.container 50 | ? 
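                    // params.container may be a CSS selector string or an existing DOM element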
document.querySelector(params.container) 51 | : params.container; 52 | 53 | if (!this.container) { 54 | throw Error('No container for WaveSurfer spectrogram'); 55 | } 56 | 57 | this.width = drawer.width; 58 | this.pixelRatio = this.params.pixelRatio || ws.params.pixelRatio; 59 | this.fftSamples = this.params.fftSamples || ws.params.fftSamples || 512; 60 | this.height = this.params.height || 450; 61 | this.noverlap = params.noverlap; 62 | this.windowFunc = params.windowFunc; 63 | this.alpha = params.alpha; 64 | this.splitChannels = params.splitChannels; 65 | this.channels = this.splitChannels ? ws.backend.buffer.numberOfChannels : 1; 66 | 67 | // Define variables and parameters 68 | this.settings = this.params.settings; 69 | console.log(this.settings) 70 | this.featureHistory = []; 71 | this.cooldownCounter = 0; 72 | this.shortAverageVector = null; 73 | this.longAverageVector = null; 74 | this.featuresData = []; 75 | 76 | this.createWrapper(); 77 | this.createCanvas(); 78 | this.render(); 79 | 80 | drawer.wrapper.addEventListener('scroll', this._onScroll); 81 | ws.on('redraw', this._onRender); 82 | }; 83 | } 84 | 85 | updateSettings(settings) { 86 | this.settings = settings; 87 | console.log(this.settings) 88 | if (this.featuresData) { 89 | this.calculateSpectralFeatures(); 90 | } 91 | this.drawSpectralFeatures(); 92 | } 93 | 94 | init() { 95 | // Check if wavesurfer is ready 96 | if (this.wavesurfer.isReady) { 97 | this._onReady(); 98 | } else { 99 | this.wavesurfer.once('ready', this._onReady); 100 | } 101 | } 102 | 103 | destroy() { 104 | this.unAll(); 105 | this.wavesurfer.un('ready', this._onReady); 106 | this.wavesurfer.un('redraw', this._onRender); 107 | this.drawer && this.drawer.wrapper.removeEventListener('scroll', this._onScroll); 108 | this.wavesurfer = null; 109 | this.util = null; 110 | this.params = null; 111 | if (this.wrapper) { 112 | this.wrapper.removeEventListener('click', this._onWrapperClick); 113 | this.wrapper.parentNode.removeChild(this.wrapper); 114 | this.wrapper = null; 115 | } 116 | } 117 | 118 | createWrapper() { 119 | const oldWrapper = this.container.querySelector('AudioSync'); 120 | if (oldWrapper) { 121 | this.container.removeChild(oldWrapper); 122 | } 123 | const wsParams = this.wavesurfer.params; 124 | this.wrapper = document.createElement('AudioSync'); 125 | 126 | this.drawer.style(this.wrapper, { 127 | display: 'block', 128 | position: 'relative', 129 | userSelect: 'none', 130 | webkitUserSelect: 'none', 131 | height: `${this.height}px` 132 | }); 133 | 134 | if (wsParams.fillParent || wsParams.scrollParent) { 135 | this.drawer.style(this.wrapper, { 136 | width: '100%', 137 | overflowX: 'hidden', 138 | overflowY: 'hidden' 139 | }); 140 | } 141 | this.container.appendChild(this.wrapper); 142 | 143 | this.wrapper.addEventListener('click', this._onWrapperClick); 144 | } 145 | 146 | _wrapperClickHandler(event) { 147 | event.preventDefault(); 148 | const relX = 'offsetX' in event ? 
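        // use the standard offsetX when available, falling back to the legacy layerX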
event.offsetX : event.layerX; 149 | this.fireEvent('click', relX / this.width || 0); 150 | } 151 | 152 | createCanvas() { 153 | const specCanvas = (this.specCanvas = this.wrapper.appendChild( 154 | document.createElement('canvas') 155 | )); 156 | const overlayCanvas = (this.overlayCanvas = this.wrapper.appendChild( 157 | document.createElement('canvas') 158 | )); 159 | 160 | this.spectrCc = specCanvas.getContext('2d'); 161 | this.overlayCc = overlayCanvas.getContext('2d'); 162 | 163 | this.util.style(specCanvas, { 164 | zIndex: 4, 165 | position: 'absolute', 166 | top: '0px', 167 | left: '0px', 168 | }); 169 | this.util.style(overlayCanvas, { 170 | zIndex: 4, 171 | position: 'absolute', 172 | top: '0px', 173 | left: '0px', 174 | }); 175 | } 176 | 177 | render() { 178 | this.updateCanvasStyle(); 179 | 180 | if (this.frequenciesDataUrl) { 181 | this.loadFrequenciesData(this.frequenciesDataUrl); 182 | } else { 183 | this.calculateSpectralFeatures(); 184 | } 185 | } 186 | 187 | calculateSpectralFeatures() { 188 | const fftSamples = this.fftSamples; 189 | const buffer = this.wavesurfer.backend.buffer; 190 | const channels = this.channels; 191 | 192 | if (!buffer) { 193 | this.fireEvent('error', 'Web Audio buffer is not available'); 194 | return; 195 | } 196 | 197 | const sampleRate = buffer.sampleRate; 198 | this.featuresData = []; 199 | 200 | let noverlap = this.noverlap; 201 | if (!noverlap) { 202 | const uniqueSamplesPerPx = buffer.length / this.specCanvas.width; 203 | noverlap = Math.max(0, Math.round(fftSamples - uniqueSamplesPerPx)); 204 | } 205 | 206 | const fft = new FFT(fftSamples, sampleRate, this.windowFunc, this.alpha); 207 | 208 | for (let c = 0; c < channels; c++) { 209 | 210 | const channelData = buffer.getChannelData(c); 211 | const channelFeatures = []; 212 | 213 | let currentOffset = 0; 214 | 215 | while (currentOffset + fftSamples < channelData.length) { 216 | const segment = channelData.slice( 217 | currentOffset, 218 | currentOffset + fftSamples 219 | ); 220 | const spectrum = fft.calculateSpectrum(segment); 221 | 222 | const features = { 223 | spectrum: spectrum, 224 | lfc: null, 225 | zcr: null, 226 | energy: null, 227 | }; 228 | 229 | features.lfc = this.calculateLFC(spectrum); 230 | features.zcr = this.calculateZeroCrossingRate(segment); 231 | features.energy = this.calculateEnergy(segment); 232 | 233 | channelFeatures.push(features); 234 | 235 | currentOffset += fftSamples - noverlap; 236 | } 237 | 238 | this.featuresData.push(channelFeatures); 239 | } 240 | 241 | this.drawSpectralFeatures(); 242 | } 243 | 244 | calculateZeroCrossingRate(frame) { 245 | let zcr = 0; 246 | for (let i = 0; i < frame.length - 1; i++) { 247 | if (frame[i] * frame[i + 1] < 0) { 248 | zcr++; 249 | } 250 | } 251 | return zcr; 252 | } 253 | 254 | calculateEnergy(frame) { 255 | let energy = 0; 256 | for (let i = 0; i < frame.length; i++) { 257 | energy += frame[i] * frame[i]; 258 | } 259 | return energy; 260 | } 261 | 262 | calculateLFC(spectrum) { 263 | const startIndex = 0; 264 | const endIndex = Math.ceil(spectrum.length * 0.05); 265 | 266 | let sum = 0; 267 | let total = 0; 268 | 269 | for (let i = startIndex; i <= endIndex; i++) { 270 | sum += spectrum[i]; 271 | total += 1; 272 | } 273 | 274 | return total > 0 ? 
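        // average magnitude of the lowest ~5% of FFT bins (the low-frequency content feature)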
sum / total : 0; 275 | } 276 | 277 | // Function to update the vector feature rolling averages 278 | updateFeatureHistory(currentVector) { 279 | this.featureHistory.unshift(currentVector); 280 | 281 | if (this.featureHistory.length > this.settings.windowSize) { 282 | const removedVector = this.featureHistory.pop(); 283 | const shortVector = this.featureHistory[this.settings.shortWindowSize]; 284 | 285 | // Update the rolling average vectors 286 | for (let i = 0; i < this.longAverageVector.length; i++) { 287 | this.shortAverageVector[i] = this.shortAverageVector[i] - shortVector[i] + currentVector[i]; 288 | this.longAverageVector[i] = this.longAverageVector[i] - removedVector[i] + shortVector[i]; 289 | } 290 | } else { 291 | this.longAverageVector = Array.from(currentVector); 292 | this.shortAverageVector = Array.from(currentVector); 293 | } 294 | } 295 | 296 | // Function to calculate the squared distance between two vectors 297 | // This is like euclidian distance, but by skipping the sqrt and comparing squared values, we can save some processing power 298 | calculateSquaredDistance(vector1, vector2) { 299 | let sum = 0; 300 | for (let i = 0; i < vector1.length; i++) { 301 | sum += Math.pow(vector1[i] - vector2[i], 2); 302 | } 303 | return Math.pow(sum, 2); 304 | } 305 | 306 | drawSpectralFeatures() { 307 | const featuresData = this.featuresData; 308 | const spectrCc = this.spectrCc; 309 | const height = this.height; 310 | const width = this.width; 311 | const ratio = this.settings.shortWindowSize / (this.settings.windowSize - this.settings.shortWindowSize); 312 | const shortScale = 255 * 0.57735 / this.settings.shortWindowSize // 1 / sqrt(3), the diagonal of a unit cube 313 | const longScale = 255 * 0.57735 / (this.settings.windowSize - this.settings.shortWindowSize) // 1 / sqrt(3), the diagonal of a unit cube 314 | 315 | console.log(ratio, shortScale, longScale) 316 | let squaredDistances = []; 317 | this.featureHistory = []; // reset the feature history 318 | 319 | console.log(shortScale, longScale) 320 | 321 | const maxLFC = Math.max( 322 | ...featuresData.flatMap(channel => channel.flatMap(frame => frame.lfc)) 323 | ); 324 | const maxZCR = Math.max( 325 | ...featuresData.flatMap(channel => channel.flatMap(frame => frame.zcr)) 326 | ); 327 | const maxEnergy = Math.max( 328 | ...featuresData.flatMap(channel => channel.flatMap(frame => frame.energy)) 329 | ); 330 | 331 | if (!spectrCc) { 332 | throw new Error('No canvas context to draw spectrogram or overlay'); 333 | } 334 | 335 | spectrCc.clearRect(0, 0, width, height); 336 | 337 | for (let c = 0; c < featuresData.length; c++) { 338 | const channelData = featuresData[c]; 339 | 340 | for (let i = 0; i < channelData.length; i++) { 341 | const frame = channelData[i]; 342 | const x = i * (width / channelData.length); 343 | 344 | // Scale the features to between 0 and 1 345 | const lfc = frame.lfc / maxLFC; 346 | const zcr = frame.zcr / maxZCR; 347 | const energy = frame.energy / maxEnergy; 348 | 349 | // Combine the features into a three-dimensional vector 350 | const currentVector = [lfc, energy, zcr]; 351 | 352 | // Update the rolling window of feature vectors 353 | this.updateFeatureHistory(currentVector); 354 | 355 | // Change detection algorithm 356 | // Update the cooldown counter 357 | if (this.cooldownCounter > 0) { 358 | this.cooldownCounter *= 0.998; 359 | } 360 | 361 | // Calculate the squared distance between the short and long rolling average vectors 362 | // multiple this.longAverageVector by ratio to account for the 
difference in window sizes 363 | const squaredDistance = this.calculateSquaredDistance(this.shortAverageVector, this.longAverageVector.map(x => x * ratio)); 364 | squaredDistances.push(squaredDistance) 365 | 366 | // Calculate the threshold for a significant change based on the cooldown duration 367 | // const threshold = this.cooldownCounter; // Adjust the threshold as needed 368 | // const triggered = squaredDistance > threshold 369 | 370 | // Draw the three spectral features in the top quarter of the canvas 371 | const lfcColor = "255, 0, 0" 372 | const zcrColor = "0, 255, 0" 373 | const energyColor = "0, 0, 255" 374 | 375 | const stackHeight = height / 4 / 3 376 | const lfcBarHeight = lfc * stackHeight; 377 | const lfcY = stackHeight - lfcBarHeight 378 | const zcrBarHeight = zcr * stackHeight; 379 | const zcrY = stackHeight * 2 - zcrBarHeight 380 | const energyBarHeight = energy * stackHeight; 381 | const energyY = stackHeight * 3 - energyBarHeight 382 | 383 | spectrCc.fillStyle = `rgba(${lfcColor}, ${lfcBarHeight / stackHeight})`; 384 | spectrCc.fillRect(x, lfcY, 1, lfcBarHeight); 385 | spectrCc.fillStyle = `rgba(${zcrColor}, ${zcrBarHeight / stackHeight})`; 386 | spectrCc.fillRect(x, zcrY, 1, zcrBarHeight); 387 | spectrCc.fillStyle = `rgba(${energyColor}, ${energyBarHeight / stackHeight})`; 388 | spectrCc.fillRect(x, energyY, 1, energyBarHeight); 389 | 390 | // Draw the spectral feature vector as a colour 391 | spectrCc.fillStyle = `rgba(${this.shortAverageVector[0] * shortScale}, ${this.shortAverageVector[1] * shortScale}, ${this.shortAverageVector[2] * shortScale}, 1)`; 392 | spectrCc.fillRect(x, height * 1/4, 1, height * 1/4); 393 | spectrCc.fillStyle = `rgba(${this.longAverageVector[0] * longScale}, ${this.longAverageVector[1] * longScale}, ${this.longAverageVector[2] * longScale}, 1)`; 394 | spectrCc.fillRect(x, height * 2/4, 1, height * 1/4); 395 | 396 | // if (triggered) { 397 | // // reset the cooldown counter 398 | // this.cooldownCounter = 65535 399 | // // Draw a white line if a change was detected 400 | // spectrCc.fillStyle = 'white'; 401 | // spectrCc.fillRect(x, 0, 1, height); 402 | // } 403 | 404 | // // Draw the cooldown counter as a transparent white line 405 | // const cooldownBarHeight = this.cooldownCounter / 65535 * height * 0.5; 406 | // const cooldownBarY = height - cooldownBarHeight; 407 | // spectrCc.fillStyle = `rgba(255, 255, 255, ${this.cooldownCounter / 65535})`; 408 | // spectrCc.fillRect(x, cooldownBarY, 1, cooldownBarHeight); 409 | 410 | } 411 | } 412 | 413 | // set the last 5% to zero to avoid the graph being dominated by the cooldown period 414 | squaredDistances = squaredDistances.map((x, i) => i > squaredDistances.length * 0.95 ? 
0 : x) 415 | 416 | // Draw the squared distance as solid red bars in the lower quarter of the canvas 417 | console.log(squaredDistances) 418 | const maxSquaredDistance = Math.max(...squaredDistances) 419 | for (let i = 0; i < squaredDistances.length; i++) { 420 | const x = i * (width / squaredDistances.length); 421 | const y = height * 3 / 4; 422 | const barHeight = squaredDistances[i] / maxSquaredDistance * height / 4; 423 | const barY = y + height / 4 - barHeight; 424 | spectrCc.fillStyle = `rgba(255, 0, 0, 1)`; 425 | spectrCc.fillRect(x, barY, 1, barHeight); 426 | } 427 | 428 | // Draw the legend 429 | const legendX = 10; 430 | spectrCc.fillStyle = 'black'; 431 | spectrCc.font = '20px Arial'; 432 | spectrCc.fillText('LFC', legendX, 30); 433 | spectrCc.fillText('ZCR', legendX, height/12 + 30); 434 | spectrCc.fillText('Energy', legendX, height/12 * 2 + 30); 435 | spectrCc.fillStyle = 'white'; 436 | spectrCc.fillText('Short Rolling Average', legendX, height/4 + 30); 437 | spectrCc.fillText('(Feature Vector visualised as RGB)', legendX, height/4 + 60); 438 | spectrCc.fillText('Long Rolling Average', legendX, height/2 + 60); 439 | spectrCc.fillStyle = 'black'; 440 | spectrCc.fillText('Change Detection', legendX, height/4 * 3 + 30); 441 | spectrCc.fillText('(Euclidean Distance between Short and Long Rolling Averages)', legendX, height/4 * 3 + 60); 442 | 443 | // Update playback position overlay 444 | this.updatePlaybackLine(); 445 | } 446 | 447 | updatePlaybackLine() { 448 | if (!this.wavesurfer) { 449 | return; 450 | } 451 | 452 | const overlayCc = this.overlayCc 453 | const height = this.height; 454 | const playbackPos = this.wavesurfer.backend.getCurrentTime() * this.width / this.wavesurfer.getDuration(); 455 | 456 | // Clear previous overlay 457 | overlayCc.clearRect(0, 0, this.width, height); 458 | overlayCc.beginPath(); 459 | overlayCc.moveTo(playbackPos, 0); 460 | overlayCc.lineTo(playbackPos, height); 461 | overlayCc.strokeStyle = 'rgba(0,0,0,1)'; 462 | overlayCc.lineWidth = 2; 463 | overlayCc.stroke(); 464 | // Request animation frame for continuous updating 465 | requestAnimationFrame(() => this.updatePlaybackLine()); 466 | } 467 | 468 | updateCanvasStyle() { 469 | const width = Math.round(this.width / this.pixelRatio) + 'px'; 470 | this.specCanvas.width = this.width; 471 | this.specCanvas.height = this.height; 472 | this.specCanvas.style.width = width; 473 | this.specCanvas.style.height = this.height + 'px'; 474 | this.overlayCanvas.width = this.width; 475 | this.overlayCanvas.height = this.height; 476 | this.overlayCanvas.style.width = width; 477 | this.overlayCanvas.style.height = this.height + 'px'; 478 | } 479 | 480 | loadFrequenciesData(url) { 481 | const request = this.util.fetchFile({ url: url }); 482 | 483 | request.on('success', data => 484 | this.calculateSpectralFeatures(JSON.parse(data), this) 485 | ); 486 | request.on('error', e => this.fireEvent('error', e)); 487 | 488 | return request; 489 | } 490 | 491 | updateScroll(e) { 492 | if (this.wrapper) { 493 | this.wrapper.scrollLeft = e.target.scrollLeft; 494 | } 495 | } 496 | } --------------------------------------------------------------------------------
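The thresholding step remains a work in progress: the trigger logic in `drawSpectralFeatures` above is commented out, and the README marks the step as WIP. Purely as a hedged sketch of one possible direction, with invented names and constants that are not part of AudioSync, an adaptive threshold with a decaying cooldown could look roughly like this:

```ts
// Hypothetical sketch: adaptive threshold with a decaying cooldown, loosely
// following the commented-out logic in drawSpectralFeatures. All constants are placeholders.
class AdaptiveThreshold {
  private cooldown = 0;      // decays every frame; acts as a refractory period after a trigger
  private meanDistance = 0;  // exponential moving average of recent distances

  constructor(
    private decay = 0.998,         // per-frame cooldown decay
    private sensitivity = 4,       // how far above the recent mean counts as a change
    private cooldownReset = 65535, // value the cooldown jumps to after a trigger
  ) {}

  /** Returns true if this frame's squared distance should fire a change event. */
  update(squaredDistance: number): boolean {
    this.cooldown *= this.decay;
    this.meanDistance = 0.99 * this.meanDistance + 0.01 * squaredDistance;

    // The threshold adapts to the song via the running mean and is inflated
    // right after a trigger by the cooldown term.
    const threshold = this.sensitivity * this.meanDistance + this.cooldown;
    if (squaredDistance > threshold) {
      this.cooldown = this.cooldownReset;
      return true;
    }
    return false;
  }
}
```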