├── .eslintignore ├── .npmrc ├── src ├── vite-env.d.ts ├── assets │ ├── stop.svg │ ├── pause.svg │ ├── play.svg │ ├── microphone.svg │ ├── MicrophoneIcon.tsx │ └── AudioWaveIcon.tsx ├── index.tsx ├── helpers │ ├── getFileExtensionFromMimeType.ts │ ├── formatToInlineStyleValue.ts │ ├── paintLineFromCenterToRight.ts │ ├── paintLine.ts │ ├── formatRecordingTime.ts │ ├── initialCanvasSetup.ts │ ├── index.ts │ ├── formatDurationTime.ts │ ├── formatRecordedAudioTime.ts │ ├── drawByBlob.ts │ ├── getBarsData.ts │ └── drawByLiveStream.ts ├── hooks │ ├── useLatest.tsx │ ├── useDebounce.tsx │ ├── useWebWorker.tsx │ └── useVoiceVisualizer.tsx ├── types │ └── types.ts ├── index.css └── components │ └── VoiceVisualizer.tsx ├── public └── voiceVisualizer.png ├── tsconfig.node.json ├── index.html ├── .gitignore ├── tsconfig.json ├── .github └── ISSUE_TEMPLATE │ └── bug_report.md ├── .eslintrc.cjs ├── LICENSE ├── vite.config.ts ├── package.json └── README.md /.eslintignore: -------------------------------------------------------------------------------- 1 | vite.config.ts -------------------------------------------------------------------------------- /.npmrc: -------------------------------------------------------------------------------- 1 | legacy-peer-deps=true -------------------------------------------------------------------------------- /src/vite-env.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | -------------------------------------------------------------------------------- /public/voiceVisualizer.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/YZarytskyi/react-voice-visualizer/HEAD/public/voiceVisualizer.png -------------------------------------------------------------------------------- /src/assets/stop.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 
-------------------------------------------------------------------------------- /src/index.tsx: -------------------------------------------------------------------------------- 1 | export { default as VoiceVisualizer } from "./components/VoiceVisualizer.tsx"; 2 | export { default as useVoiceVisualizer } from "./hooks/useVoiceVisualizer.tsx"; 3 | -------------------------------------------------------------------------------- /src/assets/pause.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | -------------------------------------------------------------------------------- /tsconfig.node.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "composite": true, 4 | "skipLibCheck": true, 5 | "module": "ESNext", 6 | "moduleResolution": "bundler", 7 | "allowSyntheticDefaultImports": true 8 | }, 9 | "include": ["vite.config.ts"] 10 | } 11 | -------------------------------------------------------------------------------- /src/assets/play.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /src/helpers/getFileExtensionFromMimeType.ts: -------------------------------------------------------------------------------- 1 | export const getFileExtensionFromMimeType = ( 2 | mimeType: string | undefined, 3 | ): string => { 4 | if (!mimeType) return ""; 5 | 6 | const matches = mimeType.match(/audio\/([^;]+)/); 7 | if (matches && matches.length >= 2) { 8 | return `.${matches[1]}`; 9 | } 10 | return ""; 11 | }; 12 | -------------------------------------------------------------------------------- /index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | react-voice-visualizer 7 | 8 | 9 |
10 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | pnpm-debug.log* 8 | lerna-debug.log* 9 | 10 | node_modules 11 | dist 12 | dist-ssr 13 | *.local 14 | 15 | # Editor directories and files 16 | .vscode/* 17 | !.vscode/extensions.json 18 | .idea 19 | .DS_Store 20 | *.suo 21 | *.ntvs* 22 | *.njsproj 23 | *.sln 24 | *.sw? 25 | -------------------------------------------------------------------------------- /src/helpers/formatToInlineStyleValue.ts: -------------------------------------------------------------------------------- 1 | export function formatToInlineStyleValue( 2 | value: string | number, 3 | ): string | number { 4 | if (typeof value === "string") { 5 | const numericValue = Number(value); 6 | if (!Number.isNaN(numericValue)) { 7 | return `${Math.trunc(numericValue / 2) * 2}px`; 8 | } 9 | } 10 | 11 | return value; 12 | } 13 | -------------------------------------------------------------------------------- /src/hooks/useLatest.tsx: -------------------------------------------------------------------------------- 1 | import { useLayoutEffect, useRef } from "react"; 2 | 3 | type UseLatestReturnType = { readonly current: T }; 4 | 5 | export function useLatest(value: T): UseLatestReturnType { 6 | const valueRef = useRef(value); 7 | 8 | useLayoutEffect(() => { 9 | valueRef.current = value; 10 | }, [value]); 11 | 12 | return valueRef; 13 | } 14 | -------------------------------------------------------------------------------- /src/helpers/paintLineFromCenterToRight.ts: -------------------------------------------------------------------------------- 1 | import { paintLine } from "./paintLine.ts"; 2 | import { PaintLineFromCenterToRightParams } from "../types/types.ts"; 3 | 4 | export function paintLineFromCenterToRight({ 5 | context, 6 
| color, 7 | rounded, 8 | width, 9 | height, 10 | barWidth, 11 | }: PaintLineFromCenterToRightParams) { 12 | paintLine({ 13 | context, 14 | color, 15 | rounded, 16 | x: width / 2 + barWidth / 2, 17 | y: height / 2 - 1, 18 | h: 2, 19 | w: width - (width / 2 + barWidth / 2), 20 | }); 21 | } 22 | -------------------------------------------------------------------------------- /src/helpers/paintLine.ts: -------------------------------------------------------------------------------- 1 | import { PaintLineParams } from "../types/types.ts"; 2 | 3 | export const paintLine = ({ 4 | context, 5 | color, 6 | rounded, 7 | x, 8 | y, 9 | w, 10 | h, 11 | }: PaintLineParams) => { 12 | context.fillStyle = color; 13 | context.beginPath(); 14 | 15 | if (context.roundRect) { 16 | // ensuring roundRect is supported by the browser 17 | context.roundRect(x, y, w, h, rounded); 18 | context.fill(); 19 | } else { 20 | // Fallback for browsers that do not support roundRect 21 | context.fillRect(x, y, w, h); 22 | } 23 | }; 24 | -------------------------------------------------------------------------------- /src/helpers/formatRecordingTime.ts: -------------------------------------------------------------------------------- 1 | export const formatRecordingTime = (milliseconds: number): string => { 2 | const totalSeconds = Math.floor(milliseconds / 1000); 3 | const hours = Math.floor(totalSeconds / 3600); 4 | const minutes = Math.floor((totalSeconds % 3600) / 60); 5 | const seconds = totalSeconds % 60; 6 | 7 | return hours > 0 8 | ? 
`${String(hours).padStart(2, "0")}:${String(minutes).padStart( 9 | 2, 10 | "0", 11 | )}:${String(seconds).padStart(2, "0")}` 12 | : `${String(minutes).padStart(2, "0")}:${String(seconds).padStart(2, "0")}`; 13 | }; 14 | -------------------------------------------------------------------------------- /src/assets/microphone.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /src/helpers/initialCanvasSetup.ts: -------------------------------------------------------------------------------- 1 | import { GetDataForCanvasParams } from "../types/types.ts"; 2 | 3 | export const initialCanvasSetup = ({ 4 | canvas, 5 | backgroundColor, 6 | }: GetDataForCanvasParams) => { 7 | const height = canvas.height; 8 | const width = canvas.width; 9 | const halfWidth = Math.round(width / 2); 10 | const context = canvas.getContext("2d"); 11 | if (!context) return null; 12 | 13 | context.clearRect(0, 0, width, height); 14 | 15 | if (backgroundColor !== "transparent") { 16 | context.fillStyle = backgroundColor; 17 | context.fillRect(0, 0, width, height); 18 | } 19 | 20 | return { context, height, width, halfWidth }; 21 | }; 22 | -------------------------------------------------------------------------------- /src/helpers/index.ts: -------------------------------------------------------------------------------- 1 | export { drawByBlob } from "./drawByBlob"; 2 | export { drawByLiveStream } from "./drawByLiveStream"; 3 | export { formatRecordedAudioTime } from "./formatRecordedAudioTime"; 4 | export { formatRecordingTime } from "./formatRecordingTime"; 5 | export { formatToInlineStyleValue } from "./formatToInlineStyleValue"; 6 | export { getBarsData } from "./getBarsData"; 7 | export { getFileExtensionFromMimeType } from "./getFileExtensionFromMimeType"; 8 | export { initialCanvasSetup } from "./initialCanvasSetup"; 9 | export { paintLine } from "./paintLine"; 
10 | export { formatDurationTime } from "./formatDurationTime"; 11 | export { paintLineFromCenterToRight } from "./paintLineFromCenterToRight"; 12 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2020", 4 | "useDefineForClassFields": true, 5 | "lib": ["ES2020", "DOM", "DOM.Iterable"], 6 | "module": "ESNext", 7 | "skipLibCheck": true, 8 | 9 | /* Bundler mode */ 10 | "moduleResolution": "bundler", 11 | "allowImportingTsExtensions": true, 12 | "resolveJsonModule": true, 13 | "isolatedModules": true, 14 | "noEmit": true, 15 | "jsx": "react-jsx", 16 | 17 | /* Linting */ 18 | "strict": true, 19 | "noUnusedLocals": true, 20 | "noUnusedParameters": true, 21 | "noFallthroughCasesInSwitch": true 22 | }, 23 | "include": ["src"], 24 | "references": [{ "path": "./tsconfig.node.json" }] 25 | } 26 | -------------------------------------------------------------------------------- /src/hooks/useDebounce.tsx: -------------------------------------------------------------------------------- 1 | import { useCallback, useRef } from "react"; 2 | import { AnyFunction } from "../types/types.ts"; 3 | 4 | export const useDebounce = (func: AnyFunction, wait = 250) => { 5 | const timeout = useRef>(); 6 | 7 | return useCallback( 8 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 9 | (...args: any[]) => { 10 | const later = () => { 11 | clearTimeout(timeout.current); 12 | // eslint-disable-next-line @typescript-eslint/no-unsafe-argument 13 | func(...args); 14 | }; 15 | 16 | clearTimeout(timeout.current); 17 | timeout.current = setTimeout(later, wait); 18 | }, 19 | [func, wait], 20 | ); 21 | }; 22 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 
2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: '' 6 | assignees: YZarytskyi 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **Package info (please complete the following information):** 27 | - Version 28 | 29 | **Additional context** 30 | Add any other context about the problem here. 31 | -------------------------------------------------------------------------------- /.eslintrc.cjs: -------------------------------------------------------------------------------- 1 | /* eslint-env node */ 2 | 3 | module.exports = { 4 | root: true, 5 | env: { browser: true, es2020: true }, 6 | extends: [ 7 | 'eslint:recommended', 8 | 'plugin:@typescript-eslint/recommended', 9 | 'plugin:@typescript-eslint/recommended-requiring-type-checking', 10 | 'plugin:react-hooks/recommended', 11 | ], 12 | parser: '@typescript-eslint/parser', 13 | parserOptions: { 14 | ecmaVersion: 'latest', 15 | sourceType: 'module', 16 | project: 'tsconfig.json', 17 | tsconfigRootDir: __dirname, 18 | }, 19 | plugins: ['react-refresh'], 20 | rules: { 21 | 'react-refresh/only-export-components': [ 22 | 'warn', 23 | { allowConstantExport: true }, 24 | ], 25 | '@typescript-eslint/no-non-null-assertion': 'off', 26 | }, 27 | } 28 | -------------------------------------------------------------------------------- /src/helpers/formatDurationTime.ts: -------------------------------------------------------------------------------- 1 | export const formatDurationTime = (seconds: number): string => { 2 | const hours = Math.floor(seconds / 3600); 3 | const minutes = 
Math.floor((seconds % 3600) / 60); 4 | const remainingSeconds = seconds % 60; 5 | const milliseconds = Math.floor( 6 | (remainingSeconds - Math.floor(remainingSeconds)) * 1000, 7 | ); 8 | 9 | if (hours > 0) { 10 | return `${String(hours).padStart(2, "0")}:${String(minutes).padStart( 11 | 2, 12 | "0", 13 | )}:${String(Math.floor(remainingSeconds)).padStart(2, "0")}}h`; 14 | } else if (minutes > 0) { 15 | return `${String(minutes).padStart(2, "0")}:${String( 16 | Math.floor(remainingSeconds), 17 | ).padStart(2, "0")}m`; 18 | } else { 19 | return `${String(Math.floor(remainingSeconds)).padStart(2, "0")}:${String( 20 | milliseconds, 21 | ).charAt(0)}${String(milliseconds).charAt(1)}s`; 22 | } 23 | }; 24 | -------------------------------------------------------------------------------- /src/helpers/formatRecordedAudioTime.ts: -------------------------------------------------------------------------------- 1 | export const formatRecordedAudioTime = (seconds: number): string => { 2 | const hours = Math.floor(seconds / 3600); 3 | const minutes = Math.floor((seconds % 3600) / 60); 4 | const remainingSeconds = seconds % 60; 5 | const milliseconds = Math.floor( 6 | (remainingSeconds - Math.floor(remainingSeconds)) * 1000, 7 | ); 8 | 9 | if (hours > 0) { 10 | return `${String(hours).padStart(2, "0")}:${String(minutes).padStart( 11 | 2, 12 | "0", 13 | )}:${String(Math.floor(remainingSeconds)).padStart(2, "0")}:${String( 14 | milliseconds, 15 | ).charAt(0)}`; 16 | } else if (minutes > 0) { 17 | return `${String(minutes).padStart(2, "0")}:${String( 18 | Math.floor(remainingSeconds), 19 | ).padStart(2, "0")}:${String(milliseconds).charAt(0)}`; 20 | } else { 21 | return `${String(Math.floor(remainingSeconds)).padStart(2, "0")}:${String( 22 | milliseconds, 23 | ).charAt(0)}`; 24 | } 25 | }; 26 | -------------------------------------------------------------------------------- /src/helpers/drawByBlob.ts: -------------------------------------------------------------------------------- 
1 | import { initialCanvasSetup } from "./initialCanvasSetup.ts"; 2 | import { paintLine } from "./paintLine.ts"; 3 | 4 | import { DrawByBlob } from "../types/types.ts"; 5 | 6 | export const drawByBlob = ({ 7 | barsData, 8 | canvas, 9 | barWidth, 10 | gap, 11 | backgroundColor, 12 | mainBarColor, 13 | secondaryBarColor, 14 | currentAudioTime = 0, 15 | rounded, 16 | duration, 17 | }: DrawByBlob): void => { 18 | const canvasData = initialCanvasSetup({ canvas, backgroundColor }); 19 | if (!canvasData) return; 20 | 21 | const { context, height } = canvasData; 22 | 23 | const playedPercent = currentAudioTime / duration; 24 | 25 | barsData.forEach((barData, i) => { 26 | const mappingPercent = i / barsData.length; 27 | const played = playedPercent > mappingPercent; 28 | 29 | paintLine({ 30 | context, 31 | color: played ? secondaryBarColor : mainBarColor, 32 | rounded, 33 | x: i * (barWidth + gap * barWidth), 34 | y: height / 2 - barData.max, 35 | h: barData.max * 2, 36 | w: barWidth, 37 | }); 38 | }); 39 | }; 40 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Yurii Zarytskyi 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /src/assets/MicrophoneIcon.tsx: -------------------------------------------------------------------------------- 1 | import { FC } from "react"; 2 | 3 | interface MicrophoneIconProps { 4 | color?: string; 5 | stroke?: number; 6 | className?: string; 7 | } 8 | 9 | const MicrophoneIcon: FC = ({ 10 | color = "#000000", 11 | stroke = 2, 12 | className, 13 | }) => { 14 | return ( 15 | 21 | 28 | 29 | ); 30 | }; 31 | 32 | export default MicrophoneIcon; 33 | -------------------------------------------------------------------------------- /src/hooks/useWebWorker.tsx: -------------------------------------------------------------------------------- 1 | import { useState } from "react"; 2 | 3 | import { AnyFunction, UseWebWorkerParams } from "../types/types"; 4 | 5 | const workerHandler = (fn: AnyFunction) => { 6 | onmessage = (event) => { 7 | postMessage(fn(event.data)); 8 | }; 9 | }; 10 | 11 | export function useWebWorker({ 12 | fn, 13 | initialValue, 14 | onMessageReceived, 15 | }: UseWebWorkerParams) { 16 | const [result, setResult] = useState(initialValue); 17 | 18 | const run = (value: V) => { 19 | const worker = new Worker( 20 | // eslint-disable-next-line @typescript-eslint/restrict-template-expressions 21 | URL.createObjectURL(new Blob([`(${workerHandler})(${fn})`])), 22 | ); 23 | worker.onmessage = (event) => { 24 | if (event.data) { 25 | // eslint-disable-next-line 
@typescript-eslint/no-unsafe-argument 26 | setResult(event.data); 27 | if (onMessageReceived) onMessageReceived(); 28 | worker.terminate(); 29 | } 30 | }; 31 | worker.onerror = (error) => { 32 | console.error(error.message); 33 | worker.terminate(); 34 | }; 35 | worker.postMessage(value); 36 | }; 37 | 38 | return { 39 | result, 40 | setResult, 41 | run, 42 | }; 43 | } 44 | -------------------------------------------------------------------------------- /src/helpers/getBarsData.ts: -------------------------------------------------------------------------------- 1 | import { BarsData, GetBarsDataParams } from "../types/types.ts"; 2 | 3 | export const getBarsData = ({ 4 | bufferData, 5 | height, 6 | width, 7 | barWidth, 8 | gap, 9 | }: GetBarsDataParams): BarsData[] => { 10 | const units = width / (barWidth + gap * barWidth); 11 | const step = Math.floor(bufferData.length / units); 12 | const halfHeight = height / 2; 13 | 14 | let barsData: BarsData[] = []; 15 | let maxDataPoint = 0; 16 | 17 | for (let i = 0; i < units; i++) { 18 | const maximums: number[] = []; 19 | let maxCount = 0; 20 | 21 | for (let j = 0; j < step && i * step + j < bufferData.length; j++) { 22 | const result = bufferData[i * step + j]; 23 | if (result > 0) { 24 | maximums.push(result); 25 | maxCount++; 26 | } 27 | } 28 | const maxAvg = maximums.reduce((a, c) => a + c, 0) / maxCount; 29 | 30 | if (maxAvg > maxDataPoint) { 31 | maxDataPoint = maxAvg; 32 | } 33 | 34 | barsData.push({ max: maxAvg }); 35 | } 36 | 37 | if (halfHeight * 0.95 > maxDataPoint * halfHeight) { 38 | const adjustmentFactor = (halfHeight * 0.95) / maxDataPoint; 39 | barsData = barsData.map((bar) => ({ 40 | max: bar.max > 0.01 ? 
bar.max * adjustmentFactor : 1, 41 | })); 42 | } 43 | 44 | return barsData; 45 | }; 46 | -------------------------------------------------------------------------------- /vite.config.ts: -------------------------------------------------------------------------------- 1 | import { resolve } from "path"; 2 | import { defineConfig } from "vite"; 3 | import dts from "vite-plugin-dts"; 4 | import cssInjectedByJsPlugin from "vite-plugin-css-injected-by-js"; 5 | import react from "@vitejs/plugin-react"; 6 | 7 | const injectCodeFunction = (cssCode) => { 8 | try { 9 | if (typeof window === "undefined") return; 10 | 11 | var elementStyle = document.createElement("style"); 12 | elementStyle.appendChild(document.createTextNode(cssCode)); 13 | 14 | const nonce = 15 | document.querySelector('meta[property="csp-nonce"]')?.getAttribute('nonce'); 16 | 17 | if (nonce) { 18 | elementStyle.setAttribute('nonce', nonce); 19 | } 20 | 21 | document.head.appendChild(elementStyle); 22 | } catch (e) { 23 | console.error("vite-plugin-css-injected-by-js", e); 24 | } 25 | }; 26 | 27 | export default defineConfig({ 28 | plugins: [ 29 | react(), 30 | cssInjectedByJsPlugin({ injectCodeFunction }), 31 | dts({ 32 | insertTypesEntry: true, 33 | }), 34 | ], 35 | build: { 36 | lib: { 37 | entry: resolve(__dirname, "src/index.tsx"), 38 | name: "react-voice-visualizer", 39 | fileName: "react-voice-visualizer", 40 | }, 41 | rollupOptions: { 42 | external: ["react", "react-dom", "react/jsx-runtime"], 43 | output: { 44 | globals: { 45 | react: "React", 46 | "react-dom": "ReactDOM", 47 | "react/jsx-runtime": "react/jsx-runtime", 48 | }, 49 | }, 50 | }, 51 | }, 52 | }); 53 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "react-voice-visualizer", 3 | "private": false, 4 | "version": "2.0.8", 5 | "type": "module", 6 | "author": "Yurii Zarytskyi", 7 | "repository": 
{ 8 | "type": "git", 9 | "url": "git+https://github.com/YZarytskyi/react-voice-visualizer.git" 10 | }, 11 | "description": "React library for audio recording and visualization using Web Audio API", 12 | "license": "MIT", 13 | "keywords": [ 14 | "audio", 15 | "recording", 16 | "visualization", 17 | "voice visualizer", 18 | "audio visualizer", 19 | "audio wave", 20 | "audio recorder", 21 | "audio recording", 22 | "audio player", 23 | "voice", 24 | "recorder", 25 | "recording", 26 | "sound", 27 | "microphone", 28 | "media", 29 | "voice recorder", 30 | "voice recording", 31 | "mediaDevices", 32 | "getUserMedia", 33 | "react", 34 | "reactjs", 35 | "library" 36 | ], 37 | "files": [ 38 | "dist" 39 | ], 40 | "main": "./dist/react-voice-visualizer.umd.cjs", 41 | "module": "./dist/react-voice-visualizer.js", 42 | "types": "./dist/index.d.ts", 43 | "exports": { 44 | ".": { 45 | "types": "./dist/index.d.ts", 46 | "import": "./dist/react-voice-visualizer.js", 47 | "require": "./dist/react-voice-visualizer.umd.cjs" 48 | } 49 | }, 50 | "scripts": { 51 | "dev": "vite", 52 | "build": "tsc && vite build", 53 | "lint": "eslint src --ext ts,tsx --report-unused-disable-directives --max-warnings 0", 54 | "preview": "vite preview" 55 | }, 56 | "peerDependencies": { 57 | "react": "^18.2.0", 58 | "react-dom": "^18.2.0" 59 | }, 60 | "devDependencies": { 61 | "@types/node": "^20.14.2", 62 | "@types/react": "^18.2.14", 63 | "@types/react-dom": "^18.2.6", 64 | "@typescript-eslint/eslint-plugin": "^6.6.0", 65 | "@typescript-eslint/parser": "^6.6.0", 66 | "@vitejs/plugin-react": "^4.0.1", 67 | "eslint": "^8.44.0", 68 | "eslint-plugin-react-hooks": "^4.6.0", 69 | "eslint-plugin-react-refresh": "^0.4.1", 70 | "prettier": "^3.0.0", 71 | "typescript": "^5.0.2", 72 | "vite": "^4.4.0", 73 | "vite-plugin-css-injected-by-js": "^3.3.0", 74 | "vite-plugin-dts": "^3.5.3" 75 | }, 76 | "bugs": { 77 | "url": "https://github.com/YZarytskyi/react-voice-visualizer/issues" 78 | }, 79 | "homepage": 
import { initialCanvasSetup } from "./initialCanvasSetup.ts";
import { paintLine } from "./paintLine.ts";
import { paintLineFromCenterToRight } from "./paintLineFromCenterToRight.ts";

import { DrawByLiveStreamParams, BarItem } from "../types/types.ts";

/**
 * Renders one animation frame of the live-recording waveform.
 *
 * While recording, a new bar slot is captured every `barWidth` frames
 * (frame counting is kept in the mutable refs `index`/`index2` so it
 * survives across frames) and pushed onto `picks`, which scrolls
 * right-to-left across the canvas. When not recording, accumulated
 * `picks` are discarded.
 */
export const drawByLiveStream = ({
  audioData,
  unit,
  index,
  index2,
  canvas,
  isRecordingInProgress,
  isPausedRecording,
  picks,
  backgroundColor,
  barWidth,
  mainBarColor,
  secondaryBarColor,
  rounded,
  animateCurrentPick,
  fullscreen,
}: DrawByLiveStreamParams) => {
  const canvasData = initialCanvasSetup({ canvas, backgroundColor });
  if (!canvasData) return;

  const { context, height, width, halfWidth } = canvasData;
  if (audioData?.length && isRecordingInProgress) {
    // audioData is a Uint8Array, so maxPick is in [0, 255]. The divisor 258
    // is slightly above that maximum — presumably headroom so bars never
    // quite touch the canvas edges (TODO confirm the intent of 258).
    const maxPick = Math.max(...audioData);

    if (!isPausedRecording) {
      // index2 counts frames since the last captured slot; capture a new
      // slot once per `barWidth` frames so slots are one bar-width apart.
      if (index2.current >= barWidth) {
        index2.current = 0;

        // Bar geometry expressed as percentages of the canvas height, so it
        // survives canvas resizes between frames.
        const startY = ((height - (maxPick / 258) * height) / height) * 100;
        const barHeight =
          ((-height + (maxPick / 258) * height * 2) / height) * 100;

        // Only the capture at the start of each `index` cycle stores a real
        // bar; the other captures store null placeholders, which still
        // consume horizontal space when the picks are drawn below.
        const newPick: BarItem | null =
          index.current === barWidth
            ? {
                startY,
                barHeight,
              }
            : null;

        // `index` advances by barWidth per capture and wraps back to
        // barWidth once it reaches `unit`.
        if (index.current >= unit) {
          index.current = barWidth;
        } else {
          index.current += barWidth;
        }

        // quantity of picks enough for visualisation
        if (picks.length > (fullscreen ? width : halfWidth) / barWidth) {
          picks.pop();
        }
        picks.unshift(newPick);
      }

      index2.current += 1;
    }

    // In non-fullscreen mode, draw a thin baseline on the right half of the
    // canvas (the bars occupy the left half).
    !fullscreen && paintInitialLine();

    // animate current pick
    if (animateCurrentPick) {
      paintLine({
        context,
        rounded,
        color: mainBarColor,
        x: fullscreen ? width : halfWidth,
        y: height - (maxPick / 258) * height,
        h: -height + (maxPick / 258) * height * 2,
        w: barWidth,
      });
    }

    // picks visualisation: draw stored bars right-to-left, offset by the
    // frames elapsed since the last capture so the scroll looks continuous.
    let x = (fullscreen ? width : halfWidth) - index2.current;
    picks.forEach((pick) => {
      if (pick) {
        paintLine({
          context,
          color: mainBarColor,
          rounded,
          x,
          // Clamp y/h so every bar is at least 2px tall and stays centered
          // on the canvas midline.
          y:
            (pick.startY * height) / 100 > height / 2 - 1
              ? height / 2 - 1
              : (pick.startY * height) / 100,
          h:
            (pick.barHeight * height) / 100 > 2
              ? (pick.barHeight * height) / 100
              : 2,
          w: barWidth,
        });
      }
      x -= barWidth;
    });
  } else {
    // Not recording (or no data yet): drop any previously captured bars.
    picks.length = 0;
  }

  // Thin horizontal line from the canvas center to the right edge.
  function paintInitialLine() {
    paintLineFromCenterToRight({
      context,
      color: secondaryBarColor,
      rounded,
      width,
      height,
      barWidth,
    });
  }
};
interface DrawByLiveStreamParams { 49 | audioData: Uint8Array; 50 | unit: number; 51 | index: MutableRefObject; 52 | index2: MutableRefObject; 53 | canvas: HTMLCanvasElement; 54 | isRecordingInProgress: boolean; 55 | isPausedRecording: boolean; 56 | picks: Array; 57 | backgroundColor: string; 58 | barWidth: number; 59 | mainBarColor: string; 60 | secondaryBarColor: string; 61 | rounded: number; 62 | animateCurrentPick: boolean; 63 | fullscreen: boolean; 64 | } 65 | 66 | export interface DrawByBlob { 67 | barsData: BarsData[]; 68 | canvas: HTMLCanvasElement; 69 | barWidth: number; 70 | gap: number; 71 | backgroundColor: string; 72 | mainBarColor: string; 73 | secondaryBarColor: string; 74 | currentAudioTime?: number; 75 | rounded: number; 76 | duration: number; 77 | } 78 | 79 | export interface PaintLineFromCenterToRightParams { 80 | context: CanvasRenderingContext2D; 81 | color: string; 82 | rounded: number; 83 | width: number; 84 | height: number; 85 | barWidth: number; 86 | } 87 | 88 | export interface GetDataForCanvasParams { 89 | canvas: HTMLCanvasElement; 90 | backgroundColor: string; 91 | } 92 | 93 | export interface PaintLineParams { 94 | context: CanvasRenderingContext2D; 95 | color: string; 96 | rounded: number | number[]; 97 | x: number; 98 | y: number; 99 | w: number; 100 | h: number; 101 | } 102 | 103 | export type GetBarsDataParams = { 104 | bufferData: Float32Array; 105 | height: number; 106 | width: number; 107 | barWidth: number; 108 | gap: number; 109 | }; 110 | 111 | export interface useVoiceVisualizerParams { 112 | onStartRecording?: () => void; 113 | onStopRecording?: () => void; 114 | onPausedRecording?: () => void; 115 | onResumedRecording?: () => void; 116 | onClearCanvas?: () => void; 117 | onEndAudioPlayback?: () => void; 118 | onStartAudioPlayback?: () => void; 119 | onPausedAudioPlayback?: () => void; 120 | onResumedAudioPlayback?: () => void; 121 | onErrorPlayingAudio?: (error: Error) => void; 122 | shouldHandleBeforeUnload?: boolean; 
123 | } 124 | 125 | export interface UseWebWorkerParams { 126 | fn: AnyFunction; 127 | initialValue: T; 128 | onMessageReceived?: () => void; 129 | } 130 | 131 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 132 | export type AnyFunction = (...args: any[]) => any; 133 | -------------------------------------------------------------------------------- /src/index.css: -------------------------------------------------------------------------------- 1 | .voice-visualizer__buttons-container { 2 | display: flex; 3 | justify-content: center; 4 | align-items: center; 5 | column-gap: 20px; 6 | row-gap: 15px; 7 | flex-wrap: wrap; 8 | margin-bottom: 40px; 9 | } 10 | 11 | .voice-visualizer__btn-center { 12 | box-sizing: border-box; 13 | flex-shrink: 0; 14 | width: 60px; 15 | height: 60px; 16 | padding: 0; 17 | display: flex; 18 | justify-content: center; 19 | align-items: center; 20 | border-radius: 50%; 21 | background-color: white; 22 | border: 4px solid #c5c5c5; 23 | outline: none; 24 | cursor: pointer; 25 | transition: border-color 300ms, background-color 300ms; 26 | } 27 | 28 | .voice-visualizer__btn-center:disabled { 29 | opacity: 0.85; 30 | cursor: default; 31 | } 32 | 33 | .voice-visualizer__btn-center:hover { 34 | background-color: #eaeaea; 35 | border: 4px solid #9f9f9f; 36 | } 37 | 38 | .voice-visualizer__btn-center > img { 39 | width: auto; 40 | height: 50%; 41 | max-height: 30px; 42 | } 43 | 44 | .voice-visualizer__btn-center.voice-visualizer__btn-center-pause { 45 | background-color: #ff3030; 46 | } 47 | 48 | .voice-visualizer__btn-center.voice-visualizer__btn-center-pause:hover { 49 | background-color: #ff4f4f; 50 | } 51 | 52 | .voice-visualizer__btn-center.voice-visualizer__btn-center-pause > img { 53 | height: 50%; 54 | max-height: 16px; 55 | } 56 | 57 | .voice-visualizer__btn-center.voice-visualizer__btn-center--border-transparent { 58 | border-color: transparent; 59 | } 60 | 61 | 
.voice-visualizer__btn-center.voice-visualizer__btn-center--border-transparent:hover { 62 | background-color: white 63 | } 64 | 65 | .voice-visualizer__btn-left { 66 | box-sizing: border-box; 67 | flex-shrink: 0; 68 | width: 60px; 69 | height: 60px; 70 | padding: 0; 71 | display: flex; 72 | justify-content: center; 73 | align-items: center; 74 | border-radius: 50%; 75 | background-color: #ff3030; 76 | border: 4px solid #c5c5c5; 77 | outline: none; 78 | cursor: pointer; 79 | transition: border-color 300ms, background-color 300ms, opacity 300ms; 80 | } 81 | 82 | .voice-visualizer__btn-container { 83 | min-width: 100px; 84 | display: flex; 85 | justify-content: flex-end; 86 | } 87 | 88 | .voice-visualizer__btn-left:hover { 89 | background-color: #ff4f4f; 90 | } 91 | 92 | .voice-visualizer__btn-left:disabled { 93 | opacity: 0.6; 94 | background-color: #ff3030; 95 | cursor: default; 96 | } 97 | 98 | .voice-visualizer__btn-left.voice-visualizer__btn-left-microphone { 99 | background-color: white; 100 | } 101 | 102 | .voice-visualizer__btn-left.voice-visualizer__btn-left-microphone > img { 103 | width: auto; 104 | height: 50%; 105 | max-height: 30px; 106 | } 107 | 108 | .voice-visualizer__btn-left > img { 109 | width: auto; 110 | height: 50%; 111 | max-height: 16px; 112 | } 113 | 114 | .voice-visualizer__btn-left:hover { 115 | border: 4px solid #9f9f9f; 116 | } 117 | 118 | .voice-visualizer__btn { 119 | box-sizing: border-box; 120 | min-width: 100px; 121 | min-height: 60px; 122 | padding: 5px 20px; 123 | border-radius: 40px; 124 | font-size: 15px; 125 | background-color: #f0f0f0; 126 | transition: background-color 300ms, opacity 300ms; 127 | } 128 | 129 | .voice-visualizer__btn:disabled { 130 | opacity: 0.8; 131 | background-color: #f0f0f0; 132 | cursor: default; 133 | } 134 | 135 | .voice-visualizer__btn:hover { 136 | background-color: #bebebe; 137 | } 138 | 139 | .voice-visualizer__canvas-container { 140 | position: relative; 141 | width: fit-content; 142 | margin: 0 
auto; 143 | overflow: hidden; 144 | } 145 | 146 | .voice-visualizer__canvas-container canvas { 147 | display: block; 148 | } 149 | 150 | .voice-visualizer__canvas-microphone-btn { 151 | position: absolute; 152 | top: 50%; 153 | left: 50%; 154 | width: auto; 155 | max-width: 12%; 156 | min-width: 24px; 157 | height: 50%; 158 | max-height: 100px; 159 | background-color: transparent; 160 | border: none; 161 | outline: none; 162 | transform: translate(-50%, -50%); 163 | } 164 | 165 | .voice-visualizer__canvas-microphone-icon { 166 | width: 100%; 167 | height: 100%; 168 | will-change: transform; 169 | transition: transform 300ms; 170 | } 171 | 172 | .voice-visualizer__canvas-microphone-btn:hover .voice-visualizer__canvas-microphone-icon { 173 | transform: scale(1.03); 174 | } 175 | 176 | .voice-visualizer__canvas-audio-wave-icon { 177 | position: absolute; 178 | top: 50%; 179 | left: 50%; 180 | width: auto; 181 | max-width: 40%; 182 | height: 40%; 183 | max-height: 100px; 184 | transform: translate(-118%, -50%) scale(-1); 185 | } 186 | 187 | .voice-visualizer__canvas-audio-wave-icon2 { 188 | transform: translate(18%, -50%); 189 | } 190 | 191 | .voice-visualizer__canvas-audio-processing { 192 | position: absolute; 193 | top: 50%; 194 | left: 50%; 195 | margin: 0; 196 | transform: translate(-50%, -50%); 197 | } 198 | 199 | .voice-visualizer__progress-indicator-hovered { 200 | position: absolute; 201 | top: 0; 202 | pointer-events: none; 203 | height: 100%; 204 | width: 1px; 205 | background-color: rgba(133, 133, 133, 0.6); 206 | } 207 | 208 | .voice-visualizer__progress-indicator-hovered-time { 209 | position: absolute; 210 | top: 3%; 211 | left: 1px; 212 | width: fit-content; 213 | margin: 0; 214 | padding: 0 7px; 215 | opacity: 0.8; 216 | font-size: 12px; 217 | border-radius: 0 4px 4px 0; 218 | background-color: #575757; 219 | text-align: left; 220 | } 221 | 222 | .voice-visualizer__progress-indicator-hovered-time.voice-visualizer__progress-indicator-hovered-time-left { 
223 | left: unset; 224 | right: 1px; 225 | border-radius: 4px 0 0 4px; 226 | } 227 | 228 | .voice-visualizer__progress-indicator { 229 | position: absolute; 230 | top: 0; 231 | pointer-events: none; 232 | height: 100%; 233 | width: 1px; 234 | background-color: #efefef; 235 | } 236 | 237 | .voice-visualizer__progress-indicator-time { 238 | position: absolute; 239 | top: 3%; 240 | left: 1px; 241 | width: fit-content; 242 | box-sizing: border-box; 243 | min-width: 37px; 244 | margin: 0; 245 | padding: 0 7px; 246 | font-size: 12px; 247 | border-radius: 0 4px 4px 0; 248 | text-align: left; 249 | color: black; 250 | font-weight: 500; 251 | background-color: #efefef; 252 | } 253 | 254 | .voice-visualizer__progress-indicator-time.voice-visualizer__progress-indicator-time-left { 255 | left: unset; 256 | right: 1px; 257 | border-radius: 4px 0 0 4px; 258 | } 259 | 260 | .voice-visualizer__audio-info-container { 261 | box-sizing: border-box; 262 | height: 55px; 263 | display: flex; 264 | align-items: center; 265 | justify-content: center; 266 | gap: 30px; 267 | } 268 | 269 | .voice-visualizer__audio-info-time { 270 | margin: 15px 0; 271 | min-width: 38px; 272 | text-align: left; 273 | } 274 | 275 | .voice-visualizer__visually-hidden { 276 | position: absolute; 277 | width: 1px; 278 | height: 1px; 279 | margin: -1px; 280 | padding: 0; 281 | border: 4px solid #c5c5c5; 282 | 283 | white-space: nowrap; 284 | clip-path: inset(100%); 285 | clip: rect(0 0 0 0); 286 | overflow: hidden; 287 | } 288 | 289 | .voice-visualizer__relative { 290 | position: relative; 291 | } 292 | 293 | .voice-visualizer__spinner-wrapper { 294 | position: absolute; 295 | top: 50%; 296 | left: 50%; 297 | transform: translate(-50%, -50%); 298 | width: 52px; 299 | height: 52px; 300 | display: flex; 301 | justify-content: center; 302 | align-items: center; 303 | } 304 | 305 | .voice-visualizer__spinner { 306 | flex-shrink: 0; 307 | width: 100%; 308 | height: 100%; 309 | border: 4px solid rgba(197, 197, 197, 
0.5); 310 | border-radius: 50%; 311 | border-top-color: #b7b7b7; 312 | animation: voice-visualizer__spin 1s ease-in-out infinite; 313 | -webkit-animation: voice-visualizer__spin 1s ease-in-out infinite; 314 | } 315 | 316 | @keyframes voice-visualizer__spin { 317 | to { transform: rotate(360deg); } 318 | } 319 | @-webkit-keyframes voice-visualizer__spin { 320 | to { transform: rotate(360deg); } 321 | } -------------------------------------------------------------------------------- /src/hooks/useVoiceVisualizer.tsx: -------------------------------------------------------------------------------- 1 | import { useEffect, useRef, useState } from "react"; 2 | 3 | import { 4 | formatDurationTime, 5 | formatRecordedAudioTime, 6 | formatRecordingTime, 7 | getFileExtensionFromMimeType, 8 | } from "../helpers"; 9 | import { Controls, useVoiceVisualizerParams } from "../types/types.ts"; 10 | 11 | function useVoiceVisualizer({ 12 | onStartRecording, 13 | onStopRecording, 14 | onPausedRecording, 15 | onResumedRecording, 16 | onClearCanvas, 17 | onEndAudioPlayback, 18 | onStartAudioPlayback, 19 | onPausedAudioPlayback, 20 | onResumedAudioPlayback, 21 | onErrorPlayingAudio, 22 | shouldHandleBeforeUnload = true, 23 | }: useVoiceVisualizerParams = {}): Controls { 24 | const [isRecordingInProgress, setIsRecordingInProgress] = useState(false); 25 | const [isPausedRecording, setIsPausedRecording] = useState(false); 26 | const [audioStream, setAudioStream] = useState(null); 27 | const [audioData, setAudioData] = useState(new Uint8Array(0)); 28 | const [isProcessingAudioOnComplete, _setIsProcessingAudioOnComplete] = 29 | useState(false); 30 | const [recordedBlob, setRecordedBlob] = useState(null); 31 | const [bufferFromRecordedBlob, setBufferFromRecordedBlob] = 32 | useState(null); 33 | const [recordingTime, setRecordingTime] = useState(0); 34 | const [prevTime, setPrevTime] = useState(0); 35 | const [duration, setDuration] = useState(0); 36 | const [audioSrc, setAudioSrc] = 
useState(""); 37 | const [isPausedRecordedAudio, setIsPausedRecordedAudio] = useState(true); 38 | const [currentAudioTime, setCurrentAudioTime] = useState(0); 39 | const [isCleared, setIsCleared] = useState(true); 40 | const [isProcessingOnResize, _setIsProcessingOnResize] = useState(false); 41 | const [isPreloadedBlob, setIsPreloadedBlob] = useState(false); 42 | const [error, setError] = useState(null); 43 | const [isProcessingStartRecording, setIsProcessingStartRecording] = 44 | useState(false); 45 | 46 | const mediaRecorderRef = useRef(null); 47 | const audioContextRef = useRef(null); 48 | const analyserRef = useRef(null); 49 | const dataArrayRef = useRef(null); 50 | const sourceRef = useRef(null); 51 | const rafRecordingRef = useRef(null); 52 | const rafCurrentTimeUpdateRef = useRef(null); 53 | const audioRef = useRef(null); 54 | 55 | const isAvailableRecordedAudio = Boolean( 56 | bufferFromRecordedBlob && !isProcessingAudioOnComplete, 57 | ); 58 | const formattedDuration = formatDurationTime(duration); 59 | const formattedRecordingTime = formatRecordingTime(recordingTime); 60 | const formattedRecordedAudioCurrentTime = 61 | formatRecordedAudioTime(currentAudioTime); 62 | const isProcessingRecordedAudio = 63 | isProcessingOnResize || isProcessingAudioOnComplete; 64 | 65 | useEffect(() => { 66 | if (!isRecordingInProgress || isPausedRecording) return; 67 | 68 | const updateTimer = () => { 69 | const timeNow = performance.now(); 70 | setRecordingTime((prev) => prev + (timeNow - prevTime)); 71 | setPrevTime(timeNow); 72 | }; 73 | 74 | const interval = setInterval(updateTimer, 1000); 75 | 76 | return () => clearInterval(interval); 77 | }, [prevTime, isPausedRecording, isRecordingInProgress]); 78 | 79 | useEffect(() => { 80 | if (error) { 81 | clearCanvas(); 82 | return; 83 | } 84 | // eslint-disable-next-line react-hooks/exhaustive-deps 85 | }, [error]); 86 | 87 | useEffect(() => { 88 | return () => { 89 | clearCanvas(); 90 | }; 91 | // eslint-disable-next-line 
react-hooks/exhaustive-deps 92 | }, []); 93 | 94 | useEffect(() => { 95 | if (!isCleared && shouldHandleBeforeUnload) { 96 | window.addEventListener("beforeunload", handleBeforeUnload); 97 | } 98 | 99 | return () => { 100 | window.removeEventListener("beforeunload", handleBeforeUnload); 101 | }; 102 | }, [isCleared, shouldHandleBeforeUnload]); 103 | 104 | const handleBeforeUnload = (e: BeforeUnloadEvent) => { 105 | e.preventDefault(); 106 | e.returnValue = ""; 107 | }; 108 | 109 | const processBlob = async (blob: Blob) => { 110 | if (!blob) return; 111 | 112 | try { 113 | if (blob.size === 0) { 114 | throw new Error("Error: The audio blob is empty"); 115 | } 116 | const audioSrcFromBlob = URL.createObjectURL(blob); 117 | setAudioSrc(audioSrcFromBlob); 118 | 119 | const audioBuffer = await blob.arrayBuffer(); 120 | const audioContext = new AudioContext(); 121 | const buffer = await audioContext.decodeAudioData(audioBuffer); 122 | setBufferFromRecordedBlob(buffer); 123 | setDuration(buffer.duration - 0.06); 124 | 125 | setError(null); 126 | } catch (error) { 127 | console.error("Error processing the audio blob:", error); 128 | setError( 129 | error instanceof Error 130 | ? 
error 131 | : new Error("Error processing the audio blob"), 132 | ); 133 | } 134 | }; 135 | 136 | const setPreloadedAudioBlob = (blob: Blob) => { 137 | if (blob instanceof Blob) { 138 | clearCanvas(); 139 | setIsPreloadedBlob(true); 140 | setIsCleared(false); 141 | _setIsProcessingAudioOnComplete(true); 142 | setIsRecordingInProgress(false); 143 | setRecordingTime(0); 144 | setIsPausedRecording(false); 145 | audioRef.current = new Audio(); 146 | setRecordedBlob(blob); 147 | void processBlob(blob); 148 | } 149 | }; 150 | 151 | const getUserMedia = () => { 152 | setIsProcessingStartRecording(true); 153 | 154 | navigator.mediaDevices 155 | .getUserMedia({ audio: true }) 156 | .then((stream) => { 157 | setIsCleared(false); 158 | setIsProcessingStartRecording(false); 159 | setIsRecordingInProgress(true); 160 | setPrevTime(performance.now()); 161 | setAudioStream(stream); 162 | audioContextRef.current = new window.AudioContext(); 163 | analyserRef.current = audioContextRef.current.createAnalyser(); 164 | dataArrayRef.current = new Uint8Array( 165 | analyserRef.current.frequencyBinCount, 166 | ); 167 | sourceRef.current = 168 | audioContextRef.current.createMediaStreamSource(stream); 169 | sourceRef.current.connect(analyserRef.current); 170 | mediaRecorderRef.current = new MediaRecorder(stream); 171 | mediaRecorderRef.current.addEventListener( 172 | "dataavailable", 173 | handleDataAvailable, 174 | ); 175 | mediaRecorderRef.current.start(); 176 | if (onStartRecording) onStartRecording(); 177 | 178 | recordingFrame(); 179 | }) 180 | .catch((error) => { 181 | setIsProcessingStartRecording(false); 182 | setError( 183 | error instanceof Error 184 | ? 
error 185 | : new Error("Error starting audio recording"), 186 | ); 187 | }); 188 | }; 189 | 190 | const recordingFrame = () => { 191 | analyserRef.current!.getByteTimeDomainData(dataArrayRef.current!); 192 | setAudioData(new Uint8Array(dataArrayRef.current!)); 193 | rafRecordingRef.current = requestAnimationFrame(recordingFrame); 194 | }; 195 | 196 | const handleDataAvailable = (event: BlobEvent) => { 197 | if (!mediaRecorderRef.current) return; 198 | 199 | mediaRecorderRef.current = null; 200 | audioRef.current = new Audio(); 201 | setRecordedBlob(event.data); 202 | void processBlob(event.data); 203 | }; 204 | 205 | const handleTimeUpdate = () => { 206 | if (!audioRef.current) return; 207 | 208 | setCurrentAudioTime(audioRef.current.currentTime); 209 | 210 | rafCurrentTimeUpdateRef.current = requestAnimationFrame(handleTimeUpdate); 211 | }; 212 | 213 | const startRecording = () => { 214 | if (isRecordingInProgress || isProcessingStartRecording) return; 215 | 216 | if (!isCleared) clearCanvas(); 217 | getUserMedia(); 218 | }; 219 | 220 | const stopRecording = () => { 221 | if (!isRecordingInProgress) return; 222 | 223 | setIsRecordingInProgress(false); 224 | if (mediaRecorderRef.current) { 225 | mediaRecorderRef.current.stop(); 226 | mediaRecorderRef.current.removeEventListener( 227 | "dataavailable", 228 | handleDataAvailable, 229 | ); 230 | } 231 | audioStream?.getTracks().forEach((track) => track.stop()); 232 | if (rafRecordingRef.current) cancelAnimationFrame(rafRecordingRef.current); 233 | if (sourceRef.current) sourceRef.current.disconnect(); 234 | if (audioContextRef.current && audioContextRef.current.state !== "closed") { 235 | void audioContextRef.current.close(); 236 | } 237 | _setIsProcessingAudioOnComplete(true); 238 | setRecordingTime(0); 239 | setIsPausedRecording(false); 240 | if (onStopRecording) onStopRecording(); 241 | }; 242 | 243 | const clearCanvas = () => { 244 | if (rafRecordingRef.current) { 245 | 
cancelAnimationFrame(rafRecordingRef.current); 246 | rafRecordingRef.current = null; 247 | } 248 | if (rafCurrentTimeUpdateRef.current) { 249 | cancelAnimationFrame(rafCurrentTimeUpdateRef.current); 250 | rafCurrentTimeUpdateRef.current = null; 251 | } 252 | if (mediaRecorderRef.current) { 253 | mediaRecorderRef.current.removeEventListener( 254 | "dataavailable", 255 | handleDataAvailable, 256 | ); 257 | mediaRecorderRef.current.stop(); 258 | mediaRecorderRef.current = null; 259 | } 260 | 261 | audioStream?.getTracks().forEach((track) => track.stop()); 262 | if (audioRef?.current) { 263 | audioRef.current.removeEventListener("ended", onEndedRecordedAudio); 264 | audioRef.current.pause(); 265 | audioRef.current.src = ""; 266 | audioRef.current = null; 267 | } 268 | audioContextRef.current = null; 269 | analyserRef.current = null; 270 | dataArrayRef.current = null; 271 | sourceRef.current = null; 272 | 273 | setAudioStream(null); 274 | setIsProcessingStartRecording(false); 275 | setIsRecordingInProgress(false); 276 | setIsPreloadedBlob(false); 277 | _setIsProcessingAudioOnComplete(false); 278 | setRecordedBlob(null); 279 | setBufferFromRecordedBlob(null); 280 | setRecordingTime(0); 281 | setPrevTime(0); 282 | setDuration(0); 283 | setAudioSrc(""); 284 | setCurrentAudioTime(0); 285 | setIsPausedRecordedAudio(true); 286 | setIsPausedRecording(false); 287 | _setIsProcessingOnResize(false); 288 | setAudioData(new Uint8Array(0)); 289 | setError(null); 290 | setIsCleared(true); 291 | if (onClearCanvas) onClearCanvas(); 292 | }; 293 | 294 | const startPlayingAudio = () => { 295 | if (audioRef.current && audioRef.current.paused) { 296 | const audioPromise = audioRef.current.play(); 297 | if (audioPromise !== undefined) { 298 | audioPromise.catch((error) => { 299 | console.error(error); 300 | if (onErrorPlayingAudio) { 301 | onErrorPlayingAudio( 302 | error instanceof Error ? 
error : new Error("Error playing audio"), 303 | ); 304 | } 305 | }); 306 | } 307 | } 308 | }; 309 | 310 | const startAudioPlayback = () => { 311 | if (!audioRef.current || isRecordingInProgress) return; 312 | 313 | requestAnimationFrame(handleTimeUpdate); 314 | startPlayingAudio(); 315 | audioRef.current.addEventListener("ended", onEndedRecordedAudio); 316 | setIsPausedRecordedAudio(false); 317 | if (onStartAudioPlayback && currentAudioTime === 0) { 318 | onStartAudioPlayback(); 319 | } 320 | if (onResumedAudioPlayback && currentAudioTime !== 0) { 321 | onResumedAudioPlayback(); 322 | } 323 | }; 324 | 325 | const stopAudioPlayback = () => { 326 | if (!audioRef.current || isRecordingInProgress) return; 327 | 328 | if (rafCurrentTimeUpdateRef.current) { 329 | cancelAnimationFrame(rafCurrentTimeUpdateRef.current); 330 | } 331 | audioRef.current.removeEventListener("ended", onEndedRecordedAudio); 332 | audioRef.current.pause(); 333 | setIsPausedRecordedAudio(true); 334 | const newCurrentTime = audioRef.current.currentTime; 335 | setCurrentAudioTime(newCurrentTime); 336 | audioRef.current.currentTime = newCurrentTime; 337 | if (onPausedAudioPlayback) onPausedAudioPlayback(); 338 | }; 339 | 340 | const togglePauseResume = () => { 341 | if (isRecordingInProgress) { 342 | setIsPausedRecording((prevPaused) => !prevPaused); 343 | if (mediaRecorderRef.current?.state === "recording") { 344 | mediaRecorderRef.current?.pause(); 345 | setRecordingTime((prev) => prev + (performance.now() - prevTime)); 346 | if (rafRecordingRef.current) { 347 | cancelAnimationFrame(rafRecordingRef.current); 348 | } 349 | if (onPausedRecording) onPausedRecording(); 350 | } else { 351 | rafRecordingRef.current = requestAnimationFrame(recordingFrame); 352 | mediaRecorderRef.current?.resume(); 353 | setPrevTime(performance.now()); 354 | if (onResumedRecording) onResumedRecording(); 355 | } 356 | return; 357 | } 358 | 359 | if (audioRef.current && isAvailableRecordedAudio) { 360 | 
audioRef.current.paused ? startAudioPlayback() : stopAudioPlayback(); 361 | } 362 | }; 363 | 364 | const onEndedRecordedAudio = () => { 365 | if (rafCurrentTimeUpdateRef.current) { 366 | cancelAnimationFrame(rafCurrentTimeUpdateRef.current); 367 | } 368 | setIsPausedRecordedAudio(true); 369 | if (!audioRef?.current) return; 370 | audioRef.current.currentTime = 0; 371 | setCurrentAudioTime(0); 372 | if (onEndAudioPlayback) onEndAudioPlayback(); 373 | }; 374 | 375 | const saveAudioFile = () => { 376 | if (!audioSrc) return; 377 | 378 | const downloadAnchor = document.createElement("a"); 379 | downloadAnchor.href = audioSrc; 380 | downloadAnchor.download = `recorded_audio${getFileExtensionFromMimeType( 381 | mediaRecorderRef.current?.mimeType, 382 | )}`; 383 | document.body.appendChild(downloadAnchor); 384 | downloadAnchor.click(); 385 | document.body.removeChild(downloadAnchor); 386 | URL.revokeObjectURL(audioSrc); 387 | }; 388 | 389 | return { 390 | audioRef, 391 | isRecordingInProgress, 392 | isPausedRecording, 393 | audioData, 394 | recordingTime, 395 | isProcessingRecordedAudio, 396 | recordedBlob, 397 | mediaRecorder: mediaRecorderRef.current, 398 | duration, 399 | currentAudioTime, 400 | audioSrc, 401 | isPausedRecordedAudio, 402 | bufferFromRecordedBlob, 403 | isCleared, 404 | isAvailableRecordedAudio, 405 | formattedDuration, 406 | formattedRecordingTime, 407 | formattedRecordedAudioCurrentTime, 408 | startRecording, 409 | togglePauseResume, 410 | startAudioPlayback, 411 | stopAudioPlayback, 412 | stopRecording, 413 | saveAudioFile, 414 | clearCanvas, 415 | setCurrentAudioTime, 416 | error, 417 | isProcessingOnResize, 418 | isProcessingStartRecording, 419 | isPreloadedBlob, 420 | setPreloadedAudioBlob, 421 | _setIsProcessingAudioOnComplete, 422 | _setIsProcessingOnResize, 423 | }; 424 | } 425 | 426 | export default useVoiceVisualizer; 427 | -------------------------------------------------------------------------------- /src/components/VoiceVisualizer.tsx: 
-------------------------------------------------------------------------------- 1 | import { 2 | useState, 3 | useEffect, 4 | useLayoutEffect, 5 | useRef, 6 | MouseEventHandler, 7 | } from "react"; 8 | 9 | import { 10 | drawByLiveStream, 11 | drawByBlob, 12 | getBarsData, 13 | initialCanvasSetup, 14 | formatToInlineStyleValue, 15 | formatRecordedAudioTime, 16 | } from "../helpers"; 17 | import { useWebWorker } from "../hooks/useWebWorker.tsx"; 18 | import { useDebounce } from "../hooks/useDebounce.tsx"; 19 | import { 20 | BarsData, 21 | Controls, 22 | BarItem, 23 | GetBarsDataParams, 24 | } from "../types/types.ts"; 25 | 26 | import "../index.css"; 27 | 28 | import MicrophoneIcon from "../assets/MicrophoneIcon.tsx"; 29 | import AudioWaveIcon from "../assets/AudioWaveIcon.tsx"; 30 | import microphoneIcon from "../assets/microphone.svg"; 31 | import playIcon from "../assets/play.svg"; 32 | import pauseIcon from "../assets/pause.svg"; 33 | import stopIcon from "../assets/stop.svg"; 34 | 35 | interface VoiceVisualizerProps { 36 | controls: Controls; 37 | height?: string | number; 38 | width?: string | number; 39 | speed?: number; 40 | backgroundColor?: string; 41 | mainBarColor?: string; 42 | secondaryBarColor?: string; 43 | barWidth?: number; 44 | gap?: number; 45 | rounded?: number; 46 | fullscreen?: boolean; 47 | isControlPanelShown?: boolean; 48 | isDownloadAudioButtonShown?: boolean; 49 | animateCurrentPick?: boolean; 50 | onlyRecording?: boolean; 51 | isDefaultUIShown?: boolean; 52 | defaultMicrophoneIconColor?: string; 53 | defaultAudioWaveIconColor?: string; 54 | mainContainerClassName?: string; 55 | canvasContainerClassName?: string; 56 | isProgressIndicatorShown?: boolean; 57 | progressIndicatorClassName?: string; 58 | isProgressIndicatorTimeShown?: boolean; 59 | progressIndicatorTimeClassName?: string; 60 | isProgressIndicatorOnHoverShown?: boolean; 61 | progressIndicatorOnHoverClassName?: string; 62 | isProgressIndicatorTimeOnHoverShown?: boolean; 63 | 
progressIndicatorTimeOnHoverClassName?: string; 64 | isAudioProcessingTextShown?: boolean; 65 | audioProcessingTextClassName?: string; 66 | controlButtonsClassName?: string; 67 | } 68 | 69 | const VoiceVisualizer = ({ 70 | controls: { 71 | audioRef, 72 | audioData, 73 | isRecordingInProgress, 74 | recordedBlob, 75 | duration, 76 | currentAudioTime, 77 | audioSrc, 78 | bufferFromRecordedBlob, 79 | togglePauseResume, 80 | startRecording, 81 | stopRecording, 82 | saveAudioFile, 83 | isAvailableRecordedAudio, 84 | isPausedRecordedAudio, 85 | isPausedRecording, 86 | isProcessingStartRecording, 87 | isProcessingRecordedAudio, 88 | isCleared, 89 | formattedDuration, 90 | formattedRecordingTime, 91 | formattedRecordedAudioCurrentTime, 92 | clearCanvas, 93 | setCurrentAudioTime, 94 | isProcessingOnResize, 95 | _setIsProcessingOnResize, 96 | _setIsProcessingAudioOnComplete, 97 | }, 98 | width = "100%", 99 | height = 200, 100 | speed = 3, 101 | backgroundColor = "transparent", 102 | mainBarColor = "#FFFFFF", 103 | secondaryBarColor = "#5e5e5e", 104 | barWidth = 2, 105 | gap = 1, 106 | rounded = 5, 107 | isControlPanelShown = true, 108 | isDownloadAudioButtonShown = false, 109 | animateCurrentPick = true, 110 | fullscreen = false, 111 | onlyRecording = false, 112 | isDefaultUIShown = true, 113 | defaultMicrophoneIconColor = mainBarColor, 114 | defaultAudioWaveIconColor = mainBarColor, 115 | mainContainerClassName, 116 | canvasContainerClassName, 117 | isProgressIndicatorShown = !onlyRecording, 118 | progressIndicatorClassName, 119 | isProgressIndicatorTimeShown = true, 120 | progressIndicatorTimeClassName, 121 | isProgressIndicatorOnHoverShown = !onlyRecording, 122 | progressIndicatorOnHoverClassName, 123 | isProgressIndicatorTimeOnHoverShown = true, 124 | progressIndicatorTimeOnHoverClassName, 125 | isAudioProcessingTextShown = true, 126 | audioProcessingTextClassName, 127 | controlButtonsClassName, 128 | }: VoiceVisualizerProps) => { 129 | const [hoveredOffsetX, 
setHoveredOffsetX] = useState(0); 130 | const [canvasCurrentWidth, setCanvasCurrentWidth] = useState(0); 131 | const [canvasCurrentHeight, setCanvasCurrentHeight] = useState(0); 132 | const [canvasWidth, setCanvasWidth] = useState(0); 133 | const [isRecordedCanvasHovered, setIsRecordedCanvasHovered] = useState(false); 134 | const [screenWidth, setScreenWidth] = useState(window.innerWidth); 135 | const [isResizing, setIsResizing] = useState(false); 136 | 137 | const isMobile = screenWidth < 768; 138 | const formattedSpeed = Math.trunc(speed); 139 | const formattedGap = Math.trunc(gap); 140 | const formattedBarWidth = Math.trunc( 141 | isMobile && formattedGap > 0 ? barWidth + 1 : barWidth, 142 | ); 143 | const unit = formattedBarWidth + formattedGap * formattedBarWidth; 144 | 145 | const canvasRef = useRef(null); 146 | const picksRef = useRef>([]); 147 | const indexSpeedRef = useRef(formattedSpeed); 148 | const indexRef = useRef(formattedBarWidth); 149 | const index2Ref = useRef(formattedBarWidth); 150 | const canvasContainerRef = useRef(null); 151 | 152 | const { 153 | result: barsData, 154 | setResult: setBarsData, 155 | run, 156 | } = useWebWorker({ 157 | fn: getBarsData, 158 | initialValue: [], 159 | onMessageReceived: completedAudioProcessing, 160 | }); 161 | 162 | const debouncedOnResize = useDebounce(onResize); 163 | 164 | useEffect(() => { 165 | if (!canvasContainerRef.current) return; 166 | 167 | const handleResize = () => { 168 | setScreenWidth(window.innerWidth); 169 | 170 | if (isAvailableRecordedAudio) { 171 | _setIsProcessingOnResize(true); 172 | setIsResizing(true); 173 | debouncedOnResize(); 174 | } else { 175 | onResize(); 176 | } 177 | }; 178 | 179 | const resizeObserver = new ResizeObserver(handleResize); 180 | resizeObserver.observe(canvasContainerRef.current); 181 | 182 | return () => { 183 | resizeObserver.disconnect(); 184 | }; 185 | // eslint-disable-next-line react-hooks/exhaustive-deps 186 | }, [width, isAvailableRecordedAudio]); 187 | 188 
| useLayoutEffect(() => { 189 | if (!canvasRef.current) return; 190 | 191 | if (indexSpeedRef.current >= formattedSpeed || !audioData.length) { 192 | indexSpeedRef.current = audioData.length ? 0 : formattedSpeed; 193 | drawByLiveStream({ 194 | audioData, 195 | unit, 196 | index: indexRef, 197 | index2: index2Ref, 198 | canvas: canvasRef.current, 199 | picks: picksRef.current, 200 | isRecordingInProgress, 201 | isPausedRecording: isPausedRecording, 202 | backgroundColor, 203 | mainBarColor, 204 | secondaryBarColor, 205 | barWidth: formattedBarWidth, 206 | rounded, 207 | animateCurrentPick, 208 | fullscreen, 209 | }); 210 | } 211 | 212 | indexSpeedRef.current += 1; 213 | // eslint-disable-next-line react-hooks/exhaustive-deps 214 | }, [ 215 | canvasRef.current, 216 | audioData, 217 | formattedBarWidth, 218 | backgroundColor, 219 | mainBarColor, 220 | secondaryBarColor, 221 | rounded, 222 | fullscreen, 223 | isDefaultUIShown, 224 | canvasWidth, 225 | ]); 226 | 227 | useEffect(() => { 228 | if (!isAvailableRecordedAudio) return; 229 | 230 | if (isRecordedCanvasHovered) { 231 | canvasRef.current?.addEventListener("mouseleave", hideTimeIndicator); 232 | } else { 233 | canvasRef.current?.addEventListener("mouseenter", showTimeIndicator); 234 | } 235 | 236 | return () => { 237 | if (isRecordedCanvasHovered) { 238 | canvasRef.current?.removeEventListener("mouseleave", hideTimeIndicator); 239 | } else { 240 | // eslint-disable-next-line react-hooks/exhaustive-deps 241 | canvasRef.current?.removeEventListener("mouseenter", showTimeIndicator); 242 | } 243 | }; 244 | }, [isRecordedCanvasHovered, isAvailableRecordedAudio]); 245 | 246 | useEffect(() => { 247 | if ( 248 | !bufferFromRecordedBlob || 249 | !canvasRef.current || 250 | isRecordingInProgress || 251 | isResizing 252 | ) { 253 | return; 254 | } 255 | 256 | if (onlyRecording) { 257 | clearCanvas(); 258 | return; 259 | } 260 | 261 | picksRef.current = []; 262 | const bufferData = bufferFromRecordedBlob.getChannelData(0); 
263 | 264 | run({ 265 | bufferData, 266 | height: canvasCurrentHeight, 267 | width: canvasWidth, 268 | barWidth: formattedBarWidth, 269 | gap: formattedGap, 270 | }); 271 | 272 | canvasRef.current?.addEventListener("mousemove", setCurrentHoveredOffsetX); 273 | 274 | return () => { 275 | // eslint-disable-next-line react-hooks/exhaustive-deps 276 | canvasRef.current?.removeEventListener( 277 | "mousemove", 278 | setCurrentHoveredOffsetX, 279 | ); 280 | }; 281 | // eslint-disable-next-line react-hooks/exhaustive-deps 282 | }, [ 283 | bufferFromRecordedBlob, 284 | canvasCurrentWidth, 285 | canvasCurrentHeight, 286 | gap, 287 | barWidth, 288 | isResizing, 289 | ]); 290 | 291 | useEffect(() => { 292 | if ( 293 | onlyRecording || 294 | !barsData?.length || 295 | !canvasRef.current || 296 | isProcessingRecordedAudio 297 | ) 298 | return; 299 | 300 | if (isCleared) { 301 | setBarsData([]); 302 | return; 303 | } 304 | 305 | drawByBlob({ 306 | barsData, 307 | canvas: canvasRef.current, 308 | barWidth: formattedBarWidth, 309 | gap: formattedGap, 310 | backgroundColor, 311 | mainBarColor, 312 | secondaryBarColor, 313 | currentAudioTime, 314 | rounded, 315 | duration, 316 | }); 317 | // eslint-disable-next-line react-hooks/exhaustive-deps 318 | }, [ 319 | barsData, 320 | currentAudioTime, 321 | isCleared, 322 | rounded, 323 | backgroundColor, 324 | mainBarColor, 325 | secondaryBarColor, 326 | ]); 327 | 328 | useEffect(() => { 329 | if (isProcessingRecordedAudio && canvasRef.current) { 330 | initialCanvasSetup({ 331 | canvas: canvasRef.current, 332 | backgroundColor, 333 | }); 334 | } 335 | // eslint-disable-next-line react-hooks/exhaustive-deps 336 | }, [isProcessingRecordedAudio]); 337 | 338 | function onResize() { 339 | if (!canvasContainerRef.current || !canvasRef.current) return; 340 | 341 | indexSpeedRef.current = formattedSpeed; 342 | 343 | const roundedHeight = 344 | Math.trunc( 345 | (canvasContainerRef.current.clientHeight * window.devicePixelRatio) / 2, 346 | ) * 2; 
347 | 348 | setCanvasCurrentWidth(canvasContainerRef.current.clientWidth); 349 | setCanvasCurrentHeight(roundedHeight); 350 | setCanvasWidth( 351 | Math.round( 352 | canvasContainerRef.current.clientWidth * window.devicePixelRatio, 353 | ), 354 | ); 355 | 356 | setIsResizing(false); 357 | } 358 | 359 | function completedAudioProcessing() { 360 | _setIsProcessingOnResize(false); 361 | _setIsProcessingAudioOnComplete(false); 362 | if (audioRef?.current && !isProcessingOnResize) { 363 | audioRef.current.src = audioSrc; 364 | } 365 | } 366 | 367 | const showTimeIndicator = () => { 368 | setIsRecordedCanvasHovered(true); 369 | }; 370 | 371 | const hideTimeIndicator = () => { 372 | setIsRecordedCanvasHovered(false); 373 | }; 374 | 375 | const setCurrentHoveredOffsetX = (e: MouseEvent) => { 376 | setHoveredOffsetX(e.offsetX); 377 | }; 378 | 379 | const handleRecordedAudioCurrentTime: MouseEventHandler = ( 380 | e, 381 | ) => { 382 | if (audioRef?.current && canvasRef.current) { 383 | const newCurrentTime = 384 | (duration / canvasCurrentWidth) * 385 | (e.clientX - canvasRef.current.getBoundingClientRect().left); 386 | 387 | audioRef.current.currentTime = newCurrentTime; 388 | setCurrentAudioTime(newCurrentTime); 389 | } 390 | }; 391 | 392 | const timeIndicatorStyleLeft = 393 | (currentAudioTime / duration) * canvasCurrentWidth; 394 | 395 | return ( 396 |
397 |
404 | 414 | Your browser does not support HTML5 Canvas. 415 | 416 | {isDefaultUIShown && isCleared && ( 417 | <> 418 | 419 | 420 | 431 | 432 | )} 433 | {isAudioProcessingTextShown && isProcessingRecordedAudio && ( 434 |

440 | Processing Audio... 441 |

442 | )} 443 | {isRecordedCanvasHovered && 444 | isAvailableRecordedAudio && 445 | !isProcessingRecordedAudio && 446 | !isMobile && 447 | isProgressIndicatorOnHoverShown && ( 448 |
456 | {isProgressIndicatorTimeOnHoverShown && ( 457 |

466 | {formatRecordedAudioTime( 467 | (duration / canvasCurrentWidth) * hoveredOffsetX, 468 | )} 469 |

470 | )} 471 |
472 | )} 473 | {isProgressIndicatorShown && 474 | isAvailableRecordedAudio && 475 | !isProcessingRecordedAudio && 476 | duration ? ( 477 |
488 | {isProgressIndicatorTimeShown && ( 489 |

498 | {formattedRecordedAudioCurrentTime} 499 |

500 | )} 501 |
502 | ) : null} 503 |
504 | 505 | {isControlPanelShown && ( 506 | <> 507 |
508 | {isRecordingInProgress && ( 509 |

510 | {formattedRecordingTime} 511 |

512 | )} 513 | {duration && !isProcessingRecordedAudio ? ( 514 |

{formattedDuration}

515 | ) : null} 516 |
517 | 518 |
519 | {isRecordingInProgress && ( 520 |
521 | 535 |
536 | )} 537 | {!isCleared && ( 538 | 553 | )} 554 | {isCleared && ( 555 | 572 | )} 573 | 584 | {!isCleared && ( 585 | 595 | )} 596 | {isDownloadAudioButtonShown && recordedBlob && ( 597 | 607 | )} 608 |
609 | 610 | )} 611 |
612 | ); 613 | }; 614 | 615 | export default VoiceVisualizer; 616 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # react-voice-visualizer 2 | 3 | # [Demo App](https://react-voice-visualizer.vercel.app/) 4 | 5 | ## Overview 6 | 7 | The `react-voice-visualizer` library offers a comprehensive and highly customizable solution for capturing, visualizing, and manipulating audio recordings within your web applications. Built with React hook and component, this library simplifies the process of integrating audio recording and visualization functionalities using the [Web Audio API](https://developer.mozilla.org/en-US/docs/Web/API/Web_Audio_API). 8 | 9 | ![screenshot](./public/voiceVisualizer.png) 10 | 11 | Experience the [Demo App: Click here to explore the react-voice-visualizer](https://react-voice-visualizer.vercel.app/). Try it out and see it in action! 12 | 13 | ### Key Features: 14 | 15 | - **Audio Recording**: Easily capture audio recordings with minimal setup using React hook and component. 16 | 17 | - **Visualization**: Visualize audio data in real-time, making it suitable for applications such as voice recognition, sound analysis, and more. 18 | 19 | - **Customization**: The library is absolutely customizable, allowing you to tailor the audio recording and visualization components to fit your project's unique requirements. 20 | 21 | - **Responsiveness**: Create audio applications that adapt seamlessly to various screen sizes and devices, ensuring a consistent user experience across platforms. 22 | 23 | This README provides a comprehensive guide to effectively utilizing the library's features. 
24 | 25 | ## Installation 26 | 27 | To integrate the React Voice Visualizer library into your project, simply install it via npm or yarn: 28 | 29 | ```bash 30 | npm install react-voice-visualizer 31 | ``` 32 | 33 | or 34 | 35 | ```bash 36 | yarn add react-voice-visualizer 37 | ``` 38 | 39 | ## Version 2.x.x Release Notes 40 | 41 | **Breaking Changes:** 42 | - Ref Handling Update: In this version, the library has been enhanced to manage audio references (audioRef) internally. Users no longer need to pass ref={audioRef} separately to components. This change offers a more seamless and intuitive experience. 43 | 44 | **New Features:** 45 | - Preloaded Audio Blob Support: Version 2.x.x introduces the capability to set preloaded audio blobs. Users now have the flexibility to load audio blobs from various sources, such as user inputs or files using `setPreloadedAudioBlob` function, expanding the library's versatility in different scenarios. 46 | 47 | ## [Demo App](https://react-voice-visualizer.vercel.app/) 48 | For a live demonstration of the React Voice Visualizer library, you can check out the [Demo Voice Visualizer App](https://react-voice-visualizer.vercel.app/). This app showcases various features and functionalities of the library in action. 49 | 50 | Feel free to explore the demo app to see how the **React Voice Visualizer** can be used in different scenarios. You can refer to the source code of the demo app for additional examples and inspiration for using the library effectively. 51 | 52 | ## Usage 53 | 54 | To start using the VoiceVisualizer component, you will need to import the necessary hook and component from the library. 
55 | Here's an example of how to use this library in your `App` component: 56 | 57 | ```typescript jsx 58 | import { useEffect } from "react"; 59 | import { useVoiceVisualizer, VoiceVisualizer } from "react-voice-visualizer"; 60 | 61 | const App = () => { 62 | // Initialize the recorder controls using the hook 63 | const recorderControls = useVoiceVisualizer(); 64 | const { 65 | // ... (Extracted controls and states, if necessary) 66 | recordedBlob, 67 | error, 68 | } = recorderControls; 69 | 70 | // Get the recorded audio blob 71 | useEffect(() => { 72 | if (!recordedBlob) return; 73 | 74 | console.log(recordedBlob); 75 | }, [recordedBlob]); 76 | 77 | // Get the error when it occurs 78 | useEffect(() => { 79 | if (!error) return; 80 | 81 | console.error(error); 82 | }, [error]); 83 | 84 | return ( 85 | 86 | ); 87 | }; 88 | 89 | export default App; 90 | ``` 91 | 92 | ## Getting started 93 | 94 | 1. Import the required components and hooks from the library. 95 | 2. Initialize the recorder controls using the `useVoiceVisualizer` hook. 96 | 3. Use the provided state and functions to manage audio recording and playback. 97 | 4. Render the `VoiceVisualizer` component to display the real-time audio visualization. 98 | 5. Use the provided buttons to start, pause, stop, and save the audio recording. 99 | 100 | Remember to include necessary CSS styles to customize the components and buttons according to your design preferences. 101 | 102 | ## API Reference 103 | 104 | ### `useVoiceVisualizer()` Hook 105 | 106 | A hook that provides recorder controls and state for audio visualization. 
107 | 108 | ##### Usage 109 | 110 | ```jsx 111 | const recorderControls = useVoiceVisualizer(); 112 | ``` 113 | 114 | ##### Parameters (All parameters are optional) 115 | 116 | | Parameter | Type | Description | 117 | |:---------------------------|:-------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------| 118 | | `onStartRecording` | `() => void` | Callback function triggered when recording starts. | 119 | | `onStopRecording` | `() => void` | Callback function triggered when recording stops. | 120 | | `onPausedRecording` | `() => void` | Callback function triggered when recording is paused. | 121 | | `onResumedRecording` | `() => void` | Callback function triggered when recording is resumed. | 122 | | `onClearCanvas` | `() => void` | Callback function triggered when the canvas is cleared. | 123 | | `onEndAudioPlayback` | `() => void` | Callback function triggered when audio playback ends. | 124 | | `onStartAudioPlayback` | `() => void` | Callback function triggered when audio playback starts. | 125 | | `onPausedAudioPlayback` | `() => void` | Callback function triggered when audio playback is paused. | 126 | | `onResumedAudioPlayback` | `() => void` | Callback function triggered when audio playback is resumed. | 127 | | `onErrorPlayingAudio` | `(error: Error) => void` | Callback function is invoked when an error occurs during the execution of `audio.play()`. It provides an opportunity to handle and respond to such error. | 128 | | `shouldHandleBeforeUnload` | `boolean` | Determines whether the `beforeunload` event handler should be added to the window, preventing page unload if necessary (`true` by default). 
| 129 | 130 | ##### Returns 131 | 132 | | Returns | Type | Description | 133 | |:------------------------------------|:----------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| 134 | | `audioRef` | `MutableRefObject`
`` | Reference to the audio element used for playback. | 135 | | `isRecordingInProgress` | `boolean` | Indicates if audio recording is currently in progress. | 136 | | `isPausedRecording` | `boolean` | Indicates if audio recording is currently paused. | 137 | | `audioData` | `Uint8Array` | Audio data for real-time visualization. | 138 | | `recordingTime` | `number` | Elapsed time during recording in milliseconds. | 139 | | `mediaRecorder` | `MediaRecorder \| null` | MediaRecorder instance used for recording audio. | 140 | | `duration` | `number` | Duration of the recorded audio in seconds. | 141 | | `currentAudioTime` | `number` | Current playback time of the recorded audio in seconds. | 142 | | `audioSrc` | `string` | Source URL of the recorded audio file for playback. | 143 | | `isPausedRecordedAudio` | `boolean` | Indicates if recorded audio playback is paused. | 144 | | `isProcessingRecordedAudio` | `boolean` | Indicates if the recorded audio is being processed and 'Processing Audio...' text shown. | 145 | | `isCleared` | `boolean` | Indicates if the canvas has been cleared. | 146 | | `isAvailableRecordedAudio` | `boolean` | Indicates whether recorded audi is available and not currently being processed. This return value can be used to check if it's an appropriate time to work with recorded audio data in your application. | 147 | | `recordedBlob` | `Blob \| null` | Recorded audio data in Blob format. | 148 | | `bufferFromRecordedBlob` | `AudioBuffer \| null` | Audio buffer from the recorded Blob. | 149 | | `formattedDuration` | `string` | Formatted duration time in format 09:51m. | 150 | | `formattedRecordingTime` | `string` | Formatted recording current time in format 09:51. | 151 | | `formattedRecordedAudioCurrentTime` | `string` | Formatted recorded audio current time in format 09:51:1. | 152 | | `startRecording` | `() => void` | Function to start audio recording. 
| 153 | | `togglePauseResume` | `() => void` | Function to toggle pause/resume during recording and playback of recorded audio. | 154 | | `startAudioPlayback` | `() => void` | Function to start/resume playback of recorded audio. | 155 | | `stopAudioPlayback` | `() => void` | Function to pause playback of recorded audio. | 156 | | `stopRecording` | `() => void` | Function to stop audio recording. | 157 | | `saveAudioFile` | `() => void` | This function allows you to save the recorded audio as a `webm` file format. Please note that it supports saving audio only in the webm format. If you need to save the audio in a different format, you can use external libraries like `FFmpeg` to convert the Blob to your desired format. This flexibility allows you to tailor the output format according to your specific needs. | 158 | | `clearCanvas` | `() => void` | Function to clear the visualization canvas. | 159 | | `setCurrentAudioTime` | `Dispatch>` | Internal function to handle current audio time updates during playback. | 160 | | `error` | `Error \| null` | Error object if any error occurred during recording or playback. | 161 | | `isProcessingOnResize` | `boolean` | Indicates whether audio processing is occurring during a resize event when audio is recorded and a blob is present. | 162 | | `isProcessingStartRecording` | `boolean` | When set to `true`, it indicates that the start recording button has been pressed, but either the permission to record has not yet been granted or the recording itself has not yet commenced. This prop serves as a helpful flag to manage the state of the recording process, allowing components to react accordingly to the current stage of recording initiation. | 163 | | `isPreloadedBlob` | `boolean` | This property indicates whether a preloaded audio blob is available for playback or processing. 
| 164 | | `setPreloadedAudioBlob` | `(blob: Blob) => void` | Users can use this method to set a preloaded audio blob, enabling seamless integration with user inputs, file uploads etc. | 165 | | `_setIsProcessingAudioOnComplete` | `Dispatch>` | Internal function to set `isProcessingAudioOnComplete` state. | 166 | | `_setIsProcessingOnResize` | `Dispatch>` | Internal function to set `isProcessingOnResize` state. | 167 | 168 | ### `VoiceVisualizer` Component 169 | 170 | A component that visualizes the real-time audio wave during recording. 171 | 172 | ### Props for AudioVisualizer Component 173 | 174 | | Props | Description | Default | Type | 175 | |:--------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------|:-----------------------------| 176 | | **`controls`** | Provides the audio recording controls and states required for visualization. | - | `Controls` (Required) | 177 | | **`height`** | The height of the visualization canvas. | `200` | `string \| number` (Optional) | 178 | | **`width`** | The width of the visualization canvas. | `100%` | `string \| number` (Optional) | 179 | | **`backgroundColor`** | The background color of the visualization canvas. | `transparent` | `string` (Optional) | 180 | | **`mainBarColor`** | The color of the main audio wave line. | `#FFFFFF` | `string` (Optional) | 181 | | **`secondaryBarColor`** | The secondary color of the audio wave line. | `#5e5e5e` | `string` (Optional) | 182 | | **`speed`** | The speed of the audio visualization animation (Integer from 1 to 6, higher number is slower). | `3` | `number` (Optional) | 183 | | **`barWidth`** | The width of each audio wave bar. 
| `2` | `number` (Optional) | 184 | | **`gap`** | The gap between each audio wave bar. | `1` | `number` (Optional) | 185 | | **`rounded`** | The border radius of the audio wave bars. | `5` | `number` (Optional) | 186 | | **`isControlPanelShown`** | Whether to display the audio control panel, including features such as recorded audio duration, current recording time, and control buttons. If you want to create your own UI, set it to false and utilize functions from the useVoiceVisualizer hook to manage audio control. | `true` | `boolean` (Optional) | 187 | | **`isDownloadAudioButtonShown`** | Whether to display the Download audio button. | `false` | `boolean` (Optional) | 188 | | **`fullscreen`** | Whether the visualization should be displayed in fullscreen mode. It begins from the center by default. | `false` | `boolean` (Optional) | 189 | | **`animateCurrentPick`** | Whether to animate the current pick in the visualization. | `true` | `boolean` (Optional) | 190 | | **`onlyRecording`** | Whether to show the visualization only during voice recording. | `false` | `boolean` (Optional) | 191 | | **`isDefaultUIShown`** | Whether to show a default UI on Canvas before recording. If you want to create your own UI, set it to false. | `true` | `boolean` (Optional) | 192 | | **`mainContainerClassName`** | The CSS class name for the main container. | - | `string` (Optional) | 193 | | **`canvasContainerClassName`** | The CSS class name for the container of the visualization canvas. | - | `string` (Optional) | 194 | | **`isProgressIndicatorShown`** | Whether to show the progress indicator after recording. | `true` | `boolean` (Optional) | 195 | | **`progressIndicatorClassName`** | The CSS class name for the progress indicator. | - | `string` (Optional) | 196 | | **`isProgressIndicatorTimeShown`** | Whether to show the progress indicator time. | `true` | `boolean` (Optional) | 197 | | **`progressIndicatorTimeClassName`** | The CSS class name for the progress indicator with time. 
| - | `string` (Optional) | 198 | | **`isProgressIndicatorOnHoverShown`** | Whether to show the progress indicator on hover. | `true` | `boolean` (Optional) | 199 | | **`progressIndicatorOnHoverClassName`** | The CSS class name for the progress indicator on hover. | - | `string` (Optional) | 200 | | **`isProgressIndicatorTimeOnHoverShown`** | Whether to show the progress indicator time on hover. | `true` | `boolean` (Optional) | 201 | | **`progressIndicatorTimeOnHoverClassName`** | The CSS class name for the progress indicator with time on hover. | - | `string` (Optional) | 202 | | **`isAudioProcessingTextShown`** | Whether to show the audio processing text. | `true` | `boolean` (Optional) | 203 | | **`audioProcessingTextClassName`** | The CSS class name for the audio processing text. | - | `string` (Optional) | 204 | | **`controlButtonsClassName`** | The CSS class name for the Clear Button and Download Audio button components. | - | `string` (Optional) | 205 | 206 | 207 | ## License 208 | 209 | This library is distributed under the MIT License. 210 | 211 | ## Issues 212 | 213 | If you encounter any bugs or have suggestions for improvements, please report them in the GitHub Issues section. 214 | 215 | ## Support 216 | 217 | For support or general questions, you can reach out to [zarytskyi222@gmail.com](mailto:zarytskyi222@gmail.com). 218 | 219 | If you find my project valuable and would like to support its development, you can buy me a coffee ☕. Your contribution helps keep this project active and growing. 220 | 221 | Buy Me A Coffee 222 | 223 | Your contribution is greatly appreciated! ❤️🚀 224 | 225 | ## Credits 226 | 227 | This library was created by [Yurii Zarytskyi](https://github.com/YZarytskyi) 228 | 229 | 230 | linkedIn LinkedIn 231 | 232 | --------------------------------------------------------------------------------