├── .eslintignore
├── .eslintrc
├── .github
├── funding.yml
└── workflows
│ └── test.yml
├── .gitignore
├── .npmrc
├── .prettierignore
├── .prettierrc
├── index.d.ts
├── index.html
├── license
├── media
├── 0.jpeg
├── 1.jpeg
└── audio
│ ├── ChillyGonzales-SampleThis.mp3
│ ├── EverythingPersists-AndThenYouSeeIt.mp3
│ ├── FortMinor-WheredYouGo.mp3
│ ├── Test.wav
│ ├── Wizard-DreamOn.mp3
│ ├── demo.m4a
│ ├── hotd-podcast-coupling-clip.m4a
│ └── voice.m4a
├── package.json
├── pnpm-lock.yaml
├── public
├── bg.mp4
├── bg0.jpg
├── bg1.jpg
└── index.css
├── readme.md
├── src
├── AudioVisualization.ts
├── BGPass.ts
├── BGShader.ts
├── CanvasAudioVisualization.ts
├── HybridAudioVisualization.ts
├── MeydaHybridAudioVisualization.ts
├── TransparentBackgroundFixedUnrealBloomPass.ts
├── app.ts
├── index.ts
└── vite-env.d.ts
├── tsconfig.json
└── vite.config.js
/.eslintignore:
--------------------------------------------------------------------------------
1 | .snapshots/
2 | build/
3 | dist/
4 | node_modules/
5 | .next/
6 | .vercel/
7 | audio/
--------------------------------------------------------------------------------
/.eslintrc:
--------------------------------------------------------------------------------
1 | {
2 | "root": true,
3 | "parser": "@typescript-eslint/parser",
4 | "parserOptions": {
5 | "ecmaVersion": 2020
6 | },
7 | "plugins": ["@typescript-eslint"],
8 | "extends": [
9 | "plugin:@typescript-eslint/recommended",
10 | "plugin:prettier/recommended"
11 | ],
12 | "env": { "browser": true },
13 | "rules": {
14 | "@typescript-eslint/explicit-module-boundary-types": 0,
15 | "@typescript-eslint/no-explicit-any": 0,
16 | "@typescript-eslint/no-non-null-assertion": 0
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/.github/funding.yml:
--------------------------------------------------------------------------------
1 | github: [transitive-bullshit]
2 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | on: [push, pull_request]
4 |
5 | jobs:
6 | test:
7 | name: Test Node.js ${{ matrix.node-version }}
8 | runs-on: ubuntu-latest
9 | strategy:
10 | fail-fast: false
11 | matrix:
12 | node-version:
13 | - 16
14 |
15 | steps:
16 | - name: Checkout
17 | uses: actions/checkout@v3
18 |
19 | - name: Install Node.js
20 | uses: actions/setup-node@v3
21 | with:
22 | node-version: ${{ matrix.node-version }}
23 |
24 | - name: Install pnpm
25 | uses: pnpm/action-setup@v2
26 | id: pnpm-install
27 | with:
28 | version: 7
29 | run_install: false
30 |
31 | - name: Get pnpm store directory
32 | id: pnpm-cache
33 | shell: bash
34 | run: |
35 | echo "STORE_PATH=$(pnpm store path)" >> $GITHUB_OUTPUT
36 |
37 | - uses: actions/cache@v3
38 | name: Setup pnpm cache
39 | with:
40 | path: ${{ steps.pnpm-cache.outputs.STORE_PATH }}
41 | key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
42 | restore-keys: |
43 | ${{ runner.os }}-pnpm-store-
44 |
45 | - name: Install dependencies
46 | run: pnpm install --frozen-lockfile
47 |
48 | - name: Run test
49 | run: pnpm run test
50 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2 |
3 | # dependencies
4 | /node_modules
5 | /.pnp
6 | .pnp.js
7 |
8 | # testing
9 | /coverage
10 |
11 | # next.js
12 | /.next/
13 | /out/
14 |
15 | # production
16 | /build
17 | /dist
18 |
19 | # misc
20 | .DS_Store
21 | *.pem
22 |
23 | # debug
24 | npm-debug.log*
25 | yarn-debug.log*
26 | yarn-error.log*
27 |
28 | # local env files
29 | .env
30 | .env.local
31 | .env.build
32 | .env.development.local
33 | .env.test.local
34 | .env.production.local
35 |
36 | # vercel
37 | .vercel
38 |
--------------------------------------------------------------------------------
/.npmrc:
--------------------------------------------------------------------------------
1 | enable-pre-post-scripts=true
2 |
--------------------------------------------------------------------------------
/.prettierignore:
--------------------------------------------------------------------------------
1 | .snapshots/
2 | build/
3 | dist/
4 | node_modules/
5 | .next/
6 | .vercel/
7 | audio/
--------------------------------------------------------------------------------
/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 | "singleQuote": true,
3 | "jsxSingleQuote": true,
4 | "semi": false,
5 | "useTabs": false,
6 | "tabWidth": 2,
7 | "bracketSpacing": true,
8 | "arrowParens": "always",
9 | "trailingComma": "none"
10 | }
11 |
--------------------------------------------------------------------------------
/index.d.ts:
--------------------------------------------------------------------------------
1 | declare module '*.mp3' {
2 | const value: string
3 | export default value
4 | }
5 |
6 | declare module '*.m4a' {
7 | const value: string
8 | export default value
9 | }
10 |
--------------------------------------------------------------------------------
/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 | AVP
11 |
12 |
13 |
14 | Welcome to AVP!
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/license:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 Travis Fischer
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/media/0.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/transitive-bullshit/avp/7e06f798d0f2a29407144645c80c4d7726c281bb/media/0.jpeg
--------------------------------------------------------------------------------
/media/1.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/transitive-bullshit/avp/7e06f798d0f2a29407144645c80c4d7726c281bb/media/1.jpeg
--------------------------------------------------------------------------------
/media/audio/ChillyGonzales-SampleThis.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/transitive-bullshit/avp/7e06f798d0f2a29407144645c80c4d7726c281bb/media/audio/ChillyGonzales-SampleThis.mp3
--------------------------------------------------------------------------------
/media/audio/EverythingPersists-AndThenYouSeeIt.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/transitive-bullshit/avp/7e06f798d0f2a29407144645c80c4d7726c281bb/media/audio/EverythingPersists-AndThenYouSeeIt.mp3
--------------------------------------------------------------------------------
/media/audio/FortMinor-WheredYouGo.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/transitive-bullshit/avp/7e06f798d0f2a29407144645c80c4d7726c281bb/media/audio/FortMinor-WheredYouGo.mp3
--------------------------------------------------------------------------------
/media/audio/Test.wav:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/transitive-bullshit/avp/7e06f798d0f2a29407144645c80c4d7726c281bb/media/audio/Test.wav
--------------------------------------------------------------------------------
/media/audio/Wizard-DreamOn.mp3:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/transitive-bullshit/avp/7e06f798d0f2a29407144645c80c4d7726c281bb/media/audio/Wizard-DreamOn.mp3
--------------------------------------------------------------------------------
/media/audio/demo.m4a:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/transitive-bullshit/avp/7e06f798d0f2a29407144645c80c4d7726c281bb/media/audio/demo.m4a
--------------------------------------------------------------------------------
/media/audio/hotd-podcast-coupling-clip.m4a:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/transitive-bullshit/avp/7e06f798d0f2a29407144645c80c4d7726c281bb/media/audio/hotd-podcast-coupling-clip.m4a
--------------------------------------------------------------------------------
/media/audio/voice.m4a:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/transitive-bullshit/avp/7e06f798d0f2a29407144645c80c4d7726c281bb/media/audio/voice.m4a
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "avp",
3 | "version": "0.2.0",
4 | "private": true,
5 | "description": "Audio Visual Playground (or Alien vs Predator!)",
6 | "author": "Travis Fischer ",
7 | "repository": "transitive-bullshit/avp",
8 | "license": "MIT",
9 | "scripts": {
10 | "start": "run-s dev",
11 | "dev": "vite",
12 | "build": "vite build",
13 | "test": "run-s test:*",
14 | "test:lint": "eslint .",
15 | "test:prettier": "prettier '**/*.{js,jsx,ts,tsx}' --check",
16 | "posttest": "run-s build"
17 | },
18 | "dependencies": {
19 | "dat.gui": "^0.7.9",
20 | "meyda": "^5.5.1",
21 | "raf": "^3.4.1",
22 | "random": "^5.1.1",
23 | "three": "0.131.3"
24 | },
25 | "devDependencies": {
26 | "@types/dat.gui": "^0.7.7",
27 | "@types/node": "^22.7.5",
28 | "@types/raf": "^3.4.0",
29 | "@types/three": "0.131.0",
30 | "@typescript-eslint/eslint-plugin": "^5.48.0",
31 | "@typescript-eslint/parser": "^5.48.0",
32 | "eslint": "^8.31.0",
33 | "eslint-config-prettier": "^8.6.0",
34 | "eslint-plugin-prettier": "^4.2.1",
35 | "npm-run-all": "^4.1.5",
36 | "prettier": "^2.8.1",
37 | "typescript": "^5.6.3",
38 | "update-markdown-jsdoc": "^1.0.11",
39 | "vite": "^5.4.8"
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/public/bg.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/transitive-bullshit/avp/7e06f798d0f2a29407144645c80c4d7726c281bb/public/bg.mp4
--------------------------------------------------------------------------------
/public/bg0.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/transitive-bullshit/avp/7e06f798d0f2a29407144645c80c4d7726c281bb/public/bg0.jpg
--------------------------------------------------------------------------------
/public/bg1.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/transitive-bullshit/avp/7e06f798d0f2a29407144645c80c4d7726c281bb/public/bg1.jpg
--------------------------------------------------------------------------------
/public/index.css:
--------------------------------------------------------------------------------
1 | * {
2 | box-sizing: border-box;
3 | }
4 |
5 | a {
6 | color: inherit;
7 | text-decoration: none;
8 | }
9 |
10 | body,
11 | html {
12 | padding: 0;
13 | margin: 0;
14 | font-family: -apple-system, BlinkMacSystemFont, Segoe UI, Roboto, Oxygen,
15 | Ubuntu, Cantarell, Fira Sans, Droid Sans, Helvetica Neue, sans-serif;
16 | }
17 |
18 | body {
19 | display: flex;
20 | flex-direction: column;
21 | justify-content: center;
22 | align-items: center;
23 | background: #eee;
24 | }
25 |
26 | #container {
27 | position: relative;
28 | }
29 |
30 | #controls {
31 | display: flex;
32 | flex-direction: row;
33 | justify-content: center;
34 | gap: 2em;
35 | margin: 2em auto;
36 | }
37 |
--------------------------------------------------------------------------------
/readme.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | (quick demo; enable sound)
4 |
5 |
6 | # Audio Visual Playground
7 |
8 | [](https://github.com/transitive-bullshit/avp/actions/workflows/test.yml) [](https://github.com/transitive-bullshit/avp/blob/main/license) [](https://prettier.io)
9 |
10 | ## How it works
11 |
12 | - [Animated stable diffusion](https://replicate.com/andreasjansson/stable-diffusion-animation) - Hosted on Replicate
13 | - WebGL, [three.js](https://threejs.org/), glsl for rendering
14 | - [Meyda](https://meyda.js.org/) for audio feature extraction
15 | - [MediaRecorder](https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder) for recording video in-browser
16 | - Great so I don't need to host any heavyweight servers
17 | - [ffmpeg](https://ffmpeg.org/) for converting webm ⇒ mp4
18 | - `ffmpeg -i test.webm -ss 0.05 -vf scale="iw/2:ih/2" -c:v libx264 -crf 16 -movflags faststart -pix_fmt yuv420p -r 40 -profile:v main -preset medium test.mp4`
19 |
20 | ## TODO
21 |
22 | - [x] add stroke styles in addition to fill
23 | - [x] add circle style
24 | - [x] add mirror option
25 | - [x] fix output pixel density
26 | - [x] start/pause/stop should be async
27 | - [ ] separate download or get blob methods
28 | - [ ] mp4 output support
29 | - [ ] render offscreen sped-up
30 | - [ ] add demo to readme
31 | - [ ] add basic docs
32 | - [x] hosted demo
33 | - [ ] explore backgrounds, color palettes, and avatars
34 | - [ ] explore different post-processing effects
35 | - [ ] add descript-style animated captions
36 | - [ ] add UX for generating custom backgrounds using replicate API
37 |
38 | ## Inspiration
39 |
40 | - https://www.youtube.com/watch?v=QykkWNOtap4
41 | - https://www.youtube.com/watch?v=Q1bxyKOZ5RI
42 |
43 | ## License
44 |
45 | MIT © [Travis Fischer](https://transitivebullsh.it)
46 |
47 | If you found this project interesting, please consider [sponsoring me](https://github.com/sponsors/transitive-bullshit) or following me on twitter
48 |
--------------------------------------------------------------------------------
/src/AudioVisualization.ts:
--------------------------------------------------------------------------------
1 | import {
2 | AudioListener as ThreeAudioListener,
3 | Audio as ThreeAudio,
4 | AudioLoader as ThreeAudioLoader,
5 | AudioAnalyser as ThreeAudioAnalyser
6 | } from 'three'
7 |
8 | import raf from 'raf'
9 |
10 | export interface AudioVisualizationOptions {
11 | /**
12 | * A Canvas where the renderer draws its output.
13 | */
14 | canvas: HTMLCanvasElement | OffscreenCanvas
15 |
16 | // input audio
17 | mediaElement?: HTMLMediaElement
18 | mediaStream?: MediaStream
19 | mediaUrl?: string
20 |
21 | // misc settings
22 | autoplay?: boolean
23 | fftSize?: number
24 | isRecordingEnabled?: boolean
25 | frameRequestRate?: number
26 |
27 | mediaRecorderOptions?: MediaRecorderOptions
28 | }
29 |
30 | export type AnimationStatus = 'stopped' | 'playing'
31 |
32 | export abstract class AudioVisualization {
33 | // visual output canvas
34 | canvas: HTMLCanvasElement | OffscreenCanvas
35 |
36 | // audio input
37 | mediaElement?: HTMLMediaElement
38 | mediaStream?: MediaStream
39 | mediaUrl?: string
40 |
41 | // recording output
42 | mediaRecorder?: MediaRecorder
43 | recordingP?: Promise
44 | frameRequestRate: number
45 | mediaRecorderOptions: MediaRecorderOptions
46 | mediaRecorderChunks: BlobPart[] = []
47 |
48 | // internal audio analysis
49 | listener: ThreeAudioListener
50 | audio: ThreeAudio
51 | analyser: ThreeAudioAnalyser
52 |
53 | protected _rafHandle: number | null
54 | protected _isRecordingEnabled: boolean
55 |
56 | constructor(opts: AudioVisualizationOptions) {
57 | this._rafHandle = null
58 |
59 | if (!opts.canvas) {
60 | throw new Error('AudioVisualization requires "canvas"')
61 | }
62 |
63 | this.canvas = opts.canvas
64 |
65 | this.mediaElement = opts.mediaElement
66 | this.mediaStream = opts.mediaStream
67 | this.mediaUrl = opts.mediaUrl
68 |
69 | this.listener = new ThreeAudioListener()
70 | this.audio = new ThreeAudio(this.listener)
71 |
72 | this._isRecordingEnabled = !!opts.isRecordingEnabled
73 | this.frameRequestRate = opts.frameRequestRate ?? 60
74 | this.mediaRecorderOptions = {
75 | mimeType: 'video/webm',
76 | audioBitsPerSecond: 128000,
77 | videoBitsPerSecond: 4500000,
78 | ...opts.mediaRecorderOptions
79 | }
80 |
81 | if (this.mediaUrl) {
82 | if (/(iPad|iPhone|iPod)/g.test(navigator.userAgent)) {
83 | // TODO: this will break recording right now
84 | const loader = new ThreeAudioLoader()
85 | loader.load(this.mediaUrl, (buffer: any) => {
86 | this.audio.setBuffer(buffer)
87 | if (opts.autoplay) {
88 | this.audio.play()
89 | }
90 | })
91 | } else {
92 | const mediaElement = new Audio(this.mediaUrl)
93 | this.mediaElement = mediaElement
94 | if (opts.autoplay) {
95 | mediaElement.play()
96 | }
97 |
98 | this.audio.setMediaElementSource(mediaElement)
99 | }
100 | } else if (this.mediaElement) {
101 | this.audio.setMediaElementSource(this.mediaElement)
102 | if (opts.autoplay) {
103 | this.mediaElement.play()
104 | }
105 | } else if (this.mediaStream) {
106 | this.audio.setMediaStreamSource(this.mediaStream)
107 | } else {
108 | throw new Error(
109 | 'AudioVisualization requires one of "mediaElement", "mediaStream", or "mediaUrl"'
110 | )
111 | }
112 |
113 | const fftSize = opts.fftSize || 1024
114 | this.analyser = new ThreeAudioAnalyser(this.audio, fftSize)
115 | // window.addEventListener('resize', this._resize.bind(this))
116 | }
117 |
118 | dispose() {
119 | this.stop()
120 | // window.removeEventListener('resize', this._resize.bind(this))
121 | this.audio.disconnect()
122 | }
123 |
124 | protected _resize() {
125 | // TODO: override in subclass
126 | }
127 |
128 | public get isPlaying(): boolean {
129 | // TODO: this is super janky
130 | return !!(
131 | this.mediaElement &&
132 | this.mediaElement!.currentTime > 0 &&
133 | !this.mediaElement!.paused &&
134 | !this.mediaElement!.ended &&
135 | this.mediaElement!.readyState > 2
136 | )
137 | // return this.audio.isPlaying
138 | }
139 |
140 | public get isRecordingEnabled() {
141 | return this._isRecordingEnabled
142 | }
143 |
144 | public set isRecordingEnabled(value: boolean) {
145 | if (!!value !== this._isRecordingEnabled) {
146 | if (this.isPlaying) {
147 | throw new Error(
148 | 'AudioVisualization.isRecordingEnabled may only be set when audio is stopped'
149 | )
150 | }
151 |
152 | this._isRecordingEnabled = !!value
153 | }
154 | }
155 |
156 | public get isRecording() {
157 | return this._isRecordingEnabled && this.isPlaying
158 | }
159 |
160 | public async start() {
161 | if (this.isPlaying) return
162 |
163 | this.mediaElement?.play()
164 | this.audio.play()
165 | this._animate()
166 |
167 | if (!this._isRecordingEnabled) return
168 |
169 | // TODO: handle pausing
170 | // TODO: does this work with offscreencanvas?
171 | const captureStream = (this.canvas as HTMLCanvasElement).captureStream(
172 | this.frameRequestRate
173 | )
174 |
175 | const waitForAudioTrackP = new Promise((resolve, reject) => {
176 | const stream: MediaStream =
177 | this.mediaStream ?? (this.mediaElement as any).captureStream()
178 | let audioTracks = stream.getAudioTracks()
179 |
180 | if (audioTracks.length) {
181 | for (const audioTrack of audioTracks) {
182 | console.log('audio track', audioTrack)
183 | captureStream.addTrack(audioTrack)
184 | }
185 | resolve()
186 | } else {
187 | setTimeout(
188 | () =>
189 | reject(
190 | new Error('timeout initializing audio track for mediarecorder')
191 | ),
192 | 10000
193 | )
194 |
195 | stream.onaddtrack = (ev) => {
196 | let hasAudioTrack = false
197 | audioTracks = stream.getAudioTracks()
198 | for (const audioTrack of audioTracks) {
199 | if (audioTrack.id === ev.track.id) {
200 | console.log('audio track', audioTrack)
201 | hasAudioTrack = true
202 | captureStream.addTrack(audioTrack)
203 | }
204 | }
205 |
206 | if (hasAudioTrack) {
207 | resolve()
208 | }
209 | }
210 | }
211 | })
212 |
213 | console.log({
214 | captureStream,
215 | mediaRecorderOptions: this.mediaRecorderOptions
216 | })
217 |
218 | this.mediaRecorder = new MediaRecorder(
219 | captureStream,
220 | this.mediaRecorderOptions
221 | )
222 | this.mediaRecorderChunks = []
223 |
224 | this.recordingP = new Promise((resolve, reject) => {
225 | if (!this.mediaRecorder) return
226 |
227 | this.mediaRecorder.ondataavailable = (e: any) =>
228 | this.mediaRecorderChunks.push(e.data)
229 | this.mediaRecorder.onerror = (ev) => {
230 | console.warn('mediarecorder ERROR', ev)
231 | reject(ev)
232 | }
233 | this.mediaRecorder.onstop = (ev) => {
234 | console.log('mediarecorder STOP', ev)
235 | resolve()
236 | }
237 |
238 | waitForAudioTrackP
239 | .then(() => {
240 | this.mediaRecorder?.start()
241 | })
242 | .catch(reject)
243 | }).then(() => {
244 | // TODO: cleanup
245 | const mimeType = this.mediaRecorderOptions.mimeType
246 | const blob = new Blob(this.mediaRecorderChunks, {
247 | type: mimeType
248 | })
249 | const p = mimeType!.split('/')
250 | const ext = p[p.length - 1]
251 |
252 | const filename = `test.${ext}`
253 | console.log('download', blob.size, filename)
254 |
255 | const downloadAnchor = document.createElement('a')
256 | downloadAnchor.onclick = () => {
257 | downloadAnchor.href = URL.createObjectURL(blob)
258 | downloadAnchor.download = filename
259 | }
260 | downloadAnchor.click()
261 | })
262 |
263 | return waitForAudioTrackP
264 | }
265 |
266 | public pause() {
267 | this.mediaRecorder?.pause()
268 | this.mediaElement?.pause()
269 | this.audio.pause()
270 | this._cancelAnimation()
271 | }
272 |
273 | public stop() {
274 | this.mediaRecorder?.stop()
275 | delete this.mediaRecorder
276 |
277 | this.mediaElement?.pause()
278 | this.audio.stop()
279 | this._cancelAnimation()
280 | }
281 |
282 | protected _cancelAnimation() {
283 | if (this._rafHandle) {
284 | raf.cancel(this._rafHandle)
285 | this._rafHandle = null
286 | }
287 | }
288 |
289 | protected _animate() {
290 | this._rafHandle = raf(this._animate.bind(this))
291 | this.render()
292 | }
293 |
294 | // TODO: override in subclass
295 | protected abstract render(): void
296 | }
297 |
--------------------------------------------------------------------------------
/src/BGPass.ts:
--------------------------------------------------------------------------------
1 | /* eslint-disable dot-notation */
2 |
3 | import {
4 | ShaderMaterial,
5 | UniformsUtils,
6 | WebGLRenderer,
7 | WebGLRenderTarget,
8 | Texture
9 | } from 'three'
10 |
11 | import { Pass, FullScreenQuad } from 'three/examples/jsm/postprocessing/Pass'
12 | import { BGShader } from './BGShader'
13 |
14 | export class BGPass extends Pass {
15 | map: Texture
16 | uniforms: any
17 | material: ShaderMaterial
18 | fsQuad: FullScreenQuad
19 |
20 | constructor(map: Texture) {
21 | super()
22 |
23 | const shader = BGShader
24 |
25 | this.map = map
26 |
27 | this.uniforms = UniformsUtils.clone(shader.uniforms)
28 |
29 | this.material = new ShaderMaterial({
30 | uniforms: this.uniforms,
31 | vertexShader: shader.vertexShader,
32 | fragmentShader: shader.fragmentShader,
33 | depthTest: false,
34 | depthWrite: false
35 | })
36 |
37 | this.needsSwap = true
38 | this.clear = false
39 |
40 | this.fsQuad = new FullScreenQuad()
41 | }
42 |
43 | override render(
44 | renderer: WebGLRenderer,
45 | writeBuffer: WebGLRenderTarget,
46 | readBuffer: WebGLRenderTarget
47 | // deltaTime: number,
48 | // maskActive: boolean,
49 | ) {
50 | const oldAutoClear = renderer.autoClear
51 | renderer.autoClear = false
52 |
53 | this.fsQuad.material = this.material
54 |
55 | this.uniforms['tBG'].value = this.map
56 | this.uniforms['tFG'].value = readBuffer.texture
57 | this.material.transparent = false
58 |
59 | if (this.renderToScreen) {
60 | renderer.setRenderTarget(null)
61 | } else {
62 | renderer.setRenderTarget(writeBuffer)
63 | }
64 |
65 | if (this.clear) renderer.clear()
66 | this.fsQuad.render(renderer)
67 |
68 | renderer.autoClear = oldAutoClear
69 | }
70 |
71 | dispose() {
72 | this.material.dispose()
73 |
74 | this.fsQuad.dispose()
75 | }
76 | }
77 |
--------------------------------------------------------------------------------
/src/BGShader.ts:
--------------------------------------------------------------------------------
1 | export const BGShader = {
2 | uniforms: {
3 | tBG: { value: null },
4 | tFG: { value: null }
5 | },
6 |
7 | vertexShader: /* glsl */ `
8 |
9 | varying vec2 vUv;
10 |
11 | void main() {
12 |
13 | vUv = uv;
14 | gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
15 |
16 | }`,
17 |
18 | fragmentShader: /* glsl */ `
19 |
20 | #include
21 |
22 | uniform sampler2D tBG;
23 | uniform sampler2D tFG;
24 |
25 | varying vec2 vUv;
26 |
27 | void main() {
28 |
29 | vec4 bg = texture2D( tBG, vUv );
30 | vec4 fg = texture2D( tFG, vUv );
31 |
32 | vec3 res = bg.rgb * (1.0 - fg.a) + fg.rgb * (fg.a);
33 |
34 | gl_FragColor = vec4( res, bg.a );
35 |
36 | }`
37 | }
38 |
--------------------------------------------------------------------------------
/src/CanvasAudioVisualization.ts:
--------------------------------------------------------------------------------
1 | import {
2 | AudioVisualization,
3 | AudioVisualizationOptions
4 | } from './AudioVisualization'
5 |
6 | export class CanvasAudioVisualization extends AudioVisualization {
7 | ctx: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D
8 |
9 | constructor(opts: AudioVisualizationOptions) {
10 | super(opts)
11 |
12 | const ctx = this.canvas.getContext('2d')
13 | if (ctx) {
14 | this.ctx = ctx as
15 | | CanvasRenderingContext2D
16 | | OffscreenCanvasRenderingContext2D
17 | } else {
18 | throw new Error('Unable to initialize canvas 2d context')
19 | }
20 | }
21 |
22 | protected render() {
23 | // TODO: override in subclass
24 | // this.analyser.getFrequencyData()
25 | // const { width, height } = this.canvas
26 | // this.ctx.fillStyle = 'red'
27 | // this.ctx.fillRect(0, 0, width, height)
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/src/HybridAudioVisualization.ts:
--------------------------------------------------------------------------------
1 | import {
2 | WebGLRenderer,
3 | Scene,
4 | Camera,
5 | MeshBasicMaterial,
6 | CanvasTexture,
7 | PlaneGeometry,
8 | Mesh
9 | } from 'three'
10 |
11 | import { EffectComposer } from 'three/examples/jsm/postprocessing/EffectComposer'
12 | import { RenderPass } from 'three/examples/jsm/postprocessing/RenderPass'
13 |
14 | import {
15 | AudioVisualization,
16 | AudioVisualizationOptions
17 | } from './AudioVisualization'
18 |
19 | export interface HybridAudioVisualizationOptions
20 | extends AudioVisualizationOptions {
21 | offscreenCanvas?: HTMLCanvasElement
22 | offscreenScale?: number
23 | }
24 |
25 | export class HybridAudioVisualization extends AudioVisualization {
26 | offscreenCanvas: HTMLCanvasElement
27 | ctx: CanvasRenderingContext2D
28 | offscreenScale: number
29 |
30 | renderer: WebGLRenderer
31 | scene: Scene
32 | camera: Camera
33 | offscreenCanvasMaterial: MeshBasicMaterial
34 | composer: EffectComposer
35 |
36 | constructor(opts: HybridAudioVisualizationOptions) {
37 | super(opts)
38 |
39 | this.offscreenScale = opts.offscreenScale || 1.0
40 |
41 | if (opts.offscreenCanvas) {
42 | this.offscreenCanvas = opts.offscreenCanvas
43 | } else {
44 | this.offscreenCanvas = document.createElement('canvas')
45 | this.offscreenCanvas.width = this.canvas.width * this.offscreenScale
46 | this.offscreenCanvas.height = this.canvas.height * this.offscreenScale
47 | }
48 |
49 | const ctx = this.offscreenCanvas.getContext('2d')
50 | if (ctx) {
51 | this.ctx = ctx
52 | } else {
53 | throw new Error('Unable to initialize offscreen canvas 2d context')
54 | }
55 |
56 | this.renderer = new WebGLRenderer({
57 | antialias: true,
58 | canvas: this.canvas,
59 | alpha: true
60 | })
61 | this.renderer.setSize(this.canvas.width, this.canvas.height)
62 | this.renderer.setClearColor(0x000000, 0.0)
63 | this.renderer.setPixelRatio(window.devicePixelRatio)
64 | // this.renderer.autoClear = false
65 |
66 | this.scene = new Scene()
67 | this.camera = new Camera()
68 |
69 | {
70 | this.offscreenCanvasMaterial = new MeshBasicMaterial()
71 | this.offscreenCanvasMaterial.map = new CanvasTexture(this.offscreenCanvas)
72 |
73 | const geometry = new PlaneGeometry(2, 2)
74 | const mesh = new Mesh(geometry, this.offscreenCanvasMaterial)
75 | mesh.scale.setY(-1)
76 | this.scene.add(mesh)
77 | }
78 |
79 | this.composer = new EffectComposer(this.renderer)
80 | this.composer.setSize(this.canvas.width, this.canvas.height)
81 | this.composer.addPass(new RenderPass(this.scene, this.camera))
82 | }
83 |
84 | protected _resize() {
85 | super._resize()
86 |
87 | const { width, height } = this.canvas
88 |
89 | this.offscreenCanvas.width = (width * this.offscreenScale) | 0
90 | this.offscreenCanvas.height = (height * this.offscreenScale) | 0
91 |
92 | this.renderer.setSize(width, height)
93 | this.composer.setSize(width, height)
94 | }
95 |
96 | // super basic example renderer that mixes offscreen canvas rendering with
97 | // webgl post-processing
98 | protected render() {
99 | this.analyser.getFrequencyData()
100 |
101 | // draw to the offscreen canvas via html5 2d canvas api
102 | const { width, height } = this.offscreenCanvas
103 | this.ctx.clearRect(0, 0, width, height)
104 |
105 | const n = this.analyser.data.length
106 | const invN = width / n
107 | this.ctx.fillStyle = '#F998B9'
108 |
109 | for (let i = 0; i < n; ++i) {
110 | const amp = this.analyser.data[i] / 255.0
111 | const x0 = i * invN
112 | const y = 0
113 | const h = amp * height
114 | this.ctx.fillRect(x0, y, invN, h)
115 | }
116 |
117 | // render to the final canvas via webgl
118 | this.offscreenCanvasMaterial.map!.needsUpdate = true
119 |
120 | // render without post-processing
121 | // this.renderer.render(this.scene, this.camera)
122 |
123 | // render with post-processing
124 | this.composer.render()
125 | }
126 | }
127 |
--------------------------------------------------------------------------------
/src/MeydaHybridAudioVisualization.ts:
--------------------------------------------------------------------------------
1 | import {
2 | TextureLoader,
3 | Vector2
4 | //, VideoTexture
5 | } from 'three'
6 |
7 | // import { UnrealBloomPass } from 'three/examples/jsm/postprocessing/UnrealBloomPass'
8 | import { UnrealBloomPass } from './TransparentBackgroundFixedUnrealBloomPass'
9 | // import { BloomPass } from 'three/examples/jsm/postprocessing/BloomPass'
10 | // import { AfterimagePass } from 'three/examples/jsm/postprocessing/AfterimagePass'
11 | // import { FilmPass } from 'three/examples/jsm/postprocessing/FilmPass'
12 | import { GlitchPass } from 'three/examples/jsm/postprocessing/GlitchPass'
13 | import { BGPass } from './BGPass'
14 |
15 | import Meyda from 'meyda'
16 | import type { MeydaAnalyzer } from 'meyda/dist/esm/meyda-wa'
17 |
18 | import {
19 | HybridAudioVisualization,
20 | HybridAudioVisualizationOptions
21 | } from './HybridAudioVisualization'
22 |
// How samples are rendered: discrete bars, straight line segments, or curves.
export type DrawStyle = 'bars' | 'lines' | 'curves'
// Overall layout of the visualization on the canvas.
export type DrawShape = 'basic' | 'triangle' | 'circle' | 'waveform'

// Names of the Meyda feature extractors this visualization can be driven by.
export type MeydaAudioFeature =
  | 'amplitudeSpectrum'
  | 'buffer'
  | 'chroma'
  | 'complexSpectrum'
  | 'energy'
  | 'loudness'
  | 'mfcc'
  | 'perceptualSharpness'
  | 'perceptualSpread'
  | 'powerSpectrum'
  | 'rms'
  | 'spectralCentroid'
  | 'spectralFlatness'
  | 'spectralKurtosis'
  | 'spectralRolloff'
  | 'spectralSkewness'
  | 'spectralSlope'
  | 'spectralSpread'
  | 'zcr'

/**
 * Options for MeydaHybridAudioVisualization, extending the base hybrid
 * visualization options with Meyda analysis and drawing settings.
 */
export interface MeydaHybridVisualizationOptions
  extends HybridAudioVisualizationOptions {
  drawStyle?: DrawStyle
  drawShape?: DrawShape
  // Meyda feature used to drive the visualization (default 'loudness')
  featureExtractor?: MeydaAudioFeature
  // exponential smoothing weight, clamped to [0, 1]
  smoothingFactor?: number
  // exponent applied to normalized samples, clamped to [1, 16]
  accentuationFactor?: number
  // vertical scaling applied when mapping samples into canvas space
  visualScalingFactor?: number
  // Meyda analysis buffer size
  bufferSize?: number
  // Meyda hop size between analysis frames
  hopSize?: number
  numberOfBarkBands?: number
  fill?: boolean
  mirror?: boolean
  bloom?: boolean
  glitch?: boolean
}
63 |
/**
 * Audio visualization driven by Meyda feature extraction.
 *
 * Each frame pulls a feature vector (loudness bark bands by default) from
 * Meyda, normalizes and smooths it into `_samples`, draws those samples to
 * the offscreen 2D canvas in the configured style/shape, and composites the
 * result through the WebGL post-processing chain (bloom / glitch / bg).
 */
export class MeydaHybridAudioVisualization extends HybridAudioVisualization {
  // Meyda analyzer wired to this visualization's WebAudio analyser node
  meyda: MeydaAnalyzer
  drawStyle: DrawStyle
  drawShape: DrawShape
  // which Meyda feature drives the visuals ('rms' is always extracted too)
  featureExtractor: MeydaAudioFeature
  // loudest RMS value observed so far; used to normalize frame loudness
  maxRMS: number
  // exponential smoothing weight in [0, 1]; higher => smoother motion
  smoothingFactor: number
  // exponent in [1, 16] applied to normalized samples to accentuate peaks
  accentuationFactor: number
  // vertical scaling applied when mapping samples into canvas space
  visualScalingFactor: number
  fill: boolean
  mirror: boolean
  // smoothed, normalized feature samples carried across frames
  _samples: number[] = []

  _bloom = false
  _glitch = false
  _bloomPass: UnrealBloomPass
  _glitchPass: GlitchPass

  // optional background video element (currently disabled; see constructor)
  _bg?: HTMLVideoElement

  constructor(opts: MeydaHybridVisualizationOptions) {
    super(opts)

    this.drawStyle = opts.drawStyle ?? 'bars'
    this.drawShape = opts.drawShape ?? 'triangle'
    this.featureExtractor = opts.featureExtractor ?? 'loudness'
    this.maxRMS = 0
    // clamp smoothing to [0, 1]
    this.smoothingFactor = Math.max(
      0.0,
      Math.min(1.0, opts.smoothingFactor ?? 0.5)
    )
    // clamp accentuation to [1, 16]
    this.accentuationFactor = Math.max(
      1.0,
      Math.min(16.0, opts.accentuationFactor ?? 2.0)
    )
    this.visualScalingFactor = Math.max(0.00001, opts.visualScalingFactor ?? 1)
    this.fill = !!opts.fill
    this.mirror = !!opts.mirror

    this.meyda = Meyda.createMeydaAnalyzer({
      audioContext: this.audio.context,
      source: this.analyser.analyser,
      bufferSize: opts.bufferSize ?? 1024,
      // smaller => slightly smoother but more computation
      hopSize: opts.hopSize ?? 512,
      featureExtractors: [this.featureExtractor].concat(['rms']),
      numberOfBarkBands: opts.numberOfBarkBands ?? 32
      // numberOfMFCCCoefficients: 128
    } as any)

    // setup post-processing shader effects
    this._bloomPass = new UnrealBloomPass(new Vector2(256, 256), 1.0, 0.1)

    // TODO: maybe make glitches respond to audio signal peaks?
    this._glitchPass = new GlitchPass()

    // go through the setters so the passes are inserted into the composer
    this.bloom = opts.bloom !== false
    this.glitch = !!opts.glitch

    // {
    //   const effect1 = new FilmPass(10, 2, 2048)
    //   this.composer.addPass(effect1)
    // }

    // {
    //   const effect1 = new AfterimagePass()
    //   this.composer.addPass(effect1)
    // }

    {
      // should be after any passes we don't want affected by effects
      // const video = document.createElement('video')
      // video.src = '/bg.mp4'
      // video.loop = true
      // this._bg = video
      // const t = new VideoTexture(video)

      const t = new TextureLoader().load('/bg1.jpg')
      this.composer.addPass(new BGPass(t))
    }
  }

  get bloom(): boolean {
    return this._bloom
  }

  // Toggle the bloom pass; inserted just before the last pass in the chain.
  set bloom(value: boolean) {
    if (!!value !== this._bloom) {
      this._bloom = !!value

      if (this._bloom) {
        this.composer.insertPass(
          this._bloomPass,
          Math.max(1, this.composer.passes.length - 1)
        )
      } else {
        this.composer.removePass(this._bloomPass)
      }
    }
  }

  get glitch(): boolean {
    return this._glitch
  }

  // Toggle the glitch pass; inserted just before the last pass in the chain.
  set glitch(value: boolean) {
    if (!!value !== this._glitch) {
      this._glitch = !!value

      if (this._glitch) {
        this.composer.insertPass(
          this._glitchPass,
          Math.max(1, this.composer.passes.length - 1)
        )
      } else {
        this.composer.removePass(this._glitchPass)
      }
    }
  }

  // Start playback (via super) and begin Meyda feature extraction.
  async start() {
    this._bg?.play()
    // if (this._bg) {
    //   await new Promise((resolve, reject) => {
    //     this._bg.onabort('
    //   })
    // }

    await super.start()
    this.meyda.start()
  }

  // Pause playback and feature extraction (playhead position is kept).
  pause() {
    super.pause()
    this._bg?.pause()
    this.meyda.stop()
  }

  // Stop playback and feature extraction, rewinding the background video.
  stop() {
    super.stop()
    if (this._bg) {
      this._bg.pause()
      this._bg.currentTime = 0
    }
    this.meyda.stop()
  }

  // Per-frame entry point: refresh samples, redraw, and composite.
  protected render() {
    // update sample values for each frame
    this._update()

    // draw visualization to offscreen canvas
    this._draw()

    // render without post-processing
    // this.renderer.clear()
    // this.renderer.render(this.scene2, this.camera)
    // this.renderer.render(this.scene, this.camera)

    // render with post-processing
    this.composer.render()
  }

  // Pull the latest feature vector from Meyda and fold it into `_samples`
  // (normalized by the frame's peak, accentuated, loudness-weighted, and
  // exponentially smoothed against the previous frame).
  protected _update() {
    // weight this frame's spectrum by its relative loudness compared to the
    // loudest frame we've seen so far
    const rms = (this.meyda.get('rms') as number) || 0
    this.maxRMS = Math.max(this.maxRMS, rms)
    const rmsNormalizationWeight = this.maxRMS <= 0 ? 1.0 : rms / this.maxRMS
    // console.log(rms, rmsNormalizationWeight)

    // we're relying on meyda for audio analysis
    // this.analyser.getFrequencyData()
    // const spectrum = this.analyser.data

    const feature = this.featureExtractor
    const features = this.meyda.get([feature])
    if (!features) {
      return
    }
    // 'loudness' returns an object whose `specific` member holds the per-band
    // values; other features are used as the vector directly
    const spectrum =
      feature === 'loudness'
        ? features[feature]?.specific
        : (features[feature] as Float32Array | number[])
    if (!spectrum) {
      return
    }
    const n = spectrum.length
    if (n <= 1) {
      return
    }

    // lazily initialize initial samples
    if (this._samples?.length !== n) {
      this._samples = []
      for (let i = 0; i < n; i++) {
        this._samples[i] = 0
      }
    }

    // normalize samples
    let maxS = 0
    let meanS = 0
    // let minS = Number.POSITIVE_INFINITY
    for (let i = 0; i < n; ++i) {
      const value = spectrum[i]
      maxS = Math.max(maxS, value)
      // minS = Math.min(minS, value)
      meanS += value
    }
    if (maxS === 0) {
      return
    }

    // eslint-disable-next-line @typescript-eslint/no-unused-vars
    meanS /= n
    // console.log(minS, maxS, meanS)
    // const diff = maxS - meanS

    const w = this.smoothingFactor
    const invW = 1.0 - w

    for (let i = 0; i < n; ++i) {
      const sample = spectrum[i]
      // take the normalized sample value
      let value = sample / maxS

      // if (feature === 'mfcc') {
      //   value = (sample - minS) / (maxS - minS)
      // }

      // cutoff any values that are less than the mean
      // let value = Math.max(0, (sample - meanS) / diff)

      // accentuate differences in the signal
      value = Math.max(0, Math.min(1, Math.pow(value, this.accentuationFactor)))

      // exponential moving average: keep `w` of the old value per frame
      const y = value * rmsNormalizationWeight
      this._samples[i] = this._samples[i] * w + y * invW
    }
  }

  // Draw `_samples` to the offscreen canvas in the configured style/shape.
  // `drawSamples` renders one strip of samples through a normalized
  // (x, y) -> canvas-space transform `t`; shapes reuse it under different
  // canvas transforms, and mirroring recurses with `mirrored` set.
  protected _draw() {
    const { width, height } = this.offscreenCanvas

    const drawSamples = (
      samples: number[] = this._samples,
      // coordinate transformation
      t: (x: number, y: number) => { x: number; y: number } = (x, y) => ({
        x: x * width,
        y: y * this.visualScalingFactor * height
      }),
      mirrored = false
    ) => {
      // TODO: do we want to override n here for circles?
      const n = samples.length
      const invN = 1.0 / (n - 1)

      if (this.mirror && !mirrored) {
        if (this.drawShape === 'circle') {
          // mirror by splitting the circle into two half-turns
          this.ctx.save()
          drawSamples(samples, (x, y) => t(x / 2, y), true)
          this.ctx.restore()

          this.ctx.save()
          // NOTE(review): with x in [0, 1] this maps into [n - 1.5, n - 1],
          // wrapping the circle many times; the intent looks like the second
          // half-turn, i.e. 1 - x / 2 — confirm against the rendered output
          drawSamples(samples, (x, y) => t(n - x / 2 - 1, y), true)
          this.ctx.restore()
        } else {
          // mirror by drawing the strip twice at half width, flipped once
          this.ctx.save()
          this.ctx.translate(0, 0)
          this.ctx.scale(0.5, 1)
          drawSamples(samples, t, true)
          this.ctx.restore()

          this.ctx.save()
          this.ctx.translate(width, 0)
          this.ctx.scale(-0.5, 1)
          drawSamples(samples, t, true)
          this.ctx.restore()
        }
        return
      }

      // open the path at the baseline (or at the first sample for circles)
      if (this.drawStyle !== 'bars') {
        if (this.drawShape === 'circle') {
          this.ctx.beginPath()
          const p = t(0, samples[0])
          this.ctx.moveTo(p.x, p.y)
        } else {
          this.ctx.beginPath()
          const p = t(0, 0)
          this.ctx.moveTo(p.x, p.y)
        }
      }

      for (let i = 0; i < n - 1; ++i) {
        const x0 = i * invN
        const y0 = samples[i]
        const x1 = (i + 1) * invN
        const y1 = samples[i + 1]

        if (this.drawStyle === 'curves') {
          // two quadratic segments through the midpoint for a smooth curve
          const xMid = (x0 + x1) / 2
          const yMid = (y0 + y1) / 2
          const cpx0 = (xMid + x0) / 2
          const cpx1 = (xMid + x1) / 2

          const cp0 = t(cpx0, y0)
          const cp1 = t(xMid, yMid)
          const cp2 = t(cpx1, y1)
          const cp3 = t(x1, y1)
          this.ctx.quadraticCurveTo(cp0.x, cp0.y, cp1.x, cp1.y)
          this.ctx.quadraticCurveTo(cp2.x, cp2.y, cp3.x, cp3.y)
        } else if (this.drawStyle === 'lines') {
          const p0 = t(x0, y0)
          this.ctx.lineTo(p0.x, p0.y)
        } else if (this.drawStyle === 'bars') {
          const yMid = (y0 + y1) / 2

          if (this.fill) {
            const p0 = t(x0, 0)
            const p1 = t((x1 - x0) / 2, yMid)

            if (this.drawShape === 'circle') {
              // fill an explicit quad so the bar follows the polar transform
              const xMid = (x0 + x1) / 2

              const p0 = t(x0, 0)
              const p1 = t(x0, yMid)
              const p2 = t(xMid, yMid)
              const p3 = t(xMid, 0)

              this.ctx.beginPath()
              this.ctx.moveTo(p0.x, p0.y)
              this.ctx.lineTo(p1.x, p1.y)
              this.ctx.lineTo(p2.x, p2.y)
              this.ctx.lineTo(p3.x, p3.y)
              this.ctx.closePath()
              this.ctx.fill()
            } else {
              // NOTE(review): fillRect takes (x, y, w, h) but p1 is a
              // transformed point used as width/height; this works for the
              // default zero-offset linear transform — verify for others
              this.ctx.fillRect(p0.x, p0.y, p1.x, p1.y)
            }
          } else {
            const p0 = t(x0, 0)
            const p1 = t(x0, y0)

            this.ctx.beginPath()
            this.ctx.moveTo(p0.x, p0.y)
            this.ctx.lineTo(p1.x, p1.y)
            this.ctx.stroke()
          }
        }
      }

      if (this.drawShape === 'circle') {
        if (this.drawStyle !== 'bars') {
          if (this.fill) {
            // trace back along the baseline ring so the fill closes cleanly
            const k = 256
            for (let i = 0; i < k; ++i) {
              const p0 = t((k - i) / (k - 1), 0)

              this.ctx.lineTo(p0.x, p0.y)
            }

            this.ctx.closePath()
            this.ctx.fill()
          } else {
            const p0 = t(1.0, this._samples[0])
            this.ctx.lineTo(p0.x, p0.y)
            this.ctx.stroke()
          }
        }

        // draw floor
        if (this.fill) {
          // the circle transform returns extra { theta, r } members used here
          const p0 = t(0, 0) as any
          const p1 = t(1.0, 0) as any

          this.ctx.save()
          this.ctx.beginPath()
          this.ctx.ellipse(
            0,
            0,
            p0.r,
            p0.r,
            0,
            Math.min(p0.theta, p1.theta),
            Math.max(p0.theta, p1.theta)
          )
          this.ctx.stroke()
          this.ctx.restore()
        }
      } else {
        if (this.drawStyle !== 'bars') {
          const p0 = t(1.0, 0)
          this.ctx.lineTo(p0.x, p0.y)

          if (this.fill) {
            this.ctx.closePath()
            this.ctx.fill()
          } else {
            this.ctx.stroke()
          }
        }

        // draw floor
        if (this.fill) {
          const p0 = t(0, 0)
          // 4px-high baseline strip, pre-divided so the transform cancels it
          const p1 = t(1.0, 4 / (this.visualScalingFactor * height))

          this.ctx.fillRect(p0.x, p0.y, p1.x, p1.y)
        }
      }
    }

    // draw to the offscreen canvas via html5 2d canvas api
    this.ctx.clearRect(0, 0, width, height)
    this.ctx.fillStyle = '#fff'
    this.ctx.strokeStyle = '#fff'
    this.ctx.lineWidth = 4

    if (this.drawShape === 'basic') {
      // just draw normally
      this.ctx.save()
      drawSamples()
      this.ctx.restore()
    } else if (this.drawShape === 'triangle') {
      // draw a triangle
      const p0 = {
        x: width / 4,
        y: (height * 3) / 4
      }

      const p1 = {
        x: (width * 3) / 4,
        y: (height * 3) / 4
      }

      const p2 = {
        x: width / 2,
        y: (height * 1) / 4
      }

      // one strip per triangle edge: scale/rotate the canvas so the default
      // transform draws along each edge
      const scaleX = (p1.x - p0.x) / width
      const scaleY = 0.15
      const h = p0.y - p2.y
      const w = p2.x - p0.x
      const t0 = Math.atan2(h, w)
      const h0 = Math.sqrt(w * w + h * h)
      const hs = h0 / width

      this.ctx.save()
      this.ctx.translate(p0.x, p0.y)
      this.ctx.scale(scaleX, scaleY)
      drawSamples()
      this.ctx.restore()

      // console.log(h0, width, hs)
      this.ctx.save()
      this.ctx.translate(p0.x, p0.y)
      this.ctx.rotate(-t0)
      this.ctx.scale(hs, -scaleY)
      drawSamples()
      this.ctx.restore()

      this.ctx.save()
      this.ctx.translate(p1.x, p1.y)
      this.ctx.rotate(Math.PI + t0)
      this.ctx.scale(hs, scaleY)
      drawSamples()
      this.ctx.restore()
    } else if (this.drawShape === 'circle') {
      const r = width / 4
      const f = (width / 8) * this.visualScalingFactor

      // if (this.fill) {
      //   this.ctx.save()
      //   this.ctx.translate(width / 2, height / 2)
      //   this.ctx.strokeStyle = '#fff'
      //   this.ctx.lineWidth = 2
      //   this.ctx.beginPath()
      //   this.ctx.ellipse(0, 0, r, r, 0, 0, 2 * Math.PI)
      //   this.ctx.stroke()
      //   this.ctx.restore()
      // }

      // polar transform; the first sample is appended so the ring closes
      this.ctx.save()
      this.ctx.translate(width / 2, height / 2)
      drawSamples(
        this._samples.concat([this._samples[0]]),
        (x: number, d: number) => {
          const theta = x * 2 * Math.PI
          const dist = r + d * f

          return {
            theta,
            r,
            dist,
            x: Math.cos(theta) * dist,
            y: Math.sin(theta) * dist
          }
        }
      )
      this.ctx.restore()
    } else if (this.drawShape === 'waveform') {
      // draw the strip twice around the vertical midline, flipped once
      this.ctx.save()
      this.ctx.translate(0, height / 2)
      this.ctx.scale(1.0, 0.25)
      drawSamples()
      this.ctx.restore()

      this.ctx.save()
      this.ctx.translate(0, height / 2)
      this.ctx.scale(1.0, -0.25)
      drawSamples()
      this.ctx.restore()
    }

    // tell webgl that the canvas texture needs updating
    this.offscreenCanvasMaterial.map!.needsUpdate = true
  }
}
585 |
--------------------------------------------------------------------------------
/src/TransparentBackgroundFixedUnrealBloomPass.ts:
--------------------------------------------------------------------------------
1 | /* eslint-disable dot-notation */
2 |
3 | import {
4 | AdditiveBlending,
5 | Color,
6 | LinearFilter,
7 | MeshBasicMaterial,
8 | RGBAFormat,
9 | ShaderMaterial,
10 | Texture,
11 | UniformsUtils,
12 | Vector2,
13 | Vector3,
14 | WebGLRenderer,
15 | WebGLRenderTarget
16 | } from 'three'
17 |
18 | import { Pass, FullScreenQuad } from 'three/examples/jsm/postprocessing/Pass'
19 |
// TypeScript definitions don't have FullScreenQuad
21 | // //@ts-ignore
22 | // import type { FullScreenQuad } from 'three/examples/jsm/postprocessing/Pass'
23 |
24 | import { CopyShader } from 'three/examples/jsm/shaders/CopyShader.js'
25 | import { LuminosityHighPassShader } from 'three/examples/jsm/shaders/LuminosityHighPassShader.js'
26 |
27 | /**
28 | * Thanks to https://github.com/mrdoob/three.js/issues/14104#issuecomment-429664412
29 | * for this fragment shader fix.
30 | *
31 | * UnrealBloomPass is inspired by the bloom pass of Unreal Engine. It creates a
32 | * mip map chain of bloom textures and blurs them with different radii. Because
33 | * of the weighted combination of mips, and because larger blurs are done on
34 | * higher mips, this effect provides good quality and performance.
35 | *
36 | * Reference:
37 | * - https://docs.unrealengine.com/latest/INT/Engine/Rendering/PostProcessEffects/Bloom/
38 | */
39 | class TransparentBackgroundFixedUnrealBloomPass extends Pass {
40 | strength: number
41 | radius: number
42 | threshold: number
43 | resolution: Vector2
44 | clearColor: Color
45 | renderTargetsHorizontal: any[]
46 | renderTargetsVertical: any[]
47 | nMips: number
48 | renderTargetBright: WebGLRenderTarget
49 | highPassUniforms: any
50 | materialHighPassFilter: ShaderMaterial
51 | separableBlurMaterials: any[]
52 | compositeMaterial: ShaderMaterial
53 | bloomTintColors: Vector3[]
54 | copyUniforms: any
55 | materialCopy: ShaderMaterial
56 | _oldClearColor: Color
57 | oldClearAlpha: number
58 | basic: MeshBasicMaterial
59 | fsQuad: FullScreenQuad
60 | static BlurDirectionX: any
61 | static BlurDirectionY: any
62 | constructor(
63 | resolution: Vector2,
64 | strength: number,
65 | radius: number,
66 | threshold?: number
67 | ) {
68 | super()
69 |
70 | this.strength = strength !== undefined ? strength : 1
71 | this.radius = radius
72 | this.threshold = threshold ?? 0
73 | this.resolution =
74 | resolution !== undefined
75 | ? new Vector2(resolution.x, resolution.y)
76 | : new Vector2(256, 256)
77 |
78 | // create color only once here, reuse it later inside the render function
79 | this.clearColor = new Color(0, 0, 0)
80 |
81 | // render targets
82 | const pars = {
83 | minFilter: LinearFilter,
84 | magFilter: LinearFilter,
85 | format: RGBAFormat
86 | }
87 | this.renderTargetsHorizontal = []
88 | this.renderTargetsVertical = []
89 | this.nMips = 5
90 | let resx = Math.round(this.resolution.x / 2)
91 | let resy = Math.round(this.resolution.y / 2)
92 |
93 | this.renderTargetBright = new WebGLRenderTarget(resx, resy, pars)
94 | this.renderTargetBright.texture.name = 'UnrealBloomPass.bright'
95 | this.renderTargetBright.texture.generateMipmaps = false
96 |
97 | for (let i = 0; i < this.nMips; i++) {
98 | const renderTargetHorizonal = new WebGLRenderTarget(resx, resy, pars)
99 |
100 | renderTargetHorizonal.texture.name = 'UnrealBloomPass.h' + i
101 | renderTargetHorizonal.texture.generateMipmaps = false
102 |
103 | this.renderTargetsHorizontal.push(renderTargetHorizonal)
104 |
105 | const renderTargetVertical = new WebGLRenderTarget(resx, resy, pars)
106 |
107 | renderTargetVertical.texture.name = 'UnrealBloomPass.v' + i
108 | renderTargetVertical.texture.generateMipmaps = false
109 |
110 | this.renderTargetsVertical.push(renderTargetVertical)
111 |
112 | resx = Math.round(resx / 2)
113 | resy = Math.round(resy / 2)
114 | }
115 |
116 | // luminosity high pass material
117 |
118 | if (LuminosityHighPassShader === undefined)
119 | console.error('THREE.UnrealBloomPass relies on LuminosityHighPassShader')
120 |
121 | const highPassShader = LuminosityHighPassShader
122 | this.highPassUniforms = UniformsUtils.clone(highPassShader.uniforms)
123 |
124 | this.highPassUniforms['luminosityThreshold'].value = threshold
125 | this.highPassUniforms['smoothWidth'].value = 0.01
126 |
127 | this.materialHighPassFilter = new ShaderMaterial({
128 | uniforms: this.highPassUniforms,
129 | vertexShader: highPassShader.vertexShader,
130 | fragmentShader: highPassShader.fragmentShader,
131 | defines: {}
132 | })
133 |
134 | // Gaussian Blur Materials
135 | this.separableBlurMaterials = []
136 | const kernelSizeArray = [3, 5, 7, 9, 11]
137 | resx = Math.round(this.resolution.x / 2)
138 | resy = Math.round(this.resolution.y / 2)
139 |
140 | for (let i = 0; i < this.nMips; i++) {
141 | this.separableBlurMaterials.push(
142 | this.getSeperableBlurMaterial(kernelSizeArray[i])
143 | )
144 |
145 | this.separableBlurMaterials[i].uniforms['texSize'].value = new Vector2(
146 | resx,
147 | resy
148 | )
149 |
150 | resx = Math.round(resx / 2)
151 | resy = Math.round(resy / 2)
152 | }
153 |
154 | // Composite material
155 | this.compositeMaterial = this.getCompositeMaterial(this.nMips)
156 | this.compositeMaterial.uniforms['blurTexture1'].value =
157 | this.renderTargetsVertical[0].texture
158 | this.compositeMaterial.uniforms['blurTexture2'].value =
159 | this.renderTargetsVertical[1].texture
160 | this.compositeMaterial.uniforms['blurTexture3'].value =
161 | this.renderTargetsVertical[2].texture
162 | this.compositeMaterial.uniforms['blurTexture4'].value =
163 | this.renderTargetsVertical[3].texture
164 | this.compositeMaterial.uniforms['blurTexture5'].value =
165 | this.renderTargetsVertical[4].texture
166 | this.compositeMaterial.uniforms['bloomStrength'].value = strength
167 | this.compositeMaterial.uniforms['bloomRadius'].value = 0.1
168 | this.compositeMaterial.needsUpdate = true
169 |
170 | const bloomFactors = [1.0, 0.8, 0.6, 0.4, 0.2]
171 | this.compositeMaterial.uniforms['bloomFactors'].value = bloomFactors
172 | this.bloomTintColors = [
173 | new Vector3(1, 1, 1),
174 | new Vector3(1, 1, 1),
175 | new Vector3(1, 1, 1),
176 | new Vector3(1, 1, 1),
177 | new Vector3(1, 1, 1)
178 | ]
179 | this.compositeMaterial.uniforms['bloomTintColors'].value =
180 | this.bloomTintColors
181 |
182 | // copy material
183 | if (CopyShader === undefined) {
184 | console.error('THREE.UnrealBloomPass relies on CopyShader')
185 | }
186 |
187 | const copyShader = CopyShader
188 |
189 | this.copyUniforms = UniformsUtils.clone(copyShader.uniforms)
190 | this.copyUniforms.opacity.value = 1.0
191 |
192 | this.materialCopy = new ShaderMaterial({
193 | uniforms: this.copyUniforms,
194 | vertexShader: copyShader.vertexShader,
195 | fragmentShader: copyShader.fragmentShader,
196 | blending: AdditiveBlending,
197 | depthTest: false,
198 | depthWrite: false,
199 | transparent: true
200 | })
201 |
202 | this.enabled = true
203 | this.needsSwap = false
204 |
205 | this._oldClearColor = new Color()
206 | this.oldClearAlpha = 1
207 |
208 | this.basic = new MeshBasicMaterial()
209 | this.fsQuad = new FullScreenQuad()
210 | }
211 |
212 | dispose() {
213 | for (let i = 0; i < this.renderTargetsHorizontal.length; i++) {
214 | this.renderTargetsHorizontal[i].dispose()
215 | }
216 |
217 | for (let i = 0; i < this.renderTargetsVertical.length; i++) {
218 | this.renderTargetsVertical[i].dispose()
219 | }
220 |
221 | this.renderTargetBright.dispose()
222 | }
223 |
224 | setSize(width: number, height: number) {
225 | let resx = Math.round(width / 2)
226 | let resy = Math.round(height / 2)
227 |
228 | this.renderTargetBright.setSize(resx, resy)
229 |
230 | for (let i = 0; i < this.nMips; i++) {
231 | this.renderTargetsHorizontal[i].setSize(resx, resy)
232 | this.renderTargetsVertical[i].setSize(resx, resy)
233 |
234 | this.separableBlurMaterials[i].uniforms['texSize'].value = new Vector2(
235 | resx,
236 | resy
237 | )
238 |
239 | resx = Math.round(resx / 2)
240 | resy = Math.round(resy / 2)
241 | }
242 | }
243 |
244 | override render(
245 | renderer: WebGLRenderer,
246 | writeBuffer: any,
247 | readBuffer: { texture: Texture },
248 | deltaTime: any,
249 | maskActive: any
250 | ) {
251 | renderer.getClearColor(this._oldClearColor)
252 | this.oldClearAlpha = renderer.getClearAlpha()
253 | const oldAutoClear = renderer.autoClear
254 | renderer.autoClear = false
255 |
256 | renderer.setClearColor(this.clearColor, 0)
257 |
258 | if (maskActive) renderer.state.buffers.stencil.setTest(false)
259 |
260 | // Render input to screen
261 |
262 | if (this.renderToScreen) {
263 | this.fsQuad.material = this.basic
264 | this.basic.map = readBuffer.texture
265 |
266 | renderer.setRenderTarget(null)
267 | renderer.clear()
268 | this.fsQuad.render(renderer)
269 | }
270 |
271 | // 1. Extract Bright Areas
272 |
273 | this.highPassUniforms['tDiffuse'].value = readBuffer.texture
274 | this.highPassUniforms['luminosityThreshold'].value = this.threshold
275 | this.fsQuad.material = this.materialHighPassFilter
276 |
277 | renderer.setRenderTarget(this.renderTargetBright)
278 | renderer.clear()
279 | this.fsQuad.render(renderer)
280 |
281 | // 2. Blur all the mips progressively
282 |
283 | let inputRenderTarget = this.renderTargetBright
284 |
285 | for (let i = 0; i < this.nMips; i++) {
286 | this.fsQuad.material = this.separableBlurMaterials[i]
287 |
288 | this.separableBlurMaterials[i].uniforms['colorTexture'].value =
289 | inputRenderTarget.texture
290 | this.separableBlurMaterials[i].uniforms['direction'].value =
291 | TransparentBackgroundFixedUnrealBloomPass.BlurDirectionX
292 | renderer.setRenderTarget(this.renderTargetsHorizontal[i])
293 | renderer.clear()
294 | this.fsQuad.render(renderer)
295 |
296 | this.separableBlurMaterials[i].uniforms['colorTexture'].value =
297 | this.renderTargetsHorizontal[i].texture
298 | this.separableBlurMaterials[i].uniforms['direction'].value =
299 | TransparentBackgroundFixedUnrealBloomPass.BlurDirectionY
300 | renderer.setRenderTarget(this.renderTargetsVertical[i])
301 | renderer.clear()
302 | this.fsQuad.render(renderer)
303 |
304 | inputRenderTarget = this.renderTargetsVertical[i]
305 | }
306 |
307 | // Composite all the mips
308 |
309 | this.fsQuad.material = this.compositeMaterial
310 | this.compositeMaterial.uniforms['bloomStrength'].value = this.strength
311 | this.compositeMaterial.uniforms['bloomRadius'].value = this.radius
312 | this.compositeMaterial.uniforms['bloomTintColors'].value =
313 | this.bloomTintColors
314 |
315 | renderer.setRenderTarget(this.renderTargetsHorizontal[0])
316 | renderer.clear()
317 | this.fsQuad.render(renderer)
318 |
319 | // Blend it additively over the input texture
320 |
321 | this.fsQuad.material = this.materialCopy
322 | this.copyUniforms['tDiffuse'].value =
323 | this.renderTargetsHorizontal[0].texture
324 |
325 | if (maskActive) renderer.state.buffers.stencil.setTest(true)
326 |
327 | // console.log('renderToScreen', this.renderToScreen)
328 |
329 | if (this.renderToScreen) {
330 | renderer.setRenderTarget(null)
331 | this.fsQuad.render(renderer)
332 | } else {
333 | renderer.setRenderTarget(readBuffer)
334 | this.fsQuad.render(renderer)
335 | }
336 |
337 | // Restore renderer settings
338 |
339 | renderer.setClearColor(this._oldClearColor, this.oldClearAlpha)
340 | renderer.autoClear = oldAutoClear
341 | }
342 |
343 | getSeperableBlurMaterial(kernelRadius: number) {
344 | return new ShaderMaterial({
345 | defines: {
346 | KERNEL_RADIUS: kernelRadius,
347 | SIGMA: kernelRadius
348 | },
349 |
350 | uniforms: {
351 | colorTexture: { value: null },
352 | texSize: { value: new Vector2(0.5, 0.5) },
353 | direction: { value: new Vector2(0.5, 0.5) }
354 | },
355 |
356 | vertexShader: `varying vec2 vUv;
357 | void main() {
358 | vUv = uv;
359 | gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
360 | }`,
361 |
362 | fragmentShader: `#include
363 | varying vec2 vUv;
364 | uniform sampler2D colorTexture;
365 | uniform vec2 texSize;
366 | uniform vec2 direction;
367 |
368 | float gaussianPdf(in float x, in float sigma) {
369 | return 0.39894 * exp( -0.5 * x * x/( sigma * sigma))/sigma;
370 | }
371 | void main() {\n\
372 | vec2 invSize = 1.0 / texSize;\
373 | float fSigma = float(SIGMA);\
374 | float weightSum = gaussianPdf(0.0, fSigma);\
375 | float alphaSum = 0.0;\
376 | vec3 diffuseSum = texture2D( colorTexture, vUv).rgb * weightSum;\
377 | for( int i = 1; i < KERNEL_RADIUS; i ++ ) {\
378 | float x = float(i);\
379 | float w = gaussianPdf(x, fSigma);\
380 | vec2 uvOffset = direction * invSize * x;\
381 | vec4 sample1 = texture2D( colorTexture, vUv + uvOffset);\
382 | vec4 sample2 = texture2D( colorTexture, vUv - uvOffset);\
383 | diffuseSum += (sample1.rgb + sample2.rgb) * w;\
384 | alphaSum += (sample1.a + sample2.a) * w;\
385 | weightSum += 2.0 * w;\
386 | }\
387 | gl_FragColor = vec4(diffuseSum/weightSum, alphaSum/weightSum);\n\
388 | }`
389 | })
390 | }
391 |
392 | getCompositeMaterial(nMips: number) {
393 | return new ShaderMaterial({
394 | defines: {
395 | NUM_MIPS: nMips
396 | },
397 |
398 | uniforms: {
399 | blurTexture1: { value: null },
400 | blurTexture2: { value: null },
401 | blurTexture3: { value: null },
402 | blurTexture4: { value: null },
403 | blurTexture5: { value: null },
404 | dirtTexture: { value: null },
405 | bloomStrength: { value: 1.0 },
406 | bloomFactors: { value: null },
407 | bloomTintColors: { value: null },
408 | bloomRadius: { value: 0.0 }
409 | },
410 |
411 | vertexShader: `varying vec2 vUv;
412 | void main() {
413 | vUv = uv;
414 | gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
415 | }`,
416 |
417 | fragmentShader: `varying vec2 vUv;
418 | uniform sampler2D blurTexture1;
419 | uniform sampler2D blurTexture2;
420 | uniform sampler2D blurTexture3;
421 | uniform sampler2D blurTexture4;
422 | uniform sampler2D blurTexture5;
423 | uniform sampler2D dirtTexture;
424 | uniform float bloomStrength;
425 | uniform float bloomRadius;
426 | uniform float bloomFactors[NUM_MIPS];
427 | uniform vec3 bloomTintColors[NUM_MIPS];
428 |
429 | float lerpBloomFactor(const in float factor) {
430 | float mirrorFactor = 1.2 - factor;
431 | return mix(factor, mirrorFactor, bloomRadius);
432 | }
433 |
434 | void main() {
435 | gl_FragColor = bloomStrength * ( lerpBloomFactor(bloomFactors[0]) * vec4(bloomTintColors[0], 1.0) * texture2D(blurTexture1, vUv) +
436 | lerpBloomFactor(bloomFactors[1]) * vec4(bloomTintColors[1], 1.0) * texture2D(blurTexture2, vUv) +
437 | lerpBloomFactor(bloomFactors[2]) * vec4(bloomTintColors[2], 1.0) * texture2D(blurTexture3, vUv) +
438 | lerpBloomFactor(bloomFactors[3]) * vec4(bloomTintColors[3], 1.0) * texture2D(blurTexture4, vUv) +
439 | lerpBloomFactor(bloomFactors[4]) * vec4(bloomTintColors[4], 1.0) * texture2D(blurTexture5, vUv) );
440 | }`
441 | })
442 | }
443 | }
444 |
// Unit blur axes in texture space, shared by all instances
TransparentBackgroundFixedUnrealBloomPass.BlurDirectionX = new Vector2(1.0, 0.0)
TransparentBackgroundFixedUnrealBloomPass.BlurDirectionY = new Vector2(0.0, 1.0)

// exported under the stock name so it can drop in for three's UnrealBloomPass
export { TransparentBackgroundFixedUnrealBloomPass as UnrealBloomPass }
449 |
--------------------------------------------------------------------------------
/src/app.ts:
--------------------------------------------------------------------------------
1 | import * as dat from 'dat.gui'
2 |
3 | import demo0 from '../media/audio/demo.m4a'
4 | import coupling0 from '../media/audio/hotd-podcast-coupling-clip.m4a'
5 | import audioUrl0 from '../media/audio/voice.m4a'
6 | import audioUrl1 from '../media/audio/Wizard-DreamOn.mp3'
7 | import audioUrl2 from '../media/audio/ChillyGonzales-SampleThis.mp3'
8 | import audioUrl3 from '../media/audio/EverythingPersists-AndThenYouSeeIt.mp3'
9 | import audioUrl4 from '../media/audio/FortMinor-WheredYouGo.mp3'
10 |
11 | // import { CanvasAudioVisualization as AudioViz } from './CanvasAudioVisualization'
12 | // import { HybridAudioVisualization as AudioViz } from './HybridAudioVisualization'
13 | import {
14 | MeydaHybridAudioVisualization as AudioViz,
15 | DrawStyle,
16 | DrawShape,
17 | MeydaAudioFeature
18 | } from './MeydaHybridAudioVisualization'
19 |
// Human-readable track names mapped to their bundled audio asset URLs.
const audioTracks: { [key: string]: string } = {
  transitive_bs: demo0,
  coupling: coupling0,
  'li jin': audioUrl0,
  'dream on': audioUrl1,
  'sample this': audioUrl2,
  'and then you see it': audioUrl3,
  "where'd you go": audioUrl4
}
29 |
/**
 * All tweakable settings exposed through the dat.GUI control panel.
 */
export interface Params {
  // key into the audioTracks map
  audioTrack: string

  // offscreen canvas resolution multiplier
  offscreenScale: number
  // log2 of the analyser FFT size
  fftSizePower: number
  // log2 of the Meyda analysis buffer size
  bufferSizePower: number

  drawStyle: DrawStyle
  drawShape: DrawShape
  featureExtractor: MeydaAudioFeature
  smoothingFactor: number
  accentuationFactor: number
  visualScalingFactor: number
  numberOfBarkBands: number
  fill: boolean
  mirror: boolean
  bloom: boolean
  glitch: boolean

  isRecordingEnabled: boolean
  width: number
  height: number
}
53 |
54 | let vis: AudioViz | null = null
55 |
56 | const params: Params = {
57 | audioTrack: 'transitive_bs',
58 | featureExtractor: 'loudness' as MeydaAudioFeature,
59 | drawStyle: 'curves' as DrawStyle,
60 | drawShape: 'triangle' as DrawShape,
61 | offscreenScale: 2.0,
62 | fftSizePower: 8,
63 | bufferSizePower: 10,
64 | numberOfBarkBands: 32,
65 | smoothingFactor: 0.7,
66 | accentuationFactor: 3.0,
67 | visualScalingFactor: 1.0,
68 | fill: true,
69 | mirror: true,
70 | bloom: true,
71 | glitch: false,
72 | isRecordingEnabled: false,
73 | width: 480,
74 | height: 480
75 | }
76 |
// dat.gui control panel. Every controller below binds one field of `params`
// and uses one of two update strategies:
//  - onFinishChange(reset): settings that require rebuilding the whole
//    visualization (audio source, feature extractor, FFT/buffer sizes, …)
//  - onChange(...): settings the running visualization can apply live by
//    assigning the matching property on `vis`.
const gui = new dat.GUI({})
// audio source — rebuild required
gui
  .add(params, 'audioTrack')
  .options(Object.keys(audioTracks))
  .name('audio track')
  .onFinishChange(reset)
// Meyda feature driving the visuals — rebuild required
gui
  .add(params, 'featureExtractor')
  .options([
    'loudness',
    'chroma',
    'buffer',
    'mfcc',
    'amplitudeSpectrum',
    'powerSpectrum'
  ])
  .name('feature')
  .onFinishChange(reset)
// drawing shape — applied live
gui
  .add(params, 'drawShape')
  .options(['triangle', 'basic', 'circle', 'waveform'])
  .name('shape')
  .onChange((value) => {
    if (vis) {
      vis.drawShape = value
    }
  })
// drawing style — applied live
gui
  .add(params, 'drawStyle')
  .options(['curves', 'lines', 'bars'])
  .name('style')
  .onChange((value) => {
    if (vis) {
      vis.drawStyle = value
    }
  })
// offscreen render-target scale — rebuild required
gui
  .add(params, 'offscreenScale', 1.0, 4.0)
  .step(1.0)
  .name('offscreen')
  .onFinishChange(reset)
// log2 of FFT size (actual size is 1 << value) — rebuild required
gui
  .add(params, 'fftSizePower', 5, 12)
  .step(1)
  .name('fft size (log)')
  .onFinishChange(reset)
// log2 of analysis buffer size — rebuild required
gui
  .add(params, 'bufferSizePower', 9, 12)
  .step(1)
  .name('buffer size (log)')
  .onFinishChange(reset)
// number of loudness (bark band) bins — rebuild required
gui
  .add(params, 'numberOfBarkBands', 4, 128)
  .step(1.0)
  .name('loudness bins')
  .onFinishChange(reset)
// feature smoothing — applied live (tiny step for fine control)
gui
  .add(params, 'smoothingFactor', 0.0, 1.0)
  .step(0.000001)
  .name('smoothing')
  .onChange((value) => {
    if (vis) {
      vis.smoothingFactor = value
    }
  })
// feature emphasis — applied live
gui
  .add(params, 'accentuationFactor', 1.0, 16.0)
  .step(0.1)
  .name('accentuation')
  .onChange((value) => {
    if (vis) {
      vis.accentuationFactor = value
    }
  })
// overall visual scale — applied live (min is just above zero)
gui
  .add(params, 'visualScalingFactor', 0.00001, 3.0)
  .step(0.1)
  .name('visual scaling')
  .onChange((value) => {
    if (vis) {
      vis.visualScalingFactor = value
    }
  })
// boolean toggles — all applied live
gui
  .add(params, 'fill')
  .name('fill')
  .onChange((value) => {
    if (vis) {
      vis.fill = value
    }
  })
gui
  .add(params, 'mirror')
  .name('mirror')
  .onChange((value) => {
    if (vis) {
      vis.mirror = value
    }
  })
gui
  .add(params, 'bloom')
  .name('bloom')
  .onChange((value) => {
    if (vis) {
      vis.bloom = value
    }
  })
gui
  .add(params, 'glitch')
  .name('glitch')
  .onChange((value) => {
    if (vis) {
      vis.glitch = value
    }
  })
// recording toggle — stops playback first so the recording starts clean
gui
  .add(params, 'isRecordingEnabled')
  .name('record')
  .onChange((value) => {
    if (vis) {
      if (vis.isPlaying) {
        vis.stop()
      }

      vis.isRecordingEnabled = value
    }
  })
205 | function reset() {
206 | if (!vis) {
207 | return
208 | }
209 |
210 | restart()
211 | }
212 |
213 | function restart(autoplay = false) {
214 | let isPlaying = false
215 | if (vis) {
216 | isPlaying = vis.isPlaying
217 |
218 | vis.stop()
219 | vis = null
220 | }
221 |
222 | const canvas = document.getElementById('canvas') as HTMLCanvasElement
223 | canvas.width = params.width
224 | canvas.height = params.height
225 | console.log(params)
226 |
227 | vis = new AudioViz({
228 | canvas,
229 | autoplay: false,
230 | ...params,
231 | mediaUrl: audioTracks[params.audioTrack],
232 | fftSize: 1 << params.fftSizePower,
233 | bufferSize: 1 << params.bufferSizePower
234 | })
235 | ;(globalThis as any).vis = vis
236 |
237 | if (isPlaying || autoplay) {
238 | vis.start()
239 | }
240 | }
241 |
242 | const play = document.getElementById('play')
243 | play?.addEventListener('click', () => {
244 | if (!vis || !vis.isPlaying) {
245 | restart(true)
246 | }
247 | })
248 |
249 | const pause = document.getElementById('pause')
250 | pause?.addEventListener('click', () => {
251 | if (vis) {
252 | vis.pause()
253 | }
254 | })
255 |
256 | const stop = document.getElementById('stop')
257 | stop?.addEventListener('click', () => {
258 | if (vis) {
259 | vis.stop()
260 | }
261 | })
262 |
--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------
// Library entry point: re-export every visualization implementation so
// consumers can import them from the package root.
export * from './CanvasAudioVisualization'
export * from './HybridAudioVisualization'
export * from './MeydaHybridAudioVisualization'
4 |
--------------------------------------------------------------------------------
/src/vite-env.d.ts:
--------------------------------------------------------------------------------
/// <reference types="vite/client" />
2 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "lib": ["dom", "dom.iterable", "esnext"],
4 | "module": "esnext",
5 | "target": "esnext",
6 | "moduleResolution": "node",
7 | "jsx": "preserve",
8 | "baseUrl": "./",
9 | "allowSyntheticDefaultImports": true,
10 | "importsNotUsedAsValues": "error",
11 | "isolatedModules": true,
12 | "noEmit": true,
13 | "strict": true,
14 | "skipLibCheck": true,
15 | "forceConsistentCasingInFileNames": true,
16 | "resolveJsonModule": true,
17 | "useDefineForClassFields": true
18 | },
19 | "include": ["src", "types", "index.d.ts", "node_modules/@types"]
20 | }
21 |
--------------------------------------------------------------------------------
/vite.config.js:
--------------------------------------------------------------------------------
import { defineConfig } from 'vite'

export default defineConfig({
  // Treat the bundled media (background video and demo audio clips) as
  // static assets so they can be imported as URL strings from the source
  // files (see the imports at the top of src/app.ts).
  assetsInclude: ['media/*.mp4', 'media/audio/*.mp3', 'media/audio/*.m4a']
})
6 |
--------------------------------------------------------------------------------