├── .envrc
├── .gitignore
├── entrypoints
│   ├── animationWindow
│   │   ├── index.html
│   │   └── main.js
│   ├── background.ts
│   ├── offscreenWindow
│   │   ├── index.html
│   │   └── main.ts
│   ├── sandbox
│   │   ├── index.html
│   │   └── main.ts
│   └── settingsWindow
│       ├── index.html
│       └── main.ts
├── flake.lock
├── flake.nix
├── package.json
├── pnpm-lock.yaml
├── public
│   └── icon
│       ├── icon128.png
│       ├── icon256.png
│       └── icon64.png
├── readme.md
├── src
│   ├── scene
│   │   ├── events
│   │   │   ├── setSceneEvent.ts
│   │   │   └── setSceneSettingsEvent.ts
│   │   ├── scene.ts
│   │   ├── sceneManager.ts
│   │   ├── sceneNames.ts
│   │   ├── sceneSetting.ts
│   │   └── scenes
│   │       ├── butterchurn
│   │       │   ├── butterchurn.ts
│   │       │   └── setting.ts
│   │       ├── dancingHorizon
│   │       │   ├── dancingHorizon.ts
│   │       │   └── setting.ts
│   │       ├── frostfire
│   │       │   ├── frostfire.ts
│   │       │   └── settings.ts
│   │       ├── sunflower
│   │       │   ├── setting.ts
│   │       │   └── sunflower.ts
│   │       └── synthBars
│   │           ├── setting.ts
│   │           └── synthBars.ts
│   ├── userInterface
│   │   └── settings
│   │       ├── events
│   │       │   └── SettingsWindowEvent.ts
│   │       ├── sceneSettings
│   │       │   ├── butterchurnSettings.ts
│   │       │   ├── dancingHorizonSettings.ts
│   │       │   ├── frostfireSettings.ts
│   │       │   ├── sunflowerSettings.ts
│   │       │   └── synthBarSettings.ts
│   │       ├── settingsManager.ts
│   │       └── settingsUserInterface.ts
│   └── utils
│       ├── eventMessage.ts
│       ├── openGl
│       │   ├── colorConverter.ts
│       │   └── openGl.ts
│       └── settings.ts
├── tsconfig.json
└── wxt.config.ts
/.envrc:
--------------------------------------------------------------------------------
1 | use flake
2 |
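3 | # Assumes nix and direnv are installed: run `direnv allow` once and direnv
4 | # will load the flake dev shell from flake.nix (which provides pnpm).
5 |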
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.swp
2 | *.swo
3 | # Logs
4 | logs
5 | *.log
6 | npm-debug.log*
7 | yarn-debug.log*
8 | yarn-error.log*
9 | pnpm-debug.log*
10 | lerna-debug.log*
11 |
12 | node_modules
13 | .output
14 | stats.html
15 | stats-*.json
16 | .wxt
17 | web-ext.config.ts
18 |
19 | # Editor directories and files
20 | .vscode/*
21 | !.vscode/extensions.json
22 | .idea
23 | .DS_Store
24 | *.suo
25 | *.ntvs*
26 | *.njsproj
27 | *.sln
28 | *.sw?
29 |
30 | .direnv/
31 |
--------------------------------------------------------------------------------
/entrypoints/animationWindow/index.html:
--------------------------------------------------------------------------------
1 | <!DOCTYPE html>
2 | <html lang="en">
3 | <head>
4 | <title>Audio Visualizer</title>
5 | <script src="./main.js" type="module"></script>
6 | <!-- Reconstructed markup: the original tags and the styles that occupied the
7 | missing lines were lost in extraction. main.js expects an iframe with
8 | id="theFrame" hosting the sandbox page; the src below is an assumption. -->
9 | </head>
10 | <body>
11 | <iframe id="theFrame" src="/sandbox.html"></iframe>
12 | </body>
13 | </html>
--------------------------------------------------------------------------------
/entrypoints/animationWindow/main.js:
--------------------------------------------------------------------------------
1 | import {
2 | messageAction,
3 | messageTarget,
4 | } from '@/src/utils/eventMessage';
5 | chrome.runtime.onMessage.addListener((message) => {
6 | document
7 | .getElementById('theFrame')
8 | ?.contentWindow?.postMessage(message, '*');
9 | });
10 | chrome.runtime.onMessage.addListener((message) => {
11 | if (
12 | message.target === messageTarget.animation &&
13 | message.action === messageAction.toggleFullScreen
14 | ) {
15 | if (!document.fullscreenElement) {
16 | document.documentElement.requestFullscreen(); // Make the whole page fullscreen
17 | } else {
18 | if (document.exitFullscreen) {
19 | document.exitFullscreen(); // Exit fullscreen mode
20 | }
21 | }
22 | }
23 | });
24 |
25 | window.addEventListener('load', function() {
26 | document
27 | .getElementById('theFrame')
28 | ?.contentWindow?.postMessage({ target: 'animationWindowReadyEvent' }, '*');
29 | });
30 |
31 | window.addEventListener('message', function(e) {
32 | chrome.runtime.sendMessage(e.data);
33 | });
34 |
--------------------------------------------------------------------------------
/entrypoints/background.ts:
--------------------------------------------------------------------------------
1 | import { SettingsWindowEvent } from '@/src/userInterface/settings/events/SettingsWindowEvent';
2 | // Event helpers from eventMessage are imported dynamically inside defineBackground below.
3 |
4 | export default defineBackground(async () => {
5 | const { InitiateStreamEvent, messageTarget, messageAction, GenericEvent } =
6 | await import('@/src/utils/eventMessage');
7 | let streaming = false;
8 | let animationWindowId: number | null = null;
9 | let settingsWindowId: number | null = null;
10 | let tabId: number;
11 | async function initiateStream() {
12 | const streamId = await chrome.tabCapture.getMediaStreamId({
13 | targetTabId: tabId,
14 | });
15 |
16 | const startStreamMessage = new InitiateStreamEvent(
17 | messageTarget.offscreen,
18 | messageAction.initiateStream,
19 | streamId
20 | );
21 | chrome.runtime.sendMessage(startStreamMessage.toMessage());
22 | streaming = true;
23 | }
24 | function stopStream() {
25 | const stopStreamMessage = new GenericEvent(
26 | messageTarget.offscreen,
27 | messageAction.stopStream
28 | );
29 | chrome.runtime.sendMessage(stopStreamMessage.toMessage());
30 | streaming = false;
31 | }
32 |
33 | chrome.action.onClicked.addListener(async (tab) => {
34 | if (streaming) {
35 | return;
36 | }
37 | tabId = tab.id as number;
38 | streaming = true;
39 |
40 | const existingContexts = await chrome.runtime.getContexts({});
41 | const offscreenDocument = existingContexts.find(
42 | (c) => c.contextType === 'OFFSCREEN_DOCUMENT'
43 | );
44 | if (!offscreenDocument) {
45 | await chrome.offscreen.createDocument({
46 | url: 'offscreenWindow.html',
47 | reasons: [chrome.offscreen.Reason.USER_MEDIA],
48 | justification: 'Analyse captured tab audio for the visualizer',
49 | });
50 | }
51 | await initiateStream();
52 |
53 | // Create the animation window
54 | let win = await chrome.windows.create({
55 | url: chrome.runtime.getURL('animationWindow.html'),
56 | type: 'popup',
57 | width: 1600,
58 | height: 900,
59 | });
60 | animationWindowId = win.id as number;
61 | });
62 |
63 | chrome.runtime.onMessage.addListener(
64 | async (message: SettingsWindowEvent) => {
65 | if (
66 | message.target === messageTarget.background &&
67 | message.action === messageAction.openSettingsWindow
68 | ) {
69 | let win = await chrome.windows.create({
70 | url: chrome.runtime.getURL('settingsWindow.html'),
71 | type: 'popup',
72 | width: 400,
73 | height: 600,
74 | });
75 | settingsWindowId = win.id as number;
76 |
77 | const closeSettingsInAnimation = new SettingsWindowEvent(
78 | messageTarget.animation,
79 | messageAction.openSettingsWindow
80 | );
81 | chrome.runtime.sendMessage(
82 | closeSettingsInAnimation.toMessage()
83 | );
84 | }
85 | }
86 | );
87 | // Listen for the settings-window being closed
88 | chrome.windows.onRemoved.addListener((windowId) => {
89 | if (windowId === settingsWindowId) {
90 | settingsWindowId = null;
91 |
92 | const closeSettingsWindow = new SettingsWindowEvent(
93 | messageTarget.animation,
94 | messageAction.closeSettingsWindow
95 | );
96 | chrome.runtime.sendMessage(closeSettingsWindow.toMessage());
97 | }
98 | });
99 | // Listen for the animation-window being closed
100 | chrome.windows.onRemoved.addListener((windowId) => {
101 | if (windowId === animationWindowId) {
102 | animationWindowId = null;
103 | if (settingsWindowId) {
104 | chrome.windows.remove(settingsWindowId, () => {
105 | console.log('Settings window', settingsWindowId, 'closed because its animation window was closed.');
106 | });
107 | }
108 | if (streaming) {
109 | stopStream();
110 | return;
111 | }
112 | }
113 | });
114 | });
115 |
--------------------------------------------------------------------------------
/entrypoints/offscreenWindow/index.html:
--------------------------------------------------------------------------------
1 | <!-- Reconstructed: the original markup was lost in extraction; the offscreen document only needs to load main.ts. -->
2 | <script src="./main.ts" type="module"></script>
3 |
--------------------------------------------------------------------------------
/entrypoints/offscreenWindow/main.ts:
--------------------------------------------------------------------------------
1 | import {
2 | StartStreamEvent,
3 | GenericEvent,
4 | AudioDataEvent,
5 | NormalAudioDataDto,
6 | ButterChurnAudioDataDto,
7 | messageAction,
8 | messageTarget,
9 | streamType,
10 | InitiateStreamEvent,
11 | } from '@/src/utils/eventMessage';
12 |
13 | let currentStreamType: streamType | null = null;
14 | let stream: MediaStream | null = null;
15 | let audioContext: AudioContext | null = null;
16 |
17 | let numSamplesNormal = 2048;
18 | let numSamplesButterChurn = 1024;
19 | let analyserNormal: AnalyserNode | null = null;
20 | let analyserButterChurn: AnalyserNode | null = null;
21 | let analyserButterChurnL: AnalyserNode | null = null;
22 | let analyserButterChurnR: AnalyserNode | null = null;
23 |
24 |
25 | chrome.runtime.onMessage.addListener((message: GenericEvent) => {
26 | if (
27 | message.target === messageTarget.offscreen &&
28 | message.action === messageAction.toggleFullScreen
29 | ) {
30 | const fullScreenEventMessage = new GenericEvent(messageTarget.animation, messageAction.toggleFullScreen);
31 | chrome.runtime.sendMessage(fullScreenEventMessage.toMessage());
32 | }
33 | });
34 | chrome.runtime.onMessage.addListener((message: StartStreamEvent) => {
35 | if (
36 | message.target === messageTarget.offscreen &&
37 | message.action === messageAction.startStream
38 | ) {
39 | currentStreamType = message.streamType;
40 | startStream();
41 | }
42 | });
43 | chrome.runtime.onMessage.addListener((message: InitiateStreamEvent) => {
44 | if (
45 | message.target === messageTarget.offscreen &&
46 | message.action === messageAction.initiateStream
47 | ) {
48 | window.captureIsActive = true;
49 | initiateStream(message.streamId);
50 | }
51 | });
52 | chrome.runtime.onMessage.addListener((message: GenericEvent) => {
53 | if (
54 | message.target === messageTarget.offscreen &&
55 | message.action === messageAction.stopStream
56 | ) {
57 | stopStream();
58 | }
59 | });
60 | async function initiateStream(streamId: string) {
61 | stream = await navigator.mediaDevices.getUserMedia({
62 | audio: {
63 | mandatory: {
64 | chromeMediaSource: 'tab',
65 | chromeMediaSourceId: streamId,
66 | },
67 | },
68 | });
69 | audioContext = new AudioContext();
70 |
71 | const source = audioContext.createMediaStreamSource(stream);
72 | // Create the normal analyser
73 | analyserNormal = audioContext.createAnalyser();
74 | analyserNormal.fftSize = numSamplesNormal;
75 | source.connect(analyserNormal);
76 | // Connect the normal analyser to the destination
77 | analyserNormal.connect(audioContext.destination);
78 |
79 | // Create the butterchurn analysers
80 | analyserButterChurn = audioContext.createAnalyser();
81 | analyserButterChurn.smoothingTimeConstant = 0.0;
82 | analyserButterChurn.fftSize = numSamplesButterChurn;
83 |
84 | analyserButterChurnL = audioContext.createAnalyser();
85 | analyserButterChurnL.smoothingTimeConstant = 0.0;
86 | analyserButterChurnL.fftSize = numSamplesButterChurn;
87 |
88 | analyserButterChurnR = audioContext.createAnalyser();
89 | analyserButterChurnR.smoothingTimeConstant = 0.0;
90 | analyserButterChurnR.fftSize = numSamplesButterChurn;
91 |
92 | source.connect(analyserButterChurn);
93 |
94 | const splitter = audioContext.createChannelSplitter(2);
95 |
96 | source.connect(splitter);
97 | splitter.connect(analyserButterChurnL, 0);
98 | splitter.connect(analyserButterChurnR, 1);
99 | }
100 |
101 | async function startStream() {
102 | const updateAudioDataEvent = () => {
103 | if (!window.captureIsActive) {
104 | return;
105 | }
106 | if (
107 | currentStreamType === streamType.normal &&
108 | analyserNormal !== null
109 | ) {
110 | const dataArray = new Uint8Array(numSamplesNormal / 4);
111 | analyserNormal.getByteFrequencyData(dataArray);
112 |
113 | const data = Array.from(dataArray);
114 | const audioData = new NormalAudioDataDto(data);
115 | const audioDataMessage = new AudioDataEvent(
116 | messageTarget.animation,
117 | messageAction.updateAudioData,
118 | audioData
119 | );
120 | chrome.runtime.sendMessage(audioDataMessage.toMessage());
121 | } else if (
122 | currentStreamType === streamType.butterChurn &&
123 | analyserButterChurn !== null &&
124 | analyserButterChurnL !== null &&
125 | analyserButterChurnR !== null
126 | ) {
127 | const dataArray = new Uint8Array(numSamplesButterChurn);
128 | const dataArrayL = new Uint8Array(numSamplesButterChurn);
129 | const dataArrayR = new Uint8Array(numSamplesButterChurn);
130 | analyserButterChurn.getByteTimeDomainData(dataArray);
131 | analyserButterChurnL.getByteTimeDomainData(dataArrayL);
132 | analyserButterChurnR.getByteTimeDomainData(dataArrayR);
133 |
134 | const data = Array.from(dataArray);
135 | const dataL = Array.from(dataArrayL);
136 | const dataR = Array.from(dataArrayR);
137 | const audioData = new ButterChurnAudioDataDto(data, dataL, dataR);
138 | const audioDataMessage = new AudioDataEvent(
139 | messageTarget.animation,
140 | messageAction.updateAudioData,
141 | audioData
142 | );
143 | chrome.runtime.sendMessage(audioDataMessage.toMessage());
144 | }
145 |
146 | // 1000/10 = 100 frames per second
147 | setTimeout(updateAudioDataEvent, 10);
148 | };
149 |
150 | updateAudioDataEvent();
151 | }
152 |
153 | function stopStream() {
154 | window.captureIsActive = false;
155 | if (stream) {
156 | stream.getTracks().forEach((track) => track.stop());
157 | console.log('Stream stopped.');
158 | }
159 | if (audioContext) {
160 | audioContext.close(); // Properly closes the audio context
161 | console.log('Audio context closed.');
162 | }
163 | }
164 |
--------------------------------------------------------------------------------
/entrypoints/sandbox/index.html:
--------------------------------------------------------------------------------
1 | <!DOCTYPE html>
2 | <html lang="en">
3 | <head>
4 | <title>Audio Visualizer</title>
5 | <script src="./main.ts" type="module"></script>
6 | <!-- Reconstructed markup: the original tags were lost in extraction;
7 | the scenes append their own <canvas> elements to document.body. -->
8 | </head>
9 | <body>
10 | </body>
11 | </html>
--------------------------------------------------------------------------------
/entrypoints/sandbox/main.ts:
--------------------------------------------------------------------------------
1 | import {
2 | messageAction,
3 | messageTarget,
4 | AudioDataEvent,
5 | GenericEvent,
6 | } from '@/src/utils/eventMessage';
7 | import { IScene } from '@/src/scene/scene';
8 | import { SceneManager } from '@/src/scene/sceneManager';
9 | import { SunFlower } from '@/src/scene/scenes/sunflower/sunflower';
10 | import { SynthBars } from '@/src/scene/scenes/synthBars/synthBars';
11 | import { DancingHorizon } from '@/src/scene/scenes/dancingHorizon/dancingHorizon';
12 | import { ISceneSetting } from '@/src/scene/sceneSetting';
13 | import { SetSceneEvent } from '@/src/scene/events/setSceneEvent';
14 | import { SetSceneSettingsEvent } from '@/src/scene/events/setSceneSettingsEvent';
15 | import { Butterchurn } from '@/src/scene/scenes/butterchurn/butterchurn';
16 | import { sceneNames } from '@/src/scene/sceneNames';
17 | import { SettingsWindowEvent } from '@/src/userInterface/settings/events/SettingsWindowEvent';
18 | import { SettingsUserInterface } from '@/src/userInterface/settings/settingsUserInterface';
19 | import { FrostFire } from '@/src/scene/scenes/frostfire/frostfire';
20 |
21 | // Initialize scenes
22 | const scenesMap = new Map();
23 | scenesMap.set(sceneNames.Butterchurn.toString(), new Butterchurn());
24 | scenesMap.set(sceneNames.SunFlower.toString(), new SunFlower());
25 | scenesMap.set(sceneNames.FrostFire.toString(), new FrostFire());
26 | scenesMap.set(sceneNames.SynthBars.toString(), new SynthBars());
27 | scenesMap.set(sceneNames.DancingHorizon.toString(), new DancingHorizon());
28 | // Initialize scene manager
29 | const sceneManager = new SceneManager();
30 | window.sandboxEventMessageHolder = null;
31 |
32 | window.addEventListener('message', (message: MessageEvent) => {
33 | window.sandboxEventMessageHolder = message;
34 | if (message.data.target === 'animationWindowReadyEvent') {
35 | console.log('animationWindowReadyEvent');
36 | settingsUserInterface.buildScene();
37 | }
38 | if (
39 | message.data.target === messageTarget.animation &&
40 | message.data.action === messageAction.toggleFullScreen
41 | ) {
42 | const fullScreenEventMessage = new GenericEvent(
43 | messageTarget.animation,
44 | messageAction.toggleFullScreen
45 | );
46 |
47 | window.sandboxEventMessageHolder.source.postMessage(
48 | fullScreenEventMessage.toMessage(),
49 | window.sandboxEventMessageHolder.origin
50 | );
51 | }
52 | });
53 | window.addEventListener(messageAction.toggleFullScreen, (event) => {
54 | const toggleFullScreenViaOffscreen = new GenericEvent(
55 | messageTarget.offscreen,
56 | messageAction.toggleFullScreen
57 | );
58 |
59 | window.sandboxEventMessageHolder.source.postMessage(
60 | toggleFullScreenViaOffscreen.toMessage(),
61 | window.sandboxEventMessageHolder.origin
62 | );
63 | });
64 | window.addEventListener(messageAction.setScene, (event) => {
65 | const sceneEvent = event.detail.event as SetSceneEvent;
66 | sceneManager.setScene(
67 | scenesMap.get(sceneEvent.sceneName) as IScene,
68 | sceneEvent.sceneSettings as ISceneSetting
69 | );
70 | });
71 | window.addEventListener('message', (message: MessageEvent) => {
72 | if (
73 | message.data.target === messageTarget.animation &&
74 | message.data.action === messageAction.setScene
75 | ) {
76 | sceneManager.setScene(
77 | scenesMap.get(message.data.sceneName) as IScene,
78 | message.data.sceneSettings
79 | );
80 | }
81 | });
82 | // Update scene settings event
83 | window.addEventListener(messageAction.setSceneSettings, (event) => {
84 | const sceneSettingsEvent = event.detail.event as SetSceneSettingsEvent;
85 | sceneManager.updateSettings(sceneSettingsEvent.sceneSettings);
86 | });
87 | window.addEventListener(
88 | 'message',
89 | (message: MessageEvent) => {
90 | if (
91 | message.data.target === messageTarget.animation &&
92 | message.data.action === messageAction.setSceneSettings
93 | ) {
94 | sceneManager.updateSettings(message.data.sceneSettings);
95 | }
96 | }
97 | );
98 | // Update audio data event
99 | window.addEventListener('message', (message: MessageEvent) => {
100 | if (
101 | message.data.target === messageTarget.animation &&
102 | message.data.action === messageAction.updateAudioData
103 | ) {
104 | sceneManager.updateAudioData(message.data.audioData);
105 | }
106 | });
107 |
108 | // Initialize settings UI
109 | let settingsUserInterface = new SettingsUserInterface(false);
110 | window.addEventListener(
111 | 'message',
112 | (message: MessageEvent) => {
113 | if (message.data.target === messageTarget.animation) {
114 | if (message.data.action === messageAction.openSettingsWindow) {
115 | settingsUserInterface.destroy();
116 | } else if (
117 | message.data.action === messageAction.closeSettingsWindow
118 | ) {
119 | settingsUserInterface.buildScene();
120 | }
121 | }
122 | }
123 | );
124 | function render() {
125 | sceneManager.renderScene();
126 | requestAnimationFrame(render);
127 | }
128 | render();
129 |
--------------------------------------------------------------------------------
/entrypoints/settingsWindow/index.html:
--------------------------------------------------------------------------------
1 | <!DOCTYPE html>
2 | <html lang="en">
3 | <head>
4 | <meta charset="UTF-8" />
5 | <title>Audio Visualizer - Settings</title>
6 | <script src="./main.ts" type="module"></script>
7 | <!-- Reconstructed markup: the original tags were lost in extraction;
8 | the dat.gui settings panel is attached to document.body by settingsUserInterface.ts. -->
9 | </head>
10 | <body>
11 | </body>
12 | </html>
--------------------------------------------------------------------------------
/entrypoints/settingsWindow/main.ts:
--------------------------------------------------------------------------------
1 | import { SettingsUserInterface } from "@/src/userInterface/settings/settingsUserInterface";
2 |
3 | document.body.style.backgroundColor = "black";
4 | let settingsUserInterface = new SettingsUserInterface(true);
5 | settingsUserInterface.buildScene();
6 |
--------------------------------------------------------------------------------
/flake.lock:
--------------------------------------------------------------------------------
1 | {
2 | "nodes": {
3 | "flake-utils": {
4 | "inputs": {
5 | "systems": "systems"
6 | },
7 | "locked": {
8 | "lastModified": 1710146030,
9 | "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
10 | "owner": "numtide",
11 | "repo": "flake-utils",
12 | "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
13 | "type": "github"
14 | },
15 | "original": {
16 | "id": "flake-utils",
17 | "type": "indirect"
18 | }
19 | },
20 | "nixpkgs": {
21 | "locked": {
22 | "lastModified": 0,
23 | "narHash": "sha256-rwz8NJZV+387rnWpTYcXaRNvzUSnnF9aHONoJIYmiUQ=",
24 | "path": "/nix/store/dk2rpyb6ndvfbf19bkb2plcz5y3k8i5v-source",
25 | "type": "path"
26 | },
27 | "original": {
28 | "id": "nixpkgs",
29 | "type": "indirect"
30 | }
31 | },
32 | "root": {
33 | "inputs": {
34 | "flake-utils": "flake-utils",
35 | "nixpkgs": "nixpkgs"
36 | }
37 | },
38 | "systems": {
39 | "locked": {
40 | "lastModified": 1681028828,
41 | "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
42 | "owner": "nix-systems",
43 | "repo": "default",
44 | "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
45 | "type": "github"
46 | },
47 | "original": {
48 | "owner": "nix-systems",
49 | "repo": "default",
50 | "type": "github"
51 | }
52 | }
53 | },
54 | "root": "root",
55 | "version": 7
56 | }
57 |
--------------------------------------------------------------------------------
/flake.nix:
--------------------------------------------------------------------------------
1 | {
2 | outputs = { self, nixpkgs, flake-utils }:
3 | flake-utils.lib.eachDefaultSystem
4 | (system:
5 | let
6 | pkgs = import nixpkgs { inherit system; };
7 | in
8 | {
9 | devShell = pkgs.mkShell
10 | {
11 | buildInputs = [
12 | pkgs.pnpm
13 | ];
14 |
15 | shellHook = ''
16 | echo hi
17 | '';
18 | };
19 | }
20 | );
21 | }
22 |
23 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "audiovisualizer",
3 | "description": "Visualizes music in a cool way",
4 | "private": true,
5 | "version": "10.0.0",
6 | "type": "module",
7 | "scripts": {
8 | "dev": "wxt",
9 | "dev:firefox": "wxt -b firefox",
10 | "build": "wxt build",
11 | "build:firefox": "wxt build -b firefox",
12 | "zip": "wxt zip",
13 | "zip:firefox": "wxt zip -b firefox",
14 | "compile": "tsc --noEmit",
15 | "postinstall": "wxt prepare"
16 | },
17 | "devDependencies": {
18 | "@types/chrome": "^0.0.263",
19 | "typescript": "^5.5.3",
20 | "wxt": "^0.18.14"
21 | },
22 | "dependencies": {
23 | "butterchurn": "^2.6.7",
24 | "butterchurn-presets": "^2.4.7",
25 | "dat.gui": "^0.7.9",
26 | "gl-matrix": "^3.4.3"
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/public/icon/icon128.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/afreakk/ChromeAudioVisualizerExtension/aa67b759a8188197b1e83f0d974d591603662453/public/icon/icon128.png
--------------------------------------------------------------------------------
/public/icon/icon256.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/afreakk/ChromeAudioVisualizerExtension/aa67b759a8188197b1e83f0d974d591603662453/public/icon/icon256.png
--------------------------------------------------------------------------------
/public/icon/icon64.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/afreakk/ChromeAudioVisualizerExtension/aa67b759a8188197b1e83f0d974d591603662453/public/icon/icon64.png
--------------------------------------------------------------------------------
/readme.md:
--------------------------------------------------------------------------------
1 | # Audio visualizer
2 |
3 | ## Dev environment
4 |
5 | ### How to run the project
6 |
7 | Install node modules
8 |
9 | ```bash
10 | pnpm install
11 | ```
12 |
13 | Run the extension in dev mode
14 |
15 | ```bash
16 | pnpm dev
17 | ```
18 |
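19 | ### Build for release
20 |
21 | The scripts in `package.json` also provide production builds and store zips (Chrome shown below; append `:firefox` for the Firefox targets):
22 |
23 | ```bash
24 | pnpm build
25 | pnpm zip
26 | ```
27 |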
--------------------------------------------------------------------------------
/src/scene/events/setSceneEvent.ts:
--------------------------------------------------------------------------------
1 | import { GenericEvent, messageTarget, messageAction } from "@/src/utils/eventMessage";
2 | import { ISceneSetting } from "../sceneSetting";
3 |
4 | export class SetSceneEvent extends GenericEvent {
5 | sceneName: string;
6 | sceneSettings: ISceneSetting;
7 |
8 | constructor(target: messageTarget, action: messageAction, sceneName: string, sceneSettings: ISceneSetting) {
9 | super(target, action);
10 | this.sceneName = sceneName;
11 | this.sceneSettings = sceneSettings;
12 | }
13 |
14 | override toMessage() {
15 | return {
16 | target: this.target,
17 | action: this.action,
18 | sceneName: this.sceneName,
19 | sceneSettings: this.sceneSettings,
20 | };
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/src/scene/events/setSceneSettingsEvent.ts:
--------------------------------------------------------------------------------
1 | import { GenericEvent, messageTarget, messageAction } from "@/src/utils/eventMessage";
2 | import { ISceneSetting } from "../sceneSetting";
3 |
4 | export class SetSceneSettingsEvent extends GenericEvent {
5 | sceneSettings: ISceneSetting;
6 |
7 | constructor(target: messageTarget, action: messageAction, sceneSettings: ISceneSetting) {
8 | super(target, action);
9 | this.sceneSettings = sceneSettings;
10 | }
11 |
12 | override toMessage() {
13 | return {
14 | target: this.target,
15 | action: this.action,
16 | sceneSettings: this.sceneSettings,
17 | };
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/src/scene/scene.ts:
--------------------------------------------------------------------------------
1 | import { IAudioDataDto, streamType } from "@/src/utils/eventMessage";
2 | import { ISceneSetting } from "./sceneSetting";
3 | export interface IScene {
4 | streamType: streamType;
5 | build(): void;
6 | updateSettings(settings: ISceneSetting): void;
7 | updateAudioData(data: IAudioDataDto): void;
8 | render(): void;
9 | clean(): void;
10 | }
11 |
12 |
--------------------------------------------------------------------------------
/src/scene/sceneManager.ts:
--------------------------------------------------------------------------------
1 | import { IScene } from '@/src/scene/scene';
2 | import { ISceneSetting } from '@/src/scene/sceneSetting';
3 | import {
4 | IAudioDataDto,
5 | StartStreamEvent,
6 | messageAction,
7 | messageTarget,
8 | streamType,
9 | } from '@/src/utils/eventMessage';
10 |
11 | export class SceneManager {
12 | private scene: IScene | null = null;
13 | private buildingScene = false;
14 |
15 | updateAudioData(data: IAudioDataDto) {
16 | // Return if there is no scene
17 | if (!this.scene) {
18 | return;
19 | }
20 | // Return if the scene is still being built
21 | if (this.buildingScene) {
22 | return;
23 | }
24 | this.scene.updateAudioData(data);
25 | }
26 | updateSettings(settings: ISceneSetting) {
27 | // Return if there is no scene
28 | if (!this.scene) {
29 | return;
30 | }
31 | // Return if the scene is still being built
32 | if (this.buildingScene) {
33 | return;
34 | }
35 | this.scene.updateSettings(settings);
36 | }
37 |
38 | setScene(scene: IScene, settings: ISceneSetting) {
39 | // Return if the scene is already set
40 | if (this.scene instanceof scene.constructor) {
41 | return;
42 | }
43 |
44 | let newScene = scene;
45 | this.buildingScene = true;
46 | try {
47 | newScene.build();
48 | // Clean up the current scene if there is one
49 | if (this.scene) {
50 | this.scene.clean();
51 | }
52 | // Set the new scene
53 | this.scene = newScene;
54 | } catch (error) {
55 | console.error('Error building scene:', error);
56 | } finally {
57 | const animationWindowCreated = new StartStreamEvent(
58 | messageTarget.offscreen,
59 | messageAction.startStream,
60 | this.scene ? this.scene.streamType : streamType.normal
61 | );
62 | window.sandboxEventMessageHolder.source.postMessage(
63 | animationWindowCreated.toMessage(),
64 | window.sandboxEventMessageHolder.origin
65 | );
66 | this.buildingScene = false;
67 | this.updateSettings(settings);
68 | }
69 | }
70 |
71 | renderScene() {
72 | if (!this.scene) {
73 | // Return if there is no scene
74 | return;
75 | }
76 | // Return if the scene is still being built
77 | if (this.buildingScene) {
78 | return;
79 | }
80 | this.scene.render();
81 | }
82 | }
83 |
--------------------------------------------------------------------------------
/src/scene/sceneNames.ts:
--------------------------------------------------------------------------------
1 | export enum sceneNames {
2 | Butterchurn = 'Butterchurn',
3 | SunFlower = 'SunFlower',
4 | FrostFire = 'FrostFire',
5 | SynthBars = 'SynthBars',
6 | DancingHorizon = 'DancingHorizon',
7 | }
8 |
--------------------------------------------------------------------------------
/src/scene/sceneSetting.ts:
--------------------------------------------------------------------------------
1 | export interface ISceneSetting {
2 | }
3 |
--------------------------------------------------------------------------------
/src/scene/scenes/butterchurn/butterchurn.ts:
--------------------------------------------------------------------------------
1 | import { IScene } from '@/src/scene/scene';
2 | import { ButterchurnSettings, getRandomPreset } from './setting';
3 | import {
4 | ButterChurnAudioDataDto,
5 | IAudioDataDto,
6 | streamType,
7 | } from '@/src/utils/eventMessage';
8 | import butterchurn from 'butterchurn';
9 | import butterchurnPresets from 'butterchurn-presets';
10 | const presets = butterchurnPresets.getPresets();
11 |
12 | export class Butterchurn implements IScene {
13 | private canvas: HTMLCanvasElement | null = null;
14 | private audioData: ButterChurnAudioDataDto;
15 | private visualizer: any = null;
16 | private lastTime: any;
17 | private lastCycleSeconds: number = 0;
18 | private cyclePresetInterval: NodeJS.Timeout | null = null;
19 | constructor() {
20 | this.audioData = new ButterChurnAudioDataDto([], [], []);
21 | }
22 | streamType = streamType.butterChurn;
23 | build(): void {
24 | this.canvas = document.createElement('canvas');
25 | this.canvas.width = window.innerWidth;
26 | this.canvas.height = window.innerHeight;
27 | this.canvas.style.position = 'fixed';
28 | this.canvas.style.left = '0';
29 | this.canvas.style.top = '0';
30 | this.canvas.style.zIndex = '-1';
31 | document.body.insertBefore(this.canvas, document.body.firstChild);
32 | this.lastTime = +Date.now();
33 | this.visualizer = butterchurn.createVisualizer(null, this.canvas, {
34 | width: this.canvas.width,
35 | height: this.canvas.height,
36 | mesh_width: 64,
37 | mesh_height: 48,
38 | pixelRatio: window.devicePixelRatio || 1,
39 | textureRatio: 1,
40 | });
41 | }
42 | updateSettings(settings: ButterchurnSettings): void {
43 | const preset = presets[settings.preset];
44 | this.visualizer.loadPreset(preset, settings.blendLength); // 2nd argument is the number of seconds to blend presets
45 | if (!settings.cyclePresets) {
46 | clearInterval(this.cyclePresetInterval as NodeJS.Timeout);
47 | } else if (
48 | settings.cycleSeconds != this.lastCycleSeconds ||
49 | this.cyclePresetInterval === null
50 | ) {
51 | clearInterval(this.cyclePresetInterval as NodeJS.Timeout);
52 | this.lastCycleSeconds = settings.cycleSeconds;
53 | this.cyclePresetInterval = setInterval(() => {
54 | settings.preset = getRandomPreset();
55 | this.updateSettings(settings);
56 | }, settings.cycleSeconds * 1000);
57 | }
58 | }
59 | updateAudioData(data: ButterChurnAudioDataDto): void {
60 | if (data.timeByteArrayLeft !== undefined) {
61 | this.audioData = data;
62 | }
63 | }
64 | render(): void {
65 | if (this.canvas === null) {
66 | return;
67 | }
68 |
69 | const data = new Uint8Array(this.audioData.timeByteArray);
70 | const dataL = new Uint8Array(this.audioData.timeByteArrayLeft);
71 | const dataR = new Uint8Array(this.audioData.timeByteArrayRight);
72 | const currentTime = +Date.now();
73 | const elapsedTime = (currentTime - this.lastTime) / 1000;
74 | this.canvas.width = window.innerWidth;
75 | this.canvas.height = window.innerHeight;
76 | this.visualizer.setRendererSize(this.canvas.width, this.canvas.height);
77 | this.lastTime = currentTime;
78 | this.visualizer.render({
79 | elapsedTime: elapsedTime,
80 | audioLevels: {
81 | timeByteArray: data,
82 | timeByteArrayL: dataL,
83 | timeByteArrayR: dataR,
84 | },
85 | width: this.canvas.width,
86 | });
87 | }
88 | clean(): void {
89 | if (this.canvas === null) {
90 | return;
91 | }
92 | this.canvas.remove();
93 | }
94 | }
95 |
--------------------------------------------------------------------------------
/src/scene/scenes/butterchurn/setting.ts:
--------------------------------------------------------------------------------
1 | import { ISceneSetting } from '@/src/scene/sceneSetting';
2 | import butterchurnPresets from 'butterchurn-presets';
3 | const presets = butterchurnPresets.getPresets();
4 | export const getRandomPreset = () => {
5 | // Pick a uniformly random preset key (the previous round(...) - 1 could yield index -1).
6 | const presetKeys = Object.keys(presets);
7 | return presetKeys[Math.floor(Math.random() * presetKeys.length)];
8 | };
9 |
10 | export class ButterchurnSettings implements ISceneSetting {
11 | public blendLength: number = 5;
12 | public cycleSeconds: number = 20;
13 | public cyclePresets: boolean = true;
14 | public preset: string = getRandomPreset();
15 | }
16 |
--------------------------------------------------------------------------------
/src/scene/scenes/dancingHorizon/dancingHorizon.ts:
--------------------------------------------------------------------------------
1 | import { IScene } from '@/src/scene/scene';
2 | import { NormalAudioDataDto, streamType } from '@/src/utils/eventMessage';
3 | import { bindAudioDataToTexture, initTexture, initShaderProgram } from '@/src/utils/openGl/openGl';
4 | import { DancingHorizonSetting } from '@/src/scene/scenes/dancingHorizon/setting';
5 | import { hexToRGBNormalized } from '@/src/utils/openGl/colorConverter';
6 |
7 | export class DancingHorizon implements IScene {
8 | private canvas: HTMLCanvasElement | null = null;
9 | private gl: WebGLRenderingContext | null = null;
10 | private audioTexture: WebGLTexture | null = null;
11 | private audioTextureUniformLocation: WebGLUniformLocation | null = null;
12 | private resolutionUniformLocation: WebGLUniformLocation | null = null;
13 | private horizonColorNightUniformLocation: WebGLUniformLocation | null = null;
14 | private horizonColorDayUniformLocation: WebGLUniformLocation | null = null;
15 | private skyColorNightUniformLocation: WebGLUniformLocation | null = null;
16 | private skyColorDayUniformLocation: WebGLUniformLocation | null = null;
17 | private oceanColorNightUniformLocation: WebGLUniformLocation | null = null;
18 | private oceanColorDayUniformLocation: WebGLUniformLocation | null = null;
19 | private moonColorUniformLocation: WebGLUniformLocation | null = null;
20 | private sunColorUniformLocation: WebGLUniformLocation | null = null;
21 | private timeGainUniformLocation: WebGLUniformLocation | null = null;
22 | private noiseGainUniformLocation: WebGLUniformLocation | null = null;
23 | private cloudGainUniformLocation: WebGLUniformLocation | null = null;
24 | private cloudDensityUniformLocation: WebGLUniformLocation | null = null;
25 | private timeUniformLocation: WebGLUniformLocation | null = null;
26 | private vertexBuffer: WebGLBuffer | null = null;
27 | private shaderProgram: WebGLProgram | null = null;
28 | private audioData: NormalAudioDataDto;
29 | constructor() {
30 | this.audioData = new NormalAudioDataDto([]);
31 | }
32 | streamType = streamType.normal;
33 | build(): void {
34 | this.canvas = document.createElement('canvas');
35 | this.canvas.width = window.innerWidth;
36 | this.canvas.height = window.innerHeight;
37 | this.canvas.style.position = 'fixed';
38 | this.canvas.style.left = '0';
39 | this.canvas.style.top = '0';
40 | this.canvas.style.zIndex = '-1';
41 | document.body.insertBefore(this.canvas, document.body.firstChild);
42 | this.gl = this.canvas.getContext('webgl');
43 | if (!this.gl) {
44 | console.error('Unable to initialize WebGL. Your browser may not support it.');
45 | return;
46 | }
47 | const vs =
48 | `
49 | attribute vec4 vertexPosition;
50 | void main() {
51 | gl_Position = vertexPosition;
52 | }
53 | `;
54 |
55 | const fs =
56 | `
57 | precision mediump float;
58 | uniform vec2 resolution;
59 | uniform float time;
60 | uniform vec3 horizonColorNight;
61 | uniform vec3 horizonColorDay;
62 | uniform vec3 skyColorNight;
63 | uniform vec3 skyColorDay;
64 | uniform vec3 oceanColorNight;
65 | uniform vec3 oceanColorDay;
66 | uniform vec3 moonColor;
67 | uniform vec3 sunColor;
68 | uniform float timeGain;
69 | uniform float noiseGain;
70 | uniform float cloudGain;
71 | uniform float cloudDensity;
72 | uniform sampler2D audioTexture;
73 |
74 | float rand(vec2 co) {
75 | return fract(fract(sin(dot(co.xy, vec2(12.9898, 78.233))) * 43758.5453));
76 | }
77 |
78 | float interpolate(float a, float b, float x) {
79 | return mix(a, b, smoothstep(0.0, 1.0, x));
80 | }
81 |
82 | float valueNoise(vec2 p) {
83 | vec2 i = floor(p);
84 | vec2 f = fract(p);
85 |
86 | float a = rand(i);
87 | float b = rand(i + vec2(1.0, 0.0));
88 | float c = rand(i + vec2(0.0, 1.0));
89 | float d = rand(i + vec2(1.0, 1.0));
90 |
91 | // Interpolate along x
92 | float ab = interpolate(a, b, f.x);
93 | float cd = interpolate(c, d, f.x);
94 |
95 | // Interpolate along y
96 | return interpolate(ab, cd, f.y);
97 | }
98 |
99 | float fbm(vec2 position) {
100 | float total = 0.0;
101 | float persistence = 0.5;
102 | float frequency = 1.0;
103 | float amplitude = 1.0;
104 |
105 | for (int i = 0; i < 4; i++) {
106 | total += valueNoise(position * frequency) * amplitude;
107 | frequency *= 2.0;
108 | amplitude *= persistence;
109 | }
110 |
111 | return clamp(total, 0.0, 1.0);
112 | }
113 | float waveEffect(vec2 uv, float time)
114 | {
115 | float wave = sin(uv.x + time) * 0.5 + 0.5;
116 | return wave * cos(uv.y + time) * 0.5 + 0.5;
117 | }
118 | float hash(float n) {
119 | return fract(sin(n) * 43758.5453123);
120 | }
121 | float starIntensity(vec2 st) {
122 | float intensity = 0.0;
123 | float pixelSize = 1920.0;
124 | vec2 uv = floor(st * pixelSize) / pixelSize;
125 | float seed = hash(uv.x * 4321.0 + uv.y * 6789.0);
126 |
127 | if (seed > 0.999) {
128 | intensity = smoothstep(0.0, 1.0, hash(seed));
129 | }
130 | return intensity;
131 | }
132 | vec3 circle(vec3 backColor, vec3 circleColor, vec2 pos, vec2 uvCoords, float size, float audioValue, float waveValue) {
133 | float maxDist = size + audioValue;
134 | float dist = distance(uvCoords, pos + waveValue);
135 |
136 | if (dist < maxDist)
137 | {
138 | return mix(backColor, circleColor, 1.0 - smoothstep(maxDist - size / 3.0, maxDist, dist));
139 | }
140 | return backColor;
141 | }
142 | vec3 illumination(vec3 color, vec2 pos, vec2 uvCoords, float gain, float dayNightValue) {
143 | return (1.0 - smoothstep(0.0, 2.0, distance(uvCoords, pos))) * smoothstep(0.45, 0.55, dayNightValue) * color * gain;
144 | }
145 |
146 | const float PI = 3.141592653589793;
147 |
148 | float horizonLine = 0.4;
149 | void main()
150 | {
151 | vec2 fragCoord = gl_FragCoord.xy;
152 | // Day/night cycle
153 | float timeGainer = time * timeGain;
154 |
155 | float dayNigthCycle = smoothstep(-1.0, 1.0, sin(timeGainer));
156 |
157 | // Coords
158 | vec2 uv = fragCoord/resolution.xy;
159 | float pixelSize = 256.0; // Increase for more pixelation
160 | //uv = floor(uv * pixelSize) / pixelSize;
161 | vec2 centeredCoords = uv * 2.0 - 1.0;
162 |
163 | // Colors
164 | float dayNightValue = smoothstep(0.4, 0.6, dayNigthCycle);
165 | float starValue = starIntensity(uv);
166 | vec3 nightSkyColor = mix(skyColorNight, vec3(1.0), starValue * smoothstep(horizonLine, horizonLine + 0.1, uv.y));
167 | vec3 colorOceanNigth = mix(oceanColorNight, vec3(1.0), starValue * 0.15);
168 |
169 | vec3 colorMoon = moonColor;
170 | vec3 colorSun = sunColor;
171 | vec3 skyColor = mix(nightSkyColor, skyColorDay, dayNigthCycle);
172 | vec3 oceanColor = mix(colorOceanNigth, oceanColorDay, dayNigthCycle);
173 | vec3 horizonColor = mix(horizonColorNight, horizonColorDay, dayNightValue);
174 |
175 | // Audio animation
176 | float audioValue = texture2D(audioTexture, vec2(uv.x, 0.0)).x;
177 |
178 |
179 |
180 | float wave = waveEffect(centeredCoords * vec2(90.0, 67.0), dayNigthCycle);
181 | float reflectWave = waveEffect(centeredCoords * vec2(40.0), dayNigthCycle * 2.0);
182 |
183 | // Moon
184 | float moonSize = 0.15;
185 | vec2 moonPos = vec2(-cos(timeGainer + PI), sin(timeGainer + PI));
186 | float diminishMoonGain = 1.0 - smoothstep(0.4, 1.0, moonPos.y) * 0.3;
187 | float moonReflectSize = moonSize * diminishMoonGain;
188 | moonPos.y += horizonLine - 0.5;
189 | vec2 moonReflectPos = moonPos;
190 | moonReflectPos.y = -(0.5 - horizonLine) * 4.0 - moonPos.y;
191 | moonReflectPos.y *= diminishMoonGain;
192 |
193 |
194 | skyColor = circle(skyColor, colorMoon, moonPos, centeredCoords, moonSize, audioValue * 0.05, wave * 0.012);
195 | oceanColor = mix(oceanColor, circle(oceanColor, colorMoon, moonReflectPos, centeredCoords, moonReflectSize, audioValue * 0.05, reflectWave * 0.012), 0.2);
196 |
197 |
198 | // Sun
199 | float sunSize = 0.2;
200 | vec2 sunPos = vec2(-cos(timeGainer) * 1.0, sin(timeGainer));
201 | float diminishSunGain = 1.0 - smoothstep(0.4, 1.0, sunPos.y) * 0.3;
202 | float sunReflectSize = sunSize * diminishSunGain;
203 | sunPos.y -= (0.5 - horizonLine) * 2.0;
204 | vec2 sunReflectPos = sunPos;
205 | sunReflectPos.y = -(0.5 - horizonLine) * 4.0 - sunPos.y;
206 | sunReflectPos.y *= diminishSunGain;
207 |
208 | skyColor = circle(skyColor, colorSun, sunPos, centeredCoords, sunSize, audioValue * 0.05, wave * 0.012);
209 | oceanColor = mix(oceanColor, circle(oceanColor, colorSun, sunReflectPos, centeredCoords, sunReflectSize , audioValue * 0.05, reflectWave * 0.012), 0.2);
210 |
211 | // Illumination
212 | skyColor += illumination(colorSun, sunPos, centeredCoords, 0.25, dayNigthCycle);
213 | oceanColor += illumination(colorSun, sunReflectPos, centeredCoords, 0.25, dayNigthCycle);
214 | skyColor += illumination(colorMoon, moonPos, centeredCoords, 0.15, 1.0 - dayNigthCycle);
215 | oceanColor += illumination(colorMoon, moonReflectPos, centeredCoords, 0.15, 1.0 - dayNigthCycle);
216 |
217 |
218 | // Clouds
219 | vec2 pos = fragCoord/resolution.xy;
220 | pos.x += time * 0.03;
221 | pos.y = pos.y * 2.0 + time * 0.01;
222 | float n = fbm(pos * cloudDensity);
223 | skyColor = mix(skyColor, vec3(1.0), n * cloudGain);
224 | oceanColor = mix(oceanColor, vec3(1.0), n * cloudGain * 0.33);
225 |
226 | // Horizon
227 | float audioGain = audioValue * 0.2;
228 | float horizonCurve = sin(uv.x * PI) * 0.015;
229 | float horizonAudio = audioGain + horizonCurve ;
230 | float horizonReflectAudio = -audioGain + horizonCurve;
231 | vec3 skyBlend = mix(horizonColor, skyColor, smoothstep(horizonLine + horizonCurve, horizonLine*1.01 + horizonCurve, uv.y));
232 | vec3 oceanBlend = mix(oceanColor, horizonColor, smoothstep(horizonLine*0.99 + horizonCurve, horizonLine + horizonCurve, uv.y));
233 | vec3 color = mix(oceanBlend, skyBlend, smoothstep(horizonLine*0.99 + horizonReflectAudio, horizonLine*1.01 + horizonAudio, uv.y));
234 |
235 |
236 | // Audio flicker
237 | color = mix(color, horizonColor, audioValue * sin(uv.y * 2.0 + timeGainer * 10.0) * noiseGain * 0.5);
238 |
239 | gl_FragColor = vec4(color, 1.0);
240 | }
241 | `;
242 |
243 | // Vertex data for a square
244 | const vertices = new Float32Array([
245 | -1.0, 1.0,
246 | -1.0, -1.0,
247 | 1.0, 1.0,
248 | 1.0, -1.0,
249 | ]);
250 | this.vertexBuffer = this.gl.createBuffer();
251 | this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.vertexBuffer);
252 | this.gl.bufferData(this.gl.ARRAY_BUFFER, vertices, this.gl.STATIC_DRAW);
253 |
254 | this.audioTexture = initTexture(this.gl);
255 |
256 | this.shaderProgram = initShaderProgram(this.gl, vs, fs);
257 | if (!this.shaderProgram) {
258 | console.error('Unable to initialize the shader program');
259 | alert('Unable to initialize the shader program');
260 | return;
261 | }
262 |
263 | this.gl.useProgram(this.shaderProgram);
264 |
265 |
266 | this.audioTextureUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'audioTexture');
267 | const position = this.gl.getAttribLocation(this.shaderProgram, 'vertexPosition');
268 | this.gl.vertexAttribPointer(position, 2, this.gl.FLOAT, false, 0, 0);
269 | this.gl.enableVertexAttribArray(position);
270 | this.resolutionUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'resolution');
271 | this.timeUniformLocation = this.gl.getUniformLocation(this.shaderProgram, "time");
272 | this.horizonColorNightUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'horizonColorNight');
273 | this.horizonColorDayUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'horizonColorDay');
274 | this.skyColorNightUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'skyColorNight');
275 | this.skyColorDayUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'skyColorDay');
276 | this.oceanColorNightUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'oceanColorNight');
277 | this.oceanColorDayUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'oceanColorDay');
278 | this.moonColorUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'moonColor');
279 | this.sunColorUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'sunColor');
280 | this.timeGainUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'timeGain');
281 | this.noiseGainUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'noiseGain');
282 | this.cloudGainUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'cloudGain');
283 | this.cloudDensityUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'cloudDensity');
284 |
285 | }
286 | updateSettings(settings: DancingHorizonSetting): void {
287 | if (!this.gl) {
288 | return;
289 | }
290 | this.gl.useProgram(this.shaderProgram);
291 |
292 | this.gl.uniform1f(this.noiseGainUniformLocation, settings.noiseGain);
293 | this.gl.uniform1f(this.timeGainUniformLocation, settings.timeGain);
294 | this.gl.uniform1f(this.cloudGainUniformLocation, settings.cloudGain);
295 | this.gl.uniform1f(this.cloudDensityUniformLocation, settings.cloudDensity);
296 |
297 | // Colors
298 | const horizonColorNight = hexToRGBNormalized(settings.horizonColorNight);
299 | this.gl.uniform3fv(this.horizonColorNightUniformLocation, horizonColorNight);
300 | const horizonColorDay = hexToRGBNormalized(settings.horizonColorDay);
301 | this.gl.uniform3fv(this.horizonColorDayUniformLocation, horizonColorDay);
302 | const skyColorNight = hexToRGBNormalized(settings.skyColorNight);
303 | this.gl.uniform3fv(this.skyColorNightUniformLocation, skyColorNight);
304 | const skyColorDay = hexToRGBNormalized(settings.skyColorDay);
305 | this.gl.uniform3fv(this.skyColorDayUniformLocation, skyColorDay);
306 | const oceanColorNight = hexToRGBNormalized(settings.oceanColorNight);
307 | this.gl.uniform3fv(this.oceanColorNightUniformLocation, oceanColorNight);
308 | const oceanColorDay = hexToRGBNormalized(settings.oceanColorDay);
309 | this.gl.uniform3fv(this.oceanColorDayUniformLocation, oceanColorDay);
310 | const moonColor = hexToRGBNormalized(settings.moonColor);
311 | this.gl.uniform3fv(this.moonColorUniformLocation, moonColor);
312 | const sunColor = hexToRGBNormalized(settings.sunColor);
313 | this.gl.uniform3fv(this.sunColorUniformLocation, sunColor);
314 | }
315 | updateAudioData(data: NormalAudioDataDto): void {
316 | this.audioData = data;
317 | }
318 | render(): void {
319 | if (this.canvas === null) {
320 | return;
321 | }
322 | if (!this.gl) {
323 | return;
324 | }
325 | // Update canvas size and viewport
326 | this.canvas.width = window.innerWidth;
327 | this.canvas.height = window.innerHeight;
328 | this.gl.clear(this.gl.COLOR_BUFFER_BIT);
329 | this.gl.viewport(0, 0, this.canvas.width, this.canvas.height);
330 |
331 | // Bind texture
332 | this.gl.activeTexture(this.gl.TEXTURE0);
333 | this.gl.bindTexture(this.gl.TEXTURE_2D, this.audioTexture);
334 | this.gl.uniform1i(this.audioTextureUniformLocation, 0);
335 | bindAudioDataToTexture(new Uint8Array(this.audioData.timeByteArray), this.gl);
336 |
337 | // Update resolution
338 | this.gl.uniform2f(this.resolutionUniformLocation, this.gl.canvas.width, this.gl.canvas.height);
339 |
340 | // Update time
341 | const timeInSeconds = performance.now() / 1000.0;
342 | this.gl.uniform1f(this.timeUniformLocation, timeInSeconds);
343 |
344 | // Draw the quad
345 | this.gl.drawArrays(this.gl.TRIANGLE_STRIP, 0, 4);
346 | }
347 | clean(): void {
348 | if (this.canvas === null) {
349 | return;
350 | }
351 | if (!this.gl) {
352 | return;
353 | }
354 |
355 | if (this.vertexBuffer !== null) {
356 | this.gl.deleteBuffer(this.vertexBuffer);
357 | }
358 |
359 | if (this.audioTexture !== null) {
360 | this.gl.deleteTexture(this.audioTexture);
361 | }
362 | if (this.shaderProgram !== null) {
363 | const shaders = this.gl.getAttachedShaders(this.shaderProgram);
364 | if (shaders !== null && shaders.length > 0) {
365 | for (const shader of shaders) {
366 | this.gl.detachShader(this.shaderProgram, shader);
367 | this.gl.deleteShader(shader);
368 | }
369 | }
370 | this.gl.deleteProgram(this.shaderProgram);
371 | }
372 |
373 | this.gl.bindBuffer(this.gl.ARRAY_BUFFER, null);
374 | this.gl.bindTexture(this.gl.TEXTURE_2D, null);
375 |
376 | this.gl.clear(this.gl.COLOR_BUFFER_BIT | this.gl.DEPTH_BUFFER_BIT);
377 | this.canvas.remove();
378 | }
379 | }
380 |
--------------------------------------------------------------------------------
/src/scene/scenes/dancingHorizon/setting.ts:
--------------------------------------------------------------------------------
1 | import { ISceneSetting } from '@/src/scene/sceneSetting';
2 |
3 |
4 | export class DancingHorizonSetting implements ISceneSetting {
5 | public horizonColorNight: string = '#FFDAB9';
6 | public horizonColorDay: string = '#800080';
7 | public skyColorNight: string = '#191970';
8 | public skyColorDay: string = '#1E90FF';
9 | public oceanColorNight: string = '#000033';
10 | public oceanColorDay: string = '#4682B4';
11 | public moonColor: string = '#F0E68C';
12 | public sunColor: string = '#FFD700';
13 | public timeGain: number = 0.1;
14 | public noiseGain: number = 0.1;
15 | public cloudDensity: number = 6.0;
16 | public cloudGain: number = 0.4;
17 | }
18 |
--------------------------------------------------------------------------------
/src/scene/scenes/frostfire/frostfire.ts:
--------------------------------------------------------------------------------
1 | import { IScene } from '@/src/scene/scene';
2 | import { NormalAudioDataDto, streamType } from '@/src/utils/eventMessage';
3 | import { bindAudioDataToTexture, initTexture, initShaderProgram } from '@/src/utils/openGl/openGl';
4 | import { hexToRGBNormalized } from '@/src/utils/openGl/colorConverter';
5 | import { FrostFireSetting } from './settings';
6 |
7 | export class FrostFire implements IScene {
8 | private canvas: HTMLCanvasElement | null = null;
9 | private gl: WebGLRenderingContext | null = null;
10 | private audioTexture: WebGLTexture | null = null;
11 | private audioTextureUniformLocation: WebGLUniformLocation | null = null;
12 | private resolutionUniformLocation: WebGLUniformLocation | null = null;
13 | private timeUniformLocation: WebGLUniformLocation | null = null;
14 |
15 | private numberOfHexagonsUniformLocation: WebGLUniformLocation | null = null;
16 | private heightUniformLocation: WebGLUniformLocation | null = null;
17 | private colorBlendUniformLocation: WebGLUniformLocation | null = null;
18 | private dynamicColorUniformLocation: WebGLUniformLocation | null = null;
19 | private breathingUniformLocation: WebGLUniformLocation | null = null;
20 | private frostColorUniformLocation: WebGLUniformLocation | null = null;
21 | private fireColorUniformLocation: WebGLUniformLocation | null = null;
22 | private blendColorUniformLocation: WebGLUniformLocation | null = null;
23 |
24 | private vertexBuffer: WebGLBuffer | null = null;
25 | private shaderProgram: WebGLProgram | null = null;
26 | private audioData: NormalAudioDataDto;
27 | constructor() {
28 | this.audioData = new NormalAudioDataDto([]);
29 | }
30 | streamType = streamType.normal;
31 | build(): void {
32 | this.canvas = document.createElement('canvas');
33 | this.canvas.width = window.innerWidth;
34 | this.canvas.height = window.innerHeight;
35 | this.canvas.style.position = 'fixed';
36 | this.canvas.style.left = '0';
37 | this.canvas.style.top = '0';
38 | this.canvas.style.zIndex = '-1';
39 | document.body.insertBefore(this.canvas, document.body.firstChild);
40 | this.gl = this.canvas.getContext('webgl');
41 | if (!this.gl) {
42 | console.error('Unable to initialize WebGL. Your browser may not support it.');
43 | return;
44 | }
45 | const vs =
46 | `
47 | attribute vec4 vertexPosition;
48 | void main() {
49 | gl_Position = vertexPosition;
50 | }
51 | `;
52 |
53 | const fs =
54 | `
55 | precision mediump float;
56 | uniform float time;
57 | uniform sampler2D audioTexture;
58 | uniform vec2 resolution;
59 |
60 | uniform vec3 frostColor;
61 | uniform vec3 blendColor;
62 | uniform vec3 fireColor;
63 | uniform float numberOfHexagons;
64 | uniform float height;
65 | uniform float colorBlend;
66 | uniform float dynamicColor;
67 | uniform float breathing;
68 | float hexDist(vec2 p) {
69 | p = abs(p);
70 |
71 | float c = dot(p, normalize(vec2(1.0,1.73)));
72 |
73 | return max(c, p.x);
74 | }
75 |
76 | void main() {
77 | vec2 fragCoord = gl_FragCoord.xy;
78 |
79 |
80 |
81 | float timeGain = time * 2.0;
82 | vec2 uv = (fragCoord-.5*resolution.xy)/resolution.y;
83 |
84 |
85 | uv *= numberOfHexagons;
86 |
87 | vec2 r = vec2(1.0,1.73);
88 | vec2 h = r * 0.5;
89 | vec2 a = mod(uv, r) - h;
90 | vec2 b = mod(uv - h, r) - h;
91 | vec2 gv = b;
92 | if (length(a) < length(b)) {
93 | gv = a;
94 | }
95 |
96 | float size = 0.5 - hexDist(gv);
97 | vec2 id = (uv - gv);
98 | size *= (1.0 - breathing) - abs(sin(id.x * id.y + timeGain)) * breathing;
99 |
100 | id /= numberOfHexagons;
101 | id.x = id.x * 0.5 + 0.5;
102 | id.y = id.y * 0.5 + 0.5;
103 | id.x = smoothstep(0.0, 1.0, id.x);
104 | id.y = smoothstep(0.0, 1.0, id.y);
105 |
106 |
107 | float audio = texture2D(audioTexture, vec2(id.x, 0.0)).x;
108 | float audioInv = texture2D(audioTexture, vec2(1.0-id.x, 0.0)).x;
109 |
110 |
111 | audio *= height;
112 | audioInv = 1.0 - audioInv * height;
113 |
114 |
115 | bool audioHit = false;
116 | bool audioInverseHit = false;
117 |
118 | vec3 fireSideColor = mix(blendColor, fireColor, id.x * colorBlend + 1.0 - colorBlend);
119 | vec3 frostSideColor = mix(blendColor, frostColor, 1.0 - id.x * colorBlend);
120 | vec3 col = mix(frostSideColor, fireSideColor, smoothstep(0.3, 0.7, id.y));
121 |
122 | float colorGain = 0.10;
123 | if (audio > id.y) {
124 | colorGain = max(audio, dynamicColor);
125 | col = frostSideColor;
126 | audioHit = true;
127 | }
128 | if (audioInv < id.y) {
129 | colorGain = max(audioInv, dynamicColor);
130 | col = fireSideColor;
131 | audioInverseHit = true;
132 |
133 | }
134 | if (audioHit && audioInverseHit) {
135 | col = mix(frostSideColor, fireSideColor, smoothstep(0.3, 0.7, id.y));
136 | }
137 | float c = smoothstep(0.01, 0.03, size) * colorGain;
138 | col *= c;
139 |
140 |
141 | gl_FragColor = vec4(col, 1.0);
142 | }
143 | `;
144 |
145 | // Vertex data for a square
146 | const vertices = new Float32Array([
147 | -1.0, 1.0,
148 | -1.0, -1.0,
149 | 1.0, 1.0,
150 | 1.0, -1.0,
151 | ]);
152 | this.vertexBuffer = this.gl.createBuffer();
153 | this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.vertexBuffer);
154 | this.gl.bufferData(this.gl.ARRAY_BUFFER, vertices, this.gl.STATIC_DRAW);
155 |
156 | this.audioTexture = initTexture(this.gl);
157 |
158 | this.shaderProgram = initShaderProgram(this.gl, vs, fs);
159 | if (!this.shaderProgram) {
160 | console.error('Unable to initialize the shader program');
161 | return;
162 | }
163 |
164 | this.gl.useProgram(this.shaderProgram);
165 |
166 |
167 | this.audioTextureUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'audioTexture');
168 | const position = this.gl.getAttribLocation(this.shaderProgram, 'vertexPosition');
169 | this.gl.vertexAttribPointer(position, 2, this.gl.FLOAT, false, 0, 0);
170 | this.gl.enableVertexAttribArray(position);
171 | this.resolutionUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'resolution');
172 | this.timeUniformLocation = this.gl.getUniformLocation(this.shaderProgram, "time");
173 | this.numberOfHexagonsUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'numberOfHexagons');
174 | this.heightUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'height');
175 | this.colorBlendUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'colorBlend');
176 | this.dynamicColorUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'dynamicColor');
177 | this.breathingUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'breathing');
178 | this.frostColorUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'frostColor');
179 | this.fireColorUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'fireColor');
180 | this.blendColorUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'blendColor');
181 |
182 | }
183 | updateSettings(settings: FrostFireSetting): void {
184 | if (!this.gl) {
185 | return;
186 | }
187 | this.gl.useProgram(this.shaderProgram);
188 | this.gl.uniform1f(this.heightUniformLocation, settings.height);
189 | this.gl.uniform1f(this.numberOfHexagonsUniformLocation, settings.numberOfHexagons);
190 | this.gl.uniform1f(this.colorBlendUniformLocation, settings.colorBlend);
191 | this.gl.uniform1f(this.dynamicColorUniformLocation, settings.dynamicColor);
192 | this.gl.uniform1f(this.breathingUniformLocation, settings.breathing);
193 |
194 | const frostColor = hexToRGBNormalized(settings.frostColor);
195 | const fireColor = hexToRGBNormalized(settings.fireColor);
196 | const blendColor = hexToRGBNormalized(settings.blendColor);
197 |
198 | this.gl.uniform3fv(this.frostColorUniformLocation, frostColor);
199 | this.gl.uniform3fv(this.fireColorUniformLocation, fireColor);
200 | this.gl.uniform3fv(this.blendColorUniformLocation, blendColor);
201 | }
202 | updateAudioData(data: NormalAudioDataDto): void {
203 | this.audioData = data;
204 | }
205 | render(): void {
206 | if (this.canvas === null) {
207 | return;
208 | }
209 | if (!this.gl) {
210 | return;
211 | }
212 |         // Update canvas size and viewport
213 |         this.canvas.width = window.innerWidth;
214 |         this.canvas.height = window.innerHeight;
217 | this.gl.clear(this.gl.COLOR_BUFFER_BIT);
218 | this.gl.viewport(0, 0, this.canvas.width, this.canvas.height);
219 |
220 | // Bind texture
221 | this.gl.activeTexture(this.gl.TEXTURE0);
222 | this.gl.bindTexture(this.gl.TEXTURE_2D, this.audioTexture);
223 | this.gl.uniform1i(this.audioTextureUniformLocation, 0);
224 | bindAudioDataToTexture(new Uint8Array(this.audioData.timeByteArray), this.gl);
225 |
226 | // Update resolution
227 | this.gl.uniform2f(this.resolutionUniformLocation, this.gl.canvas.width, this.gl.canvas.height);
228 |
229 |         // Update the time uniform
230 | const timeInSeconds = performance.now() / 1000.0;
231 | this.gl.uniform1f(this.timeUniformLocation, timeInSeconds);
232 |
233 | // Draw the quad
234 | this.gl.drawArrays(this.gl.TRIANGLE_STRIP, 0, 4);
235 | }
236 | clean(): void {
237 | if (this.canvas === null) {
238 | return;
239 | }
240 | if (!this.gl) {
241 | return;
242 | }
243 |
244 | if (this.vertexBuffer !== null) {
245 | this.gl.deleteBuffer(this.vertexBuffer);
246 | }
247 |
248 | if (this.audioTexture !== null) {
249 | this.gl.deleteTexture(this.audioTexture);
250 | }
251 | if (this.shaderProgram !== null) {
252 | const shaders = this.gl.getAttachedShaders(this.shaderProgram);
253 | if (shaders !== null && shaders.length > 0) {
254 | for (const shader of shaders) {
255 | this.gl.detachShader(this.shaderProgram, shader);
256 | this.gl.deleteShader(shader);
257 | }
258 | }
259 | this.gl.deleteProgram(this.shaderProgram);
260 | }
261 |
262 | this.gl.bindBuffer(this.gl.ARRAY_BUFFER, null);
263 | this.gl.bindTexture(this.gl.TEXTURE_2D, null);
264 |
265 | this.gl.clear(this.gl.COLOR_BUFFER_BIT | this.gl.DEPTH_BUFFER_BIT);
266 | this.canvas.remove();
267 | }
268 | }
269 |
--------------------------------------------------------------------------------
/src/scene/scenes/frostfire/settings.ts:
--------------------------------------------------------------------------------
1 |
2 | import { ISceneSetting } from '@/src/scene/sceneSetting';
3 | export class FrostFireSetting implements ISceneSetting {
4 | public numberOfHexagons: number = 40.0;
5 | public height: number = 0.55;
6 | public colorBlend: number = 0.7;
7 | public dynamicColor: number = 0.5;
8 | public breathing: number = 0.25;
9 | public frostColor: string = '#0000FF';
10 | public fireColor: string = '#FF0000';
11 | public blendColor: string = '#00FF00';
12 | }
13 |
--------------------------------------------------------------------------------
/src/scene/scenes/sunflower/setting.ts:
--------------------------------------------------------------------------------
1 | import { ISceneSetting } from '@/src/scene/sceneSetting';
2 |
3 | export class SunFlowerSetting implements ISceneSetting {
4 | public innerColor: string = '#FFD700';
5 | public midColor: string = '#FF4500';
6 | public outerColor: string = '#6A5ACD';
7 | public radius: number = 0.2;
8 | public size: number = 0.8;
9 | public innerRadiusGain: number = 0.15;
10 | public midRadiusGain: number = 0.45;
11 | public outerRadiusGain: number = 0.75;
12 | }
13 |
--------------------------------------------------------------------------------
/src/scene/scenes/sunflower/sunflower.ts:
--------------------------------------------------------------------------------
1 | import { IScene } from '@/src/scene/scene';
2 | import { NormalAudioDataDto, streamType } from '@/src/utils/eventMessage';
3 | import { bindAudioDataToTexture, initTexture, initShaderProgram } from '@/src/utils/openGl/openGl';
4 | import { SunFlowerSetting } from './setting';
5 | import { hexToRGBNormalized } from '@/src/utils/openGl/colorConverter';
6 |
7 | export class SunFlower implements IScene {
8 | private canvas: HTMLCanvasElement | null = null;
9 | private gl: WebGLRenderingContext | null = null;
10 | private audioTexture: WebGLTexture | null = null;
11 | private audioTextureUniformLocation: WebGLUniformLocation | null = null;
12 | private resolutionUniformLocation: WebGLUniformLocation | null = null;
13 | private timeUniformLocation: WebGLUniformLocation | null = null;
14 | private radiusUniformLocation: WebGLUniformLocation | null = null;
15 | private sizeUniformLocation: WebGLUniformLocation | null = null;
16 | private innerColorUniformLocation: WebGLUniformLocation | null = null;
17 | private midColorUniformLocation: WebGLUniformLocation | null = null;
18 | private outerColorUniformLocation: WebGLUniformLocation | null = null;
19 | private innerRadiusGainUniformLocation: WebGLUniformLocation | null = null;
20 | private midRadiusGainUniformLocation: WebGLUniformLocation | null = null;
21 | private outerRadiusGainUniformLocation: WebGLUniformLocation | null = null;
22 | private vertexBuffer: WebGLBuffer | null = null;
23 | private shaderProgram: WebGLProgram | null = null;
24 | private audioData: NormalAudioDataDto;
25 | constructor() {
26 | this.audioData = new NormalAudioDataDto([]);
27 | }
28 | streamType = streamType.normal;
29 | build(): void {
30 | this.canvas = document.createElement('canvas');
31 | this.canvas.width = window.innerWidth;
32 | this.canvas.height = window.innerHeight;
33 | this.canvas.style.position = 'fixed';
34 | this.canvas.style.left = '0';
35 | this.canvas.style.top = '0';
36 | this.canvas.style.zIndex = '-1';
37 | document.body.insertBefore(this.canvas, document.body.firstChild);
38 | this.gl = this.canvas.getContext('webgl');
39 | if (!this.gl) {
40 | console.error('Unable to initialize WebGL. Your browser may not support it.');
41 | return;
42 | }
43 | const vs =
44 | `
45 | attribute vec4 vertexPosition;
46 | void main() {
47 | gl_Position = vertexPosition;
48 | }
49 | `;
50 |
51 | const fs =
52 | `
53 | precision mediump float;
54 | uniform vec2 resolution;
55 | uniform vec3 innerColor;
56 | uniform vec3 midColor;
57 | uniform vec3 outerColor;
58 |
59 | uniform float innerRadiusGain;
60 | uniform float midRadiusGain;
61 | uniform float outerRadiusGain;
62 | uniform float radius;
63 | uniform float size;
64 | uniform float time;
65 | uniform sampler2D audioTexture;
66 |
67 | // Parameters
68 | const float PI = 3.141592653589793;
69 | float random(vec2 co) {
70 | return fract(sin(dot(co.xy ,vec2(12.9898,78.233))) * 43758.5453);
71 | }
72 | void main() {
73 | vec2 fragCoord = gl_FragCoord.xy;
74 | vec2 uv = fragCoord.xy / resolution.xy;
75 | uv.y -= 0.1;
76 |
77 | // Center the coordinates
78 | vec2 centeredUV = uv * 2.0 - 1.0;
79 | centeredUV.x *= resolution.x / resolution.y;
80 |
81 | // Flip the Y axis to create symmetry
82 | if (centeredUV.x >= 0.0)
83 | {
84 | centeredUV.y = -centeredUV.y;
85 | }
86 |
87 |             // Noise
88 | float noise = random(uv + time);
89 |
90 | // Convert UV to polar coordinates
91 | float angle = atan(centeredUV.x, centeredUV.y);
92 | if(angle < 0.0) angle += 1.0 * PI;
93 |
94 | float dist = length(centeredUV);
95 | float index = angle / (1.0 * PI);
96 | float audioValue = texture2D(audioTexture, vec2(index, 0.0)).x;
97 | if (audioValue < 0.01)
98 | {
99 | audioValue = 0.01 + sin(noise) * 0.01;
100 | }
101 |
102 |
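   |             // Each ring radius grows from the base radius with the sampled amplitude,
   |             // scaled by its own gain, so louder audio pushes the rings outward.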
103 | float dynamicInnerRadius = radius + audioValue * size * innerRadiusGain;
104 | float dynamicMidRadius = radius + audioValue * size * midRadiusGain;
105 | float dynamicOuterRadius = radius + audioValue * size * outerRadiusGain;
106 | float dynamicEndRadius = radius + audioValue * size;
107 |
108 | vec3 color = mix(midColor, outerColor, smoothstep(dynamicMidRadius, dynamicOuterRadius, dist));
109 | color = mix(innerColor, color, smoothstep(dynamicInnerRadius, dynamicMidRadius, dist));
110 | color *= 1.0 - smoothstep(dynamicMidRadius, dynamicEndRadius, dist);
111 |
112 |             gl_FragColor = vec4(color, 1.0); // Color based on intensity
113 | }
114 | `;
115 |
116 | // Vertex data for a square
117 | const vertices = new Float32Array([
118 | -1.0, 1.0,
119 | -1.0, -1.0,
120 | 1.0, 1.0,
121 | 1.0, -1.0,
122 | ]);
123 | this.vertexBuffer = this.gl.createBuffer();
124 | this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.vertexBuffer);
125 | this.gl.bufferData(this.gl.ARRAY_BUFFER, vertices, this.gl.STATIC_DRAW);
126 |
127 | this.audioTexture = initTexture(this.gl);
128 |
129 | this.shaderProgram = initShaderProgram(this.gl, vs, fs);
130 | if (!this.shaderProgram) {
131 | console.error('Unable to initialize the shader program');
132 | return;
133 | }
134 |
135 | this.gl.useProgram(this.shaderProgram);
136 |
137 |
138 | this.audioTextureUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'audioTexture');
139 | const position = this.gl.getAttribLocation(this.shaderProgram, 'vertexPosition');
140 | this.gl.vertexAttribPointer(position, 2, this.gl.FLOAT, false, 0, 0);
141 | this.gl.enableVertexAttribArray(position);
142 | this.resolutionUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'resolution');
143 | this.timeUniformLocation = this.gl.getUniformLocation(this.shaderProgram, "time");
144 | this.radiusUniformLocation = this.gl.getUniformLocation(this.shaderProgram, "radius");
145 | this.sizeUniformLocation = this.gl.getUniformLocation(this.shaderProgram, "size");
146 | this.innerColorUniformLocation = this.gl.getUniformLocation(this.shaderProgram, "innerColor");
147 | this.midColorUniformLocation = this.gl.getUniformLocation(this.shaderProgram, "midColor");
148 | this.outerColorUniformLocation = this.gl.getUniformLocation(this.shaderProgram, "outerColor");
149 | this.innerRadiusGainUniformLocation = this.gl.getUniformLocation(this.shaderProgram, "innerRadiusGain");
150 | this.midRadiusGainUniformLocation = this.gl.getUniformLocation(this.shaderProgram, "midRadiusGain");
151 | this.outerRadiusGainUniformLocation = this.gl.getUniformLocation(this.shaderProgram, "outerRadiusGain");
152 |
153 | }
154 | updateSettings(settings: SunFlowerSetting): void {
155 | if (!this.gl) {
156 | return;
157 | }
158 | this.gl.useProgram(this.shaderProgram);
159 | this.gl.uniform1f(this.innerRadiusGainUniformLocation, settings.innerRadiusGain);
160 | this.gl.uniform1f(this.midRadiusGainUniformLocation, settings.midRadiusGain);
161 | this.gl.uniform1f(this.outerRadiusGainUniformLocation, settings.outerRadiusGain);
162 | this.gl.uniform1f(this.radiusUniformLocation, settings.radius);
163 | this.gl.uniform1f(this.sizeUniformLocation, settings.size);
164 | const innerColor = hexToRGBNormalized(settings.innerColor);
165 | const midColor = hexToRGBNormalized(settings.midColor);
166 | const outerColor = hexToRGBNormalized(settings.outerColor);
167 |
168 | this.gl.uniform3fv(this.innerColorUniformLocation, innerColor);
169 | this.gl.uniform3fv(this.midColorUniformLocation, midColor);
170 | this.gl.uniform3fv(this.outerColorUniformLocation, outerColor);
171 | }
172 | updateAudioData(data: NormalAudioDataDto): void {
173 | this.audioData = data;
174 | }
175 | render(): void {
176 | if (this.canvas === null) {
177 | return;
178 | }
179 | if (!this.gl) {
180 | return;
181 | }
182 |         // Update canvas size and viewport
183 |         this.canvas.width = window.innerWidth;
184 |         this.canvas.height = window.innerHeight;
187 | this.gl.clear(this.gl.COLOR_BUFFER_BIT);
188 | this.gl.viewport(0, 0, this.canvas.width, this.canvas.height);
189 |
190 | // Bind texture
191 | this.gl.activeTexture(this.gl.TEXTURE0);
192 | this.gl.bindTexture(this.gl.TEXTURE_2D, this.audioTexture);
193 | this.gl.uniform1i(this.audioTextureUniformLocation, 0);
194 | bindAudioDataToTexture(new Uint8Array(this.audioData.timeByteArray), this.gl);
195 |
196 | // Update resolution
197 | this.gl.uniform2f(this.resolutionUniformLocation, this.gl.canvas.width, this.gl.canvas.height);
198 |
199 |         // Update the time uniform
200 | const timeInSeconds = performance.now() / 1000.0;
201 | this.gl.uniform1f(this.timeUniformLocation, timeInSeconds);
202 |
203 | // Draw the quad
204 | this.gl.drawArrays(this.gl.TRIANGLE_STRIP, 0, 4);
205 | }
206 | clean(): void {
207 | if (this.canvas === null) {
208 | return;
209 | }
210 | if (!this.gl) {
211 | return;
212 | }
213 |
214 | if (this.vertexBuffer !== null) {
215 | this.gl.deleteBuffer(this.vertexBuffer);
216 | }
217 |
218 | if (this.audioTexture !== null) {
219 | this.gl.deleteTexture(this.audioTexture);
220 | }
221 | if (this.shaderProgram !== null) {
222 | const shaders = this.gl.getAttachedShaders(this.shaderProgram);
223 | if (shaders !== null && shaders.length > 0) {
224 | for (const shader of shaders) {
225 | this.gl.detachShader(this.shaderProgram, shader);
226 | this.gl.deleteShader(shader);
227 | }
228 | }
229 | this.gl.deleteProgram(this.shaderProgram);
230 | }
231 |
232 | this.gl.bindBuffer(this.gl.ARRAY_BUFFER, null);
233 | this.gl.bindTexture(this.gl.TEXTURE_2D, null);
234 |
235 | this.gl.clear(this.gl.COLOR_BUFFER_BIT | this.gl.DEPTH_BUFFER_BIT);
236 | this.canvas.remove();
237 | }
238 | }
239 |
--------------------------------------------------------------------------------
/src/scene/scenes/synthBars/setting.ts:
--------------------------------------------------------------------------------
1 | import { ISceneSetting } from '@/src/scene/sceneSetting';
2 |
3 |
4 | export class SynthBarsSetting implements ISceneSetting {
5 | public bottomColor: string = "#33FF33";
6 | public topColor: string = "#5555FF";
7 | public numberOfbars: number = 40.0;
8 | public noiseGain: number = 0.5;
9 | }
10 |
--------------------------------------------------------------------------------
/src/scene/scenes/synthBars/synthBars.ts:
--------------------------------------------------------------------------------
1 | import { IScene } from '@/src/scene/scene';
2 | import { NormalAudioDataDto, streamType } from '@/src/utils/eventMessage';
3 | import { bindAudioDataToTexture, initTexture, initShaderProgram } from '@/src/utils/openGl/openGl';
4 | import { SynthBarsSetting } from './setting';
5 | import { hexToRGBNormalized } from '@/src/utils/openGl/colorConverter';
6 |
7 | export class SynthBars implements IScene {
8 | private canvas: HTMLCanvasElement | null = null;
9 | private gl: WebGLRenderingContext | null = null;
10 | private audioTexture: WebGLTexture | null = null;
11 | private audioTextureUniformLocation: WebGLUniformLocation | null = null;
12 | private resolutionUniformLocation: WebGLUniformLocation | null = null;
13 | private timeUniformLocation: WebGLUniformLocation | null = null;
14 | private bottomColorUniformLocation: WebGLUniformLocation | null = null;
15 | private topColorUniformLocation: WebGLUniformLocation | null = null;
16 | private numberOfbarsUniformLocation: WebGLUniformLocation | null = null;
17 | private noiseGainUniformLocation: WebGLUniformLocation | null = null;
18 |
19 | private vertexBuffer: WebGLBuffer | null = null;
20 | private shaderProgram: WebGLProgram | null = null;
21 | private audioData: NormalAudioDataDto;
22 | constructor() {
23 | this.audioData = new NormalAudioDataDto([]);
24 | }
25 | streamType = streamType.normal;
26 | build(): void {
27 | this.canvas = document.createElement('canvas');
28 | this.canvas.width = window.innerWidth;
29 | this.canvas.height = window.innerHeight;
30 | this.canvas.style.position = 'fixed';
31 | this.canvas.style.left = '0';
32 | this.canvas.style.top = '0';
33 | this.canvas.style.zIndex = '-1';
34 | document.body.insertBefore(this.canvas, document.body.firstChild);
35 | this.gl = this.canvas.getContext('webgl');
36 | if (!this.gl) {
37 | console.error('Unable to initialize WebGL. Your browser may not support it.');
38 | return;
39 | }
40 | const vs =
41 | `
42 | attribute vec4 vertexPosition;
43 | void main() {
44 | gl_Position = vertexPosition;
45 | }
46 | `;
47 |
48 | const fs =
49 | `
50 | precision mediump float;
51 | uniform vec2 resolution;
52 | uniform vec3 bottomColor;
53 | uniform vec3 topColor;
54 | uniform float numberOfbars;
55 | uniform float noiseGain;
56 | uniform float time;
57 | uniform sampler2D audioTexture;
58 |
59 | float random(vec2 co) {
60 | return fract(sin(dot(co.xy ,vec2(12.9898,78.233))) * 43758.5453);
61 | }
62 | void main() {
63 | vec2 fragCoord = gl_FragCoord.xy;
64 | vec2 uv = fragCoord.xy / resolution.xy;
65 |
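   |             // Quantize uv into a grid of bars/LED cells; each column samples one value
   |             // from the audio texture and lights the cells below that level.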
66 | float numberOfbarsWidth = floor(numberOfbars * resolution.x / resolution.y * 0.5);
67 | vec2 p;
68 | p.x = floor(uv.x * numberOfbarsWidth) / numberOfbarsWidth;
69 | p.y = floor(uv.y * numberOfbars) / numberOfbars;
70 |
71 | float fft = texture2D(audioTexture, vec2(p.x, 0.0)).x;
72 |
73 | // color
74 | vec3 color = mix(bottomColor, topColor, sqrt(uv.y));
75 |
76 | // mask for bar graph
77 | float mask = (p.y < fft) ? 1.0 : 0.1;
78 |
79 | // led shape
80 | vec2 d = fract((uv - p) * vec2(numberOfbarsWidth, numberOfbars)) - 0.5;
81 | float led = smoothstep(0.5, 0.35, abs(d.x)) * smoothstep(0.5, 0.35, abs(d.y));
82 | vec3 ledColor = led*color*mask;
83 |
84 | // Horizontal line
85 | float lineSpeed = 0.2;
86 | float lineThickness = 0.005;
87 | float linePosition = mod(time * lineSpeed, 1.0);
88 | float distanceFromLine = abs(uv.y - linePosition);
89 | if(distanceFromLine < lineThickness) {
90 | ledColor += 0.1 * noiseGain;
91 | }
92 |
93 | float noise = random(uv + time);
94 | ledColor += noise * 0.12 * noiseGain;
95 |
96 | gl_FragColor = vec4(ledColor, 1.0);
97 | }
98 | `;
99 |
100 | // Vertex data for a square
101 | const vertices = new Float32Array([
102 | -1.0, 1.0,
103 | -1.0, -1.0,
104 | 1.0, 1.0,
105 | 1.0, -1.0,
106 | ]);
107 | this.vertexBuffer = this.gl.createBuffer();
108 | this.gl.bindBuffer(this.gl.ARRAY_BUFFER, this.vertexBuffer);
109 | this.gl.bufferData(this.gl.ARRAY_BUFFER, vertices, this.gl.STATIC_DRAW);
110 |
111 | this.audioTexture = initTexture(this.gl);
112 |
113 | this.shaderProgram = initShaderProgram(this.gl, vs, fs);
114 | if (!this.shaderProgram) {
115 | console.error('Unable to initialize the shader program');
116 | return;
117 | }
118 |
119 | this.gl.useProgram(this.shaderProgram);
120 |
121 |
122 | this.audioTextureUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'audioTexture');
123 | const position = this.gl.getAttribLocation(this.shaderProgram, 'vertexPosition');
124 | this.gl.vertexAttribPointer(position, 2, this.gl.FLOAT, false, 0, 0);
125 | this.gl.enableVertexAttribArray(position);
126 | this.resolutionUniformLocation = this.gl.getUniformLocation(this.shaderProgram, 'resolution');
127 | this.timeUniformLocation = this.gl.getUniformLocation(this.shaderProgram, "time");
128 | this.numberOfbarsUniformLocation = this.gl.getUniformLocation(this.shaderProgram, "numberOfbars");
129 | this.noiseGainUniformLocation = this.gl.getUniformLocation(this.shaderProgram, "noiseGain");
130 | this.bottomColorUniformLocation = this.gl.getUniformLocation(this.shaderProgram, "bottomColor");
131 | this.topColorUniformLocation = this.gl.getUniformLocation(this.shaderProgram, "topColor");
132 |
133 | }
134 | updateSettings(settings: SynthBarsSetting): void {
135 | if (!this.gl) {
136 | return;
137 | }
138 | this.gl.useProgram(this.shaderProgram);
139 | this.gl.uniform1f(this.noiseGainUniformLocation, settings.noiseGain);
140 | this.gl.uniform1f(this.numberOfbarsUniformLocation, settings.numberOfbars);
141 | const bottomColor = hexToRGBNormalized(settings.bottomColor);
142 | const topColor = hexToRGBNormalized(settings.topColor);
143 | this.gl.uniform3fv(this.bottomColorUniformLocation, bottomColor);
144 | this.gl.uniform3fv(this.topColorUniformLocation, topColor);
145 | }
146 | updateAudioData(data: NormalAudioDataDto): void {
147 | this.audioData = data;
148 | }
149 | render(): void {
150 | if (this.canvas === null) {
151 | return;
152 | }
153 | if (!this.gl) {
154 | return;
155 | }
156 | // Update canvas size and viewport
157 | this.canvas.width = window.innerWidth;
158 | this.canvas.height = window.innerHeight;
159 | this.gl.clear(this.gl.COLOR_BUFFER_BIT);
160 | this.gl.viewport(0, 0, this.canvas.width, this.canvas.height);
161 |
162 | // Bind texture
163 | this.gl.activeTexture(this.gl.TEXTURE0);
164 | this.gl.bindTexture(this.gl.TEXTURE_2D, this.audioTexture);
165 | this.gl.uniform1i(this.audioTextureUniformLocation, 0);
166 | bindAudioDataToTexture(new Uint8Array(this.audioData.timeByteArray), this.gl);
167 |
168 | // Update resolution
169 | this.gl.uniform2f(this.resolutionUniformLocation, this.gl.canvas.width, this.gl.canvas.height);
170 |
171 |         // Update the time uniform
172 | const timeInSeconds = performance.now() / 1000.0;
173 | this.gl.uniform1f(this.timeUniformLocation, timeInSeconds);
174 |
175 | // Draw the quad
176 | this.gl.drawArrays(this.gl.TRIANGLE_STRIP, 0, 4);
177 | }
178 | clean(): void {
179 | if (this.canvas === null) {
180 | return;
181 | }
182 | if (!this.gl) {
183 | return;
184 | }
185 |
186 | if (this.vertexBuffer !== null) {
187 | this.gl.deleteBuffer(this.vertexBuffer);
188 | }
189 |
190 | if (this.audioTexture !== null) {
191 | this.gl.deleteTexture(this.audioTexture);
192 | }
193 | if (this.shaderProgram !== null) {
194 | const shaders = this.gl.getAttachedShaders(this.shaderProgram);
195 | if (shaders !== null && shaders.length > 0) {
196 | for (const shader of shaders) {
197 | this.gl.detachShader(this.shaderProgram, shader);
198 | this.gl.deleteShader(shader);
199 | }
200 | }
201 | this.gl.deleteProgram(this.shaderProgram);
202 | }
203 |
204 | this.gl.bindBuffer(this.gl.ARRAY_BUFFER, null);
205 | this.gl.bindTexture(this.gl.TEXTURE_2D, null);
206 |
207 | this.gl.clear(this.gl.COLOR_BUFFER_BIT | this.gl.DEPTH_BUFFER_BIT);
208 | this.canvas.remove();
209 | }
210 | }
211 |
--------------------------------------------------------------------------------
/src/userInterface/settings/events/SettingsWindowEvent.ts:
--------------------------------------------------------------------------------
1 | import { GenericEvent, messageAction, messageTarget } from "@/src/utils/eventMessage";
2 |
3 | export class SettingsWindowEvent extends GenericEvent {
4 |
5 | constructor(target: messageTarget, action: messageAction) {
6 | super(target, action);
7 | }
8 | override toMessage() {
9 | return {
10 | target: this.target,
11 | action: this.action,
12 | };
13 | }
14 | }
15 |
16 |
--------------------------------------------------------------------------------
/src/userInterface/settings/sceneSettings/butterchurnSettings.ts:
--------------------------------------------------------------------------------
1 | import { ButterchurnSettings } from '@/src/scene/scenes/butterchurn/setting';
2 | import { setSceneSettings } from '../settingsManager';
3 | import butterchurnPresets from 'butterchurn-presets';
4 |
5 | export function buildButterchurnSetting(
6 | sceneName: string,
7 | butterchurnSettings: ButterchurnSettings,
8 | settingsFolder: any,
9 | isExternalUi: boolean
10 | ): void {
11 | settingsFolder
12 | .add(
13 | butterchurnSettings,
14 | 'preset',
15 | Object.keys(butterchurnPresets.getPresets())
16 | )
17 | .onChange((value: string) => {
18 | butterchurnSettings.preset = value;
19 | setSceneSettings(butterchurnSettings, sceneName, isExternalUi);
20 | });
21 | settingsFolder
22 | .add(butterchurnSettings, 'blendLength')
23 | .onChange((value: number) => {
24 | butterchurnSettings.blendLength = value;
25 | setSceneSettings(butterchurnSettings, sceneName, isExternalUi);
26 | });
27 | settingsFolder
28 | .add(butterchurnSettings, 'cycleSeconds')
29 | .onChange((value: number) => {
30 | butterchurnSettings.cycleSeconds = value;
31 | setSceneSettings(butterchurnSettings, sceneName, isExternalUi);
32 | });
33 | settingsFolder
34 | .add(butterchurnSettings, 'cyclePresets')
35 | .onChange((value: boolean) => {
36 | butterchurnSettings.cyclePresets = value;
37 | setSceneSettings(butterchurnSettings, sceneName, isExternalUi);
38 | });
39 | }
40 |
--------------------------------------------------------------------------------
/src/userInterface/settings/sceneSettings/dancingHorizonSettings.ts:
--------------------------------------------------------------------------------
1 | import { DancingHorizonSetting } from "@/src/scene/scenes/dancingHorizon/setting";
2 | import { setSceneSettings } from "../settingsManager";
3 | 
4 |
5 | export function dancingHorizonSettings(sceneName: string, dancingHorizonSettings: DancingHorizonSetting, settingsFolder: any, isExternalUi: boolean): void {
6 |
7 | settingsFolder.addColor(dancingHorizonSettings, 'horizonColorNight').onChange((value: string) => {
8 | dancingHorizonSettings.horizonColorNight = value;
9 | setSceneSettings(dancingHorizonSettings, sceneName, isExternalUi);
10 | });
11 | settingsFolder.addColor(dancingHorizonSettings, 'horizonColorDay').onChange((value: string) => {
12 | dancingHorizonSettings.horizonColorDay = value;
13 | setSceneSettings(dancingHorizonSettings, sceneName, isExternalUi);
14 | });
15 | settingsFolder.addColor(dancingHorizonSettings, 'skyColorNight').onChange((value: string) => {
16 | dancingHorizonSettings.skyColorNight = value;
17 | setSceneSettings(dancingHorizonSettings, sceneName, isExternalUi);
18 | });
19 | settingsFolder.addColor(dancingHorizonSettings, 'skyColorDay').onChange((value: string) => {
20 | dancingHorizonSettings.skyColorDay = value;
21 | setSceneSettings(dancingHorizonSettings, sceneName, isExternalUi);
22 | });
23 | settingsFolder.addColor(dancingHorizonSettings, 'oceanColorNight').onChange((value: string) => {
24 | dancingHorizonSettings.oceanColorNight = value;
25 | setSceneSettings(dancingHorizonSettings, sceneName, isExternalUi);
26 | });
27 | settingsFolder.addColor(dancingHorizonSettings, 'oceanColorDay').onChange((value: string) => {
28 | dancingHorizonSettings.oceanColorDay = value;
29 | setSceneSettings(dancingHorizonSettings, sceneName, isExternalUi);
30 | });
31 | settingsFolder.addColor(dancingHorizonSettings, 'moonColor').onChange((value: string) => {
32 | dancingHorizonSettings.moonColor = value;
33 | setSceneSettings(dancingHorizonSettings, sceneName, isExternalUi);
34 | });
35 | settingsFolder.addColor(dancingHorizonSettings, 'sunColor').onChange((value: string) => {
36 | dancingHorizonSettings.sunColor = value;
37 | setSceneSettings(dancingHorizonSettings, sceneName, isExternalUi);
38 | });
39 | settingsFolder.add(dancingHorizonSettings, 'timeGain', 0.1, 1.0).onChange((value: number) => {
40 | dancingHorizonSettings.timeGain = value;
41 | setSceneSettings(dancingHorizonSettings, sceneName, isExternalUi);
42 | });
43 | settingsFolder.add(dancingHorizonSettings, 'noiseGain', 0.0, 1.0).onChange((value: number) => {
44 | dancingHorizonSettings.noiseGain = value;
45 | setSceneSettings(dancingHorizonSettings, sceneName, isExternalUi);
46 | });
47 | settingsFolder.add(dancingHorizonSettings, 'cloudDensity', 0.0, 10.0).onChange((value: number) => {
48 | dancingHorizonSettings.cloudDensity = value;
49 | setSceneSettings(dancingHorizonSettings, sceneName, isExternalUi);
50 | });
51 | settingsFolder.add(dancingHorizonSettings, 'cloudGain', 0.0, 1.0).onChange((value: number) => {
52 | dancingHorizonSettings.cloudGain = value;
53 | setSceneSettings(dancingHorizonSettings, sceneName, isExternalUi);
54 | });
55 | }
56 |
--------------------------------------------------------------------------------
/src/userInterface/settings/sceneSettings/frostfireSettings.ts:
--------------------------------------------------------------------------------
1 | import { setSceneSettings } from "../settingsManager";
2 | import { FrostFireSetting } from "@/src/scene/scenes/frostfire/settings";
3 |
4 | export function frostFireSettings(sceneName: string, frostFireSettings: FrostFireSetting, settingsFolder: any, isExternalUi: boolean): void {
5 |
6 | settingsFolder.addColor(frostFireSettings, 'frostColor').onChange((value: string) => {
7 | frostFireSettings.frostColor = value;
8 | setSceneSettings(frostFireSettings, sceneName, isExternalUi);
9 | });
10 | settingsFolder.addColor(frostFireSettings, 'fireColor').onChange((value: string) => {
11 | frostFireSettings.fireColor = value;
12 | setSceneSettings(frostFireSettings, sceneName, isExternalUi);
13 | });
14 | settingsFolder.addColor(frostFireSettings, 'blendColor').onChange((value: string) => {
15 | frostFireSettings.blendColor = value;
16 | setSceneSettings(frostFireSettings, sceneName, isExternalUi);
17 | });
18 | settingsFolder.add(frostFireSettings, 'numberOfHexagons', 10, 100.0).onChange((value: number) => {
19 | frostFireSettings.numberOfHexagons = value;
20 | setSceneSettings(frostFireSettings, sceneName, isExternalUi);
21 | });
22 | settingsFolder.add(frostFireSettings, 'height', 0.2, 1.0).onChange((value: number) => {
23 | frostFireSettings.height = value;
24 | setSceneSettings(frostFireSettings, sceneName, isExternalUi);
25 | });
26 | settingsFolder.add(frostFireSettings, 'colorBlend', 0.1, 1.0).onChange((value: number) => {
27 | frostFireSettings.colorBlend = value;
28 | setSceneSettings(frostFireSettings, sceneName, isExternalUi);
29 | });
30 | settingsFolder.add(frostFireSettings, 'dynamicColor', 0.3, 1.0).onChange((value: number) => {
31 | frostFireSettings.dynamicColor = value;
32 | setSceneSettings(frostFireSettings, sceneName, isExternalUi);
33 | });
34 | settingsFolder.add(frostFireSettings, 'breathing', 0.0, 0.45).onChange((value: number) => {
35 | frostFireSettings.breathing = value;
36 | setSceneSettings(frostFireSettings, sceneName, isExternalUi);
37 | });
38 | }
39 |
--------------------------------------------------------------------------------
/src/userInterface/settings/sceneSettings/sunflowerSettings.ts:
--------------------------------------------------------------------------------
1 | import { SunFlowerSetting } from "@/src/scene/scenes/sunflower/setting";
2 | import { setSceneSettings } from "../settingsManager";
3 |
4 | export function sunFlowerSettings(sceneName: string, sunFlowerSettings: SunFlowerSetting, settingsFolder: any, isExternalUi: boolean): void {
5 | settingsFolder.addColor(sunFlowerSettings, 'innerColor').onChange((value: string) => {
6 | sunFlowerSettings.innerColor = value;
7 | setSceneSettings(sunFlowerSettings, sceneName, isExternalUi);
8 | });
9 |
10 | settingsFolder.addColor(sunFlowerSettings, 'midColor').onChange((value: string) => {
11 | sunFlowerSettings.midColor = value;
12 | setSceneSettings(sunFlowerSettings, sceneName, isExternalUi);
13 | });
14 | settingsFolder.addColor(sunFlowerSettings, 'outerColor').onChange((value: string) => {
15 | sunFlowerSettings.outerColor = value;
16 | setSceneSettings(sunFlowerSettings, sceneName, isExternalUi);
17 | });
18 | settingsFolder.add(sunFlowerSettings, 'innerRadiusGain', 0.0, 0.3).onChange((value: number) => {
19 | sunFlowerSettings.innerRadiusGain = value;
20 | setSceneSettings(sunFlowerSettings, sceneName, isExternalUi);
21 | });
22 | settingsFolder.add(sunFlowerSettings, 'midRadiusGain', 0.3, 0.6).onChange((value: number) => {
23 | sunFlowerSettings.midRadiusGain = value;
24 | setSceneSettings(sunFlowerSettings, sceneName, isExternalUi);
25 | });
26 | settingsFolder.add(sunFlowerSettings, 'outerRadiusGain', 0.6, 0.9).onChange((value: number) => {
27 | sunFlowerSettings.outerRadiusGain = value;
28 | setSceneSettings(sunFlowerSettings, sceneName, isExternalUi);
29 | });
30 | settingsFolder.add(sunFlowerSettings, 'radius', 0.05, 0.5).onChange((value: number) => {
31 | sunFlowerSettings.radius = value;
32 | setSceneSettings(sunFlowerSettings, sceneName, isExternalUi);
33 | });
34 | settingsFolder.add(sunFlowerSettings, 'size', 0.1, 1.5).onChange((value: number) => {
35 | sunFlowerSettings.size = value;
36 | setSceneSettings(sunFlowerSettings, sceneName, isExternalUi);
37 | });
38 | }
39 |
--------------------------------------------------------------------------------
/src/userInterface/settings/sceneSettings/synthBarSettings.ts:
--------------------------------------------------------------------------------
1 | import { setSceneSettings } from "../settingsManager";
2 | import { SynthBarsSetting } from "@/src/scene/scenes/synthBars/setting";
3 |
4 | export function synthBarSettings(sceneName: string, synthbarSettings: SynthBarsSetting, settingsFolder: any, isExternalUi: boolean): void {
5 | settingsFolder.addColor(synthbarSettings, 'bottomColor').onChange((value: string) => {
6 | synthbarSettings.bottomColor = value;
7 | setSceneSettings(synthbarSettings, sceneName, isExternalUi);
8 | });
9 | settingsFolder.addColor(synthbarSettings, 'topColor').onChange((value: string) => {
10 | synthbarSettings.topColor = value;
11 | setSceneSettings(synthbarSettings, sceneName, isExternalUi);
12 | });
13 |
14 | settingsFolder.add(synthbarSettings, 'noiseGain', 0.0, 1.0).onChange((value: number) => {
15 | synthbarSettings.noiseGain = value;
16 | setSceneSettings(synthbarSettings, sceneName, isExternalUi);
17 | });
18 | settingsFolder.add(synthbarSettings, 'numberOfbars', 5.0, 80.0).onChange((value: number) => {
19 | synthbarSettings.numberOfbars = value;
20 | setSceneSettings(synthbarSettings, sceneName, isExternalUi);
21 | });
22 | }
23 |
--------------------------------------------------------------------------------
/src/userInterface/settings/settingsManager.ts:
--------------------------------------------------------------------------------
1 | import { messageAction, messageTarget } from '@/src/utils/eventMessage';
2 | import { ISceneSetting } from '@/src/scene/sceneSetting';
3 | import { saveSettings } from '@/src/utils/settings';
4 | import { SetSceneSettingsEvent } from '@/src/scene/events/setSceneSettingsEvent';
5 |
6 | export function setSceneSettings(sceneSettings: ISceneSetting, sceneName: string, isExternalUI: boolean): void {
7 | // Store the settings in local storage
8 | saveSettings(sceneName, sceneSettings);
9 |
10 | // Send the settings to the animation
11 | const sceneSettingEventMessage = new SetSceneSettingsEvent(messageTarget.animation, messageAction.setSceneSettings, sceneSettings);
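   |     // The in-page (sandboxed) UI dispatches a DOM CustomEvent, while the external
   |     // settings window routes the same message through chrome.runtime instead.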
12 | if (!isExternalUI) {
13 | const changeSceneEvent = new CustomEvent(messageAction.setSceneSettings, {
14 | detail: { event: sceneSettingEventMessage.toMessage() }
15 | });
16 | window.dispatchEvent(changeSceneEvent);
17 | } else {
18 | chrome.runtime.sendMessage(sceneSettingEventMessage.toMessage());
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/src/userInterface/settings/settingsUserInterface.ts:
--------------------------------------------------------------------------------
1 | import * as dat from 'dat.gui';
2 | import {
3 | GenericEvent,
4 | messageAction,
5 | messageTarget,
6 | } from '@/src/utils/eventMessage';
7 | import { DancingHorizonSetting } from '@/src/scene/scenes/dancingHorizon/setting';
8 | import { SynthBarsSetting } from '@/src/scene/scenes/synthBars/setting';
9 | import { SunFlowerSetting } from '@/src/scene/scenes/sunflower/setting';
10 | import { ISceneSetting } from '@/src/scene/sceneSetting';
11 | import { loadSettings } from '@/src/utils/settings';
12 | import { SetSceneEvent } from '@/src/scene/events/setSceneEvent';
13 | import { sunFlowerSettings } from './sceneSettings/sunflowerSettings';
14 | import { synthBarSettings } from './sceneSettings/synthBarSettings';
15 | import { dancingHorizonSettings } from './sceneSettings/dancingHorizonSettings';
16 | import { setSceneSettings } from './settingsManager';
17 | import { sceneNames } from '@/src/scene/sceneNames';
18 | import { SettingsWindowEvent } from './events/SettingsWindowEvent';
19 | import { FrostFireSetting } from '@/src/scene/scenes/frostfire/settings';
20 | import { frostFireSettings } from './sceneSettings/frostfireSettings';
21 | import { ButterchurnSettings } from '@/src/scene/scenes/butterchurn/setting';
22 | import { buildButterchurnSetting } from './sceneSettings/butterchurnSettings';
23 |
24 | export class SettingsUserInterface {
25 | private gui: dat.GUI | null = null;
26 |     private sceneFolder: dat.GUIFolder | null = null;
27 | private sceneSettingsFolder: dat.GUIFolder | null = null;
28 | private generalSettingsFolder: dat.GUIFolder | null = null;
29 | private isExternalUI: boolean = false;
30 | private sceneNames: string[] = [];
31 |
32 | constructor(isExternalUI: boolean) {
33 | this.isExternalUI = isExternalUI;
34 | this.sceneNames = [];
35 | for (const scene in sceneNames) {
36 | this.sceneNames.push(scene);
37 | }
38 | }
39 | public buildScene() {
40 | this.gui = new dat.GUI();
41 | this.generalSettingsFolder = this.gui.addFolder('General Settings');
42 | if (!this.isExternalUI) {
43 | this.generalSettingsFolder
44 | .add(
45 | {
46 | openInWindow: () => {
47 | const openSettingsWindowEvent =
48 | new SettingsWindowEvent(
49 | messageTarget.background,
50 | messageAction.openSettingsWindow
51 | );
52 |
53 | window.sandboxEventMessageHolder.source.postMessage(
54 | openSettingsWindowEvent.toMessage(),
55 | window.sandboxEventMessageHolder.origin
56 | );
57 | },
58 | },
59 | 'openInWindow'
60 | )
61 | .name('Open settings in Window');
62 | }
63 | this.generalSettingsFolder
64 | .add(
65 | {
66 | toggleFullScreen: () => {
67 | this.toggleFullScreen();
68 | },
69 | },
70 | 'toggleFullScreen'
71 | )
72 | .name('Toggle Fullscreen');
73 | this.generalSettingsFolder.open();
74 |
75 | const selection = {
76 | selectedSceneName: this.sceneNames[0].toString(),
77 | };
78 | this.sceneFolder = this.gui.addFolder('Scenes');
79 | const sceneSelector = this.sceneFolder
80 | .add(selection, 'selectedSceneName', sceneNames)
81 | .name('Select Scene');
82 |
83 | sceneSelector.onChange((selectedSceneName: string) => {
84 | this.setScene(selectedSceneName);
85 | });
86 |
87 | this.sceneFolder.open();
88 |
89 | this.setScene(selection.selectedSceneName);
90 | }
91 | private toggleFullScreen() {
92 | const fullScreenEventMessage = new GenericEvent(
93 | messageTarget.animation,
94 | messageAction.toggleFullScreen
95 | );
96 | if (!this.isExternalUI) {
97 | const fullScreenEvent = new CustomEvent(
98 | messageAction.toggleFullScreen,
99 | {
100 | detail: { event: fullScreenEventMessage.toMessage() },
101 | }
102 | );
103 | window.dispatchEvent(fullScreenEvent);
104 | } else {
105 | chrome.runtime.sendMessage(fullScreenEventMessage.toMessage());
106 | }
107 | }
108 | private setScene(sceneName: string) {
109 | const settings = this.buildSettings(sceneName);
110 | const sceneEventMessage = new SetSceneEvent(
111 | messageTarget.animation,
112 | messageAction.setScene,
113 | sceneName,
114 | settings
115 | );
116 | if (!this.isExternalUI) {
117 | const changeSceneEvent = new CustomEvent(messageAction.setScene, {
118 | detail: { event: sceneEventMessage.toMessage() },
119 | });
120 | window.dispatchEvent(changeSceneEvent);
121 | } else {
122 | chrome.runtime.sendMessage(sceneEventMessage.toMessage());
123 | }
124 | }
125 |
126 | private buildSettings(sceneName: string): ISceneSetting {
127 | // Remove the existing settings folder if it exists
128 | if (this.sceneSettingsFolder) {
129 | this.sceneFolder.removeFolder(this.sceneSettingsFolder);
130 | }
131 |
132 | // Create a new folder for scene-specific settings
133 | this.sceneSettingsFolder = this.sceneFolder.addFolder('Scene Settings');
134 | this.sceneSettingsFolder.open();
135 |
136 | const settings = loadSettings(sceneName);
137 | if (sceneName === sceneNames.SunFlower.toString()) {
138 | let sunFlowerSetting = settings
139 | ? (settings as SunFlowerSetting)
140 | : new SunFlowerSetting();
141 | this.sceneSettingsFolder
142 | .add(
143 | {
144 | reset: () => {
145 | sunFlowerSetting = new SunFlowerSetting();
146 | setSceneSettings(
147 | sunFlowerSetting,
148 | sceneName,
149 | this.isExternalUI
150 | );
151 | this.buildSettings(sceneName);
152 | },
153 | },
154 | 'reset'
155 | )
156 | .name('Reset Settings');
157 |
158 | sunFlowerSettings(
159 | sceneName,
160 | sunFlowerSetting,
161 | this.sceneSettingsFolder,
162 | this.isExternalUI
163 | );
164 |
165 | return sunFlowerSetting;
166 | } else if (sceneName === sceneNames.SynthBars.toString()) {
167 | let synthbarSetting = settings
168 | ? (settings as SynthBarsSetting)
169 | : new SynthBarsSetting();
170 | this.sceneSettingsFolder
171 | .add(
172 | {
173 | reset: () => {
174 | synthbarSetting = new SynthBarsSetting();
175 | setSceneSettings(
176 | synthbarSetting,
177 | sceneName,
178 | this.isExternalUI
179 | );
180 | this.buildSettings(sceneName);
181 | },
182 | },
183 | 'reset'
184 | )
185 | .name('Reset Settings');
186 | synthBarSettings(
187 | sceneName,
188 | synthbarSetting,
189 | this.sceneSettingsFolder,
190 | this.isExternalUI
191 | );
192 |
193 | return synthbarSetting;
194 | } else if (sceneName === sceneNames.FrostFire.toString()) {
195 | let frostFireSetting = settings
196 | ? (settings as FrostFireSetting)
197 | : new FrostFireSetting();
198 | this.sceneSettingsFolder
199 | .add(
200 | {
201 | reset: () => {
202 | frostFireSetting = new FrostFireSetting();
203 | setSceneSettings(
204 | frostFireSetting,
205 | sceneName,
206 | this.isExternalUI
207 | );
208 | this.buildSettings(sceneName);
209 | },
210 | },
211 | 'reset'
212 | )
213 | .name('Reset Settings');
214 | frostFireSettings(
215 | sceneName,
216 | frostFireSetting,
217 | this.sceneSettingsFolder,
218 | this.isExternalUI
219 | );
220 |
221 | return frostFireSetting;
222 | } else if (sceneName === sceneNames.DancingHorizon.toString()) {
223 | let dancingHorizonSetting = settings
224 | ? (settings as DancingHorizonSetting)
225 | : new DancingHorizonSetting();
226 | setSceneSettings(
227 | dancingHorizonSetting,
228 | sceneName,
229 | this.isExternalUI
230 | );
231 |
232 | this.sceneSettingsFolder
233 | .add(
234 | {
235 | reset: () => {
236 | dancingHorizonSetting = new DancingHorizonSetting();
237 | setSceneSettings(
238 | dancingHorizonSetting,
239 | sceneName,
240 | this.isExternalUI
241 | );
242 | this.buildSettings(sceneName);
243 | },
244 | },
245 | 'reset'
246 | )
247 | .name('Reset Settings');
248 | dancingHorizonSettings(
249 | sceneName,
250 | dancingHorizonSetting,
251 | this.sceneSettingsFolder,
252 | this.isExternalUI
253 | );
254 |
255 | return dancingHorizonSetting;
256 | } else if (sceneName === sceneNames.Butterchurn.toString()) {
257 | let butterChurnSettings = settings
258 | ? (settings as ButterchurnSettings)
259 | : new ButterchurnSettings();
260 | setSceneSettings(butterChurnSettings, sceneName, this.isExternalUI);
261 |
262 | this.sceneSettingsFolder
263 | .add(
264 | {
265 | reset: () => {
266 | butterChurnSettings = new ButterchurnSettings();
267 | setSceneSettings(
268 | butterChurnSettings,
269 | sceneName,
270 | this.isExternalUI
271 | );
272 | this.buildSettings(sceneName);
273 | },
274 | },
275 | 'reset'
276 | )
277 | .name('Reset Settings');
278 | buildButterchurnSetting(
279 | sceneName,
280 | butterChurnSettings,
281 | this.sceneSettingsFolder,
282 | this.isExternalUI
283 | );
284 |
285 | return butterChurnSettings;
286 | }
287 | return {};
288 | }
289 |
290 | public destroy(): void {
291 | // Proper cleanup of GUI components
292 | if (this.sceneSettingsFolder !== null) {
293 | this.sceneFolder.removeFolder(this.sceneSettingsFolder);
294 | this.sceneSettingsFolder = null;
295 | }
296 | if (this.sceneFolder !== null) {
297 | this.gui.removeFolder(this.sceneFolder);
298 | this.sceneFolder = null;
299 | }
300 | if (this.generalSettingsFolder !== null) {
301 | this.gui.removeFolder(this.generalSettingsFolder);
302 | this.generalSettingsFolder = null;
303 | }
304 |
305 | // Destroy the main GUI
306 | if (this.gui !== null) {
307 | this.gui.destroy();
308 | this.gui = null;
309 | }
310 | }
311 | }
312 |
--------------------------------------------------------------------------------
/src/utils/eventMessage.ts:
--------------------------------------------------------------------------------
1 | export enum messageTarget {
2 | background = "background",
3 | settings = "settings",
4 | offscreen = "offscreen",
5 | animation = "animation",
6 | }
7 | export enum messageAction {
8 | initiateStream = "initiate-stream",
9 | startStream = "start-stream",
10 | stopStream = "stop-stream",
11 | updateAudioData = "start-animation",
12 | openSettingsWindow = "open-settings-window",
13 | closeSettingsWindow = "close-settings-window",
14 | setScene = "set-scene",
15 | setSceneSettings = "set-scene-settings",
16 | toggleFullScreen = "toggle-full-screen",
17 | }
18 | export enum streamType {
19 | butterChurn = "butterChurn",
20 | normal = "singleChannel",
21 | }
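   | // "singleChannel" scenes consume a single mono waveform; butterchurn scenes also
   | // receive per-channel data (see ButterChurnAudioDataDto below).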
22 | export interface IAudioDataDto {
23 | timeByteArray: number[];
24 | }
25 | export class NormalAudioDataDto implements IAudioDataDto {
26 | timeByteArray: number[];
27 | constructor(timeByteArray: number[]) {
28 | this.timeByteArray = timeByteArray;
29 | }
30 | }
31 | export class ButterChurnAudioDataDto implements IAudioDataDto {
32 | timeByteArray: number[];
33 | timeByteArrayLeft: number[];
34 | timeByteArrayRight: number[];
35 | constructor(timeByteArray: number[], dataLeft: number[], dataRight: number[]) {
36 | this.timeByteArray = timeByteArray;
37 | this.timeByteArrayLeft = dataLeft;
38 | this.timeByteArrayRight = dataRight;
39 | }
40 | }
41 | export class GenericEvent {
42 | target: messageTarget;
43 | action: messageAction;
44 |
45 | constructor(target: messageTarget, action: messageAction) {
46 | this.target = target;
47 | this.action = action;
48 | }
49 |
50 | toMessage() {
51 | return {
52 | target: this.target,
53 | action: this.action,
54 | };
55 | }
56 | }
57 | export class AudioDataEvent extends GenericEvent {
58 | audioData: IAudioDataDto;
59 |
60 | constructor(target: messageTarget, action: messageAction, audioData: IAudioDataDto) {
61 | super(target, action);
62 | this.audioData = audioData;
63 | }
64 |
65 | override toMessage() {
66 | return {
67 | target: this.target,
68 | action: this.action,
69 | audioData: this.audioData,
70 | };
71 | }
72 | }
73 | export class InitiateStreamEvent extends GenericEvent {
74 | streamId: string;
75 |
76 | constructor(target: messageTarget, action: messageAction, streamId: string) {
77 | super(target, action);
78 | this.streamId = streamId;
79 | }
80 |
81 | override toMessage() {
82 | return {
83 | target: this.target,
84 | action: this.action,
85 | streamId: this.streamId,
86 | };
87 | }
88 | }
89 | export class StartStreamEvent extends GenericEvent {
90 | streamType: streamType;
91 |
92 | constructor(target: messageTarget, action: messageAction, streamType: streamType) {
93 | super(target, action);
94 | this.streamType = streamType;
95 | }
96 | override toMessage() {
97 | return {
98 | target: this.target,
99 | action: this.action,
100 | streamType: this.streamType,
101 | };
102 | }
103 | }
104 |
105 |
--------------------------------------------------------------------------------
/src/utils/openGl/colorConverter.ts:
--------------------------------------------------------------------------------
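   | // Converts a "#RRGGBB" hex string to normalized RGB floats, e.g. "#FF0000" -> [1.0, 0.0, 0.0].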
1 | export function hexToRGBNormalized(color: string): Float32Array {
2 | if (typeof color !== 'string') {
3 | throw new Error("Invalid input: HEX color must be a string.");
4 | }
5 | let hex = color.replace(/^#/, '');
6 | if (hex.length !== 6) {
7 | throw new Error("Invalid HEX color.");
8 | }
9 |
10 | const r = parseInt(hex.substring(0, 2), 16) / 255;
11 | const g = parseInt(hex.substring(2, 4), 16) / 255;
12 | const b = parseInt(hex.substring(4, 6), 16) / 255;
13 | return new Float32Array([r, g, b]);
14 | }
15 |
--------------------------------------------------------------------------------
/src/utils/openGl/openGl.ts:
--------------------------------------------------------------------------------
1 |
2 | export function bindAudioDataToTexture(audioData: Uint8Array, gl: WebGLRenderingContext) {
3 |     // Make sure the data is a typed-array view; wrap plain arrays in a Uint8Array
4 | let data = audioData;
5 | if (!ArrayBuffer.isView(data)) {
6 | data = new Uint8Array(data);
7 | }
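   |     // Upload the samples as a data.length x 1 single-channel (LUMINANCE) texture so
   |     // shaders can read amplitudes via texture2D(audioTexture, vec2(index, 0.0)).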
8 | gl.texImage2D(gl.TEXTURE_2D, 0, gl.LUMINANCE, data.length, 1, 0, gl.LUMINANCE, gl.UNSIGNED_BYTE, data);
9 | }
10 | export function initTexture(gl: WebGLRenderingContext) {
11 | let tex = gl.createTexture();
12 | gl.bindTexture(gl.TEXTURE_2D, tex);
13 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
14 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
15 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
16 | gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
17 | return tex;
18 | }
19 | function loadShader(gl: WebGLRenderingContext, type: number, source: string) {
20 | const shader = gl.createShader(type);
21 | if (shader === null) {
22 | console.error('Unable to initialize the shader: shader is null');
23 | return null;
24 | }
25 | gl.shaderSource(shader, source);
26 | gl.compileShader(shader);
27 |
28 | // Check if the shader compiled successfully
29 |     if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
30 |         console.error('An error occurred compiling the shaders: ' + gl.getShaderInfoLog(shader));
31 |         gl.deleteShader(shader);
32 | return null;
33 | }
34 |
35 | return shader;
36 | }
37 |
38 | export function initShaderProgram(gl: WebGLRenderingContext, vs: string, fs: string) {
39 |
40 | const vertexShader = loadShader(gl, gl.VERTEX_SHADER, vs);
41 | const fragmentShader = loadShader(gl, gl.FRAGMENT_SHADER, fs);
42 |
43 | // Create the shader program
44 | const shaderProgram = gl.createProgram();
45 | if (shaderProgram === null) {
46 | console.error('Unable to initialize the shader program: shaderProgram is null');
47 | return null;
48 | }
49 | if (vertexShader === null) {
50 | console.error('Unable to initialize the shader program: vertexShader is null');
51 | return null;
52 | }
53 | if (fragmentShader === null) {
54 | console.error('Unable to initialize the shader program: fragmentShader is null');
55 | return null;
56 | }
57 | gl.attachShader(shaderProgram, vertexShader);
58 | gl.attachShader(shaderProgram, fragmentShader);
59 | gl.linkProgram(shaderProgram);
60 |
61 | // Check if the program was linked successfully
62 | if (!gl.getProgramParameter(shaderProgram, gl.LINK_STATUS)) {
63 | console.error('Unable to initialize the shader program: ' + gl.getProgramInfoLog(shaderProgram));
64 | alert('Unable to initialize the shader program: ' + gl.getProgramInfoLog(shaderProgram));
65 | return null;
66 | }
67 |
68 | return shaderProgram;
69 | }
70 |
--------------------------------------------------------------------------------
/src/utils/settings.ts:
--------------------------------------------------------------------------------
1 | function keyGenerator(name: string): string {
2 | return `audio-visualizer-settings-${name}`;
3 | }
4 |
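   | // Settings are cached in-memory, keyed by settings name; the localStorage write
   | // below is commented out, so values do not persist across page reloads.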
5 | let x: Record<string, string> = {};
6 | export function loadSettings<T>(settingsName: string): T | null {
7 | const settingsJson = x[settingsName];
8 | if (settingsJson === undefined) {
9 | return null;
10 | }
11 | return JSON.parse(settingsJson) as T;
12 | }
13 | export function saveSettings<T>(settingsName: string, settings: T): void {
14 | x[settingsName] = JSON.stringify(settings);
15 | // localStorage.setItem(keyGenerator(settingsName), JSON.stringify(settings));
16 | }
17 | export function checkClassType<T>(
18 | object: any,
19 | constructor: { new (...args: any[]): T }
20 | ): object is T {
21 | return object instanceof constructor;
22 | }
23 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "./.wxt/tsconfig.json",
3 | "compilerOptions": {
4 | "types": ["chrome"]
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/wxt.config.ts:
--------------------------------------------------------------------------------
1 | import { defineConfig } from 'wxt';
2 |
3 | export default defineConfig({
4 | runner: {
5 | startUrls: ['https://soundcloud.com/ferzrrn/sets/synthwave'],
6 | },
7 | manifest: {
8 | permissions: ['tabCapture', 'offscreen'],
9 | action: {
10 | default_icon: 'icon/icon256.png',
11 | },
12 | },
13 | });
14 |
--------------------------------------------------------------------------------