├── env.d.ts ├── images ├── screenshot1.png ├── screenshot2.png ├── screenshot3.png └── screenshot4.png ├── components ├── background │ ├── renderer │ │ ├── index.ts │ │ ├── BaseBackgroundRender.ts │ │ ├── UIBackgroundRender.ts │ │ ├── WebWorkerBackgroundRender.ts │ │ └── webWorkerBackground.worker.ts │ └── mobile │ │ └── index.ts ├── lyrics │ ├── ILyricLine.ts │ └── InterludeDots.ts ├── visualizer │ ├── AudioProcessor.ts │ ├── Visualizer.tsx │ └── VisualizerWorker.ts ├── GeminiButton.tsx ├── Toast.tsx ├── ImportMusicDialog.tsx ├── MediaSessionController.tsx ├── AboutDialog.tsx ├── TopBar.tsx ├── KeyboardShortcuts.tsx ├── SmartImage.tsx └── FluidBackground.tsx ├── metadata.json ├── .gitignore ├── index.tsx ├── tsconfig.json ├── package.json ├── vite.config.ts ├── hooks ├── useToast.ts ├── useKeyboardScope.ts ├── useSearchProvider.ts ├── useQueueSearchProvider.ts ├── useNeteaseSearchProvider.ts ├── useCanvasRenderer.ts ├── useSearchModal.ts └── usePlaylist.ts ├── LICENSE ├── .github └── workflows │ └── deploy.yml ├── services ├── geminiService.ts ├── keyboardRegistry.ts ├── spring.ts ├── lyrics │ ├── utils.ts │ ├── types.ts │ ├── index.ts │ ├── translation.ts │ ├── parser.ts │ ├── lrc.ts │ └── netease.ts ├── cache.ts ├── springSystem.ts ├── utils.ts └── lyricsService.ts ├── types.ts ├── README.md └── index.html /env.d.ts: -------------------------------------------------------------------------------- 1 | declare module "*?worker&url" { 2 | const url: string; 3 | export default url; 4 | } 5 | -------------------------------------------------------------------------------- /images/screenshot1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dingyi222666/aura-music/HEAD/images/screenshot1.png -------------------------------------------------------------------------------- /images/screenshot2.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/dingyi222666/aura-music/HEAD/images/screenshot2.png -------------------------------------------------------------------------------- /images/screenshot3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dingyi222666/aura-music/HEAD/images/screenshot3.png -------------------------------------------------------------------------------- /images/screenshot4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dingyi222666/aura-music/HEAD/images/screenshot4.png -------------------------------------------------------------------------------- /components/background/renderer/index.ts: -------------------------------------------------------------------------------- 1 | export { BaseBackgroundRender } from "./BaseBackgroundRender"; 2 | export { UIBackgroundRender } from "./UIBackgroundRender"; 3 | export { WebWorkerBackgroundRender } from "./WebWorkerBackgroundRender"; 4 | -------------------------------------------------------------------------------- /metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Aura Music", 3 | "description": "A high-fidelity, immersive music player inspired by Apple Music. 
Features fluid animations, multi-language synchronized lyrics (original + translation), word-level timing support, and Gemini-powered song analysis.", 4 | "requestFramePermissions": [] 5 | } -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | pnpm-debug.log* 8 | lerna-debug.log* 9 | 10 | node_modules 11 | dist 12 | dist-ssr 13 | *.local 14 | 15 | # Editor directories and files 16 | .vscode/* 17 | !.vscode/extensions.json 18 | .idea 19 | .DS_Store 20 | *.suo 21 | *.ntvs* 22 | *.njsproj 23 | *.sln 24 | *.sw? 25 | 26 | tests/* 27 | bun.lock 28 | -------------------------------------------------------------------------------- /components/lyrics/ILyricLine.ts: -------------------------------------------------------------------------------- 1 | export interface ILyricLine { 2 | draw(currentTime: number, isActive: boolean, isHovered: boolean): void; 3 | measure(containerWidth: number, suggestedTranslationWidth?: number): void; 4 | getHeight(): number; 5 | getCurrentHeight(): number; 6 | getCanvas(): OffscreenCanvas | HTMLCanvasElement; 7 | getLogicalWidth(): number; 8 | getLogicalHeight(): number; 9 | getTextWidth(): number; 10 | isInterlude(): boolean; 11 | } 12 | -------------------------------------------------------------------------------- /index.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import ReactDOM from 'react-dom/client'; 3 | import App from './App'; 4 | import { ToastProvider } from './components/Toast'; 5 | 6 | const rootElement = document.getElementById('root'); 7 | if (!rootElement) { 8 | throw new Error("Could not find root element to mount to"); 9 | } 10 | 11 | const root = ReactDOM.createRoot(rootElement); 12 | root.render( 13 | 14 | 15 | 16 | 17 | , 18 | ); 19 | 
-------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2022", 4 | "experimentalDecorators": true, 5 | "useDefineForClassFields": false, 6 | "module": "ESNext", 7 | "lib": [ 8 | "ES2022", 9 | "DOM", 10 | "DOM.Iterable" 11 | ], 12 | "skipLibCheck": true, 13 | "types": [ 14 | "node" 15 | ], 16 | "moduleResolution": "bundler", 17 | "isolatedModules": true, 18 | "moduleDetection": "force", 19 | "allowJs": true, 20 | "jsx": "react-jsx", 21 | "paths": { 22 | "@/*": [ 23 | "./*" 24 | ] 25 | }, 26 | "allowImportingTsExtensions": true, 27 | "noEmit": true 28 | } 29 | } -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "aura-music", 3 | "private": true, 4 | "version": "0.0.0", 5 | "type": "module", 6 | "scripts": { 7 | "dev": "vite", 8 | "build": "vite build", 9 | "preview": "vite preview", 10 | "test": "bun test tests" 11 | }, 12 | "dependencies": { 13 | "@google/genai": "^1.30.0", 14 | "@react-spring/web": "^10.0.3", 15 | "react": "^19.2.0", 16 | "react-dom": "^19.2.0" 17 | }, 18 | "devDependencies": { 19 | "@types/bun": "^1.3.3", 20 | "@types/node": "^22.14.0", 21 | "@types/react": "^19.2.6", 22 | "@vitejs/plugin-react": "^5.0.0", 23 | "typescript": "~5.8.2", 24 | "vite": "^6.2.0" 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /vite.config.ts: -------------------------------------------------------------------------------- 1 | import path from 'path'; 2 | import { defineConfig, loadEnv } from 'vite'; 3 | import react from '@vitejs/plugin-react'; 4 | 5 | export default defineConfig(({ mode }) => { 6 | const env = loadEnv(mode, '.', ''); 7 | const productionBase = env.VITE_BASE_PATH || '/aura-music/'; 8 | return 
{ 9 | base: mode === 'production' ? productionBase : '/', 10 | server: { 11 | port: 3000, 12 | host: '0.0.0.0', 13 | }, 14 | plugins: [react()], 15 | define: { 16 | 'process.env.API_KEY': JSON.stringify(env.GEMINI_API_KEY), 17 | 'process.env.GEMINI_API_KEY': JSON.stringify(env.GEMINI_API_KEY), 18 | }, 19 | resolve: { 20 | alias: { 21 | '@': path.resolve(__dirname, '.'), 22 | }, 23 | }, 24 | }; 25 | }); 26 | -------------------------------------------------------------------------------- /hooks/useToast.ts: -------------------------------------------------------------------------------- 1 | import { createContext, useContext } from 'react'; 2 | 3 | export type ToastType = 'success' | 'error' | 'info'; 4 | 5 | export interface Toast { 6 | id: string; 7 | message: string; 8 | type: ToastType; 9 | duration?: number; 10 | } 11 | 12 | interface ToastContextType { 13 | toast: { 14 | success: (message: string, duration?: number) => void; 15 | error: (message: string, duration?: number) => void; 16 | info: (message: string, duration?: number) => void; 17 | }; 18 | } 19 | 20 | export const ToastContext = createContext(undefined); 21 | 22 | export const useToast = () => { 23 | const context = useContext(ToastContext); 24 | if (!context) { 25 | throw new Error('useToast must be used within a ToastProvider'); 26 | } 27 | return context; 28 | }; 29 | -------------------------------------------------------------------------------- /hooks/useKeyboardScope.ts: -------------------------------------------------------------------------------- 1 | import { useEffect, useId, useRef } from "react"; 2 | import { keyboardRegistry } from "../services/keyboardRegistry"; 3 | 4 | export const useKeyboardScope = ( 5 | handler: (e: KeyboardEvent) => boolean | void, 6 | priority: number, 7 | active: boolean = true, 8 | ) => { 9 | const id = useId(); 10 | // Keep handler ref to avoid re-registering on every render if handler identity changes 11 | const handlerRef = useRef(handler); 12 | 13 | 
useEffect(() => { 14 | handlerRef.current = handler; 15 | }, [handler]); 16 | 17 | useEffect(() => { 18 | if (!active) return; 19 | 20 | const wrappedHandler = (e: KeyboardEvent) => { 21 | return handlerRef.current(e); 22 | }; 23 | 24 | keyboardRegistry.register(id, wrappedHandler, priority); 25 | return () => keyboardRegistry.unregister(id); 26 | }, [priority, active, id]); 27 | }; 28 | -------------------------------------------------------------------------------- /hooks/useSearchProvider.ts: -------------------------------------------------------------------------------- 1 | import { Song } from "../types"; 2 | import { NeteaseTrackInfo } from "../services/lyricsService"; 3 | 4 | export type SearchResultItem = Song | NeteaseTrackInfo; 5 | 6 | export interface SearchProvider { 7 | // Unique identifier for this provider 8 | id: string; 9 | // Display name for the tab 10 | label: string; 11 | // Whether this provider requires explicit search action (e.g., pressing Enter) 12 | requiresExplicitSearch: boolean; 13 | // Search function - returns results based on query 14 | search: (query: string, options?: any) => Promise; 15 | // Load more results (for pagination) 16 | loadMore?: (query: string, offset: number, limit: number) => Promise; 17 | // Whether there are more results to load 18 | hasMore?: boolean; 19 | // Loading state 20 | isLoading?: boolean; 21 | } 22 | 23 | export interface UseSearchProviderResult { 24 | providers: SearchProvider[]; 25 | activeProvider: SearchProvider; 26 | setActiveProviderId: (id: string) => void; 27 | } 28 | -------------------------------------------------------------------------------- /hooks/useQueueSearchProvider.ts: -------------------------------------------------------------------------------- 1 | import { useMemo } from "react"; 2 | import { Song } from "../types"; 3 | import { SearchProvider } from "./useSearchProvider"; 4 | 5 | interface UseQueueSearchProviderParams { 6 | queue: Song[]; 7 | } 8 | 9 | export const 
useQueueSearchProvider = ({ 10 | queue, 11 | }: UseQueueSearchProviderParams): SearchProvider => { 12 | const provider: SearchProvider = useMemo( 13 | () => ({ 14 | id: "queue", 15 | label: "Current Queue", 16 | requiresExplicitSearch: false, 17 | isLoading: false, 18 | hasMore: false, 19 | 20 | search: async (query: string): Promise => { 21 | // Real-time filtering - no need for explicit search 22 | if (!query.trim()) { 23 | return queue; 24 | } 25 | 26 | const lower = query.toLowerCase(); 27 | return queue.filter( 28 | (s) => 29 | s.title.toLowerCase().includes(lower) || 30 | s.artist.toLowerCase().includes(lower) 31 | ); 32 | }, 33 | }), 34 | [queue] 35 | ); 36 | 37 | return provider; 38 | }; 39 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2025 dingyi222666@foxmail.com 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /components/background/renderer/BaseBackgroundRender.ts: -------------------------------------------------------------------------------- 1 | export abstract class BaseBackgroundRender { 2 | protected targetFps: number; 3 | private frameInterval: number; 4 | protected lastRenderTime = 0; 5 | protected isPaused = false; 6 | 7 | constructor(targetFps: number = 60) { 8 | this.targetFps = targetFps; 9 | this.frameInterval = 1000 / targetFps; 10 | } 11 | 12 | setTargetFps(fps: number) { 13 | this.targetFps = fps; 14 | this.frameInterval = 1000 / fps; 15 | } 16 | 17 | setPaused(paused: boolean) { 18 | this.isPaused = paused; 19 | } 20 | 21 | protected shouldRender(now: number) { 22 | if (this.lastRenderTime === 0) { 23 | this.lastRenderTime = now; 24 | return true; 25 | } 26 | 27 | const elapsed = now - this.lastRenderTime; 28 | if (elapsed < this.frameInterval) { 29 | return false; 30 | } 31 | 32 | this.lastRenderTime = now - (elapsed % this.frameInterval); 33 | return true; 34 | } 35 | 36 | protected resetClock(startTime: number) { 37 | this.lastRenderTime = startTime; 38 | } 39 | 40 | abstract start(colors?: string[]): void; 41 | abstract stop(): void; 42 | } 43 | -------------------------------------------------------------------------------- /.github/workflows/deploy.yml: -------------------------------------------------------------------------------- 1 | name: deploy 2 | 3 | permissions: 4 | contents: write 5 | pages: write 6 | id-token: write 7 | 8 | concurrency: 9 | group: pages 10 | cancel-in-progress: false 11 | 12 | on: 13 | push: 14 | branches: 15 | - main 16 | workflow_dispatch: 17 | 18 | jobs: 19 | 
deploy: 20 | environment: 21 | name: github-pages 22 | url: ${{ steps.deployment.outputs.page_url }} 23 | runs-on: ubuntu-latest 24 | steps: 25 | - uses: actions/checkout@v4 26 | with: 27 | fetch-depth: 0 28 | - uses: actions/setup-node@v4 29 | with: 30 | node-version: 20 31 | 32 | - name: Enable corepack 33 | run: corepack enable 34 | 35 | - name: Install dependencies 36 | run: npm install 37 | 38 | - name: Build 39 | run: npm run build 40 | 41 | - name: Setup Pages 42 | uses: actions/configure-pages@v5 43 | - name: Upload artifact 44 | uses: actions/upload-pages-artifact@v3 45 | with: 46 | path: dist 47 | - name: Deploy to GitHub Pages 48 | id: deployment 49 | uses: actions/deploy-pages@v4 50 | -------------------------------------------------------------------------------- /services/geminiService.ts: -------------------------------------------------------------------------------- 1 | import { GoogleGenAI } from "@google/genai"; 2 | 3 | const getClient = () => { 4 | const apiKey = process.env.API_KEY; 5 | if (!apiKey) throw new Error("API Key missing"); 6 | return new GoogleGenAI({ apiKey }); 7 | }; 8 | 9 | export const analyzeLyrics = async (title: string, artist: string, lyrics: string) => { 10 | try { 11 | const ai = getClient(); 12 | 13 | const prompt = ` 14 | Analyze the song "${title}" by "${artist}". 15 | Based on the following lyrics, provide a short, poetic "Vibe Check" (max 50 words) describing the emotional atmosphere, 16 | and then 3 bullet points explaining the deeper meaning. 17 | Return the response as JSON with keys: "vibe", "meanings" (array of strings). 18 | 19 | Lyrics snippet: 20 | ${lyrics.slice(0, 1000)}... 
21 | `; 22 | 23 | const response = await ai.models.generateContent({ 24 | model: 'gemini-2.5-flash', 25 | contents: prompt, 26 | config: { 27 | responseMimeType: 'application/json' 28 | } 29 | }); 30 | 31 | return JSON.parse(response.text || '{}'); 32 | } catch (error) { 33 | console.error("Gemini Analysis Error:", error); 34 | return null; 35 | } 36 | }; -------------------------------------------------------------------------------- /types.ts: -------------------------------------------------------------------------------- 1 | export interface LyricWord { 2 | text: string; 3 | startTime: number; 4 | endTime: number; 5 | } 6 | 7 | export interface LyricLine { 8 | time: number; // Start time in seconds 9 | text: string; // Main text (e.g. Original Language) 10 | translation?: string; // Secondary text (e.g. Translation) 11 | words?: LyricWord[]; // For enhanced LRC animation of the main text 12 | isPreciseTiming?: boolean; // If true, end times are exact (from YRC) and shouldn't be auto-extended 13 | isInterlude?: boolean; // If true, this is an instrumental interlude line ("...") 14 | isMetadata?: boolean; // If true, line represents metadata and shouldn't drive playback 15 | } 16 | 17 | export interface Song { 18 | id: string; 19 | title: string; 20 | artist: string; 21 | fileUrl: string; 22 | coverUrl?: string; 23 | lyrics?: LyricLine[]; 24 | colors?: string[]; // Array of dominant colors 25 | needsLyricsMatch?: boolean; // Flag indicating song needs cloud lyrics matching 26 | // Netease specific fields 27 | isNetease?: boolean; 28 | neteaseId?: string; 29 | album?: string; 30 | } 31 | 32 | export enum PlayState { 33 | PAUSED, 34 | PLAYING, 35 | } 36 | 37 | export enum PlayMode { 38 | LOOP_ALL, 39 | LOOP_ONE, 40 | SHUFFLE 41 | } 42 | -------------------------------------------------------------------------------- /services/keyboardRegistry.ts: -------------------------------------------------------------------------------- 1 | type Handler = (e: KeyboardEvent) 
=> boolean | void; // return true to stop propagation 2 | 3 | interface Listener { 4 | id: string; 5 | priority: number; 6 | handler: Handler; 7 | } 8 | 9 | class KeyboardRegistry { 10 | private listeners: Listener[] = []; 11 | 12 | register(id: string, handler: Handler, priority: number) { 13 | // Remove existing if re-registering with same ID 14 | this.unregister(id); 15 | this.listeners.push({ id, handler, priority }); 16 | // Sort descending by priority 17 | this.listeners.sort((a, b) => b.priority - a.priority); 18 | } 19 | 20 | unregister(id: string) { 21 | this.listeners = this.listeners.filter((l) => l.id !== id); 22 | } 23 | 24 | handle(e: KeyboardEvent) { 25 | // Iterate through listeners by priority 26 | for (const listener of this.listeners) { 27 | // If a handler returns true, it claims the event 28 | if (listener.handler(e) === true) { 29 | e.stopPropagation(); 30 | // We don't prevent default globally here to allow browser defaults like F5, 31 | // unless the specific handler calls e.preventDefault() 32 | return; 33 | } 34 | } 35 | } 36 | } 37 | 38 | export const keyboardRegistry = new KeyboardRegistry(); 39 | -------------------------------------------------------------------------------- /services/spring.ts: -------------------------------------------------------------------------------- 1 | 2 | /** MIT License github.com/pushkine/ */ 3 | export interface SpringParams { 4 | mass?: number; // = 1.0 5 | damping?: number; // = 10.0 6 | stiffness?: number; // = 100.0 7 | soft?: boolean; // = false 8 | } 9 | 10 | type seconds = number; 11 | 12 | export function solve_spring(from: number, velocity: number, to: number, params: SpringParams) { 13 | // Defaults 14 | const p = { 15 | mass: params.mass ?? 1.0, 16 | damping: params.damping ?? 10.0, 17 | stiffness: params.stiffness ?? 100.0, 18 | soft: params.soft ?? 
false 19 | }; 20 | 21 | const delta = to - from; 22 | if (true === p.soft || 1.0 <= p.damping / (2.0 * Math.sqrt(p.stiffness * p.mass))) { 23 | const angular_frequency = -Math.sqrt(p.stiffness / p.mass); 24 | const leftover = -angular_frequency * delta - velocity; 25 | return (t: seconds) => to - (delta + t * leftover) * Math.E ** (t * angular_frequency); 26 | } else { 27 | const damping_frequency = Math.sqrt(4.0 * p.mass * p.stiffness - p.damping ** 2.0); 28 | const leftover = (p.damping * delta - 2.0 * p.mass * velocity) / damping_frequency; 29 | const dfm = (0.5 * damping_frequency) / p.mass; 30 | const dm = -(0.5 * p.damping) / p.mass; 31 | return (t: seconds) => to - (Math.cos(t * dfm) * delta + Math.sin(t * dfm) * leftover) * Math.E ** (t * dm); 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |
2 | GHBanner 3 |
4 | 5 | # Run and deploy your AI Studio app 6 | 7 | This contains everything you need to run your app locally. 8 | 9 | View your app in AI Studio: https://ai.studio/apps/drive/1ggcfQNwQs0cGrbzb1oapySzBvuP5I1ha 10 | 11 | ## Feature (Github Version) 12 | 13 | - [x] **WebGL Fluid Background**: Implements a dynamic fluid background effect using WebGL shaders. [Reference](https://www.shadertoy.com/view/wdyczG) 14 | - [x] **Canvas Lyric Rendering**: High-performance, custom-drawn lyric visualization on HTML5 Canvas. 15 | - [x] **Music Import & Search**: Seamlessly search and import music from external providers or local files. 16 | - [x] **Audio Manipulation**: Real-time control over playback speed and pitch shifting. 17 | 18 | ## Run Locally 19 | 20 | **Prerequisites:** Node.js 21 | 22 | 1. Install dependencies: 23 | `npm install` 24 | 2. Set the `GEMINI_API_KEY` in [.env.local](.env.local) to your Gemini API key 25 | 3. Run the app: 26 | `npm run dev` 27 | 28 | ## Screenshot 29 | 30 | ![Screenshot1](./images/screenshot1.png) 31 | ![Screenshot2](./images/screenshot2.png) 32 | ![Screenshot3](./images/screenshot3.png) 33 | ![Screenshot4](./images/screenshot4.png) 34 | 35 | > Shader source: https://www.shadertoy.com/view/wdyczG 36 | 37 | > Vibe coding with gemini3-pro, gpt-5.1-codex-mini, and claude-sonnet-4.5. The first version only took 10 mins. 38 | -------------------------------------------------------------------------------- /services/lyrics/utils.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Shared utilities (minimal - most moved to parser.ts). 
3 | */ 4 | 5 | export { INTERLUDE_TEXT } from "./parser"; 6 | export { parseTime as parseTimeTag } from "./parser"; 7 | export { createWord, createLine } from "./parser"; 8 | export { isPunctuation as isPunctuationOnly } from "./parser"; 9 | export { normalizeText } from "./parser"; 10 | 11 | // Legacy regex for backward compatibility 12 | export const LRC_LINE_REGEX = /\[(\d{2}):(\d{2})\.(\d{2,3})\](.*)/; 13 | 14 | // Re-export for backward compatibility 15 | export { addDurations as processLyricsDurations } from "./parser"; 16 | export { mergePunctuation as mergePunctuationWords } from "./parser"; 17 | export { hasContent as hasMeaningfulContent } from "./parser"; 18 | 19 | /** 20 | * Normalize time key for map lookups. 21 | */ 22 | export const normalizeTimeKey = (time: number): number => { 23 | return Math.round(time * 100) / 100; 24 | }; 25 | 26 | /** 27 | * Get display text from parsed line data. 28 | * @deprecated - use line.text directly 29 | */ 30 | export const getEntryDisplayText = (entry: { text: string; words?: { text: string }[] }): string => { 31 | if (entry.text?.trim()) return entry.text.trim(); 32 | if (entry.words?.length) { 33 | return entry.words.map(w => w.text).join("").trim(); 34 | } 35 | return ""; 36 | }; 37 | 38 | /** 39 | * Fix word end times (now handled during parsing). 
40 | * @deprecated - handled inline during parse 41 | */ 42 | export const fixWordEndTimes = (): void => { 43 | // No-op: handled during parsing 44 | }; 45 | -------------------------------------------------------------------------------- /components/visualizer/AudioProcessor.ts: -------------------------------------------------------------------------------- 1 | // AudioProcessor.ts (AudioWorklet) 2 | 3 | // Declare AudioWorkletGlobalScope types since they are not in the default TS lib 4 | interface AudioWorkletProcessor { 5 | readonly port: MessagePort; 6 | process(inputs: Float32Array[][], outputs: Float32Array[][], parameters: Record): boolean; 7 | } 8 | 9 | declare var AudioWorkletProcessor: { 10 | prototype: AudioWorkletProcessor; 11 | new(options?: any): AudioWorkletProcessor; 12 | }; 13 | 14 | declare function registerProcessor(name: string, processorCtor: (new (options?: any) => AudioWorkletProcessor)): void; 15 | 16 | 17 | class AudioProcessor extends AudioWorkletProcessor { 18 | port2: MessagePort | null = null; 19 | constructor() { 20 | super(); 21 | this.port2 = null; 22 | this.port.onmessage = (e) => { 23 | console.log("AudioProcessor: Received message", e.data); 24 | if (e.data.type === 'PORT') { 25 | this.port2 = e.data.port; 26 | console.log("AudioProcessor: Port received and set"); 27 | this.port.postMessage({ type: 'PORT_RECEIVED' }); 28 | } 29 | }; 30 | console.log("AudioProcessor: Initialized"); 31 | } 32 | 33 | process(inputs: Float32Array[][], outputs: Float32Array[][], parameters: Record): boolean { 34 | // We only care about the first input 35 | const input = inputs[0]; 36 | if (!input || input.length === 0) return true; 37 | 38 | const channelData = input[0]; // Mono or Left channel 39 | 40 | // If we have a port to the worker, send data 41 | if (this.port2) { 42 | 43 | // We send a copy. 128 samples is small. 
44 | const data = new Float32Array(channelData); 45 | this.port2.postMessage({ type: 'AUDIO_DATA', data }, [data.buffer]); 46 | } 47 | 48 | return true; 49 | } 50 | } 51 | 52 | registerProcessor('audio-processor', AudioProcessor); 53 | -------------------------------------------------------------------------------- /services/lyrics/types.ts: -------------------------------------------------------------------------------- 1 | import { LyricLine, LyricWord } from "../../types"; 2 | 3 | // Re-export types for convenience 4 | export type { LyricLine, LyricWord }; 5 | 6 | /** 7 | * Internal representation of a parsed lyric line during processing. 8 | * Contains additional metadata used for sorting and merging. 9 | */ 10 | export interface ParsedLineData { 11 | time: number; 12 | text: string; 13 | words: LyricWord[]; 14 | tagCount: number; // Priority indicator: higher = more precise timing data 15 | originalIndex: number; // For stable sorting 16 | isMetadata?: boolean; // Whether this line is metadata (artist info, etc.) 17 | } 18 | 19 | /** 20 | * Result from parsing a single lyrics format (before translation merge). 21 | */ 22 | export interface ParsedLyricsResult { 23 | lines: LyricLine[]; 24 | hasWordTiming: boolean; // Whether the lyrics contain word-level timing 25 | } 26 | 27 | /** 28 | * Metadata indicators for filtering out non-lyric content. 29 | */ 30 | export const METADATA_INDICATORS = [ 31 | "by:", // Common LRC metadata 32 | "offset:", 33 | ]; 34 | 35 | /** 36 | * Chinese metadata indicators (NetEase style). 37 | */ 38 | export const CHINESE_METADATA_INDICATORS = [ 39 | "歌词贡献者", 40 | "翻译贡献者", 41 | "作词", 42 | "作曲", 43 | "编曲", 44 | "制作", 45 | "词曲", 46 | ]; 47 | 48 | /** 49 | * Check if the given text is a metadata line. 
50 | */ 51 | export const isMetadataLine = (text: string): boolean => { 52 | if (!text) return false; 53 | 54 | // Check for NetEase JSON metadata lines 55 | if (text.trim().startsWith("{") && text.trim().endsWith("}")) return true; 56 | 57 | const normalized = text.replace(/\s+/g, "").toLowerCase(); 58 | 59 | // Check English metadata 60 | if ( 61 | METADATA_INDICATORS.some((indicator) => 62 | normalized.includes(indicator.toLowerCase()), 63 | ) 64 | ) { 65 | return true; 66 | } 67 | 68 | // Check Chinese metadata 69 | return CHINESE_METADATA_INDICATORS.some((indicator) => 70 | normalized.includes(indicator), 71 | ); 72 | }; 73 | -------------------------------------------------------------------------------- /components/background/renderer/UIBackgroundRender.ts: -------------------------------------------------------------------------------- 1 | import { BaseBackgroundRender } from "./BaseBackgroundRender"; 2 | 3 | export type UIRenderCallback = ( 4 | ctx: CanvasRenderingContext2D, 5 | now: number, 6 | ) => void; 7 | 8 | export class UIBackgroundRender extends BaseBackgroundRender { 9 | private canvas: HTMLCanvasElement; 10 | private ctx: CanvasRenderingContext2D | null = null; 11 | private rafId: number | null = null; 12 | private running = false; 13 | private readonly renderCallback: UIRenderCallback; 14 | 15 | constructor( 16 | canvas: HTMLCanvasElement, 17 | renderCallback: UIRenderCallback, 18 | targetFps: number = 60, 19 | ) { 20 | super(targetFps); 21 | this.canvas = canvas; 22 | this.renderCallback = renderCallback; 23 | } 24 | 25 | private tick = (now: number) => { 26 | if (!this.running) return; 27 | if (!this.ctx) { 28 | this.ctx = this.canvas.getContext("2d"); 29 | } 30 | 31 | if (!this.ctx) return; 32 | 33 | if (!this.isPaused && this.shouldRender(now)) { 34 | this.renderCallback(this.ctx, now); 35 | } 36 | 37 | this.rafId = window.requestAnimationFrame(this.tick); 38 | }; 39 | 40 | start() { 41 | if (this.running) { 42 | this.stop(); 43 | } 44 | 45 
| this.ctx = this.canvas.getContext("2d"); 46 | if (!this.ctx) { 47 | console.error("Failed to get 2D context for UI background renderer"); 48 | return; 49 | } 50 | 51 | this.running = true; 52 | this.resetClock(performance.now()); 53 | this.rafId = window.requestAnimationFrame(this.tick); 54 | } 55 | 56 | stop() { 57 | this.running = false; 58 | if (this.rafId) { 59 | window.cancelAnimationFrame(this.rafId); 60 | this.rafId = null; 61 | } 62 | } 63 | 64 | resize(width?: number, height?: number) { 65 | const w = width ?? this.canvas.clientWidth; 66 | const h = height ?? this.canvas.clientHeight; 67 | if (this.canvas.width !== w || this.canvas.height !== h) { 68 | this.canvas.width = w; 69 | this.canvas.height = h; 70 | } 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /components/background/renderer/WebWorkerBackgroundRender.ts: -------------------------------------------------------------------------------- 1 | import { BaseBackgroundRender } from "./BaseBackgroundRender"; 2 | import backgroundWorkerUrl from "./webWorkerBackground.worker.ts?worker&url"; 3 | 4 | type WorkerCommand = 5 | | { type: "init"; canvas: OffscreenCanvas; width: number; height: number; colors: string[] } 6 | | { type: "resize"; width: number; height: number } 7 | | { type: "colors"; colors: string[] } 8 | | { type: "play"; isPlaying: boolean } 9 | | { type: "pause"; paused: boolean }; 10 | 11 | export class WebWorkerBackgroundRender extends BaseBackgroundRender { 12 | private canvas: HTMLCanvasElement; 13 | private worker: Worker | null = null; 14 | 15 | constructor(canvas: HTMLCanvasElement, targetFps: number = 60) { 16 | super(targetFps); 17 | this.canvas = canvas; 18 | } 19 | 20 | start(colors: string[]) { 21 | if (!WebWorkerBackgroundRender.isSupported(this.canvas)) { 22 | console.warn("WebWorker background renderer requires OffscreenCanvas support"); 23 | return; 24 | } 25 | 26 | this.stop(); 27 | 28 | try { 29 | const offscreen = 
this.canvas.transferControlToOffscreen(); 30 | this.canvas.dataset.offscreenTransferred = "true"; 31 | this.worker = new Worker(backgroundWorkerUrl, { type: "module" }); 32 | const command: WorkerCommand = { 33 | type: "init", 34 | canvas: offscreen, 35 | width: this.canvas.clientWidth, 36 | height: this.canvas.clientHeight, 37 | colors, 38 | }; 39 | this.worker.postMessage(command, [offscreen]); 40 | } catch (error) { 41 | console.error("Failed to initialize web worker renderer", error); 42 | this.worker = null; 43 | } 44 | } 45 | 46 | stop() { 47 | if (this.worker) { 48 | this.worker.terminate(); 49 | this.worker = null; 50 | } 51 | } 52 | 53 | resize(width: number, height: number) { 54 | if (this.worker) { 55 | const command: WorkerCommand = { type: "resize", width, height }; 56 | this.worker.postMessage(command); 57 | } 58 | } 59 | 60 | override setPaused(paused: boolean) { 61 | super.setPaused(paused); 62 | if (this.worker) { 63 | this.worker.postMessage({ type: "pause", paused }); 64 | } 65 | } 66 | 67 | setPlaying(isPlaying: boolean) { 68 | if (this.worker) { 69 | this.worker.postMessage({ type: "play", isPlaying }); 70 | } 71 | } 72 | 73 | setColors(colors: string[]) { 74 | if (this.worker) { 75 | this.worker.postMessage({ type: "colors", colors }); 76 | } 77 | } 78 | 79 | static isSupported(canvas: HTMLCanvasElement) { 80 | return ( 81 | typeof window !== "undefined" && 82 | typeof OffscreenCanvas !== "undefined" && 83 | typeof canvas.transferControlToOffscreen === "function" 84 | ); 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /services/lyrics/index.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Lyrics Parsing Module 3 | * 4 | * Unified lyrics parsing for various formats: 5 | * - Standard LRC with optional word-by-word timing 6 | * - Netease YRC format with word timing 7 | * - Translation merging 8 | * 9 | * Architecture: 10 | * - Tokenizer-based 
parsing (not regex) 11 | * - Single-pass processing 12 | * - Inline duplicate handling 13 | * - Automatic interlude insertion 14 | */ 15 | 16 | import { LyricLine } from "./types"; 17 | import { parseLrc } from "./lrc"; 18 | import { parseNeteaseLyrics, isNeteaseFormat } from "./netease"; 19 | import { mergeTranslations } from "./translation"; 20 | 21 | // Re-export types 22 | export type { LyricLine, LyricWord } from "./types"; 23 | 24 | // Re-export parsers 25 | export { parseLrc } from "./lrc"; 26 | export { parseNeteaseLyrics, isNeteaseFormat } from "./netease"; 27 | export { mergeTranslations, buildTranslationMap } from "./translation"; 28 | 29 | // Re-export utilities for backward compatibility 30 | export { INTERLUDE_TEXT } from "./parser"; 31 | export { parseTime as parseTimeTag } from "./parser"; 32 | 33 | /** 34 | * Parse lyrics with automatic format detection. 35 | * 36 | * @param content - Main lyrics content (LRC or YRC) 37 | * @param translationContent - Optional translation content (LRC format) 38 | * @param options - Optional YRC content for dual-format parsing 39 | * @returns Parsed lyrics with translations and interludes 40 | * 41 | * @example 42 | * // Standard LRC 43 | * const lyrics = parseLyrics("[00:12.34]Hello world"); 44 | * 45 | * @example 46 | * // With translation 47 | * const lyrics = parseLyrics(lrcContent, translationContent); 48 | * 49 | * @example 50 | * // Netease YRC with LRC base 51 | * const lyrics = parseLyrics(lrcContent, translation, { yrcContent }); 52 | */ 53 | export const parseLyrics = ( 54 | content: string, 55 | translationContent?: string, 56 | options?: { yrcContent?: string } 57 | ): LyricLine[] => { 58 | if (!content?.trim()) return []; 59 | 60 | // Detect format and parse 61 | let lines: LyricLine[]; 62 | 63 | if (options?.yrcContent) { 64 | // Use LRC as base, enrich with YRC word timing 65 | lines = parseNeteaseLyrics(options.yrcContent, content); 66 | } else if (isNeteaseFormat(content)) { 67 | // Pure YRC 
format 68 | lines = parseNeteaseLyrics(content); 69 | } else { 70 | // Standard LRC format 71 | lines = parseLrc(content); 72 | } 73 | 74 | // Merge translations if provided 75 | if (translationContent?.trim()) { 76 | lines = mergeTranslations(lines, translationContent); 77 | } 78 | 79 | return lines; 80 | }; 81 | 82 | /** 83 | * Merge raw lyrics strings. 84 | * @deprecated Use parseLyrics with translationContent parameter 85 | */ 86 | export const mergeLyrics = (original: string, translation: string): string => { 87 | return `${original}\n${translation}`; 88 | }; 89 | -------------------------------------------------------------------------------- /hooks/useNeteaseSearchProvider.ts: -------------------------------------------------------------------------------- 1 | import { useState, useCallback } from "react"; 2 | import { SearchProvider, SearchResultItem } from "./useSearchProvider"; 3 | import { 4 | searchNetEase, 5 | NeteaseTrackInfo, 6 | } from "../services/lyricsService"; 7 | 8 | const LIMIT = 30; 9 | 10 | export interface NeteaseSearchProviderExtended extends SearchProvider { 11 | performSearch: (query: string) => Promise; 12 | hasSearched: boolean; 13 | results: NeteaseTrackInfo[]; 14 | } 15 | 16 | export const useNeteaseSearchProvider = (): NeteaseSearchProviderExtended => { 17 | const [results, setResults] = useState([]); 18 | const [isLoading, setIsLoading] = useState(false); 19 | const [hasMore, setHasMore] = useState(true); 20 | const [hasSearched, setHasSearched] = useState(false); 21 | 22 | const performSearch = useCallback(async (query: string) => { 23 | if (!query.trim()) { 24 | setResults([]); 25 | setHasSearched(false); 26 | return; 27 | } 28 | 29 | setIsLoading(true); 30 | setHasSearched(true); 31 | setResults([]); 32 | setHasMore(true); 33 | 34 | try { 35 | const searchResults = await searchNetEase(query, { 36 | limit: LIMIT, 37 | offset: 0, 38 | }) 39 | setResults(searchResults); 40 | setHasMore(searchResults.length >= LIMIT); 41 | } catch 
(e) { 42 | console.error("Netease search failed:", e); 43 | setHasMore(false); 44 | } finally { 45 | setIsLoading(false); 46 | } 47 | }, []); 48 | 49 | const loadMore = useCallback( 50 | async (query: string, offset: number, limit: number): Promise => { 51 | if (isLoading || !hasMore) return []; 52 | 53 | setIsLoading(true); 54 | try { 55 | const searchResults = await searchNetEase(query, { 56 | limit, 57 | offset, 58 | }); 59 | 60 | if (searchResults.length === 0) { 61 | setHasMore(false); 62 | } else { 63 | setResults((prev) => [...prev, ...searchResults]); 64 | } 65 | return searchResults; 66 | } catch (e) { 67 | console.error("Load more failed:", e); 68 | setHasMore(false); 69 | return []; 70 | } finally { 71 | setIsLoading(false); 72 | } 73 | }, 74 | [isLoading, hasMore] 75 | ); 76 | 77 | const provider: NeteaseSearchProviderExtended = { 78 | id: "netease", 79 | label: "Cloud Music", 80 | requiresExplicitSearch: true, 81 | isLoading, 82 | hasMore, 83 | hasSearched, 84 | results, 85 | 86 | search: async (query: string): Promise => { 87 | // For explicit search providers, this returns current results 88 | // Actual search is triggered by performSearch 89 | return results; 90 | }, 91 | 92 | loadMore, 93 | performSearch, 94 | }; 95 | 96 | return provider; 97 | }; 98 | -------------------------------------------------------------------------------- /hooks/useCanvasRenderer.ts: -------------------------------------------------------------------------------- 1 | import { useEffect, useRef, useLayoutEffect } from "react"; 2 | 3 | interface UseCanvasRendererProps { 4 | onRender: ( 5 | ctx: CanvasRenderingContext2D, 6 | width: number, 7 | height: number, 8 | deltaTime: number, 9 | ) => void; 10 | } 11 | 12 | export const useCanvasRenderer = ({ onRender }: UseCanvasRendererProps) => { 13 | const canvasRef = useRef(null); 14 | const requestRef = useRef(0); 15 | const previousTimeRef = useRef(0); 16 | 17 | // Use a ref to store the latest callback to avoid restarting the 
animation loop 18 | const onRenderRef = useRef(onRender); 19 | 20 | useLayoutEffect(() => { 21 | onRenderRef.current = onRender; 22 | }); 23 | 24 | useEffect(() => { 25 | const canvas = canvasRef.current; 26 | if (!canvas) return; 27 | 28 | const ctx = canvas.getContext("2d", { alpha: true }); 29 | if (!ctx) return; 30 | 31 | const handleResize = () => { 32 | if (!canvas) return; 33 | const parent = canvas.parentElement; 34 | if (parent) { 35 | const dpr = window.devicePixelRatio || 1; 36 | const rect = parent.getBoundingClientRect(); 37 | 38 | // Only resize if dimensions actually changed to avoid flicker 39 | if ( 40 | canvas.width !== rect.width * dpr || 41 | canvas.height !== rect.height * dpr 42 | ) { 43 | canvas.width = rect.width * dpr; 44 | canvas.height = rect.height * dpr; 45 | canvas.style.width = `${rect.width}px`; 46 | canvas.style.height = `${rect.height}px`; 47 | // Reset transform before applying DPR scaling to avoid cumulative scaling 48 | ctx.resetTransform(); 49 | ctx.scale(dpr, dpr); 50 | } 51 | } 52 | }; 53 | 54 | // Initial resize 55 | handleResize(); 56 | window.addEventListener("resize", handleResize); 57 | 58 | const animate = (time: number) => { 59 | if (previousTimeRef.current !== undefined) { 60 | const deltaTime = time - previousTimeRef.current; 61 | 62 | // Logical dimensions 63 | const width = canvas.width / (window.devicePixelRatio || 1); 64 | const height = canvas.height / (window.devicePixelRatio || 1); 65 | 66 | ctx.clearRect(0, 0, width, height); 67 | 68 | // Call latest render function 69 | onRenderRef.current(ctx, width, height, deltaTime); 70 | } 71 | previousTimeRef.current = time; 72 | requestRef.current = requestAnimationFrame(animate); 73 | }; 74 | 75 | requestRef.current = requestAnimationFrame(animate); 76 | 77 | return () => { 78 | window.removeEventListener("resize", handleResize); 79 | cancelAnimationFrame(requestRef.current); 80 | }; 81 | }, []); 82 | 83 | return canvasRef; 84 | }; 85 | 
-------------------------------------------------------------------------------- /services/cache.ts: -------------------------------------------------------------------------------- 1 | const MOBILE_BREAKPOINT = 1024; 2 | 3 | const isMobileViewport = () => { 4 | if (typeof window === "undefined" || typeof window.matchMedia !== "function") { 5 | return false; 6 | } 7 | return window.matchMedia(`(max-width: ${MOBILE_BREAKPOINT}px)`).matches; 8 | }; 9 | 10 | const createSizeLimitedLRU = (limitBytes: number) => { 11 | const map = new Map(); 12 | let totalSize = 0; 13 | 14 | const evictIfNeeded = () => { 15 | while (totalSize > limitBytes && map.size > 0) { 16 | const oldestKey = map.keys().next().value; 17 | if (!oldestKey) break; 18 | const entry = map.get(oldestKey); 19 | map.delete(oldestKey); 20 | if (entry) { 21 | totalSize -= entry.size; 22 | } 23 | } 24 | }; 25 | 26 | return { 27 | get(key: string): Blob | null { 28 | const entry = map.get(key); 29 | if (!entry) return null; 30 | map.delete(key); 31 | map.set(key, entry); 32 | return entry.blob; 33 | }, 34 | set(key: string, blob: Blob) { 35 | const size = blob.size || 0; 36 | if (size <= 0 || size > limitBytes) { 37 | return; 38 | } 39 | if (map.has(key)) { 40 | const existing = map.get(key); 41 | if (existing) { 42 | totalSize -= existing.size; 43 | } 44 | map.delete(key); 45 | } 46 | map.set(key, { blob, size }); 47 | totalSize += size; 48 | evictIfNeeded(); 49 | }, 50 | delete(key: string) { 51 | const entry = map.get(key); 52 | if (!entry) return; 53 | totalSize -= entry.size; 54 | map.delete(key); 55 | }, 56 | clear() { 57 | map.clear(); 58 | totalSize = 0; 59 | }, 60 | getLimit() { 61 | return limitBytes; 62 | }, 63 | }; 64 | }; 65 | 66 | const IMAGE_CACHE_LIMIT = isMobileViewport() ? 50 * 1024 * 1024 : 100 * 1024 * 1024; 67 | const AUDIO_CACHE_LIMIT = isMobileViewport() ? 
100 * 1024 * 1024 : 200 * 1024 * 1024; 68 | const RAW_IMAGE_CACHE_LIMIT = 50 * 1024 * 1024; 69 | 70 | const rawImageCache = createSizeLimitedLRU(RAW_IMAGE_CACHE_LIMIT); 71 | 72 | export const imageResourceCache = createSizeLimitedLRU(IMAGE_CACHE_LIMIT); 73 | export const audioResourceCache = createSizeLimitedLRU(AUDIO_CACHE_LIMIT); 74 | 75 | export const fetchImageBlobWithCache = async (url: string): Promise => { 76 | const cached = rawImageCache.get(url); 77 | if (cached) { 78 | return cached; 79 | } 80 | const response = await fetch(url); 81 | if (!response.ok) { 82 | throw new Error(`Failed to fetch image: ${response.status}`); 83 | } 84 | const blob = await response.blob(); 85 | rawImageCache.set(url, blob); 86 | return blob; 87 | }; 88 | 89 | export const loadImageElementWithCache = async ( 90 | url: string, 91 | ): Promise => { 92 | const blob = await fetchImageBlobWithCache(url); 93 | return new Promise((resolve, reject) => { 94 | const img = new Image(); 95 | img.crossOrigin = "anonymous"; 96 | const objectUrl = URL.createObjectURL(blob); 97 | img.onload = () => { 98 | URL.revokeObjectURL(objectUrl); 99 | resolve(img); 100 | }; 101 | img.onerror = (error) => { 102 | URL.revokeObjectURL(objectUrl); 103 | reject(error); 104 | }; 105 | img.src = objectUrl; 106 | }); 107 | }; 108 | -------------------------------------------------------------------------------- /services/lyrics/translation.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Translation merging for lyrics. 3 | * 4 | * Simplified approach: translations are just standard LRC format. 5 | * Parse them with the LRC parser and map by timestamp. 6 | */ 7 | 8 | import { LyricLine } from "./types"; 9 | import { parseLrc } from "./lrc"; 10 | 11 | /** 12 | * Normalize time to consistent precision for lookups. 
13 | */ 14 | const normalizeTime = (time: number): number => { 15 | return Math.round(time * 100) / 100; 16 | }; 17 | 18 | /** 19 | * Build translation map from LRC content. 20 | * Translations follow standard LRC format, so we can reuse the parser. 21 | */ 22 | export const buildTranslationMap = (content?: string): Map => { 23 | if (!content?.trim()) return new Map(); 24 | 25 | // Parse as standard LRC 26 | const lines = parseLrc(content); 27 | const map = new Map(); 28 | 29 | for (const line of lines) { 30 | if (line.isInterlude || !line.text?.trim()) continue; 31 | 32 | const key = normalizeTime(line.time); 33 | const existing = map.get(key); 34 | 35 | if (existing) { 36 | map.set(key, `${existing}\n${line.text.trim()}`); 37 | } else { 38 | map.set(key, line.text.trim()); 39 | } 40 | } 41 | 42 | return map; 43 | }; 44 | 45 | /** 46 | * Find translation for a line with tolerance. 47 | */ 48 | const findTranslation = ( 49 | map: Map, 50 | line: LyricLine 51 | ): string | undefined => { 52 | const key = normalizeTime(line.time); 53 | 54 | // Try exact match first 55 | const exact = map.get(key); 56 | if (exact) { 57 | map.delete(key); 58 | return exact; 59 | } 60 | 61 | // Try nearby matches with directional tolerance 62 | const isPrecise = Boolean(line.isPreciseTiming); 63 | const forwardTolerance = isPrecise ? 1.0 : 0.35; 64 | const backwardTolerance = isPrecise ? 
0.35 : 0.2; 65 | 66 | let bestKey: number | null = null; 67 | let bestDiff = Infinity; 68 | 69 | for (const [mapKey, value] of map.entries()) { 70 | const delta = mapKey - key; 71 | 72 | // Check tolerance direction 73 | if (delta >= 0 && delta <= forwardTolerance) { 74 | // Forward match 75 | if (delta < bestDiff) { 76 | bestDiff = delta; 77 | bestKey = mapKey; 78 | } 79 | } else if (delta < 0 && -delta <= backwardTolerance) { 80 | // Backward match 81 | if (-delta < bestDiff) { 82 | bestDiff = -delta; 83 | bestKey = mapKey; 84 | } 85 | } 86 | } 87 | 88 | if (bestKey !== null) { 89 | const value = map.get(bestKey)!; 90 | map.delete(bestKey); 91 | return value; 92 | } 93 | 94 | return undefined; 95 | }; 96 | 97 | /** 98 | * Merge translations into lyrics. 99 | * 100 | * Translations are parsed as standard LRC and matched by timestamp. 101 | * Matching uses tolerance to handle timing drift. 102 | */ 103 | export const mergeTranslations = ( 104 | lines: LyricLine[], 105 | translationContent?: string 106 | ): LyricLine[] => { 107 | if (!translationContent?.trim()) return lines; 108 | 109 | const map = buildTranslationMap(translationContent); 110 | if (map.size === 0) return lines; 111 | 112 | return lines.map(line => { 113 | if (line.isInterlude) return line; 114 | 115 | const translation = findTranslation(map, line); 116 | 117 | if (!translation) return line; 118 | 119 | const trimmed = translation.trim(); 120 | if (!trimmed) return line; 121 | 122 | // Don't override existing translation 123 | if (line.translation) return line; 124 | 125 | return { 126 | ...line, 127 | translation: trimmed, 128 | }; 129 | }); 130 | }; 131 | -------------------------------------------------------------------------------- /index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Aura Music 7 | 12 | 13 | 14 | 15 | 19 | 20 | 21 | 86 | 87 | 88 |
89 | 90 | 91 | 92 | -------------------------------------------------------------------------------- /components/GeminiButton.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState } from 'react'; 2 | import { analyzeLyrics } from '../services/geminiService'; 3 | import { Song } from '../types'; 4 | 5 | interface GeminiButtonProps { 6 | song: Song; 7 | } 8 | 9 | const GeminiButton: React.FC = ({ song }) => { 10 | const [loading, setLoading] = useState(false); 11 | const [data, setData] = useState<{ vibe: string; meanings: string[] } | null>(null); 12 | const [isOpen, setIsOpen] = useState(false); 13 | 14 | const handleAnalyze = async () => { 15 | if (data) { 16 | setIsOpen(true); 17 | return; 18 | } 19 | if (!song.lyrics || song.lyrics.length === 0) return; 20 | 21 | setLoading(true); 22 | setIsOpen(true); 23 | 24 | // Convert lyrics array back to string 25 | const fullText = song.lyrics.map(l => l.text).join('\n'); 26 | 27 | const result = await analyzeLyrics(song.title, song.artist, fullText); 28 | setData(result); 29 | setLoading(false); 30 | }; 31 | 32 | if (!song.lyrics || song.lyrics.length === 0) return null; 33 | 34 | return ( 35 | <> 36 | 45 | 46 | {isOpen && ( 47 |
setIsOpen(false)}> 48 |
e.stopPropagation()}> 49 | {loading ? ( 50 |
51 |
52 |

Consulting the music spirits...

53 |
54 | ) : data ? ( 55 |
56 |

Vibe Check

57 |

"{data.vibe}"

58 |
59 |

Interpretation

60 |
    61 | {data.meanings?.map((m, i) => ( 62 |
  • 63 | {m} 64 |
  • 65 | ))} 66 |
67 | 73 |
74 | ) : ( 75 |
Failed to analyze. Try again.
76 | )} 77 |
78 |
79 | )} 80 | 81 | ); 82 | }; 83 | 84 | export default GeminiButton; -------------------------------------------------------------------------------- /components/Toast.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState, useCallback, useEffect } from 'react'; 2 | import { createPortal } from 'react-dom'; 3 | import { ToastContext, Toast, ToastType } from '../hooks/useToast'; 4 | import { CheckIcon } from './Icons'; 5 | 6 | const ToastItem: React.FC<{ toast: Toast; onRemove: (id: string) => void }> = ({ toast, onRemove }) => { 7 | const [isExiting, setIsExiting] = useState(false); 8 | 9 | useEffect(() => { 10 | const timer = setTimeout(() => { 11 | setIsExiting(true); 12 | }, toast.duration || 3000); 13 | 14 | return () => clearTimeout(timer); 15 | }, [toast.duration]); 16 | 17 | useEffect(() => { 18 | if (isExiting) { 19 | const timer = setTimeout(() => { 20 | onRemove(toast.id); 21 | }, 300); // Match animation duration 22 | return () => clearTimeout(timer); 23 | } 24 | }, [isExiting, onRemove, toast.id]); 25 | 26 | const getIcon = () => { 27 | switch (toast.type) { 28 | case 'success': 29 | return ; 30 | case 'error': 31 | return ( 32 | 33 | 34 | 35 | ); 36 | case 'info': 37 | default: 38 | return ( 39 | 40 | 41 | 42 | ); 43 | } 44 | }; 45 | 46 | return ( 47 |
56 |
57 | {getIcon()} 58 |
59 |

{toast.message}

60 |
61 | ); 62 | }; 63 | 64 | export const ToastProvider: React.FC<{ children: React.ReactNode }> = ({ children }) => { 65 | const [toasts, setToasts] = useState([]); 66 | 67 | const addToast = useCallback((message: string, type: ToastType, duration = 3000) => { 68 | const id = Math.random().toString(36).substring(2, 9); 69 | setToasts((prev) => [...prev, { id, message, type, duration }]); 70 | }, []); 71 | 72 | const removeToast = useCallback((id: string) => { 73 | setToasts((prev) => prev.filter((t) => t.id !== id)); 74 | }, []); 75 | 76 | const contextValue = { 77 | toast: { 78 | success: (message: string, duration?: number) => addToast(message, 'success', duration), 79 | error: (message: string, duration?: number) => addToast(message, 'error', duration), 80 | info: (message: string, duration?: number) => addToast(message, 'info', duration), 81 | }, 82 | }; 83 | 84 | return ( 85 | 86 | {children} 87 | {createPortal( 88 |
89 | {toasts.map((toast) => ( 90 |
91 | 92 |
93 | ))} 94 |
, 95 | document.body 96 | )} 97 |
98 | ); 99 | }; 100 | -------------------------------------------------------------------------------- /components/ImportMusicDialog.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState } from "react"; 2 | import { createPortal } from "react-dom"; 3 | import { LinkIcon } from "./Icons"; 4 | 5 | interface ImportMusicDialogProps { 6 | isOpen: boolean; 7 | onClose: () => void; 8 | onImport: (url: string) => Promise; 9 | } 10 | 11 | const ImportMusicDialog: React.FC = ({ 12 | isOpen, 13 | onClose, 14 | onImport, 15 | }) => { 16 | const [importUrl, setImportUrl] = useState(""); 17 | const [isLoading, setIsLoading] = useState(false); 18 | 19 | const handleImport = async () => { 20 | if (!importUrl.trim() || isLoading) return; 21 | 22 | setIsLoading(true); 23 | try { 24 | const success = await onImport(importUrl); 25 | if (success) { 26 | setImportUrl(""); 27 | onClose(); 28 | } 29 | } finally { 30 | setIsLoading(false); 31 | } 32 | }; 33 | 34 | const handleClose = () => { 35 | setImportUrl(""); 36 | onClose(); 37 | }; 38 | 39 | if (!isOpen) return null; 40 | 41 | return createPortal( 42 |
46 | {/* Backdrop */} 47 |
48 | 49 | {/* Modal */} 50 |
e.stopPropagation()} 53 | > 54 | {/* Content */} 55 |
56 |
57 | 58 |
59 | 60 |

61 | Import Music 62 |

63 |

64 | Paste a{" "} 65 | 66 | Netease Cloud Music 67 | {" "} 68 | song or playlist link to add to queue. 69 |

70 | 71 | setImportUrl(e.target.value)} 75 | placeholder="https://music.163.com/..." 76 | className="w-full mt-5 bg-white/10 border border-white/10 rounded-xl px-4 py-3.5 text-white placeholder:text-white/20 focus:outline-none focus:ring-2 focus:ring-blue-500/50 focus:bg-white/10 transition-all text-[15px]" 77 | disabled={isLoading} 78 | autoFocus 79 | onKeyDown={(e) => { 80 | if (e.key === "Enter") { 81 | handleImport(); 82 | } 83 | }} 84 | /> 85 |
86 | 87 | {/* Action Buttons (iOS Style) */} 88 |
89 | 95 | 131 |
132 |
133 |
, 134 | document.body, 135 | ); 136 | }; 137 | 138 | export default ImportMusicDialog; 139 | -------------------------------------------------------------------------------- /services/springSystem.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Advanced Spring Physics System 3 | * Supports multiple properties (x, y, scale, etc.) simultaneously. 4 | */ 5 | 6 | export interface SpringConfig { 7 | mass: number; 8 | stiffness: number; 9 | damping: number; 10 | precision?: number; // Stop threshold 11 | } 12 | 13 | export const DEFAULT_SPRING: SpringConfig = { 14 | mass: 1, 15 | stiffness: 120, 16 | damping: 20, 17 | precision: 0.01, 18 | }; 19 | 20 | export const POS_Y_SPRING: SpringConfig = { 21 | mass: 0.9, 22 | stiffness: 100, 23 | damping: 20, // Critical ~19 24 | precision: 0.1, 25 | }; 26 | 27 | export const SCALE_SPRING: SpringConfig = { 28 | mass: 2, 29 | stiffness: 100, 30 | damping: 28, // Increased damping 31 | precision: 0.01, 32 | }; 33 | 34 | // --- Apple Music Style Physics Presets --- 35 | 36 | // Past lines: Very High stiffness. 37 | // When a line moves from Active -> Past, it should "snap" up out of the way quickly. 38 | export const PAST_SPRING: SpringConfig = { 39 | mass: 1, 40 | stiffness: 350, // Very stiff 41 | damping: 45, // High damping to prevent bounce on the snap 42 | precision: 0.1, 43 | }; 44 | 45 | // Current line: Fast arrival, responsive. 46 | export const ACTIVE_SPRING: SpringConfig = { 47 | mass: 1, 48 | stiffness: 220, // Fast response 49 | damping: 30, // Critical damping 50 | precision: 0.1, 51 | }; 52 | 53 | // Future lines: Low stiffness (loose spring). 54 | // This creates the "drag" effect where they scroll slower than the active line. 
55 | export const FUTURE_SPRING: SpringConfig = { 56 | mass: 1.2, 57 | stiffness: 70, // Soft/Loose spring 58 | damping: 20, // Sufficient damping to avoid oscillation 59 | precision: 0.1, 60 | }; 61 | 62 | // Seek Spring: Faster than camera, but smooth 63 | export const SEEK_SPRING: SpringConfig = { 64 | mass: 1, 65 | stiffness: 180, 66 | damping: 30, 67 | precision: 0.1, 68 | }; 69 | 70 | // Camera Spring: Smooth global scrolling 71 | export const CAMERA_SPRING: SpringConfig = { 72 | mass: 1, 73 | stiffness: 100, // Smooth but responsive 74 | damping: 25, 75 | precision: 0.1, 76 | }; 77 | 78 | // Interlude Spring: Smooth expansion/collapse 79 | export const INTERLUDE_SPRING: SpringConfig = { 80 | mass: 1, 81 | stiffness: 120, 82 | damping: 20, 83 | precision: 0.001, 84 | }; 85 | 86 | export class SpringSystem { 87 | private current: Record = {}; 88 | private target: Record = {}; 89 | private velocity: Record = {}; 90 | private config: Record = {}; 91 | 92 | constructor(initialValues: Record) { 93 | this.current = { ...initialValues }; 94 | this.target = { ...initialValues }; 95 | // Initialize velocities to 0 96 | Object.keys(initialValues).forEach((k) => (this.velocity[k] = 0)); 97 | } 98 | 99 | setTarget(key: string, value: number, config: SpringConfig = DEFAULT_SPRING) { 100 | this.target[key] = value; 101 | this.config[key] = config; 102 | if (this.velocity[key] === undefined) this.velocity[key] = 0; 103 | if (this.current[key] === undefined) this.current[key] = value; 104 | } 105 | 106 | // Force a value immediately (reset) 107 | setValue(key: string, value: number) { 108 | this.current[key] = value; 109 | this.target[key] = value; 110 | this.velocity[key] = 0; 111 | } 112 | 113 | // Inject momentum (e.g. 
scroll flick) 114 | setVelocity(key: string, value: number) { 115 | this.velocity[key] = value; 116 | } 117 | 118 | getCurrent(key: string): number { 119 | return this.current[key] || 0; 120 | } 121 | 122 | getTarget(key: string): number { 123 | return this.target[key] || 0; 124 | } 125 | 126 | getVelocity(key: string): number { 127 | return this.velocity[key] || 0; 128 | } 129 | 130 | update(dt: number): boolean { 131 | let isMoving = false; 132 | 133 | Object.keys(this.current).forEach((key) => { 134 | const p = this.config[key] || DEFAULT_SPRING; 135 | const current = this.current[key]; 136 | const target = this.target[key] ?? current; 137 | const velocity = this.velocity[key] ?? 0; 138 | 139 | // Spring Force Calculation (Hooke's Law + Damping) 140 | // F = -k(x - target) - c(v) 141 | const displacement = current - target; 142 | const springForce = -p.stiffness * displacement; 143 | const dampingForce = -p.damping * velocity; 144 | const acceleration = (springForce + dampingForce) / p.mass; 145 | 146 | const newVelocity = velocity + acceleration * dt; 147 | const newPosition = current + newVelocity * dt; 148 | 149 | const precision = p.precision ?? 
0.001; 150 | 151 | // Removed overshoot check which caused the snapping effect 152 | // We rely on critical/over-damping and low velocity threshold 153 | const isNearRest = 154 | Math.abs(newVelocity) < precision && 155 | Math.abs(newPosition - target) < precision; 156 | 157 | if (isNearRest) { 158 | this.current[key] = target; 159 | this.velocity[key] = 0; 160 | } else { 161 | this.current[key] = newPosition; 162 | this.velocity[key] = newVelocity; 163 | isMoving = true; 164 | } 165 | }); 166 | 167 | return isMoving; 168 | } 169 | } 170 | -------------------------------------------------------------------------------- /components/MediaSessionController.tsx: -------------------------------------------------------------------------------- 1 | import React, { useEffect, useState } from "react"; 2 | import { PlayState, Song } from "../types"; 3 | import { fetchImageBlobWithCache } from "../services/cache"; 4 | 5 | const MEDIA_SESSION_SEEK_STEP = 10; 6 | 7 | interface MediaSessionControllerProps { 8 | currentSong: Song | null; 9 | playState: PlayState; 10 | currentTime: number; 11 | duration: number; 12 | playbackRate: number; 13 | onPlay: () => void; 14 | onPause: () => void; 15 | onNext: () => void; 16 | onPrev: () => void; 17 | onSeek: (time: number, playImmediately?: boolean) => void; 18 | } 19 | 20 | const clamp = (value: number, min: number, max: number) => 21 | Math.max(min, Math.min(max, value)); 22 | 23 | const MediaSessionController: React.FC = ({ 24 | currentSong, 25 | playState, 26 | currentTime, 27 | duration, 28 | playbackRate, 29 | onPlay, 30 | onPause, 31 | onNext, 32 | onPrev, 33 | onSeek, 34 | }) => { 35 | const [artworkSrc, setArtworkSrc] = useState(null); 36 | 37 | useEffect(() => { 38 | let canceled = false; 39 | let objectUrl: string | null = null; 40 | 41 | if (!currentSong?.coverUrl) { 42 | setArtworkSrc(null); 43 | return () => { 44 | if (objectUrl) { 45 | URL.revokeObjectURL(objectUrl); 46 | } 47 | }; 48 | } 49 | 50 | 
fetchImageBlobWithCache(currentSong.coverUrl) 51 | .then((blob) => { 52 | if (canceled) return; 53 | objectUrl = URL.createObjectURL(blob); 54 | setArtworkSrc(objectUrl); 55 | }) 56 | .catch(() => { 57 | if (!canceled) { 58 | setArtworkSrc(null); 59 | } 60 | }); 61 | 62 | return () => { 63 | canceled = true; 64 | if (objectUrl) { 65 | URL.revokeObjectURL(objectUrl); 66 | } 67 | }; 68 | }, [currentSong?.coverUrl]); 69 | 70 | useEffect(() => { 71 | if (typeof navigator === "undefined" || !("mediaSession" in navigator)) { 72 | return; 73 | } 74 | 75 | const mediaSession = navigator.mediaSession; 76 | 77 | if (!currentSong) { 78 | mediaSession.metadata = null; 79 | } else if (typeof window !== "undefined" && "MediaMetadata" in window) { 80 | mediaSession.metadata = new window.MediaMetadata({ 81 | title: currentSong.title, 82 | artist: currentSong.artist, 83 | album: currentSong.album ?? undefined, 84 | artwork: 85 | artworkSrc || currentSong.coverUrl 86 | ? [ 87 | { 88 | src: artworkSrc || currentSong.coverUrl!, 89 | sizes: "512x512", 90 | type: "image/jpeg", 91 | }, 92 | ] 93 | : undefined, 94 | }); 95 | } 96 | 97 | mediaSession.playbackState = 98 | playState === PlayState.PLAYING ? 
"playing" : "paused"; 99 | }, [currentSong, playState, artworkSrc]); 100 | 101 | useEffect(() => { 102 | if ( 103 | typeof navigator === "undefined" || 104 | !("mediaSession" in navigator) || 105 | duration <= 0 || 106 | !Number.isFinite(currentTime) 107 | ) { 108 | return; 109 | } 110 | 111 | const mediaSession = navigator.mediaSession; 112 | if (typeof mediaSession.setPositionState === "function") { 113 | mediaSession.setPositionState({ 114 | duration, 115 | playbackRate, 116 | position: clamp(currentTime, 0, duration), 117 | }); 118 | } 119 | }, [currentTime, duration, playbackRate]); 120 | 121 | useEffect(() => { 122 | if (typeof navigator === "undefined" || !("mediaSession" in navigator)) { 123 | return; 124 | } 125 | 126 | const mediaSession = navigator.mediaSession; 127 | 128 | const clampedSeek = (time: number) => { 129 | const target = clamp(time, 0, duration || 0); 130 | onSeek(target, playState === PlayState.PLAYING); 131 | }; 132 | 133 | const seekToHandler = (details?: MediaSessionActionDetails) => { 134 | if (details && typeof details.seekTime === "number") { 135 | clampedSeek(details.seekTime); 136 | } 137 | }; 138 | 139 | const handlers: Array< 140 | [MediaSessionAction, MediaSessionActionHandler | null] 141 | > = [ 142 | ["play", onPlay], 143 | ["pause", onPause], 144 | ["previoustrack", onPrev], 145 | ["nexttrack", onNext], 146 | ["seekto", seekToHandler], 147 | [ 148 | "seekbackward", 149 | (details?: MediaSessionActionDetails) => { 150 | const offset = details?.seekOffset ?? MEDIA_SESSION_SEEK_STEP; 151 | clampedSeek(currentTime - offset); 152 | }, 153 | ], 154 | [ 155 | "seekforward", 156 | (details?: MediaSessionActionDetails) => { 157 | const offset = details?.seekOffset ?? 
MEDIA_SESSION_SEEK_STEP; 158 | clampedSeek(currentTime + offset); 159 | }, 160 | ], 161 | ]; 162 | 163 | handlers.forEach(([action, handler]) => { 164 | try { 165 | mediaSession.setActionHandler(action, handler); 166 | } catch (error) { 167 | // Some browsers restrict certain actions; ignore failures. 168 | console.debug(`MediaSession handler registration failed for ${action}`, error); 169 | } 170 | }); 171 | 172 | return () => { 173 | handlers.forEach(([action]) => { 174 | try { 175 | mediaSession.setActionHandler(action, null); 176 | } catch { 177 | // ignore 178 | } 179 | }); 180 | }; 181 | }, [ 182 | onPlay, 183 | onPause, 184 | onNext, 185 | onPrev, 186 | onSeek, 187 | duration, 188 | currentTime, 189 | playState, 190 | ]); 191 | 192 | return null; 193 | }; 194 | 195 | export default MediaSessionController; 196 | -------------------------------------------------------------------------------- /components/AboutDialog.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import { createPortal } from "react-dom"; 3 | import { AuraLogo } from "./Icons"; 4 | 5 | interface AboutDialogProps { 6 | isOpen: boolean; 7 | onClose: () => void; 8 | } 9 | 10 | const AboutDialog: React.FC = ({ isOpen, onClose }) => { 11 | if (!isOpen) return null; 12 | 13 | return createPortal( 14 |
17 | 28 | 29 | {/* Shared backdrop */} 30 |
34 | 35 | {/* Modal */} 36 |
e.stopPropagation()} 39 | > 40 | {/* Decorative Gradient Blob */} 41 |
42 | 43 | {/* Content */} 44 |
45 | {/* Logo Section */} 46 |
47 |
48 |
49 | 50 |
51 |
52 | 53 | {/* Title & Version */} 54 |

55 | Aura Music 56 |

57 |
58 | v1.4.0 • Web 59 |
60 | 61 | {/* Description */} 62 |

63 | An experimental, pure web music player crafted with 64 | 65 | Vibe Coding 66 | 67 | technology. 68 |

69 | 70 | {/* Tech Stack Grid */} 71 |
72 | 73 | 74 | 75 |
76 | 77 | {/* Selection List */} 78 | 101 |
102 | 103 | {/* Footer / Close */} 104 |
105 | 111 |
112 |
113 |
, 114 | document.body 115 | ); 116 | }; 117 | 118 | const TechBadge = ({ label }: { label: string }) => ( 119 |
120 | {label} 121 |
122 | ); 123 | 124 | export default AboutDialog; 125 | -------------------------------------------------------------------------------- /components/visualizer/Visualizer.tsx: -------------------------------------------------------------------------------- 1 | import React, { useEffect, useRef } from 'react'; 2 | import audioProcessorUrl from './AudioProcessor.ts?worker&url'; 3 | 4 | interface VisualizerProps { 5 | audioRef: React.RefObject; 6 | isPlaying: boolean; 7 | } 8 | 9 | // Global map to store source nodes to prevent "MediaElementAudioSourceNode" double-connection errors 10 | const sourceMap = new WeakMap(); 11 | const contextMap = new WeakMap(); 12 | 13 | const Visualizer: React.FC = ({ audioRef, isPlaying }) => { 14 | const canvasRef = useRef(null); 15 | const workerRef = useRef(null); 16 | const audioContextRef = useRef(null); 17 | const workletNodeRef = useRef(null); 18 | 19 | // Effect 1: Audio Context and Worklet Initialization 20 | useEffect(() => { 21 | const initAudio = async () => { 22 | if (!audioRef.current) return; 23 | const audioEl = audioRef.current; 24 | 25 | let ctx = contextMap.get(audioEl); 26 | if (!ctx) { 27 | ctx = new (window.AudioContext || (window as any).webkitAudioContext)(); 28 | contextMap.set(audioEl, ctx); 29 | } 30 | audioContextRef.current = ctx; 31 | 32 | if (ctx.state === 'suspended' && isPlaying) { 33 | await ctx.resume(); 34 | } 35 | 36 | // Load AudioWorklet 37 | if (!workletNodeRef.current) { 38 | try { 39 | console.log("Visualizer: Loading AudioWorklet module..."); 40 | // Load the module using a URL pointing to the JS file 41 | await ctx.audioWorklet.addModule(audioProcessorUrl); 42 | console.log("Visualizer: AudioWorklet module loaded successfully."); 43 | 44 | const workletNode = new AudioWorkletNode(ctx, 'audio-processor'); 45 | workletNode.port.onmessage = (e) => { 46 | console.log("Visualizer: Message from Worklet:", e.data); 47 | }; 48 | workletNodeRef.current = workletNode; 49 | console.log("Visualizer: 
AudioWorkletNode created."); 50 | 51 | // Connect Source -> Worklet -> Destination 52 | if (!sourceMap.has(audioEl)) { 53 | const source = ctx.createMediaElementSource(audioEl); 54 | source.connect(ctx.destination); // Output to speakers 55 | source.connect(workletNode); // Output to visualizer 56 | sourceMap.set(audioEl, source); 57 | } else { 58 | const source = sourceMap.get(audioEl); 59 | if (source) { 60 | // Ensure connection 61 | try { source.connect(workletNode); } catch (e) { } 62 | } 63 | } 64 | 65 | } catch (e) { 66 | console.error("Visualizer: Failed to load AudioWorklet", e); 67 | } 68 | } 69 | }; 70 | 71 | if (isPlaying) { 72 | initAudio(); 73 | } 74 | 75 | return () => { 76 | // Cleanup logic if needed 77 | }; 78 | }, [isPlaying, audioRef]); 79 | 80 | // Effect 2: Worker Initialization 81 | useEffect(() => { 82 | if (!isPlaying) { 83 | if (workerRef.current) { 84 | workerRef.current.postMessage({ type: 'DESTROY' }); 85 | workerRef.current.terminate(); 86 | workerRef.current = null; 87 | } 88 | return; 89 | } 90 | 91 | const canvasEl = canvasRef.current; 92 | if (!canvasEl) { 93 | return; 94 | } 95 | 96 | if (workerRef.current) { 97 | return; 98 | } 99 | 100 | const isOffscreenSupported = !!canvasEl.transferControlToOffscreen; 101 | if (!isOffscreenSupported) { 102 | console.warn("Visualizer: OffscreenCanvas not available, skipping worker"); 103 | return; 104 | } 105 | 106 | try { 107 | const worker = new Worker(new URL('./VisualizerWorker.ts', import.meta.url), { 108 | type: 'module' 109 | }); 110 | workerRef.current = worker; 111 | 112 | const dpr = window.devicePixelRatio || 1; 113 | canvasEl.width = 320 * dpr; 114 | canvasEl.height = 32 * dpr; 115 | 116 | const offscreen = canvasEl.transferControlToOffscreen(); 117 | 118 | const channel = new MessageChannel(); 119 | 120 | worker.postMessage( 121 | { 122 | type: 'INIT', 123 | canvas: offscreen, 124 | config: { 125 | barCount: 128, 126 | gap: 2, 127 | fftSize: 256, 128 | smoothingTimeConstant: 0.5, 
129 | dpr: dpr 130 | }, 131 | port: channel.port1 132 | }, 133 | [offscreen, channel.port1] 134 | ); 135 | 136 | const sendPortToWorklet = () => { 137 | if (workletNodeRef.current) { 138 | workletNodeRef.current.port.postMessage({ type: 'PORT', port: channel.port2 }, [ 139 | channel.port2 140 | ]); 141 | } else { 142 | requestAnimationFrame(sendPortToWorklet); 143 | } 144 | }; 145 | sendPortToWorklet(); 146 | } catch (e) { 147 | console.error("Visualizer: Failed to initialize worker", e); 148 | } 149 | 150 | return () => { 151 | if (workerRef.current) { 152 | workerRef.current.postMessage({ type: 'DESTROY' }); 153 | workerRef.current.terminate(); 154 | workerRef.current = null; 155 | } 156 | }; 157 | }, [isPlaying]); 158 | 159 | if (!isPlaying) return
; 160 | 161 | return ( 162 | 168 | ); 169 | }; 170 | 171 | export default Visualizer; 172 | -------------------------------------------------------------------------------- /components/TopBar.tsx: -------------------------------------------------------------------------------- 1 | import React, { useRef, useState } from "react"; 2 | import { AuraLogo, SearchIcon, CloudDownloadIcon, InfoIcon, FullscreenIcon } from "./Icons"; 3 | import AboutDialog from "./AboutDialog"; 4 | 5 | interface TopBarProps { 6 | onFilesSelected: (files: FileList) => void; 7 | onSearchClick: () => void; 8 | disabled?: boolean; 9 | } 10 | 11 | const TopBar: React.FC = ({ 12 | onFilesSelected, 13 | onSearchClick, 14 | disabled, 15 | }) => { 16 | const fileInputRef = useRef(null); 17 | const [isAboutOpen, setIsAboutOpen] = useState(false); 18 | const [isFullscreen, setIsFullscreen] = useState(false); 19 | const [isTopBarActive, setIsTopBarActive] = useState(false); 20 | const hideTimeoutRef = useRef | null>(null); 21 | 22 | const toggleFullscreen = () => { 23 | if (!document.fullscreenElement) { 24 | document.documentElement.requestFullscreen().then(() => { 25 | setIsFullscreen(true); 26 | }).catch((err) => { 27 | console.error(`Error attempting to enable fullscreen: ${err.message} (${err.name})`); 28 | }); 29 | } else { 30 | if (document.exitFullscreen) { 31 | document.exitFullscreen().then(() => { 32 | setIsFullscreen(false); 33 | }); 34 | } 35 | } 36 | }; 37 | 38 | const activateTopBar = () => { 39 | if (hideTimeoutRef.current) { 40 | clearTimeout(hideTimeoutRef.current); 41 | } 42 | setIsTopBarActive(true); 43 | hideTimeoutRef.current = setTimeout(() => { 44 | setIsTopBarActive(false); 45 | hideTimeoutRef.current = null; 46 | }, 2500); 47 | }; 48 | 49 | const handlePointerDownCapture = (event: React.PointerEvent) => { 50 | if (event.pointerType !== "touch") { 51 | return; 52 | } 53 | 54 | const wasActive = isTopBarActive; 55 | 56 | if (!wasActive) { 57 | event.preventDefault(); 58 | 
event.stopPropagation(); 59 | } 60 | 61 | activateTopBar(); 62 | }; 63 | 64 | React.useEffect(() => { 65 | const handleFullscreenChange = () => { 66 | setIsFullscreen(!!document.fullscreenElement); 67 | }; 68 | 69 | document.addEventListener("fullscreenchange", handleFullscreenChange); 70 | return () => { 71 | document.removeEventListener("fullscreenchange", handleFullscreenChange); 72 | }; 73 | }, []); 74 | 75 | React.useEffect(() => { 76 | return () => { 77 | if (hideTimeoutRef.current) { 78 | clearTimeout(hideTimeoutRef.current); 79 | } 80 | }; 81 | }, []); 82 | 83 | const handleFileChange = (e: React.ChangeEvent) => { 84 | const files = e.target.files; 85 | if (files && files.length > 0) { 86 | onFilesSelected(files); 87 | } 88 | e.target.value = ""; 89 | }; 90 | 91 | const baseTransitionClasses = "transition-all duration-500 ease-out"; 92 | const mobileActiveClasses = isTopBarActive 93 | ? "opacity-100 translate-y-0 pointer-events-auto" 94 | : "opacity-0 -translate-y-2 pointer-events-none"; 95 | const hoverSupportClasses = "group-hover:opacity-100 group-hover:translate-y-0 group-hover:pointer-events-auto"; 96 | 97 | return ( 98 |
102 | {/* Blur Background Layer (Animate in) */} 103 |
106 | 107 | {/* Content (Animate in) */} 108 |
109 | {/* Logo / Title */} 110 |
111 |
112 | 113 |
114 |

115 | Aura Music 116 |

117 |
118 | 119 | {/* Actions (iOS 18 Style Glass Buttons) */} 120 |
123 | {/* Search Button */} 124 | 131 | 132 | {/* Import Button */} 133 | 141 | 142 | {/* About Button */} 143 | 150 | 151 | {/* Fullscreen Button */} 152 | 159 | 160 | 168 |
169 |
170 | setIsAboutOpen(false)} /> 171 |
172 | ); 173 | }; 174 | 175 | export default TopBar; 176 | -------------------------------------------------------------------------------- /services/utils.ts: -------------------------------------------------------------------------------- 1 | import { LyricLine } from "../types"; 2 | import { parseLyrics } from "./lyrics"; 3 | import { loadImageElementWithCache } from "./cache"; 4 | 5 | // Declare global for the script loaded in index.html 6 | declare const jsmediatags: any; 7 | declare const ColorThief: any; 8 | 9 | export const formatTime = (seconds: number): string => { 10 | if (isNaN(seconds)) return "0:00"; 11 | const mins = Math.floor(seconds / 60); 12 | const secs = Math.floor(seconds % 60); 13 | return `${mins}:${secs.toString().padStart(2, "0")}`; 14 | }; 15 | 16 | export const shuffleArray = (array: T[]): T[] => { 17 | const newArr = [...array]; 18 | for (let i = newArr.length - 1; i > 0; i--) { 19 | const j = Math.floor(Math.random() * (i + 1)); 20 | [newArr[i], newArr[j]] = [newArr[j], newArr[i]]; 21 | } 22 | return newArr; 23 | }; 24 | 25 | // Helper to request via CORS proxy (api.allorigins.win is reliable for GET requests) 26 | // Try direct request first, fallback to proxy if CORS fails 27 | export const fetchViaProxy = async (targetUrl: string): Promise => { 28 | let text: string; 29 | 30 | // 1. Try direct request first 31 | try { 32 | const response = await fetch(targetUrl); 33 | if (!response.ok) { 34 | throw new Error( 35 | `Direct fetch failed with status: ${response.status} ${targetUrl}`, 36 | ); 37 | } 38 | text = await response.text(); 39 | return JSON.parse(text); 40 | } catch (directError) { 41 | // 2. 
Direct request failed (likely CORS), try proxy 42 | console.warn( 43 | "Direct fetch failed (likely CORS), trying proxy:", 44 | directError, 45 | ); 46 | 47 | try { 48 | const proxyUrl = `https://api.allorigins.win/raw?url=${encodeURIComponent(targetUrl)}`; 49 | const response = await fetch(proxyUrl); 50 | if (!response.ok) { 51 | throw new Error(`Proxy fetch failed with status: ${response.status}`); 52 | } 53 | text = await response.text(); 54 | return JSON.parse(text); 55 | } catch (proxyError) { 56 | console.error( 57 | "Both direct and proxy requests failed:", 58 | proxyError, 59 | targetUrl, 60 | ); 61 | throw proxyError; 62 | } 63 | } 64 | }; 65 | 66 | export const parseNeteaseLink = ( 67 | input: string, 68 | ): { type: "song" | "playlist"; id: string } | null => { 69 | try { 70 | const url = new URL(input); 71 | const params = new URLSearchParams(url.search); 72 | // Handle music.163.com/#/song?id=... (Hash router) 73 | if (url.hash.includes("/song") || url.hash.includes("/playlist")) { 74 | const hashParts = url.hash.split("?"); 75 | if (hashParts.length > 1) { 76 | const hashParams = new URLSearchParams(hashParts[1]); 77 | const id = hashParams.get("id"); 78 | if (id) { 79 | if (url.hash.includes("/song")) return { type: "song", id }; 80 | if (url.hash.includes("/playlist")) return { type: "playlist", id }; 81 | } 82 | } 83 | } 84 | // Handle standard params 85 | const id = params.get("id"); 86 | if (id) { 87 | if (url.pathname.includes("song")) return { type: "song", id }; 88 | if (url.pathname.includes("playlist")) return { type: "playlist", id }; 89 | } 90 | return null; 91 | } catch (e) { 92 | return null; 93 | } 94 | }; 95 | 96 | /** 97 | * @deprecated Use parseLyrics from services/lyrics instead 98 | */ 99 | export const parseLrc = ( 100 | lrcContent: string, 101 | translationContent?: string, 102 | ): LyricLine[] => { 103 | return parseLyrics(lrcContent, translationContent); 104 | }; 105 | 106 | /** 107 | * @deprecated Use parseLyrics from 
services/lyrics instead 108 | */ 109 | export const mergeLyrics = (original: string, translation: string): string => { 110 | return original + "\n" + translation; 111 | }; 112 | 113 | // Metadata Parser using jsmediatags 114 | export const parseAudioMetadata = ( 115 | file: File, 116 | ): Promise<{ 117 | title?: string; 118 | artist?: string; 119 | picture?: string; 120 | lyrics?: string; 121 | }> => { 122 | return new Promise((resolve) => { 123 | if (typeof jsmediatags === "undefined") { 124 | console.warn("jsmediatags not loaded"); 125 | resolve({}); 126 | return; 127 | } 128 | 129 | try { 130 | jsmediatags.read(file, { 131 | onSuccess: (tag: any) => { 132 | try { 133 | const tags = tag.tags; 134 | let pictureUrl = undefined; 135 | let lyricsText = undefined; 136 | 137 | if (tags.picture) { 138 | const { data, format } = tags.picture; 139 | let base64String = ""; 140 | const len = data.length; 141 | for (let i = 0; i < len; i++) { 142 | base64String += String.fromCharCode(data[i]); 143 | } 144 | pictureUrl = `data:${format};base64,${window.btoa(base64String)}`; 145 | } 146 | 147 | // Extract embedded lyrics (USLT tag for unsynchronized lyrics) 148 | // Some formats also use "lyrics" or "LYRICS" tag 149 | if (tags.USLT) { 150 | // USLT can be an object with lyrics.text or just a string 151 | lyricsText = 152 | typeof tags.USLT === "object" 153 | ? 
tags.USLT.lyrics || tags.USLT.text 154 | : tags.USLT; 155 | } else if (tags.lyrics) { 156 | lyricsText = tags.lyrics; 157 | } else if (tags.LYRICS) { 158 | lyricsText = tags.LYRICS; 159 | } 160 | 161 | resolve({ 162 | title: tags.title, 163 | artist: tags.artist, 164 | picture: pictureUrl, 165 | lyrics: lyricsText, 166 | }); 167 | } catch (innerErr) { 168 | console.error("Error parsing tags structure:", innerErr); 169 | resolve({}); 170 | } 171 | }, 172 | onError: (error: any) => { 173 | console.warn("Error reading tags:", error); 174 | resolve({}); 175 | }, 176 | }); 177 | } catch (err) { 178 | console.error("jsmediatags crashed:", err); 179 | resolve({}); 180 | } 181 | }); 182 | }; 183 | 184 | export const extractColors = async (imageSrc: string): Promise => { 185 | if (typeof ColorThief === "undefined") { 186 | console.warn("ColorThief not loaded"); 187 | return ["#4f46e5", "#db2777", "#1f2937"]; 188 | } 189 | 190 | try { 191 | const img = await loadImageElementWithCache(imageSrc); 192 | const colorThief = new ColorThief(); 193 | const palette = colorThief.getPalette(img, 5); 194 | 195 | if (!palette || palette.length === 0) { 196 | return []; 197 | } 198 | 199 | const vibrantCandidates = palette.filter((rgb: number[]) => { 200 | const lum = 0.2126 * rgb[0] + 0.7152 * rgb[1] + 0.0722 * rgb[2]; 201 | return lum > 30; 202 | }); 203 | 204 | const candidates = 205 | vibrantCandidates.length > 0 ? 
vibrantCandidates : palette; 206 | 207 | candidates.sort((a: number[], b: number[]) => { 208 | const satA = Math.max(...a) - Math.min(...a); 209 | const satB = Math.max(...b) - Math.min(...b); 210 | return satB - satA; 211 | }); 212 | 213 | const topColors = candidates.slice(0, 4); 214 | return topColors.map((c: number[]) => `rgb(${c[0]}, ${c[1]}, ${c[2]})`); 215 | } catch (err) { 216 | console.warn("Color extraction failed", err); 217 | return []; 218 | } 219 | }; 220 | -------------------------------------------------------------------------------- /hooks/useSearchModal.ts: -------------------------------------------------------------------------------- 1 | import { useState, useEffect, useCallback, useRef } from "react"; 2 | import { Song } from "../types"; 3 | import { NeteaseTrackInfo } from "../services/lyricsService"; 4 | import { useQueueSearchProvider } from "./useQueueSearchProvider"; 5 | import { 6 | useNeteaseSearchProvider, 7 | NeteaseSearchProviderExtended, 8 | } from "./useNeteaseSearchProvider"; 9 | 10 | export type SearchSource = "queue" | "netease"; 11 | export type SearchResultItem = Song | NeteaseTrackInfo; 12 | 13 | interface ContextMenuState { 14 | visible: boolean; 15 | x: number; 16 | y: number; 17 | track: SearchResultItem; 18 | type: SearchSource; 19 | } 20 | 21 | interface UseSearchModalParams { 22 | queue: Song[]; 23 | currentSong: Song | null; 24 | isPlaying: boolean; 25 | isOpen: boolean; 26 | } 27 | 28 | export const useSearchModal = ({ 29 | queue, 30 | currentSong, 31 | isPlaying, 32 | isOpen, 33 | }: UseSearchModalParams) => { 34 | // Search query state 35 | const [query, setQuery] = useState(""); 36 | const [activeTab, setActiveTab] = useState("queue"); 37 | 38 | // Navigation State 39 | const [selectedIndex, setSelectedIndex] = useState(-1); 40 | const itemRefs = useRef<(HTMLDivElement | null)[]>([]); 41 | 42 | // Context Menu State 43 | const [contextMenu, setContextMenu] = useState(null); 44 | 45 | // Search Providers 46 | const 
queueProvider = useQueueSearchProvider({ queue }); 47 | const neteaseProvider = useNeteaseSearchProvider(); 48 | 49 | // Queue search results (real-time) 50 | const [queueResults, setQueueResults] = useState<{ s: Song; i: number }[]>( 51 | [], 52 | ); 53 | 54 | // Offset for Netease pagination 55 | const [neteaseOffset, setNeteaseOffset] = useState(0); 56 | const LIMIT = 30; 57 | 58 | // Update queue results in real-time 59 | useEffect(() => { 60 | if (activeTab === "queue") { 61 | queueProvider.search(query).then((results) => { 62 | const mappedResults = (results as Song[]).map((s) => { 63 | const originalIndex = queue.findIndex((qs) => qs.id === s.id); 64 | return { s, i: originalIndex }; 65 | }); 66 | setQueueResults(mappedResults); 67 | }); 68 | } 69 | }, [query, activeTab, queue]); 70 | 71 | // Reset selected index when switching tabs or query changes 72 | useEffect(() => { 73 | setSelectedIndex(-1); 74 | }, [activeTab, query]); 75 | 76 | // Reset context menu when modal closes 77 | useEffect(() => { 78 | if (!isOpen) { 79 | setContextMenu(null); 80 | } 81 | }, [isOpen]); 82 | 83 | // --- Search Actions --- 84 | 85 | const performNeteaseSearch = useCallback(async () => { 86 | if (!query.trim()) return; 87 | setNeteaseOffset(0); 88 | setSelectedIndex(-1); 89 | await neteaseProvider.performSearch(query); 90 | }, [query, neteaseProvider]); 91 | 92 | const loadMoreNetease = useCallback(async () => { 93 | if (neteaseProvider.isLoading || !neteaseProvider.hasMore) return; 94 | const nextOffset = neteaseOffset + LIMIT; 95 | await neteaseProvider.loadMore(query, nextOffset, LIMIT); 96 | setNeteaseOffset(nextOffset); 97 | }, [neteaseProvider, neteaseOffset, query]); 98 | 99 | const handleScroll = useCallback( 100 | (e: React.UIEvent) => { 101 | if (activeTab !== "netease") return; 102 | const { scrollTop, clientHeight, scrollHeight } = e.currentTarget; 103 | if (scrollHeight - scrollTop - clientHeight < 100) { 104 | loadMoreNetease(); 105 | } 106 | }, 107 | [activeTab, 
loadMoreNetease], 108 | ); 109 | 110 | // --- Navigation --- 111 | 112 | const scrollToItem = useCallback((index: number) => { 113 | const el = itemRefs.current[index]; 114 | if (el) { 115 | el.scrollIntoView({ block: "nearest", behavior: "smooth" }); 116 | } 117 | }, []); 118 | 119 | const navigateDown = useCallback(() => { 120 | const listLength = 121 | activeTab === "queue" 122 | ? queueResults.length 123 | : neteaseProvider.results.length; 124 | if (listLength === 0) return; 125 | 126 | const next = Math.min(selectedIndex + 1, listLength - 1); 127 | setSelectedIndex(next); 128 | scrollToItem(next); 129 | }, [ 130 | activeTab, 131 | selectedIndex, 132 | queueResults.length, 133 | neteaseProvider.results.length, 134 | scrollToItem, 135 | ]); 136 | 137 | const navigateUp = useCallback(() => { 138 | const prev = Math.max(selectedIndex - 1, 0); 139 | setSelectedIndex(prev); 140 | scrollToItem(prev); 141 | }, [selectedIndex, scrollToItem]); 142 | 143 | const switchTab = useCallback(() => { 144 | setActiveTab((prev) => (prev === "queue" ? 
"netease" : "queue")); 145 | setSelectedIndex(-1); 146 | }, []); 147 | 148 | // --- Context Menu --- 149 | 150 | const openContextMenu = useCallback( 151 | (e: React.MouseEvent, item: SearchResultItem, type: SearchSource) => { 152 | e.preventDefault(); 153 | let x = e.clientX; 154 | let y = e.clientY; 155 | 156 | if (x + 200 > window.innerWidth) x -= 200; 157 | if (y + 100 > window.innerHeight) y -= 100; 158 | 159 | setContextMenu({ 160 | visible: true, 161 | x, 162 | y, 163 | track: item, 164 | type, 165 | }); 166 | }, 167 | [], 168 | ); 169 | 170 | const closeContextMenu = useCallback(() => { 171 | setContextMenu(null); 172 | }, []); 173 | 174 | // --- Now Playing Matcher --- 175 | const isNowPlaying = useCallback( 176 | (item: SearchResultItem) => { 177 | if (!currentSong) return false; 178 | if ("isNetease" in item && item.isNetease && currentSong.isNetease) { 179 | return item.neteaseId === currentSong.neteaseId; 180 | } 181 | return ( 182 | item.title === currentSong.title && item.artist === currentSong.artist 183 | ); 184 | }, 185 | [currentSong], 186 | ); 187 | 188 | // Determine what to show in results area 189 | const showNeteasePrompt = 190 | activeTab === "netease" && 191 | !neteaseProvider.hasSearched && 192 | query.trim().length > 0; 193 | 194 | const showNeteaseEmpty = 195 | activeTab === "netease" && 196 | neteaseProvider.hasSearched && 197 | neteaseProvider.results.length === 0 && 198 | !neteaseProvider.isLoading; 199 | 200 | const showNeteaseLoading = 201 | activeTab === "netease" && 202 | neteaseProvider.isLoading && 203 | neteaseProvider.results.length === 0; 204 | 205 | const showNeteaseInitial = 206 | activeTab === "netease" && 207 | !neteaseProvider.hasSearched && 208 | query.trim().length === 0; 209 | 210 | return { 211 | // State 212 | query, 213 | setQuery, 214 | activeTab, 215 | setActiveTab, 216 | selectedIndex, 217 | contextMenu, 218 | 219 | // Providers 220 | queueProvider, 221 | neteaseProvider, 222 | 223 | // Results 224 | 
queueResults, 225 | 226 | // Refs 227 | itemRefs, 228 | 229 | // Actions 230 | performNeteaseSearch, 231 | loadMoreNetease, 232 | handleScroll, 233 | 234 | // Navigation 235 | navigateDown, 236 | navigateUp, 237 | switchTab, 238 | scrollToItem, 239 | 240 | // Context Menu 241 | openContextMenu, 242 | closeContextMenu, 243 | 244 | // Helpers 245 | isNowPlaying, 246 | 247 | // Display flags 248 | showNeteasePrompt, 249 | showNeteaseEmpty, 250 | showNeteaseInitial, 251 | showNeteaseLoading, 252 | 253 | // Constants 254 | LIMIT, 255 | }; 256 | }; 257 | -------------------------------------------------------------------------------- /components/visualizer/VisualizerWorker.ts: -------------------------------------------------------------------------------- 1 | // VisualizerWorker.ts 2 | 3 | export type WorkerMessage = 4 | | { type: 'INIT'; canvas: OffscreenCanvas; config: VisualizerConfig; port: MessagePort } 5 | | { type: 'AUDIO_DATA'; data: Float32Array } 6 | | { type: 'RESIZE'; width: number; height: number } 7 | | { type: 'DESTROY' }; 8 | 9 | export interface VisualizerConfig { 10 | barCount: number; 11 | gap: number; 12 | fftSize: number; 13 | smoothingTimeConstant: number; 14 | dpr?: number; 15 | } 16 | 17 | const ctx: Worker = self as any; 18 | 19 | console.log("VisualizerWorker: Worker script loaded"); 20 | 21 | let canvas: OffscreenCanvas | null = null; 22 | let canvasCtx: OffscreenCanvasRenderingContext2D | null = null; 23 | let config: VisualizerConfig | null = null; 24 | let animationFrameId: number | null = null; 25 | let workletPort: MessagePort | null = null; 26 | 27 | // Ring buffer for smoothing/history 28 | const BUFFER_SIZE = 2048; // Store enough history for a nice wave 29 | const historyBuffer = new Float32Array(BUFFER_SIZE); 30 | let historyIndex = 0; 31 | 32 | ctx.onmessage = (e: MessageEvent) => { 33 | const { type } = e.data; 34 | console.log("VisualizerWorker: Received message", type); 35 | 36 | switch (type) { 37 | case 'INIT': { 38 | const 
payload = e.data as { type: 'INIT'; canvas: OffscreenCanvas; config: VisualizerConfig; port: MessagePort }; 39 | console.log("VisualizerWorker: Initializing..."); 40 | canvas = payload.canvas; 41 | config = payload.config; 42 | canvasCtx = canvas.getContext('2d'); 43 | console.log("VisualizerWorker: Canvas context created", !!canvasCtx); 44 | 45 | // Setup port to worklet 46 | workletPort = payload.port; 47 | console.log("VisualizerWorker: Port received"); 48 | workletPort.onmessage = (ev) => { 49 | if (ev.data.type === 'AUDIO_DATA') { 50 | const newData = ev.data.data as Float32Array; 51 | // Write to ring buffer 52 | for (let i = 0; i < newData.length; i++) { 53 | historyBuffer[historyIndex] = newData[i]; 54 | historyIndex = (historyIndex + 1) % BUFFER_SIZE; 55 | } 56 | } 57 | }; 58 | 59 | startLoop(); 60 | break; 61 | } 62 | case 'AUDIO_DATA': { 63 | // This case is now handled by the workletPort.onmessage handler 64 | break; 65 | } 66 | case 'RESIZE': { 67 | const payload = e.data as { type: 'RESIZE'; width: number; height: number }; 68 | if (canvas) { 69 | canvas.width = payload.width; 70 | canvas.height = payload.height; 71 | } 72 | break; 73 | } 74 | case 'DESTROY': { 75 | console.log("VisualizerWorker: Destroying"); 76 | if (animationFrameId) { 77 | cancelAnimationFrame(animationFrameId); 78 | } 79 | if (workletPort) { 80 | workletPort.close(); 81 | } 82 | canvas = null; 83 | canvasCtx = null; 84 | break; 85 | } 86 | } 87 | }; 88 | 89 | function startLoop() { 90 | if (animationFrameId) cancelAnimationFrame(animationFrameId); 91 | 92 | const loop = () => { 93 | if (canvas && canvasCtx && config) { 94 | draw(canvasCtx, canvas.width, canvas.height); 95 | } 96 | animationFrameId = requestAnimationFrame(loop); 97 | }; 98 | loop(); 99 | } 100 | 101 | 102 | // State for bar smoothing 103 | let bars: number[] = []; 104 | 105 | function draw(ctx: OffscreenCanvasRenderingContext2D, width: number, height: number) { 106 | ctx.clearRect(0, 0, width, height); 107 | 108 | 
if (!config) return; 109 | 110 | const { barCount, gap, smoothingTimeConstant, dpr = 1 } = config; 111 | 112 | // Initialize bars if needed 113 | if (bars.length !== barCount) { 114 | bars = new Array(barCount).fill(0); 115 | } 116 | 117 | // Analyze audio data 118 | const windowSize = 4096; // Analyze last 4096 samples 119 | const recentData = new Float32Array(windowSize); 120 | 121 | // Copy recent data from ring buffer 122 | for (let i = 0; i < windowSize; i++) { 123 | const idx = (historyIndex - windowSize + i + BUFFER_SIZE) % BUFFER_SIZE; 124 | recentData[i] = historyBuffer[idx]; 125 | } 126 | 127 | // Calculate bar amplitudes 128 | // User wants "0 based" and "highest point represents the highest point of the entire frequency" 129 | // We will map amplitude 0-1 to height-0. 130 | // We use the full step for accuracy (no sparse sampling). 131 | 132 | const step = Math.floor(windowSize / barCount); 133 | let targetBars = new Array(barCount).fill(0); 134 | 135 | for (let i = 0; i < barCount; i++) { 136 | let maxVal = 0; 137 | const start = i * step; 138 | 139 | // Scan full step for accuracy 140 | for (let j = 0; j < step; j++) { 141 | if (start + j >= recentData.length) break; 142 | const val = Math.abs(recentData[start + j] || 0); 143 | if (val > maxVal) maxVal = val; 144 | } 145 | targetBars[i] = maxVal; 146 | } 147 | 148 | // Reverse direction: Newest data (right of window) should be on the Left (index 0) 149 | targetBars.reverse(); 150 | 151 | // Apply Savitzky-Golay Smoothing (Window Size 7) 152 | // "Front that one and last that one not needed anymore" -> Skip smoothing for edges 153 | const smoothedTarget = new Array(barCount).fill(0); 154 | for (let i = 0; i < barCount; i++) { 155 | if (i < 3 || i >= barCount - 3) { 156 | smoothedTarget[i] = targetBars[i]; 157 | } else { 158 | const y_m3 = targetBars[i - 3]; 159 | const y_m2 = targetBars[i - 2]; 160 | const y_m1 = targetBars[i - 1]; 161 | const y_0 = targetBars[i]; 162 | const y_p1 = targetBars[i + 1]; 
163 | const y_p2 = targetBars[i + 2]; 164 | const y_p3 = targetBars[i + 3]; 165 | 166 | const val = (-2 * y_m3 + 3 * y_m2 + 6 * y_m1 + 7 * y_0 + 6 * y_p1 + 3 * y_p2 - 2 * y_p3) / 21; 167 | smoothedTarget[i] = Math.max(0, val); 168 | } 169 | } 170 | targetBars = smoothedTarget; 171 | 172 | const effectiveHeight = height / dpr; 173 | const effectiveWidth = width / dpr; 174 | 175 | ctx.save(); 176 | ctx.scale(dpr, dpr); 177 | 178 | // Smooth the bars temporally 179 | for (let i = 0; i < barCount; i++) { 180 | const factor = 0.15; 181 | bars[i] += (targetBars[i] - bars[i]) * factor; 182 | } 183 | 184 | // Draw small rounded bars (cylinders) 185 | ctx.fillStyle = '#ffffff'; 186 | 187 | const barWidth = Math.max(1, effectiveWidth / barCount - 1); 188 | const barGap = Math.max(0.5, effectiveWidth / barCount - barWidth); 189 | 190 | for (let i = 0; i < barCount; i++) { 191 | let amplitude = bars[i]; 192 | if (amplitude > 1) amplitude = 1; 193 | 194 | const x = i * (barWidth + barGap); 195 | const barHeight = Math.max(2, amplitude * effectiveHeight); 196 | const y = effectiveHeight - barHeight; 197 | 198 | // Draw rounded rect (small cylinder appearance) 199 | ctx.beginPath(); 200 | ctx.roundRect(x, y, barWidth, barHeight, barWidth / 2); 201 | ctx.fill(); 202 | } 203 | 204 | ctx.restore(); 205 | } 206 | -------------------------------------------------------------------------------- /services/lyrics/parser.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Core parser infrastructure for lyrics formats. 
3 | * 4 | * This module provides tokenization and parsing utilities for: 5 | * - Standard LRC format with word-level timing 6 | * - Netease YRC format with word-level timing 7 | */ 8 | 9 | import { LyricLine, LyricWord } from "./types"; 10 | 11 | export const INTERLUDE_TEXT = "..."; 12 | 13 | // Configuration constants 14 | export const GAP_THRESHOLD = 10; // Seconds gap to insert interlude 15 | export const PRELUDE_THRESHOLD = 3; // Seconds before first lyric to insert prelude 16 | export const DEFAULT_DURATION = 4; // Default line duration estimate 17 | export const MIN_INTERLUDE_DURATION = 10; // Minimum silence to render interlude 18 | 19 | /** 20 | * Parse time tag string (mm:ss.xx or mm:ss.xxx) to seconds. 21 | */ 22 | export const parseTime = (timeStr: string): number => { 23 | const match = timeStr.match(/(\d{2}):(\d{2})\.(\d{2,3})/); 24 | if (!match) return 0; 25 | 26 | const minutes = parseInt(match[1], 10); 27 | const seconds = parseInt(match[2], 10); 28 | const msStr = match[3]; 29 | const ms = parseInt(msStr, 10); 30 | 31 | // .xx is centiseconds (10ms), .xxx is milliseconds 32 | const msValue = msStr.length === 3 ? ms / 1000 : ms / 100; 33 | 34 | return minutes * 60 + seconds + msValue; 35 | }; 36 | 37 | /** 38 | * Check if text contains only punctuation. 39 | */ 40 | export const isPunctuation = (text: string): boolean => { 41 | if (!text) return true; 42 | return !/[\p{L}\p{N}\p{Script=Han}\p{Script=Hiragana}\p{Script=Katakana}]/u.test( 43 | text 44 | ); 45 | }; 46 | 47 | /** 48 | * Create a word object. 49 | */ 50 | export const createWord = ( 51 | text: string, 52 | start: number, 53 | end: number 54 | ): LyricWord => ({ 55 | text, 56 | startTime: start, 57 | endTime: end, 58 | }); 59 | 60 | /** 61 | * Create a line object. 
62 | */ 63 | export const createLine = ( 64 | time: number, 65 | text: string, 66 | options?: { 67 | words?: LyricWord[]; 68 | translation?: string; 69 | isPreciseTiming?: boolean; 70 | isInterlude?: boolean; 71 | } 72 | ): LyricLine => ({ 73 | time, 74 | text, 75 | ...(options?.words?.length && { words: options.words }), 76 | ...(options?.translation && { translation: options.translation }), 77 | ...(options?.isPreciseTiming && { isPreciseTiming: true }), 78 | ...(options?.isInterlude && { isInterlude: true }), 79 | }); 80 | 81 | /** 82 | * Normalize text for comparison (remove punctuation, spaces, case). 83 | */ 84 | export const normalizeText = (text?: string): string => { 85 | if (!text) return ""; 86 | return text 87 | .toLowerCase() 88 | .replace(/['`´‘’']/g, "") 89 | .replace(/\s+/g, "") 90 | .replace(/[.,!?,。!?:;""\[\]()\-_/\\…()【】「」]/g, ""); 91 | }; 92 | 93 | /** 94 | * Merge punctuation-only words with previous word. 95 | */ 96 | export const mergePunctuation = (words: LyricWord[]): LyricWord[] => { 97 | if (words.length <= 1) return words; 98 | 99 | const result: LyricWord[] = []; 100 | let leadingBuffer: LyricWord | null = null; 101 | 102 | for (const word of words) { 103 | if (isPunctuation(word.text)) { 104 | if (result.length > 0) { 105 | const prev = result[result.length - 1]; 106 | prev.text += word.text; 107 | prev.endTime = word.endTime; 108 | } else if (leadingBuffer) { 109 | leadingBuffer = { 110 | ...leadingBuffer, 111 | text: `${leadingBuffer.text}${word.text}`, 112 | endTime: word.endTime, 113 | }; 114 | } else { 115 | leadingBuffer = { ...word }; 116 | } 117 | continue; 118 | } 119 | 120 | let mergedWord = { ...word }; 121 | if (leadingBuffer) { 122 | mergedWord = { 123 | ...mergedWord, 124 | text: `${leadingBuffer.text}${mergedWord.text}`, 125 | startTime: Math.min(leadingBuffer.startTime, mergedWord.startTime), 126 | }; 127 | leadingBuffer = null; 128 | } 129 | 130 | result.push(mergedWord); 131 | } 132 | 133 | if (leadingBuffer) { 134 | 
result.push(leadingBuffer); 135 | } 136 | 137 | return result; 138 | }; 139 | 140 | /** 141 | * Calculate line end time based on words or estimate. 142 | */ 143 | export const calculateEndTime = ( 144 | line: LyricLine, 145 | nextTime: number = Infinity 146 | ): number => { 147 | if (line.words?.length) { 148 | const lastWord = line.words[line.words.length - 1]; 149 | if (lastWord.endTime > line.time) { 150 | return Math.min(lastWord.endTime, nextTime); 151 | } 152 | } 153 | 154 | return Math.min(line.time + DEFAULT_DURATION, nextTime); 155 | }; 156 | 157 | /** 158 | * Add duration metadata to lines for lookahead. 159 | */ 160 | export const addDurations = (lines: LyricLine[]): LyricLine[] => { 161 | return lines.map((line, i) => { 162 | const nextContentLine = lines.slice(i + 1).find(hasContent); 163 | const nextTime = 164 | nextContentLine?.time ?? 165 | lines[i + 1]?.time ?? 166 | line.time + 5; 167 | let endTime = calculateEndTime(line, nextTime); 168 | 169 | if (isInterlude(line)) { 170 | if (nextContentLine && nextContentLine.time > line.time) { 171 | endTime = nextContentLine.time; 172 | } else if (!nextContentLine) { 173 | endTime = Math.max(endTime, line.time + MIN_INTERLUDE_DURATION); 174 | } 175 | } 176 | 177 | if (endTime <= line.time) { 178 | endTime = line.time + 3; 179 | } 180 | 181 | return { ...line, _endTime: endTime } as LyricLine; 182 | }); 183 | }; 184 | 185 | /** 186 | * Check if line is an interlude. 187 | */ 188 | export function isInterlude(line?: LyricLine): boolean { 189 | if (!line) return false; 190 | return Boolean(line.isInterlude || line.text?.trim() === INTERLUDE_TEXT); 191 | } 192 | 193 | /** 194 | * Check if line has lyric content (not interlude or empty). 195 | */ 196 | export function hasContent(line: LyricLine): boolean { 197 | if (isInterlude(line)) return false; 198 | return Boolean(line.text?.trim()); 199 | } 200 | 201 | /** 202 | * Insert interlude at specified time. 
203 | */ 204 | export const createInterlude = (time: number): LyricLine => { 205 | return createLine(Math.max(time, 0), INTERLUDE_TEXT, { isInterlude: true }); 206 | }; 207 | 208 | /** 209 | * Insert interludes for gaps between lyrics. 210 | * Checks for prelude (before first lyric) and gaps between consecutive lyrics. 211 | */ 212 | export const insertInterludes = (lines: LyricLine[]): LyricLine[] => { 213 | if (lines.length === 0) return lines; 214 | 215 | const result: LyricLine[] = []; 216 | const firstLyric = lines.find(hasContent); 217 | 218 | // Add prelude if first lyric starts late 219 | const hasPrelude = lines.some( 220 | (line) => 221 | isInterlude(line) && 222 | firstLyric && 223 | line.time >= 0 && 224 | line.time < firstLyric.time 225 | ); 226 | 227 | if (firstLyric && firstLyric.time > PRELUDE_THRESHOLD && !hasPrelude) { 228 | result.push(createInterlude(0)); 229 | } 230 | 231 | // Process each line and check for gaps 232 | for (let i = 0; i < lines.length; i++) { 233 | const current = lines[i]; 234 | result.push(current); 235 | 236 | if (!hasContent(current)) continue; 237 | 238 | // Find next lyric line 239 | let nextLyric: LyricLine | undefined; 240 | let hasInterludeBetween = false; 241 | 242 | for (let j = i + 1; j < lines.length; j++) { 243 | if (hasContent(lines[j])) { 244 | nextLyric = lines[j]; 245 | break; 246 | } 247 | if (isInterlude(lines[j])) { 248 | hasInterludeBetween = true; 249 | } 250 | } 251 | 252 | if (!nextLyric || hasInterludeBetween) continue; 253 | 254 | // Check gap and insert interlude if needed 255 | const estimatedEnd = calculateEndTime(current, nextLyric.time); 256 | const gap = nextLyric.time - estimatedEnd; 257 | 258 | if (gap > GAP_THRESHOLD && gap >= MIN_INTERLUDE_DURATION) { 259 | result.push(createInterlude(estimatedEnd)); 260 | } 261 | } 262 | 263 | return result; 264 | }; 265 | -------------------------------------------------------------------------------- 
/components/background/renderer/webWorkerBackground.worker.ts: -------------------------------------------------------------------------------- 1 | const defaultColors = [ 2 | "rgb(60, 20, 80)", 3 | "rgb(100, 40, 60)", 4 | "rgb(20, 20, 40)", 5 | "rgb(40, 40, 90)", 6 | ]; 7 | 8 | const vertexShaderSource = ` 9 | attribute vec2 position; 10 | void main() { 11 | gl_Position = vec4(position, 0.0, 1.0); 12 | } 13 | `; 14 | 15 | const fragmentShaderSource = ` 16 | precision highp float; 17 | 18 | uniform vec2 uResolution; 19 | uniform float uTime; 20 | uniform vec3 uColor1; 21 | uniform vec3 uColor2; 22 | uniform vec3 uColor3; 23 | uniform vec3 uColor4; 24 | 25 | #define S(a,b,t) smoothstep(a,b,t) 26 | 27 | mat2 Rot(float a) { 28 | float s = sin(a); 29 | float c = cos(a); 30 | return mat2(c, -s, s, c); 31 | } 32 | 33 | vec2 hash(vec2 p) { 34 | p = vec2(dot(p, vec2(2127.1, 81.17)), dot(p, vec2(1269.5, 283.37))); 35 | return fract(sin(p) * 43758.5453); 36 | } 37 | 38 | float noise(vec2 p) { 39 | vec2 i = floor(p); 40 | vec2 f = fract(p); 41 | 42 | vec2 u = f * f * (3.0 - 2.0 * f); 43 | 44 | float n = mix( 45 | mix(dot(-1.0 + 2.0 * hash(i + vec2(0.0, 0.0)), f - vec2(0.0, 0.0)), 46 | dot(-1.0 + 2.0 * hash(i + vec2(1.0, 0.0)), f - vec2(1.0, 0.0)), u.x), 47 | mix(dot(-1.0 + 2.0 * hash(i + vec2(0.0, 1.0)), f - vec2(0.0, 1.0)), 48 | dot(-1.0 + 2.0 * hash(i + vec2(1.0, 1.0)), f - vec2(1.0, 1.0)), u.x), u.y); 49 | return 0.5 + 0.5 * n; 50 | } 51 | 52 | void main() { 53 | vec2 uv = gl_FragCoord.xy / uResolution.xy; 54 | float ratio = uResolution.x / uResolution.y; 55 | 56 | vec2 tuv = uv; 57 | tuv -= 0.5; 58 | 59 | float degree = noise(vec2(uTime * 0.1, tuv.x * tuv.y)); 60 | 61 | tuv.y *= 1.0 / ratio; 62 | tuv *= Rot(radians((degree - 0.5) * 720.0 + 180.0)); 63 | tuv.y *= ratio; 64 | 65 | float frequency = 5.0; 66 | float amplitude = 30.0; 67 | float speed = uTime * 2.0; 68 | tuv.x += sin(tuv.y * frequency + speed) / amplitude; 69 | tuv.y += sin(tuv.x * frequency * 1.5 + speed) / 
(amplitude * 0.5); 70 | 71 | vec3 layer1 = mix(uColor1, uColor2, S(-0.3, 0.2, (tuv * Rot(radians(-5.0))).x)); 72 | vec3 layer2 = mix(uColor3, uColor4, S(-0.3, 0.2, (tuv * Rot(radians(-5.0))).x)); 73 | 74 | vec3 finalComp = mix(layer1, layer2, S(0.5, -0.3, tuv.y)); 75 | vec3 col = finalComp; 76 | 77 | gl_FragColor = vec4(col, 1.0); 78 | } 79 | `; 80 | 81 | const FRAME_INTERVAL = 1000 / 60; 82 | 83 | interface WorkerCommand { 84 | type: "init" | "resize" | "colors" | "play" | "pause"; 85 | canvas?: OffscreenCanvas; 86 | width?: number; 87 | height?: number; 88 | colors?: string[]; 89 | isPlaying?: boolean; 90 | paused?: boolean; 91 | } 92 | 93 | let gl: WebGLRenderingContext | null = null; 94 | let program: WebGLProgram | null = null; 95 | let resolutionUniform: WebGLUniformLocation | null = null; 96 | let timeUniform: WebGLUniformLocation | null = null; 97 | let color1Uniform: WebGLUniformLocation | null = null; 98 | let color2Uniform: WebGLUniformLocation | null = null; 99 | let color3Uniform: WebGLUniformLocation | null = null; 100 | let color4Uniform: WebGLUniformLocation | null = null; 101 | 102 | let timeAccumulator = 0; 103 | let lastFrameTime = 0; 104 | let lastRenderTime = 0; 105 | let playing = true; 106 | let paused = false; 107 | let currentColors = [...defaultColors]; 108 | let rafId: number | null = null; 109 | 110 | const parseColor = (colorStr: string): [number, number, number] => { 111 | const match = colorStr.match(/rgb\((\d+),\s*(\d+),\s*(\d+)\)/); 112 | if (!match) return [0, 0, 0]; 113 | return [ 114 | parseInt(match[1], 10) / 255, 115 | parseInt(match[2], 10) / 255, 116 | parseInt(match[3], 10) / 255, 117 | ]; 118 | }; 119 | 120 | const createShader = ( 121 | glCtx: WebGLRenderingContext, 122 | type: number, 123 | source: string, 124 | ): WebGLShader | null => { 125 | const shader = glCtx.createShader(type); 126 | if (!shader) return null; 127 | glCtx.shaderSource(shader, source); 128 | glCtx.compileShader(shader); 129 | if 
(!glCtx.getShaderParameter(shader, glCtx.COMPILE_STATUS)) { 130 | console.error(glCtx.getShaderInfoLog(shader)); 131 | glCtx.deleteShader(shader); 132 | return null; 133 | } 134 | return shader; 135 | }; 136 | 137 | const initProgram = () => { 138 | if (!gl) return false; 139 | 140 | const vertexShader = createShader(gl, gl.VERTEX_SHADER, vertexShaderSource); 141 | const fragmentShader = createShader(gl, gl.FRAGMENT_SHADER, fragmentShaderSource); 142 | if (!vertexShader || !fragmentShader) return false; 143 | 144 | const prog = gl.createProgram(); 145 | if (!prog) return false; 146 | 147 | gl.attachShader(prog, vertexShader); 148 | gl.attachShader(prog, fragmentShader); 149 | gl.linkProgram(prog); 150 | gl.useProgram(prog); 151 | 152 | const positionBuffer = gl.createBuffer(); 153 | gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer); 154 | gl.bufferData( 155 | gl.ARRAY_BUFFER, 156 | new Float32Array([-1, -1, 1, -1, -1, 1, -1, 1, 1, -1, 1, 1]), 157 | gl.STATIC_DRAW, 158 | ); 159 | 160 | const positionLocation = gl.getAttribLocation(prog, "position"); 161 | gl.enableVertexAttribArray(positionLocation); 162 | gl.vertexAttribPointer(positionLocation, 2, gl.FLOAT, false, 0, 0); 163 | 164 | resolutionUniform = gl.getUniformLocation(prog, "uResolution"); 165 | timeUniform = gl.getUniformLocation(prog, "uTime"); 166 | color1Uniform = gl.getUniformLocation(prog, "uColor1"); 167 | color2Uniform = gl.getUniformLocation(prog, "uColor2"); 168 | color3Uniform = gl.getUniformLocation(prog, "uColor3"); 169 | color4Uniform = gl.getUniformLocation(prog, "uColor4"); 170 | 171 | program = prog; 172 | return true; 173 | }; 174 | 175 | const render = (now: number) => { 176 | if (!gl || !program || !resolutionUniform || !timeUniform) return; 177 | 178 | if (now - lastRenderTime < FRAME_INTERVAL) { 179 | return; 180 | } 181 | lastRenderTime = now - ((now - lastRenderTime) % FRAME_INTERVAL); 182 | 183 | gl.viewport(0, 0, gl.canvas.width, gl.canvas.height); 184 | gl.useProgram(program); 185 | 
gl.uniform2f(resolutionUniform, gl.canvas.width, gl.canvas.height); 186 | 187 | const delta = now - lastFrameTime; 188 | lastFrameTime = now; 189 | if (playing && !paused) { 190 | timeAccumulator += delta; 191 | } 192 | 193 | const colors = currentColors.length >= 4 ? currentColors : defaultColors; 194 | const [c1, c2, c3, c4] = colors.map(parseColor); 195 | 196 | gl.uniform1f(timeUniform, timeAccumulator * 0.0005); 197 | gl.uniform3f(color1Uniform, c1[0], c1[1], c1[2]); 198 | gl.uniform3f(color2Uniform, c2[0], c2[1], c2[2]); 199 | gl.uniform3f(color3Uniform, c3[0], c3[1], c3[2]); 200 | gl.uniform3f(color4Uniform, c4[0], c4[1], c4[2]); 201 | 202 | gl.drawArrays(gl.TRIANGLES, 0, 6); 203 | }; 204 | 205 | const loop = (now: number) => { 206 | render(now); 207 | rafId = self.requestAnimationFrame(loop); 208 | }; 209 | 210 | self.onmessage = (event: MessageEvent) => { 211 | const { data } = event; 212 | if (data.type === "init" && data.canvas) { 213 | const canvas = data.canvas; 214 | if (!canvas) return; 215 | 216 | gl = canvas.getContext("webgl"); 217 | if (!gl) { 218 | console.error("WebGL not supported in web worker background"); 219 | return; 220 | } 221 | 222 | gl.canvas.width = data.width; 223 | gl.canvas.height = data.height; 224 | gl.viewport(0, 0, gl.canvas.width, gl.canvas.height); 225 | 226 | if (!initProgram()) { 227 | console.error("Failed to initialize shader program in worker"); 228 | return; 229 | } 230 | 231 | currentColors = data.colors ?? 
defaultColors; 232 | lastFrameTime = performance.now(); 233 | lastRenderTime = performance.now(); 234 | timeAccumulator = 0; 235 | playing = true; 236 | paused = false; 237 | 238 | rafId = self.requestAnimationFrame(loop); 239 | return; 240 | } 241 | 242 | if (!gl) return; 243 | 244 | if (data.type === "resize" && typeof data.width === "number" && typeof data.height === "number") { 245 | gl.canvas.width = data.width; 246 | gl.canvas.height = data.height; 247 | gl.viewport(0, 0, gl.canvas.width, gl.canvas.height); 248 | return; 249 | } 250 | 251 | if (data.type === "colors" && data.colors) { 252 | currentColors = data.colors; 253 | return; 254 | } 255 | 256 | if (data.type === "play" && typeof data.isPlaying === "boolean") { 257 | playing = data.isPlaying; 258 | return; 259 | } 260 | 261 | if (data.type === "pause" && typeof data.paused === "boolean") { 262 | paused = data.paused; 263 | } 264 | }; 265 | -------------------------------------------------------------------------------- /services/lyrics/lrc.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Standard LRC format parser. 3 | * 4 | * Supports: 5 | * - Basic LRC: [mm:ss.xx]lyrics 6 | * - Enhanced LRC: [mm:ss.xx]word1word2 7 | * - Multiple timestamps: [mm:ss.xx][mm:ss.xx]same lyrics 8 | * 9 | * Features: 10 | * - Single-pass parsing 11 | * - Inline duplicate merging 12 | * - Inline interlude insertion 13 | * - Word-level timing support 14 | */ 15 | 16 | import { LyricLine, LyricWord, isMetadataLine } from "./types"; 17 | import { 18 | parseTime, 19 | createWord, 20 | createLine, 21 | mergePunctuation, 22 | insertInterludes, 23 | addDurations, 24 | INTERLUDE_TEXT, 25 | } from "./parser"; 26 | 27 | /** 28 | * Token types for LRC parsing. 
29 | */ 30 | type LrcToken = 31 | | { type: "time"; value: number; raw: string } 32 | | { type: "word_time"; value: number; raw: string } 33 | | { type: "text"; value: string } 34 | | { type: "metadata"; key: string; value: string }; 35 | 36 | /** 37 | * Tokenize LRC line into structured tokens. 38 | */ 39 | const tokenizeLine = (line: string): LrcToken[] => { 40 | const trimmed = line.trim(); 41 | if (!trimmed) return []; 42 | 43 | const tokens: LrcToken[] = []; 44 | let cursor = 0; 45 | 46 | // Extract time tags: [mm:ss.xx] 47 | const timeRegex = /\[(\d{2}):(\d{2})\.(\d{2,3})\]/g; 48 | let match: RegExpExecArray | null; 49 | 50 | while ((match = timeRegex.exec(trimmed)) !== null) { 51 | const timeStr = `${match[1]}:${match[2]}.${match[3]}`; 52 | tokens.push({ 53 | type: "time", 54 | value: parseTime(timeStr), 55 | raw: match[0], 56 | }); 57 | cursor = match.index + match[0].length; 58 | } 59 | 60 | if (tokens.length === 0) return []; 61 | 62 | // Extract content after last time tag 63 | const content = trimmed.slice(cursor).trim(); 64 | 65 | // Check if this is metadata (e.g., [ar:artist]) 66 | const metaMatch = trimmed.match(/^\[([a-z]+):(.+)\]$/); 67 | if (metaMatch && tokens.length === 0) { 68 | tokens.push({ 69 | type: "metadata", 70 | key: metaMatch[1], 71 | value: metaMatch[2], 72 | }); 73 | return tokens; 74 | } 75 | 76 | // Parse word timing tags: word 77 | const wordRegex = /<(\d{2}):(\d{2})\.(\d{2,3})>([^<]*)/g; 78 | const wordMatches = [...content.matchAll(wordRegex)]; 79 | 80 | if (wordMatches.length > 0) { 81 | // Has word-level timing 82 | for (const m of wordMatches) { 83 | tokens.push({ 84 | type: "word_time", 85 | value: parseTime(`${m[1]}:${m[2]}.${m[3]}`), 86 | raw: m[4], 87 | }); 88 | } 89 | } else if (content) { 90 | // No word timing, just text 91 | tokens.push({ 92 | type: "text", 93 | value: content, 94 | }); 95 | } 96 | 97 | return tokens; 98 | }; 99 | 100 | /** 101 | * Parse word timing tokens into words with start/end times. 
102 | */ 103 | const parseWords = (tokens: LrcToken[]): { words: LyricWord[]; text: string } => { 104 | const wordTokens = tokens.filter(t => t.type === "word_time"); 105 | if (wordTokens.length === 0) { 106 | return { words: [], text: "" }; 107 | } 108 | 109 | const words: LyricWord[] = []; 110 | let fullText = ""; 111 | 112 | for (let i = 0; i < wordTokens.length; i++) { 113 | const token = wordTokens[i] as Extract; 114 | const wordText = token.raw; 115 | const startTime = token.value; 116 | 117 | // Calculate end time from next word or estimate 118 | const nextToken = wordTokens[i + 1] as Extract | undefined; 119 | const endTime = nextToken ? nextToken.value : startTime + 1.0; 120 | 121 | fullText += wordText; 122 | if (wordText) { 123 | words.push(createWord(wordText, startTime, endTime)); 124 | } 125 | } 126 | 127 | return { 128 | words: mergePunctuation(words), 129 | text: fullText, 130 | }; 131 | }; 132 | 133 | /** 134 | * Parsed line data before grouping. 135 | */ 136 | interface ParsedLine { 137 | time: number; 138 | text: string; 139 | words: LyricWord[]; 140 | hasWordTiming: boolean; 141 | originalIndex: number; 142 | isMetadata: boolean; 143 | } 144 | 145 | /** 146 | * Parse all lines and group by timestamp. 
147 | */ 148 | const parseAndGroup = (content: string): LyricLine[] => { 149 | const lines = content.split("\n"); 150 | const parsed: ParsedLine[] = []; 151 | 152 | lines.forEach((line, index) => { 153 | const tokens = tokenizeLine(line); 154 | if (tokens.length === 0) return; 155 | 156 | // Skip metadata tokens 157 | if (tokens[0].type === "metadata") return; 158 | 159 | const timeTags = tokens.filter(t => t.type === "time") as Extract[]; 160 | if (timeTags.length === 0) return; 161 | 162 | // Parse words and text 163 | const { words, text } = parseWords(tokens); 164 | const textContent = text || (tokens.find(t => t.type === "text") as Extract)?.value || ""; 165 | 166 | // Create entry for each timestamp 167 | for (const timeTag of timeTags) { 168 | parsed.push({ 169 | time: timeTag.value, 170 | text: textContent, 171 | words: words.map(w => ({ ...w })), 172 | hasWordTiming: words.length > 0, 173 | originalIndex: index, 174 | isMetadata: isMetadataLine(textContent), 175 | }); 176 | } 177 | }); 178 | 179 | // Sort by time, then by original index 180 | parsed.sort((a, b) => { 181 | const timeDiff = a.time - b.time; 182 | return Math.abs(timeDiff) > 0.01 ? timeDiff : a.originalIndex - b.originalIndex; 183 | }); 184 | 185 | return groupDuplicates(parsed); 186 | }; 187 | 188 | /** 189 | * Group lines with same timestamp and merge duplicates. 190 | */ 191 | const groupDuplicates = (entries: ParsedLine[]): LyricLine[] => { 192 | const result: LyricLine[] = []; 193 | let i = 0; 194 | 195 | while (i < entries.length) { 196 | const current = entries[i]; 197 | const group = [current]; 198 | let j = i + 1; 199 | 200 | // Group entries within 0.1s 201 | while (j < entries.length && Math.abs(entries[j].time - current.time) < 0.1) { 202 | group.push(entries[j]); 203 | j++; 204 | } 205 | 206 | // Sort by priority: word timing > original order 207 | group.sort((a, b) => { 208 | if (a.hasWordTiming !== b.hasWordTiming) { 209 | return a.hasWordTiming ? 
-1 : 1; 210 | } 211 | return a.originalIndex - b.originalIndex; 212 | }); 213 | 214 | // Find main line (non-metadata with content) 215 | const main = group.find(e => !e.isMetadata && e.text.trim()) ?? group[0]; 216 | 217 | // Skip metadata-only lines 218 | if (main.isMetadata) { 219 | i = j; 220 | continue; 221 | } 222 | 223 | // Skip empty placeholders; gap handling happens later 224 | if (!main.text.trim()) { 225 | i = j; 226 | continue; 227 | } 228 | 229 | // Collect translations from other lines in group 230 | const mainNormalized = main.text.toLowerCase(); 231 | const translations = group 232 | .filter(e => e !== main && !e.isMetadata && e.text.trim()) 233 | .map(e => e.text.trim()) 234 | .filter(t => t && t.toLowerCase() !== mainNormalized); 235 | 236 | result.push( 237 | createLine(main.time, main.text, { 238 | words: main.words.length > 0 ? main.words : undefined, 239 | translation: translations.length > 0 ? translations.join("\n") : undefined, 240 | isPreciseTiming: false, 241 | }) 242 | ); 243 | 244 | i = j; 245 | } 246 | 247 | return result; 248 | }; 249 | 250 | /** 251 | * Fix word end times based on next line start. 252 | */ 253 | const fixWordTiming = (lines: LyricLine[]): void => { 254 | for (let i = 0; i < lines.length; i++) { 255 | const line = lines[i]; 256 | if (line.isPreciseTiming || !line.words?.length) continue; 257 | 258 | const nextTime = lines[i + 1]?.time ?? line.time + 5; 259 | const lastWord = line.words[line.words.length - 1]; 260 | const duration = nextTime - lastWord.startTime; 261 | lastWord.endTime = lastWord.startTime + Math.min(duration, 5); 262 | } 263 | }; 264 | 265 | /** 266 | * Parse standard LRC format lyrics. 267 | * 268 | * Single-pass parser that: 269 | * 1. Tokenizes and parses all lines 270 | * 2. Groups and merges duplicates inline 271 | * 3. Inserts interludes for gaps 272 | * 4. 
Adds duration metadata 273 | */ 274 | export const parseLrc = (content: string): LyricLine[] => { 275 | if (!content?.trim()) return []; 276 | 277 | const lines = parseAndGroup(content); 278 | 279 | fixWordTiming(lines); 280 | 281 | const withInterludes = insertInterludes(lines); 282 | 283 | return addDurations(withInterludes); 284 | }; 285 | -------------------------------------------------------------------------------- /components/KeyboardShortcuts.tsx: -------------------------------------------------------------------------------- 1 | import React, { useEffect, useState } from "react"; 2 | import { createPortal } from "react-dom"; 3 | import { useKeyboardScope } from "../hooks/useKeyboardScope"; 4 | 5 | interface KeyboardShortcutsProps { 6 | isPlaying: boolean; 7 | onPlayPause: () => void; 8 | onNext: () => void; 9 | onPrev: () => void; 10 | onSeek: (time: number) => void; 11 | currentTime: number; 12 | duration: number; 13 | volume: number; 14 | onVolumeChange: (vol: number) => void; 15 | onToggleMode: () => void; 16 | onTogglePlaylist: () => void; 17 | speed: number; 18 | onSpeedChange: (speed: number) => void; 19 | onToggleVolumeDialog: () => void; 20 | onToggleSpeedDialog: () => void; 21 | } 22 | 23 | const KeyboardShortcuts: React.FC = ({ 24 | isPlaying, 25 | onPlayPause, 26 | onNext, 27 | onPrev, 28 | onSeek, 29 | currentTime, 30 | duration, 31 | volume, 32 | onVolumeChange, 33 | onToggleMode, 34 | onTogglePlaylist, 35 | speed, 36 | onSpeedChange, 37 | onToggleVolumeDialog, 38 | onToggleSpeedDialog, 39 | }) => { 40 | const [isOpen, setIsOpen] = useState(false); 41 | const [isVisible, setIsVisible] = useState(false); 42 | 43 | useEffect(() => { 44 | if (isOpen) { 45 | setIsVisible(true); 46 | } else { 47 | const timer = setTimeout(() => setIsVisible(false), 300); 48 | return () => clearTimeout(timer); 49 | } 50 | }, [isOpen]); 51 | 52 | // Use keyboard scope with lower priority (50) for global shortcuts 53 | useKeyboardScope( 54 | (e) => { 55 | const 
target = e.target as HTMLElement; 56 | if ( 57 | ["INPUT", "TEXTAREA"].includes(target.tagName) || 58 | target.isContentEditable 59 | ) 60 | return false; 61 | 62 | // Ctrl + / 63 | if ((e.ctrlKey || e.metaKey) && e.key === "/") { 64 | e.preventDefault(); 65 | setIsOpen((prev) => !prev); 66 | return true; 67 | } 68 | 69 | // Ctrl + P 70 | if ((e.ctrlKey || e.metaKey) && e.key === "p") { 71 | e.preventDefault(); 72 | onTogglePlaylist(); 73 | return true; 74 | } 75 | 76 | if (e.key === "Escape") { 77 | if (isOpen) { 78 | e.preventDefault(); 79 | setIsOpen(false); 80 | return true; 81 | } 82 | return false; 83 | } 84 | 85 | switch (e.key) { 86 | case " ": // Space 87 | e.preventDefault(); 88 | onPlayPause(); 89 | return true; 90 | case "ArrowRight": 91 | e.preventDefault(); 92 | if (e.ctrlKey || e.metaKey) { 93 | onNext(); 94 | } else { 95 | onSeek(Math.min(currentTime + 5, duration)); 96 | } 97 | return true; 98 | case "ArrowLeft": 99 | e.preventDefault(); 100 | if (e.ctrlKey || e.metaKey) { 101 | onPrev(); 102 | } else { 103 | onSeek(Math.max(currentTime - 5, 0)); 104 | } 105 | return true; 106 | case "ArrowUp": 107 | e.preventDefault(); 108 | onVolumeChange(Math.min(volume + 0.1, 1)); 109 | return true; 110 | case "ArrowDown": 111 | e.preventDefault(); 112 | onVolumeChange(Math.max(volume - 0.1, 0)); 113 | return true; 114 | case "l": 115 | case "L": 116 | e.preventDefault(); 117 | onToggleMode(); 118 | return true; 119 | case "v": 120 | case "V": 121 | e.preventDefault(); 122 | onToggleVolumeDialog(); 123 | return true; 124 | case "s": 125 | case "S": 126 | e.preventDefault(); 127 | onToggleSpeedDialog(); 128 | return true; 129 | } 130 | 131 | return false; 132 | }, 133 | 50, // Lower priority than SearchModal (100) 134 | true, 135 | ); 136 | 137 | if (!isVisible) return null; 138 | 139 | return createPortal( 140 |
141 | 153 | 154 | {/* Shared backdrop */} 155 |
setIsOpen(false)} 158 | /> 159 | 160 | {/* Help Dialog */} 161 | {isOpen && ( 162 |
174 | {/* Content Container */} 175 |
176 | {/* Header */} 177 |
178 |
179 |

180 | Keyboard Shortcuts 181 |

182 |

183 | Quick controls for playback 184 |

185 |
186 | 205 |
206 | 207 | {/* Grid */} 208 |
209 | 210 | 211 | 212 | 213 | 214 | 215 | 216 | 217 | 218 | 219 |
220 | 221 | {/* Footer Hint */} 222 |
223 | Press{" "} 224 | 225 | Esc 226 | {" "} 227 | to close 228 |
229 |
230 |
231 | )} 232 |
, 233 | document.body, 234 | ); 235 | }; 236 | 237 | const ShortcutItem = ({ keys, label }: { keys: string[]; label: string }) => ( 238 |
239 | 240 | {label} 241 | 242 |
243 | {keys.map((k, i) => ( 244 | 248 | {k} 249 | 250 | ))} 251 |
252 |
253 | ); 254 | 255 | export default KeyboardShortcuts; 256 | -------------------------------------------------------------------------------- /components/SmartImage.tsx: -------------------------------------------------------------------------------- 1 | import React, { 2 | CSSProperties, 3 | ImgHTMLAttributes, 4 | useCallback, 5 | useEffect, 6 | useLayoutEffect, 7 | useMemo, 8 | useRef, 9 | useState, 10 | } from "react"; 11 | import { imageResourceCache } from "../services/cache"; 12 | 13 | const makeCacheKey = (src: string, width: number, height: number) => { 14 | const dpr = typeof window === "undefined" ? 1 : window.devicePixelRatio || 1; 15 | const ratio = (width / height).toFixed(3); 16 | return `${src}|${ratio}|${width}x${height}@${Math.round(dpr * 100)}`; 17 | }; 18 | 19 | interface SmartImageProps 20 | extends Omit, "src" | "className" | "style"> { 21 | src: string; 22 | containerClassName?: string; 23 | containerStyle?: CSSProperties; 24 | imgClassName?: string; 25 | imgStyle?: CSSProperties; 26 | placeholder?: React.ReactNode; 27 | targetWidth?: number; 28 | targetHeight?: number; 29 | loading?: "lazy" | "eager"; 30 | } 31 | 32 | const DEFAULT_PLACEHOLDER = ( 33 |
34 | 35 |
36 | ); 37 | 38 | const SmartImage: React.FC = ({ 39 | src, 40 | containerClassName, 41 | containerStyle, 42 | imgClassName, 43 | imgStyle, 44 | placeholder, 45 | alt = "", 46 | targetWidth, 47 | targetHeight, 48 | loading = "lazy", 49 | ...imgProps 50 | }) => { 51 | const [isVisible, setIsVisible] = useState(loading === "eager"); 52 | 53 | const containerRef = useRef(null); 54 | const [measuredSize, setMeasuredSize] = useState<{ width: number; height: number } | null>( 55 | null, 56 | ); 57 | const [displaySrc, setDisplaySrc] = useState(null); 58 | const currentUrlRef = useRef(null); 59 | const currentUrlIsBlobRef = useRef(false); 60 | 61 | const revokeCurrentObjectUrl = useCallback(() => { 62 | if (currentUrlRef.current && currentUrlIsBlobRef.current) { 63 | URL.revokeObjectURL(currentUrlRef.current); 64 | } 65 | }, []); 66 | 67 | const resetDisplay = useCallback(() => { 68 | revokeCurrentObjectUrl(); 69 | currentUrlRef.current = null; 70 | currentUrlIsBlobRef.current = false; 71 | setDisplaySrc(null); 72 | }, [revokeCurrentObjectUrl]); 73 | 74 | const setFinalUrl = useCallback( 75 | (url: string, isBlob: boolean) => { 76 | revokeCurrentObjectUrl(); 77 | currentUrlRef.current = url; 78 | currentUrlIsBlobRef.current = isBlob; 79 | setDisplaySrc(url); 80 | }, 81 | [revokeCurrentObjectUrl], 82 | ); 83 | 84 | useEffect(() => { 85 | if (loading === "eager") { 86 | setIsVisible(true); 87 | return undefined; 88 | } 89 | 90 | const element = containerRef.current; 91 | if (!element) { 92 | setIsVisible(false); 93 | return undefined; 94 | } 95 | 96 | if (typeof IntersectionObserver === "undefined") { 97 | setIsVisible(true); 98 | return undefined; 99 | } 100 | 101 | const observer = new IntersectionObserver( 102 | (entries) => { 103 | const entry = entries[0]; 104 | setIsVisible(entry?.isIntersecting ?? 
false); 105 | }, 106 | { 107 | rootMargin: "200px", 108 | threshold: 0.01, 109 | }, 110 | ); 111 | 112 | observer.observe(element); 113 | return () => { 114 | observer.disconnect(); 115 | }; 116 | }, [loading]); 117 | 118 | useLayoutEffect(() => { 119 | if (typeof targetWidth === "number" && typeof targetHeight === "number") { 120 | setMeasuredSize({ 121 | width: targetWidth, 122 | height: targetHeight, 123 | }); 124 | return; 125 | } 126 | 127 | const element = containerRef.current; 128 | if (!element) { 129 | setMeasuredSize(null); 130 | return; 131 | } 132 | 133 | const updateSize = () => { 134 | const rect = element.getBoundingClientRect(); 135 | setMeasuredSize((prev) => { 136 | const roundedWidth = Math.round(rect.width); 137 | const roundedHeight = Math.round(rect.height); 138 | if ( 139 | prev && 140 | Math.round(prev.width) === roundedWidth && 141 | Math.round(prev.height) === roundedHeight 142 | ) { 143 | return prev; 144 | } 145 | return { 146 | width: rect.width, 147 | height: rect.height, 148 | }; 149 | }); 150 | }; 151 | 152 | updateSize(); 153 | 154 | if (typeof ResizeObserver === "undefined") { 155 | return; 156 | } 157 | 158 | const observer = new ResizeObserver(() => { 159 | updateSize(); 160 | }); 161 | 162 | observer.observe(element); 163 | return () => observer.disconnect(); 164 | }, [targetHeight, targetWidth]); 165 | 166 | const normalizedSize = useMemo(() => { 167 | if (!measuredSize) return null; 168 | const width = Math.max(1, Math.round(measuredSize.width)); 169 | const height = Math.max(1, Math.round(measuredSize.height)); 170 | if (width <= 0 || height <= 0) return null; 171 | return { width, height }; 172 | }, [measuredSize]); 173 | 174 | const effectiveKey = useMemo(() => { 175 | if (!normalizedSize || !src) return null; 176 | return makeCacheKey(src, normalizedSize.width, normalizedSize.height); 177 | }, [normalizedSize, src]); 178 | 179 | useEffect(() => { 180 | if (!normalizedSize || !src || !effectiveKey) { 181 | resetDisplay(); 
182 | return; 183 | } 184 | if (!isVisible) { 185 | return; 186 | } 187 | 188 | let canceled = false; 189 | const cachedBlob = imageResourceCache.get(effectiveKey); 190 | if (cachedBlob) { 191 | const cachedUrl = URL.createObjectURL(cachedBlob); 192 | setFinalUrl(cachedUrl, true); 193 | return () => { 194 | canceled = true; 195 | URL.revokeObjectURL(cachedUrl); 196 | }; 197 | } 198 | 199 | const imageElement = new Image(); 200 | 201 | const handleFallback = () => { 202 | if (canceled) return; 203 | resetDisplay(); 204 | }; 205 | 206 | const loadImage = () => { 207 | if (canceled) return; 208 | const ratio = Math.min( 209 | normalizedSize.width / imageElement.naturalWidth, 210 | normalizedSize.height / imageElement.naturalHeight, 211 | 1, 212 | ); 213 | const targetWidth = Math.max(1, Math.round(imageElement.naturalWidth * ratio)); 214 | const targetHeight = Math.max(1, Math.round(imageElement.naturalHeight * ratio)); 215 | 216 | const canvas = document.createElement("canvas"); 217 | canvas.width = targetWidth; 218 | canvas.height = targetHeight; 219 | 220 | const ctx = canvas.getContext("2d"); 221 | if (!ctx) { 222 | handleFallback(); 223 | return; 224 | } 225 | 226 | ctx.drawImage(imageElement, 0, 0, targetWidth, targetHeight); 227 | 228 | try { 229 | canvas.toBlob( 230 | (blob) => { 231 | if (!blob || canceled) { 232 | handleFallback(); 233 | return; 234 | } 235 | 236 | try { 237 | imageResourceCache.set(effectiveKey, blob); 238 | } catch { 239 | // Silently ignore cache failures. 
240 | } 241 | 242 | const optimizedUrl = URL.createObjectURL(blob); 243 | if (canceled) { 244 | URL.revokeObjectURL(optimizedUrl); 245 | return; 246 | } 247 | setFinalUrl(optimizedUrl, true); 248 | }, 249 | "image/jpeg", 250 | 0.78, 251 | ); 252 | } catch { 253 | handleFallback(); 254 | } 255 | }; 256 | 257 | imageElement.crossOrigin = "anonymous"; 258 | imageElement.onload = () => { 259 | if (canceled) return; 260 | if (!imageElement.naturalWidth || !imageElement.naturalHeight) { 261 | handleFallback(); 262 | return; 263 | } 264 | loadImage(); 265 | }; 266 | imageElement.onerror = () => { 267 | if (canceled) return; 268 | handleFallback(); 269 | }; 270 | imageElement.src = src; 271 | 272 | return () => { 273 | canceled = true; 274 | imageElement.onload = null; 275 | imageElement.onerror = null; 276 | imageElement.src = ""; 277 | }; 278 | }, [effectiveKey, normalizedSize, resetDisplay, setFinalUrl, src, isVisible]); 279 | 280 | return ( 281 |
286 | {displaySrc ? ( 287 | {alt} 295 | ) : ( 296 | placeholder ?? DEFAULT_PLACEHOLDER 297 | )} 298 |
299 | ); 300 | }; 301 | 302 | export default SmartImage; 303 | -------------------------------------------------------------------------------- /components/background/mobile/index.ts: -------------------------------------------------------------------------------- 1 | import { loadImageElementWithCache } from "../../../services/cache"; 2 | 3 | interface FlowingLayer { 4 | image: HTMLCanvasElement; 5 | startX: number; 6 | startY: number; 7 | startScale: number; 8 | duration: number; 9 | startTime: number; 10 | } 11 | 12 | const defaultColors = ["#8b5cf6", "#ec4899", "#f97316", "#3b82f6"]; 13 | 14 | const MESH_FLOATS = [ 15 | -0.2351, -0.0967, 0.2135, -0.1414, 0.9221, -0.0908, 0.9221, -0.0685, 1.3027, 16 | 0.0253, 1.2351, 0.1786, -0.3768, 0.1851, 0.2, 0.2, 0.6615, 0.3146, 0.9543, 17 | 0.0, 0.6969, 0.1911, 1.0, 0.2, 0.0, 0.4, 0.2, 0.4, 0.0776, 0.2318, 0.6, 0.4, 18 | 0.6615, 0.3851, 1.0, 0.4, 0.0, 0.6, 0.1291, 0.6, 0.4, 0.6, 0.4, 0.4304, 19 | 0.4264, 0.5792, 1.2029, 0.8188, -0.1192, 1.0, 0.6, 0.8, 0.4264, 0.8104, 0.6, 20 | 0.8, 0.8, 0.8, 1.0, 0.8, 0.0, 1.0, 0.0776, 1.0283, 0.4, 1.0, 0.6, 1.0, 0.8, 21 | 1.0, 1.1868, 1.0283, 22 | ]; 23 | const scaleCanvas = ( 24 | source: HTMLCanvasElement, 25 | newWidth: number, 26 | newHeight: number, 27 | ): HTMLCanvasElement => { 28 | const canvas = document.createElement("canvas"); 29 | canvas.width = newWidth; 30 | canvas.height = newHeight; 31 | const ctx = canvas.getContext("2d", { willReadFrequently: true }); 32 | if (!ctx) return source; 33 | 34 | ctx.imageSmoothingEnabled = true; 35 | ctx.imageSmoothingQuality = "high"; 36 | ctx.drawImage(source, 0, 0, newWidth, newHeight); 37 | return canvas; 38 | }; 39 | 40 | const blurCanvas = (source: HTMLCanvasElement, radius: number) => { 41 | const canvas = document.createElement("canvas"); 42 | canvas.width = source.width; 43 | canvas.height = source.height; 44 | const ctx = canvas.getContext("2d"); 45 | if (!ctx) return source; 46 | 47 | ctx.filter = `blur(${radius}px)`; 48 | 
  // Rasterize the source once through the blur filter.
  ctx.drawImage(source, 0, 0);
  return canvas;
};

/**
 * Warps the source canvas through a 6x6 vertex mesh (5x5 cells), drawing each
 * cell as two affine-mapped triangles. `meshVerts` holds normalized x/y pairs
 * (see MESH_FLOATS) that are scaled up to the source dimensions.
 * Returns the untouched source if a 2D context cannot be acquired.
 */
const applyMeshDistortion = (
  source: HTMLCanvasElement,
  meshVerts: number[],
) => {
  const canvas = document.createElement("canvas");
  canvas.width = source.width;
  canvas.height = source.height;
  const ctx = canvas.getContext("2d");
  if (!ctx) return source;

  const gridWidth = 5;
  const gridHeight = 5;

  // Scale normalized mesh coordinates up to pixel space.
  const verts: number[] = [];
  for (let i = 0; i < meshVerts.length; i += 2) {
    verts.push(meshVerts[i] * source.width);
    verts.push(meshVerts[i + 1] * source.height);
  }

  for (let row = 0; row < gridHeight; row++) {
    for (let col = 0; col < gridWidth; col++) {
      // Vertex indices into the 6-wide vertex grid for this cell's corners.
      const topLeft = row * 6 + col;
      const topRight = topLeft + 1;
      const bottomLeft = (row + 1) * 6 + col;
      const bottomRight = bottomLeft + 1;

      // Axis-aligned source rectangle for this cell.
      const srcX = (col / gridWidth) * source.width;
      const srcY = (row / gridHeight) * source.height;
      const srcW = source.width / gridWidth;
      const srcH = source.height / gridHeight;

      // Destination quad corners (displaced by the mesh).
      const x1 = verts[topLeft * 2];
      const y1 = verts[topLeft * 2 + 1];
      const x2 = verts[topRight * 2];
      const y2 = verts[topRight * 2 + 1];
      const x3 = verts[bottomRight * 2];
      const y3 = verts[bottomRight * 2 + 1];
      const x4 = verts[bottomLeft * 2];
      const y4 = verts[bottomLeft * 2 + 1];

      // First triangle of the quad: top-left, top-right, bottom-left.
      ctx.save();
      ctx.beginPath();
      ctx.moveTo(x1, y1);
      ctx.lineTo(x2, y2);
      ctx.lineTo(x4, y4);
      ctx.closePath();
      ctx.clip();

      const dx1 = x2 - x1;
      const dy1 = y2 - y1;
      const dx2 = x4 - x1;
      const dy2 = y4 - y1;

      // Skip near-degenerate triangles (determinant ~ 0 would blow up the
      // affine transform below).
      if (Math.abs(dx1 * dy2 - dx2 * dy1) > 1) {
        ctx.transform(dx1 / srcW, dy1 / srcW, dx2 / srcH, dy2 / srcH, x1, y1);
        ctx.drawImage(source, srcX, srcY, srcW, srcH, 0, 0, srcW, srcH);
      }
      ctx.restore();

      // Second triangle: top-right, bottom-right, bottom-left.
      ctx.save();
      ctx.beginPath();
      ctx.moveTo(x2, y2);
      ctx.lineTo(x3, y3);
      ctx.lineTo(x4, y4);
      ctx.closePath();
      ctx.clip();

      const dx3 = x3 - x2;
      const dy3 = y3 - y2;
      const dx4 = x4 - x2;
      const dy4 = y4 - y2;

      if (Math.abs(dx3 * dy4 - dx4 * dy3) > 1) {
        ctx.transform(dx3 / srcW, dy3 / srcW, dx4 / srcH, dy4 / srcH, x2, y2);
        ctx.drawImage(source, srcX, srcY, srcW, srcH, 0, 0, srcW, srcH);
      }
      ctx.restore();
    }
  }

  return canvas;
};

// Re-draws the source through a CSS saturate() filter into a fresh canvas.
const adjustSaturation = (source: HTMLCanvasElement, saturation: number) => {
  const canvas = document.createElement("canvas");
  canvas.width = source.width;
  canvas.height = source.height;
  const ctx = canvas.getContext("2d");
  if (!ctx) return source;

  ctx.filter = `saturate(${saturation})`;
  ctx.drawImage(source, 0, 0);
  return canvas;
};

// Perceived luminance (Rec. 601 weights) of the single center pixel only —
// cheap, but only a point sample, not an average. Falls back to 0.5 when no
// 2D context is available.
const getBrightness = (canvas: HTMLCanvasElement) => {
  const ctx = canvas.getContext("2d", { willReadFrequently: true });
  if (!ctx) return 0.5;

  const centerX = Math.floor(canvas.width / 2);
  const centerY = Math.floor(canvas.height / 2);
  const pixel = ctx.getImageData(centerX, centerY, 1, 1).data;
  const r = pixel[0] / 255;
  const g = pixel[1] / 255;
  const b = pixel[2] / 255;
  return 0.299 * r + 0.587 * g + 0.114 * b;
};

// Tones down extreme canvases in place: darkens very bright ones and lightens
// very dark ones with a translucent overlay, leaving mid-range untouched.
const applyBrightnessMask = (canvas: HTMLCanvasElement) => {
  const brightness = getBrightness(canvas);
  const ctx = canvas.getContext("2d");
  if (!ctx) return canvas;

  if (brightness > 0.8) {
    ctx.fillStyle = "rgba(0, 0, 0, 0.31)";
    ctx.fillRect(0, 0, canvas.width, canvas.height);
  } else if (brightness < 0.2) {
    ctx.fillStyle = "rgba(255, 255, 255, 0.31)";
    ctx.fillRect(0, 0, canvas.width, canvas.height);
  }

  return canvas;
};

/**
 * Full processing pipeline for one background layer:
 * downscale -> blur -> mesh distort -> upscale -> distort again -> blur ->
 * boost saturation -> brightness mask. Working at 150px width first keeps the
 * expensive blur/distortion cheap before the final 1000px upscale.
 */
const processBitmap = (source: HTMLCanvasElement) => {
  const smallWidth = 150;
  const smallHeight = Math.floor((source.height / source.width) * smallWidth);
  let canvas = scaleCanvas(source, smallWidth, smallHeight);
  canvas = blurCanvas(canvas, 25);
  canvas = applyMeshDistortion(canvas, MESH_FLOATS);
  const largeWidth = 1000;
  const largeHeight = Math.floor((canvas.height / canvas.width) * largeWidth);
  canvas = scaleCanvas(canvas, largeWidth, largeHeight);
  canvas = applyMeshDistortion(canvas, MESH_FLOATS);
  canvas = blurCanvas(canvas, 12);
  canvas = adjustSaturation(canvas, 1.8);
  canvas = applyBrightnessMask(canvas);
  return canvas;
};

/**
 * Builds the 600x600 base texture for a layer: a linear gradient of the
 * palette, the (optional) cover art drawn cover-fit at 90% alpha, then eight
 * random radial color blobs for variety. Cover-load failures are logged and
 * ignored so the gradient alone still renders.
 */
const createBaseTexture = async (
  colors: string[],
  coverUrl: string | undefined,
) => {
  const size = 600;
  const canvas = document.createElement("canvas");
  canvas.width = size;
  canvas.height = size;
  const ctx = canvas.getContext("2d");
  if (!ctx) return canvas;

  const gradient = ctx.createLinearGradient(0, 0, size, size);
  colors.forEach((color, idx) => {
    gradient.addColorStop(idx / Math.max(1, colors.length - 1), color);
  });
  ctx.fillStyle = gradient;
  ctx.fillRect(0, 0, size, size);

  if (coverUrl) {
    try {
      const img = await loadImageElementWithCache(coverUrl);
      // Cover-fit: scale so the image fills the square, centered.
      const scale = Math.max(size / img.width, size / img.height);
      const w = img.width * scale;
      const h = img.height * scale;
      const x = (size - w) / 2;
      const y = (size - h) / 2;
      ctx.globalAlpha = 0.9;
      ctx.drawImage(img, x, y, w, h);
      ctx.globalAlpha = 1.0;
    } catch (error) {
      console.warn("Failed to load cover", error);
    }
  }

  // Scatter translucent radial color blobs over the gradient/cover.
  for (let i = 0; i < 8; i++) {
    const cx = Math.random() * size;
    const cy = Math.random() * size;
    const radius = size * (0.3 + Math.random() * 0.4);
    const color = colors[Math.floor(Math.random() * colors.length)];

    const grad =
ctx.createRadialGradient(cx, cy, 0, cx, cy, radius); 233 | grad.addColorStop(0, color); 234 | grad.addColorStop(1, "rgba(0,0,0,0)"); 235 | 236 | ctx.globalAlpha = 0.3 + Math.random() * 0.3; 237 | ctx.fillStyle = grad; 238 | ctx.fillRect(cx - radius, cy - radius, radius * 2, radius * 2); 239 | } 240 | 241 | return canvas; 242 | }; 243 | 244 | const normalizeColors = (colors: string[] | undefined): string[] => { 245 | if (!colors || colors.length === 0) { 246 | return defaultColors; 247 | } 248 | return colors; 249 | }; 250 | 251 | export const createFlowingLayers = async ( 252 | colors: string[] | undefined, 253 | coverUrl: string | undefined, 254 | count: number = 4, 255 | ): Promise => { 256 | const normalized = normalizeColors(colors); 257 | const layers: FlowingLayer[] = []; 258 | 259 | for (let i = 0; i < count; i++) { 260 | const baseCanvas = await createBaseTexture(normalized, coverUrl); 261 | const processed = processBitmap(baseCanvas); 262 | 263 | layers.push({ 264 | image: processed, 265 | startX: (Math.random() - 0.5) * 0.2, 266 | startY: (Math.random() - 0.5) * 0.2, 267 | startScale: 1.15 + Math.random() * 0.1, 268 | duration: 20000 + Math.random() * 15000, 269 | startTime: -i * 5000, 270 | }); 271 | } 272 | 273 | return layers; 274 | }; 275 | 276 | export type { FlowingLayer }; 277 | export { defaultColors }; 278 | -------------------------------------------------------------------------------- /components/FluidBackground.tsx: -------------------------------------------------------------------------------- 1 | import React, { useCallback, useEffect, useMemo, useRef, useState } from "react"; 2 | import { FlowingLayer, createFlowingLayers, defaultColors as mobileDefaultColors } from "./background/mobile"; 3 | import { UIBackgroundRender } from "./background/renderer/UIBackgroundRender"; 4 | import { WebWorkerBackgroundRender } from "./background/renderer/WebWorkerBackgroundRender"; 5 | 6 | const desktopGradientDefaults = [ 7 | "rgb(60, 20, 80)", 8 | 
"rgb(100, 40, 60)", 9 | "rgb(20, 20, 40)", 10 | "rgb(40, 40, 90)", 11 | ]; 12 | 13 | const easeInOutSine = (t: number) => -(Math.cos(Math.PI * t) - 1) / 2; 14 | 15 | const calculateTransform = (layer: FlowingLayer, elapsed: number) => { 16 | const progress = ((elapsed + layer.startTime) % layer.duration) / layer.duration; 17 | const eased = easeInOutSine(progress); 18 | 19 | const x = layer.startX + Math.sin(progress * Math.PI * 2) * 0.15; 20 | const y = layer.startY + Math.cos(progress * Math.PI * 2) * 0.12; 21 | const scale = layer.startScale + Math.sin(progress * Math.PI * 2) * 0.08; 22 | const rotation = Math.sin(progress * Math.PI * 2) * 0.08; 23 | 24 | return { x, y, scale, rotation, eased }; 25 | }; 26 | 27 | interface FluidBackgroundProps { 28 | colors?: string[]; 29 | isPlaying?: boolean; 30 | coverUrl?: string; 31 | isMobileLayout?: boolean; 32 | } 33 | 34 | const FluidBackground: React.FC = ({ 35 | colors, 36 | isPlaying = true, 37 | coverUrl, 38 | isMobileLayout = false, 39 | }) => { 40 | const canvasRef = useRef(null); 41 | const rendererRef = useRef(null); 42 | const layersRef = useRef([]); 43 | const isPlayingRef = useRef(isPlaying); 44 | const startTimeOffsetRef = useRef(0); 45 | const lastPausedTimeRef = useRef(0); 46 | const colorsRef = useRef(colors); 47 | const [canvasInstanceKey, setCanvasInstanceKey] = useState(0); 48 | const previousModeRef = useRef(isMobileLayout); 49 | 50 | const normalizedColors = useMemo( 51 | () => (colors && colors.length > 0 ? 
colors : mobileDefaultColors), 52 | [colors], 53 | ); 54 | 55 | const colorKey = useMemo(() => normalizedColors.join("|"), [normalizedColors]); 56 | 57 | useEffect(() => { 58 | colorsRef.current = colors; 59 | }, [colors]); 60 | 61 | useEffect(() => { 62 | isPlayingRef.current = isPlaying; 63 | }, [isPlaying]); 64 | 65 | useEffect(() => { 66 | if (previousModeRef.current !== isMobileLayout) { 67 | setCanvasInstanceKey((prev) => prev + 1); 68 | previousModeRef.current = isMobileLayout; 69 | } 70 | }, [isMobileLayout]); 71 | 72 | useEffect(() => { 73 | if (!isMobileLayout) { 74 | layersRef.current = []; 75 | return; 76 | } 77 | let cancelled = false; 78 | const generate = async () => { 79 | const newLayers = await createFlowingLayers(normalizedColors, coverUrl, 4); 80 | if (cancelled) return; 81 | layersRef.current = newLayers; 82 | }; 83 | generate(); 84 | return () => { 85 | cancelled = true; 86 | }; 87 | }, [colorKey, coverUrl, normalizedColors, isMobileLayout]); 88 | 89 | const renderMobileFrame = useCallback( 90 | (ctx: CanvasRenderingContext2D, currentTime: number) => { 91 | const width = ctx.canvas.width; 92 | const height = ctx.canvas.height; 93 | let elapsed = currentTime; 94 | 95 | if (!isPlayingRef.current) { 96 | lastPausedTimeRef.current = currentTime; 97 | elapsed = startTimeOffsetRef.current; 98 | } else if (lastPausedTimeRef.current > 0) { 99 | startTimeOffsetRef.current = elapsed; 100 | lastPausedTimeRef.current = 0; 101 | } 102 | 103 | ctx.fillStyle = "#000"; 104 | ctx.fillRect(0, 0, width, height); 105 | 106 | if (layersRef.current.length === 0) { 107 | ctx.fillStyle = "#222"; 108 | ctx.fillRect(0, 0, width, height); 109 | ctx.fillStyle = "#666"; 110 | ctx.font = "16px sans-serif"; 111 | ctx.textAlign = "center"; 112 | ctx.fillText("Loading layers...", width / 2, height / 2); 113 | return; 114 | } 115 | 116 | layersRef.current.forEach((layer, index) => { 117 | const transform = calculateTransform(layer, elapsed); 118 | ctx.save(); 119 | 
ctx.translate(width / 2, height / 2); 120 | ctx.rotate(transform.rotation); 121 | ctx.scale(transform.scale, transform.scale); 122 | ctx.translate(width * transform.x, height * transform.y); 123 | ctx.globalCompositeOperation = "screen"; 124 | ctx.globalAlpha = 0.5 + index * 0.05; 125 | ctx.filter = "blur(35px)"; 126 | const drawWidth = width * 1.5; 127 | const drawHeight = height * 1.5; 128 | ctx.drawImage( 129 | layer.image, 130 | -drawWidth / 2, 131 | -drawHeight / 2, 132 | drawWidth, 133 | drawHeight, 134 | ); 135 | ctx.restore(); 136 | }); 137 | }, 138 | [], 139 | ); 140 | 141 | const renderGradientFrame = useCallback((ctx: CanvasRenderingContext2D) => { 142 | const width = ctx.canvas.width; 143 | const height = ctx.canvas.height; 144 | const palette = 145 | colorsRef.current && colorsRef.current.length > 0 146 | ? colorsRef.current 147 | : desktopGradientDefaults; 148 | const gradient = ctx.createLinearGradient(0, 0, width, height); 149 | palette.forEach((color, index) => { 150 | gradient.addColorStop(index / Math.max(1, palette.length - 1), color); 151 | }); 152 | ctx.fillStyle = gradient; 153 | ctx.fillRect(0, 0, width, height); 154 | }, []); 155 | 156 | useEffect(() => { 157 | const resize = () => { 158 | const width = window.innerWidth; 159 | const height = window.innerHeight; 160 | const canvas = canvasRef.current; 161 | if (!canvas) return; 162 | 163 | if (canvas.dataset.offscreenTransferred === "true") { 164 | if (rendererRef.current instanceof WebWorkerBackgroundRender) { 165 | rendererRef.current.resize(width, height); 166 | } 167 | return; 168 | } 169 | 170 | if (rendererRef.current instanceof WebWorkerBackgroundRender) { 171 | rendererRef.current.resize(width, height); 172 | return; 173 | } 174 | 175 | canvas.width = width; 176 | canvas.height = height; 177 | rendererRef.current?.resize(width, height); 178 | }; 179 | 180 | resize(); 181 | window.addEventListener("resize", resize); 182 | return () => window.removeEventListener("resize", resize); 183 
| }, [isMobileLayout, canvasInstanceKey]); 184 | 185 | useEffect(() => { 186 | const canvas = canvasRef.current; 187 | if (!canvas) return; 188 | 189 | if (canvas.dataset.offscreenTransferred === "true") { 190 | setCanvasInstanceKey((prev) => prev + 1); 191 | return; 192 | } 193 | 194 | const shouldUseWorker = 195 | !isMobileLayout && WebWorkerBackgroundRender.isSupported(canvas); 196 | 197 | if (shouldUseWorker && rendererRef.current instanceof WebWorkerBackgroundRender) { 198 | return; 199 | } 200 | 201 | if (rendererRef.current) { 202 | rendererRef.current.stop(); 203 | rendererRef.current = null; 204 | } 205 | 206 | if (shouldUseWorker) { 207 | canvas.width = window.innerWidth; 208 | canvas.height = window.innerHeight; 209 | const workerRenderer = new WebWorkerBackgroundRender(canvas); 210 | workerRenderer.start(colorsRef.current ?? []); 211 | rendererRef.current = workerRenderer; 212 | return () => { 213 | workerRenderer.stop(); 214 | rendererRef.current = null; 215 | }; 216 | } 217 | 218 | const renderCallback = isMobileLayout ? renderMobileFrame : renderGradientFrame; 219 | const uiRenderer = new UIBackgroundRender(canvas, renderCallback); 220 | uiRenderer.resize(window.innerWidth, window.innerHeight); 221 | uiRenderer.setPaused(!isPlaying); 222 | uiRenderer.start(); 223 | rendererRef.current = uiRenderer; 224 | 225 | return () => { 226 | uiRenderer.stop(); 227 | rendererRef.current = null; 228 | }; 229 | }, [isMobileLayout, renderGradientFrame, renderMobileFrame, canvasInstanceKey]); 230 | 231 | useEffect(() => { 232 | const renderer = rendererRef.current; 233 | if (renderer instanceof WebWorkerBackgroundRender) { 234 | renderer.setColors(colors ?? []); 235 | renderer.setPlaying(isPlaying); 236 | } else if (renderer instanceof UIBackgroundRender) { 237 | renderer.setPaused(!isPlaying); 238 | } 239 | }, [colors, isPlaying]); 240 | 241 | const canvasKey = `${isMobileLayout ? "mobile" : "desktop"}-${canvasInstanceKey}`; 242 | 243 | return ( 244 | <> 245 | 251 |
258 | 259 | ); 260 | }; 261 | 262 | export default FluidBackground; 263 | -------------------------------------------------------------------------------- /components/lyrics/InterludeDots.ts: -------------------------------------------------------------------------------- 1 | import { LyricLine as LyricLineType } from "../../types"; 2 | import { ILyricLine } from "./ILyricLine"; 3 | import { SpringSystem, INTERLUDE_SPRING } from "../../services/springSystem"; 4 | 5 | export class InterludeDots implements ILyricLine { 6 | private canvas: OffscreenCanvas | HTMLCanvasElement; 7 | private ctx: OffscreenCanvasRenderingContext2D | CanvasRenderingContext2D; 8 | private lyricLine: LyricLineType; 9 | private index: number; 10 | private isMobile: boolean; 11 | private pixelRatio: number; 12 | private logicalWidth: number = 0; 13 | private logicalHeight: number = 0; 14 | private _height: number = 0; 15 | private springSystem: SpringSystem; 16 | private lastDrawTime: number = -1; 17 | private textWidth: number = 0; 18 | private duration: number = 0; 19 | 20 | constructor(line: LyricLineType, index: number, isMobile: boolean, duration: number = 0) { 21 | this.lyricLine = line; 22 | this.index = index; 23 | this.isMobile = isMobile; 24 | this.duration = duration; 25 | this.pixelRatio = 26 | typeof window !== "undefined" ? window.devicePixelRatio || 1 : 1; 27 | 28 | this.canvas = document.createElement("canvas"); 29 | const ctx = this.canvas.getContext("2d"); 30 | if (!ctx) throw new Error("Could not get canvas context"); 31 | this.ctx = ctx as 32 | | OffscreenCanvasRenderingContext2D 33 | | CanvasRenderingContext2D; 34 | 35 | // Initialize spring system for expansion animation 36 | this.springSystem = new SpringSystem({ 37 | expansion: 0, // 0 = hidden/collapsed, 1 = fully visible 38 | }); 39 | } 40 | 41 | public measure(containerWidth: number, suggestedTranslationWidth?: number) { 42 | const baseSize = this.isMobile ? 
32 : 40;
    const paddingY = 18;

    // Fixed height for interlude dots (the dots never wrap)
    this._height = baseSize + paddingY * 2;
    this.logicalWidth = containerWidth;
    this.logicalHeight = this._height;

    // Set canvas size (backing store scaled by devicePixelRatio for crispness)
    this.canvas.width = containerWidth * this.pixelRatio;
    this.canvas.height = this._height * this.pixelRatio;

    // Reset transform so repeated measure() calls do not compound the DPR scale
    this.ctx.resetTransform();
    if (this.pixelRatio !== 1) {
      this.ctx.scale(this.pixelRatio, this.pixelRatio);
    }

    // Calculate approximate width for hover background
    const dotSpacing = this.isMobile ? 16 : 24;
    this.textWidth = dotSpacing * 2 + 40; // Approximate width
  }

  /**
   * Renders one frame of the three interlude dots.
   *
   * @param currentTime Playback position (same unit as LyricLine.time), used to
   *        color the dots progressively across the interlude duration.
   * @param isActive    Whether this interlude is the current line; drives the
   *        spring-based expand/collapse animation (with a "pop" on exit).
   * @param isHovered   Whether the pointer is over the line; draws a faint
   *        rounded background behind the dots.
   */
  public draw(currentTime: number, isActive: boolean, isHovered: boolean) {
    const now = performance.now();

    // Frame delta in seconds; the first frame assumes ~60fps (16ms).
    // NOTE(review): an earlier comment claimed dt is clamped to prevent
    // physics explosions on re-entry, but no clamp is applied here — confirm
    // SpringSystem.update tolerates a large dt after a long gap between draws.
    let dt = this.lastDrawTime === -1 ? 0.016 : (now - this.lastDrawTime) / 1000;
    this.lastDrawTime = now;

    // Determine target expansion state (0 = collapsed, 1 = fully visible)
    const currentTarget = this.springSystem.getTarget("expansion") || 0;
    const targetExpansion = isActive ? 1 : 0;

    // Detect transition from Active -> Inactive (exit animation start)
    // "Finally scale up once, then completely scale down"
    if (currentTarget === 1 && targetExpansion === 0) {
      // Apply a positive velocity to create a "pop" effect before shrinking.
      // The spring will pull it to 0, but velocity will push it up first.
      this.springSystem.setVelocity("expansion", 8);
    }

    this.springSystem.setTarget("expansion", targetExpansion, INTERLUDE_SPRING);
    this.springSystem.update(dt);

    // Clamp expansion at 0 to prevent negative scale; values > 1 are allowed
    // so the "pop" overshoot can render.
    const expansion = Math.max(0, this.springSystem.getCurrent("expansion"));

    // Clear canvas (coordinates are in logical pixels; DPR scale was applied
    // in measure())
    this.ctx.clearRect(0, 0, this.logicalWidth, this.logicalHeight);

    // If completely collapsed and not active, don't draw anything.
    // The 0.01 threshold ensures it disappears cleanly.
    if (expansion < 0.01 && !isActive) {
      return;
    }

    const paddingX = this.isMobile ? 24 : 56;
    const baseRadius = this.isMobile ? 5 : 7;
    const dotSpacing = this.isMobile ? 16 : 24;
    const totalDotsWidth = dotSpacing * 2;

    // Calculate progress through the interlude.
    // If active, progress is derived from line time and duration; otherwise
    // the color is kept consistent (the drawing loop handles isActive).
    let progress = 0;
    if (this.duration > 0) {
      const elapsed = currentTime - this.lyricLine.time;
      progress = Math.max(0, Math.min(1, elapsed / this.duration));
    } else if (isActive) {
      // No known duration: hold the dots at half intensity.
      progress = 0.5;
    } else {
      // Inactive with no duration: treat as finished.
      progress = 1;
    }

    this.ctx.save();

    // Draw hover background (rounded rect, faded by expansion)
    if (isHovered) {
      this.ctx.fillStyle = `rgba(255, 255, 255, ${0.08 * Math.min(1, expansion)})`;
      const bgWidth = Math.max(totalDotsWidth + 80, 200);
      const bgHeight = this._height * Math.min(1, expansion);
      const bgY = (this._height - bgHeight) / 2;

      this.roundRect(paddingX - 16, bgY, bgWidth, bgHeight, 16 * Math.min(1, expansion));
      this.ctx.fill();
    }

    // Position dots — left aligned with the lyric text but nudged slightly
    // right ("Still a bit to the right").
    const offsetX = 6;

    // Scaling pivot is the center of the middle dot: dots sit at 0, spacing,
    // and 2*spacing relative to the group start, so the center is at spacing.
    const groupCenterX = paddingX + offsetX + baseRadius + dotSpacing;
    const groupCenterY = this._height / 2;

    // Move the origin to the group center so scale/rotation pivot there.
    this.ctx.translate(groupCenterX, groupCenterY);

    // Global breathing animation (small amplitude — "Effect is too big.
    // Scale down!").
    const breatheSpeed = 3.0;
    const breatheAmt = 0.12;
    const breatheScale = 1.0 + Math.sin(now / 1000 * breatheSpeed) * breatheAmt;

    // Combine physics expansion with breathing.
    const finalGlobalScale = expansion * breatheScale;

    this.ctx.scale(finalGlobalScale, finalGlobalScale);

    for (let i = 0; i < 3; i++) {
      // Each dot owns one third of the progress range.
      const dotProgressStart = i / 3;
      const dotProgressEnd = (i + 1) / 3;

      const localProgress = (progress - dotProgressStart) / (dotProgressEnd - dotProgressStart);
      const clampedLocal = Math.max(0, Math.min(1, localProgress));

      // Mirror lyric styling: base opacity 0.5 (gray) ramping to 1.0 (white)
      // as the interlude progresses through this dot.
      const colorIntensity = 0.5 + 0.5 * clampedLocal;

      const visibilityOpacity = Math.min(1, expansion);

      const opacity = colorIntensity * visibilityOpacity;

      this.ctx.fillStyle = `rgba(255, 255, 255, ${opacity})`;
      this.ctx.beginPath();

      // Dots are drawn relative to the group center: -spacing, 0, +spacing.
      const relativeX = (i - 1) * dotSpacing;

      this.ctx.arc(relativeX, 0, baseRadius, 0, Math.PI * 2);
      this.ctx.fill();
    }

    this.ctx.restore();
  }

  // Builds (but does not fill) a rounded-rectangle path; the radius is clamped
  // so it never exceeds half of either dimension.
  private roundRect(x: number, y: number, w: number, h: number, r: number) {
    if (w < 2 * r) r = w / 2;
    if (h < 2 * r) r = h / 2;
    this.ctx.beginPath();
    this.ctx.moveTo(x + r, y);
    this.ctx.arcTo(x + w, y, x + w, y + h, r);
    this.ctx.arcTo(x + w, y + h, x, y + h, r);
    this.ctx.arcTo(x, y + h, x, y, r);
    this.ctx.arcTo(x, y, x + w, y, r);
    this.ctx.closePath();
  }

  // Static layout height reserved when fully expanded.
  public getHeight() {
    return this._height;
  }

  public getCurrentHeight() {
    // Dynamic height based on expansion state. Clamped to [0, 1] so the "pop"
    // overshoot (expansion > 1) does not cause layout jitter; at 0 the line
    // occupies no height at all.
    const expansion = Math.max(0, Math.min(1, this.springSystem.getCurrent("expansion")));
    return this._height * expansion;
  }

  // ILyricLine discriminator: this line renders dots, not text.
  public isInterlude() {
    return true;
  }

  public getCanvas() {
    return this.canvas;
  }

  public getLogicalWidth() {
    return this.logicalWidth;
  }

  public getLogicalHeight() {
    return this.logicalHeight;
  }

  public getTextWidth() {
    return this.textWidth;
  }
}
-------------------------------------------------------------------------------- /services/lyricsService.ts: -------------------------------------------------------------------------------- 1 | import { fetchViaProxy } from "./utils"; 2 | 3 | const LYRIC_API_BASE = "https://163api.qijieya.cn"; 4 | const METING_API = "https://api.qijieya.cn/meting/"; 5 | const NETEASE_SEARCH_API = "https://163api.qijieya.cn/cloudsearch"; 6 | const NETEASE_API_BASE = "http://music.163.com/api"; 7 | const NETEASECLOUD_API_BASE = "https://163api.qijieya.cn"; 8 | 9 | const METADATA_KEYWORDS = [ 10 | "歌词贡献者", 11 | "翻译贡献者", 12 | "作词", 13 | "作曲", 14 | "编曲", 15 | "制作", 16 | "词曲", 17 | "词 / 曲", 18 | "lyricist", 19 | "composer", 20 | "arrange", 21 | "translation", 22 | "translator", 23 | "producer", 24 | ]; 25 | 26 | const escapeRegex = (value: string) => 27 | value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); 28 | 29 | const metadataKeywordRegex = new RegExp( 30 | `^(${METADATA_KEYWORDS.map(escapeRegex).join("|")})\\s*[::]`, 31 | "iu", 32 | ); 33 | 34 | const TIMESTAMP_REGEX = /^\[(\d{2}):(\d{2})\.(\d{2,3})\](.*)$/; 35 | 36 | interface NeteaseApiArtist { 37 | name?: string; 38 | } 39 | 40 | interface NeteaseApiAlbum { 41 | name?: string; 42 | picUrl?: string; 43 | } 44 | 45 | interface NeteaseApiSong { 46 | id: number; 47 | name?: string; 48 | ar?: NeteaseApiArtist[]; 49 | al?: NeteaseApiAlbum; 50 | dt?: number; 51 | } 52 | 53 | interface NeteaseSearchResponse { 54 | result?: { 55 | songs?: NeteaseApiSong[]; 56 | }; 57 | } 58 | 59 | interface NeteasePlaylistResponse { 60 | songs?: NeteaseApiSong[]; 61 | } 62 | 63 | interface NeteaseSongDetailResponse { 64 | code?: number; 65 | songs?: NeteaseApiSong[]; 66 | } 67 | 68 | export interface NeteaseTrackInfo { 69 | id: string; 70 | title: string; 71 | artist: string; 72 | album: string; 73 | coverUrl?: string; 74 | duration?: number; 75 | isNetease: true; 76 | neteaseId: string; 77 | } 78 | 79 | type SearchOptions = { 80 | limit?: number; 81 | offset?: 
number; 82 | }; 83 | 84 | const formatArtists = (artists?: NeteaseApiArtist[]) => 85 | (artists ?? []) 86 | .map((artist) => artist.name?.trim()) 87 | .filter(Boolean) 88 | .join("/") || ""; 89 | 90 | const mapNeteaseSongToTrack = (song: NeteaseApiSong): NeteaseTrackInfo => ({ 91 | id: song.id.toString(), 92 | title: song.name?.trim() ?? "", 93 | artist: formatArtists(song.ar), 94 | album: song.al?.name?.trim() ?? "", 95 | coverUrl: song.al?.picUrl?.replaceAll("http:", "https:"), 96 | duration: song.dt, 97 | isNetease: true, 98 | neteaseId: song.id.toString(), 99 | }); 100 | 101 | const isMetadataTimestampLine = (line: string): boolean => { 102 | const trimmed = line.trim(); 103 | const match = trimmed.match(TIMESTAMP_REGEX); 104 | if (!match) return false; 105 | const content = match[4].trim(); 106 | return metadataKeywordRegex.test(content); 107 | }; 108 | 109 | const parseTimestampMetadata = (line: string) => { 110 | const match = line.trim().match(TIMESTAMP_REGEX); 111 | return match ? 
match[4].trim() : line.trim(); 112 | }; 113 | 114 | const isMetadataJsonLine = (line: string): boolean => { 115 | const trimmed = line.trim(); 116 | if (!trimmed.startsWith("{") || !trimmed.endsWith("}")) return false; 117 | try { 118 | const json = JSON.parse(trimmed); 119 | if (json.c && Array.isArray(json.c)) { 120 | const content = json.c.map((item: any) => item.tx || "").join(""); 121 | return metadataKeywordRegex.test(content); 122 | } 123 | } catch { 124 | // ignore invalid json 125 | } 126 | return false; 127 | }; 128 | 129 | const parseJsonMetadata = (line: string) => { 130 | try { 131 | const json = JSON.parse(line.trim()); 132 | if (json.c && Array.isArray(json.c)) { 133 | return json.c 134 | .map((item: any) => item.tx || "") 135 | .join("") 136 | .trim(); 137 | } 138 | } catch { 139 | // ignore 140 | } 141 | return line.trim(); 142 | }; 143 | 144 | const extractMetadataLines = (content: string) => { 145 | const metadataSet = new Set(); 146 | const bodyLines: string[] = []; 147 | 148 | content.split("\n").forEach((line) => { 149 | if (!line.trim()) return; 150 | if (isMetadataTimestampLine(line)) { 151 | metadataSet.add(parseTimestampMetadata(line)); 152 | } else if (isMetadataJsonLine(line)) { 153 | metadataSet.add(parseJsonMetadata(line)); 154 | } else { 155 | bodyLines.push(line); 156 | } 157 | }); 158 | 159 | return { 160 | clean: bodyLines.join("\n").trim(), 161 | metadata: Array.from(metadataSet), 162 | }; 163 | }; 164 | 165 | export const getNeteaseAudioUrl = (id: string) => { 166 | return `${METING_API}?type=url&id=${id}`; 167 | }; 168 | 169 | // Implements the search logic from the user provided code snippet 170 | export const searchNetEase = async ( 171 | keyword: string, 172 | options: SearchOptions = {}, 173 | ): Promise => { 174 | const { limit = 20, offset = 0 } = options; 175 | const searchApiUrl = `${NETEASE_SEARCH_API}?keywords=${encodeURIComponent( 176 | keyword, 177 | )}&limit=${limit}&offset=${offset}`; 178 | 179 | try { 180 | const 
parsedSearchApiResponse = (await fetchViaProxy( 181 | searchApiUrl, 182 | )) as NeteaseSearchResponse; 183 | const songs = parsedSearchApiResponse.result?.songs ?? []; 184 | 185 | if (songs.length === 0) { 186 | return []; 187 | } 188 | 189 | return songs.map(mapNeteaseSongToTrack); 190 | } catch (error) { 191 | console.error("NetEase search error", error); 192 | return []; 193 | } 194 | }; 195 | 196 | export const fetchNeteasePlaylist = async ( 197 | playlistId: string, 198 | ): Promise => { 199 | try { 200 | // 使用網易雲音樂 API 獲取歌單所有歌曲 201 | // 由於接口限制,需要分頁獲取,每次獲取 50 首 202 | const allTracks: NeteaseTrackInfo[] = []; 203 | const limit = 50; 204 | let offset = 0; 205 | let shouldContinue = true; 206 | 207 | while (shouldContinue) { 208 | const url = `${NETEASECLOUD_API_BASE}/playlist/track/all?id=${playlistId}&limit=${limit}&offset=${offset}`; 209 | const data = (await fetchViaProxy(url)) as NeteasePlaylistResponse; 210 | const songs = data.songs ?? []; 211 | if (songs.length === 0) { 212 | break; 213 | } 214 | 215 | const tracks = songs.map(mapNeteaseSongToTrack); 216 | 217 | allTracks.push(...tracks); 218 | 219 | // Continue fetching if the current page was full 220 | if (songs.length < limit) { 221 | shouldContinue = false; 222 | } else { 223 | offset += limit; 224 | } 225 | } 226 | 227 | return allTracks; 228 | } catch (e) { 229 | console.error("Playlist fetch error", e); 230 | return []; 231 | } 232 | }; 233 | 234 | export const fetchNeteaseSong = async ( 235 | songId: string, 236 | ): Promise => { 237 | try { 238 | const url = `${NETEASECLOUD_API_BASE}/song/detail?ids=${songId}`; 239 | const data = (await fetchViaProxy( 240 | url, 241 | )) as NeteaseSongDetailResponse; 242 | const track = data.songs?.[0]; 243 | if (data.code === 200 && track) { 244 | return mapNeteaseSongToTrack(track); 245 | } 246 | return null; 247 | } catch (e) { 248 | console.error("Song fetch error", e); 249 | return null; 250 | } 251 | }; 252 | 253 | // Keeps the old search for lyric 
// matching fallbacks
/**
 * Search Netease for "<title> <artist>" and fetch lyrics for the top hit.
 * Returns null when nothing matches or the lookup fails.
 */
export const searchAndMatchLyrics = async (
  title: string,
  artist: string,
): Promise<{ lrc: string; yrc?: string; tLrc?: string; metadata: string[] } | null> => {
  try {
    const candidates = await searchNetEase(`${title} ${artist}`, { limit: 5 });

    if (candidates.length === 0) {
      console.warn("No songs found on Cloud");
      return null;
    }

    const songId = candidates[0].id;
    console.log(`Found Song ID: ${songId}`);

    return await fetchLyricsById(songId);
  } catch (error) {
    console.error("Cloud lyrics match failed:", error);
    return null;
  }
};

/**
 * Fetch LRC / YRC / translated lyrics for a Netease song id, stripping
 * credit lines out of each variant into a shared metadata list.
 */
export const fetchLyricsById = async (
  songId: string,
): Promise<{ lrc: string; yrc?: string; tLrc?: string; metadata: string[] } | null> => {
  try {
    const lyricUrl = `${NETEASECLOUD_API_BASE}/lyric/new?id=${songId}`;
    const lyricData = await fetchViaProxy(lyricUrl);

    const rawYrc = lyricData.yrc?.lyric;
    const rawLrc = lyricData.lrc?.lyric;
    const tLrc = lyricData.tlyric?.lyric;

    if (!rawYrc && !rawLrc) return null;

    // Strip credit lines from whichever variants we actually received.
    const split = (raw?: string) =>
      raw
        ? extractMetadataLines(raw)
        : { clean: undefined, metadata: [] as string[] };

    const lrcResult = split(rawLrc);
    const yrcResult = split(rawYrc);
    const translationResult = split(tLrc);

    const credits = Array.from(
      new Set([
        ...lrcResult.metadata,
        ...yrcResult.metadata,
        ...translationResult.metadata,
      ]),
    );

    // Contributors go first; unshifting translator then author leaves the
    // lyric author at the very front.
    if (lyricData.transUser?.nickname) {
      credits.unshift(`翻译贡献者: ${lyricData.transUser.nickname}`);
    }
    if (lyricData.lyricUser?.nickname) {
      credits.unshift(`歌词贡献者: ${lyricData.lyricUser.nickname}`);
    }

    const baseLyrics = lrcResult.clean || yrcResult.clean || rawLrc || rawYrc;
    if (!baseLyrics) return null;

    // Only hand YRC over for word-timing enrichment when a plain LRC base
    // also exists; otherwise YRC already served as the base lyrics above.
    const yrcForEnrichment =
      yrcResult.clean && lrcResult.clean ? yrcResult.clean : undefined;

    return {
      lrc: baseLyrics,
      yrc: yrcForEnrichment,
      tLrc: translationResult.clean,
      metadata: credits,
    };
  } catch (e) {
    console.error("Lyric fetch error", e);
    return null;
  }
};

// --------------------------------------------------------------------------------
// /hooks/usePlaylist.ts
// --------------------------------------------------------------------------------

import { useCallback, useState } from "react";
import { Song } from "../types";
import {
  extractColors,
  parseAudioMetadata,
  parseNeteaseLink,
} from "../services/utils";
import { parseLyrics } from "../services/lyrics";
import {
  fetchNeteasePlaylist,
  fetchNeteaseSong,
  getNeteaseAudioUrl,
} from "../services/lyricsService";
import { audioResourceCache } from "../services/cache";

// Levenshtein distance for fuzzy matching
const levenshteinDistance = (str1: string, str2: string): number => {
  const len1 = str1.length;
| const len2 = str2.length; 20 | const matrix: number[][] = []; 21 | 22 | for (let i = 0; i <= len1; i++) { 23 | matrix[i] = [i]; 24 | } 25 | for (let j = 0; j <= len2; j++) { 26 | matrix[0][j] = j; 27 | } 28 | 29 | for (let i = 1; i <= len1; i++) { 30 | for (let j = 1; j <= len2; j++) { 31 | const cost = str1[i - 1] === str2[j - 1] ? 0 : 1; 32 | matrix[i][j] = Math.min( 33 | matrix[i - 1][j] + 1, // deletion 34 | matrix[i][j - 1] + 1, // insertion 35 | matrix[i - 1][j - 1] + cost // substitution 36 | ); 37 | } 38 | } 39 | 40 | return matrix[len1][len2]; 41 | }; 42 | 43 | // Calculate similarity score (0-1, higher is better) 44 | const calculateSimilarity = (str1: string, str2: string): number => { 45 | const distance = levenshteinDistance(str1, str2); 46 | const maxLen = Math.max(str1.length, str2.length); 47 | if (maxLen === 0) return 1; 48 | return 1 - distance / maxLen; 49 | }; 50 | 51 | export interface ImportResult { 52 | success: boolean; 53 | message?: string; 54 | songs: Song[]; 55 | } 56 | 57 | export const usePlaylist = () => { 58 | const [queue, setQueue] = useState([]); 59 | const [originalQueue, setOriginalQueue] = useState([]); 60 | 61 | const updateSongInQueue = useCallback( 62 | (id: string, updates: Partial) => { 63 | setQueue((prev) => 64 | prev.map((song) => (song.id === id ? { ...song, ...updates } : song)), 65 | ); 66 | setOriginalQueue((prev) => 67 | prev.map((song) => (song.id === id ? 
{ ...song, ...updates } : song)), 68 | ); 69 | }, 70 | [], 71 | ); 72 | 73 | const appendSongs = useCallback((songs: Song[]) => { 74 | if (songs.length === 0) return; 75 | setOriginalQueue((prev) => [...prev, ...songs]); 76 | setQueue((prev) => [...prev, ...songs]); 77 | }, []); 78 | 79 | const removeSongs = useCallback((ids: string[]) => { 80 | if (ids.length === 0) return; 81 | setQueue((prev) => { 82 | prev.forEach((song) => { 83 | if (ids.includes(song.id) && song.fileUrl && !song.fileUrl.startsWith("blob:")) { 84 | audioResourceCache.delete(song.fileUrl); 85 | } 86 | }); 87 | return prev.filter((song) => !ids.includes(song.id)); 88 | }); 89 | setOriginalQueue((prev) => prev.filter((song) => !ids.includes(song.id))); 90 | }, []); 91 | 92 | const addLocalFiles = useCallback( 93 | async (files: FileList | File[]) => { 94 | const fileList = 95 | files instanceof FileList ? Array.from(files) : Array.from(files); 96 | 97 | // Separate audio and lyrics files 98 | const audioFiles: File[] = []; 99 | const lyricsFiles: File[] = []; 100 | 101 | fileList.forEach((file) => { 102 | const ext = file.name.split(".").pop()?.toLowerCase(); 103 | if (ext === "lrc" || ext === "txt") { 104 | lyricsFiles.push(file); 105 | } else { 106 | audioFiles.push(file); 107 | } 108 | }); 109 | 110 | const newSongs: Song[] = []; 111 | 112 | // Build lyrics map: extract song title from filename (part after first "-") 113 | // Remove Netease IDs like (12345678) from title 114 | const lyricsMap = new Map(); 115 | lyricsFiles.forEach((file) => { 116 | const basename = file.name.replace(/\.[^/.]+$/, ""); 117 | const firstDashIndex = basename.indexOf("-"); 118 | 119 | // If has "-", use part after first dash as title, otherwise use full basename 120 | let title = firstDashIndex > 0 && firstDashIndex < basename.length - 1 121 | ? 
basename.substring(firstDashIndex + 1).trim() 122 | : basename; 123 | 124 | // Remove Netease ID pattern like (12345678) or [12345678] 125 | title = title.replace(/[\(\[]?\d{7,9}[\)\]]?/g, "").trim(); 126 | 127 | lyricsMap.set(title.toLowerCase(), file); 128 | }); 129 | 130 | // Process audio files 131 | for (let i = 0; i < audioFiles.length; i++) { 132 | const file = audioFiles[i]; 133 | const url = URL.createObjectURL(file); 134 | const basename = file.name.replace(/\.[^/.]+$/, ""); 135 | let title = basename; 136 | let artist = "Unknown Artist"; 137 | let coverUrl: string | undefined; 138 | let colors: string[] | undefined; 139 | let lyrics: { time: number; text: string }[] = []; 140 | 141 | const nameParts = title.split("-"); 142 | if (nameParts.length > 1) { 143 | artist = nameParts[0].trim(); 144 | title = nameParts[1].trim(); 145 | } 146 | 147 | try { 148 | const metadata = await parseAudioMetadata(file); 149 | if (metadata.title) title = metadata.title; 150 | if (metadata.artist) artist = metadata.artist; 151 | if (metadata.picture) { 152 | coverUrl = metadata.picture; 153 | colors = await extractColors(coverUrl); 154 | } 155 | 156 | // Check for embedded lyrics first (highest priority) 157 | if (metadata.lyrics && metadata.lyrics.trim().length > 0) { 158 | try { 159 | lyrics = parseLyrics(metadata.lyrics); 160 | } catch (err) { 161 | console.warn("Failed to parse embedded lyrics", err); 162 | } 163 | } 164 | 165 | // If no embedded lyrics, try to match lyrics by fuzzy matching 166 | if (lyrics.length === 0) { 167 | // Normalize song title for matching 168 | const songTitle = title.toLowerCase().trim(); 169 | 170 | // Try exact match first 171 | let matchedLyricsFile = lyricsMap.get(songTitle); 172 | 173 | // If no exact match, try fuzzy matching 174 | if (!matchedLyricsFile && lyricsMap.size > 0) { 175 | let bestMatch: { file: File; score: number } | null = null; 176 | const minSimilarity = 0.75; // Require 75% similarity (allows 1-2 errors for typical 
song titles) 177 | 178 | for (const [lyricsTitle, lyricsFile] of lyricsMap.entries()) { 179 | const similarity = calculateSimilarity(songTitle, lyricsTitle); 180 | 181 | if (similarity >= minSimilarity) { 182 | if (!bestMatch || similarity > bestMatch.score) { 183 | bestMatch = { file: lyricsFile, score: similarity }; 184 | } 185 | } 186 | } 187 | 188 | if (bestMatch) { 189 | matchedLyricsFile = bestMatch.file; 190 | } 191 | } 192 | 193 | // Load matched lyrics file 194 | if (matchedLyricsFile) { 195 | const reader = new FileReader(); 196 | const lrcText = await new Promise((resolve) => { 197 | reader.onload = (e) => 198 | resolve((e.target?.result as string) || ""); 199 | reader.readAsText(matchedLyricsFile!); 200 | }); 201 | if (lrcText) { 202 | lyrics = parseLyrics(lrcText); 203 | } 204 | } 205 | } 206 | } catch (err) { 207 | console.warn("Local metadata extraction failed", err); 208 | } 209 | 210 | newSongs.push({ 211 | id: `local-${Date.now()}-${i}`, 212 | title, 213 | artist, 214 | fileUrl: url, 215 | coverUrl, 216 | lyrics, 217 | colors: colors && colors.length > 0 ? colors : undefined, 218 | needsLyricsMatch: lyrics.length === 0, // Flag for cloud matching 219 | }); 220 | } 221 | 222 | appendSongs(newSongs); 223 | return newSongs; 224 | }, 225 | [appendSongs], 226 | ); 227 | 228 | const importFromUrl = useCallback( 229 | async (input: string): Promise => { 230 | const parsed = parseNeteaseLink(input); 231 | if (!parsed) { 232 | return { 233 | success: false, 234 | message: 235 | "Invalid Netease URL. Use https://music.163.com/#/song?id=... 
or playlist", 236 | songs: [], 237 | }; 238 | } 239 | 240 | const newSongs: Song[] = []; 241 | try { 242 | if (parsed.type === "playlist") { 243 | const songs = await fetchNeteasePlaylist(parsed.id); 244 | songs.forEach((song) => { 245 | newSongs.push({ 246 | ...song, 247 | fileUrl: getNeteaseAudioUrl(song.id), 248 | lyrics: [], 249 | colors: [], 250 | needsLyricsMatch: true, 251 | }); 252 | }); 253 | } else { 254 | const song = await fetchNeteaseSong(parsed.id); 255 | if (song) { 256 | newSongs.push({ 257 | ...song, 258 | fileUrl: getNeteaseAudioUrl(song.id), 259 | lyrics: [], 260 | colors: [], 261 | needsLyricsMatch: true, 262 | }); 263 | } 264 | } 265 | } catch (err) { 266 | console.error("Failed to fetch Netease music", err); 267 | return { 268 | success: false, 269 | message: "Failed to load songs from URL", 270 | songs: [], 271 | }; 272 | } 273 | 274 | appendSongs(newSongs); 275 | if (newSongs.length === 0) { 276 | return { 277 | success: false, 278 | message: "Failed to load songs from URL", 279 | songs: [], 280 | }; 281 | } 282 | 283 | return { success: true, songs: newSongs }; 284 | }, 285 | [appendSongs], 286 | ); 287 | 288 | return { 289 | queue, 290 | originalQueue, 291 | updateSongInQueue, 292 | removeSongs, 293 | addLocalFiles, 294 | importFromUrl, 295 | setQueue, 296 | setOriginalQueue, 297 | }; 298 | }; 299 | -------------------------------------------------------------------------------- /services/lyrics/netease.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Netease YRC format parser. 
3 | * 4 | * Supports: 5 | * - YRC format: [startMs,duration](wordStartMs,wordDuration,flag)word 6 | * - JSON metadata: {"t":0,"c":[{"tx":"text"}]} 7 | * - Fallback LRC: [mm:ss.xx]text 8 | * 9 | * Features: 10 | * - Single-pass YRC parsing 11 | * - Word timing enrichment for LRC content 12 | * - Inline duplicate detection 13 | * - Automatic word duration fixing 14 | */ 15 | 16 | import { LyricLine, LyricWord, isMetadataLine } from "./types"; 17 | import { parseLrc } from "./lrc"; 18 | import { 19 | createWord, 20 | createLine, 21 | mergePunctuation, 22 | normalizeText, 23 | insertInterludes, 24 | addDurations, 25 | INTERLUDE_TEXT, 26 | } from "./parser"; 27 | 28 | const MAX_WORD_DURATION = 10.0; // Max duration per word in seconds 29 | 30 | /** 31 | * Token types for Netease YRC parsing. 32 | */ 33 | type NeteaseToken = 34 | | { type: "yrc"; time: number; duration: number; words: LyricWord[]; text: string } 35 | | { type: "json"; time: number; text: string } 36 | | { type: "lrc"; time: number; text: string }; 37 | 38 | /** 39 | * Parse JSON metadata line. 40 | */ 41 | const parseJsonLine = (line: string): NeteaseToken | null => { 42 | try { 43 | const json = JSON.parse(line); 44 | if (json.c && Array.isArray(json.c)) { 45 | const text = json.c.map((item: { tx: string }) => item.tx).join(""); 46 | return { 47 | type: "json", 48 | time: (json.t || 0) / 1000, 49 | text, 50 | }; 51 | } 52 | } catch { 53 | // Not valid JSON 54 | } 55 | return null; 56 | }; 57 | 58 | /** 59 | * Parse YRC line with word timing. 
60 | */ 61 | const parseYrcLine = (line: string): NeteaseToken | null => { 62 | const match = line.match(/^\[(\d+),(\d+)\](.*)/); 63 | if (!match) return null; 64 | 65 | const startTime = parseInt(match[1], 10) / 1000; 66 | const duration = parseInt(match[2], 10) / 1000; 67 | const content = match[3]; 68 | 69 | const words: LyricWord[] = []; 70 | let text = ""; 71 | 72 | // Parse word timing: (startMs,durationMs,flag)wordText 73 | const wordRegex = /\((\d+),(\d+),(\d+)\)([^\(]*)/g; 74 | const matches = [...content.matchAll(wordRegex)]; 75 | 76 | if (matches.length > 0) { 77 | for (const m of matches) { 78 | const wordStart = parseInt(m[1], 10) / 1000; 79 | const wordDuration = parseInt(m[2], 10) / 1000; 80 | const wordText = m[4]; 81 | 82 | text += wordText; 83 | words.push(createWord(wordText, wordStart, wordStart + wordDuration)); 84 | } 85 | } else { 86 | text = content; 87 | } 88 | 89 | return { 90 | type: "yrc", 91 | time: startTime, 92 | duration, 93 | words: mergePunctuation(words), 94 | text, 95 | }; 96 | }; 97 | 98 | /** 99 | * Tokenize Netease content into structured tokens. 
100 | */ 101 | const tokenizeNetease = (content: string): NeteaseToken[] => { 102 | const lines = content.split("\n"); 103 | const tokens: NeteaseToken[] = []; 104 | 105 | for (const line of lines) { 106 | const trimmed = line.trim(); 107 | if (!trimmed) continue; 108 | 109 | // Try JSON format 110 | if (trimmed.startsWith("{") && trimmed.endsWith("}")) { 111 | const jsonToken = parseJsonLine(trimmed); 112 | if (jsonToken) { 113 | tokens.push(jsonToken); 114 | continue; 115 | } 116 | } 117 | 118 | // Try YRC format 119 | const yrcToken = parseYrcLine(trimmed); 120 | if (yrcToken) { 121 | tokens.push(yrcToken); 122 | continue; 123 | } 124 | 125 | // Fallback to LRC format 126 | const lrcMatch = trimmed.match(/\[(\d{2}):(\d{2})\.(\d{2,3})\](.*)/); 127 | if (lrcMatch) { 128 | const minutes = parseInt(lrcMatch[1], 10); 129 | const seconds = parseInt(lrcMatch[2], 10); 130 | const msStr = lrcMatch[3]; 131 | const ms = parseInt(msStr, 10); 132 | const msValue = msStr.length === 3 ? ms / 1000 : ms / 100; 133 | const time = minutes * 60 + seconds + msValue; 134 | 135 | tokens.push({ 136 | type: "lrc", 137 | time, 138 | text: lrcMatch[4].trim(), 139 | }); 140 | } 141 | } 142 | 143 | // Sort by time 144 | tokens.sort((a, b) => a.time - b.time); 145 | 146 | return tokens; 147 | }; 148 | 149 | /** 150 | * Fix abnormal word durations in YRC tokens. 151 | */ 152 | const fixWordDurations = (tokens: NeteaseToken[]): void => { 153 | for (let i = 0; i < tokens.length; i++) { 154 | const token = tokens[i]; 155 | if (token.type !== "yrc" || !token.words.length) continue; 156 | 157 | const nextToken = tokens[i + 1]; 158 | 159 | for (let j = 0; j < token.words.length; j++) { 160 | const word = token.words[j]; 161 | const nextWord = token.words[j + 1]; 162 | 163 | // Calculate max end time 164 | const maxEnd = nextWord 165 | ? nextWord.startTime 166 | : nextToken 167 | ? 
nextToken.time 168 | : word.startTime + MAX_WORD_DURATION; 169 | 170 | // Fix duration if too long 171 | const duration = word.endTime - word.startTime; 172 | if (duration > MAX_WORD_DURATION) { 173 | word.endTime = Math.min(word.startTime + MAX_WORD_DURATION, maxEnd); 174 | } 175 | 176 | // Ensure doesn't exceed max 177 | if (word.endTime > maxEnd) { 178 | word.endTime = maxEnd; 179 | } 180 | 181 | // Ensure end > start 182 | if (word.endTime <= word.startTime) { 183 | word.endTime = word.startTime + 0.1; 184 | } 185 | } 186 | } 187 | }; 188 | 189 | /** 190 | * Convert tokens to lyric lines, merging translations. 191 | */ 192 | const tokensToLines = (tokens: NeteaseToken[]): LyricLine[] => { 193 | const yrcTokens = tokens.filter(t => t.type === "yrc"); 194 | const otherTokens = tokens.filter(t => t.type !== "yrc"); 195 | const hasYrcWordAt = (time: number): boolean => { 196 | return yrcTokens.some(t => { 197 | if (t.type !== "yrc" || !t.words.length) return false; 198 | return t.words.some(word => word.startTime <= time && word.endTime > time); 199 | }); 200 | }; 201 | 202 | if (yrcTokens.length === 0) { 203 | // No YRC data, convert all to plain lines 204 | return tokens 205 | .filter(t => !isMetadataLine(t.text)) 206 | .map(t => { 207 | if (!t.text.trim()) { 208 | return createLine(t.time, INTERLUDE_TEXT, { isInterlude: true }); 209 | } 210 | return createLine(t.time, t.text, { 211 | words: t.type === "yrc" && t.words.length > 0 ? 
t.words : undefined, 212 | isPreciseTiming: t.type === "yrc", 213 | }); 214 | }); 215 | } 216 | 217 | // Use YRC as main lines, others as translations 218 | const lines: LyricLine[] = []; 219 | const usedIndices = new Set(); 220 | 221 | for (const yrcToken of yrcTokens) { 222 | const translations: string[] = []; 223 | 224 | // Find translations within 3s tolerance 225 | for (let i = 0; i < otherTokens.length; i++) { 226 | if (usedIndices.has(i)) continue; 227 | const other = otherTokens[i]; 228 | if (isMetadataLine(other.text)) continue; 229 | 230 | const timeDiff = Math.abs(other.time - yrcToken.time); 231 | if (timeDiff < 3.0) { 232 | const normalized = normalizeText(other.text); 233 | const yrcNormalized = normalizeText(yrcToken.text); 234 | 235 | if (normalized && normalized !== yrcNormalized) { 236 | translations.push(other.text.trim()); 237 | usedIndices.add(i); 238 | } 239 | } 240 | } 241 | 242 | if (!yrcToken.text.trim()) { 243 | lines.push(createLine(yrcToken.time, INTERLUDE_TEXT, { isInterlude: true })); 244 | } else { 245 | lines.push( 246 | createLine(yrcToken.time, yrcToken.text, { 247 | words: yrcToken.words.length > 0 ? yrcToken.words : undefined, 248 | translation: translations.length > 0 ? 
translations.join("\n") : undefined, 249 | isPreciseTiming: true, 250 | }) 251 | ); 252 | } 253 | } 254 | 255 | // Add orphan lines not matched as translations 256 | for (let i = 0; i < otherTokens.length; i++) { 257 | if (usedIndices.has(i)) continue; 258 | const token = otherTokens[i]; 259 | if (isMetadataLine(token.text)) continue; 260 | 261 | if (!token.text.trim()) { 262 | if (hasYrcWordAt(token.time)) { 263 | continue; 264 | } 265 | lines.push(createLine(token.time, INTERLUDE_TEXT, { isInterlude: true })); 266 | } else { 267 | lines.push(createLine(token.time, token.text, { isPreciseTiming: false })); 268 | } 269 | } 270 | 271 | // Re-sort by time 272 | lines.sort((a, b) => a.time - b.time); 273 | 274 | return lines; 275 | }; 276 | 277 | /** 278 | * Deduplicate lines with same normalized text within time window. 279 | */ 280 | const deduplicate = (lines: LyricLine[]): LyricLine[] => { 281 | const result: LyricLine[] = []; 282 | 283 | for (const line of lines) { 284 | const prev = result[result.length - 1]; 285 | 286 | if ( 287 | prev && 288 | normalizeText(prev.text) === normalizeText(line.text) && 289 | Math.abs(line.time - prev.time) <= 1.5 290 | ) { 291 | // Merge: keep line with more words 292 | if ((line.words?.length ?? 0) > (prev.words?.length ?? 0)) { 293 | prev.words = line.words; 294 | } 295 | // Merge translations 296 | if (!prev.translation && line.translation) { 297 | prev.translation = line.translation; 298 | } 299 | } else { 300 | result.push(line); 301 | } 302 | } 303 | 304 | return result; 305 | }; 306 | 307 | /** 308 | * Enrich LRC lines with YRC word timing. 
309 | */ 310 | const enrichWithWordTiming = (lrcLines: LyricLine[], yrcTokens: NeteaseToken[]): LyricLine[] => { 311 | const yrcData = yrcTokens 312 | .filter(t => t.type === "yrc" && t.words.length > 0 && !isMetadataLine(t.text)) 313 | .map(t => ({ 314 | token: t, 315 | normalized: normalizeText(t.text), 316 | used: false, 317 | })) 318 | .filter(d => d.normalized); 319 | 320 | return lrcLines.map(line => { 321 | if (!line.text || line.isInterlude) return line; 322 | 323 | const targetNormalized = normalizeText(line.text); 324 | if (!targetNormalized) return line; 325 | 326 | // Find matching YRC segments 327 | let bestMatch: { indexes: number[]; score: number } | null = null; 328 | 329 | for (let start = 0; start < yrcData.length; start++) { 330 | if (yrcData[start].used) continue; 331 | 332 | const timeDiff = Math.abs(yrcData[start].token.time - line.time); 333 | if (timeDiff > 2.5) continue; 334 | 335 | if (!targetNormalized.startsWith(yrcData[start].normalized)) continue; 336 | 337 | // Try to match consecutive segments 338 | let combined = yrcData[start].normalized; 339 | const indexes = [start]; 340 | 341 | while ( 342 | combined.length < targetNormalized.length && 343 | indexes[indexes.length - 1] + 1 < yrcData.length && 344 | !yrcData[indexes[indexes.length - 1] + 1].used 345 | ) { 346 | const next = yrcData[indexes[indexes.length - 1] + 1]; 347 | const prospective = combined + next.normalized; 348 | 349 | if (!targetNormalized.startsWith(prospective)) break; 350 | 351 | combined = prospective; 352 | indexes.push(indexes[indexes.length - 1] + 1); 353 | } 354 | 355 | if (combined === targetNormalized) { 356 | const score = timeDiff; 357 | if (!bestMatch || score < bestMatch.score) { 358 | bestMatch = { indexes, score }; 359 | } 360 | } 361 | } 362 | 363 | // Apply best match 364 | if (bestMatch) { 365 | const words: LyricWord[] = []; 366 | 367 | for (const idx of bestMatch.indexes) { 368 | yrcData[idx].used = true; 369 | const token = yrcData[idx].token as 
Extract; 370 | words.push(...token.words.map(w => ({ ...w }))); 371 | } 372 | 373 | const adjustedWords = alignWordsWithText(line.text, words); 374 | 375 | return { 376 | ...line, 377 | words: adjustedWords, 378 | isPreciseTiming: true, 379 | }; 380 | } 381 | 382 | return line; 383 | }); 384 | }; 385 | 386 | const alignWordsWithText = (text: string, words: LyricWord[]): LyricWord[] => { 387 | if (!text || !words.length) return words; 388 | 389 | const chars = Array.from(text); 390 | let pointer = 0; 391 | 392 | const adjusted = words.map(word => { 393 | const normalizedTarget = normalizeText(word.text); 394 | if (!normalizedTarget) { 395 | return { ...word }; 396 | } 397 | 398 | let chunk = ""; 399 | let matched = ""; 400 | 401 | while (pointer < chars.length && matched.length < normalizedTarget.length) { 402 | const char = chars[pointer]; 403 | chunk += char; 404 | const normalizedChar = normalizeText(char); 405 | if (normalizedChar) { 406 | matched += normalizedChar; 407 | } 408 | pointer++; 409 | } 410 | 411 | while (pointer < chars.length) { 412 | const lookahead = chars[pointer]; 413 | if (normalizeText(lookahead)) { 414 | break; 415 | } 416 | chunk += lookahead; 417 | pointer++; 418 | } 419 | 420 | return chunk 421 | ? { 422 | ...word, 423 | text: chunk, 424 | } 425 | : { ...word }; 426 | }); 427 | 428 | if (pointer < chars.length && adjusted.length) { 429 | adjusted[adjusted.length - 1] = { 430 | ...adjusted[adjusted.length - 1], 431 | text: `${adjusted[adjusted.length - 1].text}${chars.slice(pointer).join("")}`, 432 | }; 433 | } 434 | 435 | return adjusted; 436 | }; 437 | 438 | /** 439 | * Check if content is Netease format. 
440 | */ 441 | export const isNeteaseFormat = (content: string): boolean => { 442 | return content.split("\n").some(line => { 443 | const trimmed = line.trim(); 444 | return ( 445 | /^\[\d+,\d+\]/.test(trimmed) || 446 | (trimmed.startsWith("{") && trimmed.includes('"c":[')) 447 | ); 448 | }); 449 | }; 450 | 451 | /** 452 | * Parse Netease YRC format lyrics. 453 | * 454 | * If LRC content is provided, use it as the base and enrich with YRC word timing. 455 | * Otherwise, parse YRC directly and merge with other formats as translations. 456 | */ 457 | export const parseNeteaseLyrics = ( 458 | yrcContent: string, 459 | lrcContent?: string 460 | ): LyricLine[] => { 461 | if (!yrcContent?.trim()) return []; 462 | 463 | const tokens = tokenizeNetease(yrcContent); 464 | fixWordDurations(tokens); 465 | 466 | // If LRC content provided, use as base and enrich 467 | if (lrcContent?.trim()) { 468 | const baseLines = parseLrc(lrcContent); 469 | return addDurations(enrichWithWordTiming(baseLines, tokens)); 470 | } 471 | 472 | // Otherwise parse YRC directly 473 | const lines = tokensToLines(tokens); 474 | const deduped = deduplicate(lines); 475 | const withInterludes = insertInterludes(deduped); 476 | 477 | return addDurations(withInterludes); 478 | }; 479 | --------------------------------------------------------------------------------