141 |
153 | <>
154 | {/* Shared backdrop */}
155 | <div
156 | onClick={() => setIsOpen(false)}
158 | />
159 |
160 | {/* Help Dialog */}
161 | {isOpen && (
162 | <div>
174 | {/* Content Container */}
175 | <div>
176 | {/* Header */}
177 | <div>
178 | <div>
179 | <h2>
180 | Keyboard Shortcuts
181 | </h2>
182 | <p>
183 | Quick controls for playback
184 | </p>
185 | </div>
186 | </div>
205 |
206 |
207 | {/* Grid */}
208 | <div>
209 |
210 |
211 |
212 |
213 |
214 |
215 |
216 |
217 |
218 |
219 |
220 | </div>
221 | {/* Footer Hint */}
222 | <p>
223 | Press{" "}
224 | <kbd>
225 | Esc
226 | </kbd>{" "}
227 | to close
228 | </p>
229 | </div>
230 | </div>
231 | )}
232 | </>,
233 | document.body,
234 | );
235 | };
236 |
237 | const ShortcutItem = ({ keys, label }: { keys: string[]; label: string }) => (
238 | <div>
239 | <span>
240 | {label}
241 | </span>
242 | <div>
243 | {keys.map((k, i) => (
244 | <kbd key={i}>
248 | {k}
249 | </kbd>
250 | ))}
251 | </div>
252 | </div>
253 | );
254 |
255 | export default KeyboardShortcuts;
256 |
--------------------------------------------------------------------------------
/components/SmartImage.tsx:
--------------------------------------------------------------------------------
1 | import React, {
2 | CSSProperties,
3 | ImgHTMLAttributes,
4 | useCallback,
5 | useEffect,
6 | useLayoutEffect,
7 | useMemo,
8 | useRef,
9 | useState,
10 | } from "react";
11 | import { imageResourceCache } from "../services/cache";
12 |
13 | const makeCacheKey = (src: string, width: number, height: number) => {
14 | const dpr = typeof window === "undefined" ? 1 : window.devicePixelRatio || 1;
15 | const ratio = (width / height).toFixed(3);
16 | return `${src}|${ratio}|${width}x${height}@${Math.round(dpr * 100)}`;
17 | };
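// Example (hypothetical values): for src "https://example.com/cover.jpg" rendered at
// 300x300 CSS pixels on a 2x display, the key is
// "https://example.com/cover.jpg|1.000|300x300@200".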
18 |
19 | interface SmartImageProps
20 | extends Omit<ImgHTMLAttributes<HTMLImageElement>, "src" | "className" | "style"> {
21 | src: string;
22 | containerClassName?: string;
23 | containerStyle?: CSSProperties;
24 | imgClassName?: string;
25 | imgStyle?: CSSProperties;
26 | placeholder?: React.ReactNode;
27 | targetWidth?: number;
28 | targetHeight?: number;
29 | loading?: "lazy" | "eager";
30 | }
31 |
32 | const DEFAULT_PLACEHOLDER = (
33 | <div>
34 | ♪
35 | </div>
36 | );
37 |
38 | const SmartImage: React.FC<SmartImageProps> = ({
39 | src,
40 | containerClassName,
41 | containerStyle,
42 | imgClassName,
43 | imgStyle,
44 | placeholder,
45 | alt = "",
46 | targetWidth,
47 | targetHeight,
48 | loading = "lazy",
49 | ...imgProps
50 | }) => {
51 | const [isVisible, setIsVisible] = useState(loading === "eager");
52 |
53 | const containerRef = useRef<HTMLDivElement>(null);
54 | const [measuredSize, setMeasuredSize] = useState<{ width: number; height: number } | null>(
55 | null,
56 | );
57 | const [displaySrc, setDisplaySrc] = useState<string | null>(null);
58 | const currentUrlRef = useRef<string | null>(null);
59 | const currentUrlIsBlobRef = useRef(false);
60 |
61 | const revokeCurrentObjectUrl = useCallback(() => {
62 | if (currentUrlRef.current && currentUrlIsBlobRef.current) {
63 | URL.revokeObjectURL(currentUrlRef.current);
64 | }
65 | }, []);
66 |
67 | const resetDisplay = useCallback(() => {
68 | revokeCurrentObjectUrl();
69 | currentUrlRef.current = null;
70 | currentUrlIsBlobRef.current = false;
71 | setDisplaySrc(null);
72 | }, [revokeCurrentObjectUrl]);
73 |
74 | const setFinalUrl = useCallback(
75 | (url: string, isBlob: boolean) => {
76 | revokeCurrentObjectUrl();
77 | currentUrlRef.current = url;
78 | currentUrlIsBlobRef.current = isBlob;
79 | setDisplaySrc(url);
80 | },
81 | [revokeCurrentObjectUrl],
82 | );
83 |
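// Lazy visibility: unless loading="eager", observe the container and flip isVisible
// shortly before it scrolls into view (200px rootMargin below).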
84 | useEffect(() => {
85 | if (loading === "eager") {
86 | setIsVisible(true);
87 | return undefined;
88 | }
89 |
90 | const element = containerRef.current;
91 | if (!element) {
92 | setIsVisible(false);
93 | return undefined;
94 | }
95 |
96 | if (typeof IntersectionObserver === "undefined") {
97 | setIsVisible(true);
98 | return undefined;
99 | }
100 |
101 | const observer = new IntersectionObserver(
102 | (entries) => {
103 | const entry = entries[0];
104 | setIsVisible(entry?.isIntersecting ?? false);
105 | },
106 | {
107 | rootMargin: "200px",
108 | threshold: 0.01,
109 | },
110 | );
111 |
112 | observer.observe(element);
113 | return () => {
114 | observer.disconnect();
115 | };
116 | }, [loading]);
117 |
118 | useLayoutEffect(() => {
119 | if (typeof targetWidth === "number" && typeof targetHeight === "number") {
120 | setMeasuredSize({
121 | width: targetWidth,
122 | height: targetHeight,
123 | });
124 | return;
125 | }
126 |
127 | const element = containerRef.current;
128 | if (!element) {
129 | setMeasuredSize(null);
130 | return;
131 | }
132 |
133 | const updateSize = () => {
134 | const rect = element.getBoundingClientRect();
135 | setMeasuredSize((prev) => {
136 | const roundedWidth = Math.round(rect.width);
137 | const roundedHeight = Math.round(rect.height);
138 | if (
139 | prev &&
140 | Math.round(prev.width) === roundedWidth &&
141 | Math.round(prev.height) === roundedHeight
142 | ) {
143 | return prev;
144 | }
145 | return {
146 | width: rect.width,
147 | height: rect.height,
148 | };
149 | });
150 | };
151 |
152 | updateSize();
153 |
154 | if (typeof ResizeObserver === "undefined") {
155 | return;
156 | }
157 |
158 | const observer = new ResizeObserver(() => {
159 | updateSize();
160 | });
161 |
162 | observer.observe(element);
163 | return () => observer.disconnect();
164 | }, [targetHeight, targetWidth]);
165 |
166 | const normalizedSize = useMemo(() => {
167 | if (!measuredSize) return null;
168 | const width = Math.max(1, Math.round(measuredSize.width));
169 | const height = Math.max(1, Math.round(measuredSize.height));
170 | if (width <= 0 || height <= 0) return null;
171 | return { width, height };
172 | }, [measuredSize]);
173 |
174 | const effectiveKey = useMemo(() => {
175 | if (!normalizedSize || !src) return null;
176 | return makeCacheKey(src, normalizedSize.width, normalizedSize.height);
177 | }, [normalizedSize, src]);
178 |
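// Processing pipeline for the current src: decode it once, downscale to the measured
// container size (never upscaling), re-encode as JPEG at quality 0.78, cache the Blob
// under the makeCacheKey key, and display it through an object URL.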
179 | useEffect(() => {
180 | if (!normalizedSize || !src || !effectiveKey) {
181 | resetDisplay();
182 | return;
183 | }
184 | if (!isVisible) {
185 | return;
186 | }
187 |
188 | let canceled = false;
189 | const cachedBlob = imageResourceCache.get(effectiveKey);
190 | if (cachedBlob) {
191 | const cachedUrl = URL.createObjectURL(cachedBlob);
192 | setFinalUrl(cachedUrl, true);
193 | return () => {
194 | canceled = true;
195 | URL.revokeObjectURL(cachedUrl);
196 | };
197 | }
198 |
199 | const imageElement = new Image();
200 |
201 | const handleFallback = () => {
202 | if (canceled) return;
203 | resetDisplay();
204 | };
205 |
206 | const loadImage = () => {
207 | if (canceled) return;
208 | const ratio = Math.min(
209 | normalizedSize.width / imageElement.naturalWidth,
210 | normalizedSize.height / imageElement.naturalHeight,
211 | 1,
212 | );
213 | const targetWidth = Math.max(1, Math.round(imageElement.naturalWidth * ratio));
214 | const targetHeight = Math.max(1, Math.round(imageElement.naturalHeight * ratio));
215 |
216 | const canvas = document.createElement("canvas");
217 | canvas.width = targetWidth;
218 | canvas.height = targetHeight;
219 |
220 | const ctx = canvas.getContext("2d");
221 | if (!ctx) {
222 | handleFallback();
223 | return;
224 | }
225 |
226 | ctx.drawImage(imageElement, 0, 0, targetWidth, targetHeight);
227 |
228 | try {
229 | canvas.toBlob(
230 | (blob) => {
231 | if (!blob || canceled) {
232 | handleFallback();
233 | return;
234 | }
235 |
236 | try {
237 | imageResourceCache.set(effectiveKey, blob);
238 | } catch {
239 | // Silently ignore cache failures.
240 | }
241 |
242 | const optimizedUrl = URL.createObjectURL(blob);
243 | if (canceled) {
244 | URL.revokeObjectURL(optimizedUrl);
245 | return;
246 | }
247 | setFinalUrl(optimizedUrl, true);
248 | },
249 | "image/jpeg",
250 | 0.78,
251 | );
252 | } catch {
253 | handleFallback();
254 | }
255 | };
256 |
257 | imageElement.crossOrigin = "anonymous";
258 | imageElement.onload = () => {
259 | if (canceled) return;
260 | if (!imageElement.naturalWidth || !imageElement.naturalHeight) {
261 | handleFallback();
262 | return;
263 | }
264 | loadImage();
265 | };
266 | imageElement.onerror = () => {
267 | if (canceled) return;
268 | handleFallback();
269 | };
270 | imageElement.src = src;
271 |
272 | return () => {
273 | canceled = true;
274 | imageElement.onload = null;
275 | imageElement.onerror = null;
276 | imageElement.src = "";
277 | };
278 | }, [effectiveKey, normalizedSize, resetDisplay, setFinalUrl, src, isVisible]);
279 |
280 | return (
281 | <div
282 | ref={containerRef}
283 | className={containerClassName}
284 | style={containerStyle}
285 | >
286 | {displaySrc ? (
287 | <img
288 | src={displaySrc}
289 | alt={alt}
290 | className={imgClassName}
291 | style={imgStyle}
292 | loading={loading}
293 | {...imgProps}
294 | />
295 | ) : (
296 | placeholder ?? DEFAULT_PLACEHOLDER
297 | )}
298 | </div>
299 | );
300 | };
301 |
302 | export default SmartImage;
303 |
--------------------------------------------------------------------------------
/components/background/mobile/index.ts:
--------------------------------------------------------------------------------
1 | import { loadImageElementWithCache } from "../../../services/cache";
2 |
3 | interface FlowingLayer {
4 | image: HTMLCanvasElement;
5 | startX: number;
6 | startY: number;
7 | startScale: number;
8 | duration: number;
9 | startTime: number;
10 | }
11 |
12 | const defaultColors = ["#8b5cf6", "#ec4899", "#f97316", "#3b82f6"];
13 |
14 | const MESH_FLOATS = [
15 | -0.2351, -0.0967, 0.2135, -0.1414, 0.9221, -0.0908, 0.9221, -0.0685, 1.3027,
16 | 0.0253, 1.2351, 0.1786, -0.3768, 0.1851, 0.2, 0.2, 0.6615, 0.3146, 0.9543,
17 | 0.0, 0.6969, 0.1911, 1.0, 0.2, 0.0, 0.4, 0.2, 0.4, 0.0776, 0.2318, 0.6, 0.4,
18 | 0.6615, 0.3851, 1.0, 0.4, 0.0, 0.6, 0.1291, 0.6, 0.4, 0.6, 0.4, 0.4304,
19 | 0.4264, 0.5792, 1.2029, 0.8188, -0.1192, 1.0, 0.6, 0.8, 0.4264, 0.8104, 0.6,
20 | 0.8, 0.8, 0.8, 1.0, 0.8, 0.0, 1.0, 0.0776, 1.0283, 0.4, 1.0, 0.6, 1.0, 0.8,
21 | 1.0, 1.1868, 1.0283,
22 | ];
23 | const scaleCanvas = (
24 | source: HTMLCanvasElement,
25 | newWidth: number,
26 | newHeight: number,
27 | ): HTMLCanvasElement => {
28 | const canvas = document.createElement("canvas");
29 | canvas.width = newWidth;
30 | canvas.height = newHeight;
31 | const ctx = canvas.getContext("2d", { willReadFrequently: true });
32 | if (!ctx) return source;
33 |
34 | ctx.imageSmoothingEnabled = true;
35 | ctx.imageSmoothingQuality = "high";
36 | ctx.drawImage(source, 0, 0, newWidth, newHeight);
37 | return canvas;
38 | };
39 |
40 | const blurCanvas = (source: HTMLCanvasElement, radius: number) => {
41 | const canvas = document.createElement("canvas");
42 | canvas.width = source.width;
43 | canvas.height = source.height;
44 | const ctx = canvas.getContext("2d");
45 | if (!ctx) return source;
46 |
47 | ctx.filter = `blur(${radius}px)`;
48 | ctx.drawImage(source, 0, 0);
49 | return canvas;
50 | };
51 |
52 | const applyMeshDistortion = (
53 | source: HTMLCanvasElement,
54 | meshVerts: number[],
55 | ) => {
56 | const canvas = document.createElement("canvas");
57 | canvas.width = source.width;
58 | canvas.height = source.height;
59 | const ctx = canvas.getContext("2d");
60 | if (!ctx) return source;
61 |
62 | const gridWidth = 5;
63 | const gridHeight = 5;
64 |
65 | const verts: number[] = [];
66 | for (let i = 0; i < meshVerts.length; i += 2) {
67 | verts.push(meshVerts[i] * source.width);
68 | verts.push(meshVerts[i + 1] * source.height);
69 | }
70 |
71 | for (let row = 0; row < gridHeight; row++) {
72 | for (let col = 0; col < gridWidth; col++) {
73 | const topLeft = row * 6 + col;
74 | const topRight = topLeft + 1;
75 | const bottomLeft = (row + 1) * 6 + col;
76 | const bottomRight = bottomLeft + 1;
77 |
78 | const srcX = (col / gridWidth) * source.width;
79 | const srcY = (row / gridHeight) * source.height;
80 | const srcW = source.width / gridWidth;
81 | const srcH = source.height / gridHeight;
82 |
83 | const x1 = verts[topLeft * 2];
84 | const y1 = verts[topLeft * 2 + 1];
85 | const x2 = verts[topRight * 2];
86 | const y2 = verts[topRight * 2 + 1];
87 | const x3 = verts[bottomRight * 2];
88 | const y3 = verts[bottomRight * 2 + 1];
89 | const x4 = verts[bottomLeft * 2];
90 | const y4 = verts[bottomLeft * 2 + 1];
91 |
92 | ctx.save();
93 | ctx.beginPath();
94 | ctx.moveTo(x1, y1);
95 | ctx.lineTo(x2, y2);
96 | ctx.lineTo(x4, y4);
97 | ctx.closePath();
98 | ctx.clip();
99 |
100 | const dx1 = x2 - x1;
101 | const dy1 = y2 - y1;
102 | const dx2 = x4 - x1;
103 | const dy2 = y4 - y1;
104 |
105 | if (Math.abs(dx1 * dy2 - dx2 * dy1) > 1) {
106 | ctx.transform(dx1 / srcW, dy1 / srcW, dx2 / srcH, dy2 / srcH, x1, y1);
107 | ctx.drawImage(source, srcX, srcY, srcW, srcH, 0, 0, srcW, srcH);
108 | }
109 | ctx.restore();
110 |
111 | ctx.save();
112 | ctx.beginPath();
113 | ctx.moveTo(x2, y2);
114 | ctx.lineTo(x3, y3);
115 | ctx.lineTo(x4, y4);
116 | ctx.closePath();
117 | ctx.clip();
118 |
119 | const dx3 = x3 - x2;
120 | const dy3 = y3 - y2;
121 | const dx4 = x4 - x2;
122 | const dy4 = y4 - y2;
123 |
124 | if (Math.abs(dx3 * dy4 - dx4 * dy3) > 1) {
125 | ctx.transform(dx3 / srcW, dy3 / srcW, dx4 / srcH, dy4 / srcH, x2, y2);
126 | ctx.drawImage(source, srcX, srcY, srcW, srcH, 0, 0, srcW, srcH);
127 | }
128 | ctx.restore();
129 | }
130 | }
131 |
132 | return canvas;
133 | };
134 |
135 | const adjustSaturation = (source: HTMLCanvasElement, saturation: number) => {
136 | const canvas = document.createElement("canvas");
137 | canvas.width = source.width;
138 | canvas.height = source.height;
139 | const ctx = canvas.getContext("2d");
140 | if (!ctx) return source;
141 |
142 | ctx.filter = `saturate(${saturation})`;
143 | ctx.drawImage(source, 0, 0);
144 | return canvas;
145 | };
146 |
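// Samples only the centre pixel and weights it with the Rec. 601 luma coefficients,
// so this is a cheap approximation of overall brightness rather than a true average.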
147 | const getBrightness = (canvas: HTMLCanvasElement) => {
148 | const ctx = canvas.getContext("2d", { willReadFrequently: true });
149 | if (!ctx) return 0.5;
150 |
151 | const centerX = Math.floor(canvas.width / 2);
152 | const centerY = Math.floor(canvas.height / 2);
153 | const pixel = ctx.getImageData(centerX, centerY, 1, 1).data;
154 | const r = pixel[0] / 255;
155 | const g = pixel[1] / 255;
156 | const b = pixel[2] / 255;
157 | return 0.299 * r + 0.587 * g + 0.114 * b;
158 | };
159 |
160 | const applyBrightnessMask = (canvas: HTMLCanvasElement) => {
161 | const brightness = getBrightness(canvas);
162 | const ctx = canvas.getContext("2d");
163 | if (!ctx) return canvas;
164 |
165 | if (brightness > 0.8) {
166 | ctx.fillStyle = "rgba(0, 0, 0, 0.31)";
167 | ctx.fillRect(0, 0, canvas.width, canvas.height);
168 | } else if (brightness < 0.2) {
169 | ctx.fillStyle = "rgba(255, 255, 255, 0.31)";
170 | ctx.fillRect(0, 0, canvas.width, canvas.height);
171 | }
172 |
173 | return canvas;
174 | };
175 |
176 | const processBitmap = (source: HTMLCanvasElement) => {
177 | const smallWidth = 150;
178 | const smallHeight = Math.floor((source.height / source.width) * smallWidth);
179 | let canvas = scaleCanvas(source, smallWidth, smallHeight);
180 | canvas = blurCanvas(canvas, 25);
181 | canvas = applyMeshDistortion(canvas, MESH_FLOATS);
182 | const largeWidth = 1000;
183 | const largeHeight = Math.floor((canvas.height / canvas.width) * largeWidth);
184 | canvas = scaleCanvas(canvas, largeWidth, largeHeight);
185 | canvas = applyMeshDistortion(canvas, MESH_FLOATS);
186 | canvas = blurCanvas(canvas, 12);
187 | canvas = adjustSaturation(canvas, 1.8);
188 | canvas = applyBrightnessMask(canvas);
189 | return canvas;
190 | };
191 |
192 | const createBaseTexture = async (
193 | colors: string[],
194 | coverUrl: string | undefined,
195 | ) => {
196 | const size = 600;
197 | const canvas = document.createElement("canvas");
198 | canvas.width = size;
199 | canvas.height = size;
200 | const ctx = canvas.getContext("2d");
201 | if (!ctx) return canvas;
202 |
203 | const gradient = ctx.createLinearGradient(0, 0, size, size);
204 | colors.forEach((color, idx) => {
205 | gradient.addColorStop(idx / Math.max(1, colors.length - 1), color);
206 | });
207 | ctx.fillStyle = gradient;
208 | ctx.fillRect(0, 0, size, size);
209 |
210 | if (coverUrl) {
211 | try {
212 | const img = await loadImageElementWithCache(coverUrl);
213 | const scale = Math.max(size / img.width, size / img.height);
214 | const w = img.width * scale;
215 | const h = img.height * scale;
216 | const x = (size - w) / 2;
217 | const y = (size - h) / 2;
218 | ctx.globalAlpha = 0.9;
219 | ctx.drawImage(img, x, y, w, h);
220 | ctx.globalAlpha = 1.0;
221 | } catch (error) {
222 | console.warn("Failed to load cover", error);
223 | }
224 | }
225 |
226 | for (let i = 0; i < 8; i++) {
227 | const cx = Math.random() * size;
228 | const cy = Math.random() * size;
229 | const radius = size * (0.3 + Math.random() * 0.4);
230 | const color = colors[Math.floor(Math.random() * colors.length)];
231 |
232 | const grad = ctx.createRadialGradient(cx, cy, 0, cx, cy, radius);
233 | grad.addColorStop(0, color);
234 | grad.addColorStop(1, "rgba(0,0,0,0)");
235 |
236 | ctx.globalAlpha = 0.3 + Math.random() * 0.3;
237 | ctx.fillStyle = grad;
238 | ctx.fillRect(cx - radius, cy - radius, radius * 2, radius * 2);
239 | }
240 |
241 | return canvas;
242 | };
243 |
244 | const normalizeColors = (colors: string[] | undefined): string[] => {
245 | if (!colors || colors.length === 0) {
246 | return defaultColors;
247 | }
248 | return colors;
249 | };
250 |
251 | export const createFlowingLayers = async (
252 | colors: string[] | undefined,
253 | coverUrl: string | undefined,
254 | count: number = 4,
255 | ): Promise<FlowingLayer[]> => {
256 | const normalized = normalizeColors(colors);
257 | const layers: FlowingLayer[] = [];
258 |
259 | for (let i = 0; i < count; i++) {
260 | const baseCanvas = await createBaseTexture(normalized, coverUrl);
261 | const processed = processBitmap(baseCanvas);
262 |
263 | layers.push({
264 | image: processed,
265 | startX: (Math.random() - 0.5) * 0.2,
266 | startY: (Math.random() - 0.5) * 0.2,
267 | startScale: 1.15 + Math.random() * 0.1,
268 | duration: 20000 + Math.random() * 15000,
269 | startTime: -i * 5000,
270 | });
271 | }
272 |
273 | return layers;
274 | };
275 |
276 | export type { FlowingLayer };
277 | export { defaultColors };
278 |
--------------------------------------------------------------------------------
/components/FluidBackground.tsx:
--------------------------------------------------------------------------------
1 | import React, { useCallback, useEffect, useMemo, useRef, useState } from "react";
2 | import { FlowingLayer, createFlowingLayers, defaultColors as mobileDefaultColors } from "./background/mobile";
3 | import { UIBackgroundRender } from "./background/renderer/UIBackgroundRender";
4 | import { WebWorkerBackgroundRender } from "./background/renderer/WebWorkerBackgroundRender";
5 |
6 | const desktopGradientDefaults = [
7 | "rgb(60, 20, 80)",
8 | "rgb(100, 40, 60)",
9 | "rgb(20, 20, 40)",
10 | "rgb(40, 40, 90)",
11 | ];
12 |
13 | const easeInOutSine = (t: number) => -(Math.cos(Math.PI * t) - 1) / 2;
14 |
15 | const calculateTransform = (layer: FlowingLayer, elapsed: number) => {
16 | const progress = ((elapsed + layer.startTime) % layer.duration) / layer.duration;
17 | const eased = easeInOutSine(progress);
18 |
19 | const x = layer.startX + Math.sin(progress * Math.PI * 2) * 0.15;
20 | const y = layer.startY + Math.cos(progress * Math.PI * 2) * 0.12;
21 | const scale = layer.startScale + Math.sin(progress * Math.PI * 2) * 0.08;
22 | const rotation = Math.sin(progress * Math.PI * 2) * 0.08;
23 |
24 | return { x, y, scale, rotation, eased };
25 | };
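// Example: at progress 0.25 a layer sits at roughly (startX + 0.15, startY), at its
// largest scale (startScale + 0.08) and its maximum rotation; at 0.75 the drift reverses.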
26 |
27 | interface FluidBackgroundProps {
28 | colors?: string[];
29 | isPlaying?: boolean;
30 | coverUrl?: string;
31 | isMobileLayout?: boolean;
32 | }
33 |
34 | const FluidBackground: React.FC<FluidBackgroundProps> = ({
35 | colors,
36 | isPlaying = true,
37 | coverUrl,
38 | isMobileLayout = false,
39 | }) => {
40 | const canvasRef = useRef<HTMLCanvasElement>(null);
41 | const rendererRef = useRef<UIBackgroundRender | WebWorkerBackgroundRender | null>(null);
42 | const layersRef = useRef<FlowingLayer[]>([]);
43 | const isPlayingRef = useRef(isPlaying);
44 | const startTimeOffsetRef = useRef(0);
45 | const lastPausedTimeRef = useRef(0);
46 | const colorsRef = useRef(colors);
47 | const [canvasInstanceKey, setCanvasInstanceKey] = useState(0);
48 | const previousModeRef = useRef(isMobileLayout);
49 |
50 | const normalizedColors = useMemo(
51 | () => (colors && colors.length > 0 ? colors : mobileDefaultColors),
52 | [colors],
53 | );
54 |
55 | const colorKey = useMemo(() => normalizedColors.join("|"), [normalizedColors]);
56 |
57 | useEffect(() => {
58 | colorsRef.current = colors;
59 | }, [colors]);
60 |
61 | useEffect(() => {
62 | isPlayingRef.current = isPlaying;
63 | }, [isPlaying]);
64 |
65 | useEffect(() => {
66 | if (previousModeRef.current !== isMobileLayout) {
67 | setCanvasInstanceKey((prev) => prev + 1);
68 | previousModeRef.current = isMobileLayout;
69 | }
70 | }, [isMobileLayout]);
71 |
72 | useEffect(() => {
73 | if (!isMobileLayout) {
74 | layersRef.current = [];
75 | return;
76 | }
77 | let cancelled = false;
78 | const generate = async () => {
79 | const newLayers = await createFlowingLayers(normalizedColors, coverUrl, 4);
80 | if (cancelled) return;
81 | layersRef.current = newLayers;
82 | };
83 | generate();
84 | return () => {
85 | cancelled = true;
86 | };
87 | }, [colorKey, coverUrl, normalizedColors, isMobileLayout]);
88 |
89 | const renderMobileFrame = useCallback(
90 | (ctx: CanvasRenderingContext2D, currentTime: number) => {
91 | const width = ctx.canvas.width;
92 | const height = ctx.canvas.height;
93 | let elapsed = currentTime;
94 |
95 | if (!isPlayingRef.current) {
96 | lastPausedTimeRef.current = currentTime;
97 | elapsed = startTimeOffsetRef.current;
98 | } else if (lastPausedTimeRef.current > 0) {
99 | startTimeOffsetRef.current = elapsed;
100 | lastPausedTimeRef.current = 0;
101 | }
102 |
103 | ctx.fillStyle = "#000";
104 | ctx.fillRect(0, 0, width, height);
105 |
106 | if (layersRef.current.length === 0) {
107 | ctx.fillStyle = "#222";
108 | ctx.fillRect(0, 0, width, height);
109 | ctx.fillStyle = "#666";
110 | ctx.font = "16px sans-serif";
111 | ctx.textAlign = "center";
112 | ctx.fillText("Loading layers...", width / 2, height / 2);
113 | return;
114 | }
115 |
116 | layersRef.current.forEach((layer, index) => {
117 | const transform = calculateTransform(layer, elapsed);
118 | ctx.save();
119 | ctx.translate(width / 2, height / 2);
120 | ctx.rotate(transform.rotation);
121 | ctx.scale(transform.scale, transform.scale);
122 | ctx.translate(width * transform.x, height * transform.y);
123 | ctx.globalCompositeOperation = "screen";
124 | ctx.globalAlpha = 0.5 + index * 0.05;
125 | ctx.filter = "blur(35px)";
126 | const drawWidth = width * 1.5;
127 | const drawHeight = height * 1.5;
128 | ctx.drawImage(
129 | layer.image,
130 | -drawWidth / 2,
131 | -drawHeight / 2,
132 | drawWidth,
133 | drawHeight,
134 | );
135 | ctx.restore();
136 | });
137 | },
138 | [],
139 | );
140 |
141 | const renderGradientFrame = useCallback((ctx: CanvasRenderingContext2D) => {
142 | const width = ctx.canvas.width;
143 | const height = ctx.canvas.height;
144 | const palette =
145 | colorsRef.current && colorsRef.current.length > 0
146 | ? colorsRef.current
147 | : desktopGradientDefaults;
148 | const gradient = ctx.createLinearGradient(0, 0, width, height);
149 | palette.forEach((color, index) => {
150 | gradient.addColorStop(index / Math.max(1, palette.length - 1), color);
151 | });
152 | ctx.fillStyle = gradient;
153 | ctx.fillRect(0, 0, width, height);
154 | }, []);
155 |
156 | useEffect(() => {
157 | const resize = () => {
158 | const width = window.innerWidth;
159 | const height = window.innerHeight;
160 | const canvas = canvasRef.current;
161 | if (!canvas) return;
162 |
163 | if (canvas.dataset.offscreenTransferred === "true") {
164 | if (rendererRef.current instanceof WebWorkerBackgroundRender) {
165 | rendererRef.current.resize(width, height);
166 | }
167 | return;
168 | }
169 |
170 | if (rendererRef.current instanceof WebWorkerBackgroundRender) {
171 | rendererRef.current.resize(width, height);
172 | return;
173 | }
174 |
175 | canvas.width = width;
176 | canvas.height = height;
177 | rendererRef.current?.resize(width, height);
178 | };
179 |
180 | resize();
181 | window.addEventListener("resize", resize);
182 | return () => window.removeEventListener("resize", resize);
183 | }, [isMobileLayout, canvasInstanceKey]);
184 |
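// Renderer selection: desktop layouts use WebWorkerBackgroundRender when
// WebWorkerBackgroundRender.isSupported(canvas) reports support; mobile layouts and
// unsupported browsers fall back to UIBackgroundRender with the matching frame callback.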
185 | useEffect(() => {
186 | const canvas = canvasRef.current;
187 | if (!canvas) return;
188 |
189 | if (canvas.dataset.offscreenTransferred === "true") {
190 | setCanvasInstanceKey((prev) => prev + 1);
191 | return;
192 | }
193 |
194 | const shouldUseWorker =
195 | !isMobileLayout && WebWorkerBackgroundRender.isSupported(canvas);
196 |
197 | if (shouldUseWorker && rendererRef.current instanceof WebWorkerBackgroundRender) {
198 | return;
199 | }
200 |
201 | if (rendererRef.current) {
202 | rendererRef.current.stop();
203 | rendererRef.current = null;
204 | }
205 |
206 | if (shouldUseWorker) {
207 | canvas.width = window.innerWidth;
208 | canvas.height = window.innerHeight;
209 | const workerRenderer = new WebWorkerBackgroundRender(canvas);
210 | workerRenderer.start(colorsRef.current ?? []);
211 | rendererRef.current = workerRenderer;
212 | return () => {
213 | workerRenderer.stop();
214 | rendererRef.current = null;
215 | };
216 | }
217 |
218 | const renderCallback = isMobileLayout ? renderMobileFrame : renderGradientFrame;
219 | const uiRenderer = new UIBackgroundRender(canvas, renderCallback);
220 | uiRenderer.resize(window.innerWidth, window.innerHeight);
221 | uiRenderer.setPaused(!isPlaying);
222 | uiRenderer.start();
223 | rendererRef.current = uiRenderer;
224 |
225 | return () => {
226 | uiRenderer.stop();
227 | rendererRef.current = null;
228 | };
229 | }, [isMobileLayout, renderGradientFrame, renderMobileFrame, canvasInstanceKey]);
230 |
231 | useEffect(() => {
232 | const renderer = rendererRef.current;
233 | if (renderer instanceof WebWorkerBackgroundRender) {
234 | renderer.setColors(colors ?? []);
235 | renderer.setPlaying(isPlaying);
236 | } else if (renderer instanceof UIBackgroundRender) {
237 | renderer.setPaused(!isPlaying);
238 | }
239 | }, [colors, isPlaying]);
240 |
241 | const canvasKey = `${isMobileLayout ? "mobile" : "desktop"}-${canvasInstanceKey}`;
242 |
243 | return (
244 | <>
245 | <canvas
246 | key={canvasKey}
247 | ref={canvasRef}
250 | />
251 |
258 | </>
259 | );
260 | };
261 |
262 | export default FluidBackground;
263 |
--------------------------------------------------------------------------------
/components/lyrics/InterludeDots.ts:
--------------------------------------------------------------------------------
1 | import { LyricLine as LyricLineType } from "../../types";
2 | import { ILyricLine } from "./ILyricLine";
3 | import { SpringSystem, INTERLUDE_SPRING } from "../../services/springSystem";
4 |
5 | export class InterludeDots implements ILyricLine {
6 | private canvas: OffscreenCanvas | HTMLCanvasElement;
7 | private ctx: OffscreenCanvasRenderingContext2D | CanvasRenderingContext2D;
8 | private lyricLine: LyricLineType;
9 | private index: number;
10 | private isMobile: boolean;
11 | private pixelRatio: number;
12 | private logicalWidth: number = 0;
13 | private logicalHeight: number = 0;
14 | private _height: number = 0;
15 | private springSystem: SpringSystem;
16 | private lastDrawTime: number = -1;
17 | private textWidth: number = 0;
18 | private duration: number = 0;
19 |
20 | constructor(line: LyricLineType, index: number, isMobile: boolean, duration: number = 0) {
21 | this.lyricLine = line;
22 | this.index = index;
23 | this.isMobile = isMobile;
24 | this.duration = duration;
25 | this.pixelRatio =
26 | typeof window !== "undefined" ? window.devicePixelRatio || 1 : 1;
27 |
28 | this.canvas = document.createElement("canvas");
29 | const ctx = this.canvas.getContext("2d");
30 | if (!ctx) throw new Error("Could not get canvas context");
31 | this.ctx = ctx as
32 | | OffscreenCanvasRenderingContext2D
33 | | CanvasRenderingContext2D;
34 |
35 | // Initialize spring system for expansion animation
36 | this.springSystem = new SpringSystem({
37 | expansion: 0, // 0 = hidden/collapsed, 1 = fully visible
38 | });
39 | }
40 |
41 | public measure(containerWidth: number, suggestedTranslationWidth?: number) {
42 | const baseSize = this.isMobile ? 32 : 40;
43 | const paddingY = 18;
44 |
45 | // Fixed height for interlude dots
46 | this._height = baseSize + paddingY * 2;
47 | this.logicalWidth = containerWidth;
48 | this.logicalHeight = this._height;
49 |
50 | // Set canvas size
51 | this.canvas.width = containerWidth * this.pixelRatio;
52 | this.canvas.height = this._height * this.pixelRatio;
53 |
54 | // Reset transform
55 | this.ctx.resetTransform();
56 | if (this.pixelRatio !== 1) {
57 | this.ctx.scale(this.pixelRatio, this.pixelRatio);
58 | }
59 |
60 | // Calculate approximate width for hover background
61 | const dotSpacing = this.isMobile ? 16 : 24;
62 | this.textWidth = dotSpacing * 2 + 40; // Approximate width
63 | }
64 |
65 | public draw(currentTime: number, isActive: boolean, isHovered: boolean) {
66 | const now = performance.now();
67 |
68 | // Calculate dt, clamped (100 ms cap) to prevent physics explosions on re-entry
69 | let dt = this.lastDrawTime === -1 ? 0.016 : Math.min((now - this.lastDrawTime) / 1000, 0.1);
70 | this.lastDrawTime = now;
71 |
72 | // Determine target expansion state
73 | const currentTarget = this.springSystem.getTarget("expansion") || 0;
74 | const targetExpansion = isActive ? 1 : 0;
75 |
76 | // Detect transition from Active -> Inactive (Exit animation start)
77 | // "Finally scale up once, then completely scale down"
78 | if (currentTarget === 1 && targetExpansion === 0) {
79 | // Apply a positive velocity to create a "pop" effect before shrinking
80 | // The spring will pull it to 0, but velocity will push it up first.
81 | this.springSystem.setVelocity("expansion", 8);
82 | }
83 |
84 | this.springSystem.setTarget("expansion", targetExpansion, INTERLUDE_SPRING);
85 | this.springSystem.update(dt);
86 |
87 | // Keep expansion non-negative; values above 1 are intentionally allowed
88 | // so the spring overshoot produces the "pop" effect before collapsing
89 | const expansion = Math.max(0, this.springSystem.getCurrent("expansion"));
90 |
91 | // Clear canvas
92 | this.ctx.clearRect(0, 0, this.logicalWidth, this.logicalHeight);
93 |
94 | // If completely collapsed and not active, don't draw anything
95 | // Increased threshold to ensure it disappears cleanly
96 | if (expansion < 0.01 && !isActive) {
97 | return;
98 | }
99 |
100 | const paddingX = this.isMobile ? 24 : 56;
101 | const baseRadius = this.isMobile ? 5 : 7;
102 | const dotSpacing = this.isMobile ? 16 : 24;
103 | const totalDotsWidth = dotSpacing * 2;
104 |
105 | // Calculate Progress
106 | // If active, we calculate progress based on line time and duration.
107 | // If not active, we don't care about progress color as much, but let's keep it consistent or fade out.
108 | let progress = 0;
109 | if (this.duration > 0) {
110 | const elapsed = currentTime - this.lyricLine.time;
111 | progress = Math.max(0, Math.min(1, elapsed / this.duration));
112 | } else if (isActive) {
113 | // If no duration, maybe pulse active?
114 | progress = 0.5;
115 | } else {
116 | // If inactive, progress is 1 (finished) or 0?
117 | // Usually if we passed it, it's 1. But drawing loop handles isActive.
118 | progress = 1;
119 | }
120 |
121 | this.ctx.save();
122 |
123 | // Draw hover background (round rect)
124 | if (isHovered) {
125 | this.ctx.fillStyle = `rgba(255, 255, 255, ${0.08 * Math.min(1, expansion)})`;
126 | const bgWidth = Math.max(totalDotsWidth + 80, 200);
127 | const bgHeight = this._height * Math.min(1, expansion);
128 | const bgY = (this._height - bgHeight) / 2;
129 |
130 | this.roundRect(paddingX - 16, bgY, bgWidth, bgHeight, 16 * Math.min(1, expansion));
131 | this.ctx.fill();
132 | }
133 |
134 | // Position dots - Left aligned with text but slightly offset
135 | // "Still a bit to the right" -> Add small offset
136 | const offsetX = 6;
137 |
138 | // Calculate center of the dot group for scaling pivot
139 | // Dot 0 is at 0, Dot 1 at spacing, Dot 2 at 2*spacing (relative to start)
140 | // Center is at Dot 1 (spacing)
141 | // We want to translate to the center of the middle dot
142 | const groupCenterX = paddingX + offsetX + baseRadius + dotSpacing;
143 | const groupCenterY = this._height / 2;
144 |
145 | // Center vertically and horizontally at group center
146 | this.ctx.translate(groupCenterX, groupCenterY);
147 |
148 | // Global Breathing Animation (only when active/visible)
149 | // "Effect is too big. Scale down!" -> Reduce amplitude
150 | const breatheSpeed = 3.0;
151 | const breatheAmt = 0.12;
152 | const breatheScale = 1.0 + Math.sin(now / 1000 * breatheSpeed) * breatheAmt;
153 |
154 | // Combine physics expansion with breathing
155 | const finalGlobalScale = expansion * breatheScale;
156 |
157 | this.ctx.scale(finalGlobalScale, finalGlobalScale);
158 |
159 | for (let i = 0; i < 3; i++) {
160 | // Calculate color based on progress
161 | const dotProgressStart = i / 3;
162 | const dotProgressEnd = (i + 1) / 3;
163 |
164 | const localProgress = (progress - dotProgressStart) / (dotProgressEnd - dotProgressStart);
165 | const clampedLocal = Math.max(0, Math.min(1, localProgress));
166 |
167 | // "Like lyrics... gradual change white... to gray"
168 | // Inactive lyrics are usually 0.5 or 0.6 opacity.
169 | // Base opacity 0.5 (Gray), Active 1.0 (White)
170 | const colorIntensity = 0.5 + 0.5 * clampedLocal;
171 |
172 | const visibilityOpacity = Math.min(1, expansion);
173 |
174 | const opacity = colorIntensity * visibilityOpacity;
175 |
176 | this.ctx.fillStyle = `rgba(255, 255, 255, ${opacity})`;
177 | this.ctx.beginPath();
178 |
179 | // Draw relative to center (Dot 1 is at 0)
180 | // Dot 0: -spacing
181 | // Dot 1: 0
182 | // Dot 2: +spacing
183 | const relativeX = (i - 1) * dotSpacing;
184 |
185 | this.ctx.arc(relativeX, 0, baseRadius, 0, Math.PI * 2);
186 | this.ctx.fill();
187 | }
188 |
189 | this.ctx.restore();
190 | }
191 |
192 | private roundRect(x: number, y: number, w: number, h: number, r: number) {
193 | if (w < 2 * r) r = w / 2;
194 | if (h < 2 * r) r = h / 2;
195 | this.ctx.beginPath();
196 | this.ctx.moveTo(x + r, y);
197 | this.ctx.arcTo(x + w, y, x + w, y + h, r);
198 | this.ctx.arcTo(x + w, y + h, x, y + h, r);
199 | this.ctx.arcTo(x, y + h, x, y, r);
200 | this.ctx.arcTo(x, y, x + w, y, r);
201 | this.ctx.closePath();
202 | }
203 |
204 | public getHeight() {
205 | return this._height;
206 | }
207 |
208 | public getCurrentHeight() {
209 | // Return dynamic height based on expansion state
210 | // Clamp to [0, 1] to prevent layout jitter during "pop" (expansion > 1)
211 | // When expansion is 0, height is 0 (hidden)
212 | const expansion = Math.max(0, Math.min(1, this.springSystem.getCurrent("expansion")));
213 | return this._height * expansion;
214 | }
215 |
216 | public isInterlude() {
217 | return true;
218 | }
219 |
220 | public getCanvas() {
221 | return this.canvas;
222 | }
223 |
224 | public getLogicalWidth() {
225 | return this.logicalWidth;
226 | }
227 |
228 | public getLogicalHeight() {
229 | return this.logicalHeight;
230 | }
231 |
232 | public getTextWidth() {
233 | return this.textWidth;
234 | }
235 | }
--------------------------------------------------------------------------------
/services/lyricsService.ts:
--------------------------------------------------------------------------------
1 | import { fetchViaProxy } from "./utils";
2 |
3 | const LYRIC_API_BASE = "https://163api.qijieya.cn";
4 | const METING_API = "https://api.qijieya.cn/meting/";
5 | const NETEASE_SEARCH_API = "https://163api.qijieya.cn/cloudsearch";
6 | const NETEASE_API_BASE = "http://music.163.com/api";
7 | const NETEASECLOUD_API_BASE = "https://163api.qijieya.cn";
8 |
9 | const METADATA_KEYWORDS = [
10 | "歌词贡献者",
11 | "翻译贡献者",
12 | "作词",
13 | "作曲",
14 | "编曲",
15 | "制作",
16 | "词曲",
17 | "词 / 曲",
18 | "lyricist",
19 | "composer",
20 | "arrange",
21 | "translation",
22 | "translator",
23 | "producer",
24 | ];
25 |
26 | const escapeRegex = (value: string) =>
27 | value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
28 |
29 | const metadataKeywordRegex = new RegExp(
30 | `^(${METADATA_KEYWORDS.map(escapeRegex).join("|")})\\s*[::]`,
31 | "iu",
32 | );
33 |
34 | const TIMESTAMP_REGEX = /^\[(\d{2}):(\d{2})\.(\d{2,3})\](.*)$/;
35 |
36 | interface NeteaseApiArtist {
37 | name?: string;
38 | }
39 |
40 | interface NeteaseApiAlbum {
41 | name?: string;
42 | picUrl?: string;
43 | }
44 |
45 | interface NeteaseApiSong {
46 | id: number;
47 | name?: string;
48 | ar?: NeteaseApiArtist[];
49 | al?: NeteaseApiAlbum;
50 | dt?: number;
51 | }
52 |
53 | interface NeteaseSearchResponse {
54 | result?: {
55 | songs?: NeteaseApiSong[];
56 | };
57 | }
58 |
59 | interface NeteasePlaylistResponse {
60 | songs?: NeteaseApiSong[];
61 | }
62 |
63 | interface NeteaseSongDetailResponse {
64 | code?: number;
65 | songs?: NeteaseApiSong[];
66 | }
67 |
68 | export interface NeteaseTrackInfo {
69 | id: string;
70 | title: string;
71 | artist: string;
72 | album: string;
73 | coverUrl?: string;
74 | duration?: number;
75 | isNetease: true;
76 | neteaseId: string;
77 | }
78 |
79 | type SearchOptions = {
80 | limit?: number;
81 | offset?: number;
82 | };
83 |
84 | const formatArtists = (artists?: NeteaseApiArtist[]) =>
85 | (artists ?? [])
86 | .map((artist) => artist.name?.trim())
87 | .filter(Boolean)
88 | .join("/") || "";
89 |
90 | const mapNeteaseSongToTrack = (song: NeteaseApiSong): NeteaseTrackInfo => ({
91 | id: song.id.toString(),
92 | title: song.name?.trim() ?? "",
93 | artist: formatArtists(song.ar),
94 | album: song.al?.name?.trim() ?? "",
95 | coverUrl: song.al?.picUrl?.replaceAll("http:", "https:"),
96 | duration: song.dt,
97 | isNetease: true,
98 | neteaseId: song.id.toString(),
99 | });
100 |
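// Example: "[00:00.00]作词: Someone" or "[00:01.20]Composer: Someone" are credit lines;
// they match metadataKeywordRegex and are kept out of the lyric body.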
101 | const isMetadataTimestampLine = (line: string): boolean => {
102 | const trimmed = line.trim();
103 | const match = trimmed.match(TIMESTAMP_REGEX);
104 | if (!match) return false;
105 | const content = match[4].trim();
106 | return metadataKeywordRegex.test(content);
107 | };
108 |
109 | const parseTimestampMetadata = (line: string) => {
110 | const match = line.trim().match(TIMESTAMP_REGEX);
111 | return match ? match[4].trim() : line.trim();
112 | };
113 |
114 | const isMetadataJsonLine = (line: string): boolean => {
115 | const trimmed = line.trim();
116 | if (!trimmed.startsWith("{") || !trimmed.endsWith("}")) return false;
117 | try {
118 | const json = JSON.parse(trimmed);
119 | if (json.c && Array.isArray(json.c)) {
120 | const content = json.c.map((item: any) => item.tx || "").join("");
121 | return metadataKeywordRegex.test(content);
122 | }
123 | } catch {
124 | // ignore invalid json
125 | }
126 | return false;
127 | };
128 |
129 | const parseJsonMetadata = (line: string) => {
130 | try {
131 | const json = JSON.parse(line.trim());
132 | if (json.c && Array.isArray(json.c)) {
133 | return json.c
134 | .map((item: any) => item.tx || "")
135 | .join("")
136 | .trim();
137 | }
138 | } catch {
139 | // ignore
140 | }
141 | return line.trim();
142 | };
143 |
144 | const extractMetadataLines = (content: string) => {
145 | const metadataSet = new Set<string>();
146 | const bodyLines: string[] = [];
147 |
148 | content.split("\n").forEach((line) => {
149 | if (!line.trim()) return;
150 | if (isMetadataTimestampLine(line)) {
151 | metadataSet.add(parseTimestampMetadata(line));
152 | } else if (isMetadataJsonLine(line)) {
153 | metadataSet.add(parseJsonMetadata(line));
154 | } else {
155 | bodyLines.push(line);
156 | }
157 | });
158 |
159 | return {
160 | clean: bodyLines.join("\n").trim(),
161 | metadata: Array.from(metadataSet),
162 | };
163 | };
164 |
165 | export const getNeteaseAudioUrl = (id: string) => {
166 | return `${METING_API}?type=url&id=${id}`;
167 | };
168 |
169 | // Implements the search logic from the user provided code snippet
170 | export const searchNetEase = async (
171 | keyword: string,
172 | options: SearchOptions = {},
173 | ): Promise<NeteaseTrackInfo[]> => {
174 | const { limit = 20, offset = 0 } = options;
175 | const searchApiUrl = `${NETEASE_SEARCH_API}?keywords=${encodeURIComponent(
176 | keyword,
177 | )}&limit=${limit}&offset=${offset}`;
178 |
179 | try {
180 | const parsedSearchApiResponse = (await fetchViaProxy(
181 | searchApiUrl,
182 | )) as NeteaseSearchResponse;
183 | const songs = parsedSearchApiResponse.result?.songs ?? [];
184 |
185 | if (songs.length === 0) {
186 | return [];
187 | }
188 |
189 | return songs.map(mapNeteaseSongToTrack);
190 | } catch (error) {
191 | console.error("NetEase search error", error);
192 | return [];
193 | }
194 | };
195 |
196 | export const fetchNeteasePlaylist = async (
197 | playlistId: string,
198 | ): Promise<NeteaseTrackInfo[]> => {
199 | try {
200 | // Fetch every track in the playlist via the NetEase Cloud Music API.
201 | // The endpoint is paginated, so request 50 tracks per page.
202 | const allTracks: NeteaseTrackInfo[] = [];
203 | const limit = 50;
204 | let offset = 0;
205 | let shouldContinue = true;
206 |
207 | while (shouldContinue) {
208 | const url = `${NETEASECLOUD_API_BASE}/playlist/track/all?id=${playlistId}&limit=${limit}&offset=${offset}`;
209 | const data = (await fetchViaProxy(url)) as NeteasePlaylistResponse;
210 | const songs = data.songs ?? [];
211 | if (songs.length === 0) {
212 | break;
213 | }
214 |
215 | const tracks = songs.map(mapNeteaseSongToTrack);
216 |
217 | allTracks.push(...tracks);
218 |
219 | // Continue fetching if the current page was full
220 | if (songs.length < limit) {
221 | shouldContinue = false;
222 | } else {
223 | offset += limit;
224 | }
225 | }
226 |
227 | return allTracks;
228 | } catch (e) {
229 | console.error("Playlist fetch error", e);
230 | return [];
231 | }
232 | };
233 |
234 | export const fetchNeteaseSong = async (
235 | songId: string,
236 | ): Promise<NeteaseTrackInfo | null> => {
237 | try {
238 | const url = `${NETEASECLOUD_API_BASE}/song/detail?ids=${songId}`;
239 | const data = (await fetchViaProxy(
240 | url,
241 | )) as NeteaseSongDetailResponse;
242 | const track = data.songs?.[0];
243 | if (data.code === 200 && track) {
244 | return mapNeteaseSongToTrack(track);
245 | }
246 | return null;
247 | } catch (e) {
248 | console.error("Song fetch error", e);
249 | return null;
250 | }
251 | };
252 |
253 | // Keeps the old search for lyric matching fallbacks
254 | export const searchAndMatchLyrics = async (
255 | title: string,
256 | artist: string,
257 | ): Promise<{ lrc: string; yrc?: string; tLrc?: string; metadata: string[] } | null> => {
258 | try {
259 | const songs = await searchNetEase(`${title} ${artist}`, { limit: 5 });
260 |
261 | if (songs.length === 0) {
262 | console.warn("No songs found on Cloud");
263 | return null;
264 | }
265 |
266 | const songId = songs[0].id;
267 | console.log(`Found Song ID: ${songId}`);
268 |
269 | const lyricsResult = await fetchLyricsById(songId);
270 | return lyricsResult;
271 | } catch (error) {
272 | console.error("Cloud lyrics match failed:", error);
273 | return null;
274 | }
275 | };
276 |
277 | export const fetchLyricsById = async (
278 | songId: string,
279 | ): Promise<{ lrc: string; yrc?: string; tLrc?: string; metadata: string[] } | null> => {
280 | try {
281 | // Fetch the lyrics via the NetEase Cloud Music API
282 | const lyricUrl = `${NETEASECLOUD_API_BASE}/lyric/new?id=${songId}`;
283 | const lyricData = await fetchViaProxy(lyricUrl);
284 |
285 | const rawYrc = lyricData.yrc?.lyric;
286 | const rawLrc = lyricData.lrc?.lyric;
287 | const tLrc = lyricData.tlyric?.lyric;
288 |
289 | if (!rawYrc && !rawLrc) return null;
290 |
291 | const {
292 | clean: cleanLrc,
293 | metadata: lrcMetadata,
294 | } = rawLrc
295 | ? extractMetadataLines(rawLrc)
296 | : { clean: undefined, metadata: [] };
297 |
298 | const {
299 | clean: cleanYrc,
300 | metadata: yrcMetadata,
301 | } = rawYrc
302 | ? extractMetadataLines(rawYrc)
303 | : { clean: undefined, metadata: [] };
304 |
305 | // Extract metadata from translation if available
306 | let cleanTranslation: string | undefined;
307 | let translationMetadata: string[] = [];
308 | if (tLrc) {
309 | const result = extractMetadataLines(tLrc);
310 | cleanTranslation = result.clean;
311 | translationMetadata = result.metadata;
312 | }
313 |
314 | const metadataSet = Array.from(
315 | new Set([...lrcMetadata, ...yrcMetadata, ...translationMetadata]),
316 | );
317 |
318 | if (lyricData.transUser?.nickname) {
319 | metadataSet.unshift(`翻译贡献者: ${lyricData.transUser.nickname}`);
320 | }
321 |
322 | if (lyricData.lyricUser?.nickname) {
323 | metadataSet.unshift(`歌词贡献者: ${lyricData.lyricUser.nickname}`);
324 | }
325 |
326 | const baseLyrics = cleanLrc || cleanYrc || rawLrc || rawYrc;
327 | if (!baseLyrics) return null;
328 |
329 | const yrcForEnrichment = cleanYrc && cleanLrc ? cleanYrc : undefined;
330 | return {
331 | lrc: baseLyrics,
332 | yrc: yrcForEnrichment,
333 | tLrc: cleanTranslation,
334 | metadata: Array.from(metadataSet),
335 | };
336 | } catch (e) {
337 | console.error("Lyric fetch error", e);
338 | return null;
339 | }
340 | };
341 |
--------------------------------------------------------------------------------
/hooks/usePlaylist.ts:
--------------------------------------------------------------------------------
1 | import { useCallback, useState } from "react";
2 | import { Song } from "../types";
3 | import {
4 | extractColors,
5 | parseAudioMetadata,
6 | parseNeteaseLink,
7 | } from "../services/utils";
8 | import { parseLyrics } from "../services/lyrics";
9 | import {
10 | fetchNeteasePlaylist,
11 | fetchNeteaseSong,
12 | getNeteaseAudioUrl,
13 | } from "../services/lyricsService";
14 | import { audioResourceCache } from "../services/cache";
15 |
16 | // Levenshtein distance for fuzzy matching
17 | const levenshteinDistance = (str1: string, str2: string): number => {
18 | const len1 = str1.length;
19 | const len2 = str2.length;
20 | const matrix: number[][] = [];
21 |
22 | for (let i = 0; i <= len1; i++) {
23 | matrix[i] = [i];
24 | }
25 | for (let j = 0; j <= len2; j++) {
26 | matrix[0][j] = j;
27 | }
28 |
29 | for (let i = 1; i <= len1; i++) {
30 | for (let j = 1; j <= len2; j++) {
31 | const cost = str1[i - 1] === str2[j - 1] ? 0 : 1;
32 | matrix[i][j] = Math.min(
33 | matrix[i - 1][j] + 1, // deletion
34 | matrix[i][j - 1] + 1, // insertion
35 | matrix[i - 1][j - 1] + cost // substitution
36 | );
37 | }
38 | }
39 |
40 | return matrix[len1][len2];
41 | };
42 |
43 | // Calculate similarity score (0-1, higher is better)
44 | const calculateSimilarity = (str1: string, str2: string): number => {
45 | const distance = levenshteinDistance(str1, str2);
46 | const maxLen = Math.max(str1.length, str2.length);
47 | if (maxLen === 0) return 1;
48 | return 1 - distance / maxLen;
49 | };
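// Example: levenshteinDistance("hallo", "hello") is 1 (one substitution), so
// calculateSimilarity("hallo", "hello") = 1 - 1 / 5 = 0.8.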
50 |
51 | export interface ImportResult {
52 | success: boolean;
53 | message?: string;
54 | songs: Song[];
55 | }
56 |
57 | export const usePlaylist = () => {
58 | const [queue, setQueue] = useState<Song[]>([]);
59 | const [originalQueue, setOriginalQueue] = useState<Song[]>([]);
60 |
61 | const updateSongInQueue = useCallback(
62 | (id: string, updates: Partial<Song>) => {
63 | setQueue((prev) =>
64 | prev.map((song) => (song.id === id ? { ...song, ...updates } : song)),
65 | );
66 | setOriginalQueue((prev) =>
67 | prev.map((song) => (song.id === id ? { ...song, ...updates } : song)),
68 | );
69 | },
70 | [],
71 | );
72 |
73 | const appendSongs = useCallback((songs: Song[]) => {
74 | if (songs.length === 0) return;
75 | setOriginalQueue((prev) => [...prev, ...songs]);
76 | setQueue((prev) => [...prev, ...songs]);
77 | }, []);
78 |
79 | const removeSongs = useCallback((ids: string[]) => {
80 | if (ids.length === 0) return;
81 | setQueue((prev) => {
82 | prev.forEach((song) => {
83 | if (ids.includes(song.id) && song.fileUrl && !song.fileUrl.startsWith("blob:")) {
84 | audioResourceCache.delete(song.fileUrl);
85 | }
86 | });
87 | return prev.filter((song) => !ids.includes(song.id));
88 | });
89 | setOriginalQueue((prev) => prev.filter((song) => !ids.includes(song.id)));
90 | }, []);
91 |
92 | const addLocalFiles = useCallback(
93 | async (files: FileList | File[]) => {
94 | const fileList = Array.from(files);
96 |
97 | // Separate audio and lyrics files
98 | const audioFiles: File[] = [];
99 | const lyricsFiles: File[] = [];
100 |
101 | fileList.forEach((file) => {
102 | const ext = file.name.split(".").pop()?.toLowerCase();
103 | if (ext === "lrc" || ext === "txt") {
104 | lyricsFiles.push(file);
105 | } else {
106 | audioFiles.push(file);
107 | }
108 | });
109 |
110 | const newSongs: Song[] = [];
111 |
112 | // Build lyrics map: extract song title from filename (part after first "-")
113 | // Remove Netease IDs like (12345678) from title
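// Example: "Artist - Song Title (12345678).lrc" is keyed as "song title"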
114 | const lyricsMap = new Map<string, File>();
115 | lyricsFiles.forEach((file) => {
116 | const basename = file.name.replace(/\.[^/.]+$/, "");
117 | const firstDashIndex = basename.indexOf("-");
118 |
119 | // If has "-", use part after first dash as title, otherwise use full basename
120 | let title = firstDashIndex > 0 && firstDashIndex < basename.length - 1
121 | ? basename.substring(firstDashIndex + 1).trim()
122 | : basename;
123 |
124 | // Remove Netease ID pattern like (12345678) or [12345678]
125 | title = title.replace(/[\(\[]?\d{7,9}[\)\]]?/g, "").trim();
126 |
127 | lyricsMap.set(title.toLowerCase(), file);
128 | });
129 |
130 | // Process audio files
131 | for (let i = 0; i < audioFiles.length; i++) {
132 | const file = audioFiles[i];
133 | const url = URL.createObjectURL(file);
134 | const basename = file.name.replace(/\.[^/.]+$/, "");
135 | let title = basename;
136 | let artist = "Unknown Artist";
137 | let coverUrl: string | undefined;
138 | let colors: string[] | undefined;
139 | let lyrics: { time: number; text: string }[] = [];
140 |
141 | const nameParts = title.split("-");
142 | if (nameParts.length > 1) {
143 | artist = nameParts[0].trim();
144 | title = nameParts[1].trim();
145 | }
146 |
147 | try {
148 | const metadata = await parseAudioMetadata(file);
149 | if (metadata.title) title = metadata.title;
150 | if (metadata.artist) artist = metadata.artist;
151 | if (metadata.picture) {
152 | coverUrl = metadata.picture;
153 | colors = await extractColors(coverUrl);
154 | }
155 |
156 | // Check for embedded lyrics first (highest priority)
157 | if (metadata.lyrics && metadata.lyrics.trim().length > 0) {
158 | try {
159 | lyrics = parseLyrics(metadata.lyrics);
160 | } catch (err) {
161 | console.warn("Failed to parse embedded lyrics", err);
162 | }
163 | }
164 |
165 | // If no embedded lyrics, try to match lyrics by fuzzy matching
166 | if (lyrics.length === 0) {
167 | // Normalize song title for matching
168 | const songTitle = title.toLowerCase().trim();
169 |
170 | // Try exact match first
171 | let matchedLyricsFile = lyricsMap.get(songTitle);
172 |
173 | // If no exact match, try fuzzy matching
174 | if (!matchedLyricsFile && lyricsMap.size > 0) {
175 | let bestMatch: { file: File; score: number } | null = null;
176 | const minSimilarity = 0.75; // Require 75% similarity (allows 1-2 errors for typical song titles)
177 |
178 | for (const [lyricsTitle, lyricsFile] of lyricsMap.entries()) {
179 | const similarity = calculateSimilarity(songTitle, lyricsTitle);
180 |
181 | if (similarity >= minSimilarity) {
182 | if (!bestMatch || similarity > bestMatch.score) {
183 | bestMatch = { file: lyricsFile, score: similarity };
184 | }
185 | }
186 | }
187 |
188 | if (bestMatch) {
189 | matchedLyricsFile = bestMatch.file;
190 | }
191 | }
192 |
193 | // Load matched lyrics file
194 | if (matchedLyricsFile) {
195 | const reader = new FileReader();
196 | const lrcText = await new Promise<string>((resolve) => {
197 | reader.onload = (e) =>
198 | resolve((e.target?.result as string) || "");
199 | reader.readAsText(matchedLyricsFile!);
200 | });
201 | if (lrcText) {
202 | lyrics = parseLyrics(lrcText);
203 | }
204 | }
205 | }
206 | } catch (err) {
207 | console.warn("Local metadata extraction failed", err);
208 | }
209 |
210 | newSongs.push({
211 | id: `local-${Date.now()}-${i}`,
212 | title,
213 | artist,
214 | fileUrl: url,
215 | coverUrl,
216 | lyrics,
217 | colors: colors && colors.length > 0 ? colors : undefined,
218 | needsLyricsMatch: lyrics.length === 0, // Flag for cloud matching
219 | });
220 | }
221 |
222 | appendSongs(newSongs);
223 | return newSongs;
224 | },
225 | [appendSongs],
226 | );
227 |
228 | const importFromUrl = useCallback(
229 | async (input: string): Promise<ImportResult> => {
230 | const parsed = parseNeteaseLink(input);
231 | if (!parsed) {
232 | return {
233 | success: false,
234 | message:
235 | "Invalid Netease URL. Use https://music.163.com/#/song?id=... or playlist",
236 | songs: [],
237 | };
238 | }
239 |
240 | const newSongs: Song[] = [];
241 | try {
242 | if (parsed.type === "playlist") {
243 | const songs = await fetchNeteasePlaylist(parsed.id);
244 | songs.forEach((song) => {
245 | newSongs.push({
246 | ...song,
247 | fileUrl: getNeteaseAudioUrl(song.id),
248 | lyrics: [],
249 | colors: [],
250 | needsLyricsMatch: true,
251 | });
252 | });
253 | } else {
254 | const song = await fetchNeteaseSong(parsed.id);
255 | if (song) {
256 | newSongs.push({
257 | ...song,
258 | fileUrl: getNeteaseAudioUrl(song.id),
259 | lyrics: [],
260 | colors: [],
261 | needsLyricsMatch: true,
262 | });
263 | }
264 | }
265 | } catch (err) {
266 | console.error("Failed to fetch Netease music", err);
267 | return {
268 | success: false,
269 | message: "Failed to load songs from URL",
270 | songs: [],
271 | };
272 | }
273 |
274 | appendSongs(newSongs);
275 | if (newSongs.length === 0) {
276 | return {
277 | success: false,
278 | message: "Failed to load songs from URL",
279 | songs: [],
280 | };
281 | }
282 |
283 | return { success: true, songs: newSongs };
284 | },
285 | [appendSongs],
286 | );
287 |
288 | return {
289 | queue,
290 | originalQueue,
291 | updateSongInQueue,
292 | removeSongs,
293 | addLocalFiles,
294 | importFromUrl,
295 | setQueue,
296 | setOriginalQueue,
297 | };
298 | };
299 |
--------------------------------------------------------------------------------
/services/lyrics/netease.ts:
--------------------------------------------------------------------------------
1 | /**
2 | * Netease YRC format parser.
3 | *
4 | * Supports:
5 | * - YRC format: [startMs,duration](wordStartMs,wordDuration,flag)word
6 | * - JSON metadata: {"t":0,"c":[{"tx":"text"}]}
7 | * - Fallback LRC: [mm:ss.xx]text
8 | *
9 | * Features:
10 | * - Single-pass YRC parsing
11 | * - Word timing enrichment for LRC content
12 | * - Inline duplicate detection
13 | * - Automatic word duration fixing
14 | */
15 |
16 | import { LyricLine, LyricWord, isMetadataLine } from "./types";
17 | import { parseLrc } from "./lrc";
18 | import {
19 | createWord,
20 | createLine,
21 | mergePunctuation,
22 | normalizeText,
23 | insertInterludes,
24 | addDurations,
25 | INTERLUDE_TEXT,
26 | } from "./parser";
27 |
28 | const MAX_WORD_DURATION = 10.0; // Max duration per word in seconds
29 |
30 | /**
31 | * Token types for Netease YRC parsing.
32 | */
33 | type NeteaseToken =
34 | | { type: "yrc"; time: number; duration: number; words: LyricWord[]; text: string }
35 | | { type: "json"; time: number; text: string }
36 | | { type: "lrc"; time: number; text: string };
37 |
38 | /**
39 | * Parse JSON metadata line.
40 | */
41 | const parseJsonLine = (line: string): NeteaseToken | null => {
42 | try {
43 | const json = JSON.parse(line);
44 | if (json.c && Array.isArray(json.c)) {
45 | const text = json.c.map((item: { tx: string }) => item.tx).join("");
46 | return {
47 | type: "json",
48 | time: (json.t || 0) / 1000,
49 | text,
50 | };
51 | }
52 | } catch {
53 | // Not valid JSON
54 | }
55 | return null;
56 | };
57 |
58 | /**
59 | * Parse YRC line with word timing.
60 | */
61 | const parseYrcLine = (line: string): NeteaseToken | null => {
62 | const match = line.match(/^\[(\d+),(\d+)\](.*)/);
63 | if (!match) return null;
64 |
65 | const startTime = parseInt(match[1], 10) / 1000;
66 | const duration = parseInt(match[2], 10) / 1000;
67 | const content = match[3];
68 |
69 | const words: LyricWord[] = [];
70 | let text = "";
71 |
72 | // Parse word timing: (startMs,durationMs,flag)wordText
73 | const wordRegex = /\((\d+),(\d+),(\d+)\)([^\(]*)/g;
74 | const matches = [...content.matchAll(wordRegex)];
75 |
76 | if (matches.length > 0) {
77 | for (const m of matches) {
78 | const wordStart = parseInt(m[1], 10) / 1000;
79 | const wordDuration = parseInt(m[2], 10) / 1000;
80 | const wordText = m[4];
81 |
82 | text += wordText;
83 | words.push(createWord(wordText, wordStart, wordStart + wordDuration));
84 | }
85 | } else {
86 | text = content;
87 | }
88 |
89 | return {
90 | type: "yrc",
91 | time: startTime,
92 | duration,
93 | words: mergePunctuation(words),
94 | text,
95 | };
96 | };
97 |
98 | /**
99 | * Tokenize Netease content into structured tokens.
100 | */
101 | const tokenizeNetease = (content: string): NeteaseToken[] => {
102 | const lines = content.split("\n");
103 | const tokens: NeteaseToken[] = [];
104 |
105 | for (const line of lines) {
106 | const trimmed = line.trim();
107 | if (!trimmed) continue;
108 |
109 | // Try JSON format
110 | if (trimmed.startsWith("{") && trimmed.endsWith("}")) {
111 | const jsonToken = parseJsonLine(trimmed);
112 | if (jsonToken) {
113 | tokens.push(jsonToken);
114 | continue;
115 | }
116 | }
117 |
118 | // Try YRC format
119 | const yrcToken = parseYrcLine(trimmed);
120 | if (yrcToken) {
121 | tokens.push(yrcToken);
122 | continue;
123 | }
124 |
125 | // Fallback to LRC format
126 | const lrcMatch = trimmed.match(/\[(\d{2}):(\d{2})\.(\d{2,3})\](.*)/);
127 | if (lrcMatch) {
128 | const minutes = parseInt(lrcMatch[1], 10);
129 | const seconds = parseInt(lrcMatch[2], 10);
130 | const msStr = lrcMatch[3];
131 | const ms = parseInt(msStr, 10);
132 | const msValue = msStr.length === 3 ? ms / 1000 : ms / 100;
133 | const time = minutes * 60 + seconds + msValue;
134 |
135 | tokens.push({
136 | type: "lrc",
137 | time,
138 | text: lrcMatch[4].trim(),
139 | });
140 | }
141 | }
142 |
143 | // Sort by time
144 | tokens.sort((a, b) => a.time - b.time);
145 |
146 | return tokens;
147 | };
148 |
149 | /**
150 | * Fix abnormal word durations in YRC tokens.
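 *
 * Clamps over-long word durations (> 10s), keeps each word's end no later
 * than the start of the next word or the next line, and guarantees that
 * every word's end time stays after its start time.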
151 | */
152 | const fixWordDurations = (tokens: NeteaseToken[]): void => {
153 | for (let i = 0; i < tokens.length; i++) {
154 | const token = tokens[i];
155 | if (token.type !== "yrc" || !token.words.length) continue;
156 |
157 | const nextToken = tokens[i + 1];
158 |
159 | for (let j = 0; j < token.words.length; j++) {
160 | const word = token.words[j];
161 | const nextWord = token.words[j + 1];
162 |
163 | // Calculate max end time
164 | const maxEnd = nextWord
165 | ? nextWord.startTime
166 | : nextToken
167 | ? nextToken.time
168 | : word.startTime + MAX_WORD_DURATION;
169 |
170 | // Fix duration if too long
171 | const duration = word.endTime - word.startTime;
172 | if (duration > MAX_WORD_DURATION) {
173 | word.endTime = Math.min(word.startTime + MAX_WORD_DURATION, maxEnd);
174 | }
175 |
176 |         // Ensure the word doesn't run past the allowed end
177 | if (word.endTime > maxEnd) {
178 | word.endTime = maxEnd;
179 | }
180 |
181 | // Ensure end > start
182 | if (word.endTime <= word.startTime) {
183 | word.endTime = word.startTime + 0.1;
184 | }
185 | }
186 | }
187 | };
188 |
189 | /**
190 | * Convert tokens to lyric lines, merging translations.
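 *
 * YRC tokens become the main lines. Non-YRC tokens that start within 3s of a
 * YRC line and have different normalized text are attached to it as
 * translations; remaining non-YRC tokens are emitted as plain or interlude
 * lines. With no YRC tokens at all, every token becomes a plain line.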
191 | */
192 | const tokensToLines = (tokens: NeteaseToken[]): LyricLine[] => {
193 |   const yrcTokens = tokens.filter((t): t is Extract<NeteaseToken, { type: "yrc" }> => t.type === "yrc");
194 | const otherTokens = tokens.filter(t => t.type !== "yrc");
195 | const hasYrcWordAt = (time: number): boolean => {
196 | return yrcTokens.some(t => {
197 | if (t.type !== "yrc" || !t.words.length) return false;
198 | return t.words.some(word => word.startTime <= time && word.endTime > time);
199 | });
200 | };
201 |
202 | if (yrcTokens.length === 0) {
203 | // No YRC data, convert all to plain lines
204 | return tokens
205 | .filter(t => !isMetadataLine(t.text))
206 | .map(t => {
207 | if (!t.text.trim()) {
208 | return createLine(t.time, INTERLUDE_TEXT, { isInterlude: true });
209 | }
210 | return createLine(t.time, t.text, {
211 | words: t.type === "yrc" && t.words.length > 0 ? t.words : undefined,
212 | isPreciseTiming: t.type === "yrc",
213 | });
214 | });
215 | }
216 |
217 | // Use YRC as main lines, others as translations
218 | const lines: LyricLine[] = [];
219 |   const usedIndices = new Set<number>();
220 |
221 | for (const yrcToken of yrcTokens) {
222 | const translations: string[] = [];
223 |
224 | // Find translations within 3s tolerance
225 | for (let i = 0; i < otherTokens.length; i++) {
226 | if (usedIndices.has(i)) continue;
227 | const other = otherTokens[i];
228 | if (isMetadataLine(other.text)) continue;
229 |
230 | const timeDiff = Math.abs(other.time - yrcToken.time);
231 | if (timeDiff < 3.0) {
232 | const normalized = normalizeText(other.text);
233 | const yrcNormalized = normalizeText(yrcToken.text);
234 |
235 | if (normalized && normalized !== yrcNormalized) {
236 | translations.push(other.text.trim());
237 | usedIndices.add(i);
238 | }
239 | }
240 | }
241 |
242 | if (!yrcToken.text.trim()) {
243 | lines.push(createLine(yrcToken.time, INTERLUDE_TEXT, { isInterlude: true }));
244 | } else {
245 | lines.push(
246 | createLine(yrcToken.time, yrcToken.text, {
247 | words: yrcToken.words.length > 0 ? yrcToken.words : undefined,
248 | translation: translations.length > 0 ? translations.join("\n") : undefined,
249 | isPreciseTiming: true,
250 | })
251 | );
252 | }
253 | }
254 |
255 | // Add orphan lines not matched as translations
256 | for (let i = 0; i < otherTokens.length; i++) {
257 | if (usedIndices.has(i)) continue;
258 | const token = otherTokens[i];
259 | if (isMetadataLine(token.text)) continue;
260 |
261 | if (!token.text.trim()) {
262 | if (hasYrcWordAt(token.time)) {
263 | continue;
264 | }
265 | lines.push(createLine(token.time, INTERLUDE_TEXT, { isInterlude: true }));
266 | } else {
267 | lines.push(createLine(token.time, token.text, { isPreciseTiming: false }));
268 | }
269 | }
270 |
271 | // Re-sort by time
272 | lines.sort((a, b) => a.time - b.time);
273 |
274 | return lines;
275 | };
276 |
277 | /**
278 | * Deduplicate lines with same normalized text within time window.
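 *
 * Adjacent lines whose normalized text matches within 1.5s are collapsed into
 * the earlier line, which inherits the richer word timing and any missing
 * translation from the duplicate.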
279 | */
280 | const deduplicate = (lines: LyricLine[]): LyricLine[] => {
281 | const result: LyricLine[] = [];
282 |
283 | for (const line of lines) {
284 | const prev = result[result.length - 1];
285 |
286 | if (
287 | prev &&
288 | normalizeText(prev.text) === normalizeText(line.text) &&
289 | Math.abs(line.time - prev.time) <= 1.5
290 | ) {
291 | // Merge: keep line with more words
292 | if ((line.words?.length ?? 0) > (prev.words?.length ?? 0)) {
293 | prev.words = line.words;
294 | }
295 | // Merge translations
296 | if (!prev.translation && line.translation) {
297 | prev.translation = line.translation;
298 | }
299 | } else {
300 | result.push(line);
301 | }
302 | }
303 |
304 | return result;
305 | };
306 |
307 | /**
308 | * Enrich LRC lines with YRC word timing.
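 *
 * For each LRC line, looks for one or more consecutive unused YRC tokens whose
 * normalized text concatenates to the line's normalized text, starting within
 * 2.5s of the line; the closest match in time supplies the word timings, which
 * are then re-split to follow the line's exact text.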
309 | */
310 | const enrichWithWordTiming = (lrcLines: LyricLine[], yrcTokens: NeteaseToken[]): LyricLine[] => {
311 | const yrcData = yrcTokens
312 | .filter(t => t.type === "yrc" && t.words.length > 0 && !isMetadataLine(t.text))
313 | .map(t => ({
314 | token: t,
315 | normalized: normalizeText(t.text),
316 | used: false,
317 | }))
318 | .filter(d => d.normalized);
319 |
320 | return lrcLines.map(line => {
321 | if (!line.text || line.isInterlude) return line;
322 |
323 | const targetNormalized = normalizeText(line.text);
324 | if (!targetNormalized) return line;
325 |
326 | // Find matching YRC segments
327 | let bestMatch: { indexes: number[]; score: number } | null = null;
328 |
329 | for (let start = 0; start < yrcData.length; start++) {
330 | if (yrcData[start].used) continue;
331 |
332 | const timeDiff = Math.abs(yrcData[start].token.time - line.time);
333 | if (timeDiff > 2.5) continue;
334 |
335 | if (!targetNormalized.startsWith(yrcData[start].normalized)) continue;
336 |
337 | // Try to match consecutive segments
338 | let combined = yrcData[start].normalized;
339 | const indexes = [start];
340 |
341 | while (
342 | combined.length < targetNormalized.length &&
343 | indexes[indexes.length - 1] + 1 < yrcData.length &&
344 | !yrcData[indexes[indexes.length - 1] + 1].used
345 | ) {
346 | const next = yrcData[indexes[indexes.length - 1] + 1];
347 | const prospective = combined + next.normalized;
348 |
349 | if (!targetNormalized.startsWith(prospective)) break;
350 |
351 | combined = prospective;
352 | indexes.push(indexes[indexes.length - 1] + 1);
353 | }
354 |
355 | if (combined === targetNormalized) {
356 | const score = timeDiff;
357 | if (!bestMatch || score < bestMatch.score) {
358 | bestMatch = { indexes, score };
359 | }
360 | }
361 | }
362 |
363 | // Apply best match
364 | if (bestMatch) {
365 | const words: LyricWord[] = [];
366 |
367 | for (const idx of bestMatch.indexes) {
368 | yrcData[idx].used = true;
369 |       const token = yrcData[idx].token as Extract<NeteaseToken, { type: "yrc" }>;
370 | words.push(...token.words.map(w => ({ ...w })));
371 | }
372 |
373 | const adjustedWords = alignWordsWithText(line.text, words);
374 |
375 | return {
376 | ...line,
377 | words: adjustedWords,
378 | isPreciseTiming: true,
379 | };
380 | }
381 |
382 | return line;
383 | });
384 | };
385 |
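/**
 * Redistribute the characters of the original line text across the copied YRC
 * words so the concatenated word texts reproduce the line exactly (punctuation
 * and spacing included) while each word keeps its original timing.
 */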
386 | const alignWordsWithText = (text: string, words: LyricWord[]): LyricWord[] => {
387 | if (!text || !words.length) return words;
388 |
389 | const chars = Array.from(text);
390 | let pointer = 0;
391 |
392 | const adjusted = words.map(word => {
393 | const normalizedTarget = normalizeText(word.text);
394 | if (!normalizedTarget) {
395 | return { ...word };
396 | }
397 |
398 | let chunk = "";
399 | let matched = "";
400 |
401 | while (pointer < chars.length && matched.length < normalizedTarget.length) {
402 | const char = chars[pointer];
403 | chunk += char;
404 | const normalizedChar = normalizeText(char);
405 | if (normalizedChar) {
406 | matched += normalizedChar;
407 | }
408 | pointer++;
409 | }
410 |
411 | while (pointer < chars.length) {
412 | const lookahead = chars[pointer];
413 | if (normalizeText(lookahead)) {
414 | break;
415 | }
416 | chunk += lookahead;
417 | pointer++;
418 | }
419 |
420 | return chunk
421 | ? {
422 | ...word,
423 | text: chunk,
424 | }
425 | : { ...word };
426 | });
427 |
428 | if (pointer < chars.length && adjusted.length) {
429 | adjusted[adjusted.length - 1] = {
430 | ...adjusted[adjusted.length - 1],
431 | text: `${adjusted[adjusted.length - 1].text}${chars.slice(pointer).join("")}`,
432 | };
433 | }
434 |
435 | return adjusted;
436 | };
437 |
438 | /**
439 | * Check if content is Netease format.
440 | */
441 | export const isNeteaseFormat = (content: string): boolean => {
442 | return content.split("\n").some(line => {
443 | const trimmed = line.trim();
444 | return (
445 | /^\[\d+,\d+\]/.test(trimmed) ||
446 | (trimmed.startsWith("{") && trimmed.includes('"c":['))
447 | );
448 | });
449 | };
450 |
451 | /**
452 | * Parse Netease YRC format lyrics.
453 | *
454 | * If LRC content is provided, use it as the base and enrich with YRC word timing.
455 | * Otherwise, parse YRC directly and merge with other formats as translations.
456 | */
457 | export const parseNeteaseLyrics = (
458 | yrcContent: string,
459 | lrcContent?: string
460 | ): LyricLine[] => {
461 | if (!yrcContent?.trim()) return [];
462 |
463 | const tokens = tokenizeNetease(yrcContent);
464 | fixWordDurations(tokens);
465 |
466 | // If LRC content provided, use as base and enrich
467 | if (lrcContent?.trim()) {
468 | const baseLines = parseLrc(lrcContent);
469 | return addDurations(enrichWithWordTiming(baseLines, tokens));
470 | }
471 |
472 | // Otherwise parse YRC directly
473 | const lines = tokensToLines(tokens);
474 | const deduped = deduplicate(lines);
475 | const withInterludes = insertInterludes(deduped);
476 |
477 | return addDurations(withInterludes);
478 | };
479 |
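// Usage sketch (illustrative; `rawYrc` and `plainLrc` are made-up names):
//
//   import { isNeteaseFormat, parseNeteaseLyrics } from "./netease";
//
//   if (isNeteaseFormat(rawYrc)) {
//     const lines = parseNeteaseLyrics(rawYrc);              // YRC-only path
//     const enriched = parseNeteaseLyrics(rawYrc, plainLrc); // LRC base enriched with YRC word timing
//   }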
--------------------------------------------------------------------------------