├── .github └── FUNDING.yml ├── .vscode └── settings.json ├── app ├── favicon.ico ├── layout.tsx ├── api │ └── generate │ │ └── route.ts ├── AGENTS.md ├── globals.css └── page.tsx ├── postcss.config.mjs ├── lib ├── constants.ts ├── utils.ts ├── services │ ├── timestamp-generation │ │ ├── types.ts │ │ └── TimestampGenerationService.ts │ └── srt-metadata-extractor.ts ├── ai │ ├── retry-handler.ts │ └── gemini-client.ts ├── api │ └── error-handler.ts ├── AGENTS.md ├── validation │ └── request-validator.ts ├── schemas.ts ├── timestamp-utils │ ├── normalizer.ts │ └── prompt-builder.ts └── srt-parser.ts ├── next.config.ts ├── public ├── file.svg └── globe.svg ├── eslint.config.mjs ├── components.json ├── .gitignore ├── tsconfig.json ├── components ├── ui │ ├── progress.tsx │ ├── tooltip.tsx │ ├── button.tsx │ ├── card.tsx │ ├── theme-toggle.tsx │ ├── ghibli-background.tsx │ └── CLAUDE.md ├── AGENTS.md ├── magicui │ ├── sparkles-text.tsx │ └── particles.tsx ├── SrtUploader.tsx └── TimestampResults.tsx ├── package.json ├── .cursor └── rules │ ├── vercel-ai-sdk.mdc │ └── project.mdc ├── .claude ├── settings.json └── commands │ ├── review.md │ ├── fix-issue.md │ ├── check-pr.md │ ├── add-component.md │ └── add-test.md ├── AGENTS.md ├── .windsurfrules ├── README.md └── CLAUDE.md /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: RayFernando1337 2 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "dotenv.enableAutocloaking": false 3 | } 4 | -------------------------------------------------------------------------------- /app/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RayFernando1337/vibestamps/HEAD/app/favicon.ico 
-------------------------------------------------------------------------------- /postcss.config.mjs: -------------------------------------------------------------------------------- 1 | const config = { 2 | plugins: ["@tailwindcss/postcss"], 3 | }; 4 | 5 | export default config; 6 | -------------------------------------------------------------------------------- /lib/constants.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Application-wide constants 3 | */ 4 | 5 | // Max file size in bytes (1 MB - supports 6+ hour livestream SRT files) 6 | export const MAX_FILE_SIZE = 1024 * 1024; 7 | -------------------------------------------------------------------------------- /lib/utils.ts: -------------------------------------------------------------------------------- 1 | import { clsx, type ClassValue } from "clsx" 2 | import { twMerge } from "tailwind-merge" 3 | 4 | export function cn(...inputs: ClassValue[]) { 5 | return twMerge(clsx(inputs)) 6 | } 7 | -------------------------------------------------------------------------------- /next.config.ts: -------------------------------------------------------------------------------- 1 | import type { NextConfig } from "next"; 2 | 3 | const nextConfig: NextConfig = { 4 | compiler: { 5 | // Remove console.log/info/warn in production, keep console.error for debugging 6 | removeConsole: { 7 | exclude: ["error"], 8 | }, 9 | }, 10 | }; 11 | 12 | export default nextConfig; 13 | -------------------------------------------------------------------------------- /public/file.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /eslint.config.mjs: -------------------------------------------------------------------------------- 1 | import { dirname } from "path"; 2 | import { fileURLToPath } from "url"; 3 | import { FlatCompat } from "@eslint/eslintrc"; 4 | 5 | const 
__filename = fileURLToPath(import.meta.url); 6 | const __dirname = dirname(__filename); 7 | 8 | const compat = new FlatCompat({ 9 | baseDirectory: __dirname, 10 | }); 11 | 12 | const eslintConfig = [ 13 | ...compat.extends("next/core-web-vitals", "next/typescript"), 14 | ]; 15 | 16 | export default eslintConfig; 17 | -------------------------------------------------------------------------------- /components.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://ui.shadcn.com/schema.json", 3 | "style": "new-york", 4 | "rsc": true, 5 | "tsx": true, 6 | "tailwind": { 7 | "config": "", 8 | "css": "app/globals.css", 9 | "baseColor": "neutral", 10 | "cssVariables": true, 11 | "prefix": "" 12 | }, 13 | "aliases": { 14 | "components": "@/components", 15 | "utils": "@/lib/utils", 16 | "ui": "@/components/ui", 17 | "lib": "@/lib", 18 | "hooks": "@/hooks" 19 | }, 20 | "iconLibrary": "lucide" 21 | } -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.* 7 | .yarn/* 8 | !.yarn/patches 9 | !.yarn/plugins 10 | !.yarn/releases 11 | !.yarn/versions 12 | 13 | # testing 14 | /coverage 15 | 16 | # next.js 17 | /.next/ 18 | /out/ 19 | 20 | # production 21 | /build 22 | 23 | # misc 24 | .DS_Store 25 | *.pem 26 | 27 | # debug 28 | npm-debug.log* 29 | yarn-debug.log* 30 | yarn-error.log* 31 | .pnpm-debug.log* 32 | 33 | # env files (can opt-in for committing if needed) 34 | .env* 35 | 36 | # vercel 37 | .vercel 38 | 39 | # typescript 40 | *.tsbuildinfo 41 | next-env.d.ts 42 | .env*.local 43 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2017", 4 | "lib": ["dom", "dom.iterable", "esnext"], 5 | "allowJs": true, 6 | "skipLibCheck": true, 7 | "strict": true, 8 | "noEmit": true, 9 | "esModuleInterop": true, 10 | "module": "esnext", 11 | "moduleResolution": "bundler", 12 | "resolveJsonModule": true, 13 | "isolatedModules": true, 14 | "jsx": "preserve", 15 | "incremental": true, 16 | "plugins": [ 17 | { 18 | "name": "next" 19 | } 20 | ], 21 | "paths": { 22 | "@/*": ["./*"] 23 | } 24 | }, 25 | "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"], 26 | "exclude": ["node_modules"] 27 | } 28 | -------------------------------------------------------------------------------- /lib/services/timestamp-generation/types.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Type definitions for timestamp generation service 3 | */ 4 | 5 | export interface SrtMetadata { 6 | durationInSeconds: number; 7 | durationFormatted: string; 8 | isLongContent: boolean; // > 1 hour 9 | entriesCount?: number; 10 | } 11 | 12 | export interface TimestampGenerationRequest { 13 | srtContent: string; 14 | metadata: SrtMetadata; 15 | } 16 | 17 | export interface 
GenerationResult { 18 | keyMoments: Array<{ 19 | time: string; 20 | description: string; 21 | }>; 22 | metadata: { 23 | totalMoments: number; 24 | timeRange: { 25 | first: string; 26 | last: string; 27 | }; 28 | warningFlags?: string[]; 29 | }; 30 | } 31 | -------------------------------------------------------------------------------- /components/ui/progress.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | 3 | import * as ProgressPrimitive from "@radix-ui/react-progress"; 4 | import * as React from "react"; 5 | 6 | import { cn } from "@/lib/utils"; 7 | 8 | function Progress({ 9 | className, 10 | value, 11 | ...props 12 | }: React.ComponentProps) { 13 | return ( 14 | 22 | 27 | 28 | ); 29 | } 30 | 31 | export { Progress }; 32 | -------------------------------------------------------------------------------- /public/globe.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "timestamps-chill", 3 | "version": "0.1.0", 4 | "private": true, 5 | "scripts": { 6 | "dev": "next dev --turbopack", 7 | "build": "next build", 8 | "start": "next start", 9 | "lint": "next lint" 10 | }, 11 | "dependencies": { 12 | "@ai-sdk/gateway": "^1.0.41", 13 | "@ai-sdk/google": "^2.0.28", 14 | "@radix-ui/react-progress": "^1.1.8", 15 | "@radix-ui/react-slot": "^1.2.4", 16 | "@radix-ui/react-tooltip": "^1.2.8", 17 | "ai": "^5.0.87", 18 | "class-variance-authority": "^0.7.1", 19 | "clsx": "^2.1.1", 20 | "lucide-react": "^0.484.0", 21 | "motion": "^12.23.24", 22 | "next": "15.5.7", 23 | "next-themes": "^0.4.6", 24 | "react": "^19.2.0", 25 | "react-dom": "^19.2.0", 26 | "tailwind-merge": "^3.3.1", 27 | "tw-animate-css": "^1.4.0", 28 | "zod": "^4.1.12" 29 | }, 30 | "devDependencies": { 31 | "@eslint/eslintrc": 
"^3.3.1", 32 | "@tailwindcss/postcss": "^4.1.16", 33 | "@types/node": "^20.19.24", 34 | "@types/react": "^19.2.2", 35 | "@types/react-dom": "^19.2.2", 36 | "eslint": "^9.39.1", 37 | "eslint-config-next": "15.5.6", 38 | "tailwindcss": "^4.1.16", 39 | "typescript": "^5.9.3" 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /app/layout.tsx: -------------------------------------------------------------------------------- 1 | import GhibliBackground from "@/components/ui/ghibli-background"; 2 | import type { Metadata } from "next"; 3 | import { ThemeProvider } from "next-themes"; 4 | import { Doto, Geist, Geist_Mono } from "next/font/google"; 5 | import "./globals.css"; 6 | 7 | const geistSans = Geist({ 8 | variable: "--font-geist-sans", 9 | subsets: ["latin"], 10 | }); 11 | 12 | const geistMono = Geist_Mono({ 13 | variable: "--font-geist-mono", 14 | subsets: ["latin"], 15 | }); 16 | 17 | const doto = Doto({ 18 | variable: "--font-doto", 19 | weight: "900", 20 | subsets: ["latin"], 21 | }); 22 | 23 | export const metadata: Metadata = { 24 | title: "Vibestamps | Timestamp Generator for YouTube", 25 | description: 26 | "Vibestamps helps you upload a .srt file to generate meaningful timestamps for YouTube videos", 27 | }; 28 | 29 | export default function RootLayout({ 30 | children, 31 | }: Readonly<{ 32 | children: React.ReactNode; 33 | }>) { 34 | return ( 35 | 36 | 37 | 38 | 39 | {children} 40 | 41 | 42 | 43 | ); 44 | } 45 | -------------------------------------------------------------------------------- /lib/ai/retry-handler.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Retry handler with exponential backoff for AI operations 3 | */ 4 | 5 | export interface RetryConfig { 6 | maxRetries: number; 7 | baseDelayMs: number; 8 | onRetry?: (attempt: number, error: Error) => void; 9 | } 10 | 11 | export const DEFAULT_RETRY_CONFIG: RetryConfig = { 12 | maxRetries: 3, 13 | 
baseDelayMs: 1000, // 1 second base delay 14 | }; 15 | 16 | /** 17 | * Execute an async operation with exponential backoff retry logic 18 | */ 19 | export async function withExponentialRetry( 20 | operation: () => Promise, 21 | config: RetryConfig = DEFAULT_RETRY_CONFIG 22 | ): Promise { 23 | let lastError: Error | null = null; 24 | 25 | for (let attempt = 0; attempt < config.maxRetries; attempt++) { 26 | try { 27 | return await operation(); 28 | } catch (error) { 29 | lastError = error as Error; 30 | 31 | // Call retry callback if provided 32 | if (config.onRetry) { 33 | config.onRetry(attempt + 1, lastError); 34 | } 35 | 36 | // Don't retry on the last attempt 37 | if (attempt < config.maxRetries - 1) { 38 | const delayMs = config.baseDelayMs * Math.pow(2, attempt); // Exponential backoff: 1s, 2s, 4s 39 | console.log(`⏳ Waiting ${delayMs}ms before retry ${attempt + 2}/${config.maxRetries}...`); 40 | await new Promise((resolve) => setTimeout(resolve, delayMs)); 41 | } 42 | } 43 | } 44 | 45 | throw lastError || new Error("Failed after all retries"); 46 | } 47 | -------------------------------------------------------------------------------- /app/api/generate/route.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * API Route: POST /api/generate 3 | * Generates AI-powered timestamps from SRT subtitle files 4 | * 5 | * Architecture: Service Layer Pattern 6 | * - Request validation: validateGenerateRequest 7 | * - Metadata extraction: SrtMetadataExtractor 8 | * - Business logic: TimestampGenerationService 9 | * - Error handling: ApiErrorHandler 10 | * 11 | * Refactored from 451 lines → 90 lines → 35 lines (92% reduction) 12 | */ 13 | 14 | import { validateGenerateRequest } from "@/lib/validation/request-validator"; 15 | import { SrtMetadataExtractor } from "@/lib/services/srt-metadata-extractor"; 16 | import { TimestampGenerationService } from "@/lib/services/timestamp-generation/TimestampGenerationService"; 17 | import { 
ApiErrorHandler } from "@/lib/api/error-handler"; 18 | 19 | /** 20 | * POST /api/generate 21 | * Generate timestamps from SRT content 22 | */ 23 | export async function POST(request: Request) { 24 | try { 25 | // 1. Validate request 26 | const { srtContent } = await validateGenerateRequest(request); 27 | 28 | // 2. Extract metadata 29 | const metadata = SrtMetadataExtractor.extract(srtContent); 30 | 31 | // 3. Generate timestamps 32 | const service = TimestampGenerationService.create(); 33 | const result = await service.generateTimestamps({ srtContent, metadata }); 34 | 35 | // 4. Return streaming response 36 | return result.toTextStreamResponse(); 37 | } catch (error) { 38 | return ApiErrorHandler.handleGenerationError(error); 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /lib/api/error-handler.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Centralized API error handling for consistent error responses 3 | * Provides type-safe error handling with appropriate HTTP status codes 4 | */ 5 | 6 | import { NextResponse } from "next/server"; 7 | import { ValidationError } from "@/lib/validation/request-validator"; 8 | import { GeminiClient } from "@/lib/ai/gemini-client"; 9 | 10 | export class ApiErrorHandler { 11 | /** 12 | * Handle errors from timestamp generation API 13 | * Returns appropriate NextResponse based on error type with correct status codes 14 | */ 15 | static handleGenerationError(error: unknown): NextResponse { 16 | console.error("Error processing request:", error); 17 | 18 | // Validation errors (400 Bad Request) 19 | if (error instanceof ValidationError) { 20 | return error.toResponse(); 21 | } 22 | 23 | // AI-specific errors (500 Internal Server Error) 24 | if (GeminiClient.isNoObjectError(error)) { 25 | return this.createJsonResponse( 26 | { 27 | error: "Failed to generate valid timestamps. 
The AI response could not be parsed.", 28 | details: error.message, 29 | }, 30 | 500 31 | ); 32 | } 33 | 34 | // Generic errors (500 Internal Server Error) 35 | return this.createJsonResponse( 36 | { 37 | error: "Failed to process request", 38 | details: error instanceof Error ? error.message : "Unknown error", 39 | }, 40 | 500 41 | ); 42 | } 43 | 44 | /** 45 | * Helper to create consistent JSON error responses 46 | */ 47 | private static createJsonResponse(body: Record, status: number): NextResponse { 48 | return NextResponse.json(body, { status }); 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /.cursor/rules/vercel-ai-sdk.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Reference guide for the Vercel AI SDK, including links to documentation, core concepts, API reference, and usage examples for integrating AI models like Gemini. 3 | globs: 4 | alwaysApply: false 5 | --- 6 | # Vercel AI SDK Documentation & Usage 7 | 8 | This rule provides quick access to documentation and key information about the Vercel AI SDK, used for integrating AI models into applications. 9 | 10 | ## Key Documentation Links 11 | 12 | - **Main SDK Docs:** [https://sdk.vercel.ai/docs](mdc:https:/sdk.vercel.ai/docs) 13 | - **Core Concepts:** [https://sdk.vercel.ai/docs/concepts](mdc:https:/sdk.vercel.ai/docs/concepts) 14 | - **API Reference:** [https://sdk.vercel.ai/docs/api-reference](mdc:https:/sdk.vercel.ai/docs/api-reference) 15 | - **Supported Models/Providers (including Google Gemini):** [https://sdk.vercel.ai/docs/providers](mdc:https:/sdk.vercel.ai/docs/providers) 16 | - **Next.js Integration:** [https://sdk.vercel.ai/docs/integrations/nextjs](mdc:https:/sdk.vercel.ai/docs/integrations/nextjs) 17 | 18 | ## Core Functions/Hooks 19 | 20 | - `streamText`: For streaming text responses from an API route. 21 | - `generateText`: For non-streaming text generation. 
22 | - `streamObject`: For streaming structured JSON objects. 23 | - `generateObject`: For non-streaming structured object generation. 24 | - `useChat` (React Hook): For building chat interfaces. 25 | - `useCompletion` (React Hook): For text completion interfaces. 26 | 27 | ## Provider Integration 28 | 29 | The SDK uses provider-specific packages (e.g., `@ai-sdk/google`, `@ai-sdk/openai`) or compatible layers (`createOpenAICompatible`) to interact with different LLMs. Remember to install the necessary provider package (e.g., `bun add @ai-sdk/google` for Gemini). -------------------------------------------------------------------------------- /lib/AGENTS.md: -------------------------------------------------------------------------------- 1 | # lib — AGENTS.md 2 | 3 | ## Package Identity 4 | - Shared utilities: constants, Zod schemas, SRT parsing helpers, and CSS utils (cn). 5 | 6 | ## Setup & Run 7 | - Reuse root commands: 8 | - Typecheck: `bunx tsc -p tsconfig.json --noEmit` 9 | - Lint/Build via root scripts 10 | 11 | ## Patterns & Conventions 12 | - ✅ Keep utilities pure and framework-agnostic. 13 | - Example: `lib/srt-parser.ts` (no side effects) 14 | - ✅ Centralize limits and shared values in constants. 15 | - Example: `lib/constants.ts` with `MAX_FILE_SIZE` 16 | - ✅ Validate at the edges with Zod schemas. 17 | - Example: `lib/schemas.ts` (srtEntrySchema, srtContentSchema, srtEntriesSchema, generateApiRequestSchema) 18 | - ✅ Prefer reusable parsing helpers and structured types. 19 | - Example: `parseSrtContent`, `extractTextFromSrt` in `lib/srt-parser.ts` 20 | - ✅ Merge classes via `cn` to keep Tailwind manageable. 21 | - Example: `lib/utils.ts` 22 | - ❌ Don’t redefine constants in components; import from `lib/constants.ts`. 23 | - ❌ Don’t bypass validation in routes/UI; use `lib/schemas.ts`. 
24 | 25 | ## Touch Points / Key Files 26 | - Limits: `lib/constants.ts` 27 | - Schemas: `lib/schemas.ts` 28 | - SRT utilities: `lib/srt-parser.ts` 29 | - CSS utilities: `lib/utils.ts` 30 | 31 | ## JIT Index Hints 32 | - Find schemas: `rg -n "export const .*Schema" lib/schemas.ts` 33 | - SRT helpers: `rg -n "parseSrtContent|extractTextFromSrt|formatTimestamp" lib/srt-parser.ts` 34 | - Constants usage: `rg -n "MAX_FILE_SIZE" app lib components` 35 | 36 | ## Common Gotchas 37 | - `MAX_FILE_SIZE` is in bytes; compare to file sizes and string lengths appropriately. 38 | - SRT timestamps pattern must match `HH:MM:SS,mmm` for parsing; keep regex in sync with route/client logic. 39 | 40 | ## Pre-PR Checks 41 | - `bunx tsc -p tsconfig.json --noEmit && bun run lint && bun run build` 42 | -------------------------------------------------------------------------------- /.cursor/rules/project.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Project overview: Goals, tech stack (Next.js, shadcn/ui, Bun), design system details, and development guidelines 3 | globs: 4 | alwaysApply: false 5 | --- 6 | ## Goal 7 | 8 | This project aims to create a web application that takes SubRip Text (`.srt`) files as input, processes them using the Google Gemini language model via the Vercel AI SDK, and generates meaningful timestamps or summaries based on the content. 9 | 10 | ## Tech Stack 11 | 12 | - **Framework:** Next.js (App Router) 13 | - **Language:** TypeScript 14 | - **Styling:** Tailwind CSS v4 15 | - **UI Components:** shadcn/ui 16 | - **AI Integration:** Vercel AI SDK 17 | - **LLM:** Google Gemini 18 | - **Package Manager:** Bun 19 | 20 | ## Design System 21 | 22 | We are using **shadcn/ui** for our component library. Components are added individually as needed. 
23 | 24 | - **Documentation:** [https://ui.shadcn.com/docs/components/](mdc:https:/ui.shadcn.com/docs/components) 25 | - **Adding Components:** Use the CLI with Bun: 26 | ```bash 27 | bunx --bun shadcn@latest add 28 | ``` 29 | 30 | ## Development Rules 31 | 32 | 1. **Package Management:** Always use `bun` for installing, removing, or managing dependencies (`bun add`, `bun install`, `bun remove`, etc.). 33 | 2. **UI Components:** Prefer components from `shadcn/ui` where possible. Install them using the command above. 34 | 3. **Environment Variables:** Store sensitive information like API keys in environment variables (`.env.local`) and do not commit them to version control. 35 | 4. **Code Style:** Follow standard TypeScript and React best practices. Ensure code is formatted (consider adding a formatter like Prettier later). 36 | 5. **Tailwind CSS v4:** Use Tailwind CSS v4 for styling. Refer to the [Tailwind CSS documentation](https://tailwindcss.com/docs) for more information. -------------------------------------------------------------------------------- /lib/validation/request-validator.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Request validation utilities for API endpoints 3 | */ 4 | 5 | import { NextResponse } from "next/server"; 6 | import { MAX_FILE_SIZE } from "@/lib/constants"; 7 | import { generateApiRequestSchema } from "@/lib/schemas"; 8 | 9 | export interface ValidatedRequest { 10 | srtContent: string; 11 | } 12 | 13 | /** 14 | * Validate incoming request for timestamp generation 15 | * Checks size, parses JSON, and validates schema 16 | */ 17 | export async function validateGenerateRequest( 18 | request: Request 19 | ): Promise { 20 | // Check request size before parsing 21 | const contentLength = request.headers.get("content-length"); 22 | if (contentLength && parseInt(contentLength) > MAX_FILE_SIZE) { 23 | throw new ValidationError( 24 | `Request too large. 
Maximum size is ${MAX_FILE_SIZE / 1024}KB`, 25 | 413 26 | ); 27 | } 28 | 29 | // Parse JSON body 30 | let body: unknown; 31 | try { 32 | body = await request.json(); 33 | } catch { 34 | throw new ValidationError("Invalid JSON in request body", 400); 35 | } 36 | 37 | // Validate with Zod schema 38 | const validationResult = generateApiRequestSchema.safeParse(body); 39 | 40 | if (!validationResult.success) { 41 | throw new ValidationError( 42 | validationResult.error.issues[0].message, 43 | 400 44 | ); 45 | } 46 | 47 | return validationResult.data; 48 | } 49 | 50 | /** 51 | * Custom error class for validation errors with HTTP status codes 52 | */ 53 | export class ValidationError extends Error { 54 | constructor( 55 | message: string, 56 | public statusCode: number = 400 57 | ) { 58 | super(message); 59 | this.name = "ValidationError"; 60 | } 61 | 62 | toResponse(): NextResponse { 63 | return NextResponse.json({ error: this.message }, { status: this.statusCode }); 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /.claude/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "hooks": { 3 | "PreToolUse": [ 4 | { 5 | "matcher": "Bash", 6 | "hooks": [ 7 | { 8 | "type": "command", 9 | "command": "if [[ \"$CLAUDE_TOOL_INPUT\" =~ (rm[[:space:]]+-rf|git[[:space:]]+push[[:space:]]+--force|git[[:space:]]+push[[:space:]]+-f) ]]; then echo '❌ BLOCKED: Dangerous command detected. Please use safer alternatives or ask for permission.' && exit 2; fi" 10 | }, 11 | { 12 | "type": "command", 13 | "command": "if [[ \"$CLAUDE_TOOL_INPUT\" =~ (git[[:space:]]+reset[[:space:]]+--hard|git[[:space:]]+clean[[:space:]]+-fd|bun[[:space:]]+remove[[:space:]]+react) ]]; then echo '⚠️ WARNING: Potentially destructive command. Confirm this action is intended.' 
&& exit 2; fi" 14 | } 15 | ] 16 | }, 17 | { 18 | "matcher": "Edit|Write", 19 | "hooks": [ 20 | { 21 | "type": "command", 22 | "command": "if [[ \"$CLAUDE_FILE_PATHS\" =~ (\\.env\\.local|bun\\.lock|\\.vercel/) ]]; then echo '🔒 BLOCKED: This file requires explicit permission to edit. Ask the user first.' && exit 2; fi" 23 | } 24 | ] 25 | } 26 | ], 27 | "PostToolUse": [ 28 | { 29 | "matcher": "Edit|Write", 30 | "hooks": [ 31 | { 32 | "type": "command", 33 | "command": "if [[ \"$CLAUDE_FILE_PATHS\" =~ \\.(ts|tsx)$ ]] && [[ ! \"$CLAUDE_FILE_PATHS\" =~ \\.d\\.ts$ ]]; then echo '🔍 Type checking: '$CLAUDE_FILE_PATHS'' && bunx tsc --noEmit --pretty \"$CLAUDE_FILE_PATHS\" 2>&1 | head -20 || echo '✅ Types OK'; fi" 34 | } 35 | ] 36 | } 37 | ], 38 | "UserPromptSubmit": [ 39 | { 40 | "hooks": [ 41 | { 42 | "type": "command", 43 | "command": "if [[ \"$CLAUDE_USER_PROMPT\" =~ (ready for PR|create PR|open PR|merge) ]]; then echo '📋 Pre-PR Reminder: Run `bunx tsc --noEmit && bun run lint && bun run build` and complete manual testing before creating PR.'; fi" 44 | } 45 | ] 46 | } 47 | ] 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /components/ui/tooltip.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | 3 | import * as TooltipPrimitive from "@radix-ui/react-tooltip"; 4 | import * as React from "react"; 5 | 6 | import { cn } from "@/lib/utils"; 7 | 8 | function TooltipProvider({ 9 | delayDuration = 0, 10 | ...props 11 | }: React.ComponentProps) { 12 | return ( 13 | 18 | ); 19 | } 20 | 21 | function Tooltip({ ...props }: React.ComponentProps) { 22 | return ( 23 | 24 | 25 | 26 | ); 27 | } 28 | 29 | function TooltipTrigger({ ...props }: React.ComponentProps) { 30 | return ; 31 | } 32 | 33 | function TooltipContent({ 34 | className, 35 | sideOffset = 4, 36 | children, 37 | ...props 38 | }: React.ComponentProps) { 39 | return ( 40 | 41 | 50 | {children} 51 | 52 | 53 | 54 | ); 55 
| } 56 | 57 | export { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger }; 58 | -------------------------------------------------------------------------------- /lib/services/srt-metadata-extractor.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Extracts and validates metadata from SRT subtitle files 3 | * Handles duration detection with fallback logging for edge cases 4 | */ 5 | 6 | import { getDurationInSeconds, formatDuration } from "@/lib/srt-parser"; 7 | import type { SrtMetadata } from "./timestamp-generation/types"; 8 | 9 | export class SrtMetadataExtractor { 10 | /** 11 | * Extract video metadata from SRT content 12 | * Includes duration detection with fallback logging for zero duration cases 13 | */ 14 | static extract(srtContent: string): SrtMetadata { 15 | const durationInSeconds = getDurationInSeconds(srtContent); 16 | const durationFormatted = formatDuration(durationInSeconds); 17 | const isLongContent = durationInSeconds >= 3600; // > 1 hour 18 | 19 | // Log metadata for debugging 20 | this.logMetadata(durationInSeconds, durationFormatted, isLongContent); 21 | 22 | // Handle zero duration edge case 23 | if (durationInSeconds === 0) { 24 | this.handleZeroDuration(srtContent); 25 | } 26 | 27 | return { 28 | durationInSeconds, 29 | durationFormatted, 30 | isLongContent, 31 | }; 32 | } 33 | 34 | /** 35 | * Log extracted metadata for debugging purposes 36 | */ 37 | private static logMetadata( 38 | durationInSeconds: number, 39 | durationFormatted: string, 40 | isLongContent: boolean 41 | ): void { 42 | console.log(`📹 Video duration detected: ${durationInSeconds} seconds (${durationFormatted})`); 43 | console.log(`⏱️ Content type: ${isLongContent ? 
"Long (>1hr)" : "Short (<1hr)"}`); 44 | } 45 | 46 | /** 47 | * Handle zero duration edge case with fallback timestamp extraction 48 | */ 49 | private static handleZeroDuration(srtContent: string): void { 50 | console.warn("⚠️ WARNING: Duration detection returned 0! Attempting manual extraction..."); 51 | 52 | // Try to find the last timestamp in the SRT content 53 | const lines = srtContent.split("\n"); 54 | const lastTimestampLine = lines.reverse().find((line) => /\d{2}:\d{2}:\d{2}/.test(line)); 55 | 56 | if (lastTimestampLine) { 57 | console.log(`🔍 Last timestamp line found: ${lastTimestampLine}`); 58 | } 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /lib/schemas.ts: -------------------------------------------------------------------------------- 1 | import { z } from "zod"; 2 | import { MAX_FILE_SIZE } from "./constants"; 3 | 4 | // SRT Entry schema for validating individual entries 5 | export const srtEntrySchema = z.object({ 6 | id: z.number(), 7 | startTime: z.string(), 8 | endTime: z.string(), 9 | text: z.string(), 10 | }); 11 | 12 | // SRT Content schema for validating the entire SRT content 13 | export const srtContentSchema = z.object({ 14 | srtContent: z 15 | .string() 16 | .min(1, "SRT content is required") 17 | .max(MAX_FILE_SIZE, `SRT content is too large. Maximum size is ${MAX_FILE_SIZE / 1024}KB`), 18 | }); 19 | 20 | // SRT File schema for validating file uploads 21 | export const srtFileSchema = z.object({ 22 | fileName: z.string().endsWith(".srt", "File must be an .srt file"), 23 | fileContent: z 24 | .string() 25 | .min(1, "File content is required") 26 | .max(MAX_FILE_SIZE, `File is too large. Maximum size is ${MAX_FILE_SIZE / 1024}KB`), 27 | }); 28 | 29 | // API Request schema for validating the generate endpoint 30 | export const generateApiRequestSchema = z.object({ 31 | srtContent: z 32 | .string() 33 | .min(1, "SRT content is required") 34 | .max(MAX_FILE_SIZE, `SRT content is too large. 
Maximum size is ${MAX_FILE_SIZE / 1024}KB`), 35 | }); 36 | 37 | // SRT Entries array schema 38 | export const srtEntriesSchema = z.array(srtEntrySchema); 39 | 40 | // Timestamp output schema for AI-generated timestamps 41 | export const timestampItemSchema = z.object({ 42 | time: z 43 | .string() 44 | .regex(/^\d{1,2}:\d{2}(:\d{2})?$/, "Invalid timestamp format (expected MM:SS or HH:MM:SS)") 45 | .describe("Timestamp in MM:SS or HH:MM:SS format"), 46 | description: z 47 | .string() 48 | .min(3, "Description must be at least 3 characters") 49 | .max(150, "Description must be at most 150 characters") 50 | .describe("Brief description of what happens at this timestamp"), 51 | }); 52 | 53 | // Schema for the complete AI-generated timestamp response 54 | export const timestampResponseSchema = z.object({ 55 | keyMoments: z 56 | .array(timestampItemSchema) 57 | .min(1, "At least one timestamp must be generated") 58 | .describe("Array of key moments with timestamps and descriptions"), 59 | }); 60 | -------------------------------------------------------------------------------- /app/AGENTS.md: -------------------------------------------------------------------------------- 1 | # app — AGENTS.md 2 | 3 | ## Package Identity 4 | - Next.js App Router UI and server routes. 5 | - Hosts main page (`page.tsx`), layout, global styles, and API route `api/generate/route.ts`. 6 | 7 | ## Setup & Run 8 | - Dev: `bun run dev` 9 | - Build: `bun run build` 10 | - Start (prod): `bun run start` 11 | - Lint: `bun run lint` 12 | - Typecheck: `bunx tsc -p tsconfig.json --noEmit` 13 | 14 | ## Patterns & Conventions 15 | - ✅ Route handlers in `app/api/**/route.ts` exporting HTTP methods. 16 | - Example: `app/api/generate/route.ts` exports `POST` and returns `result.toTextStreamResponse()`. 17 | - ✅ Validate inputs with Zod before processing. 18 | - Example: `generateApiRequestSchema` in `app/api/generate/route.ts`. 19 | - ✅ Enforce file/req limits using shared constants. 
20 | - Example: `MAX_FILE_SIZE` in `app/api/generate/route.ts`. 21 | - ✅ Client components must declare `"use client"` at top. 22 | - Example: `app/page.tsx`. 23 | - ✅ Stream handling on client: use ReadableStream reader + TextDecoder. 24 | - Example: streaming code in `app/page.tsx`. 25 | - ✅ Use absolute imports (`@/lib/...`, `@/components/...`). 26 | - Example: `app/page.tsx` imports from `@/lib/schemas`. 27 | - ❌ Don’t call AI providers directly from UI. 28 | - Instead of importing `@ai-sdk/gateway` in `app/page.tsx`, call `/api/generate`. 29 | - ❌ Don’t place server logic in client components. 30 | - Keep model calls and validation in `app/api/generate/route.ts`. 31 | 32 | ## Touch Points / Key Files 33 | - Main UI: `app/page.tsx` 34 | - API route: `app/api/generate/route.ts` 35 | - Layout: `app/layout.tsx` 36 | - Global styles: `app/globals.css` 37 | 38 | ## JIT Index Hints 39 | - Route handlers: `rg -n "export async function (GET|POST)" app/api` 40 | - Client components: `rg -n '^"use client"' app` 41 | - Streaming usage: `rg -n "getReader\\(|TextDecoder\\(" app/page.tsx` 42 | - Schemas referenced in app: `rg -n "srtContentSchema|srtEntriesSchema" app` 43 | 44 | ## Common Gotchas 45 | - `AI_GATEWAY_API_KEY` is needed locally if using gateway; never expose keys client-side. 46 | - Respect `MAX_FILE_SIZE` or the route returns 413. 47 | - Client-only code must include `"use client"` (e.g., components that use hooks). 
48 | 49 | ## Pre-PR Checks 50 | - `bunx tsc -p tsconfig.json --noEmit && bun run lint && bun run build` 51 | -------------------------------------------------------------------------------- /components/ui/button.tsx: -------------------------------------------------------------------------------- 1 | import { Slot } from "@radix-ui/react-slot"; 2 | import { cva, type VariantProps } from "class-variance-authority"; 3 | import * as React from "react"; 4 | 5 | import { cn } from "@/lib/utils"; 6 | 7 | const buttonVariants = cva( 8 | "inline-flex items-center justify-center gap-2 whitespace-nowrap rounded-full text-sm font-medium transition-all duration-300 disabled:pointer-events-none disabled:opacity-50 [&_svg]:pointer-events-none [&_svg:not([class*='size-'])]:size-4 shrink-0 [&_svg]:shrink-0 outline-none focus-visible:border-ring focus-visible:ring-ring/50 focus-visible:ring-[3px] aria-invalid:ring-error/20 aria-invalid:border-error", 9 | { 10 | variants: { 11 | variant: { 12 | default: 13 | "bg-brand/90 text-brand-foreground shadow-md hover:bg-brand-hover/90 hover:shadow-glow-brand", 14 | destructive: 15 | "bg-error/90 text-brand-foreground shadow-md hover:bg-error-hover/90 hover:shadow-glow-brand focus-visible:ring-error/20", 16 | outline: 17 | "border border-border/80 bg-surface backdrop-blur-sm shadow-md hover:bg-info/10 hover:text-info hover:border-info", 18 | secondary: 19 | "bg-info/10 text-info shadow-md hover:bg-info/20 hover:shadow-glow-info", 20 | ghost: 21 | "hover:bg-neutral/10 hover:text-neutral", 22 | link: "text-brand underline-offset-4 hover:underline hover:text-brand-hover", 23 | }, 24 | size: { 25 | default: "h-10 px-5 py-2 has-[>svg]:px-4", 26 | sm: "h-9 rounded-full gap-1.5 px-4 has-[>svg]:px-3", 27 | lg: "h-11 rounded-full px-7 has-[>svg]:px-5 text-base", 28 | icon: "size-10", 29 | }, 30 | }, 31 | defaultVariants: { 32 | variant: "default", 33 | size: "default", 34 | }, 35 | } 36 | ); 37 | 38 | function Button({ 39 | className, 40 | variant, 41 
| size, 42 | asChild = false, 43 | ...props 44 | }: React.ComponentProps<"button"> & 45 | VariantProps & { 46 | asChild?: boolean; 47 | }) { 48 | const Comp = asChild ? Slot : "button"; 49 | 50 | return ( 51 | 56 | ); 57 | } 58 | 59 | export { Button, buttonVariants }; 60 | -------------------------------------------------------------------------------- /components/ui/card.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react"; 2 | 3 | import { cn } from "@/lib/utils"; 4 | 5 | function Card({ className, ...props }: React.ComponentProps<"div">) { 6 | return ( 7 |
15 | ); 16 | } 17 | 18 | function CardHeader({ className, ...props }: React.ComponentProps<"div">) { 19 | return ( 20 |
28 | ); 29 | } 30 | 31 | function CardTitle({ className, ...props }: React.ComponentProps<"div">) { 32 | return ( 33 |
38 | ); 39 | } 40 | 41 | function CardDescription({ className, ...props }: React.ComponentProps<"div">) { 42 | return ( 43 |
48 | ); 49 | } 50 | 51 | function CardAction({ className, ...props }: React.ComponentProps<"div">) { 52 | return ( 53 |
58 | ); 59 | } 60 | 61 | function CardContent({ className, ...props }: React.ComponentProps<"div">) { 62 | return
; 63 | } 64 | 65 | function CardFooter({ className, ...props }: React.ComponentProps<"div">) { 66 | return ( 67 |
72 | ); 73 | } 74 | 75 | export { Card, CardAction, CardContent, CardDescription, CardFooter, CardHeader, CardTitle }; 76 | -------------------------------------------------------------------------------- /AGENTS.md: -------------------------------------------------------------------------------- 1 | # Vibestamps — AGENTS.md (Root) 2 | 3 | ## Project Snapshot 4 | - Simple Next.js 15 (App Router) + TypeScript project using Bun. 5 | - Styling: Tailwind CSS v4; UI: shadcn/ui; State/UI utils in components/. 6 | - AI: Vercel AI SDK + @ai-sdk/gateway (Google Gemini 2.5 Pro). 7 | - Sub-folders have their own AGENTS.md for “nearest-wins” guidance. 8 | 9 | ## Root Setup Commands 10 | - Install: `bun install` 11 | - Dev: `bun run dev` (http://localhost:3000) 12 | - Build: `bun run build` 13 | - Start (prod): `bun run start` 14 | - Lint: `bun run lint` 15 | - Typecheck: `bunx tsc -p tsconfig.json --noEmit` 16 | - Tests: not configured (none present) 17 | 18 | ## Universal Conventions 19 | - TypeScript: strict mode enabled; noEmit true. 20 | - ESLint: Next core-web-vitals + TypeScript (see eslint.config.mjs). 21 | - Imports: use absolute paths via `@/*` (see tsconfig paths). 22 | - Commits: Conventional Commits style recommended (e.g., chore:, feat:, fix:). 23 | - Branching/PR: feature branches from main; open PRs with passing build, lint, typecheck. 24 | 25 | ## Security & Secrets 26 | - Never commit API keys or `.env*` files (see .gitignore). 27 | - Local secrets in `.env.local` (e.g., `GOOGLE_API_KEY`, optionally `AI_GATEWAY_API_KEY` for local gateway use). 28 | - Client-visible envs must be prefixed with `NEXT_PUBLIC_` if needed client-side. 
29 | 30 | ## JIT Index (what to open, not what to paste) 31 | 32 | ### Directory Map 33 | - App (UI + routes): `app/` → see [app/AGENTS.md](app/AGENTS.md) 34 | - Components (UI + feature): `components/` → see [components/AGENTS.md](components/AGENTS.md) 35 | - Shared libs (schemas, parsers): `lib/` → see [lib/AGENTS.md](lib/AGENTS.md) 36 | - Public assets: `public/` 37 | 38 | ### Quick Find Commands 39 | - Search functions: `rg -n "export (async )?function|export const" app components lib` 40 | - Find client components: `rg -n '^"use client"' app components` 41 | - API route handlers: `rg -n "export async function (GET|POST)" app/api` 42 | - Zod schemas usage: `rg -n "srtContentSchema|generateApiRequestSchema|srtEntriesSchema" app lib` 43 | - SRT utilities: `rg -n "parseSrtContent|extractTextFromSrt|MAX_FILE_SIZE" lib components app` 44 | 45 | ## Definition of Done 46 | - Build, lint, and typecheck succeed: 47 | - `bunx tsc -p tsconfig.json --noEmit && bun run lint && bun run build` 48 | - Dev server renders main page and SRT upload works end-to-end. 49 | - No secrets or `.env` files committed; streaming API responds without errors. 50 | -------------------------------------------------------------------------------- /components/AGENTS.md: -------------------------------------------------------------------------------- 1 | # components — AGENTS.md 2 | 3 | ## Package Identity 4 | - Feature components and UI primitives (shadcn-style) used by the app. 5 | - Subfolders: 6 | - `components/ui/*`: primitives (Button, Card, Input, Progress, Tooltip, ThemeToggle) 7 | - `components/magicui/*`: visual effects 8 | - Feature: `SrtUploader.tsx`, `TimestampResults.tsx` 9 | 10 | ## Setup & Run 11 | - Reuse root commands (no separate build): 12 | - Dev: `bun run dev` | Build: `bun run build` | Lint: `bun run lint` | Typecheck: `bunx tsc -p tsconfig.json --noEmit` 13 | 14 | ## Patterns & Conventions 15 | - ✅ Functional components with typed props. 
16 | - Example: `components/SrtUploader.tsx`, `components/TimestampResults.tsx` 17 | - ✅ Use `cn` from `@/lib/utils` for class merging. 18 | - Example: `components/ui/button.tsx`, `components/ui/card.tsx` 19 | - ✅ Prefer absolute imports (`@/lib/...`, `@/components/...`). 20 | - Example: `components/ui/tooltip.tsx` 21 | - ✅ UI primitives live under `components/ui/*`. 22 | - Examples: `components/ui/button.tsx`, `components/ui/progress.tsx`, `components/ui/tooltip.tsx` 23 | - ✅ Input validation close to boundaries (e.g., file validation in uploader). 24 | - Example: `components/SrtUploader.tsx` uses Zod and `MAX_FILE_SIZE`. 25 | - ✅ Display streamed results incrementally and clearly. 26 | - Example: `components/TimestampResults.tsx` parses and highlights new lines. 27 | - ❌ Don’t import server-only modules (e.g., `@ai-sdk/gateway`) in components. 28 | - Keep server logic in `app/api/**/route.ts`. 29 | - ❌ Don’t use relative imports to shared libs (e.g., `../lib/utils`); use `@/lib/utils`. 30 | 31 | ## Touch Points / Key Files 32 | - Upload flow: `components/SrtUploader.tsx` 33 | - Streaming display: `components/TimestampResults.tsx` 34 | - UI primitives: `components/ui/*.tsx` 35 | - Visuals: `components/magicui/*` 36 | 37 | ## JIT Index Hints 38 | - Find exported components: `rg -n "export (default )?function|export const .* = \(" components` 39 | - UI primitives: `rg -n "" components/ui/*.tsx` 40 | - Find uploader/stream results: `rg -n "SrtUploader|TimestampResults" components` 41 | 42 | ## Common Gotchas 43 | - Theme toggle relies on `next-themes` and mount guard (`mounted`): see `components/ui/theme-toggle.tsx`. 44 | - Tooltip/Progress are client components (`"use client"`). Ensure correct usage in server vs client contexts. 
45 | 46 | ## Pre-PR Checks 47 | - `bunx tsc -p tsconfig.json --noEmit && bun run lint && bun run build` 48 | -------------------------------------------------------------------------------- /components/ui/theme-toggle.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | 3 | import { useTheme } from "next-themes"; 4 | import { useEffect, useState } from "react"; 5 | import { Button } from "./button"; 6 | 7 | export function ThemeToggle() { 8 | const [mounted, setMounted] = useState(false); 9 | const { theme, setTheme } = useTheme(); 10 | 11 | // useEffect only runs on the client, so now we can safely show the UI 12 | useEffect(() => { 13 | setMounted(true); 14 | }, []); 15 | 16 | if (!mounted) { 17 | return
; // Placeholder to prevent layout shift 18 | } 19 | 20 | return ( 21 | 70 | ); 71 | } 72 | -------------------------------------------------------------------------------- /.claude/commands/review.md: -------------------------------------------------------------------------------- 1 | Perform a comprehensive code review of recent changes. 2 | 3 | ## Review Checklist 4 | 5 | ### 1. Code Conventions (from CLAUDE.md) 6 | - ✅ TypeScript strict mode compliance (no `any` without justification) 7 | - ✅ Absolute imports (`@/components`, `@/lib`) used correctly 8 | - ✅ Functional components with hooks (no class components) 9 | - ✅ Server-side imports (`@ai-sdk/*`) only in API routes, not client components 10 | - ✅ `"use client"` directive present only when needed (hooks, state, events) 11 | 12 | ### 2. Zod Validation 13 | - ✅ All user inputs validated with Zod schemas (client + server) 14 | - ✅ Schemas defined in `lib/schemas.ts` or co-located 15 | - ✅ `.safeParse()` used with proper error handling 16 | 17 | ### 3. Error Handling & Loading States 18 | - ✅ Try-catch blocks for async operations 19 | - ✅ Loading states shown during streaming/API calls 20 | - ✅ Error messages user-friendly (no stack traces exposed) 21 | - ✅ Streaming responses handle connection errors 22 | 23 | ### 4. Security 24 | - ✅ No API keys or secrets in client code 25 | - ✅ No sensitive data logged (API keys, PII, user content) 26 | - ✅ Client env vars use `NEXT_PUBLIC_` prefix 27 | - ✅ File size limits enforced (430 KB for SRT uploads) 28 | 29 | ### 5. Accessibility 30 | - ✅ Semantic HTML elements used 31 | - ✅ ARIA labels for interactive elements 32 | - ✅ Keyboard navigation supported (Tab, Enter, Escape) 33 | - ✅ Color contrast meets WCAG standards 34 | - ✅ Focus states visible 35 | 36 | ### 6. 
Performance 37 | - ✅ No unnecessary re-renders (proper memoization if needed) 38 | - ✅ Dynamic imports for heavy components if applicable 39 | - ✅ Streaming used for AI responses (no buffering entire response) 40 | - ✅ Tailwind classes optimized (no redundant utilities) 41 | 42 | ### 7. Testing Coverage (when tests exist) 43 | - ✅ Unit tests for new utilities in `lib/` 44 | - ✅ Component tests for new UI components 45 | - ✅ E2E tests updated if user flow changed 46 | 47 | ### 8. Documentation 48 | - ✅ Complex logic has comments explaining "why", not "what" 49 | - ✅ New patterns documented in relevant CLAUDE.md file 50 | - ✅ README updated if user-facing features changed 51 | 52 | ## Output Format 53 | 54 | Provide specific, actionable feedback with file/line references: 55 | - **Good**: ✅ `components/SrtUploader.tsx:45` - Excellent Zod validation pattern 56 | - **Issue**: ⚠️ `app/page.tsx:78` - Missing error boundary, add try-catch for API call 57 | - **Critical**: 🚨 `components/Button.tsx:12` - Hardcoded color `bg-blue-500`, use design token 58 | 59 | ## Summary 60 | 61 | - **Total Issues**: X 62 | - **Critical**: X 63 | - **Warnings**: X 64 | - **Suggestions**: X 65 | - **Overall Assessment**: Ready for merge / Needs fixes / Needs discussion 66 | -------------------------------------------------------------------------------- /.windsurfrules: -------------------------------------------------------------------------------- 1 | ## Tech Stack 2 | 3 | - **Framework:** Next.js 15 (App Router) 4 | - **Language:** TypeScript 5 | - **Styling:** Tailwind CSS v4 6 | - **UI Components:** shadcn/ui 7 | - **AI Integration:** Vercel AI SDK 8 | - **LLM:** Google Gemini 9 | - **Package Manager:** Bun 10 | 11 | ## Design System 12 | 13 | We are using **shadcn/ui** for our component library. Components are added individually as needed. 
14 | 15 | - **Documentation:** [https://ui.shadcn.com/docs/components/](mdc:https:/ui.shadcn.com/docs/components) 16 | - **Adding Components:** Use the CLI with Bun: 17 | ```bash 18 | bunx --bun shadcn@latest add 19 | ``` 20 | 21 | ## Development Rules 22 | 23 | 1. **Package Management:** Always use `bun` for installing, removing, or managing dependencies (`bun add`, `bun install`, `bun remove`, etc.). 24 | 2. **UI Components:** Prefer components from `shadcn/ui` where possible. Install them using the command above. 25 | 3. **Environment Variables:** Store sensitive information like API keys in environment variables (`.env.local`) and do not commit them to version control. 26 | 4. **Code Style:** Follow standard TypeScript and React best practices. Ensure code is formatted (consider adding a formatter like Prettier later). 27 | 5. **Tailwind CSS v4:** Use Tailwind CSS v4 for styling. Refer to the [Tailwind CSS documentation](https://tailwindcss.com/docs) for more information. 28 | 29 | # Vercel AI SDK Documentation & Usage 30 | 31 | This rule provides quick access to documentation and key information about the Vercel AI SDK, used for integrating AI models into applications. 32 | 33 | ## Key Documentation Links 34 | 35 | - **Main SDK Docs:** [https://sdk.vercel.ai/docs](mdc:https:/sdk.vercel.ai/docs) 36 | - **Core Concepts:** [https://sdk.vercel.ai/docs/concepts](mdc:https:/sdk.vercel.ai/docs/concepts) 37 | - **API Reference:** [https://sdk.vercel.ai/docs/api-reference](mdc:https:/sdk.vercel.ai/docs/api-reference) 38 | - **Supported Models/Providers (including Google Gemini):** [https://sdk.vercel.ai/docs/providers](mdc:https:/sdk.vercel.ai/docs/providers) 39 | - **Next.js Integration:** [https://sdk.vercel.ai/docs/integrations/nextjs](mdc:https:/sdk.vercel.ai/docs/integrations/nextjs) 40 | 41 | ## Core Functions/Hooks 42 | 43 | - `streamText`: For streaming text responses from an API route. 44 | - `generateText`: For non-streaming text generation. 
45 | - `streamObject`: For streaming structured JSON objects. 46 | - `generateObject`: For non-streaming structured object generation. 47 | - `useChat` (React Hook): For building chat interfaces. 48 | - `useCompletion` (React Hook): For text completion interfaces. 49 | 50 | ## Provider Integration 51 | 52 | The SDK uses provider-specific packages (e.g., `@ai-sdk/google`, `@ai-sdk/openai`) or compatible layers (`createOpenAICompatible`) to interact with different LLMs. Remember to install the necessary provider package (e.g., `bun add @ai-sdk/google` for Gemini). -------------------------------------------------------------------------------- /lib/timestamp-utils/normalizer.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Timestamp normalization utilities 3 | * Handles conversion between MM:SS and HH:MM:SS formats based on video duration 4 | */ 5 | 6 | export interface KeyMoment { 7 | time: string; 8 | description: string; 9 | } 10 | 11 | export interface TimestampResponse { 12 | keyMoments: KeyMoment[]; 13 | } 14 | 15 | /** 16 | * Normalize timestamp format based on video duration (YouTube standard) 17 | * For videos under 1 hour: MM:SS with leading zeros (00:00, 01:23, 15:30) 18 | * For videos over 1 hour: HH:MM:SS with leading zeros (00:00:00, 00:01:23, 02:23:02) 19 | */ 20 | export function normalizeTimestampFormat(timestamp: string, isLongContent: boolean): string { 21 | // Parse the timestamp 22 | const parts = timestamp.split(":"); 23 | 24 | if (parts.length === 3) { 25 | // HH:MM:SS format 26 | const hours = parseInt(parts[0], 10); 27 | const minutes = parseInt(parts[1], 10); 28 | const seconds = parseInt(parts[2], 10); 29 | 30 | if (!isLongContent && hours === 0) { 31 | // Video is under 1 hour and timestamp has hours - convert to MM:SS 32 | return `${minutes.toString().padStart(2, "0")}:${seconds.toString().padStart(2, "0")}`; 33 | } 34 | 35 | // For long content, ensure proper HH:MM:SS format with leading 
zeros 36 | if (isLongContent) { 37 | return `${hours.toString().padStart(2, "0")}:${minutes.toString().padStart(2, "0")}:${seconds 38 | .toString() 39 | .padStart(2, "0")}`; 40 | } 41 | } else if (parts.length === 2) { 42 | // MM:SS format 43 | const minutes = parseInt(parts[0], 10); 44 | const seconds = parseInt(parts[1], 10); 45 | 46 | if (isLongContent) { 47 | // Video is over 1 hour but timestamp is in MM:SS - convert to HH:MM:SS 48 | return `00:${minutes.toString().padStart(2, "0")}:${seconds.toString().padStart(2, "0")}`; 49 | } 50 | 51 | // For short content, ensure MM:SS format with leading zeros 52 | return `${minutes.toString().padStart(2, "0")}:${seconds.toString().padStart(2, "0")}`; 53 | } 54 | 55 | return timestamp; 56 | } 57 | 58 | /** 59 | * Normalize all timestamps in the response based on video duration 60 | */ 61 | export function normalizeTimestampResponse( 62 | response: TimestampResponse, 63 | isLongContent: boolean 64 | ): TimestampResponse { 65 | const normalized = { 66 | ...response, 67 | keyMoments: response.keyMoments.map((moment) => ({ 68 | ...moment, 69 | time: normalizeTimestampFormat(moment.time, isLongContent), 70 | })), 71 | }; 72 | 73 | // Log normalization for debugging 74 | const changedCount = response.keyMoments.filter( 75 | (moment, index) => moment.time !== normalized.keyMoments[index].time 76 | ).length; 77 | 78 | if (changedCount > 0) { 79 | console.log(`📝 Normalized ${changedCount} timestamps for format consistency`); 80 | console.log(` Format: ${isLongContent ? "HH:MM:SS (video >1hr)" : "MM:SS (video <1hr)"}`); 81 | } 82 | 83 | return normalized; 84 | } 85 | -------------------------------------------------------------------------------- /.claude/commands/fix-issue.md: -------------------------------------------------------------------------------- 1 | Analyze and fix GitHub issue: $ARGUMENTS 2 | 3 | ## Workflow 4 | 5 | ### 1. 
Fetch Issue Details 6 | Use `gh issue view $ARGUMENTS` to get: 7 | - Issue title and description 8 | - Labels and assignees 9 | - Related PRs or commits 10 | - User-provided reproduction steps 11 | 12 | ### 2. Understand the Problem 13 | - Identify the root cause (bug, feature request, enhancement) 14 | - Determine affected files/components 15 | - Check if issue is reproducible locally 16 | 17 | ### 3. Search Codebase 18 | Use ripgrep to find relevant code: 19 | ```bash 20 | # Find component/function mentioned in issue 21 | rg -n "export (function|const) ComponentName" app components 22 | 23 | # Find error messages 24 | rg -n "error message text" app components lib 25 | 26 | # Find related functionality 27 | rg -n "keyword from issue" app components lib 28 | ``` 29 | 30 | ### 4. Read Relevant CLAUDE.md Files 31 | - Check `CLAUDE.md` in affected directories for established patterns 32 | - Follow coding conventions and anti-patterns guidance 33 | - Reference JIT index for quick navigation 34 | 35 | ### 5. Implement Fix 36 | - Follow TypeScript strict mode 37 | - Add/update Zod validation if input-related 38 | - Maintain consistency with existing patterns 39 | - Add comments explaining fix if non-obvious 40 | 41 | ### 6. Write/Update Tests 42 | - Add test case reproducing the bug (if bug fix) 43 | - Verify fix with test: `bun test ` (when tests exist) 44 | - Update existing tests if behavior changed 45 | 46 | ### 7. Validate Fix 47 | Run quality checks: 48 | ```bash 49 | bunx tsc --noEmit # Type check 50 | bun run lint # Linting 51 | bun run build # Build verification 52 | bun run dev # Manual testing 53 | ``` 54 | 55 | ### 8. Create Commit 56 | Use Conventional Commits format: 57 | ```bash 58 | git add . 59 | git commit -m "fix: [issue #$ARGUMENTS] brief description 60 | 61 | Resolves #$ARGUMENTS 62 | 63 | - Specific change 1 64 | - Specific change 2 65 | 66 | 🤖 Generated with Claude Code 67 | Co-Authored-By: Claude " 68 | ``` 69 | 70 | ### 9. 
/**
 * Main service for timestamp generation
 * Orchestrates prompt building, AI invocation, and result normalization
 */

import { GeminiClient, DEFAULT_AI_CONFIG } from "@/lib/ai/gemini-client";
import { PromptBuilder } from "@/lib/timestamp-utils/prompt-builder";
import { withExponentialRetry, DEFAULT_RETRY_CONFIG } from "@/lib/ai/retry-handler";
import type { TimestampGenerationRequest } from "./types";

/**
 * Coordinates the end-to-end timestamp-generation pipeline:
 * builds the system prompt from SRT metadata, streams a structured object
 * from the Gemini client, and wraps the whole attempt in exponential retry.
 *
 * Both collaborators are injectable for testing; defaults are constructed
 * from DEFAULT_AI_CONFIG when omitted.
 */
export class TimestampGenerationService {
  private aiClient: GeminiClient;
  private promptBuilder: PromptBuilder;

  constructor(aiClient?: GeminiClient, promptBuilder?: PromptBuilder) {
    this.aiClient = aiClient || new GeminiClient(DEFAULT_AI_CONFIG);
    this.promptBuilder = promptBuilder || new PromptBuilder();
  }

  /**
   * Factory method to create service with default configuration
   */
  static create(): TimestampGenerationService {
    return new TimestampGenerationService();
  }

  /**
   * Generate timestamps from SRT content with retry logic.
   *
   * @param request - SRT content plus duration metadata
   *   (durationInSeconds, durationFormatted, isLongContent).
   * @returns whatever `GeminiClient.streamObject` resolves to — presumably a
   *   streaming result object; confirm against gemini-client.ts.
   */
  async generateTimestamps(request: TimestampGenerationRequest) {
    const { srtContent, metadata } = request;

    // Use retry wrapper for resilience
    return withExponentialRetry(
      async () => {
        // 1. Build the prompt
        const systemPrompt = this.promptBuilder.buildSystemPrompt({
          srtContent,
          durationInSeconds: metadata.durationInSeconds,
          durationFormatted: metadata.durationFormatted,
          isLongContent: metadata.isLongContent,
          endTimestamp: "", // Will be calculated by PromptBuilder
        });

        // 2. Stream AI response with validation
        const result = await this.aiClient.streamObject(
          systemPrompt,
          metadata.isLongContent,
          {
            // onFinish only logs diagnostics; it never alters or rejects the
            // result, so a "short" generation is still returned to the caller.
            onFinish: (object, error) => {
              if (error) {
                console.error("Generation finished with error:", error);
                return;
              }

              if (object && object.keyMoments && object.keyMoments.length > 0) {
                // Check if last timestamp is significantly before video end.
                // Accepts both HH:MM:SS (3 parts) and MM:SS (2 parts).
                const lastTime = object.keyMoments[object.keyMoments.length - 1].time;
                const lastTimeParts = lastTime.split(":").map(Number);
                const lastTimeSeconds =
                  lastTimeParts.length === 3
                    ? lastTimeParts[0] * 3600 + lastTimeParts[1] * 60 + lastTimeParts[2]
                    : lastTimeParts[0] * 60 + lastTimeParts[1];

                const timeDifference = metadata.durationInSeconds - lastTimeSeconds;

                if (timeDifference > 300) {
                  // More than 5 minutes short
                  console.warn(
                    `⚠️ WARNING: Last timestamp (${lastTime}) is ${Math.floor(
                      timeDifference / 60
                    )} minutes before video end`
                  );
                }
              }
            },
          }
        );

        return result;
      },
      {
        ...DEFAULT_RETRY_CONFIG,
        // Called before each retry; purely diagnostic logging.
        onRetry: (attempt, error) => {
          console.error(`Attempt ${attempt} failed:`, error);

          // Log additional details for NoObjectGeneratedError
          if (GeminiClient.isNoObjectError(error)) {
            console.error("NoObjectGeneratedError details:", {
              cause: error.cause,
              text: error.text?.substring(0, 200),
              usage: error.usage,
            });
          }
        },
      }
    );
  }
}
11 | 12 | --- 13 | 14 | ## Manual Testing Checklist 15 | 16 | ### Core Functionality 17 | - [ ] **Dev server starts**: `bun run dev` runs without errors 18 | - [ ] **Production build succeeds**: `bun run build` completes successfully 19 | - [ ] **No console errors**: Check browser console for errors/warnings 20 | - [ ] **No TypeScript errors**: `bunx tsc --noEmit` passes 21 | - [ ] **Linting passes**: `bun run lint` reports no issues 22 | 23 | ### Feature-Specific Testing 24 | 25 | #### SRT Upload & Processing 26 | - [ ] Upload valid SRT file (< 430 KB) 27 | - [ ] Verify file parsing works correctly 28 | - [ ] Check file size validation (try > 430 KB file) 29 | - [ ] Test invalid file format handling 30 | 31 | #### AI Timestamp Generation 32 | - [ ] Click "Generate Timestamps" button 33 | - [ ] Verify streaming starts immediately 34 | - [ ] Check timestamps display correctly line-by-line 35 | - [ ] Confirm generation completes without errors 36 | - [ ] Test error handling (invalid API key, network failure) 37 | 38 | #### UI & Theming 39 | - [ ] Toggle dark/light theme - verify styles correct 40 | - [ ] Test responsive design (mobile, tablet, desktop) 41 | - [ ] Check all interactive elements (buttons, inputs, tooltips) 42 | - [ ] Verify loading states show during operations 43 | - [ ] Confirm error messages are user-friendly 44 | 45 | ### Code Quality Review 46 | 47 | #### TypeScript & Patterns 48 | - [ ] No `any` types without justification 49 | - [ ] No `@ts-ignore` comments (fix types instead) 50 | - [ ] Absolute imports used (`@/components`, `@/lib`) 51 | - [ ] Server-side code not imported in client components 52 | - [ ] `"use client"` directive only when needed 53 | 54 | #### Security & Secrets 55 | - [ ] No API keys or secrets in code 56 | - [ ] No sensitive data in logs 57 | - [ ] Zod validation on all user inputs (client + server) 58 | - [ ] File size limits enforced 59 | 60 | #### Code Organization 61 | - [ ] Functions under 50 lines (extract complex 
logic) 62 | - [ ] Components properly separated (feature vs UI) 63 | - [ ] Utilities in `lib/`, not scattered 64 | - [ ] Comments explain "why", not "what" 65 | 66 | ### Documentation 67 | 68 | - [ ] **CLAUDE.md updated** if new patterns introduced 69 | - [ ] **README updated** if user-facing features changed 70 | - [ ] **Comments added** for complex logic 71 | - [ ] **Commit message** follows Conventional Commits format 72 | 73 | --- 74 | 75 | ## Git Status Check 76 | 77 | Run: `git status` 78 | 79 | Verify: 80 | - [ ] All intended changes are staged 81 | - [ ] No unintended files included (`.env.local`, `bun.lock` unless intentional) 82 | - [ ] No merge conflicts 83 | 84 | --- 85 | 86 | ## Commit Message Template 87 | 88 | If not already committed: 89 | ```bash 90 | git add . 91 | git commit -m "type: brief description 92 | 93 | Detailed explanation if needed. 94 | 95 | 🤖 Generated with Claude Code 96 | Co-Authored-By: Claude " 97 | ``` 98 | 99 | Types: `feat`, `fix`, `chore`, `refactor`, `docs`, `test`, `perf` 100 | 101 | --- 102 | 103 | ## Create Pull Request 104 | 105 | ```bash 106 | gh pr create --title "Type: Brief description" --body "$(cat <<'EOF' 107 | ## Summary 108 | Brief overview of changes. 
109 | 110 | ## Changes 111 | - Change 1 112 | - Change 2 113 | 114 | ## Testing 115 | - [x] Type check passes 116 | - [x] Lint passes 117 | - [x] Build succeeds 118 | - [x] Manual testing complete 119 | - [x] All checklist items above verified 120 | 121 | ## Screenshots (if UI changes) 122 | [Add screenshots here] 123 | 124 | 🤖 Generated with Claude Code 125 | EOF 126 | )" 127 | ``` 128 | 129 | --- 130 | 131 | ## Final Confirmation 132 | 133 | ✅ **All automated checks pass** 134 | ✅ **Manual testing complete** 135 | ✅ **Code reviewed for quality** 136 | ✅ **Documentation updated** 137 | ✅ **Commit message clear** 138 | 139 | **Ready to create PR!** 🚀 140 | -------------------------------------------------------------------------------- /.claude/commands/add-component.md: -------------------------------------------------------------------------------- 1 | Add a shadcn/ui component: $ARGUMENTS 2 | 3 | ## Installation 4 | 5 | Run the shadcn CLI to add the component: 6 | ```bash 7 | bunx --bun shadcn@latest add $ARGUMENTS 8 | ``` 9 | 10 | This will: 11 | 1. Download the component source code 12 | 2. Place it in `components/ui/` 13 | 3. Add any required dependencies 14 | 4. Configure imports in `components.json` 15 | 16 | --- 17 | 18 | ## Verify Installation 19 | 20 | ### Check Files Created 21 | ```bash 22 | # List new files in components/ui/ 23 | ls -la components/ui/ | grep -i "$ARGUMENTS" 24 | ``` 25 | 26 | ### Check Dependencies Added 27 | ```bash 28 | # View package.json for new Radix UI packages 29 | cat package.json | grep -A 5 "dependencies" 30 | ``` 31 | 32 | If new dependencies were added, install them: 33 | ```bash 34 | bun install 35 | ``` 36 | 37 | --- 38 | 39 | ## Documentation 40 | 41 | ### 1. Read Component Docs 42 | Visit shadcn/ui documentation: 43 | - **Docs URL**: https://ui.shadcn.com/docs/components/$ARGUMENTS 44 | - Review: Props, usage examples, variants, accessibility notes 45 | 46 | ### 2. 
Check Component Source 47 | ```bash 48 | # Read the component source code 49 | cat components/ui/$ARGUMENTS.tsx 50 | ``` 51 | 52 | Look for: 53 | - Available props and variants 54 | - Default styling 55 | - Accessibility features (ARIA labels, keyboard navigation) 56 | - Dependencies on other components 57 | 58 | --- 59 | 60 | ## Usage Example 61 | 62 | Create an example usage in the appropriate location: 63 | 64 | ### If Feature Component 65 | Add to `components/` directory: 66 | ```tsx 67 | // components/Example$ARGUMENTSUsage.tsx 68 | "use client" 69 | 70 | import { $ARGUMENTS } from "@/components/ui/$ARGUMENTS" 71 | 72 | export function Example$ARGUMENTSUsage() { 73 | return ( 74 | <$ARGUMENTS> 75 | {/* Component content */} 76 | 77 | ) 78 | } 79 | ``` 80 | 81 | ### If Page Integration 82 | Add to `app/page.tsx` or relevant route: 83 | ```tsx 84 | import { $ARGUMENTS } from "@/components/ui/$ARGUMENTS" 85 | 86 | // Use in JSX 87 | <$ARGUMENTS variant="default">Content 88 | ``` 89 | 90 | --- 91 | 92 | ## Customization 93 | 94 | ### Styling with Tailwind 95 | shadcn components use Tailwind classes. Customize by: 96 | 97 | 1. **Passing className prop**: 98 | ```tsx 99 | <$ARGUMENTS className="custom-class">Content 100 | ``` 101 | 102 | 2. **Modifying component source** (if needed): 103 | - Edit `components/ui/$ARGUMENTS.tsx` 104 | - Preserve accessibility features 105 | - Document changes in `components/ui/CLAUDE.md` 106 | 107 | ### Using Variants 108 | Check component for available variants: 109 | ```tsx 110 | <$ARGUMENTS variant="outline" size="sm">Content 111 | ``` 112 | 113 | --- 114 | 115 | ## Testing 116 | 117 | ### Manual Testing 118 | 1. **Dev server**: `bun run dev` 119 | 2. **Visual verification**: Open http://localhost:3000 120 | 3. **Interaction testing**: Click, hover, keyboard navigation 121 | 4. **Theme testing**: Toggle dark/light mode 122 | 5. 
**Responsive testing**: Mobile, tablet, desktop 123 | 124 | ### Accessibility Testing 125 | - [ ] Keyboard navigation works (Tab, Enter, Escape) 126 | - [ ] Screen reader friendly (ARIA labels present) 127 | - [ ] Focus states visible 128 | - [ ] Color contrast meets standards 129 | 130 | --- 131 | 132 | ## Update Documentation 133 | 134 | ### If New Pattern 135 | Add to `components/ui/CLAUDE.md`: 136 | ```markdown 137 | ### $ARGUMENTS Component 138 | 139 | **Usage**: 140 | \`\`\`tsx 141 | import { $ARGUMENTS } from "@/components/ui/$ARGUMENTS" 142 | 143 | <$ARGUMENTS variant="default">Content 144 | \`\`\` 145 | 146 | **Common Props**: 147 | - `variant`: "default" | "outline" | "ghost" 148 | - `size`: "sm" | "md" | "lg" 149 | - `className`: Additional Tailwind classes 150 | 151 | **Example**: See `components/Example$ARGUMENTSUsage.tsx` 152 | ``` 153 | 154 | --- 155 | 156 | ## Commit Changes 157 | 158 | ```bash 159 | git add components/ui/$ARGUMENTS.tsx package.json bun.lock 160 | 161 | git commit -m "chore: add shadcn/ui $ARGUMENTS component 162 | 163 | Installed using shadcn CLI for consistent styling and accessibility. 164 | 165 | 🤖 Generated with Claude Code 166 | Co-Authored-By: Claude " 167 | ``` 168 | 169 | --- 170 | 171 | ## Quick Reference 172 | 173 | - **Component Location**: `components/ui/$ARGUMENTS.tsx` 174 | - **Documentation**: https://ui.shadcn.com/docs/components/$ARGUMENTS 175 | - **Radix UI Source**: https://www.radix-ui.com/primitives/docs/components/$ARGUMENTS 176 | 177 | Component installed successfully! 
✅ 178 | -------------------------------------------------------------------------------- /lib/ai/gemini-client.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Gemini AI client wrapper for timestamp generation 3 | */ 4 | 5 | import { gateway } from "@ai-sdk/gateway"; 6 | import { NoObjectGeneratedError, streamObject } from "ai"; 7 | import { timestampResponseSchema } from "@/lib/schemas"; 8 | import { normalizeTimestampResponse, TimestampResponse } from "@/lib/timestamp-utils/normalizer"; 9 | 10 | export interface AIConfig { 11 | model: string; 12 | temperature: number; 13 | maxOutputTokens: number; 14 | topP: number; 15 | } 16 | 17 | export const DEFAULT_AI_CONFIG: AIConfig = { 18 | model: "google/gemini-2.5-pro", 19 | temperature: 1, 20 | maxOutputTokens: 65536, 21 | topP: 0.95, 22 | }; 23 | 24 | export interface StreamCallbacks { 25 | onError?: (error: Error) => void; 26 | onFinish?: (object: TimestampResponse | null, error: Error | null) => void; 27 | onRepair?: (text: string, error: Error) => Promise; 28 | } 29 | 30 | export class GeminiClient { 31 | private model: ReturnType; 32 | private config: AIConfig; 33 | 34 | constructor(config: AIConfig = DEFAULT_AI_CONFIG) { 35 | this.config = config; 36 | this.model = gateway(config.model); 37 | } 38 | 39 | /** 40 | * Stream AI-generated timestamps with validation and normalization 41 | */ 42 | async streamObject( 43 | prompt: string, 44 | isLongContent: boolean, 45 | callbacks?: StreamCallbacks 46 | ) { 47 | const result = streamObject({ 48 | model: this.model, 49 | schema: timestampResponseSchema, 50 | prompt: prompt, 51 | temperature: this.config.temperature, 52 | maxOutputTokens: this.config.maxOutputTokens, 53 | topP: this.config.topP, 54 | providerOptions: { 55 | google: { 56 | thinkingConfig: { 57 | thinkingBudget: -1, 58 | includeThoughts: false, 59 | }, 60 | }, 61 | }, 62 | // Handle streaming errors 63 | onError({ error }) { 64 | console.error("Stream 
error:", error); 65 | callbacks?.onError?.(error as Error); 66 | }, 67 | // Validate and normalize final object 68 | onFinish({ object, error }) { 69 | if (error) { 70 | console.error("Validation error:", error); 71 | callbacks?.onFinish?.(null, error as Error); 72 | } else if (object) { 73 | // Normalize timestamp format based on video duration 74 | const normalizedObject = normalizeTimestampResponse(object, isLongContent); 75 | 76 | // Update the object with normalized timestamps 77 | object.keyMoments = normalizedObject.keyMoments; 78 | 79 | const timestampCount = object.keyMoments?.length || 0; 80 | console.log(`✅ Successfully generated ${timestampCount} timestamps`); 81 | 82 | // Log the time range covered 83 | if (object.keyMoments && object.keyMoments.length > 0) { 84 | const firstTime = object.keyMoments[0].time; 85 | const lastTime = object.keyMoments[object.keyMoments.length - 1].time; 86 | console.log(`⏱️ Time range: ${firstTime} to ${lastTime}`); 87 | } 88 | 89 | callbacks?.onFinish?.(object, null); 90 | } 91 | }, 92 | // Attempt to repair malformed JSON 93 | experimental_repairText: async ({ text, error }) => { 94 | console.log("Attempting to repair malformed JSON:", error.message); 95 | 96 | if (callbacks?.onRepair) { 97 | return callbacks.onRepair(text, error as Error); 98 | } 99 | 100 | // Default repair logic 101 | let repaired = text.trim(); 102 | 103 | // Add missing closing braces if needed 104 | const openBraces = (repaired.match(/\{/g) || []).length; 105 | const closeBraces = (repaired.match(/\}/g) || []).length; 106 | if (openBraces > closeBraces) { 107 | repaired += "}".repeat(openBraces - closeBraces); 108 | } 109 | 110 | // Add missing closing brackets if needed 111 | const openBrackets = (repaired.match(/\[/g) || []).length; 112 | const closeBrackets = (repaired.match(/\]/g) || []).length; 113 | if (openBrackets > closeBrackets) { 114 | repaired += "]".repeat(openBrackets - closeBrackets); 115 | } 116 | 117 | console.log("Repaired JSON:", 
repaired); 118 | return repaired; 119 | }, 120 | }); 121 | 122 | return result; 123 | } 124 | 125 | /** 126 | * Check if an error is a NoObjectGeneratedError 127 | */ 128 | static isNoObjectError(error: unknown): error is NoObjectGeneratedError { 129 | return NoObjectGeneratedError.isInstance(error); 130 | } 131 | } 132 | -------------------------------------------------------------------------------- /components/magicui/sparkles-text.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | 3 | import { motion } from "motion/react"; 4 | import { CSSProperties, ReactElement, useEffect, useState } from "react"; 5 | 6 | import { cn } from "@/lib/utils"; 7 | 8 | interface Sparkle { 9 | id: string; 10 | x: string; 11 | y: string; 12 | color: string; 13 | delay: number; 14 | scale: number; 15 | lifespan: number; 16 | } 17 | 18 | const Sparkle: React.FC = ({ id, x, y, color, delay, scale }) => { 19 | return ( 20 | 34 | 38 | 39 | ); 40 | }; 41 | 42 | interface SparklesTextProps { 43 | /** 44 | * @default
45 | * @type ReactElement 46 | * @description 47 | * The component to be rendered as the text 48 | * */ 49 | as?: ReactElement; 50 | 51 | /** 52 | * @default "" 53 | * @type string 54 | * @description 55 | * The className of the text 56 | */ 57 | className?: string; 58 | 59 | /** 60 | * @required 61 | * @type string 62 | * @description 63 | * The text to be displayed 64 | * */ 65 | text: string; 66 | 67 | /** 68 | * @default 10 69 | * @type number 70 | * @description 71 | * The count of sparkles 72 | * */ 73 | sparklesCount?: number; 74 | 75 | /** 76 | * @default "{first: '#16a34a', second: '#0ea5e9'}" 77 | * @type string 78 | * @description 79 | * The colors of the sparkles 80 | * */ 81 | colors?: { 82 | first: string; 83 | second: string; 84 | third?: string; 85 | }; 86 | } 87 | 88 | export const SparklesText: React.FC = ({ 89 | text, 90 | colors = { first: "#16a34a", second: "#0ea5e9", third: "#f59e0b" }, 91 | className, 92 | sparklesCount = 12, 93 | ...props 94 | }) => { 95 | const [sparkles, setSparkles] = useState([]); 96 | 97 | useEffect(() => { 98 | const generateStar = (): Sparkle => { 99 | const starX = `${Math.random() * 100}%`; 100 | const starY = `${Math.random() * 100}%`; 101 | 102 | // Choose randomly between three colors for more variety 103 | const colorRand = Math.random(); 104 | let color; 105 | if (colorRand < 0.33) { 106 | color = colors.first; 107 | } else if (colorRand < 0.66) { 108 | color = colors.second; 109 | } else { 110 | color = colors.third || colors.first; 111 | } 112 | 113 | const delay = Math.random() * 3; 114 | const scale = Math.random() * 0.7 + 0.4; // More consistent but varied sizes 115 | const lifespan = Math.random() * 12 + 8; // Longer lifespans for a dreamier effect 116 | const id = `${starX}-${starY}-${Date.now()}`; 117 | return { id, x: starX, y: starY, color, delay, scale, lifespan }; 118 | }; 119 | 120 | const initializeStars = () => { 121 | const newSparkles = Array.from({ length: sparklesCount }, generateStar); 122 | 
setSparkles(newSparkles); 123 | }; 124 | 125 | const updateStars = () => { 126 | setSparkles((currentSparkles) => 127 | currentSparkles.map((star) => { 128 | if (star.lifespan <= 0) { 129 | return generateStar(); 130 | } else { 131 | return { ...star, lifespan: star.lifespan - 0.025 }; // Slower rate of change 132 | } 133 | }) 134 | ); 135 | }; 136 | 137 | initializeStars(); 138 | const interval = setInterval(updateStars, 250); // Slower interval for dreamier effect 139 | 140 | return () => clearInterval(interval); 141 | }, [colors.first, colors.second, colors.third, sparklesCount]); 142 | 143 | return ( 144 |
155 | 156 | {sparkles.map((sparkle) => ( 157 | 158 | ))} 159 | {text} 160 | 161 |
162 | ); 163 | }; 164 | -------------------------------------------------------------------------------- /lib/srt-parser.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Utilities for parsing SRT files and extracting their content 3 | */ 4 | 5 | export interface SrtEntry { 6 | id: number; 7 | startTime: string; 8 | endTime: string; 9 | text: string; 10 | } 11 | 12 | /** 13 | * Parse SRT file content into structured entries 14 | */ 15 | export function parseSrtContent(content: string): SrtEntry[] { 16 | // Split the content by double newline (entry separator) 17 | const blocks = content.trim().split(/\r?\n\r?\n/); 18 | const entries: SrtEntry[] = []; 19 | 20 | for (const block of blocks) { 21 | const lines = block.split(/\r?\n/); 22 | 23 | // Need at least 3 lines for a valid SRT entry (id, timestamp, and text) 24 | if (lines.length < 3) continue; 25 | 26 | // First line is the entry id 27 | const id = parseInt(lines[0].trim(), 10); 28 | if (isNaN(id)) continue; 29 | 30 | // Second line contains the timestamps 31 | const timeMatch = lines[1].match(/(\d{2}:\d{2}:\d{2},\d{3}) --> (\d{2}:\d{2}:\d{2},\d{3})/); 32 | if (!timeMatch) continue; 33 | 34 | const [, startTime, endTime] = timeMatch; 35 | 36 | // Remaining lines are the text content 37 | const text = lines.slice(2).join(" ").trim(); 38 | 39 | entries.push({ 40 | id, 41 | startTime, 42 | endTime, 43 | text, 44 | }); 45 | } 46 | 47 | return entries; 48 | } 49 | 50 | /** 51 | * Extract plain text from SRT entries for AI processing 52 | */ 53 | export function extractTextFromSrt(entries: SrtEntry[]): string { 54 | return entries.map((entry) => entry.text).join(" "); 55 | } 56 | 57 | /** 58 | * Format time from SRT format (00:00:00,000) to more readable format (00:00:00) 59 | */ 60 | export function formatTimestamp(timestamp: string): string { 61 | return timestamp.replace(",", "."); 62 | } 63 | 64 | /** 65 | * Get full transcript with timestamps 66 | */ 67 | export 
function getTimestampedTranscript(entries: SrtEntry[]): string { 68 | return entries 69 | .map( 70 | (entry) => 71 | `[${formatTimestamp(entry.startTime)} - ${formatTimestamp(entry.endTime)}] ${entry.text}` 72 | ) 73 | .join("\n"); 74 | } 75 | 76 | /** 77 | * Convert SRT timestamp (HH:MM:SS,mmm) to total seconds 78 | */ 79 | function timestampToSeconds(timestamp: string): number { 80 | const parts = timestamp.split(/[,:]/); 81 | const hours = parseInt(parts[0], 10); 82 | const minutes = parseInt(parts[1], 10); 83 | const seconds = parseInt(parts[2], 10); 84 | const milliseconds = parseInt(parts[3] || "0", 10); 85 | 86 | return hours * 3600 + minutes * 60 + seconds + milliseconds / 1000; 87 | } 88 | 89 | /** 90 | * Get the duration of the video from SRT content in seconds 91 | * Returns the maximum end time found in the SRT timestamps 92 | */ 93 | export function getDurationInSeconds(srtContent: string): number { 94 | // Extract all timestamps using the same pattern as the API route 95 | const timestampRegex = /(\d{2}:\d{2}:\d{2},\d{3}) --> (\d{2}:\d{2}:\d{2},\d{3})/g; 96 | let maxSeconds = 0; 97 | let match; 98 | let matchCount = 0; 99 | 100 | // Find the latest end time 101 | while ((match = timestampRegex.exec(srtContent)) !== null) { 102 | matchCount++; 103 | const endTime = match[2]; // Second capture group is the end time 104 | const endTimeSeconds = timestampToSeconds(endTime); 105 | 106 | if (endTimeSeconds > maxSeconds) { 107 | maxSeconds = endTimeSeconds; 108 | } 109 | } 110 | 111 | console.log(`🎬 Found ${matchCount} SRT timestamp ranges, max duration: ${maxSeconds} seconds`); 112 | 113 | // If no matches found, log a sample of the content for debugging 114 | if (matchCount === 0) { 115 | console.error("❌ No SRT timestamps found! 
Sample content:"); 116 | console.error(srtContent.substring(0, 500)); 117 | } 118 | 119 | return maxSeconds; 120 | } 121 | 122 | /** 123 | * Format duration in seconds to human-readable format 124 | * Returns format like "2 hours and 16 mins" or "45 mins" if under an hour 125 | */ 126 | export function formatDuration(durationInSeconds: number): string { 127 | const totalMinutes = Math.floor(durationInSeconds / 60); 128 | const hours = Math.floor(totalMinutes / 60); 129 | const minutes = totalMinutes % 60; 130 | 131 | if (hours > 0) { 132 | if (minutes > 0) { 133 | return `${hours} ${hours === 1 ? "hour" : "hours"} and ${minutes} ${ 134 | minutes === 1 ? "min" : "mins" 135 | }`; 136 | } else { 137 | return `${hours} ${hours === 1 ? "hour" : "hours"}`; 138 | } 139 | } else { 140 | return `${minutes} ${minutes === 1 ? "min" : "mins"}`; 141 | } 142 | } 143 | 144 | /** 145 | * Get the duration of the video from SRT content in a human-readable format 146 | * Returns format like "2 hours and 16 mins" or "45 mins" if under an hour 147 | * @deprecated Use getDurationInSeconds and formatDuration separately for better control 148 | */ 149 | export function getDurationFromSrtContent(srtContent: string): string { 150 | return formatDuration(getDurationInSeconds(srtContent)); 151 | } 152 | -------------------------------------------------------------------------------- /components/ui/ghibli-background.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | interface GhibliBackgroundProps { 4 | className?: string; 5 | } 6 | 7 | const GhibliBackground: React.FC = ({ className }) => { 8 | return ( 9 |
10 | {/* Sky gradient background */} 11 |
15 | {/* Subtle background stars */} 16 |
17 | 18 | {/* Constellations (only visible in dark mode) */} 19 |
20 | {/* Big Dipper */} 21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 | 29 | {/* Orion */} 30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 | 38 | {/* Cassiopeia (W-shape) */} 39 |
40 |
41 |
42 |
43 |
44 |
45 | 46 | {/* Sun/Moon */} 47 |
48 | 49 | {/* Floating clouds */} 50 |
51 |
52 |
53 |
54 | 55 | {/* Distant hills */} 56 |
57 |
58 |
59 |
60 |
61 | 62 | {/* Grass at the bottom */} 63 |
64 |
65 |
66 | ); 67 | }; 68 | 69 | export default GhibliBackground; 70 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Vibestamps 2 | 3 | ![Vibestamps](https://github.com/user-attachments/assets/c3d55375-791f-4cb1-bbc6-e2d7640ad359) 4 | 5 | ## Overview 6 | 7 | Vibestamps is a modern web application that simplifies the process of extracting valuable information from subtitle files. The app allows users to upload SubRip Text (`.srt`) files and leverages Google's Gemini AI model to generate meaningful timestamps, chapters, or summaries based on the content. 8 | 9 | Visit [vibestamps.com](https://vibestamps.com) to try it out! 10 | 11 | ## Features 12 | 13 | - **SRT File Upload**: Drag-and-drop or select SubRip Text (`.srt`) files 14 | - **AI-Powered Analysis**: Process subtitle content using Google Gemini 15 | - **Smart Timestamp Generation**: Extract key moments and organize content chronologically 16 | - **Content Summarization**: Generate concise summaries of video/audio content 17 | - **Responsive UI**: Beautiful, user-friendly interface that works on all devices 18 | - **Real-time Processing**: Stream AI responses for immediate feedback 19 | 20 | ## Tech Stack 21 | 22 | - **Framework:** Next.js (App Router) 23 | - **Language:** TypeScript 24 | - **Styling:** Tailwind CSS v4 25 | - **UI Components:** shadcn/ui 26 | - **AI Integration:** Vercel AI SDK 27 | - **LLM:** Google Gemini 28 | - **Package Manager:** Bun 29 | 30 | ## How It Works 31 | 32 | 1. **Upload**: User uploads an SRT file through the file input component 33 | 2. **Processing**: The app extracts text content from the SRT file client-side 34 | 3. **AI Analysis**: Extracted content is sent to the backend API, which leverages Gemini via Vercel AI SDK 35 | 4. **Generation**: The AI analyzes the dialogue and generates structured timestamps or summaries 36 | 5. 
**Display**: Results are streamed back to the UI and presented in a readable format 37 | 38 | ## Getting Started 39 | 40 | ### Prerequisites 41 | 42 | - [Bun](https://bun.sh/) installed on your system (v1.0.0 or newer) 43 | - Google Gemini API key (see below for instructions) 44 | - Node.js 18.17.0 or later 45 | 46 | ### Setting Up the Google Gemini API Key 47 | 48 | 1. Visit [Google AI Studio](https://makersuite.google.com/app/apikey) 49 | 2. Sign in with your Google account if prompted 50 | 3. Click on "Get API Key" in the left menu 51 | 4. Click "Create API Key" button 52 | 5. Copy your API key - **important**: this is the only time you'll see this key, so be sure to save it securely 53 | 6. Never share your API key or commit it to version control 54 | 55 | ### Installation 56 | 57 | 1. Clone the repository: 58 | 59 | ```bash 60 | git clone https://github.com/RayFernando1337/vibestamps.git 61 | cd vibestamps 62 | ``` 63 | 64 | 2. Install dependencies: 65 | 66 | ```bash 67 | bun install 68 | ``` 69 | 70 | 3. Create a `.env.local` file in the project root: 71 | 72 | ``` 73 | GOOGLE_API_KEY=your_api_key_here 74 | ``` 75 | 76 | 4. Run the development server: 77 | 78 | ```bash 79 | bun dev 80 | ``` 81 | 82 | 5. Open [http://localhost:3000](http://localhost:3000) with your browser to see the application. 83 | 84 | ## Usage 85 | 86 | 1. Upload an SRT subtitle file by clicking the upload area or dragging and dropping 87 | 2. Wait for the file to be processed and analyzed by the AI 88 | 3. View the generated timestamps that highlight key moments in the content 89 | 4. Copy the timestamps to use in your YouTube descriptions or video editing software 90 | 91 | ## Design System 92 | 93 | We are using **shadcn/ui** for our component library. Components are added individually as needed. 
94 | 95 | - **Documentation:** [https://ui.shadcn.com/docs/components/](https://ui.shadcn.com/docs/components/) 96 | - **Adding Components:** Use the CLI with Bun: 97 | ```bash 98 | bunx --bun shadcn@latest add 99 | ``` 100 | 101 | ## API Integration 102 | 103 | The application uses the Vercel AI SDK to communicate with Google's Gemini model: 104 | 105 | - **Backend Route**: `/api/generate` handles processing SRT content 106 | - **SDK Functions**: Uses `streamText` or `streamObject` for real-time AI responses 107 | - **Provider Package**: Requires `@ai-sdk/google` for Gemini integration 108 | 109 | ## Development Rules 110 | 111 | 1. **Package Management:** Always use `bun` for installing, removing, or managing dependencies (`bun add`, `bun install`, `bun remove`, etc.). 112 | 2. **UI Components:** Prefer components from `shadcn/ui` where possible. Install them using the command above. 113 | 3. **Environment Variables:** Store sensitive information like API keys in environment variables (`.env.local`) and do not commit them to version control. 114 | 4. **Code Style:** Follow standard TypeScript and React best practices. Ensure code is formatted (consider adding a formatter like Prettier later). 115 | 116 | ## Project Structure 117 | 118 | - `/app`: Next.js application code 119 | - `/api`: Backend API routes 120 | - `/components`: Reusable UI components 121 | - `/lib`: Utility functions and helpers 122 | - `/public`: Static assets 123 | 124 | ## Troubleshooting 125 | 126 | - **API Key Issues**: If you receive authentication errors, double-check your API key in the `.env.local` file. 127 | - **File Upload Problems**: Make sure you're using a valid SRT file format. 128 | - **Bun Errors**: Ensure you have Bun installed correctly. Run `bun --version` to verify. 
129 | 130 | ## Learn More 131 | 132 | To learn more about the technologies used in this project: 133 | 134 | - [Next.js Documentation](https://nextjs.org/docs) - Next.js features and API 135 | - [Vercel AI SDK](https://sdk.vercel.ai/docs) - AI integration tools 136 | - [Google Gemini API](https://ai.google.dev/docs) - Google's multimodal AI model 137 | - [shadcn/ui](https://ui.shadcn.com/) - UI component library 138 | 139 | ## Sponsorship 140 | 141 | This project is currently seeking sponsors to help with: 142 | 143 | - Hosting costs for deploying the application 144 | - Google Gemini API credits to support processing larger volumes of SRT files 145 | 146 | If you're interested in sponsoring this project or would like to discuss partnership opportunities, please reach out to me via [X fka Twitter @RayFernando1337](https://x.com/Rayfernando1337). 147 | 148 | ## License 149 | 150 | This project is licensed under the MIT License - see the LICENSE file for details. 151 | -------------------------------------------------------------------------------- /.claude/commands/add-test.md: -------------------------------------------------------------------------------- 1 | Generate missing tests for: $ARGUMENTS 2 | 3 | ⚠️ **Note**: This project currently has no testing framework configured. This command is a template for when Vitest + React Testing Library are added. 
4 | 5 | --- 6 | 7 | ## Prerequisites (Not Yet Installed) 8 | 9 | Before running this command, the project needs: 10 | 11 | ```bash 12 | # Install testing dependencies 13 | bun add -D vitest @vitest/ui 14 | bun add -D @testing-library/react @testing-library/jest-dom @testing-library/user-event 15 | bun add -D happy-dom # or jsdom 16 | 17 | # For E2E tests 18 | bun add -D @playwright/test 19 | ``` 20 | 21 | **Update `package.json`** with test scripts: 22 | ```json 23 | { 24 | "scripts": { 25 | "test": "vitest", 26 | "test:ui": "vitest --ui", 27 | "test:coverage": "vitest --coverage", 28 | "test:e2e": "playwright test" 29 | } 30 | } 31 | ``` 32 | 33 | --- 34 | 35 | ## Test Generation Strategy 36 | 37 | ### 1. Identify Test Targets 38 | 39 | Determine what needs testing: 40 | - **Utilities** (`lib/`): Pure functions, Zod schemas, SRT parsing 41 | - **Components** (`components/`): UI behavior, user interactions 42 | - **API Routes** (`app/api/`): Request/response handling, validation 43 | - **E2E Flows**: Upload → Generate → Display 44 | 45 | ### 2. Locate Existing Code 46 | 47 | ```bash 48 | # Find the code to test 49 | rg -n "export (function|const) $ARGUMENTS" app components lib 50 | ``` 51 | 52 | ### 3. 
Determine Test Type 53 | 54 | | Target | Test Type | Location | 55 | |--------|-----------|----------| 56 | | `lib/` utilities | Unit test (Vitest) | Co-located: `lib/file.test.ts` | 57 | | React components | Component test (RTL) | Co-located: `components/File.test.tsx` | 58 | | API routes | Integration test | `tests/integration/api-generate.test.ts` | 59 | | User flows | E2E test (Playwright) | `tests/e2e/upload-flow.spec.ts` | 60 | 61 | --- 62 | 63 | ## Unit Test Template (lib/ utilities) 64 | 65 | Example: `lib/srt-parser.test.ts` 66 | 67 | ```typescript 68 | import { describe, it, expect } from 'vitest' 69 | import { parseSrtContent, extractTextFromSrt } from './srt-parser' 70 | 71 | describe('parseSrtContent', () => { 72 | it('should parse valid SRT content', () => { 73 | const srtContent = `1 74 | 00:00:01,000 --> 00:00:05,000 75 | First subtitle 76 | 77 | 2 78 | 00:00:06,000 --> 00:00:10,000 79 | Second subtitle` 80 | 81 | const result = parseSrtContent(srtContent) 82 | 83 | expect(result).toHaveLength(2) 84 | expect(result[0]).toEqual({ 85 | id: 1, 86 | startTime: '00:00:01,000', 87 | endTime: '00:00:05,000', 88 | text: 'First subtitle' 89 | }) 90 | }) 91 | 92 | it('should handle empty content', () => { 93 | expect(parseSrtContent('')).toEqual([]) 94 | }) 95 | 96 | it('should throw error for invalid format', () => { 97 | expect(() => parseSrtContent('invalid')).toThrow() 98 | }) 99 | }) 100 | ``` 101 | 102 | --- 103 | 104 | ## Component Test Template (React components) 105 | 106 | Example: `components/SrtUploader.test.tsx` 107 | 108 | ```typescript 109 | import { describe, it, expect, vi } from 'vitest' 110 | import { render, screen, fireEvent, waitFor } from '@testing-library/react' 111 | import userEvent from '@testing-library/user-event' 112 | import { SrtUploader } from './SrtUploader' 113 | 114 | describe('SrtUploader', () => { 115 | it('should render upload button', () => { 116 | render() 117 | expect(screen.getByText(/upload/i)).toBeInTheDocument() 
118 | }) 119 | 120 | it('should handle file selection', async () => { 121 | const onUpload = vi.fn() 122 | render(<SrtUploader onUpload={onUpload} />) 123 | 124 | const file = new File(['1\n00:00:01,000 --> 00:00:05,000\nTest'], 'test.srt', { 125 | type: 'application/x-subrip' 126 | }) 127 | 128 | const input = screen.getByLabelText(/upload/i) as HTMLInputElement 129 | await userEvent.upload(input, file) 130 | 131 | await waitFor(() => { 132 | expect(onUpload).toHaveBeenCalledWith(expect.objectContaining({ 133 | name: 'test.srt' 134 | })) 135 | }) 136 | }) 137 | 138 | it('should reject files over size limit', async () => { 139 | render(<SrtUploader onUpload={vi.fn()} />) 140 | 141 | const largeContent = 'a'.repeat(1100 * 1024) // > 1 MB (MAX_FILE_SIZE in lib/constants.ts) 142 | const file = new File([largeContent], 'large.srt') 143 | 144 | const input = screen.getByLabelText(/upload/i) as HTMLInputElement 145 | await userEvent.upload(input, file) 146 | 147 | await waitFor(() => { 148 | expect(screen.getByText(/file is too large/i)).toBeInTheDocument() 149 | }) 150 | }) 151 | }) 152 | ``` 153 | 154 | --- 155 | 156 | ## API Route Test Template 157 | 158 | Example: `tests/integration/api-generate.test.ts` 159 | 160 | ```typescript 161 | import { describe, it, expect } from 'vitest' 162 | import { POST } from '@/app/api/generate/route' 163 | 164 | describe('/api/generate', () => { 165 | it('should return 400 for missing body', async () => { 166 | const request = new Request('http://localhost:3000/api/generate', { 167 | method: 'POST', 168 | body: JSON.stringify({}) 169 | }) 170 | 171 | const response = await POST(request) 172 | expect(response.status).toBe(400) 173 | }) 174 | 175 | it('should stream response for valid input', async () => { 176 | const request = new Request('http://localhost:3000/api/generate', { 177 | method: 'POST', 178 | body: JSON.stringify({ 179 | srtContent: '1\n00:00:01,000 --> 00:00:05,000\nTest subtitle' 180 | }) 181 | }) 182 | 183 | const response = await POST(request) 184 | expect(response.status).toBe(200) 185 |
expect(response.headers.get('content-type')).toContain('text/plain') 186 | }) 187 | }) 188 | ``` 189 | 190 | --- 191 | 192 | ## E2E Test Template (Playwright) 193 | 194 | Example: `tests/e2e/upload-flow.spec.ts` 195 | 196 | ```typescript 197 | import { test, expect } from '@playwright/test' 198 | 199 | test.describe('SRT Upload Flow', () => { 200 | test('should upload SRT and generate timestamps', async ({ page }) => { 201 | await page.goto('http://localhost:3000') 202 | 203 | // Upload file 204 | const fileInput = page.locator('input[type="file"]') 205 | await fileInput.setInputFiles('tests/fixtures/sample.srt') 206 | 207 | // Wait for parsing 208 | await expect(page.getByText(/file uploaded/i)).toBeVisible() 209 | 210 | // Generate timestamps 211 | await page.getByRole('button', { name: /generate/i }).click() 212 | 213 | // Verify streaming starts 214 | await expect(page.getByText(/timestamp/i)).toBeVisible({ timeout: 10000 }) 215 | 216 | // Verify results display 217 | const results = page.locator('[data-testid="timestamp-results"]') 218 | await expect(results).toContainText('00:') 219 | }) 220 | 221 | test('should show error for oversized file', async ({ page }) => { 222 | await page.goto('http://localhost:3000') 223 | 224 | // Upload large file 225 | await page.locator('input[type="file"]').setInputFiles('tests/fixtures/large.srt') 226 | 227 | // Verify error message 228 | await expect(page.getByText(/file size exceeds/i)).toBeVisible() 229 | }) 230 | }) 231 | ``` 232 | 233 | --- 234 | 235 | ## Run Tests 236 | 237 | Once testing framework is installed: 238 | 239 | ```bash 240 | # Unit + Component tests 241 | bun test # Run all tests 242 | bun test $ARGUMENTS # Run specific test file 243 | bun test --watch # Watch mode 244 | bun test --coverage # Coverage report 245 | 246 | # E2E tests 247 | bun test:e2e # Run Playwright tests 248 | ``` 249 | 250 | --- 251 | 252 | ## Coverage Goals 253 | 254 | Aim for: 255 | - **Utilities (`lib/`)**: 90%+ coverage 256 | - 
**Components**: 80%+ coverage 257 | - **API Routes**: 85%+ coverage 258 | - **Critical paths**: 100% E2E coverage 259 | 260 | --- 261 | 262 | ## Update CLAUDE.md 263 | 264 | After adding tests, document patterns in relevant CLAUDE.md: 265 | ```markdown 266 | ## Testing Patterns 267 | 268 | ### Unit Tests (lib/) 269 | - Location: Co-located with source 270 | - Example: See `lib/srt-parser.test.ts` 271 | 272 | ### Component Tests 273 | - Location: Co-located with components 274 | - Example: See `components/SrtUploader.test.tsx` 275 | ``` 276 | 277 | --- 278 | 279 | **When testing is set up, this command will generate comprehensive test coverage for $ARGUMENTS.** 280 | -------------------------------------------------------------------------------- /components/SrtUploader.tsx: -------------------------------------------------------------------------------- 1 | import { Button } from "@/components/ui/button"; 2 | import { Card, CardContent } from "@/components/ui/card"; 3 | import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from "@/components/ui/tooltip"; 4 | import { MAX_FILE_SIZE } from "@/lib/constants"; 5 | import { srtFileSchema } from "@/lib/schemas"; 6 | import { extractTextFromSrt, parseSrtContent, SrtEntry } from "@/lib/srt-parser"; 7 | import { useRef, useState } from "react"; 8 | 9 | interface SrtUploaderProps { 10 | onContentExtracted: (rawContent: string, extractedText: string, entries: SrtEntry[]) => void; 11 | onProcessFile: () => void; 12 | disabled: boolean; 13 | entriesCount: number; 14 | hasContent: boolean; 15 | } 16 | 17 | export function SrtUploader({ 18 | onContentExtracted, 19 | onProcessFile, 20 | disabled, 21 | entriesCount, 22 | hasContent, 23 | }: SrtUploaderProps) { 24 | const [fileName, setFileName] = useState(""); 25 | const [error, setError] = useState(""); 26 | const [isDragging, setIsDragging] = useState(false); 27 | const fileInputRef = useRef(null); 28 | 29 | const handleFileChange = (event: React.ChangeEvent) => { 
    // Take the first selected file (if any) and run it through validation + parsing.
    const file = event.target.files?.[0];
    if (file) processFile(file);
  };

  // Validates, reads, and parses an uploaded .srt file, then hands the results to
  // the parent via onContentExtracted and auto-triggers generation shortly after.
  // All failures surface through the local `error` state; nothing is thrown.
  const processFile = async (file: File) => {
    setFileName(file.name);
    setError("");

    // Check file size before any other validation
    if (file.size > MAX_FILE_SIZE) {
      setError(`File is too large. Maximum size is ${MAX_FILE_SIZE / 1024}KB`);
      return;
    }

    try {
      // Validate file name with Zod
      // (a "placeholder" body is passed so that only the fileName rules can fail
      // here; the real content is validated again below once the file is read)
      const validationResult = srtFileSchema.safeParse({
        fileName: file.name,
        fileContent: "placeholder", // Will be replaced with actual content
      });

      if (!validationResult.success) {
        setError(validationResult.error.issues[0].message);
        return;
      }

      const content = await file.text();

      // Now validate actual content
      const contentValidation = srtFileSchema.safeParse({
        fileName: file.name,
        fileContent: content,
      });

      if (!contentValidation.success) {
        setError(contentValidation.error.issues[0].message);
        return;
      }

      const entries = parseSrtContent(content);

      if (entries.length === 0) {
        setError("Could not parse any valid entries from the SRT file");
        return;
      }

      const extractedText = extractTextFromSrt(entries);
      // Pass both raw SRT content (with timestamps) and extracted text
      onContentExtracted(content, extractedText, entries);

      // Auto-process after a short delay to allow UI to update
      // NOTE(review): `disabled` is captured from this render's closure, so a prop
      // change during the 500ms delay is not observed here — confirm intended.
      setTimeout(() => {
        if (!disabled) {
          onProcessFile();
        }
      }, 500);
    } catch (err) {
      console.error("Error reading file:", err);
      setError("Failed to read the file. 
Please try again."); 89 | } 90 | }; 91 | 92 | const handleDragOver = (e: React.DragEvent) => { 93 | e.preventDefault(); 94 | setIsDragging(true); 95 | }; 96 | 97 | const handleDragLeave = () => { 98 | setIsDragging(false); 99 | }; 100 | 101 | const handleDrop = (e: React.DragEvent) => { 102 | e.preventDefault(); 103 | setIsDragging(false); 104 | 105 | const file = e.dataTransfer.files?.[0]; 106 | if (file) processFile(file); 107 | }; 108 | 109 | const triggerFileInput = () => { 110 | fileInputRef.current?.click(); 111 | }; 112 | 113 | return ( 114 | 124 | 125 | {!hasContent && ( 126 | <> 127 |
128 |

129 | Upload SRT File 130 |

131 |

132 | Drag & drop your .srt file here or click to browse 133 |

134 |
135 | 136 | 144 | 145 | 169 | 170 | )} 171 | 172 | {fileName && ( 173 |
174 | 186 | 187 | 188 | 189 | 190 | 191 | Selected file: 192 | 193 | 194 | 195 | 196 | 197 | {fileName} 198 | 199 | 200 | 201 |

{fileName}

202 |
203 |
204 |
205 |
206 | )} 207 | 208 | {hasContent && !disabled && ( 209 |
210 |

211 | {entriesCount} entries found in the SRT file 212 |

213 | 236 |
237 | )} 238 | 239 | {error && ( 240 |
241 | 253 | 254 | 255 | 256 | 257 | {error} 258 |
259 | )} 260 |
261 |
262 | ); 263 | } 264 | -------------------------------------------------------------------------------- /components/magicui/particles.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | 3 | import { cn } from "@/lib/utils"; 4 | import React, { 5 | ComponentPropsWithoutRef, 6 | useEffect, 7 | useRef, 8 | useState, 9 | } from "react"; 10 | 11 | interface MousePosition { 12 | x: number; 13 | y: number; 14 | } 15 | 16 | function MousePosition(): MousePosition { 17 | const [mousePosition, setMousePosition] = useState({ 18 | x: 0, 19 | y: 0, 20 | }); 21 | 22 | useEffect(() => { 23 | const handleMouseMove = (event: MouseEvent) => { 24 | setMousePosition({ x: event.clientX, y: event.clientY }); 25 | }; 26 | 27 | window.addEventListener("mousemove", handleMouseMove); 28 | 29 | return () => { 30 | window.removeEventListener("mousemove", handleMouseMove); 31 | }; 32 | }, []); 33 | 34 | return mousePosition; 35 | } 36 | 37 | interface ParticlesProps extends ComponentPropsWithoutRef<"div"> { 38 | className?: string; 39 | quantity?: number; 40 | staticity?: number; 41 | ease?: number; 42 | size?: number; 43 | refresh?: boolean; 44 | color?: string; 45 | vx?: number; 46 | vy?: number; 47 | } 48 | 49 | function hexToRgb(hex: string): number[] { 50 | hex = hex.replace("#", ""); 51 | 52 | if (hex.length === 3) { 53 | hex = hex 54 | .split("") 55 | .map((char) => char + char) 56 | .join(""); 57 | } 58 | 59 | const hexInt = parseInt(hex, 16); 60 | const red = (hexInt >> 16) & 255; 61 | const green = (hexInt >> 8) & 255; 62 | const blue = hexInt & 255; 63 | return [red, green, blue]; 64 | } 65 | 66 | type Circle = { 67 | x: number; 68 | y: number; 69 | translateX: number; 70 | translateY: number; 71 | size: number; 72 | alpha: number; 73 | targetAlpha: number; 74 | dx: number; 75 | dy: number; 76 | magnetism: number; 77 | }; 78 | 79 | export const Particles: React.FC = ({ 80 | className = "", 81 | quantity = 100, 82 | staticity = 
50,
  ease = 50,
  size = 0.4,
  refresh = false,
  color = "#ffffff",
  vx = 0,
  vy = 0,
  ...props
}) => {
  // Canvas element / container refs plus mutable animation state that must not
  // trigger re-renders (particle list, smoothed mouse position, canvas size).
  const canvasRef = useRef(null);
  const canvasContainerRef = useRef(null);
  const context = useRef(null);
  const circles = useRef([]);
  const mousePosition = MousePosition();
  const mouse = useRef<{ x: number; y: number }>({ x: 0, y: 0 });
  const canvasSize = useRef<{ w: number; h: number }>({ w: 0, h: 0 });
  // Device pixel ratio for crisp rendering on HiDPI displays (1 during SSR).
  const dpr = typeof window !== "undefined" ? window.devicePixelRatio : 1;
  const rafID = useRef(null);
  const resizeTimeout = useRef(null);

  // Boot the 2D context, start the animation loop, and debounce re-init on
  // window resize. Re-runs when `color` changes so particles pick up the hue.
  useEffect(() => {
    if (canvasRef.current) {
      context.current = canvasRef.current.getContext("2d");
    }
    initCanvas();
    animate();

    const handleResize = () => {
      if (resizeTimeout.current) {
        clearTimeout(resizeTimeout.current);
      }
      // 200ms debounce so rapid resize events only re-init once.
      resizeTimeout.current = setTimeout(() => {
        initCanvas();
      }, 200);
    };

    window.addEventListener("resize", handleResize);

    return () => {
      if (rafID.current != null) {
        window.cancelAnimationFrame(rafID.current);
      }
      if (resizeTimeout.current) {
        clearTimeout(resizeTimeout.current);
      }
      window.removeEventListener("resize", handleResize);
    };
  }, [color]);

  // Forward every pointer move into the mutable mouse ref.
  useEffect(() => {
    onMouseMove();
  }, [mousePosition.x, mousePosition.y]);

  // Allow callers to force a rebuild of the particle field via `refresh`.
  useEffect(() => {
    initCanvas();
  }, [refresh]);

  // Resize the backing canvas, then repopulate and draw the particles.
  const initCanvas = () => {
    resizeCanvas();
    drawParticles();
  };

  // Translate the viewport mouse position into coordinates relative to the
  // canvas center; only update the ref while the pointer is over the canvas.
  const onMouseMove = () => {
    if (canvasRef.current) {
      const rect = canvasRef.current.getBoundingClientRect();
      const { w, h } = canvasSize.current;
      const x = mousePosition.x - rect.left - w / 2;
      const y = mousePosition.y - rect.top - h / 2;
      const inside = x < w / 2 && x > -w / 2 && y < h / 2 && y > -h / 2;
      if (inside) {
        mouse.current.x = x;
        mouse.current.y = y;
      }
    }
  };

  // Match the canvas to its container, scaled by dpr for HiDPI sharpness,
  // then rebuild the particle list from scratch.
  const resizeCanvas = () => {
    if (canvasContainerRef.current && canvasRef.current && context.current) {
      canvasSize.current.w = canvasContainerRef.current.offsetWidth;
      canvasSize.current.h = canvasContainerRef.current.offsetHeight;

      canvasRef.current.width = canvasSize.current.w * dpr;
      canvasRef.current.height = canvasSize.current.h * dpr;
      canvasRef.current.style.width = `${canvasSize.current.w}px`;
      canvasRef.current.style.height = `${canvasSize.current.h}px`;
      context.current.scale(dpr, dpr);

      // Clear existing particles and create new ones with exact quantity
      circles.current = [];
      for (let i = 0; i < quantity; i++) {
        const circle = circleParams();
        drawCircle(circle);
      }
    }
  };

  // Factory for a randomized particle: random position/drift, alpha starts at
  // 0 and fades in toward targetAlpha; magnetism controls mouse attraction.
  const circleParams = (): Circle => {
    const x = Math.floor(Math.random() * canvasSize.current.w);
    const y = Math.floor(Math.random() * canvasSize.current.h);
    const translateX = 0;
    const translateY = 0;
    const pSize = Math.floor(Math.random() * 2) + size;
    const alpha = 0;
    const targetAlpha = parseFloat((Math.random() * 0.6 + 0.1).toFixed(1));
    const dx = (Math.random() - 0.5) * 0.1;
    const dy = (Math.random() - 0.5) * 0.1;
    const magnetism = 0.1 + Math.random() * 4;
    return {
      x,
      y,
      translateX,
      translateY,
      size: pSize,
      alpha,
      targetAlpha,
      dx,
      dy,
      magnetism,
    };
  };

  // Color channels are derived once from the `color` prop for fillStyle below.
  const rgb = hexToRgb(color);

  // Render one particle; when `update` is false the particle is also
  // registered in the circles list (used during initial population).
  const drawCircle = (circle: Circle, update = false) => {
    if (context.current) {
      const { x, y, translateX, translateY, size, alpha } = circle;
      context.current.translate(translateX, translateY);
      context.current.beginPath();
      context.current.arc(x, y, size, 0, 2 * Math.PI);
      context.current.fillStyle = `rgba(${rgb.join(", ")}, ${alpha})`;
      context.current.fill();
      // Reset the transform to the dpr baseline set in resizeCanvas.
      context.current.setTransform(dpr, 0, 0, dpr, 0, 0);

      if (!update) {
        circles.current.push(circle);
      }
    }
  };

  // Wipe the full canvas area before a redraw.
  const clearContext = () => {
    if (context.current) {
      context.current.clearRect(
        0,
        0,
        canvasSize.current.w,
        canvasSize.current.h,
      );
    }
  };

  // Populate and draw `quantity` fresh particles on a cleared canvas.
  const drawParticles = () => {
    clearContext();
    const particleCount = quantity;
    for (let i = 0; i < particleCount; i++) {
      const circle = circleParams();
      drawCircle(circle);
    }
  };

  // Linearly remap `value` from [start1, end1] to [start2, end2], clamping
  // negative results to 0 (used for the edge fade below).
  const remapValue = (
    value: number,
    start1: number,
    end1: number,
    start2: number,
    end2: number,
  ): number => {
    const remapped =
      ((value - start1) * (end2 - start2)) / (end1 - start1) + start2;
    return remapped > 0 ? remapped : 0;
  };

  // Per-frame update: fade alpha near canvas edges, drift each particle, and
  // ease it toward the mouse according to staticity/magnetism.
  const animate = () => {
    clearContext();
    circles.current.forEach((circle: Circle, i: number) => {
      // Handle the alpha value
      const edge = [
        circle.x + circle.translateX - circle.size, // distance from left edge
        canvasSize.current.w - circle.x - circle.translateX - circle.size, // distance from right edge
        circle.y + circle.translateY - circle.size, // distance from top edge
        canvasSize.current.h - circle.y - circle.translateY - circle.size, // distance from bottom edge
      ];
      const closestEdge = edge.reduce((a, b) => Math.min(a, b));
      const remapClosestEdge = parseFloat(
        remapValue(closestEdge, 0, 20, 0, 1).toFixed(2),
      );
      if (remapClosestEdge > 1) {
        // Away from edges: fade in toward the particle's target alpha.
        circle.alpha += 0.02;
        if (circle.alpha > circle.targetAlpha) {
          circle.alpha = circle.targetAlpha;
        }
      } else {
        // Within 20px of an edge: scale alpha down proportionally.
        circle.alpha = circle.targetAlpha * remapClosestEdge;
      }
      circle.x += circle.dx + vx;
      circle.y += circle.dy + vy;
      circle.translateX += 
278 | (mouse.current.x / (staticity / circle.magnetism) - circle.translateX) / 279 | ease; 280 | circle.translateY += 281 | (mouse.current.y / (staticity / circle.magnetism) - circle.translateY) / 282 | ease; 283 | 284 | drawCircle(circle, true); 285 | 286 | // circle gets out of the canvas 287 | if ( 288 | circle.x < -circle.size || 289 | circle.x > canvasSize.current.w + circle.size || 290 | circle.y < -circle.size || 291 | circle.y > canvasSize.current.h + circle.size 292 | ) { 293 | // remove the circle from the array 294 | circles.current.splice(i, 1); 295 | // create a new circle 296 | const newCircle = circleParams(); 297 | drawCircle(newCircle); 298 | } 299 | }); 300 | rafID.current = window.requestAnimationFrame(animate); 301 | }; 302 | 303 | return ( 304 | 312 | ); 313 | }; 314 | -------------------------------------------------------------------------------- /lib/timestamp-utils/prompt-builder.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Prompt building utilities for AI timestamp generation 3 | * Handles dynamic prompt construction based on video metadata 4 | */ 5 | 6 | export interface PromptConfig { 7 | srtContent: string; 8 | durationInSeconds: number; 9 | durationFormatted: string; 10 | isLongContent: boolean; 11 | endTimestamp: string; 12 | } 13 | 14 | export class PromptBuilder { 15 | /** 16 | * Calculate end timestamp in appropriate format 17 | */ 18 | private calculateEndTimestamp(durationInSeconds: number): string { 19 | const hours = Math.floor(durationInSeconds / 3600); 20 | const minutes = Math.floor((durationInSeconds % 3600) / 60); 21 | const seconds = Math.floor(durationInSeconds % 60); 22 | 23 | return hours > 0 24 | ? 
`${hours}:${minutes.toString().padStart(2, "0")}:${seconds.toString().padStart(2, "0")}` 25 | : `${minutes}:${seconds.toString().padStart(2, "0")}`; 26 | } 27 | 28 | /** 29 | * Generate format example based on video duration 30 | */ 31 | private buildFormatExample(isLongContent: boolean): string { 32 | return isLongContent 33 | ? "01:08:08 (HH:MM:SS with leading zeros)" 34 | : "08:20 (MM:SS with leading zeros)"; 35 | } 36 | 37 | /** 38 | * Generate gold standard example based on video duration 39 | */ 40 | private buildGoldStandardExample(isLongContent: boolean): string { 41 | if (isLongContent) { 42 | return `{ 43 | "keyMoments": [ 44 | {"time": "00:00:00", "description": "Cursor to refund unexpected charges"}, 45 | {"time": "00:02:11", "description": "Explaining the new Cursor pricing tiers"}, 46 | {"time": "00:04:38", "description": "How to claim a refund for overages"}, 47 | {"time": "00:08:20", "description": "Showcasing the Ray Transcribes app"}, 48 | {"time": "00:10:43", "description": "Recommending Magic UI templates"}, 49 | {"time": "00:14:40", "description": "How to integrate Claude Code into Cursor"}, 50 | {"time": "00:18:30", "description": "Discovering the Magic UI Command Palette"}, 51 | {"time": "00:24:25", "description": "Detailing advanced Claude Code workflow"}, 52 | {"time": "00:32:40", "description": "Pro-tip for Stripe integration (closed-loop problem)"}, 53 | {"time": "00:35:20", "description": "Claude Code terminal navigation tips"}, 54 | {"time": "00:39:35", "description": "The MLX transcriber repo hits 420 stars"}, 55 | {"time": "00:44:40", "description": "Deep dive into the Claude Code workflow"}, 56 | {"time": "00:52:56", "description": "Explaining the full development stack"}, 57 | {"time": "00:59:08", "description": "Final recap of the Cursor pricing changes"}, 58 | {"time": "01:08:08", "description": "Explaining YouTube memberships and Discord access"}, 59 | {"time": "01:12:00", "description": "Ray's birthday preach on AI 
engineering"}, 60 | {"time": "01:19:50", "description": "Using Claude Code to plan app launch"}, 61 | {"time": "01:25:36", "description": "Deep dive on the planning mode workflow"}, 62 | {"time": "01:32:21", "description": "Final walkthrough of Claude Code setup in Cursor"} 63 | ] 64 | }`; 65 | } 66 | 67 | return `{ 68 | "keyMoments": [ 69 | {"time": "00:00", "description": "Introduction to the topic"}, 70 | {"time": "02:15", "description": "Explaining key concepts"}, 71 | {"time": "05:30", "description": "Live demonstration begins"}, 72 | {"time": "08:45", "description": "Common pitfalls to avoid"}, 73 | {"time": "12:20", "description": "Pro-tip for implementation"}, 74 | {"time": "15:40", "description": "Q&A and community feedback"}, 75 | {"time": "18:30", "description": "Advanced techniques walkthrough"}, 76 | {"time": "22:15", "description": "Real-world example"}, 77 | {"time": "25:50", "description": "Troubleshooting common issues"}, 78 | {"time": "28:30", "description": "Final thoughts and recap"} 79 | ] 80 | }`; 81 | } 82 | 83 | /** 84 | * Build the complete system prompt for AI timestamp generation 85 | */ 86 | public buildSystemPrompt(config: PromptConfig): string { 87 | const endTimestamp = this.calculateEndTimestamp(config.durationInSeconds); 88 | const formatExample = this.buildFormatExample(config.isLongContent); 89 | const goldStandardExample = this.buildGoldStandardExample(config.isLongContent); 90 | 91 | return ` 92 | 93 | \`\`\`srt 94 | ${config.srtContent} 95 | \`\`\` 96 | 97 | 98 | Video Duration: ${config.durationFormatted} (ends at timestamp ${endTimestamp}) 99 | Total Length: ${config.durationInSeconds} seconds 100 | 101 | 102 | # Timestamp Generation Guidelines v4.0 103 | 104 | These instructions are designed to generate a comprehensive, yet scannable, set of timestamps from a video transcript, especially for longer formats like livestreams. 
The goal is to capture not just major topics, but also specific demonstrations, key insights, and memorable moments that provide maximum value to the viewer. 105 | 106 | ### Core Principles 107 | 108 | 1. **Content-Density Over Fixed Numbers:** The number of timestamps should reflect the density of the content, not a fixed count. As a general guideline, aim for **one key moment every 5-10 minutes**, but be flexible. A dense 10-minute segment might need two timestamps, while a 15-minute casual chat might only need one. 109 | 2. **Capture Value, Not Just Topics:** The best timestamps point to specific, valuable information. A viewer should be able to look at the list and immediately find a pro-tip, a deep-dive, or a specific answer. 110 | 3. **Be Specific and Action-Oriented:** Descriptions should be concise (3-6 words) and clearly state what is happening. Use action verbs to convey activity and learning. 111 | 112 | ### Step-by-Step Process 113 | 114 | ### Step 1: Initial Analysis 115 | 116 | - Determine the total video duration from the final timestamp in the transcript. 117 | - Quickly read through the transcript to get a high-level sense of the main themes and the overall flow of the session. 118 | 119 | ### Step 2: Identify Key Moments 120 | 121 | Scan the transcript for the following types of content. This goes beyond simple topic changes and is the key to creating a rich, useful list. 122 | 123 | - **The Hook:** Always create a \`${ 124 | config.isLongContent ? "00:00:00" : "00:00" 125 | }\` timestamp that uses the first few impactful words of the video. 126 | - **Major Topic Shifts:** The most obvious markers, such as moving from a news update (Cursor pricing) to a personal project demo (Ray Transcribes). 127 | - **Specific Feature Demonstrations:** Pinpoint the exact moment a feature is shown and explained. 
128 | - *Example:* "How to integrate Claude Code into Cursor" 129 | - **"Pro-Tip" or "Nugget" Segments:** Isolate moments where a specific, non-obvious piece of advice is given that could save a viewer time or trouble. 130 | - *Example:* "Pro-tip for Stripe integration (the 'closed-loop' problem)" 131 | - **Workflow Deep Dives:** Capture segments dedicated to explaining *how* the host accomplishes a complex task from start to finish. 132 | - *Example:* "Detailing his advanced Claude Code workflow" 133 | - **Live Discoveries or "Aha!" Moments:** If the host discovers a new feature or has a moment of realization live on stream, capture it. It adds personality and is often highly engaging. 134 | - *Example:* "Discovering the Magic UI Command Palette" 135 | - **Community & Meta Moments:** Acknowledge significant interactions with the community or milestones reached during the stream. 136 | - *Example:* "The MLX transcriber repo hits 420 stars" 137 | - **Philosophical or "Soapbox" Segments:** If the host takes a moment to share their broader thoughts on a topic, it's a distinct content block worth timestamping. 138 | - *Example:* "His birthday 'preach' on AI engineering" 139 | 140 | ### Step 3: Draft Timestamps and Descriptions 141 | 142 | - For each identified moment, note the timestamp where it begins. 143 | - Write a concise, specific, and action-oriented description (3-6 words). 144 | - **Good:** "Explaining the new Cursor pricing tiers" 145 | - **Avoid:** "Talks about pricing" 146 | - **Good:** "Final walkthrough of Claude Code setup in Cursor" 147 | - **Avoid:** "Claude Code" 148 | - Use parentheses to add clarifying context where needed (e.g., \`(the 'closed-loop' problem)\`). 149 | 150 | ### Step 4: Format and Review 151 | 152 | 1. Assemble the final list in chronological order. 153 | 2. Generate a structured JSON object with \`keyMoments\` array containing objects with \`time\` and \`description\` fields. 154 | 3. Read the entire list from top to bottom. 
Does it tell the story of the video? Is it easy to scan? Ensure the timestamps are accurate and the descriptions are valuable. Adjust wording for clarity and impact. 155 | 156 | ### Gold Standard Example Format 157 | 158 | The output should be a JSON object with this structure: 159 | ${goldStandardExample} 160 | 161 | 162 | Generate timestamps for this content using the Generate Timestamps v4 instructions. This content is ${ 163 | config.durationFormatted 164 | } long${ 165 | config.isLongContent ? ", so I'm going to need you to give me more timestamps than normal" : "" 166 | }. Provide an appropriate number of timestamps based on content density (aim for one key moment every 5-10 minutes as a guideline). 167 | 168 | CRITICAL REQUIREMENTS: 169 | 1. You MUST analyze the ENTIRE transcript from start (${ 170 | config.isLongContent ? "00:00:00" : "00:00" 171 | }) to the END (${endTimestamp}). 172 | 2. The video is ${config.durationFormatted} long - your final timestamp should be close to ${endTimestamp}. 173 | 3. Do NOT stop early - generate timestamps that span the COMPLETE duration from beginning to ${endTimestamp}. 174 | 4. ALL timestamps must use the ${config.isLongContent ? "HH:MM:SS" : "MM:SS"} format with leading zeros. 175 | 176 | TIMESTAMP FORMAT REQUIREMENT (CRITICAL - YouTube Standard): 177 | - Video duration: ${config.durationFormatted} (${config.durationInSeconds} seconds) 178 | - ${ 179 | config.isLongContent 180 | ? "This video is OVER 1 HOUR long. You MUST use HH:MM:SS format with LEADING ZEROS for ALL timestamps (e.g., 00:00:00, 00:15:30, 01:08:08)." 181 | : "This video is UNDER 1 HOUR long. You MUST use MM:SS format with LEADING ZEROS for ALL timestamps (e.g., 00:00, 08:20, 15:30). DO NOT use HH:MM:SS format." 
182 | } 183 | - Format example: ${formatExample} 184 | - ALWAYS include leading zeros (e.g., "08:20" NOT "8:20", "01:08:08" NOT "1:08:08") 185 | 186 | IMPORTANT: Return ONLY a valid JSON object matching the structure shown in the Gold Standard Example Format above. Use the exact field names "keyMoments", "time", and "description". ${ 187 | config.isLongContent 188 | ? "Use HH:MM:SS format with leading zeros for ALL timestamps." 189 | : "Use MM:SS format with leading zeros for ALL timestamps - DO NOT include hours." 190 | } 191 | 192 | Expected JSON structure (timestamps should go all the way to ${endTimestamp}): 193 | { 194 | "keyMoments": [ 195 | {"time": "${config.isLongContent ? "00:00:00" : "00:00"}", "description": "Opening"}, 196 | {"time": "${config.isLongContent ? "00:15:30" : "15:30"}", "description": "Key topic"}, 197 | ...continue through to approximately ${endTimestamp}... 198 | {"time": "${endTimestamp}", "description": "Closing"} 199 | ] 200 | } 201 | 202 | `; 203 | } 204 | } 205 | -------------------------------------------------------------------------------- /app/globals.css: -------------------------------------------------------------------------------- 1 | @import "tailwindcss"; 2 | @import "tw-animate-css"; 3 | 4 | @custom-variant dark (&:is(.dark *)); 5 | 6 | @theme inline { 7 | /* Base System Colors */ 8 | --color-background: var(--background); 9 | --color-foreground: var(--foreground); 10 | --font-sans: var(--font-geist-sans); 11 | --font-mono: var(--font-geist-mono); 12 | --color-sidebar-ring: var(--sidebar-ring); 13 | --color-sidebar-border: var(--sidebar-border); 14 | --color-sidebar-accent-foreground: var(--sidebar-accent-foreground); 15 | --color-sidebar-accent: var(--sidebar-accent); 16 | --color-sidebar-primary-foreground: var(--sidebar-primary-foreground); 17 | --color-sidebar-primary: var(--sidebar-primary); 18 | --color-sidebar-foreground: var(--sidebar-foreground); 19 | --color-sidebar: var(--sidebar); 20 | --color-chart-5: 
var(--chart-5); 21 | --color-chart-4: var(--chart-4); 22 | --color-chart-3: var(--chart-3); 23 | --color-chart-2: var(--chart-2); 24 | --color-chart-1: var(--chart-1); 25 | --color-ring: var(--ring); 26 | --color-input: var(--input); 27 | --color-border: var(--border); 28 | --color-destructive: var(--destructive); 29 | --color-accent-foreground: var(--accent-foreground); 30 | --color-accent: var(--accent); 31 | --color-muted-foreground: var(--muted-foreground); 32 | --color-muted: var(--muted); 33 | --color-secondary-foreground: var(--secondary-foreground); 34 | --color-secondary: var(--secondary); 35 | --color-primary-foreground: var(--primary-foreground); 36 | --color-primary: var(--primary); 37 | --color-popover-foreground: var(--popover-foreground); 38 | --color-popover: var(--popover); 39 | --color-card-foreground: var(--card-foreground); 40 | --color-card: var(--card); 41 | 42 | /* Semantic Color System - Brand & Interactive */ 43 | --color-brand: var(--brand); 44 | --color-brand-hover: var(--brand-hover); 45 | --color-brand-foreground: var(--brand-foreground); 46 | --color-info: var(--info); 47 | --color-info-hover: var(--info-hover); 48 | --color-info-foreground: var(--info-foreground); 49 | --color-success: var(--success); 50 | --color-success-hover: var(--success-hover); 51 | --color-warning: var(--warning); 52 | --color-error: var(--error); 53 | --color-error-hover: var(--error-hover); 54 | 55 | /* Semantic - Specific Use Cases */ 56 | --color-timestamp: var(--brand); 57 | --color-timestamp-hover: var(--brand-hover); 58 | --color-description: var(--info); 59 | --color-description-hover: var(--info-hover); 60 | --color-surface: var(--surface); 61 | --color-surface-hover: var(--surface-hover); 62 | --color-neutral: var(--neutral); 63 | --color-neutral-hover: var(--neutral-hover); 64 | 65 | /* Border Radius */ 66 | --radius-sm: calc(var(--radius) - 4px); 67 | --radius-md: calc(var(--radius) - 2px); 68 | --radius-lg: var(--radius); 69 | --radius-xl: 
calc(var(--radius) + 4px); 70 | 71 | /* Shadow System - Elevation */ 72 | --shadow-xs: var(--shadow-elevation-xs); 73 | --shadow-sm: var(--shadow-elevation-sm); 74 | --shadow-md: var(--shadow-elevation-md); 75 | --shadow-lg: var(--shadow-elevation-lg); 76 | --shadow-xl: var(--shadow-elevation-xl); 77 | 78 | /* Shadow System - Interactive States */ 79 | --shadow-focus: var(--shadow-focus-ring); 80 | --shadow-focus-brand: var(--shadow-focus-brand-ring); 81 | --shadow-focus-error: var(--shadow-focus-error-ring); 82 | 83 | /* Shadow System - Special Effects */ 84 | --shadow-glow-brand: var(--shadow-glow-brand-effect); 85 | --shadow-glow-info: var(--shadow-glow-info-effect); 86 | --shadow-inset-sm: var(--shadow-inset-subtle); 87 | --shadow-inset-md: var(--shadow-inset-medium); 88 | 89 | /* Opacity System */ 90 | --opacity-surface: 0.8; 91 | --opacity-backdrop: 0.9; 92 | --opacity-disabled: 0.5; 93 | --opacity-hover: 0.9; 94 | --opacity-subtle: 0.5; 95 | } 96 | 97 | :root { 98 | --radius: 0.625rem; 99 | 100 | /* Base System Colors */ 101 | --background: oklch(1 0 0); 102 | --foreground: oklch(0.145 0 0); 103 | --card: oklch(1 0 0 / 0.8); 104 | --card-foreground: oklch(0.145 0 0); 105 | --popover: oklch(1 0 0); 106 | --popover-foreground: oklch(0.145 0 0); 107 | --primary: oklch(0.205 0 0); 108 | --primary-foreground: oklch(0.985 0 0); 109 | --secondary: oklch(0.97 0 0); 110 | --secondary-foreground: oklch(0.205 0 0); 111 | --muted: oklch(0.97 0 0); 112 | --muted-foreground: oklch(0.556 0 0); 113 | --accent: oklch(0.97 0 0); 114 | --accent-foreground: oklch(0.205 0 0); 115 | --destructive: oklch(0.577 0.245 27.325); 116 | --border: oklch(0.922 0 0); 117 | --input: oklch(0.922 0 0); 118 | --ring: oklch(0.708 0 0); 119 | --chart-1: oklch(0.646 0.222 41.116); 120 | --chart-2: oklch(0.6 0.118 184.704); 121 | --chart-3: oklch(0.398 0.07 227.392); 122 | --chart-4: oklch(0.828 0.189 84.429); 123 | --chart-5: oklch(0.769 0.188 70.08); 124 | --sidebar: oklch(0.985 0 0); 125 | 
--sidebar-foreground: oklch(0.145 0 0); 126 | --sidebar-primary: oklch(0.205 0 0); 127 | --sidebar-primary-foreground: oklch(0.985 0 0); 128 | --sidebar-accent: oklch(0.97 0 0); 129 | --sidebar-accent-foreground: oklch(0.205 0 0); 130 | --sidebar-border: oklch(0.922 0 0); 131 | --sidebar-ring: oklch(0.708 0 0); 132 | 133 | /* Semantic Color Palette - Light Mode */ 134 | --brand: oklch(0.6 0.15 160); /* emerald-600 equivalent */ 135 | --brand-hover: oklch(0.55 0.16 160); /* emerald-700 equivalent */ 136 | --brand-foreground: oklch(1 0 0); /* white */ 137 | 138 | --info: oklch(0.6 0.12 230); /* sky-600 equivalent */ 139 | --info-hover: oklch(0.55 0.13 230); /* sky-700 equivalent */ 140 | --info-foreground: oklch(1 0 0); /* white */ 141 | 142 | --success: oklch(0.6 0.15 160); /* emerald-600 (same as brand) */ 143 | --success-hover: oklch(0.55 0.16 160); /* emerald-700 */ 144 | 145 | --warning: oklch(0.75 0.15 85); /* amber-500 equivalent */ 146 | 147 | --error: oklch(0.577 0.245 27.325); /* rose-600 (using destructive) */ 148 | --error-hover: oklch(0.55 0.25 27); /* rose-700 equivalent */ 149 | 150 | --surface: oklch(1 0 0 / 0.8); /* white/80 */ 151 | --surface-hover: oklch(0.97 0 0 / 0.8); /* slate-50/80 */ 152 | 153 | --neutral: oklch(0.6 0 0); /* slate-600 equivalent */ 154 | --neutral-hover: oklch(0.5 0 0); /* slate-700 equivalent */ 155 | 156 | /* Shadow System - Light Mode */ 157 | --shadow-elevation-xs: 0 1px 3px rgba(0, 0, 0, 0.02); 158 | --shadow-elevation-sm: 0 2px 10px rgba(0, 0, 0, 0.03); 159 | --shadow-elevation-md: 0 4px 12px rgba(0, 0, 0, 0.08); 160 | --shadow-elevation-lg: 0 8px 30px rgba(0, 0, 0, 0.12); 161 | --shadow-elevation-xl: 0 12px 40px rgba(0, 0, 0, 0.15); 162 | 163 | --shadow-focus-ring: 0 0 0 3px rgba(14, 165, 233, 0.15); 164 | --shadow-focus-brand-ring: 0 4px 15px rgba(14, 165, 233, 0.15); 165 | --shadow-focus-error-ring: 0 0 0 3px rgba(225, 29, 72, 0.15); 166 | 167 | --shadow-glow-brand-effect: 0 0 8px rgba(16, 185, 129, 0.3); 168 | 
--shadow-glow-info-effect: 0 6px 15px rgba(14, 165, 233, 0.15); 169 | 170 | --shadow-inset-subtle: inset 0 1px 2px rgba(0, 0, 0, 0.05); 171 | --shadow-inset-medium: inset 0 1px 3px rgba(0, 0, 0, 0.1); 172 | } 173 | 174 | .dark { 175 | /* Base System Colors */ 176 | --background: oklch(0.145 0 0); 177 | --foreground: oklch(0.985 0 0); 178 | --card: oklch(0.205 0 0 / 0.8); 179 | --card-foreground: oklch(0.985 0 0); 180 | --popover: oklch(0.205 0 0); 181 | --popover-foreground: oklch(0.985 0 0); 182 | --primary: oklch(0.922 0 0); 183 | --primary-foreground: oklch(0.205 0 0); 184 | --secondary: oklch(0.269 0 0); 185 | --secondary-foreground: oklch(0.985 0 0); 186 | --muted: oklch(0.269 0 0); 187 | --muted-foreground: oklch(0.708 0 0); 188 | --accent: oklch(0.269 0 0); 189 | --accent-foreground: oklch(0.985 0 0); 190 | --destructive: oklch(0.704 0.191 22.216); 191 | --border: oklch(1 0 0 / 10%); 192 | --input: oklch(1 0 0 / 15%); 193 | --ring: oklch(0.556 0 0); 194 | --chart-1: oklch(0.488 0.243 264.376); 195 | --chart-2: oklch(0.696 0.17 162.48); 196 | --chart-3: oklch(0.769 0.188 70.08); 197 | --chart-4: oklch(0.627 0.265 303.9); 198 | --chart-5: oklch(0.645 0.246 16.439); 199 | --sidebar: oklch(0.205 0 0); 200 | --sidebar-foreground: oklch(0.985 0 0); 201 | --sidebar-primary: oklch(0.488 0.243 264.376); 202 | --sidebar-primary-foreground: oklch(0.985 0 0); 203 | --sidebar-accent: oklch(0.269 0 0); 204 | --sidebar-accent-foreground: oklch(0.985 0 0); 205 | --sidebar-border: oklch(1 0 0 / 10%); 206 | --sidebar-ring: oklch(0.556 0 0); 207 | 208 | /* Semantic Color Palette - Dark Mode */ 209 | --brand: oklch(0.65 0.15 160); /* emerald-500 equivalent (darker for better contrast) */ 210 | --brand-hover: oklch(0.7 0.15 160); /* emerald-400 equivalent */ 211 | --brand-foreground: oklch(1 0 0); /* white (consistent in both modes) */ 212 | 213 | --info: oklch(0.6 0.12 230); /* sky-500 equivalent (darker for better contrast) */ 214 | --info-hover: oklch(0.65 0.12 230); /* 
sky-400 equivalent */ 215 | --info-foreground: oklch(1 0 0); /* white (consistent in both modes) */ 216 | 217 | --success: oklch(0.7 0.14 160); /* emerald-400 (slightly lighter than dark-mode brand, unlike light mode where success equals brand) */ 218 | --success-hover: oklch(0.75 0.15 160); /* emerald-300 */ 219 | 220 | --warning: oklch(0.8 0.14 85); /* amber-400 equivalent */ 221 | 222 | --error: oklch(0.704 0.191 22.216); /* rose-400 (using destructive) */ 223 | --error-hover: oklch(0.75 0.2 22); /* rose-300 equivalent */ 224 | 225 | --surface: oklch(0.205 0 0 / 0.85); /* slate-900/85 (slightly more opaque) */ 226 | --surface-hover: oklch(0.269 0 0 / 0.85); /* slate-800/85 */ 227 | 228 | --neutral: oklch(0.6 0 0); /* slate-400 equivalent */ 229 | --neutral-hover: oklch(0.7 0 0); /* slate-300 equivalent */ 230 | 231 | /* Shadow System - Dark Mode */ 232 | --shadow-elevation-xs: 0 1px 3px rgba(0, 0, 0, 0.2); 233 | --shadow-elevation-sm: 0 2px 10px rgba(0, 0, 0, 0.08); 234 | --shadow-elevation-md: 0 4px 12px rgba(0, 0, 0, 0.2); 235 | --shadow-elevation-lg: 0 8px 30px rgba(0, 0, 0, 0.25); 236 | --shadow-elevation-xl: 0 12px 40px rgba(0, 0, 0, 0.3); 237 | 238 | --shadow-focus-ring: 0 0 0 3px rgba(14, 165, 233, 0.3); 239 | --shadow-focus-brand-ring: 0 4px 15px rgba(14, 165, 233, 0.15); 240 | --shadow-focus-error-ring: 0 0 0 3px rgba(225, 29, 72, 0.3); 241 | 242 | --shadow-glow-brand-effect: 0 0 8px rgba(16, 185, 129, 0.2); 243 | --shadow-glow-info-effect: 0 6px 15px rgba(14, 165, 233, 0.15); 244 | 245 | --shadow-inset-subtle: inset 0 1px 3px rgba(0, 0, 0, 0.2); 246 | --shadow-inset-medium: inset 0 1px 3px rgba(0, 0, 0, 0.3); 247 | } 248 | 249 | @layer base { 250 | * { 251 | @apply border-border outline-ring/50; 252 | } 253 | body { 254 | @apply bg-background text-foreground; 255 | } 256 | } 257 | 258 | /* Ghibli-style animation keyframes */ 259 | @keyframes cloudMove { 260 | 0% { transform: translateX(-100%); } 261 | 100% { transform: translateX(100vw); } 262 | } 263 | 264 | @keyframes cloudMoveReverse { 265 | 0% { transform: 
translateX(100vw); } 266 | 100% { transform: translateX(-100%); } 267 | } 268 | 269 | @keyframes float { 270 | 0%, 100% { transform: translateY(0); } 271 | 50% { transform: translateY(-10px); } 272 | } 273 | 274 | @keyframes ghibliPulse { 275 | 0%, 100% { opacity: 0.7; } 276 | 50% { opacity: 1; } 277 | } 278 | 279 | /* Ghibli-style animation utilities */ 280 | .animate-cloud1 { 281 | animation: cloudMove 120s linear infinite; 282 | } 283 | 284 | .animate-cloud2 { 285 | animation: cloudMoveReverse 90s linear infinite; 286 | } 287 | 288 | .animate-cloud3 { 289 | animation: cloudMove 110s linear infinite; 290 | animation-delay: 10s; 291 | } 292 | 293 | .animate-cloud4 { 294 | animation: cloudMoveReverse 100s linear infinite; 295 | animation-delay: 5s; 296 | } 297 | 298 | .animate-float { 299 | animation: float 6s ease-in-out infinite; 300 | } 301 | 302 | .animate-ghibliPulse { 303 | animation: ghibliPulse 4s ease-in-out infinite; 304 | } 305 | -------------------------------------------------------------------------------- /components/ui/CLAUDE.md: -------------------------------------------------------------------------------- 1 | # UI Components - shadcn/ui Primitives 2 | 3 | **Technology**: shadcn/ui (New York style) + Radix UI + Tailwind CSS 4 | **Entry Point**: Individual component files in this directory 5 | **Parent Context**: Extends [../../CLAUDE.md](../../CLAUDE.md) and [../CLAUDE.md](../CLAUDE.md) 6 | 7 | This directory contains UI primitive components from shadcn/ui, customized for the Vibestamps design system. 8 | 9 | --- 10 | 11 | ## Overview 12 | 13 | ### What is shadcn/ui? 
14 | - **NOT a component library**: Components are copied into your project (you own the code) 15 | - **Built on Radix UI**: Accessible, unstyled primitives 16 | - **Styled with Tailwind**: Fully customizable via Tailwind classes 17 | - **TypeScript**: Full type safety 18 | 19 | ### Configuration 20 | 21 | **File**: [../../components.json](../../components.json) 22 | 23 | ```json 24 | { 25 | "style": "new-york", // Clean, modern style variant 26 | "rsc": true, // React Server Components support 27 | "tsx": true, // TypeScript 28 | "tailwind": { 29 | "css": "app/globals.css", // Global styles location 30 | "cssVariables": true, // Use CSS variables for theming 31 | "prefix": "" // No class prefix 32 | }, 33 | "aliases": { 34 | "components": "@/components", 35 | "ui": "@/components/ui", 36 | "lib": "@/lib", 37 | "utils": "@/lib/utils" 38 | }, 39 | "iconLibrary": "lucide" // Lucide React for icons 40 | } 41 | ``` 42 | 43 | --- 44 | 45 | ## Available Components 46 | 47 | Current components in this directory: 48 | 49 | | Component | Purpose | Radix UI Base | 50 | |-----------|---------|---------------| 51 | | [button.tsx](button.tsx) | Buttons with variants | `@radix-ui/react-slot` | 52 | | [card.tsx](card.tsx) | Content containers | None (pure Tailwind) | 53 | | [input.tsx](input.tsx) | Text input fields | None (native ``) | 54 | | [progress.tsx](progress.tsx) | Progress bars | `@radix-ui/react-progress` | 55 | | [tooltip.tsx](tooltip.tsx) | Hover tooltips | `@radix-ui/react-tooltip` | 56 | | [textarea.tsx](textarea.tsx) | Multi-line text input | None (native `