├── .env.example
├── .gitignore
├── .prettierignore
├── LICENSE
├── README.md
├── app
├── api
│ ├── feedback
│ │ └── route.ts
│ ├── keys
│ │ └── route.ts
│ └── research
│ │ └── route.ts
├── favicon.ico
├── globals.css
├── layout.tsx
└── page.tsx
├── components.json
├── components
├── chat
│ ├── api-key-dialog.tsx
│ ├── chat.tsx
│ ├── download-txt.tsx
│ ├── input.tsx
│ ├── markdown.tsx
│ ├── message.tsx
│ ├── research-progress.tsx
│ └── site-header.tsx
└── ui
│ ├── button.tsx
│ ├── dialog.tsx
│ ├── input.tsx
│ ├── slider.tsx
│ └── tooltip.tsx
├── lib
├── deep-research
│ ├── ai
│ │ ├── providers.ts
│ │ ├── text-splitter.test.ts
│ │ └── text-splitter.ts
│ ├── deep-research.ts
│ ├── feedback.ts
│ ├── index.ts
│ └── prompt.ts
├── hooks
│ └── use-scroll-to-bottom.ts
└── utils.ts
├── middleware.ts
├── next-env.d.ts
├── next.config.js
├── package.json
├── postcss.config.js
├── public
├── favicon-16x16.png
├── favicon-32x32.png
├── favicon.ico
├── logo-bg.png
├── logo-text.png
├── og.png
├── providers
│ └── openai.webp
└── site.webmanifest
├── tailwind.config.ts
└── tsconfig.json
/.env.example:
--------------------------------------------------------------------------------
1 |
2 | #### AI API KEYS
3 | OPENAI_API_KEY=your-openai-api-key
4 | FIRECRAWL_KEY=your-firecrawl-api-key
5 | NEXT_PUBLIC_ENABLE_API_KEYS=false
6 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2 |
3 | # Output files
4 | output.md
5 |
6 | # Dependencies
7 | node_modules
8 | .pnp
9 | .pnp.js
10 |
11 | # Local env files
12 | .env
13 | .env.local
14 | .env.development.local
15 | .env.test.local
16 | .env.production.local
17 |
18 | # Testing
19 | coverage
20 |
21 | # Turbo
22 | .turbo
23 |
24 | # Vercel
25 | .vercel
26 |
27 | # Build Outputs
28 | .next/
29 | out/
30 | build
31 | dist
32 |
33 |
34 | # Debug
35 | npm-debug.log*
36 | yarn-debug.log*
37 | yarn-error.log*
38 |
39 | # Misc
40 | .DS_Store
41 | *.pem
42 |
43 | # Package Manager Lock Files
44 | package-lock.json
45 | pnpm-lock.yaml
46 | yarn.lock
47 |
48 | # IDE/Editor specific
49 | .idea/
50 | .vscode/
51 | *.swp
52 | *.swo
53 |
--------------------------------------------------------------------------------
/.prettierignore:
--------------------------------------------------------------------------------
1 | *.hbs
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2025 David Zhang
4 | Copyright (c) 2025 Fekri
5 |
6 | Permission is hereby granted, free of charge, to any person obtaining a copy
7 | of this software and associated documentation files (the "Software"), to deal
8 | in the Software without restriction, including without limitation the rights
9 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
10 | copies of the Software, and to permit persons to whom the Software is
11 | furnished to do so, subject to the following conditions:
12 |
13 | The above copyright notice and this permission notice shall be included in all
14 | copies or substantial portions of the Software.
15 |
16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22 | SOFTWARE.
23 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Open Deep Research Web UI
2 |
3 | A modern, interactive web interface built on top of the original [Deep Research CLI](https://github.com/dzhng/deep-research) project. This web app transforms the command-line research assistant into an intuitive, visually appealing experience using Next.js and shadcn/ui.
4 |
5 | ## Overview
6 |
7 | Open Deep Research Web UI is an AI-powered research assistant that transforms the original CLI tool into a modern web interface using Next.js and shadcn/ui. Try it out at [anotherwrapper.com/open-deep-research](https://anotherwrapper.com/open-deep-research) with your own API keys, or host it yourself.
8 |
9 | The system combines search engines (via FireCrawl), web scraping, and language models (via OpenAI) to perform deep research on any topic. Key features include:
10 |
11 | - **Intelligent Research Process:**
12 |
13 | - Performs iterative research by recursively exploring topics in depth
14 | - Uses LLMs to generate targeted search queries based on research goals
15 | - Creates follow-up questions to better understand research needs
16 | - Processes multiple searches and results in parallel for efficiency
17 | - Configurable depth and breadth parameters to control research scope
18 |
19 | - **Research Output:**
20 |
21 | - Produces detailed markdown reports with findings and sources
22 | - Real-time progress tracking of research steps
23 | - Built-in markdown viewer for reviewing results
24 | - Downloadable research reports
25 |
26 | - **Modern Interface:**
27 | - Interactive controls for adjusting research parameters
28 | - Visual feedback for ongoing research progress
29 | - HTTP-only cookie storage for API keys
30 |
31 | The system maintains the core research capabilities of the original CLI while providing an intuitive visual interface for controlling and monitoring the research process.
32 |
33 | ## Sponsors
34 |
35 | This project is proudly sponsored by [AnotherWrapper](https://anotherwrapper.com).
36 |
37 | [](https://anotherwrapper.com)
38 |
39 | ## Getting Started
40 |
41 | ### Prerequisites
42 |
43 | - Node.js v18 or later (recent Next.js releases require Node 18+)
44 | - API keys for OpenAI and FireCrawl
45 |
46 | ### Installation
47 |
48 | 1. **Clone and Install**
49 |
50 | ```bash
51 | git clone https://github.com/fdarkaou/open-deep-research.git
52 | cd open-deep-research
53 | npm install
54 | ```
55 |
56 | 2. **Configure Environment**
57 |
58 | Create `.env.local` and add:
59 |
60 | ```bash
61 | OPENAI_API_KEY=your-openai-api-key
62 | FIRECRAWL_KEY=your-firecrawl-api-key
63 | NEXT_PUBLIC_ENABLE_API_KEYS=false # Set to false to disable API key dialog
64 | ```
65 |
66 | 3. **Run the App**
67 | ```bash
68 | npm run dev
69 | ```
70 | Visit [http://localhost:3000](http://localhost:3000)
71 |
72 | ## API Key Management
73 |
74 | When `NEXT_PUBLIC_ENABLE_API_KEYS` is set to `true`, the app includes an API key input dialog that allows users to try out the research assistant directly in their browser using their own API keys. Keys are stored securely in HTTP-only cookies and are never exposed to client-side JavaScript.
75 |
76 | For your own deployment, you can disable this dialog by setting `NEXT_PUBLIC_ENABLE_API_KEYS=false` and configure the API keys directly in your `.env.local` file instead.
77 |
78 | ## License
79 |
80 | MIT License. Feel free to use and modify the code for your own projects as you wish.
81 |
82 | ## Acknowledgements
83 |
84 | - **Original CLI:** [dzhng/deep-research](https://github.com/dzhng/deep-research)
85 | - **Sponsor:** [AnotherWrapper](https://anotherwrapper.com)
86 | - **Tools:** Next.js, shadcn/ui, anotherwrapper, Vercel AI SDK
87 |
88 | Happy researching!
89 |
--------------------------------------------------------------------------------
/app/api/feedback/route.ts:
--------------------------------------------------------------------------------
1 | import { NextRequest, NextResponse } from "next/server";
2 |
3 | import { AIModel } from "@/lib/deep-research/ai/providers";
4 | import { generateFeedback } from "@/lib/deep-research/feedback";
5 |
6 | export async function POST(req: NextRequest) {
7 | try {
8 | const { query, numQuestions, modelId = "o3-mini" } = await req.json();
9 |
10 | // Retrieve API key(s) from secure cookies
11 | const openaiKey = req.cookies.get("openai-key")?.value;
12 | const firecrawlKey = req.cookies.get("firecrawl-key")?.value;
13 |
14 | // Add API key validation
15 | if (process.env.NEXT_PUBLIC_ENABLE_API_KEYS === "true") {
16 | if (!openaiKey || !firecrawlKey) {
17 | return NextResponse.json(
18 | { error: "API keys are required but not provided" },
19 | { status: 401 }
20 | );
21 | }
22 | }
23 |
24 | console.log("\n🔍 [FEEDBACK ROUTE] === Request Started ===");
25 | console.log("Query:", query);
26 | console.log("Model ID:", modelId);
27 | console.log("Number of Questions:", numQuestions);
28 | console.log("API Keys Present:", {
29 | OpenAI: openaiKey ? "✅" : "❌",
30 | FireCrawl: firecrawlKey ? "✅" : "❌",
31 | });
32 |
33 | try {
34 | const questions = await generateFeedback({
35 | query,
36 | numQuestions,
37 | modelId: modelId as AIModel,
38 | apiKey: openaiKey,
39 | });
40 |
41 | console.log("\n✅ [FEEDBACK ROUTE] === Success ===");
42 | console.log("Generated Questions:", questions);
43 | console.log("Number of Questions Generated:", questions.length);
44 |
45 | return NextResponse.json({ questions });
46 | } catch (error) {
47 | console.error("\n❌ [FEEDBACK ROUTE] === Generation Error ===");
48 | console.error("Error:", error);
49 | throw error;
50 | }
51 | } catch (error) {
52 | console.error("\n💥 [FEEDBACK ROUTE] === Route Error ===");
53 | console.error("Error:", error);
54 |
55 | return NextResponse.json(
56 | {
57 | error: "Feedback generation failed",
58 | details: error instanceof Error ? error.message : String(error),
59 | },
60 | { status: 500 }
61 | );
62 | }
63 | }
64 |
--------------------------------------------------------------------------------
/app/api/keys/route.ts:
--------------------------------------------------------------------------------
1 | import { NextRequest, NextResponse } from 'next/server';
2 |
3 | // Handle GET requests to check keys and POST requests to set keys
4 | export async function GET(req: NextRequest) {
5 | const openaiKey = req.cookies.get('openai-key')?.value;
6 | const firecrawlKey = req.cookies.get('firecrawl-key')?.value;
7 | const keysPresent = Boolean(openaiKey && firecrawlKey);
8 | return NextResponse.json({ keysPresent });
9 | }
10 |
11 | export async function POST(req: NextRequest) {
12 | try {
13 | const { openaiKey, firecrawlKey } = await req.json();
14 | const response = NextResponse.json({ success: true });
15 | response.cookies.set('openai-key', openaiKey, {
16 | httpOnly: true,
17 | secure: process.env.NODE_ENV === 'production',
18 | path: '/',
19 | sameSite: 'strict',
20 | });
21 | response.cookies.set('firecrawl-key', firecrawlKey, {
22 | httpOnly: true,
23 | secure: process.env.NODE_ENV === 'production',
24 | path: '/',
25 | sameSite: 'strict',
26 | });
27 | return response;
28 | } catch (error) {
29 | console.error(error);
30 | return NextResponse.json(
31 | { error: 'Failed to set API keys' },
32 | { status: 500 },
33 | );
34 | }
35 | }
36 |
37 | // New: DELETE handler to remove API keys
38 | export async function DELETE(req: NextRequest) {
39 | try {
40 | const response = NextResponse.json({ success: true });
41 | response.cookies.delete('openai-key');
42 | response.cookies.delete('firecrawl-key');
43 | return response;
44 | } catch (error) {
45 | console.error(error);
46 | return NextResponse.json(
47 | { error: 'Failed to remove API keys' },
48 | { status: 500 },
49 | );
50 | }
51 | }
52 |
--------------------------------------------------------------------------------
/app/api/research/route.ts:
--------------------------------------------------------------------------------
1 | import { NextRequest } from "next/server";
2 |
3 | import {
4 | deepResearch,
5 | generateFeedback,
6 | writeFinalReport,
7 | } from "@/lib/deep-research";
8 | import { createModel, type AIModel } from "@/lib/deep-research/ai/providers";
9 |
10 | export async function POST(req: NextRequest) {
11 | try {
12 | const {
13 | query,
14 | breadth = 3,
15 | depth = 2,
16 | modelId = "o3-mini",
17 | } = await req.json();
18 |
19 | // Retrieve API keys from secure cookies
20 | const openaiKey = req.cookies.get("openai-key")?.value;
21 | const firecrawlKey = req.cookies.get("firecrawl-key")?.value;
22 |
23 | // Add API key validation
24 | if (process.env.NEXT_PUBLIC_ENABLE_API_KEYS === "true") {
25 | if (!openaiKey || !firecrawlKey) {
26 | return Response.json(
27 | { error: "API keys are required but not provided" },
28 | { status: 401 }
29 | );
30 | }
31 | }
32 |
33 | console.log("\n🔬 [RESEARCH ROUTE] === Request Started ===");
34 | console.log("Query:", query);
35 | console.log("Model ID:", modelId);
36 | console.log("Configuration:", {
37 | breadth,
38 | depth,
39 | });
40 | console.log("API Keys Present:", {
41 | OpenAI: openaiKey ? "✅" : "❌",
42 | FireCrawl: firecrawlKey ? "✅" : "❌",
43 | });
44 |
45 | try {
46 | const model = createModel(modelId as AIModel, openaiKey);
47 | console.log("\n🤖 [RESEARCH ROUTE] === Model Created ===");
48 | console.log("Using Model:", modelId);
49 |
50 | const encoder = new TextEncoder();
51 | const stream = new TransformStream();
52 | const writer = stream.writable.getWriter();
53 |
54 | (async () => {
55 | try {
56 | console.log("\n🚀 [RESEARCH ROUTE] === Research Started ===");
57 |
58 | const feedbackQuestions = await generateFeedback({
59 | query,
60 | modelId,
61 | apiKey: openaiKey,
62 | });
63 | await writer.write(
64 | encoder.encode(
65 | `data: ${JSON.stringify({
66 | type: "progress",
67 | step: {
68 | type: "query",
69 | content: "Generated feedback questions",
70 | },
71 | })}\n\n`
72 | )
73 | );
74 |
75 | const { learnings, visitedUrls } = await deepResearch({
76 | query,
77 | breadth,
78 | depth,
79 | model,
80 | firecrawlKey,
81 | onProgress: async (update: string) => {
82 | console.log("\n📊 [RESEARCH ROUTE] Progress Update:", update);
83 | await writer.write(
84 | encoder.encode(
85 | `data: ${JSON.stringify({
86 | type: "progress",
87 | step: {
88 | type: "research",
89 | content: update,
90 | },
91 | })}\n\n`
92 | )
93 | );
94 | },
95 | });
96 |
97 | console.log("\n✅ [RESEARCH ROUTE] === Research Completed ===");
98 | console.log("Learnings Count:", learnings.length);
99 | console.log("Visited URLs Count:", visitedUrls.length);
100 |
101 | const report = await writeFinalReport({
102 | prompt: query,
103 | learnings,
104 | visitedUrls,
105 | model,
106 | });
107 |
108 | await writer.write(
109 | encoder.encode(
110 | `data: ${JSON.stringify({
111 | type: "result",
112 | feedbackQuestions,
113 | learnings,
114 | visitedUrls,
115 | report,
116 | })}\n\n`
117 | )
118 | );
119 | } catch (error) {
120 | console.error("\n❌ [RESEARCH ROUTE] === Research Process Error ===");
121 | console.error("Error:", error);
122 | await writer.write(
123 | encoder.encode(
124 | `data: ${JSON.stringify({
125 | type: "error",
126 | message: "Research failed",
127 | })}\n\n`
128 | )
129 | );
130 | } finally {
131 | await writer.close();
132 | }
133 | })();
134 |
135 | return new Response(stream.readable, {
136 | headers: {
137 | "Content-Type": "text/event-stream",
138 | "Cache-Control": "no-cache",
139 | Connection: "keep-alive",
140 | },
141 | });
142 | } catch (error) {
143 | console.error("\n💥 [RESEARCH ROUTE] === Route Error ===");
144 | console.error("Error:", error);
145 | return Response.json({ error: "Research failed" }, { status: 500 });
146 | }
147 | } catch (error) {
148 | console.error("\n💥 [RESEARCH ROUTE] === Parse Error ===");
149 | console.error("Error:", error);
150 | return Response.json({ error: "Research failed" }, { status: 500 });
151 | }
152 | }
153 |
--------------------------------------------------------------------------------
/app/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fdarkaou/open-deep-research/ce15e95589b55dd6811cc0c3564b4380ae0c7519/app/favicon.ico
--------------------------------------------------------------------------------
/app/globals.css:
--------------------------------------------------------------------------------
1 | /* @import url("https://fonts.googleapis.com/css2?family=Poetsen+One&display=swap"); */
2 |
3 | @tailwind base;
4 | @tailwind components;
5 | @tailwind utilities;
6 |
7 | /* Hide scroll bar */
8 | @layer utilities {
9 | /* Hide scrollbar for Chrome, Safari and Opera */
10 | .no-scrollbar::-webkit-scrollbar {
11 | display: none;
12 | }
13 |
14 | /* Hide scrollbar for IE, Edge and Firefox */
15 | .no-scrollbar {
16 | -ms-overflow-style: none; /* IE and Edge */
17 | scrollbar-width: none; /* Firefox */
18 | }
19 | }
20 |
21 | @layer base {
22 | :root {
23 | --background: 0 0% 100%;
24 | --foreground: 222.2 84% 4.9%;
25 |
26 | --card: 0 0% 100%;
27 | --card-foreground: 222.2 84% 4.9%;
28 |
29 | --popover: 0 0% 100%;
30 | --popover-foreground: 222.2 84% 4.9%;
31 |
32 | --primary: 222.2 47.4% 11.2%;
33 | --primary-foreground: 210 40% 98%;
34 |
35 | --secondary: 210 40% 96.1%;
36 | --secondary-foreground: 222.2 47.4% 11.2%;
37 |
38 | --muted: 210 40% 96.1%;
39 | --muted-foreground: 215.4 16.3% 46.9%;
40 |
41 | --accent: 0 0% 9%; /* #171717 */
42 | --accent-foreground: 0 0% 100%; /* White text */
43 |
44 | --destructive: 0 84.2% 60.2%;
45 | --destructive-foreground: 210 40% 98%;
46 |
47 | --border: 214.3 31.8% 91.4%;
48 | --input: 214.3 31.8% 91.4%;
49 | --ring: 222.2 84% 4.9%;
50 |
51 | --radius: 0.5rem;
52 |
53 | --sidebar-background: 0 0% 98%;
54 |
55 | --sidebar-foreground: 240 5.3% 26.1%;
56 |
57 | --sidebar-primary: 240 5.9% 10%;
58 |
59 | --sidebar-primary-foreground: 0 0% 98%;
60 |
61 | --sidebar-accent: 240 4.8% 95.9%;
62 |
63 | --sidebar-accent-foreground: 240 5.9% 10%;
64 |
65 | --sidebar-border: 220 13% 91%;
66 |
67 | --sidebar-ring: 217.2 91.2% 59.8%;
68 | }
69 |
70 | .dark {
71 | --background: 222.2 84% 4.9%;
72 | --foreground: 210 40% 98%;
73 |
74 | --card: 222.2 84% 4.9%;
75 | --card-foreground: 210 40% 98%;
76 |
77 | --popover: 222.2 84% 4.9%;
78 | --popover-foreground: 210 40% 98%;
79 |
80 | --primary: 210 40% 98%;
81 | --primary-foreground: 222.2 47.4% 11.2%;
82 |
83 | --secondary: 217.2 32.6% 17.5%;
84 | --secondary-foreground: 210 40% 98%;
85 |
86 | --muted: 217.2 32.6% 17.5%;
87 | --muted-foreground: 215 20.2% 65.1%;
88 |
89 | --accent: 217.2 32.6% 17.5%;
90 | --accent-foreground: 210 40% 98%;
91 |
92 | --destructive: 0 62.8% 30.6%;
93 | --destructive-foreground: 210 40% 98%;
94 |
95 | --border: 217.2 32.6% 17.5%;
96 | --input: 217.2 32.6% 17.5%;
97 | --ring: 212.7 26.8% 83.9%;
98 | --sidebar-background: 240 5.9% 10%;
99 | --sidebar-foreground: 240 4.8% 95.9%;
100 | --sidebar-primary: 224.3 76.3% 48%;
101 | --sidebar-primary-foreground: 0 0% 100%;
102 | --sidebar-accent: 240 3.7% 15.9%;
103 | --sidebar-accent-foreground: 240 4.8% 95.9%;
104 | --sidebar-border: 240 3.7% 15.9%;
105 | --sidebar-ring: 217.2 91.2% 59.8%;
106 | }
107 | }
108 |
109 | @layer base {
110 | * {
111 | @apply border-border;
112 | }
113 | body {
114 | @apply bg-background text-foreground;
115 | }
116 | }
117 |
--------------------------------------------------------------------------------
/app/layout.tsx:
--------------------------------------------------------------------------------
1 | import "./globals.css";
2 |
3 | import type { Metadata } from "next";
4 | import { Inter } from "next/font/google";
5 |
6 | const inter = Inter({ subsets: ["latin"] });
7 |
8 | export const metadata: Metadata = {
9 | title: "Open Deep Research - Open Source AI Research Assistant",
10 | description:
11 | "Open Deep Research is a free, open-source alternative to OpenAI's Deep Research, Google's Gemini, and Anthropic's Claude Research. Powered by o3-mini, this advanced AI research assistant delivers comprehensive market analysis, competitive intelligence, and academic research capabilities. Experience enterprise-grade research automation with features like recursive exploration, multi-source validation, and structured insights extraction. Perfect for startups, researchers, and businesses seeking a transparent, customizable, and powerful research solution without vendor lock-in. Dive deep into any topic with our state-of-the-art natural language processing and automated insight generation.",
12 | };
13 | export default function RootLayout({
14 | children,
15 | }: {
16 | children: React.ReactNode;
17 | }) {
18 | return (
19 |
20 |
{children}
21 |
22 | );
23 | }
24 |
--------------------------------------------------------------------------------
/app/page.tsx:
--------------------------------------------------------------------------------
1 | 'use client';
2 |
3 | import { Chat } from '@/components/chat/chat';
4 | import { Header } from '@/components/chat/site-header';
5 |
6 | export default function ResearchPage() {
7 | return (
8 |
9 |
10 |
11 |
12 | );
13 | }
14 |
--------------------------------------------------------------------------------
/components.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://ui.shadcn.com/schema.json",
3 | "style": "default",
4 | "rsc": true,
5 | "tsx": true,
6 | "tailwind": {
7 | "config": "tailwind.config.ts",
8 | "css": "app/globals.css",
9 | "baseColor": "slate",
10 | "cssVariables": true,
11 | "prefix": ""
12 | },
13 | "aliases": {
14 | "components": "@/components",
15 | "utils": "@/lib/utils"
16 | }
17 | }
--------------------------------------------------------------------------------
/components/chat/api-key-dialog.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import { useState } from "react";
4 | import Image from "next/image";
5 | import {
6 | LockIcon,
7 | KeyIcon,
8 | Loader2Icon,
9 | ShieldCheckIcon,
10 | GithubIcon,
11 | } from "lucide-react";
12 |
13 | import {
14 | Dialog,
15 | DialogContent,
16 | DialogHeader,
17 | DialogTitle,
18 | DialogDescription,
19 | DialogFooter,
20 | } from "@/components/ui/dialog";
21 | import { Input } from "@/components/ui/input";
22 | import { Button } from "@/components/ui/button";
23 |
24 | interface ApiKeyDialogProps {
25 | show: boolean;
26 | onClose: (open: boolean) => void;
27 | onSuccess: () => void;
28 | }
29 |
30 | export function ApiKeyDialog({ show, onClose, onSuccess }: ApiKeyDialogProps) {
31 | const [openaiKey, setOpenaiKey] = useState("");
32 | const [firecrawlKey, setFirecrawlKey] = useState("");
33 | const [loading, setLoading] = useState(false);
34 |
35 | const handleApiKeySubmit = async () => {
36 | if (!openaiKey || !firecrawlKey) return;
37 | setLoading(true);
38 | const res = await fetch("/api/keys", {
39 | method: "POST",
40 | headers: { "Content-Type": "application/json" },
41 | body: JSON.stringify({ openaiKey, firecrawlKey }),
42 | });
43 | if (res.ok) {
44 | onClose(false);
45 | onSuccess();
46 | }
47 | setLoading(false);
48 | };
49 |
50 | return (
51 |
52 |
53 |
54 |
55 | Open Deep Research
56 |
57 |
58 |
59 |
60 |
61 | Secure API Key Setup
62 |
63 |
64 | To use Deep Research, you'll need to provide your API keys.
65 | These keys are stored securely using HTTP-only cookies and are
66 | never exposed to client-side JavaScript.
67 |
68 |
69 |
70 |
71 | Self-hosting option: {" "}
72 | You can clone the repository and host this application on
73 | your own infrastructure. This gives you complete control
74 | over your data and API key management.
75 |
76 |
82 | View self-hosting instructions
83 |
89 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
104 |
111 | OpenAI API Key
112 |
113 |
114 | Powers our advanced language models for research analysis and
115 | synthesis.
116 |
122 | Get your OpenAI key →
123 |
124 |
125 |
126 |
127 |
144 |
145 |
146 |
147 |
148 |
149 |
150 |
151 | OpenAI API Key
152 |
153 |
154 |
setOpenaiKey(e.target.value)}
158 | placeholder="sk-..."
159 | className="pr-10 font-mono text-sm bg-white/50 border-zinc-200 focus:border-zinc-400 focus:ring-zinc-400 h-9 sm:h-10"
160 | />
161 |
162 |
163 |
164 |
165 |
166 | Starts with 'sk-' and contains about 50 characters
167 |
168 |
169 |
170 |
171 |
172 | FireCrawl API Key
173 |
174 |
175 |
setFirecrawlKey(e.target.value)}
179 | placeholder="fc-..."
180 | className="pr-10 font-mono text-sm bg-white/50 border-zinc-200 focus:border-zinc-400 focus:ring-zinc-400 h-9 sm:h-10"
181 | />
182 |
183 |
184 |
185 |
186 |
187 | Usually starts with 'fc-' for production keys
188 |
189 |
190 |
191 |
192 |
193 |
194 |
195 | Your keys are stored securely
196 |
197 |
208 |
214 | {loading ? (
215 |
216 |
217 | Setting up...
218 |
219 | ) : (
220 | "Start Researching"
221 | )}
222 |
223 |
224 |
225 |
226 | );
227 | }
228 |
--------------------------------------------------------------------------------
/components/chat/chat.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import { useState, useEffect } from "react";
4 | import { Message } from "ai";
5 | import { motion } from "framer-motion";
6 | import { BrainCircuitIcon, GithubIcon, PanelRightOpen } from "lucide-react";
7 |
8 | import { useScrollToBottom } from "@/lib/hooks/use-scroll-to-bottom";
9 |
10 | import DownloadTxtButton from "./download-txt";
11 | import { MultimodalInput } from "./input";
12 | import { PreviewMessage, ProgressStep } from "./message";
13 | import { ResearchProgress } from "./research-progress";
14 |
15 | export function Chat({
16 | id,
17 | initialMessages,
18 | }: {
19 | id: string;
20 | initialMessages: Message[];
21 | }) {
22 | const [messages, setMessages] = useState(initialMessages);
23 | const [isLoading, setIsLoading] = useState(false);
24 | const [progress, setProgress] = useState([]);
25 | const [containerRef, messagesEndRef] = useScrollToBottom();
26 |
27 | // New state to store the final report text
28 | const [finalReport, setFinalReport] = useState(null);
29 |
30 | // States for interactive feedback workflow
31 | const [stage, setStage] = useState<"initial" | "feedback" | "researching">(
32 | "initial"
33 | );
34 | const [initialQuery, setInitialQuery] = useState("");
35 |
36 | // Add state for mobile progress panel visibility
37 | const [showProgress, setShowProgress] = useState(false);
38 |
39 | // New state to track if we're on mobile (using 768px as breakpoint for md)
40 | const [isMobile, setIsMobile] = useState(false);
41 | useEffect(() => {
42 | const handleResize = () => {
43 | setIsMobile(window.innerWidth < 768);
44 | };
45 | handleResize();
46 | window.addEventListener("resize", handleResize);
47 | return () => window.removeEventListener("resize", handleResize);
48 | }, []);
49 |
50 | // Update the condition to only be true when there are actual research steps
51 | const hasStartedResearch =
52 | progress.filter(
53 | (step) =>
54 | // Only count non-report steps or initial report steps
55 | step.type !== "report" ||
56 | step.content.includes("Generating") ||
57 | step.content.includes("Synthesizing")
58 | ).length > 0;
59 |
60 | // Helper function to call the research endpoint
61 | const sendResearchQuery = async (
62 | query: string,
63 | config: { breadth: number; depth: number; modelId: string }
64 | ) => {
65 | try {
66 | setIsLoading(true);
67 | setProgress([]);
68 | // Inform the user that research has started
69 | setMessages((prev) => [
70 | ...prev,
71 | {
72 | id: Date.now().toString(),
73 | role: "assistant",
74 | content: "Starting in-depth research based on your inputs...",
75 | },
76 | ]);
77 |
78 | const response = await fetch("/api/research", {
79 | method: "POST",
80 | headers: {
81 | "Content-Type": "application/json",
82 | },
83 | body: JSON.stringify({
84 | query,
85 | breadth: config.breadth,
86 | depth: config.depth,
87 | modelId: config.modelId,
88 | }),
89 | });
90 |
91 | const reader = response.body?.getReader();
92 | if (!reader) throw new Error("No reader available");
93 |
94 | const textDecoder = new TextDecoder();
95 | let buffer = "";
96 | const reportParts: string[] = [];
97 |
98 | while (true) {
99 | const { done, value } = await reader.read();
100 | if (done) break;
101 |
102 | buffer += textDecoder.decode(value, { stream: true });
103 | const parts = buffer.split("\n\n");
104 | buffer = parts.pop() || "";
105 |
106 | for (const part of parts) {
107 | if (part.startsWith("data: ")) {
108 | const jsonStr = part.substring(6).trim();
109 | if (!jsonStr) continue;
110 | try {
111 | const event = JSON.parse(jsonStr);
112 | if (event.type === "progress") {
113 | if (event.step.type !== "report") {
114 | // Check for duplicates before adding this progress step.
115 | setProgress((prev) => {
116 | if (
117 | prev.length > 0 &&
118 | prev[prev.length - 1].content === event.step.content
119 | ) {
120 | return prev;
121 | }
122 | return [...prev, event.step];
123 | });
124 | }
125 | } else if (event.type === "result") {
126 | // Save the final report so we can download it later
127 | setFinalReport(event.report);
128 | setMessages((prev) => [
129 | ...prev,
130 | {
131 | id: Date.now().toString(),
132 | role: "assistant",
133 | content: event.report,
134 | },
135 | ]);
136 | } else if (event.type === "report_part") {
137 | reportParts.push(event.content);
138 | }
139 | } catch (e) {
140 | console.error("Error parsing event:", e);
141 | }
142 | }
143 | }
144 | }
145 |
146 | if (reportParts.length > 0) {
147 | // In case the report was sent in parts
148 | const fullReport = reportParts.join("\n");
149 | setFinalReport(fullReport);
150 | setMessages((prev) => [
151 | ...prev,
152 | {
153 | id: Date.now().toString(),
154 | role: "assistant",
155 | content: fullReport,
156 | },
157 | ]);
158 | }
159 | } catch (error) {
160 | console.error("Research error:", error);
161 | setMessages((prev) => [
162 | ...prev,
163 | {
164 | id: Date.now().toString(),
165 | role: "assistant",
166 | content: "Sorry, there was an error conducting the research.",
167 | },
168 | ]);
169 | } finally {
170 | setIsLoading(false);
171 | }
172 | };
173 |
  // Entry point for every user submission. The chat acts as a small state
  // machine: "initial" -> ask the backend for clarifying follow-up questions,
  // "feedback" -> combine the answers with the original query and run the
  // actual research via sendResearchQuery.
  const handleSubmit = async (
    userInput: string,
    config: { breadth: number; depth: number; modelId: string }
  ) => {
    // Ignore empty input and submissions made while a request is in flight.
    if (!userInput.trim() || isLoading) return;

    // Add user message immediately
    setMessages((prev) => [
      ...prev,
      {
        id: Date.now().toString(),
        role: "user",
        content: userInput,
      },
    ]);

    setIsLoading(true);

    if (stage === "initial") {
      // Add thinking message only for initial query
      setMessages((prev) => [
        ...prev,
        {
          // Sentinel id; this placeholder is filtered back out below.
          id: "thinking",
          role: "assistant",
          content: "Thinking...",
        },
      ]);

      // Handle the user's initial query
      setInitialQuery(userInput);

      try {
        // Request up to three clarifying follow-up questions from the server.
        const response = await fetch("/api/feedback", {
          method: "POST",
          headers: { "Content-Type": "application/json" },
          body: JSON.stringify({
            query: userInput,
            numQuestions: 3,
            modelId: config.modelId,
          }),
        });
        const data = await response.json();
        const questions: string[] = data.questions || [];
        setMessages((prev) => {
          // Drop the "Thinking..." placeholder before appending the result.
          const filtered = prev.filter((m) => m.id !== "thinking");
          if (questions.length > 0) {
            // Number the questions and separate them with blank lines so the
            // markdown renderer shows them as a readable list.
            const formattedQuestions = questions
              .map((q, index) => `${index + 1}. ${q}`)
              .join("\n\n");
            return [
              ...filtered,
              {
                id: Date.now().toString(),
                role: "assistant",
                content: `Please answer the following follow-up questions to help clarify your research:\n\n${formattedQuestions}`,
              },
            ];
          }
          // No questions returned: just remove the placeholder.
          return filtered;
        });
        setStage("feedback");
      } catch (error) {
        console.error("Feedback generation error:", error);
        setMessages((prev) => [
          ...prev.filter((m) => m.id !== "thinking"),
          {
            id: Date.now().toString(),
            role: "assistant",
            content: "Sorry, there was an error generating feedback questions.",
          },
        ]);
      } finally {
        setIsLoading(false);
      }
    } else if (stage === "feedback") {
      // In feedback stage, combine the initial query and follow-up answers
      const combined = `Initial Query: ${initialQuery}\nFollow-up Answers:\n${userInput}`;
      setStage("researching");
      try {
        await sendResearchQuery(combined, config);
      } finally {
        setIsLoading(false);
        // Reset the stage so further messages will be processed
        setStage("initial");
        // Inform the user that a new research session can be started
        setMessages((prev) => [
          ...prev,
          {
            id: Date.now().toString(),
            role: "assistant",
            content:
              "Research session complete. You can now ask another question to begin a new research session.",
          },
        ]);
      }
    }
  };
272 |
273 | return (
274 |
275 | {/* Main container with dynamic width */}
276 |
287 | {/* Messages Container */}
288 |
294 | {/* Welcome Message (if no research started and no messages) */}
295 | {!hasStartedResearch && messages.length === 0 && (
296 |
297 |
313 |
325 |
338 |
339 |
340 |
341 |
342 |
349 | Open Deep Research
350 |
351 |
352 |
358 | An open source alternative to OpenAI and Gemini's deep
359 | research capabilities. Ask any question to generate a
360 | comprehensive report.
361 |
362 |
363 |
369 |
379 |
380 | View source code
381 |
382 |
383 |
384 |
385 |
386 | )}
387 |
388 | {/* Messages */}
389 |
390 | {messages.map((message) => (
391 |
392 | ))}
393 |
394 | {finalReport && (
395 |
396 |
397 |
398 | )}
399 |
400 |
401 |
402 | {/* Input - Fixed to bottom */}
403 |
418 |
419 |
420 | {/* Research Progress Panel */}
421 |
434 |
435 |
436 |
437 | {/* Mobile Toggle Button - Only show when research has started */}
438 | {hasStartedResearch && (
439 |
setShowProgress(!showProgress)}
441 | className={`
442 | md:hidden
443 | fixed
444 | bottom-24
445 | right-4
446 | z-50
447 | p-3
448 | bg-primary
449 | text-primary-foreground
450 | rounded-full
451 | shadow-lg
452 | transition-transform
453 | ${showProgress ? "rotate-180" : ""}
454 | `}
455 | aria-label="Toggle research progress"
456 | >
457 |
458 |
459 | )}
460 |
461 | );
462 | }
463 |
--------------------------------------------------------------------------------
/components/chat/download-txt.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import { DownloadIcon } from 'lucide-react';
3 |
4 | interface DownloadTxtButtonProps {
5 | reportText: string;
6 | fileName?: string;
7 | }
8 |
9 | const DownloadTxtButton: React.FC = ({
10 | reportText,
11 | fileName = 'research_report.txt',
12 | }) => {
13 | const handleDownload = () => {
14 | // Create a blob from the report text content.
15 | const blob = new Blob([reportText], { type: 'text/plain;charset=utf-8' });
16 | // Create a temporary URL for the blob.
17 | const url = window.URL.createObjectURL(blob);
18 | // Create a temporary anchor element.
19 | const link = document.createElement('a');
20 | link.href = url;
21 | link.download = fileName;
22 | // Append the link, trigger click, remove it, and revoke the URL.
23 | document.body.appendChild(link);
24 | link.click();
25 | document.body.removeChild(link);
26 | window.URL.revokeObjectURL(url);
27 | };
28 |
29 | return (
30 |
60 |
61 |
62 | Download Report
63 |
64 | );
65 | };
66 |
67 | export default DownloadTxtButton;
68 |
--------------------------------------------------------------------------------
/components/chat/input.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import { useEffect, useRef, useState } from "react";
4 | import Image from "next/image";
5 | import cx from "classnames";
6 | import { motion } from "framer-motion";
7 | import {
8 | ArrowUpIcon,
9 | CheckCircleIcon,
10 | ChevronDown,
11 | DownloadIcon,
12 | Settings2,
13 | XCircleIcon,
14 | } from "lucide-react";
15 |
16 | import { Button } from "@/components/ui/button";
17 | import { Slider } from "@/components/ui/slider";
18 | import {
19 | availableModels,
20 | type AIModelDisplayInfo,
21 | } from "@/lib/deep-research/ai/providers";
22 | import { ApiKeyDialog } from "@/components/chat/api-key-dialog";
23 |
24 | interface MultimodalInputProps {
25 | onSubmit: (
26 | input: string,
27 | config: {
28 | breadth: number;
29 | depth: number;
30 | modelId: string;
31 | }
32 | ) => void;
33 | isLoading: boolean;
34 | placeholder?: string;
35 | isAuthenticated?: boolean;
36 | onDownload?: () => void;
37 | canDownload?: boolean;
38 | }
39 |
40 | export function MultimodalInput({
41 | onSubmit,
42 | isLoading,
43 | placeholder = "What would you like to research?",
44 | onDownload,
45 | canDownload = false,
46 | }: MultimodalInputProps) {
47 | const [input, setInput] = useState("");
48 | const [breadth, setBreadth] = useState(4);
49 | const [depth, setDepth] = useState(2);
50 | const [selectedModel, setSelectedModel] = useState(
51 | availableModels.find((model) => model.id === "o3-mini") ||
52 | availableModels[0]
53 | );
54 | const [isModelDropdownOpen, setIsModelDropdownOpen] = useState(false);
55 | const [showApiKeyPrompt, setShowApiKeyPrompt] = useState(false);
56 | const [hasKeys, setHasKeys] = useState(false);
57 | const textareaRef = useRef(null);
58 |
59 | // Read the feature flag from environment variables.
60 | const enableApiKeys = process.env.NEXT_PUBLIC_ENABLE_API_KEYS === "true";
61 | // When API keys are disabled via env flag, always consider keys as present.
62 | const effectiveHasKeys = enableApiKeys ? hasKeys : true;
63 |
  // Check for keys using the consolidated endpoint
  // On mount (and if the env flag changes) ask the server whether API keys
  // are already stored, and open the key-entry dialog when they are missing
  // and the API-key feature is enabled.
  useEffect(() => {
    const checkKeys = async () => {
      const res = await fetch("/api/keys");
      const data = await res.json();
      setHasKeys(data.keysPresent);
      if (!data.keysPresent && enableApiKeys) {
        setShowApiKeyPrompt(true);
      } else {
        setShowApiKeyPrompt(false);
      }
    };
    checkKeys();
  }, [enableApiKeys]);
78 |
79 | // New: Remove API keys handler
  // New: Remove API keys handler
  // Asks the user to confirm, then deletes the stored API keys through the
  // consolidated /api/keys endpoint and clears the local "has keys" flag.
  const handleRemoveKeys = async () => {
    if (!window.confirm("Are you sure you want to remove your API keys?"))
      return;
    try {
      const res = await fetch("/api/keys", {
        method: "DELETE",
      });
      if (res.ok) {
        // Server confirmed removal; mirror that in local state.
        setHasKeys(false);
      }
    } catch (error) {
      // Network-level failure: keep current state, log for debugging.
      console.error("Error removing keys:", error);
    }
  };
94 |
  // Validates the current input and forwards it, together with the chosen
  // research settings, to the parent-supplied onSubmit handler.
  const handleSubmit = () => {
    // Ignore empty input and double-submissions while a request is running.
    if (!input.trim() || isLoading) return;
    if (enableApiKeys && !effectiveHasKeys) {
      // Re-open the API key modal if keys are missing
      setShowApiKeyPrompt(true);
      return;
    }
    onSubmit(input, {
      breadth,
      depth,
      modelId: selectedModel.id,
    });
    // Clear the textarea after a successful submit.
    setInput("");
  };
109 |
  // Auto-grow the textarea to fit its content whenever the input changes.
  useEffect(() => {
    if (textareaRef.current) {
      // Reset height first so the box also shrinks when text is deleted.
      textareaRef.current.style.height = "inherit";
      textareaRef.current.style.height = `${textareaRef.current.scrollHeight}px`;
    }
  }, [input]);
116 |
117 | const DownloadButton = () => (
118 |
124 |
125 | Download Report
126 |
127 | );
128 |
129 | return (
130 |
131 | {/* Conditionally render API key dialog only if enabled */}
132 | {enableApiKeys && (
133 |
{
137 | setShowApiKeyPrompt(false);
138 | setHasKeys(true);
139 | }}
140 | />
141 | )}
142 |
143 |
423 | );
424 | }
425 |
--------------------------------------------------------------------------------
/components/chat/markdown.tsx:
--------------------------------------------------------------------------------
1 | import React, { memo } from 'react';
2 | import Link from 'next/link';
3 | import ReactMarkdown from 'react-markdown';
4 | import remarkGfm from 'remark-gfm';
5 |
6 | const NonMemoizedMarkdown = ({ children }: { children: string }) => {
7 | const components = {
8 | // Root wrapper
9 | root: ({ children }: any) => (
10 |
11 | {children}
12 |
13 | ),
14 |
15 | // Code blocks remain the same size
16 | code: ({ node, inline, className, children, ...props }: any) => {
17 | const match = /language-(\w+)/.exec(className || '');
18 | return !inline && match ? (
19 |
34 | {children}
35 |
36 | ) : (
37 |
48 | {children}
49 |
50 | );
51 | },
52 |
53 | // Headings keep their larger sizes
54 | h1: ({ node, children, ...props }: any) => {
55 | return (
56 |
68 | {children}
69 |
70 | );
71 | },
72 |
73 | h2: ({ node, children, ...props }: any) => {
74 | return (
75 |
85 | {children}
86 |
87 | );
88 | },
89 |
90 | h3: ({ node, children, ...props }: any) => {
91 | return (
92 |
102 | {children}
103 |
104 | );
105 | },
106 |
107 | // All regular text elements use text-sm
108 | p: ({ node, children, ...props }: any) => {
109 | return (
110 |
119 | {children}
120 |
121 | );
122 | },
123 |
124 | ul: ({ node, children, ...props }: any) => {
125 | return (
126 |
140 | );
141 | },
142 |
143 | ol: ({ node, children, ...props }: any) => {
144 | return (
145 |
157 | {children}
158 |
159 | );
160 | },
161 |
162 | li: ({ node, children, ...props }: any) => {
163 | return (
164 |
172 | {children}
173 |
174 | );
175 | },
176 |
177 | // Table elements use text-sm
178 | table: ({ node, children, ...props }: any) => {
179 | return (
180 |
193 | );
194 | },
195 |
196 | th: ({ node, children, ...props }: any) => {
197 | return (
198 |
211 | {children}
212 |
213 | );
214 | },
215 |
216 | td: ({ node, children, ...props }: any) => {
217 | return (
218 |
228 | {children}
229 |
230 | );
231 | },
232 |
233 | // Blockquotes use text-sm
234 | blockquote: ({ node, children, ...props }: any) => {
235 | return (
236 |
248 | {children}
249 |
250 | );
251 | },
252 |
253 | // Links
254 | a: ({ node, children, ...props }: any) => {
255 | return (
256 |
270 | {children}
271 |
272 | );
273 | },
274 |
275 | // Bold text
276 | strong: ({ node, children, ...props }: any) => {
277 | return (
278 |
285 | {children}
286 |
287 | );
288 | },
289 | };
290 |
291 | return (
292 |
293 |
294 | {children}
295 |
296 |
297 | );
298 | };
299 |
300 | export const Markdown = memo(
301 | NonMemoizedMarkdown,
302 | (prevProps, nextProps) => prevProps.children === nextProps.children,
303 | );
304 |
--------------------------------------------------------------------------------
/components/chat/message.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import { Message } from "ai";
4 | import { motion } from "framer-motion";
5 | import {
6 | BookOpenIcon,
7 | BrainCircuitIcon,
8 | GavelIcon,
9 | SearchIcon,
10 | } from "lucide-react";
11 |
12 | import { Markdown } from "./markdown";
13 |
14 | export type ProgressStep = {
15 | type: "query" | "research" | "learning" | "report";
16 | content: string;
17 | queries?: Array<{
18 | query: string;
19 | researchGoal: string;
20 | }>;
21 | };
22 |
23 | export function PreviewMessage({ message }: { message: Message }) {
24 | // Helper function to format follow-up questions into markdown
25 | const formatFollowUpQuestions = (content: string) => {
26 | if (content.includes("follow-up questions")) {
27 | // Split the content into introduction and questions
28 | const [intro, ...questions] = content.split("\n").filter(Boolean);
29 |
30 | // Format as markdown
31 | return `${intro}\n\n${questions
32 | .map((q) => {
33 | // If the line starts with a number, format it as a markdown list item
34 | if (/^\d+\./.test(q)) {
35 | return q.trim();
36 | }
37 | return q;
38 | })
39 | .join("\n\n")}`;
40 | }
41 | return content;
42 | };
43 |
44 | return (
45 |
50 |
57 |
64 | {message.role === "assistant" ? (
65 |
66 | {formatFollowUpQuestions(message.content)}
67 |
68 | ) : (
69 |
{message.content}
70 | )}
71 |
72 |
73 |
74 | );
75 | }
76 |
77 | export function ResearchProgress({
78 | progress,
79 | isLoading,
80 | }: {
81 | progress: ProgressStep[];
82 | isLoading: boolean;
83 | }) {
84 | // Filter out individual report word updates
85 | const filteredProgress = progress.filter((step) => {
86 | if (step.type === "report") {
87 | // Only show the initial "Generating report" step
88 | return (
89 | step.content.includes("Generating") ||
90 | step.content.includes("Synthesizing")
91 | );
92 | }
93 | return true;
94 | });
95 |
96 | if (!isLoading && filteredProgress.length === 0) {
97 | return (
98 |
99 |
100 |
101 |
102 | Begin your research journey
103 |
104 |
105 |
106 | );
107 | }
108 |
109 | return (
110 |
111 | {filteredProgress.map((step, index) => (
112 |
118 |
119 | {step.type === "query" && }
120 | {step.type === "research" && }
121 | {step.type === "learning" && }
122 | {step.type === "report" && }
123 |
124 |
125 |
{step.type}
126 |
{step.content}
127 |
128 | {step.queries && (
129 |
130 | {step.queries.map((query, idx) => (
131 |
138 | {query.query}
139 |
140 | {query.researchGoal}
141 |
142 |
143 | ))}
144 |
145 | )}
146 |
147 |
148 | ))}
149 |
150 | );
151 | }
152 |
--------------------------------------------------------------------------------
/components/chat/research-progress.tsx:
--------------------------------------------------------------------------------
1 | 'use client';
2 |
3 | import { useEffect, useRef, useState } from 'react';
4 | import { AnimatePresence, motion } from 'framer-motion';
5 | import {
6 | BrainCircuitIcon,
7 | FileSearchIcon,
8 | Loader2Icon,
9 | PlayIcon,
10 | SearchIcon,
11 | SparklesIcon,
12 | } from 'lucide-react';
13 |
14 | import { ProgressStep } from './message';
15 |
16 | // Simplified configuration with a more minimal color palette
17 | const actionConfig = {
18 | 'Generating up to': {
19 | icon: ,
20 | text: 'Generating',
21 | },
22 | Created: {
23 | icon: ,
24 | text: 'Created',
25 | },
26 | Researching: {
27 | icon: ,
28 | text: 'Researching',
29 | },
30 | Found: {
31 | icon: ,
32 | text: 'Found',
33 | },
34 | Ran: {
35 | icon: ,
36 | text: 'Processing',
37 | },
38 | Generated: {
39 | icon: ,
40 | text: 'Generated',
41 | },
42 | };
43 |
44 | export function ResearchProgress({
45 | progress,
46 | isLoading,
47 | }: {
48 | progress: ProgressStep[];
49 | isLoading: boolean;
50 | }) {
51 | const containerRef = useRef(null);
52 | const [userHasScrolled, setUserHasScrolled] = useState(false);
53 |
54 | // Handle auto-scrolling
55 | useEffect(() => {
56 | const container = containerRef.current;
57 | if (!container || userHasScrolled) return;
58 |
59 | container.scrollTop = container.scrollHeight;
60 | }, [progress, userHasScrolled]);
61 |
62 | // Handle scroll events
  // Handle scroll events
  // Marks that the user has manually scrolled away from the bottom so the
  // auto-scroll effect stops fighting them; re-enables auto-scroll once the
  // user returns to within 10px of the bottom.
  const handleScroll = () => {
    const container = containerRef.current;
    if (!container) return;

    const isAtBottom =
      Math.abs(
        container.scrollHeight - container.scrollTop - container.clientHeight,
      ) < 10;

    setUserHasScrolled(!isAtBottom);
  };
74 |
75 | const getConfig = (content: string) => {
76 | const firstWord = content.split('\n')[0].split(' ')[0];
77 | for (const [key, config] of Object.entries(actionConfig)) {
78 | if (firstWord.startsWith(key)) {
79 | return config;
80 | }
81 | }
82 | return actionConfig['Researching'];
83 | };
84 |
85 | // Remove the empty state UI since it's now in the main chat
86 | if (!isLoading && progress.length === 0) {
87 | return null;
88 | }
89 |
90 | return (
91 |
96 |
97 | {progress.map((step, index) => {
98 | const [title, ...details] = step.content.split('\n');
99 | const config = getConfig(title);
100 |
101 | return (
102 |
112 |
136 |
137 |
138 |
139 | {config.icon}
140 |
141 |
142 |
143 |
144 |
145 | {config.text}
146 | {' '}
147 |
148 | {title.split(' ').slice(1).join(' ')}
149 |
150 |
151 |
152 | {details.length > 0 && (
153 |
154 | {details.join('\n')}
155 |
156 | )}
157 |
158 | {step.queries && (
159 |
160 | {step.queries.map((query, idx) => (
161 |
183 |
184 | {query.query}
185 |
186 |
187 | {query.researchGoal}
188 |
189 |
190 | ))}
191 |
192 | )}
193 |
194 |
195 |
196 | );
197 | })}
198 |
199 |
200 | );
201 | }
202 |
--------------------------------------------------------------------------------
/components/chat/site-header.tsx:
--------------------------------------------------------------------------------
1 | import Image from "next/image";
2 | import Link from "next/link";
3 |
4 | export function Header() {
5 | return (
6 | <>
7 |
8 |
9 |
14 |
21 |
22 |
23 |
24 | >
25 | );
26 | }
27 |
--------------------------------------------------------------------------------
/components/ui/button.tsx:
--------------------------------------------------------------------------------
1 | import * as React from "react"
2 | import { Slot } from "@radix-ui/react-slot"
3 | import { cva, type VariantProps } from "class-variance-authority"
4 |
5 | import { cn } from "@/lib/utils"
6 |
7 | const buttonVariants = cva(
8 | "inline-flex items-center justify-center whitespace-nowrap rounded-md text-sm font-medium ring-offset-background transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50",
9 | {
10 | variants: {
11 | variant: {
12 | default: "bg-primary text-primary-foreground hover:bg-primary/90",
13 | destructive:
14 | "bg-destructive text-destructive-foreground hover:bg-destructive/90",
15 | outline:
16 | "border border-input bg-background hover:bg-accent hover:text-accent-foreground",
17 | secondary:
18 | "bg-secondary text-secondary-foreground hover:bg-secondary/80",
19 | ghost: "hover:bg-accent hover:text-accent-foreground",
20 | link: "text-primary underline-offset-4 hover:underline",
21 | },
22 | size: {
23 | default: "h-10 px-4 py-2",
24 | sm: "h-9 rounded-md px-3",
25 | lg: "h-11 rounded-md px-8",
26 | icon: "h-10 w-10",
27 | },
28 | },
29 | defaultVariants: {
30 | variant: "default",
31 | size: "default",
32 | },
33 | }
34 | )
35 |
36 | export interface ButtonProps
37 | extends React.ButtonHTMLAttributes,
38 | VariantProps {
39 | asChild?: boolean
40 | }
41 |
42 | const Button = React.forwardRef(
43 | ({ className, variant, size, asChild = false, ...props }, ref) => {
44 | const Comp = asChild ? Slot : "button"
45 | return (
46 |
51 | )
52 | }
53 | )
54 | Button.displayName = "Button"
55 |
56 | export { Button, buttonVariants }
57 |
--------------------------------------------------------------------------------
/components/ui/dialog.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import * as React from "react";
4 | import * as DialogPrimitive from "@radix-ui/react-dialog";
5 | import { X } from "lucide-react";
6 |
7 | import { cn } from "@/lib/utils";
8 |
9 | const Dialog = DialogPrimitive.Root;
10 |
11 | const DialogTrigger = DialogPrimitive.Trigger;
12 |
13 | const DialogPortal = DialogPrimitive.Portal;
14 |
15 | const DialogClose = DialogPrimitive.Close;
16 |
17 | const DialogOverlay = React.forwardRef<
18 | React.ElementRef,
19 | React.ComponentPropsWithoutRef
20 | >(({ className, ...props }, ref) => (
21 |
29 | ));
30 | DialogOverlay.displayName = DialogPrimitive.Overlay.displayName;
31 |
32 | const DialogContent = React.forwardRef<
33 | React.ElementRef,
34 | React.ComponentPropsWithoutRef
35 | >(({ className, children, ...props }, ref) => (
36 |
37 |
38 |
46 | {children}
47 |
48 |
49 | Close
50 |
51 |
52 |
53 | ));
54 | DialogContent.displayName = DialogPrimitive.Content.displayName;
55 |
56 | const DialogHeader = ({
57 | className,
58 | ...props
59 | }: React.HTMLAttributes) => (
60 |
67 | );
68 | DialogHeader.displayName = "DialogHeader";
69 |
70 | const DialogFooter = ({
71 | className,
72 | ...props
73 | }: React.HTMLAttributes) => (
74 |
81 | );
82 | DialogFooter.displayName = "DialogFooter";
83 |
84 | const DialogTitle = React.forwardRef<
85 | React.ElementRef,
86 | React.ComponentPropsWithoutRef
87 | >(({ className, ...props }, ref) => (
88 |
96 | ));
97 | DialogTitle.displayName = DialogPrimitive.Title.displayName;
98 |
99 | const DialogDescription = React.forwardRef<
100 | React.ElementRef,
101 | React.ComponentPropsWithoutRef
102 | >(({ className, ...props }, ref) => (
103 |
108 | ));
109 | DialogDescription.displayName = DialogPrimitive.Description.displayName;
110 |
111 | export {
112 | Dialog,
113 | DialogPortal,
114 | DialogOverlay,
115 | DialogClose,
116 | DialogTrigger,
117 | DialogContent,
118 | DialogHeader,
119 | DialogFooter,
120 | DialogTitle,
121 | DialogDescription,
122 | };
123 |
--------------------------------------------------------------------------------
/components/ui/input.tsx:
--------------------------------------------------------------------------------
1 | import * as React from "react"
2 |
3 | import { cn } from "@/lib/utils"
4 |
5 | export interface InputProps
6 | extends React.InputHTMLAttributes {}
7 |
8 | const Input = React.forwardRef(
9 | ({ className, type, ...props }, ref) => {
10 | return (
11 |
20 | )
21 | }
22 | )
23 | Input.displayName = "Input"
24 |
25 | export { Input }
26 |
--------------------------------------------------------------------------------
/components/ui/slider.tsx:
--------------------------------------------------------------------------------
1 | 'use client';
2 |
3 | import * as React from 'react';
4 | import * as SliderPrimitive from '@radix-ui/react-slider';
5 |
6 | import { cn } from '@/lib/utils';
7 |
8 | const Slider = React.forwardRef<
9 | React.ElementRef,
10 | React.ComponentPropsWithoutRef
11 | >(({ className, ...props }, ref) => (
12 |
20 |
21 |
22 |
23 |
24 |
25 | ));
26 | Slider.displayName = SliderPrimitive.Root.displayName;
27 |
28 | export { Slider };
29 |
--------------------------------------------------------------------------------
/components/ui/tooltip.tsx:
--------------------------------------------------------------------------------
1 | 'use client';
2 |
3 | import * as React from 'react';
4 | import * as TooltipPrimitive from '@radix-ui/react-tooltip';
5 |
6 | import { cn } from '@/lib/utils';
7 |
8 | const TooltipProvider = TooltipPrimitive.Provider;
9 |
10 | const Tooltip = TooltipPrimitive.Root;
11 |
12 | const TooltipTrigger = TooltipPrimitive.Trigger;
13 |
14 | const TooltipContent = React.forwardRef<
15 | React.ElementRef,
16 | React.ComponentPropsWithoutRef
17 | >(({ className, sideOffset = 4, ...props }, ref) => (
18 |
19 |
28 |
29 | ));
30 | TooltipContent.displayName = TooltipPrimitive.Content.displayName;
31 |
32 | export { Tooltip, TooltipTrigger, TooltipContent, TooltipProvider };
33 |
--------------------------------------------------------------------------------
/lib/deep-research/ai/providers.ts:
--------------------------------------------------------------------------------
1 | import { createOpenAI } from '@ai-sdk/openai';
2 | import { getEncoding } from 'js-tiktoken';
3 |
4 | import { RecursiveCharacterTextSplitter } from './text-splitter';
5 |
// Model Display Information
// Static catalog of the OpenAI models the UI can select, keyed by model id.
export const AI_MODEL_DISPLAY = {
  'gpt-4o': {
    id: 'gpt-4o',
    name: 'GPT-4o',
    logo: '/providers/openai.webp',
    vision: true,
  },
  'gpt-4o-mini': {
    id: 'gpt-4o-mini',
    name: 'GPT-4o mini',
    logo: '/providers/openai.webp',
    vision: true,
  },
  'o3-mini': {
    id: 'o3-mini',
    name: 'o3 mini',
    // All entries share the OpenAI provider logo.
    logo: '/providers/openai.webp',
    vision: false,
  },
} as const;

// Union of the valid model ids: 'gpt-4o' | 'gpt-4o-mini' | 'o3-mini'.
export type AIModel = keyof typeof AI_MODEL_DISPLAY;
// Display metadata for a single catalog entry.
export type AIModelDisplayInfo = (typeof AI_MODEL_DISPLAY)[AIModel];
// Flat list of the entries above, for rendering model pickers.
export const availableModels = Object.values(AI_MODEL_DISPLAY);
31 |
32 | // OpenAI Client
33 | const openai = createOpenAI({
34 | apiKey: process.env.OPENAI_KEY!,
35 | });
36 |
37 | // Create model instances with configurations
38 | export function createModel(modelId: AIModel, apiKey?: string) {
39 | const client = createOpenAI({
40 | apiKey: apiKey || process.env.OPENAI_KEY!,
41 | });
42 |
43 | return client(modelId, {
44 | structuredOutputs: true,
45 | ...(modelId === 'o3-mini' ? { reasoningEffort: 'medium' } : {}),
46 | });
47 | }
48 |
// Token handling
// Chunks shorter than this are not worth splitting further.
const MinChunkSize = 140;
// o200k_base is the tokenizer family used by recent OpenAI models.
const encoder = getEncoding('o200k_base');

// trim prompt to maximum context size
/**
 * Trim `prompt` so its token count fits within `contextSize` tokens.
 *
 * Strategy: measure tokens with the o200k_base encoder; if over budget,
 * estimate how many characters to drop (~3 chars per token), cut at a
 * natural boundary using the recursive splitter, and recurse until the
 * result fits.
 */
export function trimPrompt(prompt: string, contextSize = 120_000) {
  if (!prompt) {
    return '';
  }

  const length = encoder.encode(prompt).length;
  if (length <= contextSize) {
    // Already within budget; nothing to do.
    return prompt;
  }

  const overflowTokens = length - contextSize;
  // on average it's 3 characters per token, so multiply by 3 to get a rough estimate of the number of characters
  const chunkSize = prompt.length - overflowTokens * 3;
  if (chunkSize < MinChunkSize) {
    // Degenerate case: keep at least a minimal prefix of the prompt.
    return prompt.slice(0, MinChunkSize);
  }

  const splitter = new RecursiveCharacterTextSplitter({
    chunkSize,
    chunkOverlap: 0,
  });
  // The first chunk is the longest prefix that respects natural boundaries.
  const trimmedPrompt = splitter.splitText(prompt)[0] ?? '';

  // last catch, there's a chance that the trimmed prompt is same length as the original prompt, due to how tokens are split & innerworkings of the splitter, handle this case by just doing a hard cut
  if (trimmedPrompt.length === prompt.length) {
    return trimPrompt(prompt.slice(0, chunkSize), contextSize);
  }

  // recursively trim until the prompt is within the context size
  return trimPrompt(trimmedPrompt, contextSize);
}
85 |
--------------------------------------------------------------------------------
/lib/deep-research/ai/text-splitter.test.ts:
--------------------------------------------------------------------------------
1 | import assert from 'node:assert';
2 | import { describe, it } from 'node:test';
3 |
4 | import { RecursiveCharacterTextSplitter } from './text-splitter';
5 |
describe('RecursiveCharacterTextSplitter', () => {
  it('Should correctly split text by separators', () => {
    const splitter = new RecursiveCharacterTextSplitter({
      chunkSize: 50,
      chunkOverlap: 10,
    });
    // At chunkSize 50 the comma is the best separator that fits.
    assert.deepEqual(
      splitter.splitText(
        'Hello world, this is a test of the recursive text splitter.',
      ),
      ['Hello world', 'this is a test of the recursive text splitter'],
    );

    // With a larger budget, the higher-priority '.' separator is used.
    splitter.chunkSize = 100;
    assert.deepEqual(
      splitter.splitText(
        'Hello world, this is a test of the recursive text splitter. If I have a period, it should split along the period.',
      ),
      [
        'Hello world, this is a test of the recursive text splitter',
        'If I have a period, it should split along the period.',
      ],
    );

    // Newlines rank above periods in the separator priority order.
    splitter.chunkSize = 110;
    assert.deepEqual(
      splitter.splitText(
        'Hello world, this is a test of the recursive text splitter. If I have a period, it should split along the period.\nOr, if there is a new line, it should prioritize splitting on new lines instead.',
      ),
      [
        'Hello world, this is a test of the recursive text splitter',
        'If I have a period, it should split along the period.',
        'Or, if there is a new line, it should prioritize splitting on new lines instead.',
      ],
    );
  });

  it('Should handle empty string', () => {
    const splitter = new RecursiveCharacterTextSplitter({
      chunkSize: 50,
      chunkOverlap: 10,
    });
    // Empty input yields no chunks rather than [''].
    assert.deepEqual(splitter.splitText(''), []);
  });
});
51 |
--------------------------------------------------------------------------------
/lib/deep-research/ai/text-splitter.ts:
--------------------------------------------------------------------------------
1 | interface TextSplitterParams {
2 | chunkSize: number;
3 |
4 | chunkOverlap: number;
5 | }
6 |
7 | abstract class TextSplitter implements TextSplitterParams {
8 | chunkSize = 1000;
9 | chunkOverlap = 200;
10 |
11 | constructor(fields?: Partial) {
12 | this.chunkSize = fields?.chunkSize ?? this.chunkSize;
13 | this.chunkOverlap = fields?.chunkOverlap ?? this.chunkOverlap;
14 | if (this.chunkOverlap >= this.chunkSize) {
15 | throw new Error('Cannot have chunkOverlap >= chunkSize');
16 | }
17 | }
18 |
19 | abstract splitText(text: string): string[];
20 |
21 | createDocuments(texts: string[]): string[] {
22 | const documents: string[] = [];
23 | for (let i = 0; i < texts.length; i += 1) {
24 | const text = texts[i];
25 | for (const chunk of this.splitText(text!)) {
26 | documents.push(chunk);
27 | }
28 | }
29 | return documents;
30 | }
31 |
32 | splitDocuments(documents: string[]): string[] {
33 | return this.createDocuments(documents);
34 | }
35 |
36 | private joinDocs(docs: string[], separator: string): string | null {
37 | const text = docs.join(separator).trim();
38 | return text === '' ? null : text;
39 | }
40 |
41 | mergeSplits(splits: string[], separator: string): string[] {
42 | const docs: string[] = [];
43 | const currentDoc: string[] = [];
44 | let total = 0;
45 | for (const d of splits) {
46 | const _len = d.length;
47 | if (total + _len >= this.chunkSize) {
48 | if (total > this.chunkSize) {
49 | console.warn(
50 | `Created a chunk of size ${total}, +
51 | which is longer than the specified ${this.chunkSize}`,
52 | );
53 | }
54 | if (currentDoc.length > 0) {
55 | const doc = this.joinDocs(currentDoc, separator);
56 | if (doc !== null) {
57 | docs.push(doc);
58 | }
59 | // Keep on popping if:
60 | // - we have a larger chunk than in the chunk overlap
61 | // - or if we still have any chunks and the length is long
62 | while (
63 | total > this.chunkOverlap ||
64 | (total + _len > this.chunkSize && total > 0)
65 | ) {
66 | total -= currentDoc[0]!.length;
67 | currentDoc.shift();
68 | }
69 | }
70 | }
71 | currentDoc.push(d);
72 | total += _len;
73 | }
74 | const doc = this.joinDocs(currentDoc, separator);
75 | if (doc !== null) {
76 | docs.push(doc);
77 | }
78 | return docs;
79 | }
80 | }
81 |
82 | export interface RecursiveCharacterTextSplitterParams
83 | extends TextSplitterParams {
84 | separators: string[];
85 | }
86 |
87 | export class RecursiveCharacterTextSplitter
88 | extends TextSplitter
89 | implements RecursiveCharacterTextSplitterParams
90 | {
91 | separators: string[] = ['\n\n', '\n', '.', ',', '>', '<', ' ', ''];
92 |
93 | constructor(fields?: Partial) {
94 | super(fields);
95 | this.separators = fields?.separators ?? this.separators;
96 | }
97 |
98 | splitText(text: string): string[] {
99 | const finalChunks: string[] = [];
100 |
101 | // Get appropriate separator to use
102 | let separator: string = this.separators[this.separators.length - 1]!;
103 | for (const s of this.separators) {
104 | if (s === '') {
105 | separator = s;
106 | break;
107 | }
108 | if (text.includes(s)) {
109 | separator = s;
110 | break;
111 | }
112 | }
113 |
114 | // Now that we have the separator, split the text
115 | let splits: string[];
116 | if (separator) {
117 | splits = text.split(separator);
118 | } else {
119 | splits = text.split('');
120 | }
121 |
122 | // Now go merging things, recursively splitting longer texts.
123 | let goodSplits: string[] = [];
124 | for (const s of splits) {
125 | if (s.length < this.chunkSize) {
126 | goodSplits.push(s);
127 | } else {
128 | if (goodSplits.length) {
129 | const mergedText = this.mergeSplits(goodSplits, separator);
130 | finalChunks.push(...mergedText);
131 | goodSplits = [];
132 | }
133 | const otherInfo = this.splitText(s);
134 | finalChunks.push(...otherInfo);
135 | }
136 | }
137 | if (goodSplits.length) {
138 | const mergedText = this.mergeSplits(goodSplits, separator);
139 | finalChunks.push(...mergedText);
140 | }
141 | return finalChunks;
142 | }
143 | }
144 |
--------------------------------------------------------------------------------
/lib/deep-research/deep-research.ts:
--------------------------------------------------------------------------------
1 | import FirecrawlApp, { SearchResponse } from '@mendable/firecrawl-js';
2 | import { generateObject } from 'ai';
3 | import { compact } from 'lodash-es';
4 | import { z } from 'zod';
5 |
6 | import { createModel, trimPrompt } from './ai/providers';
7 | import { systemPrompt } from './prompt';
8 |
// Aggregated output of a research run: deduplicated learnings plus the URLs
// that contributed to them.
type ResearchResult = {
  learnings: string[];
  visitedUrls: string[];
};

// Options accepted by deepResearch().
// NOTE(review): type arguments look stripped in this copy — presumably
// Promise<void> and ReturnType<typeof createModel>; confirm upstream.
type DeepResearchOptions = {
  query: string;
  // Number of SERP queries generated per round (deepResearch defaults to 3).
  breadth?: number;
  // NOTE(review): accepted (default 2 in deepResearch) but never read there.
  depth?: number;
  // Learnings carried over from a previous round, if any.
  learnings?: string[];
  visitedUrls?: string[];
  // Streaming progress callback for the UI.
  onProgress?: (update: string) => Promise;
  model: ReturnType;
  // Overrides the FIRECRAWL_KEY environment variable when provided.
  firecrawlKey?: string;
};
24 |
25 | // Update the firecrawl initialization to use the provided key
26 | const getFirecrawl = (apiKey?: string) =>
27 | new FirecrawlApp({
28 | apiKey: apiKey ?? process.env.FIRECRAWL_KEY ?? '',
29 | apiUrl: process.env.FIRECRAWL_BASE_URL,
30 | });
31 |
32 | // Helper function to format progress messages consistently
33 | const formatProgress = {
34 | generating: (count: number, query: string) =>
35 | `Generating up to ${count} SERP queries\n${query}`,
36 |
37 | created: (count: number, queries: string) =>
38 | `Created ${count} SERP queries\n${queries}`,
39 |
40 | researching: (query: string) => `Researching\n${query}`,
41 |
42 | found: (count: number, query: string) => `Found ${count} results\n${query}`,
43 |
44 | ran: (query: string, count: number) =>
45 | `Ran "${query}"\n${count} content items found`,
46 |
47 | generated: (count: number, query: string) =>
48 | `Generated ${count} learnings\n${query}`,
49 | };
50 |
51 | // Helper function to log and stream messages
52 | async function logProgress(
53 | message: string,
54 | onProgress?: (update: string) => Promise,
55 | ) {
56 | if (onProgress) {
57 | await onProgress(message);
58 | }
59 | }
60 |
/**
 * Take a user query (plus optional learnings from earlier rounds) and ask
 * the model for a list of unique SERP queries to research next.
 *
 * Returns at most `numQueries` query strings.
 * NOTE(review): type arguments look stripped in this copy — presumably
 * Promise<void> and ReturnType<typeof createModel>; confirm upstream.
 */
async function generateSerpQueries({
  query,
  numQueries = 3,
  learnings,
  onProgress,
  model,
}: {
  query: string;
  numQueries?: number;

  // optional, if provided, the research will continue from the last learning
  learnings?: string[];
  onProgress?: (update: string) => Promise;
  model: ReturnType;
}) {
  await logProgress(formatProgress.generating(numQueries, query), onProgress);

  const res = await generateObject({
    model,
    system: systemPrompt(),
    prompt: `Given the following prompt from the user, generate a list of SERP queries to research the topic. Return a maximum of ${numQueries} queries, but feel free to return less if the original prompt is clear. Make sure each query is unique and not similar to each other: ${query} \n\n${
      learnings
        ? `Here are some learnings from previous research, use them to generate more specific queries: ${learnings.join(
            '\n',
          )}`
        : ''
    }`,
    schema: z.object({
      queries: z
        .array(
          z.object({
            query: z.string().describe('The SERP query'),
            researchGoal: z
              .string()
              .describe(
                'First talk about the goal of the research that this query is meant to accomplish, then go deeper into how to advance the research once the results are found, mention additional research directions. Be as specific as possible, especially for additional research directions.',
              ),
          }),
        )
        .describe(`List of SERP queries, max of ${numQueries}`),
    }),
  });

  // NOTE(review): this logs the full set the model returned, while the
  // return value below is sliced to `numQueries` — confirm the potential
  // mismatch in the reported count is intentional.
  const queriesList = res.object.queries.map(q => q.query).join(', ');
  await logProgress(
    formatProgress.created(res.object.queries.length, queriesList),
    onProgress,
  );

  return res.object.queries.slice(0, numQueries).map(q => q.query);
}
113 |
/**
 * Distill Firecrawl search results into learnings and follow-up questions.
 *
 * Each result's markdown is capped via `trimPrompt(content, 25_000)` before
 * prompting, and the model call is aborted after 60 seconds.
 *
 * @returns `{ learnings, followUpQuestions }` — note the only visible caller
 *   (deepResearch) currently uses `learnings` alone.
 */
async function processSerpResult({
  query,
  result,
  numLearnings = 3,
  numFollowUpQuestions = 3,
  onProgress,
  model,
}: {
  query: string;
  result: SearchResponse;
  numLearnings?: number;
  numFollowUpQuestions?: number;
  onProgress?: (update: string) => Promise;
  model: ReturnType;
}) {
  // Drop results with no markdown, then cap each document's size.
  const contents = compact(result.data.map(item => item.markdown)).map(
    content => trimPrompt(content, 25_000),
  );

  await logProgress(formatProgress.ran(query, contents.length), onProgress);

  // NOTE(review): the `\n${content}\n ` wrapper below looks like it lost
  // surrounding tags (e.g. <content>…</content>) during extraction — confirm
  // against the original source.
  const res = await generateObject({
    model,
    // Give up on the model call after one minute.
    abortSignal: AbortSignal.timeout(60_000),
    system: systemPrompt(),
    prompt: `Given the following contents from a SERP search for the query ${query} , generate a list of learnings from the contents. Return a maximum of ${numLearnings} learnings, but feel free to return less if the contents are clear. Make sure each learning is unique and not similar to each other. The learnings should be concise and to the point, as detailed and information dense as possible. Make sure to include any entities like people, places, companies, products, things, etc in the learnings, as well as any exact metrics, numbers, or dates. The learnings will be used to research the topic further.\n\n${contents
      .map(content => `\n${content}\n `)
      .join('\n')} `,
    schema: z.object({
      learnings: z
        .array(z.string())
        .describe(`List of learnings, max of ${numLearnings}`),
      followUpQuestions: z
        .array(z.string())
        .describe(
          `List of follow-up questions to research the topic further, max of ${numFollowUpQuestions}`,
        ),
    }),
  });

  await logProgress(
    formatProgress.generated(res.object.learnings.length, query),
    onProgress,
  );

  return res.object;
}
161 |
/**
 * Compose the final Markdown report from the user's prompt and all collected
 * learnings, then append a "## Sources" section listing the visited URLs.
 *
 * NOTE(review): the `\n${learning}\n ` wrapper below looks like it lost
 * surrounding tags (e.g. <learning>…</learning>) during extraction — confirm
 * against the original source.
 */
export async function writeFinalReport({
  prompt,
  learnings,
  visitedUrls,
  model,
}: {
  prompt: string;
  learnings: string[];
  visitedUrls: string[];
  model: ReturnType;
}) {
  // Cap the combined learnings via trimPrompt before prompting the model.
  const learningsString = trimPrompt(
    learnings
      .map(learning => `\n${learning}\n `)
      .join('\n'),
    150_000,
  );

  const res = await generateObject({
    model,
    system: systemPrompt(),
    prompt: `Given the following prompt from the user, write a final report on the topic using the learnings from research and format it in proper Markdown. Use Markdown syntax (headings, lists, horizontal rules, etc.) to structure the document. Aim for a detailed report of at least 3 pages.\n\n${prompt} \n\nHere are all the learnings from previous research:\n\n\n${learningsString}\n `,
    schema: z.object({
      reportMarkdown: z
        .string()
        .describe('Final report on the topic in Markdown'),
    }),
  });

  // Append the visited URLs as a markdown formatted Sources section
  const urlsSection = `\n\n## Sources\n\n${visitedUrls
    .map(url => `- ${url}`)
    .join('\n')}`;

  // Prepend a primary markdown heading to make sure the UI renders it as markdown
  return `# Research Report\n\n${res.object.reportMarkdown}${urlsSection}`;
}
199 |
/**
 * Run one round of research: generate SERP queries for `query`, search each
 * with Firecrawl, and distill the results into deduplicated learnings and
 * visited URLs.
 *
 * NOTE(review): `depth` is destructured with a default but never read — the
 * recursive deepening the name implies is not implemented in this function;
 * confirm whether that is intentional.
 * NOTE(review): "generating"/"created" messages are emitted both here and
 * inside generateSerpQueries, and "ran" both here and in processSerpResult,
 * so progress subscribers see duplicates.
 */
export async function deepResearch({
  query,
  breadth = 3,
  depth = 2,
  learnings = [],
  visitedUrls = [],
  onProgress,
  model,
  firecrawlKey,
}: DeepResearchOptions): Promise {
  const firecrawl = getFirecrawl(firecrawlKey);
  const results: ResearchResult[] = [];

  // Generate SERP queries
  await logProgress(formatProgress.generating(breadth, query), onProgress);

  const serpQueries = await generateSerpQueries({
    query,
    learnings,
    numQueries: breadth,
    onProgress,
    model,
  });

  await logProgress(
    formatProgress.created(serpQueries.length, serpQueries.join(', ')),
    onProgress,
  );

  // Process each SERP query sequentially; a failure on one query is logged
  // and recorded as an empty result rather than aborting the whole run.
  for (const serpQuery of serpQueries) {
    try {
      await logProgress(formatProgress.researching(serpQuery), onProgress);

      const searchResults = await firecrawl.search(serpQuery, {
        timeout: 15000,
        limit: 5,
        scrapeOptions: { formats: ['markdown'] },
      });

      await logProgress(
        formatProgress.found(searchResults.data.length, serpQuery),
        onProgress,
      );

      if (searchResults.data.length > 0) {
        await logProgress(
          formatProgress.ran(serpQuery, searchResults.data.length),
          onProgress,
        );

        const newLearnings = await processSerpResult({
          query: serpQuery,
          result: searchResults,
          numLearnings: Math.ceil(breadth / 2),
          numFollowUpQuestions: Math.ceil(breadth / 2),
          onProgress,
          model,
        });

        await logProgress(
          formatProgress.generated(newLearnings.learnings.length, serpQuery),
          onProgress,
        );

        results.push({
          learnings: newLearnings.learnings,
          // Keep only results that actually carry a URL.
          visitedUrls: searchResults.data
            .map(r => r.url)
            .filter((url): url is string => url != null),
        });
      }
    } catch (e) {
      console.error(`Error running query: ${serpQuery}: `, e);
      await logProgress(`Error running "${serpQuery}": ${e}`, onProgress);
      results.push({
        learnings: [],
        visitedUrls: [],
      });
    }
  }

  // Deduplicate learnings and URLs across all per-query results.
  return {
    learnings: Array.from(new Set(results.flatMap(r => r.learnings))),
    visitedUrls: Array.from(new Set(results.flatMap(r => r.visitedUrls))),
  };
}
287 |
--------------------------------------------------------------------------------
/lib/deep-research/feedback.ts:
--------------------------------------------------------------------------------
1 | import { generateObject } from 'ai';
2 | import { z } from 'zod';
3 |
4 | import { createModel, type AIModel } from './ai/providers';
5 | import { systemPrompt } from './prompt';
6 |
/**
 * Generate clarifying follow-up questions for a research query.
 *
 * @param query        the user's research prompt
 * @param numQuestions upper bound on questions returned (default 3)
 * @param modelId      model identifier passed to createModel (default 'o3-mini')
 * @param apiKey       optional API key forwarded to createModel
 * @returns at most `numQuestions` question strings
 */
export async function generateFeedback({
  query,
  numQuestions = 3,
  modelId = 'o3-mini',
  apiKey,
}: {
  query: string;
  numQuestions?: number;
  modelId?: AIModel;
  apiKey?: string;
}) {
  const model = createModel(modelId, apiKey);

  const userFeedback = await generateObject({
    model,
    system: systemPrompt(),
    prompt: `Given the following query from the user, ask some follow up questions to clarify the research direction. Return a maximum of ${numQuestions} questions, but feel free to return less if the original query is clear: ${query} `,
    schema: z.object({
      questions: z
        .array(z.string())
        .describe(
          `Follow up questions to clarify the research direction, max of ${numQuestions}`,
        ),
    }),
  });

  // Enforce the cap locally in case the model returns more than requested.
  return userFeedback.object.questions.slice(0, numQuestions);
}
35 |
--------------------------------------------------------------------------------
/lib/deep-research/index.ts:
--------------------------------------------------------------------------------
1 | export { deepResearch } from './deep-research';
2 | export { generateFeedback } from './feedback';
3 | export { writeFinalReport } from './deep-research';
4 | export { systemPrompt } from './prompt';
5 |
--------------------------------------------------------------------------------
/lib/deep-research/prompt.ts:
--------------------------------------------------------------------------------
1 | export const systemPrompt = () => {
2 | const now = new Date().toISOString();
3 | return `You are an expert researcher. Today is ${now}. Follow these instructions when responding:
4 | - You may be asked to research subjects that is after your knowledge cutoff, assume the user is right when presented with news.
5 | - The user is a highly experienced analyst, no need to simplify it, be as detailed as possible and make sure your response is correct.
6 | - Be highly organized.
7 | - Suggest solutions that I didn't think about.
8 | - Be proactive and anticipate my needs.
9 | - Treat me as an expert in all subject matter.
10 | - Mistakes erode my trust, so be accurate and thorough.
11 | - Provide detailed explanations, I'm comfortable with lots of detail.
12 | - Value good arguments over authorities, the source is irrelevant.
13 | - Consider new technologies and contrarian ideas, not just the conventional wisdom.
14 | - You may use high levels of speculation or prediction, just flag it for me.`;
15 | };
16 |
--------------------------------------------------------------------------------
/lib/hooks/use-scroll-to-bottom.ts:
--------------------------------------------------------------------------------
1 | import { useEffect, useRef, RefObject } from 'react';
2 |
/**
 * React hook that keeps a scrollable container pinned to its end.
 *
 * Returns a [containerRef, endRef] pair: attach `containerRef` to the
 * scrollable element and `endRef` to a sentinel element at its end. Any DOM
 * mutation inside the container (children, attributes, or text) scrolls the
 * sentinel into view instantly.
 *
 * NOTE(review): generic arguments appear stripped in this copy — presumably
 * RefObject<T> and useRef<T>(null); confirm against the original source.
 */
export function useScrollToBottom(): [
  RefObject,
  RefObject,
] {
  const containerRef = useRef(null);
  const endRef = useRef(null);

  useEffect(() => {
    const container = containerRef.current;
    const end = endRef.current;

    if (container && end) {
      // Re-scroll on every observed change inside the container.
      const observer = new MutationObserver(() => {
        end.scrollIntoView({ behavior: 'instant', block: 'end' });
      });

      observer.observe(container, {
        childList: true,
        subtree: true,
        attributes: true,
        characterData: true,
      });

      // Stop observing when the component unmounts.
      return () => observer.disconnect();
    }
  }, []);

  return [containerRef, endRef];
}
32 |
--------------------------------------------------------------------------------
/lib/utils.ts:
--------------------------------------------------------------------------------
1 | import { type ClassValue, clsx } from "clsx";
2 | import { twMerge } from "tailwind-merge";
3 |
4 | export function cn(...inputs: ClassValue[]) {
5 | return twMerge(clsx(inputs));
6 | }
7 |
8 | export const isMobile = () => {
9 | if (typeof window === "undefined") return false;
10 | const width = window.innerWidth;
11 | return width <= 1024;
12 | };
13 |
14 | export function getCurrentFormattedDate(): string {
15 | const currentDate = new Date();
16 | const options: Intl.DateTimeFormatOptions = {
17 | year: "numeric",
18 | month: "long",
19 | day: "numeric",
20 | hour: "numeric",
21 | minute: "numeric",
22 | hour12: true,
23 | };
24 | return new Intl.DateTimeFormat("en-US", options).format(currentDate);
25 | }
26 |
27 | export function formatTimestamp(timestamp: number): string {
28 | const date = new Date(timestamp);
29 | const dateString = date.toLocaleDateString("en-US", {
30 | year: "numeric",
31 | month: "long",
32 | day: "numeric",
33 | });
34 | const timeString = date.toLocaleTimeString("en-US", {
35 | hour: "numeric",
36 | minute: "2-digit",
37 | hour12: true,
38 | });
39 |
40 | return `${dateString} at ${timeString}`;
41 | }
42 |
--------------------------------------------------------------------------------
/middleware.ts:
--------------------------------------------------------------------------------
1 | import { type NextRequest } from 'next/server';
2 |
// Placeholder middleware: currently a no-op for every matched request (the
// `request` parameter is intentionally unused).
// NOTE(review): if no middleware logic is planned, removing this file would
// skip the middleware invocation entirely.
export async function middleware(request: NextRequest) {}

export const config = {
  matcher: [
    /*
     * Match all request paths except for the ones starting with:
     * - _next/static (static files)
     * - _next/image (image optimization files)
     * - favicon.ico (favicon file)
     * Feel free to modify this pattern to include more paths.
     */
    '/((?!_next/static|_next/image|favicon.ico|.*\\.(?:svg|png|jpg|jpeg|gif|webp)$).*)',
  ],
};
17 |
--------------------------------------------------------------------------------
/next-env.d.ts:
--------------------------------------------------------------------------------
1 | ///
2 | ///
3 |
4 | // NOTE: This file should not be edited
5 | // see https://nextjs.org/docs/app/building-your-application/configuring/typescript for more information.
6 |
--------------------------------------------------------------------------------
/next.config.js:
--------------------------------------------------------------------------------
/** @type {import('next').NextConfig} */
const nextConfig = {
  // Surface unsafe patterns in development by rendering components twice.
  reactStrictMode: true,
};

module.exports = nextConfig;
7 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "private": true,
3 | "scripts": {
4 | "dev": "next dev",
5 | "build": "next build",
6 | "start": "next start"
7 | },
8 | "dependencies": {
9 | "@ai-sdk/openai": "^1.1.9",
10 | "@mendable/firecrawl-js": "^1.16.0",
11 | "@radix-ui/react-dialog": "^1.1.6",
12 | "@radix-ui/react-select": "^2.1.6",
13 | "@radix-ui/react-slider": "^1.2.3",
14 | "@radix-ui/react-slot": "^1.1.2",
15 | "@radix-ui/react-tooltip": "^1.1.8",
16 | "ai": "^4.1.17",
17 | "autoprefixer": "10.4.17",
18 | "class-variance-authority": "^0.7.0",
19 | "classnames": "^2.5.1",
20 | "framer-motion": "^11.18.1",
21 | "js-tiktoken": "^1.0.17",
22 | "lodash-es": "^4.17.21",
23 | "lucide-react": "^0.378.0",
24 | "next": "^14.2.8",
25 | "p-limit": "^6.2.0",
26 | "postcss": "8.4.33",
27 | "react": "^18.3.1",
28 | "react-dom": "^18.2.0",
29 | "react-markdown": "^9.0.1",
30 | "remark-gfm": "^4.0.0",
31 | "tailwindcss": "3.4.1",
32 | "tailwindcss-animate": "^1.0.7",
33 | "typescript": "5.3.3",
34 | "usehooks-ts": "^3.1.0",
35 | "zod": "^3.24.1"
36 | },
37 | "devDependencies": {
38 | "@tailwindcss/typography": "^0.5.12",
39 | "@types/lodash-es": "^4.17.12",
40 | "@types/node": "20.11.5",
41 | "@types/react": "^18.3.4",
42 | "@types/react-dom": "^18.2.18",
43 | "clsx": "^2.1.1",
44 | "tailwind-merge": "^2.3.0"
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/postcss.config.js:
--------------------------------------------------------------------------------
// Standard Tailwind PostCSS pipeline: Tailwind generates the utilities,
// Autoprefixer adds vendor prefixes.
module.exports = {
  plugins: {
    tailwindcss: {},
    autoprefixer: {},
  },
};
7 |
--------------------------------------------------------------------------------
/public/favicon-16x16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fdarkaou/open-deep-research/ce15e95589b55dd6811cc0c3564b4380ae0c7519/public/favicon-16x16.png
--------------------------------------------------------------------------------
/public/favicon-32x32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fdarkaou/open-deep-research/ce15e95589b55dd6811cc0c3564b4380ae0c7519/public/favicon-32x32.png
--------------------------------------------------------------------------------
/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fdarkaou/open-deep-research/ce15e95589b55dd6811cc0c3564b4380ae0c7519/public/favicon.ico
--------------------------------------------------------------------------------
/public/logo-bg.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fdarkaou/open-deep-research/ce15e95589b55dd6811cc0c3564b4380ae0c7519/public/logo-bg.png
--------------------------------------------------------------------------------
/public/logo-text.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fdarkaou/open-deep-research/ce15e95589b55dd6811cc0c3564b4380ae0c7519/public/logo-text.png
--------------------------------------------------------------------------------
/public/og.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fdarkaou/open-deep-research/ce15e95589b55dd6811cc0c3564b4380ae0c7519/public/og.png
--------------------------------------------------------------------------------
/public/providers/openai.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fdarkaou/open-deep-research/ce15e95589b55dd6811cc0c3564b4380ae0c7519/public/providers/openai.webp
--------------------------------------------------------------------------------
/public/site.webmanifest:
--------------------------------------------------------------------------------
1 | {"name":"","short_name":"","icons":[{"src":"/android-chrome-192x192.png","sizes":"192x192","type":"image/png"},{"src":"/android-chrome-512x512.png","sizes":"512x512","type":"image/png"}],"theme_color":"#ffffff","background_color":"#ffffff","display":"standalone"}
--------------------------------------------------------------------------------
/tailwind.config.ts:
--------------------------------------------------------------------------------
import type { Config } from 'tailwindcss';

// Tailwind configuration: class-based dark mode, CSS-variable color tokens,
// and accordion keyframes consumed by tailwindcss-animate.
const config = {
  darkMode: ['class'],
  // Files scanned for class names when generating utilities.
  content: [
    './pages/**/*.{ts,tsx}',
    './components/**/*.{ts,tsx}',
    './app/**/*.{ts,tsx}',
    './src/**/*.{ts,tsx}',
  ],
  theme: {
    container: {
      center: true,
      padding: '2rem',
      screens: {
        '2xl': '1400px',
      },
    },
    extend: {
      // Semantic colors resolve to hsl(var(--…)) custom properties so themes
      // can be swapped at runtime — presumably declared in app/globals.css;
      // verify.
      colors: {
        border: 'hsl(var(--border))',
        input: 'hsl(var(--input))',
        ring: 'hsl(var(--ring))',
        background: 'hsl(var(--background))',
        foreground: 'hsl(var(--foreground))',
        primary: {
          DEFAULT: 'hsl(var(--primary))',
          foreground: 'hsl(var(--primary-foreground))',
        },
        secondary: {
          DEFAULT: 'hsl(var(--secondary))',
          foreground: 'hsl(var(--secondary-foreground))',
        },
        destructive: {
          DEFAULT: 'hsl(var(--destructive))',
          foreground: 'hsl(var(--destructive-foreground))',
        },
        muted: {
          DEFAULT: 'hsl(var(--muted))',
          foreground: 'hsl(var(--muted-foreground))',
        },
        accent: {
          DEFAULT: 'hsl(var(--accent))',
          foreground: 'hsl(var(--accent-foreground))',
        },
        popover: {
          DEFAULT: 'hsl(var(--popover))',
          foreground: 'hsl(var(--popover-foreground))',
        },
        card: {
          DEFAULT: 'hsl(var(--card))',
          foreground: 'hsl(var(--card-foreground))',
        },
      },
      // All radii derive from a single --radius custom property.
      borderRadius: {
        lg: 'var(--radius)',
        md: 'calc(var(--radius) - 2px)',
        sm: 'calc(var(--radius) - 4px)',
      },
      keyframes: {
        'accordion-down': {
          from: { height: '0' },
          to: { height: 'var(--radix-accordion-content-height)' },
        },
        'accordion-up': {
          from: { height: 'var(--radix-accordion-content-height)' },
          to: { height: '0' },
        },
      },
      animation: {
        'accordion-down': 'accordion-down 0.2s ease-out',
        'accordion-up': 'accordion-up 0.2s ease-out',
      },
    },
  },
  plugins: [require('tailwindcss-animate')],
} satisfies Config;

export default config;
80 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "es5",
4 | "lib": ["dom", "dom.iterable", "esnext"],
5 | "allowJs": true,
6 | "skipLibCheck": true,
7 | "strict": true,
8 | "forceConsistentCasingInFileNames": true,
9 | "noEmit": true,
10 | "esModuleInterop": true,
11 | "module": "esnext",
12 | "moduleResolution": "node",
13 | "resolveJsonModule": true,
14 | "isolatedModules": true,
15 | "jsx": "preserve",
16 | "incremental": true,
17 | "plugins": [
18 | {
19 | "name": "next"
20 | }
21 | ],
22 | "paths": {
23 | "@/*": ["./*"],
24 | "contentlayer/generated": ["./.contentlayer/generated"]
25 | },
26 | "baseUrl": "."
27 | // ^^^^^^^^^^^
28 | },
29 | "include": [
30 | "next-env.d.ts",
31 | "**/*.ts",
32 | "**/*.tsx",
33 | ".next/types/**/*.ts",
34 | ".contentlayer/generated"
35 | // ^^^^^^^^^^^^^^^^^^^^^^
36 | ],
37 | "exclude": ["node_modules"]
38 | }
39 |
--------------------------------------------------------------------------------