├── .gitignore ├── LICENSE ├── README.md ├── app-icon.png ├── app ├── (chat) │ ├── layout.tsx │ └── page.tsx ├── action.tsx ├── agent │ └── page.tsx ├── api │ ├── chat │ │ └── route.ts │ └── retrieval │ │ ├── chat │ │ └── route.ts │ │ └── ingest │ │ └── route.ts ├── favicon.ico ├── globals.css └── layout.tsx ├── build-server.ts ├── components.json ├── components ├── AppLayout │ ├── index.tsx │ └── style.css ├── HomeBlock │ ├── index.tsx │ └── style.css ├── agents.tsx ├── button-scroll-to-bottom.tsx ├── chat-history.tsx ├── chat-list.tsx ├── chat-message-actions.tsx ├── chat-message.tsx ├── chat-scroll-anchor.tsx ├── chat.tsx ├── clear-history.tsx ├── collapsible-message.tsx ├── copilot.tsx ├── empty-screen.tsx ├── external-link.tsx ├── followup-panel.tsx ├── footer.tsx ├── fortune.tsx ├── header.tsx ├── markdown.tsx ├── message.tsx ├── prompt-form.tsx ├── providers.tsx ├── search-related.tsx ├── search-results-image.tsx ├── search-results.tsx ├── search-skeleton.tsx ├── section.tsx ├── sidebar-actions.tsx ├── sidebar-desktop.tsx ├── sidebar-footer.tsx ├── sidebar-item.tsx ├── sidebar-items.tsx ├── sidebar-list.tsx ├── sidebar-mobile.tsx ├── sidebar-toggle.tsx ├── sidebar.tsx ├── tailwind-indicator.tsx ├── theme-toggle.tsx ├── tool-badge.tsx ├── ui │ ├── alert-dialog.tsx │ ├── avatar.tsx │ ├── badge.tsx │ ├── button.tsx │ ├── card.tsx │ ├── carousel.tsx │ ├── checkbox.tsx │ ├── codeblock.tsx │ ├── command.tsx │ ├── dialog.tsx │ ├── dropdown-menu.tsx │ ├── icons.tsx │ ├── input.tsx │ ├── label.tsx │ ├── markdown.tsx │ ├── popover.tsx │ ├── select.tsx │ ├── separator.tsx │ ├── sheet.tsx │ ├── skeleton.tsx │ ├── switch.tsx │ ├── table.tsx │ ├── tabs.tsx │ ├── textarea.tsx │ └── tooltip.tsx ├── user-message.tsx └── writer.tsx ├── lib ├── agents │ ├── index.tsx │ ├── inquire.tsx │ ├── query-suggestor.tsx │ ├── researcher.tsx │ └── task-manager.tsx ├── hooks │ ├── use-at-bottom.tsx │ ├── use-copy-to-clipboard.tsx │ ├── use-local-storage.ts │ ├── use-setting.tsx │ └── use-sidebar.tsx ├── schema │ ├── inquiry.tsx │ ├── next-action.tsx │ ├── related.tsx │ └── search.tsx ├── types.ts └── utils.ts ├── next-env.d.ts ├── next.config.mjs ├── package.json ├── pnpm-lock.yaml ├── postcss.config.js ├── prettier.config.cjs ├── server ├── agent.ts ├── custom │ ├── llm │ │ └── gemini │ │ │ ├── chat_models.d.ts │ │ │ ├── chat_models.js │ │ │ ├── embeddings.d.ts │ │ │ ├── embeddings.js │ │ │ ├── index.d.ts │ │ │ ├── index.js │ │ │ ├── utils.d.ts │ │ │ └── utils.js │ └── tools │ │ └── dalle │ │ ├── dalle.d.ts │ │ └── dalle.js ├── server.ts └── tsconfig.json ├── src-tauri ├── .gitignore ├── Cargo.lock ├── Cargo.toml ├── build.rs ├── icons │ ├── 128x128.png │ ├── 128x128@2x.png │ ├── 32x32.png │ ├── Square107x107Logo.png │ ├── Square142x142Logo.png │ ├── Square150x150Logo.png │ ├── Square284x284Logo.png │ ├── Square30x30Logo.png │ ├── Square310x310Logo.png │ ├── Square44x44Logo.png │ ├── Square71x71Logo.png │ ├── Square89x89Logo.png │ ├── StoreLogo.png │ ├── icon.icns │ ├── icon.ico │ └── icon.png ├── src │ └── main.rs └── tauri.conf.json ├── tailwind.config.js ├── tailwind.config.ts └── tsconfig.json /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | .pnpm-debug.log* 9 | 10 | # Diagnostic reports (https://nodejs.org/api/report.html) 11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 12 | 13 | # Runtime data 14 | pids 15 | *.pid 16 | *.seed 17 | *.pid.lock 18 | 19 | # 
Directory for instrumented libs generated by jscoverage/JSCover
20 | lib-cov
21 |
22 | # Coverage directory used by tools like istanbul
23 | coverage
24 | *.lcov
25 |
26 | # nyc test coverage
27 | .nyc_output
28 |
29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
30 | .grunt
31 |
32 | # Bower dependency directory (https://bower.io/)
33 | bower_components
34 |
35 | # node-waf configuration
36 | .lock-wscript
37 |
38 | # Compiled binary addons (https://nodejs.org/api/addons.html)
39 | build/Release
40 |
41 | # Dependency directories
42 | node_modules/
43 | jspm_packages/
44 |
45 | # Snowpack dependency directory (https://snowpack.dev/)
46 | web_modules/
47 |
48 | # TypeScript cache
49 | *.tsbuildinfo
50 |
51 | # Optional npm cache directory
52 | .npm
53 |
54 | # Optional eslint cache
55 | .eslintcache
56 |
57 | # Optional stylelint cache
58 | .stylelintcache
59 |
60 | # Microbundle cache
61 | .rpt2_cache/
62 | .rts2_cache_cjs/
63 | .rts2_cache_es/
64 | .rts2_cache_umd/
65 |
66 | # Optional REPL history
67 | .node_repl_history
68 |
69 | # Output of 'npm pack'
70 | *.tgz
71 |
72 | # Yarn Integrity file
73 | .yarn-integrity
74 |
75 | # dotenv environment variable files
76 | .env
77 | .env.development.local
78 | .env.test.local
79 | .env.production.local
80 | .env.local
81 |
82 | # parcel-bundler cache (https://parceljs.org/)
83 | .cache
84 | .parcel-cache
85 |
86 | # Next.js build output
87 | .next
88 | out
89 |
90 | # Nuxt.js build / generate output
91 | .nuxt
92 | dist
93 |
94 | # Gatsby files
95 | .cache/
96 | # Comment in the public line in if your project uses Gatsby and not Next.js
97 | # https://nextjs.org/blog/next-9-1#public-directory-support
98 | # public
99 |
100 | # vuepress build output
101 | .vuepress/dist
102 |
103 | # vuepress v2.x temp and cache directory
104 | .temp
105 | .cache
106 |
107 | # Docusaurus cache and generated files
108 | .docusaurus
109 |
110 | # Serverless directories
111 | .serverless/
112 |
113 | # FuseBox cache
114 | .fusebox/
115 |
116 | # DynamoDB Local files
117 | .dynamodb/
118 |
119 | # TernJS port file
120 | .tern-port
121 |
122 | # Stores VSCode versions used for testing VSCode extensions
123 | .vscode-test
124 |
125 | # yarn v2
126 | .yarn/cache
127 | .yarn/unplugged
128 | .yarn/build-state.yml
129 | .yarn/install-state.gz
130 | .pnp.*
131 |
132 | */.env
133 | .env
134 | /build
135 | /src-tauri/target/
136 | /src-tauri/bin/
137 | .DS_Store
138 | /server/*.js
139 | .vercel
140 | .env*.local
-------------------------------------------------------------------------------- /LICENSE: --------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Frank Lin
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
-------------------------------------------------------------------------------- /README.md: --------------------------------------------------------------------------------
1 | # Next-Langchain-Tauri
2 |
3 | Next-Langchain-Tauri is a desktop application that combines Next.js for the frontend, Langchain.js for AI processing, and Tauri for packaging it all as a desktop app.
4 |
5 | image
6 |
7 | ## How to use
8 |
9 | Click the ```Key Setting``` button to enter your API keys, which are stored only on your computer, and then start chatting.
10 |
11 | Screenshot 2024-03-06 at 12 00 48 AM
12 |
13 | ## Development
14 |
15 | 1. Ensure you have Node.js v20, pnpm, Rust, and Cargo installed on your system.
16 |
17 | 2. Install the dependencies:
18 | ``` bash
19 | pnpm install
20 | ```
21 |
22 | 3. Use [yao-pkg](https://github.com/yao-pkg/pkg-binaries) to pack the server into a single executable and bundle it as a sidecar binary for Tauri. Before packing, check your machine's target architecture by running:
23 | ``` bash
24 | rustc -Vv | grep host | cut -f2 -d' '
25 | ```
26 | then replace ```server-aarch64-apple-darwin``` in package.json with ```server-<your-target>```, for example ```server-x86_64-apple-darwin```
27 |
28 | Save the change and run:
29 | ``` bash
30 | pnpm install -g @yao-pkg/pkg
31 | pnpm pkg-server
32 | ```
33 |
34 | 4. Switch 'next.config.mjs' to the Tauri-specific configuration.
35 |
36 |
37 | 5. Build:
38 | ``` bash
39 | pnpm tauri build
40 | ```
41 |
42 | ## Credits
43 |
44 | This project was inspired by and incorporates code from the following repositories:
45 |
46 | - [Vercel/ai-chatbot](https://github.com/vercel/ai-chatbot)
47 | - [langchain-ai/langchain-nextjs-template](https://github.com/langchain-ai/langchain-nextjs-template)
48 | - [srsholmes/tauri-nextjs-api-routes](https://github.com/srsholmes/tauri-nextjs-api-routes)
49 |
50 |
-------------------------------------------------------------------------------- /app-icon.png: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/etrobot/next-langchain-tauri/fffbcdf1da5c71fb18eb444a3f0b5853011e9517/app-icon.png
-------------------------------------------------------------------------------- /app/(chat)/layout.tsx: --------------------------------------------------------------------------------
1 | import { SidebarDesktop } from '@/components/sidebar-desktop'
2 |
3 | interface ChatLayoutProps {
4 | children: React.ReactNode
5 | }
6 |
7 | export default async function ChatLayout({ children }: ChatLayoutProps) {
8 | return (
9 |
10 | 11 |
12 | {children} 13 |
14 |
15 | ) 16 | } 17 | -------------------------------------------------------------------------------- /app/(chat)/page.tsx: -------------------------------------------------------------------------------- 1 | 'use client' 2 | import { Chat } from '@/components/chat' 3 | import { useSearchParams } from 'next/navigation' 4 | export default function IndexPage() { 5 | const params = useSearchParams() 6 | const timestamp = `${new Date().toISOString().split('.')[0]}` 7 | return 8 | } 9 | -------------------------------------------------------------------------------- /app/action.tsx: -------------------------------------------------------------------------------- 1 | import { 2 | StreamableValue, 3 | createAI, 4 | createStreamableUI, 5 | createStreamableValue, 6 | getMutableAIState 7 | } from 'ai/rsc' 8 | import { ExperimentalMessage } from 'ai' 9 | import { IconSpinner} from '@/components/ui/icons' 10 | 11 | import { Section } from '@/components/section' 12 | import { FollowupPanel } from '@/components/followup-panel' 13 | import { inquire, researcher, taskManager, querySuggestor } from '@/lib/agents' 14 | 15 | async function submit(formData?: FormData, skip?: boolean,apikeys?:any,) { 16 | 'use server' 17 | const aiState = getMutableAIState() 18 | const uiStream = createStreamableUI() 19 | const isGenerating = createStreamableValue(true) 20 | const isCollapsed = createStreamableValue(false) 21 | 22 | const messages: ExperimentalMessage[] = aiState.get() as any 23 | console.log(formData) 24 | // Limit the number of messages to 10 25 | messages.splice(0, Math.max(messages.length - 10, 0)) 26 | // Get the user input from the form data 27 | const userInput = skip 28 | ? `{"action": "skip"}` 29 | : (formData?.get('input') as string) 30 | const content = skip 31 | ? userInput 32 | : formData 33 | ? JSON.stringify(Object.fromEntries(formData)) 34 | : null 35 | // Add the user message to the state 36 | if (content) { 37 | const message = { role: 'user', content } 38 | messages.push(message as ExperimentalMessage) 39 | aiState.update([...(aiState.get() as any), message]) 40 | } 41 | 42 | async function processEvents() { 43 | uiStream.update() 44 | let action: any = { object: { next: 'proceed' } } 45 | // If the user skips the task, we proceed to the search 46 | if (!skip) action = (await taskManager(messages,apikeys?.llm_base_url as string,apikeys?.llm_api_key as string,apikeys?.llm_model as string)) ?? 
action 47 | if (action.object.next === 'inquire') { 48 | 49 | // Generate inquiry 50 | const inquiry = await inquire(uiStream, messages,apikeys?.llm_base_url as string,apikeys?.llm_api_key as string,apikeys?.llm_model as string) 51 | 52 | uiStream.done() 53 | isGenerating.done() 54 | isCollapsed.done(false) 55 | aiState.done([ 56 | ...aiState.get(), 57 | { role: 'assistant', content: `inquiry: ${inquiry?.question}` } 58 | ]) 59 | return 60 | } 61 | // Set the collapsed state to true 62 | isCollapsed.done(true) 63 | 64 | // Generate the answer 65 | let answer = '' 66 | let errorOccurred = false 67 | const streamText = createStreamableValue() 68 | while (answer.length === 0) { 69 | // Search the web and generate the answer 70 | const { fullResponse, hasError } = await researcher( 71 | uiStream, 72 | streamText, 73 | messages, 74 | apikeys?.llm_base_url as string, 75 | apikeys?.llm_api_key as string, 76 | apikeys?.llm_model as string, 77 | apikeys?.tavilyserp_api_key as string 78 | ) 79 | answer = fullResponse 80 | errorOccurred = hasError 81 | } 82 | streamText.done() 83 | 84 | if (!errorOccurred) { 85 | // Generate related queries 86 | await querySuggestor(uiStream, messages,apikeys?.llm_base_url as string,apikeys?.llm_api_key as string,apikeys?.llm_model as string) 87 | 88 | // Add follow-up panel 89 | uiStream.append( 90 |
91 | 92 |
93 | ) 94 | } 95 | 96 | isGenerating.done(false) 97 | uiStream.done() 98 | aiState.done([...aiState.get(), { role: 'assistant', content: answer }]) 99 | } 100 | 101 | processEvents() 102 | 103 | return { 104 | id: Date.now(), 105 | isGenerating: isGenerating.value, 106 | component: uiStream.value, 107 | isCollapsed: isCollapsed.value 108 | } 109 | } 110 | 111 | // Define the initial state of the AI. It can be any JSON object. 112 | const initialAIState: { 113 | role: 'user' | 'assistant' | 'system' | 'function' | 'tool' 114 | content: string 115 | id?: string 116 | name?: string 117 | }[] = [] 118 | 119 | // The initial UI state that the client will keep track of, which contains the message IDs and their UI nodes. 120 | const initialUIState: { 121 | id: number 122 | isGenerating?: StreamableValue 123 | isCollapsed?: StreamableValue 124 | component: React.ReactNode 125 | }[] = [] 126 | 127 | // AI is a provider you wrap your application with so you can access AI and UI state in your components. 128 | export const AI = createAI({ 129 | actions: { 130 | submit 131 | }, 132 | // Each state can be any shape of object, but for chat applications 133 | // it makes sense to have an array of messages. Or you may prefer something like { id: number, messages: Message[] } 134 | initialUIState, 135 | initialAIState 136 | }) 137 | -------------------------------------------------------------------------------- /app/agent/page.tsx: -------------------------------------------------------------------------------- 1 | 'use client' 2 | import Agents from '@/components/agents' 3 | import { useState } from 'react' 4 | 5 | export default function AgentsPage() { 6 | const [showPinnedOnly, setShowPinnedOnly] = useState(false); 7 | return 8 | } 9 | -------------------------------------------------------------------------------- /app/api/chat/route.ts: -------------------------------------------------------------------------------- 1 | import { Chat } from '@/server/agent' 2 | // export const runtime = 'edge'; 3 | // edge conflicts with duckduckgo-scape 4 | export async function POST(req: Request) { 5 | const json = await req.json() 6 | return Chat(json) 7 | } -------------------------------------------------------------------------------- /app/api/retrieval/chat/route.ts: -------------------------------------------------------------------------------- 1 | import { NextRequest, NextResponse } from "next/server"; 2 | import { Message as VercelChatMessage, StreamingTextResponse } from "ai"; 3 | 4 | import { createClient } from "@supabase/supabase-js"; 5 | 6 | import { ChatOpenAI, OpenAIEmbeddings } from "@langchain/openai"; 7 | import { PromptTemplate } from "@langchain/core/prompts"; 8 | import { SupabaseVectorStore } from "@langchain/community/vectorstores/supabase"; 9 | import { Document } from "@langchain/core/documents"; 10 | import { RunnableSequence } from "@langchain/core/runnables"; 11 | import { 12 | BytesOutputParser, 13 | StringOutputParser, 14 | } from "@langchain/core/output_parsers"; 15 | 16 | export const runtime = "edge"; 17 | 18 | const combineDocumentsFn = (docs: Document[]) => { 19 | const serializedDocs = docs.map((doc) => doc.pageContent); 20 | return serializedDocs.join("\n\n"); 21 | }; 22 | 23 | const formatVercelMessages = (chatHistory: VercelChatMessage[]) => { 24 | const formattedDialogueTurns = chatHistory.map((message) => { 25 | if (message.role === "user") { 26 | return `Human: ${message.content}`; 27 | } else if (message.role === "assistant") { 28 | return `Assistant: 
${message.content}`; 29 | } else { 30 | return `${message.role}: ${message.content}`; 31 | } 32 | }); 33 | return formattedDialogueTurns.join("\n"); 34 | }; 35 | 36 | const CONDENSE_QUESTION_TEMPLATE = `Given the following conversation and a follow up question, rephrase the follow up question to be a standalone question, in its original language. 37 | 38 | 39 | {chat_history} 40 | 41 | 42 | Follow Up Input: {question} 43 | Standalone question:`; 44 | const condenseQuestionPrompt = PromptTemplate.fromTemplate( 45 | CONDENSE_QUESTION_TEMPLATE, 46 | ); 47 | 48 | const ANSWER_TEMPLATE = ` 49 | 50 | {context} 51 | 52 | 53 | 54 | {chat_history} 55 | 56 | 57 | Question: {question} 58 | `; 59 | const answerPrompt = PromptTemplate.fromTemplate(ANSWER_TEMPLATE); 60 | 61 | /** 62 | * This handler initializes and calls a retrieval chain. It composes the chain using 63 | * LangChain Expression Language. See the docs for more information: 64 | * 65 | * https://js.langchain.com/docs/guides/expression_language/cookbook#conversational-retrieval-chain 66 | */ 67 | export async function POST(req: NextRequest) { 68 | try { 69 | const body = await req.json(); 70 | const messages = body.messages ?? []; 71 | const previousMessages = messages.slice(0, -1); 72 | const currentMessageContent = messages[messages.length - 1].content; 73 | 74 | const model = new ChatOpenAI({ 75 | temperature: 0.7, 76 | modelName: (body.annotations?body.annotations[0]:undefined) || body.previewToken.llm_model || 'gpt-3.5-turbo-0125', 77 | openAIApiKey: body.previewToken.llm_api_key, 78 | configuration: { baseURL:body.previewToken.llm_base_url }, 79 | maxTokens: 4096, 80 | streaming: true 81 | }); 82 | 83 | const client = createClient( 84 | process.env.SUPABASE_URL!, 85 | process.env.SUPABASE_PRIVATE_KEY!, 86 | ); 87 | const vectorstore = new SupabaseVectorStore(new OpenAIEmbeddings({openAIApiKey:body.previewToken.llm_api_key,configuration: { baseURL:body.previewToken.llm_base_url }}), { 88 | client, 89 | tableName: "documents", 90 | queryName: "match_documents", 91 | }); 92 | 93 | /** 94 | * We use LangChain Expression Language to compose two chains. 95 | * To learn more, see the guide here: 96 | * 97 | * https://js.langchain.com/docs/guides/expression_language/cookbook 98 | * 99 | * You can also use the "createRetrievalChain" method with a 100 | * "historyAwareRetriever" to get something prebaked. 
101 | */ 102 | const standaloneQuestionChain = RunnableSequence.from([ 103 | condenseQuestionPrompt, 104 | model, 105 | new StringOutputParser(), 106 | ]); 107 | 108 | let resolveWithDocuments: (value: Document[]) => void; 109 | const documentPromise = new Promise((resolve) => { 110 | resolveWithDocuments = resolve; 111 | }); 112 | 113 | const retriever = vectorstore.asRetriever({ 114 | callbacks: [ 115 | { 116 | handleRetrieverEnd(documents) { 117 | resolveWithDocuments(documents); 118 | }, 119 | }, 120 | ], 121 | }); 122 | 123 | const retrievalChain = retriever.pipe(combineDocumentsFn); 124 | 125 | const answerChain = RunnableSequence.from([ 126 | { 127 | context: RunnableSequence.from([ 128 | (input) => input.question, 129 | retrievalChain, 130 | ]), 131 | chat_history: (input) => input.chat_history, 132 | question: (input) => input.question, 133 | }, 134 | answerPrompt, 135 | model, 136 | ]); 137 | 138 | const conversationalRetrievalQAChain = RunnableSequence.from([ 139 | { 140 | question: standaloneQuestionChain, 141 | chat_history: (input) => input.chat_history, 142 | }, 143 | answerChain, 144 | new BytesOutputParser(), 145 | ]); 146 | 147 | const stream = await conversationalRetrievalQAChain.stream({ 148 | question: currentMessageContent, 149 | chat_history: formatVercelMessages(previousMessages), 150 | }); 151 | 152 | const documents = await documentPromise; 153 | const serializedSources = Buffer.from( 154 | JSON.stringify( 155 | documents.map((doc) => { 156 | return { 157 | pageContent: doc.pageContent.slice(0, 50) + "...", 158 | metadata: doc.metadata, 159 | }; 160 | }), 161 | ), 162 | ).toString("base64"); 163 | 164 | return new StreamingTextResponse(stream, { 165 | headers: { 166 | "x-message-index": (previousMessages.length + 1).toString(), 167 | "x-sources": serializedSources, 168 | }, 169 | }); 170 | } catch (e: any) { 171 | console.log(e); 172 | return NextResponse.json({ error: e.message }, { status: e.status ?? 500 }); 173 | } 174 | } -------------------------------------------------------------------------------- /app/api/retrieval/ingest/route.ts: -------------------------------------------------------------------------------- 1 | import { NextRequest, NextResponse } from "next/server"; 2 | import { RecursiveCharacterTextSplitter } from "langchain/text_splitter"; 3 | 4 | import { createClient } from "@supabase/supabase-js"; 5 | import { SupabaseVectorStore } from "@langchain/community/vectorstores/supabase"; 6 | import { OpenAIEmbeddings } from "@langchain/openai"; 7 | 8 | 9 | export const runtime = "edge"; 10 | 11 | // Before running, follow set-up instructions at 12 | // https://js.langchain.com/docs/modules/indexes/vector_stores/integrations/supabase 13 | 14 | /** 15 | * This handler takes input text, splits it into chunks, and embeds those chunks 16 | * into a vector store for later retrieval. 
See the following docs for more information: 17 | * 18 | * https://js.langchain.com/docs/modules/data_connection/document_transformers/text_splitters/recursive_text_splitter 19 | * https://js.langchain.com/docs/modules/data_connection/vectorstores/integrations/supabase 20 | */ 21 | export async function POST(req: NextRequest) { 22 | const body = await req.json(); 23 | const text = body.text; 24 | 25 | if (process.env.NEXT_PUBLIC_DEMO === "true") { 26 | return NextResponse.json( 27 | { 28 | error: [ 29 | "Ingest is not supported in demo mode.", 30 | "Please set up your own version of the repo here: https://github.com/langchain-ai/langchain-nextjs-template", 31 | ].join("\n"), 32 | }, 33 | { status: 403 }, 34 | ); 35 | } 36 | 37 | try { 38 | const client = createClient( 39 | process.env.SUPABASE_URL!, 40 | process.env.SUPABASE_PRIVATE_KEY!, 41 | ); 42 | 43 | const splitter = RecursiveCharacterTextSplitter.fromLanguage("markdown", { 44 | chunkSize: 256, 45 | chunkOverlap: 20, 46 | }); 47 | 48 | const splitDocuments = await splitter.createDocuments([text]); 49 | 50 | const vectorstore = await SupabaseVectorStore.fromDocuments( 51 | splitDocuments, 52 | new OpenAIEmbeddings( {openAIApiKey:body.llm_api_key,configuration: { baseURL:body.llm_base_url }}), 53 | { 54 | client, 55 | tableName: "documents", 56 | queryName: "match_documents", 57 | }, 58 | ); 59 | 60 | return NextResponse.json({ ok: true }, { status: 200 }); 61 | } catch (e: any) { 62 | return NextResponse.json({ error: e.message }, { status: 500 }); 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /app/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/etrobot/next-langchain-tauri/fffbcdf1da5c71fb18eb444a3f0b5853011e9517/app/favicon.ico -------------------------------------------------------------------------------- /app/globals.css: -------------------------------------------------------------------------------- 1 | @tailwind base; 2 | @tailwind components; 3 | @tailwind utilities; 4 | 5 | 6 | @layer base { 7 | :root { 8 | --background: 0 0% 100%; 9 | --foreground: 240 10% 3.9%; 10 | --card: 0 0% 100%; 11 | --card-foreground: 240 10% 3.9%; 12 | --popover: 0 0% 100%; 13 | --popover-foreground: 240 10% 3.9%; 14 | --primary: 240 5.9% 10%; 15 | --primary-foreground: 0 0% 98%; 16 | --secondary: 240 4.8% 95.9%; 17 | --secondary-foreground: 240 5.9% 10%; 18 | --muted: 240 4.8% 95.9%; 19 | --muted-foreground: 240 3.8% 46.1%; 20 | --accent: 240 4.8% 95.9%; 21 | --accent-foreground: 240 5.9% 10%; 22 | --destructive: 0 84.2% 60.2%; 23 | --destructive-foreground: 0 0% 98%; 24 | --border: 240 5.9% 90%; 25 | --input: 240 5.9% 90%; 26 | --ring: 240 5.9% 10%; 27 | --radius: 0.5rem; 28 | } 29 | 30 | .dark { 31 | --background: 240 10% 3.9%; 32 | --foreground: 0 0% 98%; 33 | --card: 240 10% 3.9%; 34 | --card-foreground: 0 0% 98%; 35 | --popover: 240 10% 3.9%; 36 | --popover-foreground: 0 0% 98%; 37 | --primary: 0 0% 98%; 38 | --primary-foreground: 240 5.9% 10%; 39 | --secondary: 240 3.7% 15.9%; 40 | --secondary-foreground: 0 0% 98%; 41 | --muted: 240 3.7% 15.9%; 42 | --muted-foreground: 240 5% 64.9%; 43 | --accent: 240 3.7% 15.9%; 44 | --accent-foreground: 0 0% 98%; 45 | --destructive: 0 62.8% 30.6%; 46 | --destructive-foreground: 0 0% 98%; 47 | --border: 240 3.7% 15.9%; 48 | --input: 240 3.7% 15.9%; 49 | --ring: 240 4.9% 83.9%; 50 | } 51 | } 52 | 53 | 54 | @layer base { 55 | * { 56 | @apply border-border; 57 | } 58 | body { 59 
| @apply bg-background text-foreground; 60 | } 61 | } 62 | 63 | .title { 64 | background: linear-gradient(to right, orange,#10b981); 65 | -webkit-background-clip: text; 66 | color: transparent; 67 | display: inline-block; 68 | } -------------------------------------------------------------------------------- /app/layout.tsx: -------------------------------------------------------------------------------- 1 | import { Toaster } from 'react-hot-toast' 2 | import { GeistSans } from 'geist/font/sans' 3 | import { GeistMono } from 'geist/font/mono' 4 | 5 | import '@/app/globals.css' 6 | import { cn } from '@/lib/utils' 7 | import { Providers } from '@/components/providers' 8 | import Header from '@/components/header' 9 | 10 | export const metadata = { 11 | metadataBase: new URL(`http://${process.env.VERCEL_URL}`), 12 | title: { 13 | default: 'Next.js AI Chatbot', 14 | template: `%s - Next.js AI Chatbot` 15 | }, 16 | description: 'An AI-powered chatbot template built with Next.js and Vercel.', 17 | icons: { 18 | icon: '/favicon.ico', 19 | shortcut: '/favicon-16x16.png', 20 | apple: '/apple-touch-icon.png' 21 | } 22 | } 23 | 24 | export const viewport = { 25 | themeColor: [ 26 | { media: '(prefers-color-scheme: light)', color: 'white' }, 27 | { media: '(prefers-color-scheme: dark)', color: 'black' } 28 | ] 29 | } 30 | 31 | interface RootLayoutProps { 32 | children: React.ReactNode 33 | } 34 | 35 | export default function RootLayout({ children }: RootLayoutProps) { 36 | return ( 37 | 38 | 45 | 46 | 52 |
53 |
54 |
{children}
55 |
56 | {/* */} 57 |
58 | 59 | 60 | ) 61 | } 62 | -------------------------------------------------------------------------------- /build-server.ts: -------------------------------------------------------------------------------- 1 | import * as esbuild from 'esbuild'; 2 | 3 | (async () => { 4 | await esbuild.build({ 5 | entryPoints: ['./server/server.ts'], 6 | bundle: true, 7 | platform: 'node', 8 | target: ['node20.0'], 9 | outfile: 'build/server.js', 10 | plugins: [], 11 | }); 12 | })(); 13 | -------------------------------------------------------------------------------- /components.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://ui.shadcn.com/schema.json", 3 | "style": "default", 4 | "rsc": true, 5 | "tsx": true, 6 | "tailwind": { 7 | "config": "tailwind.config.ts", 8 | "css": "app/globals.css", 9 | "baseColor": "zinc", 10 | "cssVariables": true, 11 | "prefix": "" 12 | }, 13 | "aliases": { 14 | "components": "@/components", 15 | "utils": "@/lib/utils" 16 | } 17 | } -------------------------------------------------------------------------------- /components/AppLayout/index.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | 3 | export type AppLayoutProps = { 4 | children?: React.ReactNode; 5 | } 6 | 7 | export default function AppLayout(props: AppLayoutProps) { 8 | return ( 9 |
10 | {props.children} 11 |
12 | ) 13 | } 14 | -------------------------------------------------------------------------------- /components/AppLayout/style.css: -------------------------------------------------------------------------------- 1 | .app-layout { 2 | display: flex; 3 | flex: 1; 4 | width: 100%; 5 | height: 100%; 6 | } 7 | -------------------------------------------------------------------------------- /components/HomeBlock/index.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | 3 | export type HomeBlockProps = { 4 | url: string, 5 | title: string, 6 | description: string, 7 | }; 8 | 9 | const HomeBlock = (props: HomeBlockProps) => ( 10 | 11 |
12 |

📚 {props.title}

13 |

{props.description}

14 |
15 |
16 | ) 17 | 18 | export default HomeBlock; 19 | -------------------------------------------------------------------------------- /components/HomeBlock/style.css: -------------------------------------------------------------------------------- 1 | .home-block-anchor { 2 | text-decoration: none; 3 | border-radius: 8px; 4 | border: 1px solid #E9EBEE; 5 | padding: 0px 8px; 6 | width: 200px; 7 | height: fit-content; 8 | color: black; 9 | } 10 | 11 | .home-block-anchor:hover { 12 | transform: scale(1.1); 13 | background-color: #E5E7EB; 14 | } 15 | 16 | .home-block { 17 | display: flex; 18 | flex-direction: column; 19 | } 20 | 21 | .home-block-title { 22 | font-size: 18px; 23 | margin: 8px 0px 0px 0px; 24 | } 25 | 26 | .home-block-description { 27 | font-size: 16px; 28 | } 29 | -------------------------------------------------------------------------------- /components/button-scroll-to-bottom.tsx: -------------------------------------------------------------------------------- 1 | 'use client' 2 | 3 | import * as React from 'react' 4 | 5 | import { cn } from '@/lib/utils' 6 | import { useAtBottom } from '@/lib/hooks/use-at-bottom' 7 | import { Button, type ButtonProps } from '@/components/ui/button' 8 | import { IconArrowDown } from '@/components/ui/icons' 9 | 10 | export function ButtonScrollToBottom({ className, ...props }: ButtonProps) { 11 | const isAtBottom = useAtBottom() 12 | 13 | return ( 14 | 33 | ) 34 | } 35 | -------------------------------------------------------------------------------- /components/chat-history.tsx: -------------------------------------------------------------------------------- 1 | import * as React from 'react' 2 | 3 | import Link from 'next/link' 4 | 5 | import { cn } from '@/lib/utils' 6 | import { SidebarList } from '@/components/sidebar-list' 7 | import { buttonVariants } from '@/components/ui/button' 8 | import { IconPlus } from '@/components/ui/icons' 9 | 10 | interface ChatHistoryProps { 11 | userId?: string 12 | } 13 | 14 | export function ChatHistory({ userId }: ChatHistoryProps) { 15 | return ( 16 |
17 | 20 | {Array.from({ length: 10 }).map((_, i) => ( 21 |
25 | ))} 26 |
27 | } 28 | > 29 | {/* @ts-ignore */} 30 | 31 |
32 |
33 | ) 34 | } 35 | -------------------------------------------------------------------------------- /components/chat-list.tsx: -------------------------------------------------------------------------------- 1 | import { type Message } from 'ai' 2 | 3 | import { Separator } from '@/components/ui/separator' 4 | import { ChatMessage } from '@/components/chat-message' 5 | 6 | export interface ChatList { 7 | messages: Message[] 8 | } 9 | 10 | export function ChatList({ messages }: ChatList) { 11 | // if (!messages.length) { 12 | // return null 13 | // } 14 | 15 | return ( 16 |
17 | {messages.map((message, index) => ( 18 |
19 | 20 | {index < messages.length - 1 && ( 21 | 22 | )} 23 |
24 | ))} 25 |
26 | ) 27 | } 28 | -------------------------------------------------------------------------------- /components/chat-message.tsx: -------------------------------------------------------------------------------- 1 | // Inspired by Chatbot-UI and modified to fit the needs of this project 2 | // @see https://github.com/mckaywrigley/chatbot-ui/blob/main/components/Chat/ChatMessage.tsx 3 | 4 | import { Message } from 'ai' 5 | import remarkGfm from 'remark-gfm' 6 | import remarkMath from 'remark-math' 7 | 8 | import { cn } from '@/lib/utils' 9 | import { CodeBlock } from '@/components/ui/codeblock' 10 | import { MemoizedReactMarkdown } from '@/components/markdown' 11 | import { IconOpenAI, IconUser } from '@/components/ui/icons' 12 | import { ChatMessageActions } from '@/components/chat-message-actions' 13 | import rehypeExternalLinks from "rehype-external-links"; 14 | 15 | export interface ChatMessageProps { 16 | message: Message 17 | } 18 | 19 | export function ChatMessage({ message, ...props }: ChatMessageProps) { 20 | return ( 21 |
25 |
33 | {message.role === 'user' ? : } 34 |
35 |
36 | {children}

43 | }, 44 | code({children, className, node, ...props }) { 45 | if(node?.children[0]){ 46 | const child = node.children[0] 47 | if (child.data == '▍') { 48 | return ( 49 | 50 | ) 51 | } 52 | } 53 | 54 | const match = /language-(\w+)/.exec(className || '') 55 | 56 | if(node?.properties.inline === true) { 57 | return ( 58 | 59 | {children} 60 | 61 | ) 62 | } 63 | 64 | return ( 65 | 71 | ) 72 | } 73 | }} 74 | > 75 | {message.content} 76 |
77 | 78 |
79 |
80 | ) 81 | } -------------------------------------------------------------------------------- /components/chat-scroll-anchor.tsx: -------------------------------------------------------------------------------- 1 | 'use client' 2 | 3 | import * as React from 'react' 4 | import { useInView } from 'react-intersection-observer' 5 | 6 | import { useAtBottom } from '@/lib/hooks/use-at-bottom' 7 | 8 | interface ChatScrollAnchorProps { 9 | trackVisibility?: boolean 10 | } 11 | 12 | export function ChatScrollAnchor({ trackVisibility }: ChatScrollAnchorProps) { 13 | const isAtBottom = useAtBottom() 14 | const { ref, entry, inView } = useInView({ 15 | trackVisibility, 16 | delay: 100, 17 | rootMargin: '0px 0px -150px 0px' 18 | }) 19 | 20 | React.useEffect(() => { 21 | if (isAtBottom && trackVisibility && !inView) { 22 | entry?.target.scrollIntoView({ 23 | block: 'start' 24 | }) 25 | } 26 | }, [inView, entry, isAtBottom, trackVisibility]) 27 | 28 | return
29 | } 30 | -------------------------------------------------------------------------------- /components/clear-history.tsx: -------------------------------------------------------------------------------- 1 | 'use client' 2 | 3 | import * as React from 'react' 4 | import { useRouter } from 'next/navigation' 5 | import { toast } from 'react-hot-toast' 6 | 7 | import { ServerActionResult } from '@/lib/types' 8 | import { Button } from '@/components/ui/button' 9 | import { 10 | AlertDialog, 11 | AlertDialogAction, 12 | AlertDialogCancel, 13 | AlertDialogContent, 14 | AlertDialogDescription, 15 | AlertDialogFooter, 16 | AlertDialogHeader, 17 | AlertDialogTitle, 18 | AlertDialogTrigger 19 | } from '@/components/ui/alert-dialog' 20 | import { IconSpinner } from '@/components/ui/icons' 21 | 22 | interface ClearHistoryProps { 23 | isEnabled: boolean 24 | } 25 | 26 | export function ClearHistory({ 27 | isEnabled = false 28 | }: ClearHistoryProps) { 29 | const [open, setOpen] = React.useState(false) 30 | const [isPending, startTransition] = React.useTransition() 31 | const router = useRouter() 32 | 33 | return ( 34 | 35 | 36 | 40 | 41 | 42 | 43 | Are you absolutely sure? 44 | 45 | This will permanently delete your chat history and remove your data 46 | from our servers. 47 | 48 | 49 | 50 | Cancel 51 | { 54 | event.preventDefault() 55 | startTransition(() => { 56 | const chats = Object.keys(localStorage).filter(key => key.startsWith('cid_')); 57 | chats.forEach(key => { 58 | localStorage.removeItem(key); 59 | }); 60 | setOpen(false) 61 | router.replace('/') 62 | router.refresh() 63 | toast.success('Chat deleted') 64 | }) 65 | }} 66 | > 67 | {isPending && } 68 | Delete 69 | 70 | 71 | 72 | 73 | ) 74 | } 75 | -------------------------------------------------------------------------------- /components/collapsible-message.tsx: -------------------------------------------------------------------------------- 1 | import React, { useEffect, useState } from 'react' 2 | import { 3 | Collapsible, 4 | CollapsibleTrigger, 5 | CollapsibleContent 6 | } from '@radix-ui/react-collapsible' 7 | import { Button } from './ui/button' 8 | import { ChevronDown } from 'lucide-react' 9 | import { StreamableValue, useStreamableValue } from 'ai/rsc' 10 | import { cn } from '@/lib/utils' 11 | import { Separator } from './ui/separator' 12 | 13 | interface CollapsibleMessageProps { 14 | message: { 15 | id: number 16 | isCollapsed?: StreamableValue 17 | component: React.ReactNode 18 | } 19 | isLastMessage?: boolean 20 | } 21 | 22 | export const CollapsibleMessage: React.FC = ({ 23 | message, 24 | isLastMessage = false 25 | }) => { 26 | const [data] = useStreamableValue(message.isCollapsed) 27 | const isCollapsed = data ?? false 28 | const [open, setOpen] = useState(isLastMessage) 29 | 30 | useEffect(() => { 31 | setOpen(isLastMessage) 32 | }, [isCollapsed, isLastMessage]) 33 | 34 | // if not collapsed, return the component 35 | if (!isCollapsed) { 36 | return message.component 37 | } 38 | 39 | return ( 40 | { 43 | setOpen(value) 44 | }} 45 | > 46 | 47 |
53 | 66 |
67 |
68 | {message.component} 69 | {!open && } 70 |
71 | ) 72 | } -------------------------------------------------------------------------------- /components/copilot.tsx: -------------------------------------------------------------------------------- 1 | 'use client' 2 | 3 | import React, { useEffect, useState } from 'react' 4 | import { PartialInquiry } from '@/lib/schema/inquiry' 5 | import { Input } from './ui/input' 6 | import { Checkbox } from './ui/checkbox' 7 | import { Button } from './ui/button' 8 | import { Card } from './ui/card' 9 | import { ArrowRight, Check, FastForward, Sparkles } from 'lucide-react' 10 | import { useActions, useStreamableValue, useUIState } from 'ai/rsc' 11 | import { AI } from '@/app/action' 12 | import { cn } from '@/lib/utils' 13 | import {useSetting} from '@/lib/hooks/use-setting' 14 | 15 | export type CopilotProps = { 16 | inquiry?: PartialInquiry 17 | } 18 | 19 | export const Copilot: React.FC = ({ inquiry }: CopilotProps) => { 20 | const [completed, setCompleted] = useState(false) 21 | const [query, setQuery] = useState('') 22 | const [skipped, setSkipped] = useState(false) 23 | const [data, error, pending] = useStreamableValue(inquiry) 24 | const [checkedOptions, setCheckedOptions] = useState<{ 25 | [key: string]: boolean 26 | }>({}) 27 | const [isButtonDisabled, setIsButtonDisabled] = useState(true) 28 | const [messages, setMessages] = useUIState() 29 | const { submit } = useActions() 30 | const [keys, setKeys] = useSetting(); 31 | const handleInputChange = (event: React.ChangeEvent) => { 32 | setQuery(event.target.value) 33 | checkIfButtonShouldBeEnabled() 34 | } 35 | 36 | const handleOptionChange = (selectedOption: string) => { 37 | const updatedCheckedOptions = { 38 | ...checkedOptions, 39 | [selectedOption]: !checkedOptions[selectedOption] 40 | } 41 | setCheckedOptions(updatedCheckedOptions) 42 | checkIfButtonShouldBeEnabled(updatedCheckedOptions) 43 | } 44 | 45 | const checkIfButtonShouldBeEnabled = (currentOptions = checkedOptions) => { 46 | const anyCheckboxChecked = Object.values(currentOptions).some( 47 | checked => checked 48 | ) 49 | setIsButtonDisabled(!(anyCheckboxChecked || query)) 50 | } 51 | 52 | const updatedQuery = () => { 53 | const selectedOptions = Object.entries(checkedOptions) 54 | .filter(([, checked]) => checked) 55 | .map(([option]) => option) 56 | return [...selectedOptions, query].filter(Boolean).join(', ') 57 | } 58 | 59 | useEffect(() => { 60 | checkIfButtonShouldBeEnabled() 61 | // eslint-disable-next-line react-hooks/exhaustive-deps 62 | }, [query]) 63 | 64 | const onFormSubmit = async ( 65 | e: React.FormEvent, 66 | skip?: boolean 67 | ) => { 68 | e.preventDefault() 69 | setCompleted(true) 70 | setSkipped(skip || false) 71 | 72 | const formData = skip 73 | ? undefined 74 | : new FormData(e.target as HTMLFormElement) 75 | 76 | const apikeys = {'llm_api_key':keys.current.llm_api_key,'llm_base_url':keys.current.llm_base_url,'llm_model':keys.current.llm_model,'tavilyserp_api_key':keys.current.tavilyserp_api_key} 77 | const responseMessage = await submit(formData, skip,apikeys) 78 | setMessages(currentMessages => [...currentMessages, responseMessage]) 79 | } 80 | 81 | const handleSkip = (e: React.MouseEvent) => { 82 | onFormSubmit(e as unknown as React.FormEvent, true) 83 | } 84 | 85 | if (error) { 86 | return ( 87 | 88 |
89 | 90 |
91 | {`error: ${error}`} 92 |
93 |
94 |
95 | ) 96 | } 97 | 98 | if (skipped) { 99 | return null 100 | } 101 | 102 | if (completed) { 103 | return ( 104 | 105 |
106 |
107 | {updatedQuery()} 108 |
109 |
110 | 111 |
112 | ) 113 | } else { 114 | return ( 115 | 116 |
117 | 118 |

119 | {data?.question} 120 |

121 |
122 |
123 |
124 | {data?.options?.map((option: any, index: number) => ( 125 |
129 | 133 | handleOptionChange(option?.label as string) 134 | } 135 | /> 136 | 142 |
143 | ))} 144 |
145 | {data?.allowsInput && ( 146 |
147 | 150 | 159 |
160 | )} 161 |
162 | 171 | 175 |
176 |
177 |
178 | ) 179 | } 180 | } 181 | -------------------------------------------------------------------------------- /components/empty-screen.tsx: -------------------------------------------------------------------------------- 1 | import { Button } from '@/components/ui/button' 2 | import { ArrowRight } from 'lucide-react' 3 | 4 | const exampleMessages = [ 5 | { 6 | heading: 'Why is Nvidia growing rapidly?', 7 | message: 'Why is Nvidia growing rapidly?' 8 | }, 9 | { 10 | heading: 'Is the Apple Vision Pro worth buying?', 11 | message: 'Is the Apple Vision Pro worth buying?' 12 | }, 13 | { 14 | heading: 'How does the Vercel AI SDK work?', 15 | message: 'How does the Vercel AI SDK work?' 16 | }, 17 | { 18 | heading: 'Tesla vs Rivian', 19 | message: 'Tesla vs Rivian' 20 | } 21 | ] 22 | export function EmptyScreen({ 23 | submitMessage, 24 | className 25 | }: { 26 | submitMessage: (message: string) => void 27 | className?: string 28 | }) { 29 | return ( 30 |
31 |
32 |
33 | {exampleMessages.map((message, index) => ( 34 | 46 | ))} 47 |
48 |
49 |
50 | ) 51 | } -------------------------------------------------------------------------------- /components/external-link.tsx: -------------------------------------------------------------------------------- 1 | export function ExternalLink({ 2 | href, 3 | children 4 | }: { 5 | href: string 6 | children: React.ReactNode 7 | }) { 8 | return ( 9 | 14 | {children} 15 | 27 | 28 | ) 29 | } 30 | -------------------------------------------------------------------------------- /components/followup-panel.tsx: -------------------------------------------------------------------------------- 1 | 'use client' 2 | 3 | import { useState } from 'react' 4 | import { Button } from './ui/button' 5 | import { Input } from './ui/input' 6 | import { useActions, useUIState } from 'ai/rsc' 7 | import type { AI } from '@/app/action' 8 | import { UserMessage } from './user-message' 9 | import { ArrowRight } from 'lucide-react' 10 | import {useSetting} from '@/lib/hooks/use-setting' 11 | 12 | export function FollowupPanel() { 13 | const [keys, setKeys] = useSetting(); 14 | const [input, setInput] = useState('') 15 | const { submit } = useActions() 16 | const [, setMessages] = useUIState() 17 | 18 | const handleSubmit = async (event: React.FormEvent) => { 19 | event.preventDefault() 20 | const formData = new FormData(event.currentTarget as HTMLFormElement) 21 | 22 | const userMessage = { 23 | id: Date.now(), 24 | isGenerating: false, 25 | component: 26 | } 27 | const apikeys = {'llm_api_key':keys.current.llm_api_key,'llm_base_url':keys.current.llm_base_url,'llm_model':keys.current.llm_model,'tavilyserp_api_key':keys.current.tavilyserp_api_key} 28 | const skip=undefined 29 | const responseMessage = await submit(formData,skip,apikeys) 30 | setMessages(currentMessages => [ 31 | ...currentMessages, 32 | userMessage, 33 | responseMessage 34 | ]) 35 | 36 | setInput('') 37 | } 38 | 39 | return ( 40 |
44 | setInput(e.target.value)} 51 | /> 52 | 61 |
62 | ) 63 | } 64 | -------------------------------------------------------------------------------- /components/footer.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react' 2 | 3 | import { cn } from '@/lib/utils' 4 | import { ExternalLink } from '@/components/external-link' 5 | 6 | export function FooterText({ className, ...props }: React.ComponentProps<'p'>) { 7 | return ( 8 |

15 | Input @ to mention agents. 16 |

17 | ) 18 | } 19 | -------------------------------------------------------------------------------- /components/fortune.tsx: -------------------------------------------------------------------------------- 1 | 'use client' 2 | import { useEffect, useState, useRef } from 'react' 3 | import type { AI } from '@/app/action' 4 | import {StreamableValue, useUIState, useActions, useAIState } from 'ai/rsc' 5 | import { cn } from '@/lib/utils' 6 | import { UserMessage } from './user-message' 7 | import { Input } from './ui/input' 8 | import { Button } from './ui/button' 9 | import { ArrowRight, Plus, Square } from 'lucide-react' 10 | import { EmptyScreen } from './empty-screen' 11 | import {useSetting} from '@/lib/hooks/use-setting' 12 | import { CollapsibleMessage } from './collapsible-message' 13 | 14 | export function Fortune() { 15 | const [input, setInput] = useState('') 16 | const [messages, setMessages] = useUIState() 17 | const [aiMessages, setAiMessages] = useAIState() 18 | const { submit } = useActions() 19 | const [isButtonPressed, setIsButtonPressed] = useState(false) 20 | const inputRef = useRef(null) 21 | const [showEmptyScreen, setShowEmptyScreen] = useState(false) 22 | const [keys, setKeys] = useSetting(); 23 | // Focus on input when button is pressed 24 | useEffect(() => { 25 | if (isButtonPressed) { 26 | inputRef.current?.focus() 27 | setIsButtonPressed(false) 28 | } 29 | }, [isButtonPressed]) 30 | 31 | const handleSubmit = async (e: React.FormEvent) => { 32 | e.preventDefault() 33 | 34 | // Clear messages if button is pressed 35 | if (isButtonPressed) { 36 | handleClear() 37 | setIsButtonPressed(false) 38 | } 39 | 40 | // Add user message to UI state 41 | setMessages(currentMessages => [ 42 | ...currentMessages, 43 | { 44 | id: Date.now(), 45 | component: 46 | } 47 | ]) 48 | 49 | // Submit and get response message 50 | const formData = new FormData(e.currentTarget) 51 | const apikeys = {'llm_api_key':keys.current.llm_api_key,'llm_base_url':keys.current.llm_base_url,'llm_model':keys.current.llm_model,'tavilyserp_api_key':keys.current.tavilyserp_api_key} 52 | var skip=undefined 53 | const responseMessage = await submit(formData,skip,apikeys) 54 | setMessages(currentMessages => [...currentMessages, responseMessage as any]) 55 | 56 | setInput('') 57 | } 58 | 59 | // Clear messages 60 | const handleClear = () => { 61 | setIsButtonPressed(true) 62 | setMessages([]) 63 | setAiMessages([]) 64 | } 65 | 66 | useEffect(() => { 67 | // focus on input when the page loads 68 | inputRef.current?.focus() 69 | }, []) 70 | 71 | // If there are messages and the new button has not been pressed, display the new Button 72 | if (messages.length > 0 && !isButtonPressed) { 73 | return ( 74 |
75 | {messages.map( 76 | (message: { 77 | id: number 78 | component: React.ReactNode 79 | isCollapsed?: StreamableValue 80 | }) => ( 81 | 86 | ) 87 | )} 88 |
89 | 100 |
101 |
102 | ) 103 | } 104 | 105 | // Condition 1 and 3: If there are no messages or the button is pressed, display the form 106 | const formPositionClass = 107 | messages.length === 0 108 | ? 'fixed bottom-8 left-0 right-0 top-10 mx-auto h-screen flex flex-col items-center justify-center' 109 | : 'fixed bottom-8-ml-6' 110 | return ( 111 |
112 | {/* */} 113 |
114 |
115 | { 123 | setInput(e.target.value) 124 | setShowEmptyScreen(e.target.value.length === 0) 125 | }} 126 | onFocus={() => setShowEmptyScreen(true)} 127 | onBlur={() => setShowEmptyScreen(false)} 128 | /> 129 | 138 |
139 | { 141 | setInput(message) 142 | }} 143 | className={cn(showEmptyScreen ? 'visible' : 'invisible')} 144 | /> 145 | 146 |
147 | ) 148 | } 149 | -------------------------------------------------------------------------------- /components/markdown.tsx: -------------------------------------------------------------------------------- 1 | import { FC, memo } from 'react' 2 | import ReactMarkdown, { Options } from 'react-markdown' 3 | 4 | export const MemoizedReactMarkdown: FC = memo( 5 | ReactMarkdown, 6 | (prevProps, nextProps) => 7 | prevProps.children === nextProps.children && 8 | prevProps.className === nextProps.className 9 | ) 10 | -------------------------------------------------------------------------------- /components/message.tsx: -------------------------------------------------------------------------------- 1 | 'use client' 2 | 3 | import { StreamableValue, useStreamableValue } from 'ai/rsc' 4 | import { MemoizedReactMarkdown } from './ui/markdown' 5 | 6 | export function BotMessage({ 7 | content 8 | }: { 9 | content: string | StreamableValue 10 | }) { 11 | const [data, error, pending] = useStreamableValue(content) 12 | 13 | // Currently, sometimes error occurs after finishing the stream. 14 | if (error) return
Error
15 | 16 | return ( 17 | 18 | {data} 19 | 20 | ) 21 | } 22 | -------------------------------------------------------------------------------- /components/prompt-form.tsx: -------------------------------------------------------------------------------- 1 | import Textarea from 'react-textarea-autosize' 2 | import { UseChatHelpers } from 'ai/react' 3 | import { cn } from '@/lib/utils' 4 | import { Button, buttonVariants } from '@/components/ui/button' 5 | import { 6 | Tooltip, 7 | TooltipContent, 8 | TooltipTrigger 9 | } from '@/components/ui/tooltip' 10 | import { IconArrowElbow,IconPlus } from '@/components/ui/icons' 11 | import { useRouter } from 'next/navigation' 12 | import { useState, useRef, useEffect } from 'react' 13 | 14 | import { FooterText } from '@/components/footer' 15 | import { Agent } from '@/components/agents' 16 | import { 17 | Command, 18 | CommandGroup, 19 | CommandItem, 20 | } from "@/components/ui/command" 21 | 22 | export interface PromptProps 23 | extends Pick { 24 | onSubmit: (value: string) => void 25 | isLoading: boolean, 26 | agents: any, 27 | } 28 | export function PromptForm({ 29 | onSubmit, 30 | input, 31 | setInput, 32 | isLoading, 33 | agents, 34 | }: PromptProps) { 35 | const formRef = useRef(null); 36 | const inputRef = useRef(null) 37 | 38 | const router = useRouter() 39 | 40 | const [showPopup, setshowPopup] = useState(false); 41 | const onKeyDown = (e: React.KeyboardEvent) => { 42 | if (e.key === 'Enter' && !e.shiftKey) { 43 | e.preventDefault(); 44 | if (!input?.trim()) { 45 | return; 46 | } 47 | setInput(''); 48 | formRef.current?.requestSubmit() 49 | } else if (e.key === 'Enter' && e.shiftKey) { 50 | setInput(input + '\n'); 51 | } 52 | }; 53 | 54 | const handleInputChange = (e: React.ChangeEvent) => { 55 | const value = e.target.value; 56 | setInput(value) 57 | if (value.split(' ')[0] === '@' || value === '@') { 58 | setshowPopup(true); 59 | } else { 60 | setshowPopup(false); 61 | } 62 | }; 63 | 64 | return ( 65 | <> 66 | 67 |
{ 69 | e.preventDefault() 70 | if (!input?.trim()) { 71 | return 72 | } 73 | setInput('') 74 | await onSubmit(input) 75 | }} 76 | ref={formRef} 77 | > 78 | {showPopup && 79 | 80 | {Object.entries(agents).filter(([key, agent]) => (agent as unknown as Agent).pin === true).map(([key, agent]) => ( 81 | { 85 | setInput(`@${(agent as Agent).name} ` + input.slice(1)) 86 | setshowPopup(false) 87 | } 88 | } 89 | > 90 | {'@' + (agent as Agent).name} {(agent as Agent).usetool && -usetool} 91 | 92 | ))} 93 | 94 | } 95 |
96 | 97 | 98 | 113 | 114 | New Chat 115 | 116 | 117 |