├── .env.example
├── .eslintrc.json
├── .gitignore
├── README.md
├── next.config.mjs
├── package.json
├── postcss.config.js
├── public
│   ├── bot-image.png
│   ├── next.svg
│   ├── usericon.png
│   └── vercel.svg
├── src
│   ├── app
│   │   ├── api
│   │   │   └── chat
│   │   │       └── route.ts
│   │   ├── favicon.ico
│   │   ├── globals.css
│   │   ├── layout.tsx
│   │   ├── page.tsx
│   │   ├── sign-in
│   │   │   └── [[...sign-in]]
│   │   │       └── page.tsx
│   │   └── sign-up
│   │       └── [[...sign-up]]
│   │           └── page.tsx
│   ├── components
│   │   └── ui
│   │       ├── Accordion
│   │       │   └── index.tsx
│   │       ├── LoadingDots
│   │       │   ├── index.tsx
│   │       │   └── loading-dots.module.css
│   │       └── TextArea
│   │           └── index.tsx
│   ├── config
│   │   └── env.mjs
│   ├── middleware.ts
│   ├── styles
│   │   └── Home.module.css
│   ├── types
│   │   └── chat.ts
│   └── utils
│       ├── cn.ts
│       ├── conversationLog.ts
│       ├── matches.ts
│       ├── pinecone-client.ts
│       ├── summarizer.ts
│       ├── supabase.ts
│       └── templates.ts
├── tailwind.config.js
├── tsconfig.json
└── yarn.lock

/.env.example:
--------------------------------------------------------------------------------
1 | OPENAI_API_KEY=
2 | PINECONE_API_KEY=
3 | PINECONE_ENVIRONMENT=
4 | PINECONE_INDEX_NAME=
5 | NEXT_PUBLIC_SUPABASE_URL=
6 | NEXT_PUBLIC_SUPABASE_KEY=
7 | NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY=
8 | CLERK_SECRET_KEY=
--------------------------------------------------------------------------------
/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 |   "extends": "next/core-web-vitals"
3 | }
4 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2 |
3 | # dependencies
4 | /node_modules
5 | /.pnp
6 | .pnp.js
7 |
8 | # testing
9 | /coverage
10 |
11 | # next.js
12 | /.next/
13 | /out/
14 |
15 | # production
16 | /build
17 |
18 | # misc
19 | .DS_Store
20 | *.pem
21 |
22 | # debug
23 | npm-debug.log*
24 | yarn-debug.log*
25 | yarn-error.log*
26 |
27 | # local env files
28 | .env
29 |
30 | # vercel
31 | .vercel
32 |
33 | # typescript
34 | *.tsbuildinfo
35 | next-env.d.ts
36 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Next.js Multi-User Chatbot with LangChainAI and Pinecone
2 |
3 |
4 | [![Watch the video](https://i9.ytimg.com/vi_webp/hbFuBZ7LUZY/maxresdefault.webp?v=647d1366&sqp=CPTTg6QG&rs=AOn4CLDZ9sAyvdFjiSmF1iTJNphT8jJeXA)](https://youtu.be/hbFuBZ7LUZY)
5 |
6 | ### Credit to: **Roie Schwaber-Cohen** from Pinecone. Please check out this beautiful article: [Building a Multi-User Chatbot with Langchain and Pinecone in Next.JS](https://www.pinecone.io/learn/javascript-chatbot/)
7 |
8 | ## Services used in this app
9 | - Pinecone
10 | - Supabase
11 | - Clerk
12 |
13 | # **Setup**
14 | ## 1. Clone this repository
15 | ```
16 | git clone https://github.com/tarikrazine/multi-User-Chatbot-langChain-pinecone.git
17 | ```
18 |
19 | ## 2. Install dependencies
20 | ```
21 | cd multi-User-Chatbot-langChain-pinecone
22 | yarn
23 | ```
24 |
25 | ## 3. Move your .env.example to .env
26 | ```
27 | mv .env.example .env
28 | ```
29 |
30 | ## 4. Pinecone store
31 | - Assuming you already have a Pinecone account, you will need the name of the index that holds your vector data, your Pinecone API key, and your Pinecone environment. A sketch of the client setup follows below.
32 |
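This app reads those values in `src/utils/pinecone-client.ts`, which is not included in this excerpt. A minimal sketch of what that initialization typically looks like with `@pinecone-database/pinecone` 0.1.x, assuming the variable names from `.env.example`; the actual file may differ:

```ts
import { PineconeClient } from "@pinecone-database/pinecone";

// Sketch of src/utils/pinecone-client.ts: initialize the client once and
// reuse it across requests. Variable names follow .env.example.
let pinecone: PineconeClient | null = null;

export default async function initPinecone(): Promise<PineconeClient> {
  if (pinecone) {
    return pinecone;
  }

  const client = new PineconeClient();
  await client.init({
    apiKey: process.env.PINECONE_API_KEY!,
    environment: process.env.PINECONE_ENVIRONMENT!,
  });

  pinecone = client;
  return pinecone;
}
```

`route.ts`, later in this dump, awaits `initPinecone()` and hands the client to `getMatchesFromEmbeddings` together with the query embedding.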
33 | ## 5. Supabase
34 | - Create an account
35 | - Create a new project
36 | - Add a new table named "conversations" with the following columns: user_id (type: text), entry (type: text), speaker (type: text). A sketch of the helper that reads and writes this table follows below.
37 |
38 |
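The API route later in this dump reads and writes this table through `src/utils/conversationLog.ts`, which is not included in the excerpt. A minimal sketch of such a helper with supabase-js v2, matching the columns above and the `addEntry`/`getConversation` calls made in `route.ts` (the `created_at` ordering column and the "speaker: entry" formatting are assumptions, not shown in this excerpt):

```ts
import { SupabaseClient } from "@supabase/supabase-js";

// Sketch of src/utils/conversationLog.ts: persists chat turns in the
// "conversations" table (user_id, entry, speaker). The real file may differ.
export class ConversationLog {
  constructor(private supabase: SupabaseClient) {}

  async addEntry(args: { entry: string; speaker: "user" | "ai"; userId: string }) {
    const { error } = await this.supabase
      .from("conversations")
      .insert({ entry: args.entry, speaker: args.speaker, user_id: args.userId });
    if (error) {
      throw new Error(`Unable to save the conversation entry: ${error.message}`);
    }
  }

  async getConversation(args: { limit: number }): Promise<string[]> {
    const { data, error } = await this.supabase
      .from("conversations")
      .select("entry, speaker")
      .order("created_at", { ascending: false }) // assumes Supabase's default created_at column
      .limit(args.limit);
    if (error) {
      throw new Error(`Unable to load the conversation history: ${error.message}`);
    }
    // Oldest first, formatted as "speaker: entry" lines for the prompt templates.
    return (data ?? []).map((row) => `${row.speaker}: ${row.entry}`).reverse();
  }
}
```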
39 | ## 6. Clerk
40 | - Create an account
41 | - Create a new project
42 |
43 | ## 7. Supabase and Clerk walkthrough
44 | - To use Supabase and Clerk together properly, this article will help you understand how the two services work with each other: [NextJS + Supabase + Clerk: Build a simple todo app with multifactor authentication](https://clerk.com/blog/nextjs-supabase-todos-with-multifactor-authentication). The Clerk-aware Supabase client used by this app is sketched below.
45 |
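In `src/app/api/chat/route.ts` (later in this dump) the handler calls `getToken({ template: "supabase" })` and hands the Clerk JWT to `supabaseClient(token)` from `src/utils/supabase.ts`, which is not shown here. A minimal sketch of that helper, assuming the pattern from the article above, where the token is forwarded as the `Authorization` header so Supabase row-level security can identify the Clerk user:

```ts
import { createClient } from "@supabase/supabase-js";

// Sketch of src/utils/supabase.ts: build a Supabase client that forwards the
// Clerk-issued JWT so RLS policies on "conversations" can check the user.
// The actual file may differ.
export const supabaseClient = async (supabaseAccessToken: string) => {
  const supabase = createClient(
    process.env.NEXT_PUBLIC_SUPABASE_URL!,
    process.env.NEXT_PUBLIC_SUPABASE_KEY!,
    {
      global: {
        headers: { Authorization: `Bearer ${supabaseAccessToken}` },
      },
    },
  );

  return supabase;
};
```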
46 | ## 8. Add your keys to the .env
47 | ```
48 | OPENAI_API_KEY=
49 | PINECONE_API_KEY=
50 | PINECONE_ENVIRONMENT=
51 | PINECONE_INDEX_NAME=
52 | NEXT_PUBLIC_SUPABASE_URL=
53 | NEXT_PUBLIC_SUPABASE_KEY=
54 | NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY=
55 | CLERK_SECRET_KEY=
56 | ```
57 |
58 | ## 9. Start the project
59 | ```
60 | yarn dev
61 | ```
62 |
63 | ## 10. Deploy to Vercel
64 | ```
65 | Soon
66 | ```
--------------------------------------------------------------------------------
/next.config.mjs:
--------------------------------------------------------------------------------
1 | import "./src/config/env.mjs";
2 |
3 | /** @type {import("next").NextConfig} */
4 | const nextConfig = {};
5 |
6 | export default nextConfig;
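`next.config.mjs` imports `src/config/env.mjs` (not included in this excerpt) so a missing key from step 8 fails the build early instead of surfacing at runtime. A minimal sketch of that file, assuming the usual `@t3-oss/env-nextjs` plus `zod` setup listed in `package.json`; the schema in the repository may differ:

```js
// Sketch of src/config/env.mjs; the actual schema in the repo may differ.
import { createEnv } from "@t3-oss/env-nextjs";
import { z } from "zod";

export const env = createEnv({
  server: {
    OPENAI_API_KEY: z.string().min(1),
    PINECONE_API_KEY: z.string().min(1),
    PINECONE_ENVIRONMENT: z.string().min(1),
    PINECONE_INDEX_NAME: z.string().min(1),
    CLERK_SECRET_KEY: z.string().min(1),
  },
  client: {
    NEXT_PUBLIC_SUPABASE_URL: z.string().url(),
    NEXT_PUBLIC_SUPABASE_KEY: z.string().min(1),
    NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY: z.string().min(1),
  },
  runtimeEnv: {
    OPENAI_API_KEY: process.env.OPENAI_API_KEY,
    PINECONE_API_KEY: process.env.PINECONE_API_KEY,
    PINECONE_ENVIRONMENT: process.env.PINECONE_ENVIRONMENT,
    PINECONE_INDEX_NAME: process.env.PINECONE_INDEX_NAME,
    CLERK_SECRET_KEY: process.env.CLERK_SECRET_KEY,
    NEXT_PUBLIC_SUPABASE_URL: process.env.NEXT_PUBLIC_SUPABASE_URL,
    NEXT_PUBLIC_SUPABASE_KEY: process.env.NEXT_PUBLIC_SUPABASE_KEY,
    NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY: process.env.NEXT_PUBLIC_CLERK_PUBLISHABLE_KEY,
  },
});
```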
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 |   "name": "nextjs-langchain-edge",
3 |   "version": "0.1.0",
4 |   "private": true,
5 |   "scripts": {
6 |     "dev": "next dev",
7 |     "build": "next build",
8 |     "start": "next start",
9 |     "lint": "next lint"
10 |   },
11 |   "dependencies": {
12 |     "@clerk/nextjs": "^4.18.5",
13 |     "@microsoft/fetch-event-source": "^2.0.1",
14 |     "@pinecone-database/pinecone": "^0.1.6",
15 |     "@radix-ui/react-accordion": "^1.1.1",
16 |     "@supabase/supabase-js": "^2.23.0",
17 |     "@t3-oss/env-nextjs": "^0.3.1",
18 |     "@types/node": "20.2.1",
19 |     "@types/react": "18.2.6",
20 |     "@types/react-dom": "18.2.4",
21 |     "autoprefixer": "10.4.14",
22 |     "bottleneck": "^2.19.5",
23 |     "clsx": "^1.2.1",
24 |     "eslint": "8.40.0",
25 |     "eslint-config-next": "13.4.3",
26 |     "langchain": "^0.0.83",
27 |     "lucide-react": "^0.221.0",
28 |     "next": "13.4.3",
29 |     "postcss": "8.4.23",
30 |     "react": "18.2.0",
31 |     "react-dom": "18.2.0",
32 |     "react-markdown": "^8.0.7",
33 |     "tailwind-merge": "^1.12.0",
34 |     "tailwindcss": "3.3.2",
35 |     "typescript": "5.0.4",
36 |     "zod": "^3.21.4"
37 |   },
38 |   "engines": {
39 |     "node": ">=18"
40 |   }
41 | }
42 |
--------------------------------------------------------------------------------
/postcss.config.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 |   plugins: {
3 |     tailwindcss: {},
4 |     autoprefixer: {},
5 |   },
6 | }
7 |
--------------------------------------------------------------------------------
/public/bot-image.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tarikrazine/multi-User-Chatbot-langChain-pinecone/a78f98ce6accbaa4336ad3ccad6aef69d16aab67/public/bot-image.png
--------------------------------------------------------------------------------
/public/next.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/public/usericon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tarikrazine/multi-User-Chatbot-langChain-pinecone/a78f98ce6accbaa4336ad3ccad6aef69d16aab67/public/usericon.png
--------------------------------------------------------------------------------
/public/vercel.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/app/api/chat/route.ts:
--------------------------------------------------------------------------------
1 | import { type NextRequest, NextResponse } from "next/server";
2 |
3 | import { auth } from "@clerk/nextjs";
4 | import { z } from "zod";
5 | import { ChatOpenAI } from "langchain/chat_models/openai";
6 | import { OpenAIEmbeddings } from "langchain/embeddings/openai";
7 | import { LLMChain } from "langchain/chains";
8 | import { PromptTemplate } from "langchain/prompts";
9 | import { CallbackManager } from "langchain/callbacks";
10 |
11 | import { supabaseClient } from "@/utils/supabase";
12 | import { ConversationLog } from "@/utils/conversationLog";
13 | import { templates } from "@/utils/templates";
14 | import { getMatchesFromEmbeddings } from "@/utils/matches";
15 | import initPinecone from "@/utils/pinecone-client";
16 | import { summarizeLongDocument } from "@/utils/summarizer";
17 |
18 | export const runtime = "edge";
19 |
20 | const bodySchema = z.object({
21 |   question: z.string({
22 |     required_error: "Please ask a question",
23 |   }),
24 | });
25 |
26 | type BodyInput = z.infer<typeof bodySchema>;
27 |
28 | export async function POST(request: NextRequest) {
29 |   const { userId, getToken } = auth();
30 |
31 |   const token = await getToken({ template: "supabase" });
32 |
33 |   if (!token) {
34 |     return NextResponse.json("Not authorized.", { status: 401 });
35 |   }
36 |
37 |   const body = (await request.json()) as BodyInput;
38 |
39 |   let parseData;
40 |
41 |   try {
42 |     parseData = bodySchema.parse(body);
43 |   } catch (error) {
44 |     if (error instanceof z.ZodError) {
45 |       const errorInput = error.issues.map((e) => ({
46 |         path: e.path[0],
47 |         message: e.message,
48 |       }));
49 |
50 |       return NextResponse.json({ errorInput }, { status: 400 });
51 |     }
52 |   }
53 |
54 |   const { question } = parseData as BodyInput;
55 |
56 |   const supabase = await supabaseClient(token);
57 |
58 |   // Retrieve the conversation log and save the user's prompt
59 |   const conversationLog = new ConversationLog(supabase);
60 |
61 |   let conversationHistory: void | string | string[];
62 |
63 |   try {
64 |     conversationHistory = await conversationLog.getConversation({
65 |       limit: 10,
66 |     });
67 |   } catch (error: any) {
68 |     return NextResponse.json({ error: error.message });
69 |   }
70 |
71 |   try {
72 |     await conversationLog.addEntry({
73 |       entry: question,
74 |       speaker: "user",
75 |       userId: userId!,
76 |     });
77 |   } catch (error: any) {
78 |     return NextResponse.json({ error: error.message });
79 |   }
80 |
81 |   const llm = new ChatOpenAI({ modelName: "gpt-3.5-turbo" });
82 |
83 |   const inquiryChain = new LLMChain({
84 |     llm,
85 |     prompt: new PromptTemplate({
86 |       template: templates.inquiryTemplate,
87 |       inputVariables: ["userPrompt", "conversationHistory"],
88 |     }),
89 |     verbose: true,
90 |   });
91 |
92 |   const inquiryChainResult = await inquiryChain.call({
93 |     userPrompt: question,
94 |     conversationHistory: conversationHistory,
95 |   });
96 |
97 |   const inquiry = inquiryChainResult.text;
98 |
99 |   const embedder = new OpenAIEmbeddings({
100 |     modelName: "text-embedding-ada-002",
101 |   });
102 |
103 |   const embeddings = await embedder.embedQuery(inquiry);
104 |
105 |   const pinecone = await initPinecone();
106 |
107 |   const matches = await getMatchesFromEmbeddings(embeddings, pinecone, 3);
108 |
109 |   interface Metadata {
110 |     loc: string;
111 |     pageContent: string;
112 |     txtPath: string;
113 |   }
114 |
115 |   const docs = matches && Array.from(
116 |     matches.reduce((map, match) => {
117 |       const metadata = match.metadata as Metadata;
118 |       const { pageContent, loc } = metadata;
119 |       if (!map.has(loc)) {
120 |         map.set(loc, pageContent);
121 |       }
122 |       return map;
123 |     }, new Map()),
124 |   ).map(([_, text]) => text);
125 |
126 |   const encoder = new TextEncoder();
127 |   const stream = new TransformStream();
128 |   const writer = stream.writable.getWriter();
129 |
130 |   const chat = new ChatOpenAI({
131 |     streaming: true,
132 |     verbose: true,
133 |     modelName: "gpt-3.5-turbo",
134 |     callbackManager: CallbackManager.fromHandlers({
135 |       async handleLLMNewToken(token) {
136 |         await writer.ready;
137 |         await writer.write(encoder.encode(`data: ${token}\n\n`));
138 |       },
139 |       async handleLLMEnd(result) {
140 |         await writer.ready;
141 |         await writer.close();
142 |
143 |         const textResponse = result.generations[0].map((res) => res.text);
144 |
145 |         await conversationLog.addEntry({
146 |           entry: textResponse[0],
147 |           speaker: "ai",
148 |           userId: userId!,
149 |         });
150 |       },
151 |       handleLLMError: async (e) => {
152 |         await writer.ready;
153 |         await writer.abort(e);
154 |       },
155 |     }),
156 |   });
157 |
158 |   const chain = new LLMChain({
159 |     prompt: new PromptTemplate({
160 |       template: templates.qaTemplate,
161 |       inputVariables: ["summaries", "question", "conversationHistory"],
162 |     }),
163 |     llm: chat,
164 |     verbose: true,
165 |   });
166 |
167 |   const allDocs = (docs || []).join("\n");
168 |
169 |   if (allDocs.length > 4000) {
170 |     console.log(`Just a second, forming final answer...`);
171 |   }
172 |
173 |   const summary = allDocs.length > 4000
174 |     ? await summarizeLongDocument({ document: allDocs, inquiry })
175 |     : allDocs;
176 |
177 |   chain.call({
178 |     summaries: summary,
179 |     question: inquiry,
180 |     conversationHistory,
181 |   }).catch((e) => console.error(e));
182 |
183 |   return new NextResponse(stream.readable, {
184 |     headers: {
185 |       "Content-Type": "text/event-stream",
186 |       "Cache-Control": "no-cache",
187 |     },
188 |   });
189 | }
190 |
--------------------------------------------------------------------------------
/src/app/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/tarikrazine/multi-User-Chatbot-langChain-pinecone/a78f98ce6accbaa4336ad3ccad6aef69d16aab67/src/app/favicon.ico
--------------------------------------------------------------------------------
/src/app/globals.css:
--------------------------------------------------------------------------------
1 | @tailwind base;
2 | @tailwind components;
3 | @tailwind utilities;
--------------------------------------------------------------------------------
/src/app/layout.tsx:
--------------------------------------------------------------------------------
1 | import "./globals.css";
2 | import { Inter } from "next/font/google";
3 | import { ClerkProvider, UserButton } from "@clerk/nextjs";
4 |
5 | const inter = Inter({ subsets: ["latin"] });
6 |
7 | export const metadata = {
8 |   title: "Create Next App",
9 |   description: "Generated by create next app",
10 | };
11 |
12 | export default function RootLayout({
13 |   children,
14 | }: {
15 |   children: React.ReactNode;
16 | }) {
17 |   return (
18 |
19 |
20 |
21 |
22 |
23 |
24 | 29 |
30 | 31 |
32 |
33 |
34 |
35 |
36 | {children} 37 |
38 |
39 |
40 | 41 | 42 |
43 |   );
44 | }
45 |
--------------------------------------------------------------------------------
/src/app/page.tsx:
--------------------------------------------------------------------------------
1 | "use client"
2 |
3 | import { useRef, useState, useEffect } from "react";
4 |
5 | import Image from "next/image";
6 |
7 | import ReactMarkdown from "react-markdown";
8 | import { Document } from "langchain/document";
9 | import { fetchEventSource } from "@microsoft/fetch-event-source";
10 |
11 | import {
12 |   Accordion,
13 |   AccordionContent,
14 |   AccordionItem,
15 |   AccordionTrigger,
16 | } from "@/components/ui/Accordion";
17 | import LoadingDots from "@/components/ui/LoadingDots";
18 |
19 | import { Message } from "@/types/chat";
20 |
21 | import styles from "@/styles/Home.module.css";
22 |
23 | export default function Home() {
24 |   const [query, setQuery] = useState("");
25 |   const [loading, setLoading] = useState(false);
26 |   const [error, setError] = useState<string | null>(null);
27 |
28 |   const [messageState, setMessageState] = useState<{
29 |     messages: Message[];
30 |     pending?: string;
31 |     history: [string, string][];
32 |     pendingSourceDocs?: Document[];
33 |   }>({
34 |     messages: [
35 |       {
36 |         message: "I am AI Assistant. How may I serve you today?",
37 |         type: "apiMessage",
38 |       },
39 |     ],
40 |     history: [],
41 |   });
42 |
43 |   const { messages, history } = messageState;
44 |
45 |   const messageListRef = useRef<HTMLDivElement>(null);
46 |   const textAreaRef = useRef<HTMLTextAreaElement>(null);
47 |
48 |   useEffect(() => {
49 |     textAreaRef.current?.focus();
50 |
51 |   }, []);
52 |
53 |   //handle form submission
54 |   async function handleSubmit(e: any) {
55 |     e.preventDefault();
56 |
57 |     setError(null);
58 |
59 |     if (!query) {
60 |       alert("Please input a question");
61 |       return;
62 |     }
63 |
64 |     const question = query.trim();
65 |
66 |     setMessageState((state) => ({
67 |       ...state,
68 |       messages: [
69 |         ...state.messages,
70 |         {
71 |           type: "userMessage",
72 |           message: question,
73 |         },
74 |       ],
75 |     }));
76 |
77 |     setLoading(true);
78 |     setQuery("");
79 |
80 |     try {
81 |       const message: Message = {
82 |         type: "apiMessage",
83 |         message: "",
84 |         isStreaming: true,
85 |         sourceDocs: [],
86 |       };
87 |
88 |       //const history = [...messageState.history, [question, message.message]];
89 |
90 |       setMessageState((state) => ({
91 |         ...state,
92 |         messages: [...state.messages, message],
93 |         //history: [...state.history, [question, message.message]],
94 |       }));
95 |
96 |       fetchEventSource("/api/chat", {
97 |         method: "POST",
98 |         headers: {
99 |           "Content-Type": "application/json",
100 |         },
101 |         body: JSON.stringify({
102 |           question,
103 |         }),
104 |         onmessage: (event) => {
105 |           setLoading(false);
106 |           if (event.data === "DONE") {
107 |             // Complete
108 |           } else {
109 |             // Stream text
110 |             message.message = message.message + event.data;
111 |             setMessageState((state) => ({
112 |               ...state,
113 |               messages: [...state.messages],
114 |               history: [...state.history],
115 |             }));
116 |           }
117 |         },
118 |         onerror: (error) => {
119 |           setLoading(false);
120 |           setError(
121 |             "An error occurred while fetching the data. Please try again."
122 |           );
123 |           console.log("error", error);
124 |         },
125 |         openWhenHidden: true,
126 |       });
127 |       messageListRef.current?.scrollTo(0, messageListRef.current.scrollHeight);
128 |
129 |       setLoading(false);
130 |     } catch (error) {
131 |       setLoading(false);
132 |       setError("An error occurred while fetching the data. Please try again.");
133 |       console.log("error", error);
134 |     }
135 |   }
136 |
137 |   //prevent empty submissions
138 |   const handleEnter = (e: any) => {
139 |     if (e.key === "Enter" && query) {
140 |       handleSubmit(e);
141 |     } else if (e.key === "Enter") {
142 |       e.preventDefault();
143 |     }
144 |   };
145 |
146 |   return (
147 |     <>
148 |
149 |
150 |
151 |
152 |           {messages.map((message, index) => {
153 |             let icon;
154 |             let className;
155 |             if (message.type === "apiMessage") {
156 |               icon = (
157 |                 AI
166 |               );
167 |               className = styles.apimessage;
168 |             } else {
169 |               icon = (
170 |                 Me
179 |               );
180 |               // The latest message sent by the user will be animated while waiting for a response
181 |               className =
182 |                 loading && index === messages.length - 1
183 |                   ? styles.usermessagewaiting
184 |                   : styles.usermessage;
185 |             }
186 |             return (
187 |               <>
188 |
189 | {icon} 190 |
191 | 196 | {message.message} 197 | 198 |
199 |
200 | {message.sourceDocs && ( 201 |
202 |
207 |                   {message.sourceDocs.map((doc, index) => {
208 |                     // Extract file name from path
209 |                     const pathParts = doc.metadata.source.split("/");
210 |                     let fileName = pathParts[pathParts.length - 1];
211 |                     // Remove extension and replace hyphens with spaces
212 |                     fileName = fileName
213 |                       .split(".")[0]
214 |                       .replace(/-/g, " ");
215 |
216 |                     // If page number is available in metadata, append it
217 |                     const pageNumber = doc.metadata.pageNumber
218 |                       ? ` {page ${doc.metadata.pageNumber}}`
219 |                       : "";
220 |
221 |                     return (
222 |
223 | 224 | 225 |

Source {index + 1}

226 |
227 | 228 | 233 | {doc.pageContent} 234 | 235 |

236 | Source: {fileName + pageNumber} 237 |

238 |
239 |
240 |
241 | ); 242 | } 243 | )} 244 |
245 |
246 | )} 247 | 248 | ); 249 | })} 250 |
251 |
252 |
253 |
254 |
255 |