├── .env.local.example
├── .gitignore
├── README.md
├── app
│   ├── api
│   │   └── chat
│   │       └── route.ts
│   ├── favicon.ico
│   ├── globals.css
│   ├── layout.tsx
│   └── page.tsx
├── next.config.js
├── package.json
├── pnpm-lock.yaml
├── postcss.config.js
├── public
│   ├── anthropic-logo.png
│   ├── openai-logo.png
│   ├── pinecone-logo.png
│   └── supabase-logo.png
├── tailwind.config.js
└── tsconfig.json

/.env.local.example:
--------------------------------------------------------------------------------
OPENAI_API_KEY="YOUR_OPENAI_API_KEY_GOES_HERE"
SUPABASE_PRIVATE_KEY="YOUR_SUPABASE_PRIVATE_KEY_GOES_HERE"
SUPABASE_URL="YOUR_SUPABASE_URL_GOES_HERE"
# LangSmith variables below are optional
LANGCHAIN_TRACING_V2="true"
LANGCHAIN_ENDPOINT="https://api.smith.langchain.com"
LANGCHAIN_API_KEY="YOUR_LANGSMITH_API_KEY_GOES_HERE"
LANGCHAIN_PROJECT="No Embeddings Custom Chatbot"
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/.pnp
.pnp.js

# testing
/coverage

# next.js
/.next/
/out/

# production
/build

# misc
.DS_Store
*.pem

# debug
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# local env files
.env*.local

# vercel
.vercel

# typescript
*.tsbuildinfo
next-env.d.ts
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Anthropic Claude Clone With Langchain, Next.js, Vercel AI SDK, and Supabase

Anthropic Claude Clone is an AI chat application built with Next.js, Langchain, and the Vercel AI SDK, with vector storage backed by Supabase. It lets users converse with an AI-powered assistant and perform various tasks through selectable tool functions.

## Video Demo

For a visual walkthrough of the application's features and functionality, watch the [YouTube video](https://youtu.be/brElptE736k).

## Frontend

The frontend is developed with Next.js and React, providing an intuitive and responsive user interface. It comprises the following key components:

### ModelSelector Component

The ModelSelector component lets users choose which AI model to chat with. The currently selected model's logo is displayed, and a dedicated button cycles between the available models. Note that the `claude-2-100k` option currently returns a canned, simulated response from the backend rather than calling Anthropic.

### WelcomeBack Component

The WelcomeBack component greets users with a welcome message when they enter the chat interface.

### VectorSelector Component

The VectorSelector component lets users select a vector storage option for attached files, cycling between icons that represent the available choices. Note that only Supabase is wired up in the current backend; the Pinecone option appears in the UI but is not yet handled by `app/api/chat/route.ts`.

### Main App Component

The heart of the application is the main App component, which handles the core chat functionality (a condensed sketch follows the list). This component takes care of:

- Sending and receiving messages between the user and the AI assistant.
- Presenting user and assistant messages, each with their respective avatars.
- Attaching files to messages and displaying pertinent file information.
- Enabling or disabling tool functions and presenting a function selector.
- Providing keyboard-shortcut information for user convenience.
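For orientation, here is that wiring in condensed form, taken from `app/page.tsx` (the component name and trimmed-down JSX here are illustrative, not the real markup): the `useChat` hook from the Vercel AI SDK manages the conversation, and the extra UI state rides along in the request body on submit.

```tsx
"use client";
import { useChat } from "ai/react";

// Condensed sketch of app/page.tsx — the real component also manages
// model/vector selection, file attachments, and function toggles.
export default function ChatSketch() {
  const { messages, input, handleInputChange, handleSubmit } = useChat();

  return (
    <form
      onSubmit={(e) =>
        handleSubmit(e, {
          // Extra fields are merged into the POST body sent to /api/chat
          options: { body: { selectedModel: "gpt-3.5", files: [], functions: [] } },
        })
      }
    >
      {messages.map((m) => (
        <p key={m.id}>
          {m.role}: {m.content}
        </p>
      ))}
      <textarea value={input} onChange={handleInputChange} />
      <button type="submit">Send</button>
    </form>
  );
}
```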
## Backend

The backend is a Next.js route handler (`app/api/chat/route.ts`) that runs as a serverless function and handles user requests and AI-related tasks, as sketched below. It is responsible for:

- Processing incoming POST requests containing messages, functions, files, and additional data.
- Managing user input and handling attached files as needed.
- Interacting with vector storage via Supabase to index and search file content.
- Executing AI models and tools based on user selections.
- Delivering AI-generated responses to the frontend in a streaming format.
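In outline, the handler reads the JSON body, runs the agent, and replays the finished answer as a byte stream. A minimal sketch, assuming a hypothetical `runAgent` helper in place of the Langchain agent executor (the real implementation appears in `app/api/chat/route.ts`):

```ts
import { StreamingTextResponse } from "ai";

// Hypothetical stand-in for the Langchain agent executor in the real route.
async function runAgent(messages: any[], latestQuery: string): Promise<string> {
  return "example answer";
}

export async function POST(req: Request) {
  const { messages } = await req.json();
  const answer = await runAgent(messages, messages[messages.length - 1].content);

  // Replay the completed answer word-by-word as a UTF-8 byte stream
  const stream = new ReadableStream({
    async start(controller) {
      for (const word of answer.split(" ")) {
        controller.enqueue(new TextEncoder().encode(word + " "));
      }
      controller.close();
    },
  });
  return new StreamingTextResponse(stream);
}
```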
## Dependencies

The project relies on a combination of technologies and libraries:

- Next.js and React for the frontend.
- Langchain for agents, tools, embeddings, and vector stores.
- The Vercel `ai` package for the `useChat` hook and streaming responses.
- Supabase for vector storage, accessed through `@supabase/supabase-js`.
- zod for tool-argument schema validation.
- `@phosphor-icons/react` for the UI icons.
- Vercel for hosting and deployment.

## Setup

To set up the project locally or deploy it:

1. Clone the repository.
2. Install dependencies with `npm install` (or `pnpm install`; the repo ships a `pnpm-lock.yaml`).
3. Copy `.env.local.example` to `.env.local` and fill in your OpenAI and Supabase keys (the LangSmith variables are optional).
4. Start the dev server with `npm run dev`.
5. For a live deployment, deploy the app to a serverless platform such as Vercel; the API route deploys with it.

## Contributions

Contributions to refine the project are warmly welcomed! If you uncover issues or have ideas for improving the application, please open an issue or submit a pull request.

## License

This project is released under the [MIT License](LICENSE), allowing free use and modification.
--------------------------------------------------------------------------------
/app/api/chat/route.ts:
--------------------------------------------------------------------------------
// 1. Import dependencies
import { initializeAgentExecutorWithOptions } from "langchain/agents";
import { DynamicTool, DynamicStructuredTool, WikipediaQueryRun } from "langchain/tools";
import { ChatOpenAI } from "langchain/chat_models/openai";
import { OpenAIEmbeddings } from "langchain/embeddings/openai";
import { SupabaseVectorStore } from "langchain/vectorstores/supabase";
import { createClient } from "@supabase/supabase-js";
import { StreamingTextResponse } from "ai";
import * as z from "zod";

// 2. Define interfaces
interface File {
  base64: string;  // base64 data URL produced by FileReader on the client
  content: string; // decoded text, filled in server-side
}
interface FunctionInfo {
  name: string;
  active: boolean;
}

// 3. Set up environment variables
const privateKey: string = process.env.SUPABASE_PRIVATE_KEY!;
const url: string = process.env.SUPABASE_URL!;
if (!privateKey) throw new Error(`Expected env var SUPABASE_PRIVATE_KEY`);
if (!url) throw new Error(`Expected env var SUPABASE_URL`);
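// For reference, the POST body assembled by the frontend (see app/page.tsx)
// looks roughly like this — shapes inferred from the client code, not a
// formal contract:
//   {
//     messages: { role: "user" | "assistant"; content: string }[],
//     functions: { name: string; active: boolean; label: string }[],
//     files: { base64: string; title: string; filetype: string; size: number }[],
//     selectedModel: string,         // "gpt-3.5" or "claude-2-100k"
//     selectedVectorStorage: string  // received but currently unused; only Supabase is wired up
//   }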
// 4. Define the POST function
export async function POST(req: Request) {
  // 5. Extract data from the request
  const { messages, functions, files, selectedModel, selectedVectorStorage } = await req.json();

  // 6. Handle the 'claude-2-100k' model case
  if (selectedModel === 'claude-2-100k') {
    // 7. Generate an example response for the Claude model (no Anthropic call is made)
    const result = "This is an example response from the Claude model.";
    const chunks: string[] = result.split(" ");
    const responseStream = new ReadableStream({
      async start(controller) {
        for (const chunk of chunks) {
          const bytes = new TextEncoder().encode(chunk + " ");
          controller.enqueue(bytes);
          await new Promise((r) => setTimeout(r, Math.floor(Math.random() * 20 + 10)));
        }
        controller.close();
      },
    });
    return new StreamingTextResponse(responseStream);
  } else {
    // 8. Process the input data
    const latestMessage: string = messages[messages.length - 1].content;
    const decodedFiles: File[] = files.map((file: { base64: string }) => {
      // FileReader.readAsDataURL yields "data:<mime>;base64,<data>", so strip
      // the data-URL prefix before decoding
      const base64Data = file.base64.includes(',') ? file.base64.split(',')[1] : file.base64;
      return {
        ...file,
        content: Buffer.from(base64Data, 'base64').toString('utf-8')
      };
    });
    let argForExecutor: string = latestMessage;
    if (files.length > 0) {
      // 9. Set up Supabase vector store for file content
      const client = createClient(url, privateKey);
      const combinedContent: string = decodedFiles.map((file) => file.content).join('\n');
      const vectorStore = await SupabaseVectorStore.fromTexts(
        [combinedContent],
        [],
        new OpenAIEmbeddings(),
        {
          client,
          tableName: "documents",
          queryName: "match_documents",
        }
      );
      // 10. Perform similarity search using vector store
      const vectorResultsArr = await vectorStore.similaritySearch(latestMessage, 3);
      const vectorResultsStr: string = vectorResultsArr.map((result) => result.pageContent).join('\n');
      argForExecutor = `USER QUERY: ${latestMessage} --- Before using prior knowledge base, use the following from new info: ${vectorResultsStr}`;
    }

    // 11. Set up agent executor with tools and model
    const model = new ChatOpenAI({ temperature: 0, streaming: true });
    const wikipediaQuery = new WikipediaQueryRun({
      topKResults: 1,
      maxDocContentLength: 300,
    });

    // 12. Define a dynamic tool for returning the value of foo
    const foo = new DynamicTool({
      name: 'foo',
      description: 'Returns the value of foo',
      func: async (): Promise<string> => {
        return 'The value of foo is "this is a langchain, next.js, supabase, claude, openai and AI demo"';
      }
    });

    // 13. Define a dynamic structured tool for fetching crypto price
    const fetchCryptoPrice = new DynamicStructuredTool({
      name: 'fetchCryptoPrice',
      description: 'Fetches the current price of a specified cryptocurrency',
      schema: z.object({
        cryptoName: z.string(),
        vsCurrency: z.string().optional().default('USD'),
      }),
      func: async (options: { cryptoName: string; vsCurrency?: string; }): Promise<string> => {
        // CoinGecko expects lowercase coin ids and currencies, and keys its
        // response the same way
        const { cryptoName, vsCurrency = 'USD' } = options;
        const url = `https://api.coingecko.com/api/v3/simple/price?ids=${cryptoName.toLowerCase()}&vs_currencies=${vsCurrency.toLowerCase()}`;
        const response = await fetch(url);
        const data = await response.json();
        return data[cryptoName.toLowerCase()][vsCurrency.toLowerCase()].toString();
      },
    });
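    // For reference, CoinGecko's /simple/price endpoint responds with an
    // object keyed by coin id and currency, e.g. (shape only, value made up):
    //   { "bitcoin": { "usd": 43000 } }
    // hence the data[cryptoName][vsCurrency] lookup above. The agent invokes
    // the tool with arguments validated against the zod schema, e.g.
    // { cryptoName: "bitcoin", vsCurrency: "USD" }.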
    // 14. Define available functions and tools
    const availableFunctions: Record<string, DynamicTool | DynamicStructuredTool | WikipediaQueryRun> = {
      wikipediaQuery,
      fetchCryptoPrice,
      foo
    };
    const tools: Array<any> = [foo];
    if (functions) {
      functions.forEach((func: FunctionInfo) => {
        if (func.active) {
          tools.push(availableFunctions[func.name]);
        }
      });
    }

    // 15. Initialize agent executor with tools and model
    const executor = await initializeAgentExecutorWithOptions(tools, model, {
      agentType: "openai-functions",
    });

    // 16. Run the executor and return the result as a streaming response.
    // Note: executor.run resolves with the complete answer first; the stream
    // below replays it word-by-word, simulating token streaming.
    const result: string = await executor.run(argForExecutor);
    const chunks: string[] = result.split(" ");
    const responseStream = new ReadableStream({
      async start(controller) {
        for (const chunk of chunks) {
          const bytes = new TextEncoder().encode(chunk + " ");
          controller.enqueue(bytes);
          await new Promise((r) => setTimeout(r, Math.floor(Math.random() * 20 + 10)));
        }
        controller.close();
      },
    });
    return new StreamingTextResponse(responseStream);
  }
}
--------------------------------------------------------------------------------
/app/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/developersdigest/anthropic-claude-clone-v1/cd716717b616bc346a02d45f8119cd72a02e6c29/app/favicon.ico
--------------------------------------------------------------------------------
/app/globals.css:
--------------------------------------------------------------------------------
@tailwind base;
@tailwind components;
@tailwind utilities;

:focus {
  outline: none;
}
.bg-uivory-100 {
  background-color: rgba(255, 255, 245, 1);
}

/* CSS variable for bg-uivory-100 */
:root {
  --bg-uivory-100: rgba(255, 255, 245, 1);
}
--------------------------------------------------------------------------------
/app/layout.tsx:
--------------------------------------------------------------------------------
import './globals.css'
import { Inter } from 'next/font/google'

const inter = Inter({ subsets: ['latin'] })

export const metadata = {
  title: 'Claude + Langchain',
  description: 'Claude Inspired UI Kit for Langchain apps.'
}

export default function RootLayout({
  children
}: {
  children: React.ReactNode
}) {
  return (
    <html lang="en">
      <body className={inter.className}>{children}</body>
    </html>
  )
}
--------------------------------------------------------------------------------
/app/page.tsx:
--------------------------------------------------------------------------------
"use client";

// 1. Import necessary modules and components
import { useChat } from "ai/react";
import { useState, useEffect } from "react";
import {
  Function,
  Paperclip,
  ArrowFatRight,
  XCircle,
  Circle,
  CheckCircle,
  User,
} from "@phosphor-icons/react";

// 2. Define the ModelSelector component
function ModelSelector({ onSelectModel }) {
  // 3. Initialize state for the currentModel
  const [currentModel, setCurrentModel] = useState({
    model: "claude-2-100k",
    src: "anthropic-logo.png",
    alt: "Anthropic Logo",
  });

  // 4. Define modelSelection array
  const modelSelection = [
    {
      model: "claude-2-100k",
      src: "anthropic-logo.png",
      alt: "Anthropic Logo",
    },
    {
      model: "gpt-3.5",
      src: "openai-logo.png",
      alt: "OpenAI Logo",
    },
  ];

  // 5. Define handleClick function
  const handleClick = () => {
    const currentIndex = modelSelection.findIndex(
      (image) => image.src === currentModel.src
    );
    const nextIndex = (currentIndex + 1) % modelSelection.length;
    setCurrentModel(modelSelection[nextIndex]);
    onSelectModel(modelSelection[nextIndex].model);
  };

  // 6. Return JSX for ModelSelector component
  return (
    <div>
      {/* Minimal reconstruction: the original wrapper divs and Tailwind
          classes were lost in extraction, so only the structure implied by
          the surviving code is shown. */}
      <button onClick={handleClick}>
        <img src={currentModel.src} alt={currentModel.alt} />
      </button>
    </div>
  );
}

// 7. Define WelcomeBack component
function WelcomeBack() {
  return (
    <>
      <div>Welcome back 🦜</div>
    </>
  );
}

// 8. Define VectorSelector component
function VectorSelector({ onSelectVectorStorage }) {
  // 9. Initialize state for currentIcon
  const [currentIcon, setCurrentIcon] = useState({
    vectorName: "Supabase",
    src: "supabase-logo.png",
    alt: "Supabase Logo",
  });

  // 10. Define iconSelection array
  const iconSelection = [
    {
      vectorName: "Supabase",
      src: "supabase-logo.png",
      alt: "Supabase Logo",
    },
    {
      vectorName: "Pinecone",
      src: "pinecone-logo.png",
      alt: "Pinecone Logo",
    },
  ];

  // 11. Define handleClick function
  const handleClick = () => {
    const currentIndex = iconSelection.findIndex(
      (icon) => icon.src === currentIcon.src
    );
    const nextIndex = (currentIndex + 1) % iconSelection.length;
    setCurrentIcon(iconSelection[nextIndex]);
    onSelectVectorStorage(iconSelection[nextIndex].vectorName);
  };

  // 12. Return JSX for VectorSelector component
  return (
    <div>
      {/* minimal reconstruction; original wrapper markup lost in extraction */}
      <button onClick={handleClick}>
        <img src={currentIcon.src} alt={currentIcon.alt} />
      </button>
    </div>
  );
}

// 13. Define main App component
export default function App() {
  // 14. Initialize state variables
  const [showWelcomeBack, setShowWelcomeBack] = useState(true);
  const [isInputFocused, setInputFocused] = useState(false);
  const [files, setFiles] = useState([]);
  const [showSlideUp, setShowSlideUp] = useState(false);
  const [selectedModel, setSelectedModel] = useState("gpt-3.5");
  const [selectedVectorStorage, setSelectedVectorStorage] =
    useState("Supabase");
  const [functions, setFunctions] = useState([
    { name: "wikipediaQuery", active: false, label: "Wikipedia Search" },
    { name: "fetchCryptoPrice", active: false, label: "Crypto Price" },
  ]);

  // 15. Define chat-related hooks using useChat()
  const { messages, input, handleInputChange, handleSubmit } = useChat();

  // 16. Handle vector storage selection
  const handleSelectedVectorStorage = (model) => {
    setSelectedVectorStorage(model);
  };

  // 17. Handle model selection
  const handleModelSelection = (model) => {
    setSelectedModel(model);
  };

  // 18. Handle input focus
  const handleInputFocus = () => {
    if (!showWelcomeBack) {
      setShowSlideUp(false);
    }
    setInputFocused(true);
  };

  // 19. Handle input blur
  const handleInputBlur = () => {
    setInputFocused(false);
  };

  // 20. Handle key down event (Cmd/Ctrl+K toggles the welcome screen)
  const handleKeyDown = (event) => {
    if ((event.metaKey || event.ctrlKey) && event.key === "k") {
      setShowWelcomeBack((showWelcomeBack) => !showWelcomeBack);
    }
  };

  // 21. Attach keydown event listener
  useEffect(() => {
    window.addEventListener("keydown", handleKeyDown);
    return () => window.removeEventListener("keydown", handleKeyDown);
  }, []);

  // 22. Hide welcome back message if messages are present
  useEffect(() => {
    if (messages.length > 0 && showWelcomeBack) {
      setShowWelcomeBack(false);
    }
  }, [messages]);

  // 23. Remove a file from the files state
  const removeFile = (index) => {
    setFiles(files.filter((_, i) => i !== index));
  };

  // 24. Handle file change event (reads up to 5 files as base64 data URLs)
  const handleFileChange = (event) => {
    const selectedFiles = Array.from(event.target.files).slice(0, 5);
    const filePromises = selectedFiles.map((file) => {
      return new Promise((resolve) => {
        const reader = new FileReader();
        reader.onload = (e) => {
          const base64File = e.target?.result;
          if (base64File) {
            resolve({
              base64: base64File,
              title: file.name,
              filetype: file.type,
              size: file.size,
            });
          }
        };
        reader.readAsDataURL(file);
      });
    });
    Promise.all(filePromises).then((fileObjects) => {
      setFiles(fileObjects);
    });
  };
  // 25. Handle icon click (toggles a tool function on/off)
  const handleIconClick = (index) => {
    const newFunctions = [...functions];
    newFunctions[index].active = !newFunctions[index].active;
    setFunctions(newFunctions);
    console.log(newFunctions[index]);
  };

  return (
    <>
      {/* 26. Include ModelSelector component */}
      <ModelSelector onSelectModel={handleModelSelection} />
      {/* 27. Position the input bar conditionally on whether it is the welcome
          screen. The wrapper markup from here on is a minimal reconstruction;
          the original conditional positioning classes were lost. */}
      <div>
        {showWelcomeBack && <WelcomeBack />}
        {messages.length > 0 && !showWelcomeBack
          ? messages.map((m, index) => (
              // 28. Conditional rendering based on user or assistant
              <div key={index}>
                {m.role === "user" ? (
                  <>
                    {/* 29. User message display */}
                    <div>{m.content}</div>
                    {/* 30. User avatar */}
                    <div>
                      <User />
                    </div>
                  </>
                ) : (
                  <>
                    {/* 31. Assistant message display */}
                    <div>🦜</div>
                    {/* 32. Assistant message content */}
                    <div>{m.content}</div>
                  </>
                )}
              </div>
            ))
          : null}
      </div>
      <div>
        <form
          onSubmit={(e) => {
            handleSubmit(e, {
              options: {
                body: {
                  selectedModel,
                  selectedVectorStorage,
                  files,
                  functions,
                },
              },
            });
          }}
        >
          {showSlideUp && (
            <div>
              {functions.map((icon, index) => (
                <button
                  key={icon.name}
                  type="button"
                  onClick={() => handleIconClick(index)}
                >
                  {/* reconstruction: active state is assumed to toggle the icon */}
                  {icon.active ? <CheckCircle /> : <Circle />}
                  {icon.label}
                </button>
              ))}
            </div>
          )}
          {/* 35. Textarea for user input */}