├── .env.example ├── .gitignore ├── LICENSE ├── README.md ├── ai └── providers.ts ├── app ├── actions.ts ├── api │ ├── chat │ │ └── route.ts │ └── chats │ │ ├── [id] │ │ └── route.ts │ │ └── route.ts ├── chat │ └── [id] │ │ └── page.tsx ├── favicon.ico ├── globals.css ├── layout.tsx ├── opengraph-image.png ├── page.tsx ├── providers.tsx └── twitter-image.png ├── components.json ├── components ├── api-key-manager.tsx ├── chat-sidebar.tsx ├── chat.tsx ├── copy-button.tsx ├── deploy-button.tsx ├── icons.tsx ├── input.tsx ├── markdown.tsx ├── mcp-server-manager.tsx ├── message.tsx ├── messages.tsx ├── model-picker.tsx ├── project-overview.tsx ├── suggested-prompts.tsx ├── textarea.tsx ├── theme-provider.tsx ├── theme-toggle.tsx ├── tool-invocation.tsx └── ui │ ├── accordion.tsx │ ├── avatar.tsx │ ├── badge.tsx │ ├── button.tsx │ ├── dialog.tsx │ ├── dropdown-menu.tsx │ ├── input.tsx │ ├── label.tsx │ ├── popover.tsx │ ├── scroll-area.tsx │ ├── select.tsx │ ├── separator.tsx │ ├── sheet.tsx │ ├── sidebar.tsx │ ├── skeleton.tsx │ ├── sonner.tsx │ ├── text-morph.tsx │ ├── textarea.tsx │ └── tooltip.tsx ├── drizzle.config.ts ├── drizzle ├── 0000_supreme_rocket_raccoon.sql ├── 0001_curious_paper_doll.sql ├── 0002_free_cobalt_man.sql ├── 0003_oval_energizer.sql ├── 0004_tense_ricochet.sql ├── 0005_early_payback.sql └── meta │ ├── 0000_snapshot.json │ ├── 0001_snapshot.json │ ├── 0002_snapshot.json │ ├── 0003_snapshot.json │ ├── 0004_snapshot.json │ ├── 0005_snapshot.json │ └── _journal.json ├── eslint.config.mjs ├── hooks └── use-mobile.ts ├── lib ├── chat-store.ts ├── constants.ts ├── context │ └── mcp-context.tsx ├── db │ ├── index.ts │ └── schema.ts ├── hooks │ ├── use-chats.ts │ ├── use-copy.ts │ ├── use-local-storage.ts │ └── use-scroll-to-bottom.tsx ├── user-id.ts └── utils.ts ├── next.config.ts ├── package.json ├── pnpm-lock.yaml ├── postcss.config.mjs ├── public ├── file.svg ├── globe.svg ├── next.svg ├── scira.png ├── vercel.svg └── window.svg ├── railpack.json └── 
tsconfig.json /.env.example: -------------------------------------------------------------------------------- 1 | XAI_API_KEY="" 2 | OPENAI_API_KEY= 3 | DATABASE_URL="postgresql://username:password@host:port/database" 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.* 7 | .yarn/* 8 | !.yarn/patches 9 | !.yarn/plugins 10 | !.yarn/releases 11 | !.yarn/versions 12 | 13 | # testing 14 | /coverage 15 | 16 | # next.js 17 | /.next/ 18 | /out/ 19 | 20 | # production 21 | /build 22 | 23 | # misc 24 | .DS_Store 25 | *.pem 26 | 27 | # debug 28 | npm-debug.log* 29 | yarn-debug.log* 30 | yarn-error.log* 31 | .pnpm-debug.log* 32 | 33 | # env files (can opt-in for committing if needed) 34 | .env.local 35 | .env 36 | 37 | # vercel 38 | .vercel 39 | 40 | # typescript 41 | *.tsbuildinfo 42 | next-env.d.ts 43 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. 
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | Copyright 2025 Zaid Mukaddam 179 | 180 | Licensed under the Apache License, Version 2.0 (the "License"); 181 | you may not use this file except in compliance with the License. 
182 | You may obtain a copy of the License at 183 | 184 | http://www.apache.org/licenses/LICENSE-2.0 185 | 186 | Unless required by applicable law or agreed to in writing, software 187 | distributed under the License is distributed on an "AS IS" BASIS, 188 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 189 | See the License for the specific language governing permissions and 190 | limitations under the License. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 |

Scira MCP Chat

3 |
4 | 5 |

6 | An open-source AI chatbot app powered by Model Context Protocol (MCP), built with Next.js and the AI SDK by Vercel. 7 |

8 | 9 |

10 | Features • 11 | MCP Configuration • 12 | License 13 |

14 |
15 | 16 | ## Features 17 | 18 | - Streaming text responses powered by the [AI SDK by Vercel](https://sdk.vercel.ai/docs), allowing multiple AI providers to be used interchangeably with just a few lines of code. 19 | - Full integration with [Model Context Protocol (MCP)](https://modelcontextprotocol.io) servers to expand available tools and capabilities. 20 | - Multiple MCP transport types (SSE and stdio) for connecting to various tool providers. 21 | - Built-in tool integration for extending AI capabilities. 22 | - Reasoning model support. 23 | - [shadcn/ui](https://ui.shadcn.com/) components for a modern, responsive UI powered by [Tailwind CSS](https://tailwindcss.com). 24 | - Built with the latest [Next.js](https://nextjs.org) App Router. 25 | 26 | ## MCP Server Configuration 27 | 28 | This application supports connecting to Model Context Protocol (MCP) servers to access their tools. You can add and manage MCP servers through the settings icon in the chat interface. 29 | 30 | ### Adding an MCP Server 31 | 32 | 1. Click the settings icon (⚙️) next to the model selector in the chat interface. 33 | 2. Enter a name for your MCP server. 34 | 3. Select the transport type: 35 | - **SSE (Server-Sent Events)**: For HTTP-based remote servers 36 | - **stdio (Standard I/O)**: For local servers running on the same machine 37 | 38 | #### SSE Configuration 39 | 40 | If you select SSE transport: 41 | 1. Enter the server URL (e.g., `https://mcp.example.com/token/sse`) 42 | 2. Click "Add Server" 43 | 44 | #### stdio Configuration 45 | 46 | If you select stdio transport: 47 | 1. Enter the command to execute (e.g., `npx`) 48 | 2. Enter the command arguments (e.g., `-y @modelcontextprotocol/server-google-maps`) 49 | - You can enter space-separated arguments or paste a JSON array 50 | 3. Click "Add Server" 51 | 52 | 4. Click "Use" to activate the server for the current chat session. 
53 | 54 | ### Available MCP Servers 55 | 56 | You can use any MCP-compatible server with this application. Here are some examples: 57 | 58 | - [Composio](https://composio.dev/mcp) - Provides search, code interpreter, and other tools 59 | - [Zapier MCP](https://zapier.com/mcp) - Provides access to Zapier tools 60 | - Any MCP server using stdio transport with npx and python3 61 | 62 | ## License 63 | 64 | This project is licensed under the Apache License 2.0 - see the [LICENSE](LICENSE) file for details. -------------------------------------------------------------------------------- /ai/providers.ts: -------------------------------------------------------------------------------- 1 | import { createOpenAI } from "@ai-sdk/openai"; 2 | import { createGroq } from "@ai-sdk/groq"; 3 | import { createAnthropic } from "@ai-sdk/anthropic"; 4 | import { createXai } from "@ai-sdk/xai"; 5 | 6 | import { 7 | customProvider, 8 | wrapLanguageModel, 9 | extractReasoningMiddleware 10 | } from "ai"; 11 | 12 | export interface ModelInfo { 13 | provider: string; 14 | name: string; 15 | description: string; 16 | apiVersion: string; 17 | capabilities: string[]; 18 | } 19 | 20 | const middleware = extractReasoningMiddleware({ 21 | tagName: 'think', 22 | }); 23 | 24 | // Helper to get API keys from environment variables first, then localStorage 25 | const getApiKey = (key: string): string | undefined => { 26 | // Check for environment variables first 27 | if (process.env[key]) { 28 | return process.env[key] || undefined; 29 | } 30 | 31 | // Fall back to localStorage if available 32 | if (typeof window !== 'undefined') { 33 | return window.localStorage.getItem(key) || undefined; 34 | } 35 | 36 | return undefined; 37 | }; 38 | 39 | // Create provider instances with API keys from localStorage 40 | const openaiClient = createOpenAI({ 41 | apiKey: getApiKey('OPENAI_API_KEY'), 42 | }); 43 | 44 | const anthropicClient = createAnthropic({ 45 | apiKey: getApiKey('ANTHROPIC_API_KEY'), 46 | }); 47 | 
48 | const groqClient = createGroq({ 49 | apiKey: getApiKey('GROQ_API_KEY'), 50 | }); 51 | 52 | const xaiClient = createXai({ 53 | apiKey: getApiKey('XAI_API_KEY'), 54 | }); 55 | 56 | const languageModels = { 57 | "gpt-4.1-mini": openaiClient("gpt-4.1-mini"), 58 | "claude-3-7-sonnet": anthropicClient('claude-3-7-sonnet-20250219'), 59 | "qwen-qwq": wrapLanguageModel( 60 | { 61 | model: groqClient("qwen-qwq-32b"), 62 | middleware 63 | } 64 | ), 65 | "grok-3-mini": xaiClient("grok-3-mini-latest"), 66 | }; 67 | 68 | export const modelDetails: Record = { 69 | "gpt-4.1-mini": { 70 | provider: "OpenAI", 71 | name: "GPT-4.1 Mini", 72 | description: "Compact version of OpenAI's GPT-4.1 with good balance of capabilities, including vision.", 73 | apiVersion: "gpt-4.1-mini", 74 | capabilities: ["Balance", "Creative", "Vision"] 75 | }, 76 | "claude-3-7-sonnet": { 77 | provider: "Anthropic", 78 | name: "Claude 3.7 Sonnet", 79 | description: "Latest version of Anthropic's Claude 3.7 Sonnet with strong reasoning and coding capabilities.", 80 | apiVersion: "claude-3-7-sonnet-20250219", 81 | capabilities: ["Reasoning", "Efficient", "Agentic"] 82 | }, 83 | "qwen-qwq": { 84 | provider: "Groq", 85 | name: "Qwen QWQ", 86 | description: "Latest version of Alibaba's Qwen QWQ with strong reasoning and coding capabilities.", 87 | apiVersion: "qwen-qwq", 88 | capabilities: ["Reasoning", "Efficient", "Agentic"] 89 | }, 90 | "grok-3-mini": { 91 | provider: "XAI", 92 | name: "Grok 3 Mini", 93 | description: "Latest version of XAI's Grok 3 Mini with strong reasoning and coding capabilities.", 94 | apiVersion: "grok-3-mini-latest", 95 | capabilities: ["Reasoning", "Efficient", "Agentic"] 96 | }, 97 | }; 98 | 99 | // Update API keys when localStorage changes (for runtime updates) 100 | if (typeof window !== 'undefined') { 101 | window.addEventListener('storage', (event) => { 102 | // Reload the page if any API key changed to refresh the providers 103 | if (event.key?.includes('API_KEY')) { 104 | 
window.location.reload(); 105 | } 106 | }); 107 | } 108 | 109 | export const model = customProvider({ 110 | languageModels, 111 | }); 112 | 113 | export type modelID = keyof typeof languageModels; 114 | 115 | export const MODELS = Object.keys(languageModels); 116 | 117 | export const defaultModel: modelID = "qwen-qwq"; 118 | -------------------------------------------------------------------------------- /app/actions.ts: -------------------------------------------------------------------------------- 1 | "use server"; 2 | 3 | import { openai } from "@ai-sdk/openai"; 4 | import { generateObject } from "ai"; 5 | import { z } from "zod"; 6 | 7 | // Helper to extract text content from a message regardless of format 8 | function getMessageText(message: any): string { 9 | // Check if the message has parts (new format) 10 | if (message.parts && Array.isArray(message.parts)) { 11 | const textParts = message.parts.filter((p: any) => p.type === 'text' && p.text); 12 | if (textParts.length > 0) { 13 | return textParts.map((p: any) => p.text).join('\n'); 14 | } 15 | } 16 | 17 | // Fallback to content (old format) 18 | if (typeof message.content === 'string') { 19 | return message.content; 20 | } 21 | 22 | // If content is an array (potentially of parts), try to extract text 23 | if (Array.isArray(message.content)) { 24 | const textItems = message.content.filter((item: any) => 25 | typeof item === 'string' || (item.type === 'text' && item.text) 26 | ); 27 | 28 | if (textItems.length > 0) { 29 | return textItems.map((item: any) => 30 | typeof item === 'string' ? 
item : item.text 31 | ).join('\n'); 32 | } 33 | } 34 | 35 | return ''; 36 | } 37 | 38 | export async function generateTitle(messages: any[]) { 39 | // Convert messages to a format that OpenAI can understand 40 | const normalizedMessages = messages.map(msg => ({ 41 | role: msg.role, 42 | content: getMessageText(msg) 43 | })); 44 | 45 | const { object } = await generateObject({ 46 | model: openai("gpt-4.1"), 47 | schema: z.object({ 48 | title: z.string().min(1).max(100), 49 | }), 50 | system: ` 51 | You are a helpful assistant that generates titles for chat conversations. 52 | The title should be a short description of the conversation. 53 | The title should be no more than 30 characters. 54 | The title should be unique and not generic. 55 | `, 56 | messages: [ 57 | ...normalizedMessages, 58 | { 59 | role: "user", 60 | content: "Generate a title for the conversation.", 61 | }, 62 | ], 63 | }); 64 | 65 | return object.title; 66 | } 67 | -------------------------------------------------------------------------------- /app/api/chat/route.ts: -------------------------------------------------------------------------------- 1 | import { model, type modelID } from "@/ai/providers"; 2 | import { streamText, type UIMessage } from "ai"; 3 | import { appendResponseMessages } from 'ai'; 4 | import { saveChat, saveMessages, convertToDBMessages } from '@/lib/chat-store'; 5 | import { nanoid } from 'nanoid'; 6 | import { db } from '@/lib/db'; 7 | import { chats } from '@/lib/db/schema'; 8 | import { eq, and } from 'drizzle-orm'; 9 | 10 | import { experimental_createMCPClient as createMCPClient, MCPTransport } from 'ai'; 11 | import { Experimental_StdioMCPTransport as StdioMCPTransport } from 'ai/mcp-stdio'; 12 | import { spawn } from "child_process"; 13 | 14 | // Allow streaming responses up to 30 seconds 15 | export const maxDuration = 120; 16 | 17 | interface KeyValuePair { 18 | key: string; 19 | value: string; 20 | } 21 | 22 | interface MCPServerConfig { 23 | url: string; 24 | 
type: 'sse' | 'stdio'; 25 | command?: string; 26 | args?: string[]; 27 | env?: KeyValuePair[]; 28 | headers?: KeyValuePair[]; 29 | } 30 | 31 | export async function POST(req: Request) { 32 | const { 33 | messages, 34 | chatId, 35 | selectedModel, 36 | userId, 37 | mcpServers = [], 38 | }: { 39 | messages: UIMessage[]; 40 | chatId?: string; 41 | selectedModel: modelID; 42 | userId: string; 43 | mcpServers?: MCPServerConfig[]; 44 | } = await req.json(); 45 | 46 | if (!userId) { 47 | return new Response( 48 | JSON.stringify({ error: "User ID is required" }), 49 | { status: 400, headers: { "Content-Type": "application/json" } } 50 | ); 51 | } 52 | 53 | const id = chatId || nanoid(); 54 | 55 | // Check if chat already exists for the given ID 56 | // If not, we'll create it in onFinish 57 | let isNewChat = false; 58 | if (chatId) { 59 | try { 60 | const existingChat = await db.query.chats.findFirst({ 61 | where: and( 62 | eq(chats.id, chatId), 63 | eq(chats.userId, userId) 64 | ) 65 | }); 66 | isNewChat = !existingChat; 67 | } catch (error) { 68 | console.error("Error checking for existing chat:", error); 69 | // Continue anyway, we'll create the chat in onFinish 70 | isNewChat = true; 71 | } 72 | } else { 73 | // No ID provided, definitely new 74 | isNewChat = true; 75 | } 76 | 77 | // Initialize tools 78 | let tools = {}; 79 | const mcpClients: any[] = []; 80 | 81 | // Process each MCP server configuration 82 | for (const mcpServer of mcpServers) { 83 | try { 84 | // Create appropriate transport based on type 85 | let transport: MCPTransport | { type: 'sse', url: string, headers?: Record }; 86 | 87 | if (mcpServer.type === 'sse') { 88 | // Convert headers array to object for SSE transport 89 | const headers: Record = {}; 90 | if (mcpServer.headers && mcpServer.headers.length > 0) { 91 | mcpServer.headers.forEach(header => { 92 | if (header.key) headers[header.key] = header.value || ''; 93 | }); 94 | } 95 | 96 | transport = { 97 | type: 'sse' as const, 98 | url: 
mcpServer.url, 99 | headers: Object.keys(headers).length > 0 ? headers : undefined 100 | }; 101 | } else if (mcpServer.type === 'stdio') { 102 | // For stdio transport, we need command and args 103 | if (!mcpServer.command || !mcpServer.args || mcpServer.args.length === 0) { 104 | console.warn("Skipping stdio MCP server due to missing command or args"); 105 | continue; 106 | } 107 | 108 | // Convert env array to object for stdio transport 109 | const env: Record = {}; 110 | if (mcpServer.env && mcpServer.env.length > 0) { 111 | mcpServer.env.forEach(envVar => { 112 | if (envVar.key) env[envVar.key] = envVar.value || ''; 113 | }); 114 | } 115 | 116 | // Check for uvx pattern and transform to python3 -m uv run 117 | if (mcpServer.command === 'uvx') { 118 | // install uv 119 | const subprocess = spawn('pip3', ['install', 'uv']); 120 | subprocess.on('close', (code: number) => { 121 | if (code !== 0) { 122 | console.error(`Failed to install uv: ${code}`); 123 | } 124 | }); 125 | // wait for the subprocess to finish 126 | await new Promise((resolve) => { 127 | subprocess.on('close', resolve); 128 | console.log("installed uv"); 129 | }); 130 | console.log("Detected uvx pattern, transforming to python3 -m uv run"); 131 | mcpServer.command = 'python3'; 132 | // Get the tool name (first argument) 133 | const toolName = mcpServer.args[0]; 134 | // Replace args with the new pattern 135 | mcpServer.args = ['-m', 'uv', 'run', toolName, ...mcpServer.args.slice(1)]; 136 | } 137 | // if python is passed in the command, install the python package mentioned in args after -m with subprocess or use regex to find the package name 138 | else if (mcpServer.command.includes('python3')) { 139 | const packageName = mcpServer.args[mcpServer.args.indexOf('-m') + 1]; 140 | console.log("installing python package", packageName); 141 | const subprocess = spawn('pip3', ['install', packageName]); 142 | subprocess.on('close', (code: number) => { 143 | if (code !== 0) { 144 | console.error(`Failed to 
install python package: ${code}`); 145 | } 146 | }); 147 | // wait for the subprocess to finish 148 | await new Promise((resolve) => { 149 | subprocess.on('close', resolve); 150 | console.log("installed python package", packageName); 151 | }); 152 | } 153 | 154 | transport = new StdioMCPTransport({ 155 | command: mcpServer.command, 156 | args: mcpServer.args, 157 | env: Object.keys(env).length > 0 ? env : undefined 158 | }); 159 | } else { 160 | console.warn(`Skipping MCP server with unsupported transport type: ${mcpServer.type}`); 161 | continue; 162 | } 163 | 164 | const mcpClient = await createMCPClient({ transport }); 165 | mcpClients.push(mcpClient); 166 | 167 | const mcptools = await mcpClient.tools(); 168 | 169 | console.log(`MCP tools from ${mcpServer.type} transport:`, Object.keys(mcptools)); 170 | 171 | // Add MCP tools to tools object 172 | tools = { ...tools, ...mcptools }; 173 | } catch (error) { 174 | console.error("Failed to initialize MCP client:", error); 175 | // Continue with other servers instead of failing the entire request 176 | } 177 | } 178 | 179 | // Register cleanup for all clients 180 | if (mcpClients.length > 0) { 181 | req.signal.addEventListener('abort', async () => { 182 | for (const client of mcpClients) { 183 | try { 184 | await client.close(); 185 | } catch (error) { 186 | console.error("Error closing MCP client:", error); 187 | } 188 | } 189 | }); 190 | } 191 | 192 | console.log("messages", messages); 193 | console.log("parts", messages.map(m => m.parts.map(p => p))); 194 | 195 | // If there was an error setting up MCP clients but we at least have composio tools, continue 196 | const result = streamText({ 197 | model: model.languageModel(selectedModel), 198 | system: `You are a helpful assistant with access to a variety of tools. 199 | 200 | Today's date is ${new Date().toISOString().split('T')[0]}. 201 | 202 | The tools are very powerful, and you can use them to answer the user's question. 
203 | So choose the tool that is most relevant to the user's question. 204 | 205 | If tools are not available, say you don't know or if the user wants a tool they can add one from the server icon in bottom left corner in the sidebar. 206 | 207 | You can use multiple tools in a single response. 208 | Always respond after using the tools for better user experience. 209 | You can run multiple steps using all the tools!!!! 210 | Make sure to use the right tool to respond to the user's question. 211 | 212 | Multiple tools can be used in a single response and multiple steps can be used to answer the user's question. 213 | 214 | ## Response Format 215 | - Markdown is supported. 216 | - Respond according to tool's response. 217 | - Use the tools to answer the user's question. 218 | - If you don't know the answer, use the tools to find the answer or say you don't know. 219 | `, 220 | messages, 221 | tools, 222 | maxSteps: 20, 223 | providerOptions: { 224 | google: { 225 | thinkingConfig: { 226 | thinkingBudget: 2048, 227 | }, 228 | }, 229 | anthropic: { 230 | thinking: { 231 | type: 'enabled', 232 | budgetTokens: 12000 233 | }, 234 | } 235 | }, 236 | onError: (error) => { 237 | console.error(JSON.stringify(error, null, 2)); 238 | }, 239 | async onFinish({ response }) { 240 | const allMessages = appendResponseMessages({ 241 | messages, 242 | responseMessages: response.messages, 243 | }); 244 | 245 | await saveChat({ 246 | id, 247 | userId, 248 | messages: allMessages, 249 | }); 250 | 251 | const dbMessages = convertToDBMessages(allMessages, id); 252 | await saveMessages({ messages: dbMessages }); 253 | // close all mcp clients 254 | // for (const client of mcpClients) { 255 | // await client.close(); 256 | // } 257 | } 258 | }); 259 | 260 | result.consumeStream() 261 | return result.toDataStreamResponse({ 262 | sendReasoning: true, 263 | getErrorMessage: (error) => { 264 | if (error instanceof Error) { 265 | if (error.message.includes("Rate limit")) { 266 | return "Rate 
limit exceeded. Please try again later."; 267 | } 268 | } 269 | console.error(error); 270 | return "An error occurred."; 271 | }, 272 | }); 273 | } 274 | -------------------------------------------------------------------------------- /app/api/chats/[id]/route.ts: -------------------------------------------------------------------------------- 1 | import { NextResponse } from "next/server"; 2 | import { getChatById, deleteChat } from "@/lib/chat-store"; 3 | 4 | interface Params { 5 | params: { 6 | id: string; 7 | }; 8 | } 9 | 10 | export async function GET(request: Request, { params }: Params) { 11 | try { 12 | const userId = request.headers.get('x-user-id'); 13 | 14 | if (!userId) { 15 | return NextResponse.json({ error: "User ID is required" }, { status: 400 }); 16 | } 17 | 18 | const { id } = await params; 19 | const chat = await getChatById(id, userId); 20 | 21 | if (!chat) { 22 | return NextResponse.json( 23 | { error: "Chat not found" }, 24 | { status: 404 } 25 | ); 26 | } 27 | 28 | return NextResponse.json(chat); 29 | } catch (error) { 30 | console.error("Error fetching chat:", error); 31 | return NextResponse.json( 32 | { error: "Failed to fetch chat" }, 33 | { status: 500 } 34 | ); 35 | } 36 | } 37 | 38 | export async function DELETE(request: Request, { params }: Params) { 39 | try { 40 | const userId = request.headers.get('x-user-id'); 41 | 42 | if (!userId) { 43 | return NextResponse.json({ error: "User ID is required" }, { status: 400 }); 44 | } 45 | 46 | const { id } = await params; 47 | await deleteChat(id, userId); 48 | return NextResponse.json({ success: true }); 49 | } catch (error) { 50 | console.error("Error deleting chat:", error); 51 | return NextResponse.json( 52 | { error: "Failed to delete chat" }, 53 | { status: 500 } 54 | ); 55 | } 56 | } -------------------------------------------------------------------------------- /app/api/chats/route.ts: -------------------------------------------------------------------------------- 1 | import { 
NextResponse } from "next/server"; 2 | import { getChats } from "@/lib/chat-store"; 3 | 4 | export async function GET(request: Request) { 5 | try { 6 | const userId = request.headers.get('x-user-id'); 7 | 8 | if (!userId) { 9 | return NextResponse.json({ error: "User ID is required" }, { status: 400 }); 10 | } 11 | 12 | const chats = await getChats(userId); 13 | return NextResponse.json(chats); 14 | } catch (error) { 15 | console.error("Error fetching chats:", error); 16 | return NextResponse.json( 17 | { error: "Failed to fetch chats" }, 18 | { status: 500 } 19 | ); 20 | } 21 | } -------------------------------------------------------------------------------- /app/chat/[id]/page.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | 3 | import Chat from "@/components/chat"; 4 | import { getUserId } from "@/lib/user-id"; 5 | import { useQueryClient } from "@tanstack/react-query"; 6 | import { useParams } from "next/navigation"; 7 | import { useEffect } from "react"; 8 | 9 | export default function ChatPage() { 10 | const params = useParams(); 11 | const chatId = params?.id as string; 12 | const queryClient = useQueryClient(); 13 | const userId = getUserId(); 14 | 15 | // Prefetch chat data 16 | useEffect(() => { 17 | async function prefetchChat() { 18 | if (!chatId || !userId) return; 19 | 20 | // Check if data already exists in cache 21 | const existingData = queryClient.getQueryData(['chat', chatId, userId]); 22 | if (existingData) return; 23 | 24 | // Prefetch the data 25 | await queryClient.prefetchQuery({ 26 | queryKey: ['chat', chatId, userId] as const, 27 | queryFn: async () => { 28 | try { 29 | const response = await fetch(`/api/chats/${chatId}`, { 30 | headers: { 31 | 'x-user-id': userId 32 | } 33 | }); 34 | 35 | if (!response.ok) { 36 | throw new Error('Failed to load chat'); 37 | } 38 | 39 | return response.json(); 40 | } catch (error) { 41 | console.error('Error prefetching chat:', error); 42 | return 
"use client";

import Chat from "@/components/chat";
import { getUserId } from "@/lib/user-id";
import { useQueryClient } from "@tanstack/react-query";
import { useParams } from "next/navigation";
import { useEffect } from "react";

/**
 * Chat detail page — warms the React Query cache for this chat before
 * rendering <Chat>, so the component mounts with data already present
 * instead of showing a loading state.
 */
export default function ChatPage() {
  const params = useParams();
  const chatId = params?.id as string;
  const queryClient = useQueryClient();
  const userId = getUserId();

  // Prefetch chat data into the ['chat', chatId, userId] query key.
  useEffect(() => {
    async function prefetchChat() {
      if (!chatId || !userId) return;

      // Skip the network round-trip when the cache already has this chat.
      const existingData = queryClient.getQueryData(['chat', chatId, userId]);
      if (existingData) return;

      await queryClient.prefetchQuery({
        queryKey: ['chat', chatId, userId] as const,
        queryFn: async () => {
          try {
            const response = await fetch(`/api/chats/${chatId}`, {
              headers: {
                'x-user-id': userId
              }
            });

            if (!response.ok) {
              throw new Error('Failed to load chat');
            }

            return response.json();
          } catch (error) {
            console.error('Error prefetching chat:', error);
            // Cache `null` rather than letting the prefetch reject; the
            // consumer is expected to treat null as "not loaded".
            return null;
          }
        },
        staleTime: 1000 * 60 * 5, // 5 minutes
      });
    }

    prefetchChat();
  }, [chatId, userId, queryClient]);

  // NOTE(review): the JSX was stripped by the text extraction here (the dump
  // shows a bare `return ;`, which would render nothing). Reconstructed as
  // <Chat /> — presumably Chat resolves the chat id itself; confirm against
  // the component's props before relying on this.
  return <Chat />;
}
// NOTE(review): every JSX tag in this layout was stripped by the text
// extraction — the dump shows only empty numbered fragments between
// `return (` and the end of the file. The markup below is a reconstruction
// inferred from the visible imports (Providers, ChatSidebar, SidebarTrigger,
// Menu, inter) and must be confirmed against the actual repository. The
// `Script` import's usage is not visible at all and is intentionally not
// reconstructed.
export default function RootLayout({
  children,
}: Readonly<{
  children: React.ReactNode;
}>) {
  return (
    <html lang="en" suppressHydrationWarning>
      <body className={inter.className}>
        <Providers>
          <div className="flex h-dvh w-full">
            {/* Persistent navigation sidebar */}
            <ChatSidebar />
            <main className="flex-1 flex flex-col relative">
              {/* Mobile/collapsed sidebar toggle — TODO confirm exact markup */}
              <SidebarTrigger>
                <button aria-label="Toggle sidebar">
                  <Menu className="h-4 w-4" />
                </button>
              </SidebarTrigger>
              <div className="flex-1 flex justify-center">
                {children}
              </div>
            </main>
          </div>
        </Providers>
      </body>
    </html>
  );
}