├── .gitignore ├── LICENSE ├── README.md ├── assets ├── intellichat-demo.mp4 └── intellichat-screenshot-v1.png └── intellichat ├── .dockerignore ├── .env.example ├── .eslintrc.json ├── .github └── workflows │ └── ci.yml ├── .gitignore ├── .prettierignore ├── .prettierrc.json ├── Dockerfile ├── README.md ├── components.json ├── intellinode.d.ts ├── next.config.js ├── package.json ├── postcss.config.js ├── public ├── next.svg └── vercel.svg ├── src ├── app │ ├── api │ │ ├── chat │ │ │ └── route.ts │ │ └── route.ts │ ├── favicon.ico │ ├── globals.css │ ├── layout.tsx │ └── page.tsx ├── components │ ├── apikey-input.tsx │ ├── chat-message.tsx │ ├── chat-panel.tsx │ ├── chat-prompt.tsx │ ├── chat-settings.tsx │ ├── chat.tsx │ ├── field-group.tsx │ ├── field-tooltip.tsx │ ├── form-ui.tsx │ ├── shared │ │ ├── container.tsx │ │ ├── header.tsx │ │ ├── logo.tsx │ │ ├── providers.tsx │ │ └── sidebar.tsx │ └── ui │ │ ├── button.tsx │ │ ├── collapsible.tsx │ │ ├── form.tsx │ │ ├── input.tsx │ │ ├── label.tsx │ │ ├── popover.tsx │ │ ├── scroll-area.tsx │ │ ├── select.tsx │ │ ├── separator.tsx │ │ ├── sheet.tsx │ │ ├── switch.tsx │ │ ├── textarea.tsx │ │ ├── toast.tsx │ │ ├── toaster.tsx │ │ ├── tooltip.tsx │ │ └── use-toast.ts ├── lib │ ├── ai-providers.ts │ ├── helpers.ts │ ├── intellinode.ts │ ├── schema.ts │ ├── types.ts │ ├── utils.ts │ └── validators.ts └── store │ └── chat-settings.ts ├── tailwind.config.js └── tsconfig.json /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | .pnpm-debug.log* 9 | 10 | # Diagnostic reports (https://nodejs.org/api/report.html) 11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 12 | 13 | # Runtime data 14 | pids 15 | *.pid 16 | *.seed 17 | *.pid.lock 18 | 19 | # Directory for instrumented libs generated by jscoverage/JSCover 20 | lib-cov 21 | 22 | # Coverage directory used by tools like istanbul 23 | coverage 24 | *.lcov 25 | 26 | # nyc test coverage 27 | .nyc_output 28 | 29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 30 | .grunt 31 | 32 | # Bower dependency directory (https://bower.io/) 33 | bower_components 34 | 35 | # node-waf configuration 36 | .lock-wscript 37 | 38 | # Compiled binary addons (https://nodejs.org/api/addons.html) 39 | build/Release 40 | 41 | # Dependency directories 42 | node_modules/ 43 | jspm_packages/ 44 | 45 | # Snowpack dependency directory (https://snowpack.dev/) 46 | web_modules/ 47 | 48 | # TypeScript cache 49 | *.tsbuildinfo 50 | 51 | # Optional npm cache directory 52 | .npm 53 | 54 | # Optional eslint cache 55 | .eslintcache 56 | 57 | # Optional stylelint cache 58 | .stylelintcache 59 | 60 | # Microbundle cache 61 | .rpt2_cache/ 62 | .rts2_cache_cjs/ 63 | .rts2_cache_es/ 64 | .rts2_cache_umd/ 65 | 66 | # Optional REPL history 67 | .node_repl_history 68 | 69 | # Output of 'npm pack' 70 | *.tgz 71 | 72 | # Yarn Integrity file 73 | .yarn-integrity 74 | 75 | # dotenv environment variable files 76 | .env 77 | .env.development.local 78 | .env.test.local 79 | .env.production.local 80 | .env.local 81 | 82 | # parcel-bundler cache (https://parceljs.org/) 83 | .cache 84 | .parcel-cache 85 | 86 | # Next.js build output 87 | .next 88 | out 89 | 90 | # Nuxt.js build / generate output 91 | .nuxt 92 | dist 93 | 94 | # Gatsby files 95 | .cache/ 96 | # Comment in the public line in if your project uses Gatsby and not Next.js 97 | # 
https://nextjs.org/blog/next-9-1#public-directory-support 98 | # public 99 | 100 | # vuepress build output 101 | .vuepress/dist 102 | 103 | # vuepress v2.x temp and cache directory 104 | .temp 105 | .cache 106 | 107 | # Docusaurus cache and generated files 108 | .docusaurus 109 | 110 | # Serverless directories 111 | .serverless/ 112 | 113 | # FuseBox cache 114 | .fusebox/ 115 | 116 | # DynamoDB Local files 117 | .dynamodb/ 118 | 119 | # TernJS port file 120 | .tern-port 121 | 122 | # Stores VSCode versions used for testing VSCode extensions 123 | .vscode-test 124 | 125 | # yarn v2 126 | .yarn/cache 127 | .yarn/unplugged 128 | .yarn/build-state.yml 129 | .yarn/install-state.gz 130 | .pnp.* 131 | 132 | # custom 133 | */.idea/* 134 | .idea/* 135 | intellichat/instructions.txt 136 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 IntelliNode 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # IntelliChat 2 | 3 |

4 | 5 | 6 | 7 | 8 |

9 | 10 | 11 | IntelliChat is an open-source AI chatbot built with [IntelliNode](https://github.com/intelligentnode/IntelliNode) and Next.js. It is designed to accelerate the integration of multiple language models into chatbot apps. 12 | 13 | 14 | 15 | https://github.com/intelligentnode/IntelliChat/assets/2751950/47d7db12-e299-449f-9351-39185c659d84 16 | 17 | 18 | 19 | ## Features 20 | 21 | - Select your preferred AI provider and model from the UI. 22 | - **OpenAI ChatGPT**: o1, o3-mini, gpt-4o. 23 | - **Google Gemini**. 24 | - **Azure OpenAI**. 25 | - **Cohere Coral**. 26 | - **Replicate**: Llama (70b-chat, 13b-chat, 34b-code, 34b-python, 13b-code-instruct). 27 | - **Mistral AI**: Open-weight models. 28 | - **Anthropic**: Claude 3.5. 29 | - **vLLM**: any local model. 30 | - Manage your API keys via the UI. 31 | - Access your data using the IntelliNode one key. 32 | 33 | 34 | ## Installing and Running the App 35 | 36 | 1. `cd intellichat`. 37 | 2. Install the dependencies: `pnpm install`, `npm install`, or `yarn install`. 38 | 3. Start the Next.js dev server: `pnpm dev`, `npm run dev`, or `yarn dev`. 39 | 40 | Open [http://localhost:3000](http://localhost:3000) in your browser to see the result. 41 | 42 | --- 43 | 44 |
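The repo also ships a Dockerfile (under `intellichat/`). A rough, untested sketch of building and running the container (the `intellichat` image tag is arbitrary, and a `.env` file must exist because the Dockerfile copies it into the image):

```bash
cd intellichat
# Create the env file the Dockerfile expects; values can stay empty if you
# prefer to enter API keys from the UI.
cp .env.example .env
# Build the image and run it on the port the app listens on (3000).
docker build -t intellichat .
docker run -p 3000:3000 intellichat
```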
45 | 46 | **Built with:** [IntelliNode](https://github.com/intelligentnode/IntelliNode), [Next.js](https://nextjs.org/), [Shadcn](https://ui.shadcn.com/), and [TailwindCSS](https://tailwindcss.com/). -------------------------------------------------------------------------------- /assets/intellichat-demo.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/intelligentnode/IntelliChat/c741d1cfda9fd9120adc81a8e6c616541f25b7bc/assets/intellichat-demo.mp4 -------------------------------------------------------------------------------- /assets/intellichat-screenshot-v1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/intelligentnode/IntelliChat/c741d1cfda9fd9120adc81a8e6c616541f25b7bc/assets/intellichat-screenshot-v1.png -------------------------------------------------------------------------------- /intellichat/.dockerignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | .next 3 | .git 4 | .gitignore 5 | .DS_Store 6 | instructions.txt 7 | -------------------------------------------------------------------------------- /intellichat/.env.example: -------------------------------------------------------------------------------- 1 | OPENAI_API_KEY= 2 | REPLICATE_API_KEY= -------------------------------------------------------------------------------- /intellichat/.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": ["next/core-web-vitals", "plugin:jsx-a11y/recommended", "prettier"], 3 | "plugins": ["jsx-a11y", "react-hooks"], 4 | "rules": { 5 | "react-hooks/rules-of-hooks": "error", 6 | "react-hooks/exhaustive-deps": "error" 7 | } 8 | } 9 | 10 | -------------------------------------------------------------------------------- /intellichat/.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: [push, pull_request] 4 | 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | 9 | steps: 10 | - uses: actions/checkout@v2 11 | 12 | - name: Set up Node.js 13 | uses: actions/setup-node@v2 14 | 15 | # pnpm is not preinstalled on the runner, so install it before using it (same approach as the Dockerfile). 16 | - name: Install pnpm 17 | run: npm install -g pnpm 18 | 19 | - name: Install dependencies 20 | run: pnpm install --frozen-lockfile 21 | 22 | - name: Typecheck 23 | run: pnpm run typecheck 24 | 25 | - name: Run lint 26 | run: pnpm run lint 27 | -------------------------------------------------------------------------------- /intellichat/.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.js 7 | 8 | # testing 9 | /coverage 10 | 11 | # next.js 12 | /.next/ 13 | /out/ 14 | 15 | # production 16 | /build 17 | 18 | # misc 19 | .DS_Store 20 | *.pem 21 | 22 | # debug 23 | npm-debug.log* 24 | yarn-debug.log* 25 | yarn-error.log* 26 | 27 | # local env files 28 | .env*.local 29 | 30 | # vercel 31 | .vercel 32 | 33 | # typescript 34 | *.tsbuildinfo 35 | next-env.d.ts 36 | 37 | # lockfiles 38 | pnpm-lock.yaml 39 | yarn.lock 40 | package-lock.json -------------------------------------------------------------------------------- /intellichat/.prettierignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.js 7 | 8 | # testing 9 | /coverage 10 | 11 | # next.js 12 | /.next/ 13 | /out/ 14 | 15 | # production 16 | /build 17 | 18 | # misc 19 | .DS_Store 20 | *.pem 21 | 22 | # debug 23 | npm-debug.log* 24 | yarn-debug.log* 25 | yarn-error.log* 26 | 27 | # local env files 28 | .env*.local 29 | 30 | # vercel 31 | .vercel 32 | 33 | # typescript 34 | *.tsbuildinfo 35 | next-env.d.ts 36 | -------------------------------------------------------------------------------- /intellichat/.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "trailingComma": "es5", 3 | "semi": true, 4 | "singleQuote": true, 5 | "jsxSingleQuote": true, 6 | "plugins": [ 7 | "prettier-plugin-tailwindcss" 8 | ] 9 | } -------------------------------------------------------------------------------- /intellichat/Dockerfile: -------------------------------------------------------------------------------- 1 | # ---------------------------------------- 2 | # 1) Base Image 3 | # ---------------------------------------- 4 | FROM node:18-alpine AS base 5 | RUN apk add --no-cache libc6-compat 6 | WORKDIR /app 7 | 8 | # ---------------------------------------- 9 | # 2) Dependencies (deps) 10 | # ---------------------------------------- 11 | FROM base AS deps 12 | COPY package.json pnpm-lock.yaml* ./ 13 | RUN npm install -g pnpm 14 | RUN pnpm install --frozen-lockfile 15 | 16 | # ---------------------------------------- 17 | # 3) Builder 18 | # ---------------------------------------- 19 | FROM base AS builder 20 | WORKDIR /app 21 | # Install pnpm so the build command is available 22 | RUN npm install -g pnpm 23 | # Copy node_modules from deps 24 | COPY --from=deps /app/node_modules ./node_modules 25 | # Copy source code 26 | COPY . . 27 | # Build the Next.js app 28 | RUN pnpm build 29 | 30 | # ---------------------------------------- 31 | # 4) Production Runner 32 | # ---------------------------------------- 33 | FROM base AS runner 34 | WORKDIR /app 35 | ENV NODE_ENV=production 36 | RUN addgroup -g 1001 -S nodejs && adduser -S nextjs -u 1001 37 | # Install pnpm in runner stage (if needed by any command) 38 | RUN npm install -g pnpm 39 | # Copy package manifest and installed node_modules from builder stage 40 | COPY --from=builder /app/package.json ./ 41 | COPY --from=builder /app/node_modules ./node_modules 42 | # Copy build output, public assets, and the environment file 43 | COPY --from=builder /app/public ./public 44 | COPY --from=builder /app/.next ./.next 45 | COPY --from=builder /app/.env ./.env 46 | 47 | USER nextjs 48 | EXPOSE 3000 49 | ENV PORT=3000 50 | ENV HOSTNAME=0.0.0.0 51 | CMD ["pnpm", "start"] 52 | -------------------------------------------------------------------------------- /intellichat/README.md: -------------------------------------------------------------------------------- 1 | This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app). 2 | 3 | ## Getting Started 4 | 5 | To run the development server: 6 | 7 | 1. Install the dependencies: `pnpm install`, `npm install`, or `yarn install`. 8 | 2. Start the Next.js dev server: `pnpm dev`, `npm run dev`, or `yarn dev`. 9 | 10 | 11 | Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. 12 | 13 | You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file. 
14 | 15 | This project uses [`next/font`](https://nextjs.org/docs/basic-features/font-optimization) to automatically optimize and load Inter, a custom Google Font. 16 | 17 | ## Learn More 18 | 19 | To learn more about Next.js, take a look at the following resources: 20 | 21 | - [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API. 22 | - [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial. 23 | 24 | You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js/) - your feedback and contributions are welcome! 25 | 26 | ## Deploy on Vercel 27 | 28 | The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js. 29 | 30 | Check out our [Next.js deployment documentation](https://nextjs.org/docs/deployment) for more details. 31 | -------------------------------------------------------------------------------- /intellichat/components.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://ui.shadcn.com/schema.json", 3 | "style": "default", 4 | "rsc": true, 5 | "tsx": true, 6 | "tailwind": { 7 | "config": "tailwind.config.ts", 8 | "css": "app/globals.css", 9 | "baseColor": "zinc", 10 | "cssVariables": true 11 | }, 12 | "aliases": { 13 | "components": "@/components", 14 | "utils": "@/lib/utils" 15 | } 16 | } -------------------------------------------------------------------------------- /intellichat/intellinode.d.ts: -------------------------------------------------------------------------------- 1 | declare module 'intellinode' { 2 | type SupportedChatModels = 3 | | 'openai' 4 | | 'replicate' 5 | | 'sagemaker' 6 | | 'azure' 7 | | 'gemini' 8 | | 'cohere' 9 | | 'mistral' 10 | | 'anthropic' 11 | | 'vllm'; 12 | 13 | class Chatbot { 14 | constructor( 15 | keyValue?: string, 16 | provider?: string, 17 | customProxy?: ProxyHelper | null, 18 | options?: { 19 | oneKey?: string; 20 | intelliBase?: string; 21 | } 22 | ); 23 | chat( 24 | modelInput?: 25 | | ChatGPTInput 26 | | LLamaReplicateInput 27 | | CohereInput 28 | | GeminiInput 29 | | AnthropicInput 30 | | VLLMInput 31 | ); 32 | 33 | stream( 34 | modelInput?: 35 | | ChatGPTInput 36 | | LLamaReplicateInput 37 | | CohereInput 38 | | GeminiInput 39 | | AnthropicInput 40 | | VLLMInput 41 | ); 42 | } 43 | 44 | class ChatGPTInput { 45 | model: string = 'gpt-4o-mini'; 46 | temperature: number = 1; 47 | maxTokens: number | null = null; 48 | numberOfOutputs: number = 1; 49 | 50 | constructor( 51 | systemMessage: string, 52 | options?: { 53 | model?: string; 54 | temperature?: number; 55 | maxTokens?: number; 56 | numberOfOutputs?: number; 57 | attachReference?: boolean; 58 | } 59 | ); 60 | 61 | addMessage(message: ChatGPTMessage); 62 | addUserMessage(message: string): void; 63 | addAssistantMessage(message: string): void; 64 | } 65 | class ChatGPTMessage { 66 | constructor(message: string, role: string); 67 | } 68 | 69 | class LLamaReplicateInput { 70 | constructor( 71 | message: string, 72 | options?: { 73 | model?: string; 74 | attachReference?: boolean; 75 | } 76 | ); 77 | 78 | addUserMessage(message: string): void; 79 | addAssistantMessage(message: string): void; 80 | } 81 | 82 | class CohereInput { 83 | constructor( 84 | message: string, 85 | options?: { 86 | model?: string; 87 | web?: boolean; 88 | attachReference?: boolean; 89 | } 90 | ); 91 | 
92 | addUserMessage(message: string): void; 93 | addAssistantMessage(message: string): void; 94 | } 95 | 96 | class GeminiInput { 97 | constructor( 98 | message: string, 99 | options?: { 100 | model?: string; 101 | attachReference?: boolean; 102 | } 103 | ); 104 | 105 | addUserMessage(message: string): void; 106 | addAssistantMessage(message: string): void; 107 | } 108 | 109 | class MistralInput { 110 | constructor( 111 | message: string, 112 | options?: { 113 | model?: string; 114 | attachReference?: boolean; 115 | } 116 | ); 117 | 118 | addUserMessage(message: string): void; 119 | addAssistantMessage(message: string): void; 120 | } 121 | 122 | class AnthropicInput { 123 | constructor( 124 | message: string, 125 | options?: { 126 | model?: string; 127 | attachReference?: boolean; 128 | } 129 | ); 130 | 131 | addUserMessage(message: string): void; 132 | addAssistantMessage(message: string): void; 133 | } 134 | 135 | class VLLMInput { 136 | constructor( 137 | systemMessage: string, 138 | options?: { 139 | model?: string; 140 | // e.g. maxTokens, temperature, attachReference, etc. 141 | maxTokens?: number; 142 | temperature?: number; 143 | attachReference?: boolean; 144 | } 145 | ); 146 | addUserMessage(message: string): void; 147 | addAssistantMessage(message: string): void; 148 | } 149 | 150 | class ChatContext { 151 | constructor( 152 | apiKey: string, 153 | provider?: SupportedChatModels, 154 | customProxy?: ProxyHelper | null 155 | ); 156 | getRoleContext( 157 | userMessage: string, 158 | historyMessages: { role: 'user' | 'assistant'; content: string }[], 159 | n: number, 160 | embeddingName?: string | null 161 | ); 162 | } 163 | class ProxyHelper { 164 | static getInstance(): ProxyHelper; 165 | setAzureOpenai(resourceName: string): void; 166 | } 167 | } 168 | -------------------------------------------------------------------------------- /intellichat/next.config.js: -------------------------------------------------------------------------------- 1 | /** @type {import('next').NextConfig} */ 2 | const nextConfig = { 3 | typescript: { 4 | ignoreBuildErrors: true, 5 | }, 6 | eslint: { 7 | ignoreDuringBuilds: true, 8 | }, 9 | swcMinify: true, 10 | }; 11 | 12 | module.exports = nextConfig; 13 | -------------------------------------------------------------------------------- /intellichat/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "intellinext", 3 | "version": "0.1.0", 4 | "private": true, 5 | "scripts": { 6 | "dev": "next dev", 7 | "build": "next build", 8 | "start": "next start", 9 | "lint": "next lint", 10 | "typecheck": "tsc" 11 | }, 12 | "dependencies": { 13 | "@hookform/resolvers": "^3.3.1", 14 | "@radix-ui/react-collapsible": "1.0.3", 15 | "@radix-ui/react-dialog": "1.0.4", 16 | "@radix-ui/react-label": "2.0.2", 17 | "@radix-ui/react-popover": "^1.0.7", 18 | "@radix-ui/react-scroll-area": "^1.0.4", 19 | "@radix-ui/react-select": "1.2.2", 20 | "@radix-ui/react-separator": "1.0.3", 21 | "@radix-ui/react-slot": "1.0.2", 22 | "@radix-ui/react-switch": "^1.0.3", 23 | "@radix-ui/react-toast": "1.1.4", 24 | "@radix-ui/react-tooltip": "^1.0.6", 25 | "@tanstack/react-query": "4.33.0", 26 | "@types/node": "20.5.9", 27 | "@types/react": "18.2.21", 28 | "@types/react-dom": "18.2.7", 29 | "autoprefixer": "10.4.15", 30 | "class-variance-authority": "0.7.0", 31 | "clsx": "2.0.0", 32 | "encoding": "^0.1.13", 33 | "eslint": "8.48.0", 34 | "eslint-config-next": "13.4.19", 35 | "intellinode": "^2.2.9", 36 | "lucide-react": "0.274.0", 
37 | "nanoid": "4.0.2", 38 | "next": "13.4.19", 39 | "postcss": "8.4.29", 40 | "react": "18.2.0", 41 | "react-dom": "18.2.0", 42 | "react-github-btn": "^1.4.0", 43 | "react-hook-form": "^7.46.1", 44 | "react-markdown": "8.0.7", 45 | "tailwind-merge": "1.14.0", 46 | "tailwindcss": "3.3.3", 47 | "tailwindcss-animate": "1.0.7", 48 | "typescript": "5.2.2", 49 | "zod": "3.22.2", 50 | "zustand": "4.4.1" 51 | }, 52 | "devDependencies": { 53 | "@tailwindcss/typography": "0.5.10", 54 | "eslint-config-prettier": "9.0.0", 55 | "eslint-plugin-jsx-a11y": "6.7.1", 56 | "eslint-plugin-react-hooks": "4.6.0", 57 | "prettier": "3.0.3", 58 | "prettier-plugin-tailwindcss": "0.5.4" 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /intellichat/postcss.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | plugins: { 3 | tailwindcss: {}, 4 | autoprefixer: {}, 5 | }, 6 | } 7 | -------------------------------------------------------------------------------- /intellichat/public/next.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /intellichat/public/vercel.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /intellichat/src/app/api/chat/route.ts: -------------------------------------------------------------------------------- 1 | import { NextResponse } from 'next/server'; 2 | import { chatbotValidator } from '@/lib/validators'; 3 | import { 4 | getAzureChatResponse, 5 | getChatProviderKey, 6 | getChatResponse, 7 | getDefaultProviderKey, 8 | } from '@/lib/intellinode'; 9 | import { serializeError } from '@/lib/helpers'; 10 | 11 | const defaultSystemMessage = 12 | 'You are a helpful assistant. Format response in Markdown where needed.'; 13 | const defaultProvider = 'openai'; 14 | 15 | function extractJsonFromString(str: string): any[] { 16 | try { 17 | const jsonStart = str.indexOf('{'); 18 | if (jsonStart !== -1) { 19 | const jsonPart = str.slice(jsonStart); 20 | return [JSON.parse(jsonPart)]; 21 | } 22 | } catch { 23 | // Ignore JSON parsing errors 24 | } 25 | return []; 26 | } 27 | 28 | export async function POST(req: Request) { 29 | const json = await req.json(); 30 | const parsedJson : any = chatbotValidator.safeParse(json); 31 | 32 | if (!parsedJson.success) { 33 | const { error } = parsedJson; 34 | return NextResponse.json({ error: error.message }, { status: 400 }); 35 | } 36 | 37 | const { 38 | messages, 39 | providers, 40 | provider, 41 | systemMessage = defaultSystemMessage, 42 | n = 2, 43 | withContext, 44 | intellinodeData, 45 | oneKey, 46 | stream: streamResponse, 47 | } = parsedJson.data; 48 | 49 | const isVllm = (provider === 'vllm'); 50 | 51 | const key = isVllm 52 | ? 
null 53 | : (provider && providers[provider]?.apiKey) || 54 | getChatProviderKey(provider) || 55 | getDefaultProviderKey(provider, oneKey); 56 | 57 | if (!isVllm && !key) { 58 | console.log('error'); 59 | const missingKeyError = `no api key provided for ${provider} ...`; 60 | return NextResponse.json({ error: missingKeyError }, { status: 400 }); 61 | } 62 | 63 | const contextKey = providers.openai?.apiKey || getChatProviderKey('openai'); 64 | 65 | if (withContext && !contextKey) { 66 | const missingContextKey = `OpenAi key was not provided, either add it to your .env file or in the chat settings`; 67 | return NextResponse.json({ error: missingContextKey }, { status: 400 }); 68 | } 69 | 70 | if (intellinodeData && !oneKey) { 71 | const missingOneKey = `oneKey is required when intellinodeData is enabled`; 72 | return NextResponse.json({ error: missingOneKey }, { status: 400 }); 73 | } 74 | 75 | const chatSystemMessage = 76 | systemMessage.trim() !== '' ? systemMessage : defaultSystemMessage; 77 | const chatProvider = provider || defaultProvider; 78 | const chatProviderProps = providers[chatProvider]; 79 | 80 | try { 81 | if (chatProvider === 'azure' && providers.azure) { 82 | const responses = await getAzureChatResponse({ 83 | provider: { ...providers.azure, apiKey: key }, 84 | systemMessage: chatSystemMessage, 85 | withContext, 86 | messages, 87 | n, 88 | oneKey: intellinodeData ? oneKey : undefined, 89 | }); 90 | return NextResponse.json({ response: responses }); 91 | } else if (chatProviderProps && chatProviderProps?.name !== 'azure') { 92 | let shouldStream = (chatProviderProps.name === 'openai' || chatProviderProps.name === 'cohere' || chatProviderProps.name === 'vllm') && req.headers.get('Accept') === 'text/event-stream'; 93 | 94 | if (shouldStream) { 95 | const encoder = new TextEncoder(); 96 | const stream = new TransformStream(); 97 | const writer = stream.writable.getWriter(); 98 | 99 | // Start the streaming response 100 | getChatResponse({ 101 | provider: { ...chatProviderProps, apiKey: key }, 102 | systemMessage: chatSystemMessage, 103 | withContext, 104 | contextKey, 105 | messages, 106 | n, 107 | stream : streamResponse, 108 | oneKey: intellinodeData ? oneKey : undefined, 109 | intellinodeData, 110 | intelliBase: intellinodeData ? 
process.env.CUSTOM_INTELLIBASE_URL : undefined, 111 | onChunk: async (chunk: string) => { 112 | try { 113 | // Ensure proper SSE format 114 | const data = `${chunk}`; 115 | await writer.write(encoder.encode(data)); 116 | } catch (error) { 117 | console.error('Error writing chunk:', error); 118 | throw error; 119 | } 120 | }, 121 | }).then(async () => { 122 | // Send end message and close the stream 123 | // await writer.write(encoder.encode('[DONE]\n\n')); 124 | await writer.close(); 125 | }).catch(async (error) => { 126 | console.error('Streaming error:', error); 127 | // Safely serialize the error before sending to client 128 | try { 129 | const safeError = serializeError(error); 130 | let errMsg = (safeError as any).error?.message || safeError.message || ''; 131 | const extracted = extractJsonFromString(errMsg); 132 | if (extracted.length && extracted[0]?.error?.message) { 133 | errMsg = extracted[0].error.message; 134 | } 135 | await writer.write( 136 | encoder.encode(errMsg || 'Something went wrong; unable to generate a response.') 137 | ); 138 | } finally { 139 | await writer.close(); 140 | } 141 | });; 142 | 143 | return new Response(stream.readable, { 144 | headers: { 145 | 'Content-Type': 'text/event-stream', 146 | 'Cache-Control': 'no-cache', 147 | 'Connection': 'keep-alive', 148 | }, 149 | }); 150 | } else { 151 | // Non-streaming response remains the same 152 | const responses = await getChatResponse({ 153 | provider: { ...chatProviderProps, apiKey: key, baseUrl: chatProviderProps.baseUrl }, 154 | systemMessage: chatSystemMessage, 155 | withContext, 156 | contextKey, 157 | messages, 158 | stream : false, 159 | n, 160 | oneKey: intellinodeData ? oneKey : undefined, 161 | intellinodeData, 162 | }); 163 | 164 | if (Array.isArray(responses)) { 165 | // If the response is an array (as with vLLM), wrap it accordingly. 
166 | return NextResponse.json({ 167 | response: responses, 168 | references: null, 169 | }); 170 | } else { 171 | return NextResponse.json({ 172 | response: responses.result, 173 | references: responses.references, 174 | }); 175 | } 176 | } 177 | } 178 | } catch (e) { 179 | console.error('Error:', e); 180 | const defaultErrorMsg = 'invalid api key or provider'; 181 | try { 182 | const safeError = serializeError(e); 183 | let errMsg = (safeError as any).error?.message || safeError.message || ''; 184 | const extracted = extractJsonFromString(errMsg); 185 | if (extracted.length && extracted[0]?.error?.message) { 186 | errMsg = extracted[0].error.message; 187 | } 188 | return NextResponse.json({ error: errMsg || defaultErrorMsg }, { status: 400 }); 189 | } catch { 190 | return NextResponse.json({ error: defaultErrorMsg }, { status: 400 }); 191 | } 192 | } 193 | } 194 | 195 | export const maxDuration = 180; 196 | -------------------------------------------------------------------------------- /intellichat/src/app/api/route.ts: -------------------------------------------------------------------------------- 1 | import { getChatProviderKey } from '@/lib/intellinode'; 2 | import { NextResponse } from 'next/server'; 3 | 4 | // Check if the user has set up their API keys for OpenAI and Replicate in .env 5 | export async function GET() { 6 | const OpenAIKey = getChatProviderKey('openai'); 7 | const ReplicateKey = getChatProviderKey('replicate'); 8 | const CohereKey = getChatProviderKey('cohere'); 9 | const GoogleKey = getChatProviderKey('google'); 10 | const AzureKey = getChatProviderKey('azure'); 11 | const MistralKey = getChatProviderKey('mistral'); 12 | const anthropicKey = getChatProviderKey('anthropic'); 13 | 14 | return NextResponse.json({ 15 | openai: !!OpenAIKey, 16 | replicate: !!ReplicateKey, 17 | cohere: !!CohereKey, 18 | google: !!GoogleKey, 19 | azure: !!AzureKey, 20 | mistral: !!MistralKey, 21 | anthropic: !!anthropicKey, 22 | }); 23 | } 24 | -------------------------------------------------------------------------------- /intellichat/src/app/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/intelligentnode/IntelliChat/c741d1cfda9fd9120adc81a8e6c616541f25b7bc/intellichat/src/app/favicon.ico -------------------------------------------------------------------------------- /intellichat/src/app/globals.css: -------------------------------------------------------------------------------- 1 | @tailwind base; 2 | @tailwind components; 3 | @tailwind utilities; 4 | 5 | @layer base { 6 | :root { 7 | --background: 240 5% 17%; 8 | --foreground: 0 0% 100%; 9 | 10 | --card: 0 0% 100%; 11 | --card-foreground: 240 10% 3.9%; 12 | 13 | --popover: 0 0% 100%; 14 | --popover-foreground: 240 10% 3.9%; 15 | 16 | --primary: 210 99% 58%; 17 | --primary-foreground: 0 0% 98%; 18 | 19 | --secondary: 257 81% 79%; 20 | --secondary-foreground: 240 5.9% 10%; 21 | 22 | --muted: 240 4.8% 95.9%; 23 | --muted-foreground: 240 3.8% 46.1%; 24 | 25 | --accent: 240 4.8% 95.9%; 26 | --accent-foreground: 240 5.9% 10%; 27 | 28 | --destructive: 0 84.2% 60.2%; 29 | --destructive-foreground: 0 0% 98%; 30 | 31 | --border: 240 5.9% 90%; 32 | --input: 0 0% 95%; 33 | --ring: 240 10% 3.9%; 34 | 35 | --radius: 0.5rem; 36 | 37 | --header-height: 104px; 38 | } 39 | 40 | .dark { 41 | --background: 240 10% 3.9%; 42 | --foreground: 0 0% 98%; 43 | 44 | --card: 240 10% 3.9%; 45 | --card-foreground: 0 0% 98%; 46 | 47 | --popover: 240 10% 3.9%; 48 | 
--popover-foreground: 0 0% 98%; 49 | 50 | --primary: 0 0% 98%; 51 | --primary-foreground: 240 5.9% 10%; 52 | 53 | --secondary: 240 3.7% 15.9%; 54 | --secondary-foreground: 0 0% 98%; 55 | 56 | --muted: 240 3.7% 15.9%; 57 | --muted-foreground: 240 5% 64.9%; 58 | 59 | --accent: 240 3.7% 15.9%; 60 | --accent-foreground: 0 0% 98%; 61 | 62 | --destructive: 0 62.8% 30.6%; 63 | --destructive-foreground: 0 0% 98%; 64 | 65 | --border: 240 3.7% 15.9%; 66 | --input: 240 3.7% 15.9%; 67 | --ring: 240 4.9% 83.9%; 68 | } 69 | } 70 | 71 | @layer base { 72 | * { 73 | @apply border-border; 74 | } 75 | body { 76 | @apply bg-background text-foreground; 77 | } 78 | } 79 | 80 | .rtl { 81 | direction: rtl; 82 | } -------------------------------------------------------------------------------- /intellichat/src/app/layout.tsx: -------------------------------------------------------------------------------- 1 | import './globals.css'; 2 | import { Inter } from 'next/font/google'; 3 | 4 | import Header from '@/components/shared/header'; 5 | import { RQProvider } from '@/components/shared/providers'; 6 | 7 | import type { Metadata } from 'next'; 8 | import { Toaster } from '@/components/ui/toaster'; 9 | 10 | const inter = Inter({ subsets: ['latin'] }); 11 | 12 | export const metadata: Metadata = { 13 | title: 'IntelliChat', 14 | description: 15 | 'An open-source AI chatbot built with IntelliNode and Next.js. It is designed to accelerate the integration of multiple language models.', 16 | }; 17 | 18 | export default function RootLayout({ 19 | children, 20 | }: { 21 | children: React.ReactNode; 22 | }) { 23 | return ( 24 | 25 | 28 | 29 |
30 |
31 |
{children}
32 |
33 |
34 | 35 | 36 | 37 | ); 38 | } 39 | -------------------------------------------------------------------------------- /intellichat/src/app/page.tsx: -------------------------------------------------------------------------------- 1 | import Chat from '@/components/chat'; 2 | import Image from 'next/image'; 3 | 4 | export default function Home() { 5 | return ; 6 | } 7 | -------------------------------------------------------------------------------- /intellichat/src/components/apikey-input.tsx: -------------------------------------------------------------------------------- 1 | import { useChatSettings } from '@/store/chat-settings'; 2 | import { Input } from '@/components/ui/input'; 3 | import { 4 | FormControl, 5 | FormDescription, 6 | FormField, 7 | FormItem, 8 | FormLabel, 9 | } from '@/components/ui/form'; 10 | 11 | export default function ApiKeyInput({ 12 | name, 13 | id, 14 | label, 15 | control, 16 | provider, 17 | withContext, 18 | }: { 19 | name: string; 20 | provider: 'openai' | 'replicate' | 'cohere' | 'google'; 21 | id: string; 22 | label: string; 23 | control: any; 24 | withContext: boolean; 25 | }) { 26 | const envKeys = useChatSettings((s) => s.envKeys); 27 | const isVisible = provider === id || (withContext && id === 'openai'); 28 | if (!isVisible) return null; 29 | const hasEnvKey = envKeys[id]; 30 | return ( 31 | ( 35 | 36 | {label} 37 | 38 | 39 | 40 | {hasEnvKey && ( 41 | 42 | API Key is set as an environment variable, but you can override it 43 | here. 44 | 45 | )} 46 | 47 | )} 48 | > 49 | ); 50 | } 51 | -------------------------------------------------------------------------------- /intellichat/src/components/chat-message.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { ReactMarkdown } from 'react-markdown/lib/react-markdown'; 3 | import { cn, isPrimarilyRtl } from '@/lib/utils'; 4 | import { Message } from '@/lib/types'; 5 | import { BookIcon } from 'lucide-react'; 6 | import { Popover, PopoverContent, PopoverTrigger } from './ui/popover'; 7 | 8 | type Props = Message & { 9 | last?: boolean; 10 | isStreaming?: boolean; 11 | }; 12 | 13 | export const ChatMessage = ({ 14 | role, 15 | content, 16 | last, 17 | references, 18 | id, 19 | isStreaming 20 | }: Props) => { 21 | const isUser = role === 'user'; 22 | const isRtl = isPrimarilyRtl(content); 23 | 24 | return ( 25 |
26 | 27 |
28 | {isUser ? ( 29 |
30 | {content} 31 |
32 | ) : ( 33 |
34 | 35 | {content} 36 | 37 | {last && references && references.length > 0 && !isStreaming && ( 38 |
39 | 40 | 41 | Sources 42 | 43 | 44 | {references.map((ref, index) => ( 45 |
  • 46 | {ref} 47 |
  • 48 | ))} 49 |
    50 |
    51 |
    52 | )} 53 | {isStreaming && ( 54 |
    55 |
    56 |
    57 |
    58 |
    59 | )} 60 |
    61 | )} 62 |
    63 |
    64 | ); 65 | }; 66 | 67 | const ChatAvatar = ({ isUser }: { isUser: boolean }) => { 68 | return ( 69 |
    75 | ); 76 | }; -------------------------------------------------------------------------------- /intellichat/src/components/chat-panel.tsx: -------------------------------------------------------------------------------- 1 | import React, { useEffect } from 'react'; 2 | import { ChatMessage } from './chat-message'; 3 | import { Message } from '@/lib/types'; 4 | 5 | export const ChatPanel = ({ 6 | chat, 7 | initialMessage, 8 | streamingMessage, 9 | isStreaming, 10 | }: { 11 | chat?: Message[]; 12 | initialMessage?: Message | null; 13 | streamingMessage: Message | null; 14 | isStreaming: boolean; 15 | }) => { 16 | const endRef = React.useRef(null); 17 | 18 | useEffect(() => { 19 | requestAnimationFrame(() => { 20 | endRef.current?.scrollIntoView({ behavior: 'smooth' }); 21 | }); 22 | }, [chat?.length, streamingMessage]); // Add streamingMessage to dependencies 23 | 24 | if (chat) { 25 | return ( 26 | <> 27 | {/* Render regular messages */} 28 | {chat.map((message, index) => { 29 | if (message.id !== streamingMessage?.id) { 30 | return ( 31 | 36 | ); 37 | } 38 | })} 39 | 40 | {/* Render streaming message if present */} 41 | {streamingMessage && ( 42 | 47 | )} 48 | 49 |
    50 | 51 | ); 52 | } 53 | 54 | if (initialMessage) { 55 | return ; 56 | } 57 | 58 | return null; 59 | }; 60 | -------------------------------------------------------------------------------- /intellichat/src/components/chat-prompt.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Textarea } from './ui/textarea'; 3 | import { Button } from './ui/button'; 4 | import { CornerDownLeft } from 'lucide-react'; 5 | 6 | type Props = { 7 | isLoading: boolean; 8 | onSubmit: () => void; 9 | }; 10 | 11 | export const ChatPrompt = React.forwardRef( 12 | function ChatPrompt(props, ref) { 13 | const { isLoading, onSubmit } = props; 14 | 15 | const onEnter = (event: React.KeyboardEvent) => { 16 | if (isLoading) return; 17 | if (event.key === 'Enter') { 18 | event.preventDefault(); 19 | onSubmit(); 20 | } 21 | }; 22 | 23 | return ( 24 |
    25 | {isLoading && ( 26 |
    27 | Generating Response ... 28 |
    29 | )} 30 |
    31 |