├── package.json ├── src ├── components │ ├── Global │ │ ├── Hover.tsx │ │ ├── Badge.tsx │ │ ├── DeleteConfirmationDialog.tsx │ │ ├── Spinner.tsx │ │ └── Markdown.tsx │ ├── Markdown │ │ ├── MemoizedReactMarkdown.tsx │ │ ├── Image.tsx │ │ └── CodeBlock.tsx │ ├── Chat │ │ ├── ChatLoader.tsx │ │ ├── CopyButton.tsx │ │ ├── ErrorMessageDiv.tsx │ │ ├── Regenerate.tsx │ │ ├── ModelSelect.tsx │ │ ├── PromptList.tsx │ │ ├── Conversation.tsx │ │ ├── GroupListItem.tsx │ │ ├── VariableModal.tsx │ │ ├── ChatMessage.tsx │ │ └── SystemPrompt.tsx │ ├── Mobile │ │ └── Navbar.tsx │ └── Agent │ │ └── agentListItem.tsx ├── types │ ├── index.ts │ ├── data.ts │ ├── env.ts │ ├── group.ts │ ├── error.ts │ ├── folder.ts │ ├── prompt.ts │ ├── openai.ts │ └── storage.ts ├── public │ ├── locales │ │ ├── he │ │ │ ├── common.json │ │ │ ├── markdown.json │ │ │ ├── sidebar.json │ │ │ └── chat.json │ │ ├── id │ │ │ ├── common.json │ │ │ ├── markdown.json │ │ │ ├── sidebar.json │ │ │ └── chat.json │ │ ├── ja │ │ │ ├── common.json │ │ │ ├── markdown.json │ │ │ ├── sidebar.json │ │ │ └── chat.json │ │ ├── vi │ │ │ ├── common.json │ │ │ ├── markdown.json │ │ │ ├── sidebar.json │ │ │ └── chat.json │ │ ├── bn │ │ │ ├── common.json │ │ │ ├── markdown.json │ │ │ ├── sidebar.json │ │ │ └── chat.json │ │ ├── de │ │ │ ├── common.json │ │ │ ├── markdown.json │ │ │ ├── sidebar.json │ │ │ └── chat.json │ │ ├── en │ │ │ └── common.json │ │ ├── es │ │ │ ├── common.json │ │ │ ├── markdown.json │ │ │ ├── sidebar.json │ │ │ └── chat.json │ │ ├── fr │ │ │ ├── common.json │ │ │ ├── markdown.json │ │ │ ├── sidebar.json │ │ │ └── chat.json │ │ ├── ko │ │ │ ├── common.json │ │ │ ├── markdown.json │ │ │ ├── sidebar.json │ │ │ └── chat.json │ │ ├── pt │ │ │ ├── common.json │ │ │ ├── markdown.json │ │ │ ├── sidebar.json │ │ │ └── chat.json │ │ ├── ru │ │ │ ├── common.json │ │ │ ├── markdown.json │ │ │ ├── sidebar.json │ │ │ └── chat.json │ │ ├── sv │ │ │ ├── common.json │ │ │ ├── markdown.json │ │ │ ├── sidebar.json │ │ │ └── chat.json │ │ ├── te │ │ │ ├── common.json │ │ │ ├── markdown.json │ │ │ ├── sidebar.json │ │ │ └── chat.json │ │ └── zh │ │ │ ├── common.json │ │ │ ├── markdown.json │ │ │ ├── sidebar.json │ │ │ └── chat.json │ ├── favicon.ico │ └── screenshot.png ├── .eslintrc.json ├── model │ ├── index.tsx │ ├── llmprovider.ts │ ├── openai │ │ ├── index.ts │ │ ├── ModelConfig.tsx │ │ └── GPT.ts │ ├── type.ts │ ├── azure │ │ ├── index.ts │ │ ├── ConfigPanel.tsx │ │ └── GPT.ts │ └── utils.ts ├── __mocks__ │ ├── remark-gfm.tsx │ └── react-markdown.tsx ├── postcss.config.js ├── utils │ ├── app │ │ ├── setup.ts │ │ ├── folders.ts │ │ ├── const.ts │ │ ├── prompts.ts │ │ ├── convertJson.ts │ │ ├── recordProvider.ts │ │ ├── importExport.ts │ │ ├── conversation.ts │ │ ├── codeblock.ts │ │ ├── agentReducer.ts │ │ ├── groupReducer.ts │ │ ├── provider.ts │ │ ├── clean.ts │ │ └── storageReducer.ts │ ├── logger.ts │ ├── index.ts │ ├── blobStorage.test.ts │ └── blobStorage.ts ├── test │ └── jest.setup.ts ├── memory │ ├── type.ts │ ├── memoryProvider.ts │ ├── chatMemoryConfigPanel.tsx │ ├── inMemorySavableVectorStore.ts │ ├── index.tsx │ ├── chatMemory.test.ts │ └── chatMemory.ts ├── prettier.config.js ├── tsconfig.test.json ├── agent │ ├── agentProvider.ts │ ├── userProxyAgent.ts │ ├── type.ts │ ├── index.ts │ ├── gptAgent.test.ts │ └── gptAgent.ts ├── message │ ├── messageProvider.ts │ ├── type.ts │ ├── index.tsx │ ├── MarkdownMessage.tsx │ └── LogMessage.tsx ├── pages │ ├── _app.tsx │ ├── _document.tsx │ └── index.tsx ├── next-i18next.config.js ├── 
tailwind.config.js ├── tsconfig.json ├── styles │ └── globals.css ├── chat │ ├── type.ts │ ├── group.test.ts │ └── groupConfigModal.tsx ├── next.config.js ├── jest.config.js └── package.json ├── assets ├── chat1.png ├── chat2.png ├── mathchat.png └── role-play.png ├── azure.yaml ├── infra ├── core │ ├── host │ │ ├── appserviceplan.bicep │ │ ├── staticwebapp.bicep │ │ └── appservice.bicep │ ├── security │ │ └── role.bicep │ ├── search │ │ └── search-services.bicep │ ├── ai │ │ └── cognitiveservices.bicep │ └── storage │ │ └── storage-account.bicep ├── main.parameters.json ├── main.bicep └── abbreviations.json ├── .gitignore ├── LICENSE.txt └── README.md /package.json: -------------------------------------------------------------------------------- 1 | {} 2 | -------------------------------------------------------------------------------- /src/components/Global/Hover.tsx: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/types/index.ts: -------------------------------------------------------------------------------- 1 | export {}; 2 | -------------------------------------------------------------------------------- /src/public/locales/he/common.json: -------------------------------------------------------------------------------- 1 | {} -------------------------------------------------------------------------------- /src/public/locales/id/common.json: -------------------------------------------------------------------------------- 1 | {} -------------------------------------------------------------------------------- /src/public/locales/ja/common.json: -------------------------------------------------------------------------------- 1 | {} -------------------------------------------------------------------------------- /src/public/locales/vi/common.json: -------------------------------------------------------------------------------- 1 | {} -------------------------------------------------------------------------------- /src/public/locales/bn/common.json: -------------------------------------------------------------------------------- 1 | {} 2 | -------------------------------------------------------------------------------- /src/public/locales/de/common.json: -------------------------------------------------------------------------------- 1 | {} 2 | -------------------------------------------------------------------------------- /src/public/locales/en/common.json: -------------------------------------------------------------------------------- 1 | {} 2 | -------------------------------------------------------------------------------- /src/public/locales/es/common.json: -------------------------------------------------------------------------------- 1 | {} 2 | -------------------------------------------------------------------------------- /src/public/locales/fr/common.json: -------------------------------------------------------------------------------- 1 | {} 2 | -------------------------------------------------------------------------------- /src/public/locales/ko/common.json: -------------------------------------------------------------------------------- 1 | {} 2 | -------------------------------------------------------------------------------- /src/public/locales/pt/common.json: -------------------------------------------------------------------------------- 1 | {} 2 | -------------------------------------------------------------------------------- 
/src/public/locales/ru/common.json:
--------------------------------------------------------------------------------
 1 | {}
 2 | 
--------------------------------------------------------------------------------
/src/public/locales/sv/common.json:
--------------------------------------------------------------------------------
 1 | {}
 2 | 
--------------------------------------------------------------------------------
/src/public/locales/te/common.json:
--------------------------------------------------------------------------------
 1 | {}
 2 | 
--------------------------------------------------------------------------------
/src/public/locales/zh/common.json:
--------------------------------------------------------------------------------
 1 | {}
 2 | 
--------------------------------------------------------------------------------
/src/.eslintrc.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "extends": "next/core-web-vitals"
 3 | }
 4 | 
--------------------------------------------------------------------------------
/src/model/index.tsx:
--------------------------------------------------------------------------------
 1 | import './azure/index';
 2 | import './openai/index';
--------------------------------------------------------------------------------
/src/__mocks__/remark-gfm.tsx:
--------------------------------------------------------------------------------
 1 | function remarkGfm(){
 2 |   return () => {}
 3 | }
 4 | 
 5 | export default remarkGfm;
--------------------------------------------------------------------------------
/src/types/data.ts:
--------------------------------------------------------------------------------
 1 | export interface KeyValuePair {
 2 |   key: string;
 3 |   value: any;
 4 | }
 5 | 
--------------------------------------------------------------------------------
/assets/chat1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LittleLittleCloud/Multi-Agent-ChatUI/HEAD/assets/chat1.png
--------------------------------------------------------------------------------
/assets/chat2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LittleLittleCloud/Multi-Agent-ChatUI/HEAD/assets/chat2.png
--------------------------------------------------------------------------------
/assets/mathchat.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LittleLittleCloud/Multi-Agent-ChatUI/HEAD/assets/mathchat.png
--------------------------------------------------------------------------------
/assets/role-play.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LittleLittleCloud/Multi-Agent-ChatUI/HEAD/assets/role-play.png
--------------------------------------------------------------------------------
/src/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LittleLittleCloud/Multi-Agent-ChatUI/HEAD/src/public/favicon.ico
--------------------------------------------------------------------------------
/src/types/env.ts:
--------------------------------------------------------------------------------
 1 | export interface ProcessEnv {
 2 |   OPENAI_API_KEY: string;
 3 |   OPENAI_API_HOST?: string;
 4 | }
 5 | 
--------------------------------------------------------------------------------
/src/public/screenshot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/LittleLittleCloud/Multi-Agent-ChatUI/HEAD/src/public/screenshot.png
--------------------------------------------------------------------------------
/src/postcss.config.js:
--------------------------------------------------------------------------------
 1 | module.exports = {
 2 |   plugins: {
 3 |     tailwindcss: {},
 4 |     autoprefixer: {},
 5 |   },
 6 | };
 7 | 
--------------------------------------------------------------------------------
/src/types/group.ts:
--------------------------------------------------------------------------------
 1 | import { IMessageRecord } from "@/message/type";
 2 | import { IRecord } from "@/types/storage";
 3 | 
 4 | 
--------------------------------------------------------------------------------
/src/public/locales/zh/markdown.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "Copy code": "复制代码",
 3 |   "Copied!": "已复制!",
 4 |   "Enter file name": "输入文件名"
 5 | }
 6 | 
--------------------------------------------------------------------------------
/src/utils/app/setup.ts:
--------------------------------------------------------------------------------
 1 | import "@/model/index";
 2 | import "@/agent/index";
 3 | import "@/message/index";
 4 | import "@/memory/index";
--------------------------------------------------------------------------------
/src/public/locales/ja/markdown.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "Copy code": "コードをコピー",
 3 |   "Copied!": "コピーしました!",
 4 |   "Enter file name": "ファイル名を入力"
 5 | }
--------------------------------------------------------------------------------
/src/test/jest.setup.ts:
--------------------------------------------------------------------------------
 1 | import "reflect-metadata";
 2 | import dotenv from "dotenv"
 3 | import "@/utils/app/setup";
 4 | 
 5 | dotenv.config();
--------------------------------------------------------------------------------
/src/types/error.ts:
--------------------------------------------------------------------------------
 1 | export interface ErrorMessage {
 2 |   code: String | null;
 3 |   title: String;
 4 |   messageLines: String[];
 5 | }
 6 | 
--------------------------------------------------------------------------------
/src/public/locales/ko/markdown.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "Copy code": "코드 복사",
 3 |   "Copied!": "복사 완료!",
 4 |   "Enter file name": "파일 이름을 입력하세요"
 5 | }
 6 | 
--------------------------------------------------------------------------------
/src/memory/type.ts:
--------------------------------------------------------------------------------
 1 | import { IRecord } from '@/types/storage';
 2 | 
 3 | export interface IMemory extends IRecord{
 4 |   memoryKey: string;
 5 | }
--------------------------------------------------------------------------------
/src/public/locales/he/markdown.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "Copy code": "העתק קוד",
 3 |   "Copied!": "נשמר בזכרון",
 4 |   "Enter file name": "הקלד שם לקובץ"
 5 | }
--------------------------------------------------------------------------------
/src/__mocks__/react-markdown.tsx:
--------------------------------------------------------------------------------
 1 | function ReactMarkdown({ children }){
 2 |   return <>{children}</>;
 3 | }
 4 | 
 5 | export default ReactMarkdown;
-------------------------------------------------------------------------------- /src/public/locales/sv/markdown.json: -------------------------------------------------------------------------------- 1 | { 2 | "Copy code": "Kopiera kod", 3 | "Copied!": "Kopierad!", 4 | "Enter file name": "Ange filnamn" 5 | } 6 | -------------------------------------------------------------------------------- /src/public/locales/vi/markdown.json: -------------------------------------------------------------------------------- 1 | { 2 | "Copy code": "Sao chép mã", 3 | "Copied!": "Đã sao chép!", 4 | "Enter file name": "Nhập tên file" 5 | } 6 | -------------------------------------------------------------------------------- /src/prettier.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | trailingComma: 'all', 3 | singleQuote: true, 4 | plugins: [require('prettier-plugin-tailwindcss')] 5 | }; 6 | -------------------------------------------------------------------------------- /src/public/locales/bn/markdown.json: -------------------------------------------------------------------------------- 1 | { 2 | "Copy code": "কোড কপি করুন", 3 | "Copied!": "কপি করা হয়েছে!", 4 | "Enter file name": "ফাইল নাম লিখুন" 5 | } 6 | -------------------------------------------------------------------------------- /src/public/locales/de/markdown.json: -------------------------------------------------------------------------------- 1 | { 2 | "Copy code": "Code kopieren", 3 | "Copied!": "Kopiert!", 4 | "Enter file name": "Dateinamen eingeben" 5 | } 6 | -------------------------------------------------------------------------------- /src/public/locales/id/markdown.json: -------------------------------------------------------------------------------- 1 | { 2 | "Copy code": "Salin kode", 3 | "Copied!": "Kode disalin!", 4 | "Enter file name": "Masukkan nama file" 5 | } -------------------------------------------------------------------------------- /src/public/locales/fr/markdown.json: -------------------------------------------------------------------------------- 1 | { 2 | "Copy code": "Copier le code", 3 | "Copied!": "Copié !", 4 | "Enter file name": "Entrez le nom du fichier" 5 | } 6 | -------------------------------------------------------------------------------- /src/public/locales/pt/markdown.json: -------------------------------------------------------------------------------- 1 | { 2 | "Copy code": "Copiar código", 3 | "Copied!": "Copiado!", 4 | "Enter file name": "Insira o nome do arquivo" 5 | } 6 | -------------------------------------------------------------------------------- /src/public/locales/es/markdown.json: -------------------------------------------------------------------------------- 1 | { 2 | "Copy code": "Copiar código", 3 | "Copied!": "¡Copiado!", 4 | "Enter file name": "Ingrese el nombre del archivo" 5 | } 6 | -------------------------------------------------------------------------------- /src/public/locales/ru/markdown.json: -------------------------------------------------------------------------------- 1 | { 2 | "Copy code": "Скопировать", 3 | "Copied!": "Скопировано!", 4 | "Enter file name": "Введите имя файла для загрузки" 5 | } 6 | -------------------------------------------------------------------------------- /src/types/folder.ts: -------------------------------------------------------------------------------- 1 | export interface Folder { 2 | id: string; 3 | name: string; 4 | type: FolderType; 5 | } 6 | 7 | export type FolderType = 
'chat' | 'prompt'; 8 | -------------------------------------------------------------------------------- /src/public/locales/te/markdown.json: -------------------------------------------------------------------------------- 1 | { 2 | "Copy code": "కోడ్‌ను కాపీ చేయండి", 3 | "Copied!": "కాపీ చేయబడింది!", 4 | "Enter file name": "ఫైల్ పేరు నమోదు చేయండి" 5 | } 6 | -------------------------------------------------------------------------------- /src/utils/app/folders.ts: -------------------------------------------------------------------------------- 1 | import { Folder } from '@/types/folder'; 2 | 3 | export const saveFolders = (folders: Folder[]) => { 4 | localStorage.setItem('folders', JSON.stringify(folders)); 5 | }; 6 | -------------------------------------------------------------------------------- /src/tsconfig.test.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends" : "./tsconfig.json", 3 | "compilerOptions": { 4 | "isolatedModules": false, 5 | "jsx": "react-jsx", 6 | }, 7 | "exclude": ["node_modules"] 8 | } -------------------------------------------------------------------------------- /src/agent/agentProvider.ts: -------------------------------------------------------------------------------- 1 | import { Provider } from "@/utils/app/provider"; 2 | import { IAgentRecord, IAgent } from "./type"; 3 | 4 | export const AgentProvider = new Provider IAgent>(); 5 | -------------------------------------------------------------------------------- /src/message/messageProvider.ts: -------------------------------------------------------------------------------- 1 | import { Provider } from "@/utils/app/provider"; 2 | import { IMessageRecord } from "./type"; 3 | 4 | export const MessageProvider = new Provider IMessageRecord>(); 5 | -------------------------------------------------------------------------------- /src/types/prompt.ts: -------------------------------------------------------------------------------- 1 | import { OpenAIModel } from './openai'; 2 | 3 | export interface Prompt { 4 | id: string; 5 | name: string; 6 | description: string; 7 | content: string; 8 | model: OpenAIModel; 9 | folderId: string | null; 10 | } 11 | -------------------------------------------------------------------------------- /azure.yaml: -------------------------------------------------------------------------------- 1 | # yaml-language-server: $schema=https://raw.githubusercontent.com/Azure/azure-dev/main/schemas/v1.0/azure.yaml.json 2 | 3 | name: chatroom 4 | services: 5 | frontend: 6 | project: ./src 7 | host: staticwebapp 8 | dist: ./build 9 | language: ts -------------------------------------------------------------------------------- /src/utils/app/const.ts: -------------------------------------------------------------------------------- 1 | export const DEFAULT_SYSTEM_PROMPT = 2 | "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. 
Respond using markdown.";
 3 | 
 4 | export const OPENAI_API_HOST =
 5 |   process.env.OPENAI_API_HOST || 'https://api.openai.com';
 6 | 
--------------------------------------------------------------------------------
/src/components/Markdown/MemoizedReactMarkdown.tsx:
--------------------------------------------------------------------------------
 1 | import { FC, memo } from 'react';
 2 | import ReactMarkdown, { Options } from 'react-markdown';
 3 | import { Markdown } from '../Global/Markdown';
 4 | 
 5 | export const MemoizedReactMarkdown: FC<Options> = memo(Markdown, (prevProps, nextProps) => {
 6 |   return prevProps.content === nextProps.content;
 7 | });
 8 | 
--------------------------------------------------------------------------------
/src/public/locales/zh/sidebar.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "New folder": "新建文件夹",
 3 |   "New chat": "新建聊天",
 4 |   "No conversations.": "无对话。",
 5 |   "Search conversations...": "搜索对话...",
 6 |   "OpenAI API Key": "OpenAI API 密钥",
 7 |   "Import conversations": "导入对话",
 8 |   "Are you sure?": "确定吗?",
 9 |   "Clear conversations": "清空对话",
10 |   "Export conversations": "导出对话",
11 |   "Dark mode": "深色模式",
12 |   "Light mode": "浅色模式"
13 | }
14 | 
--------------------------------------------------------------------------------
/src/memory/memoryProvider.ts:
--------------------------------------------------------------------------------
 1 | import { Provider } from "@/utils/app/provider";
 2 | import { IMemory } from "./type";
 3 | import { IEmbeddingModel } from "@/model/type";
 4 | import { BaseMemory } from "langchain/memory";
 5 | import { IChatMessageRecord } from "@/message/type";
 6 | 
 7 | export const MemoryProvider = new Provider BaseMemory >();
 8 | 
--------------------------------------------------------------------------------
/src/model/llmprovider.ts:
--------------------------------------------------------------------------------
 1 | import { Provider } from "@/utils/app/provider";
 2 | import { IEmbeddingModel, IChatModelRecord, IChatModel } from "@/model/type";
 3 | import { Embeddings } from "langchain/embeddings/base";
 4 | 
 5 | export const LLMProvider = new Provider IChatModel>();
 6 | 
 7 | export const EmbeddingProvider = new Provider Embeddings>();
--------------------------------------------------------------------------------
/src/public/locales/ko/sidebar.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "New folder": "새 폴더",
 3 |   "New chat": "새 채팅",
 4 |   "No conversations.": "대화가 없습니다.",
 5 |   "Search conversations...": "대화 검색...",
 6 |   "OpenAI API Key": "OpenAI API 키",
 7 |   "Import conversations": "대화 가져오기",
 8 |   "Are you sure?": "확실합니까?",
 9 |   "Clear conversations": "대화 지우기",
10 |   "Export conversations": "대화 내보내기",
11 |   "Dark mode": "다크 모드",
12 |   "Light mode": "라이트 모드"
13 | }
14 | 
--------------------------------------------------------------------------------
/src/public/locales/ja/sidebar.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "New folder": "新規フォルダ",
 3 |   "New chat": "新規チャット",
 4 |   "No conversations.": "会話履歴はありません。",
 5 |   "Search conversations...": "会話を検索...",
 6 |   "OpenAI API Key": "OpenAI API Key",
 7 |   "Import conversations": "会話履歴をインポート",
 8 |   "Are you sure?": "よろしいですか?",
 9 |   "Clear conversations": " 会話をクリア",
10 |   "Export conversations": "会話履歴をエクスポート",
11 |   "Dark mode": "ダークモード",
12 |   "Light mode": "ライトモード"
13 | }
--------------------------------------------------------------------------------
/src/public/locales/he/sidebar.json: -------------------------------------------------------------------------------- 1 | { 2 | "New folder": "תיקיה חדשה", 3 | "New chat": "שיחה חדשה", 4 | "No conversations.": "אין שיחות חדשות", 5 | "Search conversations...": "חיפוש שיחות...", 6 | "OpenAI API Key": "מפתח אישי ל openAI", 7 | "Import conversations": "ייבוא שיחות", 8 | "Are you sure?": "אתה בטוח?", 9 | "Clear conversations": "ניקוי שיחות", 10 | "Export conversations": "ייצוא שיחות", 11 | "Dark mode": "מצב כהה", 12 | "Light mode": "מצב בהיר" 13 | } -------------------------------------------------------------------------------- /src/types/openai.ts: -------------------------------------------------------------------------------- 1 | export interface OpenAIModel { 2 | id: string; 3 | name: string; 4 | } 5 | 6 | export enum OpenAIModelID { 7 | GPT_3_5 = 'gpt-3.5-turbo', 8 | GPT_4 = 'gpt-4', 9 | } 10 | 11 | export const OpenAIModels: Record = { 12 | [OpenAIModelID.GPT_3_5]: { 13 | id: OpenAIModelID.GPT_3_5, 14 | name: 'Default (GPT-3.5)', 15 | }, 16 | [OpenAIModelID.GPT_4]: { 17 | id: OpenAIModelID.GPT_4, 18 | name: 'GPT-4', 19 | }, 20 | }; 21 | -------------------------------------------------------------------------------- /src/agent/userProxyAgent.ts: -------------------------------------------------------------------------------- 1 | import { IChatMessageRecord } from "@/message/type"; 2 | import { AgentCallParams, IAgent } from "./type"; 3 | 4 | export class UserProxyAgent implements IAgent{ 5 | public name: string; 6 | 7 | constructor( 8 | name: string, 9 | ){ 10 | this.name = name; 11 | } 12 | 13 | public async callAsync(params: AgentCallParams): Promise{ 14 | throw new Error("Method not implemented."); 15 | } 16 | } -------------------------------------------------------------------------------- /src/pages/_app.tsx: -------------------------------------------------------------------------------- 1 | import '@/styles/globals.css'; 2 | import { appWithTranslation } from 'next-i18next'; 3 | import type { AppProps } from 'next/app'; 4 | import { Inter } from 'next/font/google'; 5 | 6 | const inter = Inter({ subsets: ['latin'] }); 7 | 8 | function App({ Component, pageProps }: AppProps<{}>) { 9 | return ( 10 |
11 | 12 |
13 | ); 14 | } 15 | 16 | export default appWithTranslation(App); 17 | -------------------------------------------------------------------------------- /src/public/locales/ru/sidebar.json: -------------------------------------------------------------------------------- 1 | { 2 | "New folder": "Новая папка", 3 | "New chat": "Новый чат", 4 | "No conversations.": "Нет чатов.", 5 | "Search conversations...": "Поиск чатов...", 6 | "OpenAI API Key": "API-ключ OpenAI", 7 | "Import conversations": "Импортировать чаты", 8 | "Are you sure?": "Вы уверены?", 9 | "Clear conversations": "Удалить чаты", 10 | "Export conversations": "Экспортировать чаты", 11 | "Dark mode": "Темный режим", 12 | "Light mode": "Светлый режим" 13 | } 14 | -------------------------------------------------------------------------------- /src/utils/logger.ts: -------------------------------------------------------------------------------- 1 | class logger{ 2 | public log(message: string){ 3 | console.log(message); 4 | } 5 | public error(message: string){ 6 | console.error(message); 7 | } 8 | public warn(message: string){ 9 | console.warn(message); 10 | } 11 | public info(message: string){ 12 | console.info(message); 13 | } 14 | public debug(message: string){ 15 | console.debug(message); 16 | } 17 | } 18 | 19 | export const Logger = new logger(); -------------------------------------------------------------------------------- /src/public/locales/pt/sidebar.json: -------------------------------------------------------------------------------- 1 | { 2 | "New folder": "Nova pasta", 3 | "New chat": "Novo chat", 4 | "No conversations.": "Não há conversas.", 5 | "Search conversations...": "Buscar conversas...", 6 | "OpenAI API Key": "API Key da OpenAI", 7 | "Import conversations": "Importar conversas", 8 | "Are you sure?": "Tem certeza?", 9 | "Clear conversations": "Apagar conversas", 10 | "Export conversations": "Exportar conversas", 11 | "Dark mode": "Modo escuro", 12 | "Light mode": "Modo claro" 13 | } 14 | -------------------------------------------------------------------------------- /src/public/locales/bn/sidebar.json: -------------------------------------------------------------------------------- 1 | { 2 | "New folder": "নতুন ফোল্ডার", 3 | "New chat": "নতুন আড্ডা", 4 | "No conversations.": "কোনো আলাপচারিতা নেই।", 5 | "Search conversations...": "আলাপচারিতা খুঁজুন...", 6 | "OpenAI API Key": "OpenAI API Key", 7 | "Import conversations": "আলাপচারিতা ইমপোর্ট", 8 | "Are you sure?": "আপনি কি নিশ্চিত?", 9 | "Clear conversations": "আলাপচারিতা ক্লিয়ার", 10 | "Export conversations": "আলাপচারিতা এক্সপোর্ট", 11 | "Dark mode": "ডার্ক মোড", 12 | "Light mode": "লাইট মোড" 13 | } 14 | -------------------------------------------------------------------------------- /src/next-i18next.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | i18n: { 3 | defaultLocale: "en", 4 | locales: [ 5 | "bn", 6 | "de", 7 | "en", 8 | "es", 9 | "fr", 10 | "he", 11 | "id", 12 | "ja", 13 | "ko", 14 | "pt", 15 | "ru", 16 | "sv", 17 | "te", 18 | "vi", 19 | "zh", 20 | ], 21 | }, 22 | localePath: 23 | typeof window === 'undefined' 24 | ? 
require('path').resolve('./public/locales') 25 | : '/public/locales', 26 | }; 27 | -------------------------------------------------------------------------------- /src/public/locales/sv/sidebar.json: -------------------------------------------------------------------------------- 1 | { 2 | "New folder": "Ny mapp", 3 | "New chat": "Ny chatt", 4 | "No conversations.": "Inga konversationer.", 5 | "Search conversations...": "Sök konversationer...", 6 | "OpenAI API Key": "OpenAI API-nyckel", 7 | "Import conversations": "Importera konversationer", 8 | "Are you sure?": "Är du säker?", 9 | "Clear conversations": "Radera konversationer", 10 | "Export conversations": "Exportera konversationer", 11 | "Dark mode": "Mörkt läge", 12 | "Light mode": "Ljust läge" 13 | } 14 | -------------------------------------------------------------------------------- /src/public/locales/te/sidebar.json: -------------------------------------------------------------------------------- 1 | { 2 | "New folder": "కొత్త ఫోల్డర్", 3 | "New chat": "కొత్త చాట్", 4 | "No conversations.": "సంభాషణలు లేవు.", 5 | "Search conversations...": "సంభాషణలు వెతకండి...", 6 | "OpenAI API Key": "ఒపెన్ ఎయి ఐ API కీ ", 7 | "Import conversations": "సంభాషణలు దిగుమతి చేయండి", 8 | "Are you sure?": "మీరు ఖచ్చితంగా ఉన్నారా?", 9 | "Clear conversations": "సంభాషణలు తొలగించు", 10 | "Export conversations": "సంభాషణలు ఎగుమతి చేయండి", 11 | "Dark mode": "డార్క్ మోడ్", 12 | "Light mode": "లైట్ మోడ్" 13 | } 14 | -------------------------------------------------------------------------------- /infra/core/host/appserviceplan.bicep: -------------------------------------------------------------------------------- 1 | param name string 2 | param location string = resourceGroup().location 3 | param tags object = {} 4 | 5 | param kind string = '' 6 | param reserved bool = true 7 | param sku object 8 | 9 | resource appServicePlan 'Microsoft.Web/serverfarms@2022-03-01' = { 10 | name: name 11 | location: location 12 | tags: tags 13 | sku: sku 14 | kind: kind 15 | properties: { 16 | reserved: reserved 17 | } 18 | } 19 | 20 | output id string = appServicePlan.id 21 | output name string = appServicePlan.name 22 | -------------------------------------------------------------------------------- /src/public/locales/id/sidebar.json: -------------------------------------------------------------------------------- 1 | { 2 | "New folder": "Folder baru", 3 | "New chat": "Percakapan baru", 4 | "No conversations.": "Tidak ada percakapan.", 5 | "Search conversations...": "Cari percakapan...", 6 | "OpenAI API Key": "Kunci API OpenAI", 7 | "Import conversations": "Impor percakapan", 8 | "Are you sure?": "Apakah Anda yakin?", 9 | "Clear conversations": "Hapus percakapan", 10 | "Export conversations": "Ekspor percakapan", 11 | "Dark mode": "Mode gelap", 12 | "Light mode": "Mode terang" 13 | } -------------------------------------------------------------------------------- /src/tailwind.config.js: -------------------------------------------------------------------------------- 1 | /** @type {import('tailwindcss').Config} */ 2 | module.exports = { 3 | content: [ 4 | './app/**/*.{js,ts,jsx,tsx}', 5 | './pages/**/*.{js,ts,jsx,tsx}', 6 | './components/**/*.{js,ts,jsx,tsx}', 7 | './message/**/*.{js,ts,jsx,tsx}', 8 | './agent/**/*.{js,ts,jsx,tsx}', 9 | './chat/**/*.{js,ts,jsx,tsx}', 10 | ], 11 | darkMode: 'class', 12 | theme: { 13 | extend: {}, 14 | }, 15 | plugins: [ 16 | require('@tailwindcss/typography'), 17 | require('@tailwindcss/forms') 18 | ], 19 | }; 20 | 
-------------------------------------------------------------------------------- /src/public/locales/de/sidebar.json: -------------------------------------------------------------------------------- 1 | { 2 | "New folder": "Neuer Ordner", 3 | "New chat": "Neue Konversation", 4 | "No conversations.": "Keine Konversationen.", 5 | "Search conversations...": "Konversationen suchen...", 6 | "OpenAI API Key": "OpenAI API-Schlüssel", 7 | "Import conversations": "Konversationen importieren", 8 | "Are you sure?": "Bist du sicher?", 9 | "Clear conversations": "Konversationen löschen", 10 | "Export conversations": "Konversationen exportieren", 11 | "Dark mode": "Dark Mode", 12 | "Light mode": "Light Mode" 13 | } -------------------------------------------------------------------------------- /src/public/locales/es/sidebar.json: -------------------------------------------------------------------------------- 1 | { 2 | "New folder": "Nueva carpeta", 3 | "New chat": "Nueva conversación", 4 | "No conversations.": "No hay conversaciones.", 5 | "Search conversations...": "Buscar conversaciones...", 6 | "OpenAI API Key": "Llave de API de OpenAI", 7 | "Import conversations": "Importar conversaciones", 8 | "Are you sure?": "¿Estás seguro?", 9 | "Clear conversations": "Borrar conversaciones", 10 | "Export conversations": "Exportar conversaciones", 11 | "Dark mode": "Modo oscuro", 12 | "Light mode": "Modo claro" 13 | } 14 | -------------------------------------------------------------------------------- /src/public/locales/vi/sidebar.json: -------------------------------------------------------------------------------- 1 | { 2 | "New folder": "Thư mục mới", 3 | "New chat": "Tạo hội thoại mới", 4 | "No conversations.": "Không có hội thoại nào.", 5 | "Search conversations...": "Tìm kiếm các cuộc hội thoại...", 6 | "OpenAI API Key": "OpenAI API Key", 7 | "Import conversations": "Nhập dữ liệu hội thoại", 8 | "Are you sure?": "Bạn chắc chắn chứ?", 9 | "Clear conversations": "Xoá các đoạn hội thoại", 10 | "Export conversations": "Xuất dữ liệu hội thoại", 11 | "Dark mode": "Chế độ tối", 12 | "Light mode": "Chế độ sáng" 13 | } 14 | -------------------------------------------------------------------------------- /src/agent/type.ts: -------------------------------------------------------------------------------- 1 | import { IRecord } from "@/types/storage"; 2 | import { IChatMessageRecord } from "@/message/type"; 3 | 4 | export interface IAgentRecord extends IRecord{ 5 | name: string, 6 | system_message: string, 7 | avatar: string, 8 | } 9 | export interface AgentCallParams{ 10 | messages: IChatMessageRecord[], 11 | maxTokens?: number, 12 | temperature?: number, 13 | stopWords?: string[], 14 | } 15 | export interface IAgent{ 16 | name: string; 17 | 18 | callAsync(params: AgentCallParams): Promise; 19 | } 20 | -------------------------------------------------------------------------------- /src/public/locales/fr/sidebar.json: -------------------------------------------------------------------------------- 1 | { 2 | "New folder": "Nouveau dossier", 3 | "New chat": "Nouvelle discussion", 4 | "No conversations.": "Aucune conversation.", 5 | "Search conversations...": "Rechercher des conversations...", 6 | "OpenAI API Key": "Clé API OpenAI", 7 | "Import conversations": "Importer des conversations", 8 | "Are you sure?": "Êtes-vous sûr ?", 9 | "Clear conversations": "Effacer les conversations", 10 | "Export conversations": "Exporter les conversations", 11 | "Dark mode": "Mode sombre", 12 | "Light mode": "Mode clair" 13 | } 14 
| 
--------------------------------------------------------------------------------
/src/utils/app/prompts.ts:
--------------------------------------------------------------------------------
 1 | import { Prompt } from '@/types/prompt';
 2 | 
 3 | export const updatePrompt = (updatedPrompt: Prompt, allPrompts: Prompt[]) => {
 4 |   const updatedPrompts = allPrompts.map((c) => {
 5 |     if (c.id === updatedPrompt.id) {
 6 |       return updatedPrompt;
 7 |     }
 8 | 
 9 |     return c;
10 |   });
11 | 
12 |   savePrompts(updatedPrompts);
13 | 
14 |   return {
15 |     single: updatedPrompt,
16 |     all: updatedPrompts,
17 |   };
18 | };
19 | 
20 | export const savePrompts = (prompts: Prompt[]) => {
21 |   localStorage.setItem('prompts', JSON.stringify(prompts));
22 | };
23 | 
--------------------------------------------------------------------------------
/src/utils/app/convertJson.ts:
--------------------------------------------------------------------------------
 1 | import { IRecord } from "@/types/storage";
 2 | import { RecordMap } from "./recordProvider";
 3 | 
 4 | export function extract<T extends IRecord, TActual extends T>(properties: RecordMap<T>){
 5 |   return function(obj: TActual): T{
 6 |     return Object.keys(properties).reduce<{}>((acc, key) => {
 7 |       Object.assign(acc, { [key]: Object.getOwnPropertyDescriptor(obj, key)?.value })
 8 | 
 9 |       return acc;
10 |     }, {}) as T;
11 |   }
12 | }
13 | 
14 | export interface IJsonConverter<T> {
15 |   serialize(obj: T): string;
16 |   deserialize(json: string): T;
17 | }
18 | 
--------------------------------------------------------------------------------
/infra/core/security/role.bicep:
--------------------------------------------------------------------------------
 1 | param principalId string
 2 | 
 3 | @allowed([
 4 |   'Device'
 5 |   'ForeignGroup'
 6 |   'Group'
 7 |   'ServicePrincipal'
 8 |   'User'
 9 | ])
10 | param principalType string = 'ServicePrincipal'
11 | param roleDefinitionId string
12 | 
13 | resource role 'Microsoft.Authorization/roleAssignments@2022-04-01' = {
14 |   name: guid(subscription().id, resourceGroup().id, principalId, roleDefinitionId)
15 |   properties: {
16 |     principalId: principalId
17 |     principalType: principalType
18 |     roleDefinitionId: resourceId('Microsoft.Authorization/roleDefinitions', roleDefinitionId)
19 |   }
20 | }
21 | 
--------------------------------------------------------------------------------
/src/utils/app/recordProvider.ts:
--------------------------------------------------------------------------------
 1 | import { IRecord } from "@/types/storage";
 2 | import { extract } from "./convertJson";
 3 | 
 4 | export type RecordMap<T> = {
 5 |   [P in keyof T]-?: true | false;
 6 | }
 7 | 
 8 | const recordMaps: Record<string, RecordMap<any>> = {};
 9 | 
10 | export function registerRecordMap<T extends IRecord>(name: string, recordMap: RecordMap<T>){
11 |   recordMaps[name] = recordMap;
12 | }
13 | 
14 | export function extractRecord<T extends IRecord>(instance: T) : IRecord{
15 |   var map = recordMaps[instance.type];
16 |   var extractor = extract(map);
17 |   return extractor(instance);
18 | }
19 | 
--------------------------------------------------------------------------------
/src/utils/index.ts:
--------------------------------------------------------------------------------
 1 | export function throttle<T extends (...args: any[]) => any>(func: T, limit: number): T {
 2 |   let lastFunc: ReturnType<typeof setTimeout>;
 3 |   let lastRan: number;
 4 | 
 5 |   return ((...args) => {
 6 |     if (!lastRan) {
 7 |       func(...args);
 8 |       lastRan = Date.now();
 9 |     } else {
10 |       clearTimeout(lastFunc);
11 |       lastFunc = setTimeout(() => {
12 |         if (Date.now() - lastRan >= limit) {
13 |           func(...args);
14 |           lastRan = Date.now();
15 |         }
16 |       }, limit - (Date.now() - lastRan));
17 |     }
18 |   }) as T;
19 | }
--------------------------------------------------------------------------------
/src/components/Global/Badge.tsx:
--------------------------------------------------------------------------------
 1 | export const TinyGrayBadge = (props: {children: any}) => {
 2 |   return (
 3 |     {props.children}
 6 |   )
 7 | }
 8 | 
 9 | export const TinyGreenBadge = (props: {children: any}) => {
10 |   return (
11 |     {props.children}
14 |   )
15 | }
--------------------------------------------------------------------------------
/src/components/Chat/ChatLoader.tsx:
--------------------------------------------------------------------------------
 1 | import { IconDots } from '@tabler/icons-react';
 2 | import { FC } from 'react';
 3 | 
 4 | interface Props {}
 5 | 
 6 | export const ChatLoader: FC<Props> = () => {
 7 |   return (
 8 |
12 |
13 |
AI:
14 | 15 |
16 |
17 | ); 18 | }; 19 | -------------------------------------------------------------------------------- /infra/core/host/staticwebapp.bicep: -------------------------------------------------------------------------------- 1 | param name string 2 | param location string = resourceGroup().location 3 | param tags object = {} 4 | 5 | // Microsoft.Web/staticSites/config 6 | param appSettings object={} 7 | 8 | param sku object = { 9 | name: 'Free' 10 | tier: 'Free' 11 | } 12 | 13 | resource web 'Microsoft.Web/staticSites@2022-03-01' = { 14 | name: name 15 | location: location 16 | tags: tags 17 | sku: sku 18 | properties: { 19 | provider: 'Custom' 20 | } 21 | 22 | resource configAppSettings 'config' = { 23 | name: 'appsettings' 24 | properties: appSettings 25 | } 26 | } 27 | 28 | output name string = web.name 29 | output uri string = 'https://${web.properties.defaultHostname}' 30 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | node_modules/ 5 | .next/ 6 | out/ 7 | wwwroot/ 8 | /.pnp 9 | .pnp.js 10 | 11 | # testing 12 | /coverage 13 | 14 | # next.js 15 | /.next/ 16 | /out/ 17 | /dist 18 | 19 | # production 20 | build 21 | 22 | # misc 23 | .DS_Store 24 | *.pem 25 | 26 | # debug 27 | npm-debug.log* 28 | yarn-debug.log* 29 | yarn-error.log* 30 | .pnpm-debug.log* 31 | 32 | # local env files 33 | .env*.local 34 | 35 | # vercel 36 | .vercel 37 | 38 | # typescript 39 | *.tsbuildinfo 40 | next-env.d.ts 41 | .idea 42 | .azure 43 | 44 | # doxfx 45 | **/DROP/ 46 | **/TEMP/ 47 | **/packages/ 48 | **/bin/ 49 | **/obj/ 50 | _site 51 | 52 | .env 53 | -------------------------------------------------------------------------------- /src/pages/_document.tsx: -------------------------------------------------------------------------------- 1 | import { Html, Head, Main, NextScript, DocumentProps } from 'next/document'; 2 | import i18nextConfig from '../next-i18next.config'; 3 | 4 | type Props = DocumentProps & { 5 | // add custom document props 6 | }; 7 | 8 | export default function Document(props: Props) { 9 | const currentLocale = 10 | props.__NEXT_DATA__.locale ?? i18nextConfig.i18n.defaultLocale; 11 | return ( 12 | 13 | 14 | 15 | 16 | 17 | 18 |
19 | 20 | 21 | 22 | ); 23 | } 24 | -------------------------------------------------------------------------------- /src/utils/app/importExport.ts: -------------------------------------------------------------------------------- 1 | import { Folder } from '@/types/folder'; 2 | import { IStorageRecord, exportZip } from '@/types/storage'; 3 | 4 | function currentDate() { 5 | const date = new Date(); 6 | let month = date.getMonth() + 1; 7 | let day = date.getDate(); 8 | return `${month}-${day}`; 9 | } 10 | 11 | export const exportData = async (storage: IStorageRecord) => { 12 | const blob = await exportZip(storage); 13 | const url = URL.createObjectURL(blob); 14 | const link = document.createElement('a'); 15 | link.download = `chatbot_ui_storage_${currentDate()}.chat`; 16 | link.href = url; 17 | link.style.display = 'none'; 18 | document.body.appendChild(link); 19 | link.click(); 20 | document.body.removeChild(link); 21 | URL.revokeObjectURL(url); 22 | }; 23 | -------------------------------------------------------------------------------- /src/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "types": ["reflect-metadata", "jest"], 4 | "target": "es6", 5 | "lib": ["dom", "dom.iterable", "esnext"], 6 | "allowJs": true, 7 | "skipLibCheck": true, 8 | "strict": true, 9 | "forceConsistentCasingInFileNames": true, 10 | "noEmit": true, 11 | "esModuleInterop": true, 12 | "module": "esnext", 13 | "moduleResolution": "node", 14 | "resolveJsonModule": true, 15 | "isolatedModules": true, 16 | "jsx": "preserve", 17 | "experimentalDecorators": true, 18 | "emitDecoratorMetadata": true, 19 | "incremental": true, 20 | "paths": { 21 | "@/*": ["./*"] 22 | } 23 | }, 24 | "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx"], 25 | "exclude": ["node_modules"] 26 | } 27 | -------------------------------------------------------------------------------- /src/memory/chatMemoryConfigPanel.tsx: -------------------------------------------------------------------------------- 1 | import { SmallNumberSetting, SmallTextSetting } from "@/components/Global/EditableSavableTextField"; 2 | import { IChatMemory } from "./chatMemory"; 3 | 4 | export const ChatMemoryConfigPanel: React.FC<{chatMemoryConfig: IChatMemory, onChange: (chatMemoryConfig: IChatMemory) => void}> = (props) => { 5 | return ( 6 | <> 7 | props.onChange({ ...props.chatMemoryConfig, maxHistoryLength: value})}/> 8 | props.onChange({ ...props.chatMemoryConfig, memoryKey: value})}/> 9 | 10 | ) 11 | } -------------------------------------------------------------------------------- /src/components/Chat/CopyButton.tsx: -------------------------------------------------------------------------------- 1 | import { IconCheck, IconCopy } from '@tabler/icons-react'; 2 | import { FC } from 'react'; 3 | 4 | type Props = { 5 | messagedCopied: boolean; 6 | copyOnClick: () => void; 7 | }; 8 | 9 | export const CopyButton: FC = ({ messagedCopied, copyOnClick }) => ( 10 | 25 | ); 26 | -------------------------------------------------------------------------------- /src/components/Mobile/Navbar.tsx: -------------------------------------------------------------------------------- 1 | import { Conversation } from '@/types/chat'; 2 | import { IconPlus } from '@tabler/icons-react'; 3 | import { FC } from 'react'; 4 | 5 | interface Props { 6 | selectedConversation: Conversation; 7 | onNewConversation: () => void; 8 | } 9 | 10 | export const Navbar: FC = ({ 11 | selectedConversation, 12 | onNewConversation, 13 | 
}) => {
 14 |   return (
 15 | 
 27 |   );
 28 | };
 29 | 
--------------------------------------------------------------------------------
/src/styles/globals.css:
--------------------------------------------------------------------------------
 1 | @tailwind base;
 2 | @tailwind components;
 3 | @tailwind utilities;
 4 | 
 5 | ::-webkit-scrollbar-track {
 6 |   background-color: transparent;
 7 | }
 8 | 
 9 | ::-webkit-scrollbar-thumb {
10 |   background-color: #ccc;
11 |   border-radius: 10px;
12 | }
13 | 
14 | ::-webkit-scrollbar-thumb:hover {
15 |   background-color: #aaa;
16 | }
17 | 
18 | ::-webkit-scrollbar-track:hover {
19 |   background-color: #f2f2f2;
20 | }
21 | 
22 | ::-webkit-scrollbar-corner {
23 |   background-color: transparent;
24 | }
25 | 
26 | ::-webkit-scrollbar {
27 |   width: 6px;
28 |   height: 6px;
29 | }
30 | 
31 | html {
32 |   background: #202123;
33 | }
34 | 
35 | pre:has(div.codeblock) {
36 |   padding: 0;
37 | }
38 | 
39 | input {
40 |   background: transparent;
41 |   color: #fff;
42 |   flex: 1;
43 |   overflow: hidden;
44 |   text-overflow: ellipsis;
45 | }
46 | 
--------------------------------------------------------------------------------
/src/components/Global/DeleteConfirmationDialog.tsx:
--------------------------------------------------------------------------------
 1 | import { Dialog, DialogTitle, DialogContent, DialogContentText, DialogActions, Button } from "@mui/material";
 2 | import { FC } from "react";
 3 | 
 4 | export const DeleteConfirmationDialog: FC<{open: boolean, message: string, onConfirm: () => void, onCancel: () => void}> = ({open, message, onConfirm, onCancel}) => {
 5 |   return (
 6 | 
 7 |     {message}
 8 | 
 9 | 
10 |       This action cannot be undone.
11 | 
12 | 
13 | 
14 | 
15 | 
16 | 
17 | 
18 |   )
19 | };
--------------------------------------------------------------------------------
/infra/main.parameters.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentParameters.json#",
 3 |   "contentVersion": "1.0.0.0",
 4 |   "parameters": {
 5 |     "environmentName": {
 6 |       "value": "${AZURE_ENV_NAME}"
 7 |     },
 8 |     "location": {
 9 |       "value": "${AZURE_LOCATION}"
10 |     },
11 |     "principalId": {
12 |       "value": "${AZURE_PRINCIPAL_ID}"
13 |     },
14 |     "openAiServiceName": {
15 |       "value": "${AZURE_OPENAI_SERVICE}"
16 |     },
17 |     "openAiResourceGroupName": {
18 |       "value": "${AZURE_OPENAI_RESOURCE_GROUP}"
19 |     },
20 |     "openAiSkuName": {
21 |       "value": "S0"
22 |     },
23 |     "storageAccountName": {
24 |       "value": "${AZURE_STORAGE_ACCOUNT}"
25 |     },
26 |     "storageResourceGroupName": {
27 |       "value": "${AZURE_STORAGE_RESOURCE_GROUP}"
28 |     }
29 |   }
30 | }
31 | 
--------------------------------------------------------------------------------
/src/model/openai/index.ts:
--------------------------------------------------------------------------------
 1 | import { OpenAIGPT, IOpenAIGPTRecord, ITextDavinci003 } from "./GPT";
 2 | import { ModelConfig } from "./ModelConfig";
 3 | import { LLMProvider } from "../llmprovider";
 4 | 
 5 | LLMProvider.registerProvider(
 6 |   "openai.gpt",
 7 |   (model) => {
 8 |     var rc = new OpenAIGPT(
 9 |       { ...model, isChatModel: true, isStreaming: true, type: "openai.gpt" }
10 |     );
11 | 
12 |     return new OpenAIGPT(rc);
13 |   },
14 |   (model, onConfigChange) => ModelConfig(model, (model) => onConfigChange!(model)),
15 |   new OpenAIGPT({
16 |     type: "openai.gpt",
17 |     maxTokens: 64,
18 |     temperature: 0.7,
19 |     topP: 1,
20 |     frequencyPenalty: 0,
21 |     presencePenalty: 0,
22 |     isChatModel: true,
23 |     isStreaming: true,
24 |     model: "gpt-3.5-turbo",
25 |   }));
26 | 
--------------------------------------------------------------------------------
/src/utils/blobStorage.test.ts:
--------------------------------------------------------------------------------
 1 | import { TestBlobStorage } from "./blobStorage";
 2 | 
 3 | test('test blob storage', async () => {
 4 |   const blob = new Blob(["test"]);
 5 |   const name = "test";
 6 |   const testBlobStorage = await TestBlobStorage;
 7 |   await testBlobStorage.saveBlob(blob, name);
 8 |   const result = await testBlobStorage.getBlob(name);
 9 |   expect(result).toEqual(blob);
10 |   const isExist = await testBlobStorage.isBlobExist(name);
11 |   expect(isExist).toEqual(true);
12 |   const list = await testBlobStorage.listBlobs();
13 |   expect(list).toEqual([name]);
14 |   await testBlobStorage.deleteBlob(name);
15 |   const isExist2 = await testBlobStorage.isBlobExist(name);
16 |   expect(isExist2).toEqual(false);
17 |   const list2 = await testBlobStorage.listBlobs();
18 |   expect(list2).toEqual([]);
19 | }
20 | );
--------------------------------------------------------------------------------
/src/memory/inMemorySavableVectorStore.ts:
--------------------------------------------------------------------------------
 1 | import { IEmbeddingModel } from "@/types/model";
 2 | import { MemoryVectorStore, MemoryVectorStoreArgs } from "langchain/vectorstores/memory"
 3 | import { getEmbeddingProvider } from "@/utils/app/embeddingProvider";
 4 | import { vectorStorage } from "@/utils/blobStorage";
 5 | 
 6 | export class InMemorySavableVectorStore extends MemoryVectorStore{
 7 |   blobName: string;
 8 |   constructor(embeddings: IEmbeddingModel,
 9 |     args: MemoryVectorStoreArgs){
10 |     var embeddingModelProvider = getEmbeddingProvider(embeddings);
11 |     super(embeddingModelProvider(embeddings), args);
12 |     this.blobName = embeddings.type;
13 |   }
14 | 
15 |   async save_v0(): Promise<void>{
16 |     // save memoryStore to blob
17 |     const vector = await vectorStorage;
18 |     await vector.saveBlob(this.memoryVectors, this.blobName);
19 | 
20 |   }
--------------------------------------------------------------------------------
/src/model/type.ts:
--------------------------------------------------------------------------------
 1 | import { IChatMessageRecord, IFunctionCall } from "@/message/type";
 2 | import { IRecord } from "@/types/storage";
 3 | import { FunctionDefinition } from "@azure/openai";
 4 | 
 5 | export interface IModel extends IRecord{
 6 |   type: string;
 7 |   description?: string;
 8 | }
 9 | 
10 | export interface IEmbeddingModel extends IModel{
11 | }
12 | 
13 | export interface IChatModelRecord extends IModel{
14 |   isStreaming: boolean;
15 | }
16 | 
17 | export interface ChatCompletionParams{
18 |   messages: IChatMessageRecord[],
19 |   temperature?: number,
20 |   maxTokens?: number,
21 |   topP?: number,
22 |   presencePenalty?: number,
23 |   frequencyPenalty?: number,
24 |   stop?: string[] | undefined,
25 |   functions?: FunctionDefinition[] | undefined,
26 | }
27 | 
28 | export interface IChatModel{
29 |   getChatCompletion(messages: ChatCompletionParams): Promise<IChatMessageRecord>;
30 | }
--------------------------------------------------------------------------------
/src/components/Chat/ErrorMessageDiv.tsx:
--------------------------------------------------------------------------------
 1 | import { ErrorMessage } from '@/types/error';
 2 | import { IconCircleX } from '@tabler/icons-react';
 3 | import { FC } from 'react';
 4 | 
 5 | interface Props {
 6 |   error: ErrorMessage;
 7 | }
 8 | 
 9 | export const ErrorMessageDiv: FC<Props> = ({ error }) => {
10 |   return (
11 |
12 |
13 | 14 |
15 |
{error.title}
16 | {error.messageLines.map((line, index) => ( 17 |
18 | {' '} 19 | {line}{' '} 20 |
21 | ))} 22 |
23 | {error.code ? Code: {error.code} : ''} 24 |
25 |
26 | ); 27 | }; 28 | -------------------------------------------------------------------------------- /src/message/type.ts: -------------------------------------------------------------------------------- 1 | import { IRecord } from '@/types/storage'; 2 | export type Role = 'assistant' | 'user' | 'system' | 'function'; 3 | 4 | export interface IFunctionCall 5 | { 6 | name: string; 7 | arguments: string; 8 | } 9 | 10 | export interface IMessageRecord extends IRecord { 11 | content?: string; 12 | } 13 | export interface IChatMessageRecord extends IMessageRecord { 14 | role: Role; 15 | name?: string; 16 | functionCall?: IFunctionCall; 17 | from?: string; // agent name 18 | timestamp?: number; 19 | } 20 | 21 | export function IsUserMessage(message: IChatMessageRecord): boolean{ 22 | return message.role === "user"; 23 | } 24 | 25 | export function IsFunctionCallMessage(message: IChatMessageRecord): boolean{ 26 | return message.functionCall !== undefined; 27 | } 28 | 29 | export function IsChatMessage(message: IMessageRecord): boolean{ 30 | return (message as IChatMessageRecord).role !== undefined; 31 | } -------------------------------------------------------------------------------- /src/memory/index.tsx: -------------------------------------------------------------------------------- 1 | import { IEmbeddingModel } from "@/model/type"; 2 | import { ChatMemory, IChatMemory } from "./chatMemory"; 3 | import { ChatMemoryConfigPanel } from "./chatMemoryConfigPanel"; 4 | import { MemoryProvider } from "./memoryProvider"; 5 | import { IMemory } from "./type"; 6 | import { IChatMessageRecord } from "@/message/type"; 7 | 8 | MemoryProvider.registerProvider( 9 | "memory.baseMemory", 10 | (config: IMemory, embedding?: IEmbeddingModel, history?: IChatMessageRecord[]) => new ChatMemory({history:history, ...config as IChatMemory}), 11 | (config: IMemory, onConfigChange: (config: IMemory) => void) => { 12 | var chatMemory = config as IChatMemory; 13 | return ; 14 | }, 15 | { 16 | type: "memory.baseMemory", 17 | maxHistoryLength: 64, 18 | memoryKey: "history", 19 | } as IChatMemory, 20 | ); -------------------------------------------------------------------------------- /src/utils/app/conversation.ts: -------------------------------------------------------------------------------- 1 | import { Conversation } from '@/types/chat'; 2 | 3 | export const updateConversation = ( 4 | updatedConversation: Conversation, 5 | allConversations: Conversation[], 6 | ) => { 7 | const updatedConversations = allConversations.map((c) => { 8 | if (c.id === updatedConversation.id) { 9 | return updatedConversation; 10 | } 11 | 12 | return c; 13 | }); 14 | 15 | saveConversation(updatedConversation); 16 | saveConversations(updatedConversations); 17 | 18 | return { 19 | single: updatedConversation, 20 | all: updatedConversations, 21 | }; 22 | }; 23 | 24 | export const saveConversation = (conversation: Conversation) => { 25 | localStorage.setItem('selectedConversation', JSON.stringify(conversation)); 26 | }; 27 | 28 | export const saveConversations = (conversations: Conversation[]) => { 29 | localStorage.setItem('conversationHistory', JSON.stringify(conversations)); 30 | }; 31 | -------------------------------------------------------------------------------- /src/chat/type.ts: -------------------------------------------------------------------------------- 1 | import { LogMessageLevel } from "@/message/LogMessage"; 2 | import { IChatMessageRecord, IMessageRecord } from "@/message/type"; 3 | import { IChatModelRecord } from 
"@/model/type"; 4 | import { IRecord } from "@/types/storage"; 5 | 6 | export const GroupTypeString: GroupType = 'group'; 7 | export type GroupType = 'group'; 8 | export type SelectSpeakerMode = 'auto' | 'manual' | 'semi-auto' 9 | export interface IGroupRecord extends IRecord{ 10 | type: GroupType; 11 | name: string; 12 | agentNames: string[]; 13 | conversation: IMessageRecord[]; 14 | llmModel?: IChatModelRecord; 15 | logLevel?: LogMessageLevel; 16 | maxRound?: number; 17 | selectSpeakerMode?: SelectSpeakerMode; 18 | initialMessages?: IChatMessageRecord[]; 19 | } 20 | 21 | export interface IGroup extends IGroupRecord{ 22 | callAsync( 23 | messages: IChatMessageRecord[], 24 | max_round?: number) : Promise; 25 | } 26 | -------------------------------------------------------------------------------- /src/next.config.js: -------------------------------------------------------------------------------- 1 | const { i18n } = require('./next-i18next.config'); 2 | const { version } = require('./package.json'); 3 | /** @type {import('next').NextConfig} */ 4 | 5 | const removeImports = require('next-remove-imports')(); 6 | const nextConfig = { 7 | // i18n, 8 | output: 'export', 9 | reactStrictMode: true, 10 | publicRuntimeConfig: { 11 | version, 12 | }, 13 | webpack(config, { isServer, dev }) { 14 | config.experiments = { 15 | asyncWebAssembly: true, 16 | layers: true, 17 | }; 18 | 19 | return config; 20 | }, 21 | eslint: { 22 | // Warning: This allows production builds to successfully complete even if 23 | // your project has ESLint errors. 24 | ignoreDuringBuilds: false, 25 | }, 26 | typescript: { 27 | // !! WARN !! 28 | // Dangerously allow production builds to successfully complete even if 29 | // your project has type errors. 30 | // !! WARN !! 31 | ignoreBuildErrors: true, 32 | }, 33 | }; 34 | 35 | module.exports = {...nextConfig}; 36 | -------------------------------------------------------------------------------- /src/components/Chat/Regenerate.tsx: -------------------------------------------------------------------------------- 1 | import { IconRefresh } from '@tabler/icons-react'; 2 | import { useTranslation } from 'next-i18next'; 3 | import { FC } from 'react'; 4 | 5 | interface Props { 6 | onRegenerate: () => void; 7 | } 8 | 9 | export const Regenerate: FC = ({ onRegenerate }) => { 10 | const { t } = useTranslation('chat'); 11 | return ( 12 |
13 |
14 | {t('Sorry, there was an error.')} 15 |
16 | 23 |
24 | ); 25 | }; 26 | -------------------------------------------------------------------------------- /infra/core/search/search-services.bicep: -------------------------------------------------------------------------------- 1 | param name string 2 | param location string = resourceGroup().location 3 | param tags object = {} 4 | 5 | param sku object = { 6 | name: 'standard' 7 | } 8 | 9 | param authOptions object = {} 10 | param semanticSearch string = 'disabled' 11 | 12 | resource search 'Microsoft.Search/searchServices@2021-04-01-preview' = { 13 | name: name 14 | location: location 15 | tags: tags 16 | identity: { 17 | type: 'SystemAssigned' 18 | } 19 | properties: { 20 | authOptions: authOptions 21 | disableLocalAuth: false 22 | disabledDataExfiltrationOptions: [] 23 | encryptionWithCmk: { 24 | enforcement: 'Unspecified' 25 | } 26 | hostingMode: 'default' 27 | networkRuleSet: { 28 | bypass: 'None' 29 | ipRules: [] 30 | } 31 | partitionCount: 1 32 | publicNetworkAccess: 'Enabled' 33 | replicaCount: 1 34 | semanticSearch: semanticSearch 35 | } 36 | sku: sku 37 | } 38 | 39 | output id string = search.id 40 | output endpoint string = 'https://${name}.search.windows.net/' 41 | output name string = search.name 42 | -------------------------------------------------------------------------------- /src/jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | rootDir: "./", 3 | clearMocks: true, 4 | coverageDirectory: "coverage", 5 | collectCoverageFrom: ["src/**/*.ts"], 6 | coveragePathIgnorePatterns: [ 7 | "/node_modules/", 8 | "types\\.ts", 9 | "test\\.ts", 10 | ], 11 | globals: { 12 | 'ts-jest': { 13 | tsConfig: '<rootDir>/tsconfig.test.json', 14 | diagnostics: { 15 | exclude: ['**'], 16 | } 17 | }, 18 | 'window': {}, 19 | }, 20 | testPathIgnorePatterns: ["/node_modules/"], 21 | testEnvironment: "node", 22 | setupFilesAfterEnv: ["<rootDir>/test/jest.setup.ts"], 23 | moduleNameMapper: { 24 | '^@/(.*)$': '<rootDir>/$1', 25 | // "react-markdown": "<rootDir>/node_modules/react-markdown/react-markdown.min.js", 26 | // "micromark-extension-gfm": "<rootDir>/node_modules/micromark-extension-gfm/index.js" 27 | }, 28 | transform: { 29 | "^.+\\.tsx?$": ["ts-jest", { tsconfig: "<rootDir>/tsconfig.test.json" }] 30 | }, 31 | setupFiles: [ 32 | "fake-indexeddb/auto" 33 | ], 34 | testTimeout: 60000, 35 | }; -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 LittleLittleCloud 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /src/utils/app/codeblock.ts: -------------------------------------------------------------------------------- 1 | interface languageMap { 2 | [key: string]: string | undefined; 3 | } 4 | 5 | export const programmingLanguages: languageMap = { 6 | javascript: '.js', 7 | python: '.py', 8 | java: '.java', 9 | c: '.c', 10 | cpp: '.cpp', 11 | 'c++': '.cpp', 12 | 'c#': '.cs', 13 | ruby: '.rb', 14 | php: '.php', 15 | swift: '.swift', 16 | 'objective-c': '.m', 17 | kotlin: '.kt', 18 | typescript: '.ts', 19 | go: '.go', 20 | perl: '.pl', 21 | rust: '.rs', 22 | scala: '.scala', 23 | haskell: '.hs', 24 | lua: '.lua', 25 | shell: '.sh', 26 | sql: '.sql', 27 | html: '.html', 28 | css: '.css', 29 | // add more file extensions here, make sure the key is same as language prop in CodeBlock.tsx component 30 | }; 31 | 32 | export const generateRandomString = (length: number, lowercase = false) => { 33 | const chars = 'ABCDEFGHJKLMNPQRSTUVWXY3456789'; // excluding similar looking characters like Z, 2, I, 1, O, 0 34 | let result = ''; 35 | for (let i = 0; i < length; i++) { 36 | result += chars.charAt(Math.floor(Math.random() * chars.length)); 37 | } 38 | return lowercase ? result.toLowerCase() : result; 39 | }; 40 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | > [!NOTE] 2 | > # This repository has been archived. The project has been moved into [Agent ChatRoom](https://github.com/LittleLittleCloud/Agent-ChatRoom/tree/main/chatroom-ui) 3 | 4 | 5 | --- 6 | --- 7 | --- 8 | --- 9 | 10 | # [Multi-Agent ChatUI](https://www.llmchat.me) - Chat with multiple agents in a role-playing game style 11 | 12 | ## Multi-agent role-playing algorithm 13 | ![Multi-agent role-playing algorithm](./assets/role-play.png) 14 | 15 | 16 | ## Supported LLMs 17 | - OpenAI.GPT 18 | - Azure.GPT 19 | 20 | ## Run locally ## 21 | ### Prerequisites 22 | - Node.js 23 | - NPM 24 | 25 | ### Clone 26 | ```bash 27 | git clone https://github.com/LittleLittleCloud/Multi-agent-ChatUI.git 28 | ``` 29 | ### Install dependencies 30 | ```bash 31 | cd ./src 32 | npm install 33 | ``` 34 | 35 | ### Build && Run 36 | ```bash 37 | npm run build 38 | npm run start 39 | ``` 40 | 41 | ## Example ## 42 | ### Note 43 | - All agents are powered by OpenAI GPT-3.5 Turbo. 44 | - Credit to my cat, PanPan, for contributing his profile picture as an agent avatar on a 100% voluntary basis.
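### Usage sketch: wiring up a group chat

The role-playing loop can also be driven from code. The snippet below is a minimal sketch rather than a drop-in sample: it reuses the constructor shapes and the `callAsync` API exercised in `src/chat/group.test.ts`, and it assumes the `GroupChat` implementation that test imports from `./group` is available. The agent names, system messages, prompt text, and environment variable names are illustrative placeholders.

```ts
import { GroupChat } from "@/chat/group";
import { GPTAgent } from "@/agent/gptAgent";
import { AzureGPT } from "@/model/azure/GPT";
import { IChatMessageRecord } from "@/message/type";

async function runRolePlay() {
  // One shared LLM backend for every agent (Azure OpenAI in this sketch).
  const llm = new AzureGPT({
    deploymentID: process.env.AZURE_GPT_3_5_TURBO_16K, // placeholder deployment name
    apiKey: process.env.AZURE_OPENAI_API_KEY,
    endpoint: process.env.AZURE_API_ENDPOINT,
    temperature: 0,
  });

  // Two role-playing agents with different system messages.
  const teacher = new GPTAgent({ name: "teacher", system_message: "You ask short math questions.", llm });
  const student = new GPTAgent({ name: "student", system_message: "You answer math questions.", llm });

  // The group chat relays messages between the admin agent and the others.
  const group = new GroupChat({ name: "math-chat", llm, admin: teacher, agents: [student] });
  group.addInitialConversation("Let's practice some algebra.", teacher);

  const opening = {
    from: teacher.name,
    role: "user",
    content: "Solve 2x + 3 = 7.",
  } as IChatMessageRecord;

  // Run a single round and print who said what.
  const replies = await group.callAsync([opening], 1);
  replies.forEach((m) => console.log(`${m.from}: ${m.content}`));
}

runRolePlay();
```

The second argument to `callAsync` mirrors the `max_round` parameter declared on `IGroup` in `src/chat/type.ts`; raising it should let the group continue the role-play for more turns before returning the accumulated conversation.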
45 | ### Math Chat ### 46 | ![Math Chat](./assets/mathchat.png) 47 | 48 | ## License 49 | [MIT](./LICENSE.txt) 50 | -------------------------------------------------------------------------------- /infra/core/ai/cognitiveservices.bicep: -------------------------------------------------------------------------------- 1 | param name string 2 | param location string = resourceGroup().location 3 | param tags object = {} 4 | 5 | param customSubDomainName string = name 6 | param deployments array = [] 7 | param kind string = 'OpenAI' 8 | param publicNetworkAccess string = 'Enabled' 9 | param sku object = { 10 | name: 'S0' 11 | } 12 | 13 | resource account 'Microsoft.CognitiveServices/accounts@2022-10-01' = { 14 | name: name 15 | location: location 16 | tags: tags 17 | kind: kind 18 | properties: { 19 | customSubDomainName: customSubDomainName 20 | publicNetworkAccess: publicNetworkAccess 21 | } 22 | sku: sku 23 | } 24 | 25 | @batchSize(1) 26 | resource deployment 'Microsoft.CognitiveServices/accounts/deployments@2022-10-01' = [for deployment in deployments: { 27 | parent: account 28 | name: deployment.name 29 | properties: { 30 | model: deployment.model 31 | raiPolicyName: contains(deployment, 'raiPolicyName') ? deployment.raiPolicyName : null 32 | scaleSettings: deployment.scaleSettings 33 | } 34 | }] 35 | 36 | output endpoint string = account.properties.endpoint 37 | output id string = account.id 38 | output name string = account.name 39 | -------------------------------------------------------------------------------- /src/utils/app/agentReducer.ts: -------------------------------------------------------------------------------- 1 | import { IAgent } from "@/types/agent"; 2 | 3 | export type AgentCmd = "add" | "remove" | "update" | "addOrUpdate"; 4 | export type AgentAction = {type: AgentCmd, payload: IAgent, original?: IAgent}; 5 | export function agentReducer(agents : IAgent[], action: AgentAction){ 6 | switch(action.type){ 7 | case "add": 8 | if(agents.find(a => a.alias === action.payload.alias)){ 9 | throw new Error("Agent already exists"); 10 | } 11 | return [...agents, action.payload]; 12 | case "remove": 13 | return agents.filter(a => a.alias !== action.payload.alias); 14 | case "update": 15 | var originalAlias = action.original?.alias ?? action.payload.alias; 16 | return agents.map(a => a.alias === originalAlias ? action.payload : a); 17 | case "addOrUpdate": 18 | var existing = agents.find(a => a.alias === action.payload.alias); 19 | if(existing){ 20 | return agents.map(a => a.alias === action.payload.alias ? 
action.payload : a); 21 | } 22 | return [...agents, action.payload]; 23 | default: 24 | throw new Error("Invalid agent command"); 25 | } 26 | } -------------------------------------------------------------------------------- /infra/main.bicep: -------------------------------------------------------------------------------- 1 | targetScope = 'subscription' 2 | 3 | @minLength(1) 4 | @maxLength(64) 5 | @description('Name of the the environment which is used to generate a short unique hash used in all resources.') 6 | param environmentName string 7 | 8 | @minLength(1) 9 | @description('Primary location for all resources') 10 | param location string 11 | 12 | param backendServiceName string = '' 13 | param resourceGroupName string = '' 14 | 15 | var abbrs = loadJsonContent('abbreviations.json') 16 | var tags = { 'azd-env-name': environmentName } 17 | // Organize resources in a resource group 18 | resource resourceGroup 'Microsoft.Resources/resourceGroups@2021-04-01' = { 19 | name: !empty(resourceGroupName) ? resourceGroupName : '${abbrs.resourcesResourceGroups}${environmentName}' 20 | location: location 21 | tags: tags 22 | } 23 | 24 | var resourceToken = toLower(uniqueString(subscription().id, environmentName, location)) 25 | 26 | // The application frontend 27 | module frontend 'core/host/staticwebapp.bicep' = { 28 | name: 'frontend' 29 | scope: resourceGroup 30 | params: { 31 | name: !empty(backendServiceName) ? backendServiceName : '${abbrs.webSitesAppService}frontend-${resourceToken}' 32 | location: location 33 | tags: union(tags, { 'azd-service-name': 'frontend' }) 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/utils/app/groupReducer.ts: -------------------------------------------------------------------------------- 1 | import { IGroupRecord } from '@/types/group'; 2 | import {useReducer} from 'react'; 3 | 4 | export type GroupCmd = "add" | "remove" | "update" | "addOrUpdate"; 5 | export type GroupAction = {type: GroupCmd, payload: IGroupRecord, original?: IGroupRecord}; 6 | export function groupReducer(groups : IGroupRecord[], action: GroupAction){ 7 | switch(action.type){ 8 | case "add": 9 | if(groups.find(g => g.name === action.payload.name)){ 10 | throw new Error("Group already exists"); 11 | } 12 | return [...groups, action.payload]; 13 | case "remove": 14 | return groups.filter(g => g.name !== action.payload.name); 15 | case "update": 16 | var originalName = action.original?.name ?? action.payload.name; 17 | return groups.map(g => g.name === originalName ? action.payload : g); 18 | case "addOrUpdate": 19 | var existing = groups.find(g => g.name === action.payload.name); 20 | if(existing){ 21 | return groups.map(g => g.name === action.payload.name ? 
action.payload : g); 22 | } 23 | return [...groups, action.payload]; 24 | default: 25 | throw new Error("Invalid group command"); 26 | } 27 | } -------------------------------------------------------------------------------- /src/message/index.tsx: -------------------------------------------------------------------------------- 1 | import { SmallLabel } from "@/components/Global/EditableSavableTextField"; 2 | import { ILogMessageRecord, LogMessage } from "./LogMessage"; 3 | import { IMarkdownMessageRecord, MarkdownMessage } from "./MarkdownMessage"; 4 | import { MessageProvider } from "./messageProvider"; 5 | import { IChatMessageRecord, IMessageRecord } from "./type"; 6 | 7 | MessageProvider.registerProvider( 8 | "message.markdown", 9 | (message) => message, 10 | (message, _) => MarkdownMessage(message), 11 | { 12 | type: "message.markdown", 13 | content: "", 14 | } as IMarkdownMessageRecord); 15 | 16 | MessageProvider.registerProvider( 17 | "message.log", 18 | (message) => message, 19 | (message, _) => LogMessage(message), 20 | { 21 | type: "message.log", 22 | content: "", 23 | } as ILogMessageRecord 24 | ) 25 | 26 | export const MessageElement = (props: { message: IMessageRecord, onConfigChange?: (msg: IMessageRecord) => void }) => { 27 | if(!MessageProvider.hasProvider(props.message.type)){ 28 | return {props.message.content?.toString()} 29 | } 30 | 31 | return MessageProvider.getConfigUIProvider(props.message.type)(props.message, props.onConfigChange); 32 | } -------------------------------------------------------------------------------- /src/agent/index.ts: -------------------------------------------------------------------------------- 1 | import {IGPTAgentRecord, initializeGPTAgent } from "./gptAgent"; 2 | import { GPTAgentConfigPanel } from "./gptAgentConfigPanel"; 3 | import { AgentProvider } from "./agentProvider"; 4 | 5 | // register gptAgent 6 | AgentProvider.registerProvider( 7 | "agent.gpt", 8 | (agent) => { 9 | if (agent.type != "agent.gpt") { 10 | throw new Error("Invalid agent type"); 11 | } else { 12 | const gpt_record: IGPTAgentRecord = { 13 | ...agent, 14 | type: "agent.gpt", 15 | }; 16 | 17 | return initializeGPTAgent(gpt_record); 18 | } 19 | }, 20 | (agent, onConfigChange) => { 21 | if (agent.type != "agent.gpt") { 22 | throw new Error("Invalid agent type"); 23 | } 24 | 25 | var gpt_record: IGPTAgentRecord = { 26 | ...agent, 27 | type: "agent.gpt", 28 | }; 29 | 30 | return GPTAgentConfigPanel(gpt_record, onConfigChange); 31 | }, 32 | { 33 | type: "agent.gpt", 34 | name: "GPT Agent", 35 | system_message: "you are a helpful ai assistant", 36 | } as IGPTAgentRecord); -------------------------------------------------------------------------------- /src/model/azure/index.ts: -------------------------------------------------------------------------------- 1 | import { IAzureGPTRecord, AzureGPT, AzureTextEmbeddingsAda002V2, IAzureTextEmbeddingAda002V2 } from "./GPT"; 2 | import { AzureEmbeddingConfig, GPTConfig } from "./ConfigPanel"; 3 | import { EmbeddingProvider, LLMProvider } from "@/model/llmprovider"; 4 | 5 | // register LLM provider 6 | LLMProvider.registerProvider( 7 | "azure.gpt", 8 | (model) => { 9 | const azureGPTRecord = new AzureGPT({ 10 | ...model, 11 | isChatModel: true, 12 | isStreaming: true, 13 | type: "azure.gpt", 14 | }); 15 | 16 | return new AzureGPT(azureGPTRecord); 17 | }, 18 | (model, onConfigChange) => GPTConfig(model, (model) => onConfigChange!(model)), 19 | new AzureGPT({ 20 | maxTokens: 64, 21 | temperature: 0.7, 22 | topP: 1, 23 | 
frequencyPenalty: 0, 24 | presencePenalty: 0, 25 | })); 26 | 27 | // // register embedding provider 28 | // EmbeddingProvider.registerProvider( 29 | // "azure.text-embedding-ada-002-v2", 30 | // (model) => new AzureTextEmbeddingsAda002V2(model as IAzureTextEmbeddingAda002V2), 31 | // (model, onConfigChange) => AzureEmbeddingConfig(model, (model) => onConfigChange(model)), 32 | // { 33 | // type: "azure.text-embedding-ada-002-v2", 34 | // apiVersion: "2021-03-01-preview", 35 | // } as IAzureTextEmbeddingAda002V2); 36 | -------------------------------------------------------------------------------- /src/public/locales/zh/chat.json: -------------------------------------------------------------------------------- 1 | { 2 | "OpenAI API Key Required": "需要 OpenAI API 密钥", 3 | "Please set your OpenAI API key in the bottom left of the sidebar.": "请在侧边栏左下角设置您的 OpenAI API 密钥。", 4 | "Stop Generating": "停止生成", 5 | "Prompt limit is {{maxLength}} characters": "提示字数限制为 {{maxLength}} 个字符", 6 | "System Prompt": "系统提示", 7 | "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.": "你是 ChatGPT,一个由 OpenAI 训练的大型语言模型。请仔细遵循用户的指示。使用 Markdown 格式进行回应。", 8 | "Enter a prompt": "输入一个提示", 9 | "Regenerate response": "重新生成回应", 10 | "Sorry, there was an error.": "抱歉,出现了错误。", 11 | "Model": "模型", 12 | "Conversation": "对话", 13 | "OR": "或", 14 | "Loading...": "加载中...", 15 | "Type a message...": "输入一条消息...", 16 | "Error fetching models.": "获取模型时出错。", 17 | "AI": "AI", 18 | "You": "你", 19 | "Make sure your OpenAI API key is set in the bottom left of the sidebar.": "请确保您的 OpenAI API 密钥已在侧边栏左下角设置。", 20 | "If you completed this step, OpenAI may be experiencing issues.": "如果您已完成此步骤,OpenAI 可能遇到了问题。", 21 | "Message limit is {{maxLength}} characters. You have entered {{valueLength}} characters.": "消息字数限制为 {{maxLength}} 个字符。您已输入 {{valueLength}} 个字符。", 22 | "Please enter a message": "请输入一条消息", 23 | "Chatbot UI is an advanced chatbot kit for OpenAI's chat models aiming to mimic ChatGPT's interface and functionality.": "Chatbot UI 是一个高级聊天机器人工具包,旨在模仿 OpenAI 聊天模型的 ChatGPT 界面和功能。", 24 | "Are you sure you want to clear all messages?": "你确定要清除所有的消息吗?" 25 | } 26 | -------------------------------------------------------------------------------- /src/components/Chat/ModelSelect.tsx: -------------------------------------------------------------------------------- 1 | import { OpenAIModel } from '@/types/openai'; 2 | import { useTranslation } from 'next-i18next'; 3 | import { FC } from 'react'; 4 | 5 | interface Props { 6 | model: OpenAIModel; 7 | models: OpenAIModel[]; 8 | onModelChange: (model: OpenAIModel) => void; 9 | } 10 | 11 | export const ModelSelect: FC = ({ model, models, onModelChange }) => { 12 | const { t } = useTranslation('chat'); 13 | return ( 14 |
15 | 18 |
19 | 41 |
42 |
43 | ); 44 | }; 45 | -------------------------------------------------------------------------------- /src/components/Chat/PromptList.tsx: -------------------------------------------------------------------------------- 1 | import { Prompt } from '@/types/prompt'; 2 | import { FC, MutableRefObject } from 'react'; 3 | 4 | interface Props { 5 | prompts: Prompt[]; 6 | activePromptIndex: number; 7 | onSelect: () => void; 8 | onMouseOver: (index: number) => void; 9 | promptListRef: MutableRefObject; 10 | } 11 | 12 | export const PromptList: FC = ({ 13 | prompts, 14 | activePromptIndex, 15 | onSelect, 16 | onMouseOver, 17 | promptListRef, 18 | }) => { 19 | return ( 20 |
    30 | {prompts.map((prompt, index) => ( 31 |
{ 39 | e.preventDefault(); 40 | e.stopPropagation(); 41 | onSelect(); 42 | }} 43 | onMouseEnter={() => onMouseOver(index)} 44 | > 45 | {prompt.name} 46 |
47 | ))} 48 |
49 | ); 50 | }; 51 | -------------------------------------------------------------------------------- /src/public/locales/ko/chat.json: -------------------------------------------------------------------------------- 1 | { 2 | "OpenAI API Key Required": "OpenAI API 키가 필요합니다", 3 | "Please set your OpenAI API key in the bottom left of the sidebar.": "사이드바 왼쪽 하단에 OpenAI API 키를 설정하세요.", 4 | "Stop Generating": "생성 중지", 5 | "Prompt limit is {{maxLength}} characters": "프롬프트 제한은 {{maxLength}}자입니다", 6 | "System Prompt": "시스템 프롬트", 7 | "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.": "당신은 OpenAI에서 훈련된 대규모 언어 모델 ChatGPT입니다. 사용자의 지시를 주의해서 따르세요. 마크다운을 사용하여 응답하세요.", 8 | "Enter a prompt": "프롬프트를 입력하세요", 9 | "Regenerate response": "응답 재생성", 10 | "Sorry, there was an error.": "죄송합니다, 오류가 발생했습니다.", 11 | "Model": "모델", 12 | "Conversation": "대화", 13 | "OR": "또는", 14 | "Loading...": "로드 중...", 15 | "Type a message...": "메시지를 입력하세요...", 16 | "Error fetching models.": "모델을 가져오는 중 오류가 발생했습니다.", 17 | "AI": "인공지능", 18 | "You": "당신", 19 | "Make sure your OpenAI API key is set in the bottom left of the sidebar.": "OpenAI API 키가 사이드바 왼쪽 하단에 설정되어 있는지 확인하세요.", 20 | "If you completed this step, OpenAI may be experiencing issues.": "이 단계를 완료한 경우 OpenAI에 문제가 있을 수도 있습니다.", 21 | "Message limit is {{maxLength}} characters. You have entered {{valueLength}} characters.": "메시지 제한은 {{maxLength}}자입니다. {{valueLength}}자를 입력했습니다.", 22 | "Please enter a message": "메시지를 입력하세요", 23 | "Chatbot UI is an advanced chatbot kit for OpenAI's chat models aiming to mimic ChatGPT's interface and functionality.": "Chatbot UI는 ChatGPT의 인터페이스와 기능을 모방하는 것을 목표로 둔 OpenAI의 채팅 모델들을 위한 고급 챗봇 키트입니다.", 24 | "Are you sure you want to clear all messages?": "모든 메시지를 지우시겠습니까?" 25 | } 26 | -------------------------------------------------------------------------------- /src/public/locales/ja/chat.json: -------------------------------------------------------------------------------- 1 | { 2 | "OpenAI API Key Required": "OpenAIのAPIキーが必要です", 3 | "Please set your OpenAI API key in the bottom left of the sidebar.": "左下のサイドバーでOpenAIのAPIキーを設定してください", 4 | "Stop Generating": "回答をストップ", 5 | "Prompt limit is {{maxLength}} characters": "プロンプトの文字数は{{maxLength}}文字までです", 6 | "System Prompt": "システムのプロンプト", 7 | "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.": "あなたはOpenAIによってトレーニングされた大規模言語モデルのChatGPTです。ユーザーの指示には注意深く従ってください。マークダウンを使用して応答してください。", 8 | "Enter a prompt": "プロンプトを入力してください", 9 | "Regenerate response": "もう一度回答する", 10 | "Sorry, there was an error.": "すみません、エラーが発生しました。", 11 | "Model": "モデル", 12 | "Conversation": "会話", 13 | "OR": "または", 14 | "Loading...": "読み込み中...", 15 | "Type a message...": "メッセージを入力...", 16 | "Error fetching models.": "モデルの取得中にエラーが発生しました。", 17 | "AI": "AI", 18 | "You": "あなた", 19 | "Make sure your OpenAI API key is set in the bottom left of the sidebar.": "OpenAIのAPIキーがサイドバーの左下に設定されていることを確認してください。", 20 | "If you completed this step, OpenAI may be experiencing issues.": "このステップを完了した場合、OpenAIに問題が発生している可能性があります。", 21 | "click if using a .env.local file": "もし.env.localファイルを使用している場合はこちらをクリックしてください", 22 | "Message limit is {{maxLength}} characters. 
You have entered {{valueLength}} characters.": "メッセージの文字数は{{maxLength}}文字までです。あなたは{{valueLength}}文字を入力しました。", 23 | "Please enter a message": "メッセージを入力してください", 24 | "Chatbot UI is an advanced chatbot kit for OpenAI's chat models aiming to mimic ChatGPT's interface and functionality.": "Chatbot UIは、ChatGPTと同様のインターフェイスと機能を実現するための、チャットボットキットです。", 25 | "Are you sure you want to clear all messages?": "すべてのメッセージを削除してもよろしいですか?" 26 | } -------------------------------------------------------------------------------- /src/memory/chatMemory.test.ts: -------------------------------------------------------------------------------- 1 | import { IChatMessageRecord } from "@/message/type"; 2 | import { ChatMemory } from "./chatMemory"; 3 | 4 | test('ChatMemory can parse history using ChatML', async () => { 5 | var chatHistory: IChatMessageRecord[] = [ 6 | {from: "user", content: "hello", type: "message.plainText"}, 7 | {from: "assistant", content: "hi", type: "message.plainText"}, 8 | {from: "user", content: "how are you?", type: "message.plainText"}, 9 | {from: "assistant", content: "I'm fine", type: "message.plainText"}, 10 | ]; 11 | 12 | var chatMemory = new ChatMemory({history: chatHistory, useChatML: true}); 13 | var memoryVariables = await chatMemory.loadMemoryVariables({}); 14 | var expected = `<|im_start|>user 15 | hello 16 | <|im_end|> 17 | <|im_start|>assistant 18 | hi 19 | <|im_end|> 20 | <|im_start|>user 21 | how are you? 22 | <|im_end|> 23 | <|im_start|>assistant 24 | I'm fine 25 | <|im_end|>`; 26 | expect(memoryVariables["history"]).toBe(expected); 27 | }) 28 | 29 | test('ChatMemory can parse history using plain text', async () => { 30 | var chatHistory: IChatMessageRecord[] = [ 31 | {from: "user", content: "hello", type: "message.plainText"}, 32 | {from: "assistant", content: "hi", type: "message.plainText"}, 33 | {from: "user", content: "how are you?", type: "message.plainText"}, 34 | {from: "assistant", content: "I'm fine", type: "message.plainText"}, 35 | ]; 36 | 37 | var chatMemory = new ChatMemory({history: chatHistory}); 38 | var memoryVariables = await chatMemory.loadMemoryVariables({}); 39 | var expected = `user:hello 40 | assistant:hi 41 | user:how are you? 42 | assistant:I'm fine`; 43 | expect(memoryVariables["history"]).toBe(expected); 44 | }) -------------------------------------------------------------------------------- /src/message/MarkdownMessage.tsx: -------------------------------------------------------------------------------- 1 | import { SmallLabel, TinyClickableLabel, TinyLabel } from "@/components/Global/EditableSavableTextField"; 2 | import { MemoizedReactMarkdown } from "@/components/Markdown/MemoizedReactMarkdown"; 3 | import { Stack, Divider, Box } from "@mui/material"; 4 | import React from "react"; 5 | import { IChatMessageRecord } from "./type"; 6 | 7 | export type MarkdownMessageType = 'message.markdown'; 8 | 9 | export interface IMarkdownMessageRecord extends IChatMessageRecord { 10 | type: MarkdownMessageType, 11 | } 12 | 13 | export const MarkdownMessage = (message: IMarkdownMessageRecord) => { 14 | const content = message.content; 15 | const [openContent, setOpenContent] = React.useState<'markdown' | 'plain text'>("markdown"); 16 | return ( 17 | 20 | { 21 | openContent === 'markdown' && 22 | 23 | {content ?? ''} 24 | 25 | } 26 | { 27 | openContent === 'plain text' && 28 | {content?.replace('\n', '
')}
29 | } 30 | 33 | { 34 | setOpenContent('markdown')} 36 | >content 37 | } 38 | 39 | setOpenContent('plain text')} 41 | >plain text 42 | 43 |
44 | ) 45 | } -------------------------------------------------------------------------------- /src/public/locales/he/chat.json: -------------------------------------------------------------------------------- 1 | { 2 | "OpenAI API Key Required": "מפתח openAI API", 3 | "Please set your OpenAI API key in the bottom left of the sidebar.": "עליך להזין את המפתח האישי שלך בצידו השמאלי התחתון של תפריט הניווט.", 4 | "Stop Generating": "עצור תהליך הפקת התשובה", 5 | "Prompt limit is {{maxLength}} characters": "אורך התשובה מוגבל ל {{maxLength}} תווים", 6 | "System Prompt": "הגדרת בסיס לכל תשובה של המערכת", 7 | "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.": "You are Hebrew speaking ChatGPT, a large language model trained by OpenAI which responds in Hebrew to any question or User comment. Follow the user's instructions carefully. Respond in Hebrew using markdown.", 8 | "Enter a prompt": "הקלד הודעה", 9 | "Regenerate response": "הפק תשובה מחדש", 10 | "Sorry, there was an error.": "התנצלותנו הכנה, המערכת מדווחת על תקלה", 11 | "Model": "מודל", 12 | "Conversation": "שיחה", 13 | "OR": "או", 14 | "Loading...": "טוען...", 15 | "Type a message...": "הקלד הודעתך...", 16 | "Error fetching models.": "תקלה באיחזור רשימת המודלים", 17 | "AI": "המערכת", 18 | "You": "אתה", 19 | "Make sure your OpenAI API key is set in the bottom left of the sidebar.": "עליך לוודא שמפתח האישי שלך מוזן בתפריט מצד שמאל", 20 | "If you completed this step, OpenAI may be experiencing issues.": "אם טרם השלמת חלק זה יש סבירות גבוהה להתרחשות תקלה", 21 | "Message limit is {{maxLength}} characters. You have entered {{valueLength}} characters.": "מגבלת תווים היא {{maxLength}}. אתה הקלדת עד עכשיו {{valueLength}} תווים.", 22 | "Please enter a message": "הקלד את הודעתך", 23 | "Chatbot UI is an advanced chatbot kit for OpenAI's chat models aiming to mimic ChatGPT's interface and functionality.": "מערכת הצאטבוט היא ערכה מתקדמת לניהול שיחה המכוונת לחקות את המראה והפונקציונאלית של ChatGPT", 24 | "Are you sure you want to clear all messages?": "האם אתה בטוח שברצונך למחוק את כל ההודעות?" 
25 | } -------------------------------------------------------------------------------- /src/model/openai/ModelConfig.tsx: -------------------------------------------------------------------------------- 1 | import { useEffect, useState } from "react"; 2 | import { IOpenAIGPTRecord, ITextDavinci003 } from "./GPT"; 3 | import { EditableSavableTextField, SettingSection, SmallNumberSetting, SmallSelectSetting, SmallTextSetting } from "@/components/Global/EditableSavableTextField"; 4 | import { AVAILABLE_GPT_MODELS, getGPTMaxTokenLimit } from "../utils"; 5 | 6 | export const ModelConfig = (model: IOpenAIGPTRecord, onModelConfigChanged : (config: IOpenAIGPTRecord) => void) => { 7 | const [maxToken, setMaxToken] = useState(model.maxTokens); 8 | 9 | useEffect(() => { 10 | var maxToken = getGPTMaxTokenLimit(model.model); 11 | setMaxToken(maxToken); 12 | }, [model]); 13 | 14 | return ( 15 | <> 16 | onModelConfigChanged({ ...model, apiKey: value})}/> 17 | onModelConfigChanged({ ...model, model: value})}/> 18 | onModelConfigChanged({ ...model, maxTokens: value})}/> 19 | onModelConfigChanged({ ...model, temperature: value})}/> 20 | onModelConfigChanged({ ...model, topP: value})}/> 21 | onModelConfigChanged({ ...model, frequencyPenalty: value})}/> 22 | onModelConfigChanged({ ...model, presencePenalty: value})}/> 23 | ); 24 | }; -------------------------------------------------------------------------------- /src/public/locales/sv/chat.json: -------------------------------------------------------------------------------- 1 | { 2 | "OpenAI API Key Required": "OpenAI API-nyckel krävs", 3 | "Please set your OpenAI API key in the bottom left of the sidebar.": "Vänligen ange din OpenAI API-nyckel längst ner till vänster i sidofältet.", 4 | "Stop Generating": "Sluta generera", 5 | "Prompt limit is {{maxLength}} characters": "Din prompt kan inte ha fler än {{maxLength}} tecken", 6 | "System Prompt": "System prompt", 7 | "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.": "Du är ChatGPT, en stor språkmodell tränad av OpenAI. Följ användarens instruktioner noggrant. Svara genom att använda markdown.", 8 | "Enter a prompt": "Ange en prompt", 9 | "Regenerate response": "Återskapa svar", 10 | "Sorry, there was an error.": "Ursäkta, det uppstod ett fel.", 11 | "Model": "Modell", 12 | "Conversation": "Konversation", 13 | "OR": "ELLER", 14 | "Loading...": "Laddar...", 15 | "Type a message...": "Skriv ett meddelande...", 16 | "Error fetching models.": "Det gick inte att hämta modeller.", 17 | "AI": "AI", 18 | "You": "Du", 19 | "Make sure your OpenAI API key is set in the bottom left of the sidebar.": "Se till att du har angett din OpenAI API-nyckel längst ner till vänster i sidofältet.", 20 | "If you completed this step, OpenAI may be experiencing issues.": "Om du slutförde det här steget kan OpenAI ha problem.", 21 | "Message limit is {{maxLength}} characters. You have entered {{valueLength}} characters.": "Meddelandegränsen är {{maxLength}} tecken. Du har angett {{valueLength}} tecken.", 22 | "Please enter a message": "Vänligen ange ett meddelande", 23 | "Chatbot UI is an advanced chatbot kit for OpenAI's chat models aiming to mimic ChatGPT's interface and functionality.": "Chatbot UI är ett avancerat chatbot-kit för OpenAI:s chattmodeller som syftar till att efterlikna ChatGPT:s gränssnitt och funktionalitet.", 24 | "Are you sure you want to clear all messages?": "Är du säker på att du vill rensa alla meddelanden?" 
25 | } 26 | -------------------------------------------------------------------------------- /src/public/locales/ru/chat.json: -------------------------------------------------------------------------------- 1 | { 2 | "OpenAI API Key Required": "Необходим ключ OpenAI", 3 | "Please set your OpenAI API key in the bottom left of the sidebar.": "Пожалуйста введите API-ключ OpenAI в левом нижнем углу", 4 | "Stop Generating": "Прекратить", 5 | "Prompt limit is {{maxLength}} characters": "Лимит сообщения на символы: {{maxLength}} символов", 6 | "System Prompt": "Системное сообщение", 7 | "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.": "Вы ChatGPT, большая языковая модель, созданная компанией OpenAI. Следуйте инструкциям пользователя. Отвечайте на сообщения, используя Markdown", 8 | "Enter a prompt": "Введите сообщение", 9 | "Regenerate response": "Перегенерировать сообщение", 10 | "Sorry, there was an error.": "Просим прощения, произошла ошибка", 11 | "Model": "Модель", 12 | "Conversation": "Чат", 13 | "OR": "ИЛИ", 14 | "Loading...": "Пожалуйста подождите...", 15 | "Type a message...": "Введите сообщение...", 16 | "Error fetching models.": "Ошибка при получении списка моделей", 17 | "AI": "Бот", 18 | "You": "Вы", 19 | "Cancel": "Отмена", 20 | "Save & Submit": "Отредактировать", 21 | "Make sure your OpenAI API key is set in the bottom left of the sidebar.": "Убедитесь, что вы ввели API-ключ OpenAI.", 22 | "If you completed this step, OpenAI may be experiencing issues.": "Если вы выполнили этот шаг, то возможно OpenAI может испытывать проблемы", 23 | "Message limit is {{maxLength}} characters. You have entered {{valueLength}} characters.": "Лимит сообщения: {{maxLength}} символов. Вы ввели {{valueLength}} символов.", 24 | "Please enter a message": "Пожалуйста введите сообщение", 25 | "Chatbot UI is an advanced chatbot kit for OpenAI's chat models aiming to mimic ChatGPT's interface and functionality.": "Chatbot UI - продвинутый интерфейс чатбота для чат-моделей OpenAI, имитирующий интерфейс ChatGPT", 26 | "Are you sure you want to clear all messages?": "Вы уверены, что хотите удалить все сообщения?" 27 | } 28 | -------------------------------------------------------------------------------- /src/public/locales/bn/chat.json: -------------------------------------------------------------------------------- 1 | { 2 | "OpenAI API Key Required": "OpenAI API key বাধ্যতামূলক", 3 | "Please set your OpenAI API key in the bottom left of the sidebar.": "দয়া করে আপনার OpenAI API key বামে সাইডবারের নিচের দিকে সেট করুন।", 4 | "Stop Generating": "বার্তা জেনারেট করা বন্ধ করুন", 5 | "Prompt limit is {{maxLength}} characters": "নির্দেশনা (বার্তা) সীমা সর্বোচ্চ {{maxLength}} অক্ষর", 6 | "System Prompt": "সিস্টেম নির্দেশনা (বার্তা)", 7 | "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.": "তুমি ChatGPT, OpenAI দ্বারা প্রশিক্ষিত একটি বড় ভাষা মডেল। সাবধানে ব্যবহারকারীর নির্দেশাবলী অনুসরণ করুন. 
মার্কডাউন ব্যবহার করে উত্তর দিন।", 8 | "Enter a prompt": "একটি নির্দেশনা (বার্তা) দিন", 9 | "Regenerate response": "বার্তা আবার জেনারেট করুন", 10 | "Sorry, there was an error.": "দুঃখিত, কোনো একটি সমস্যা হয়েছে।", 11 | "Model": "মডেল", 12 | "Conversation": "আলাপচারিতা", 13 | "OR": "অথবা", 14 | "Loading...": "লোড হচ্ছে...", 15 | "Type a message...": "কোনো মেসেজ লিখুন...", 16 | "Error fetching models.": "মডেল পেতে সমস্যা হচ্ছে।", 17 | "AI": "AI", 18 | "You": "তুমি", 19 | "Make sure your OpenAI API key is set in the bottom left of the sidebar.": "নিশ্চিত করুন যে আপনার OpenAI API key সাইডবারের নীচে বাম দিকে সেট করা আছে।", 20 | "If you completed this step, OpenAI may be experiencing issues.": "আপনি এই ধাপটি সম্পন্ন করে থাকলে, হতে পারে যে OpenAI কোনো সমস্যার সম্মুখীন হয়েছে।", 21 | "Message limit is {{maxLength}} characters. You have entered {{valueLength}} characters.": "বার্তার সর্বোচ্চ সীমা হল {{maxLength}} অক্ষর৷ আপনি {{valueLength}} অক্ষর লিখেছেন।", 22 | "Please enter a message": "দয়া করে একটি মেসেজ লিখুন", 23 | "Chatbot UI is an advanced chatbot kit for OpenAI's chat models aiming to mimic ChatGPT's interface and functionality.": "Chatbot UI হল OpenAI-এর চ্যাট মডেলগুলির জন্য একটি উন্নত চ্যাটবট কিট যার লক্ষ্য হল ChatGPT-এর ইন্টারফেস এবং কার্যকারিতা অনুকরণ করা।", 24 | "Are you sure you want to clear all messages?": "সমস্ত বার্তা মুছে ফেলতে আপনি কি নিশ্চিত?" 25 | } 26 | -------------------------------------------------------------------------------- /infra/core/storage/storage-account.bicep: -------------------------------------------------------------------------------- 1 | param name string 2 | param location string = resourceGroup().location 3 | param tags object = {} 4 | 5 | @allowed([ 'Hot', 'Cool', 'Premium' ]) 6 | param accessTier string = 'Hot' 7 | param allowBlobPublicAccess bool = false 8 | param allowCrossTenantReplication bool = true 9 | param allowSharedKeyAccess bool = true 10 | param defaultToOAuthAuthentication bool = false 11 | param deleteRetentionPolicy object = {} 12 | @allowed([ 'AzureDnsZone', 'Standard' ]) 13 | param dnsEndpointType string = 'Standard' 14 | param kind string = 'StorageV2' 15 | param minimumTlsVersion string = 'TLS1_2' 16 | @allowed([ 'Enabled', 'Disabled' ]) 17 | param publicNetworkAccess string = 'Disabled' 18 | param sku object = { name: 'Standard_LRS' } 19 | 20 | param containers array = [] 21 | 22 | resource storage 'Microsoft.Storage/storageAccounts@2022-05-01' = { 23 | name: name 24 | location: location 25 | tags: tags 26 | kind: kind 27 | sku: sku 28 | properties: { 29 | accessTier: accessTier 30 | allowBlobPublicAccess: allowBlobPublicAccess 31 | allowCrossTenantReplication: allowCrossTenantReplication 32 | allowSharedKeyAccess: allowSharedKeyAccess 33 | defaultToOAuthAuthentication: defaultToOAuthAuthentication 34 | dnsEndpointType: dnsEndpointType 35 | minimumTlsVersion: minimumTlsVersion 36 | networkAcls: { 37 | bypass: 'AzureServices' 38 | defaultAction: 'Allow' 39 | } 40 | publicNetworkAccess: publicNetworkAccess 41 | } 42 | 43 | resource blobServices 'blobServices' = if (!empty(containers)) { 44 | name: 'default' 45 | properties: { 46 | deleteRetentionPolicy: deleteRetentionPolicy 47 | } 48 | resource container 'containers' = [for container in containers: { 49 | name: container.name 50 | properties: { 51 | publicAccess: contains(container, 'publicAccess') ? 
container.publicAccess : 'None' 52 | } 53 | }] 54 | } 55 | } 56 | 57 | output name string = storage.name 58 | output primaryEndpoints object = storage.properties.primaryEndpoints 59 | -------------------------------------------------------------------------------- /src/chat/group.test.ts: -------------------------------------------------------------------------------- 1 | import { GroupChat } from "./group"; 2 | import { Logger } from "@/utils/logger"; 3 | import { AgentProvider } from "@/agent/agentProvider"; 4 | import { IMarkdownMessageRecord } from "@/message/MarkdownMessage"; 5 | import { IAgentRecord } from "@/agent/type"; 6 | import { AzureGPT, IAzureGPTRecord } from "@/model/azure/GPT"; 7 | import { GPTAgent } from "@/agent/gptAgent"; 8 | import { IChatMessageRecord } from "@/message/type"; 9 | 10 | test('multi-agent response test', async () => { 11 | const OPENAI_API_KEY = process.env.OPENAI_API_KEY; 12 | const AZURE_OPENAI_API_KEY = process.env.AZURE_OPENAI_API_KEY; 13 | const AZURE_API_ENDPOINT = process.env.AZURE_API_ENDPOINT; 14 | const AZURE_GPT_3_5_TURBO_16K = process.env.AZURE_GPT_3_5_TURBO_16K; 15 | var llm = new AzureGPT({ 16 | deploymentID: AZURE_GPT_3_5_TURBO_16K, 17 | apiKey: AZURE_OPENAI_API_KEY, 18 | endpoint: AZURE_API_ENDPOINT, 19 | temperature: 0, 20 | }); 21 | 22 | var alice = new GPTAgent( 23 | { 24 | name: "alice", 25 | system_message: 'say hello', 26 | llm: llm, 27 | } 28 | ); 29 | 30 | var bob = new GPTAgent( 31 | { 32 | name: "bob", 33 | system_message: 'say hi', 34 | llm: llm, 35 | } 36 | ); 37 | 38 | var groupChat = new GroupChat({ 39 | name: "group", 40 | llm: llm, 41 | admin: alice, 42 | agents: [bob], 43 | }); 44 | 45 | groupChat.addInitialConversation("hello", alice); 46 | groupChat.addInitialConversation("hi", bob); 47 | var nextMessage = { 48 | from: alice.name, 49 | role: 'user', 50 | content: 'hello bob', 51 | } as IChatMessageRecord; 52 | var nextMessages = await groupChat.callAsync([nextMessage], 1); 53 | expect(nextMessages.length).toBe(2); 54 | expect(nextMessages[0].from).toBe(alice.name); 55 | expect(nextMessages[0].content).toBe("hello bob"); 56 | expect(nextMessages[1].from).toBe(bob.name); 57 | }) 58 | -------------------------------------------------------------------------------- /src/model/utils.ts: -------------------------------------------------------------------------------- 1 | import { IChatMessageRecord } from "@/message/type"; 2 | import { ChatCompletionMessageParam } from "openai/resources"; 3 | 4 | export function getGPTMaxTokenLimit(modelType: string): number{ 5 | switch (modelType) { 6 | case 'gpt-4': 7 | case 'gpt-4-0613': 8 | case 'gpt-4-0314': 9 | return 8192; 10 | case 'gpt-4-32k': 11 | case 'gpt-4-32k-0613': 12 | case 'gpt-4-32k-0314': 13 | return 32768; 14 | case 'gpt-3.5-turbo': 15 | case 'gpt-3.5-turbo-0613': 16 | case 'gpt-3.5-turbo-0314': 17 | return 4096; 18 | case 'gpt-3.5-turbo-16k': 19 | case 'gpt-3.5-turbo-16k-0613': 20 | return 16384; 21 | default: 22 | return 4096; 23 | }; 24 | } 25 | 26 | export const AVAILABLE_GPT_MODELS = [ 27 | "gpt-3.5-turbo", 28 | "gpt-3.5-turbo-16k", 29 | "gpt-3.5-turbo-0613", 30 | "gpt-3.5-turbo-16k-0613", 31 | "gpt-3.5-turbo-0301", 32 | "gpt-4", 33 | "gpt-4-0613", 34 | "gpt-4-32k", 35 | "gpt-4-32k-0613", 36 | "gpt-4-0314", 37 | "gpt-4-32k-0314", 38 | ]; 39 | 40 | export function convertToOpenAIChatMessages(messages: IChatMessageRecord[]): ChatCompletionMessageParam[] { 41 | var msgs = messages.map((message) => { 42 | if (message.functionCall != null){ 43 | return { 44 | role: 
'assistant', 45 | function_call: message.functionCall, 46 | content: null, 47 | } as ChatCompletionMessageParam 48 | } 49 | else if (message.role == 'function'){ 50 | return { 51 | role: 'function', 52 | name: message.name, 53 | content: message.content, 54 | } as ChatCompletionMessageParam 55 | } 56 | else{ 57 | return { 58 | role: message.role, 59 | content: message.content, 60 | } as ChatCompletionMessageParam 61 | } 62 | }); 63 | 64 | return msgs; 65 | } -------------------------------------------------------------------------------- /src/components/Agent/agentListItem.tsx: -------------------------------------------------------------------------------- 1 | import { IAgentRecord } from "@/agent/type"; 2 | import { Tooltip } from "@mui/material"; 3 | import { FC } from "react"; 4 | import { SmallLabel } from "../Global/EditableSavableTextField"; 5 | import DeleteIcon from '@mui/icons-material/Delete'; 6 | import AddIcon from '@mui/icons-material/Add'; 7 | 8 | export interface AgentListItemProps { 9 | agent: IAgentRecord; 10 | selected: boolean; 11 | onClick?: (agent: IAgentRecord) => void; 12 | onDeleted?: (agent: IAgentRecord) => void; 13 | onCloned?: (agent: IAgentRecord) => void; 14 | } 15 | 16 | export const AgentListItem: FC = (props) => { 17 | const selected = props.selected; 18 | const agent = props.agent; 19 | 20 | const Element = ( 21 |
props.onClick?.(agent)}> 24 |
25 | {agent.name} 26 |
27 | 28 |
29 | 30 | { 32 | e.stopPropagation(); 33 | props.onCloned?.(agent); 34 | }}/> 35 | 36 | 37 | { 40 | e.stopPropagation(); 41 | props.onDeleted?.(agent); 42 | }}/> 43 | 44 |
45 |
46 | ) 47 | 48 | return selected ? ( 49 |
51 | {Element} 52 |
53 | ) : ( 54 |
56 | {Element} 57 |
58 | ) 59 | } -------------------------------------------------------------------------------- /src/public/locales/vi/chat.json: -------------------------------------------------------------------------------- 1 | { 2 | "OpenAI API Key Required": "Yêu cầu nhập API Key từ tài khoản OpenAI", 3 | "Please set your OpenAI API key in the bottom left of the sidebar.": "Vui lòng nhập API Key từ tài khoản OpenAI của bạn vào ô dưới cùng của thanh bên trái.", 4 | "Stop Generating": "Dừng tạo", 5 | "Prompt limit is {{maxLength}} characters": "Giới hạn yêu cầu là {{maxLength}} ký tự", 6 | "System Prompt": "Yêu cầu hệ thống", 7 | "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.": "Bạn là ChatGPT, một mô hình ngôn ngữ lớn được đào tạo bởi OpenAI. Hãy tuân theo hướng dẫn của người dùng một cách cẩn thận. Phản hồi bằng cách sử dụng định dạng markdown.", 8 | "Enter a prompt": "Nhập một lời yêu cầu", 9 | "Regenerate response": "Tạo lại phản hồi", 10 | "Sorry, there was an error.": "Xin lỗi, đã xảy ra lỗi.", 11 | "Model": "Mô hình", 12 | "Conversation": "Cuộc trò chuyện", 13 | "OR": "HOẶC", 14 | "Loading...": "Đang tải...", 15 | "Type a message...": "Nhập một tin nhắn...", 16 | "Error fetching models.": "Lỗi khi truy xuất mô hình.", 17 | "AI": "AI", 18 | "You": "Bạn", 19 | "Make sure your OpenAI API key is set in the bottom left of the sidebar.": "Hãy đảm bảo rằng khóa API từ tài khoản OpenAI của bạn đã được nhập vào ô dưới cùng của thanh bên trái.", 20 | "If you completed this step, OpenAI may be experiencing issues.": "Nếu bạn đã hoàn thành bước này, OpenAI có thể đang gặp sự cố.", 21 | "Message limit is {{maxLength}} characters. You have entered {{valueLength}} characters.": "Giới hạn tin nhắn là {{maxLength}} ký tự. Bạn đã nhập {{valueLength}} ký tự.", 22 | "Please enter a message": "Vui lòng nhập một tin nhắn", 23 | "Chatbot UI is an advanced chatbot kit for OpenAI's chat models aiming to mimic ChatGPT's interface and functionality.": "Chatbot UI là một bộ công cụ chatbot tiên tiến cho các mô hình chat của OpenAI nhằm mô phỏng giao diện và chức năng của ChatGPT.", 24 | "Are you sure you want to clear all messages?": "Bạn có chắc chắn muốn xóa tất cả tin nhắn không?" 25 | } 26 | -------------------------------------------------------------------------------- /src/public/locales/te/chat.json: -------------------------------------------------------------------------------- 1 | { 2 | "OpenAI API Key Required": "ఒపెన్ ఎయి ఐ API కీ అవసరం", 3 | "Please set your OpenAI API key in the bottom left of the sidebar.": "దయచేసి మీ OpenAI API కీని సైడ్ బార్ యొక్క దిగువ ఎడమ భాగంలో సెట్ చేయండి.", 4 | "Stop Generating": "జెనరేట్ చేస్తున్న ప్రక్రియ నిలిపేయి", 5 | "Prompt limit is {{maxLength}} characters": "ప్రాంప్ట్(సంకేతం) పరిమితి {{maxLength}} అక్షరాలు మాత్రమే", 6 | "System Prompt": "సిస్టమ్ ప్రాంప్ట్", 7 | "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.": "మీరు ChatGPT, ఒక పెద్ద భాషా మోడల్ ఓపెన్‌ఏఐ ద్వారా ట్రైన్ చేయబడింది. యూజర్ ఇన్స్ట్రక్షన్స్ కనుగొనండి. 
మార్క్‌డౌన్ ఉపయోగించి సమాధానం ఇవ్వండి.", 8 | "Enter a prompt": "ఒక ప్రాంప్ట్(సంకేతం) నమోదు చేయండి", 9 | "Regenerate response": "పునరుత్పాదించు సమాధానం", 10 | "Sorry, there was an error.": "క్షమించండి, ఒక పొరపాటు జరిగింది.", 11 | "Model": "మోడల్", 12 | "Conversation": "సంవాదం", 13 | "OR": "లేదా", 14 | "Loading...": "లోడ్ అవుతోంది...", 15 | "Type a message...": "సందేశం టైప్ చేయండి...", 16 | "Error fetching models.": "మోడల్స్ పొందడం లోపం జరిగింది.", 17 | "AI": "AI", 18 | "You": "నీవు", 19 | "Make sure your OpenAI API key is set in the bottom left of the sidebar.": "మీరు ఖాళీలో ఎడమ ఎరుగులో మీ OpenAI API కీను సెట్ చేస్తున్నారని ఖచ్చితం చేయండి.", 20 | "If you completed this step, OpenAI may be experiencing issues.": "మీరు ఈ హంతం పూర్తి చేసినా, OpenAI సమస్యలు ఉన్నట్లు ఉంటాయి.", 21 | "Message limit is {{maxLength}} characters. You have entered {{valueLength}} characters.": "సందేశం పరిమితి {{maxLength}} అక్షరాలు. మీరు {{valueLength}} అక్షరాలు నమోదు చేసారు.", 22 | "Please enter a message": "దయచేసి ఒక సందేశం నమోదు చేయండి", 23 | "Chatbot UI is an advanced chatbot kit for OpenAI's chat models aiming to mimic ChatGPT's interface and functionality.": "Chatbot UI ఒక అభ్యంతర సంవిధానం మరియు కార్యాచరణ లక్ష్యం తీసుకున్న OpenAI ఛాట్ మోడల్లలో మార్పులు చేయడానికి ప్రయత్నిస్తుంది, ChatGPT ఇంటర్ఫేస్ మరియు కార్యాచరణను అనుకరించడానికి.", 24 | "Are you sure you want to clear all messages?": "మీరు అన్ని సందేశాలను తొలగించాలా?" 25 | } 26 | -------------------------------------------------------------------------------- /src/utils/app/provider.ts: -------------------------------------------------------------------------------- 1 | import { IRecord } from "@/types/storage"; 2 | 3 | export class Provider{ 4 | private _providers: Record = {}; 5 | private _configUIProviders: Record void) => JSX.Element> = {}; 6 | private _defaultValues: Record = {}; 7 | private _availableModels: string[] = []; 8 | 9 | registerProvider( 10 | id: string, 11 | provider: TProviderType, 12 | configUIProvider: (model: T, onConfigChange?: (config: T) => void) => JSX.Element, 13 | defaultConfig: TModel){ 14 | if(!this._availableModels.includes(id)){ 15 | this._availableModels.push(id); 16 | } 17 | 18 | this._providers[id] = provider; 19 | this._configUIProviders[id] = (config: TModel, onConfigChange?: (config: TModel) => void) => configUIProvider(config as T, onConfigChange as (config: T) => void); 20 | this._defaultValues[id] = defaultConfig; 21 | } 22 | 23 | getDefaultValue(type: string): TModel{ 24 | if(!this._defaultValues[type]){ 25 | throw new Error(`No default value for model ${type}`); 26 | } 27 | 28 | return this._defaultValues[type]; 29 | } 30 | 31 | getConfigUIProvider(type: string): (model: TModel, onConfigChange?: (config: TModel) => void) => JSX.Element{ 32 | if(!this.hasProvider(type)){ 33 | throw new Error(`No provider for model ${type}`); 34 | } 35 | 36 | return this._configUIProviders[type]; 37 | } 38 | 39 | getProvider(model: T): TProviderType{ 40 | if(!this.hasProvider(model.type)){ 41 | throw new Error(`No provider for model ${model.type}`); 42 | } 43 | 44 | return this._providers[model.type]; 45 | } 46 | 47 | getAvailableModels(): string[]{ 48 | return this._availableModels; 49 | } 50 | 51 | hasProvider(type: string): boolean{ 52 | return this._availableModels.includes(type); 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /src/components/Global/Spinner.tsx: -------------------------------------------------------------------------------- 1 | import styled from '@emotion/styled'; 
2 | import { Box, Stack } from '@mui/material'; 3 | import { FC } from 'react'; 4 | 5 | interface Props { 6 | size?: string; 7 | className?: string; 8 | } 9 | 10 | export const Spinner: FC = ({ size = '1em', className="" }) => { 11 | return ( 12 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | ); 34 | }; 35 | 36 | export const ThreeDotBouncingLoader: FC = ({ size = '1em', className="" }) => { 37 | return ( 38 | 41 | 42 | 43 | 44 | 45 | ); 46 | }; 47 | 48 | export const BouncingLoader = styled(Box)(({ theme }) => ({ 49 | display: 'flex', 50 | justifyContent: 'center', 51 | animation: `bouncing-loader 0.5s infinite alternate`, 52 | backgroundColor: theme.palette.text.secondary, 53 | width: '0.5rem', 54 | height: '0.5rem', 55 | borderRadius: '50%', 56 | opacity: 1, 57 | '@keyframes bouncing-loader': { 58 | from: { 59 | opacity: 0.1, 60 | transform: 'translateY(0.2rem)', 61 | }, 62 | to: { 63 | opacity: 1, 64 | transform: 'translateY(0.7rem)', 65 | } 66 | } 67 | })); 68 | -------------------------------------------------------------------------------- /src/components/Markdown/Image.tsx: -------------------------------------------------------------------------------- 1 | import { ImageProps } from "next/image"; 2 | import { FC, useEffect, useState, memo } from "react"; 3 | import { ChatBlobStorage } from "@/utils/blobStorage"; 4 | export const Image: FC = ({ ...props }) => { 5 | const [src, setSrc] = useState(); 6 | const memorizedFetchSrc = async (src: string) => { 7 | if (src.startsWith('https://') || src.startsWith('http://')) { 8 | return src; 9 | } 10 | const blobStorage = await ChatBlobStorage; 11 | if (await blobStorage.isBlobExist(src as string)) { 12 | const url = await blobStorage.getBlobUrl(src as string); 13 | return url; 14 | } 15 | else{ 16 | return src; 17 | } 18 | } 19 | useEffect(() => { 20 | (async () => { 21 | const url = await memorizedFetchSrc(props.src as string); 22 | setSrc(url); 23 | })(); 24 | }, []); 25 | 26 | return ; 27 | } 28 | 29 | function isImagePropsEqual(prevProps: ImageProps, nextProps: ImageProps) { 30 | return prevProps.src === nextProps.src; 31 | } 32 | 33 | export const MemorizedImage = memo(Image, isImagePropsEqual); 34 | 35 | export const Anchor: FC<{href: string}> = ({ ...props }) => { 36 | const [href, setHref] = useState(); 37 | useEffect(() => { 38 | (async () => { 39 | // first, check if src starts with https:// or http:// 40 | if (props.href.startsWith('https://') || props.href.startsWith('http://')) { 41 | setHref(props.href); 42 | return; 43 | } 44 | 45 | const blobStorage = await ChatBlobStorage; 46 | if (await blobStorage.isBlobExist(props.href as string)) { 47 | const url = await blobStorage.getBlobUrl(props.href as string); 48 | setHref(url); 49 | } 50 | else{ 51 | setHref(props.href as string); 52 | } 53 | })(); 54 | } 55 | , []); 56 | 57 | return ; 58 | } 59 | -------------------------------------------------------------------------------- /src/public/locales/fr/chat.json: -------------------------------------------------------------------------------- 1 | { 2 | "OpenAI API Key Required": "Clé API OpenAI requise", 3 | "Please set your OpenAI API key in the bottom left of the sidebar.": "Veuillez saisir votre clé API OpenAI dans le coin inférieur gauche de la barre latérale.", 4 | "Stop Generating": "Interrompre la génération", 5 | "Prompt limit is {{maxLength}} characters": "La limite du prompt est de {{maxLength}} caractères", 6 | "System Prompt": "Prompt du système", 7 | "You are ChatGPT, a large language model trained by OpenAI. 
Follow the user's instructions carefully. Respond using markdown.": "Vous êtes ChatGPT, un grand modèle linguistique entraîné par OpenAI. Suivez attentivement les instructions de l'utilisateur. Répondez en utilisant Markdown.", 8 | "Enter a prompt": "Entrez un prompt", 9 | "Regenerate response": "Régénérer la réponse", 10 | "Sorry, there was an error.": "Désolé, une erreur est survenue.", 11 | "Model": "Modèle", 12 | "Conversation": "Conversation", 13 | "OR": "OU", 14 | "Loading...": "Chargement...", 15 | "Type a message...": "Tapez un message...", 16 | "Error fetching models.": "Erreur lors de la récupération des modèles.", 17 | "AI": "IA", 18 | "You": "Vous", 19 | "Make sure your OpenAI API key is set in the bottom left of the sidebar.": "Assurez-vous que votre clé API OpenAI est définie dans le coin inférieur gauche de la barre latérale.", 20 | "If you completed this step, OpenAI may be experiencing issues.": "Si vous avez effectué cette étape, OpenAI peut rencontrer des problèmes.", 21 | "Message limit is {{maxLength}} characters. You have entered {{valueLength}} characters.": "La limite de message est de {{maxLength}} caractères. Vous avez saisi {{valueLength}} caractères.", 22 | "Please enter a message": "Veuillez entrer un message", 23 | "Chatbot UI is an advanced chatbot kit for OpenAI's chat models aiming to mimic ChatGPT's interface and functionality.": "Chatbot UI est un kit de chatbot avancé pour les modèles de chat d'OpenAI visant à imiter l'interface et les fonctionnalités de ChatGPT.", 24 | "Are you sure you want to clear all messages?": "Êtes-vous sûr de vouloir effacer tous les messages ?" 25 | } 26 | -------------------------------------------------------------------------------- /src/public/locales/id/chat.json: -------------------------------------------------------------------------------- 1 | { 2 | "OpenAI API Key Required": "Memerlukan Kunci API OpenAI", 3 | "Please set your OpenAI API key in the bottom left of the sidebar.": "Silakan atur kunci API OpenAI Anda di bagian kiri bawah bilah sisi.", 4 | "Stop Generating": "Berhenti Menghasilkan", 5 | "Prompt limit is {{maxLength}} characters": "Batas karakter untuk prompt adalah {{maxLength}} karakter", 6 | "System Prompt": "Prompt Sistem", 7 | "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.": "Anda adalah ChatGPT, model bahasa besar yang dilatih oleh OpenAI. Ikuti instruksi pengguna dengan hati-hati. Balas menggunakan markdown.", 8 | "Enter a prompt": "Masukkan sebuah prompt", 9 | "Regenerate response": "Hasilkan kembali respons", 10 | "Sorry, there was an error.": "Maaf, terjadi kesalahan.", 11 | "Model": "Model", 12 | "Conversation": "Percakapan", 13 | "OR": "ATAU", 14 | "Loading...": "Memuat...", 15 | "Type a message...": "Ketik sebuah pesan...", 16 | "Error fetching models.": "Kesalahan dalam mengambil model.", 17 | "AI": "AI", 18 | "You": "Anda", 19 | "Make sure your OpenAI API key is set in the bottom left of the sidebar.": "Pastikan kunci API OpenAI Anda diatur di bagian kiri bawah bilah sisi.", 20 | "If you completed this step, OpenAI may be experiencing issues.": "Jika Anda telah menyelesaikan langkah ini, OpenAI mungkin mengalami masalah.", 21 | "click if using a .env.local file": "klik jika menggunakan file .env.local", 22 | "Message limit is {{maxLength}} characters. You have entered {{valueLength}} characters.": "Batas karakter untuk pesan adalah {{maxLength}} karakter. 
Anda telah memasukkan {{valueLength}} karakter.", 23 | "Please enter a message": "Silakan masukkan sebuah pesan", 24 | "Chatbot UI is an advanced chatbot kit for OpenAI's chat models aiming to mimic ChatGPT's interface and functionality.": "Chatbot UI adalah kit chatbot canggih untuk model obrolan OpenAI yang bertujuan meniru antarmuka dan fungsionalitas ChatGPT.", 25 | "Are you sure you want to clear all messages?": "Apakah Anda yakin ingin menghapus semua pesan?" 26 | } -------------------------------------------------------------------------------- /src/public/locales/pt/chat.json: -------------------------------------------------------------------------------- 1 | { 2 | "OpenAI API Key Required": "A API Key da OpenAI é necessária", 3 | "Please set your OpenAI API key in the bottom left of the sidebar.": "Por favor, insira sua API Key da OpenAI no canto inferior esquerdo da barra lateral.", 4 | "Stop Generating": "Parar de gerar", 5 | "Prompt limit is {{maxLength}} characters": "O limite da mensagem é de {{maxLength}} caracteres", 6 | "System Prompt": "Mensagem do sistema", 7 | "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.": "Você é o ChatGPT, um grande modelo de linguagem treinado pela OpenAI. Siga as instruções do usuário cuidadosamente. Responda usando markdown.", 8 | "Enter a prompt": "Insira uma mensagem", 9 | "Regenerate response": "Gerar resposta novamente", 10 | "Sorry, there was an error.": "Desculpe, ocorreu um erro.", 11 | "Model": "Modelo", 12 | "Conversation": "Conversação", 13 | "OR": "Ou", 14 | "Loading...": "Carregando...", 15 | "Type a message...": "Escreva uma mensagem...", 16 | "Error fetching models.": "Erro ao buscar os modelos.", 17 | "AI": "IA", 18 | "You": "Você", 19 | "Make sure your OpenAI API key is set in the bottom left of the sidebar.": "Certifique-se de que sua API Key da OpenAI esteja definida na parte inferior esquerda da barra lateral.", 20 | "If you completed this step, OpenAI may be experiencing issues.": "Se você concluiu esta etapa, o OpenAI pode estar com problemas.", 21 | "click if using a .env.local file": "clique se estiver usando um arquivo .env.local", 22 | "Message limit is {{maxLength}} characters. You have entered {{valueLength}} characters.": "O limite de mensagens é de {{maxLength}} caracteres. Você inseriu {{valueLength}} caracteres", 23 | "Please enter a message": "Por favor, insira uma mensagem", 24 | "Chatbot UI is an advanced chatbot kit for OpenAI's chat models aiming to mimic ChatGPT's interface and functionality.": "Chatbot UI é um kit de chatbot avançado para os modelos de chat do OpenAI com o objetivo de imitar a interface e a funcionalidade do ChatGPT.", 25 | "Are you sure you want to clear all messages?": "Tem certeza de que deseja limpar todas as mensagens?" 
26 | } 27 | -------------------------------------------------------------------------------- /src/utils/app/clean.ts: -------------------------------------------------------------------------------- 1 | import { Conversation } from '@/types/chat'; 2 | import { OpenAIModelID, OpenAIModels } from '@/types/openai'; 3 | import { DEFAULT_SYSTEM_PROMPT } from './const'; 4 | 5 | export const cleanSelectedConversation = (conversation: Conversation) => { 6 | // added model for each conversation (3/20/23) 7 | // added system prompt for each conversation (3/21/23) 8 | // added folders (3/23/23) 9 | // added prompts (3/26/23) 10 | 11 | let updatedConversation = conversation; 12 | 13 | // check for model on each conversation 14 | if (!updatedConversation.model) { 15 | updatedConversation = { 16 | ...updatedConversation, 17 | model: updatedConversation.model || OpenAIModels[OpenAIModelID.GPT_3_5], 18 | }; 19 | } 20 | 21 | // check for system prompt on each conversation 22 | if (!updatedConversation.prompt) { 23 | updatedConversation = { 24 | ...updatedConversation, 25 | prompt: updatedConversation.prompt || DEFAULT_SYSTEM_PROMPT, 26 | }; 27 | } 28 | 29 | if (!updatedConversation.folderId) { 30 | updatedConversation = { 31 | ...updatedConversation, 32 | folderId: updatedConversation.folderId || null, 33 | }; 34 | } 35 | 36 | return updatedConversation; 37 | }; 38 | 39 | export const cleanConversationHistory = (history: Conversation[]) => { 40 | // added model for each conversation (3/20/23) 41 | // added system prompt for each conversation (3/21/23) 42 | // added folders (3/23/23) 43 | // added prompts (3/26/23) 44 | 45 | return history.reduce((acc: Conversation[], conversation) => { 46 | try { 47 | if (!conversation.model) { 48 | conversation.model = OpenAIModels[OpenAIModelID.GPT_3_5]; 49 | } 50 | 51 | if (!conversation.prompt) { 52 | conversation.prompt = DEFAULT_SYSTEM_PROMPT; 53 | } 54 | 55 | if (!conversation.folderId) { 56 | conversation.folderId = null; 57 | } 58 | 59 | acc.push(conversation); 60 | return acc; 61 | } catch (error) { 62 | console.warn( 63 | `error while cleaning conversations' history. Removing culprit`, 64 | error, 65 | ); 66 | } 67 | return acc; 68 | }, []); 69 | }; 70 | -------------------------------------------------------------------------------- /src/public/locales/de/chat.json: -------------------------------------------------------------------------------- 1 | { 2 | "OpenAI API Key Required": "OpenAI API-Schlüssel erforderlich", 3 | "Please set your OpenAI API key in the bottom left of the sidebar.": "Bitte trage deinen OpenAI API-Schlüssel in der linken unteren Ecke der Seitenleiste ein.", 4 | "Stop Generating": "Generieren stoppen", 5 | "Prompt limit is {{maxLength}} characters": "Das Eingabelimit liegt bei {{maxLength}} Zeichen", 6 | "System Prompt": "Systemaufforderung", 7 | "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.": "Du bist ChatGPT, ein großes Sprachmodell, das von OpenAI trainiert wurde. Befolge die Anweisungen des Benutzers sorgfältig. 
Antworte in Markdown-Format.", 8 | "Enter a prompt": "Gib eine Anweisung ein", 9 | "Regenerate response": "Antwort erneut generieren", 10 | "Sorry, there was an error.": "Entschuldigung, es ist ein Fehler aufgetreten.", 11 | "Model": "Modell", 12 | "Conversation": "Konversation", 13 | "OR": "ODER", 14 | "Loading...": "Laden...", 15 | "Type a message...": "Schreibe eine Nachricht...", 16 | "Error fetching models.": "Fehler beim Abrufen der Sprachmodelle.", 17 | "AI": "KI", 18 | "You": "Du", 19 | "Make sure your OpenAI API key is set in the bottom left of the sidebar.": "Stelle sicher, dass dein OpenAI API-Schlüssel in der unteren linken Ecke der Seitenleiste eingetragen ist.", 20 | "If you completed this step, OpenAI may be experiencing issues.": "Wenn dies der Fall ist, könnte OpenAI möglicherweise momentan Probleme haben.", 21 | "Message limit is {{maxLength}} characters. You have entered {{valueLength}} characters.": "Das Nachrichtenlimit beträgt {{maxLength}} Zeichen. Du hast bereits {{valueLength}} Zeichen eingegeben.", 22 | "Please enter a message": "Bitte gib eine Nachricht ein", 23 | "Chatbot UI is an advanced chatbot kit for OpenAI's chat models aiming to mimic ChatGPT's interface and functionality.": "Chatbot UI ist ein fortschrittliches Chatbot-Toolkit für OpenAI's Chat-Modelle, das darauf abzielt, die Benutzeroberfläche und Funktionalität von ChatGPT nachzuahmen.", 24 | "Are you sure you want to clear all messages?": "Bist du sicher, dass du alle Nachrichten löschen möchtest?" 25 | } -------------------------------------------------------------------------------- /src/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "llm-chat", 3 | "version": "0.4.5", 4 | "private": true, 5 | "scripts": { 6 | "dev": "next dev", 7 | "build": "next build && next export -o build", 8 | "start": "next start", 9 | "lint": "next lint", 10 | "test": "jest --config jest.config.js", 11 | "format": "prettier --write ." 
12 | }, 13 | "dependencies": { 14 | "@azure/openai": "^1.0.0-beta.6", 15 | "@dqbd/tiktoken": "^1.0.7", 16 | "@emotion/react": "^11.11.1", 17 | "@emotion/styled": "^11.11.0", 18 | "@mui/icons-material": "^5.11.16", 19 | "@mui/lab": "^5.0.0-alpha.127", 20 | "@mui/material": "^5.13.6", 21 | "@tabler/icons-react": "^2.23.0", 22 | "@uiw/react-md-editor": "^3.23.3", 23 | "eventsource-parser": "^0.1.0", 24 | "html2canvas": "^1.4.1", 25 | "i18next": "^22.5.1", 26 | "inversify": "^6.0.1", 27 | "jszip": "^3.10.1", 28 | "langchain": "^0.0.73", 29 | "mui": "^0.0.1", 30 | "next": "13.2.4", 31 | "next-i18next": "^13.3.0", 32 | "next-remove-imports": "^1.0.11", 33 | "openai": "^4.12.1", 34 | "react": "18.2.0", 35 | "react-dom": "18.2.0", 36 | "react-i18next": "^12.3.1", 37 | "react-markdown": "^8.0.7", 38 | "react-syntax-highlighter": "^15.5.0", 39 | "reflect-metadata": "^0.1.13", 40 | "rehype-mathjax": "^4.0.2", 41 | "remark-gfm": "^3.0.1", 42 | "remark-math": "^5.1.1", 43 | "uuid": "^9.0.0" 44 | }, 45 | "devDependencies": { 46 | "@tailwindcss/forms": "^0.5.6", 47 | "@tailwindcss/typography": "^0.5.9", 48 | "@types/jest": "^29.5.2", 49 | "@types/node": "18.15.0", 50 | "@types/react": "18.0.28", 51 | "@types/react-dom": "18.0.11", 52 | "@types/react-syntax-highlighter": "^15.5.7", 53 | "@types/uuid": "^9.0.2", 54 | "autoprefixer": "^10.4.14", 55 | "dotenv": "^16.3.1", 56 | "eslint": "8.36.0", 57 | "eslint-config-next": "13.2.4", 58 | "fake-indexeddb": "^4.0.1", 59 | "jest": "^29.5.0", 60 | "jest-environment-jsdom": "^29.6.1", 61 | "postcss": "^8.4.24", 62 | "prettier": "^2.8.8", 63 | "prettier-plugin-tailwindcss": "^0.2.8", 64 | "tailwindcss": "^3.3.2", 65 | "ts-jest": "^29.1.1", 66 | "typescript": "4.9.5" 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /src/public/locales/es/chat.json: -------------------------------------------------------------------------------- 1 | { 2 | "OpenAI API Key Required": "Se requiere la clave de API de OpenAI", 3 | "Please set your OpenAI API key in the bottom left of the sidebar.": "Por favor, ingrese su clave de API de OpenAI en la esquina inferior izquierda de la barra lateral.", 4 | "Stop Generating": "Dejar de generar", 5 | "Prompt limit is {{maxLength}} characters": "El límite del mensaje es de {{maxLength}} caracteres", 6 | "System Prompt": "Mensaje del sistema", 7 | "You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.": "Eres ChatGPT, un modelo de lenguaje grande entrenado por OpenAI. Sigue las instrucciones del usuario cuidadosamente. 
Responde usando markdown.", 8 | "Enter a prompt": "Ingrese un mensaje", 9 | "Regenerate response": "Regenerar respuesta", 10 | "Sorry, there was an error.": "Lo sentimos, ha ocurrido un error.", 11 | "Model": "Modelo", 12 | "Conversation": "Conversación", 13 | "OR": "O", 14 | "Loading...": "Cargando...", 15 | "Type a message...": "Escriba un mensaje...", 16 | "Error fetching models.": "Error al obtener los modelos.", 17 | "AI": "IA", 18 | "You": "Tú", 19 | "Make sure your OpenAI API key is set in the bottom left of the sidebar.": "Asegúrate de que hayas ingresado la clave de API de OpenAI en la esquina inferior izquierda de la barra lateral.", 20 | "If you completed this step, OpenAI may be experiencing issues.": "Si completaste este paso, OpenAI podría estar experimentando problemas.", 21 | "click if using a .env.local file": "haz clic si estás utilizando un archivo .env.local", 22 | "Message limit is {{maxLength}} characters. You have entered {{valueLength}} characters.": "El límite del mensaje es de {{maxLength}} caracteres. Has ingresado {{valueLength}} caracteres.", 23 | "Please enter a message": "Por favor, ingrese un mensaje", 24 | "Chatbot UI is an advanced chatbot kit for OpenAI's chat models aiming to mimic ChatGPT's interface and functionality.": "Chatbot UI es un kit avanzado de chatbot para los modelos de chat de OpenAI que busca imitar la interfaz y funcionalidad de ChatGPT.", 25 | "Are you sure you want to clear all messages?": "¿Está seguro de que desea borrar todos los mensajes?" 26 | } 27 | -------------------------------------------------------------------------------- /src/memory/chatMemory.ts: -------------------------------------------------------------------------------- 1 | import { InputValues } from "langchain/dist/schema"; 2 | import { BaseMemory } from "langchain/memory"; 3 | import { IMemory } from "./type"; 4 | import { IChatMessageRecord } from "@/message/type"; 5 | export type OutputValues = Record; 6 | export type MemoryVariables = Record; 7 | 8 | export interface IChatMemory extends IMemory{ 9 | type: "memory.baseMemory"; 10 | maxHistoryLength?: number; 11 | } 12 | 13 | export class ChatMemory extends BaseMemory{ 14 | get memoryKeys(): string[] { 15 | return [this.memoryKey]; 16 | } 17 | memoryKey = "history"; 18 | outputKey = "output"; 19 | useChatML = false; 20 | chatHistory: Omit[] = []; 21 | maxHistoryLength = 16; 22 | 23 | constructor({memoryKey, maxHistoryLength, history}: IChatMemory & { history?: IChatMessageRecord[] }){ 24 | super(); 25 | if(memoryKey){ 26 | this.memoryKey = memoryKey; 27 | } 28 | if(history){ 29 | this.chatHistory = history; 30 | } 31 | if(maxHistoryLength){ 32 | this.maxHistoryLength = maxHistoryLength; 33 | } 34 | } 35 | 36 | async removeChatMessage(message: IChatMessageRecord): Promise { 37 | var index = this.chatHistory.findIndex((m) => m.content === message.content); 38 | if(index >= 0){ 39 | this.chatHistory.splice(index, 1); 40 | } 41 | } 42 | 43 | async saveContext(inputValues: InputValues, outputValues: OutputValues): Promise { 44 | var content: IChatMessageRecord = inputValues['message']; 45 | if (content.from == 'system'){ 46 | return; 47 | } 48 | this.chatHistory.push(content); 49 | var output = outputValues["response"]; 50 | if(output){ 51 | this.chatHistory.push({from: this.outputKey, content: output}); 52 | } 53 | } 54 | 55 | async loadMemoryVariables(_values: InputValues): Promise { 56 | // return last maxHistoryLength messages 57 | var history = this.chatHistory.slice(-this.maxHistoryLength); 58 | return { 59 | 
[this.memoryKey]: history 60 | } 61 | } 62 | } -------------------------------------------------------------------------------- /src/components/Chat/Conversation.tsx: -------------------------------------------------------------------------------- 1 | import { IChatMessageRecord, IMessageRecord, IsChatMessage } from '@/message/type'; 2 | import { Box, List } from '@mui/material'; 3 | import React from 'react'; 4 | import { ChatMessage } from './ChatMessage'; 5 | import { IAgentRecord } from '@/agent/type'; 6 | import { ILogMessageRecord, LogLevelsToPresent, LogMessage, LogMessageLevel, LogMessageType, LogMessageTypeString } from '@/message/LogMessage'; 7 | import { MessageElement } from '@/message'; 8 | 9 | interface ConversationProps { 10 | conversation: IMessageRecord[]; 11 | agents: IAgentRecord[]; 12 | onResendMessage: (message: IChatMessageRecord, index: number) => void; 13 | onDeleteMessage: (message: IChatMessageRecord, index: number) => void; 14 | logLevel?: LogMessageLevel; 15 | } 16 | 17 | export const Conversation: React.FC = ({ conversation, onDeleteMessage, onResendMessage, agents, logLevel }) => { 18 | const presentLogLoevels = LogLevelsToPresent(logLevel ?? 'info'); 19 | return ( 20 | 24 | {conversation?.map((message, index) => ( 25 | 31 | { 32 | message.type === LogMessageTypeString && 33 | presentLogLoevels.includes((message as ILogMessageRecord).level) && 34 | 35 | } 36 | { 37 | IsChatMessage(message) && 38 | agent.name === (message as IChatMessageRecord).from)} 42 | onDeleteMessage={(message) => onDeleteMessage(message, index)} 43 | onResendMessage={(message) => onResendMessage(message, index)} 44 | /> 45 | } 46 | 47 | ))} 48 | 49 | ); 50 | }; 51 | -------------------------------------------------------------------------------- /src/types/storage.ts: -------------------------------------------------------------------------------- 1 | import { IGroupRecord } from '@/types/group'; 2 | import JSZip from 'jszip'; 3 | import { ChatBlobStorage, ImageBlobStorage } from '@/utils/blobStorage'; 4 | import { IAgent, IAgentRecord } from '@/agent/type'; 5 | 6 | export interface IUISettings extends IRecord{ 7 | } 8 | 9 | export type availableValueTypes = string | number | boolean | Blob | undefined | IRecord; 10 | export interface IRecord{ 11 | type: string; 12 | } 13 | 14 | export interface IStorageRecord extends IRecord{ 15 | agents: IAgentRecord[]; 16 | groups: IGroupRecord[]; 17 | } 18 | 19 | export function saveStorage(storage: IStorageRecord){ 20 | localStorage.setItem('storage', JSON.stringify(storage)); 21 | } 22 | 23 | export function loadStorage(): IStorageRecord{ 24 | var storage = localStorage.getItem('storage'); 25 | if(storage){ 26 | return JSON.parse(storage); 27 | } 28 | return { 29 | type: "storage", 30 | agents: [], 31 | groups: [] 32 | }; 33 | } 34 | 35 | export async function exportZip(storage: IStorageRecord): Promise{ 36 | var zip = new JSZip(); 37 | zip.file("storage.json", JSON.stringify(storage)); 38 | 39 | // save images to images folder 40 | var imgs = zip.folder("images"); 41 | var imageStorage = await ImageBlobStorage; 42 | var images = await imageStorage.listBlobs(); 43 | for(var image of images){ 44 | var blob = await imageStorage.getBlob(image); 45 | imgs!.file(image, blob); 46 | } 47 | 48 | // save chats to chat folder 49 | var chatBlobs = await ChatBlobStorage; 50 | var chats = await chatBlobs.listBlobs(); 51 | var chatBlobsFolder = zip.folder("chat"); 52 | for(var chat of chats){ 53 | var blob = await chatBlobs.getBlob(chat); 54 | 
chatBlobsFolder!.file(chat, blob); 55 | } 56 | 57 | return await zip.generateAsync({type:"blob"}); 58 | } 59 | 60 | export async function importZip(blob: Blob): Promise{ 61 | var zip = await JSZip.loadAsync(blob); 62 | var storage = await zip.file("storage.json")!.async("string"); 63 | var imageStorage = await ImageBlobStorage; 64 | zip.folder("images")?.forEach(async (relativePath, file) => { 65 | var blob = await file.async("blob"); 66 | await imageStorage.saveBlob(blob, relativePath); 67 | }); 68 | 69 | var chatBlobs = await ChatBlobStorage; 70 | zip.folder("chat")?.forEach(async (relativePath, file) => { 71 | var blob = await file.async("blob"); 72 | await chatBlobs.saveBlob(blob, relativePath); 73 | }); 74 | 75 | return JSON.parse(storage); 76 | } -------------------------------------------------------------------------------- /src/model/azure/ConfigPanel.tsx: -------------------------------------------------------------------------------- 1 | import { useEffect, useState } from "react"; 2 | import { AzureGPT, IAzureTextEmbeddingAda002V2, IAzureGPTRecord } from "./GPT"; 3 | import { TextField, Stack } from "@mui/material"; 4 | import { EditableSavableTextField, SettingSection, SmallNumberSetting, SmallSelectSetting, SmallTextSetting } from "@/components/Global/EditableSavableTextField"; 5 | import { AVAILABLE_GPT_MODELS, getGPTMaxTokenLimit } from "../utils"; 6 | 7 | const GPTConfig = (model: IAzureGPTRecord, onModelConfigChanged: (model: IAzureGPTRecord) => void) => { 8 | const [modelType, setModelType] = useState(); 9 | const [maxToken, setMaxToken] = useState(model.maxTokens ?? 64); 10 | 11 | useEffect(() => { 12 | var maxToken = getGPTMaxTokenLimit(modelType!); 13 | setMaxToken(maxToken); 14 | }, [model, modelType]); 15 | 16 | return ( 17 | <> 18 | onModelConfigChanged({ ...model, apiKey: value})}/> 19 | onModelConfigChanged({ ...model, deploymentID: value})}/> 20 | onModelConfigChanged({ ...model, endpoint: value})}/> 21 | setModelType(value)}/> 22 | onModelConfigChanged({ ...model, maxTokens: value})}/> 23 | onModelConfigChanged({ ...model, temperature: value})}/> 24 | onModelConfigChanged({ ...model, topP: value})}/> 25 | onModelConfigChanged({ ...model, frequencyPenalty: value})}/> 26 | onModelConfigChanged({ ...model, presencePenalty: value})}/> 27 | ); 28 | }; 29 | 30 | export const AzureEmbeddingConfig = (model: IAzureTextEmbeddingAda002V2, onModelConfigChanged: (model: IAzureTextEmbeddingAda002V2) => void) => { 31 | return ( 32 | <> 33 | onModelConfigChanged({ ...model, apiKey: value})}/> 34 | onModelConfigChanged({ ...model, deploymentName: value})}/> 35 | onModelConfigChanged({ ...model, resourceName: value})}/> 36 | ); 37 | } 38 | 39 | export {GPTConfig} -------------------------------------------------------------------------------- /src/components/Chat/GroupListItem.tsx: -------------------------------------------------------------------------------- 1 | import { IAgentRecord } from "@/agent/type"; 2 | import { IGroupRecord } from "@/chat/type"; 3 | import { Tooltip, Divider, AvatarGroup } from "@mui/material"; 4 | import { FC } from "react"; 5 | import { MediumLabel, TinyAvatar } from "../Global/EditableSavableTextField"; 6 | import DeleteIcon from '@mui/icons-material/Delete'; 7 | import AddIcon from '@mui/icons-material/Add'; 8 | import SettingsIcon from '@mui/icons-material/Settings'; 9 | 10 | export interface GroupListItemProps{ 11 | group: IGroupRecord; 12 | agents: IAgentRecord[]; 13 | selected: boolean; 14 | onClick?: (group: IGroupRecord) => void; 15 | 
onDeleted?: (group: IGroupRecord) => void; 16 | onCloned?: (group: IGroupRecord) => void; 17 | onUpdated?: (group: IGroupRecord) => void; 18 | } 19 | 20 | export const GroupListItem: FC<GroupListItemProps> = (props) => { 21 | const selected = props.selected; 22 | const group = props.group; 23 | 24 | const Element = ( 25 |
props.onClick?.(group)}> 28 |
30 |
31 | {group.name} 32 |
33 | 34 |
35 | 36 | { 39 | e.stopPropagation(); 40 | props.onDeleted?.(group); 41 | }}/> 42 | 43 |
44 | 45 |
46 | 47 |
49 |
50 | a.name)}`} placement="top"> 51 | 53 | {props.agents.map((agentRecord, index) => { 54 | return ( 55 | 59 | ) 60 | })} 61 | 62 | 63 |
64 | 65 |
66 | 67 | { 69 | e.stopPropagation(); 70 | var clonedGroup = {...group}; 71 | clonedGroup.name = `${clonedGroup.name}(1)`; 72 | props.onCloned?.(clonedGroup); 73 | }}/> 74 | 75 | 76 | { 78 | e.stopPropagation(); 79 | props.onUpdated?.(group); 80 | }}/> 81 | 82 | 83 |
84 |
85 |
86 | ) 87 | 88 | return selected ? ( 89 |
91 | {Element} 92 |
93 | ) : ( 94 |
96 | {Element} 97 |
98 | ) 99 | } -------------------------------------------------------------------------------- /src/components/Markdown/CodeBlock.tsx: -------------------------------------------------------------------------------- 1 | import { 2 | generateRandomString, 3 | programmingLanguages, 4 | } from '@/utils/app/codeblock'; 5 | import { IconCheck, IconClipboard, IconDownload } from '@tabler/icons-react'; 6 | import { useTranslation } from 'next-i18next'; 7 | import { FC, memo, useState } from 'react'; 8 | import { Prism as SyntaxHighlighter } from 'react-syntax-highlighter'; 9 | import { oneDark } from 'react-syntax-highlighter/dist/cjs/styles/prism'; 10 | import { CentralBox } from '../Global/EditableSavableTextField'; 11 | import { Box } from '@mui/material'; 12 | 13 | interface Props { 14 | language: string; 15 | value: string; 16 | } 17 | 18 | export const CodeBlock: FC = memo(({ language, value }) => { 19 | const { t } = useTranslation('markdown'); 20 | const [isCopied, setIsCopied] = useState(false); 21 | 22 | const copyToClipboard = () => { 23 | if (!navigator.clipboard || !navigator.clipboard.writeText) { 24 | return; 25 | } 26 | 27 | navigator.clipboard.writeText(value).then(() => { 28 | setIsCopied(true); 29 | 30 | setTimeout(() => { 31 | setIsCopied(false); 32 | }, 2000); 33 | }); 34 | }; 35 | const downloadAsFile = () => { 36 | const fileExtension = programmingLanguages[language] || '.file'; 37 | const suggestedFileName = `file-${generateRandomString( 38 | 3, 39 | true, 40 | )}${fileExtension}`; 41 | const fileName = window.prompt( 42 | t('Enter file name') || '', 43 | suggestedFileName, 44 | ); 45 | 46 | if (!fileName) { 47 | // user pressed cancel on prompt 48 | return; 49 | } 50 | 51 | const blob = new Blob([value], { type: 'text/plain' }); 52 | const url = URL.createObjectURL(blob); 53 | const link = document.createElement('a'); 54 | link.download = fileName; 55 | link.href = url; 56 | link.style.display = 'none'; 57 | document.body.appendChild(link); 58 | link.click(); 59 | document.body.removeChild(link); 60 | URL.revokeObjectURL(url); 61 | }; 62 | return ( 63 | 67 |
68 | {language} 69 | 70 |
71 | 82 | 88 |
89 |
90 | 95 | {value} 96 | 97 |
98 | ); 99 | }); 100 | CodeBlock.displayName = 'CodeBlock'; 101 | -------------------------------------------------------------------------------- /src/message/LogMessage.tsx: -------------------------------------------------------------------------------- 1 | import { TinyLabel } from "@/components/Global/EditableSavableTextField"; 2 | import { IMessageRecord } from "./type"; 3 | import { TinyGrayBadge, TinyGreenBadge } from "@/components/Global/Badge"; 4 | import { Collapse } from "@mui/material"; 5 | import { useState } from "react"; 6 | export const LogMessageTypeString: LogMessageType = 'message.log'; 7 | export type LogMessageType = 'message.log'; 8 | export type LogMessageLevel = 'info' | 'warning' | 'error' | 'debug' | 'verbose'; 9 | 10 | export const LogLevelsToPresent = (logLevel: LogMessageLevel) => { 11 | const levels: LogMessageLevel[] = ['error', 'warning', 'info', 'debug', 'verbose']; 12 | return levels.slice(0, levels.indexOf(logLevel) + 1); 13 | } 14 | 15 | export interface ILogMessageRecord extends IMessageRecord { 16 | type: LogMessageType, 17 | level: LogMessageLevel, 18 | detail?: string, 19 | } 20 | 21 | export const LogMessage = (message: ILogMessageRecord) => { 22 | const [showDetail, setShowDetail] = useState(false); 23 | console.log(message.detail?.split('\n')); 24 | return ( message.detail == undefined ? 25 |
27 | {message.level == 'debug' && debug} 28 | {message.level == 'verbose' && } 29 | {message.level == 'info' && } 30 | {message.level == 'warning' && } 31 | {message.level == 'error' && } 32 | {message.content} 33 |
34 | : 35 |
37 |
38 | {message.level == 'debug' && debug} 39 | {message.level == 'verbose' && } 40 | {message.level == 'info' && } 41 | {message.level == 'warning' && } 42 | {message.level == 'error' && } 43 | {message.content} 44 | 49 |
50 | 51 | {message.detail} 52 | 53 |
54 | ) 55 | } 56 | 57 | -------------------------------------------------------------------------------- /src/utils/app/storageReducer.ts: -------------------------------------------------------------------------------- 1 | import { IAgentRecord } from "@/agent/type"; 2 | import { IGroupRecord } from "@/chat/type"; 3 | import { IChatMessageRecord } from "@/message/type"; 4 | import { IStorageRecord } from "@/types/storage"; 5 | 6 | export type StorageCmd = "set" | "get" 7 | | "setGroups" 8 | | "setAgents" 9 | | "addAgent" 10 | | "removeAgent" 11 | | "updateAgent" 12 | | "addOrUpdateAgent" 13 | | "addGroup" 14 | | "removeGroup" 15 | | "updateGroup" 16 | | "addOrUpdateGroup"; 17 | 18 | 19 | export type StorageAction = {type: StorageCmd, payload?: IStorageRecord | IGroupRecord[] | IAgentRecord[] | IAgentRecord | IGroupRecord | IChatMessageRecord, original?: IAgentRecord | IGroupRecord}; 20 | 21 | export function storageReducer(storage: IStorageRecord, action: StorageAction) : IStorageRecord{ 22 | switch(action.type){ 23 | case "set": 24 | return action.payload as IStorageRecord; 25 | case "get": 26 | return storage; 27 | case "setGroups": 28 | return {...storage, groups: action.payload as IGroupRecord[]}; 29 | case "setAgents": 30 | return {...storage, agents: action.payload as IAgentRecord[]}; 31 | case "addAgent": 32 | if(storage.agents.find(a => a.name === (action.payload as IAgentRecord)!.name)){ 33 | throw new Error("Agent already exists"); 34 | } 35 | return {...storage, agents: [...storage.agents, action.payload as IAgentRecord]}; 36 | case "removeAgent": 37 | return {...storage, agents: storage.agents.filter(a => a.name !== (action.payload as IAgentRecord)!.name)}; 38 | case "updateAgent": 39 | var originalAlias = (action.original as IAgentRecord)?.name ?? (action.payload as IAgentRecord)!.name; 40 | return {...storage, agents: storage.agents.map(a => a.name === originalAlias ? action.payload as IAgentRecord : a)}; 41 | case "addOrUpdateAgent": 42 | var existing = storage.agents.find(a => a.name === (action.payload as IAgentRecord)!.name); 43 | if(existing){ 44 | return {...storage, agents: storage.agents.map(a => a.name === (action.payload as IAgentRecord)!.name ? action.payload as IAgentRecord : a)}; 45 | } 46 | return {...storage, agents: [...storage.agents, action.payload as IAgentRecord]}; 47 | case "addGroup": 48 | if(storage.groups.find(a => a.name === (action.payload as IGroupRecord)!.name)){ 49 | throw new Error("Group already exists"); 50 | } 51 | return {...storage, groups: [...storage.groups, action.payload as IGroupRecord]}; 52 | case "removeGroup": 53 | return {...storage, groups: storage.groups.filter(a => a.name !== (action.payload as IGroupRecord)!.name)}; 54 | case "updateGroup": 55 | var originalAlias = (action.original as IGroupRecord)?.name ?? (action.payload as IGroupRecord)!.name; 56 | return {...storage, groups: storage.groups.map(a => a.name === originalAlias ? action.payload as IGroupRecord : a)}; 57 | case "addOrUpdateGroup": 58 | var groupExisting = storage.groups.find(a => a.name === (action.payload as IGroupRecord)!.name) ?? false; 59 | if(groupExisting){ 60 | return {...storage, groups: storage.groups.map(a => a.name === (action.payload as IGroupRecord)!.name ? 
action.payload as IGroupRecord : a)}; 61 | } 62 | return {...storage, groups: [...storage.groups, action.payload as IGroupRecord]}; 63 | default: 64 | throw new Error("Invalid storage command"); 65 | } 66 | } -------------------------------------------------------------------------------- /infra/core/host/appservice.bicep: -------------------------------------------------------------------------------- 1 | param name string 2 | param location string = resourceGroup().location 3 | param tags object = {} 4 | 5 | // Reference Properties 6 | param applicationInsightsName string = '' 7 | param appServicePlanId string 8 | param keyVaultName string = '' 9 | param managedIdentity bool = !empty(keyVaultName) 10 | 11 | // Runtime Properties 12 | @allowed([ 13 | 'dotnet', 'dotnetcore', 'dotnet-isolated', 'node', 'python', 'java', 'powershell', 'custom' 14 | ]) 15 | param runtimeName string 16 | param runtimeNameAndVersion string = '${runtimeName}|${runtimeVersion}' 17 | param runtimeVersion string 18 | 19 | // Microsoft.Web/sites Properties 20 | param kind string = 'app,linux' 21 | 22 | // Microsoft.Web/sites/config 23 | param allowedOrigins array = [] 24 | param alwaysOn bool = true 25 | param appCommandLine string = '' 26 | param appSettings object = {} 27 | param clientAffinityEnabled bool = false 28 | param enableOryxBuild bool = contains(kind, 'linux') 29 | param functionAppScaleLimit int = -1 30 | param linuxFxVersion string = runtimeNameAndVersion 31 | param minimumElasticInstanceCount int = -1 32 | param numberOfWorkers int = -1 33 | param scmDoBuildDuringDeployment bool = false 34 | param use32BitWorkerProcess bool = false 35 | param ftpsState string = 'FtpsOnly' 36 | param healthCheckPath string = '' 37 | 38 | resource appService 'Microsoft.Web/sites@2022-03-01' = { 39 | name: name 40 | location: location 41 | tags: tags 42 | kind: kind 43 | properties: { 44 | serverFarmId: appServicePlanId 45 | siteConfig: { 46 | linuxFxVersion: linuxFxVersion 47 | alwaysOn: alwaysOn 48 | ftpsState: ftpsState 49 | appCommandLine: appCommandLine 50 | numberOfWorkers: numberOfWorkers != -1 ? numberOfWorkers : null 51 | minimumElasticInstanceCount: minimumElasticInstanceCount != -1 ? minimumElasticInstanceCount : null 52 | use32BitWorkerProcess: use32BitWorkerProcess 53 | functionAppScaleLimit: functionAppScaleLimit != -1 ? functionAppScaleLimit : null 54 | healthCheckPath: healthCheckPath 55 | cors: { 56 | allowedOrigins: union([ 'https://portal.azure.com', 'https://ms.portal.azure.com' ], allowedOrigins) 57 | } 58 | } 59 | clientAffinityEnabled: clientAffinityEnabled 60 | httpsOnly: true 61 | } 62 | 63 | identity: { type: managedIdentity ? 'SystemAssigned' : 'None' } 64 | 65 | resource configAppSettings 'config' = { 66 | name: 'appsettings' 67 | properties: union(appSettings, 68 | { 69 | SCM_DO_BUILD_DURING_DEPLOYMENT: string(scmDoBuildDuringDeployment) 70 | ENABLE_ORYX_BUILD: string(enableOryxBuild) 71 | }, 72 | !empty(applicationInsightsName) ? { APPLICATIONINSIGHTS_CONNECTION_STRING: applicationInsights.properties.ConnectionString } : {}, 73 | !empty(keyVaultName) ? 
{ AZURE_KEY_VAULT_ENDPOINT: keyVault.properties.vaultUri } : {}) 74 | } 75 | 76 | resource configLogs 'config' = { 77 | name: 'logs' 78 | properties: { 79 | applicationLogs: { fileSystem: { level: 'Verbose' } } 80 | detailedErrorMessages: { enabled: true } 81 | failedRequestsTracing: { enabled: true } 82 | httpLogs: { fileSystem: { enabled: true, retentionInDays: 1, retentionInMb: 35 } } 83 | } 84 | dependsOn: [ 85 | configAppSettings 86 | ] 87 | } 88 | } 89 | 90 | resource keyVault 'Microsoft.KeyVault/vaults@2022-07-01' existing = if (!(empty(keyVaultName))) { 91 | name: keyVaultName 92 | } 93 | 94 | resource applicationInsights 'Microsoft.Insights/components@2020-02-02' existing = if (!empty(applicationInsightsName)) { 95 | name: applicationInsightsName 96 | } 97 | 98 | output identityPrincipalId string = managedIdentity ? appService.identity.principalId : '' 99 | output name string = appService.name 100 | output uri string = 'https://${appService.properties.defaultHostName}' 101 | -------------------------------------------------------------------------------- /src/agent/gptAgent.test.ts: -------------------------------------------------------------------------------- 1 | import { IOpenAIGPTRecord } from "@/model/openai/GPT"; 2 | import { IChatMessageRecord } from "@/message/type"; 3 | import { Logger } from "@/utils/logger"; 4 | import { AzureGPT, IAzureGPTRecord } from "@/model/azure/GPT"; 5 | import { GPTAgent, IGPTAgentRecord } from "./gptAgent"; 6 | import { AzureKeyCredential, FunctionDefinition, OpenAIClient } from "@azure/openai"; 7 | import { LLMProvider } from "@/model/llmprovider"; 8 | 9 | test('gpt agent callAsync test', async () => { 10 | const AZURE_OPENAI_API_KEY = process.env.AZURE_OPENAI_API_KEY; 11 | const AZURE_API_ENDPOINT = process.env.AZURE_API_ENDPOINT; 12 | const AZURE_GPT_3_5_TURBO_16K = process.env.AZURE_GPT_3_5_TURBO_16K; 13 | var llm = new AzureGPT( 14 | { 15 | type: "azure.gpt", 16 | deploymentID: AZURE_GPT_3_5_TURBO_16K, 17 | isChatModel: true, 18 | apiKey: AZURE_OPENAI_API_KEY, 19 | isStreaming: true, 20 | maxTokens: 64, 21 | temperature: 0.5, 22 | topP: 1, 23 | endpoint: AZURE_API_ENDPOINT, 24 | frequencyPenalty: 0, 25 | } as IAzureGPTRecord, 26 | ); 27 | 28 | expect(LLMProvider.getDefaultValue("azure.gpt") instanceof AzureGPT).toBe(true); 29 | 30 | var agent = new GPTAgent( 31 | { 32 | name: "alice", 33 | system_message: `Just say 'hello world' to every message 34 | e.g. 
35 | hello world`, 36 | avatar: "test", 37 | llm: llm, 38 | }); 39 | 40 | var userMessage = { 41 | role: 'user', 42 | content: 'hello', 43 | } as IChatMessageRecord; 44 | 45 | var response = await agent.callAsync({ 46 | messages: [userMessage], 47 | temperature: 0, 48 | }); 49 | 50 | expect(response.content).toBe("hello world"); 51 | }) 52 | 53 | test('gpt agent callAsync function_call test', async () => { 54 | const AZURE_OPENAI_API_KEY = process.env.AZURE_OPENAI_API_KEY; 55 | const AZURE_API_ENDPOINT = process.env.AZURE_API_ENDPOINT; 56 | const AZURE_GPT_3_5_TURBO_16K = process.env.AZURE_GPT_3_5_TURBO_16K; 57 | var llm = new AzureGPT( 58 | { 59 | type: "azure.gpt", 60 | deploymentID: AZURE_GPT_3_5_TURBO_16K, 61 | isChatModel: true, 62 | apiKey: AZURE_OPENAI_API_KEY, 63 | isStreaming: true, 64 | maxTokens: 64, 65 | temperature: 0.5, 66 | topP: 1, 67 | endpoint: AZURE_API_ENDPOINT, 68 | frequencyPenalty: 0, 69 | } as IAzureGPTRecord, 70 | ); 71 | 72 | var say_hi_function_definition: FunctionDefinition = { 73 | name: "say_hi", 74 | description: "say hi", 75 | parameters: { 76 | type: "object", 77 | properties: { 78 | name:{ 79 | type: "string", 80 | description: "name of the person", 81 | }, 82 | }, 83 | required: ["name"], 84 | } 85 | }; 86 | 87 | var say_hi_function = async (args: string) => { 88 | var name = JSON.parse(args).name; 89 | return `[SAY_HI_FUNCTION] hi ${name}`; 90 | } 91 | 92 | var agent = new GPTAgent( 93 | { 94 | name: "alice", 95 | system_message: `replying using say_hi function`, 96 | avatar: "test", 97 | llm: llm, 98 | function_map: new Map Promise>([ 99 | [say_hi_function_definition, say_hi_function] 100 | ]), 101 | }); 102 | 103 | var userMessage = { 104 | role: 'user', 105 | content: 'hi I am you dad', 106 | } as IChatMessageRecord; 107 | 108 | var response = await agent.callAsync({ 109 | messages: [userMessage], 110 | temperature: 0, 111 | }); 112 | expect(response.functionCall?.name).toBe("say_hi"); 113 | expect(response.role).toBe("assistant"); 114 | expect(response.content).toContain("[SAY_HI_FUNCTION]"); 115 | expect(response.from).toBe("alice"); 116 | }) 117 | 118 | -------------------------------------------------------------------------------- /src/utils/blobStorage.ts: -------------------------------------------------------------------------------- 1 | import {indexedDB as fakeIndexedDB} from "fake-indexeddb"; 2 | export interface IBlobStorage 3 | { 4 | saveBlob(blob: Blob, name: string): Promise; 5 | getBlob(name: string): Promise; 6 | deleteBlob(name: string): Promise; 7 | listBlobs(): Promise; 8 | isBlobExist(name: string): Promise; 9 | } 10 | 11 | 12 | var indexDB : IDBFactory = fakeIndexedDB; 13 | 14 | if(typeof window !== "undefined"){ 15 | indexDB = window?.indexedDB || 16 | (window as any)?.mozIndexedDB || 17 | (window as any)?.webkitIndexedDB || 18 | (window as any)?.msIndexedDB || 19 | (window as any)?.shimIndexedDB; 20 | } 21 | 22 | if (!indexDB) { 23 | throw new Error("No IndexedDB support"); 24 | } 25 | 26 | export class IndexDBBlobStorage implements IBlobStorage 27 | { 28 | private db: IDBDatabase; 29 | 30 | constructor(db: IDBDatabase) { 31 | this.db = db; 32 | } 33 | 34 | static async init(dbName: string): Promise { 35 | return await new Promise((resolve, reject) => { 36 | const request = indexDB.open(dbName, 1); 37 | request.onerror = (event: any) => { 38 | reject(event); 39 | }; 40 | request.onsuccess = (event: any) => { 41 | resolve(new IndexDBBlobStorage(request.result)); 42 | }; 43 | request.onupgradeneeded = () => { 44 | const db = 
request.result; 45 | db.createObjectStore("blobs"); 46 | }; 47 | }); 48 | } 49 | 50 | async saveBlob(blob: Blob, name: string): Promise { 51 | const transaction = this.db.transaction(["blobs"], "readwrite"); 52 | const store = transaction.objectStore("blobs"); 53 | store.put(blob, name); 54 | } 55 | 56 | async getBlob(name: string): Promise { 57 | const transaction = this.db.transaction(["blobs"], "readonly"); 58 | const store = transaction.objectStore("blobs"); 59 | const request = store.get(name); 60 | return await new Promise((resolve, reject) => { 61 | request.onerror = (event: any) => { 62 | reject(event); 63 | }; 64 | request.onsuccess = (event: any) => { 65 | resolve(request.result); 66 | }; 67 | }); 68 | } 69 | 70 | async deleteBlob(name: string): Promise { 71 | const transaction = this.db.transaction(["blobs"], "readwrite"); 72 | const store = transaction.objectStore("blobs"); 73 | store.delete(name); 74 | } 75 | 76 | async listBlobs(): Promise { 77 | const transaction = this.db.transaction(["blobs"], "readonly"); 78 | const store = transaction.objectStore("blobs"); 79 | const request = store.getAllKeys(); 80 | return await new Promise((resolve, reject) => { 81 | request.onerror = (event: any) => { 82 | reject(event); 83 | }; 84 | request.onsuccess = (event: any) => { 85 | resolve(request.result.map((key) => key.toString())); 86 | }; 87 | }); 88 | } 89 | 90 | async isBlobExist(name: string): Promise { 91 | const transaction = this.db.transaction(["blobs"], "readonly"); 92 | const store = transaction.objectStore("blobs"); 93 | const request = store.getKey(name); 94 | return await new Promise((resolve, reject) => { 95 | request.onerror = (event: any) => { 96 | reject(event); 97 | }; 98 | request.onsuccess = (event: any) => { 99 | resolve(request.result !== undefined); 100 | }; 101 | }); 102 | } 103 | 104 | async getBlobUrl(name: string): Promise { 105 | const blob = await this.getBlob(name); 106 | return URL.createObjectURL(blob); 107 | } 108 | } 109 | 110 | export const ImageBlobStorage = IndexDBBlobStorage.init("image"); 111 | export const ChatBlobStorage = IndexDBBlobStorage.init("chat"); 112 | export const TestBlobStorage = IndexDBBlobStorage.init("test"); 113 | export const vectorStorage = IndexDBBlobStorage.init("vector"); -------------------------------------------------------------------------------- /src/model/openai/GPT.ts: -------------------------------------------------------------------------------- 1 | import { ChatCompletionParams, IChatModel, IChatModelRecord } from "../type"; 2 | import { IChatMessageRecord } from "@/message/type"; 3 | import { ChatCompletionMessageParam } from "openai/resources"; 4 | import { convertToOpenAIChatMessages } from "../utils"; 5 | import { OpenAIClient, OpenAIKeyCredential } from "@azure/openai"; 6 | import { IMarkdownMessageRecord } from "@/message/MarkdownMessage"; 7 | 8 | export interface IOpenAIModel extends IChatModelRecord 9 | { 10 | maxTokens: number; 11 | temperature: number; 12 | topP: number; 13 | presencePenalty: number; 14 | frequencyPenalty: number; 15 | stop: string[]; 16 | apiKey?: string; 17 | model: string; 18 | } 19 | 20 | export interface ITextDavinci003 extends IOpenAIModel{ 21 | type: "openai.text-davinci-003"; 22 | model: "text-davinci-003"; 23 | isStreaming: true; 24 | isChatModel: false; 25 | } 26 | 27 | export interface IOpenAIGPTRecord extends IOpenAIModel{ 28 | type: "openai.gpt"; 29 | model: string | "gpt-3.5-turbo" | "gpt-3.5-turbo-16k" | "gpt-3.5-turbo-0613" | "gpt-3.5-turbo-16k-0613" | 
"gpt-3.5-turbo-0301" | "gpt-4" | "gpt-4-0613" | "gpt-4-32k" | "gpt-4-32k-0613" | "gpt-4-0314" | "gpt-4-32k-0314"; 30 | isStreaming: true; 31 | isChatModel: true; 32 | } 33 | 34 | export class OpenAIGPT implements IChatModel, IOpenAIGPTRecord{ 35 | type: "openai.gpt"; 36 | isStreaming: true; 37 | isChatModel: true; 38 | description?: string | undefined; 39 | apiKey?: string | undefined; 40 | model: string | "gpt-3.5-turbo" | "gpt-3.5-turbo-16k" | "gpt-3.5-turbo-0613" | "gpt-3.5-turbo-16k-0613" | "gpt-3.5-turbo-0301" | "gpt-4" | "gpt-4-0613" | "gpt-4-32k" | "gpt-4-32k-0613" | "gpt-4-0314" | "gpt-4-32k-0314"; 41 | maxTokens: number; 42 | temperature: number; 43 | topP: number; 44 | presencePenalty: number; 45 | frequencyPenalty: number; 46 | stop: string[]; 47 | 48 | constructor(fields: Partial){ 49 | this.type = "openai.gpt"; 50 | this.isStreaming = true; 51 | this.isChatModel = true; 52 | this.description = fields.description ?? "The ChatGPT model (gpt-35-turbo) is a language model designed for conversational interfaces and the model behaves differently than previous GPT-3 models. Previous models were text-in and text-out, meaning they accepted a prompt string and returned a completion to append to the prompt. However, the ChatGPT model is conversation-in and message-out. The model expects a prompt string formatted in a specific chat-like transcript format, and returns a completion that represents a model-written message in the chat."; 53 | this.apiKey = fields.apiKey; 54 | this.model = fields.model ?? "gpt-3.5-turbo"; 55 | this.maxTokens = fields.maxTokens ?? 64; 56 | this.temperature = fields.temperature ?? 0.7; 57 | this.topP = fields.topP ?? 1; 58 | this.presencePenalty = fields.presencePenalty ?? 0; 59 | this.frequencyPenalty = fields.frequencyPenalty ?? 0; 60 | this.stop = fields.stop ?? []; 61 | } 62 | 63 | async getChatCompletion(params: ChatCompletionParams): Promise { 64 | var client = new OpenAIClient(new OpenAIKeyCredential(this.apiKey!)); 65 | 66 | var msg = convertToOpenAIChatMessages(params.messages); 67 | 68 | var choices = await client.getChatCompletions( 69 | this.model!, 70 | msg, 71 | { 72 | temperature: params.temperature ?? this.temperature ?? 0.7, 73 | maxTokens: params.maxTokens ?? this.maxTokens ?? 64, 74 | topP: params.topP ?? this.topP ?? 1, 75 | presencePenalty: params.presencePenalty ?? this.presencePenalty ?? 0, 76 | frequencyPenalty: params.frequencyPenalty ?? this.frequencyPenalty ?? 0, 77 | stop: params.stop ?? this.stop ?? 
[], 78 | functions: params.functions, 79 | } 80 | ); 81 | 82 | var replyMessage = choices.choices[0].message; 83 | if (replyMessage == null){ 84 | throw new Error("Reply message is null"); 85 | } 86 | 87 | return { 88 | ...replyMessage, 89 | type: 'message.markdown', 90 | } as IMarkdownMessageRecord; 91 | } 92 | } -------------------------------------------------------------------------------- /src/agent/gptAgent.ts: -------------------------------------------------------------------------------- 1 | import { IAgentRecord, IAgent, AgentCallParams } from "./type"; 2 | import { IChatMessageRecord } from "@/message/type"; 3 | import { IEmbeddingModel, IChatModelRecord } from "@/model/type"; 4 | import { IMemory } from "@/memory/type"; 5 | import { Logger } from "@/utils/logger"; 6 | import { AzureGPT, IAzureGPTRecord } from "@/model/azure/GPT"; 7 | import { IOpenAIGPTRecord } from "@/model/openai/GPT"; 8 | import { LLMProvider } from "@/model/llmprovider"; 9 | import { FunctionDefinition } from "@azure/openai"; 10 | 11 | export interface IGPTAgentRecord extends IAgentRecord { 12 | type: 'agent.gpt'; 13 | llm?: IAzureGPTRecord | IOpenAIGPTRecord; 14 | memory?: IMemory; 15 | embedding?: IEmbeddingModel; 16 | name: string; 17 | group_message?: string; 18 | system_message: string; 19 | avatar: string; // url 20 | }; 21 | 22 | 23 | export class GPTAgent implements IAgent, IGPTAgentRecord { 24 | name: string 25 | type: "agent.gpt"; 26 | llm?: IAzureGPTRecord | IOpenAIGPTRecord | undefined; 27 | memory?: IMemory | undefined; 28 | embedding?: IEmbeddingModel | undefined; 29 | system_message: string; 30 | group_message?: string; 31 | avatar: string; 32 | function_map?: Map Promise>; 33 | 34 | constructor(agent: Partial Promise>}>) { 35 | Logger.debug("initialize chat agent executor"); 36 | this.name = agent.name ?? "GPT"; 37 | this.type = 'agent.gpt'; 38 | this.llm = agent.llm; 39 | this.memory = agent.memory; 40 | this.embedding = agent.embedding; 41 | this.system_message = agent.system_message ?? "You are a helpful AI assistant"; 42 | this.avatar = agent.avatar ?? "GPT"; 43 | this.function_map = agent.function_map; 44 | this.group_message = agent.group_message ?? "Hey"; 45 | } 46 | 47 | async callAsync(params: AgentCallParams): Promise { 48 | var llmRecord = this.llm; 49 | if (!llmRecord) { 50 | throw new Error("No llm provided"); 51 | } 52 | var system_msg = { 53 | role: "system", 54 | content: `Your name is ${this.name}, ${this.system_message}`, 55 | } as IChatMessageRecord; 56 | var llmProvider = LLMProvider.getProvider(llmRecord); 57 | var llm = llmProvider(llmRecord); 58 | var msg = await llm.getChatCompletion({ 59 | messages: [system_msg, ...params.messages], 60 | temperature: params.temperature, 61 | maxTokens: params.maxTokens, 62 | stop: params.stopWords, 63 | functions: this.function_map ? Array.from(this.function_map.keys()) : undefined, 64 | }); 65 | msg.from = this.name; 66 | // if message is a function_call, execute the function 67 | if(msg.functionCall != undefined && this.function_map != undefined){ 68 | var functionDefinitions = Array.from(this.function_map?.keys() ?? []); 69 | var functionDefinition = functionDefinitions.find(f => f.name == msg.functionCall!.name); 70 | var func = functionDefinition ? 
this.function_map.get(functionDefinition) : undefined; 71 | if(func){ 72 | try{ 73 | var result = await func(msg.functionCall.arguments); 74 | msg.content = result; 75 | msg.name = msg.functionCall.name; 76 | } 77 | catch(e){ 78 | var errorMsg = `Error executing function ${msg.functionCall.name}: ${e}`; 79 | msg.content = errorMsg; 80 | msg.name = msg.functionCall.name; 81 | } 82 | } 83 | else{ 84 | var availableFunctions = Array.from(this.function_map?.keys() ?? []); 85 | var errorMsg = `Function ${msg.functionCall.name} not found. Available functions: ${availableFunctions.map(f => f.name).join(", ")}`; 86 | msg.content = errorMsg; 87 | msg.functionCall = undefined; 88 | } 89 | 90 | return msg; 91 | } 92 | else{ 93 | return msg; 94 | } 95 | } 96 | } 97 | 98 | export function initializeGPTAgent(agent: IGPTAgentRecord, history?: IChatMessageRecord[]): IAgent { 99 | if (!agent.llm) { 100 | throw new Error("No llm provided"); 101 | } 102 | 103 | var agentExecutor = new GPTAgent(agent); 104 | 105 | return agentExecutor; 106 | } 107 | -------------------------------------------------------------------------------- /src/model/azure/GPT.ts: -------------------------------------------------------------------------------- 1 | import { OpenAIEmbeddings } from "langchain/embeddings/openai"; 2 | import { IEmbeddingModel, IChatModelRecord, IChatModel, ChatCompletionParams } from "@/model/type"; 3 | import { IChatMessageRecord } from "@/message/type"; 4 | import { OpenAIClient, AzureKeyCredential } from "@azure/openai"; 5 | import { convertToOpenAIChatMessages } from "../utils"; 6 | import { IMarkdownMessageRecord } from "@/message/MarkdownMessage"; 7 | 8 | // azure openai gpt parameters 9 | export interface IGPTBaseModelConfiguration extends IChatModelRecord { 10 | endpoint?: string, 11 | deploymentID?: string, 12 | apiKey?: string, 13 | temperature?: number, 14 | apiVersion?: string, 15 | maxTokens?: number, 16 | topP?: number, 17 | stop?: string[], 18 | presencePenalty?: number, 19 | frequencyPenalty?: number, 20 | } 21 | 22 | export interface IAzureGPTRecord extends IGPTBaseModelConfiguration{ 23 | type: 'azure.gpt', 24 | isStreaming: true, 25 | isChatModel: true, 26 | } 27 | 28 | 29 | export class AzureGPT implements IChatModel, IAzureGPTRecord { 30 | type: 'azure.gpt'; 31 | isStreaming: true; 32 | isChatModel: true; 33 | endpoint: string; 34 | deploymentID?: string; 35 | apiKey: string; 36 | temperature?: number; 37 | apiVersion?: string; 38 | maxTokens?: number; 39 | topP?: number; 40 | stop?: string[]; 41 | presencePenalty?: number; 42 | frequencyPenalty?: number; 43 | 44 | constructor(fields: Partial){ 45 | this.endpoint = fields.endpoint!; 46 | this.deploymentID = fields.deploymentID; 47 | this.apiKey = fields.apiKey!; 48 | this.temperature = fields.temperature; 49 | this.apiVersion = fields.apiVersion; 50 | this.maxTokens = fields.maxTokens; 51 | this.topP = fields.topP; 52 | this.stop = fields.stop; 53 | this.presencePenalty = fields.presencePenalty; 54 | this.frequencyPenalty = fields.frequencyPenalty; 55 | this.type = 'azure.gpt'; 56 | this.isStreaming = true; 57 | this.isChatModel = true; 58 | } 59 | 60 | async getChatCompletion(params: ChatCompletionParams): Promise { 61 | var client = new OpenAIClient(this.endpoint, new AzureKeyCredential(this.apiKey)); 62 | 63 | var msg = convertToOpenAIChatMessages(params.messages); 64 | 65 | var choices = await client.getChatCompletions( 66 | this.deploymentID!, 67 | msg, 68 | { 69 | temperature: params.temperature ?? this.temperature ?? 
0.7, 70 | maxTokens: params.maxTokens ?? this.maxTokens ?? 64, 71 | topP: params.topP ?? this.topP ?? 1, 72 | presencePenalty: params.presencePenalty ?? this.presencePenalty ?? 0, 73 | frequencyPenalty: params.frequencyPenalty ?? this.frequencyPenalty ?? 0, 74 | stop: params.stop ?? this.stop ?? [], 75 | functions: params.functions, 76 | } 77 | ); 78 | 79 | var replyMessage = choices.choices[0].message; 80 | if (replyMessage == null || replyMessage == undefined){ 81 | throw new Error("Reply message is null"); 82 | } 83 | 84 | return { 85 | ...replyMessage, 86 | type: 'message.markdown', 87 | } as IMarkdownMessageRecord; 88 | } 89 | } 90 | 91 | // embedding models 92 | export interface IAzureEmbeddingModel extends IEmbeddingModel{ 93 | apiKey?: string; 94 | resourceName?: string; 95 | deploymentName?: string; 96 | apiVersion?: string; 97 | } 98 | 99 | export interface IAzureTextEmbeddingAda002V2 extends IAzureEmbeddingModel{ 100 | type: "azure.text-embedding-ada-002-v2"; 101 | } 102 | 103 | export class AzureTextEmbeddingsAda002V2 extends OpenAIEmbeddings{ 104 | type = "azure.text-embedding-ada-002-v2"; 105 | 106 | constructor(fields: Partial){ 107 | super({ 108 | azureOpenAIApiKey: fields.apiKey, 109 | azureOpenAIApiDeploymentName: fields.deploymentName, 110 | azureOpenAIApiVersion: fields.apiVersion, 111 | azureOpenAIApiInstanceName: fields.resourceName, 112 | }); 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /src/components/Chat/VariableModal.tsx: -------------------------------------------------------------------------------- 1 | import { Prompt } from '@/types/prompt'; 2 | import { FC, KeyboardEvent, useEffect, useRef, useState } from 'react'; 3 | 4 | interface Props { 5 | prompt: Prompt; 6 | variables: string[]; 7 | onSubmit: (updatedVariables: string[]) => void; 8 | onClose: () => void; 9 | } 10 | 11 | export const VariableModal: FC = ({ 12 | prompt, 13 | variables, 14 | onSubmit, 15 | onClose, 16 | }) => { 17 | const [updatedVariables, setUpdatedVariables] = useState< 18 | { key: string; value: string }[] 19 | >( 20 | variables 21 | .map((variable) => ({ key: variable, value: '' })) 22 | .filter( 23 | (item, index, array) => 24 | array.findIndex((t) => t.key === item.key) === index, 25 | ), 26 | ); 27 | 28 | const modalRef = useRef(null); 29 | const nameInputRef = useRef(null); 30 | 31 | const handleChange = (index: number, value: string) => { 32 | setUpdatedVariables((prev) => { 33 | const updated = [...prev]; 34 | updated[index].value = value; 35 | return updated; 36 | }); 37 | }; 38 | 39 | const handleSubmit = () => { 40 | if (updatedVariables.some((variable) => variable.value === '')) { 41 | alert('Please fill out all variables'); 42 | return; 43 | } 44 | 45 | onSubmit(updatedVariables.map((variable) => variable.value)); 46 | onClose(); 47 | }; 48 | 49 | const handleKeyDown = (e: KeyboardEvent) => { 50 | if (e.key === 'Enter' && !e.shiftKey) { 51 | e.preventDefault(); 52 | handleSubmit(); 53 | } else if (e.key === 'Escape') { 54 | onClose(); 55 | } 56 | }; 57 | 58 | useEffect(() => { 59 | const handleOutsideClick = (e: MouseEvent) => { 60 | if (modalRef.current && !modalRef.current.contains(e.target as Node)) { 61 | onClose(); 62 | } 63 | }; 64 | 65 | window.addEventListener('click', handleOutsideClick); 66 | 67 | return () => { 68 | window.removeEventListener('click', handleOutsideClick); 69 | }; 70 | }, [onClose]); 71 | 72 | useEffect(() => { 73 | if (nameInputRef.current) { 74 | nameInputRef.current.focus(); 75 | } 76 | 
}, []); 77 | 78 | return ( 79 |
83 |
88 |
89 | {prompt.name} 90 |
91 | 92 |
93 | {prompt.description} 94 |
95 | 96 | {updatedVariables.map((variable, index) => ( 97 |
98 |
99 | {variable.key} 100 |
101 | 102 |