├── .gitattributes ├── .gitignore ├── LICENSE ├── README.md ├── api ├── .dockerignore ├── .env.example ├── .flake8 ├── .gitignore ├── Dockerfile ├── Makefile ├── app │ ├── __init__.py │ ├── agents │ │ ├── __init__.py │ │ ├── base.py │ │ ├── langchain.py │ │ ├── llm.py │ │ └── openai.py │ ├── api │ │ ├── __init__.py │ │ ├── agents.py │ │ ├── api_keys.py │ │ ├── api_user.py │ │ ├── datasources.py │ │ ├── llms.py │ │ ├── tools.py │ │ ├── vector_dbs.py │ │ ├── workflow_configs │ │ │ ├── api │ │ │ │ ├── api_agent_manager.py │ │ │ │ ├── api_agent_tool_manager.py │ │ │ │ ├── api_datasource_superrag_manager.py │ │ │ │ ├── api_manager.py │ │ │ │ └── base.py │ │ │ ├── data_transformer.py │ │ │ ├── exceptions.py │ │ │ ├── processors │ │ │ │ ├── agent_processor.py │ │ │ │ ├── base.py │ │ │ │ ├── openai.py │ │ │ │ ├── processor.py │ │ │ │ ├── superagent.py │ │ │ │ └── utils.py │ │ │ ├── saml_schema.py │ │ │ ├── validator.py │ │ │ └── workflow_configs.py │ │ └── workflows.py │ ├── datasource │ │ ├── __init__.py │ │ ├── flow.py │ │ ├── loader.py │ │ └── types.py │ ├── main.py │ ├── memory │ │ ├── __init__.py │ │ ├── base.py │ │ ├── buffer_memory.py │ │ └── message.py │ ├── models │ │ ├── __init__.py │ │ ├── request.py │ │ ├── response.py │ │ └── tools.py │ ├── routers.py │ ├── tools │ │ ├── __init__.py │ │ ├── advanced_scraper.py │ │ ├── agent.py │ │ ├── algolia.py │ │ ├── base.py │ │ ├── bing_search.py │ │ ├── browser.py │ │ ├── chatgpt.py │ │ ├── code_interpreter.py │ │ ├── datasource.py │ │ ├── e2b.py │ │ ├── flow.py │ │ ├── function.py │ │ ├── gpt_vision.py │ │ ├── hand_off.py │ │ ├── http.py │ │ ├── metaphor.py │ │ ├── openapi.py │ │ ├── openbb.py │ │ ├── prompts.py │ │ ├── pubmed.py │ │ ├── replicate.py │ │ ├── scraper.py │ │ ├── tavily.py │ │ ├── tts_1.py │ │ ├── wolfram_alpha.py │ │ └── zapier.py │ ├── utils │ │ ├── __init__.py │ │ ├── analytics.py │ │ ├── api.py │ │ ├── helpers.py │ │ ├── llm.py │ │ ├── prisma.py │ │ └── streaming.py │ ├── vectorstores │ │ ├── 
__init__.py │ │ ├── abstract.py │ │ ├── astra.py │ │ ├── astra_client.py │ │ ├── base.py │ │ ├── embeddings.py │ │ ├── pinecone.py │ │ ├── qdrant.py │ │ ├── supabase.py │ │ └── weaviate.py │ └── workflows │ │ ├── __init__.py │ │ └── base.py ├── lint-and-format.sh ├── package-lock.json ├── poetry.lock ├── prisma │ ├── migrations │ │ ├── 20230822214343_agent_llm │ │ │ └── migration.sql │ │ ├── 20230823195402_add_llm │ │ │ └── migration.sql │ │ ├── 20230823200614_change_ids │ │ │ └── migration.sql │ │ ├── 20230823211516_llm_options_default │ │ │ └── migration.sql │ │ ├── 20230823212925_remove_llm_options_default │ │ │ └── migration.sql │ │ ├── 20230824065536_add_agent_prompt │ │ │ └── migration.sql │ │ ├── 20230824070125_llm_agent_one_to_many │ │ │ └── migration.sql │ │ ├── 20230824070755_add_agent_llm_mapping │ │ │ └── migration.sql │ │ ├── 20230824084308_api_user │ │ │ └── migration.sql │ │ ├── 20230824103528_api_user_token_optional │ │ │ └── migration.sql │ │ ├── 20230824110611_add_api_user_to_models │ │ │ └── migration.sql │ │ ├── 20230824114232_add_datasources │ │ │ └── migration.sql │ │ ├── 20230831081114_datasource_metadata │ │ │ └── migration.sql │ │ ├── 20230831092903_datasource_metadata_string │ │ │ └── migration.sql │ │ ├── 20230831105225_add_datasource_types │ │ │ └── migration.sql │ │ ├── 20230901072519_agent_tools │ │ │ └── migration.sql │ │ ├── 20230901115947_tool_bing_search │ │ │ └── migration.sql │ │ ├── 20230901124505_remove_redundant_fields │ │ │ └── migration.sql │ │ ├── 20230901182450_pubmed_tool │ │ │ └── migration.sql │ │ ├── 20230901183619_tool_metadata_optional │ │ │ └── migration.sql │ │ ├── 20230901184227_tool_metadata_mandatory │ │ │ └── migration.sql │ │ ├── 20230904062421_add_worflow │ │ │ └── migration.sql │ │ ├── 20230904063106_workflow_api_user │ │ │ └── migration.sql │ │ ├── 20230904074324_add_workflow_llm │ │ │ └── migration.sql │ │ ├── 20230904082445_fix_workflow_misspelling │ │ │ └── migration.sql │ │ ├── 
20230904083153_remove_workflow_llm │ │ │ └── migration.sql │ │ ├── 20230907080928_remove_llm_model │ │ │ └── migration.sql │ │ ├── 20230907090814_agent_description │ │ │ └── migration.sql │ │ ├── 20230912073334_tool_return_direct │ │ │ └── migration.sql │ │ ├── 20230913070205_agent_avatar │ │ │ └── migration.sql │ │ ├── 20230915080507_datasource_status │ │ │ └── migration.sql │ │ ├── 20230917191411_datasource_status_failed │ │ │ └── migration.sql │ │ ├── 20230918070039_agent_cascade_delete │ │ │ └── migration.sql │ │ ├── 20230920060753_add_pptx_datasource_type │ │ │ └── migration.sql │ │ ├── 20230920070547_datasource_docx │ │ │ └── migration.sql │ │ ├── 20230920072352_datasource_xlsx │ │ │ └── migration.sql │ │ ├── 20230920081659_datasource_google_doc │ │ │ └── migration.sql │ │ ├── 20230921064724_code_executor │ │ │ └── migration.sql │ │ ├── 20230928102507_api_user_email │ │ │ └── migration.sql │ │ ├── 20231001110155_llm_azure_openai │ │ │ └── migration.sql │ │ ├── 20231001161850_datassource_content │ │ │ └── migration.sql │ │ ├── 20231016065521_agent_initial_message │ │ │ └── migration.sql │ │ ├── 20231029210807_tool_openbb │ │ │ └── migration.sql │ │ ├── 20231106194639_gpt_4_1106_preview │ │ │ └── migration.sql │ │ ├── 20231106194841_gpt_4_1106_preview_fix │ │ │ └── migration.sql │ │ ├── 20231106224640_vision_tool │ │ │ └── migration.sql │ │ ├── 20231107204227_tts1_tool │ │ │ └── migration.sql │ │ ├── 20231112132755_update_model │ │ │ └── migration.sql │ │ ├── 20231113210515_huggingface_models │ │ │ └── migration.sql │ │ ├── 20231114202204_algolia_tool │ │ │ └── migration.sql │ │ ├── 20231122081046_handoff_tool │ │ │ └── migration.sql │ │ ├── 20231124220817_function_tool │ │ │ └── migration.sql │ │ ├── 20231217152121_add_tool_config │ │ │ └── migration.sql │ │ ├── 20231217220650_remove_workflow_inputs │ │ │ └── migration.sql │ │ ├── 20231223104946_add_http_tool │ │ │ └── migration.sql │ │ ├── 20240102071238_add_vectordb_table │ │ │ └── migration.sql │ │ ├── 
20240110062120_add_hugging_face_mixtral_8x7b_model │ │ │ └── migration.sql │ │ ├── 20240119040422_add_supabase_pgvector │ │ │ └── migration.sql │ │ ├── 20240121183424_add_on_delete_cascade_to_workflow_step │ │ │ └── migration.sql │ │ ├── 20240124063011_make_agent_llm_model_optional │ │ │ └── migration.sql │ │ ├── 20240129153542_add_workflow_config_table │ │ │ └── migration.sql │ │ ├── 20240201161130_add_gpt_4_turbo_preview │ │ │ └── migration.sql │ │ ├── 20240201221548_gpt_3_5_turbo_0125 │ │ │ └── migration.sql │ │ ├── 20240201224222_agent_type_v2 │ │ │ └── migration.sql │ │ ├── 20240202033257_add_openai_assistants │ │ │ └── migration.sql │ │ ├── 20240204133952_update_openai_assistants_table │ │ │ └── migration.sql │ │ └── migration_lock.toml │ └── schema.prisma ├── pyproject.toml ├── replit.sh ├── supabase │ ├── .gitignore │ ├── config.toml │ └── seed.sql └── tests │ └── __init__.py ├── biome.json ├── bun.lockb ├── index.ts ├── package.json ├── setup-solana.sh ├── tsconfig.json └── web ├── .editorconfig ├── .env.example ├── .gitignore ├── .lintstagedrc.js ├── .lintstagedrc.json ├── Dockerfile ├── README.md ├── app ├── agents │ ├── [agentId] │ │ ├── avatar.tsx │ │ ├── chat.tsx │ │ ├── delete-agent-button.tsx │ │ ├── header.tsx │ │ ├── page.tsx │ │ ├── prompt-footer.tsx │ │ ├── prompt-form.tsx │ │ └── settings.tsx │ ├── columns.tsx │ ├── data-table.tsx │ ├── loading.tsx │ └── page.tsx ├── api │ └── stripe │ │ └── webhook │ │ └── route.ts ├── auth │ ├── callback │ │ └── route.ts │ ├── login.ts │ ├── logout.ts │ └── sign-up.ts ├── billing-modal.tsx ├── container.tsx ├── integrations │ ├── client-page.tsx │ ├── page.tsx │ └── storage.tsx ├── layout.tsx ├── loading.tsx ├── logs │ └── page.tsx ├── onboarding │ ├── client-page.tsx │ └── page.tsx ├── page.tsx ├── settings │ ├── api-keys │ │ ├── client-page.tsx │ │ └── page.tsx │ ├── appearance │ │ ├── client-page.tsx │ │ └── page.tsx │ ├── billing │ │ ├── client-page.tsx │ │ └── page.tsx │ ├── client-page.tsx │ ├── 
layout.tsx │ └── page.tsx └── workflows │ ├── [id] │ ├── chat.tsx │ ├── editor.ts │ ├── function-calls.tsx │ ├── llm-dialog.tsx │ ├── overview.tsx │ ├── page.tsx │ ├── prompt-form.tsx │ ├── saml.tsx │ └── workflow.tsx │ ├── cards.tsx │ ├── header.tsx │ ├── layout.tsx │ └── page.tsx ├── bun.lockb ├── components.json ├── components ├── account-sidebar.tsx ├── analytics.tsx ├── codeblock.tsx ├── data-table-pagination.tsx ├── icons.tsx ├── log-list.tsx ├── logo.tsx ├── markdown.tsx ├── message.tsx ├── non-ideal-state.tsx ├── sidebar.tsx ├── theme-provider.tsx ├── theme-toggle.tsx ├── ui │ ├── accordion.tsx │ ├── alert-dialog.tsx │ ├── alert.tsx │ ├── avatar.tsx │ ├── badge.tsx │ ├── button.tsx │ ├── card.tsx │ ├── checkbox.tsx │ ├── command.tsx │ ├── dialog.tsx │ ├── dropdown-menu.tsx │ ├── form.tsx │ ├── input.tsx │ ├── label.tsx │ ├── menubar.tsx │ ├── multi-select.tsx │ ├── popover.tsx │ ├── radio-group.tsx │ ├── resizable.tsx │ ├── scroll-area.tsx │ ├── select.tsx │ ├── separator.tsx │ ├── skeleton.tsx │ ├── spinner.tsx │ ├── table.tsx │ ├── tabs.tsx │ ├── textarea.tsx │ ├── toast.tsx │ ├── toaster.tsx │ ├── tooltip.tsx │ └── use-toast.ts └── upload-button.tsx ├── config ├── saml.ts └── site.ts ├── lib ├── api.ts ├── fonts.ts ├── hooks │ ├── use-copy-to-clipboard.tsx │ └── use-enter-submit.tsx ├── posthog.ts ├── segment.ts ├── stripe.ts ├── supabase.ts └── utils.ts ├── middleware.ts ├── models └── models.ts ├── next-env.d.ts ├── next.config.mjs ├── package.json ├── postcss.config.js ├── prettier.config.js ├── public ├── android-chrome-192x192.png ├── android-chrome-512x512.png ├── apple-touch-icon.png ├── azure-logo.png ├── datastax.jpeg ├── favicon-16x16.png ├── favicon-32x32.png ├── favicon.ico ├── filepickers.png ├── hf-logo.png ├── logo.png ├── meta-logo.png ├── openai-icon-2021x2048-4rpe5x7n.png ├── openai-logo.png ├── pinecone.png ├── qdrant.png ├── supabase.png ├── thirteen.svg ├── weaviate.png └── workflow.png ├── styles └── globals.css ├── supabase ├── 
.gitignore ├── config.toml ├── migrations │ ├── 20230905121226_create_profiles_table.sql │ ├── 20230905131437_create_profiles_company.sql │ ├── 20230928190307_stripe-id.sql │ ├── 20231223183054_auth_script.sql │ └── 20240130053612_stripe_plan_id.sql └── seed.sql ├── tailwind.config.js ├── tsconfig.json └── types ├── agent.ts ├── llm.ts ├── log-item.ts ├── nav.ts └── profile.ts /.gitattributes: -------------------------------------------------------------------------------- 1 | # Ensure that .sh scripts use LF as line separator, even if they are checked out 2 | # to Windows(NTFS) file-system, by a user of Docker for Window. 3 | # These .sh scripts will be run from the Container after `docker compose up -d`. 4 | # If they appear to be CRLF style, Dash from the Container will fail to execute 5 | # them. 6 | 7 | *.sh text eol=lf 8 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | # Open Source License 2 | 3 | Dify is licensed under the Apache License 2.0, with the following additional conditions: 4 | 5 | 1. Dify may be utilized commercially, including as a backend service for other applications or as an application development platform for enterprises. Should the conditions below be met, a commercial license must be obtained from the producer: 6 | 7 | a. Multi-tenant service: Unless explicitly authorized by Dify in writing, you may not use the Dify source code to operate a multi-tenant environment. 8 | - Tenant Definition: Within the context of Dify, one tenant corresponds to one workspace. The workspace provides a separated area for each tenant's data and configurations. 9 | 10 | b. LOGO and copyright information: In the process of using Dify's frontend, you may not remove or modify the LOGO or copyright information in the Dify console or applications. This restriction is inapplicable to uses of Dify that do not involve its frontend. 
11 | - Frontend Definition: For the purposes of this license, the "frontend" of Dify includes all components located in the `web/` directory when running Dify from the raw source code, or the "web" image when running Dify with Docker. 12 | 13 | Please contact business@dify.ai by email to inquire about licensing matters. 14 | 15 | 2. As a contributor, you should agree that: 16 | 17 | a. The producer can adjust the open-source agreement to be more strict or relaxed as deemed necessary. 18 | b. Your contributed code may be used for commercial purposes, including but not limited to its cloud business operations. 19 | 20 | Apart from the specific conditions mentioned above, all other rights and restrictions follow the Apache License 2.0. Detailed information about the Apache License 2.0 can be found at http://www.apache.org/licenses/LICENSE-2.0. 21 | 22 | The interactive design of this product is protected by appearance patent. 23 | 24 | © 2024 LangGenius, Inc. 25 | 26 | 27 | ---------- 28 | 29 | Licensed under the Apache License, Version 2.0 (the "License"); 30 | you may not use this file except in compliance with the License. 31 | You may obtain a copy of the License at 32 | 33 | http://www.apache.org/licenses/LICENSE-2.0 34 | 35 | Unless required by applicable law or agreed to in writing, software 36 | distributed under the License is distributed on an "AS IS" BASIS, 37 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 38 | See the License for the specific language governing permissions and 39 | limitations under the License. 
40 | -------------------------------------------------------------------------------- /api/.dockerignore: -------------------------------------------------------------------------------- 1 | .git 2 | .DS_Store 3 | .gitignore 4 | .dockerignore 5 | 6 | # PyCharm specific folder 7 | 8 | .idea 9 | 10 | # Environments 11 | 12 | .env 13 | .venv 14 | env/ 15 | venv/ 16 | ENV/ 17 | 18 | # UI 19 | ui/ -------------------------------------------------------------------------------- /api/.env.example: -------------------------------------------------------------------------------- 1 | # Base (mandatory) 2 | OPENAI_API_KEY= 3 | DATABASE_URL= 4 | DATABASE_MIGRATION_URL= 5 | JWT_SECRET= 6 | # Mandatory for running Open Source LLMs 7 | OPENROUTER_API_KEY= 8 | # Mandatory for Neon DB 9 | DATABASE_SHADOW_URL= 10 | # Memory (mandatory) 11 | MEMORY_API_URL=https://memory.hmai.sh 12 | # NOTE: Vectorstores (one is mandatory if you plan on loading datasources) 13 | VECTORSTORE=pinecone # `qdrant`, `weaviate` etc. 14 | # Qdrant vars 15 | QDRANT_API_KEY= 16 | QDRANT_HOST= 17 | QDRANT_INDEX=hmai 18 | # Weaviate vars 19 | WEAVIATE_API_KEY= 20 | WEAVIATE_INDEX=hmai 21 | WEAVIATE_URL= 22 | # Pinecone vars 23 | PINECONE_ENVIRONMENT= 24 | PINECONE_API_KEY= 25 | PINECONE_INDEX= 26 | # Astra vars 27 | ASTRA_DB_ID= 28 | ASTRA_DB_REGION= 29 | ASTRA_DB_APPLICATION_TOKEN= 30 | ASTRA_DB_COLLECTION_NAME= 31 | ASTRA_DB_KEYSPACE_NAME= 32 | 33 | # Supabase vars 34 | SUPABASE_DB_URL= # e.g. postgresql://janedoe:mypassword@localhost:5432/mydb 35 | SUPABASE_TABLE_NAME= # e.g. 
hmai 36 | 37 | # E2B (code execution) 38 | E2B_API_KEY=e2b_21b611cdf96fad06a6a819708734be20cfe8b777 39 | # Replicate LLM/tool 40 | REPLICATE_API_TOKEN= 41 | # AgentOps session tracking 42 | AGENTOPS_API_KEY= 43 | AGENTOPS_ORG_KEY=843bf677-e691-45ad-97cf-909e99f9ad83 44 | # Langfuse tracing 45 | LANGFUSE_PUBLIC_KEY= 46 | LANGFUSE_SECRET_KEY= 47 | LANGFUSE_HOST=https://cloud.langfuse.com 48 | # Langsmith tracing 49 | LANGCHAIN_TRACING_V2=False 50 | LANGCHAIN_ENDPOINT="https://api.smith.langchain.com" 51 | LANGCHAIN_API_KEY= 52 | LANGSMITH_PROJECT_ID= 53 | # Agentops tracking 54 | AGENTOPS_API_KEY= 55 | AGENTOPS_ORG_KEY= 56 | # Finetunes 57 | LAMINI_API_KEY= 58 | # Tracking 59 | SEGMENT_WRITE_KEY= 60 | 61 | -------------------------------------------------------------------------------- /api/.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | exclude = 3 | venv 4 | .venv 5 | __pycache__ 6 | notebooks 7 | # Recommend matching the black line length (default 88), 8 | # rather than using the flake8 default of 79: 9 | max-line-length = 88 10 | extend-ignore = 11 | # See https://github.com/PyCQA/pycodestyle/issues/373 12 | E203, 13 | E501, 14 | -------------------------------------------------------------------------------- /api/.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | *.pyc 3 | .env 4 | .venv 5 | superenv/ 6 | .DS_Store 7 | venv/ 8 | /.vscode 9 | /.codesandbox 10 | 11 | ## GUI IGNORE 12 | 13 | # dependencies 14 | node_modules 15 | /.pnp 16 | .pnp.js 17 | 18 | # testing 19 | /coverage 20 | 21 | # next.js 22 | .next/ 23 | /out/ 24 | 25 | # production 26 | /build 27 | 28 | # misc 29 | *.pem 30 | 31 | # debug 32 | npm-debug.log* 33 | yarn-debug.log* 34 | yarn-error.log* 35 | 36 | # local env files 37 | .env*.local 38 | .env 39 | 40 | # vercel 41 | .vercel 42 | 43 | # typescript 44 | *.tsbuildinfo 45 | next-env.d.ts 46 | 47 | # docker 48 | 
.docker/docker.env 49 | .docker/data/ 50 | 51 | # supabase 52 | v2.code-workspace 53 | 54 | # replit 55 | /.pythonlibs 56 | 57 | # service keys 58 | google_cloud_service_key.json -------------------------------------------------------------------------------- /api/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.11 AS builder 2 | # Use the python latest image 3 | 4 | RUN pip install poetry 5 | 6 | ENV POETRY_NO_INTERACTION=1 \ 7 | POETRY_VIRTUALENVS_IN_PROJECT=1 \ 8 | POETRY_VIRTUALENVS_CREATE=1 \ 9 | POETRY_CACHE_DIR=/tmp/poetry_cache \ 10 | MAX_CONCURRENCY=20 11 | 12 | WORKDIR /app 13 | 14 | # Copy only dependency files for layer caching 15 | COPY pyproject.toml poetry.lock ./ 16 | 17 | # Install the required packages of the application into .venv 18 | RUN poetry install --no-root && rm -rf $POETRY_CACHE_DIR 19 | 20 | FROM python:3.11 AS runtime 21 | 22 | RUN apt-get update && apt-get install -y curl ca-certificates gnupg netcat-openbsd && \ 23 | mkdir -p /etc/apt/keyrings && \ 24 | curl -fsSL https://deb.nodesource.com/gpgkey/nodesource.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg && \ 25 | echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x bookworm main" | tee /etc/apt/sources.list.d/nodesource.list && \ 26 | apt-get update && \ 27 | apt-get install -y nodejs 28 | 29 | ENV PATH="/app/.venv/bin:$PATH" 30 | ENV PORT="8080" 31 | 32 | COPY --from=builder /app/.venv /app/.venv 33 | 34 | COPY . ./ 35 | 36 | RUN prisma generate 37 | 38 | CMD exec gunicorn --bind :$PORT --workers 2 --timeout 0 --worker-class uvicorn.workers.UvicornWorker --threads 8 app.main:app 39 | -------------------------------------------------------------------------------- /api/Makefile: -------------------------------------------------------------------------------- 1 | format: 2 | poetry run black . 3 | poetry run ruff --select I --fix . 4 | poetry run vulture . 
--exclude=venv 5 | 6 | PYTHON_FILES=. 7 | lint: PYTHON_FILES=. 8 | lint_diff: PYTHON_FILES=$(shell git diff --name-only --diff-filter=d master | grep -E '\.py$$') 9 | 10 | lint lint_diff: 11 | poetry run black $(PYTHON_FILES) --check 12 | poetry run ruff . 13 | poetry run vulture . --exclude=venv -------------------------------------------------------------------------------- /api/app/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HMaiLab/HMAI/79ddaa324f41f7bf81236991c37c5007eb9c02f8/api/app/__init__.py -------------------------------------------------------------------------------- /api/app/agents/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /api/app/agents/openai.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from langchain.agents import AgentExecutor 4 | from langchain.agents.openai_assistant import OpenAIAssistantRunnable 5 | from langchain.schema.messages import AIMessage 6 | from langchain.schema.output import ChatGeneration, LLMResult 7 | 8 | from app.agents.base import AgentBase 9 | 10 | 11 | class OpenAiAssistant(AgentBase): 12 | async def get_agent(self): 13 | assistant_id = self.agent_config.metadata.get("id") 14 | agent = OpenAIAssistantRunnable(assistant_id=assistant_id, as_agent=True) 15 | enable_streaming = self.enable_streaming 16 | 17 | class CustomAgentExecutor(AgentExecutor): 18 | async def ainvoke(self, *args, **kwargs): 19 | res = await super().ainvoke(*args, **kwargs) 20 | 21 | if enable_streaming: 22 | output = res.get("output").split(" ") 23 | # TODO: find a better way to get the streaming callback 24 | streaming = kwargs["config"]["callbacks"][0] 25 | await streaming.on_llm_start() 26 | 27 | tasks = [] 28 | 29 | for token in output: 30 | task = 
streaming.on_llm_new_token(token + " ") 31 | tasks.append(task) 32 | 33 | await asyncio.gather(*tasks) 34 | 35 | await streaming.on_llm_end( 36 | response=LLMResult( 37 | generations=[ 38 | [ 39 | ChatGeneration( 40 | message=AIMessage( 41 | content=res.get("output"), 42 | ) 43 | ) 44 | ] 45 | ], 46 | ) 47 | ) 48 | 49 | return res 50 | 51 | agent_executor = CustomAgentExecutor(agent=agent, tools=[]) 52 | 53 | return agent_executor -------------------------------------------------------------------------------- /api/app/api/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HMaiLab/HMAI/79ddaa324f41f7bf81236991c37c5007eb9c02f8/api/app/api/__init__.py -------------------------------------------------------------------------------- /api/app/api/workflow_configs/api/base.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | 3 | 4 | class BaseApiAgentManager(ABC): 5 | """ 6 | Abstract class for managing agents. 7 | It can be Agent or Agent as a tool 8 | """ 9 | 10 | @abstractmethod 11 | async def get_assistant(self, assistant: dict): 12 | pass 13 | 14 | @abstractmethod 15 | async def get_datasource(self, assistant: dict, datasource: dict): 16 | pass 17 | 18 | @abstractmethod 19 | async def get_tool(self, assistant: dict, tool: dict): 20 | pass 21 | 22 | @abstractmethod 23 | async def add_assistant(self, data: dict, order: int | None = None): 24 | pass 25 | 26 | @abstractmethod 27 | async def create_assistant(self, data: dict): 28 | pass 29 | 30 | @abstractmethod 31 | async def delete_assistant(self, assistant: dict): 32 | pass 33 | 34 | @abstractmethod 35 | async def update_assistant(self, assistant: dict, data: dict): 36 | pass 37 | 38 | 39 | class BaseApiDatasourceManager(ABC): 40 | """ 41 | Abstract class for managing datasources. 
42 | It can be Naive RAG or Super RAG 43 | """ 44 | 45 | @abstractmethod 46 | async def add_datasource(self, assistant: dict, data: dict): 47 | pass 48 | 49 | @abstractmethod 50 | async def delete_datasource(self, assistant: dict, datasource: dict): 51 | pass -------------------------------------------------------------------------------- /api/app/api/workflow_configs/exceptions.py: -------------------------------------------------------------------------------- 1 | class MissingVectorDatabaseProvider(Exception): 2 | pass 3 | 4 | 5 | class UnkownFileType(Exception): 6 | pass -------------------------------------------------------------------------------- /api/app/api/workflow_configs/processors/base.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | 3 | from app.api.workflow_configs.api.api_manager import ApiManager 4 | 5 | 6 | class BaseProcessor(ABC): 7 | def __init__( 8 | self, 9 | assistant: dict, 10 | api_manager: ApiManager, 11 | api_user, 12 | ): 13 | self.assistant = assistant 14 | self.api_manager = api_manager 15 | self.api_user = api_user 16 | 17 | @abstractmethod 18 | async def process(self, old_data, new_data): 19 | pass -------------------------------------------------------------------------------- /api/app/api/workflow_configs/processors/openai.py: -------------------------------------------------------------------------------- 1 | from app.api.agents import OpenAIAssistantSdk 2 | from app.api.workflow_configs.processors.base import BaseProcessor 3 | from app.utils.helpers import get_first_non_null_key 4 | from app.utils.prisma import prisma 5 | 6 | 7 | class OpenaiDataProcessor(BaseProcessor): 8 | async def process(self, old_data, new_data): 9 | old_urls = old_data.get("urls") or [] 10 | new_urls = new_data.get("urls") or [] 11 | 12 | if set(old_urls) != set(new_urls): 13 | agent = await self.api_manager.agent_manager.get_assistant(self.assistant) 14 | 15 | llm = await 
prisma.llm.find_first( 16 | where={ 17 | "provider": "OPENAI", 18 | "apiUserId": self.api_manager.api_user.id, 19 | } 20 | ) 21 | 22 | assistant_sdk = OpenAIAssistantSdk(llm) 23 | metadata = agent.metadata 24 | 25 | file_ids = metadata.get("file_ids", []) 26 | 27 | while len(file_ids) > 0: 28 | file_id = file_ids.pop() 29 | await assistant_sdk.delete_file(file_id) 30 | 31 | for url in new_urls: 32 | file = await assistant_sdk.upload_file(url) 33 | file_ids.append(file.id) 34 | 35 | metadata["file_ids"] = file_ids 36 | 37 | await self.api_manager.agent_manager.update_assistant( 38 | assistant=self.assistant, 39 | data={ 40 | "metadata": metadata, 41 | }, 42 | ) 43 | 44 | 45 | class OpenaiToolProcessor(BaseProcessor): 46 | async def process(self, old_tools, new_tools): 47 | if old_tools != new_tools: 48 | agent = await self.api_manager.agent_manager.get_assistant(self.assistant) 49 | 50 | metadata = agent.metadata 51 | 52 | tool_types = [ 53 | { 54 | "type": get_first_non_null_key(tool), 55 | } 56 | for tool in new_tools 57 | ] 58 | 59 | metadata["tools"] = tool_types 60 | 61 | await self.api_manager.agent_manager.update_assistant( 62 | assistant=self.assistant, 63 | data={ 64 | "metadata": metadata, 65 | }, 66 | ) -------------------------------------------------------------------------------- /api/app/api/workflow_configs/processors/processor.py: -------------------------------------------------------------------------------- 1 | from app.api.workflow_configs.api.api_manager import ApiManager 2 | from app.api.workflow_configs.processors.base import BaseProcessor 3 | from prisma.enums import AgentType 4 | 5 | from .openai import ( 6 | OpenaiDataProcessor, 7 | OpenaiToolProcessor, 8 | ) 9 | from .superagent import ( 10 | SuperagentDataProcessor, 11 | SuperagentToolProcessor, 12 | SuperragDataProcessor, 13 | ) 14 | 15 | 16 | class Processor: 17 | def __init__(self, api_user, api_manager: ApiManager): 18 | self.api_user = api_user 19 | self.api_manager = api_manager 20 
| 21 | def get_data_processor(self, assistant: dict) -> BaseProcessor: 22 | if assistant.get("type") == AgentType.OPENAI_ASSISTANT: 23 | return OpenaiDataProcessor(assistant, self.api_manager, self.api_user) 24 | return SuperagentDataProcessor(assistant, self.api_manager, self.api_user) 25 | 26 | def get_tool_processor(self, assistant: dict) -> BaseProcessor: 27 | if assistant.get("type") == AgentType.OPENAI_ASSISTANT: 28 | return OpenaiToolProcessor(assistant, self.api_manager, self.api_user) 29 | return SuperagentToolProcessor(assistant, self.api_manager, self.api_user) 30 | 31 | def get_superrag_processor(self, assistant: dict) -> BaseProcessor: 32 | return SuperragDataProcessor(assistant, self.api_manager, self.api_user) -------------------------------------------------------------------------------- /api/app/api/workflow_configs/processors/utils.py: -------------------------------------------------------------------------------- 1 | from prisma.enums import AgentType 2 | 3 | 4 | def check_is_agent_tool(tool_type): 5 | for agent_type in AgentType: 6 | if tool_type == agent_type.value: 7 | return True -------------------------------------------------------------------------------- /api/app/datasource/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HMaiLab/HMAI/79ddaa324f41f7bf81236991c37c5007eb9c02f8/api/app/datasource/__init__.py -------------------------------------------------------------------------------- /api/app/datasource/types.py: -------------------------------------------------------------------------------- 1 | VALID_UNSTRUCTURED_DATA_TYPES = [ 2 | "TXT", 3 | "PDF", 4 | "DOCX", 5 | "PPTX", 6 | "GOOGLE_DOC", 7 | "MARKDOWN", 8 | "GITHUB_REPOSITORY", 9 | "WEBPAGE", 10 | "NOTION", 11 | "URL", 12 | "YOUTUBE", 13 | ] 14 | 15 | VALID_STRUCTURED_DATA_TYPES = ["AIRTABLE", "CSV", "STRIPE", "XLSX"] -------------------------------------------------------------------------------- 
/api/app/main.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import time 3 | 4 | import colorlog 5 | from fastapi import FastAPI, Request 6 | from fastapi.middleware.cors import CORSMiddleware 7 | 8 | from app.routers import router 9 | from app.utils.prisma import prisma 10 | 11 | # Create a color formatter 12 | formatter = colorlog.ColoredFormatter( 13 | "%(log_color)s%(levelname)s: %(message)s", 14 | log_colors={ 15 | "DEBUG": "cyan", 16 | "INFO": "green", 17 | "WARNING": "yellow", 18 | "ERROR": "red", 19 | "CRITICAL": "bold_red", 20 | }, 21 | secondary_log_colors={}, 22 | style="%", 23 | ) # Create a console handler and set the formatter 24 | console_handler = logging.StreamHandler() 25 | console_handler.setFormatter(formatter) 26 | 27 | logging.basicConfig( 28 | level=logging.INFO, 29 | format="%(levelname)s: %(message)s", 30 | handlers=[console_handler], 31 | force=True, 32 | ) 33 | 34 | app = FastAPI( 35 | title="hmai", 36 | docs_url="/", 37 | description="The Open Source AI Assistant Framework & API", 38 | version="0.2.3", 39 | servers=[{"url": "https://api.beta.hmai.sh"}], 40 | ) 41 | 42 | app.add_middleware( 43 | CORSMiddleware, 44 | allow_origins=["*"], 45 | allow_credentials=True, 46 | allow_methods=["*"], 47 | allow_headers=["*"], 48 | ) 49 | 50 | 51 | @app.middleware("http") 52 | async def add_process_time_header(request: Request, call_next): 53 | start_time = time.time() 54 | response = await call_next(request) 55 | process_time = time.time() - start_time 56 | print(f"Total request time: {process_time} secs") 57 | return response 58 | 59 | 60 | @app.on_event("startup") 61 | async def startup(): 62 | await prisma.connect() 63 | 64 | 65 | @app.on_event("shutdown") 66 | async def shutdown(): 67 | await prisma.disconnect() 68 | 69 | 70 | app.include_router(router) 71 | -------------------------------------------------------------------------------- /api/app/memory/__init__.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/HMaiLab/HMAI/79ddaa324f41f7bf81236991c37c5007eb9c02f8/api/app/memory/__init__.py -------------------------------------------------------------------------------- /api/app/memory/base.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | from typing import List 3 | 4 | from app.memory.memory_stores.base import BaseMemoryStore 5 | from app.memory.message import BaseMessage 6 | 7 | 8 | class BaseMemory(ABC): 9 | memory_store: BaseMemoryStore 10 | 11 | @abstractmethod 12 | def add_message(self, message: BaseMessage) -> None: 13 | ... 14 | 15 | @abstractmethod 16 | async def aadd_message(self, message: BaseMessage) -> None: 17 | ... 18 | 19 | @abstractmethod 20 | def get_messages(self) -> List[BaseMessage]: 21 | """ 22 | List all the messages stored in the memory. 23 | Messages are returned in the descending order of their creation. 24 | """ 25 | ... 26 | 27 | @abstractmethod 28 | def clear(self) -> None: 29 | ... 
from typing import Optional

from litellm import model_cost

from app.memory.base import BaseMemory
from app.memory.memory_stores.base import BaseMemoryStore
from app.memory.message import BaseMessage

# Fraction of a model's context window that chat history may occupy,
# leaving head-room for the completion.
DEFAULT_TOKEN_LIMIT_RATIO = 0.75
# Fallback context size when the model is unknown to litellm.
DEFAULT_TOKEN_LIMIT = 3072


def get_context_window(model: str) -> int:
    """Return the maximum input-token count for ``model``.

    Looks the model up in litellm's ``model_cost`` table, retrying without
    the provider prefix because some entries are keyed both ways, and
    falls back to ``DEFAULT_TOKEN_LIMIT`` when the model is unknown.
    """
    max_input_tokens = model_cost.get(model, {}).get("max_input_tokens")

    # Some models don't have a provider prefix in their name
    # But they point to the same model
    # Example: claude-3-haiku-20240307 and anthropic/claude-3-haiku-20240307
    if not max_input_tokens:
        model_parts = model.split("/", 1)
        if len(model_parts) > 1:
            model_without_prefix = model_parts[1]
            max_input_tokens = model_cost.get(model_without_prefix, {}).get(
                "max_input_tokens"
            )

    # BUG FIX: an unknown model *without* a provider prefix previously made
    # this function return None (the default was only applied on the prefix
    # path), which crashed BufferMemory.__init__ on
    # `None * DEFAULT_TOKEN_LIMIT_RATIO`. Always fall back to the default.
    return max_input_tokens or DEFAULT_TOKEN_LIMIT


class BufferMemory(BaseMemory):
    """Token-bounded message buffer on top of a memory store."""

    def __init__(
        self,
        memory_store: BaseMemoryStore,
        tokenizer_fn: callable,
        model: str,
        max_tokens: Optional[int] = None,
    ):
        self.memory_store = memory_store
        self.tokenizer_fn = tokenizer_fn
        self.model = model
        # Caller-supplied hard cap, or a fraction of the model's window.
        self.context_window = max_tokens or (
            get_context_window(model=model) * DEFAULT_TOKEN_LIMIT_RATIO
        )

    def add_message(self, message: BaseMessage) -> None:
        """Synchronously persist a message to the store."""
        self.memory_store.add_message(message)

    async def aadd_message(self, message: BaseMessage) -> None:
        """Asynchronously persist a message to the store."""
        await self.memory_store.aadd_message(message)

    def get_messages(
        self,
        inital_token_usage: int = 0,  # NOTE: misspelling kept for caller compat
    ) -> list[BaseMessage]:
        """Return the longest prefix of stored messages that fits the window.

        The store returns messages in descending order of creation (see
        BaseMemory.get_messages); we keep taking them until adding the next
        one would exceed ``self.context_window``.
        """
        messages = self.memory_store.get_messages()

        index = 0
        token_usage = inital_token_usage
        while index < len(messages):
            message = messages[index]
            curr_token_usage = self.tokenizer_fn(text=message.content)
            if token_usage + curr_token_usage > self.context_window:
                break

            token_usage += curr_token_usage
            index += 1

        return messages[:index]

    def clear(self) -> None:
        """Drop all stored messages."""
        self.memory_store.clear()
# Argument schemas for tool invocations: each model declares the JSON
# parameters the LLM must supply when calling the matching tool.


class ChatGPTInput(BaseModel):
    input: str


class ReplicateInput(BaseModel):
    prompt: str


class AgentInput(BaseModel):
    input: str


class WolframInput(BaseModel):
    input: str


class E2BCodeExecutorInput(BaseModel):
    python_code: str


class BrowserInput(BaseModel):
    url: str = Field(..., description="A valid url including protocol to analyze")


class GPTVisionInputModel(BaseModel):
    query: str
    image_url: str


class GPTVisionInput(BaseModel):
    # Nested: the GPT Vision tool takes one "input" object holding both
    # the question and the image url.
    input: GPTVisionInputModel


class TTS1InputModel(BaseModel):
    text: str
    voice: str


class TTS1Input(BaseModel):
    input: TTS1InputModel


class HandOffInput(BaseModel):
    reason: str


class FunctionInput(BaseModel):
    config: dict


class HTTPInput(BaseModel):
    body: Optional[dict] = {}


class TavilyInput(BaseModel):
    query: str


class ScraperInput(BaseModel):
    url: str


class AdvancedScraperInput(BaseModel):
    url: str


class GoogleSearchInput(BaseModel):
    query: str


class SECInput(BaseModel):
    ticker: str = Field(..., description="The stock ticker symbol for the company")
class AdvancedScraper(BaseTool):
    """Scrape a webpage through the Olostep real-browser API and return the
    page content as markdown."""

    name = "AdvancedScraper"
    description = "useful for quickly and easily extracting content from a webpage (uses a real browser via Olostep)"
    return_direct = False

    def _run(self, url: str) -> str:
        endpoint = "https://agent.olostep.com/olostep-p2p-incomingAPI"
        headers = {"Authorization": "Bearer " + self.metadata.get("apiKey")}

        # for more details look at => https://docs.olostep.com/api-reference/start-agent
        querystring = {
            "url": url,
            "saveMarkdown": True,
            "expandMarkdown": True,
            "waitBeforeScraping": 1,
            "fastLane": True,
            "removeCSSselectors": "default",
            "timeout": 45,
        }

        # BUG FIX: requests has no default timeout, so a stalled upstream
        # call previously hung this worker forever. The service-side scrape
        # is capped at 45s (see querystring), so allow a bit more than that.
        response = requests.get(
            endpoint, headers=headers, params=querystring, timeout=60
        )
        return response.json().get("markdown_content")

    async def _arun(self, url: str) -> str:
        # Run the blocking HTTP call on the default executor so the event
        # loop is not blocked.
        loop = asyncio.get_event_loop()
        response_text = await loop.run_in_executor(None, self._run, url)
        return response_text
from decouple import config
from langchain_community.tools import BaseTool

from app.agents.base import AgentFactory
from app.utils.prisma import prisma

API_BASE_URL = config("SUPERAGENT_API_URL")


class Agent(BaseTool):
    """Wraps a stored agent as a tool so one agent can delegate a question
    to another agent, looked up by ``metadata["agentId"]``."""

    name = "Agent as a Tool"
    description = "useful for answering questions."
    return_direct = False

    def _run(self, input: str) -> str:
        """Synchronous delegation path.

        NOTE(review): ``prisma.agent.find_unique_or_raise`` and
        ``agent_base.get_agent()`` are awaited in ``_arun`` below but not
        here; if the prisma client and AgentFactory are async-only, this
        sync path receives coroutines instead of results — confirm against
        their implementations.
        """
        agent_id = self.metadata["agentId"]
        params = self.metadata["params"]
        session_id = params.get("session_id")

        # Load the delegate agent with everything needed to construct it:
        # its LLMs, datasources (incl. vector DBs) and tools.
        agent_data = prisma.agent.find_unique_or_raise(
            where={"id": agent_id},
            include={
                "llms": {"include": {"llm": True}},
                "datasources": {
                    "include": {"datasource": {"include": {"vectorDb": True}}}
                },
                "tools": {"include": {"tool": True}},
            },
        )

        agent_base = AgentFactory(
            agent_data=agent_data,
            enable_streaming=False,
            session_id=session_id,
        )

        agent = agent_base.get_agent()
        # TODO implement invoke function in AgentExecutor
        result = agent.invoke(
            input=input,
        )
        return result.get("output")

    async def _arun(self, input: str) -> str:
        """Asynchronous delegation path (mirrors ``_run`` with awaits)."""
        agent_id = self.metadata["agentId"]
        params = self.metadata["params"]
        session_id = params.get("session_id")

        agent_data = await prisma.agent.find_unique_or_raise(
            where={"id": agent_id},
            include={
                "llms": {"include": {"llm": True}},
                "datasources": {
                    "include": {"datasource": {"include": {"vectorDb": True}}}
                },
                "tools": {"include": {"tool": True}},
            },
        )

        agent_base = AgentFactory(
            agent_data=agent_data,
            enable_streaming=False,
            session_id=session_id,
        )

        agent = await agent_base.get_agent()

        result = await agent.ainvoke(
            input=input,
        )

        return result.get("output")
the subclass.") 22 | 23 | def get_function_metadata(self) -> dict: 24 | schema = self.args_schema.schema() 25 | properties = schema.get("properties", {}) 26 | required = schema.get("required", []) 27 | 28 | for prop in properties.values(): 29 | prop.pop("title", None) 30 | 31 | function_metadata = { 32 | "type": "function", 33 | "function": { 34 | "name": self.name, 35 | "description": self.description, 36 | "parameters": { 37 | "type": "object", 38 | "properties": properties, 39 | "required": required, 40 | }, 41 | }, 42 | } 43 | return function_metadata 44 | 45 | async def run(self, **kwargs): 46 | # Validate the input arguments against the args_schema 47 | if self.args_schema: 48 | validated_args = self.args_schema(**kwargs) 49 | else: 50 | raise NotImplementedError("args_schema must be defined in the subclass.") 51 | 52 | # Call the abstract method with validated arguments 53 | return await self.arun(validated_args) 54 | 55 | @abstractmethod 56 | async def arun(self, validated_args: BaseModel): 57 | pass -------------------------------------------------------------------------------- /api/app/tools/bing_search.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from langchain_community.tools import BaseTool as LCBaseTool 4 | from langchain_community.utilities.bing_search import BingSearchAPIWrapper 5 | from pydantic import BaseModel, Field 6 | 7 | from app.tools.base import BaseTool 8 | 9 | 10 | class LCBingSearch(LCBaseTool): 11 | name = "bing search" 12 | description = "useful for searching the internet" 13 | return_direct = False 14 | 15 | def _run(self, search_query: str) -> str: 16 | bing_search_url = self.metadata["bingSearchUrl"] 17 | bing_subscription_key = self.metadata["bingSubscriptionKey"] 18 | search = BingSearchAPIWrapper( 19 | bing_search_url=bing_search_url, 20 | bing_subscription_key=bing_subscription_key, 21 | ) 22 | output = search.run(search_query) 23 | return output 24 | 25 | async 
class BingSearchArgs(BaseModel):
    """Argument schema for the BingSearch tool."""

    search_query: str = Field(..., description="A search query")


class BingSearch(BaseTool):
    """Search the web with the Bing Search API and return the raw results
    wrapped as a function-call payload."""

    args_schema = BingSearchArgs

    async def arun(self, args: BingSearchArgs) -> dict:
        # Credentials and endpoint come from the tool's metadata.
        query = args.search_query
        wrapper = BingSearchAPIWrapper(
            bing_search_url=self.metadata["bingSearchUrl"],
            bing_subscription_key=self.metadata["bingSubscriptionKey"],
        )
        # BingSearchAPIWrapper.run is blocking, so push it onto the
        # default executor instead of blocking the event loop.
        result = await asyncio.get_event_loop().run_in_executor(
            None, wrapper.run, query
        )
        return {"type": "function_call", "content": result}
class Function(BaseTool):
    """Placeholder for a user-defined function: performs no work itself,
    it only tells the LLM that the named function call is pending."""

    # NOTE(review): "cunstom" looks like a typo for "custom", but this
    # string is the tool identifier the LLM calls it by, so renaming it
    # could break existing agents — confirm before fixing.
    name = "cunstom function"
    description = "useful for doing something"
    # Return the message straight to the user rather than feeding it back
    # through another agent iteration.
    return_direct = True

    def _run(self, *args, **kwargs) -> str:
        return f"Tell the user that you are pending function {self.name}"

    async def _arun(self, *args, **kwargs) -> str:
        return f"Tell the user that you are pending function {self.name}"
class HandOff(BaseTool):
    """Signals that the conversation should be escalated to a human
    operator, returning a machine-readable JSON payload."""

    name = "human hand-off"
    description = "useful for hand-off of conversation to a human operator"
    return_direct = False

    def _run(self, reason: str) -> str:
        # Encode the hand-off request so downstream consumers can parse it.
        return json.dumps({"reasons": reason, "action": "hand-off"})

    async def _arun(self, reason: str) -> str:
        # Async variant; same payload as the sync path.
        return json.dumps({"reasons": reason, "action": "hand-off"})
class Openapi(BaseTool):
    """Query an HTTP API described by an OpenAPI spec.

    ``metadata["openApiUrl"]`` points at the spec; ``metadata["headers"]``
    is an optional JSON string of request headers.
    """

    name = "API"
    description = "useful for querying an api"
    return_direct = False

    def _build_chain(self):
        """Construct the OpenAPI function-calling chain from tool metadata.

        Shared by the sync and async paths, which previously duplicated
        this construction.
        """
        openapi_url = self.metadata["openApiUrl"]
        headers = self.metadata.get("headers")
        return get_openapi_chain(
            spec=openapi_url, headers=json.loads(headers) if headers else None
        )

    def _run(self, input: str) -> str:
        # Consistency fix: _arun already reports failures as text so the
        # agent can surface them; mirror that here instead of raising.
        try:
            output = self._build_chain().run(input)
        except Exception as e:
            output = str(e)
        return output

    async def _arun(self, input: str) -> str:
        try:
            agent = self._build_chain()
            # get_openapi_chain's run() is blocking; off-load it so the
            # event loop stays responsive. get_running_loop() replaces the
            # deprecated get_event_loop() inside coroutines.
            loop = asyncio.get_running_loop()
            output = await loop.run_in_executor(None, agent.run, input)
        except Exception as e:
            output = str(e)
        return output
def create_function_calling_prompt(input: str, tools: List[Type["Tool"]]) -> str:
    """
    Create a custom prompt for selecting the most suitable function and parameters.

    :param input: The user's request in natural language.
    :param tools: A list of Tool class types.
    :return: A string representing the custom prompt.
    """

    prompt = (
        "As an AI assistant, please select the most suitable function and parameters "
        # Bug fix: the adjacent literals previously concatenated to
        # "input."Provide" — a missing space in the emitted prompt.
        "from the list of available functions below, based on the user's input. "
        "Provide your response in JSON format.\n\n"
        f"Input: {input}\n\n"
    )
    prompt += "Available functions:\n\n"
    for tool_cls in tools:
        tool_name = tool_cls.name
        tool_description = tool_cls.description
        # args_model is expected to be a pydantic model class; .schema()
        # yields the JSON schema whose "properties" describe the arguments.
        tool_params = tool_cls.args_model.schema()["properties"]
        prompt += f"{tool_name}:\n"
        prompt += f"  description: {tool_description}\n"
        prompt += "  args:\n"
        for param_name, param_details in tool_params.items():
            description = param_details.get("description", "No description provided")
            prompt += f"    {param_name}: {description}\n"
    return prompt
def create_function_response_prompt(input: str, context: str) -> str:
    """
    Create a custom prompt for returning a Tool response.

    :param input: The user's request in natural language.
    :param context: The context provided by the Tool.
    :return: A string representing the custom prompt.
    """

    # Grammar fix in the emitted prompt: was "You are an helpful".
    prompt = (
        "You are a helpful AI Assistant, answer the question by "
        "providing the most suitable response based on the context provided.\n\n"
        f"Input: {input}\n\n"
        f"Context:\n{context}"
    )
    return prompt
class Scraper(BaseTool):
    """Extract the text content of a webpage via the ScrapingBee API."""

    name = "Scraper"
    description = "useful for extracting content from a webpage"
    return_direct = False

    def _run(self, url: str) -> str:
        """Fetch *url* with JS rendering enabled and return the page body text."""
        scraping_params = {
            "extract_rules": {"text": "body"},
            "render_js": True,
            "wait_browser": "load",
        }
        bee = ScrapingBeeClient(api_key=self.metadata.get("apiKey"))
        return bee.get(url, params=scraping_params).text

    async def _arun(self, url: str) -> str:
        """Async wrapper: run the blocking `_run` in the default executor."""
        event_loop = asyncio.get_event_loop()
        return await event_loop.run_in_executor(None, self._run, url)
class Tavily(BaseTool):
    """Web search tool backed by the Tavily search API.

    The API key comes from ``metadata["apiKey"]`` or, as a fallback, the
    ``TAVILY_API_KEY`` environment setting. Returns a list of
    ``{"url": ..., "content": ...}`` dicts.
    """

    # Bug fix: name/description were copy-pasted from the PubMed tool
    # ("PubMed® search" / medical publications), which misled agent tool
    # selection into treating this as a medical-literature search.
    name = "Tavily search"
    description = "useful for searching the web for information"
    return_direct = False

    def _search(self, query: str) -> list:
        """Run an advanced-depth Tavily search and shape the results."""
        tavily = TavilyClient(
            api_key=self.metadata.get("apiKey") or config("TAVILY_API_KEY")
        )
        response = tavily.search(query=query, search_depth="advanced")
        # Bug fix: tavily.search returns a dict, so the old sync path's
        # `response.results` raised AttributeError; index it like the
        # async path did.
        return [
            {"url": obj["url"], "content": obj["content"]}
            for obj in response.get("results", [])
        ]

    def _run(self, query: str) -> list:
        return self._search(query)

    async def _arun(self, query: str) -> list:
        # Off-load the blocking client call so the event loop stays free.
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, self._search, query)
class WolframAlpha(BaseTool):
    """Answer computational and mathematical queries via the Wolfram Alpha API."""

    name = "Wolfram Alpha"
    description = "useful for calculation and computation"
    return_direct = False

    def _run(self, input: str) -> str:
        """Synchronously query Wolfram Alpha using the configured app id."""
        wrapper = WolframAlphaAPIWrapper(wolfram_alpha_appid=self.metadata["appId"])
        return wrapper.run(input)

    async def _arun(self, input: str) -> str:
        """Async variant: off-load the blocking API call to the default executor."""
        wrapper = WolframAlphaAPIWrapper(wolfram_alpha_appid=self.metadata["appId"])
        event_loop = asyncio.get_event_loop()
        return await event_loop.run_in_executor(None, wrapper.run, input)
class ZapierNLA(BaseTool):
    """Perform actions (sending emails, scheduling meetings, ...) via Zapier NLA."""

    name = "Zapier"
    description = (
        # Grammar fix: was "actions such sending emails".
        "useful for performing actions such as sending emails, scheduling meetings etc."
    )
    return_direct = False

    def _build_agent(self):
        """Construct the zero-shot ReAct agent over the Zapier toolkit.

        Shared by the sync and async entry points, which previously
        duplicated this whole setup verbatim.
        """
        zapier_nla_api_key = self.metadata["zapierNlaApiKey"]
        zapier = ZapierNLAWrapper(zapier_nla_api_key=zapier_nla_api_key)
        toolkit = ZapierToolkit.from_zapier_nla_wrapper(zapier)
        return initialize_agent(
            toolkit.get_tools(),
            llm=ChatOpenAI(openai_api_key=self.metadata["openaiApiKey"], model="gpt-4"),
            agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION,
            verbose=True,
        )

    def _run(self, input: str) -> str:
        return self._build_agent().run(input)

    async def _arun(self, input: str) -> str:
        return await self._build_agent().arun(input)
def track_agent_invocation(data: dict):
    """Send an "Invoked Agent" analytics event to Segment.

    ``data`` is a plain dict. The previous ``AgentInvocationData``
    annotation was wrong: the body uses ``data.get(...)`` and
    ``data[...]`` (pydantic models support neither) and reads a
    ``workflow_id`` key the model does not declare. Expected keys mirror
    the ``AgentInvocationData`` fields plus an optional ``workflow_id``.
    """
    # Flatten the agent's (tool, response) pairs into serializable dicts;
    # message_log is stringified because the underlying objects are not
    # JSON-serializable.
    intermediate_steps_to_obj = [
        {
            **vars(tool_call),
            "message_log": str(tool_call.message_log),
            "response": response,
        }
        for tool_call, response in data.get("intermediate_steps", [])
    ]

    analytics.track(
        data["user_id"],
        "Invoked Agent",
        {
            # NOTE(review): `.get("agent", {}).id` still raises if "agent"
            # is absent ({} has no .id) — callers appear to always supply
            # one; confirm before hardening.
            "agentId": data.get("agent", {}).id,
            "workflowId": data.get("workflow_id", None),
            "llm_model": data.get("agent", {}).llmModel,
            "sessionId": data["session_id"],
            # default http status code is 200
            "response": {
                "status_code": data.get("status_code", 200),
                "error": data.get("error", None),
            },
            "output": data.get("output", None),
            "input": data.get("input", None),
            "intermediate_steps": intermediate_steps_to_obj,
            "prompt_tokens": data.get("prompt_tokens", 0),
            "completion_tokens": data.get("completion_tokens", 0),
            "prompt_tokens_cost_usd": data.get("prompt_tokens_cost_usd", 0),
            "completion_tokens_cost_usd": data.get("completion_tokens_cost_usd", 0),
        },
    )
# Maps internal LLM enum names to OpenAI model identifiers.
LLM_MAPPING = {
    "GPT_3_5_TURBO_16K_0613": "gpt-3.5-turbo-16k-0613",
    "GPT_3_5_TURBO_0613": "gpt-3.5-turbo-0613",
    "GPT_3_5_TURBO_1106": "gpt-3.5-turbo-1106",
    "GPT_4_0613": "gpt-4-0613",
    "GPT_4_32K_0613": "gpt-4-32k-0613",
    "GPT_4_1106_PREVIEW": "gpt-4-1106-preview",
    "GPT_4_TURBO_PREVIEW": "gpt-4-turbo-preview",
    "GPT_3_5_TURBO_0125": "gpt-3.5-turbo-0125",
}

# Reverse lookup: OpenAI model identifier -> internal enum name.
LLM_REVERSE_MAPPING = {v: k for k, v in LLM_MAPPING.items()}


# Models available per provider. Every entry must be a key of LLM_MAPPING.
LLM_PROVIDER_MAPPING = {
    "OPENAI": [
        "GPT_3_5_TURBO_16K_0613",
        "GPT_3_5_TURBO_0613",
        "GPT_3_5_TURBO_1106",
        "GPT_3_5_TURBO_0125",
        "GPT_4_0613",
        "GPT_4_32K_0613",
        "GPT_4_1106_PREVIEW",
        # Bug fix: was "GPT_4_TURBO_PREIVEW" (typo), which has no
        # corresponding LLM_MAPPING entry and so could never resolve
        # to a real model name.
        "GPT_4_TURBO_PREVIEW",
    ]
}
class VectorStoreBase(ABC):
    """Abstract interface that every vector-store backend must implement."""

    @abstractmethod
    def embed_documents(self):
        """Embed documents and persist their vectors into the store."""
        pass

    @abstractmethod
    def query_documents(self):
        """Query the store for documents relevant to an input.

        Bug fix: this abstract method was declared without ``self``,
        so any conforming override had a mismatched signature.
        """
        pass

    @abstractmethod
    def delete(self):
        """Delete documents/vectors from the store."""
        pass
class WorkflowBase:
    """Run a sequence of agent steps, feeding each step's output into the next."""

    def __init__(
        self,
        workflow_steps: list[Any],
        callbacks: List[CustomAsyncIteratorCallbackHandler],
        session_id: str,
        constructor_callbacks: List[
            AsyncCallbackHandler | LangchainCallbackHandler
        ] = None,
        enable_streaming: bool = False,
    ):
        self.workflow_steps = workflow_steps
        self.enable_streaming = enable_streaming
        self.session_id = session_id
        self.constructor_callbacks = constructor_callbacks
        self.callbacks = callbacks

    async def arun(self, input: Any):
        """Execute every step in order.

        Returns a dict with the per-step responses under ``"steps"`` and
        the final step's output under ``"output"``.
        """
        step_results = []
        current_input = input

        for index, step in enumerate(self.workflow_steps):
            schema = step["output_schema"]
            factory = AgentFactory(
                enable_streaming=self.enable_streaming,
                callbacks=self.constructor_callbacks,
                session_id=self.session_id,
                agent_data=step["agent_data"],
                output_schema=schema,
            )
            agent = await factory.get_agent()

            response = await agent.ainvoke(
                input=current_input,
                config={
                    "callbacks": self.callbacks[index],
                },
            )

            if schema:
                # TODO: throw error if output is not valid
                try:
                    response["output"] = SimpleJsonOutputParser().parse(
                        text=response["output"]
                    )
                except Exception as e:
                    logger.error(f"Error parsing output: {e}")
                    response["output"] = {}

            current_input = response.get("output")
            step_results.append(response)

        return {"steps": step_results, "output": current_input}
| }, 56 | ) 57 | if output_schema: 58 | # TODO: throw error if output is not valid 59 | parser = SimpleJsonOutputParser() 60 | try: 61 | agent_response["output"] = parser.parse( 62 | text=agent_response["output"] 63 | ) 64 | except Exception as e: 65 | logger.error(f"Error parsing output: {e}") 66 | agent_response["output"] = {} 67 | 68 | previous_output = agent_response.get("output") 69 | steps_output.append(agent_response) 70 | 71 | return {"steps": steps_output, "output": previous_output} -------------------------------------------------------------------------------- /api/lint-and-format.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # getting changed files (only staged) 4 | changes=$(git diff --name-only --cached | grep '^libs/hmai.*\.py$' | sed 's|^libs/hmai/||') 5 | 6 | lint() { 7 | poetry run black $changes 8 | # sort imports 9 | poetry run ruff check --select I --fix $changes 10 | # format code 11 | poetry run ruff check --fix $changes 12 | poetry run vulture $changes 13 | git add $changes 14 | echo "Changes applied"; 15 | } 16 | 17 | format() { 18 | poetry run black $changes --check 19 | poetry run ruff $changes 20 | poetry run vulture $changes 21 | } 22 | 23 | if [ -n "$changes" ]; then 24 | case "$1" in 25 | lint) 26 | lint 27 | ;; 28 | format) 29 | format 30 | ;; 31 | *) 32 | echo "Invalid command. 
Usage: $0 [lint|format]" 33 | exit 1 34 | ;; 35 | esac 36 | else 37 | echo "No changes"; 38 | fi 39 | -------------------------------------------------------------------------------- /api/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hmai", 3 | "lockfileVersion": 3, 4 | "requires": true, 5 | "packages": {} 6 | } 7 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230822214343_agent_llm/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateEnum 2 | CREATE TYPE "DocumentType" AS ENUM ('TXT', 'PDF', 'CSV', 'YOUTUBE', 'OPENAPI', 'URL', 'MARKDOWN', 'FIRESTORE', 'PSYCHIC', 'GITHUB_REPOSITORY', 'WEBPAGE', 'STRIPE', 'AIRTABLE', 'SITEMAP', 'NOTION'); 3 | 4 | -- CreateEnum 5 | CREATE TYPE "ToolType" AS ENUM ('BROWSER', 'SEARCH', 'WOLFRAM_ALPHA', 'REPLICATE', 'ZAPIER_NLA', 'AGENT', 'OPENAPI', 'CHATGPT_PLUGIN', 'METAPHOR'); 6 | 7 | -- CreateTable 8 | CREATE TABLE "Agent" ( 9 | "id" SERIAL NOT NULL, 10 | "name" TEXT NOT NULL, 11 | "isActive" BOOLEAN NOT NULL DEFAULT false, 12 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 13 | "updatedAt" TIMESTAMP(3) NOT NULL, 14 | "llmId" INTEGER NOT NULL, 15 | 16 | CONSTRAINT "Agent_pkey" PRIMARY KEY ("id") 17 | ); 18 | 19 | -- CreateTable 20 | CREATE TABLE "LLM" ( 21 | "id" SERIAL NOT NULL, 22 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 23 | "updatedAt" TIMESTAMP(3) NOT NULL, 24 | 25 | CONSTRAINT "LLM_pkey" PRIMARY KEY ("id") 26 | ); 27 | 28 | -- CreateIndex 29 | CREATE UNIQUE INDEX "Agent_llmId_key" ON "Agent"("llmId"); 30 | 31 | -- AddForeignKey 32 | ALTER TABLE "Agent" ADD CONSTRAINT "Agent_llmId_fkey" FOREIGN KEY ("llmId") REFERENCES "LLM"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 33 | -------------------------------------------------------------------------------- 
/api/prisma/migrations/20230823195402_add_llm/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - Added the required column `apiKey` to the `LLM` table without a default value. This is not possible if the table is not empty. 5 | 6 | */ 7 | -- CreateEnum 8 | CREATE TYPE "LLMProvider" AS ENUM ('OPENAI'); 9 | 10 | -- CreateEnum 11 | CREATE TYPE "LLMModel" AS ENUM ('GPT_3_5_TURBO_16K_0613', 'GPT_3_5_TURBO_0613', 'GPT_4_0613', 'GPT_4_32K_0613'); 12 | 13 | -- AlterTable 14 | ALTER TABLE "LLM" ADD COLUMN "apiKey" TEXT NOT NULL, 15 | ADD COLUMN "model" "LLMModel" NOT NULL DEFAULT 'GPT_3_5_TURBO_16K_0613', 16 | ADD COLUMN "options" JSONB, 17 | ADD COLUMN "provider" "LLMProvider" NOT NULL DEFAULT 'OPENAI'; 18 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230823200614_change_ids/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - The primary key for the `Agent` table will be changed. If it partially fails, the table could be left without primary key constraint. 5 | - The primary key for the `LLM` table will be changed. If it partially fails, the table could be left without primary key constraint. 
6 | 7 | */ 8 | -- DropForeignKey 9 | ALTER TABLE "Agent" DROP CONSTRAINT "Agent_llmId_fkey"; 10 | 11 | -- AlterTable 12 | ALTER TABLE "Agent" DROP CONSTRAINT "Agent_pkey", 13 | ALTER COLUMN "id" DROP DEFAULT, 14 | ALTER COLUMN "id" SET DATA TYPE TEXT, 15 | ALTER COLUMN "llmId" SET DATA TYPE TEXT, 16 | ADD CONSTRAINT "Agent_pkey" PRIMARY KEY ("id"); 17 | DROP SEQUENCE "Agent_id_seq"; 18 | 19 | -- AlterTable 20 | ALTER TABLE "LLM" DROP CONSTRAINT "LLM_pkey", 21 | ALTER COLUMN "id" DROP DEFAULT, 22 | ALTER COLUMN "id" SET DATA TYPE TEXT, 23 | ADD CONSTRAINT "LLM_pkey" PRIMARY KEY ("id"); 24 | DROP SEQUENCE "LLM_id_seq"; 25 | 26 | -- AddForeignKey 27 | ALTER TABLE "Agent" ADD CONSTRAINT "Agent_llmId_fkey" FOREIGN KEY ("llmId") REFERENCES "LLM"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 28 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230823211516_llm_options_default/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "LLM" ALTER COLUMN "options" SET DEFAULT '{}'; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230823212925_remove_llm_options_default/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "LLM" ALTER COLUMN "options" DROP DEFAULT; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230824065536_add_agent_prompt/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Agent" ADD COLUMN "prompt" TEXT; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230824070125_llm_agent_one_to_many/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | 
Warnings: 3 | 4 | - You are about to drop the column `llmId` on the `Agent` table. All the data in the column will be lost. 5 | 6 | */ 7 | -- DropForeignKey 8 | ALTER TABLE "Agent" DROP CONSTRAINT "Agent_llmId_fkey"; 9 | 10 | -- DropIndex 11 | DROP INDEX "Agent_llmId_key"; 12 | 13 | -- AlterTable 14 | ALTER TABLE "Agent" DROP COLUMN "llmId"; 15 | 16 | -- AlterTable 17 | ALTER TABLE "LLM" ADD COLUMN "agentId" TEXT; 18 | 19 | -- AddForeignKey 20 | ALTER TABLE "LLM" ADD CONSTRAINT "LLM_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agent"("id") ON DELETE SET NULL ON UPDATE CASCADE; 21 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230824070755_add_agent_llm_mapping/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - You are about to drop the column `agentId` on the `LLM` table. All the data in the column will be lost. 5 | 6 | */ 7 | -- DropForeignKey 8 | ALTER TABLE "LLM" DROP CONSTRAINT "LLM_agentId_fkey"; 9 | 10 | -- AlterTable 11 | ALTER TABLE "LLM" DROP COLUMN "agentId"; 12 | 13 | -- CreateTable 14 | CREATE TABLE "AgentLLM" ( 15 | "agentId" TEXT NOT NULL, 16 | "llmId" TEXT NOT NULL, 17 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 18 | "updatedAt" TIMESTAMP(3) NOT NULL, 19 | 20 | CONSTRAINT "AgentLLM_pkey" PRIMARY KEY ("agentId","llmId") 21 | ); 22 | 23 | -- AddForeignKey 24 | ALTER TABLE "AgentLLM" ADD CONSTRAINT "AgentLLM_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agent"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 25 | 26 | -- AddForeignKey 27 | ALTER TABLE "AgentLLM" ADD CONSTRAINT "AgentLLM_llmId_fkey" FOREIGN KEY ("llmId") REFERENCES "LLM"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 28 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230824084308_api_user/migration.sql: 
-------------------------------------------------------------------------------- 1 | -- CreateTable 2 | CREATE TABLE "ApiUser" ( 3 | "id" TEXT NOT NULL, 4 | "token" TEXT NOT NULL, 5 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 6 | "updatedAt" TIMESTAMP(3) NOT NULL, 7 | 8 | CONSTRAINT "ApiUser_pkey" PRIMARY KEY ("id") 9 | ); 10 | 11 | -- CreateIndex 12 | CREATE UNIQUE INDEX "ApiUser_token_key" ON "ApiUser"("token"); 13 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230824103528_api_user_token_optional/migration.sql: -------------------------------------------------------------------------------- 1 | -- DropIndex 2 | DROP INDEX "ApiUser_token_key"; 3 | 4 | -- AlterTable 5 | ALTER TABLE "ApiUser" ALTER COLUMN "token" DROP NOT NULL; 6 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230824110611_add_api_user_to_models/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - Added the required column `apiUserId` to the `Agent` table without a default value. This is not possible if the table is not empty. 5 | - Added the required column `apiUserId` to the `LLM` table without a default value. This is not possible if the table is not empty. 
6 | 7 | */ 8 | -- AlterTable 9 | ALTER TABLE "Agent" ADD COLUMN "apiUserId" TEXT NOT NULL; 10 | 11 | -- AlterTable 12 | ALTER TABLE "LLM" ADD COLUMN "apiUserId" TEXT NOT NULL; 13 | 14 | -- AddForeignKey 15 | ALTER TABLE "Agent" ADD CONSTRAINT "Agent_apiUserId_fkey" FOREIGN KEY ("apiUserId") REFERENCES "ApiUser"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 16 | 17 | -- AddForeignKey 18 | ALTER TABLE "LLM" ADD CONSTRAINT "LLM_apiUserId_fkey" FOREIGN KEY ("apiUserId") REFERENCES "ApiUser"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 19 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230824114232_add_datasources/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateEnum 2 | CREATE TYPE "DatasourceType" AS ENUM ('TXT', 'PDF', 'CSV', 'YOUTUBE', 'FUNCTION'); 3 | 4 | -- CreateTable 5 | CREATE TABLE "Datasource" ( 6 | "id" TEXT NOT NULL, 7 | "name" TEXT NOT NULL, 8 | "description" TEXT, 9 | "url" TEXT, 10 | "type" "DatasourceType" NOT NULL, 11 | "apiUserId" TEXT NOT NULL, 12 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 13 | "updatedAt" TIMESTAMP(3) NOT NULL, 14 | 15 | CONSTRAINT "Datasource_pkey" PRIMARY KEY ("id") 16 | ); 17 | 18 | -- CreateTable 19 | CREATE TABLE "AgentDatasource" ( 20 | "agentId" TEXT NOT NULL, 21 | "datasourceId" TEXT NOT NULL, 22 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 23 | "updatedAt" TIMESTAMP(3) NOT NULL, 24 | 25 | CONSTRAINT "AgentDatasource_pkey" PRIMARY KEY ("agentId","datasourceId") 26 | ); 27 | 28 | -- AddForeignKey 29 | ALTER TABLE "Datasource" ADD CONSTRAINT "Datasource_apiUserId_fkey" FOREIGN KEY ("apiUserId") REFERENCES "ApiUser"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 30 | 31 | -- AddForeignKey 32 | ALTER TABLE "AgentDatasource" ADD CONSTRAINT "AgentDatasource_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agent"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 33 | 34 | -- 
AddForeignKey 35 | ALTER TABLE "AgentDatasource" ADD CONSTRAINT "AgentDatasource_datasourceId_fkey" FOREIGN KEY ("datasourceId") REFERENCES "Datasource"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 36 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230831081114_datasource_metadata/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | -- This migration adds more than one value to an enum. 3 | -- With PostgreSQL versions 11 and earlier, this is not possible 4 | -- in a single migration. This can be worked around by creating 5 | -- multiple migrations, each migration adding only one value to 6 | -- the enum. 7 | 8 | 9 | ALTER TYPE "DatasourceType" ADD VALUE 'GITHUB_REPOSITORY'; 10 | ALTER TYPE "DatasourceType" ADD VALUE 'MARKDOWN'; 11 | ALTER TYPE "DatasourceType" ADD VALUE 'WEBPAGE'; 12 | ALTER TYPE "DatasourceType" ADD VALUE 'AIRTABLE'; 13 | 14 | -- AlterTable 15 | ALTER TABLE "Datasource" ADD COLUMN "metadata" JSONB; 16 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230831092903_datasource_metadata_string/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Datasource" ALTER COLUMN "metadata" SET DATA TYPE TEXT; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230831105225_add_datasource_types/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | -- This migration adds more than one value to an enum. 3 | -- With PostgreSQL versions 11 and earlier, this is not possible 4 | -- in a single migration. This can be worked around by creating 5 | -- multiple migrations, each migration adding only one value to 6 | -- the enum. 
7 | 8 | 9 | ALTER TYPE "DatasourceType" ADD VALUE 'STRIPE'; 10 | ALTER TYPE "DatasourceType" ADD VALUE 'NOTION'; 11 | ALTER TYPE "DatasourceType" ADD VALUE 'SITEMAP'; 12 | ALTER TYPE "DatasourceType" ADD VALUE 'URL'; 13 | 14 | -- DropEnum 15 | DROP TYPE "DocumentType"; 16 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230901072519_agent_tools/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateTable 2 | CREATE TABLE "Tool" ( 3 | "id" TEXT NOT NULL, 4 | "name" TEXT NOT NULL, 5 | "description" TEXT NOT NULL, 6 | "type" "ToolType" NOT NULL, 7 | "metadata" TEXT NOT NULL, 8 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 9 | "updatedAt" TIMESTAMP(3) NOT NULL, 10 | "apiUserId" TEXT NOT NULL, 11 | 12 | CONSTRAINT "Tool_pkey" PRIMARY KEY ("id") 13 | ); 14 | 15 | -- CreateTable 16 | CREATE TABLE "AgentTool" ( 17 | "agentId" TEXT NOT NULL, 18 | "toolId" TEXT NOT NULL, 19 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 20 | "updatedAt" TIMESTAMP(3) NOT NULL, 21 | "datasourceId" TEXT, 22 | 23 | CONSTRAINT "AgentTool_pkey" PRIMARY KEY ("agentId","toolId") 24 | ); 25 | 26 | -- AddForeignKey 27 | ALTER TABLE "Tool" ADD CONSTRAINT "Tool_apiUserId_fkey" FOREIGN KEY ("apiUserId") REFERENCES "ApiUser"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 28 | 29 | -- AddForeignKey 30 | ALTER TABLE "AgentTool" ADD CONSTRAINT "AgentTool_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agent"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 31 | 32 | -- AddForeignKey 33 | ALTER TABLE "AgentTool" ADD CONSTRAINT "AgentTool_toolId_fkey" FOREIGN KEY ("toolId") REFERENCES "Tool"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 34 | 35 | -- AddForeignKey 36 | ALTER TABLE "AgentTool" ADD CONSTRAINT "AgentTool_datasourceId_fkey" FOREIGN KEY ("datasourceId") REFERENCES "Datasource"("id") ON DELETE SET NULL ON UPDATE CASCADE; 37 | 
-------------------------------------------------------------------------------- /api/prisma/migrations/20230901115947_tool_bing_search/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - The values [SEARCH] on the enum `ToolType` will be removed. If these variants are still used in the database, this will fail. 5 | 6 | */ 7 | -- AlterEnum 8 | BEGIN; 9 | CREATE TYPE "ToolType_new" AS ENUM ('BROWSER', 'BING_SEARCH', 'REPLICATE', 'WOLFRAM_ALPHA', 'ZAPIER_NLA', 'AGENT', 'OPENAPI', 'CHATGPT_PLUGIN', 'METAPHOR'); 10 | ALTER TABLE "Tool" ALTER COLUMN "type" TYPE "ToolType_new" USING ("type"::text::"ToolType_new"); 11 | ALTER TYPE "ToolType" RENAME TO "ToolType_old"; 12 | ALTER TYPE "ToolType_new" RENAME TO "ToolType"; 13 | DROP TYPE "ToolType_old"; 14 | COMMIT; 15 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230901124505_remove_redundant_fields/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - You are about to drop the column `datasourceId` on the `AgentTool` table. All the data in the column will be lost. 
5 | 6 | */ 7 | -- DropForeignKey 8 | ALTER TABLE "AgentTool" DROP CONSTRAINT "AgentTool_datasourceId_fkey"; 9 | 10 | -- AlterTable 11 | ALTER TABLE "AgentTool" DROP COLUMN "datasourceId"; 12 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230901182450_pubmed_tool/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'PUBMED'; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230901183619_tool_metadata_optional/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Tool" ALTER COLUMN "metadata" DROP NOT NULL; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230901184227_tool_metadata_mandatory/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - Made the column `metadata` on table `Tool` required. This step will fail if there are existing NULL values in that column. 
5 | 6 | */ 7 | -- AlterTable 8 | ALTER TABLE "Tool" ALTER COLUMN "metadata" SET NOT NULL; 9 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230904062421_add_worflow/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateTable 2 | CREATE TABLE "Workflow" ( 3 | "id" TEXT NOT NULL, 4 | "name" TEXT NOT NULL, 5 | "description" TEXT, 6 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 7 | "updatedAt" TIMESTAMP(3) NOT NULL, 8 | 9 | CONSTRAINT "Workflow_pkey" PRIMARY KEY ("id") 10 | ); 11 | 12 | -- CreateTable 13 | CREATE TABLE "WorkflowStep" ( 14 | "id" TEXT NOT NULL, 15 | "order" INTEGER NOT NULL, 16 | "workflowId" TEXT NOT NULL, 17 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 18 | "updatedAt" TIMESTAMP(3) NOT NULL, 19 | "input" TEXT NOT NULL, 20 | "output" TEXT NOT NULL, 21 | "agentId" TEXT NOT NULL, 22 | 23 | CONSTRAINT "WorkflowStep_pkey" PRIMARY KEY ("id") 24 | ); 25 | 26 | -- AddForeignKey 27 | ALTER TABLE "WorkflowStep" ADD CONSTRAINT "WorkflowStep_workflowId_fkey" FOREIGN KEY ("workflowId") REFERENCES "Workflow"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 28 | 29 | -- AddForeignKey 30 | ALTER TABLE "WorkflowStep" ADD CONSTRAINT "WorkflowStep_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agent"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 31 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230904063106_workflow_api_user/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - Added the required column `apiUserId` to the `Workflow` table without a default value. This is not possible if the table is not empty. 
5 | 6 | */ 7 | -- AlterTable 8 | ALTER TABLE "Workflow" ADD COLUMN "apiUserId" TEXT NOT NULL; 9 | 10 | -- AddForeignKey 11 | ALTER TABLE "Workflow" ADD CONSTRAINT "Workflow_apiUserId_fkey" FOREIGN KEY ("apiUserId") REFERENCES "ApiUser"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 12 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230904074324_add_workflow_llm/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateTable 2 | CREATE TABLE "WorkflowLLM" ( 3 | "worflowId" TEXT NOT NULL, 4 | "llmId" TEXT NOT NULL, 5 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 6 | "updatedAt" TIMESTAMP(3) NOT NULL, 7 | 8 | CONSTRAINT "WorkflowLLM_pkey" PRIMARY KEY ("worflowId","llmId") 9 | ); 10 | 11 | -- AddForeignKey 12 | ALTER TABLE "WorkflowLLM" ADD CONSTRAINT "WorkflowLLM_worflowId_fkey" FOREIGN KEY ("worflowId") REFERENCES "Workflow"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 13 | 14 | -- AddForeignKey 15 | ALTER TABLE "WorkflowLLM" ADD CONSTRAINT "WorkflowLLM_llmId_fkey" FOREIGN KEY ("llmId") REFERENCES "LLM"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 16 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230904082445_fix_workflow_misspelling/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - The primary key for the `WorkflowLLM` table will be changed. If it partially fails, the table could be left without primary key constraint. 5 | - You are about to drop the column `worflowId` on the `WorkflowLLM` table. All the data in the column will be lost. 6 | - Added the required column `workflowId` to the `WorkflowLLM` table without a default value. This is not possible if the table is not empty. 
7 | 8 | */ 9 | -- DropForeignKey 10 | ALTER TABLE "WorkflowLLM" DROP CONSTRAINT "WorkflowLLM_worflowId_fkey"; 11 | 12 | -- AlterTable 13 | ALTER TABLE "WorkflowLLM" DROP CONSTRAINT "WorkflowLLM_pkey", 14 | DROP COLUMN "worflowId", 15 | ADD COLUMN "workflowId" TEXT NOT NULL, 16 | ADD CONSTRAINT "WorkflowLLM_pkey" PRIMARY KEY ("workflowId", "llmId"); 17 | 18 | -- AddForeignKey 19 | ALTER TABLE "WorkflowLLM" ADD CONSTRAINT "WorkflowLLM_workflowId_fkey" FOREIGN KEY ("workflowId") REFERENCES "Workflow"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 20 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230904083153_remove_workflow_llm/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - You are about to drop the `WorkflowLLM` table. If the table is not empty, all the data it contains will be lost. 5 | 6 | */ 7 | -- DropForeignKey 8 | ALTER TABLE "WorkflowLLM" DROP CONSTRAINT "WorkflowLLM_llmId_fkey"; 9 | 10 | -- DropForeignKey 11 | ALTER TABLE "WorkflowLLM" DROP CONSTRAINT "WorkflowLLM_workflowId_fkey"; 12 | 13 | -- DropTable 14 | DROP TABLE "WorkflowLLM"; 15 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230907080928_remove_llm_model/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - You are about to drop the column `model` on the `LLM` table. All the data in the column will be lost. 
5 | 6 | */ 7 | -- AlterTable 8 | ALTER TABLE "Agent" ADD COLUMN "llmModel" "LLMModel" NOT NULL DEFAULT 'GPT_3_5_TURBO_16K_0613'; 9 | 10 | -- AlterTable 11 | ALTER TABLE "LLM" DROP COLUMN "model"; 12 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230907090814_agent_description/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Agent" ADD COLUMN "description" TEXT NOT NULL DEFAULT 'Add a agent description...'; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230912073334_tool_return_direct/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Tool" ADD COLUMN "returnDirect" BOOLEAN NOT NULL DEFAULT false; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230913070205_agent_avatar/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Agent" ADD COLUMN "avatar" TEXT; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230915080507_datasource_status/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateEnum 2 | CREATE TYPE "DatasourceStatus" AS ENUM ('IN_PROGRESS', 'DONE'); 3 | 4 | -- AlterTable 5 | ALTER TABLE "Datasource" ADD COLUMN "status" "DatasourceStatus" NOT NULL DEFAULT 'IN_PROGRESS'; 6 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230917191411_datasource_status_failed/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "DatasourceStatus" ADD VALUE 'FAILED'; 3 | 
-------------------------------------------------------------------------------- /api/prisma/migrations/20230918070039_agent_cascade_delete/migration.sql: -------------------------------------------------------------------------------- 1 | -- DropForeignKey 2 | ALTER TABLE "AgentDatasource" DROP CONSTRAINT "AgentDatasource_agentId_fkey"; 3 | 4 | -- DropForeignKey 5 | ALTER TABLE "AgentLLM" DROP CONSTRAINT "AgentLLM_agentId_fkey"; 6 | 7 | -- DropForeignKey 8 | ALTER TABLE "AgentTool" DROP CONSTRAINT "AgentTool_agentId_fkey"; 9 | 10 | -- AddForeignKey 11 | ALTER TABLE "AgentDatasource" ADD CONSTRAINT "AgentDatasource_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agent"("id") ON DELETE CASCADE ON UPDATE CASCADE; 12 | 13 | -- AddForeignKey 14 | ALTER TABLE "AgentTool" ADD CONSTRAINT "AgentTool_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agent"("id") ON DELETE CASCADE ON UPDATE CASCADE; 15 | 16 | -- AddForeignKey 17 | ALTER TABLE "AgentLLM" ADD CONSTRAINT "AgentLLM_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agent"("id") ON DELETE CASCADE ON UPDATE CASCADE; 18 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230920060753_add_pptx_datasource_type/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "DatasourceType" ADD VALUE 'PPTX'; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230920070547_datasource_docx/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "DatasourceType" ADD VALUE 'DOCX'; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230920072352_datasource_xlsx/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "DatasourceType" ADD VALUE 
'XLSX'; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230920081659_datasource_google_doc/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "DatasourceType" ADD VALUE 'GOOGLE_DOC'; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230921064724_code_executor/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'CODE_EXECUTOR'; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20230928102507_api_user_email/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "ApiUser" ADD COLUMN "email" TEXT; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20231001110155_llm_azure_openai/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "LLMProvider" ADD VALUE 'AZURE_OPENAI'; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20231001161850_datassource_content/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Datasource" ADD COLUMN "content" TEXT; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20231016065521_agent_initial_message/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Agent" ADD COLUMN "initialMessage" TEXT; 3 | -------------------------------------------------------------------------------- 
/api/prisma/migrations/20231029210807_tool_openbb/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'OPENBB'; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20231106194639_gpt_4_1106_preview/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "LLMModel" ADD VALUE 'GPT_4_1106_preview'; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20231106194841_gpt_4_1106_preview_fix/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - The values [GPT_4_1106_preview] on the enum `LLMModel` will be removed. If these variants are still used in the database, this will fail. 5 | 6 | */ 7 | -- AlterEnum 8 | BEGIN; 9 | CREATE TYPE "LLMModel_new" AS ENUM ('GPT_3_5_TURBO_16K_0613', 'GPT_3_5_TURBO_0613', 'GPT_4_0613', 'GPT_4_32K_0613', 'GPT_4_1106_PREVIEW'); 10 | ALTER TABLE "Agent" ALTER COLUMN "llmModel" DROP DEFAULT; 11 | ALTER TABLE "Agent" ALTER COLUMN "llmModel" TYPE "LLMModel_new" USING ("llmModel"::text::"LLMModel_new"); 12 | ALTER TYPE "LLMModel" RENAME TO "LLMModel_old"; 13 | ALTER TYPE "LLMModel_new" RENAME TO "LLMModel"; 14 | DROP TYPE "LLMModel_old"; 15 | ALTER TABLE "Agent" ALTER COLUMN "llmModel" SET DEFAULT 'GPT_3_5_TURBO_16K_0613'; 16 | COMMIT; 17 | -------------------------------------------------------------------------------- /api/prisma/migrations/20231106224640_vision_tool/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'GPT_VISION'; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20231107204227_tts1_tool/migration.sql: 
-------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'TTS_1'; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20231112132755_update_model/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "LLMModel" ADD VALUE 'GPT_3_5_TURBO_1106'; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20231113210515_huggingface_models/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "LLMModel" ADD VALUE 'MISTRAL_7B_INSTRUCT_V01'; 3 | 4 | -- AlterEnum 5 | ALTER TYPE "LLMProvider" ADD VALUE 'HUGGINGFACE'; 6 | -------------------------------------------------------------------------------- /api/prisma/migrations/20231114202204_algolia_tool/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'ALGOLIA'; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20231122081046_handoff_tool/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'HAND_OFF'; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20231124220817_function_tool/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'FUNCTION'; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20231217152121_add_tool_config/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER 
TABLE "Tool" ADD COLUMN "toolConfig" JSONB; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20231217220650_remove_workflow_inputs/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "WorkflowStep" ALTER COLUMN "input" DROP NOT NULL, 3 | ALTER COLUMN "output" DROP NOT NULL; 4 | -------------------------------------------------------------------------------- /api/prisma/migrations/20231223104946_add_http_tool/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'HTTP'; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20240102071238_add_vectordb_table/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateEnum 2 | CREATE TYPE "VectorDbProvider" AS ENUM ('PINECONE', 'ASTRA_DB', 'WEAVIATE', 'QDRANT'); 3 | 4 | -- AlterTable 5 | ALTER TABLE "Datasource" ADD COLUMN "vectorDbId" TEXT; 6 | 7 | -- CreateTable 8 | CREATE TABLE "VectorDb" ( 9 | "id" TEXT NOT NULL, 10 | "provider" "VectorDbProvider" NOT NULL DEFAULT 'PINECONE', 11 | "options" JSONB NOT NULL, 12 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 13 | "updatedAt" TIMESTAMP(3) NOT NULL, 14 | "apiUserId" TEXT NOT NULL, 15 | 16 | CONSTRAINT "VectorDb_pkey" PRIMARY KEY ("id") 17 | ); 18 | 19 | -- AddForeignKey 20 | ALTER TABLE "Datasource" ADD CONSTRAINT "Datasource_vectorDbId_fkey" FOREIGN KEY ("vectorDbId") REFERENCES "VectorDb"("id") ON DELETE SET NULL ON UPDATE CASCADE; 21 | 22 | -- AddForeignKey 23 | ALTER TABLE "VectorDb" ADD CONSTRAINT "VectorDb_apiUserId_fkey" FOREIGN KEY ("apiUserId") REFERENCES "ApiUser"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 24 | -------------------------------------------------------------------------------- 
/api/prisma/migrations/20240110062120_add_hugging_face_mixtral_8x7b_model/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "LLMModel" ADD VALUE 'MIXTRAL_8X7B_INSTRUCT_V01'; -------------------------------------------------------------------------------- /api/prisma/migrations/20240119040422_add_supabase_pgvector/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "VectorDbProvider" ADD VALUE 'SUPABASE'; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20240121183424_add_on_delete_cascade_to_workflow_step/migration.sql: -------------------------------------------------------------------------------- 1 | -- DropForeignKey 2 | ALTER TABLE "WorkflowStep" DROP CONSTRAINT "WorkflowStep_agentId_fkey"; 3 | 4 | -- DropForeignKey 5 | ALTER TABLE "WorkflowStep" DROP CONSTRAINT "WorkflowStep_workflowId_fkey"; 6 | 7 | -- AddForeignKey 8 | ALTER TABLE "WorkflowStep" ADD CONSTRAINT "WorkflowStep_workflowId_fkey" FOREIGN KEY ("workflowId") REFERENCES "Workflow"("id") ON DELETE CASCADE ON UPDATE CASCADE; 9 | 10 | -- AddForeignKey 11 | ALTER TABLE "WorkflowStep" ADD CONSTRAINT "WorkflowStep_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agent"("id") ON DELETE CASCADE ON UPDATE CASCADE; 12 | -------------------------------------------------------------------------------- /api/prisma/migrations/20240124063011_make_agent_llm_model_optional/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Agent" ALTER COLUMN "llmModel" DROP NOT NULL; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20240129153542_add_workflow_config_table/migration.sql: -------------------------------------------------------------------------------- 1 | -- 
DropForeignKey 2 | ALTER TABLE "AgentDatasource" DROP CONSTRAINT "AgentDatasource_datasourceId_fkey"; 3 | 4 | -- DropForeignKey 5 | ALTER TABLE "AgentTool" DROP CONSTRAINT "AgentTool_toolId_fkey"; 6 | 7 | -- CreateTable 8 | CREATE TABLE "WorkflowConfig" ( 9 | "id" TEXT NOT NULL, 10 | "config" JSONB NOT NULL, 11 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 12 | "updatedAt" TIMESTAMP(3) NOT NULL, 13 | "workflowId" TEXT NOT NULL, 14 | "apiUserId" TEXT, 15 | 16 | CONSTRAINT "WorkflowConfig_pkey" PRIMARY KEY ("id") 17 | ); 18 | 19 | -- AddForeignKey 20 | ALTER TABLE "AgentDatasource" ADD CONSTRAINT "AgentDatasource_datasourceId_fkey" FOREIGN KEY ("datasourceId") REFERENCES "Datasource"("id") ON DELETE CASCADE ON UPDATE CASCADE; 21 | 22 | -- AddForeignKey 23 | ALTER TABLE "AgentTool" ADD CONSTRAINT "AgentTool_toolId_fkey" FOREIGN KEY ("toolId") REFERENCES "Tool"("id") ON DELETE CASCADE ON UPDATE CASCADE; 24 | 25 | -- AddForeignKey 26 | ALTER TABLE "WorkflowConfig" ADD CONSTRAINT "WorkflowConfig_workflowId_fkey" FOREIGN KEY ("workflowId") REFERENCES "Workflow"("id") ON DELETE CASCADE ON UPDATE CASCADE; 27 | 28 | -- AddForeignKey 29 | ALTER TABLE "WorkflowConfig" ADD CONSTRAINT "WorkflowConfig_apiUserId_fkey" FOREIGN KEY ("apiUserId") REFERENCES "ApiUser"("id") ON DELETE SET NULL ON UPDATE CASCADE; 30 | -------------------------------------------------------------------------------- /api/prisma/migrations/20240201161130_add_gpt_4_turbo_preview/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "LLMModel" ADD VALUE 'GPT_4_TURBO_PREVIEW'; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20240201221548_gpt_3_5_turbo_0125/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "LLMModel" ADD VALUE 'GPT_3_5_TURBO_0125'; 3 | 
-------------------------------------------------------------------------------- /api/prisma/migrations/20240201224222_agent_type_v2/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateEnum 2 | CREATE TYPE "AgentType" AS ENUM ('hmai', 'OPENAI_ASSISTANT'); 3 | 4 | -- AlterTable 5 | ALTER TABLE "Agent" ADD COLUMN "type" "AgentType" NOT NULL DEFAULT 'hmai'; 6 | -------------------------------------------------------------------------------- /api/prisma/migrations/20240202033257_add_openai_assistants/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Agent" ADD COLUMN "openaiMetadata" JSONB; 3 | -------------------------------------------------------------------------------- /api/prisma/migrations/20240204133952_update_openai_assistants_table/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | - You are about to drop the column `openaiMetadata` on the `Agent` table. All the data in the column will be lost. 4 | */ 5 | -- AlterTable 6 | ALTER TABLE "Agent" DROP COLUMN "openaiMetadata", 7 | ADD COLUMN "metadata" JSONB; -------------------------------------------------------------------------------- /api/prisma/migrations/migration_lock.toml: -------------------------------------------------------------------------------- 1 | # Please do not edit this file manually 2 | # It should be added in your version-control system (i.e. 
Git) 3 | provider = "postgresql" -------------------------------------------------------------------------------- /api/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "hmai" 3 | version = "0.1.38" 4 | description = "" 5 | authors = ["Ismail Pelaseyed"] 6 | readme = "../../README.md" 7 | packages = [{ include = "app" }] 8 | 9 | [tool.poetry.dependencies] 10 | python = "^3.8.1, <3.12" 11 | fastapi = { extras = ["all"], version = "^0.95.1" } 12 | uvicorn = { extras = ["standard"], version = "^0.22.0" } 13 | gunicorn = "^20.1.0" 14 | python-decouple = "^3.8" 15 | pydantic = "^1.10.7" 16 | sse-starlette = "^1.5.0" 17 | bcrypt = "^4.0.1" 18 | pyjwt = "^2.7.0" 19 | ruff = "^0.0.265" 20 | black = "^23.3.0" 21 | uuid = "^1.30" 22 | flake8 = "^6.0.0" 23 | tiktoken = "0.5.2" 24 | python-slugify = "^8.0.1" 25 | asyncio = "^3.4.3" 26 | colorlog = "^6.7.0" 27 | vulture = "^2.7" 28 | lamini = "^0.0.21" 29 | pytest = "^7.4.0" 30 | prefect = "^2.11.4" 31 | llama-index = "^0.8.11.post3" 32 | pinecone-client = "^2.2.2" 33 | pyairtable = "^2.1.0.post1" 34 | backoff = "^2.2.1" 35 | xmltodict = "^0.13.0" 36 | pypdf = "^3.16.0" 37 | tabulate = "^0.9.0" 38 | gitpython = "^3.1.36" 39 | python-pptx = "^0.6.22" 40 | openpyxl = "^3.1.2" 41 | unstructured = "^0.10.16" 42 | requests = "^2.31.0" 43 | wolframalpha = "^5.0.0" 44 | bs4 = "^0.0.1" 45 | segment-analytics-python = "^2.2.3" 46 | replicate = "^0.15.4" 47 | e2b = "^0.10.8" 48 | youtube-transcript-api = "^0.6.1" 49 | openai = "^1.1.1" 50 | langchain-experimental = "^0.0.37" 51 | pydub = "^0.25.1" 52 | algoliasearch = "^3.0.0" 53 | litellm = "^1.14.1" 54 | weaviate-client = "^3.25.3" 55 | qdrant-client = "^1.6.9" 56 | langfuse = "^2.6.3" 57 | vecs = "^0.4.2" 58 | oauth2client = "^4.1.3" 59 | google-cloud-bigquery = "^3.16.0" 60 | agentops = { extras = ["langchain"], version = "^0.0.20" } 61 | langsmith = "^0.0.83" 62 | langchain = "^0.1.4" 63 | langchain-openai = 
"^0.0.5" 64 | python-docx = "^1.1.0" 65 | prisma = "^0.12.0" 66 | 67 | 68 | [build-system] 69 | requires = ["poetry-core"] 70 | build-backend = "poetry.core.masonry.api" 71 | 72 | [tool.vulture] 73 | exclude = [ 74 | "*settings.py", 75 | "*/docs/*.py", 76 | "*/test_*.py", 77 | "*/.venv/*.py", 78 | "app/lib/callbacks.py", # Ignoring this file 79 | ] 80 | ignore_decorators = ["@app.route", "@require_*"] 81 | ignore_names = ["visit_*", "do_*"] 82 | make_whitelist = true 83 | min_confidence = 100 84 | paths = ["."] 85 | sort_by_size = true 86 | verbose = false 87 | 88 | [tool.ruff] 89 | exclude = [ 90 | "*settings.py", 91 | "*/docs/*.py", 92 | "*/test_*.py", 93 | "*/.venv/*.py", 94 | "whitelist.py", 95 | ] 96 | -------------------------------------------------------------------------------- /api/replit.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Install dependencies 4 | poetry install 5 | 6 | # Install prisma 7 | poetry run prisma generate 8 | 9 | # Start the application with auto-reload 10 | gunicorn --bind :8000 --workers 2 --timeout 0 --worker-class uvicorn.workers.UvicornWorker --threads 8 app.main:app -------------------------------------------------------------------------------- /api/supabase/.gitignore: -------------------------------------------------------------------------------- 1 | # Supabase 2 | .branches 3 | .temp 4 | -------------------------------------------------------------------------------- /api/supabase/seed.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HMaiLab/HMAI/79ddaa324f41f7bf81236991c37c5007eb9c02f8/api/supabase/seed.sql -------------------------------------------------------------------------------- /api/tests/__init__.py: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/HMaiLab/HMAI/79ddaa324f41f7bf81236991c37c5007eb9c02f8/api/tests/__init__.py -------------------------------------------------------------------------------- /biome.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://biomejs.dev/schemas/1.9.4/schema.json", 3 | "organizeImports": { 4 | "enabled": true, 5 | "ignore": ["node_modules", ".next", "dist", "output"] 6 | }, 7 | "linter": { 8 | "enabled": true, 9 | "ignore": ["node_modules", ".next", "dist", "output"], 10 | "rules": { 11 | "recommended": true, 12 | "style": { 13 | "noUnusedTemplateLiteral": "off", 14 | "useTemplate": "off" 15 | }, 16 | "suspicious": { 17 | "noExplicitAny": "off" 18 | }, 19 | "complexity": { 20 | "noForEach": "off" 21 | }, 22 | "a11y": { 23 | "useKeyWithClickEvents": "off" 24 | } 25 | } 26 | }, 27 | "formatter": { 28 | "ignore": ["node_modules", ".next", "dist", "output"] 29 | }, 30 | "javascript": { 31 | "formatter": { 32 | "quoteStyle": "single", 33 | "semicolons": "always" 34 | } 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /bun.lockb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HMaiLab/HMAI/79ddaa324f41f7bf81236991c37c5007eb9c02f8/bun.lockb -------------------------------------------------------------------------------- /index.ts: -------------------------------------------------------------------------------- 1 | console.log("Hello via Bun!"); -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hmai", 3 | "module": "index.ts", 4 | "type": "module", 5 | "devDependencies": { 6 | "@biomejs/biome": "^1.9.4", 7 | "@types/bun": "latest" 8 | }, 9 | "peerDependencies": { 10 | "typescript": "^5.0.0" 11 | } 12 | } 
-------------------------------------------------------------------------------- /setup-solana.sh: -------------------------------------------------------------------------------- 1 | # Quick Solana GRPC Node Setup 2 | 3 | Save as `setup-solana.sh`: 4 | 5 | ```bash 6 | #!/bin/bash 7 | 8 | # Create directories and mount disks 9 | mkdir -p /root/sol/{accounts,ledger,bin} 10 | echo "/dev/nvme0n1 /root/sol/ledger ext4 defaults 0 0" >> /etc/fstab 11 | echo "/dev/nvme1n1 /root/sol/accounts ext4 defaults 0 0" >> /etc/fstab 12 | 13 | # System tuning 14 | cat << EOF >> /etc/sysctl.conf 15 | net.core.rmem_default = 134217728 16 | net.core.rmem_max = 134217728 17 | net.core.wmem_default = 134217728 18 | net.core.wmem_max = 134217728 19 | vm.max_map_count = 1000000 20 | fs.nr_open = 1000000 21 | EOF 22 | sysctl -p 23 | 24 | # Install Solana 25 | sh -c "$(curl -sSfL https://release.anza.xyz/v2.0.18/install)" 26 | echo 'export PATH="/root/.local/share/solana/install/active_release/bin:$PATH"' >> ~/.bashrc 27 | 28 | # Create validator script 29 | cat << 'EOF' > /root/sol/bin/validator.sh 30 | #!/bin/bash 31 | exec agave-validator \ 32 | --ledger /root/sol/ledger \ 33 | --accounts /root/sol/accounts \ 34 | --identity /root/validator-keypair.json \ 35 | --known-validator 7Np41oeYqPefeNQEHSv1UDhYrehxin3NStELsSKCT4K2 \ 36 | --entrypoint entrypoint.mainnet-beta.solana.com:8001 \ 37 | --full-rpc-api \ 38 | --no-voting \ 39 | --private-rpc \ 40 | --rpc-port 8899 \ 41 | --dynamic-port-range 8000-8020 \ 42 | --account-index program-id \ 43 | --account-index spl-token-mint \ 44 | --enable-rpc-transaction-history \ 45 | --log /root/solana-rpc.log 46 | EOF 47 | chmod +x /root/sol/bin/validator.sh 48 | 49 | # Create service 50 | cat << EOF > /etc/systemd/system/sol.service 51 | [Unit] 52 | Description=Solana Validator 53 | After=network.target 54 | 55 | [Service] 56 | Type=simple 57 | Restart=always 58 | User=root 59 | LimitNOFILE=1000000 60 | 
Environment="PATH=/bin:/usr/bin:/root/.local/share/solana/install/active_release/bin" 61 | ExecStart=/root/sol/bin/validator.sh 62 | 63 | [Install] 64 | WantedBy=multi-user.target 65 | EOF 66 | 67 | # Start service 68 | systemctl daemon-reload 69 | systemctl enable sol 70 | systemctl start sol 71 | 72 | echo "Setup complete! Check logs with: journalctl -u sol -f" 73 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | // Enable latest features 4 | "lib": ["ESNext", "DOM"], 5 | "target": "ESNext", 6 | "module": "ESNext", 7 | "moduleDetection": "force", 8 | "jsx": "react-jsx", 9 | "allowJs": true, 10 | 11 | // Bundler mode 12 | "moduleResolution": "bundler", 13 | "allowImportingTsExtensions": true, 14 | "verbatimModuleSyntax": true, 15 | "noEmit": true, 16 | 17 | // Best practices 18 | "strict": true, 19 | "skipLibCheck": true, 20 | "noFallthroughCasesInSwitch": true, 21 | 22 | // Some stricter flags (disabled by default) 23 | "noUnusedLocals": false, 24 | "noUnusedParameters": false, 25 | "noPropertyAccessFromIndexSignature": false 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /web/.editorconfig: -------------------------------------------------------------------------------- 1 | # editorconfig.org 2 | root = true 3 | 4 | [*] 5 | charset = utf-8 6 | end_of_line = lf 7 | indent_size = 2 8 | indent_style = space 9 | insert_final_newline = true 10 | trim_trailing_whitespace = true -------------------------------------------------------------------------------- /web/.env.example: -------------------------------------------------------------------------------- 1 | NEXT_PUBLIC_SUPABASE_URL= 2 | NEXT_PUBLIC_SUPABASE_ANON_KEY= 3 | GITHUB_CLIENT_ID= 4 | GITHUB_CLIENT_SECRET= 5 | NEXT_PUBLIC_SUPERAGENT_API_URL="http://127.0.0.1:8000/api/v1" 6 | 
NEXT_PUBLIC_SUPABASE_STORAGE_NAME="superagent" 7 | 8 | # TRACING 9 | NEXT_PUBLIC_SEGMENT_WRITE_KEY=6tOuMx1B790SNrcHhd7WQbciZVEK00BY 10 | 11 | # Optional for connecting to external datasources 12 | NEXT_PUBLIC_APIDECK_API_KEY= 13 | NEXT_PUBLIC_APIDECK_API_ID= 14 | 15 | # Optional for adding billing 16 | NEXT_PUBLIC_STRIPE_SECRET_KEY= 17 | NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY= 18 | 19 | # Optional for Langfuse 20 | NEXT_PUBLIC_LANGFUSE_PUBLIC_KEY= 21 | NEXT_PUBLIC_LANGFUSE_BASE_URL= -------------------------------------------------------------------------------- /web/.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | node_modules 5 | .pnp 6 | .pnp.js 7 | 8 | # testing 9 | coverage 10 | 11 | # next.js 12 | .next/ 13 | out/ 14 | build 15 | 16 | # misc 17 | .DS_Store 18 | *.pem 19 | .vscode 20 | 21 | # debug 22 | npm-debug.log* 23 | yarn-debug.log* 24 | yarn-error.log* 25 | .pnpm-debug.log* 26 | 27 | # local env files 28 | .env.local 29 | .env.development.local 30 | .env.test.local 31 | .env.production.local 32 | 33 | # turbo 34 | .turbo 35 | 36 | .contentlayer 37 | .env 38 | .vscode 39 | 40 | tsconfig.tsbuildinfo -------------------------------------------------------------------------------- /web/.lintstagedrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | // Type check TypeScript files 3 | "**/*.(ts|tsx)": () => "npx tsc --noEmit", 4 | 5 | // Lint & Prettify TS and JS files 6 | "**/*.(ts|tsx)": (filenames) => [ 7 | `npx eslint ${filenames.join(" ")}`, 8 | `npx prettier --write ${filenames.join(" ")}`, 9 | ], 10 | 11 | // Prettify only Markdown and JSON files 12 | "**/*.(md|json)": (filenames) => 13 | `npx prettier --write ${filenames.join(" ")}`, 14 | } 15 | -------------------------------------------------------------------------------- 
/web/.lintstagedrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "*.ts": ["prettier --write", "eslint"], 3 | "*.tsx": ["prettier --write", "eslint"], 4 | "*.html": ["eslint", "prettier --write"], 5 | "*.scss": "prettier --write" 6 | } 7 | -------------------------------------------------------------------------------- /web/README.md: -------------------------------------------------------------------------------- 1 | # next-template 2 | 3 | A Next.js 13 template for building apps with Radix UI and Tailwind CSS. 4 | 5 | ## Usage 6 | 7 | ```bash 8 | npx create-next-app -e https://github.com/shadcn/next-template 9 | ``` 10 | 11 | ## Features 12 | 13 | - Next.js 13 App Directory 14 | - Radix UI Primitives 15 | - Tailwind CSS 16 | - Icons from [Lucide](https://lucide.dev) 17 | - Dark mode with `next-themes` 18 | - Tailwind CSS class sorting, merging and linting. 19 | 20 | ## License 21 | 22 | Licensed under the [MIT license](https://github.com/shadcn/ui/blob/main/LICENSE.md). 
23 | -------------------------------------------------------------------------------- /web/app/agents/[agentId]/avatar.tsx: -------------------------------------------------------------------------------- 1 | /* eslint-disable @next/next/no-img-element */ 2 | 'use client'; 3 | 4 | import { type ChangeEvent, useRef, useState } from 'react'; 5 | import { RxImage } from 'react-icons/rx'; 6 | import { v4 as uuid } from 'uuid'; 7 | 8 | import { Input } from '@/components/ui/input'; 9 | import { Spinner } from '@/components/ui/spinner'; 10 | import { getSupabase } from '@/lib/supabase'; 11 | 12 | interface AvatarProps { 13 | accept: string; 14 | onSelect: (url: string) => Promise; 15 | imageUrl: string; 16 | } 17 | const supabase = getSupabase(); 18 | 19 | export default function Avatar({ accept, onSelect, imageUrl }: AvatarProps) { 20 | const fileInputRef = useRef(null); 21 | const [isLoading, setIsLoading] = useState(false); 22 | 23 | const handleFileChange = async (event: ChangeEvent) => { 24 | const file = event.target.files ? event.target.files[0] : null; 25 | setIsLoading(true); 26 | 27 | if (file) { 28 | const path = `public/${uuid()}`; 29 | const { error } = await supabase.storage 30 | .from('superagent') 31 | .upload(path, file, { contentType: file.type }); 32 | 33 | if (error) throw error; 34 | 35 | const { 36 | data: { publicUrl }, 37 | } = supabase.storage.from('superagent').getPublicUrl(path); 38 | console.log(publicUrl); 39 | await onSelect(publicUrl); 40 | setIsLoading(false); 41 | } 42 | }; 43 | 44 | const triggerFileInput = () => { 45 | if (fileInputRef.current) { 46 | fileInputRef.current.click(); 47 | } 48 | }; 49 | 50 | return ( 51 | <> 52 | 59 |
63 | {isLoading ? ( 64 | 65 | ) : ( 66 | <> 67 | Avatar 68 |
69 | 70 |
71 | 72 | )} 73 |
74 | 75 | ); 76 | } 77 | -------------------------------------------------------------------------------- /web/app/agents/[agentId]/delete-agent-button.tsx: -------------------------------------------------------------------------------- 1 | import React from "react" 2 | import { TbTrashX } from "react-icons/tb" 3 | 4 | import { cn } from "@/lib/utils" 5 | import { 6 | AlertDialog, 7 | AlertDialogAction, 8 | AlertDialogCancel, 9 | AlertDialogContent, 10 | AlertDialogDescription, 11 | AlertDialogFooter, 12 | AlertDialogHeader, 13 | AlertDialogTitle, 14 | AlertDialogTrigger, 15 | } from "@/components/ui/alert-dialog" 16 | import { buttonVariants } from "@/components/ui/button" 17 | 18 | interface DeleteAgentButtonProps { 19 | handleDelete: () => void 20 | } 21 | 22 | const DeleteAgentButton: React.FC = ({ 23 | handleDelete, 24 | }) => { 25 | return ( 26 | 27 | 30 | 31 | 32 | 33 | 34 | Are you absolutely sure? 35 | 36 | This action cannot be undone. This will permanently delete the agent 37 | and remove your data from our servers. 
38 | 39 | 40 | 41 | Cancel 42 | handleDelete()}> 43 | Delete 44 | 45 | 46 | 47 | 48 | ) 49 | } 50 | 51 | export default DeleteAgentButton 52 | -------------------------------------------------------------------------------- /web/app/agents/[agentId]/header.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import { useRouter } from "next/navigation" 4 | 5 | import { Agent } from "@/types/agent" 6 | import { Profile } from "@/types/profile" 7 | import { Api } from "@/lib/api" 8 | import { Toaster } from "@/components/ui/toaster" 9 | import { useToast } from "@/components/ui/use-toast" 10 | 11 | import DeleteAgentButton from "./delete-agent-button" 12 | 13 | export default function Header({ 14 | agent, 15 | profile, 16 | }: { 17 | agent: Agent 18 | profile: Profile 19 | }) { 20 | const api = new Api(profile.api_key) 21 | const router = useRouter() 22 | const { toast } = useToast() 23 | 24 | const handleDelete = async () => { 25 | await api.deleteAgentById(agent.id) 26 | toast({ 27 | description: `Agent with ID: ${agent.id} deleted!`, 28 | }) 29 | router.refresh() 30 | router.push("/agents") 31 | } 32 | 33 | return ( 34 | <> 35 |
36 |

{agent.name}

37 |
38 | 39 |
40 |
41 | 42 | 43 | ) 44 | } 45 | -------------------------------------------------------------------------------- /web/app/agents/[agentId]/page.tsx: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import Link from "next/link" 3 | import { createRouteHandlerClient } from "@supabase/auth-helpers-nextjs" 4 | import { TbBrandOpenai } from "react-icons/tb" 5 | 6 | import { Api } from "@/lib/api" 7 | import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert" 8 | 9 | import Chat from "./chat" 10 | import Header from "./header" 11 | import Settings from "./settings" 12 | 13 | export const dynamic = "force-dynamic" 14 | export default async function AgentPage({ params }: { params: any }) { 15 | const { agentId } = params 16 | const supabase = createRouteHandlerClient({ cookies }) 17 | const { 18 | data: { user }, 19 | } = await supabase.auth.getUser() 20 | const { data: profile } = await supabase 21 | .from("profiles") 22 | .select("*") 23 | .eq("user_id", user?.id) 24 | .single() 25 | const api = new Api(profile.api_key) 26 | const [ 27 | { data: agent }, 28 | { data: tools }, 29 | { data: datasources }, 30 | { data: llms }, 31 | ] = await Promise.all([ 32 | api.getAgentById(agentId), 33 | api.getTools(), 34 | api.getDatasources(), 35 | api.getLLMs(), 36 | ]) 37 | 38 | return ( 39 |
40 |
41 | {agent.type === "hmai" ? ( 42 |
43 | 44 | 51 |
52 | ) : ( 53 |
54 | 55 |
56 |

OpenAI Assistants

57 |

58 | We currently don't support running OpenAI Assistants outside 59 | of workflows. Please visit the{" "} 60 | 61 | workflows page 62 | {" "} 63 | to run this assistant. 64 |

65 |
66 |
67 | )} 68 |
69 | ) 70 | } 71 | -------------------------------------------------------------------------------- /web/app/agents/[agentId]/prompt-footer.tsx: -------------------------------------------------------------------------------- 1 | import Link from "next/link" 2 | 3 | import { cn } from "@/lib/utils" 4 | 5 | export function PromptFooter({ 6 | className, 7 | ...props 8 | }: React.ComponentProps<"p">) { 9 | return ( 10 |

17 | Powered by{" "} 18 | 19 | hmai.sh 20 | 21 |

22 | ) 23 | } 24 | -------------------------------------------------------------------------------- /web/app/agents/columns.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import { ColumnDef } from "@tanstack/react-table" 4 | import { RxCheckCircled, RxCircle } from "react-icons/rx" 5 | 6 | import { Badge } from "@/components/ui/badge" 7 | 8 | export type Agent = { 9 | id: string 10 | name: string 11 | prompt: string 12 | isActive: boolean 13 | } 14 | 15 | export const columns: ColumnDef[] = [ 16 | { 17 | accessorKey: "name", 18 | header: "Name", 19 | }, 20 | { 21 | accessorKey: "isActive", 22 | header: "Status", 23 | cell: ({ row, column }) => 24 | row.getValue(column.id) ? ( 25 | 26 |
27 | 28 | Deployed 29 |
30 |
31 | ) : ( 32 | 33 |
34 | 35 | Paused 36 |
37 |
38 | ), 39 | }, 40 | { 41 | accessorKey: "description", 42 | header: "Description", 43 | }, 44 | { 45 | accessorKey: "id", 46 | header: "ID", 47 | }, 48 | ] 49 | -------------------------------------------------------------------------------- /web/app/agents/loading.tsx: -------------------------------------------------------------------------------- 1 | import { Spinner } from "@/components/ui/spinner" 2 | 3 | export default function Loading() { 4 | return ( 5 |
6 | 7 |
8 | ) 9 | } 10 | -------------------------------------------------------------------------------- /web/app/agents/page.tsx: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import Link from "next/link" 3 | import { createRouteHandlerClient } from "@supabase/auth-helpers-nextjs" 4 | 5 | import { Api } from "@/lib/api" 6 | import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert" 7 | 8 | import { columns } from "./columns" 9 | import { DataTable } from "./data-table" 10 | 11 | export const dynamic = "force-dynamic" 12 | 13 | export default async function Agents({ 14 | searchParams, 15 | }: { 16 | searchParams: { 17 | page: string 18 | take: string 19 | } 20 | }) { 21 | const supabase = createRouteHandlerClient({ cookies }) 22 | const { 23 | data: { user }, 24 | } = await supabase.auth.getUser() 25 | const { data: profile } = await supabase 26 | .from("profiles") 27 | .select("*") 28 | .eq("user_id", user?.id) 29 | .single() 30 | const api = new Api(profile.api_key) 31 | const { take: takeStr, page: pageStr } = searchParams 32 | const take = Number(takeStr) || 10, 33 | page = Number(pageStr) || 1 34 | 35 | const { data: agents, total_pages } = await api.getAgents({ 36 | skip: (page - 1) * take, 37 | take, 38 | }) 39 | 40 | return ( 41 |
42 |

Agents

43 | 44 | 45 | Deprecated 46 | 47 | This page is deprecated and will be removed in a future release. 48 |
49 | For creating new agents, please use the workflows page.{" "} 50 | 51 | Click here to create a new workflow. 52 |
54 |
55 | 65 |
66 | ) 67 | } 68 | -------------------------------------------------------------------------------- /web/app/api/stripe/webhook/route.ts: -------------------------------------------------------------------------------- 1 | import { NextRequest, NextResponse } from "next/server" 2 | import { createClient } from "@supabase/supabase-js" 3 | import Stripe from "stripe" 4 | 5 | import { stripe } from "@/lib/stripe" 6 | 7 | export async function POST(request: NextRequest) { 8 | const supabase = createClient( 9 | process.env.NEXT_PUBLIC_SUPABASE_URL!, 10 | process.env.SUPABASE_SERVICEROLE_KEY! 11 | ) 12 | const { data, type } = await request.json() 13 | const customer = data.object.customer 14 | switch (type) { 15 | case "customer.subscription.trial_will_end": 16 | const c_data: Stripe.Response = 17 | await stripe.customers.retrieve(customer) 18 | 19 | if ("deleted" in c_data && c_data.deleted === true) { 20 | return NextResponse.json({ success: false }) 21 | } 22 | 23 | const { email, name } = c_data 24 | 25 | await fetch("https://app.loops.so/api/v1/events/send", { 26 | method: "POST", 27 | headers: { 28 | authorization: `Bearer ${process.env.LOOPS_API_KEY}`, 29 | }, 30 | body: JSON.stringify({ 31 | email: email, 32 | eventName: "trial_ends", 33 | company: name, 34 | first_name: c_data.metadata?.first_name, 35 | last_name: c_data.metadata?.last_name, 36 | }), 37 | }) 38 | 39 | return NextResponse.json({ success: true }) 40 | case "customer.subscription.deleted": 41 | await supabase 42 | .from("profiles") 43 | .update({ stripe_plan_id: null }) 44 | .eq("stripe_customer_id", customer) 45 | .select() 46 | return NextResponse.json({ success: true }) 47 | case "customer.subscription.created": 48 | await supabase 49 | .from("profiles") 50 | .update({ stripe_plan_id: data.object.id }) 51 | .eq("stripe_customer_id", customer) 52 | return NextResponse.json({ success: true }) 53 | case "customer.subscription.updated": 54 | await supabase 55 | .from("profiles") 56 | .update({ 
stripe_plan_id: data.object.id }) 57 | .eq("stripe_customer_id", customer) 58 | default: 59 | return NextResponse.json({ success: false }) 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /web/app/auth/callback/route.ts: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { NextRequest, NextResponse } from "next/server" 3 | import { createRouteHandlerClient } from "@supabase/auth-helpers-nextjs" 4 | 5 | export const dynamic = "force-dynamic" 6 | 7 | export async function GET(request: NextRequest) { 8 | const requestUrl = new URL(request.url) 9 | const code = requestUrl.searchParams.get("code") 10 | 11 | if (code) { 12 | const supabase = createRouteHandlerClient({ cookies }) 13 | await supabase.auth.exchangeCodeForSession(code) 14 | } 15 | 16 | // URL to redirect to after sign in process completes 17 | return NextResponse.redirect("/agents") 18 | } 19 | -------------------------------------------------------------------------------- /web/app/auth/login.ts: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { NextRequest, NextResponse } from "next/server" 3 | import { createRouteHandlerClient } from "@supabase/auth-helpers-nextjs" 4 | 5 | export const dynamic = "force-dynamic" 6 | 7 | export async function POST(request: NextRequest) { 8 | const requestUrl = new URL(request.url) 9 | const formData = await request.formData() 10 | const email = formData.get("email") 11 | const password = formData.get("password") 12 | const supabase = createRouteHandlerClient({ cookies }) 13 | 14 | await supabase.auth.signInWithPassword({ 15 | email: email as string, 16 | password: password as string, 17 | }) 18 | 19 | return NextResponse.redirect(requestUrl.origin, { 20 | status: 301, 21 | }) 22 | } 23 | 
-------------------------------------------------------------------------------- /web/app/auth/logout.ts: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { NextResponse } from "next/server" 3 | import { createRouteHandlerClient } from "@supabase/auth-helpers-nextjs" 4 | 5 | export const dynamic = "force-dynamic" 6 | 7 | export async function POST(request: Request) { 8 | const requestUrl = new URL(request.url) 9 | const supabase = createRouteHandlerClient({ cookies }) 10 | 11 | await supabase.auth.signOut() 12 | 13 | return NextResponse.redirect(`${requestUrl.origin}/login`, { 14 | status: 301, 15 | }) 16 | } 17 | -------------------------------------------------------------------------------- /web/app/auth/sign-up.ts: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { NextRequest, NextResponse } from "next/server" 3 | import { createRouteHandlerClient } from "@supabase/auth-helpers-nextjs" 4 | 5 | export const dynamic = "force-dynamic" 6 | 7 | export async function POST(request: NextRequest) { 8 | const requestUrl = new URL(request.url) 9 | const formData = await request.formData() 10 | const email = formData.get("email") 11 | const password = formData.get("password") 12 | const supabase = createRouteHandlerClient({ cookies }) 13 | 14 | await supabase.auth.signUp({ 15 | email: email as string, 16 | password: password as string, 17 | options: { 18 | emailRedirectTo: `${requestUrl.origin}/auth/callback`, 19 | }, 20 | }) 21 | 22 | return NextResponse.redirect(requestUrl.origin, { 23 | status: 301, 24 | }) 25 | } 26 | -------------------------------------------------------------------------------- /web/app/billing-modal.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import { usePathname } from "next/navigation" 4 | import { 
createClientComponentClient } from "@supabase/auth-helpers-nextjs" 5 | import { useTheme } from "next-themes" 6 | import { useAsync } from "react-use" 7 | 8 | import { 9 | AlertDialog, 10 | AlertDialogContent, 11 | AlertDialogDescription, 12 | AlertDialogHeader, 13 | AlertDialogTitle, 14 | } from "@/components/ui/alert-dialog" 15 | 16 | export default function BillingModal({ session }: { session: any }) { 17 | const theme = useTheme() 18 | const pathname = usePathname() 19 | const supabase = createClientComponentClient() 20 | const { loading, value: profile } = useAsync(async () => { 21 | const { data: profile } = await supabase 22 | .from("profiles") 23 | .select("*") 24 | .eq("user_id", session?.user.id) 25 | .single() 26 | 27 | return profile 28 | }) 29 | 30 | const pricingTableID = 31 | theme.resolvedTheme === "dark" 32 | ? process.env.NEXT_PUBLIC_STRIPE_DARK_PRICING_TABLE_ID 33 | : process.env.NEXT_PUBLIC_STRIPE_LIGHT_PRICING_TABLE_ID 34 | 35 | return ( 36 | 44 | 45 | 46 | Your free trial has ended! 47 | 48 | Hey {profile?.first_name}, your free trial has ended and you need to 49 | subscribe to one of our plans to get access to your agents. 50 | 51 | 52 |
53 | 57 |
58 |
59 |
60 | ) 61 | } -------------------------------------------------------------------------------- /web/app/container.tsx: -------------------------------------------------------------------------------- 1 | import Sidebar from "@/components/sidebar" 2 | 3 | import BillingModal from "./billing-modal" 4 | 5 | interface RootLayoutProps { 6 | children: React.ReactNode 7 | session: any 8 | } 9 | 10 | export default function RootLayout({ children, session }: RootLayoutProps) { 11 | return ( 12 |
13 | {process.env.NEXT_PUBLIC_STRIPE_DARK_PRICING_TABLE_ID && ( 14 | 15 | )} 16 | 17 |
{children}
18 |
19 | ) 20 | } -------------------------------------------------------------------------------- /web/app/integrations/client-page.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs" 4 | 5 | import Storage from "./storage" 6 | 7 | export default function IntegrationsClientPage({ 8 | profile, 9 | configuredDBs, 10 | }: { 11 | profile: any 12 | configuredDBs: any 13 | }) { 14 | return ( 15 | 16 | 17 | 18 | STORAGE 19 | 20 | 21 | LOGGING 22 | 23 | 24 | DATASOURCES 25 | 26 | 27 | TOOLS 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | ) 38 | } 39 | -------------------------------------------------------------------------------- /web/app/integrations/page.tsx: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { createRouteHandlerClient } from "@supabase/auth-helpers-nextjs" 3 | 4 | import { Api } from "@/lib/api" 5 | 6 | import IntegrationsClientPage from "./client-page" 7 | 8 | export default async function Integration() { 9 | const supabase = createRouteHandlerClient({ cookies }) 10 | const { 11 | data: { user }, 12 | } = await supabase.auth.getUser() 13 | const { data: profile } = await supabase 14 | .from("profiles") 15 | .select("*") 16 | .eq("user_id", user?.id) 17 | .single() 18 | const api = new Api(profile.api_key) 19 | const { data: configuredDBs } = await api.getVectorDbs() 20 | 21 | return ( 22 |
23 |

Integrations

24 |
25 | 29 |
30 |
31 | ) 32 | } 33 | -------------------------------------------------------------------------------- /web/app/layout.tsx: -------------------------------------------------------------------------------- 1 | import "@/styles/globals.css" 2 | 3 | import { Metadata } from "next" 4 | import { cookies } from "next/headers" 5 | import { createRouteHandlerClient } from "@supabase/auth-helpers-nextjs" 6 | 7 | import { siteConfig } from "@/config/site" 8 | import { fontSans } from "@/lib/fonts" 9 | import PostHogClient from "@/lib/posthog" 10 | import { cn } from "@/lib/utils" 11 | import Analytics from "@/components/analytics" 12 | import { ThemeProvider } from "@/components/theme-provider" 13 | 14 | import Container from "./container" 15 | 16 | export const metadata: Metadata = { 17 | title: { 18 | default: siteConfig.name, 19 | template: `%s - ${siteConfig.name}`, 20 | }, 21 | description: siteConfig.description, 22 | themeColor: [ 23 | { media: "(prefers-color-scheme: light)", color: "white" }, 24 | { media: "(prefers-color-scheme: dark)", color: "black" }, 25 | ], 26 | icons: { 27 | icon: "/favicon.ico", 28 | shortcut: "/favicon-16x16.png", 29 | apple: "/apple-touch-icon.png", 30 | }, 31 | } 32 | 33 | interface RootLayoutProps { 34 | children: React.ReactNode 35 | } 36 | 37 | export const dynamic = "force-dynamic" 38 | 39 | export default async function RootLayout({ children }: RootLayoutProps) { 40 | const supabase = createRouteHandlerClient({ cookies }) 41 | const { 42 | data: { session }, 43 | } = await supabase.auth.getSession() 44 | 45 | if (process.env.NEXT_PUBLIC_POSTHOG_KEY) { 46 | PostHogClient() 47 | } 48 | 49 | return ( 50 | <> 51 | 52 | 53 | 57 | 58 | 64 | 65 |
66 |
67 | {children} 68 |
69 |
70 |
71 | 72 | 73 | 74 | 75 | ) 76 | } -------------------------------------------------------------------------------- /web/app/loading.tsx: -------------------------------------------------------------------------------- 1 | import { Spinner } from "@/components/ui/spinner" 2 | 3 | export default function Loading() { 4 | return ( 5 |
6 | 7 |
8 | ) 9 | } 10 | -------------------------------------------------------------------------------- /web/app/logs/page.tsx: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { createRouteHandlerClient } from "@supabase/auth-helpers-nextjs" 3 | 4 | import { Api } from "@/lib/api" 5 | import LogList from "@/components/log-list" 6 | 7 | export const dynamic = "force-dynamic" 8 | 9 | export default async function Agents({ 10 | searchParams, 11 | }: { 12 | searchParams: { 13 | id: string 14 | } 15 | }) { 16 | let agent = "" 17 | const supabase = createRouteHandlerClient({ cookies }) 18 | const { 19 | data: { user }, 20 | } = await supabase.auth.getUser() 21 | const { data: profile } = await supabase 22 | .from("profiles") 23 | .select("*") 24 | .eq("user_id", user?.id) 25 | .single() 26 | const api = new Api(profile.api_key) 27 | 28 | const { data: logs } = await api.getRuns({ limit: 50 }) 29 | console.log(logs) 30 | 31 | return ( 32 |
33 |

Logs

34 |
35 | 36 |
37 |
38 | ) 39 | } 40 | -------------------------------------------------------------------------------- /web/app/onboarding/page.tsx: -------------------------------------------------------------------------------- 1 | import OnboardingClientPage from "./client-page" 2 | 3 | export default function Onboarding() { 4 | return 5 | } 6 | -------------------------------------------------------------------------------- /web/app/settings/api-keys/client-page.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import { RxCopy } from "react-icons/rx" 4 | 5 | import { Profile } from "@/types/profile" 6 | import { Button } from "@/components/ui/button" 7 | import { Input } from "@/components/ui/input" 8 | import { useToast } from "@/components/ui/use-toast" 9 | 10 | interface ApiKeysPageProps { 11 | profile: Profile 12 | } 13 | 14 | const ApiKeysClientPage: React.FC = ({ profile }) => { 15 | const { toast } = useToast() 16 | const copyToClipboard = () => { 17 | navigator.clipboard.writeText(profile.api_key) 18 | toast({ description: "API key copied to clipboard" }) 19 | } 20 | 21 | return ( 22 |
23 |
24 |

API keys

25 |

26 | Use the following API key to connect to hmai via the REST API or 27 | SDK 28 |

29 |
30 |
31 | 32 | 40 |
41 |
42 | ) 43 | } 44 | 45 | export default ApiKeysClientPage 46 | -------------------------------------------------------------------------------- /web/app/settings/api-keys/page.tsx: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { createRouteHandlerClient } from "@supabase/auth-helpers-nextjs" 3 | 4 | import ApiKeysClientPage from "./client-page" 5 | 6 | export const dynamic = "force-dynamic" 7 | 8 | export default async function Settings() { 9 | const supabase = createRouteHandlerClient({ cookies }) 10 | const { 11 | data: { user }, 12 | } = await supabase.auth.getUser() 13 | const { data: profile } = await supabase 14 | .from("profiles") 15 | .select("*") 16 | .eq("user_id", user?.id) 17 | .single() 18 | 19 | const copyToClipboard = () => { 20 | navigator.clipboard.writeText(profile.api_key) 21 | } 22 | 23 | return 24 | } 25 | -------------------------------------------------------------------------------- /web/app/settings/appearance/client-page.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import ThemeToggle from "@/components/theme-toggle" 4 | 5 | export default function AppearanceClientPage() { 6 | return ( 7 |
8 |
9 |

Appearance

10 |

11 | Update the appearance of the hmai dashboard 12 |

13 |
14 | 15 |
16 | ) 17 | } 18 | -------------------------------------------------------------------------------- /web/app/settings/appearance/page.tsx: -------------------------------------------------------------------------------- 1 | import AppearanceClientPage from "./client-page" 2 | 3 | export default async function Settings() { 4 | return 5 | } 6 | -------------------------------------------------------------------------------- /web/app/settings/billing/client-page.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | declare global { 4 | namespace JSX { 5 | interface IntrinsicElements { 6 | "stripe-pricing-table": React.DetailedHTMLProps< 7 | React.HTMLAttributes, 8 | HTMLElement 9 | > 10 | } 11 | } 12 | } 13 | 14 | const BillingClientPage = () => { 15 | return ( 16 |
17 |
18 |

Billing

19 |

20 | Subscribe to a plan to get started. 21 |

22 |
23 |
24 | 29 |
30 |
31 | ) 32 | } 33 | 34 | export default BillingClientPage 35 | -------------------------------------------------------------------------------- /web/app/settings/billing/page.tsx: -------------------------------------------------------------------------------- 1 | import BillingClientPage from "./client-page" 2 | 3 | export const dynamic = "force-dynamic" 4 | 5 | export default async function Billing() { 6 | return 7 | } 8 | -------------------------------------------------------------------------------- /web/app/settings/layout.tsx: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { createRouteHandlerClient } from "@supabase/auth-helpers-nextjs" 3 | 4 | import { siteConfig } from "@/config/site" 5 | import { Toaster } from "@/components/ui/toaster" 6 | import { SettingsSidebar } from "@/components/account-sidebar" 7 | 8 | interface SettingsLayoutProps { 9 | children: React.ReactNode 10 | } 11 | 12 | export default async function SettingsLayout({ 13 | children, 14 | }: SettingsLayoutProps) { 15 | const supabase = createRouteHandlerClient({ cookies }) 16 | const { 17 | data: { user }, 18 | } = await supabase.auth.getUser() 19 | const { data: profile } = await supabase 20 | .from("profiles") 21 | .select("*") 22 | .eq("user_id", user?.id) 23 | .single() 24 | 25 | return ( 26 |
27 |

Settings

28 |
29 | 30 |
{children}
31 |
32 |
33 | ) 34 | } 35 | -------------------------------------------------------------------------------- /web/app/settings/page.tsx: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { createRouteHandlerClient } from "@supabase/auth-helpers-nextjs" 3 | 4 | import SettingsClientPage from "./client-page" 5 | 6 | export const dynamic = "force-dynamic" 7 | 8 | export default async function Settings() { 9 | const supabase = createRouteHandlerClient({ cookies }) 10 | const { 11 | data: { user }, 12 | } = await supabase.auth.getUser() 13 | const { data: profile } = await supabase 14 | .from("profiles") 15 | .select("*") 16 | .eq("user_id", user?.id) 17 | .single() 18 | 19 | return user ? : null 20 | } 21 | -------------------------------------------------------------------------------- /web/app/workflows/[id]/editor.ts: -------------------------------------------------------------------------------- 1 | import * as monaco from "monaco-editor" 2 | import { configureMonacoYaml } from "monaco-yaml" 3 | 4 | window.MonacoEnvironment = { 5 | getWorker(_, label) { 6 | switch (label) { 7 | case "editorWorkerService": 8 | return new Worker( 9 | new URL("monaco-editor/esm/vs/editor/editor.worker", import.meta.url) 10 | ) 11 | case "yaml": 12 | return new Worker(new URL("monaco-yaml/yaml.worker", import.meta.url)) 13 | default: 14 | throw new Error(`Unknown label ${label}`) 15 | } 16 | }, 17 | } 18 | 19 | configureMonacoYaml(monaco, { 20 | enableSchemaRequest: true, 21 | schemas: [ 22 | { 23 | fileMatch: ["*"], 24 | uri: `${process.env.NEXT_PUBLIC_SUPERAGENT_API_URL}/workflows/config/schema`, 25 | }, 26 | ], 27 | }) 28 | 29 | const modelUri = monaco.Uri.parse("config.yaml") 30 | let model = monaco.editor.createModel("initialValue", "yaml", modelUri) 31 | 32 | export function initCodeEditor( 33 | wrapperElement: HTMLElement, 34 | theme: string = "light" 35 | ) { 36 | return 
monaco.editor.create(wrapperElement, { 37 | automaticLayout: true, 38 | model, 39 | scrollbar: { 40 | vertical: "hidden", 41 | }, 42 | fontSize: 14, 43 | theme: theme === "dark" ? "vs-dark" : "vs-light", 44 | quickSuggestions: { 45 | other: true, 46 | comments: false, 47 | strings: true, 48 | }, 49 | tabSize: 2, 50 | guides: { 51 | highlightActiveIndentation: true, 52 | }, 53 | minimap: { enabled: false }, 54 | }) 55 | } -------------------------------------------------------------------------------- /web/app/workflows/[id]/function-calls.tsx: -------------------------------------------------------------------------------- 1 | import React from "react" 2 | 3 | interface FunctionCallsProps { 4 | functionCalls?: any[] 5 | } 6 | 7 | function FunctionCalls({ functionCalls }: FunctionCallsProps) { 8 | return ( 9 |
10 |

Run Logs

11 | 12 | {functionCalls?.map((call, index) => ( 13 |
14 |
15 | {call?.type == "function_call" && ( 16 |
17 | 18 | TOOL: {call.function} 19 | 20 |
21 | )} 22 | {call?.type == "start" && ( 23 | 24 | INPUT 25 | 26 | )} 27 | {call?.type == "end" && ( 28 | 29 | OUTPUT 30 | 31 | )} 32 |
33 | ))} 34 |
35 | ) 36 | } 37 | 38 | export default FunctionCalls 39 | -------------------------------------------------------------------------------- /web/app/workflows/[id]/overview.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import { Bar, BarChart, ResponsiveContainer, XAxis, YAxis } from "recharts" 4 | 5 | import { LogItem } from "@/types/log-item" 6 | import { Card, CardHeader } from "@/components/ui/card" 7 | 8 | export default function Overview({ data }: { profile: any; data: LogItem[] }) { 9 | const chartData = data.reduce( 10 | (acc: Record, logItem) => { 11 | const dateObject = new Date(logItem.received_at) 12 | const date = `${dateObject.getFullYear()}-${String( 13 | dateObject.getMonth() + 1 14 | ).padStart(2, "0")}-${String(dateObject.getDate()).padStart(2, "0")}` 15 | acc[date] = (acc[date] || 0) + 1 16 | 17 | return acc 18 | }, 19 | {} as Record 20 | ) 21 | 22 | const chartDataArray = Object.entries(chartData).map(([date, count]) => ({ 23 | date, 24 | count, 25 | })) 26 | 27 | return ( 28 |
29 |
30 | 31 | Requests 32 | 33 | 48 | 49 | 50 | 51 | 52 | 53 |
54 |

55 |

{data.length}

56 |

Total requests

57 |
58 |
59 |
60 |
61 | ) 62 | } 63 | -------------------------------------------------------------------------------- /web/app/workflows/[id]/page.tsx: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { createRouteHandlerClient } from "@supabase/auth-helpers-nextjs" 3 | 4 | import { Api } from "@/lib/api" 5 | 6 | import WorkflowDetail from "./workflow" 7 | 8 | export default async function Assistant({ 9 | params, 10 | }: { 11 | params: { id: string } 12 | }) { 13 | const supabase = createRouteHandlerClient({ cookies }) 14 | const { id } = params 15 | const { 16 | data: { user }, 17 | } = await supabase.auth.getUser() 18 | const { data: profile } = await supabase 19 | .from("profiles") 20 | .select("*") 21 | .eq("user_id", user?.id) 22 | .single() 23 | const api = new Api(profile.api_key) 24 | const { data: workflow } = await api.getWorkflowById(id) 25 | const { data: llms } = await api.getLLMs() 26 | 27 | return workflow ? ( 28 | 29 | ) : ( 30 |
31 |

No assistant selected

32 |

33 | View details about an assistant by navigating the list to the left 34 |

35 |
36 | ) 37 | } 38 | -------------------------------------------------------------------------------- /web/app/workflows/header.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import { useRouter } from "next/navigation" 4 | import { TbPlus } from "react-icons/tb" 5 | import { useAsyncFn } from "react-use" 6 | 7 | import { Profile } from "@/types/profile" 8 | import { initialSamlValue } from "@/config/saml" 9 | import { Api } from "@/lib/api" 10 | import { Button } from "@/components/ui/button" 11 | import { Spinner } from "@/components/ui/spinner" 12 | 13 | export default function Header({ profile }: { profile: Profile }) { 14 | const api = new Api(profile.api_key) 15 | const router = useRouter() 16 | const [{ loading }, createWorkflow] = useAsyncFn(async () => { 17 | const { data: workflow } = await api.createWorkflow({ 18 | name: "My Workflow", 19 | description: "My new workflow", 20 | }) 21 | await api.generateWorkflow(workflow.id, initialSamlValue) 22 | router.push(`/workflows/${workflow.id}`) 23 | }) 24 | 25 | return ( 26 |
27 | Workflows 28 | 32 |
33 | ) 34 | } 35 | -------------------------------------------------------------------------------- /web/app/workflows/layout.tsx: -------------------------------------------------------------------------------- 1 | interface AssistantsLayoutProps { 2 | children: React.ReactNode 3 | params: { slug: string } 4 | } 5 | 6 | export default async function AssistantsLayout({ 7 | params, 8 | children, 9 | }: AssistantsLayoutProps) { 10 | return ( 11 |
12 |
{children}
13 |
14 | ) 15 | } 16 | -------------------------------------------------------------------------------- /web/app/workflows/page.tsx: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { createRouteHandlerClient } from "@supabase/auth-helpers-nextjs" 3 | 4 | import { Api } from "@/lib/api" 5 | 6 | import WorkflowCards from "./cards" 7 | import Header from "./header" 8 | 9 | export const dynamic = "force-dynamic" 10 | 11 | export default async function Agents() { 12 | const supabase = createRouteHandlerClient({ cookies }) 13 | const { 14 | data: { user }, 15 | } = await supabase.auth.getUser() 16 | const { data: profile } = await supabase 17 | .from("profiles") 18 | .select("*") 19 | .eq("user_id", user?.id) 20 | .single() 21 | const api = new Api(profile.api_key) 22 | 23 | const { data: workflows } = await api.getWorkflows() 24 | 25 | return ( 26 |
27 |
28 | 29 |
30 | ) 31 | } 32 | -------------------------------------------------------------------------------- /web/bun.lockb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/HMaiLab/HMAI/79ddaa324f41f7bf81236991c37c5007eb9c02f8/web/bun.lockb -------------------------------------------------------------------------------- /web/components.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://ui.shadcn.com/schema.json", 3 | "style": "default", 4 | "tailwind": { 5 | "config": "tailwind.config.js", 6 | "css": "app/globals.css", 7 | "baseColor": "slate", 8 | "cssVariables": true 9 | }, 10 | "rsc": false, 11 | "aliases": { 12 | "utils": "@/lib/utils", 13 | "components": "@/components" 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /web/components/account-sidebar.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import Link from "next/link" 4 | import { usePathname } from "next/navigation" 5 | 6 | import { siteConfig } from "@/config/site" 7 | import { stripe } from "@/lib/stripe" 8 | import { cn } from "@/lib/utils" 9 | import { Button, buttonVariants } from "@/components/ui/button" 10 | 11 | interface SettingsSidebarProps extends React.HTMLAttributes { 12 | profile: any 13 | items: { 14 | id: string 15 | href: string 16 | title: string 17 | disabled?: boolean 18 | }[] 19 | } 20 | 21 | export function SettingsSidebar({ 22 | profile, 23 | className, 24 | items, 25 | ...props 26 | }: SettingsSidebarProps) { 27 | const pathname = usePathname() 28 | 29 | return ( 30 | 68 | ) 69 | } 70 | -------------------------------------------------------------------------------- /web/components/analytics.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import { useEffect } from "react" 4 | import { 
usePathname, useSearchParams } from "next/navigation" 5 | 6 | import { analytics } from "@/lib/segment" 7 | 8 | export default function Analytics() { 9 | const pathname = usePathname() 10 | const searchParams = useSearchParams() 11 | 12 | useEffect(() => { 13 | analytics.page() 14 | }, [pathname, searchParams]) 15 | 16 | return null 17 | } 18 | -------------------------------------------------------------------------------- /web/components/logo.tsx: -------------------------------------------------------------------------------- 1 | import NextImage from "next/image" 2 | 3 | export default function Logo({ 4 | width = 38, 5 | height = 38, 6 | }: { 7 | width?: number 8 | height?: number 9 | }) { 10 | return ( 11 | 18 | ) 19 | } 20 | -------------------------------------------------------------------------------- /web/components/markdown.tsx: -------------------------------------------------------------------------------- 1 | import { FC, memo } from "react" 2 | import ReactMarkdown, { Options } from "react-markdown" 3 | 4 | export const MemoizedReactMarkdown: FC = memo( 5 | ReactMarkdown, 6 | (prevProps, nextProps) => 7 | prevProps.children === nextProps.children && 8 | prevProps.className === nextProps.className 9 | ) 10 | -------------------------------------------------------------------------------- /web/components/non-ideal-state.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | 3 | import { Card, CardContent } from "./ui/card" 4 | 5 | interface NonIdealState { 6 | title: string 7 | description: string 8 | icon: React.ComponentType<{ size?: number }> 9 | } 10 | 11 | export default function NonIdealState({ 12 | title, 13 | icon: Icon, 14 | description, 15 | }: NonIdealState) { 16 | return ( 17 |
18 | 19 |
20 |

{title}

21 |

{description}

22 |
23 |
24 | ) 25 | } 26 | -------------------------------------------------------------------------------- /web/components/sidebar.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import NextLink from "next/link" 4 | import { usePathname } from "next/navigation" 5 | import { createClientComponentClient } from "@supabase/auth-helpers-nextjs" 6 | import { useAsync } from "react-use" 7 | 8 | import { siteConfig } from "@/config/site" 9 | 10 | import Logo from "./logo" 11 | import { Button } from "./ui/button" 12 | 13 | export default function Sidebar() { 14 | const supabase = createClientComponentClient() 15 | const { value: session } = useAsync(async () => { 16 | const { 17 | data: { session }, 18 | } = await supabase.auth.getSession() 19 | return session 20 | }, []) 21 | const pathname = usePathname() 22 | 23 | return ( 24 |
29 |
30 | 31 |
32 | {siteConfig.mainNav.map((navItem) => ( 33 | 34 | 40 | 41 | ))} 42 |
43 |
44 |
45 | {siteConfig.footerNav.map((navItem) => ( 46 | 47 | 53 | 54 | ))} 55 |
56 |
57 | ) 58 | } 59 | -------------------------------------------------------------------------------- /web/components/theme-provider.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import * as React from "react" 4 | import { ThemeProvider as NextThemesProvider } from "next-themes" 5 | import { type ThemeProviderProps } from "next-themes/dist/types" 6 | 7 | export function ThemeProvider({ children, ...props }: ThemeProviderProps) { 8 | return {children} 9 | } 10 | -------------------------------------------------------------------------------- /web/components/ui/accordion.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | import * as AccordionPrimitive from "@radix-ui/react-accordion" 3 | import { ChevronDown } from "lucide-react" 4 | 5 | import { cn } from "@/lib/utils" 6 | 7 | const Accordion = AccordionPrimitive.Root 8 | 9 | const AccordionItem = React.forwardRef< 10 | React.ElementRef, 11 | React.ComponentPropsWithoutRef 12 | >(({ className, ...props }, ref) => ( 13 | 18 | )) 19 | AccordionItem.displayName = "AccordionItem" 20 | 21 | const AccordionTrigger = React.forwardRef< 22 | React.ElementRef, 23 | React.ComponentPropsWithoutRef 24 | >(({ className, children, ...props }, ref) => ( 25 | 26 | svg]:rotate-180", 30 | className 31 | )} 32 | {...props} 33 | > 34 | {children} 35 | 36 | 37 | 38 | )) 39 | AccordionTrigger.displayName = AccordionPrimitive.Trigger.displayName 40 | 41 | const AccordionContent = React.forwardRef< 42 | React.ElementRef, 43 | React.ComponentPropsWithoutRef 44 | >(({ className, children, ...props }, ref) => ( 45 | 50 |
{children}
51 |
52 | )) 53 | 54 | AccordionContent.displayName = AccordionPrimitive.Content.displayName 55 | 56 | export { Accordion, AccordionItem, AccordionTrigger, AccordionContent } 57 | -------------------------------------------------------------------------------- /web/components/ui/alert.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | import { cva, type VariantProps } from "class-variance-authority" 3 | 4 | import { cn } from "@/lib/utils" 5 | 6 | const alertVariants = cva( 7 | "relative w-full rounded-lg border p-4 [&>svg~*]:pl-7 [&>svg+div]:translate-y-[-3px] [&>svg]:absolute [&>svg]:left-4 [&>svg]:top-4 [&>svg]:text-foreground", 8 | { 9 | variants: { 10 | variant: { 11 | default: "bg-background text-foreground", 12 | destructive: 13 | "border-destructive/50 text-destructive dark:border-destructive [&>svg]:text-destructive bg-destructive/10", 14 | }, 15 | }, 16 | defaultVariants: { 17 | variant: "default", 18 | }, 19 | } 20 | ) 21 | 22 | const Alert = React.forwardRef< 23 | HTMLDivElement, 24 | React.HTMLAttributes & VariantProps 25 | >(({ className, variant, ...props }, ref) => ( 26 |
32 | )) 33 | Alert.displayName = "Alert" 34 | 35 | const AlertTitle = React.forwardRef< 36 | HTMLParagraphElement, 37 | React.HTMLAttributes 38 | >(({ className, ...props }, ref) => ( 39 |
44 | )) 45 | AlertTitle.displayName = "AlertTitle" 46 | 47 | const AlertDescription = React.forwardRef< 48 | HTMLParagraphElement, 49 | React.HTMLAttributes 50 | >(({ className, ...props }, ref) => ( 51 |
56 | )) 57 | AlertDescription.displayName = "AlertDescription" 58 | 59 | export { Alert, AlertTitle, AlertDescription } 60 | -------------------------------------------------------------------------------- /web/components/ui/avatar.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | import * as AvatarPrimitive from "@radix-ui/react-avatar" 3 | 4 | import { cn } from "@/lib/utils" 5 | 6 | const Avatar = React.forwardRef< 7 | React.ElementRef, 8 | React.ComponentPropsWithoutRef 9 | >(({ className, ...props }, ref) => ( 10 | 18 | )) 19 | Avatar.displayName = AvatarPrimitive.Root.displayName 20 | 21 | const AvatarImage = React.forwardRef< 22 | React.ElementRef, 23 | React.ComponentPropsWithoutRef 24 | >(({ className, ...props }, ref) => ( 25 | 30 | )) 31 | AvatarImage.displayName = AvatarPrimitive.Image.displayName 32 | 33 | const AvatarFallback = React.forwardRef< 34 | React.ElementRef, 35 | React.ComponentPropsWithoutRef 36 | >(({ className, ...props }, ref) => ( 37 | 45 | )) 46 | AvatarFallback.displayName = AvatarPrimitive.Fallback.displayName 47 | 48 | export { Avatar, AvatarImage, AvatarFallback } 49 | -------------------------------------------------------------------------------- /web/components/ui/badge.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | import { cva, type VariantProps } from "class-variance-authority" 3 | 4 | import { cn } from "@/lib/utils" 5 | 6 | const badgeVariants = cva( 7 | "inline-flex items-center rounded-full border px-2.5 py-0.5 text-xs font-semibold transition-colors focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2", 8 | { 9 | variants: { 10 | variant: { 11 | default: 12 | "border-transparent bg-primary text-primary-foreground hover:bg-primary/80", 13 | secondary: 14 | "border-transparent bg-secondary text-secondary-foreground hover:bg-secondary/80", 15 | destructive: 
16 | "border-transparent bg-destructive text-destructive-foreground hover:bg-destructive/80", 17 | outline: "text-foreground", 18 | }, 19 | }, 20 | defaultVariants: { 21 | variant: "default", 22 | }, 23 | } 24 | ) 25 | 26 | export interface BadgeProps 27 | extends React.HTMLAttributes, 28 | VariantProps {} 29 | 30 | function Badge({ className, variant, ...props }: BadgeProps) { 31 | return ( 32 |
33 | ) 34 | } 35 | 36 | export { Badge, badgeVariants } 37 | -------------------------------------------------------------------------------- /web/components/ui/button.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | import { Slot } from "@radix-ui/react-slot" 3 | import { cva, type VariantProps } from "class-variance-authority" 4 | 5 | import { cn } from "@/lib/utils" 6 | 7 | const buttonVariants = cva( 8 | "inline-flex items-center justify-center rounded-md text-sm font-medium transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:opacity-50 disabled:pointer-events-none ring-offset-background", 9 | { 10 | variants: { 11 | variant: { 12 | active: "bg-primary text-primary-foreground", 13 | default: "bg-primary text-primary-foreground hover:bg-primary/90", 14 | destructive: 15 | "bg-destructive text-destructive-foreground hover:bg-destructive/90", 16 | outline: 17 | "border border-input hover:bg-accent hover:text-accent-foreground", 18 | secondary: 19 | "bg-secondary text-secondary-foreground hover:bg-secondary/80", 20 | ghost: "hover:bg-accent hover:text-accent-foreground", 21 | link: "underline-offset-4 hover:underline text-primary", 22 | }, 23 | size: { 24 | default: "h-10 py-2 px-4", 25 | xs: "h-7 px-2 rounded-md", 26 | sm: "h-9 px-3 rounded-md", 27 | lg: "h-11 px-8 rounded-md", 28 | icon: "h-10 w-10", 29 | pill: "h-7 w-7", 30 | }, 31 | }, 32 | defaultVariants: { 33 | variant: "default", 34 | size: "default", 35 | }, 36 | } 37 | ) 38 | 39 | export interface ButtonProps 40 | extends React.ButtonHTMLAttributes, 41 | VariantProps { 42 | asChild?: boolean 43 | } 44 | 45 | const Button = React.forwardRef( 46 | ({ className, variant, size, asChild = false, ...props }, ref) => { 47 | const Comp = asChild ? 
Slot : "button" 48 | return ( 49 | 54 | ) 55 | } 56 | ) 57 | Button.displayName = "Button" 58 | 59 | export { Button, buttonVariants } 60 | -------------------------------------------------------------------------------- /web/components/ui/card.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | 3 | import { cn } from "@/lib/utils" 4 | 5 | const Card = React.forwardRef< 6 | HTMLDivElement, 7 | React.HTMLAttributes 8 | >(({ className, ...props }, ref) => ( 9 |
17 | )) 18 | Card.displayName = "Card" 19 | 20 | const CardHeader = React.forwardRef< 21 | HTMLDivElement, 22 | React.HTMLAttributes 23 | >(({ className, ...props }, ref) => ( 24 |
29 | )) 30 | CardHeader.displayName = "CardHeader" 31 | 32 | const CardTitle = React.forwardRef< 33 | HTMLParagraphElement, 34 | React.HTMLAttributes 35 | >(({ className, ...props }, ref) => ( 36 |

44 | )) 45 | CardTitle.displayName = "CardTitle" 46 | 47 | const CardDescription = React.forwardRef< 48 | HTMLParagraphElement, 49 | React.HTMLAttributes 50 | >(({ className, ...props }, ref) => ( 51 |

56 | )) 57 | CardDescription.displayName = "CardDescription" 58 | 59 | const CardContent = React.forwardRef< 60 | HTMLDivElement, 61 | React.HTMLAttributes 62 | >(({ className, ...props }, ref) => ( 63 |

64 | )) 65 | CardContent.displayName = "CardContent" 66 | 67 | const CardFooter = React.forwardRef< 68 | HTMLDivElement, 69 | React.HTMLAttributes 70 | >(({ className, ...props }, ref) => ( 71 |
76 | )) 77 | CardFooter.displayName = "CardFooter" 78 | 79 | export { Card, CardHeader, CardFooter, CardTitle, CardDescription, CardContent } 80 | -------------------------------------------------------------------------------- /web/components/ui/checkbox.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | import * as CheckboxPrimitive from "@radix-ui/react-checkbox" 3 | import { Check } from "lucide-react" 4 | 5 | import { cn } from "@/lib/utils" 6 | 7 | const Checkbox = React.forwardRef< 8 | React.ElementRef, 9 | React.ComponentPropsWithoutRef 10 | >(({ className, ...props }, ref) => ( 11 | 19 | 22 | 23 | 24 | 25 | )) 26 | Checkbox.displayName = CheckboxPrimitive.Root.displayName 27 | 28 | export { Checkbox } 29 | -------------------------------------------------------------------------------- /web/components/ui/input.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | 3 | import { cn } from "@/lib/utils" 4 | 5 | export interface InputProps 6 | extends React.InputHTMLAttributes {} 7 | 8 | const Input = React.forwardRef( 9 | ({ className, type, ...props }, ref) => { 10 | return ( 11 | 20 | ) 21 | } 22 | ) 23 | Input.displayName = "Input" 24 | 25 | export { Input } 26 | -------------------------------------------------------------------------------- /web/components/ui/label.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | import * as LabelPrimitive from "@radix-ui/react-label" 3 | import { cva, type VariantProps } from "class-variance-authority" 4 | 5 | import { cn } from "@/lib/utils" 6 | 7 | const labelVariants = cva( 8 | "text-sm font-medium leading-none peer-disabled:cursor-not-allowed peer-disabled:opacity-70" 9 | ) 10 | 11 | const Label = React.forwardRef< 12 | React.ElementRef, 13 | React.ComponentPropsWithoutRef & 14 | VariantProps 15 | 
>(({ className, ...props }, ref) => ( 16 | 21 | )) 22 | Label.displayName = LabelPrimitive.Root.displayName 23 | 24 | export { Label } 25 | -------------------------------------------------------------------------------- /web/components/ui/popover.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | import * as PopoverPrimitive from "@radix-ui/react-popover" 3 | 4 | import { cn } from "@/lib/utils" 5 | 6 | const Popover = PopoverPrimitive.Root 7 | 8 | const PopoverTrigger = PopoverPrimitive.Trigger 9 | 10 | const PopoverContent = React.forwardRef< 11 | React.ElementRef, 12 | React.ComponentPropsWithoutRef 13 | >(({ className, align = "center", sideOffset = 4, ...props }, ref) => ( 14 | 15 | 25 | 26 | )) 27 | PopoverContent.displayName = PopoverPrimitive.Content.displayName 28 | 29 | export { Popover, PopoverTrigger, PopoverContent } 30 | -------------------------------------------------------------------------------- /web/components/ui/radio-group.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | import * as RadioGroupPrimitive from "@radix-ui/react-radio-group" 3 | import { Circle } from "lucide-react" 4 | 5 | import { cn } from "@/lib/utils" 6 | 7 | const RadioGroup = React.forwardRef< 8 | React.ElementRef, 9 | React.ComponentPropsWithoutRef 10 | >(({ className, ...props }, ref) => { 11 | return ( 12 | 17 | ) 18 | }) 19 | RadioGroup.displayName = RadioGroupPrimitive.Root.displayName 20 | 21 | const RadioGroupItem = React.forwardRef< 22 | React.ElementRef, 23 | React.ComponentPropsWithoutRef 24 | >(({ className, children, ...props }, ref) => { 25 | return ( 26 | 34 | 35 | 36 | 37 | 38 | ) 39 | }) 40 | RadioGroupItem.displayName = RadioGroupPrimitive.Item.displayName 41 | 42 | export { RadioGroup, RadioGroupItem } 43 | -------------------------------------------------------------------------------- 
/web/components/ui/resizable.tsx: -------------------------------------------------------------------------------- 1 | import { GripVertical } from "lucide-react" 2 | import * as ResizablePrimitive from "react-resizable-panels" 3 | 4 | import { cn } from "@/lib/utils" 5 | 6 | const ResizablePanelGroup = ({ 7 | className, 8 | ...props 9 | }: React.ComponentProps) => ( 10 | 17 | ) 18 | 19 | const ResizablePanel = ResizablePrimitive.Panel 20 | 21 | const ResizableHandle = ({ 22 | withHandle, 23 | className, 24 | ...props 25 | }: React.ComponentProps & { 26 | withHandle?: boolean 27 | }) => ( 28 | div]:rotate-90", 31 | className 32 | )} 33 | {...props} 34 | > 35 | {withHandle && ( 36 |
37 | 38 |
39 | )} 40 |
41 |
42 | ) 43 | 44 | export { ResizablePanelGroup, ResizablePanel, ResizableHandle } 45 | -------------------------------------------------------------------------------- /web/components/ui/scroll-area.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | import * as ScrollAreaPrimitive from "@radix-ui/react-scroll-area" 3 | 4 | import { cn } from "@/lib/utils" 5 | 6 | const ScrollArea = React.forwardRef< 7 | React.ElementRef, 8 | React.ComponentPropsWithoutRef 9 | >(({ className, children, ...props }, ref) => ( 10 | 15 | 16 | {children} 17 | 18 | 19 | 20 | 21 | )) 22 | ScrollArea.displayName = ScrollAreaPrimitive.Root.displayName 23 | 24 | const ScrollBar = React.forwardRef< 25 | React.ElementRef, 26 | React.ComponentPropsWithoutRef 27 | >(({ className, orientation = "vertical", ...props }, ref) => ( 28 | 41 | 42 | 43 | )) 44 | ScrollBar.displayName = ScrollAreaPrimitive.ScrollAreaScrollbar.displayName 45 | 46 | export { ScrollArea, ScrollBar } 47 | -------------------------------------------------------------------------------- /web/components/ui/separator.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | import * as SeparatorPrimitive from "@radix-ui/react-separator" 3 | 4 | import { cn } from "@/lib/utils" 5 | 6 | const Separator = React.forwardRef< 7 | React.ElementRef, 8 | React.ComponentPropsWithoutRef 9 | >( 10 | ( 11 | { className, orientation = "horizontal", decorative = true, ...props }, 12 | ref 13 | ) => ( 14 | 25 | ) 26 | ) 27 | Separator.displayName = SeparatorPrimitive.Root.displayName 28 | 29 | export { Separator } 30 | -------------------------------------------------------------------------------- /web/components/ui/skeleton.tsx: -------------------------------------------------------------------------------- 1 | import { cn } from "@/lib/utils" 2 | 3 | function Skeleton({ 4 | className, 5 | ...props 6 | }: 
React.HTMLAttributes) { 7 | return ( 8 |
12 | ) 13 | } 14 | 15 | export { Skeleton } 16 | -------------------------------------------------------------------------------- /web/components/ui/spinner.tsx: -------------------------------------------------------------------------------- 1 | import { Loader2 } from "lucide-react" 2 | 3 | export const Icons = { 4 | spinner: Loader2, 5 | } 6 | 7 | export function Spinner() { 8 | return 9 | } 10 | -------------------------------------------------------------------------------- /web/components/ui/tabs.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | import * as TabsPrimitive from "@radix-ui/react-tabs" 3 | 4 | import { cn } from "@/lib/utils" 5 | 6 | const Tabs = TabsPrimitive.Root 7 | 8 | const TabsList = React.forwardRef< 9 | React.ElementRef, 10 | React.ComponentPropsWithoutRef 11 | >(({ className, ...props }, ref) => ( 12 | 20 | )) 21 | TabsList.displayName = TabsPrimitive.List.displayName 22 | 23 | const TabsTrigger = React.forwardRef< 24 | React.ElementRef, 25 | React.ComponentPropsWithoutRef 26 | >(({ className, ...props }, ref) => ( 27 | 35 | )) 36 | TabsTrigger.displayName = TabsPrimitive.Trigger.displayName 37 | 38 | const TabsContent = React.forwardRef< 39 | React.ElementRef, 40 | React.ComponentPropsWithoutRef 41 | >(({ className, ...props }, ref) => ( 42 | 50 | )) 51 | TabsContent.displayName = TabsPrimitive.Content.displayName 52 | 53 | export { Tabs, TabsList, TabsTrigger, TabsContent } 54 | -------------------------------------------------------------------------------- /web/components/ui/textarea.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | 3 | import { cn } from "@/lib/utils" 4 | 5 | export interface TextareaProps 6 | extends React.TextareaHTMLAttributes {} 7 | 8 | const Textarea = React.forwardRef( 9 | ({ className, ...props }, ref) => { 10 | return ( 11 |