├── .github ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── ISSUE_TEMPLATE │ ├── bug-report.yaml │ ├── config.yml │ ├── docs.yml │ └── feature-request.yaml ├── PULL_REQUEST_TEMPLATE.md └── workflows │ ├── ci.yml │ └── lint.yml ├── .gitignore ├── .husky └── pre-commit ├── LICENSE ├── README.md ├── fern ├── apis │ ├── legacy │ │ ├── generators.yml │ │ └── openapi │ │ │ └── openapi.yaml │ └── prod │ │ ├── generators.yml │ │ └── openapi │ │ └── openapi.yaml ├── assets │ ├── favicon.png │ ├── logo-dark.png │ └── logo-light.png ├── docs.yml ├── fern.config.json ├── mdx │ ├── apps │ │ ├── image-gen.mdx │ │ └── stock.mdx │ ├── authentication.mdx │ ├── concepts.mdx │ ├── dependencies │ │ └── supabase.mdx │ ├── deploy │ │ ├── docker-compose.mdx │ │ ├── local.mdx │ │ ├── render.mdx │ │ └── replit.mdx │ ├── intro.mdx │ ├── logging │ │ ├── agentops.mdx │ │ └── langfuse.mdx │ ├── memory │ │ └── motorhead.mdx │ ├── platform.mdx │ ├── rag │ │ ├── super-rag-saml.mdx │ │ └── super-rag-sdk.mdx │ ├── saml │ │ ├── intro.mdx │ │ └── structured_outputs.mdx │ ├── sdk │ │ ├── agent_llm_params.mdx │ │ ├── basic_example.mdx │ │ ├── chat_with_datasources.mdx │ │ ├── chat_with_tools.mdx │ │ ├── configure_vector_database.mdx │ │ ├── human_handoff.mdx │ │ ├── installation.mdx │ │ ├── local_tools.mdx │ │ ├── structured_outputs.mdx │ │ └── workflows.mdx │ └── vector-stores │ │ ├── astra.mdx │ │ ├── pinecone.mdx │ │ ├── qdrant.mdx │ │ ├── supabase.mdx │ │ └── weaviate.mdx └── versions │ ├── legacy.yml │ └── prod.yml ├── libs ├── .docker │ ├── .dockerignore │ ├── .env.example │ ├── README.md │ ├── docker-compose.yml │ ├── external │ │ ├── observabillity │ │ │ └── langfuse │ │ │ │ ├── .env.example │ │ │ │ ├── docker-compose.yml │ │ │ │ ├── run.sh │ │ │ │ ├── stop.sh │ │ │ │ └── uninstall.sh │ │ └── vector-store │ │ │ └── weaviate │ │ │ ├── .env.example │ │ │ ├── docker-compose.yml │ │ │ ├── run.sh │ │ │ ├── stop.sh │ │ │ └── uninstall.sh │ ├── run.sh │ ├── stop.sh │ ├── superagent │ │ ├── db │ │ │ ├── 
.env.example │ │ │ ├── docker-compose.pgadmin.yml │ │ │ ├── docker-compose.pgdb.yml │ │ │ ├── run.sh │ │ │ ├── stop.sh │ │ │ └── uninstall.sh │ │ └── motorhead │ │ │ └── docker-compose.motorhead.yml │ ├── ui │ │ ├── docker-compose.ui.yml │ │ └── supabase │ │ │ ├── .env.example │ │ │ ├── .gitignore │ │ │ ├── README.md │ │ │ ├── docker-compose.yml │ │ │ ├── run.sh │ │ │ ├── setup.sh │ │ │ ├── stop.sh │ │ │ ├── uninstall.sh │ │ │ └── volumes │ │ │ ├── api │ │ │ └── kong.yml │ │ │ ├── db │ │ │ ├── init │ │ │ │ └── data.sql │ │ │ ├── jwt.sql │ │ │ ├── logs.sql │ │ │ ├── realtime.sql │ │ │ ├── roles.sql │ │ │ └── webhooks.sql │ │ │ ├── functions │ │ │ └── main │ │ │ │ └── index.ts │ │ │ └── logs │ │ │ └── vector.yml │ └── uninstall.sh ├── embed │ ├── .gitignore │ ├── dist │ │ ├── web.js │ │ └── web.js.LICENSE.txt │ ├── package-lock.json │ ├── package.json │ ├── src │ │ └── index.js │ ├── test.html │ └── webpack.config.js ├── superagent │ ├── .dockerignore │ ├── .env.example │ ├── .flake8 │ ├── .gitignore │ ├── Dockerfile │ ├── Makefile │ ├── app │ │ ├── __init__.py │ │ ├── agents │ │ │ ├── __init__.py │ │ │ ├── base.py │ │ │ ├── langchain.py │ │ │ ├── llm.py │ │ │ └── openai.py │ │ ├── api │ │ │ ├── __init__.py │ │ │ ├── agents.py │ │ │ ├── api_keys.py │ │ │ ├── api_user.py │ │ │ ├── datasources.py │ │ │ ├── llms.py │ │ │ ├── tools.py │ │ │ ├── vector_dbs.py │ │ │ ├── workflow_configs │ │ │ │ ├── api │ │ │ │ │ ├── api_agent_manager.py │ │ │ │ │ ├── api_agent_tool_manager.py │ │ │ │ │ ├── api_datasource_superrag_manager.py │ │ │ │ │ ├── api_manager.py │ │ │ │ │ └── base.py │ │ │ │ ├── data_transformer.py │ │ │ │ ├── exceptions.py │ │ │ │ ├── processors │ │ │ │ │ ├── agent_processor.py │ │ │ │ │ ├── base.py │ │ │ │ │ ├── openai.py │ │ │ │ │ ├── processor.py │ │ │ │ │ ├── superagent.py │ │ │ │ │ └── utils.py │ │ │ │ ├── saml_schema.py │ │ │ │ ├── validator.py │ │ │ │ └── workflow_configs.py │ │ │ └── workflows.py │ │ ├── datasource │ │ │ ├── __init__.py │ │ │ ├── flow.py │ 
│ │ ├── loader.py │ │ │ └── types.py │ │ ├── main.py │ │ ├── memory │ │ │ ├── __init__.py │ │ │ ├── base.py │ │ │ ├── buffer_memory.py │ │ │ ├── memory_stores │ │ │ │ ├── base.py │ │ │ │ └── redis.py │ │ │ └── message.py │ │ ├── models │ │ │ ├── __init__.py │ │ │ ├── request.py │ │ │ ├── response.py │ │ │ └── tools.py │ │ ├── routers.py │ │ ├── tools │ │ │ ├── __init__.py │ │ │ ├── advanced_scraper.py │ │ │ ├── agent.py │ │ │ ├── algolia.py │ │ │ ├── base.py │ │ │ ├── bing_search.py │ │ │ ├── browser.py │ │ │ ├── chatgpt.py │ │ │ ├── code_interpreter.py │ │ │ ├── datasource.py │ │ │ ├── e2b.py │ │ │ ├── flow.py │ │ │ ├── function.py │ │ │ ├── google_search.py │ │ │ ├── gpt_vision.py │ │ │ ├── hand_off.py │ │ │ ├── http.py │ │ │ ├── metaphor.py │ │ │ ├── openapi.py │ │ │ ├── openbb.py │ │ │ ├── prompts.py │ │ │ ├── pubmed.py │ │ │ ├── replicate.py │ │ │ ├── scraper.py │ │ │ ├── sec.py │ │ │ ├── superrag.py │ │ │ ├── tavily.py │ │ │ ├── tts_1.py │ │ │ ├── wolfram_alpha.py │ │ │ └── zapier.py │ │ ├── utils │ │ │ ├── __init__.py │ │ │ ├── analytics.py │ │ │ ├── api.py │ │ │ ├── callbacks.py │ │ │ ├── helpers.py │ │ │ ├── llm.py │ │ │ └── prisma.py │ │ ├── vectorstores │ │ │ ├── __init__.py │ │ │ ├── abstract.py │ │ │ ├── astra.py │ │ │ ├── astra_client.py │ │ │ ├── base.py │ │ │ ├── embeddings.py │ │ │ ├── pinecone.py │ │ │ ├── qdrant.py │ │ │ ├── supabase.py │ │ │ └── weaviate.py │ │ └── workflows │ │ │ ├── __init__.py │ │ │ └── base.py │ ├── lint-and-format.sh │ ├── poetry.lock │ ├── prisma │ │ ├── migrations │ │ │ ├── 20230822214343_agent_llm │ │ │ │ └── migration.sql │ │ │ ├── 20230823195402_add_llm │ │ │ │ └── migration.sql │ │ │ ├── 20230823200614_change_ids │ │ │ │ └── migration.sql │ │ │ ├── 20230823211516_llm_options_default │ │ │ │ └── migration.sql │ │ │ ├── 20230823212925_remove_llm_options_default │ │ │ │ └── migration.sql │ │ │ ├── 20230824065536_add_agent_prompt │ │ │ │ └── migration.sql │ │ │ ├── 20230824070125_llm_agent_one_to_many │ │ │ │ └── 
migration.sql │ │ │ ├── 20230824070755_add_agent_llm_mapping │ │ │ │ └── migration.sql │ │ │ ├── 20230824084308_api_user │ │ │ │ └── migration.sql │ │ │ ├── 20230824103528_api_user_token_optional │ │ │ │ └── migration.sql │ │ │ ├── 20230824110611_add_api_user_to_models │ │ │ │ └── migration.sql │ │ │ ├── 20230824114232_add_datasources │ │ │ │ └── migration.sql │ │ │ ├── 20230831081114_datasource_metadata │ │ │ │ └── migration.sql │ │ │ ├── 20230831092903_datasource_metadata_string │ │ │ │ └── migration.sql │ │ │ ├── 20230831105225_add_datasource_types │ │ │ │ └── migration.sql │ │ │ ├── 20230901072519_agent_tools │ │ │ │ └── migration.sql │ │ │ ├── 20230901115947_tool_bing_search │ │ │ │ └── migration.sql │ │ │ ├── 20230901124505_remove_redundant_fields │ │ │ │ └── migration.sql │ │ │ ├── 20230901182450_pubmed_tool │ │ │ │ └── migration.sql │ │ │ ├── 20230901183619_tool_metadata_optional │ │ │ │ └── migration.sql │ │ │ ├── 20230901184227_tool_metadata_mandatory │ │ │ │ └── migration.sql │ │ │ ├── 20230904062421_add_worflow │ │ │ │ └── migration.sql │ │ │ ├── 20230904063106_workflow_api_user │ │ │ │ └── migration.sql │ │ │ ├── 20230904074324_add_workflow_llm │ │ │ │ └── migration.sql │ │ │ ├── 20230904082445_fix_workflow_misspelling │ │ │ │ └── migration.sql │ │ │ ├── 20230904083153_remove_workflow_llm │ │ │ │ └── migration.sql │ │ │ ├── 20230907080928_remove_llm_model │ │ │ │ └── migration.sql │ │ │ ├── 20230907090814_agent_description │ │ │ │ └── migration.sql │ │ │ ├── 20230912073334_tool_return_direct │ │ │ │ └── migration.sql │ │ │ ├── 20230913070205_agent_avatar │ │ │ │ └── migration.sql │ │ │ ├── 20230915080507_datasource_status │ │ │ │ └── migration.sql │ │ │ ├── 20230917191411_datasource_status_failed │ │ │ │ └── migration.sql │ │ │ ├── 20230918070039_agent_cascade_delete │ │ │ │ └── migration.sql │ │ │ ├── 20230920060753_add_pptx_datasource_type │ │ │ │ └── migration.sql │ │ │ ├── 20230920070547_datasource_docx │ │ │ │ └── migration.sql │ │ │ ├── 
20230920072352_datasource_xlsx │ │ │ │ └── migration.sql │ │ │ ├── 20230920081659_datasource_google_doc │ │ │ │ └── migration.sql │ │ │ ├── 20230921064724_code_executor │ │ │ │ └── migration.sql │ │ │ ├── 20230928102507_api_user_email │ │ │ │ └── migration.sql │ │ │ ├── 20231001110155_llm_azure_openai │ │ │ │ └── migration.sql │ │ │ ├── 20231001161850_datassource_content │ │ │ │ └── migration.sql │ │ │ ├── 20231016065521_agent_initial_message │ │ │ │ └── migration.sql │ │ │ ├── 20231029210807_tool_openbb │ │ │ │ └── migration.sql │ │ │ ├── 20231106194639_gpt_4_1106_preview │ │ │ │ └── migration.sql │ │ │ ├── 20231106194841_gpt_4_1106_preview_fix │ │ │ │ └── migration.sql │ │ │ ├── 20231106224640_vision_tool │ │ │ │ └── migration.sql │ │ │ ├── 20231107204227_tts1_tool │ │ │ │ └── migration.sql │ │ │ ├── 20231112132755_update_model │ │ │ │ └── migration.sql │ │ │ ├── 20231113210515_huggingface_models │ │ │ │ └── migration.sql │ │ │ ├── 20231114202204_algolia_tool │ │ │ │ └── migration.sql │ │ │ ├── 20231122081046_handoff_tool │ │ │ │ └── migration.sql │ │ │ ├── 20231124220817_function_tool │ │ │ │ └── migration.sql │ │ │ ├── 20231217152121_add_tool_config │ │ │ │ └── migration.sql │ │ │ ├── 20231217220650_remove_workflow_inputs │ │ │ │ └── migration.sql │ │ │ ├── 20231223104946_add_http_tool │ │ │ │ └── migration.sql │ │ │ ├── 20240102071238_add_vectordb_table │ │ │ │ └── migration.sql │ │ │ ├── 20240110062120_add_hugging_face_mixtral_8x7b_model │ │ │ │ └── migration.sql │ │ │ ├── 20240119040422_add_supabase_pgvector │ │ │ │ └── migration.sql │ │ │ ├── 20240121183424_add_on_delete_cascade_to_workflow_step │ │ │ │ └── migration.sql │ │ │ ├── 20240124063011_make_agent_llm_model_optional │ │ │ │ └── migration.sql │ │ │ ├── 20240129153542_add_workflow_config_table │ │ │ │ └── migration.sql │ │ │ ├── 20240201161130_add_gpt_4_turbo_preview │ │ │ │ └── migration.sql │ │ │ ├── 20240201221548_gpt_3_5_turbo_0125 │ │ │ │ └── migration.sql │ │ │ ├── 20240201224222_agent_type_v2 
│ │ │ │ └── migration.sql │ │ │ ├── 20240202033257_add_openai_assistants │ │ │ │ └── migration.sql │ │ │ ├── 20240204133952_update_openai_assistants_table │ │ │ │ └── migration.sql │ │ │ ├── 20240210104018_make_tool_metadata_optional │ │ │ │ └── migration.sql │ │ │ ├── 20240213050512_llm_agent │ │ │ │ └── migration.sql │ │ │ ├── 20240213051312_add_perplexity │ │ │ │ └── migration.sql │ │ │ ├── 20240213203005_add_togetherai │ │ │ │ └── migration.sql │ │ │ ├── 20240214191602_add_superrag_tool │ │ │ │ └── migration.sql │ │ │ ├── 20240227075836_add_api_keys_table │ │ │ │ └── migration.sql │ │ │ ├── 20240306005619_add_anthropic │ │ │ │ └── migration.sql │ │ │ ├── 20240311200421_tavily_tool │ │ │ │ └── migration.sql │ │ │ ├── 20240318081112_github_tool │ │ │ │ └── migration.sql │ │ │ ├── 20240327030115_scraper_tool │ │ │ │ └── migration.sql │ │ │ ├── 20240328135202_add_output_schema_field_agents_table │ │ │ │ └── migration.sql │ │ │ ├── 20240331072847_google_search_tool │ │ │ │ └── migration.sql │ │ │ ├── 20240405175208_advanced_scraper_tool │ │ │ │ └── migration.sql │ │ │ ├── 20240412075016_add_aws_bedrock │ │ │ │ └── migration.sql │ │ │ ├── 20240418181431_add_mistral │ │ │ │ └── migration.sql │ │ │ ├── 20240418183001_add_groq │ │ │ │ └── migration.sql │ │ │ ├── 20240420075553_add_cohere │ │ │ │ └── migration.sql │ │ │ ├── 20240424143511_add_sec_api │ │ │ │ └── migration.sql │ │ │ ├── 20240503060243_update_gpt_4_models │ │ │ │ └── migration.sql │ │ │ ├── 20240514064040_gpt_4_o │ │ │ │ └── migration.sql │ │ │ └── migration_lock.toml │ │ └── schema.prisma │ ├── prompts │ │ ├── __init__.py │ │ ├── default.py │ │ ├── function_calling_agent.py │ │ └── json.py │ ├── pyproject.toml │ ├── replit.sh │ ├── services │ │ ├── __init__.py │ │ └── superrag.py │ ├── supabase │ │ ├── .gitignore │ │ ├── config.toml │ │ └── seed.sql │ └── tests │ │ └── __init__.py └── ui │ ├── .dockerignore │ ├── .editorconfig │ ├── .env.example │ ├── .eslintignore │ ├── .eslintrc.json │ ├── .gitignore │ 
├── .lintstagedrc.js │ ├── .lintstagedrc.json │ ├── .prettierignore │ ├── Dockerfile │ ├── README.md │ ├── app │ ├── agents │ │ ├── [agentId] │ │ │ ├── add-datasource.tsx │ │ │ ├── add-tool.tsx │ │ │ ├── avatar.tsx │ │ │ ├── chat.tsx │ │ │ ├── delete-agent-button.tsx │ │ │ ├── header.tsx │ │ │ ├── page.tsx │ │ │ ├── prompt-footer.tsx │ │ │ ├── prompt-form.tsx │ │ │ └── settings.tsx │ │ ├── columns.tsx │ │ ├── data-table.tsx │ │ ├── header.tsx │ │ ├── loading.tsx │ │ └── page.tsx │ ├── api │ │ ├── onboard │ │ │ ├── form-schema.ts │ │ │ └── route.ts │ │ └── stripe │ │ │ ├── plans │ │ │ └── route.ts │ │ │ ├── sessions │ │ │ ├── [id] │ │ │ │ └── route.ts │ │ │ └── route.ts │ │ │ ├── subscriptions │ │ │ └── [id] │ │ │ │ ├── cancel │ │ │ │ └── route.ts │ │ │ │ └── route.ts │ │ │ └── webhook │ │ │ └── route.ts │ ├── auth │ │ ├── callback │ │ │ └── route.ts │ │ ├── login.ts │ │ ├── logout.ts │ │ └── sign-up.ts │ ├── billing-modal.tsx │ ├── container.tsx │ ├── integrations │ │ ├── client-page.tsx │ │ ├── llm.tsx │ │ ├── page.tsx │ │ └── storage.tsx │ ├── layout.tsx │ ├── loading.tsx │ ├── logs │ │ └── page.tsx │ ├── onboarding │ │ ├── client-page.tsx │ │ └── page.tsx │ ├── page.tsx │ ├── settings │ │ ├── api-keys │ │ │ ├── api-key-actions.tsx │ │ │ ├── api-keys.tsx │ │ │ ├── create-api-key.tsx │ │ │ └── page.tsx │ │ ├── appearance │ │ │ ├── client-page.tsx │ │ │ └── page.tsx │ │ ├── billing │ │ │ ├── page.tsx │ │ │ ├── pricing-table.tsx │ │ │ └── upgrade-plan-button.tsx │ │ ├── client-page.tsx │ │ ├── layout.tsx │ │ └── page.tsx │ └── workflows │ │ ├── [id] │ │ ├── chat.tsx │ │ ├── editor.ts │ │ ├── function-calls.tsx │ │ ├── header.tsx │ │ ├── llm-dialog.tsx │ │ ├── overview.tsx │ │ ├── page.tsx │ │ ├── prompt-form.tsx │ │ ├── saml.tsx │ │ └── workflow.tsx │ │ ├── cards.tsx │ │ ├── checkout-session-status.tsx │ │ ├── header.tsx │ │ ├── layout.tsx │ │ └── page.tsx │ ├── components.json │ ├── components │ ├── account-sidebar.tsx │ ├── analytics.tsx │ ├── codeblock.tsx │ ├── 
data-table-pagination.tsx │ ├── hooks │ │ ├── index.ts │ │ └── useEditableField.tsx │ ├── icons.tsx │ ├── log-list.tsx │ ├── logo.tsx │ ├── markdown.tsx │ ├── message.tsx │ ├── non-ideal-state.tsx │ ├── sidebar.tsx │ ├── theme-provider.tsx │ ├── theme-toggle.tsx │ ├── ui │ │ ├── accordion.tsx │ │ ├── alert-dialog.tsx │ │ ├── alert.tsx │ │ ├── avatar.tsx │ │ ├── badge.tsx │ │ ├── button.tsx │ │ ├── card.tsx │ │ ├── checkbox.tsx │ │ ├── command.tsx │ │ ├── dialog.tsx │ │ ├── dropdown-menu.tsx │ │ ├── form.tsx │ │ ├── input.tsx │ │ ├── label.tsx │ │ ├── menubar.tsx │ │ ├── multi-select.tsx │ │ ├── popover.tsx │ │ ├── radio-group.tsx │ │ ├── resizable.tsx │ │ ├── scroll-area.tsx │ │ ├── select.tsx │ │ ├── separator.tsx │ │ ├── skeleton.tsx │ │ ├── spinner.tsx │ │ ├── switch.tsx │ │ ├── table.tsx │ │ ├── tabs.tsx │ │ ├── textarea.tsx │ │ ├── toast.tsx │ │ ├── toaster.tsx │ │ ├── tooltip.tsx │ │ └── use-toast.ts │ └── upload-button.tsx │ ├── config │ ├── saml.ts │ └── site.ts │ ├── lib │ ├── api.ts │ ├── fonts.ts │ ├── hooks │ │ ├── use-copy-to-clipboard.tsx │ │ └── use-enter-submit.tsx │ ├── posthog.ts │ ├── segment.ts │ ├── stripe.ts │ ├── supabase.ts │ └── utils.ts │ ├── middleware.ts │ ├── models │ └── models.ts │ ├── next-env.d.ts │ ├── next.config.mjs │ ├── package-lock.json │ ├── package.json │ ├── postcss.config.js │ ├── prettier.config.js │ ├── public │ ├── android-chrome-192x192.png │ ├── android-chrome-512x512.png │ ├── apple-touch-icon.png │ ├── azure-logo.png │ ├── datastax.jpeg │ ├── favicon-16x16.png │ ├── favicon-32x32.png │ ├── favicon.ico │ ├── filepickers.png │ ├── hf-logo.png │ ├── logo.png │ ├── meta-logo.png │ ├── openai-icon-2021x2048-4rpe5x7n.png │ ├── openai-logo.png │ ├── pinecone.png │ ├── qdrant.png │ ├── supabase.png │ ├── thirteen.svg │ ├── weaviate.png │ └── workflow.png │ ├── styles │ └── globals.css │ ├── supabase │ ├── .gitignore │ ├── config.toml │ ├── migrations │ │ ├── 20230905121226_create_profiles_table.sql │ │ ├── 
20230905131437_create_profiles_company.sql │ │ ├── 20230928190307_stripe-id.sql │ │ ├── 20231223183054_auth_script.sql │ │ ├── 20240130053612_stripe_plan_id.sql │ │ └── 20240212115233_superagent_bucket_setup.sql │ └── seed.sql │ ├── tailwind.config.js │ ├── tsconfig.json │ ├── types │ ├── agent.ts │ ├── llm.ts │ ├── log-item.ts │ ├── nav.ts │ └── profile.ts │ └── utils │ └── get-stripejs.ts ├── package-lock.json ├── package.json └── superagent.png /.github/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to Superagent 2 | 3 | Thanks for being interested in contributing to SuperAgent ❤️. 4 | We are extremely open to any and all contributions you might be interested in making. 5 | To contribute to this project, please follow a ["fork and pull request"](https://docs.github.com/en/get-started/quickstart/contributing-to-projects) workflow. 6 | Please do not try to push directly to this repo unless you are a maintainer. 7 | 8 | ## Guidelines 9 | 10 | ### Issues 11 | 12 | The [issues](https://github.com/homanp/superagent/issues) contain all current bugs, improvements, and feature requests. 13 | Please use the corresponding label when creating an issue. 14 | 15 | ### Getting Help 16 | 17 | Contact a maintainer of SuperAgent with any questions or help you might need. 18 | 19 | ### Release process 20 | 21 | SuperAgent tries to follow the same ad hoc release process. 22 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | contact_links: 3 | - name: Discuss the Project in Discord 4 | url: https://discord.gg/e8j7mgjDUK 5 | about: Join our chat server on Discord! 
6 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ## Summary 2 | 3 | 4 | 5 | Fixes 6 | 7 | Depends on 8 | 9 | ## Test plan 10 | 11 | 12 | 13 | - -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | 3 | on: [push] 4 | 5 | jobs: 6 | fern-check: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - name: Checkout repo 10 | uses: actions/checkout@v4 11 | 12 | - name: Install Fern 13 | run: npm install -g fern-api 14 | 15 | - name: Check Fern API is valid 16 | run: fern check 17 | 18 | fern-generate: 19 | needs: fern-check 20 | if: github.event_name == 'push' && contains(github.ref, 'refs/tags/') 21 | runs-on: ubuntu-latest 22 | steps: 23 | - name: Checkout repo 24 | uses: actions/checkout@v4 25 | 26 | - name: Setup node 27 | uses: actions/setup-node@v4 28 | 29 | - name: Download Fern 30 | run: npm install -g fern-api 31 | 32 | - name: Generate SDKs 33 | env: 34 | FERN_TOKEN: ${{ secrets.FERN_TOKEN }} 35 | PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} 36 | NPM_TOKEN: ${{ secrets.NPM_TOKEN }} 37 | run: fern generate --group publish --log-level debug --version ${{ github.ref_name }} --api prod 38 | 39 | - name: Update Docs 40 | env: 41 | FERN_TOKEN: ${{ secrets.FERN_TOKEN }} 42 | run: fern generate --docs 43 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: lint 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | 8 | env: 9 | POETRY_VERSION: "1.4.2" 10 | 11 | jobs: 12 | build: 13 | runs-on: ubuntu-latest 14 | strategy: 15 | matrix: 16 | python-version: 17 | - "3.8" 18 | - "3.9" 19 | - "3.10" 20 | - "3.11" 21 | steps: 22 | - 
uses: actions/checkout@v4 23 | - name: Install poetry 24 | run: | 25 | pipx install poetry==$POETRY_VERSION 26 | - name: Set up Python ${{ matrix.python-version }} 27 | uses: actions/setup-python@v5 28 | with: 29 | python-version: ${{ matrix.python-version }} 30 | cache: poetry 31 | - name: Install dependencies 32 | run: | 33 | cd libs/superagent 34 | poetry install 35 | - name: Analysing the code with our lint 36 | run: | 37 | cd libs/superagent 38 | ./lint-and-format.sh lint 39 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | *.pyc 3 | .env 4 | .venv 5 | superenv/ 6 | .DS_Store 7 | venv/ 8 | /.vscode 9 | /.codesandbox 10 | 11 | ## GUI IGNORE 12 | 13 | # dependencies 14 | node_modules 15 | /.pnp 16 | .pnp.js 17 | 18 | # testing 19 | .scratch 20 | /coverage 21 | 22 | # next.js 23 | .next/ 24 | /out/ 25 | 26 | # production 27 | /build 28 | 29 | # misc 30 | *.pem 31 | 32 | # debug 33 | npm-debug.log* 34 | yarn-debug.log* 35 | yarn-error.log* 36 | 37 | # local env files 38 | .env*.local 39 | .env 40 | 41 | # vercel 42 | .vercel 43 | 44 | # typescript 45 | *.tsbuildinfo 46 | next-env.d.ts 47 | 48 | # docker 49 | .docker/docker.env 50 | .docker/data/ 51 | 52 | # supabase 53 | v2.code-workspace 54 | # supabase setup upgrade temp folder 55 | libs/.docker/ui/supabase/supabase-setup 56 | 57 | google_cloud_service_key.json 58 | 59 | # idea 60 | .idea/ 61 | -------------------------------------------------------------------------------- /.husky/pre-commit: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | . 
"$(dirname -- "$0")/_/husky.sh" 3 | 4 | npm run lint:backend 5 | npm run lint:frontend -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Ismail Pelaseyed 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 6 | 7 | The above copyright notice and this permission notice (including the next paragraph) shall be included in all copies or substantial portions of the Software. 8 | 9 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
10 | 11 | -------------------------------------------------------------------------------- /fern/apis/legacy/generators.yml: -------------------------------------------------------------------------------- 1 | api: 2 | path: ./openapi/openapi.yaml 3 | settings: 4 | use-title: false -------------------------------------------------------------------------------- /fern/apis/prod/generators.yml: -------------------------------------------------------------------------------- 1 | api: 2 | path: ./openapi/openapi.yaml 3 | settings: 4 | use-title: false 5 | groups: 6 | publish: 7 | generators: 8 | - name: fernapi/fern-python-sdk 9 | version: 2.2.0 10 | disable-examples: true 11 | output: 12 | location: pypi 13 | package-name: superagent-py 14 | token: ${PYPI_TOKEN} 15 | github: 16 | repository: homanp/superagent-py 17 | config: 18 | client_class_name: Superagent 19 | inline_request_params: false 20 | improved_imports: false 21 | pydantic_config: 22 | require_optional_fields: true 23 | use_str_enums: false 24 | extra_fields: "forbid" 25 | 26 | - name: fernapi/fern-typescript-node-sdk 27 | version: 0.12.5 28 | output: 29 | location: npm 30 | package-name: superagentai-js 31 | token: ${NPM_TOKEN} 32 | github: 33 | repository: homanp/superagent-js 34 | config: 35 | namespaceExport: SuperAgent 36 | -------------------------------------------------------------------------------- /fern/assets/favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/superagent-ai/superagent/168a176f9c67c53955d5eea0c8f743d5254b0606/fern/assets/favicon.png -------------------------------------------------------------------------------- /fern/assets/logo-dark.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/superagent-ai/superagent/168a176f9c67c53955d5eea0c8f743d5254b0606/fern/assets/logo-dark.png 
-------------------------------------------------------------------------------- /fern/assets/logo-light.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/superagent-ai/superagent/168a176f9c67c53955d5eea0c8f743d5254b0606/fern/assets/logo-light.png -------------------------------------------------------------------------------- /fern/fern.config.json: -------------------------------------------------------------------------------- 1 | { 2 | "organization": "superagent", 3 | "version": "0.26.3" 4 | } -------------------------------------------------------------------------------- /fern/mdx/authentication.mdx: -------------------------------------------------------------------------------- 1 | # Authentication 2 | 3 | Superagent uses API keys to authenticate requests. You can manage API keys using the `api-users` endpoint. 4 | 5 | As API keys carry many privileges such as creating and destroying agents, it is important to keep them private and secure. Do not hardcode or share API keys (particularly in your source version control system), and they should only be used in your backend. 6 | 7 | Authentication is handled via HTTP headers, specifically the `Authorization` header. 8 | 9 | ```bash 10 | curl -X POST 'https://api.beta.superagent.sh/api/v1' \ 11 | --header 'Authorization: Bearer ' 12 | ``` 13 | 14 | -------------------------------------------------------------------------------- /fern/mdx/deploy/render.mdx: -------------------------------------------------------------------------------- 1 | This guide shows you how to deploy the Superagent API to [Render.com](https://www.render.com) 2 | 3 | 1. Create a new `Web service` from within the [Render dashboard](https://dashboard.render.com/create?type=web). 4 | 5 | 2. Select `Build and deploy from a Git repository`. 6 | Screenshot 2023-11-05 at 23 28 24 7 | 8 | 3. Connect your Superaget fork from the list of repositories. 9 | 10 | 4. 
Set the `Root directory` option to `./libs/superagent`. 11 | Screenshot 2023-11-05 at 23 38 40 12 | 13 | 14 | 5. Set the `Dockerfile Path` to Dockerfile. 15 | Screenshot 2023-11-05 at 23 38 33 16 | 17 | 18 | 6. Add all required enviornment varialbes. See example [here](https://github.com/homanp/superagent/blob/main/libs/superagent/.env.example). 19 | 20 | 7. Press `Create service`. 21 | 22 | 8. After the Render deployment is ready you can access the Superagent API at `https://.com/api/v1`. 23 | 24 | That's it! -------------------------------------------------------------------------------- /fern/mdx/deploy/replit.mdx: -------------------------------------------------------------------------------- 1 | This guide shows you how to deploy the Superagent API to [Replit](https://www.replit.com) 2 | 3 | ## Step-by-step guide 4 | 5 | 1. Create a Replit REPL by importing the Superagent GitHub Repository. [Link](https://docs.replit.com/hosting/deployments/deploying-a-github-repository) 6 | 7 | 2. Set the REPL language to `Python` 8 | 9 | 3. Replace the contents of the `.replit` file in your REPL with the following 10 | ```sh 11 | run = "chmod 777 ./libs/superagent/replit.sh && cd ./libs/superagent && ./replit.sh" 12 | modules = ["python-3.10:v18-20230807-322e88b", "nodejs-18:v3-20230608-f4cd419"] 13 | 14 | hidden = [".pythonlibs"] 15 | 16 | [nix] 17 | channel = "stable-23_05" 18 | 19 | [deployment] 20 | run = ["sh", "-c", "chmod 777 ./libs/superagent/replit.sh && cd ./libs/superagent && ./replit.sh"] 21 | deploymentTarget = "cloudrun" 22 | ``` 23 | 24 | 4. Add all necessary `.env` variables as Replit `Secrets`. Also, add the following additional secret: 25 | ```sh 26 | TZ = Etc/UTC 27 | ``` 28 | 29 | 5. Deploy the REPL using Replit `Autoscale`. 
30 | 31 | ## Video tutorial 32 | 33 | 40 | -------------------------------------------------------------------------------- /fern/mdx/intro.mdx: -------------------------------------------------------------------------------- 1 | # The open framework for building AI Assistants 2 | 3 | Superagent is an open source framework that allows anyone to build, manage, and deploy unique ChatGPT-like AI Assistants. 4 | 5 | Superagent also provides a cloud platform that allows you to effortlessly deploy AI Assistants in production without worrying about infrastructure, dependencies, or configuration. 6 | 7 | 14 | 15 | ## Getting started 16 | 17 | The REST API allows you to interact with Superagent programmatically and perform various tasks such as creating, updating, and deleting agents. You can also use the API to attach datasources and tools (third-party APIs) to agents. 18 | If you are looking to setup your instance of Superagent, please check out the official [GitHub repository](https://github.com/homanp/superagent) 19 | 20 | ## Libraries 21 | 22 | Superagent is available as a REST API and maintains client libraries in Python and TypeScript. 23 | 24 | 25 | 26 | 30 | 34 | 35 | -------------------------------------------------------------------------------- /fern/mdx/logging/agentops.mdx: -------------------------------------------------------------------------------- 1 | [AgentOps](https://agentops.ai/) provides session replays, metrics, and monitoring for agents built with Superagent. 2 | 3 | [AgentOps Github](https://github.com/AgentOps-AI/agentops) 4 | 5 | ## Overview 6 | AgentOps provides monotoring for agents in development and production. It provides a dashboard for monitoring agent performance, session replays, and custom reporting. 7 | ![Alt text](https://superagentai.s3.eu-north-1.amazonaws.com/overview.png "Agent analytics") 8 | 9 | Additionally, AgentOps provides session drilldowns that allows users to view the agent's interactions with users in real-time. 
This feature is useful for debugging and understanding how the agent interacts with users. 10 | ![Alt text](https://superagentai.s3.eu-north-1.amazonaws.com/session.png "Session data") 11 | 12 | ![Alt text](https://superagentai.s3.eu-north-1.amazonaws.com/replay.png "Session replays") 13 | 14 | ## Features 15 | * LLM Cost management and tracking 16 | * Replay Analytics 17 | * Recursive thought detection 18 | * Custom Reporting 19 | * Analytics Dashboard 20 | * Public Model Testing 21 | * Custom Tests 22 | * Time Travel Debugging 23 | * Compliance and Security 24 | 25 | 26 | ## Setting up AgentOps with Superagent locally 27 | 28 | You can create a user API key here: [app.agentops.ai/account](https://app.agentops.ai/account). 29 | 30 | AgentOps is automatically configured once you set an API key in your `.env`. 31 | ``` 32 | AGENTOPS_API_KEY= 33 | AGENTOPS_ORG_KEY= 34 | ``` 35 | 36 | Superagent provides a default `AGENTOPS_ORG_KEY` that grants users access to specialized analytics features. However, you may also remove it or replace it with your own. -------------------------------------------------------------------------------- /fern/mdx/memory/motorhead.mdx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/superagent-ai/superagent/168a176f9c67c53955d5eea0c8f743d5254b0606/fern/mdx/memory/motorhead.mdx -------------------------------------------------------------------------------- /fern/mdx/platform.mdx: -------------------------------------------------------------------------------- 1 | # Superagent Cloud Platform 2 | 3 | The Superagent Cloud Platform is a state-of-the-art infrastructure specifically designed to run agents in a fast and performant way. 4 | It leverages cutting-edge techniques such as Server-Sent Events (SSE), concurrency, and workflow orchestration to ensure optimal performance and efficiency. 
5 | 6 | At its core, the platform utilizes some of the best and most reliable frameworks in the industry. 7 | FastAPI, a modern, fast (high-performance), web framework for building APIs is used for its speed and ease of use. 8 | Prefect, a new workflow management system, is used for orchestrating the workflows and managing tasks. 9 | Lastly, Vercel, a cloud platform for static sites and Serverless Functions, is used for deployment of the Superagent UI. 10 | 11 | With this combination of advanced techniques and robust frameworks, the Superagent Cloud Platform provides a reliable and efficient environment for running and managing agents. 12 | -------------------------------------------------------------------------------- /fern/mdx/rag/super-rag-saml.mdx: -------------------------------------------------------------------------------- 1 | ## Features 2 | 3 | Super-Rag is performant Retrieval Augmented Generation pipeline specifcially created to be used by AI Agents. Super-Rag allows developers to ingest large amount of data and perform Q&A, Summarization and Computationl Q&A on that dataset. 4 | 5 | ### Getting started 6 | 7 | 1. Connect your a embedding database in the [Superagent UI](https://beta.superagent.sh/integrations) 8 | 2. Create a workflow using SAML. 9 | 10 | ```yaml 11 | workflows: 12 | - superagent: 13 | name: Rag Agent 14 | llm: gpt-4-turbo-preview 15 | prompt: Use the earning reports to answer any questions 16 | superrag: 17 | - index: 18 | name: earnings 19 | use_for: useful for answering questions about earning reports. 20 | urls: 21 | - https://digitalassets.tesla.com/tesla-contents/image/upload/IR/TSLA-Q4-2023-Update.pdf 22 | - https://s2.q4cdn.com/299287126/files/doc_financials/2023/q4/AMZN-Q4-2023-Earnings-Release.pdf 23 | ``` 24 | 25 | 3. Query the workflow using the API or SDKs 26 | 27 | ### Running Super-Rag as a stand-alone service 28 | 29 | Super-Rag comes as a stand-alone REST API. 
You can read more on how to setup [here](https://github.com/superagent-ai/super-rag). Alternatively you can run the free Cloud API to ingest/retrieve data. -------------------------------------------------------------------------------- /fern/mdx/sdk/agent_llm_params.mdx: -------------------------------------------------------------------------------- 1 | This page describes how to specify the LLM parameters in an agent invocation. 2 | 3 | When invoking an Agent, you can specify the following LLM parameters: 4 | 5 | - `temperature` (default: 0.0): This parameter controls the randomness of the agent's responses. A higher value makes the output more random, while a lower value makes it more deterministic. 6 | 7 | - `max_tokens` (default: None): This parameter sets the maximum length of the agent's response. If not specified, the agent will use the default maximum length. 8 | 9 | Here's an example of how to set these parameters when invoking an agent: 10 | 11 | 12 | 13 | Note: We assume that you have already created `agent`. If not, please refer to the [quickstart](https://docs.superagent.sh/overview/getting-started/basic-example) guide. 14 | 15 |
16 | 17 | 18 | 19 | ```python 20 | response = client.agent.invoke( 21 | agent_id=agent.data.id, 22 | input="What was Tesla's revenue?", 23 | enable_streaming=False, 24 | session_id="my_session_id", 25 | llm_params={"temperature": 0.0, "max_tokens": 100} 26 | ) 27 | ``` 28 | 29 | 30 | 31 | ```typescript 32 | response = client.agent.invoke({ 33 | agent_id: agentId, 34 | input: "What was Tesla's revenue?", 35 | enable_streaming: false, 36 | session_id: "my_session_id", 37 | llm_params: {temperature: 0.0, max_tokens: 100} 38 | }); 39 | ``` 40 | 41 | 42 | -------------------------------------------------------------------------------- /fern/mdx/vector-stores/astra.mdx: -------------------------------------------------------------------------------- 1 | We are working on the docs of this page, stay tuned -------------------------------------------------------------------------------- /fern/mdx/vector-stores/pinecone.mdx: -------------------------------------------------------------------------------- 1 | We are working on the docs of this page, stay tuned -------------------------------------------------------------------------------- /fern/mdx/vector-stores/qdrant.mdx: -------------------------------------------------------------------------------- 1 | We are working on the docs of this page, stay tuned -------------------------------------------------------------------------------- /fern/mdx/vector-stores/supabase.mdx: -------------------------------------------------------------------------------- 1 | We are working on the docs of this page, stay tuned -------------------------------------------------------------------------------- /fern/versions/legacy.yml: -------------------------------------------------------------------------------- 1 | navigation: 2 | - section: Overview 3 | contents: 4 | - page: Introduction 5 | path: ../mdx/intro.mdx 6 | - page: Authentication 7 | path: ../mdx/authentication.mdx 8 | - page: Concepts 9 | path: ../mdx/concepts.mdx 10 | - page: 
Platform 11 | path: ../mdx/platform.mdx 12 | - api-name: legacy 13 | api: Superagent API -------------------------------------------------------------------------------- /fern/versions/prod.yml: -------------------------------------------------------------------------------- 1 | 2 | navigation: 3 | - tab: overview 4 | layout: 5 | - section: Overview 6 | contents: 7 | - page: Introduction 8 | path: ../mdx/intro.mdx 9 | - page: Authentication 10 | path: ../mdx/authentication.mdx 11 | - page: Concepts 12 | path: ../mdx/concepts.mdx 13 | - page: Platform 14 | path: ../mdx/platform.mdx 15 | - tab: api 16 | layout: 17 | - section: Introduction 18 | contents: 19 | - page: Authentication 20 | path: ../mdx/authentication.mdx 21 | - api: API Reference 22 | api-name: prod 23 | snippets: 24 | python: superagent-py -------------------------------------------------------------------------------- /libs/.docker/.dockerignore: -------------------------------------------------------------------------------- 1 | # Node modules 2 | node_modules 3 | 4 | # Next.js specific 5 | .next 6 | out 7 | 8 | # Build logs 9 | *.log 10 | 11 | # Environment files 12 | .env 13 | .env* 14 | !.env.docker 15 | 16 | # Docker files 17 | docker-compose.yml 18 | Dockerfile 19 | 20 | # Version control 21 | .git 22 | .gitignore 23 | 24 | # OS generated files 25 | .DS_Store 26 | Thumbs.db 27 | 28 | # Editor directories and files 29 | .idea 30 | *.swp 31 | *.swo 32 | *.sublime-workspace 33 | *.sublime-project 34 | .vscode 35 | 36 | # npm debug logs 37 | npm-debug.log* 38 | -------------------------------------------------------------------------------- /libs/.docker/README.md: -------------------------------------------------------------------------------- 1 | # Run Superagent locally with Docker and docker-compose 2 | 3 | ## Getting Started 4 | 5 | Read the getting started guide [https://docs.superagent.sh/overview/deployments/docker-compose](https://docs.superagent.sh/overview/deployments/docker-compose) 6 | 
-------------------------------------------------------------------------------- /libs/.docker/external/observabillity/langfuse/.env.example: -------------------------------------------------------------------------------- 1 | PORT=3100 2 | # Prisma 3 | # https://www.prisma.io/docs/reference/database-reference/connection-urls#env 4 | DATABASE_URL="postgresql://postgres:postgres@langfuse-db:5432/postgres" 5 | DATABASE_PORT=5434 6 | 7 | # Next Auth 8 | # You can generate a new secret on the command line with: 9 | # openssl rand -base64 32 10 | # https://next-auth.js.org/configuration/options#secret 11 | # NEXTAUTH_SECRET="" 12 | NEXTAUTH_URL="http://localhost:3100" 13 | NEXTAUTH_SECRET="secret" 14 | 15 | # Langfuse experimental features 16 | LANGFUSE_ENABLE_EXPERIMENTAL_FEATURES="true" 17 | SALT="salt" 18 | 19 | # Email 20 | EMAIL_FROM_ADDRESS="" # Defines the email address to use as the from address. 21 | SMTP_CONNECTION_URL="" # Defines the connection url for smtp server. 22 | -------------------------------------------------------------------------------- /libs/.docker/external/observabillity/langfuse/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.5" 2 | 3 | services: 4 | langfuse-server: 5 | container_name: langfuse-server 6 | networks: 7 | - superagent_network 8 | image: ghcr.io/langfuse/langfuse:latest 9 | depends_on: 10 | db: 11 | condition: service_healthy 12 | ports: 13 | - ${PORT}:${PORT} 14 | environment: 15 | - PORT=${PORT} 16 | - NODE_ENV=production 17 | - DATABASE_URL=${DATABASE_URL} 18 | - NEXTAUTH_SECRET=${NEXTAUTH_SECRET} 19 | - SALT=${SALT} 20 | - NEXTAUTH_URL=${NEXTAUTH_URL} 21 | - TELEMETRY_ENABLED=${TELEMETRY_ENABLED:-true} 22 | - NEXT_PUBLIC_SIGN_UP_DISABLED=${NEXT_PUBLIC_SIGN_UP_DISABLED:-false} 23 | - LANGFUSE_ENABLE_EXPERIMENTAL_FEATURES=${LANGFUSE_ENABLE_EXPERIMENTAL_FEATURES:-false} 24 | 25 | db: 26 | container_name: langfuse-db 27 | networks: 28 | - superagent_network 29 
| image: postgres 30 | restart: unless-stopped 31 | environment: 32 | - POSTGRES_USER=postgres 33 | - POSTGRES_PASSWORD=postgres 34 | - POSTGRES_DB=postgres 35 | ports: 36 | - ${DATABASE_PORT}:5432 37 | volumes: 38 | - database_data:/var/lib/postgresql/data 39 | healthcheck: 40 | test: ["CMD-SHELL", "pg_isready -U postgres"] 41 | interval: 10s 42 | timeout: 5s 43 | retries: 5 44 | start_period: 30s 45 | 46 | volumes: 47 | database_data: 48 | driver: local 49 | 50 | networks: 51 | superagent_network: 52 | external: true 53 | -------------------------------------------------------------------------------- /libs/.docker/external/observabillity/langfuse/run.sh: -------------------------------------------------------------------------------- 1 | # Check if the network exists 2 | if ! docker network ls | grep -q superagent_network; then 3 | # Create the network if it does not exist 4 | docker network create superagent_network 5 | fi 6 | 7 | ./stop.sh 8 | docker compose up -d 9 | 10 | docker logs langfuse-server -------------------------------------------------------------------------------- /libs/.docker/external/observabillity/langfuse/stop.sh: -------------------------------------------------------------------------------- 1 | docker compose down 2 | -------------------------------------------------------------------------------- /libs/.docker/external/observabillity/langfuse/uninstall.sh: -------------------------------------------------------------------------------- 1 | docker compose down -v --remove-orphans 2 | -------------------------------------------------------------------------------- /libs/.docker/external/vector-store/weaviate/.env.example: -------------------------------------------------------------------------------- 1 | QUERY_DEFAULTS_LIMIT=25 2 | AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED="true" 3 | PERSISTENCE_DATA_PATH="/var/lib/weaviate" 4 | DEFAULT_VECTORIZER_MODULE="none" 5 | ENABLE_MODULES="" 6 | CLUSTER_HOSTNAME="node1" 7 | # 8 | 
AUTHENTICATION_APIKEY_ENABLED="true" 9 | AUTHENTICATION_APIKEY_ALLOWED_KEYS="InsecurePassword" 10 | AUTHENTICATION_APIKEY_USERS="test@test.com" -------------------------------------------------------------------------------- /libs/.docker/external/vector-store/weaviate/docker-compose.yml: -------------------------------------------------------------------------------- 1 | --- 2 | services: 3 | weaviate: 4 | container_name: weaviate 5 | networks: 6 | - superagent_network 7 | command: 8 | - --host 9 | - 0.0.0.0 10 | - --port 11 | - "8082" 12 | - --scheme 13 | - http 14 | image: semitechnologies/weaviate:1.23.3 15 | ports: 16 | - 8082:8082 17 | - 50051:50051 18 | volumes: 19 | - weaviate_data:/var/lib/weaviate 20 | restart: unless-stopped 21 | environment: 22 | QUERY_DEFAULTS_LIMIT: ${QUERY_DEFAULTS_LIMIT} 23 | AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED: ${AUTHENTICATION_ANONYMOUS_ACCESS_ENABLED} 24 | PERSISTENCE_DATA_PATH: ${PERSISTENCE_DATA_PATH} 25 | DEFAULT_VECTORIZER_MODULE: ${DEFAULT_VECTORIZER_MODULE} 26 | ENABLE_MODULES: ${ENABLE_MODULES} 27 | CLUSTER_HOSTNAME: ${CLUSTER_HOSTNAME} 28 | # 29 | AUTHENTICATION_APIKEY_ENABLED: ${AUTHENTICATION_APIKEY_ENABLED} 30 | AUTHENTICATION_APIKEY_ALLOWED_KEYS: ${AUTHENTICATION_APIKEY_ALLOWED_KEYS} 31 | AUTHENTICATION_APIKEY_USERS: ${AUTHENTICATION_APIKEY_USERS} 32 | volumes: 33 | weaviate_data: 34 | 35 | networks: 36 | superagent_network: 37 | external: true 38 | -------------------------------------------------------------------------------- /libs/.docker/external/vector-store/weaviate/run.sh: -------------------------------------------------------------------------------- 1 | # Check if the network exists 2 | if ! 
docker network ls | grep -q superagent_network; then 3 | # Create the network if it does not exist 4 | docker network create superagent_network 5 | fi 6 | 7 | ./stop.sh && docker compose up -d 8 | 9 | docker logs weaviate -------------------------------------------------------------------------------- /libs/.docker/external/vector-store/weaviate/stop.sh: -------------------------------------------------------------------------------- 1 | docker compose down 2 | -------------------------------------------------------------------------------- /libs/.docker/external/vector-store/weaviate/uninstall.sh: -------------------------------------------------------------------------------- 1 | docker compose down -v --remove-orphans 2 | -------------------------------------------------------------------------------- /libs/.docker/run.sh: -------------------------------------------------------------------------------- 1 | # Remove any running services 2 | ./stop.sh 3 | 4 | # Check if the network exists 5 | if ! 
docker network ls | grep -q superagent_network; then 6 | # Create the network if it does not exist 7 | docker network create superagent_network 8 | fi 9 | 10 | # Run the core services 11 | docker compose -f docker-compose.yml \ 12 | -f superagent/db/docker-compose.pgdb.yml \ 13 | -f superagent/db/docker-compose.pgadmin.yml \ 14 | -f superagent/motorhead/docker-compose.motorhead.yml \ 15 | -f ui/docker-compose.ui.yml \ 16 | up \ 17 | --build \ 18 | -d 19 | 20 | docker logs superagent-ui 21 | docker logs superagent-api -------------------------------------------------------------------------------- /libs/.docker/stop.sh: -------------------------------------------------------------------------------- 1 | # Remove any running services 2 | docker compose -f docker-compose.yml \ 3 | -f superagent/db/docker-compose.pgdb.yml \ 4 | -f superagent/db/docker-compose.pgadmin.yml \ 5 | -f superagent/motorhead/docker-compose.motorhead.yml \ 6 | -f ui/docker-compose.ui.yml \ 7 | down 8 | # -v # TODO: remove the -v flag when we have a persistent database 9 | -------------------------------------------------------------------------------- /libs/.docker/superagent/db/.env.example: -------------------------------------------------------------------------------- 1 | # Mandatory 2 | POSTGRES_USER=postgres 3 | POSTGRES_PASSWORD=password 4 | POSTGRES_DB_PORT=5432 5 | POSTGRES_DB_HOST=pgdb 6 | # Reccomended but Optional - needed if using docker-compose.pgadmin.yml 7 | PGADMIN_DEFAULT_EMAIL=admin@admin.com 8 | PGADMIN_DEFAULT_PASSWORD=local123 -------------------------------------------------------------------------------- /libs/.docker/superagent/db/docker-compose.pgadmin.yml: -------------------------------------------------------------------------------- 1 | services: 2 | pgadmin: 3 | container_name: pgadmin 4 | networks: 5 | - superagent_network 6 | image: dpage/pgadmin4 7 | pull_policy: missing 8 | environment: 9 | PGADMIN_DEFAULT_EMAIL: ${PGADMIN_DEFAULT_EMAIL} 10 | 
PGADMIN_DEFAULT_PASSWORD: ${PGADMIN_DEFAULT_PASSWORD} 11 | PGADMIN_LISTEN_ADDRESS: 0.0.0.0 12 | PGADMIN_LISTEN_PORT: 5050 13 | ports: 14 | - 5050:5050 15 | volumes: 16 | - type: volume 17 | source: pgadmin-data 18 | target: /root/.pgadmin 19 | restart: unless-stopped 20 | 21 | volumes: 22 | pgadmin-data: 23 | 24 | networks: 25 | superagent_network: 26 | external: true -------------------------------------------------------------------------------- /libs/.docker/superagent/db/docker-compose.pgdb.yml: -------------------------------------------------------------------------------- 1 | name: superagent 2 | services: 3 | pgdb: 4 | container_name: pgdb 5 | networks: 6 | - superagent_network 7 | image: postgres:12 8 | restart: unless-stopped 9 | volumes: 10 | - type: volume 11 | source: pgdb-data 12 | target: "/var/lib/postgresql/data" 13 | environment: 14 | POSTGRES_DB: ${POSTGRES_USER} 15 | POSTGRES_USER: ${POSTGRES_USER} 16 | POSTGRES_PASSWORD: ${POSTGRES_PASSWORD} 17 | ports: 18 | - ${POSTGRES_DB_PORT}:5432 19 | healthcheck: 20 | test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER}"] 21 | interval: 30s 22 | timeout: 30s 23 | retries: 3 24 | 25 | volumes: 26 | pgdb-data: 27 | 28 | networks: 29 | superagent_network: 30 | external: true 31 | -------------------------------------------------------------------------------- /libs/.docker/superagent/db/run.sh: -------------------------------------------------------------------------------- 1 | # Remove any running services 2 | ./stop.sh 3 | 4 | # Check if the network exists 5 | if ! 
docker network ls | grep -q superagent_network; then 6 | # Create the network if it does not exist 7 | docker network create superagent_network 8 | fi 9 | 10 | # Run the db services 11 | docker compose -f docker-compose.pgdb.yml \ 12 | -f docker-compose.pgadmin.yml \ 13 | up \ 14 | --build \ 15 | -d 16 | 17 | docker logs pgdb -------------------------------------------------------------------------------- /libs/.docker/superagent/db/stop.sh: -------------------------------------------------------------------------------- 1 | # Remove any running services 2 | docker compose -f docker-compose.pgdb.yml \ 3 | -f docker-compose.pgadmin.yml \ 4 | down 5 | # -v # TODO: remove the -v flag when we have a persistent database 6 | -------------------------------------------------------------------------------- /libs/.docker/superagent/db/uninstall.sh: -------------------------------------------------------------------------------- 1 | # Remove any running services 2 | docker compose -f docker-compose.pgdb.yml \ 3 | -f docker-compose.pgadmin.yml \ 4 | down \ 5 | -v \ 6 | --remove-orphans -------------------------------------------------------------------------------- /libs/.docker/superagent/motorhead/docker-compose.motorhead.yml: -------------------------------------------------------------------------------- 1 | name: superagent 2 | services: 3 | motorhead: 4 | container_name: motorhead 5 | networks: 6 | - superagent_network 7 | image: ghcr.io/getmetal/motorhead:latest # Use the pre-built image 8 | restart: unless-stopped 9 | ports: 10 | - 8081:8081 # map host port 8081 to container port 8080 11 | environment: 12 | PORT: 8081 13 | MOTORHEAD_MAX_WINDOW_SIZE: 25 14 | MOTORHEAD_LONG_TERM_MEMORY: "true" 15 | MOTORHEAD_MODEL: "gpt-3.5-turbo" 16 | REDIS_URL: "redis://redis:6379" 17 | OPENAI_API_KEY: ${OPENAI_API_KEY} 18 | links: 19 | - redis 20 | 21 | redis: 22 | container_name: redis 23 | networks: 24 | - superagent_network 25 | image: redis/redis-stack-server:latest 26 | restart: 
unless-stopped 27 | ports: 28 | - 6379:6379 29 | 30 | networks: 31 | superagent_network: 32 | external: true 33 | -------------------------------------------------------------------------------- /libs/.docker/ui/supabase/.gitignore: -------------------------------------------------------------------------------- 1 | volumes/db/data 2 | volumes/storage 3 | .env 4 | test.http 5 | docker-compose.override.yml -------------------------------------------------------------------------------- /libs/.docker/ui/supabase/README.md: -------------------------------------------------------------------------------- 1 | # Supabase docker compose setup for Superagent 2 | 3 | A basic configuration is provided here for using Supabase with Superagent. Based on the official docker-compose configuration from Supabase. 4 | 5 | ## Supabase for Superagent Usage guide 6 | 7 | []() 8 | 9 | ## Official documentation 10 | 11 | [https://supabase.com/docs/guides/self-hosting/docker-compose](https://supabase.com/docs/guides/self-hosting/docker-compose) 12 | 13 | ## Supabase docker-compose official github repo 14 | 15 | [https://github.com/supabase/supabase/blob/master/docker/docker-compose.yml](https://github.com/supabase/supabase/blob/master/docker/docker-compose.yml) 16 | 17 | ## Supabase GoTrue (auth) github repo 18 | 19 | [https://github.com/supabase/gotrue](https://github.com/supabase/gotrue) 20 | -------------------------------------------------------------------------------- /libs/.docker/ui/supabase/run.sh: -------------------------------------------------------------------------------- 1 | # Check if the network exists 2 | if ! 
docker network ls | grep -q superagent_network; then 3 | # Create the network if it does not exist 4 | docker network create superagent_network 5 | fi 6 | 7 | ./stop.sh && docker compose up -d 8 | 9 | docker logs supabase-db -------------------------------------------------------------------------------- /libs/.docker/ui/supabase/setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Configuration 4 | SUPABASE_REPO="https://raw.githubusercontent.com/supabase/supabase/master/docker" 5 | TARGET_DIR="./supabase-setup" # Set this to your desired directory 6 | 7 | # Check if curl is installed 8 | if ! command -v curl &> /dev/null; then 9 | echo "curl could not be found. Please install curl and rerun the script." 10 | exit 1 11 | fi 12 | 13 | # Create necessary directories 14 | mkdir -p "$TARGET_DIR/volumes/api" 15 | mkdir -p "$TARGET_DIR/volumes/db/init" 16 | mkdir -p "$TARGET_DIR/volumes/db" 17 | mkdir -p "$TARGET_DIR/volumes/functions/main" 18 | mkdir -p "$TARGET_DIR/volumes/logs" 19 | 20 | # Function to download a file and maintain directory structure 21 | download_file() { 22 | local file_path=$1 23 | local target_path="$TARGET_DIR/${file_path#*/docker/}" 24 | echo "Downloading $file_path..." 
25 | curl -o "$target_path" "$SUPABASE_REPO/$file_path" 26 | } 27 | 28 | # List of files to download 29 | files_to_download=( 30 | "volumes/api/kong.yml" 31 | "volumes/db/init/data.sql" 32 | "volumes/db/jwt.sql" 33 | "volumes/db/logs.sql" 34 | "volumes/db/realtime.sql" 35 | "volumes/db/roles.sql" 36 | "volumes/db/webhooks.sql" 37 | "volumes/functions/main/index.ts" 38 | "volumes/logs/vector.yml" 39 | "docker-compose.yml" 40 | ) 41 | 42 | # Download each file 43 | for file in "${files_to_download[@]}"; do 44 | download_file "$file" 45 | done 46 | 47 | # Note: Add any additional files or directories as needed 48 | -------------------------------------------------------------------------------- /libs/.docker/ui/supabase/stop.sh: -------------------------------------------------------------------------------- 1 | docker compose down --remove-orphans -------------------------------------------------------------------------------- /libs/.docker/ui/supabase/uninstall.sh: -------------------------------------------------------------------------------- 1 | # Ensure Superagent is down first 2 | cd ../../ 3 | ./stop.sh 4 | # Ensure supabase is down and remove volumes, delete local volume db data once down 5 | cd ui/supabase 6 | docker compose down -v --remove-orphans && rm -rf volumes/db/data/ && rm -rf volumes/storage 7 | 8 | if docker network ls | grep -q superagent_network; then 9 | # Remove the network if it exists 10 | docker network remove superagent_network 11 | fi 12 | -------------------------------------------------------------------------------- /libs/.docker/ui/supabase/volumes/db/init/data.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/superagent-ai/superagent/168a176f9c67c53955d5eea0c8f743d5254b0606/libs/.docker/ui/supabase/volumes/db/init/data.sql -------------------------------------------------------------------------------- /libs/.docker/ui/supabase/volumes/db/jwt.sql: 
-------------------------------------------------------------------------------- 1 | \set jwt_secret `echo "$JWT_SECRET"` 2 | \set jwt_exp `echo "$JWT_EXP"` 3 | 4 | ALTER DATABASE postgres SET "app.settings.jwt_secret" TO :'jwt_secret'; 5 | ALTER DATABASE postgres SET "app.settings.jwt_exp" TO :'jwt_exp'; 6 | -------------------------------------------------------------------------------- /libs/.docker/ui/supabase/volumes/db/logs.sql: -------------------------------------------------------------------------------- 1 | \set pguser `echo "$POSTGRES_USER"` 2 | 3 | create schema if not exists _analytics; 4 | alter schema _analytics owner to :pguser; 5 | -------------------------------------------------------------------------------- /libs/.docker/ui/supabase/volumes/db/realtime.sql: -------------------------------------------------------------------------------- 1 | \set pguser `echo "$POSTGRES_USER"` 2 | 3 | create schema if not exists _realtime; 4 | alter schema _realtime owner to :pguser; 5 | -------------------------------------------------------------------------------- /libs/.docker/ui/supabase/volumes/db/roles.sql: -------------------------------------------------------------------------------- 1 | -- NOTE: change to your own passwords for production environments 2 | \set pgpass `echo "$POSTGRES_PASSWORD"` 3 | 4 | ALTER USER authenticator WITH PASSWORD :'pgpass'; 5 | ALTER USER pgbouncer WITH PASSWORD :'pgpass'; 6 | ALTER USER supabase_auth_admin WITH PASSWORD :'pgpass'; 7 | ALTER USER supabase_functions_admin WITH PASSWORD :'pgpass'; 8 | ALTER USER supabase_storage_admin WITH PASSWORD :'pgpass'; 9 | -------------------------------------------------------------------------------- /libs/.docker/uninstall.sh: -------------------------------------------------------------------------------- 1 | # Remove any running services 2 | docker compose -f docker-compose.yml \ 3 | -f superagent/db/docker-compose.pgdb.yml \ 4 | -f superagent/db/docker-compose.pgadmin.yml \ 5 | 
-f superagent/motorhead/docker-compose.motorhead.yml \ 6 | -f ui/docker-compose.ui.yml \ 7 | down \ 8 | -v \ 9 | --remove-orphans -------------------------------------------------------------------------------- /libs/embed/.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | -------------------------------------------------------------------------------- /libs/embed/dist/web.js.LICENSE.txt: -------------------------------------------------------------------------------- 1 | /*! (c) Andrea Giammarchi - ISC */ 2 | 3 | /*! regenerator-runtime -- Copyright (c) 2014-present, Facebook, Inc. -- license (MIT): https://github.com/facebook/regenerator/blob/main/LICENSE */ 4 | 5 | /** 6 | * @license React 7 | * react-dom.production.min.js 8 | * 9 | * Copyright (c) Facebook, Inc. and its affiliates. 10 | * 11 | * This source code is licensed under the MIT license found in the 12 | * LICENSE file in the root directory of this source tree. 13 | */ 14 | 15 | /** 16 | * @license React 17 | * react-jsx-runtime.production.min.js 18 | * 19 | * Copyright (c) Facebook, Inc. and its affiliates. 20 | * 21 | * This source code is licensed under the MIT license found in the 22 | * LICENSE file in the root directory of this source tree. 23 | */ 24 | 25 | /** 26 | * @license React 27 | * react.production.min.js 28 | * 29 | * Copyright (c) Facebook, Inc. and its affiliates. 30 | * 31 | * This source code is licensed under the MIT license found in the 32 | * LICENSE file in the root directory of this source tree. 33 | */ 34 | 35 | /** 36 | * @license React 37 | * scheduler.production.min.js 38 | * 39 | * Copyright (c) Facebook, Inc. and its affiliates. 40 | * 41 | * This source code is licensed under the MIT license found in the 42 | * LICENSE file in the root directory of this source tree. 43 | */ 44 | 45 | /** @license React v16.13.1 46 | * react-is.production.min.js 47 | * 48 | * Copyright (c) Facebook, Inc. and its affiliates. 
49 | * 50 | * This source code is licensed under the MIT license found in the 51 | * LICENSE file in the root directory of this source tree. 52 | */ 53 | -------------------------------------------------------------------------------- /libs/embed/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "superagent-chat-embed-v01", 3 | "version": "0.1.2", 4 | "description": "A embadable chat widget for Superagent.sh.", 5 | "main": "dist/web.js", 6 | "scripts": { 7 | "start": "webpack serve --mode development", 8 | "build": "webpack --mode production" 9 | }, 10 | "keywords": [], 11 | "author": "", 12 | "license": "ISC", 13 | "dependencies": { 14 | "@chakra-ui/react": "^2.8.0", 15 | "@emotion/react": "^11.11.1", 16 | "@emotion/styled": "^11.11.0", 17 | "framer-motion": "^10.15.2", 18 | "react": "^18.2.0", 19 | "react-dom": "^18.2.0", 20 | "react-icons": "^4.10.1", 21 | "react-use": "^17.4.0", 22 | "superagentai-js": "^0.0.51" 23 | }, 24 | "devDependencies": { 25 | "@babel/core": "^7.22.10", 26 | "@babel/preset-env": "^7.22.10", 27 | "@babel/preset-react": "^7.22.5", 28 | "babel-loader": "^9.1.3", 29 | "webpack": "^5.88.2", 30 | "webpack-cli": "^5.1.4", 31 | "webpack-dev-server": "^4.15.1" 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /libs/embed/test.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Widget Test 7 | 8 | 9 | 10 |
11 | 12 | 14 | 15 | 21 | 22 | -------------------------------------------------------------------------------- /libs/embed/webpack.config.js: -------------------------------------------------------------------------------- 1 | const path = require('path'); 2 | 3 | module.exports = { 4 | entry: './src/index.js', 5 | output: { 6 | path: path.resolve(__dirname, 'dist'), 7 | filename: 'web.js', 8 | library: 'Superagent', 9 | libraryTarget: 'umd', 10 | libraryExport: 'init', 11 | globalObject: 'this' 12 | }, 13 | module: { 14 | rules: [ 15 | { 16 | test: /\.(js|jsx)$/, 17 | exclude: /node_modules/, 18 | use: { 19 | loader: 'babel-loader', 20 | options: { 21 | presets: ['@babel/preset-env', '@babel/preset-react'], 22 | }, 23 | }, 24 | }, 25 | ], 26 | }, 27 | resolve: { 28 | extensions: ['.js', '.jsx'], 29 | }, 30 | devServer: { 31 | static: { 32 | directory: path.join(__dirname, './'), 33 | }, 34 | compress: true, 35 | port: 9000, 36 | open: 'index.html', 37 | historyApiFallback: { 38 | rewrites: [ 39 | { from: /^\/$/, to: '/test.html' }, // Redirect root requests to test.html 40 | { from: /^\/index.html$/, to: '/test.html' }, // Optional: also redirect explicit index.html requests 41 | ], 42 | }, 43 | } 44 | }; 45 | -------------------------------------------------------------------------------- /libs/superagent/.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | exclude = 3 | venv 4 | .venv 5 | __pycache__ 6 | notebooks 7 | # Recommend matching the black line length (default 88), 8 | # rather than using the flake8 default of 79: 9 | max-line-length = 88 10 | extend-ignore = 11 | # See https://github.com/PyCQA/pycodestyle/issues/373 12 | E203, 13 | E501, 14 | -------------------------------------------------------------------------------- /libs/superagent/.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | *.pyc 3 | .env 4 | .venv 5 | superenv/ 6 | 
# Multi-stage build: install Python dependencies with Poetry in `builder`,
# then copy only the resulting virtualenv into the runtime image.
FROM python:3.11 AS builder
WORKDIR /app

RUN pip install poetry

ENV POETRY_NO_INTERACTION=1 \
    POETRY_VIRTUALENVS_IN_PROJECT=1 \
    POETRY_VIRTUALENVS_CREATE=1 \
    POETRY_CACHE_DIR=/tmp/poetry_cache \
    MAX_CONCURRENCY=20

# Copy only dependency files for layer caching
COPY pyproject.toml poetry.lock ./

# Install the required packages of the application into .venv
RUN poetry install --no-root && rm -rf $POETRY_CACHE_DIR

FROM python:3.11 AS runtime
WORKDIR /app

# Install Node.js 20 from NodeSource (needed at runtime alongside Python),
# plus curl/gnupg for the key setup and netcat for connectivity checks.
RUN apt-get update && apt-get install -y curl ca-certificates gnupg netcat-openbsd && \
    mkdir -p /etc/apt/keyrings && \
    curl -fsSL https://deb.nodesource.com/gpgkey/nodesource.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg && \
    echo "deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_20.x bookworm main" | tee /etc/apt/sources.list.d/nodesource.list && \
    apt-get update && \
    apt-get install -y nodejs

# Put the copied virtualenv first on PATH so `python`, `pip` and installed
# CLI tools (gunicorn, prisma) resolve to it.
ENV PATH="/app/.venv/bin:$PATH"
ENV PORT="8080"

COPY --from=builder /app/.venv /app/.venv

COPY . ./

# Improve grpc error messages
RUN pip install grpcio-status

# Enable prisma migrations
RUN prisma generate

# `exec` makes gunicorn PID 1 so it receives SIGTERM for graceful shutdown.
CMD exec gunicorn --bind :$PORT --workers 2 --timeout 0 --worker-class uvicorn.workers.UvicornWorker --threads 8 app.main:app
from abc import ABC, abstractmethod


class BaseApiAgentManager(ABC):
    """
    Abstract class for managing agents.
    It can be Agent or Agent as a tool
    """

    @abstractmethod
    async def get_assistant(self, assistant: dict):
        """Look up the stored assistant matching the given config dict."""
        pass

    @abstractmethod
    async def get_datasource(self, assistant: dict, datasource: dict):
        """Look up a datasource belonging to the given assistant."""
        pass

    @abstractmethod
    async def get_tool(self, assistant: dict, tool: dict):
        """Look up a tool belonging to the given assistant."""
        pass

    @abstractmethod
    async def add_assistant(self, data: dict, order: int | None = None):
        """Attach an assistant, optionally at a specific position/order."""
        pass

    @abstractmethod
    async def create_assistant(self, data: dict):
        """Create a new assistant record from the given data."""
        pass

    @abstractmethod
    async def delete_assistant(self, assistant: dict):
        """Delete the given assistant."""
        pass

    @abstractmethod
    async def update_assistant(self, assistant: dict, data: dict):
        """Update the given assistant with the new data."""
        pass
from abc import ABC, abstractmethod

from app.api.workflow_configs.api.api_manager import ApiManager


class BaseProcessor(ABC):
    """Base class for workflow-config processors.

    Holds the assistant config plus the API manager/user needed to apply
    changes; subclasses implement `process` for a specific resource type.
    """

    def __init__(
        self,
        assistant: dict,
        api_manager: ApiManager,
        api_user,
    ):
        self.assistant = assistant
        self.api_manager = api_manager
        self.api_user = api_user

    @abstractmethod
    async def process(self, old_data, new_data):
        """Apply the changes implied by the transition from `old_data` to
        `new_data` — concrete subclasses define the exact semantics."""
        pass
def check_is_agent_tool(tool_type):
    """Return True if `tool_type` matches the value of any AgentType member.

    Bug fix: the original returned True on a match but fell off the end
    (returning None) otherwise; now always returns an explicit bool.
    Backward compatible for callers, since None and False are both falsy.
    """
    return any(tool_type == agent_type.value for agent_type in AgentType)
class BaseMemory(ABC):
    """Abstract conversation-memory interface backed by a BaseMemoryStore."""

    # Concrete implementations persist messages through this store.
    memory_store: BaseMemoryStore

    @abstractmethod
    def add_message(self, message: BaseMessage) -> None:
        """Synchronously append a message to the memory."""
        ...

    @abstractmethod
    async def aadd_message(self, message: BaseMessage) -> None:
        """Asynchronously append a message to the memory."""
        ...

    @abstractmethod
    def get_messages(self) -> List[BaseMessage]:
        """
        List all the messages stored in the memory.
        Messages are returned in the descending order of their creation.
        """
        ...

    @abstractmethod
    def clear(self) -> None:
        """Remove all stored messages."""
        ...
class RedisMemoryStore(BaseMemoryStore):
    """Message history persisted in a Redis list, newest entry first."""

    key_prefix: str = "message_history:"

    def __init__(self, uri: str, session_id: str):
        self.redis = Redis.from_url(uri)
        self.session_id = session_id

    @property
    def key(self):
        # Full Redis key for this conversation's history list.
        return f"{self.key_prefix}{self.session_id}"

    def add_message(self, message: BaseMessage):
        # LPUSH keeps the most recently added message at index 0.
        self.redis.lpush(self.key, message.json())

    async def aadd_message(self, message: BaseMessage):
        # The Redis client is blocking; run the write on the default executor.
        await get_event_loop().run_in_executor(None, self.add_message, message)

    def get_messages(self) -> list[BaseMessage]:
        raw_items = self.redis.lrange(self.key, 0, -1)
        return [BaseMessage.parse_raw(item) for item in raw_items]

    def clear(self):
        self.redis.delete(self.key)
class AdvancedScraper(BaseTool):
    """Extract a webpage's content as markdown via the Olostep browser-based API."""

    name = "AdvancedScraper"
    description = "useful for quickly and easily extracting content from a webpage (uses a real browser via Olostep)"
    return_direct = False

    def _run(self, url: str) -> str:
        """Scrape `url` and return its markdown content."""
        endpoint = "https://agent.olostep.com/olostep-p2p-incomingAPI"
        headers = {"Authorization": "Bearer " + self.metadata.get("apiKey")}

        # for more details look at => https://docs.olostep.com/api-reference/start-agent
        querystring = {
            "url": url,
            "saveMarkdown": True,
            "expandMarkdown": True,
            "waitBeforeScraping": 1,
            "fastLane": True,
            "removeCSSselectors": "default",
            "timeout": 45,  # server-side scraping budget (seconds)
        }

        # Bug fix: the request had no client-side timeout, so a dead connection
        # could hang the tool forever. Allow slightly more than the 45s
        # server-side budget before giving up.
        response = requests.get(endpoint, headers=headers, params=querystring, timeout=60)
        return response.json().get("markdown_content")

    async def _arun(self, url: str) -> str:
        # `requests` is blocking; delegate the sync path to the default executor.
        loop = asyncio.get_event_loop()
        response_text = await loop.run_in_executor(None, self._run, url)
        return response_text
class CodeInterpreter(BaseTool):
    """Execute code remotely via the configured code-executor service."""

    name = "Code executor"
    description = "useful for executing code. returns the evaluation/result"

    def _setup_request(self, code: str):
        """Build the (url, headers, payload) triple for an execution request."""
        bearer_token = config("CODE_EXECUTOR_TOKEN")
        executor_url = config("CODE_EXECUTOR_URL")
        request_headers = {
            "content-type": "application/json",
            "authorization": f"Bearer {bearer_token}",
        }
        payload = {"code": code, "interpreter_mode": True}
        return executor_url, request_headers, payload

    def _run(self, python_code: str) -> str:
        target, request_headers, payload = self._setup_request(python_code)
        return requests.post(url=target, headers=request_headers, json=payload).text

    async def _arun(self, python_code: str) -> str:
        target, request_headers, payload = self._setup_request(python_code)
        async with aiohttp.ClientSession() as session:
            async with session.post(
                url=target, headers=request_headers, json=payload
            ) as response:
                return await response.text()
class GoogleSearch(BaseTool):
    """Google web search via the serper.dev API."""

    # Bug fix: name/description were copy-pasted from the PubMed tool
    # ("PubMed® search" / medical publications), which misled the LLM when
    # selecting tools. They now describe what this tool actually does.
    name = "Google search"
    description = "useful for searching Google and answering questions about current events"
    return_direct = False

    def _run(self, query: str) -> str:
        headers = {
            "X-API-KEY": self.metadata.get("apiKey"),
            "Content-Type": "application/json",
        }
        payload = json.dumps({"q": query})
        response = requests.request("POST", url, headers=headers, data=payload)
        return response.text

    async def _arun(self, query: str) -> str:
        headers = {
            "X-API-KEY": self.metadata.get("apiKey"),
            "Content-Type": "application/json",
        }
        payload = json.dumps({"q": query})

        async with aiohttp.ClientSession() as session:
            async with session.post(url, headers=headers, data=payload) as response:
                return await response.text()
class MetaphorSearch(BaseTool):
    """Research a topic using the Metaphor search API."""

    name = "metaphor search"
    description = "useful for researching a certain topic"
    return_direct = False

    def _build_wrapper(self) -> MetaphorSearchAPIWrapper:
        # Wrapper construction is shared by the sync and async paths.
        return MetaphorSearchAPIWrapper(
            metaphor_api_key=self.metadata["metaphorApiKey"]
        )

    def _run(self, search_query: str) -> str:
        wrapper = self._build_wrapper()
        return wrapper.results(search_query, 10, use_autoprompt=True)

    async def _arun(self, search_query: str) -> str:
        wrapper = self._build_wrapper()
        return await wrapper.results_async(search_query, 10, use_autoprompt=True)
class PubMed(BaseTool):
    """Search PubMed® for medical publications."""

    name = "PubMed® search"
    description = "useful for answering question about medical publications"
    return_direct = False

    def _run(self, search_query: str) -> str:
        query_runner = PubmedQueryRun(args_schema=self.args_schema)
        return query_runner.run(search_query)

    async def _arun(self, search_query: str) -> str:
        # The underlying query runner is blocking; off-load to the executor.
        query_runner = PubmedQueryRun(args_schema=self.args_schema)
        return await asyncio.get_event_loop().run_in_executor(
            None, query_runner.run, search_query
        )
class Replicate(BaseTool):
    """Query a Replicate-hosted model."""

    name = "Replicate"
    description = "useful for querying a Replicate model."
    return_direct = False

    def _run(self, prompt: str) -> str:
        model_name = self.metadata["model"]
        api_token = self.metadata["apiKey"]
        model_input = self.metadata["arguments"]
        # NOTE(review): both `api_token` and `replicate_api_token` are passed,
        # matching the original behavior — confirm whether `api_token` is
        # still accepted by the installed langchain Replicate wrapper.
        model = ReplicateModel(
            model=model_name,
            input=model_input,
            api_token=api_token,
            replicate_api_token=api_token,
        )
        return model.predict(prompt)

    async def _arun(self, prompt: str) -> str:
        model_name = self.metadata["model"]
        api_token = self.metadata["apiKey"]
        # Bug fix: the async path previously dropped the user-configured
        # `arguments` that the sync path forwards via `input`, so async
        # invocations ran the model without its configured parameters.
        model_input = self.metadata["arguments"]
        model = ReplicateModel(
            model=model_name, input=model_input, replicate_api_token=api_token
        )
        return await model.apredict(prompt)
import aiohttp 2 | from langchain_community.tools import BaseTool 3 | 4 | 5 | class SEC(BaseTool): 6 | name = "SEC" 7 | description = "useful for searching SEC filings for a company" 8 | return_direct = False 9 | 10 | def _run(self, ticker: str) -> str: 11 | pass 12 | 13 | async def _arun(self, ticker: str) -> str: 14 | form = self.metadata.get("form") 15 | identity = self.metadata.get("identity") 16 | url = "https://super-sec.replit.app/search" 17 | data = {"form": form, "identity": identity, "ticker": ticker} 18 | 19 | async with aiohttp.ClientSession() as session: 20 | async with session.post(url, json=data) as response: 21 | return await response.text() 22 | -------------------------------------------------------------------------------- /libs/superagent/app/tools/tavily.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from decouple import config 4 | from langchain_community.tools import BaseTool 5 | from tavily import TavilyClient 6 | 7 | 8 | class Tavily(BaseTool): 9 | name = "PubMed® search" 10 | description = "useful for answering question about medical publications" 11 | return_direct = False 12 | 13 | def _run(self, query: str) -> str: 14 | tavily = TavilyClient( 15 | api_key=self.metadata.get("apiKey") or config("TAVILY_API_KEY") 16 | ) 17 | response = tavily.search(query=query, search_depth="advanced") 18 | context = [ 19 | {"url": obj["url"], "content": obj["content"]} for obj in response.results 20 | ] 21 | return context 22 | 23 | async def _arun(self, query: str) -> str: 24 | tavily = TavilyClient( 25 | api_key=self.metadata.get("apiKey") or config("TAVILY_API_KEY") 26 | ) 27 | loop = asyncio.get_event_loop() 28 | response = await loop.run_in_executor(None, tavily.search, query, "advanced") 29 | context = [ 30 | {"url": obj["url"], "content": obj["content"]} 31 | for obj in response.get("results") 32 | ] 33 | return context 34 | 
-------------------------------------------------------------------------------- /libs/superagent/app/tools/tts_1.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from langchain_community.tools import BaseTool 4 | from openai import AsyncOpenAI, OpenAI 5 | 6 | 7 | class TTS1(BaseTool): 8 | name = "text-to-speech" 9 | description = "useful for generation voice audio from text" 10 | return_direct = False 11 | 12 | def _run(self, input: dict) -> str: 13 | client = OpenAI(api_key=self.metadata["openaiApiKey"]) 14 | speech_file_path = Path(__file__).parent / "speech.mp3" 15 | response = client.audio.speech.create( 16 | model="tts-1", 17 | voice=input["voice"] or "alloy", 18 | input=input["text"], 19 | ) 20 | output = response.stream_to_file(speech_file_path) 21 | return output 22 | 23 | async def _arun(self, input: dict) -> str: 24 | client = AsyncOpenAI(api_key=self.metadata["openaiApiKey"]) 25 | speech_file_path = Path(__file__).parent / "speech.mp3" 26 | response = await client.audio.speech.create( 27 | model="tts-1", 28 | voice=input["voice"] or "alloy", 29 | input=input["text"], 30 | ) 31 | output = response.stream_to_file(speech_file_path) 32 | return output 33 | -------------------------------------------------------------------------------- /libs/superagent/app/tools/wolfram_alpha.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | 3 | from langchain_community.tools import BaseTool 4 | from langchain_community.utilities.wolfram_alpha import WolframAlphaAPIWrapper 5 | 6 | 7 | class WolframAlpha(BaseTool): 8 | name = "Wolfram Alpha" 9 | description = "useful for calculation and computation" 10 | return_direct = False 11 | 12 | def _run(self, input: str) -> str: 13 | app_id = self.metadata["appId"] 14 | wolfram = WolframAlphaAPIWrapper(wolfram_alpha_appid=app_id) 15 | return wolfram.run(input) 16 | 17 | async def _arun(self, input: str) 
-> str: 18 | app_id = self.metadata["appId"] 19 | wolfram = WolframAlphaAPIWrapper(wolfram_alpha_appid=app_id) 20 | loop = asyncio.get_event_loop() 21 | output = await loop.run_in_executor(None, wolfram.run, input) 22 | return output 23 | -------------------------------------------------------------------------------- /libs/superagent/app/tools/zapier.py: -------------------------------------------------------------------------------- 1 | from langchain.agents import AgentType, initialize_agent 2 | from langchain_community.agent_toolkits import ZapierToolkit 3 | from langchain_community.tools import BaseTool 4 | from langchain_community.utilities.zapier import ZapierNLAWrapper 5 | from langchain_openai import ChatOpenAI 6 | 7 | 8 | class ZapierNLA(BaseTool): 9 | name = "Zapier" 10 | description = ( 11 | "useful for performing actions such sending emails, scheduling meetings etc." 12 | ) 13 | return_direct = False 14 | 15 | def _run(self, input: str) -> str: 16 | zapier_nla_api_key = self.metadata["zapierNlaApiKey"] 17 | zapier = ZapierNLAWrapper(zapier_nla_api_key=zapier_nla_api_key) 18 | toolkit = ZapierToolkit.from_zapier_nla_wrapper(zapier) 19 | agent = initialize_agent( 20 | toolkit.get_tools(), 21 | llm=ChatOpenAI(openai_api_key=self.metadata["openaiApiKey"], model="gpt-4"), 22 | agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, 23 | verbose=True, 24 | ) 25 | output = agent.run(input) 26 | return output 27 | 28 | async def _arun(self, input: str) -> str: 29 | zapier_nla_api_key = self.metadata["zapierNlaApiKey"] 30 | zapier = ZapierNLAWrapper(zapier_nla_api_key=zapier_nla_api_key) 31 | toolkit = ZapierToolkit.from_zapier_nla_wrapper(zapier) 32 | agent = initialize_agent( 33 | toolkit.get_tools(), 34 | llm=ChatOpenAI(openai_api_key=self.metadata["openaiApiKey"], model="gpt-4"), 35 | agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, 36 | verbose=True, 37 | ) 38 | output = await agent.arun(input) 39 | return output 40 | 
-------------------------------------------------------------------------------- /libs/superagent/app/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/superagent-ai/superagent/168a176f9c67c53955d5eea0c8f743d5254b0606/libs/superagent/app/utils/__init__.py -------------------------------------------------------------------------------- /libs/superagent/app/utils/llm.py: -------------------------------------------------------------------------------- 1 | from litellm import get_llm_provider as litellm_get_llm_provider 2 | 3 | LLM_MAPPING = { 4 | "GPT_3_5_TURBO_16K_0613": "gpt-3.5-turbo-16k-0613", 5 | "GPT_3_5_TURBO_0613": "gpt-3.5-turbo-0613", 6 | "GPT_3_5_TURBO_1106": "gpt-3.5-turbo-1106", 7 | "GPT_3_5_TURBO": "gpt-3.5-turbo", 8 | "GPT_4_0613": "gpt-4-0613", 9 | "GPT_4_32K_0613": "gpt-4-32k-0613", 10 | "GPT_4_32K": "gpt-4-32k", 11 | "GPT_4_1106_PREVIEW": "gpt-4-1106-preview", 12 | "GPT_4_TURBO_PREVIEW": "gpt-4-turbo-preview", 13 | "GPT_3_5_TURBO_0125": "gpt-3.5-turbo-0125", 14 | "GPT_4": "gpt-4", 15 | "GPT_4_TURBO": "gpt-4-turbo", 16 | "GPT_4_TURBO_2024_04_09": "gpt-4-turbo-2024-04-09", 17 | "GPT_4_0125_PREVIEW": "gpt-4-0125-preview", 18 | "GPT_4_O": "gpt-4o", 19 | } 20 | 21 | LLM_REVERSE_MAPPING = {v: k for k, v in LLM_MAPPING.items()} 22 | 23 | 24 | LLM_PROVIDER_MAPPING = { 25 | "OPENAI": [ 26 | "GPT_3_5_TURBO", 27 | "GPT_3_5_TURBO_16K_0613", 28 | "GPT_3_5_TURBO_0613", 29 | "GPT_3_5_TURBO_1106", 30 | "GPT_3_5_TURBO_0125", 31 | "GPT_4_0613", 32 | "GPT_4_32K_0613", 33 | "GPT_4_1106_PREVIEW", 34 | "GPT_4_TURBO_PREVIEW", 35 | "GPT_4", 36 | "GPT_4_TURBO", 37 | "GPT_4_TURBO_2024_04_09", 38 | "GPT_4_0125_PREVIEW", 39 | "GPT_4_32K", 40 | "GPT_4_O", 41 | ] 42 | } 43 | 44 | 45 | def get_llm_provider(model: str): 46 | _, provider, _, _ = litellm_get_llm_provider(model) 47 | provider = provider.upper() 48 | 49 | return provider 50 | 
-------------------------------------------------------------------------------- /libs/superagent/app/utils/prisma.py: -------------------------------------------------------------------------------- 1 | from prisma import Prisma 2 | 3 | prisma = Prisma() 4 | -------------------------------------------------------------------------------- /libs/superagent/app/vectorstores/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/superagent-ai/superagent/168a176f9c67c53955d5eea0c8f743d5254b0606/libs/superagent/app/vectorstores/__init__.py -------------------------------------------------------------------------------- /libs/superagent/app/vectorstores/abstract.py: -------------------------------------------------------------------------------- 1 | from abc import ABC, abstractmethod 2 | 3 | 4 | class VectorStoreBase(ABC): 5 | @abstractmethod 6 | def embed_documents(self): 7 | pass 8 | 9 | @abstractmethod 10 | def query_documents(): 11 | pass 12 | 13 | @abstractmethod 14 | def delete(self): 15 | pass 16 | -------------------------------------------------------------------------------- /libs/superagent/app/vectorstores/embeddings.py: -------------------------------------------------------------------------------- 1 | from decouple import config 2 | from langchain_openai import AzureOpenAIEmbeddings, OpenAIEmbeddings 3 | 4 | from app.models.request import EmbeddingsModelProvider 5 | from app.utils.helpers import get_first_non_null 6 | 7 | 8 | def get_embeddings_model_provider(embeddings_model_provider: EmbeddingsModelProvider): 9 | if embeddings_model_provider == EmbeddingsModelProvider.AZURE_OPENAI: 10 | return AzureOpenAIEmbeddings( 11 | azure_deployment=config("AZURE_OPENAI_EMBEDDINGS_DEPLOYMENT"), 12 | api_version=get_first_non_null( 13 | config("AZURE_OPENAI_EMBEDDINGS_API_VERSION"), 14 | config("AZURE_OPENAI_API_VERSION"), 15 | ), 16 | api_key=get_first_non_null( 17 | 
config("AZURE_OPENAI_EMBEDDINGS_API_KEY"), 18 | config("AZURE_OPENAI_API_KEY"), 19 | ), 20 | azure_endpoint=get_first_non_null( 21 | config("AZURE_OPENAI_EMBEDDINGS_ENDPOINT"), 22 | config("AZURE_OPENAI_ENDPOINT"), 23 | ), 24 | ) 25 | else: 26 | return OpenAIEmbeddings( 27 | model="text-embedding-3-small", openai_api_key=config("OPENAI_API_KEY") 28 | ) 29 | -------------------------------------------------------------------------------- /libs/superagent/app/workflows/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/superagent-ai/superagent/168a176f9c67c53955d5eea0c8f743d5254b0606/libs/superagent/app/workflows/__init__.py -------------------------------------------------------------------------------- /libs/superagent/lint-and-format.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # getting changed files (only staged) 4 | changes=$(git diff --name-only --cached | grep '^libs/superagent.*\.py$' | sed 's|^libs/superagent/||') 5 | 6 | # Filter deleted files 7 | changes=$(echo "$changes" | while read -r file; do [ -e "$file" ] && echo "$file"; done) 8 | 9 | lint() { 10 | poetry run black $changes 11 | # sort imports 12 | poetry run ruff check --select I --fix $changes 13 | # format code 14 | poetry run ruff check --fix $changes 15 | poetry run vulture $changes 16 | git add $changes 17 | echo "Changes applied"; 18 | } 19 | 20 | format() { 21 | poetry run black $changes --check 22 | poetry run ruff $changes 23 | poetry run vulture $changes 24 | } 25 | 26 | if [ -n "$changes" ]; then 27 | case "$1" in 28 | lint) 29 | lint 30 | ;; 31 | format) 32 | format 33 | ;; 34 | *) 35 | echo "Invalid command. 
Usage: $0 [lint|format]" 36 | exit 1 37 | ;; 38 | esac 39 | else 40 | echo "No changes"; 41 | fi 42 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230822214343_agent_llm/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateEnum 2 | CREATE TYPE "DocumentType" AS ENUM ('TXT', 'PDF', 'CSV', 'YOUTUBE', 'OPENAPI', 'URL', 'MARKDOWN', 'FIRESTORE', 'PSYCHIC', 'GITHUB_REPOSITORY', 'WEBPAGE', 'STRIPE', 'AIRTABLE', 'SITEMAP', 'NOTION'); 3 | 4 | -- CreateEnum 5 | CREATE TYPE "ToolType" AS ENUM ('BROWSER', 'SEARCH', 'WOLFRAM_ALPHA', 'REPLICATE', 'ZAPIER_NLA', 'AGENT', 'OPENAPI', 'CHATGPT_PLUGIN', 'METAPHOR'); 6 | 7 | -- CreateTable 8 | CREATE TABLE "Agent" ( 9 | "id" SERIAL NOT NULL, 10 | "name" TEXT NOT NULL, 11 | "isActive" BOOLEAN NOT NULL DEFAULT false, 12 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 13 | "updatedAt" TIMESTAMP(3) NOT NULL, 14 | "llmId" INTEGER NOT NULL, 15 | 16 | CONSTRAINT "Agent_pkey" PRIMARY KEY ("id") 17 | ); 18 | 19 | -- CreateTable 20 | CREATE TABLE "LLM" ( 21 | "id" SERIAL NOT NULL, 22 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 23 | "updatedAt" TIMESTAMP(3) NOT NULL, 24 | 25 | CONSTRAINT "LLM_pkey" PRIMARY KEY ("id") 26 | ); 27 | 28 | -- CreateIndex 29 | CREATE UNIQUE INDEX "Agent_llmId_key" ON "Agent"("llmId"); 30 | 31 | -- AddForeignKey 32 | ALTER TABLE "Agent" ADD CONSTRAINT "Agent_llmId_fkey" FOREIGN KEY ("llmId") REFERENCES "LLM"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 33 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230823195402_add_llm/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - Added the required column `apiKey` to the `LLM` table without a default value. This is not possible if the table is not empty. 
5 | 6 | */ 7 | -- CreateEnum 8 | CREATE TYPE "LLMProvider" AS ENUM ('OPENAI'); 9 | 10 | -- CreateEnum 11 | CREATE TYPE "LLMModel" AS ENUM ('GPT_3_5_TURBO_16K_0613', 'GPT_3_5_TURBO_0613', 'GPT_4_0613', 'GPT_4_32K_0613'); 12 | 13 | -- AlterTable 14 | ALTER TABLE "LLM" ADD COLUMN "apiKey" TEXT NOT NULL, 15 | ADD COLUMN "model" "LLMModel" NOT NULL DEFAULT 'GPT_3_5_TURBO_16K_0613', 16 | ADD COLUMN "options" JSONB, 17 | ADD COLUMN "provider" "LLMProvider" NOT NULL DEFAULT 'OPENAI'; 18 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230823200614_change_ids/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - The primary key for the `Agent` table will be changed. If it partially fails, the table could be left without primary key constraint. 5 | - The primary key for the `LLM` table will be changed. If it partially fails, the table could be left without primary key constraint. 
6 | 7 | */ 8 | -- DropForeignKey 9 | ALTER TABLE "Agent" DROP CONSTRAINT "Agent_llmId_fkey"; 10 | 11 | -- AlterTable 12 | ALTER TABLE "Agent" DROP CONSTRAINT "Agent_pkey", 13 | ALTER COLUMN "id" DROP DEFAULT, 14 | ALTER COLUMN "id" SET DATA TYPE TEXT, 15 | ALTER COLUMN "llmId" SET DATA TYPE TEXT, 16 | ADD CONSTRAINT "Agent_pkey" PRIMARY KEY ("id"); 17 | DROP SEQUENCE "Agent_id_seq"; 18 | 19 | -- AlterTable 20 | ALTER TABLE "LLM" DROP CONSTRAINT "LLM_pkey", 21 | ALTER COLUMN "id" DROP DEFAULT, 22 | ALTER COLUMN "id" SET DATA TYPE TEXT, 23 | ADD CONSTRAINT "LLM_pkey" PRIMARY KEY ("id"); 24 | DROP SEQUENCE "LLM_id_seq"; 25 | 26 | -- AddForeignKey 27 | ALTER TABLE "Agent" ADD CONSTRAINT "Agent_llmId_fkey" FOREIGN KEY ("llmId") REFERENCES "LLM"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 28 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230823211516_llm_options_default/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "LLM" ALTER COLUMN "options" SET DEFAULT '{}'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230823212925_remove_llm_options_default/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "LLM" ALTER COLUMN "options" DROP DEFAULT; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230824065536_add_agent_prompt/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Agent" ADD COLUMN "prompt" TEXT; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230824070125_llm_agent_one_to_many/migration.sql: 
-------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - You are about to drop the column `llmId` on the `Agent` table. All the data in the column will be lost. 5 | 6 | */ 7 | -- DropForeignKey 8 | ALTER TABLE "Agent" DROP CONSTRAINT "Agent_llmId_fkey"; 9 | 10 | -- DropIndex 11 | DROP INDEX "Agent_llmId_key"; 12 | 13 | -- AlterTable 14 | ALTER TABLE "Agent" DROP COLUMN "llmId"; 15 | 16 | -- AlterTable 17 | ALTER TABLE "LLM" ADD COLUMN "agentId" TEXT; 18 | 19 | -- AddForeignKey 20 | ALTER TABLE "LLM" ADD CONSTRAINT "LLM_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agent"("id") ON DELETE SET NULL ON UPDATE CASCADE; 21 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230824070755_add_agent_llm_mapping/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - You are about to drop the column `agentId` on the `LLM` table. All the data in the column will be lost. 
5 | 6 | */ 7 | -- DropForeignKey 8 | ALTER TABLE "LLM" DROP CONSTRAINT "LLM_agentId_fkey"; 9 | 10 | -- AlterTable 11 | ALTER TABLE "LLM" DROP COLUMN "agentId"; 12 | 13 | -- CreateTable 14 | CREATE TABLE "AgentLLM" ( 15 | "agentId" TEXT NOT NULL, 16 | "llmId" TEXT NOT NULL, 17 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 18 | "updatedAt" TIMESTAMP(3) NOT NULL, 19 | 20 | CONSTRAINT "AgentLLM_pkey" PRIMARY KEY ("agentId","llmId") 21 | ); 22 | 23 | -- AddForeignKey 24 | ALTER TABLE "AgentLLM" ADD CONSTRAINT "AgentLLM_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agent"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 25 | 26 | -- AddForeignKey 27 | ALTER TABLE "AgentLLM" ADD CONSTRAINT "AgentLLM_llmId_fkey" FOREIGN KEY ("llmId") REFERENCES "LLM"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 28 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230824084308_api_user/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateTable 2 | CREATE TABLE "ApiUser" ( 3 | "id" TEXT NOT NULL, 4 | "token" TEXT NOT NULL, 5 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 6 | "updatedAt" TIMESTAMP(3) NOT NULL, 7 | 8 | CONSTRAINT "ApiUser_pkey" PRIMARY KEY ("id") 9 | ); 10 | 11 | -- CreateIndex 12 | CREATE UNIQUE INDEX "ApiUser_token_key" ON "ApiUser"("token"); 13 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230824103528_api_user_token_optional/migration.sql: -------------------------------------------------------------------------------- 1 | -- DropIndex 2 | DROP INDEX "ApiUser_token_key"; 3 | 4 | -- AlterTable 5 | ALTER TABLE "ApiUser" ALTER COLUMN "token" DROP NOT NULL; 6 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230824110611_add_api_user_to_models/migration.sql: 
-------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - Added the required column `apiUserId` to the `Agent` table without a default value. This is not possible if the table is not empty. 5 | - Added the required column `apiUserId` to the `LLM` table without a default value. This is not possible if the table is not empty. 6 | 7 | */ 8 | -- AlterTable 9 | ALTER TABLE "Agent" ADD COLUMN "apiUserId" TEXT NOT NULL; 10 | 11 | -- AlterTable 12 | ALTER TABLE "LLM" ADD COLUMN "apiUserId" TEXT NOT NULL; 13 | 14 | -- AddForeignKey 15 | ALTER TABLE "Agent" ADD CONSTRAINT "Agent_apiUserId_fkey" FOREIGN KEY ("apiUserId") REFERENCES "ApiUser"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 16 | 17 | -- AddForeignKey 18 | ALTER TABLE "LLM" ADD CONSTRAINT "LLM_apiUserId_fkey" FOREIGN KEY ("apiUserId") REFERENCES "ApiUser"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 19 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230824114232_add_datasources/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateEnum 2 | CREATE TYPE "DatasourceType" AS ENUM ('TXT', 'PDF', 'CSV', 'YOUTUBE', 'FUNCTION'); 3 | 4 | -- CreateTable 5 | CREATE TABLE "Datasource" ( 6 | "id" TEXT NOT NULL, 7 | "name" TEXT NOT NULL, 8 | "description" TEXT, 9 | "url" TEXT, 10 | "type" "DatasourceType" NOT NULL, 11 | "apiUserId" TEXT NOT NULL, 12 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 13 | "updatedAt" TIMESTAMP(3) NOT NULL, 14 | 15 | CONSTRAINT "Datasource_pkey" PRIMARY KEY ("id") 16 | ); 17 | 18 | -- CreateTable 19 | CREATE TABLE "AgentDatasource" ( 20 | "agentId" TEXT NOT NULL, 21 | "datasourceId" TEXT NOT NULL, 22 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 23 | "updatedAt" TIMESTAMP(3) NOT NULL, 24 | 25 | CONSTRAINT "AgentDatasource_pkey" PRIMARY KEY ("agentId","datasourceId") 26 | ); 27 | 28 | -- 
AddForeignKey 29 | ALTER TABLE "Datasource" ADD CONSTRAINT "Datasource_apiUserId_fkey" FOREIGN KEY ("apiUserId") REFERENCES "ApiUser"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 30 | 31 | -- AddForeignKey 32 | ALTER TABLE "AgentDatasource" ADD CONSTRAINT "AgentDatasource_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agent"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 33 | 34 | -- AddForeignKey 35 | ALTER TABLE "AgentDatasource" ADD CONSTRAINT "AgentDatasource_datasourceId_fkey" FOREIGN KEY ("datasourceId") REFERENCES "Datasource"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 36 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230831081114_datasource_metadata/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | -- This migration adds more than one value to an enum. 3 | -- With PostgreSQL versions 11 and earlier, this is not possible 4 | -- in a single migration. This can be worked around by creating 5 | -- multiple migrations, each migration adding only one value to 6 | -- the enum. 
7 | 8 | 9 | ALTER TYPE "DatasourceType" ADD VALUE 'GITHUB_REPOSITORY'; 10 | ALTER TYPE "DatasourceType" ADD VALUE 'MARKDOWN'; 11 | ALTER TYPE "DatasourceType" ADD VALUE 'WEBPAGE'; 12 | ALTER TYPE "DatasourceType" ADD VALUE 'AIRTABLE'; 13 | 14 | -- AlterTable 15 | ALTER TABLE "Datasource" ADD COLUMN "metadata" JSONB; 16 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230831092903_datasource_metadata_string/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Datasource" ALTER COLUMN "metadata" SET DATA TYPE TEXT; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230831105225_add_datasource_types/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | -- This migration adds more than one value to an enum. 3 | -- With PostgreSQL versions 11 and earlier, this is not possible 4 | -- in a single migration. This can be worked around by creating 5 | -- multiple migrations, each migration adding only one value to 6 | -- the enum. 
7 | 8 | 9 | ALTER TYPE "DatasourceType" ADD VALUE 'STRIPE'; 10 | ALTER TYPE "DatasourceType" ADD VALUE 'NOTION'; 11 | ALTER TYPE "DatasourceType" ADD VALUE 'SITEMAP'; 12 | ALTER TYPE "DatasourceType" ADD VALUE 'URL'; 13 | 14 | -- DropEnum 15 | DROP TYPE "DocumentType"; 16 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230901072519_agent_tools/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateTable 2 | CREATE TABLE "Tool" ( 3 | "id" TEXT NOT NULL, 4 | "name" TEXT NOT NULL, 5 | "description" TEXT NOT NULL, 6 | "type" "ToolType" NOT NULL, 7 | "metadata" TEXT NOT NULL, 8 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 9 | "updatedAt" TIMESTAMP(3) NOT NULL, 10 | "apiUserId" TEXT NOT NULL, 11 | 12 | CONSTRAINT "Tool_pkey" PRIMARY KEY ("id") 13 | ); 14 | 15 | -- CreateTable 16 | CREATE TABLE "AgentTool" ( 17 | "agentId" TEXT NOT NULL, 18 | "toolId" TEXT NOT NULL, 19 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 20 | "updatedAt" TIMESTAMP(3) NOT NULL, 21 | "datasourceId" TEXT, 22 | 23 | CONSTRAINT "AgentTool_pkey" PRIMARY KEY ("agentId","toolId") 24 | ); 25 | 26 | -- AddForeignKey 27 | ALTER TABLE "Tool" ADD CONSTRAINT "Tool_apiUserId_fkey" FOREIGN KEY ("apiUserId") REFERENCES "ApiUser"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 28 | 29 | -- AddForeignKey 30 | ALTER TABLE "AgentTool" ADD CONSTRAINT "AgentTool_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agent"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 31 | 32 | -- AddForeignKey 33 | ALTER TABLE "AgentTool" ADD CONSTRAINT "AgentTool_toolId_fkey" FOREIGN KEY ("toolId") REFERENCES "Tool"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 34 | 35 | -- AddForeignKey 36 | ALTER TABLE "AgentTool" ADD CONSTRAINT "AgentTool_datasourceId_fkey" FOREIGN KEY ("datasourceId") REFERENCES "Datasource"("id") ON DELETE SET NULL ON UPDATE CASCADE; 37 | 
-------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230901115947_tool_bing_search/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - The values [SEARCH] on the enum `ToolType` will be removed. If these variants are still used in the database, this will fail. 5 | 6 | */ 7 | -- AlterEnum 8 | BEGIN; 9 | CREATE TYPE "ToolType_new" AS ENUM ('BROWSER', 'BING_SEARCH', 'REPLICATE', 'WOLFRAM_ALPHA', 'ZAPIER_NLA', 'AGENT', 'OPENAPI', 'CHATGPT_PLUGIN', 'METAPHOR'); 10 | ALTER TABLE "Tool" ALTER COLUMN "type" TYPE "ToolType_new" USING ("type"::text::"ToolType_new"); 11 | ALTER TYPE "ToolType" RENAME TO "ToolType_old"; 12 | ALTER TYPE "ToolType_new" RENAME TO "ToolType"; 13 | DROP TYPE "ToolType_old"; 14 | COMMIT; 15 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230901124505_remove_redundant_fields/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - You are about to drop the column `datasourceId` on the `AgentTool` table. All the data in the column will be lost. 
5 | 6 | */ 7 | -- DropForeignKey 8 | ALTER TABLE "AgentTool" DROP CONSTRAINT "AgentTool_datasourceId_fkey"; 9 | 10 | -- AlterTable 11 | ALTER TABLE "AgentTool" DROP COLUMN "datasourceId"; 12 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230901182450_pubmed_tool/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'PUBMED'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230901183619_tool_metadata_optional/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Tool" ALTER COLUMN "metadata" DROP NOT NULL; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230901184227_tool_metadata_mandatory/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - Made the column `metadata` on table `Tool` required. This step will fail if there are existing NULL values in that column. 
5 | 6 | */ 7 | -- AlterTable 8 | ALTER TABLE "Tool" ALTER COLUMN "metadata" SET NOT NULL; 9 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230904062421_add_worflow/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateTable 2 | CREATE TABLE "Workflow" ( 3 | "id" TEXT NOT NULL, 4 | "name" TEXT NOT NULL, 5 | "description" TEXT, 6 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 7 | "updatedAt" TIMESTAMP(3) NOT NULL, 8 | 9 | CONSTRAINT "Workflow_pkey" PRIMARY KEY ("id") 10 | ); 11 | 12 | -- CreateTable 13 | CREATE TABLE "WorkflowStep" ( 14 | "id" TEXT NOT NULL, 15 | "order" INTEGER NOT NULL, 16 | "workflowId" TEXT NOT NULL, 17 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 18 | "updatedAt" TIMESTAMP(3) NOT NULL, 19 | "input" TEXT NOT NULL, 20 | "output" TEXT NOT NULL, 21 | "agentId" TEXT NOT NULL, 22 | 23 | CONSTRAINT "WorkflowStep_pkey" PRIMARY KEY ("id") 24 | ); 25 | 26 | -- AddForeignKey 27 | ALTER TABLE "WorkflowStep" ADD CONSTRAINT "WorkflowStep_workflowId_fkey" FOREIGN KEY ("workflowId") REFERENCES "Workflow"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 28 | 29 | -- AddForeignKey 30 | ALTER TABLE "WorkflowStep" ADD CONSTRAINT "WorkflowStep_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agent"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 31 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230904063106_workflow_api_user/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - Added the required column `apiUserId` to the `Workflow` table without a default value. This is not possible if the table is not empty. 
5 | 6 | */ 7 | -- AlterTable 8 | ALTER TABLE "Workflow" ADD COLUMN "apiUserId" TEXT NOT NULL; 9 | 10 | -- AddForeignKey 11 | ALTER TABLE "Workflow" ADD CONSTRAINT "Workflow_apiUserId_fkey" FOREIGN KEY ("apiUserId") REFERENCES "ApiUser"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 12 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230904074324_add_workflow_llm/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateTable 2 | CREATE TABLE "WorkflowLLM" ( 3 | "worflowId" TEXT NOT NULL, 4 | "llmId" TEXT NOT NULL, 5 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 6 | "updatedAt" TIMESTAMP(3) NOT NULL, 7 | 8 | CONSTRAINT "WorkflowLLM_pkey" PRIMARY KEY ("worflowId","llmId") 9 | ); 10 | 11 | -- AddForeignKey 12 | ALTER TABLE "WorkflowLLM" ADD CONSTRAINT "WorkflowLLM_worflowId_fkey" FOREIGN KEY ("worflowId") REFERENCES "Workflow"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 13 | 14 | -- AddForeignKey 15 | ALTER TABLE "WorkflowLLM" ADD CONSTRAINT "WorkflowLLM_llmId_fkey" FOREIGN KEY ("llmId") REFERENCES "LLM"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 16 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230904082445_fix_workflow_misspelling/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - The primary key for the `WorkflowLLM` table will be changed. If it partially fails, the table could be left without primary key constraint. 5 | - You are about to drop the column `worflowId` on the `WorkflowLLM` table. All the data in the column will be lost. 6 | - Added the required column `workflowId` to the `WorkflowLLM` table without a default value. This is not possible if the table is not empty. 
7 | 8 | */ 9 | -- DropForeignKey 10 | ALTER TABLE "WorkflowLLM" DROP CONSTRAINT "WorkflowLLM_worflowId_fkey"; 11 | 12 | -- AlterTable 13 | ALTER TABLE "WorkflowLLM" DROP CONSTRAINT "WorkflowLLM_pkey", 14 | DROP COLUMN "worflowId", 15 | ADD COLUMN "workflowId" TEXT NOT NULL, 16 | ADD CONSTRAINT "WorkflowLLM_pkey" PRIMARY KEY ("workflowId", "llmId"); 17 | 18 | -- AddForeignKey 19 | ALTER TABLE "WorkflowLLM" ADD CONSTRAINT "WorkflowLLM_workflowId_fkey" FOREIGN KEY ("workflowId") REFERENCES "Workflow"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 20 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230904083153_remove_workflow_llm/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - You are about to drop the `WorkflowLLM` table. If the table is not empty, all the data it contains will be lost. 5 | 6 | */ 7 | -- DropForeignKey 8 | ALTER TABLE "WorkflowLLM" DROP CONSTRAINT "WorkflowLLM_llmId_fkey"; 9 | 10 | -- DropForeignKey 11 | ALTER TABLE "WorkflowLLM" DROP CONSTRAINT "WorkflowLLM_workflowId_fkey"; 12 | 13 | -- DropTable 14 | DROP TABLE "WorkflowLLM"; 15 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230907080928_remove_llm_model/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - You are about to drop the column `model` on the `LLM` table. All the data in the column will be lost. 
5 | 6 | */ 7 | -- AlterTable 8 | ALTER TABLE "Agent" ADD COLUMN "llmModel" "LLMModel" NOT NULL DEFAULT 'GPT_3_5_TURBO_16K_0613'; 9 | 10 | -- AlterTable 11 | ALTER TABLE "LLM" DROP COLUMN "model"; 12 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230907090814_agent_description/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Agent" ADD COLUMN "description" TEXT NOT NULL DEFAULT 'Add a agent description...'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230912073334_tool_return_direct/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Tool" ADD COLUMN "returnDirect" BOOLEAN NOT NULL DEFAULT false; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230913070205_agent_avatar/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Agent" ADD COLUMN "avatar" TEXT; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230915080507_datasource_status/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateEnum 2 | CREATE TYPE "DatasourceStatus" AS ENUM ('IN_PROGRESS', 'DONE'); 3 | 4 | -- AlterTable 5 | ALTER TABLE "Datasource" ADD COLUMN "status" "DatasourceStatus" NOT NULL DEFAULT 'IN_PROGRESS'; 6 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230917191411_datasource_status_failed/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "DatasourceStatus" ADD VALUE 
'FAILED'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230918070039_agent_cascade_delete/migration.sql: -------------------------------------------------------------------------------- 1 | -- DropForeignKey 2 | ALTER TABLE "AgentDatasource" DROP CONSTRAINT "AgentDatasource_agentId_fkey"; 3 | 4 | -- DropForeignKey 5 | ALTER TABLE "AgentLLM" DROP CONSTRAINT "AgentLLM_agentId_fkey"; 6 | 7 | -- DropForeignKey 8 | ALTER TABLE "AgentTool" DROP CONSTRAINT "AgentTool_agentId_fkey"; 9 | 10 | -- AddForeignKey 11 | ALTER TABLE "AgentDatasource" ADD CONSTRAINT "AgentDatasource_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agent"("id") ON DELETE CASCADE ON UPDATE CASCADE; 12 | 13 | -- AddForeignKey 14 | ALTER TABLE "AgentTool" ADD CONSTRAINT "AgentTool_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agent"("id") ON DELETE CASCADE ON UPDATE CASCADE; 15 | 16 | -- AddForeignKey 17 | ALTER TABLE "AgentLLM" ADD CONSTRAINT "AgentLLM_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agent"("id") ON DELETE CASCADE ON UPDATE CASCADE; 18 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230920060753_add_pptx_datasource_type/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "DatasourceType" ADD VALUE 'PPTX'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230920070547_datasource_docx/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "DatasourceType" ADD VALUE 'DOCX'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230920072352_datasource_xlsx/migration.sql: -------------------------------------------------------------------------------- 1 | 
-- AlterEnum 2 | ALTER TYPE "DatasourceType" ADD VALUE 'XLSX'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230920081659_datasource_google_doc/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "DatasourceType" ADD VALUE 'GOOGLE_DOC'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230921064724_code_executor/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'CODE_EXECUTOR'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20230928102507_api_user_email/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "ApiUser" ADD COLUMN "email" TEXT; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20231001110155_llm_azure_openai/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "LLMProvider" ADD VALUE 'AZURE_OPENAI'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20231001161850_datassource_content/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Datasource" ADD COLUMN "content" TEXT; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20231016065521_agent_initial_message/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Agent" ADD COLUMN "initialMessage" TEXT; 3 | 
-------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20231029210807_tool_openbb/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'OPENBB'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20231106194639_gpt_4_1106_preview/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "LLMModel" ADD VALUE 'GPT_4_1106_preview'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20231106194841_gpt_4_1106_preview_fix/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | 4 | - The values [GPT_4_1106_preview] on the enum `LLMModel` will be removed. If these variants are still used in the database, this will fail. 
5 | 6 | */ 7 | -- AlterEnum 8 | BEGIN; 9 | CREATE TYPE "LLMModel_new" AS ENUM ('GPT_3_5_TURBO_16K_0613', 'GPT_3_5_TURBO_0613', 'GPT_4_0613', 'GPT_4_32K_0613', 'GPT_4_1106_PREVIEW'); 10 | ALTER TABLE "Agent" ALTER COLUMN "llmModel" DROP DEFAULT; 11 | ALTER TABLE "Agent" ALTER COLUMN "llmModel" TYPE "LLMModel_new" USING ("llmModel"::text::"LLMModel_new"); 12 | ALTER TYPE "LLMModel" RENAME TO "LLMModel_old"; 13 | ALTER TYPE "LLMModel_new" RENAME TO "LLMModel"; 14 | DROP TYPE "LLMModel_old"; 15 | ALTER TABLE "Agent" ALTER COLUMN "llmModel" SET DEFAULT 'GPT_3_5_TURBO_16K_0613'; 16 | COMMIT; 17 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20231106224640_vision_tool/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'GPT_VISION'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20231107204227_tts1_tool/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'TTS_1'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20231112132755_update_model/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "LLMModel" ADD VALUE 'GPT_3_5_TURBO_1106'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20231113210515_huggingface_models/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "LLMModel" ADD VALUE 'MISTRAL_7B_INSTRUCT_V01'; 3 | 4 | -- AlterEnum 5 | ALTER TYPE "LLMProvider" ADD VALUE 'HUGGINGFACE'; 6 | 
-------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20231114202204_algolia_tool/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'ALGOLIA'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20231122081046_handoff_tool/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'HAND_OFF'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20231124220817_function_tool/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'FUNCTION'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20231217152121_add_tool_config/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Tool" ADD COLUMN "toolConfig" JSONB; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20231217220650_remove_workflow_inputs/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "WorkflowStep" ALTER COLUMN "input" DROP NOT NULL, 3 | ALTER COLUMN "output" DROP NOT NULL; 4 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20231223104946_add_http_tool/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'HTTP'; 3 | 
-------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240102071238_add_vectordb_table/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateEnum 2 | CREATE TYPE "VectorDbProvider" AS ENUM ('PINECONE', 'ASTRA_DB', 'WEAVIATE', 'QDRANT'); 3 | 4 | -- AlterTable 5 | ALTER TABLE "Datasource" ADD COLUMN "vectorDbId" TEXT; 6 | 7 | -- CreateTable 8 | CREATE TABLE "VectorDb" ( 9 | "id" TEXT NOT NULL, 10 | "provider" "VectorDbProvider" NOT NULL DEFAULT 'PINECONE', 11 | "options" JSONB NOT NULL, 12 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 13 | "updatedAt" TIMESTAMP(3) NOT NULL, 14 | "apiUserId" TEXT NOT NULL, 15 | 16 | CONSTRAINT "VectorDb_pkey" PRIMARY KEY ("id") 17 | ); 18 | 19 | -- AddForeignKey 20 | ALTER TABLE "Datasource" ADD CONSTRAINT "Datasource_vectorDbId_fkey" FOREIGN KEY ("vectorDbId") REFERENCES "VectorDb"("id") ON DELETE SET NULL ON UPDATE CASCADE; 21 | 22 | -- AddForeignKey 23 | ALTER TABLE "VectorDb" ADD CONSTRAINT "VectorDb_apiUserId_fkey" FOREIGN KEY ("apiUserId") REFERENCES "ApiUser"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 24 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240110062120_add_hugging_face_mixtral_8x7b_model/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "LLMModel" ADD VALUE 'MIXTRAL_8X7B_INSTRUCT_V01'; -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240119040422_add_supabase_pgvector/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "VectorDbProvider" ADD VALUE 'SUPABASE'; 3 | -------------------------------------------------------------------------------- 
/libs/superagent/prisma/migrations/20240121183424_add_on_delete_cascade_to_workflow_step/migration.sql: -------------------------------------------------------------------------------- 1 | -- DropForeignKey 2 | ALTER TABLE "WorkflowStep" DROP CONSTRAINT "WorkflowStep_agentId_fkey"; 3 | 4 | -- DropForeignKey 5 | ALTER TABLE "WorkflowStep" DROP CONSTRAINT "WorkflowStep_workflowId_fkey"; 6 | 7 | -- AddForeignKey 8 | ALTER TABLE "WorkflowStep" ADD CONSTRAINT "WorkflowStep_workflowId_fkey" FOREIGN KEY ("workflowId") REFERENCES "Workflow"("id") ON DELETE CASCADE ON UPDATE CASCADE; 9 | 10 | -- AddForeignKey 11 | ALTER TABLE "WorkflowStep" ADD CONSTRAINT "WorkflowStep_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agent"("id") ON DELETE CASCADE ON UPDATE CASCADE; 12 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240124063011_make_agent_llm_model_optional/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Agent" ALTER COLUMN "llmModel" DROP NOT NULL; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240129153542_add_workflow_config_table/migration.sql: -------------------------------------------------------------------------------- 1 | -- DropForeignKey 2 | ALTER TABLE "AgentDatasource" DROP CONSTRAINT "AgentDatasource_datasourceId_fkey"; 3 | 4 | -- DropForeignKey 5 | ALTER TABLE "AgentTool" DROP CONSTRAINT "AgentTool_toolId_fkey"; 6 | 7 | -- CreateTable 8 | CREATE TABLE "WorkflowConfig" ( 9 | "id" TEXT NOT NULL, 10 | "config" JSONB NOT NULL, 11 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 12 | "updatedAt" TIMESTAMP(3) NOT NULL, 13 | "workflowId" TEXT NOT NULL, 14 | "apiUserId" TEXT, 15 | 16 | CONSTRAINT "WorkflowConfig_pkey" PRIMARY KEY ("id") 17 | ); 18 | 19 | -- AddForeignKey 20 | ALTER TABLE "AgentDatasource" ADD CONSTRAINT 
"AgentDatasource_datasourceId_fkey" FOREIGN KEY ("datasourceId") REFERENCES "Datasource"("id") ON DELETE CASCADE ON UPDATE CASCADE; 21 | 22 | -- AddForeignKey 23 | ALTER TABLE "AgentTool" ADD CONSTRAINT "AgentTool_toolId_fkey" FOREIGN KEY ("toolId") REFERENCES "Tool"("id") ON DELETE CASCADE ON UPDATE CASCADE; 24 | 25 | -- AddForeignKey 26 | ALTER TABLE "WorkflowConfig" ADD CONSTRAINT "WorkflowConfig_workflowId_fkey" FOREIGN KEY ("workflowId") REFERENCES "Workflow"("id") ON DELETE CASCADE ON UPDATE CASCADE; 27 | 28 | -- AddForeignKey 29 | ALTER TABLE "WorkflowConfig" ADD CONSTRAINT "WorkflowConfig_apiUserId_fkey" FOREIGN KEY ("apiUserId") REFERENCES "ApiUser"("id") ON DELETE SET NULL ON UPDATE CASCADE; 30 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240201161130_add_gpt_4_turbo_preview/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "LLMModel" ADD VALUE 'GPT_4_TURBO_PREVIEW'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240201221548_gpt_3_5_turbo_0125/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "LLMModel" ADD VALUE 'GPT_3_5_TURBO_0125'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240201224222_agent_type_v2/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateEnum 2 | CREATE TYPE "AgentType" AS ENUM ('SUPERAGENT', 'OPENAI_ASSISTANT'); 3 | 4 | -- AlterTable 5 | ALTER TABLE "Agent" ADD COLUMN "type" "AgentType" NOT NULL DEFAULT 'SUPERAGENT'; 6 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240202033257_add_openai_assistants/migration.sql: 
-------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Agent" ADD COLUMN "openaiMetadata" JSONB; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240204133952_update_openai_assistants_table/migration.sql: -------------------------------------------------------------------------------- 1 | /* 2 | Warnings: 3 | - You are about to drop the column `openaiMetadata` on the `Agent` table. All the data in the column will be lost. 4 | */ 5 | -- AlterTable 6 | ALTER TABLE "Agent" DROP COLUMN "openaiMetadata", 7 | ADD COLUMN "metadata" JSONB; -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240210104018_make_tool_metadata_optional/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Tool" ALTER COLUMN "metadata" DROP NOT NULL; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240213050512_llm_agent/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "AgentType" ADD VALUE 'LLM'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240213051312_add_perplexity/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "LLMProvider" ADD VALUE 'PERPLEXITY'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240213203005_add_togetherai/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "LLMProvider" ADD VALUE 'TOGETHER_AI'; 3 | 
-------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240214191602_add_superrag_tool/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'SUPERRAG'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240227075836_add_api_keys_table/migration.sql: -------------------------------------------------------------------------------- 1 | -- CreateTable 2 | CREATE TABLE "ApiKey" ( 3 | "id" TEXT NOT NULL, 4 | "name" TEXT NOT NULL, 5 | "displayApiKey" TEXT NOT NULL, 6 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP, 7 | "updatedAt" TIMESTAMP(3) NOT NULL, 8 | "apiUserId" TEXT NOT NULL, 9 | 10 | CONSTRAINT "ApiKey_pkey" PRIMARY KEY ("id") 11 | ); 12 | 13 | -- CreateIndex 14 | CREATE INDEX "api_user_id" ON "ApiKey"("apiUserId"); 15 | 16 | -- AddForeignKey 17 | ALTER TABLE "ApiKey" ADD CONSTRAINT "ApiKey_apiUserId_fkey" FOREIGN KEY ("apiUserId") REFERENCES "ApiUser"("id") ON DELETE RESTRICT ON UPDATE CASCADE; 18 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240306005619_add_anthropic/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "LLMProvider" ADD VALUE 'ANTHROPIC'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240311200421_tavily_tool/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'RESEARCH'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240318081112_github_tool/migration.sql: 
-------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'GITHUB'; -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240327030115_scraper_tool/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'SCRAPER'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240328135202_add_output_schema_field_agents_table/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterTable 2 | ALTER TABLE "Agent" ADD COLUMN "outputSchema" TEXT; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240331072847_google_search_tool/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'GOOGLE_SEARCH'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240405175208_advanced_scraper_tool/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'ADVANCED_SCRAPER'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240412075016_add_aws_bedrock/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "LLMProvider" ADD VALUE 'BEDROCK'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240418181431_add_mistral/migration.sql: -------------------------------------------------------------------------------- 1 | -- 
AlterEnum 2 | ALTER TYPE "LLMProvider" ADD VALUE 'MISTRAL'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240418183001_add_groq/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "LLMProvider" ADD VALUE 'GROQ'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240420075553_add_cohere/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "LLMProvider" ADD VALUE 'COHERE_CHAT'; -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240424143511_add_sec_api/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "ToolType" ADD VALUE 'SEC'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240503060243_update_gpt_4_models/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | -- This migration adds more than one value to an enum. 3 | -- With PostgreSQL versions 11 and earlier, this is not possible 4 | -- in a single migration. This can be worked around by creating 5 | -- multiple migrations, each migration adding only one value to 6 | -- the enum. 
7 | 8 | 9 | ALTER TYPE "LLMModel" ADD VALUE 'GPT_3_5_TURBO'; 10 | ALTER TYPE "LLMModel" ADD VALUE 'GPT_4'; 11 | ALTER TYPE "LLMModel" ADD VALUE 'GPT_4_32K'; 12 | ALTER TYPE "LLMModel" ADD VALUE 'GPT_4_0125_PREVIEW'; 13 | ALTER TYPE "LLMModel" ADD VALUE 'GPT_4_TURBO'; 14 | ALTER TYPE "LLMModel" ADD VALUE 'GPT_4_TURBO_2024_04_09'; 15 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/20240514064040_gpt_4_o/migration.sql: -------------------------------------------------------------------------------- 1 | -- AlterEnum 2 | ALTER TYPE "LLMModel" ADD VALUE 'GPT_4_0'; 3 | -------------------------------------------------------------------------------- /libs/superagent/prisma/migrations/migration_lock.toml: -------------------------------------------------------------------------------- 1 | # Please do not edit this file manually 2 | # It should be added in your version-control system (i.e. Git) 3 | provider = "postgresql" -------------------------------------------------------------------------------- /libs/superagent/prompts/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/superagent-ai/superagent/168a176f9c67c53955d5eea0c8f743d5254b0606/libs/superagent/prompts/__init__.py -------------------------------------------------------------------------------- /libs/superagent/prompts/default.py: -------------------------------------------------------------------------------- 1 | DEFAULT_PROMPT = ( 2 | "You are a helpful AI Assistant, answer the users questions to " 3 | "the best of your ability." 
4 | ) 5 | -------------------------------------------------------------------------------- /libs/superagent/prompts/function_calling_agent.py: -------------------------------------------------------------------------------- 1 | FUNCTION_CALLING_AGENT_PROMPT = """ 2 | Your job is to call available functions if needed to answer the user's question. 3 | You should simply call the functions. If available functions do not help you to answer the question, just return 'None'. 4 | """ 5 | -------------------------------------------------------------------------------- /libs/superagent/prompts/json.py: -------------------------------------------------------------------------------- 1 | # adapted from https://github.com/langchain-ai/langchain/blob/d1a2e194c376f241116bf8e520f1a9bb297cdf3a/libs/core/langchain_core/output_parsers/format_instructions.py 2 | JSON_FORMAT_INSTRUCTIONS = """{base_prompt} 3 | 4 | Always answer using the below output schema. 5 | The output should be formatted as a JSON instance that conforms to the JSON schema below. 6 | 7 | As an example, for the schema {{"properties": {{"foo": {{"title": "Foo", "description": "a list of strings", "type": "array", "items": {{"type": "string"}}}}}}, "required": ["foo"]}} the object {{"foo": ["bar", "baz"]}} is a well-formatted instance of the schema. The object {{"properties": {{"foo": ["bar", "baz"]}}}} is not well-formatted. 
8 | 9 | Here is the output schema: 10 | ``` 11 | {output_schema} 12 | ``` 13 | """ 14 | -------------------------------------------------------------------------------- /libs/superagent/replit.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Install dependencies 4 | poetry install 5 | 6 | # Install prisma 7 | poetry run prisma generate 8 | 9 | # Start the application with auto-reload 10 | gunicorn --bind :8000 --workers 2 --timeout 0 --worker-class uvicorn.workers.UvicornWorker --threads 8 app.main:app -------------------------------------------------------------------------------- /libs/superagent/services/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/superagent-ai/superagent/168a176f9c67c53955d5eea0c8f743d5254b0606/libs/superagent/services/__init__.py -------------------------------------------------------------------------------- /libs/superagent/supabase/.gitignore: -------------------------------------------------------------------------------- 1 | # Supabase 2 | .branches 3 | .temp 4 | -------------------------------------------------------------------------------- /libs/superagent/supabase/seed.sql: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/superagent-ai/superagent/168a176f9c67c53955d5eea0c8f743d5254b0606/libs/superagent/supabase/seed.sql -------------------------------------------------------------------------------- /libs/superagent/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/superagent-ai/superagent/168a176f9c67c53955d5eea0c8f743d5254b0606/libs/superagent/tests/__init__.py -------------------------------------------------------------------------------- /libs/ui/.dockerignore: 
-------------------------------------------------------------------------------- 1 | # Dependency directories 2 | node_modules 3 | .next 4 | out 5 | 6 | # Environment files 7 | .env 8 | .env.local 9 | .env.development.local 10 | .env.test.local 11 | .env.production.local 12 | 13 | # Source control 14 | .git 15 | .gitignore 16 | 17 | # System Files 18 | .DS_Store 19 | Thumbs.db 20 | 21 | # Editor directories and files 22 | .idea 23 | *.swp 24 | *.swo 25 | *.sublime-project 26 | *.sublime-workspace 27 | 28 | # Logs 29 | logs 30 | *.log 31 | npm-debug.log* 32 | yarn-debug.log* 33 | yarn-error.log* 34 | 35 | # Miscellaneous 36 | *.md 37 | LICENSE 38 | .dockerignore 39 | Dockerfile 40 | # *.config.js 41 | # *.config.ts 42 | -------------------------------------------------------------------------------- /libs/ui/.editorconfig: -------------------------------------------------------------------------------- 1 | # editorconfig.org 2 | root = true 3 | 4 | [*] 5 | charset = utf-8 6 | end_of_line = lf 7 | indent_size = 2 8 | indent_style = space 9 | insert_final_newline = true 10 | trim_trailing_whitespace = true 11 | -------------------------------------------------------------------------------- /libs/ui/.env.example: -------------------------------------------------------------------------------- 1 | # Supabase (Required for authentication) 2 | NEXT_PUBLIC_SUPABASE_URL= 3 | NEXT_PUBLIC_SUPABASE_ANON_KEY= 4 | SUPABASE_SERVICEROLE_KEY= 5 | 6 | GITHUB_CLIENT_ID= 7 | GITHUB_CLIENT_SECRET= 8 | NEXT_PUBLIC_SUPERAGENT_API_URL="http://127.0.0.1:8000/api/v1" 9 | NEXT_PUBLIC_SUPABASE_STORAGE_NAME="superagent" 10 | 11 | # TRACING 12 | NEXT_PUBLIC_SEGMENT_WRITE_KEY=6tOuMx1B790SNrcHhd7WQbciZVEK00BY 13 | 14 | # Optional for connecting to external datasources 15 | NEXT_PUBLIC_APIDECK_API_KEY= 16 | NEXT_PUBLIC_APIDECK_API_ID= 17 | 18 | # Optional for adding billing 19 | STRIPE_SECRET_KEY= 20 | NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY= 21 | STRIPE_WEBHOOK_SECRET= 22 | 
NEXT_PUBLIC_STRIPE_HOBBY_PLAN= 23 | NEXT_PUBLIC_STRIPE_PRO_PLAN= 24 | 25 | # Optional for Langfuse 26 | NEXT_PUBLIC_LANGFUSE_PUBLIC_KEY= 27 | NEXT_PUBLIC_LANGFUSE_BASE_URL= -------------------------------------------------------------------------------- /libs/ui/.eslintignore: -------------------------------------------------------------------------------- 1 | dist/* 2 | .cache 3 | public 4 | node_modules 5 | *.esm.js 6 | -------------------------------------------------------------------------------- /libs/ui/.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/eslintrc", 3 | "root": true, 4 | "extends": [ 5 | "next/core-web-vitals", 6 | "prettier", 7 | "plugin:tailwindcss/recommended" 8 | ], 9 | "plugins": ["tailwindcss"], 10 | "rules": { 11 | "@next/next/no-html-link-for-pages": "off", 12 | "react/jsx-key": "off", 13 | "tailwindcss/no-custom-classname": "off" 14 | }, 15 | "settings": { 16 | "tailwindcss": { 17 | "callees": ["cn"], 18 | "config": "tailwind.config.js" 19 | }, 20 | "next": { 21 | "rootDir": ["./"] 22 | } 23 | }, 24 | "overrides": [ 25 | { 26 | "files": ["*.ts", "*.tsx"], 27 | "parser": "@typescript-eslint/parser" 28 | } 29 | ] 30 | } 31 | -------------------------------------------------------------------------------- /libs/ui/.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 
2 | 3 | # dependencies 4 | node_modules 5 | .pnp 6 | .pnp.js 7 | 8 | # testing 9 | coverage 10 | 11 | # next.js 12 | .next/ 13 | out/ 14 | build 15 | 16 | # misc 17 | .DS_Store 18 | *.pem 19 | .vscode 20 | 21 | # debug 22 | npm-debug.log* 23 | yarn-debug.log* 24 | yarn-error.log* 25 | .pnpm-debug.log* 26 | 27 | # local env files 28 | .env.local 29 | .env.development.local 30 | .env.test.local 31 | .env.production.local 32 | 33 | # turbo 34 | .turbo 35 | 36 | .contentlayer 37 | .env 38 | .vscode 39 | 40 | tsconfig.tsbuildinfo -------------------------------------------------------------------------------- /libs/ui/.lintstagedrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | // Type check TypeScript files 3 | "**/*.(ts|tsx)": () => "npx tsc --noEmit", 4 | 5 | // Lint & Prettify TS and JS files 6 | "**/*.(ts|tsx)": (filenames) => [ 7 | `npx eslint ${filenames.join(" ")}`, 8 | `npx prettier --write ${filenames.join(" ")}`, 9 | ], 10 | 11 | // Prettify only Markdown and JSON files 12 | "**/*.(md|json)": (filenames) => 13 | `npx prettier --write ${filenames.join(" ")}`, 14 | } 15 | -------------------------------------------------------------------------------- /libs/ui/.lintstagedrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "*.ts": ["prettier --write", "eslint"], 3 | "*.tsx": ["prettier --write", "eslint"], 4 | "*.html": ["eslint", "prettier --write"], 5 | "*.scss": "prettier --write" 6 | } 7 | -------------------------------------------------------------------------------- /libs/ui/.prettierignore: -------------------------------------------------------------------------------- 1 | cache 2 | .cache 3 | package.json 4 | package-lock.json 5 | public 6 | CHANGELOG.md 7 | .yarn 8 | dist 9 | node_modules 10 | .next 11 | build 12 | .contentlayer -------------------------------------------------------------------------------- /libs/ui/README.md: 
-------------------------------------------------------------------------------- 1 | # next-template 2 | 3 | A Next.js 13 template for building apps with Radix UI and Tailwind CSS. 4 | 5 | ## Usage 6 | 7 | ```bash 8 | npx create-next-app -e https://github.com/shadcn/next-template 9 | ``` 10 | 11 | ## Features 12 | 13 | - Next.js 13 App Directory 14 | - Radix UI Primitives 15 | - Tailwind CSS 16 | - Icons from [Lucide](https://lucide.dev) 17 | - Dark mode with `next-themes` 18 | - Tailwind CSS class sorting, merging and linting. 19 | 20 | ## License 21 | 22 | Licensed under the [MIT license](https://github.com/shadcn/ui/blob/main/LICENSE.md). 23 | -------------------------------------------------------------------------------- /libs/ui/app/agents/[agentId]/delete-agent-button.tsx: -------------------------------------------------------------------------------- 1 | import React from "react" 2 | import { TbTrashX } from "react-icons/tb" 3 | 4 | import { cn } from "@/lib/utils" 5 | import { 6 | AlertDialog, 7 | AlertDialogAction, 8 | AlertDialogCancel, 9 | AlertDialogContent, 10 | AlertDialogDescription, 11 | AlertDialogFooter, 12 | AlertDialogHeader, 13 | AlertDialogTitle, 14 | AlertDialogTrigger, 15 | } from "@/components/ui/alert-dialog" 16 | import { buttonVariants } from "@/components/ui/button" 17 | 18 | interface DeleteAgentButtonProps { 19 | handleDelete: () => void 20 | } 21 | 22 | const DeleteAgentButton: React.FC = ({ 23 | handleDelete, 24 | }) => { 25 | return ( 26 | 27 | 30 | 31 | 32 | 33 | 34 | Are you absolutely sure? 35 | 36 | This action cannot be undone. This will permanently delete the agent 37 | and remove your data from our servers. 
38 | 39 | 40 | 41 | Cancel 42 | handleDelete()}> 43 | Delete 44 | 45 | 46 | 47 | 48 | ) 49 | } 50 | 51 | export default DeleteAgentButton 52 | -------------------------------------------------------------------------------- /libs/ui/app/agents/[agentId]/prompt-footer.tsx: -------------------------------------------------------------------------------- 1 | import Link from "next/link" 2 | 3 | import { cn } from "@/lib/utils" 4 | 5 | export function PromptFooter({ 6 | className, 7 | ...props 8 | }: React.ComponentProps<"p">) { 9 | return ( 10 |

17 | Powered by{" "} 18 | 19 | Superagent.sh 20 | 21 |

22 | ) 23 | } 24 | -------------------------------------------------------------------------------- /libs/ui/app/agents/columns.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import { ColumnDef } from "@tanstack/react-table" 4 | import { RxCheckCircled, RxCircle } from "react-icons/rx" 5 | 6 | import { Badge } from "@/components/ui/badge" 7 | 8 | export type Agent = { 9 | id: string 10 | name: string 11 | prompt: string 12 | isActive: boolean 13 | } 14 | 15 | export const columns: ColumnDef[] = [ 16 | { 17 | accessorKey: "name", 18 | header: "Name", 19 | }, 20 | { 21 | accessorKey: "isActive", 22 | header: "Status", 23 | cell: ({ row, column }) => 24 | row.getValue(column.id) ? ( 25 | 26 |
27 | 28 | Deployed 29 |
30 |
31 | ) : ( 32 | 33 |
34 | 35 | Paused 36 |
37 |
38 | ), 39 | }, 40 | { 41 | accessorKey: "description", 42 | header: "Description", 43 | }, 44 | { 45 | accessorKey: "id", 46 | header: "ID", 47 | }, 48 | ] 49 | -------------------------------------------------------------------------------- /libs/ui/app/agents/header.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import { useRouter } from "next/navigation" 4 | import { TbPlus } from "react-icons/tb" 5 | import { useAsyncFn } from "react-use" 6 | 7 | import { Profile } from "@/types/profile" 8 | import { initialSamlValue } from "@/config/saml" 9 | import { Api } from "@/lib/api" 10 | import { Button } from "@/components/ui/button" 11 | import { Spinner } from "@/components/ui/spinner" 12 | 13 | export default function Header({ profile }: { profile: Profile }) { 14 | const api = new Api(profile.api_key) 15 | const router = useRouter() 16 | const [{ loading }, createWorkflow] = useAsyncFn(async () => { 17 | const { data: agent } = await api.createAgent({ 18 | name: "My Agent", 19 | description: "", 20 | llmModel: "GPT_3_5_TURBO_16K_0613", 21 | isActive: true, 22 | prompt: "You are a helpful AI Assistant", 23 | }) 24 | router.push(`/agents/${agent.id}`) 25 | }) 26 | 27 | return ( 28 |
29 | Agents 30 | 34 |
35 | ) 36 | } 37 | -------------------------------------------------------------------------------- /libs/ui/app/agents/loading.tsx: -------------------------------------------------------------------------------- 1 | import { Spinner } from "@/components/ui/spinner" 2 | 3 | export default function Loading() { 4 | return ( 5 |
6 | 7 |
8 | ) 9 | } 10 | -------------------------------------------------------------------------------- /libs/ui/app/agents/page.tsx: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { createServerComponentClient } from "@supabase/auth-helpers-nextjs" 3 | 4 | import { Api } from "@/lib/api" 5 | 6 | import { columns } from "./columns" 7 | import { DataTable } from "./data-table" 8 | import Header from "./header" 9 | 10 | export const dynamic = "force-dynamic" 11 | 12 | export default async function Agents({ 13 | searchParams, 14 | }: { 15 | searchParams: { 16 | page: string 17 | take: string 18 | } 19 | }) { 20 | const supabase = createServerComponentClient({ cookies }) 21 | const { 22 | data: { user }, 23 | } = await supabase.auth.getUser() 24 | const { data: profile } = await supabase 25 | .from("profiles") 26 | .select("*") 27 | .eq("user_id", user?.id) 28 | .single() 29 | const api = new Api(profile.api_key) 30 | const { take: takeStr, page: pageStr } = searchParams 31 | const take = Number(takeStr) || 10, 32 | page = Number(pageStr) || 1 33 | 34 | const { data: agents, total_pages } = await api.getAgents({ 35 | skip: (page - 1) * take, 36 | take, 37 | }) 38 | 39 | return ( 40 |
41 |
42 | 52 |
53 | ) 54 | } 55 | -------------------------------------------------------------------------------- /libs/ui/app/api/onboard/form-schema.ts: -------------------------------------------------------------------------------- 1 | import { z } from "zod" 2 | 3 | export const onboardFormSchema = z.object({ 4 | first_name: z.string().nonempty("Invalid first name."), 5 | last_name: z.string().nonempty("Invalid last name."), 6 | company: z.string().nonempty("Enter a company name"), 7 | }) 8 | -------------------------------------------------------------------------------- /libs/ui/app/api/stripe/plans/route.ts: -------------------------------------------------------------------------------- 1 | import { NextRequest, NextResponse } from "next/server" 2 | import Stripe from "stripe" 3 | 4 | import { siteConfig } from "@/config/site" 5 | import { stripe } from "@/lib/stripe" 6 | 7 | export async function GET(req: NextRequest) { 8 | const planNames = Object.keys(siteConfig.paymentPlans) 9 | const planPromises = planNames.map((planName) => 10 | stripe.plans.retrieve( 11 | siteConfig.paymentPlans?.[ 12 | planName as keyof typeof siteConfig.paymentPlans 13 | ] 14 | ) 15 | ) 16 | 17 | try { 18 | const plans = await Promise.all(planPromises) 19 | 20 | const plansObject: Record = {} 21 | planNames.forEach((planName, index) => { 22 | plansObject[planName] = plans[index] 23 | }) 24 | 25 | return NextResponse.json(plansObject, { status: 200 }) 26 | } catch (error) { 27 | console.error("Error fetching payment plans:", error) 28 | return NextResponse.json( 29 | { 30 | error: { 31 | message: "Error fetching payment plans", 32 | }, 33 | }, 34 | { 35 | status: 500, 36 | } 37 | ) 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /libs/ui/app/api/stripe/sessions/[id]/route.ts: -------------------------------------------------------------------------------- 1 | import { NextRequest, NextResponse } from "next/server" 2 | 3 | import { stripe } 
from "@/lib/stripe" 4 | 5 | export async function GET( 6 | req: NextRequest, 7 | { params }: { params: { id: string } } 8 | ) { 9 | const session = await stripe.checkout.sessions.retrieve(params.id) 10 | return NextResponse.json(session) 11 | } 12 | -------------------------------------------------------------------------------- /libs/ui/app/api/stripe/sessions/route.ts: -------------------------------------------------------------------------------- 1 | import { NextRequest, NextResponse } from "next/server" 2 | 3 | import { stripe } from "@/lib/stripe" 4 | 5 | export async function POST(req: NextRequest) { 6 | const body = await req.json() 7 | const customerId = body.customerId 8 | const planId = body.planId 9 | 10 | const protocol = req.headers.get("x-forwarded-proto") || "http" 11 | const host = req.headers.get("host") 12 | const baseUrl = `${protocol}://${host}` 13 | 14 | const session = await stripe.checkout.sessions.create({ 15 | mode: "subscription", 16 | payment_method_types: ["card"], 17 | line_items: [ 18 | { 19 | price: planId, 20 | quantity: 1, 21 | }, 22 | ], 23 | customer: customerId, 24 | 25 | success_url: `${baseUrl}/workflows?checkout_session_id={CHECKOUT_SESSION_ID}`, 26 | cancel_url: `${baseUrl}/workflows`, 27 | }) 28 | 29 | if (!session) { 30 | return NextResponse.json( 31 | { 32 | success: false, 33 | error: { 34 | message: "Failed to create session", 35 | }, 36 | }, 37 | { status: 500 } 38 | ) 39 | } 40 | 41 | return NextResponse.json(session) 42 | } 43 | -------------------------------------------------------------------------------- /libs/ui/app/api/stripe/subscriptions/[id]/cancel/route.ts: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { NextRequest, NextResponse } from "next/server" 3 | import { createRouteHandlerClient } from "@supabase/auth-helpers-nextjs" 4 | 5 | import { stripe } from "@/lib/stripe" 6 | 7 | export async function POST( 8 | req: 
NextRequest, 9 | { params }: { params: { id: string } } 10 | ) { 11 | const supabase = createRouteHandlerClient({ cookies }) 12 | 13 | const { data, error } = await supabase.auth.getUser() 14 | 15 | if (error) { 16 | return NextResponse.json({ error: "Something went wrong" }, { status: 500 }) 17 | } 18 | 19 | const { data: profile } = await supabase 20 | .from("profiles") 21 | .select("*") 22 | .eq("user_id", data?.user.id) 23 | .single() 24 | 25 | if (profile?.stripe_plan_id !== params.id) { 26 | return NextResponse.json({ error: "Invalid subscription" }, { status: 400 }) 27 | } 28 | 29 | const subscription = await stripe.subscriptions.update(params.id, { 30 | cancel_at_period_end: true, 31 | }) 32 | 33 | return NextResponse.json(subscription) 34 | } 35 | -------------------------------------------------------------------------------- /libs/ui/app/api/stripe/subscriptions/[id]/route.ts: -------------------------------------------------------------------------------- 1 | import { NextRequest, NextResponse } from "next/server" 2 | 3 | import { stripe } from "@/lib/stripe" 4 | 5 | export async function GET( 6 | req: NextRequest, 7 | { params }: { params: { id: string } } 8 | ) { 9 | const subscription = await stripe.subscriptions.retrieve(params.id, { 10 | expand: ["plan"], 11 | }) 12 | 13 | return NextResponse.json(subscription) 14 | } 15 | -------------------------------------------------------------------------------- /libs/ui/app/auth/callback/route.ts: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { NextRequest, NextResponse } from "next/server" 3 | import { createRouteHandlerClient } from "@supabase/auth-helpers-nextjs" 4 | 5 | export const dynamic = "force-dynamic" 6 | 7 | export async function GET(request: NextRequest) { 8 | const requestUrl = new URL(request.url) 9 | const code = requestUrl.searchParams.get("code") 10 | 11 | if (code) { 12 | const supabase = 
createRouteHandlerClient({ cookies }) 13 | await supabase.auth.exchangeCodeForSession(code) 14 | } 15 | 16 | // URL to redirect to after sign in process completes 17 | return NextResponse.redirect("/agents") 18 | } 19 | -------------------------------------------------------------------------------- /libs/ui/app/auth/login.ts: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { NextRequest, NextResponse } from "next/server" 3 | import { createRouteHandlerClient } from "@supabase/auth-helpers-nextjs" 4 | 5 | export const dynamic = "force-dynamic" 6 | 7 | export async function POST(request: NextRequest) { 8 | const requestUrl = new URL(request.url) 9 | const formData = await request.formData() 10 | const email = formData.get("email") 11 | const password = formData.get("password") 12 | const supabase = createRouteHandlerClient({ cookies }) 13 | 14 | await supabase.auth.signInWithPassword({ 15 | email: email as string, 16 | password: password as string, 17 | }) 18 | 19 | return NextResponse.redirect(requestUrl.origin, { 20 | status: 301, 21 | }) 22 | } 23 | -------------------------------------------------------------------------------- /libs/ui/app/auth/logout.ts: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { NextResponse } from "next/server" 3 | import { createRouteHandlerClient } from "@supabase/auth-helpers-nextjs" 4 | 5 | export const dynamic = "force-dynamic" 6 | 7 | export async function POST(request: Request) { 8 | const requestUrl = new URL(request.url) 9 | const supabase = createRouteHandlerClient({ cookies }) 10 | 11 | await supabase.auth.signOut() 12 | 13 | return NextResponse.redirect(`${requestUrl.origin}/login`, { 14 | status: 301, 15 | }) 16 | } 17 | -------------------------------------------------------------------------------- /libs/ui/app/auth/sign-up.ts: 
-------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { NextRequest, NextResponse } from "next/server" 3 | import { createRouteHandlerClient } from "@supabase/auth-helpers-nextjs" 4 | 5 | export const dynamic = "force-dynamic" 6 | 7 | export async function POST(request: NextRequest) { 8 | const requestUrl = new URL(request.url) 9 | const formData = await request.formData() 10 | const email = formData.get("email") 11 | const password = formData.get("password") 12 | const supabase = createRouteHandlerClient({ cookies }) 13 | 14 | await supabase.auth.signUp({ 15 | email: email as string, 16 | password: password as string, 17 | options: { 18 | emailRedirectTo: `${requestUrl.origin}/auth/callback`, 19 | }, 20 | }) 21 | 22 | return NextResponse.redirect(requestUrl.origin, { 23 | status: 301, 24 | }) 25 | } 26 | -------------------------------------------------------------------------------- /libs/ui/app/container.tsx: -------------------------------------------------------------------------------- 1 | import Sidebar from "@/components/sidebar" 2 | 3 | import BillingModal from "./billing-modal" 4 | 5 | interface RootLayoutProps { 6 | children: React.ReactNode 7 | profile: any 8 | } 9 | 10 | export default function RootLayout({ children, profile }: RootLayoutProps) { 11 | return ( 12 |
13 | {/* 14 | {process.env.NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY && ( 15 | 16 | )} 17 | */} 18 | 19 |
{children}
20 |
21 | ) 22 | } 23 | -------------------------------------------------------------------------------- /libs/ui/app/integrations/client-page.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs" 4 | 5 | import LLM from "./llm" 6 | import Storage from "./storage" 7 | 8 | export default function IntegrationsClientPage({ 9 | profile, 10 | configuredDBs, 11 | configuredLLMs, 12 | }: { 13 | profile: any 14 | configuredDBs: any 15 | configuredLLMs: any 16 | }) { 17 | return ( 18 | 19 | 20 | 21 | STORAGE 22 | 23 | 24 | LANGUAGE MODELS 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | ) 37 | } 38 | -------------------------------------------------------------------------------- /libs/ui/app/integrations/page.tsx: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { createServerComponentClient } from "@supabase/auth-helpers-nextjs" 3 | 4 | import { Api } from "@/lib/api" 5 | 6 | import IntegrationsClientPage from "./client-page" 7 | 8 | export default async function Integration() { 9 | const supabase = createServerComponentClient({ cookies }) 10 | const { 11 | data: { user }, 12 | } = await supabase.auth.getUser() 13 | const { data: profile } = await supabase 14 | .from("profiles") 15 | .select("*") 16 | .eq("user_id", user?.id) 17 | .single() 18 | const api = new Api(profile.api_key) 19 | 20 | const [{ data: configuredDBs }, { data: configuredLLMs }] = await Promise.all( 21 | [await api.getVectorDbs(), await api.getLLMs()] 22 | ) 23 | 24 | return ( 25 |
26 |

Integrations

27 |
28 | 33 |
34 |
35 | ) 36 | } 37 | -------------------------------------------------------------------------------- /libs/ui/app/loading.tsx: -------------------------------------------------------------------------------- 1 | import { Spinner } from "@/components/ui/spinner" 2 | 3 | export default function Loading() { 4 | return ( 5 |
6 | 7 |
8 | ) 9 | } 10 | -------------------------------------------------------------------------------- /libs/ui/app/logs/page.tsx: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { createServerComponentClient } from "@supabase/auth-helpers-nextjs" 3 | import { TbTerminal2 } from "react-icons/tb" 4 | 5 | import { Api } from "@/lib/api" 6 | 7 | export const dynamic = "force-dynamic" 8 | 9 | export default async function Agents({ 10 | searchParams, 11 | }: { 12 | searchParams: { 13 | id: string 14 | } 15 | }) { 16 | let agent = "" 17 | const supabase = createServerComponentClient({ cookies }) 18 | const { 19 | data: { user }, 20 | } = await supabase.auth.getUser() 21 | const { data: profile } = await supabase 22 | .from("profiles") 23 | .select("*") 24 | .eq("user_id", user?.id) 25 | .single() 26 | const api = new Api(profile.api_key) 27 | 28 | return ( 29 |
30 |

Logs

31 |
32 | {/* */} 33 |
34 |
35 | 36 |
37 |

Coming soon

38 |

39 | We will be rolling out detailed logs for your workflows soon! 40 |

41 |
42 |
43 |
44 |
45 |
46 | ) 47 | } 48 | -------------------------------------------------------------------------------- /libs/ui/app/onboarding/page.tsx: -------------------------------------------------------------------------------- 1 | import OnboardingClientPage from "./client-page" 2 | 3 | export default function Onboarding() { 4 | return 5 | } 6 | -------------------------------------------------------------------------------- /libs/ui/app/settings/api-keys/api-keys.tsx: -------------------------------------------------------------------------------- 1 | import { ApiKey } from "@/models/models" 2 | 3 | import { 4 | Table, 5 | TableBody, 6 | TableCell, 7 | TableHeader, 8 | TableRow, 9 | } from "@/components/ui/table" 10 | 11 | import TableActions from "./api-key-actions" 12 | 13 | export default function ApiKeysTable({ 14 | profile, 15 | data, 16 | }: { 17 | profile: any 18 | data: any 19 | }) { 20 | const apiKeys = data.map((obj: any) => new ApiKey(obj)) 21 | 22 | return ( 23 | 24 | 25 | 26 | Name 27 | Key 28 | Created At 29 | 30 | 31 | 32 | {apiKeys?.map((apiKey: any) => { 33 | const { ...apiKeyObj } = apiKey 34 | return ( 35 | 36 | {apiKey.name} 37 | {apiKey.displayApiKey} 38 | 39 | {new Date(apiKey.createdAt).toLocaleDateString()}{" "} 40 | 41 | 42 | 43 | 44 | 45 | 46 | ) 47 | })} 48 | 49 |
50 | ) 51 | } 52 | -------------------------------------------------------------------------------- /libs/ui/app/settings/api-keys/page.tsx: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { createServerComponentClient } from "@supabase/auth-helpers-nextjs" 3 | 4 | import { Api } from "@/lib/api" 5 | 6 | import ApiKeysTable from "./api-keys" 7 | import { CreateSecretKey } from "./create-api-key" 8 | 9 | export default async function Settings() { 10 | const supabase = createServerComponentClient({ cookies }) 11 | const { 12 | data: { user }, 13 | } = await supabase.auth.getUser() 14 | 15 | const { data: profile } = await supabase 16 | .from("profiles") 17 | .select("*") 18 | .eq("user_id", user?.id) 19 | .single() 20 | 21 | const api = new Api(profile?.api_key) 22 | 23 | let { 24 | data = [], 25 | }: { 26 | data: any[] 27 | } = await api.getApiKeys() 28 | 29 | return ( 30 |
31 | 32 |
33 | 34 |
35 |
36 | ) 37 | } 38 | -------------------------------------------------------------------------------- /libs/ui/app/settings/appearance/client-page.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import ThemeToggle from "@/components/theme-toggle" 4 | 5 | export default function AppearanceClientPage() { 6 | return ( 7 |
8 |
9 |

Appearance

10 |

11 | Update the appearance of the Superagent dashboard 12 |

13 |
14 | 15 |
16 | ) 17 | } 18 | -------------------------------------------------------------------------------- /libs/ui/app/settings/appearance/page.tsx: -------------------------------------------------------------------------------- 1 | import AppearanceClientPage from "./client-page" 2 | 3 | export default async function Settings() { 4 | return 5 | } 6 | -------------------------------------------------------------------------------- /libs/ui/app/settings/billing/page.tsx: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { createServerComponentClient } from "@supabase/auth-helpers-nextjs" 3 | 4 | import PricingTable from "@/app/settings/billing/pricing-table" 5 | 6 | export default async function Billing() { 7 | const supabase = createServerComponentClient({ cookies }) 8 | const { 9 | data: { user }, 10 | } = await supabase.auth.getUser() 11 | const { data: profile } = await supabase 12 | .from("profiles") 13 | .select("*") 14 | .eq("user_id", user?.id) 15 | .single() 16 | 17 | return ( 18 |
19 |
20 |

Billing Plans

21 |

22 | Subscribe to a plan to get started. 23 |

24 |
25 |
26 |
27 | 31 |
32 |
33 | ) 34 | } 35 | -------------------------------------------------------------------------------- /libs/ui/app/settings/layout.tsx: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { createServerComponentClient } from "@supabase/auth-helpers-nextjs" 3 | 4 | import { siteConfig } from "@/config/site" 5 | import { SettingsSidebar } from "@/components/account-sidebar" 6 | 7 | interface SettingsLayoutProps { 8 | children: React.ReactNode 9 | } 10 | 11 | export default async function SettingsLayout({ 12 | children, 13 | }: SettingsLayoutProps) { 14 | const supabase = createServerComponentClient({ cookies }) 15 | const { 16 | data: { user }, 17 | } = await supabase.auth.getUser() 18 | const { data: profile } = await supabase 19 | .from("profiles") 20 | .select("*") 21 | .eq("user_id", user?.id) 22 | .single() 23 | 24 | return ( 25 |
26 |

Settings

27 |
28 | 29 |
{children}
30 |
31 |
32 | ) 33 | } 34 | -------------------------------------------------------------------------------- /libs/ui/app/settings/page.tsx: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { 3 | createRouteHandlerClient, 4 | createServerComponentClient, 5 | } from "@supabase/auth-helpers-nextjs" 6 | 7 | import SettingsClientPage from "./client-page" 8 | 9 | export const dynamic = "force-dynamic" 10 | 11 | export default async function Settings() { 12 | const supabase = createServerComponentClient({ cookies }) 13 | const { 14 | data: { user }, 15 | } = await supabase.auth.getUser() 16 | const { data: profile } = await supabase 17 | .from("profiles") 18 | .select("*") 19 | .eq("user_id", user?.id) 20 | .single() 21 | 22 | return user ? : null 23 | } 24 | -------------------------------------------------------------------------------- /libs/ui/app/workflows/[id]/editor.ts: -------------------------------------------------------------------------------- 1 | import * as monaco from "monaco-editor" 2 | import { configureMonacoYaml } from "monaco-yaml" 3 | 4 | window.MonacoEnvironment = { 5 | getWorker(_, label) { 6 | switch (label) { 7 | case "editorWorkerService": 8 | return new Worker( 9 | new URL("monaco-editor/esm/vs/editor/editor.worker", import.meta.url) 10 | ) 11 | case "yaml": 12 | return new Worker(new URL("monaco-yaml/yaml.worker", import.meta.url)) 13 | default: 14 | throw new Error(`Unknown label ${label}`) 15 | } 16 | }, 17 | } 18 | 19 | configureMonacoYaml(monaco, { 20 | enableSchemaRequest: true, 21 | schemas: [ 22 | { 23 | fileMatch: ["*"], 24 | uri: `${process.env.NEXT_PUBLIC_SUPERAGENT_API_URL}/workflows/config/schema`, 25 | }, 26 | ], 27 | }) 28 | 29 | const modelUri = monaco.Uri.parse("config.yaml") 30 | let model = monaco.editor.createModel("initialValue", "yaml", modelUri) 31 | 32 | export function initCodeEditor( 33 | wrapperElement: HTMLElement, 34 | theme: string = 
"light" 35 | ) { 36 | return monaco.editor.create(wrapperElement, { 37 | automaticLayout: true, 38 | model, 39 | scrollbar: { 40 | vertical: "hidden", 41 | }, 42 | fontSize: 14, 43 | theme: theme === "dark" ? "vs-dark" : "vs-light", 44 | quickSuggestions: { 45 | other: true, 46 | comments: false, 47 | strings: true, 48 | }, 49 | tabSize: 2, 50 | guides: { 51 | highlightActiveIndentation: true, 52 | }, 53 | minimap: { enabled: false }, 54 | }) 55 | } 56 | -------------------------------------------------------------------------------- /libs/ui/app/workflows/[id]/function-calls.tsx: -------------------------------------------------------------------------------- 1 | import React from "react" 2 | 3 | interface FunctionCallsProps { 4 | functionCalls?: any[] 5 | } 6 | 7 | function FunctionCalls({ functionCalls }: FunctionCallsProps) { 8 | return ( 9 |
10 |

Run Logs

11 | 12 | {functionCalls?.map((call, index) => ( 13 |
14 |
15 | {call?.type == "function_call" && ( 16 |
17 | 18 | TOOL: {call.function} 19 | 20 |
21 | )} 22 | {call?.type == "start" && ( 23 | 24 | INPUT 25 | 26 | )} 27 | {call?.type == "end" && ( 28 | 29 | OUTPUT 30 | 31 | )} 32 |
33 | ))} 34 |
35 | ) 36 | } 37 | 38 | export default FunctionCalls 39 | -------------------------------------------------------------------------------- /libs/ui/app/workflows/[id]/page.tsx: -------------------------------------------------------------------------------- 1 | import { cookies } from "next/headers" 2 | import { createServerComponentClient } from "@supabase/auth-helpers-nextjs" 3 | 4 | import { Api } from "@/lib/api" 5 | 6 | import WorkflowDetail from "./workflow" 7 | 8 | export default async function Assistant({ 9 | params, 10 | }: { 11 | params: { id: string } 12 | }) { 13 | const supabase = createServerComponentClient({ cookies }) 14 | const { id } = params 15 | const { 16 | data: { user }, 17 | } = await supabase.auth.getUser() 18 | const { data: profile } = await supabase 19 | .from("profiles") 20 | .select("*") 21 | .eq("user_id", user?.id) 22 | .single() 23 | const api = new Api(profile.api_key) 24 | const { data: workflow } = await api.getWorkflowById(id) 25 | const { data: llms } = await api.getLLMs() 26 | 27 | return workflow ? ( 28 | 29 | ) : ( 30 |
31 |

No assistant selected

32 |

33 | View details about an assistant by navigating the list to the left 34 |

35 |
36 | ) 37 | } 38 | -------------------------------------------------------------------------------- /libs/ui/app/workflows/checkout-session-status.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import { useEffect } from "react" 4 | import Stripe from "stripe" 5 | 6 | import { Toaster } from "@/components/ui/toaster" 7 | import { useToast } from "@/components/ui/use-toast" 8 | 9 | interface CheckoutSessionStatusProps { 10 | session: Stripe.Checkout.Session 11 | } 12 | 13 | function CheckoutSessionStatus({ session }: CheckoutSessionStatusProps) { 14 | const { toast } = useToast() 15 | 16 | useEffect(() => { 17 | if (session.status === "complete") { 18 | toast({ 19 | title: "Success", 20 | description: "Your payment was successful", 21 | }) 22 | } else { 23 | toast({ 24 | title: "Error", 25 | description: "Your payment failed", 26 | }) 27 | } 28 | }, [session]) 29 | 30 | return 31 | } 32 | 33 | export default CheckoutSessionStatus 34 | -------------------------------------------------------------------------------- /libs/ui/app/workflows/header.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import { useRouter } from "next/navigation" 4 | import { TbPlus } from "react-icons/tb" 5 | import { useAsyncFn } from "react-use" 6 | 7 | import { Profile } from "@/types/profile" 8 | import { initialSamlValue } from "@/config/saml" 9 | import { Api } from "@/lib/api" 10 | import { Button } from "@/components/ui/button" 11 | import { Spinner } from "@/components/ui/spinner" 12 | 13 | export default function Header({ profile }: { profile: Profile }) { 14 | const api = new Api(profile.api_key) 15 | const router = useRouter() 16 | const [{ loading }, createWorkflow] = useAsyncFn(async () => { 17 | const { data: workflow } = await api.createWorkflow({ 18 | name: "My Workflow", 19 | description: "My new workflow", 20 | }) 21 | await 
api.generateWorkflow(workflow.id, initialSamlValue) 22 | router.push(`/workflows/${workflow.id}`) 23 | }) 24 | 25 | return ( 26 |
27 | Workflows 28 | 32 |
33 | ) 34 | } 35 | -------------------------------------------------------------------------------- /libs/ui/app/workflows/layout.tsx: -------------------------------------------------------------------------------- 1 | interface AssistantsLayoutProps { 2 | children: React.ReactNode 3 | params: { slug: string } 4 | } 5 | 6 | export default async function AssistantsLayout({ 7 | params, 8 | children, 9 | }: AssistantsLayoutProps) { 10 | return ( 11 |
12 |
{children}
13 |
14 | ) 15 | } 16 | -------------------------------------------------------------------------------- /libs/ui/components.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://ui.shadcn.com/schema.json", 3 | "style": "default", 4 | "tailwind": { 5 | "config": "tailwind.config.js", 6 | "css": "app/globals.css", 7 | "baseColor": "slate", 8 | "cssVariables": true 9 | }, 10 | "rsc": false, 11 | "aliases": { 12 | "utils": "@/lib/utils", 13 | "components": "@/components" 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /libs/ui/components/account-sidebar.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import Link from "next/link" 4 | import { usePathname } from "next/navigation" 5 | 6 | import { cn } from "@/lib/utils" 7 | import { buttonVariants } from "@/components/ui/button" 8 | 9 | interface SettingsSidebarProps extends React.HTMLAttributes { 10 | profile: any 11 | items: { 12 | id: string 13 | href: string 14 | title: string 15 | disabled?: boolean 16 | }[] 17 | } 18 | 19 | export function SettingsSidebar({ 20 | profile, 21 | className, 22 | items, 23 | ...props 24 | }: SettingsSidebarProps) { 25 | const pathname = usePathname() 26 | 27 | return ( 28 | 50 | ) 51 | } 52 | -------------------------------------------------------------------------------- /libs/ui/components/analytics.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import { useEffect } from "react" 4 | import { usePathname, useSearchParams } from "next/navigation" 5 | 6 | import { analytics } from "@/lib/segment" 7 | 8 | export default function Analytics() { 9 | const pathname = usePathname() 10 | const searchParams = useSearchParams() 11 | 12 | useEffect(() => { 13 | analytics.page() 14 | }, [pathname, searchParams]) 15 | 16 | return null 17 | } 18 | 
-------------------------------------------------------------------------------- /libs/ui/components/hooks/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./useEditableField" 2 | -------------------------------------------------------------------------------- /libs/ui/components/hooks/useEditableField.tsx: -------------------------------------------------------------------------------- 1 | import { useState } from "react" 2 | 3 | import { Button } from "@/components/ui/button" 4 | import { Input } from "@/components/ui/input" 5 | import { Spinner } from "@/components/ui/spinner" 6 | 7 | type ModeStatus = "view" | "edit" 8 | 9 | export const useEditableField = ( 10 | initalValue: string, 11 | // onUpdate function's argument types should infer from the passed function 12 | onUpdate: (value: string) => Promise 13 | ) => { 14 | const [value, setValue] = useState(initalValue) 15 | const [mode, setMode] = useState("view") 16 | const [isLoading, setLoading] = useState(false) 17 | 18 | return mode === "view" ? ( 19 |

setMode("edit")}> 20 | {value} 21 |

22 | ) : ( 23 |
24 | ) => 27 | setValue(event.target.value) 28 | } 29 | placeholder="My Worflow" 30 | className="leading-0 flex-1 border-none p-0 text-2xl ring-offset-0 focus-visible:ring-0 focus-visible:ring-offset-0" 31 | /> 32 | 45 |
46 | ) 47 | } 48 | -------------------------------------------------------------------------------- /libs/ui/components/logo.tsx: -------------------------------------------------------------------------------- 1 | import NextImage from "next/image" 2 | 3 | export default function Logo({ 4 | width = 38, 5 | height = 38, 6 | }: { 7 | width?: number 8 | height?: number 9 | }) { 10 | return ( 11 | 18 | ) 19 | } 20 | -------------------------------------------------------------------------------- /libs/ui/components/markdown.tsx: -------------------------------------------------------------------------------- 1 | import { FC, memo } from "react" 2 | import ReactMarkdown, { Options } from "react-markdown" 3 | 4 | export const MemoizedReactMarkdown: FC = memo( 5 | ReactMarkdown, 6 | (prevProps, nextProps) => 7 | prevProps.children === nextProps.children && 8 | prevProps.className === nextProps.className 9 | ) 10 | -------------------------------------------------------------------------------- /libs/ui/components/non-ideal-state.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | 3 | import { Card, CardContent } from "./ui/card" 4 | 5 | interface NonIdealState { 6 | title: string 7 | description: string 8 | icon: React.ComponentType<{ size?: number }> 9 | } 10 | 11 | export default function NonIdealState({ 12 | title, 13 | icon: Icon, 14 | description, 15 | }: NonIdealState) { 16 | return ( 17 |
18 | 19 |
20 |

{title}

21 |

{description}

22 |
23 |
24 | ) 25 | } 26 | -------------------------------------------------------------------------------- /libs/ui/components/theme-provider.tsx: -------------------------------------------------------------------------------- 1 | "use client" 2 | 3 | import * as React from "react" 4 | import { ThemeProvider as NextThemesProvider } from "next-themes" 5 | import { type ThemeProviderProps } from "next-themes/dist/types" 6 | 7 | export function ThemeProvider({ children, ...props }: ThemeProviderProps) { 8 | return {children} 9 | } 10 | -------------------------------------------------------------------------------- /libs/ui/components/ui/avatar.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | import * as AvatarPrimitive from "@radix-ui/react-avatar" 3 | 4 | import { cn } from "@/lib/utils" 5 | 6 | const Avatar = React.forwardRef< 7 | React.ElementRef, 8 | React.ComponentPropsWithoutRef 9 | >(({ className, ...props }, ref) => ( 10 | 18 | )) 19 | Avatar.displayName = AvatarPrimitive.Root.displayName 20 | 21 | const AvatarImage = React.forwardRef< 22 | React.ElementRef, 23 | React.ComponentPropsWithoutRef 24 | >(({ className, ...props }, ref) => ( 25 | 30 | )) 31 | AvatarImage.displayName = AvatarPrimitive.Image.displayName 32 | 33 | const AvatarFallback = React.forwardRef< 34 | React.ElementRef, 35 | React.ComponentPropsWithoutRef 36 | >(({ className, ...props }, ref) => ( 37 | 45 | )) 46 | AvatarFallback.displayName = AvatarPrimitive.Fallback.displayName 47 | 48 | export { Avatar, AvatarImage, AvatarFallback } 49 | -------------------------------------------------------------------------------- /libs/ui/components/ui/badge.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | import { cva, type VariantProps } from "class-variance-authority" 3 | 4 | import { cn } from "@/lib/utils" 5 | 6 | const badgeVariants = cva( 7 | 
"inline-flex items-center rounded-full border px-2.5 py-0.5 text-xs font-semibold transition-colors focus:outline-none focus:ring-2 focus:ring-ring focus:ring-offset-2", 8 | { 9 | variants: { 10 | variant: { 11 | default: 12 | "border-transparent bg-primary text-primary-foreground hover:bg-primary/80", 13 | secondary: 14 | "border-transparent bg-secondary text-secondary-foreground hover:bg-secondary/80", 15 | destructive: 16 | "border-transparent bg-destructive text-destructive-foreground hover:bg-destructive/80", 17 | outline: "text-foreground", 18 | }, 19 | }, 20 | defaultVariants: { 21 | variant: "default", 22 | }, 23 | } 24 | ) 25 | 26 | export interface BadgeProps 27 | extends React.HTMLAttributes, 28 | VariantProps {} 29 | 30 | function Badge({ className, variant, ...props }: BadgeProps) { 31 | return ( 32 |
33 | ) 34 | } 35 | 36 | export { Badge, badgeVariants } 37 | -------------------------------------------------------------------------------- /libs/ui/components/ui/checkbox.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | import * as CheckboxPrimitive from "@radix-ui/react-checkbox" 3 | import { Check } from "lucide-react" 4 | 5 | import { cn } from "@/lib/utils" 6 | 7 | const Checkbox = React.forwardRef< 8 | React.ElementRef, 9 | React.ComponentPropsWithoutRef 10 | >(({ className, ...props }, ref) => ( 11 | 19 | 22 | 23 | 24 | 25 | )) 26 | Checkbox.displayName = CheckboxPrimitive.Root.displayName 27 | 28 | export { Checkbox } 29 | -------------------------------------------------------------------------------- /libs/ui/components/ui/input.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | 3 | import { cn } from "@/lib/utils" 4 | 5 | export interface InputProps 6 | extends React.InputHTMLAttributes {} 7 | 8 | const Input = React.forwardRef( 9 | ({ className, type, ...props }, ref) => { 10 | return ( 11 | 20 | ) 21 | } 22 | ) 23 | Input.displayName = "Input" 24 | 25 | export { Input } 26 | -------------------------------------------------------------------------------- /libs/ui/components/ui/label.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | import * as LabelPrimitive from "@radix-ui/react-label" 3 | import { cva, type VariantProps } from "class-variance-authority" 4 | 5 | import { cn } from "@/lib/utils" 6 | 7 | const labelVariants = cva( 8 | "text-sm font-medium leading-none peer-disabled:cursor-not-allowed peer-disabled:opacity-70" 9 | ) 10 | 11 | const Label = React.forwardRef< 12 | React.ElementRef, 13 | React.ComponentPropsWithoutRef & 14 | VariantProps 15 | >(({ className, ...props }, ref) => ( 16 | 21 | )) 22 | Label.displayName = 
LabelPrimitive.Root.displayName 23 | 24 | export { Label } 25 | -------------------------------------------------------------------------------- /libs/ui/components/ui/popover.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | import * as PopoverPrimitive from "@radix-ui/react-popover" 3 | 4 | import { cn } from "@/lib/utils" 5 | 6 | const Popover = PopoverPrimitive.Root 7 | 8 | const PopoverTrigger = PopoverPrimitive.Trigger 9 | 10 | const PopoverContent = React.forwardRef< 11 | React.ElementRef, 12 | React.ComponentPropsWithoutRef 13 | >(({ className, align = "center", sideOffset = 4, ...props }, ref) => ( 14 | 15 | 25 | 26 | )) 27 | PopoverContent.displayName = PopoverPrimitive.Content.displayName 28 | 29 | export { Popover, PopoverTrigger, PopoverContent } 30 | -------------------------------------------------------------------------------- /libs/ui/components/ui/radio-group.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | import * as RadioGroupPrimitive from "@radix-ui/react-radio-group" 3 | import { Circle } from "lucide-react" 4 | 5 | import { cn } from "@/lib/utils" 6 | 7 | const RadioGroup = React.forwardRef< 8 | React.ElementRef, 9 | React.ComponentPropsWithoutRef 10 | >(({ className, ...props }, ref) => { 11 | return ( 12 | 17 | ) 18 | }) 19 | RadioGroup.displayName = RadioGroupPrimitive.Root.displayName 20 | 21 | const RadioGroupItem = React.forwardRef< 22 | React.ElementRef, 23 | React.ComponentPropsWithoutRef 24 | >(({ className, children, ...props }, ref) => { 25 | return ( 26 | 34 | 35 | 36 | 37 | 38 | ) 39 | }) 40 | RadioGroupItem.displayName = RadioGroupPrimitive.Item.displayName 41 | 42 | export { RadioGroup, RadioGroupItem } 43 | -------------------------------------------------------------------------------- /libs/ui/components/ui/separator.tsx: 
-------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | import * as SeparatorPrimitive from "@radix-ui/react-separator" 3 | 4 | import { cn } from "@/lib/utils" 5 | 6 | const Separator = React.forwardRef< 7 | React.ElementRef, 8 | React.ComponentPropsWithoutRef 9 | >( 10 | ( 11 | { className, orientation = "horizontal", decorative = true, ...props }, 12 | ref 13 | ) => ( 14 | 25 | ) 26 | ) 27 | Separator.displayName = SeparatorPrimitive.Root.displayName 28 | 29 | export { Separator } 30 | -------------------------------------------------------------------------------- /libs/ui/components/ui/skeleton.tsx: -------------------------------------------------------------------------------- 1 | import { cn } from "@/lib/utils" 2 | 3 | function Skeleton({ 4 | className, 5 | ...props 6 | }: React.HTMLAttributes) { 7 | return ( 8 |
12 | ) 13 | } 14 | 15 | export { Skeleton } 16 | -------------------------------------------------------------------------------- /libs/ui/components/ui/spinner.tsx: -------------------------------------------------------------------------------- 1 | import { Loader2 } from "lucide-react" 2 | 3 | export const Icons = { 4 | spinner: Loader2, 5 | } 6 | 7 | export function Spinner() { 8 | return 9 | } 10 | -------------------------------------------------------------------------------- /libs/ui/components/ui/switch.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | import * as SwitchPrimitives from "@radix-ui/react-switch" 3 | 4 | import { cn } from "@/lib/utils" 5 | 6 | const Switch = React.forwardRef< 7 | React.ElementRef, 8 | React.ComponentPropsWithoutRef 9 | >(({ className, ...props }, ref) => ( 10 | 18 | 23 | 24 | )) 25 | Switch.displayName = SwitchPrimitives.Root.displayName 26 | 27 | export { Switch } 28 | -------------------------------------------------------------------------------- /libs/ui/components/ui/textarea.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react" 2 | 3 | import { cn } from "@/lib/utils" 4 | 5 | export interface TextareaProps 6 | extends React.TextareaHTMLAttributes {} 7 | 8 | const Textarea = React.forwardRef( 9 | ({ className, ...props }, ref) => { 10 | return ( 11 |