├── .python-version ├── Brewfile ├── src ├── letta_client │ ├── py.typed │ ├── _version.py │ ├── types │ │ ├── blocks │ │ │ └── __init__.py │ │ ├── batches │ │ │ ├── __init__.py │ │ │ └── message_list_params.py │ │ ├── archives │ │ │ ├── __init__.py │ │ │ └── passage_create_params.py │ │ ├── models │ │ │ ├── __init__.py │ │ │ └── embedding_list_response.py │ │ ├── agents │ │ │ ├── job_type.py │ │ │ ├── file_open_response.py │ │ │ ├── file_close_all_response.py │ │ │ ├── message_cancel_response.py │ │ │ ├── message_role.py │ │ │ ├── tool_call.py │ │ │ ├── job_status.py │ │ │ ├── passage_list_response.py │ │ │ ├── passage_create_response.py │ │ │ ├── tool_call_delta.py │ │ │ ├── message_reset_params.py │ │ │ ├── update_reasoning_message_param.py │ │ │ ├── message_cancel_params.py │ │ │ ├── tool_run_params.py │ │ │ ├── letta_user_message_content_union_param.py │ │ │ ├── message_type.py │ │ │ ├── letta_user_message_content_union.py │ │ │ ├── update_system_message_param.py │ │ │ ├── text_content.py │ │ │ ├── redacted_reasoning_content.py │ │ │ ├── tool_return.py │ │ │ ├── redacted_reasoning_content_param.py │ │ │ ├── text_content_param.py │ │ │ ├── letta_assistant_message_content_union.py │ │ │ ├── approval_return.py │ │ │ ├── letta_assistant_message_content_union_param.py │ │ │ ├── tool_return_param.py │ │ │ ├── omitted_reasoning_content.py │ │ │ ├── omitted_reasoning_content_param.py │ │ │ ├── tool_return_content.py │ │ │ ├── update_user_message_param.py │ │ │ ├── approval_return_param.py │ │ │ ├── update_assistant_message_param.py │ │ │ ├── tool_return_content_param.py │ │ │ ├── passage_search_response.py │ │ │ ├── reasoning_content.py │ │ │ ├── passage_create_params.py │ │ │ ├── tool_update_approval_params.py │ │ │ ├── passage_list_params.py │ │ │ ├── reasoning_content_param.py │ │ │ ├── tool_call_content.py │ │ │ ├── summary_message.py │ │ │ ├── tool_call_content_param.py │ │ │ ├── tool_list_params.py │ │ │ ├── block_list_params.py │ │ │ ├── event_message.py │ │ │ 
├── folder_list_params.py │ │ │ ├── group_list_params.py │ │ │ ├── tool_execution_result.py │ │ │ ├── approval_create_param.py │ │ │ ├── file_list_params.py │ │ │ ├── system_message.py │ │ │ ├── message.py │ │ │ ├── file_list_response.py │ │ │ ├── passage_search_params.py │ │ │ ├── hidden_reasoning_message.py │ │ │ └── message_list_params.py │ │ ├── agent_export_file_response.py │ │ ├── identity_type.py │ │ ├── provider_category.py │ │ ├── tag_list_response.py │ │ ├── folders │ │ │ ├── agent_list_response.py │ │ │ ├── __init__.py │ │ │ ├── file_upload_params.py │ │ │ ├── agent_list_params.py │ │ │ └── file_list_params.py │ │ ├── vector_db_provider.py │ │ ├── template_delete_response.py │ │ ├── mcp_servers │ │ │ ├── __init__.py │ │ │ ├── tool_list_response.py │ │ │ └── tool_run_params.py │ │ ├── templates │ │ │ ├── __init__.py │ │ │ └── agent_create_response.py │ │ ├── manager_type.py │ │ ├── model_list_response.py │ │ ├── health_response.py │ │ ├── message_list_response.py │ │ ├── access_token_delete_params.py │ │ ├── tool_search_response.py │ │ ├── template_update_response.py │ │ ├── identities │ │ │ ├── __init__.py │ │ │ ├── property_upsert_params.py │ │ │ ├── block_list_params.py │ │ │ └── agent_list_params.py │ │ ├── mcp_server_refresh_params.py │ │ ├── archive_update_params.py │ │ ├── agent_import_file_response.py │ │ ├── supervisor_manager_param.py │ │ ├── npm_requirement.py │ │ ├── pip_requirement.py │ │ ├── round_robin_manager_param.py │ │ ├── runs │ │ │ ├── __init__.py │ │ │ ├── step_list_params.py │ │ │ ├── message_stream_params.py │ │ │ ├── usage_retrieve_response.py │ │ │ └── message_list_params.py │ │ ├── steps │ │ │ ├── __init__.py │ │ │ ├── feedback_create_params.py │ │ │ ├── message_list_params.py │ │ │ └── message_list_response.py │ │ ├── text_response_format_param.py │ │ ├── text_response_format.py │ │ ├── agent_type.py │ │ ├── json_object_response_format_param.py │ │ ├── json_object_response_format.py │ │ ├── mcp_server_create_response.py │ │ ├── 
mcp_server_update_response.py │ │ ├── sleeptime_manager_param.py │ │ ├── mcp_server_retrieve_response.py │ │ ├── dynamic_manager_param.py │ │ ├── npm_requirement_param.py │ │ ├── pip_requirement_param.py │ │ ├── tool_type.py │ │ ├── groups │ │ │ ├── __init__.py │ │ │ ├── message_list_params.py │ │ │ └── message_update_response.py │ │ ├── stop_reason_type.py │ │ ├── provider_type.py │ │ ├── model_list_params.py │ │ ├── json_schema_response_format.py │ │ ├── agent_export_file_params.py │ │ ├── mcp_server_list_response.py │ │ ├── identity_property.py │ │ ├── json_schema_response_format_param.py │ │ ├── identity_property_param.py │ │ ├── archive_create_params.py │ │ ├── continue_tool_rule.py │ │ ├── terminal_tool_rule.py │ │ ├── access_token_list_params.py │ │ ├── continue_tool_rule_param.py │ │ ├── required_before_exit_tool_rule.py │ │ ├── template_create_response.py │ │ ├── terminal_tool_rule_param.py │ │ ├── tool_search_result.py │ │ ├── parent_tool_rule.py │ │ ├── passage_search_response.py │ │ ├── required_before_exit_tool_rule_param.py │ │ ├── create_stdio_mcp_server_param.py │ │ ├── requires_approval_tool_rule.py │ │ ├── access_token_create_response.py │ │ ├── template_update_params.py │ │ ├── stdio_mcp_server.py │ │ ├── tool_search_params.py │ │ ├── create_sse_mcp_server_param.py │ │ ├── requires_approval_tool_rule_param.py │ │ ├── update_stdio_mcp_server_param.py │ │ ├── parent_tool_rule_param.py │ │ ├── folder_update_params.py │ │ ├── update_sse_mcp_server_param.py │ │ ├── create_streamable_http_mcp_server_param.py │ │ ├── mcp_server_create_params.py │ │ ├── max_count_per_step_tool_rule.py │ │ ├── mcp_server_update_params.py │ │ ├── message_list_params.py │ │ ├── update_streamable_http_mcp_server_param.py │ │ ├── sse_mcp_server.py │ │ ├── max_count_per_step_tool_rule_param.py │ │ ├── voice_sleeptime_manager_param.py │ │ ├── message_search_params.py │ │ ├── batch_list_params.py │ │ ├── access_token_create_params.py │ │ ├── access_token_list_response.py │ │ ├── 
streamable_http_mcp_server.py │ │ ├── conditional_tool_rule.py │ │ ├── letta_message_content_union_param.py │ │ ├── folder_list_params.py │ │ ├── identity.py │ │ ├── conditional_tool_rule_param.py │ │ ├── folder_create_params.py │ │ ├── init_tool_rule.py │ │ ├── identity_update_params.py │ │ ├── tag_list_params.py │ │ ├── init_tool_rule_param.py │ │ ├── archive_list_params.py │ │ ├── group_list_params.py │ │ ├── agent_retrieve_params.py │ │ ├── identity_create_params.py │ │ ├── identity_upsert_params.py │ │ ├── xai_model_settings_param.py │ │ ├── bedrock_model_settings_param.py │ │ ├── groq_model_settings_param.py │ │ ├── azure_model_settings_param.py │ │ ├── deepseek_model_settings_param.py │ │ ├── together_model_settings_param.py │ │ ├── group_create_params.py │ │ ├── agent_environment_variable.py │ │ ├── message_create_param.py │ │ ├── xai_model_settings.py │ │ ├── bedrock_model_settings.py │ │ ├── groq_model_settings.py │ │ ├── azure_model_settings.py │ │ ├── folder.py │ │ ├── deepseek_model_settings.py │ │ ├── template_create_params.py │ │ ├── together_model_settings.py │ │ ├── identity_list_params.py │ │ ├── passage_search_params.py │ │ └── archive.py │ ├── lib │ │ └── .keep │ ├── _utils │ │ ├── _streams.py │ │ ├── _resources_proxy.py │ │ ├── _logs.py │ │ ├── _compat.py │ │ └── _reflection.py │ ├── _constants.py │ ├── resources │ │ ├── blocks │ │ │ └── __init__.py │ │ ├── groups │ │ │ └── __init__.py │ │ ├── batches │ │ │ └── __init__.py │ │ ├── mcp_servers │ │ │ └── __init__.py │ │ ├── templates │ │ │ └── __init__.py │ │ ├── archives │ │ │ └── __init__.py │ │ └── models │ │ │ └── __init__.py │ └── _resource.py └── letta_sdk │ └── lib │ └── .keep ├── tests ├── sample_file.txt ├── __init__.py ├── api_resources │ ├── __init__.py │ ├── agents │ │ └── __init__.py │ ├── blocks │ │ └── __init__.py │ ├── groups │ │ └── __init__.py │ ├── models │ │ └── __init__.py │ ├── runs │ │ └── __init__.py │ ├── steps │ │ └── __init__.py │ ├── archives │ │ └── __init__.py │ ├── 
batches │ │ └── __init__.py │ ├── folders │ │ └── __init__.py │ ├── identities │ │ └── __init__.py │ ├── mcp_servers │ │ └── __init__.py │ └── templates │ │ └── __init__.py └── test_utils │ └── test_proxy.py ├── .release-please-manifest.json ├── .vscode └── settings.json ├── bin ├── publish-pypi └── check-release-environment ├── scripts ├── format ├── lint ├── bootstrap ├── utils │ └── upload-artifact.sh └── mock ├── .gitignore ├── examples └── .keep ├── .stats.yml ├── noxfile.py ├── .devcontainer ├── Dockerfile └── devcontainer.json ├── .github └── workflows │ ├── release-doctor.yml │ └── publish-pypi.yml └── SECURITY.md /.python-version: -------------------------------------------------------------------------------- 1 | 3.9.18 2 | -------------------------------------------------------------------------------- /Brewfile: -------------------------------------------------------------------------------- 1 | brew "rye" 2 | 3 | -------------------------------------------------------------------------------- /src/letta_client/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/sample_file.txt: -------------------------------------------------------------------------------- 1 | Hello, world! 
2 | -------------------------------------------------------------------------------- /.release-please-manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | ".": "1.4.0" 3 | } -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.analysis.importFormat": "relative", 3 | } 4 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | -------------------------------------------------------------------------------- /tests/api_resources/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | -------------------------------------------------------------------------------- /tests/api_resources/agents/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | -------------------------------------------------------------------------------- /tests/api_resources/blocks/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | -------------------------------------------------------------------------------- /tests/api_resources/groups/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | -------------------------------------------------------------------------------- /tests/api_resources/models/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | -------------------------------------------------------------------------------- /tests/api_resources/runs/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | -------------------------------------------------------------------------------- /tests/api_resources/steps/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | -------------------------------------------------------------------------------- /tests/api_resources/archives/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | -------------------------------------------------------------------------------- /tests/api_resources/batches/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | -------------------------------------------------------------------------------- /tests/api_resources/folders/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | -------------------------------------------------------------------------------- /tests/api_resources/identities/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | -------------------------------------------------------------------------------- /tests/api_resources/mcp_servers/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | -------------------------------------------------------------------------------- /tests/api_resources/templates/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | -------------------------------------------------------------------------------- /bin/publish-pypi: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -eux 4 | mkdir -p dist 5 | rye build --clean 6 | rye publish --yes --token=$PYPI_TOKEN 7 | -------------------------------------------------------------------------------- /scripts/format: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | cd "$(dirname "$0")/.." 
6 | 7 | echo "==> Running formatters" 8 | rye run format 9 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .prism.log 2 | _dev 3 | 4 | __pycache__ 5 | .mypy_cache 6 | 7 | dist 8 | 9 | .venv 10 | .idea 11 | 12 | .env 13 | .envrc 14 | codegen.log 15 | Brewfile.lock.json 16 | -------------------------------------------------------------------------------- /src/letta_client/_version.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | __title__ = "letta_client" 4 | __version__ = "1.4.0" # x-release-please-version 5 | -------------------------------------------------------------------------------- /scripts/lint: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | cd "$(dirname "$0")/.." 6 | 7 | echo "==> Running lints" 8 | rye run lint 9 | 10 | echo "==> Making sure it imports" 11 | rye run python -c 'import letta_client' 12 | -------------------------------------------------------------------------------- /src/letta_client/types/blocks/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from .agent_list_params import AgentListParams as AgentListParams 6 | -------------------------------------------------------------------------------- /src/letta_client/types/batches/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from __future__ import annotations 4 | 5 | from .message_list_params import MessageListParams as MessageListParams 6 | -------------------------------------------------------------------------------- /src/letta_client/lib/.keep: -------------------------------------------------------------------------------- 1 | File generated from our OpenAPI spec by Stainless. 2 | 3 | This directory can be used to store custom files to expand the SDK. 4 | It is ignored by Stainless code generation and its content (other than this keep file) won't be touched. -------------------------------------------------------------------------------- /src/letta_sdk/lib/.keep: -------------------------------------------------------------------------------- 1 | File generated from our OpenAPI spec by Stainless. 2 | 3 | This directory can be used to store custom files to expand the SDK. 4 | It is ignored by Stainless code generation and its content (other than this keep file) won't be touched. -------------------------------------------------------------------------------- /examples/.keep: -------------------------------------------------------------------------------- 1 | File generated from our OpenAPI spec by Stainless. 2 | 3 | This directory can be used to store example files demonstrating usage of this SDK. 4 | It is ignored by Stainless code generation and its content (other than this keep file) won't be touched. -------------------------------------------------------------------------------- /src/letta_client/types/archives/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from __future__ import annotations 4 | 5 | from .passage_create_params import PassageCreateParams as PassageCreateParams 6 | -------------------------------------------------------------------------------- /src/letta_client/types/models/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from .embedding_list_response import EmbeddingListResponse as EmbeddingListResponse 6 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/job_type.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing_extensions import Literal, TypeAlias 4 | 5 | __all__ = ["JobType"] 6 | 7 | JobType: TypeAlias = Literal["job", "run", "batch"] 8 | -------------------------------------------------------------------------------- /src/letta_client/types/agent_export_file_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing_extensions import TypeAlias 4 | 5 | __all__ = ["AgentExportFileResponse"] 6 | 7 | AgentExportFileResponse: TypeAlias = str 8 | -------------------------------------------------------------------------------- /src/letta_client/types/identity_type.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing_extensions import Literal, TypeAlias 4 | 5 | __all__ = ["IdentityType"] 6 | 7 | IdentityType: TypeAlias = Literal["org", "user", "other"] 8 | -------------------------------------------------------------------------------- /src/letta_client/types/provider_category.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing_extensions import Literal, TypeAlias 4 | 5 | __all__ = ["ProviderCategory"] 6 | 7 | ProviderCategory: TypeAlias = Literal["base", "byok"] 8 | -------------------------------------------------------------------------------- /src/letta_client/types/tag_list_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing import List 4 | from typing_extensions import TypeAlias 5 | 6 | __all__ = ["TagListResponse"] 7 | 8 | TagListResponse: TypeAlias = List[str] 9 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/file_open_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing import List 4 | from typing_extensions import TypeAlias 5 | 6 | __all__ = ["FileOpenResponse"] 7 | 8 | FileOpenResponse: TypeAlias = List[str] 9 | -------------------------------------------------------------------------------- /src/letta_client/types/folders/agent_list_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing import List 4 | from typing_extensions import TypeAlias 5 | 6 | __all__ = ["AgentListResponse"] 7 | 8 | AgentListResponse: TypeAlias = List[str] 9 | -------------------------------------------------------------------------------- /src/letta_client/types/vector_db_provider.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing_extensions import Literal, TypeAlias 4 | 5 | __all__ = ["VectorDBProvider"] 6 | 7 | VectorDBProvider: TypeAlias = Literal["native", "tpuf", "pinecone"] 8 | -------------------------------------------------------------------------------- /.stats.yml: -------------------------------------------------------------------------------- 1 | configured_endpoints: 128 2 | openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/letta-ai%2Fletta-sdk-abec2d1698d8b5b9d91927d6a04f1a7a0145b1ddcdc7c1888dd452e616aed492.yml 3 | openapi_spec_hash: 17d123555a831e208608425bc4fceb96 4 | config_hash: af181af60655305bf430e7a5ad358f8d 5 | -------------------------------------------------------------------------------- /src/letta_client/types/template_delete_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from .._models import BaseModel 4 | 5 | __all__ = ["TemplateDeleteResponse"] 6 | 7 | 8 | class TemplateDeleteResponse(BaseModel): 9 | success: bool 10 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/file_close_all_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing import List 4 | from typing_extensions import TypeAlias 5 | 6 | __all__ = ["FileCloseAllResponse"] 7 | 8 | FileCloseAllResponse: TypeAlias = List[str] 9 | -------------------------------------------------------------------------------- /src/letta_client/types/mcp_servers/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from .tool_run_params import ToolRunParams as ToolRunParams 6 | from .tool_list_response import ToolListResponse as ToolListResponse 7 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/message_cancel_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing import Dict 4 | from typing_extensions import TypeAlias 5 | 6 | __all__ = ["MessageCancelResponse"] 7 | 8 | MessageCancelResponse: TypeAlias = Dict[str, object] 9 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/message_role.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing_extensions import Literal, TypeAlias 4 | 5 | __all__ = ["MessageRole"] 6 | 7 | MessageRole: TypeAlias = Literal["assistant", "user", "tool", "function", "system", "approval"] 8 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/tool_call.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from ..._models import BaseModel 4 | 5 | __all__ = ["ToolCall"] 6 | 7 | 8 | class ToolCall(BaseModel): 9 | arguments: str 10 | 11 | name: str 12 | 13 | tool_call_id: str 14 | -------------------------------------------------------------------------------- /src/letta_client/types/templates/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from .agent_create_params import AgentCreateParams as AgentCreateParams 6 | from .agent_create_response import AgentCreateResponse as AgentCreateResponse 7 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/job_status.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing_extensions import Literal, TypeAlias 4 | 5 | __all__ = ["JobStatus"] 6 | 7 | JobStatus: TypeAlias = Literal["created", "running", "completed", "failed", "pending", "cancelled", "expired"] 8 | -------------------------------------------------------------------------------- /src/letta_client/types/manager_type.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing_extensions import Literal, TypeAlias 4 | 5 | __all__ = ["ManagerType"] 6 | 7 | ManagerType: TypeAlias = Literal["round_robin", "supervisor", "dynamic", "sleeptime", "voice_sleeptime", "swarm"] 8 | -------------------------------------------------------------------------------- /noxfile.py: -------------------------------------------------------------------------------- 1 | import nox 2 | 3 | 4 | @nox.session(reuse_venv=True, name="test-pydantic-v1") 5 | def test_pydantic_v1(session: nox.Session) -> None: 6 | session.install("-r", "requirements-dev.lock") 7 | session.install("pydantic<2") 8 | 9 | session.run("pytest", "--showlocals", "--ignore=tests/functional", *session.posargs) 10 | -------------------------------------------------------------------------------- /src/letta_client/types/model_list_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing import List 4 | from typing_extensions import TypeAlias 5 | 6 | from .model import Model 7 | 8 | __all__ = ["ModelListResponse"] 9 | 10 | ModelListResponse: TypeAlias = List[Model] 11 | -------------------------------------------------------------------------------- /src/letta_client/types/mcp_servers/tool_list_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing import List 4 | from typing_extensions import TypeAlias 5 | 6 | from ..tool import Tool 7 | 8 | __all__ = ["ToolListResponse"] 9 | 10 | ToolListResponse: TypeAlias = List[Tool] 11 | -------------------------------------------------------------------------------- /src/letta_client/types/health_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. 
See CONTRIBUTING.md for details. 2 | 3 | from .._models import BaseModel 4 | 5 | __all__ = ["HealthResponse"] 6 | 7 | 8 | class HealthResponse(BaseModel): 9 | """Health check response body""" 10 | 11 | status: str 12 | 13 | version: str 14 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/passage_list_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing import List 4 | from typing_extensions import TypeAlias 5 | 6 | from ..passage import Passage 7 | 8 | __all__ = ["PassageListResponse"] 9 | 10 | PassageListResponse: TypeAlias = List[Passage] 11 | -------------------------------------------------------------------------------- /src/letta_client/types/message_list_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing import List 4 | from typing_extensions import TypeAlias 5 | 6 | from .agents.message import Message 7 | 8 | __all__ = ["MessageListResponse"] 9 | 10 | MessageListResponse: TypeAlias = List[Message] 11 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/passage_create_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing import List 4 | from typing_extensions import TypeAlias 5 | 6 | from ..passage import Passage 7 | 8 | __all__ = ["PassageCreateResponse"] 9 | 10 | PassageCreateResponse: TypeAlias = List[Passage] 11 | -------------------------------------------------------------------------------- /src/letta_client/_utils/_streams.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | from typing_extensions import Iterator, AsyncIterator 3 | 4 | 5 | def consume_sync_iterator(iterator: Iterator[Any]) -> None: 6 | for _ in iterator: 7 | ... 8 | 9 | 10 | async def consume_async_iterator(iterator: AsyncIterator[Any]) -> None: 11 | async for _ in iterator: 12 | ... 13 | -------------------------------------------------------------------------------- /src/letta_client/types/access_token_delete_params.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing_extensions import TypedDict 6 | 7 | __all__ = ["AccessTokenDeleteParams"] 8 | 9 | 10 | class AccessTokenDeleteParams(TypedDict, total=False): 11 | body: object 12 | -------------------------------------------------------------------------------- /src/letta_client/types/tool_search_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing import List 4 | from typing_extensions import TypeAlias 5 | 6 | from .tool_search_result import ToolSearchResult 7 | 8 | __all__ = ["ToolSearchResponse"] 9 | 10 | ToolSearchResponse: TypeAlias = List[ToolSearchResult] 11 | -------------------------------------------------------------------------------- /src/letta_client/types/models/embedding_list_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing import List 4 | from typing_extensions import TypeAlias 5 | 6 | from ..embedding_model import EmbeddingModel 7 | 8 | __all__ = ["EmbeddingListResponse"] 9 | 10 | EmbeddingListResponse: TypeAlias = List[EmbeddingModel] 11 | -------------------------------------------------------------------------------- /src/letta_client/types/template_update_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing import Optional 4 | 5 | from .._models import BaseModel 6 | 7 | __all__ = ["TemplateUpdateResponse"] 8 | 9 | 10 | class TemplateUpdateResponse(BaseModel): 11 | success: bool 12 | 13 | message: Optional[str] = None 14 | -------------------------------------------------------------------------------- /.devcontainer/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG VARIANT="3.9" 2 | FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT} 3 | 4 | USER vscode 5 | 6 | RUN curl -sSf https://rye.astral.sh/get | RYE_VERSION="0.44.0" RYE_INSTALL_OPTION="--yes" bash 7 | ENV PATH=/home/vscode/.rye/shims:$PATH 8 | 9 | RUN echo "[[ -d .venv ]] && source .venv/bin/activate || export PATH=\$PATH" >> /home/vscode/.bashrc 10 | -------------------------------------------------------------------------------- /src/letta_client/types/identities/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from .agent_list_params import AgentListParams as AgentListParams 6 | from .block_list_params import BlockListParams as BlockListParams 7 | from .property_upsert_params import PropertyUpsertParams as PropertyUpsertParams 8 | -------------------------------------------------------------------------------- /src/letta_client/types/mcp_server_refresh_params.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from __future__ import annotations 4 | 5 | from typing import Optional 6 | from typing_extensions import TypedDict 7 | 8 | __all__ = ["McpServerRefreshParams"] 9 | 10 | 11 | class McpServerRefreshParams(TypedDict, total=False): 12 | agent_id: Optional[str] 13 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/tool_call_delta.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing import Optional 4 | 5 | from ..._models import BaseModel 6 | 7 | __all__ = ["ToolCallDelta"] 8 | 9 | 10 | class ToolCallDelta(BaseModel): 11 | arguments: Optional[str] = None 12 | 13 | name: Optional[str] = None 14 | 15 | tool_call_id: Optional[str] = None 16 | -------------------------------------------------------------------------------- /src/letta_client/types/archive_update_params.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing import Optional 6 | from typing_extensions import TypedDict 7 | 8 | __all__ = ["ArchiveUpdateParams"] 9 | 10 | 11 | class ArchiveUpdateParams(TypedDict, total=False): 12 | description: Optional[str] 13 | 14 | name: Optional[str] 15 | -------------------------------------------------------------------------------- /src/letta_client/types/agent_import_file_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing import List 4 | 5 | from .._models import BaseModel 6 | 7 | __all__ = ["AgentImportFileResponse"] 8 | 9 | 10 | class AgentImportFileResponse(BaseModel): 11 | """Response model for imported agents""" 12 | 13 | agent_ids: List[str] 14 | """List of IDs of the imported agents""" 15 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/message_reset_params.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing_extensions import TypedDict 6 | 7 | __all__ = ["MessageResetParams"] 8 | 9 | 10 | class MessageResetParams(TypedDict, total=False): 11 | add_default_initial_messages: bool 12 | """If true, adds the default initial messages after resetting.""" 13 | -------------------------------------------------------------------------------- /src/letta_client/types/supervisor_manager_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing_extensions import Literal, Required, TypedDict 6 | 7 | __all__ = ["SupervisorManagerParam"] 8 | 9 | 10 | class SupervisorManagerParam(TypedDict, total=False): 11 | manager_agent_id: Required[str] 12 | 13 | manager_type: Literal["supervisor"] 14 | -------------------------------------------------------------------------------- /src/letta_client/types/npm_requirement.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing import Optional 4 | 5 | from .._models import BaseModel 6 | 7 | __all__ = ["NpmRequirement"] 8 | 9 | 10 | class NpmRequirement(BaseModel): 11 | name: str 12 | """Name of the npm package.""" 13 | 14 | version: Optional[str] = None 15 | """Optional version of the package, following semantic versioning.""" 16 | -------------------------------------------------------------------------------- /src/letta_client/types/pip_requirement.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing import Optional 4 | 5 | from .._models import BaseModel 6 | 7 | __all__ = ["PipRequirement"] 8 | 9 | 10 | class PipRequirement(BaseModel): 11 | name: str 12 | """Name of the pip package.""" 13 | 14 | version: Optional[str] = None 15 | """Optional version of the package, following semantic versioning.""" 16 | -------------------------------------------------------------------------------- /src/letta_client/types/round_robin_manager_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing import Optional 6 | from typing_extensions import Literal, TypedDict 7 | 8 | __all__ = ["RoundRobinManagerParam"] 9 | 10 | 11 | class RoundRobinManagerParam(TypedDict, total=False): 12 | manager_type: Literal["round_robin"] 13 | 14 | max_turns: Optional[int] 15 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/update_reasoning_message_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from __future__ import annotations 4 | 5 | from typing_extensions import Literal, Required, TypedDict 6 | 7 | __all__ = ["UpdateReasoningMessageParam"] 8 | 9 | 10 | class UpdateReasoningMessageParam(TypedDict, total=False): 11 | reasoning: Required[str] 12 | 13 | message_type: Literal["reasoning_message"] 14 | -------------------------------------------------------------------------------- /src/letta_client/types/runs/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from .step_list_params import StepListParams as StepListParams 6 | from .message_list_params import MessageListParams as MessageListParams 7 | from .message_stream_params import MessageStreamParams as MessageStreamParams 8 | from .usage_retrieve_response import UsageRetrieveResponse as UsageRetrieveResponse 9 | -------------------------------------------------------------------------------- /src/letta_client/types/mcp_servers/tool_run_params.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing import Dict 6 | from typing_extensions import Required, TypedDict 7 | 8 | __all__ = ["ToolRunParams"] 9 | 10 | 11 | class ToolRunParams(TypedDict, total=False): 12 | mcp_server_id: Required[str] 13 | 14 | args: Dict[str, object] 15 | """Arguments to pass to the tool""" 16 | -------------------------------------------------------------------------------- /src/letta_client/types/steps/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from __future__ import annotations 4 | 5 | from .message_list_params import MessageListParams as MessageListParams 6 | from .message_list_response import MessageListResponse as MessageListResponse 7 | from .feedback_create_params import FeedbackCreateParams as FeedbackCreateParams 8 | from .metric_retrieve_response import MetricRetrieveResponse as MetricRetrieveResponse 9 | -------------------------------------------------------------------------------- /src/letta_client/types/text_response_format_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing_extensions import Literal, TypedDict 6 | 7 | __all__ = ["TextResponseFormatParam"] 8 | 9 | 10 | class TextResponseFormatParam(TypedDict, total=False): 11 | """Response format for plain text responses.""" 12 | 13 | type: Literal["text"] 14 | """The type of the response format.""" 15 | -------------------------------------------------------------------------------- /src/letta_client/types/text_response_format.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing import Optional 4 | from typing_extensions import Literal 5 | 6 | from .._models import BaseModel 7 | 8 | __all__ = ["TextResponseFormat"] 9 | 10 | 11 | class TextResponseFormat(BaseModel): 12 | """Response format for plain text responses.""" 13 | 14 | type: Optional[Literal["text"]] = None 15 | """The type of the response format.""" 16 | -------------------------------------------------------------------------------- /src/letta_client/types/agent_type.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing_extensions import Literal, TypeAlias 4 | 5 | __all__ = ["AgentType"] 6 | 7 | AgentType: TypeAlias = Literal[ 8 | "memgpt_agent", 9 | "memgpt_v2_agent", 10 | "letta_v1_agent", 11 | "react_agent", 12 | "workflow_agent", 13 | "split_thread_agent", 14 | "sleeptime_agent", 15 | "voice_convo_agent", 16 | "voice_sleeptime_agent", 17 | ] 18 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/message_cancel_params.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing import Optional 6 | from typing_extensions import TypedDict 7 | 8 | from ..._types import SequenceNotStr 9 | 10 | __all__ = ["MessageCancelParams"] 11 | 12 | 13 | class MessageCancelParams(TypedDict, total=False): 14 | run_ids: Optional[SequenceNotStr[str]] 15 | """Optional list of run IDs to cancel""" 16 | -------------------------------------------------------------------------------- /src/letta_client/types/identities/property_upsert_params.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from __future__ import annotations 4 | 5 | from typing import Iterable 6 | from typing_extensions import Required, TypedDict 7 | 8 | from ..identity_property_param import IdentityPropertyParam 9 | 10 | __all__ = ["PropertyUpsertParams"] 11 | 12 | 13 | class PropertyUpsertParams(TypedDict, total=False): 14 | body: Required[Iterable[IdentityPropertyParam]] 15 | -------------------------------------------------------------------------------- /src/letta_client/types/json_object_response_format_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing_extensions import Literal, TypedDict 6 | 7 | __all__ = ["JsonObjectResponseFormatParam"] 8 | 9 | 10 | class JsonObjectResponseFormatParam(TypedDict, total=False): 11 | """Response format for JSON object responses.""" 12 | 13 | type: Literal["json_object"] 14 | """The type of the response format.""" 15 | -------------------------------------------------------------------------------- /src/letta_client/types/json_object_response_format.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing import Optional 4 | from typing_extensions import Literal 5 | 6 | from .._models import BaseModel 7 | 8 | __all__ = ["JsonObjectResponseFormat"] 9 | 10 | 11 | class JsonObjectResponseFormat(BaseModel): 12 | """Response format for JSON object responses.""" 13 | 14 | type: Optional[Literal["json_object"]] = None 15 | """The type of the response format.""" 16 | -------------------------------------------------------------------------------- /src/letta_client/types/mcp_server_create_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing import Union 4 | from typing_extensions import TypeAlias 5 | 6 | from .sse_mcp_server import SseMcpServer 7 | from .stdio_mcp_server import StdioMcpServer 8 | from .streamable_http_mcp_server import StreamableHTTPMcpServer 9 | 10 | __all__ = ["McpServerCreateResponse"] 11 | 12 | McpServerCreateResponse: TypeAlias = Union[StdioMcpServer, SseMcpServer, StreamableHTTPMcpServer] 13 | -------------------------------------------------------------------------------- /src/letta_client/types/mcp_server_update_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing import Union 4 | from typing_extensions import TypeAlias 5 | 6 | from .sse_mcp_server import SseMcpServer 7 | from .stdio_mcp_server import StdioMcpServer 8 | from .streamable_http_mcp_server import StreamableHTTPMcpServer 9 | 10 | __all__ = ["McpServerUpdateResponse"] 11 | 12 | McpServerUpdateResponse: TypeAlias = Union[StdioMcpServer, SseMcpServer, StreamableHTTPMcpServer] 13 | -------------------------------------------------------------------------------- /src/letta_client/types/sleeptime_manager_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing import Optional 6 | from typing_extensions import Literal, Required, TypedDict 7 | 8 | __all__ = ["SleeptimeManagerParam"] 9 | 10 | 11 | class SleeptimeManagerParam(TypedDict, total=False): 12 | manager_agent_id: Required[str] 13 | 14 | manager_type: Literal["sleeptime"] 15 | 16 | sleeptime_agent_frequency: Optional[int] 17 | -------------------------------------------------------------------------------- /src/letta_client/_constants.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | import httpx 4 | 5 | RAW_RESPONSE_HEADER = "X-Stainless-Raw-Response" 6 | OVERRIDE_CAST_TO_HEADER = "____stainless_override_cast_to" 7 | 8 | # default timeout is 1 minute 9 | DEFAULT_TIMEOUT = httpx.Timeout(timeout=60, connect=5.0) 10 | DEFAULT_MAX_RETRIES = 2 11 | DEFAULT_CONNECTION_LIMITS = httpx.Limits(max_connections=100, max_keepalive_connections=20) 12 | 13 | INITIAL_RETRY_DELAY = 0.5 14 | MAX_RETRY_DELAY = 8.0 15 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/tool_run_params.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing import Dict 6 | from typing_extensions import Required, TypedDict 7 | 8 | __all__ = ["ToolRunParams"] 9 | 10 | 11 | class ToolRunParams(TypedDict, total=False): 12 | agent_id: Required[str] 13 | """The ID of the agent in the format 'agent-'""" 14 | 15 | args: Dict[str, object] 16 | """Arguments to pass to the tool""" 17 | -------------------------------------------------------------------------------- /src/letta_client/types/mcp_server_retrieve_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing import Union 4 | from typing_extensions import TypeAlias 5 | 6 | from .sse_mcp_server import SseMcpServer 7 | from .stdio_mcp_server import StdioMcpServer 8 | from .streamable_http_mcp_server import StreamableHTTPMcpServer 9 | 10 | __all__ = ["McpServerRetrieveResponse"] 11 | 12 | McpServerRetrieveResponse: TypeAlias = Union[StdioMcpServer, SseMcpServer, StreamableHTTPMcpServer] 13 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/letta_user_message_content_union_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing import Union 6 | from typing_extensions import TypeAlias 7 | 8 | from .text_content_param import TextContentParam 9 | from .image_content_param import ImageContentParam 10 | 11 | __all__ = ["LettaUserMessageContentUnionParam"] 12 | 13 | LettaUserMessageContentUnionParam: TypeAlias = Union[TextContentParam, ImageContentParam] 14 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/message_type.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing_extensions import Literal, TypeAlias 4 | 5 | __all__ = ["MessageType"] 6 | 7 | MessageType: TypeAlias = Literal[ 8 | "system_message", 9 | "user_message", 10 | "assistant_message", 11 | "reasoning_message", 12 | "hidden_reasoning_message", 13 | "tool_call_message", 14 | "tool_return_message", 15 | "approval_request_message", 16 | "approval_response_message", 17 | ] 18 | -------------------------------------------------------------------------------- /src/letta_client/types/dynamic_manager_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing import Optional 6 | from typing_extensions import Literal, Required, TypedDict 7 | 8 | __all__ = ["DynamicManagerParam"] 9 | 10 | 11 | class DynamicManagerParam(TypedDict, total=False): 12 | manager_agent_id: Required[str] 13 | 14 | manager_type: Literal["dynamic"] 15 | 16 | max_turns: Optional[int] 17 | 18 | termination_token: Optional[str] 19 | -------------------------------------------------------------------------------- /src/letta_client/types/npm_requirement_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from __future__ import annotations 4 | 5 | from typing import Optional 6 | from typing_extensions import Required, TypedDict 7 | 8 | __all__ = ["NpmRequirementParam"] 9 | 10 | 11 | class NpmRequirementParam(TypedDict, total=False): 12 | name: Required[str] 13 | """Name of the npm package.""" 14 | 15 | version: Optional[str] 16 | """Optional version of the package, following semantic versioning.""" 17 | -------------------------------------------------------------------------------- /src/letta_client/types/pip_requirement_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing import Optional 6 | from typing_extensions import Required, TypedDict 7 | 8 | __all__ = ["PipRequirementParam"] 9 | 10 | 11 | class PipRequirementParam(TypedDict, total=False): 12 | name: Required[str] 13 | """Name of the pip package.""" 14 | 15 | version: Optional[str] 16 | """Optional version of the package, following semantic versioning.""" 17 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/letta_user_message_content_union.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing import Union 4 | from typing_extensions import Annotated, TypeAlias 5 | 6 | from ..._utils import PropertyInfo 7 | from .text_content import TextContent 8 | from .image_content import ImageContent 9 | 10 | __all__ = ["LettaUserMessageContentUnion"] 11 | 12 | LettaUserMessageContentUnion: TypeAlias = Annotated[ 13 | Union[TextContent, ImageContent], PropertyInfo(discriminator="type") 14 | ] 15 | -------------------------------------------------------------------------------- /bin/check-release-environment: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | errors=() 4 | 5 | if [ -z "${PYPI_TOKEN}" ]; then 6 | errors+=("The PYPI_TOKEN secret has not been set. Please set it in either this repository's secrets or your organization secrets.") 7 | fi 8 | 9 | lenErrors=${#errors[@]} 10 | 11 | if [[ lenErrors -gt 0 ]]; then 12 | echo -e "Found the following errors in the release environment:\n" 13 | 14 | for error in "${errors[@]}"; do 15 | echo -e "- $error\n" 16 | done 17 | 18 | exit 1 19 | fi 20 | 21 | echo "The environment is ready to push releases!" 22 | -------------------------------------------------------------------------------- /src/letta_client/types/tool_type.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing_extensions import Literal, TypeAlias 4 | 5 | __all__ = ["ToolType"] 6 | 7 | ToolType: TypeAlias = Literal[ 8 | "custom", 9 | "letta_core", 10 | "letta_memory_core", 11 | "letta_multi_agent_core", 12 | "letta_sleeptime_core", 13 | "letta_voice_sleeptime_core", 14 | "letta_builtin", 15 | "letta_files_core", 16 | "external_langchain", 17 | "external_composio", 18 | "external_mcp", 19 | ] 20 | -------------------------------------------------------------------------------- /src/letta_client/types/groups/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from .message_list_params import MessageListParams as MessageListParams 6 | from .message_create_params import MessageCreateParams as MessageCreateParams 7 | from .message_stream_params import MessageStreamParams as MessageStreamParams 8 | from .message_update_params import MessageUpdateParams as MessageUpdateParams 9 | from .message_update_response import MessageUpdateResponse as MessageUpdateResponse 10 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/update_system_message_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from __future__ import annotations 4 | 5 | from typing_extensions import Literal, Required, TypedDict 6 | 7 | __all__ = ["UpdateSystemMessageParam"] 8 | 9 | 10 | class UpdateSystemMessageParam(TypedDict, total=False): 11 | content: Required[str] 12 | """ 13 | The message content sent by the system (can be a string or an array of 14 | multi-modal content parts) 15 | """ 16 | 17 | message_type: Literal["system_message"] 18 | -------------------------------------------------------------------------------- /src/letta_client/types/stop_reason_type.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing_extensions import Literal, TypeAlias 4 | 5 | __all__ = ["StopReasonType"] 6 | 7 | StopReasonType: TypeAlias = Literal[ 8 | "end_turn", 9 | "error", 10 | "llm_api_error", 11 | "invalid_llm_response", 12 | "invalid_tool_call", 13 | "max_steps", 14 | "max_tokens_exceeded", 15 | "no_tool_call", 16 | "tool_rule", 17 | "cancelled", 18 | "requires_approval", 19 | "context_window_overflow_in_system_prompt", 20 | ] 21 | -------------------------------------------------------------------------------- /src/letta_client/types/folders/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from __future__ import annotations 4 | 5 | from .file_list_params import FileListParams as FileListParams 6 | from .agent_list_params import AgentListParams as AgentListParams 7 | from .file_list_response import FileListResponse as FileListResponse 8 | from .file_upload_params import FileUploadParams as FileUploadParams 9 | from .agent_list_response import AgentListResponse as AgentListResponse 10 | from .file_upload_response import FileUploadResponse as FileUploadResponse 11 | -------------------------------------------------------------------------------- /src/letta_client/types/provider_type.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing_extensions import Literal, TypeAlias 4 | 5 | __all__ = ["ProviderType"] 6 | 7 | ProviderType: TypeAlias = Literal[ 8 | "anthropic", 9 | "azure", 10 | "bedrock", 11 | "cerebras", 12 | "deepseek", 13 | "google_ai", 14 | "google_vertex", 15 | "groq", 16 | "hugging-face", 17 | "letta", 18 | "lmstudio_openai", 19 | "mistral", 20 | "ollama", 21 | "openai", 22 | "together", 23 | "vllm", 24 | "xai", 25 | ] 26 | -------------------------------------------------------------------------------- /src/letta_client/types/model_list_params.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from __future__ import annotations 4 | 5 | from typing import List, Optional 6 | from typing_extensions import TypedDict 7 | 8 | from .provider_type import ProviderType 9 | from .provider_category import ProviderCategory 10 | 11 | __all__ = ["ModelListParams"] 12 | 13 | 14 | class ModelListParams(TypedDict, total=False): 15 | provider_category: Optional[List[ProviderCategory]] 16 | 17 | provider_name: Optional[str] 18 | 19 | provider_type: Optional[ProviderType] 20 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/text_content.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing import Optional 4 | from typing_extensions import Literal 5 | 6 | from ..._models import BaseModel 7 | 8 | __all__ = ["TextContent"] 9 | 10 | 11 | class TextContent(BaseModel): 12 | text: str 13 | """The text content of the message.""" 14 | 15 | signature: Optional[str] = None 16 | """Stores a unique identifier for any reasoning associated with this text content.""" 17 | 18 | type: Optional[Literal["text"]] = None 19 | """The type of the message.""" 20 | -------------------------------------------------------------------------------- /src/letta_client/types/json_schema_response_format.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing import Dict, Optional 4 | from typing_extensions import Literal 5 | 6 | from .._models import BaseModel 7 | 8 | __all__ = ["JsonSchemaResponseFormat"] 9 | 10 | 11 | class JsonSchemaResponseFormat(BaseModel): 12 | """Response format for JSON schema-based responses.""" 13 | 14 | json_schema: Dict[str, object] 15 | """The JSON schema of the response.""" 16 | 17 | type: Optional[Literal["json_schema"]] = None 18 | """The type of the response format.""" 19 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/redacted_reasoning_content.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing import Optional 4 | from typing_extensions import Literal 5 | 6 | from ..._models import BaseModel 7 | 8 | __all__ = ["RedactedReasoningContent"] 9 | 10 | 11 | class RedactedReasoningContent(BaseModel): 12 | """Sent via the Anthropic Messages API""" 13 | 14 | data: str 15 | """The redacted or filtered intermediate reasoning content.""" 16 | 17 | type: Optional[Literal["redacted_reasoning"]] = None 18 | """Indicates this is a redacted thinking step.""" 19 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/tool_return.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing import List, Optional 4 | from typing_extensions import Literal 5 | 6 | from ..._models import BaseModel 7 | 8 | __all__ = ["ToolReturn"] 9 | 10 | 11 | class ToolReturn(BaseModel): 12 | status: Literal["success", "error"] 13 | 14 | tool_call_id: str 15 | 16 | tool_return: str 17 | 18 | stderr: Optional[List[str]] = None 19 | 20 | stdout: Optional[List[str]] = None 21 | 22 | type: Optional[Literal["tool"]] = None 23 | """The message type to be created.""" 24 | -------------------------------------------------------------------------------- /src/letta_client/types/agent_export_file_params.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing_extensions import TypedDict 6 | 7 | __all__ = ["AgentExportFileParams"] 8 | 9 | 10 | class AgentExportFileParams(TypedDict, total=False): 11 | max_steps: int 12 | 13 | use_legacy_format: bool 14 | """ 15 | If True, exports using the legacy single-agent 'v1' format with inline 16 | tools/blocks. If False, exports using the new multi-entity 'v2' format, with 17 | separate agents, tools, blocks, files, etc. 18 | """ 19 | -------------------------------------------------------------------------------- /src/letta_client/types/mcp_server_list_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing import List, Union 4 | from typing_extensions import TypeAlias 5 | 6 | from .sse_mcp_server import SseMcpServer 7 | from .stdio_mcp_server import StdioMcpServer 8 | from .streamable_http_mcp_server import StreamableHTTPMcpServer 9 | 10 | __all__ = ["McpServerListResponse", "McpServerListResponseItem"] 11 | 12 | McpServerListResponseItem: TypeAlias = Union[StdioMcpServer, SseMcpServer, StreamableHTTPMcpServer] 13 | 14 | McpServerListResponse: TypeAlias = List[McpServerListResponseItem] 15 | -------------------------------------------------------------------------------- /src/letta_client/types/steps/feedback_create_params.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing import Optional 6 | from typing_extensions import Literal, TypedDict 7 | 8 | from ..._types import SequenceNotStr 9 | 10 | __all__ = ["FeedbackCreateParams"] 11 | 12 | 13 | class FeedbackCreateParams(TypedDict, total=False): 14 | feedback: Optional[Literal["positive", "negative"]] 15 | """Whether this feedback is positive or negative""" 16 | 17 | tags: Optional[SequenceNotStr[str]] 18 | """Feedback tags to add to the step""" 19 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/redacted_reasoning_content_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from __future__ import annotations 4 | 5 | from typing_extensions import Literal, Required, TypedDict 6 | 7 | __all__ = ["RedactedReasoningContentParam"] 8 | 9 | 10 | class RedactedReasoningContentParam(TypedDict, total=False): 11 | """Sent via the Anthropic Messages API""" 12 | 13 | data: Required[str] 14 | """The redacted or filtered intermediate reasoning content.""" 15 | 16 | type: Literal["redacted_reasoning"] 17 | """Indicates this is a redacted thinking step.""" 18 | -------------------------------------------------------------------------------- /src/letta_client/types/identity_property.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing import Dict, Union 4 | from typing_extensions import Literal 5 | 6 | from .._models import BaseModel 7 | 8 | __all__ = ["IdentityProperty"] 9 | 10 | 11 | class IdentityProperty(BaseModel): 12 | """A property of an identity""" 13 | 14 | key: str 15 | """The key of the property""" 16 | 17 | type: Literal["string", "number", "boolean", "json"] 18 | """The type of the property""" 19 | 20 | value: Union[str, float, bool, Dict[str, object]] 21 | """The value of the property""" 22 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/text_content_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from __future__ import annotations 4 | 5 | from typing import Optional 6 | from typing_extensions import Literal, Required, TypedDict 7 | 8 | __all__ = ["TextContentParam"] 9 | 10 | 11 | class TextContentParam(TypedDict, total=False): 12 | text: Required[str] 13 | """The text content of the message.""" 14 | 15 | signature: Optional[str] 16 | """Stores a unique identifier for any reasoning associated with this text content.""" 17 | 18 | type: Literal["text"] 19 | """The type of the message.""" 20 | -------------------------------------------------------------------------------- /src/letta_client/types/json_schema_response_format_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing import Dict 6 | from typing_extensions import Literal, Required, TypedDict 7 | 8 | __all__ = ["JsonSchemaResponseFormatParam"] 9 | 10 | 11 | class JsonSchemaResponseFormatParam(TypedDict, total=False): 12 | """Response format for JSON schema-based responses.""" 13 | 14 | json_schema: Required[Dict[str, object]] 15 | """The JSON schema of the response.""" 16 | 17 | type: Literal["json_schema"] 18 | """The type of the response format.""" 19 | -------------------------------------------------------------------------------- /src/letta_client/types/templates/agent_create_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing import List, Optional 4 | 5 | from ..._models import BaseModel 6 | 7 | __all__ = ["AgentCreateResponse"] 8 | 9 | 10 | class AgentCreateResponse(BaseModel): 11 | """Response containing created agent IDs and associated metadata""" 12 | 13 | agent_ids: List[str] 14 | """Array of created agent IDs""" 15 | 16 | deployment_id: str 17 | """The deployment ID for the created agents""" 18 | 19 | group_id: Optional[str] = None 20 | """Optional group ID if agents were created in a group""" 21 | -------------------------------------------------------------------------------- /src/letta_client/types/folders/file_upload_params.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing import Optional 6 | from typing_extensions import Literal, Required, TypedDict 7 | 8 | from ..._types import FileTypes 9 | 10 | __all__ = ["FileUploadParams"] 11 | 12 | 13 | class FileUploadParams(TypedDict, total=False): 14 | file: Required[FileTypes] 15 | 16 | duplicate_handling: Literal["skip", "error", "suffix", "replace"] 17 | """How to handle duplicate filenames""" 18 | 19 | name: Optional[str] 20 | """Optional custom name to override the uploaded file's name""" 21 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/letta_assistant_message_content_union.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing import Optional 4 | from typing_extensions import Literal 5 | 6 | from ..._models import BaseModel 7 | 8 | __all__ = ["LettaAssistantMessageContentUnion"] 9 | 10 | 11 | class LettaAssistantMessageContentUnion(BaseModel): 12 | text: str 13 | """The text content of the message.""" 14 | 15 | signature: Optional[str] = None 16 | """Stores a unique identifier for any reasoning associated with this text content.""" 17 | 18 | type: Optional[Literal["text"]] = None 19 | """The type of the message.""" 20 | -------------------------------------------------------------------------------- /scripts/bootstrap: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | cd "$(dirname "$0")/.." 6 | 7 | if [ -f "Brewfile" ] && [ "$(uname -s)" = "Darwin" ] && [ "$SKIP_BREW" != "1" ] && [ -t 0 ]; then 8 | brew bundle check >/dev/null 2>&1 || { 9 | echo -n "==> Install Homebrew dependencies? (y/N): " 10 | read -r response 11 | case "$response" in 12 | [yY][eE][sS]|[yY]) 13 | brew bundle 14 | ;; 15 | *) 16 | ;; 17 | esac 18 | echo 19 | } 20 | fi 21 | 22 | echo "==> Installing Python dependencies…" 23 | 24 | # experimental uv support makes installations significantly faster 25 | rye config --set-bool behavior.use-uv=true 26 | 27 | rye sync --all-features 28 | -------------------------------------------------------------------------------- /src/letta_client/types/archives/passage_create_params.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from __future__ import annotations 4 | 5 | from typing import Dict, Optional 6 | from typing_extensions import Required, TypedDict 7 | 8 | from ..._types import SequenceNotStr 9 | 10 | __all__ = ["PassageCreateParams"] 11 | 12 | 13 | class PassageCreateParams(TypedDict, total=False): 14 | text: Required[str] 15 | """The text content of the passage""" 16 | 17 | metadata: Optional[Dict[str, object]] 18 | """Optional metadata for the passage""" 19 | 20 | tags: Optional[SequenceNotStr[str]] 21 | """Optional tags for categorizing the passage""" 22 | -------------------------------------------------------------------------------- /.github/workflows/release-doctor.yml: -------------------------------------------------------------------------------- 1 | name: Release Doctor 2 | on: 3 | pull_request: 4 | branches: 5 | - main 6 | workflow_dispatch: 7 | 8 | jobs: 9 | release_doctor: 10 | name: release doctor 11 | runs-on: ubuntu-latest 12 | if: github.repository == 'letta-ai/letta-python' && (github.event_name == 'push' || github.event_name == 'workflow_dispatch' || startsWith(github.head_ref, 'release-please') || github.head_ref == 'next') 13 | 14 | steps: 15 | - uses: actions/checkout@v4 16 | 17 | - name: Check release environment 18 | run: | 19 | bash ./bin/check-release-environment 20 | env: 21 | PYPI_TOKEN: ${{ secrets.LETTA_PYPI_TOKEN || secrets.PYPI_TOKEN }} 22 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/approval_return.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing import Optional 4 | from typing_extensions import Literal 5 | 6 | from ..._models import BaseModel 7 | 8 | __all__ = ["ApprovalReturn"] 9 | 10 | 11 | class ApprovalReturn(BaseModel): 12 | approve: bool 13 | """Whether the tool has been approved""" 14 | 15 | tool_call_id: str 16 | """The ID of the tool call that corresponds to this approval""" 17 | 18 | reason: Optional[str] = None 19 | """An optional explanation for the provided approval status""" 20 | 21 | type: Optional[Literal["approval"]] = None 22 | """The message type to be created.""" 23 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/letta_assistant_message_content_union_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing import Optional 6 | from typing_extensions import Literal, Required, TypedDict 7 | 8 | __all__ = ["LettaAssistantMessageContentUnionParam"] 9 | 10 | 11 | class LettaAssistantMessageContentUnionParam(TypedDict, total=False): 12 | text: Required[str] 13 | """The text content of the message.""" 14 | 15 | signature: Optional[str] 16 | """Stores a unique identifier for any reasoning associated with this text content.""" 17 | 18 | type: Literal["text"] 19 | """The type of the message.""" 20 | -------------------------------------------------------------------------------- /src/letta_client/types/identity_property_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from __future__ import annotations 4 | 5 | from typing import Dict, Union 6 | from typing_extensions import Literal, Required, TypedDict 7 | 8 | __all__ = ["IdentityPropertyParam"] 9 | 10 | 11 | class IdentityPropertyParam(TypedDict, total=False): 12 | """A property of an identity""" 13 | 14 | key: Required[str] 15 | """The key of the property""" 16 | 17 | type: Required[Literal["string", "number", "boolean", "json"]] 18 | """The type of the property""" 19 | 20 | value: Required[Union[str, float, bool, Dict[str, object]]] 21 | """The value of the property""" 22 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/tool_return_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing import Optional 6 | from typing_extensions import Literal, Required, TypedDict 7 | 8 | from ..._types import SequenceNotStr 9 | 10 | __all__ = ["ToolReturnParam"] 11 | 12 | 13 | class ToolReturnParam(TypedDict, total=False): 14 | status: Required[Literal["success", "error"]] 15 | 16 | tool_call_id: Required[str] 17 | 18 | tool_return: Required[str] 19 | 20 | stderr: Optional[SequenceNotStr[str]] 21 | 22 | stdout: Optional[SequenceNotStr[str]] 23 | 24 | type: Literal["tool"] 25 | """The message type to be created.""" 26 | -------------------------------------------------------------------------------- /src/letta_client/types/archive_create_params.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from __future__ import annotations 4 | 5 | from typing import Optional 6 | from typing_extensions import Required, TypedDict 7 | 8 | from .embedding_config_param import EmbeddingConfigParam 9 | 10 | __all__ = ["ArchiveCreateParams"] 11 | 12 | 13 | class ArchiveCreateParams(TypedDict, total=False): 14 | name: Required[str] 15 | 16 | description: Optional[str] 17 | 18 | embedding: Optional[str] 19 | """Embedding model handle for the archive""" 20 | 21 | embedding_config: Optional[EmbeddingConfigParam] 22 | """Configuration for embedding model connection and processing parameters.""" 23 | -------------------------------------------------------------------------------- /src/letta_client/_utils/_resources_proxy.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import Any 4 | from typing_extensions import override 5 | 6 | from ._proxy import LazyProxy 7 | 8 | 9 | class ResourcesProxy(LazyProxy[Any]): 10 | """A proxy for the `letta_client.resources` module. 11 | 12 | This is used so that we can lazily import `letta_client.resources` only when 13 | needed *and* so that users can just import `letta_client` and reference `letta_client.resources` 14 | """ 15 | 16 | @override 17 | def __load__(self) -> Any: 18 | import importlib 19 | 20 | mod = importlib.import_module("letta_client.resources") 21 | return mod 22 | 23 | 24 | resources = ResourcesProxy().__as_proxied__() 25 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/omitted_reasoning_content.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing import Optional 4 | from typing_extensions import Literal 5 | 6 | from ..._models import BaseModel 7 | 8 | __all__ = ["OmittedReasoningContent"] 9 | 10 | 11 | class OmittedReasoningContent(BaseModel): 12 | """ 13 | A placeholder for reasoning content we know is present, but isn't returned by the provider (e.g. OpenAI GPT-5 on ChatCompletions) 14 | """ 15 | 16 | signature: Optional[str] = None 17 | """A unique identifier for this reasoning step.""" 18 | 19 | type: Optional[Literal["omitted_reasoning"]] = None 20 | """Indicates this is an omitted reasoning step.""" 21 | -------------------------------------------------------------------------------- /src/letta_client/types/continue_tool_rule.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing import Optional 4 | from typing_extensions import Literal 5 | 6 | from .._models import BaseModel 7 | 8 | __all__ = ["ContinueToolRule"] 9 | 10 | 11 | class ContinueToolRule(BaseModel): 12 | """ 13 | Represents a tool rule configuration where if this tool gets called, it must continue the agent loop. 14 | """ 15 | 16 | tool_name: str 17 | """The name of the tool. Must exist in the database for the user's organization.""" 18 | 19 | prompt_template: Optional[str] = None 20 | """Optional template string (ignored).""" 21 | 22 | type: Optional[Literal["continue_loop"]] = None 23 | -------------------------------------------------------------------------------- /src/letta_client/types/terminal_tool_rule.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing import Optional 4 | from typing_extensions import Literal 5 | 6 | from .._models import BaseModel 7 | 8 | __all__ = ["TerminalToolRule"] 9 | 10 | 11 | class TerminalToolRule(BaseModel): 12 | """ 13 | Represents a terminal tool rule configuration where if this tool gets called, it must end the agent loop. 14 | """ 15 | 16 | tool_name: str 17 | """The name of the tool. Must exist in the database for the user's organization.""" 18 | 19 | prompt_template: Optional[str] = None 20 | """Optional template string (ignored).""" 21 | 22 | type: Optional[Literal["exit_loop"]] = None 23 | -------------------------------------------------------------------------------- /src/letta_client/types/access_token_list_params.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing_extensions import Annotated, TypedDict 6 | 7 | from .._utils import PropertyInfo 8 | 9 | __all__ = ["AccessTokenListParams"] 10 | 11 | 12 | class AccessTokenListParams(TypedDict, total=False): 13 | agent_id: Annotated[str, PropertyInfo(alias="agentId")] 14 | """The agent ID to filter tokens by. 15 | 16 | If provided, only tokens for this agent will be returned. 17 | """ 18 | 19 | limit: float 20 | """The number of tokens to return per page. Defaults to 10.""" 21 | 22 | offset: float 23 | """The offset for pagination. Defaults to 0.""" 24 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/omitted_reasoning_content_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from __future__ import annotations 4 | 5 | from typing import Optional 6 | from typing_extensions import Literal, TypedDict 7 | 8 | __all__ = ["OmittedReasoningContentParam"] 9 | 10 | 11 | class OmittedReasoningContentParam(TypedDict, total=False): 12 | """ 13 | A placeholder for reasoning content we know is present, but isn't returned by the provider (e.g. OpenAI GPT-5 on ChatCompletions) 14 | """ 15 | 16 | signature: Optional[str] 17 | """A unique identifier for this reasoning step.""" 18 | 19 | type: Literal["omitted_reasoning"] 20 | """Indicates this is an omitted reasoning step.""" 21 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/tool_return_content.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing import Optional 4 | from typing_extensions import Literal 5 | 6 | from ..._models import BaseModel 7 | 8 | __all__ = ["ToolReturnContent"] 9 | 10 | 11 | class ToolReturnContent(BaseModel): 12 | content: str 13 | """The content returned by the tool execution.""" 14 | 15 | is_error: bool 16 | """Indicates whether the tool execution resulted in an error.""" 17 | 18 | tool_call_id: str 19 | """References the ID of the ToolCallContent that initiated this tool call.""" 20 | 21 | type: Optional[Literal["tool_return"]] = None 22 | """Indicates this content represents a tool return event.""" 23 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/update_user_message_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from __future__ import annotations 4 | 5 | from typing import Union, Iterable 6 | from typing_extensions import Literal, Required, TypedDict 7 | 8 | from .letta_user_message_content_union_param import LettaUserMessageContentUnionParam 9 | 10 | __all__ = ["UpdateUserMessageParam"] 11 | 12 | 13 | class UpdateUserMessageParam(TypedDict, total=False): 14 | content: Required[Union[Iterable[LettaUserMessageContentUnionParam], str]] 15 | """ 16 | The message content sent by the user (can be a string or an array of multi-modal 17 | content parts) 18 | """ 19 | 20 | message_type: Literal["user_message"] 21 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/approval_return_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing import Optional 6 | from typing_extensions import Literal, Required, TypedDict 7 | 8 | __all__ = ["ApprovalReturnParam"] 9 | 10 | 11 | class ApprovalReturnParam(TypedDict, total=False): 12 | approve: Required[bool] 13 | """Whether the tool has been approved""" 14 | 15 | tool_call_id: Required[str] 16 | """The ID of the tool call that corresponds to this approval""" 17 | 18 | reason: Optional[str] 19 | """An optional explanation for the provided approval status""" 20 | 21 | type: Literal["approval"] 22 | """The message type to be created.""" 23 | -------------------------------------------------------------------------------- /src/letta_client/types/continue_tool_rule_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from __future__ import annotations 4 | 5 | from typing import Optional 6 | from typing_extensions import Literal, Required, TypedDict 7 | 8 | __all__ = ["ContinueToolRuleParam"] 9 | 10 | 11 | class ContinueToolRuleParam(TypedDict, total=False): 12 | """ 13 | Represents a tool rule configuration where if this tool gets called, it must continue the agent loop. 14 | """ 15 | 16 | tool_name: Required[str] 17 | """The name of the tool. Must exist in the database for the user's organization.""" 18 | 19 | prompt_template: Optional[str] 20 | """Optional template string (ignored).""" 21 | 22 | type: Literal["continue_loop"] 23 | -------------------------------------------------------------------------------- /src/letta_client/types/required_before_exit_tool_rule.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing import Optional 4 | from typing_extensions import Literal 5 | 6 | from .._models import BaseModel 7 | 8 | __all__ = ["RequiredBeforeExitToolRule"] 9 | 10 | 11 | class RequiredBeforeExitToolRule(BaseModel): 12 | """ 13 | Represents a tool rule configuration where this tool must be called before the agent loop can exit. 14 | """ 15 | 16 | tool_name: str 17 | """The name of the tool. Must exist in the database for the user's organization.""" 18 | 19 | prompt_template: Optional[str] = None 20 | """Optional template string (ignored).""" 21 | 22 | type: Optional[Literal["required_before_exit"]] = None 23 | -------------------------------------------------------------------------------- /src/letta_client/types/template_create_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing import Optional 4 | 5 | from .._models import BaseModel 6 | 7 | __all__ = ["TemplateCreateResponse"] 8 | 9 | 10 | class TemplateCreateResponse(BaseModel): 11 | id: str 12 | 13 | latest_version: str 14 | """The latest version of the template""" 15 | 16 | name: str 17 | """The exact name of the template""" 18 | 19 | project_id: str 20 | 21 | project_slug: str 22 | 23 | template_deployment_slug: str 24 | """The full name of the template, including version and project slug""" 25 | 26 | updated_at: str 27 | """When the template was last updated""" 28 | 29 | description: Optional[str] = None 30 | -------------------------------------------------------------------------------- /src/letta_client/types/terminal_tool_rule_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing import Optional 6 | from typing_extensions import Literal, Required, TypedDict 7 | 8 | __all__ = ["TerminalToolRuleParam"] 9 | 10 | 11 | class TerminalToolRuleParam(TypedDict, total=False): 12 | """ 13 | Represents a terminal tool rule configuration where if this tool gets called, it must end the agent loop. 14 | """ 15 | 16 | tool_name: Required[str] 17 | """The name of the tool. Must exist in the database for the user's organization.""" 18 | 19 | prompt_template: Optional[str] 20 | """Optional template string (ignored).""" 21 | 22 | type: Literal["exit_loop"] 23 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/update_assistant_message_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from __future__ import annotations 4 | 5 | from typing import Union, Iterable 6 | from typing_extensions import Literal, Required, TypedDict 7 | 8 | from .letta_assistant_message_content_union_param import LettaAssistantMessageContentUnionParam 9 | 10 | __all__ = ["UpdateAssistantMessageParam"] 11 | 12 | 13 | class UpdateAssistantMessageParam(TypedDict, total=False): 14 | content: Required[Union[Iterable[LettaAssistantMessageContentUnionParam], str]] 15 | """ 16 | The message content sent by the assistant (can be a string or an array of 17 | content parts) 18 | """ 19 | 20 | message_type: Literal["assistant_message"] 21 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/tool_return_content_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing_extensions import Literal, Required, TypedDict 6 | 7 | __all__ = ["ToolReturnContentParam"] 8 | 9 | 10 | class ToolReturnContentParam(TypedDict, total=False): 11 | content: Required[str] 12 | """The content returned by the tool execution.""" 13 | 14 | is_error: Required[bool] 15 | """Indicates whether the tool execution resulted in an error.""" 16 | 17 | tool_call_id: Required[str] 18 | """References the ID of the ToolCallContent that initiated this tool call.""" 19 | 20 | type: Literal["tool_return"] 21 | """Indicates this content represents a tool return event.""" 22 | -------------------------------------------------------------------------------- /src/letta_client/types/tool_search_result.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing import Optional 4 | 5 | from .tool import Tool 6 | from .._models import BaseModel 7 | 8 | __all__ = ["ToolSearchResult"] 9 | 10 | 11 | class ToolSearchResult(BaseModel): 12 | """Result from a tool search operation.""" 13 | 14 | combined_score: float 15 | """Combined relevance score (RRF for hybrid mode).""" 16 | 17 | tool: Tool 18 | """The matched tool.""" 19 | 20 | embedded_text: Optional[str] = None 21 | """The embedded text content used for matching.""" 22 | 23 | fts_rank: Optional[int] = None 24 | """Full-text search rank position.""" 25 | 26 | vector_rank: Optional[int] = None 27 | """Vector search rank position.""" 28 | -------------------------------------------------------------------------------- /src/letta_client/types/agents/passage_search_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing import List, Optional 4 | 5 | from ..._models import BaseModel 6 | 7 | __all__ = ["PassageSearchResponse", "Result"] 8 | 9 | 10 | class Result(BaseModel): 11 | content: str 12 | """Text content of the archival memory passage""" 13 | 14 | timestamp: str 15 | """Timestamp of when the memory was created, formatted in agent's timezone""" 16 | 17 | tags: Optional[List[str]] = None 18 | """List of tags associated with this memory""" 19 | 20 | 21 | class PassageSearchResponse(BaseModel): 22 | count: int 23 | """Total number of results returned""" 24 | 25 | results: List[Result] 26 | """List of search results matching the query""" 27 | -------------------------------------------------------------------------------- /src/letta_client/types/parent_tool_rule.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from typing import List, Optional 4 | from typing_extensions import Literal 5 | 6 | from .._models import BaseModel 7 | 8 | __all__ = ["ParentToolRule"] 9 | 10 | 11 | class ParentToolRule(BaseModel): 12 | """ 13 | A ToolRule that only allows a child tool to be called if the parent has been called. 14 | """ 15 | 16 | children: List[str] 17 | """The children tools that can be invoked.""" 18 | 19 | tool_name: str 20 | """The name of the tool. Must exist in the database for the user's organization.""" 21 | 22 | prompt_template: Optional[str] = None 23 | """Optional template string (ignored).""" 24 | 25 | type: Optional[Literal["parent_last_tool"]] = None 26 | -------------------------------------------------------------------------------- /src/letta_client/types/passage_search_response.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing import Dict, List, Optional 4 | from typing_extensions import TypeAlias 5 | 6 | from .passage import Passage 7 | from .._models import BaseModel 8 | 9 | __all__ = ["PassageSearchResponse", "PassageSearchResponseItem"] 10 | 11 | 12 | class PassageSearchResponseItem(BaseModel): 13 | """Result from a passage search operation with scoring details.""" 14 | 15 | passage: Passage 16 | """The passage object""" 17 | 18 | score: float 19 | """Relevance score""" 20 | 21 | metadata: Optional[Dict[str, object]] = None 22 | """Additional metadata about the search result""" 23 | 24 | 25 | PassageSearchResponse: TypeAlias = List[PassageSearchResponseItem] 26 | -------------------------------------------------------------------------------- /src/letta_client/types/required_before_exit_tool_rule_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
class RequiredBeforeExitToolRuleParam(TypedDict, total=False):
    """
    Represents a tool rule configuration where this tool must be called before the agent loop can exit.
    """

    tool_name: Required[str]
    """The name of the tool. Must exist in the database for the user's organization."""

    prompt_template: Optional[str]
    """Optional template string (ignored)."""

    # Discriminator literal; key may be omitted because the class is total=False.
    type: Literal["required_before_exit"]
class CreateStdioMcpServerParam(TypedDict, total=False):
    """Create a new Stdio MCP server"""

    # args and command are the only keys callers must supply; the rest may be
    # omitted entirely (total=False).
    args: Required[SequenceNotStr[str]]
    """The arguments to pass to the command"""

    command: Required[str]
    """The command to run (MCP 'local' client will run this command)"""

    env: Optional[Dict[str, str]]
    """Environment variables to set"""

    # Discriminator selecting this variant of the MCP server config union.
    mcp_server_type: Literal["stdio"]
class ReasoningContent(BaseModel):
    """Sent via the Anthropic Messages API"""

    is_native: bool
    """
    Whether the reasoning content was generated by a reasoner model that processed
    this step.
    """

    reasoning: str
    """The intermediate reasoning or thought process content."""

    signature: Optional[str] = None
    """A unique identifier for this reasoning step."""

    # Discriminator literal identifying this message-content variant.
    type: Optional[Literal["reasoning"]] = None
    """Indicates this is a reasoning/intermediate step."""
class TemplateUpdateParams(TypedDict, total=False):
    """Request parameters for updating the current template version from an agent file."""

    agent_file_json: Required[Dict[str, Optional[object]]]
    """The agent file to update the current template version from"""

    save_existing_changes: bool
    """
    If true, Letta will automatically save any changes as a version before updating
    the template
    """

    update_existing_tools: bool
    """
    If true, update existing custom tools source_code and json_schema (source_type
    cannot be changed)
    """
class PassageCreateParams(TypedDict, total=False):
    """Request parameters for writing a new archival memory passage."""

    text: Required[str]
    """Text to write to archival memory."""

    # Serialized as an ISO 8601 string on the wire (PropertyInfo format).
    created_at: Annotated[Union[str, datetime, None], PropertyInfo(format="iso8601")]
    """Optional timestamp for the memory (defaults to current UTC time)."""

    tags: Optional[SequenceNotStr[str]]
    """Optional list of tags to attach to the memory."""
class ToolUpdateApprovalParams(TypedDict, total=False):
    """Request parameters for updating a tool's approval requirement on an agent."""

    agent_id: Required[str]
    """The ID of the agent in the format 'agent-'"""

    # NOTE(review): both fields below serialize to the same wire name
    # "requires_approval" — presumably one is sent in the request body and the
    # other in the query string (per the body_/query_ prefixes). Confirm
    # against the endpoint definition; a collision here would be a spec bug.
    body_requires_approval: Required[Annotated[bool, PropertyInfo(alias="requires_approval")]]
    """Whether the tool requires approval before execution"""

    query_requires_approval: Annotated[Optional[bool], PropertyInfo(alias="requires_approval")]
    """Whether the tool requires approval before execution"""
class StdioMcpServer(BaseModel):
    """A Stdio MCP server"""

    args: List[str]
    """The arguments to pass to the command"""

    command: str
    """The command to run (MCP 'local' client will run this command)"""

    server_name: str
    """The name of the MCP server"""

    id: Optional[str] = None
    """The human-friendly ID of the Mcp_server"""

    env: Optional[Dict[str, str]] = None
    """Environment variables to set"""

    # Discriminator identifying this variant among MCP server response types.
    mcp_server_type: Optional[Literal["stdio"]] = None
class ToolSearchParams(TypedDict, total=False):
    """Query parameters for searching tools; all keys are optional."""

    limit: int
    """Maximum number of results to return."""

    query: Optional[str]
    """Text query for semantic search."""

    search_mode: Literal["vector", "fts", "hybrid"]
    """Search mode: vector, fts, or hybrid."""

    tags: Optional[SequenceNotStr[str]]
    """Filter by tags (match any)."""

    tool_types: Optional[SequenceNotStr[str]]
    """Filter by tool types (e.g., 'custom', 'letta_core')."""
class CreateSseMcpServerParam(TypedDict, total=False):
    """Create a new SSE MCP server"""

    server_url: Required[str]
    """The URL of the server"""

    auth_header: Optional[str]
    """The name of the authentication header (e.g., 'Authorization')"""

    auth_token: Optional[str]
    """The authentication token or API key value"""

    custom_headers: Optional[Dict[str, str]]
    """Custom HTTP headers to include with requests"""

    # Discriminator selecting this variant of the MCP server config union.
    mcp_server_type: Literal["sse"]
class UpdateStdioMcpServerParam(TypedDict, total=False):
    """Update schema for Stdio MCP server - all fields optional"""

    # NOTE(review): despite the docstring, the args/command KEYS are Required;
    # only their VALUES are nullable — callers must pass them, possibly as None.
    args: Required[Optional[SequenceNotStr[str]]]
    """The arguments to pass to the command"""

    command: Required[Optional[str]]
    """The command to run (MCP 'local' client will run this command)"""

    env: Optional[Dict[str, str]]
    """Environment variables to set"""

    mcp_server_type: Literal["stdio"]
class PassageListParams(TypedDict, total=False):
    """Query parameters for listing archival memory passages (cursor pagination)."""

    after: Optional[str]
    """Unique ID of the memory to start the query range at."""

    ascending: Optional[bool]
    """
    Whether to sort passages oldest to newest (True, default) or newest to oldest
    (False)
    """

    before: Optional[str]
    """Unique ID of the memory to end the query range at."""

    limit: Optional[int]
    """How many results to include in the response."""

    search: Optional[str]
    """Search passages by text"""
class MessageStreamParams(TypedDict, total=False):
    """Parameters controlling how a run's messages are streamed back."""

    batch_size: Optional[int]
    """Number of entries to read per batch."""

    include_pings: Optional[bool]
    """
    Whether to include periodic keepalive ping messages in the stream to prevent
    connection timeouts.
    """

    poll_interval: Optional[float]
    """Seconds to wait between polls when no new data."""

    starting_after: int
    """Sequence id to use as a cursor for pagination.

    Response will start streaming after this chunk sequence id
    """
class ReasoningContentParam(TypedDict, total=False):
    """Sent via the Anthropic Messages API"""

    is_native: Required[bool]
    """
    Whether the reasoning content was generated by a reasoner model that processed
    this step.
    """

    reasoning: Required[str]
    """The intermediate reasoning or thought process content."""

    signature: Optional[str]
    """A unique identifier for this reasoning step."""

    # Discriminator literal identifying this message-content variant.
    type: Literal["reasoning"]
    """Indicates this is a reasoning/intermediate step."""
class FolderUpdateParams(TypedDict, total=False):
    """Request parameters for updating a folder/source; all keys are optional."""

    description: Optional[str]
    """The description of the source."""

    embedding_config: Optional[EmbeddingConfigParam]
    """Configuration for embedding model connection and processing parameters."""

    instructions: Optional[str]
    """Instructions for how to use the source."""

    metadata: Optional[Dict[str, object]]
    """Metadata associated with the source."""

    name: Optional[str]
    """The name of the source."""
class UpdateSseMcpServerParam(TypedDict, total=False):
    """Update schema for SSE MCP server - all fields optional"""

    # server_url's KEY is Required but its VALUE is nullable: callers must pass
    # it, possibly as None.
    server_url: Required[Optional[str]]
    """The URL of the server"""

    auth_header: Optional[str]
    """The name of the authentication header (e.g., 'Authorization')"""

    auth_token: Optional[str]
    """The authentication token or API key value"""

    custom_headers: Optional[Dict[str, str]]
    """Custom HTTP headers to include with requests"""

    mcp_server_type: Literal["sse"]
class ToolCallContent(BaseModel):
    """Message content representing a single tool call event."""

    id: str
    """A unique identifier for this specific tool call instance."""

    input: Dict[str, object]
    """
    The parameters being passed to the tool, structured as a dictionary of parameter
    names to values.
    """

    name: str
    """The name of the tool being called."""

    signature: Optional[str] = None
    """Stores a unique identifier for any reasoning associated with this tool call."""

    # Discriminator literal identifying this message-content variant.
    type: Optional[Literal["tool_call"]] = None
    """Indicates this content represents a tool call event."""
class CreateStreamableHTTPMcpServerParam(TypedDict, total=False):
    """Create a new Streamable HTTP MCP server"""

    server_url: Required[str]
    """The URL of the server"""

    auth_header: Optional[str]
    """The name of the authentication header (e.g., 'Authorization')"""

    auth_token: Optional[str]
    """The authentication token or API key value"""

    custom_headers: Optional[Dict[str, str]]
    """Custom HTTP headers to include with requests"""

    # Discriminator selecting this variant of the MCP server config union.
    mcp_server_type: Literal["streamable_http"]
class McpServerCreateParams(TypedDict, total=False):
    """Request parameters for registering a new MCP server."""

    # Config is a union of the stdio/SSE/streamable-HTTP config TypedDicts,
    # declared as a TypeAlias after this class.
    config: Required[Config]
    """The MCP server configuration (Stdio, SSE, or Streamable HTTP)"""

    server_name: Required[str]
    """The name of the MCP server"""
class McpServerUpdateParams(TypedDict, total=False):
    """Request parameters for updating an existing MCP server."""

    # Config is a union of the stdio/SSE/streamable-HTTP update TypedDicts,
    # declared as a TypeAlias after this class.
    config: Required[Config]
    """The MCP server configuration updates (Stdio, SSE, or Streamable HTTP)"""

    server_name: Optional[str]
    """The name of the MCP server"""
class CompletionTokensDetails(BaseModel):
    """Breakdown of completion (output) token usage."""

    reasoning_tokens: Optional[int] = None


class PromptTokensDetails(BaseModel):
    """Breakdown of prompt (input) token usage, including prompt-cache accounting."""

    cache_creation_tokens: Optional[int] = None

    cache_read_tokens: Optional[int] = None

    cached_tokens: Optional[int] = None


class UsageRetrieveResponse(BaseModel):
    """Token-usage accounting for a run; every count may be absent (None)."""

    completion_tokens: Optional[int] = None

    completion_tokens_details: Optional[CompletionTokensDetails] = None

    prompt_tokens: Optional[int] = None

    prompt_tokens_details: Optional[PromptTokensDetails] = None

    total_tokens: Optional[int] = None
class MessageListParams(TypedDict, total=False):
    """Query parameters for listing messages (cursor pagination)."""

    after: Optional[str]
    """Message ID cursor for pagination.

    Returns messages that come after this message ID in the specified sort order
    """

    before: Optional[str]
    """Message ID cursor for pagination.

    Returns messages that come before this message ID in the specified sort order
    """

    limit: Optional[int]
    """Maximum number of messages to return"""

    order: Literal["asc", "desc"]
    """Sort order for messages by creation time.

    'asc' for oldest first, 'desc' for newest first
    """
class UpdateStreamableHTTPMcpServerParam(TypedDict, total=False):
    """Update schema for Streamable HTTP MCP server - all fields optional"""

    # server_url's KEY is Required but its VALUE is nullable: callers must pass
    # it, possibly as None.
    server_url: Required[Optional[str]]
    """The URL of the server"""

    auth_header: Optional[str]
    """The name of the authentication header (e.g., 'Authorization')"""

    auth_token: Optional[str]
    """The authentication token or API key value"""

    custom_headers: Optional[Dict[str, str]]
    """Custom HTTP headers to include with requests"""

    mcp_server_type: Literal["streamable_http"]
19 | """ 20 | 21 | name: Required[str] 22 | """The name of the tool being called.""" 23 | 24 | signature: Optional[str] 25 | """Stores a unique identifier for any reasoning associated with this tool call.""" 26 | 27 | type: Literal["tool_call"] 28 | """Indicates this content represents a tool call event.""" 29 | -------------------------------------------------------------------------------- /src/letta_client/types/sse_mcp_server.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from typing import Dict, Optional 4 | from typing_extensions import Literal 5 | 6 | from .._models import BaseModel 7 | 8 | __all__ = ["SseMcpServer"] 9 | 10 | 11 | class SseMcpServer(BaseModel): 12 | """An SSE MCP server""" 13 | 14 | server_name: str 15 | """The name of the MCP server""" 16 | 17 | server_url: str 18 | """The URL of the server""" 19 | 20 | id: Optional[str] = None 21 | """The human-friendly ID of the Mcp_server""" 22 | 23 | auth_header: Optional[str] = None 24 | """The name of the authentication header (e.g., 'Authorization')""" 25 | 26 | auth_token: Optional[str] = None 27 | """The authentication token or API key value""" 28 | 29 | custom_headers: Optional[Dict[str, str]] = None 30 | """Custom HTTP headers to include with requests""" 31 | 32 | mcp_server_type: Optional[Literal["sse"]] = None 33 | -------------------------------------------------------------------------------- /src/letta_client/types/max_count_per_step_tool_rule_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
class MaxCountPerStepToolRuleParam(TypedDict, total=False):
    """
    Represents a tool rule configuration which constrains the total number of times this tool can be invoked in a single step.
    """

    max_count_limit: Required[int]
    """
    The max limit for the total number of times this tool can be invoked in a single
    step.
    """

    tool_name: Required[str]
    """The name of the tool. Must exist in the database for the user's organization."""

    prompt_template: Optional[str]
    """Optional template string (ignored)."""

    # Constant tag identifying this rule variant among the tool-rule param types.
    type: Literal["max_count_per_step"]
27 | """ 28 | -------------------------------------------------------------------------------- /.github/workflows/publish-pypi.yml: -------------------------------------------------------------------------------- 1 | # This workflow is triggered when a GitHub release is created. 2 | # It can also be run manually to re-publish to PyPI in case it failed for some reason. 3 | # You can run this workflow by navigating to https://www.github.com/letta-ai/letta-python/actions/workflows/publish-pypi.yml 4 | name: Publish PyPI 5 | on: 6 | workflow_dispatch: 7 | 8 | release: 9 | types: [published] 10 | 11 | jobs: 12 | publish: 13 | name: publish 14 | runs-on: ubuntu-latest 15 | 16 | steps: 17 | - uses: actions/checkout@v4 18 | 19 | - name: Install Rye 20 | run: | 21 | curl -sSf https://rye.astral.sh/get | bash 22 | echo "$HOME/.rye/shims" >> $GITHUB_PATH 23 | env: 24 | RYE_VERSION: '0.44.0' 25 | RYE_INSTALL_OPTION: '--yes' 26 | 27 | - name: Publish to PyPI 28 | run: | 29 | bash ./bin/publish-pypi 30 | env: 31 | PYPI_TOKEN: ${{ secrets.LETTA_PYPI_TOKEN || secrets.PYPI_TOKEN }} 32 | -------------------------------------------------------------------------------- /src/letta_client/types/message_search_params.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
class MessageSearchParams(TypedDict, total=False):
    """Query parameters for searching messages (full-text, vector, or hybrid modes)."""

    query: Required[str]
    """Text query for full-text search"""

    # Accepts a str or datetime; PropertyInfo(format="iso8601") serializes it as ISO 8601.
    end_date: Annotated[Union[str, datetime, None], PropertyInfo(format="iso8601")]
    """Filter messages created on or before this date"""

    limit: int
    """Maximum number of results to return"""

    search_mode: Literal["vector", "fts", "hybrid"]
    """Search mode to use"""

    # Serialized the same way as end_date (ISO 8601).
    start_date: Annotated[Union[str, datetime, None], PropertyInfo(format="iso8601")]
    """Filter messages created after this date"""
class AccessTokenCreateParams(TypedDict, total=False):
    """Request body for creating an access token scoped by per-agent policies."""

    hostname: Required[str]
    """The hostname of the client side application.

    Please specify the full URL including the protocol (http or https).
    """

    # Each entry grants a set of access rights on a single agent (see Policy below).
    policy: Required[Iterable[Policy]]

    expires_at: str
    """The expiration date of the token.

    If not provided, the token will expire in 5 minutes
    """


class Policy(TypedDict, total=False):
    """One policy entry: grants the listed access rights on the agent with this ID."""

    id: Required[str]

    access: Required[List[Literal["read_messages", "write_messages", "read_agent", "write_agent"]]]

    # "agent" is the only policy target type declared in this schema.
    type: Required[Literal["agent"]]
class TokenPolicyData(BaseModel):
    """A single policy entry attached to a token."""

    id: str

    access: List[Literal["read_messages", "write_messages", "read_agent", "write_agent"]]

    type: Literal["agent"]


class TokenPolicy(BaseModel):
    """Versioned wrapper around a token's policy entries."""

    data: List[TokenPolicyData]

    version: Literal["1"]


class Token(BaseModel):
    """An issued access token together with its scope and expiry."""

    token: str

    # The API returns camelCase ("expiresAt"); the alias maps it to snake_case here.
    expires_at: str = FieldInfo(alias="expiresAt")

    hostname: str

    policy: TokenPolicy


class AccessTokenListResponse(BaseModel):
    """Paginated list of access tokens."""

    # Aliased from the API's camelCase "hasNextPage".
    has_next_page: bool = FieldInfo(alias="hasNextPage")

    tokens: List[Token]
class EventMessage(BaseModel):
    """A message for notifying the developer that an event that has occurred (e.g.

    a compaction). Events are NOT part of the context window.
    """

    id: str  # message identifier

    date: datetime  # timestamp of the event; NOTE(review): timezone handling not visible here — confirm

    event_data: Dict[str, object]  # arbitrary event payload

    event_type: Literal["compaction"]  # the only event type declared in this schema

    is_err: Optional[bool] = None  # presumably flags error events — verify against server behavior

    message_type: Optional[Literal["event"]] = None  # message-kind discriminator

    name: Optional[str] = None

    otid: Optional[str] = None

    run_id: Optional[str] = None

    sender_id: Optional[str] = None

    seq_id: Optional[int] = None  # assumes ordering within a run/stream — TODO confirm scope

    step_id: Optional[str] = None
6 | 7 | if [[ -n "$1" && "$1" != '--'* ]]; then 8 | URL="$1" 9 | shift 10 | else 11 | URL="$(grep 'openapi_spec_url' .stats.yml | cut -d' ' -f2)" 12 | fi 13 | 14 | # Check if the URL is empty 15 | if [ -z "$URL" ]; then 16 | echo "Error: No OpenAPI spec path/url provided or found in .stats.yml" 17 | exit 1 18 | fi 19 | 20 | echo "==> Starting mock server with URL ${URL}" 21 | 22 | # Run prism mock on the given spec 23 | if [ "$1" == "--daemon" ]; then 24 | npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock "$URL" &> .prism.log & 25 | 26 | # Wait for server to come online 27 | echo -n "Waiting for server" 28 | while ! grep -q "✖ fatal\|Prism is listening" ".prism.log" ; do 29 | echo -n "." 30 | sleep 0.1 31 | done 32 | 33 | if grep -q "✖ fatal" ".prism.log"; then 34 | cat .prism.log 35 | exit 1 36 | fi 37 | 38 | echo 39 | else 40 | npm exec --package=@stainless-api/prism-cli@5.15.0 -- prism mock "$URL" 41 | fi 42 | -------------------------------------------------------------------------------- /src/letta_client/types/identities/block_list_params.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing import Optional 6 | from typing_extensions import Literal, TypedDict 7 | 8 | __all__ = ["BlockListParams"] 9 | 10 | 11 | class BlockListParams(TypedDict, total=False): 12 | after: Optional[str] 13 | """Block ID cursor for pagination. 14 | 15 | Returns blocks that come after this block ID in the specified sort order 16 | """ 17 | 18 | before: Optional[str] 19 | """Block ID cursor for pagination. 20 | 21 | Returns blocks that come before this block ID in the specified sort order 22 | """ 23 | 24 | limit: Optional[int] 25 | """Maximum number of blocks to return""" 26 | 27 | order: Literal["asc", "desc"] 28 | """Sort order for blocks by creation time. 
class FolderListParams(TypedDict, total=False):
    """Query parameters for listing an agent's folders; every key is optional."""

    # NOTE(review): the field docs below say "source" while the endpoint lists
    # folders — folders appear to be backed by sources; confirm terminology.
    after: Optional[str]
    """Source ID cursor for pagination.

    Returns sources that come after this source ID in the specified sort order
    """

    before: Optional[str]
    """Source ID cursor for pagination.

    Returns sources that come before this source ID in the specified sort order
    """

    limit: Optional[int]
    """Maximum number of sources to return"""

    order: Literal["asc", "desc"]
    """Sort order for sources by creation time.

    'asc' for oldest first, 'desc' for newest first
    """

    order_by: Literal["created_at"]
    """Field to sort by"""
class StreamableHTTPMcpServer(BaseModel):
    """A Streamable HTTP MCP server"""

    server_name: str
    """The name of the MCP server"""

    server_url: str
    """The URL of the server"""

    id: Optional[str] = None
    """The human-friendly ID of the Mcp_server"""

    auth_header: Optional[str] = None
    """The name of the authentication header (e.g., 'Authorization')"""

    auth_token: Optional[str] = None
    """The authentication token or API key value"""

    custom_headers: Optional[Dict[str, str]] = None
    """Custom HTTP headers to include with requests"""

    # Constant tag distinguishing Streamable HTTP servers from other MCP server types.
    mcp_server_type: Optional[Literal["streamable_http"]] = None
class MessageListParams(TypedDict, total=False):
    """Query parameters for listing a step's messages; every key is optional."""

    after: Optional[str]
    """Message ID cursor for pagination.

    Returns messages that come after this message ID in the specified sort order
    """

    before: Optional[str]
    """Message ID cursor for pagination.

    Returns messages that come before this message ID in the specified sort order
    """

    limit: Optional[int]
    """Maximum number of messages to return"""

    order: Literal["asc", "desc"]
    """Sort order for messages by creation time.

    'asc' for oldest first, 'desc' for newest first
    """

    order_by: Literal["created_at"]
    """Field to sort by"""
14 | """ 15 | 16 | child_output_mapping: Dict[str, str] 17 | """The output case to check for mapping""" 18 | 19 | tool_name: str 20 | """The name of the tool. Must exist in the database for the user's organization.""" 21 | 22 | default_child: Optional[str] = None 23 | """The default child tool to be called. If None, any tool can be called.""" 24 | 25 | prompt_template: Optional[str] = None 26 | """Optional template string (ignored).""" 27 | 28 | require_output_mapping: Optional[bool] = None 29 | """Whether to throw an error when output doesn't match any case""" 30 | 31 | type: Optional[Literal["conditional"]] = None 32 | -------------------------------------------------------------------------------- /src/letta_client/types/letta_message_content_union_param.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 2 | 3 | from __future__ import annotations 4 | 5 | from typing import Union 6 | from typing_extensions import TypeAlias 7 | 8 | from .agents.text_content_param import TextContentParam 9 | from .agents.image_content_param import ImageContentParam 10 | from .agents.reasoning_content_param import ReasoningContentParam 11 | from .agents.tool_call_content_param import ToolCallContentParam 12 | from .agents.tool_return_content_param import ToolReturnContentParam 13 | from .agents.omitted_reasoning_content_param import OmittedReasoningContentParam 14 | from .agents.redacted_reasoning_content_param import RedactedReasoningContentParam 15 | 16 | __all__ = ["LettaMessageContentUnionParam"] 17 | 18 | LettaMessageContentUnionParam: TypeAlias = Union[ 19 | TextContentParam, 20 | ImageContentParam, 21 | ToolCallContentParam, 22 | ToolReturnContentParam, 23 | ReasoningContentParam, 24 | RedactedReasoningContentParam, 25 | OmittedReasoningContentParam, 26 | ] 27 | -------------------------------------------------------------------------------- 
class FolderListParams(TypedDict, total=False):
    """Query parameters for listing folders; every key is optional (total=False)."""

    after: Optional[str]
    """Folder ID cursor for pagination.

    Returns folders that come after this folder ID in the specified sort order
    """

    before: Optional[str]
    """Folder ID cursor for pagination.

    Returns folders that come before this folder ID in the specified sort order
    """

    limit: Optional[int]
    """Maximum number of folders to return"""

    name: Optional[str]
    """Folder name to filter by"""

    order: Literal["asc", "desc"]
    """Sort order for folders by creation time.

    'asc' for oldest first, 'desc' for newest first
    """

    order_by: Literal["created_at"]
    """Field to sort by"""
class Identity(BaseModel):
    """An identity: an externally-keyed entity linked to agents and blocks."""

    id: str
    """The human-friendly ID of the Identity"""

    agent_ids: List[str]
    """The IDs of the agents associated with the identity."""

    block_ids: List[str]
    """The IDs of the blocks associated with the identity."""

    identifier_key: str
    """External, user-generated identifier key of the identity."""

    identity_type: IdentityType
    """The type of the identity."""

    name: str
    """The name of the identity."""

    project_id: Optional[str] = None
    """The project id of the identity, if applicable."""

    properties: Optional[List[IdentityProperty]] = None
    """List of properties associated with the identity"""
class GroupListParams(TypedDict, total=False):
    """Query parameters for listing an agent's groups; every key is optional."""

    after: Optional[str]
    """Group ID cursor for pagination.

    Returns groups that come after this group ID in the specified sort order
    """

    before: Optional[str]
    """Group ID cursor for pagination.

    Returns groups that come before this group ID in the specified sort order
    """

    limit: Optional[int]
    """Maximum number of groups to return"""

    manager_type: Optional[str]
    """Manager type to filter groups by"""

    order: Literal["asc", "desc"]
    """Sort order for groups by creation time.

    'asc' for oldest first, 'desc' for newest first
    """

    order_by: Literal["created_at"]
    """Field to sort by"""
class FolderCreateParams(TypedDict, total=False):
    """Request body for creating a folder."""

    # NOTE(review): field docs below refer to "source" — folders appear to be
    # backed by sources; confirm terminology upstream.
    name: Required[str]
    """The name of the source."""

    description: Optional[str]
    """The description of the source."""

    embedding: Optional[str]
    """The handle for the embedding config used by the source."""

    embedding_chunk_size: Optional[int]
    """The chunk size of the embedding."""

    embedding_config: Optional[EmbeddingConfigParam]
    """Configuration for embedding model connection and processing parameters."""

    instructions: Optional[str]
    """Instructions for how to use the source."""

    metadata: Optional[Dict[str, object]]
    """Metadata associated with the source."""
class MessageListParams(TypedDict, total=False):
    """Query parameters for listing a batch's messages; every key is optional."""

    after: Optional[str]
    """Message ID cursor for pagination.

    Returns messages that come after this message ID in the specified sort order
    """

    agent_id: Optional[str]
    """Filter messages by agent ID"""

    before: Optional[str]
    """Message ID cursor for pagination.

    Returns messages that come before this message ID in the specified sort order
    """

    limit: Optional[int]
    """Maximum number of messages to return"""

    order: Literal["asc", "desc"]
    """Sort order for messages by creation time.

    'asc' for oldest first, 'desc' for newest first
    """

    order_by: Literal["created_at"]
    """Field to sort by"""
class RecursiveLazyProxy(LazyProxy[Any]):
    """Proxy whose ``__load__`` resolves to the proxy itself.

    Every attribute access therefore yields another proxy, letting the test
    below walk arbitrarily deep attribute chains without ever terminating in
    a concrete object.
    """

    @override
    def __load__(self) -> Any:
        return self

    def __call__(self, *_args: Any, **_kwds: Any) -> Any:
        # Guard: these tests only touch attributes; invoking the proxy is a bug.
        raise RuntimeError("This should never be called!")


def test_recursive_proxy() -> None:
    """repr/str/dir and deep attribute chains all stay on the proxy."""
    proxy = RecursiveLazyProxy()
    assert repr(proxy) == "RecursiveLazyProxy"
    assert str(proxy) == "RecursiveLazyProxy"
    assert dir(proxy) == []
    assert type(proxy).__name__ == "RecursiveLazyProxy"
    # Chained attribute access keeps producing proxies instead of recursing forever.
    assert type(operator.attrgetter("name.foo.bar.baz")(proxy)).__name__ == "RecursiveLazyProxy"


def test_isinstance_does_not_error() -> None:
    """isinstance() must not force ``__load__`` (which would raise here)."""

    class AlwaysErrorProxy(LazyProxy[Any]):
        @override
        def __load__(self) -> Any:
            raise RuntimeError("Mocking missing dependency")

    proxy = AlwaysErrorProxy()
    assert not isinstance(proxy, dict)
    assert isinstance(proxy, LazyProxy)
class IdentityUpdateParams(TypedDict, total=False):
    """Request body for updating an identity; every field is optional."""

    agent_ids: Optional[SequenceNotStr[str]]
    """The agent ids that are associated with the identity."""

    block_ids: Optional[SequenceNotStr[str]]
    """The IDs of the blocks associated with the identity."""

    identifier_key: Optional[str]
    """External, user-generated identifier key of the identity."""

    identity_type: Optional[IdentityType]
    """Enum to represent the type of the identity."""

    name: Optional[str]
    """The name of the identity."""

    properties: Optional[Iterable[IdentityPropertyParam]]
    """List of properties associated with the identity."""
class TagListParams(TypedDict, total=False):
    """Query parameters for listing tags (cursor-paginated, sorted by name)."""

    after: Optional[str]
    """Tag cursor for pagination.

    Returns tags that come after this tag in the specified sort order
    """

    before: Optional[str]
    """Tag cursor for pagination.

    Returns tags that come before this tag in the specified sort order
    """

    limit: Optional[int]
    """Maximum number of tags to return"""

    name: Optional[str]
    """Filter tags by name"""

    order: Literal["asc", "desc"]
    """Sort order for tags.

    'asc' for alphabetical order, 'desc' for reverse alphabetical order
    """

    order_by: Literal["name"]
    """Field to sort by"""

    # Deprecated per its docstring; prefer ``name``.
    query_text: Optional[str]
    """Filter tags by text search. Deprecated, please use name field instead"""
2 | 3 | from .groups import ( 4 | GroupsResource, 5 | AsyncGroupsResource, 6 | GroupsResourceWithRawResponse, 7 | AsyncGroupsResourceWithRawResponse, 8 | GroupsResourceWithStreamingResponse, 9 | AsyncGroupsResourceWithStreamingResponse, 10 | ) 11 | from .messages import ( 12 | MessagesResource, 13 | AsyncMessagesResource, 14 | MessagesResourceWithRawResponse, 15 | AsyncMessagesResourceWithRawResponse, 16 | MessagesResourceWithStreamingResponse, 17 | AsyncMessagesResourceWithStreamingResponse, 18 | ) 19 | 20 | __all__ = [ 21 | "MessagesResource", 22 | "AsyncMessagesResource", 23 | "MessagesResourceWithRawResponse", 24 | "AsyncMessagesResourceWithRawResponse", 25 | "MessagesResourceWithStreamingResponse", 26 | "AsyncMessagesResourceWithStreamingResponse", 27 | "GroupsResource", 28 | "AsyncGroupsResource", 29 | "GroupsResourceWithRawResponse", 30 | "AsyncGroupsResourceWithRawResponse", 31 | "GroupsResourceWithStreamingResponse", 32 | "AsyncGroupsResourceWithStreamingResponse", 33 | ] 34 | -------------------------------------------------------------------------------- /src/letta_client/resources/batches/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
class InitToolRuleParam(TypedDict, total=False):
    """Represents the initial tool rule configuration."""

    tool_name: Required[str]
    """The name of the tool. Must exist in the database for the user's organization."""

    args: Optional[Dict[str, object]]
    """Optional prefilled arguments for this tool.

    When present, these values will override any LLM-provided arguments with the
    same keys during invocation. Keys must match the tool's parameter names and
    values must satisfy the tool's JSON schema. Supports partial prefill;
    non-overlapping parameters are left to the model.
    """

    prompt_template: Optional[str]
    """Optional template string (ignored).

    Rendering uses fast built-in formatting for performance.
    """

    # Rule kind tag; the only permitted value is "run_first".
    type: Literal["run_first"]
See CONTRIBUTING.md for details. 2 | 3 | from .agents import ( 4 | AgentsResource, 5 | AsyncAgentsResource, 6 | AgentsResourceWithRawResponse, 7 | AsyncAgentsResourceWithRawResponse, 8 | AgentsResourceWithStreamingResponse, 9 | AsyncAgentsResourceWithStreamingResponse, 10 | ) 11 | from .templates import ( 12 | TemplatesResource, 13 | AsyncTemplatesResource, 14 | TemplatesResourceWithRawResponse, 15 | AsyncTemplatesResourceWithRawResponse, 16 | TemplatesResourceWithStreamingResponse, 17 | AsyncTemplatesResourceWithStreamingResponse, 18 | ) 19 | 20 | __all__ = [ 21 | "AgentsResource", 22 | "AsyncAgentsResource", 23 | "AgentsResourceWithRawResponse", 24 | "AsyncAgentsResourceWithRawResponse", 25 | "AgentsResourceWithStreamingResponse", 26 | "AsyncAgentsResourceWithStreamingResponse", 27 | "TemplatesResource", 28 | "AsyncTemplatesResource", 29 | "TemplatesResourceWithRawResponse", 30 | "AsyncTemplatesResourceWithRawResponse", 31 | "TemplatesResourceWithStreamingResponse", 32 | "AsyncTemplatesResourceWithStreamingResponse", 33 | ] 34 | -------------------------------------------------------------------------------- /src/letta_client/resources/archives/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
2 | 3 | from .archives import ( 4 | ArchivesResource, 5 | AsyncArchivesResource, 6 | ArchivesResourceWithRawResponse, 7 | AsyncArchivesResourceWithRawResponse, 8 | ArchivesResourceWithStreamingResponse, 9 | AsyncArchivesResourceWithStreamingResponse, 10 | ) 11 | from .passages import ( 12 | PassagesResource, 13 | AsyncPassagesResource, 14 | PassagesResourceWithRawResponse, 15 | AsyncPassagesResourceWithRawResponse, 16 | PassagesResourceWithStreamingResponse, 17 | AsyncPassagesResourceWithStreamingResponse, 18 | ) 19 | 20 | __all__ = [ 21 | "PassagesResource", 22 | "AsyncPassagesResource", 23 | "PassagesResourceWithRawResponse", 24 | "AsyncPassagesResourceWithRawResponse", 25 | "PassagesResourceWithStreamingResponse", 26 | "AsyncPassagesResourceWithStreamingResponse", 27 | "ArchivesResource", 28 | "AsyncArchivesResource", 29 | "ArchivesResourceWithRawResponse", 30 | "AsyncArchivesResourceWithRawResponse", 31 | "ArchivesResourceWithStreamingResponse", 32 | "AsyncArchivesResourceWithStreamingResponse", 33 | ] 34 | -------------------------------------------------------------------------------- /src/letta_client/resources/models/__init__.py: -------------------------------------------------------------------------------- 1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
class ToolExecutionResult(BaseModel):
    """Outcome of a single tool invocation.

    Carries the success/error status, the tool's return value, captured
    stdout/stderr, and optionally the agent state after execution.
    """

    status: Literal["success", "error"]
    """The status of the tool execution and return object"""

    agent_state: Optional[AgentState] = None
    """Representation of an agent's state.

    This is the state of the agent at a given time, and is persisted in the DB
    backend. The state has all the information needed to recreate a persisted agent.
    """

    func_return: Optional[object] = None
    """The function return object"""

    sandbox_config_fingerprint: Optional[str] = None
    """The fingerprint of the config for the sandbox"""

    stderr: Optional[List[str]] = None
    """Captured stderr from the function invocation"""

    stdout: Optional[List[str]] = None
    """Captured stdout (prints, logs) from function invocation"""
# One element of ``approvals``: either an approval decision or a tool return.
Approval: TypeAlias = Union[ApprovalReturnParam, ToolReturnParam]


class ApprovalCreateParam(TypedDict, total=False):
    """Input to approve or deny a tool call request"""

    approval_request_id: Optional[str]
    """The message ID of the approval request"""

    approvals: Optional[Iterable[Approval]]
    """The list of approval responses"""

    approve: Optional[bool]
    """Whether the tool has been approved"""

    group_id: Optional[str]
    """The multi-agent group that the message was sent in"""

    reason: Optional[str]
    """An optional explanation for the provided approval status"""

    type: Literal["approval"]
    """The message type to be created."""
class GroupListParams(TypedDict, total=False):
    """Query parameters for listing groups (cursor-paginated)."""

    after: Optional[str]
    """Group ID cursor for pagination.

    Returns groups that come after this group ID in the specified sort order
    """

    before: Optional[str]
    """Group ID cursor for pagination.

    Returns groups that come before this group ID in the specified sort order
    """

    limit: Optional[int]
    """Maximum number of groups to return"""

    manager_type: Optional[ManagerType]
    """Search groups by manager type"""

    order: Literal["asc", "desc"]
    """Sort order for groups by creation time.

    'asc' for oldest first, 'desc' for newest first
    """

    order_by: Literal["created_at"]
    """Field to sort by"""

    project_id: Optional[str]
    """Search groups by project id"""
class AgentRetrieveParams(TypedDict, total=False):
    """Query parameters for retrieving a single agent."""

    include: List[
        Literal[
            "agent.blocks",
            "agent.identities",
            "agent.managed_group",
            "agent.secrets",
            "agent.sources",
            "agent.tags",
            "agent.tools",
        ]
    ]
    """Specify which relational fields to include in the response.

    No relationships are included by default.
    """

    # Legacy alternative to ``include``; see its docstring below.
    include_relationships: Optional[SequenceNotStr[str]]
    """
    Specify which relational fields (e.g., 'tools', 'sources', 'memory') to include
    in the response. If not provided, all relationships are loaded by default. Using
    this can optimize performance by reducing unnecessary joins. This is a legacy
    parameter, and no longer supported after 1.0.0 SDK versions.
    """
class IdentityUpsertParams(TypedDict, total=False):
    """Request body for creating-or-updating (upserting) an identity."""

    identifier_key: Required[str]
    """External, user-generated identifier key of the identity."""

    identity_type: Required[IdentityType]
    """The type of the identity."""

    name: Required[str]
    """The name of the identity."""

    agent_ids: Optional[SequenceNotStr[str]]
    """The agent ids that are associated with the identity."""

    block_ids: Optional[SequenceNotStr[str]]
    """The IDs of the blocks associated with the identity."""

    project_id: Optional[str]
    """The project id of the identity, if applicable."""

    properties: Optional[Iterable[IdentityPropertyParam]]
    """List of properties associated with the identity."""
class SyncAPIResource:
    """Base class for synchronous API resources.

    Binds the parent client's request helpers (``get``, ``post``, ``patch``,
    ``put``, ``delete``, ``get_api_list``) onto the resource as
    underscore-prefixed shortcuts so subclasses can issue requests without
    reaching through ``self._client`` each time.
    """

    _client: Letta

    def __init__(self, client: Letta) -> None:
        self._client = client
        # Mirror each request helper from the client under a private alias.
        for helper in ("get", "post", "patch", "put", "delete", "get_api_list"):
            setattr(self, f"_{helper}", getattr(client, helper))

    def _sleep(self, seconds: float) -> None:
        # Blocking pause exposed to subclasses.
        time.sleep(seconds)


class AsyncAPIResource:
    """Async counterpart of :class:`SyncAPIResource`; same helper shortcuts."""

    _client: AsyncLetta

    def __init__(self, client: AsyncLetta) -> None:
        self._client = client
        # Mirror each request helper from the client under a private alias.
        for helper in ("get", "post", "patch", "put", "delete", "get_api_list"):
            setattr(self, f"_{helper}", getattr(client, helper))

    async def _sleep(self, seconds: float) -> None:
        # Non-blocking pause via anyio (backend-agnostic event-loop sleep).
        await anyio.sleep(seconds)
# Accepted response-format shapes: plain text, JSON-schema, or JSON-object mode.
ResponseFormat: TypeAlias = Union[TextResponseFormatParam, JsonSchemaResponseFormatParam, JsonObjectResponseFormatParam]


class XaiModelSettingsParam(TypedDict, total=False):
    """xAI model configuration (OpenAI-compatible)."""

    max_output_tokens: int
    """The maximum number of tokens the model can generate."""

    parallel_tool_calls: bool
    """Whether to enable parallel tool calling."""

    # Provider tag; the only permitted value is "xai".
    provider_type: Literal["xai"]
    """The type of the provider."""

    response_format: Optional[ResponseFormat]
    """The response format for the model."""

    temperature: float
    """The temperature of the model."""
# Accepted response-format shapes: plain text, JSON-schema, or JSON-object mode.
ResponseFormat: TypeAlias = Union[TextResponseFormatParam, JsonSchemaResponseFormatParam, JsonObjectResponseFormatParam]


class BedrockModelSettingsParam(TypedDict, total=False):
    """AWS Bedrock model configuration."""

    max_output_tokens: int
    """The maximum number of tokens the model can generate."""

    parallel_tool_calls: bool
    """Whether to enable parallel tool calling."""

    # Provider tag; the only permitted value is "bedrock".
    provider_type: Literal["bedrock"]
    """The type of the provider."""

    response_format: Optional[ResponseFormat]
    """The response format for the model."""

    temperature: float
    """The temperature of the model."""
# Accepted response-format shapes: plain text, JSON-schema, or JSON-object mode.
ResponseFormat: TypeAlias = Union[TextResponseFormatParam, JsonSchemaResponseFormatParam, JsonObjectResponseFormatParam]


class GroqModelSettingsParam(TypedDict, total=False):
    """Groq model configuration (OpenAI-compatible)."""

    max_output_tokens: int
    """The maximum number of tokens the model can generate."""

    parallel_tool_calls: bool
    """Whether to enable parallel tool calling."""

    # Provider tag; the only permitted value is "groq".
    provider_type: Literal["groq"]
    """The type of the provider."""

    response_format: Optional[ResponseFormat]
    """The response format for the model."""

    temperature: float
    """The temperature of the model."""
2 | 
3 | from __future__ import annotations
4 | 
5 | from typing import Union, Optional
6 | from typing_extensions import Literal, TypeAlias, TypedDict
7 | 
8 | from .text_response_format_param import TextResponseFormatParam
9 | from .json_object_response_format_param import JsonObjectResponseFormatParam
10 | from .json_schema_response_format_param import JsonSchemaResponseFormatParam
11 | 
12 | __all__ = ["AzureModelSettingsParam", "ResponseFormat"]
13 | 
14 | ResponseFormat: TypeAlias = Union[TextResponseFormatParam, JsonSchemaResponseFormatParam, JsonObjectResponseFormatParam]
15 | 
16 | 
17 | class AzureModelSettingsParam(TypedDict, total=False):
18 |     """Azure OpenAI model configuration (OpenAI-compatible)."""
19 | 
20 |     max_output_tokens: int
21 |     """The maximum number of tokens the model can generate."""
22 | 
23 |     parallel_tool_calls: bool
24 |     """Whether to enable parallel tool calling."""
25 | 
26 |     provider_type: Literal["azure"]
27 |     """The type of the provider."""
28 | 
29 |     response_format: Optional[ResponseFormat]
30 |     """The response format for the model."""
31 | 
32 |     temperature: float
33 |     """The temperature of the model."""
34 | 
--------------------------------------------------------------------------------
/src/letta_client/types/deepseek_model_settings_param.py:
--------------------------------------------------------------------------------
1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2 | 
3 | from __future__ import annotations
4 | 
5 | from typing import Union, Optional
6 | from typing_extensions import Literal, TypeAlias, TypedDict
7 | 
8 | from .text_response_format_param import TextResponseFormatParam
9 | from .json_object_response_format_param import JsonObjectResponseFormatParam
10 | from .json_schema_response_format_param import JsonSchemaResponseFormatParam
11 | 
12 | __all__ = ["DeepseekModelSettingsParam", "ResponseFormat"]
13 | 
14 | ResponseFormat: TypeAlias = Union[TextResponseFormatParam, JsonSchemaResponseFormatParam, JsonObjectResponseFormatParam]
15 | 
16 | 
17 | class DeepseekModelSettingsParam(TypedDict, total=False):
18 |     """Deepseek model configuration (OpenAI-compatible)."""
19 | 
20 |     max_output_tokens: int
21 |     """The maximum number of tokens the model can generate."""
22 | 
23 |     parallel_tool_calls: bool
24 |     """Whether to enable parallel tool calling."""
25 | 
26 |     provider_type: Literal["deepseek"]
27 |     """The type of the provider."""
28 | 
29 |     response_format: Optional[ResponseFormat]
30 |     """The response format for the model."""
31 | 
32 |     temperature: float
33 |     """The temperature of the model."""
34 | 
--------------------------------------------------------------------------------
/src/letta_client/types/together_model_settings_param.py:
--------------------------------------------------------------------------------
1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2 | 
3 | from __future__ import annotations
4 | 
5 | from typing import Union, Optional
6 | from typing_extensions import Literal, TypeAlias, TypedDict
7 | 
8 | from .text_response_format_param import TextResponseFormatParam
9 | from .json_object_response_format_param import JsonObjectResponseFormatParam
10 | from .json_schema_response_format_param import JsonSchemaResponseFormatParam
11 | 
12 | __all__ = ["TogetherModelSettingsParam", "ResponseFormat"]
13 | 
14 | ResponseFormat: TypeAlias = Union[TextResponseFormatParam, JsonSchemaResponseFormatParam, JsonObjectResponseFormatParam]
15 | 
16 | 
17 | class TogetherModelSettingsParam(TypedDict, total=False):
18 |     """Together AI model configuration (OpenAI-compatible)."""
19 | 
20 |     max_output_tokens: int
21 |     """The maximum number of tokens the model can generate."""
22 | 
23 |     parallel_tool_calls: bool
24 |     """Whether to enable parallel tool calling."""
25 | 
26 |     provider_type: Literal["together"]
27 |     """The type of the provider."""
28 | 
29 |     response_format: Optional[ResponseFormat]
30 |     """The response format for the model."""
31 | 
32 |     temperature: float
33 |     """The temperature of the model."""
34 | 
--------------------------------------------------------------------------------
/src/letta_client/types/agents/system_message.py:
--------------------------------------------------------------------------------
1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2 | 
3 | from typing import Optional
4 | from datetime import datetime
5 | from typing_extensions import Literal
6 | 
7 | from ..._models import BaseModel
8 | 
9 | __all__ = ["SystemMessage"]
10 | 
11 | 
12 | class SystemMessage(BaseModel):
13 |     """A message generated by the system.
14 | 
15 |     Never streamed back on a response, only used for cursor pagination.
16 | 
17 |     Args:
18 |         id (str): The ID of the message
19 |         date (datetime): The date the message was created in ISO format
20 |         name (Optional[str]): The name of the sender of the message
21 |         content (str): The message content sent by the system
22 |     """
23 | 
24 |     id: str
25 | 
26 |     content: str
27 |     """The message content sent by the system"""
28 | 
29 |     date: datetime
30 | 
31 |     is_err: Optional[bool] = None
32 | 
33 |     message_type: Optional[Literal["system_message"]] = None
34 |     """The type of the message."""
35 | 
36 |     name: Optional[str] = None
37 | 
38 |     otid: Optional[str] = None
39 | 
40 |     run_id: Optional[str] = None
41 | 
42 |     sender_id: Optional[str] = None
43 | 
44 |     seq_id: Optional[int] = None
45 | 
46 |     step_id: Optional[str] = None
47 | 
--------------------------------------------------------------------------------
/src/letta_client/types/agents/message.py:
--------------------------------------------------------------------------------
1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2 | 
3 | from typing import Union
4 | from typing_extensions import Annotated, TypeAlias
5 | 
6 | from ..._utils import PropertyInfo
7 | from .user_message import UserMessage
8 | from .event_message import EventMessage
9 | from .system_message import SystemMessage
10 | from .summary_message import SummaryMessage
11 | from .assistant_message import AssistantMessage
12 | from .reasoning_message import ReasoningMessage
13 | from .tool_call_message import ToolCallMessage
14 | from ..tool_return_message import ToolReturnMessage
15 | from .approval_request_message import ApprovalRequestMessage
16 | from .hidden_reasoning_message import HiddenReasoningMessage
17 | from .approval_response_message import ApprovalResponseMessage
18 | 
19 | __all__ = ["Message"]
20 | 
21 | Message: TypeAlias = Annotated[
22 |     Union[
23 |         SystemMessage,
24 |         UserMessage,
25 |         ReasoningMessage,
26 |         HiddenReasoningMessage,
27 |         ToolCallMessage,
28 |         ToolReturnMessage,
29 |         AssistantMessage,
30 |         ApprovalRequestMessage,
31 |         ApprovalResponseMessage,
32 |         SummaryMessage,
33 |         EventMessage,
34 |     ],
35 |     PropertyInfo(discriminator="message_type"),
36 | ]
37 | 
--------------------------------------------------------------------------------
/src/letta_client/types/agents/file_list_response.py:
--------------------------------------------------------------------------------
1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2 | 
3 | from typing import Optional
4 | from datetime import datetime
5 | 
6 | from ..._models import BaseModel
7 | 
8 | __all__ = ["FileListResponse"]
9 | 
10 | 
11 | class FileListResponse(BaseModel):
12 |     """Response model for agent file attachments showing file status in agent context"""
13 | 
14 |     id: str
15 |     """Unique identifier of the file-agent relationship"""
16 | 
17 |     file_id: str
18 |     """Unique identifier of the file"""
19 | 
20 |     file_name: str
21 |     """Name of the file"""
22 | 
23 |     folder_id: str
24 |     """Unique identifier of the folder/source"""
25 | 
26 |     folder_name: str
27 |     """Name of the folder/source"""
28 | 
29 |     is_open: bool
30 |     """Whether the file is currently open in the agent's context"""
31 | 
32 |     end_line: Optional[int] = None
33 |     """Ending line number if file was opened with line range"""
34 | 
35 |     last_accessed_at: Optional[datetime] = None
36 |     """Timestamp of last access by the agent"""
37 | 
38 |     start_line: Optional[int] = None
39 |     """Starting line number if file was opened with line range"""
40 | 
41 |     visible_content: Optional[str] = None
42 |     """Portion of the file visible to the agent if open"""
43 | 
--------------------------------------------------------------------------------
/src/letta_client/types/group_create_params.py:
--------------------------------------------------------------------------------
1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2 | 
3 | from __future__ import annotations
4 | 
5 | from typing import Union, Optional
6 | from typing_extensions import Required, TypeAlias, TypedDict
7 | 
8 | from .._types import SequenceNotStr
9 | from .dynamic_manager_param import DynamicManagerParam
10 | from .sleeptime_manager_param import SleeptimeManagerParam
11 | from .supervisor_manager_param import SupervisorManagerParam
12 | from .round_robin_manager_param import RoundRobinManagerParam
13 | from .voice_sleeptime_manager_param import VoiceSleeptimeManagerParam
14 | 
15 | __all__ = ["GroupCreateParams", "ManagerConfig"]
16 | 
17 | 
18 | class GroupCreateParams(TypedDict, total=False):
19 |     agent_ids: Required[SequenceNotStr[str]]
20 | 
21 |     description: Required[str]
22 | 
23 |     hidden: Optional[bool]
24 |     """If set to True, the group will be hidden."""
25 | 
26 |     manager_config: ManagerConfig
27 | 
28 |     project_id: Optional[str]
29 |     """The associated project id."""
30 | 
31 |     shared_block_ids: SequenceNotStr[str]
32 | 
33 | 
34 | ManagerConfig: TypeAlias = Union[
35 |     RoundRobinManagerParam,
36 |     SupervisorManagerParam,
37 |     DynamicManagerParam,
38 |     SleeptimeManagerParam,
39 |     VoiceSleeptimeManagerParam,
40 | ]
41 | 
--------------------------------------------------------------------------------
/src/letta_client/types/agent_environment_variable.py:
--------------------------------------------------------------------------------
1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2 | 
3 | from typing import Optional
4 | from datetime import datetime
5 | 
6 | from .._models import BaseModel
7 | 
8 | __all__ = ["AgentEnvironmentVariable"]
9 | 
10 | 
11 | class AgentEnvironmentVariable(BaseModel):
12 |     agent_id: str
13 |     """The ID of the agent this environment variable belongs to."""
14 | 
15 |     key: str
16 |     """The name of the environment variable."""
17 | 
18 |     value: str
19 |     """The value of the environment variable."""
20 | 
21 |     id: Optional[str] = None
22 |     """The human-friendly ID of the Agent-env"""
23 | 
24 |     created_at: Optional[datetime] = None
25 |     """The timestamp when the object was created."""
26 | 
27 |     created_by_id: Optional[str] = None
28 |     """The id of the user that made this object."""
29 | 
30 |     description: Optional[str] = None
31 |     """An optional description of the environment variable."""
32 | 
33 |     last_updated_by_id: Optional[str] = None
34 |     """The id of the user that made this object."""
35 | 
36 |     updated_at: Optional[datetime] = None
37 |     """The timestamp when the object was last updated."""
38 | 
39 |     value_enc: Optional[str] = None
40 |     """Encrypted secret value (stored as encrypted string)"""
41 | 
--------------------------------------------------------------------------------
/src/letta_client/types/groups/message_list_params.py:
--------------------------------------------------------------------------------
1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2 | 
3 | from __future__ import annotations
4 | 
5 | from typing import Optional
6 | from typing_extensions import Literal, TypedDict
7 | 
8 | __all__ = ["MessageListParams"]
9 | 
10 | 
11 | class MessageListParams(TypedDict, total=False):
12 |     after: Optional[str]
13 |     """Message ID cursor for pagination.
14 | 
15 |     Returns messages that come after this message ID in the specified sort order
16 |     """
17 | 
18 |     assistant_message_tool_kwarg: str
19 |     """The name of the message argument."""
20 | 
21 |     assistant_message_tool_name: str
22 |     """The name of the designated message tool."""
23 | 
24 |     before: Optional[str]
25 |     """Message ID cursor for pagination.
26 | 
27 |     Returns messages that come before this message ID in the specified sort order
28 |     """
29 | 
30 |     limit: Optional[int]
31 |     """Maximum number of messages to retrieve"""
32 | 
33 |     order: Literal["asc", "desc"]
34 |     """Sort order for messages by creation time.
35 | 
36 |     'asc' for oldest first, 'desc' for newest first
37 |     """
38 | 
39 |     order_by: Literal["created_at"]
40 |     """Field to sort by"""
41 | 
42 |     use_assistant_message: bool
43 |     """Whether to use assistant messages"""
44 | 
--------------------------------------------------------------------------------
/src/letta_client/types/message_create_param.py:
--------------------------------------------------------------------------------
1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2 | 
3 | from __future__ import annotations
4 | 
5 | from typing import Union, Iterable, Optional
6 | from typing_extensions import Literal, Required, TypedDict
7 | 
8 | from .letta_message_content_union_param import LettaMessageContentUnionParam
9 | 
10 | __all__ = ["MessageCreateParam"]
11 | 
12 | 
13 | class MessageCreateParam(TypedDict, total=False):
14 |     """Request to create a message"""
15 | 
16 |     content: Required[Union[Iterable[LettaMessageContentUnionParam], str]]
17 |     """The content of the message."""
18 | 
19 |     role: Required[Literal["user", "system", "assistant"]]
20 |     """The role of the participant."""
21 | 
22 |     batch_item_id: Optional[str]
23 |     """The id of the LLMBatchItem that this message is associated with"""
24 | 
25 |     group_id: Optional[str]
26 |     """The multi-agent group that the message was sent in"""
27 | 
28 |     name: Optional[str]
29 |     """The name of the participant."""
30 | 
31 |     otid: Optional[str]
32 |     """The offline threading id associated with this message"""
33 | 
34 |     sender_id: Optional[str]
35 |     """The id of the sender of the message, can be an identity id or agent id"""
36 | 
37 |     type: Optional[Literal["message"]]
38 |     """The message type to be created."""
39 | 
--------------------------------------------------------------------------------
/src/letta_client/types/agents/passage_search_params.py:
--------------------------------------------------------------------------------
1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2 | 
3 | from __future__ import annotations
4 | 
5 | from typing import Union, Optional
6 | from datetime import datetime
7 | from typing_extensions import Literal, Required, Annotated, TypedDict
8 | 
9 | from ..._types import SequenceNotStr
10 | from ..._utils import PropertyInfo
11 | 
12 | __all__ = ["PassageSearchParams"]
13 | 
14 | 
15 | class PassageSearchParams(TypedDict, total=False):
16 |     query: Required[str]
17 |     """String to search for using semantic similarity"""
18 | 
19 |     end_datetime: Annotated[Union[str, datetime, None], PropertyInfo(format="iso8601")]
20 |     """Filter results to passages created before this datetime"""
21 | 
22 |     start_datetime: Annotated[Union[str, datetime, None], PropertyInfo(format="iso8601")]
23 |     """Filter results to passages created after this datetime"""
24 | 
25 |     tag_match_mode: Literal["any", "all"]
26 |     """
27 |     How to match tags - 'any' to match passages with any of the tags, 'all' to match
28 |     only passages with all tags
29 |     """
30 | 
31 |     tags: Optional[SequenceNotStr[str]]
32 |     """Optional list of tags to filter search results"""
33 | 
34 |     top_k: Optional[int]
35 |     """Maximum number of results to return. Uses system default if not specified"""
36 | 
--------------------------------------------------------------------------------
/src/letta_client/types/xai_model_settings.py:
--------------------------------------------------------------------------------
1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2 | 
3 | from typing import Union, Optional
4 | from typing_extensions import Literal, Annotated, TypeAlias
5 | 
6 | from .._utils import PropertyInfo
7 | from .._models import BaseModel
8 | from .text_response_format import TextResponseFormat
9 | from .json_object_response_format import JsonObjectResponseFormat
10 | from .json_schema_response_format import JsonSchemaResponseFormat
11 | 
12 | __all__ = ["XaiModelSettings", "ResponseFormat"]
13 | 
14 | ResponseFormat: TypeAlias = Annotated[
15 |     Union[TextResponseFormat, JsonSchemaResponseFormat, JsonObjectResponseFormat, None],
16 |     PropertyInfo(discriminator="type"),
17 | ]
18 | 
19 | 
20 | class XaiModelSettings(BaseModel):
21 |     """xAI model configuration (OpenAI-compatible)."""
22 | 
23 |     max_output_tokens: Optional[int] = None
24 |     """The maximum number of tokens the model can generate."""
25 | 
26 |     parallel_tool_calls: Optional[bool] = None
27 |     """Whether to enable parallel tool calling."""
28 | 
29 |     provider_type: Optional[Literal["xai"]] = None
30 |     """The type of the provider."""
31 | 
32 |     response_format: Optional[ResponseFormat] = None
33 |     """The response format for the model."""
34 | 
35 |     temperature: Optional[float] = None
36 |     """The temperature of the model."""
37 | 
--------------------------------------------------------------------------------
/src/letta_client/types/bedrock_model_settings.py:
--------------------------------------------------------------------------------
1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2 | 
3 | from typing import Union, Optional
4 | from typing_extensions import Literal, Annotated, TypeAlias
5 | 
6 | from .._utils import PropertyInfo
7 | from .._models import BaseModel
8 | from .text_response_format import TextResponseFormat
9 | from .json_object_response_format import JsonObjectResponseFormat
10 | from .json_schema_response_format import JsonSchemaResponseFormat
11 | 
12 | __all__ = ["BedrockModelSettings", "ResponseFormat"]
13 | 
14 | ResponseFormat: TypeAlias = Annotated[
15 |     Union[TextResponseFormat, JsonSchemaResponseFormat, JsonObjectResponseFormat, None],
16 |     PropertyInfo(discriminator="type"),
17 | ]
18 | 
19 | 
20 | class BedrockModelSettings(BaseModel):
21 |     """AWS Bedrock model configuration."""
22 | 
23 |     max_output_tokens: Optional[int] = None
24 |     """The maximum number of tokens the model can generate."""
25 | 
26 |     parallel_tool_calls: Optional[bool] = None
27 |     """Whether to enable parallel tool calling."""
28 | 
29 |     provider_type: Optional[Literal["bedrock"]] = None
30 |     """The type of the provider."""
31 | 
32 |     response_format: Optional[ResponseFormat] = None
33 |     """The response format for the model."""
34 | 
35 |     temperature: Optional[float] = None
36 |     """The temperature of the model."""
37 | 
--------------------------------------------------------------------------------
/src/letta_client/types/groq_model_settings.py:
--------------------------------------------------------------------------------
1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2 | 
3 | from typing import Union, Optional
4 | from typing_extensions import Literal, Annotated, TypeAlias
5 | 
6 | from .._utils import PropertyInfo
7 | from .._models import BaseModel
8 | from .text_response_format import TextResponseFormat
9 | from .json_object_response_format import JsonObjectResponseFormat
10 | from .json_schema_response_format import JsonSchemaResponseFormat
11 | 
12 | __all__ = ["GroqModelSettings", "ResponseFormat"]
13 | 
14 | ResponseFormat: TypeAlias = Annotated[
15 |     Union[TextResponseFormat, JsonSchemaResponseFormat, JsonObjectResponseFormat, None],
16 |     PropertyInfo(discriminator="type"),
17 | ]
18 | 
19 | 
20 | class GroqModelSettings(BaseModel):
21 |     """Groq model configuration (OpenAI-compatible)."""
22 | 
23 |     max_output_tokens: Optional[int] = None
24 |     """The maximum number of tokens the model can generate."""
25 | 
26 |     parallel_tool_calls: Optional[bool] = None
27 |     """Whether to enable parallel tool calling."""
28 | 
29 |     provider_type: Optional[Literal["groq"]] = None
30 |     """The type of the provider."""
31 | 
32 |     response_format: Optional[ResponseFormat] = None
33 |     """The response format for the model."""
34 | 
35 |     temperature: Optional[float] = None
36 |     """The temperature of the model."""
37 | 
--------------------------------------------------------------------------------
/src/letta_client/_utils/_compat.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 | 
3 | import sys
4 | import typing_extensions
5 | from typing import Any, Type, Union, Literal, Optional
6 | from datetime import date, datetime
7 | from typing_extensions import get_args as _get_args, get_origin as _get_origin
8 | 
9 | from .._types import StrBytesIntFloat
10 | from ._datetime_parse import parse_date as _parse_date, parse_datetime as _parse_datetime
11 | 
12 | _LITERAL_TYPES = {Literal, typing_extensions.Literal}
13 | 
14 | 
15 | def get_args(tp: type[Any]) -> tuple[Any, ...]:
16 |     return _get_args(tp)
17 | 
18 | 
19 | def get_origin(tp: type[Any]) -> type[Any] | None:
20 |     return _get_origin(tp)
21 | 
22 | 
23 | def is_union(tp: Optional[Type[Any]]) -> bool:
24 |     if sys.version_info < (3, 10):
25 |         return tp is Union  # type: ignore[comparison-overlap]
26 |     else:
27 |         import types
28 | 
29 |         return tp is Union or tp is types.UnionType
30 | 
31 | 
32 | def is_typeddict(tp: Type[Any]) -> bool:
33 |     return typing_extensions.is_typeddict(tp)
34 | 
35 | 
36 | def is_literal_type(tp: Type[Any]) -> bool:
37 |     return get_origin(tp) in _LITERAL_TYPES
38 | 
39 | 
40 | def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
41 |     return _parse_date(value)
42 | 
43 | 
44 | def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
45 |     return _parse_datetime(value)
46 | 
--------------------------------------------------------------------------------
/src/letta_client/types/azure_model_settings.py:
--------------------------------------------------------------------------------
1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2 | 
3 | from typing import Union, Optional
4 | from typing_extensions import Literal, Annotated, TypeAlias
5 | 
6 | from .._utils import PropertyInfo
7 | from .._models import BaseModel
8 | from .text_response_format import TextResponseFormat
9 | from .json_object_response_format import JsonObjectResponseFormat
10 | from .json_schema_response_format import JsonSchemaResponseFormat
11 | 
12 | __all__ = ["AzureModelSettings", "ResponseFormat"]
13 | 
14 | ResponseFormat: TypeAlias = Annotated[
15 |     Union[TextResponseFormat, JsonSchemaResponseFormat, JsonObjectResponseFormat, None],
16 |     PropertyInfo(discriminator="type"),
17 | ]
18 | 
19 | 
20 | class AzureModelSettings(BaseModel):
21 |     """Azure OpenAI model configuration (OpenAI-compatible)."""
22 | 
23 |     max_output_tokens: Optional[int] = None
24 |     """The maximum number of tokens the model can generate."""
25 | 
26 |     parallel_tool_calls: Optional[bool] = None
27 |     """Whether to enable parallel tool calling."""
28 | 
29 |     provider_type: Optional[Literal["azure"]] = None
30 |     """The type of the provider."""
31 | 
32 |     response_format: Optional[ResponseFormat] = None
33 |     """The response format for the model."""
34 | 
35 |     temperature: Optional[float] = None
36 |     """The temperature of the model."""
37 | 
--------------------------------------------------------------------------------
/src/letta_client/types/folder.py:
--------------------------------------------------------------------------------
1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2 | 
3 | from typing import Dict, Optional
4 | from datetime import datetime
5 | 
6 | from .._models import BaseModel
7 | from .embedding_config import EmbeddingConfig
8 | 
9 | __all__ = ["Folder"]
10 | 
11 | 
12 | class Folder(BaseModel):
13 |     """Representation of a folder, which is a collection of files and passages."""
14 | 
15 |     id: str
16 |     """The human-friendly ID of the Source"""
17 | 
18 |     embedding_config: EmbeddingConfig
19 |     """The embedding configuration used by the folder."""
20 | 
21 |     name: str
22 |     """The name of the folder."""
23 | 
24 |     created_at: Optional[datetime] = None
25 |     """The timestamp when the folder was created."""
26 | 
27 |     created_by_id: Optional[str] = None
28 |     """The id of the user that made this Tool."""
29 | 
30 |     description: Optional[str] = None
31 |     """The description of the folder."""
32 | 
33 |     instructions: Optional[str] = None
34 |     """Instructions for how to use the folder."""
35 | 
36 |     last_updated_by_id: Optional[str] = None
37 |     """The id of the user that made this Tool."""
38 | 
39 |     metadata: Optional[Dict[str, object]] = None
40 |     """Metadata associated with the folder."""
41 | 
42 |     updated_at: Optional[datetime] = None
43 |     """The timestamp when the folder was last updated."""
44 | 
--------------------------------------------------------------------------------
/src/letta_client/types/deepseek_model_settings.py:
--------------------------------------------------------------------------------
1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2 | 
3 | from typing import Union, Optional
4 | from typing_extensions import Literal, Annotated, TypeAlias
5 | 
6 | from .._utils import PropertyInfo
7 | from .._models import BaseModel
8 | from .text_response_format import TextResponseFormat
9 | from .json_object_response_format import JsonObjectResponseFormat
10 | from .json_schema_response_format import JsonSchemaResponseFormat
11 | 
12 | __all__ = ["DeepseekModelSettings", "ResponseFormat"]
13 | 
14 | ResponseFormat: TypeAlias = Annotated[
15 |     Union[TextResponseFormat, JsonSchemaResponseFormat, JsonObjectResponseFormat, None],
16 |     PropertyInfo(discriminator="type"),
17 | ]
18 | 
19 | 
20 | class DeepseekModelSettings(BaseModel):
21 |     """Deepseek model configuration (OpenAI-compatible)."""
22 | 
23 |     max_output_tokens: Optional[int] = None
24 |     """The maximum number of tokens the model can generate."""
25 | 
26 |     parallel_tool_calls: Optional[bool] = None
27 |     """Whether to enable parallel tool calling."""
28 | 
29 |     provider_type: Optional[Literal["deepseek"]] = None
30 |     """The type of the provider."""
31 | 
32 |     response_format: Optional[ResponseFormat] = None
33 |     """The response format for the model."""
34 | 
35 |     temperature: Optional[float] = None
36 |     """The temperature of the model."""
37 | 
--------------------------------------------------------------------------------
/src/letta_client/types/template_create_params.py:
--------------------------------------------------------------------------------
1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2 | 
3 | from __future__ import annotations
4 | 
5 | from typing import Dict, Union, Optional
6 | from typing_extensions import Literal, Required, TypeAlias, TypedDict
7 | 
8 | __all__ = ["TemplateCreateParams", "Variant0", "Variant1"]
9 | 
10 | 
11 | class Variant0(TypedDict, total=False):
12 |     agent_id: Required[str]
13 |     """The ID of the agent to use as a template, can be from any project"""
14 | 
15 |     type: Required[Literal["agent"]]
16 | 
17 |     name: str
18 |     """Optional custom name for the template.
19 | 
20 |     If not provided, a random name will be generated.
21 |     """
22 | 
23 | 
24 | class Variant1(TypedDict, total=False):
25 |     agent_file: Required[Dict[str, Optional[object]]]
26 |     """
27 |     The agent file to use as a template, this should be a JSON file exported from
28 |     the platform
29 |     """
30 | 
31 |     type: Required[Literal["agent_file"]]
32 | 
33 |     name: str
34 |     """Optional custom name for the template.
35 | 
36 |     If not provided, a random name will be generated.
37 |     """
38 | 
39 |     update_existing_tools: bool
40 |     """
41 |     If true, update existing custom tools source_code and json_schema (source_type
42 |     cannot be changed)
43 |     """
44 | 
45 | 
46 | TemplateCreateParams: TypeAlias = Union[Variant0, Variant1]
47 | 
--------------------------------------------------------------------------------
/src/letta_client/types/together_model_settings.py:
--------------------------------------------------------------------------------
1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2 | 
3 | from typing import Union, Optional
4 | from typing_extensions import Literal, Annotated, TypeAlias
5 | 
6 | from .._utils import PropertyInfo
7 | from .._models import BaseModel
8 | from .text_response_format import TextResponseFormat
9 | from .json_object_response_format import JsonObjectResponseFormat
10 | from .json_schema_response_format import JsonSchemaResponseFormat
11 | 
12 | __all__ = ["TogetherModelSettings", "ResponseFormat"]
13 | 
14 | ResponseFormat: TypeAlias = Annotated[
15 |     Union[TextResponseFormat, JsonSchemaResponseFormat, JsonObjectResponseFormat, None],
16 |     PropertyInfo(discriminator="type"),
17 | ]
18 | 
19 | 
20 | class TogetherModelSettings(BaseModel):
21 |     """Together AI model configuration (OpenAI-compatible)."""
22 | 
23 |     max_output_tokens: Optional[int] = None
24 |     """The maximum number of tokens the model can generate."""
25 | 
26 |     parallel_tool_calls: Optional[bool] = None
27 |     """Whether to enable parallel tool calling."""
28 | 
29 |     provider_type: Optional[Literal["together"]] = None
30 |     """The type of the provider."""
31 | 
32 |     response_format: Optional[ResponseFormat] = None
33 |     """The response format for the model."""
34 | 
35 |     temperature: Optional[float] = None
36 |     """The temperature of the model."""
37 | 
--------------------------------------------------------------------------------
/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
1 | // For format details, see https://aka.ms/devcontainer.json. For config options, see the
2 | // README at: https://github.com/devcontainers/templates/tree/main/src/debian
3 | {
4 |   "name": "Debian",
5 |   "build": {
6 |     "dockerfile": "Dockerfile",
7 |     "context": ".."
8 |   },
9 | 
10 |   "postStartCommand": "rye sync --all-features",
11 | 
12 |   "customizations": {
13 |     "vscode": {
14 |       "extensions": [
15 |         "ms-python.python"
16 |       ],
17 |       "settings": {
18 |         "terminal.integrated.shell.linux": "/bin/bash",
19 |         "python.pythonPath": ".venv/bin/python",
20 |         "python.defaultInterpreterPath": ".venv/bin/python",
21 |         "python.typeChecking": "basic",
22 |         "terminal.integrated.env.linux": {
23 |           "PATH": "/home/vscode/.rye/shims:${env:PATH}"
24 |         }
25 |       }
26 |     }
27 |   },
28 |   "features": {
29 |     "ghcr.io/devcontainers/features/node:1": {}
30 |   }
31 | 
32 |   // Features to add to the dev container. More info: https://containers.dev/features.
33 |   // "features": {},
34 | 
35 |   // Use 'forwardPorts' to make a list of ports inside the container available locally.
36 |   // "forwardPorts": [],
37 | 
38 |   // Configure tool-specific properties.
39 |   // "customizations": {},
40 | 
41 |   // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
42 |   // "remoteUser": "root"
43 | }
--------------------------------------------------------------------------------
/src/letta_client/types/identity_list_params.py:
--------------------------------------------------------------------------------
1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
2 | 
3 | from __future__ import annotations
4 | 
5 | from typing import Optional
6 | from typing_extensions import Literal, TypedDict
7 | 
8 | from .identity_type import IdentityType
9 | 
10 | __all__ = ["IdentityListParams"]
11 | 
12 | 
13 | class IdentityListParams(TypedDict, total=False):
14 |     after: Optional[str]
15 |     """Identity ID cursor for pagination.
16 | 
17 |     Returns identities that come after this identity ID in the specified sort order
18 |     """
19 | 
20 |     before: Optional[str]
21 |     """Identity ID cursor for pagination.
22 | 
23 |     Returns identities that come before this identity ID in the specified sort order
24 |     """
25 | 
26 |     identifier_key: Optional[str]
27 | 
28 |     identity_type: Optional[IdentityType]
29 |     """Enum to represent the type of the identity."""
30 | 
31 |     limit: Optional[int]
32 |     """Maximum number of identities to return"""
33 | 
34 |     name: Optional[str]
35 | 
36 |     order: Literal["asc", "desc"]
37 |     """Sort order for identities by creation time.
38 | 
39 |     'asc' for oldest first, 'desc' for newest first
40 |     """
41 | 
42 |     order_by: Literal["created_at"]
43 |     """Field to sort by"""
44 | 
45 |     project_id: Optional[str]
46 |     """[DEPRECATED: Use X-Project-Id header instead] Filter identities by project ID"""
47 | 
--------------------------------------------------------------------------------
/src/letta_client/types/steps/message_list_response.py:
--------------------------------------------------------------------------------
1 | # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
# Discriminated union over every message variant a step's message list can
# contain; the concrete class is chosen by each payload's `message_type` field.
MessageListResponse: TypeAlias = Annotated[
    Union[
        SystemMessage,
        UserMessage,
        ReasoningMessage,
        HiddenReasoningMessage,
        ToolCallMessage,
        ToolReturnMessage,
        AssistantMessage,
        ApprovalRequestMessage,
        ApprovalResponseMessage,
        SummaryMessage,
        EventMessage,
    ],
    PropertyInfo(discriminator="message_type"),
]
# Discriminated union over every message variant a group message-update call
# can return; dispatched on each payload's `message_type` field.
MessageUpdateResponse: TypeAlias = Annotated[
    Union[
        SystemMessage,
        UserMessage,
        ReasoningMessage,
        HiddenReasoningMessage,
        ToolCallMessage,
        ToolReturnMessage,
        AssistantMessage,
        ApprovalRequestMessage,
        ApprovalResponseMessage,
        SummaryMessage,
        EventMessage,
    ],
    PropertyInfo(discriminator="message_type"),
]
class PassageSearchParams(TypedDict, total=False):
    """Query parameters for semantic passage search.

    ``query`` is the only required key (``Required`` overrides ``total=False``);
    every other key may be omitted.
    """

    query: Required[str]
    """Text query for semantic search"""

    agent_id: Optional[str]
    """Filter passages by agent ID"""

    archive_id: Optional[str]
    """Filter passages by archive ID"""

    # Serialized to an ISO-8601 string by PropertyInfo(format="iso8601").
    end_date: Annotated[Union[str, datetime, None], PropertyInfo(format="iso8601")]
    """Filter results to passages created before this datetime"""

    limit: int
    """Maximum number of results to return"""

    # Serialized to an ISO-8601 string by PropertyInfo(format="iso8601").
    start_date: Annotated[Union[str, datetime, None], PropertyInfo(format="iso8601")]
    """Filter results to passages created after this datetime"""

    tag_match_mode: Literal["any", "all"]
    """
    How to match tags - 'any' to match passages with any of the tags, 'all' to match
    only passages with all tags
    """

    tags: Optional[SequenceNotStr[str]]
    """Optional list of tags to filter search results"""
class AgentListParams(TypedDict, total=False):
    """Query parameters for listing an identity's agents (all keys optional)."""

    after: Optional[str]
    """Agent ID cursor for pagination.

    Returns agents that come after this agent ID in the specified sort order
    """

    before: Optional[str]
    """Agent ID cursor for pagination.

    Returns agents that come before this agent ID in the specified sort order
    """

    # Only these relational expansions are accepted by the API.
    include: List[
        Literal[
            "agent.blocks",
            "agent.identities",
            "agent.managed_group",
            "agent.secrets",
            "agent.sources",
            "agent.tags",
            "agent.tools",
        ]
    ]
    """Specify which relational fields to include in the response.

    No relationships are included by default.
    """

    limit: Optional[int]
    """Maximum number of agents to return"""

    order: Literal["asc", "desc"]
    """Sort order for agents by creation time.

    'asc' for oldest first, 'desc' for newest first
    """

    order_by: Literal["created_at"]
    """Field to sort by"""
16 | """ 17 | 18 | id: str 19 | """The human-friendly ID of the Archive""" 20 | 21 | created_at: datetime 22 | """The creation date of the archive""" 23 | 24 | embedding_config: EmbeddingConfig 25 | """Embedding configuration for passages in this archive""" 26 | 27 | name: str 28 | """The name of the archive""" 29 | 30 | created_by_id: Optional[str] = None 31 | """The id of the user that made this object.""" 32 | 33 | description: Optional[str] = None 34 | """A description of the archive""" 35 | 36 | last_updated_by_id: Optional[str] = None 37 | """The id of the user that made this object.""" 38 | 39 | metadata: Optional[Dict[str, object]] = None 40 | """Additional metadata""" 41 | 42 | updated_at: Optional[datetime] = None 43 | """The timestamp when the object was last updated.""" 44 | 45 | vector_db_provider: Optional[VectorDBProvider] = None 46 | """The vector database provider used for this archive's passages""" 47 | -------------------------------------------------------------------------------- /src/letta_client/_utils/_reflection.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import inspect 4 | from typing import Any, Callable 5 | 6 | 7 | def function_has_argument(func: Callable[..., Any], arg_name: str) -> bool: 8 | """Returns whether or not the given function has a specific parameter""" 9 | sig = inspect.signature(func) 10 | return arg_name in sig.parameters 11 | 12 | 13 | def assert_signatures_in_sync( 14 | source_func: Callable[..., Any], 15 | check_func: Callable[..., Any], 16 | *, 17 | exclude_params: set[str] = set(), 18 | ) -> None: 19 | """Ensure that the signature of the second function matches the first.""" 20 | 21 | check_sig = inspect.signature(check_func) 22 | source_sig = inspect.signature(source_func) 23 | 24 | errors: list[str] = [] 25 | 26 | for name, source_param in source_sig.parameters.items(): 27 | if name in exclude_params: 28 | continue 29 | 30 | 
def assert_signatures_in_sync(
    source_func: Callable[..., Any],
    check_func: Callable[..., Any],
    *,
    # A shared mutable default is normally an anti-pattern, but this set is
    # only ever read (`name in exclude_params`), never mutated, and changing
    # the default would alter the public signature.
    exclude_params: set[str] = set(),
) -> None:
    """Assert that ``check_func``'s signature matches ``source_func``'s.

    Every parameter of ``source_func`` (except those named in
    ``exclude_params``) must exist on ``check_func`` with an identical type
    annotation.

    Args:
        source_func: The function whose signature is the source of truth.
        check_func: The function being checked against ``source_func``.
        exclude_params: Parameter names to skip during comparison.

    Raises:
        AssertionError: If any parameter is missing or has a mismatched
            annotation; the message lists every discrepancy found.
    """

    check_sig = inspect.signature(check_func)
    source_sig = inspect.signature(source_func)

    errors: list[str] = []

    for name, source_param in source_sig.parameters.items():
        if name in exclude_params:
            continue

        custom_param = check_sig.parameters.get(name)
        # `Parameter` instances are always truthy; test for absence explicitly.
        if custom_param is None:
            errors.append(f"the `{name}` param is missing")
            continue

        if custom_param.annotation != source_param.annotation:
            errors.append(
                # Fixed garbled grammar in the original message ("are do not match").
                f"types for the `{name}` param do not match; source={repr(source_param.annotation)} checking={repr(custom_param.annotation)}"
            )
            continue

    if errors:
        raise AssertionError(f"{len(errors)} errors encountered when comparing signatures:\n\n" + "\n\n".join(errors))
class HiddenReasoningMessage(BaseModel):
    """
    Representation of an agent's internal reasoning where reasoning content
    has been hidden from the response.

    Args:
        id (str): The ID of the message
        date (datetime): The date the message was created in ISO format
        name (Optional[str]): The name of the sender of the message
        state (Literal["redacted", "omitted"]): Whether the reasoning
            content was redacted by the provider or simply omitted by the API
        hidden_reasoning (Optional[str]): The internal reasoning of the agent
    """

    id: str

    date: datetime

    state: Literal["redacted", "omitted"]

    hidden_reasoning: Optional[str] = None

    # NOTE(review): presumably flags an errored message — confirm semantics.
    is_err: Optional[bool] = None

    message_type: Optional[Literal["hidden_reasoning_message"]] = None
    """The type of the message."""

    name: Optional[str] = None

    # NOTE(review): assumed to be an offline/origin tracking ID — TODO confirm.
    otid: Optional[str] = None

    run_id: Optional[str] = None

    sender_id: Optional[str] = None

    # NOTE(review): presumably orders messages within a run — TODO confirm.
    seq_id: Optional[int] = None

    step_id: Optional[str] = None
class MessageListParams(TypedDict, total=False):
    """Query parameters for listing an agent's messages (all keys optional)."""

    after: Optional[str]
    """Message ID cursor for pagination.

    Returns messages that come after this message ID in the specified sort order
    """

    assistant_message_tool_kwarg: str
    """The name of the message argument."""

    assistant_message_tool_name: str
    """The name of the designated message tool."""

    before: Optional[str]
    """Message ID cursor for pagination.

    Returns messages that come before this message ID in the specified sort order
    """

    group_id: Optional[str]
    """Group ID to filter messages by."""

    include_err: Optional[bool]
    """Whether to include error messages and error statuses.

    For debugging purposes only.
    """

    limit: Optional[int]
    """Maximum number of messages to return"""

    order: Literal["asc", "desc"]
    """Sort order for messages by creation time.

    'asc' for oldest first, 'desc' for newest first
    """

    order_by: Literal["created_at"]
    """Field to sort by"""

    use_assistant_message: bool
    """Whether to use assistant messages"""