├── .github ├── actions │ └── uv_setup │ │ └── action.yml └── workflows │ ├── _lint.yml │ ├── _test.yml │ ├── ci.yml │ └── release.yml ├── .gitignore ├── LICENSE ├── Makefile ├── README.md ├── examples └── servers │ └── streamable-http-stateless │ ├── README.md │ ├── mcp_simple_streamablehttp_stateless │ ├── __init__.py │ ├── __main__.py │ └── server.py │ ├── pyproject.toml │ └── uv.lock ├── langchain_mcp_adapters ├── __init__.py ├── client.py ├── prompts.py ├── resources.py ├── sessions.py └── tools.py ├── pyproject.toml ├── static └── img │ └── mcp.png ├── tests ├── __init__.py ├── conftest.py ├── servers │ ├── __init__.py │ ├── math_server.py │ ├── time_server.py │ └── weather_server.py ├── test_client.py ├── test_import.py ├── test_prompts.py ├── test_resources.py ├── test_tools.py └── utils.py └── uv.lock /.github/actions/uv_setup/action.yml: -------------------------------------------------------------------------------- 1 | # TODO: https://docs.astral.sh/uv/guides/integration/github/#caching 2 | 3 | name: uv-install 4 | description: Set up Python and uv 5 | 6 | inputs: 7 | python-version: 8 | description: Python version, supporting MAJOR.MINOR only 9 | required: true 10 | 11 | env: 12 | UV_VERSION: "0.5.25" 13 | 14 | runs: 15 | using: composite 16 | steps: 17 | - name: Install uv and set the python version 18 | uses: astral-sh/setup-uv@v5 19 | with: 20 | version: ${{ env.UV_VERSION }} 21 | python-version: ${{ inputs.python-version }} 22 | -------------------------------------------------------------------------------- /.github/workflows/_lint.yml: -------------------------------------------------------------------------------- 1 | name: lint 2 | 3 | on: 4 | workflow_call: 5 | inputs: 6 | working-directory: 7 | required: true 8 | type: string 9 | description: "From which folder this pipeline executes" 10 | python-version: 11 | required: true 12 | type: string 13 | description: "Python version to use" 14 | 15 | env: 16 | WORKDIR: ${{ inputs.working-directory == '' && '.' || inputs.working-directory }} 17 | 18 | # This env var allows us to get inline annotations when ruff has complaints. 
19 | RUFF_OUTPUT_FORMAT: github 20 | 21 | UV_FROZEN: "true" 22 | 23 | jobs: 24 | build: 25 | name: "make lint #${{ inputs.python-version }}" 26 | runs-on: ubuntu-latest 27 | timeout-minutes: 20 28 | steps: 29 | - uses: actions/checkout@v4 30 | 31 | - name: Set up Python ${{ inputs.python-version }} + uv 32 | uses: "./.github/actions/uv_setup" 33 | with: 34 | python-version: ${{ inputs.python-version }} 35 | 36 | - name: Install dependencies 37 | working-directory: ${{ inputs.working-directory }} 38 | run: | 39 | uv sync --group test 40 | 41 | - name: Analysing the code with our lint 42 | working-directory: ${{ inputs.working-directory }} 43 | run: | 44 | make lint 45 | -------------------------------------------------------------------------------- /.github/workflows/_test.yml: -------------------------------------------------------------------------------- 1 | name: test 2 | 3 | on: 4 | workflow_call: 5 | inputs: 6 | working-directory: 7 | required: true 8 | type: string 9 | description: "From which folder this pipeline executes" 10 | python-version: 11 | required: true 12 | type: string 13 | description: "Python version to use" 14 | 15 | env: 16 | UV_FROZEN: "true" 17 | UV_NO_SYNC: "true" 18 | 19 | jobs: 20 | build: 21 | defaults: 22 | run: 23 | working-directory: ${{ inputs.working-directory }} 24 | runs-on: ubuntu-latest 25 | timeout-minutes: 20 26 | name: "make test #${{ inputs.python-version }}" 27 | steps: 28 | - uses: actions/checkout@v4 29 | 30 | - name: Set up Python ${{ inputs.python-version }} + uv 31 | uses: "./.github/actions/uv_setup" 32 | id: setup-python 33 | with: 34 | python-version: ${{ inputs.python-version }} 35 | - name: Install dependencies 36 | shell: bash 37 | run: uv sync --group test 38 | 39 | - name: Run core tests 40 | shell: bash 41 | run: | 42 | make test 43 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Run CI Tests 3 | 4 | on: 5 | push: 6 | branches: [ main ] 7 | pull_request: 8 | workflow_dispatch: # Allows to trigger the workflow manually in GitHub UI 9 | 10 | # If another push to the same PR or branch happens while this workflow is still running, 11 | # cancel the earlier run in favor of the next run. 12 | # 13 | # There's no point in testing an outdated version of the code. GitHub only allows 14 | # a limited number of job runners to be active at the same time, so it's better to cancel 15 | # pointless jobs early so that more useful jobs can run sooner. 16 | concurrency: 17 | group: ${{ github.workflow }}-${{ github.ref }} 18 | cancel-in-progress: true 19 | 20 | jobs: 21 | lint: 22 | strategy: 23 | matrix: 24 | # Only lint on the min and max supported Python versions. 25 | # It's extremely unlikely that there's a lint issue on any version in between 26 | # that doesn't show up on the min or max versions. 27 | # 28 | # GitHub rate-limits how many jobs can be running at any one time. 29 | # Starting new jobs is also relatively slow, 30 | # so linting on fewer versions makes CI faster. 31 | python-version: 32 | - "3.12" 33 | uses: 34 | ./.github/workflows/_lint.yml 35 | with: 36 | working-directory: . 37 | python-version: ${{ matrix.python-version }} 38 | secrets: inherit 39 | test: 40 | strategy: 41 | matrix: 42 | # Only lint on the min and max supported Python versions. 
43 | # It's extremely unlikely that there's a lint issue on any version in between 44 | # that doesn't show up on the min or max versions. 45 | # 46 | # GitHub rate-limits how many jobs can be running at any one time. 47 | # Starting new jobs is also relatively slow, 48 | # so linting on fewer versions makes CI faster. 49 | python-version: 50 | - "3.10" 51 | - "3.12" 52 | uses: 53 | ./.github/workflows/_test.yml 54 | with: 55 | working-directory: . 56 | python-version: ${{ matrix.python-version }} 57 | secrets: inherit 58 | 59 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: release 2 | run-name: Release ${{ inputs.working-directory }} by @${{ github.actor }} 3 | on: 4 | workflow_call: 5 | inputs: 6 | working-directory: 7 | required: true 8 | type: string 9 | description: "From which folder this pipeline executes" 10 | workflow_dispatch: 11 | inputs: 12 | working-directory: 13 | description: "From which folder this pipeline executes" 14 | default: "." 15 | dangerous-nonmain-release: 16 | required: false 17 | type: boolean 18 | default: false 19 | description: "Release from a non-main branch (danger!)" 20 | 21 | env: 22 | PYTHON_VERSION: "3.11" 23 | UV_FROZEN: "true" 24 | UV_NO_SYNC: "true" 25 | 26 | jobs: 27 | build: 28 | if: github.ref == 'refs/heads/main' || inputs.dangerous-nonmain-release 29 | environment: Scheduled testing 30 | runs-on: ubuntu-latest 31 | 32 | outputs: 33 | pkg-name: ${{ steps.check-version.outputs.pkg-name }} 34 | version: ${{ steps.check-version.outputs.version }} 35 | 36 | steps: 37 | - uses: actions/checkout@v4 38 | 39 | - name: Set up Python + uv 40 | uses: "./.github/actions/uv_setup" 41 | with: 42 | python-version: ${{ env.PYTHON_VERSION }} 43 | 44 | # We want to keep this build stage *separate* from the release stage, 45 | # so that there's no sharing of permissions between them. 46 | # The release stage has trusted publishing and GitHub repo contents write access, 47 | # and we want to keep the scope of that access limited just to the release job. 48 | # Otherwise, a malicious `build` step (e.g. via a compromised dependency) 49 | # could get access to our GitHub or PyPI credentials. 50 | # 51 | # Per the trusted publishing GitHub Action: 52 | # > It is strongly advised to separate jobs for building [...] 53 | # > from the publish job. 
54 | # https://github.com/pypa/gh-action-pypi-publish#non-goals 55 | - name: Build project for distribution 56 | run: uv build 57 | - name: Upload build 58 | uses: actions/upload-artifact@v4 59 | with: 60 | name: dist 61 | path: ${{ inputs.working-directory }}/dist/ 62 | 63 | - name: Check Version 64 | id: check-version 65 | shell: python 66 | working-directory: ${{ inputs.working-directory }} 67 | run: | 68 | import os 69 | import tomllib 70 | with open("pyproject.toml", "rb") as f: 71 | data = tomllib.load(f) 72 | pkg_name = data["project"]["name"] 73 | version = data["project"]["version"] 74 | with open(os.environ["GITHUB_OUTPUT"], "a") as f: 75 | f.write(f"pkg-name={pkg_name}\n") 76 | f.write(f"version={version}\n") 77 | publish: 78 | needs: 79 | - build 80 | runs-on: ubuntu-latest 81 | permissions: 82 | # This permission is used for trusted publishing: 83 | # https://blog.pypi.org/posts/2023-04-20-introducing-trusted-publishers/ 84 | # 85 | # Trusted publishing has to also be configured on PyPI for each package: 86 | # https://docs.pypi.org/trusted-publishers/adding-a-publisher/ 87 | id-token: write 88 | 89 | defaults: 90 | run: 91 | working-directory: ${{ inputs.working-directory }} 92 | 93 | steps: 94 | - uses: actions/checkout@v4 95 | 96 | - name: Set up Python + uv 97 | uses: "./.github/actions/uv_setup" 98 | with: 99 | python-version: ${{ env.PYTHON_VERSION }} 100 | 101 | - uses: actions/download-artifact@v4 102 | with: 103 | name: dist 104 | path: ${{ inputs.working-directory }}/dist/ 105 | 106 | - name: Publish package distributions to PyPI 107 | uses: pypa/gh-action-pypi-publish@release/v1 108 | with: 109 | packages-dir: ${{ inputs.working-directory }}/dist/ 110 | verbose: true 111 | print-hash: true 112 | # Temp workaround since attestations are on by default as of gh-action-pypi-publish v1.11.0 113 | attestations: false 114 | 115 | mark-release: 116 | needs: 117 | - build 118 | - publish 119 | runs-on: ubuntu-latest 120 | permissions: 121 | # This permission is needed by `ncipollo/release-action` to 122 | # create the GitHub release. 
123 | contents: write 124 | 125 | defaults: 126 | run: 127 | working-directory: ${{ inputs.working-directory }} 128 | 129 | steps: 130 | - uses: actions/checkout@v4 131 | 132 | - name: Set up Python + uv 133 | uses: "./.github/actions/uv_setup" 134 | with: 135 | python-version: ${{ env.PYTHON_VERSION }} 136 | 137 | - uses: actions/download-artifact@v4 138 | with: 139 | name: dist 140 | path: ${{ inputs.working-directory }}/dist/ 141 | 142 | - name: Create Tag 143 | uses: ncipollo/release-action@v1 144 | with: 145 | artifacts: "dist/*" 146 | token: ${{ secrets.GITHUB_TOKEN }} 147 | generateReleaseNotes: true 148 | tag: ${{needs.build.outputs.pkg-name}}==${{ needs.build.outputs.version }} 149 | body: ${{ needs.release-notes.outputs.release-body }} 150 | commit: main 151 | makeLatest: true -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Pyenv 2 | .python-version 3 | 4 | # Byte-compiled / optimized / DLL files 5 | __pycache__/ 6 | *.py[cod] 7 | *$py.class 8 | 9 | # C extensions 10 | *.so 11 | 12 | # Distribution / packaging 13 | .Python 14 | build/ 15 | develop-eggs/ 16 | dist/ 17 | downloads/ 18 | eggs/ 19 | .eggs/ 20 | lib/ 21 | lib64/ 22 | parts/ 23 | sdist/ 24 | var/ 25 | wheels/ 26 | share/python-wheels/ 27 | *.egg-info/ 28 | .installed.cfg 29 | *.egg 30 | MANIFEST 31 | 32 | # Environments 33 | .venv 34 | .env 35 | 36 | # mypy 37 | .mypy_cache/ 38 | .dmypy.json 39 | dmypy.json -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2025 LangChain, Inc. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: all lint format test help 2 | 3 | # Default target executed when no arguments are given to make. 4 | all: help 5 | 6 | ###################### 7 | # TESTING AND COVERAGE 8 | ###################### 9 | 10 | # Define a variable for the test file path. 11 | TEST_FILE ?= tests/ 12 | 13 | test: 14 | uv run pytest --disable-socket --allow-unix-socket $(TEST_FILE) 15 | 16 | test_watch: 17 | uv run ptw . 
-- $(TEST_FILE) 18 | 19 | 20 | ###################### 21 | # LINTING AND FORMATTING 22 | ###################### 23 | 24 | # Define a variable for Python and notebook files. 25 | lint format: PYTHON_FILES=langchain_mcp_adapters/ tests/ 26 | lint_diff format_diff: PYTHON_FILES=$(shell git diff --relative=. --name-only --diff-filter=d main | grep -E '\.py$$|\.ipynb$$') 27 | 28 | lint lint_diff: 29 | [ "$(PYTHON_FILES)" = "" ] || uv run ruff format $(PYTHON_FILES) --diff 30 | [ "$(PYTHON_FILES)" = "" ] || uv run ruff check $(PYTHON_FILES) --diff 31 | # [ "$(PYTHON_FILES)" = "" ] || uv run mypy $(PYTHON_FILES) 32 | 33 | format format_diff: 34 | [ "$(PYTHON_FILES)" = "" ] || uv run ruff check --fix $(PYTHON_FILES) 35 | [ "$(PYTHON_FILES)" = "" ] || uv run ruff format $(PYTHON_FILES) 36 | 37 | 38 | 39 | ###################### 40 | # HELP 41 | ###################### 42 | 43 | help: 44 | @echo '====================' 45 | @echo '-- LINTING --' 46 | @echo 'format - run code formatters' 47 | @echo 'lint - run linters' 48 | @echo '-- TESTS --' 49 | @echo 'test - run unit tests' 50 | @echo 'test TEST_FILE= - run all tests in file' 51 | @echo '-- DOCUMENTATION tasks are from the top-level Makefile --' 52 | 53 | 54 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # LangChain MCP Adapters 2 | 3 | This library provides a lightweight wrapper that makes [Anthropic Model Context Protocol (MCP)](https://modelcontextprotocol.io/introduction) tools compatible with [LangChain](https://github.com/langchain-ai/langchain) and [LangGraph](https://github.com/langchain-ai/langgraph). 4 | 5 | ![MCP](static/img/mcp.png) 6 | 7 | ## Features 8 | 9 | - 🛠️ Convert MCP tools into [LangChain tools](https://python.langchain.com/docs/concepts/tools/) that can be used with [LangGraph](https://github.com/langchain-ai/langgraph) agents 10 | - 📦 A client implementation that allows you to connect to multiple MCP servers and load tools from them 11 | 12 | ## Installation 13 | 14 | ```bash 15 | pip install langchain-mcp-adapters 16 | ``` 17 | 18 | ## Quickstart 19 | 20 | Here is a simple example of using the MCP tools with a LangGraph agent. 21 | 22 | ```bash 23 | pip install langchain-mcp-adapters langgraph "langchain[openai]" 24 | 25 | export OPENAI_API_KEY= 26 | ``` 27 | 28 | ### Server 29 | 30 | First, let's create an MCP server that can add and multiply numbers.
31 | 32 | ```python 33 | # math_server.py 34 | from mcp.server.fastmcp import FastMCP 35 | 36 | mcp = FastMCP("Math") 37 | 38 | @mcp.tool() 39 | def add(a: int, b: int) -> int: 40 | """Add two numbers""" 41 | return a + b 42 | 43 | @mcp.tool() 44 | def multiply(a: int, b: int) -> int: 45 | """Multiply two numbers""" 46 | return a * b 47 | 48 | if __name__ == "__main__": 49 | mcp.run(transport="stdio") 50 | ``` 51 | 52 | ### Client 53 | 54 | ```python 55 | # Create server parameters for stdio connection 56 | from mcp import ClientSession, StdioServerParameters 57 | from mcp.client.stdio import stdio_client 58 | 59 | from langchain_mcp_adapters.tools import load_mcp_tools 60 | from langgraph.prebuilt import create_react_agent 61 | 62 | server_params = StdioServerParameters( 63 | command="python", 64 | # Make sure to update to the full absolute path to your math_server.py file 65 | args=["/path/to/math_server.py"], 66 | ) 67 | 68 | async with stdio_client(server_params) as (read, write): 69 | async with ClientSession(read, write) as session: 70 | # Initialize the connection 71 | await session.initialize() 72 | 73 | # Get tools 74 | tools = await load_mcp_tools(session) 75 | 76 | # Create and run the agent 77 | agent = create_react_agent("openai:gpt-4.1", tools) 78 | agent_response = await agent.ainvoke({"messages": "what's (3 + 5) x 12?"}) 79 | ``` 80 | 81 | ## Multiple MCP Servers 82 | 83 | The library also allows you to connect to multiple MCP servers and load tools from them: 84 | 85 | ### Server 86 | 87 | ```python 88 | # math_server.py 89 | ... 90 | 91 | # weather_server.py 92 | from typing import List 93 | from mcp.server.fastmcp import FastMCP 94 | 95 | mcp = FastMCP("Weather") 96 | 97 | @mcp.tool() 98 | async def get_weather(location: str) -> str: 99 | """Get weather for location.""" 100 | return "It's always sunny in New York" 101 | 102 | if __name__ == "__main__": 103 | mcp.run(transport="streamable-http") 104 | ``` 105 | 106 | ```bash 107 | python weather_server.py 108 | ``` 109 | 110 | ### Client 111 | 112 | ```python 113 | from langchain_mcp_adapters.client import MultiServerMCPClient 114 | from langgraph.prebuilt import create_react_agent 115 | 116 | client = MultiServerMCPClient( 117 | { 118 | "math": { 119 | "command": "python", 120 | # Make sure to update to the full absolute path to your math_server.py file 121 | "args": ["/path/to/math_server.py"], 122 | "transport": "stdio", 123 | }, 124 | "weather": { 125 | # make sure you start your weather server on port 8000 126 | "url": "http://localhost:8000/mcp", 127 | "transport": "streamable_http", 128 | } 129 | } 130 | ) 131 | tools = await client.get_tools() 132 | agent = create_react_agent("openai:gpt-4.1", tools) 133 | math_response = await agent.ainvoke({"messages": "what's (3 + 5) x 12?"}) 134 | weather_response = await agent.ainvoke({"messages": "what is the weather in nyc?"}) 135 | ``` 136 | 137 | > [!note] 138 | > Example above will start a new MCP `ClientSession` for each tool invocation. If you would like to explicitly start a session for a given server, you can do: 139 | > 140 | > ```python 141 | > from langchain_mcp_adapters.tools import load_mcp_tools 142 | > 143 | > client = MultiServerMCPClient({...}) 144 | > async with client.session("math") as session: 145 | > tools = await load_mcp_tools(session) 146 | > ``` 147 | 148 | ## Streamable HTTP 149 | 150 | MCP now supports [streamable HTTP](https://modelcontextprotocol.io/specification/2025-03-26/basic/transports#streamable-http) transport. 
151 | 152 | To start an [example](examples/servers/streamable-http-stateless/) streamable HTTP server, run the following: 153 | 154 | ```bash 155 | cd examples/servers/streamable-http-stateless/ 156 | uv run mcp-simple-streamablehttp-stateless --port 3000 157 | ``` 158 | 159 | Alternatively, you can use FastMCP directly (as in the examples above). 160 | 161 | To use it with Python MCP SDK `streamablehttp_client`: 162 | 163 | ```python 164 | # Use server from examples/servers/streamable-http-stateless/ 165 | 166 | from mcp import ClientSession 167 | from mcp.client.streamable_http import streamablehttp_client 168 | 169 | from langgraph.prebuilt import create_react_agent 170 | from langchain_mcp_adapters.tools import load_mcp_tools 171 | 172 | async with streamablehttp_client("http://localhost:3000/mcp") as (read, write, _): 173 | async with ClientSession(read, write) as session: 174 | # Initialize the connection 175 | await session.initialize() 176 | 177 | # Get tools 178 | tools = await load_mcp_tools(session) 179 | agent = create_react_agent("openai:gpt-4.1", tools) 180 | math_response = await agent.ainvoke({"messages": "what's (3 + 5) x 12?"}) 181 | ``` 182 | 183 | Use it with `MultiServerMCPClient`: 184 | 185 | ```python 186 | # Use server from examples/servers/streamable-http-stateless/ 187 | from langchain_mcp_adapters.client import MultiServerMCPClient 188 | from langgraph.prebuilt import create_react_agent 189 | 190 | client = MultiServerMCPClient( 191 | { 192 | "math": { 193 | "transport": "streamable_http", 194 | "url": "http://localhost:3000/mcp" 195 | }, 196 | } 197 | ) 198 | tools = await client.get_tools() 199 | agent = create_react_agent("openai:gpt-4.1", tools) 200 | math_response = await agent.ainvoke({"messages": "what's (3 + 5) x 12?"}) 201 | ``` 202 | 203 | ## Using with LangGraph StateGraph 204 | 205 | ```python 206 | from langchain_mcp_adapters.client import MultiServerMCPClient 207 | from langgraph.graph import StateGraph, MessagesState, START 208 | from langgraph.prebuilt import ToolNode, tools_condition 209 | 210 | from langchain.chat_models import init_chat_model 211 | model = init_chat_model("openai:gpt-4.1") 212 | 213 | client = MultiServerMCPClient( 214 | { 215 | "math": { 216 | "command": "python", 217 | # Make sure to update to the full absolute path to your math_server.py file 218 | "args": ["./examples/math_server.py"], 219 | "transport": "stdio", 220 | }, 221 | "weather": { 222 | # make sure you start your weather server on port 8000 223 | "url": "http://localhost:8000/mcp", 224 | "transport": "streamable_http", 225 | } 226 | } 227 | ) 228 | tools = await client.get_tools() 229 | 230 | def call_model(state: MessagesState): 231 | response = model.bind_tools(tools).invoke(state["messages"]) 232 | return {"messages": response} 233 | 234 | builder = StateGraph(MessagesState) 235 | builder.add_node(call_model) 236 | builder.add_node(ToolNode(tools)) 237 | builder.add_edge(START, "call_model") 238 | builder.add_conditional_edges( 239 | "call_model", 240 | tools_condition, 241 | ) 242 | builder.add_edge("tools", "call_model") 243 | graph = builder.compile() 244 | math_response = await graph.ainvoke({"messages": "what's (3 + 5) x 12?"}) 245 | weather_response = await graph.ainvoke({"messages": "what is the weather in nyc?"}) 246 | ``` 247 | 248 | ## Using with LangGraph API Server 249 | 250 | > [!TIP] 251 | > Check out [this guide](https://langchain-ai.github.io/langgraph/tutorials/langgraph-platform/local-server/) on getting started with LangGraph API server. 
252 | 253 | If you want to run a LangGraph agent that uses MCP tools in a LangGraph API server, you can use the following setup: 254 | 255 | ```python 256 | # graph.py 257 | from contextlib import asynccontextmanager 258 | from langchain_mcp_adapters.client import MultiServerMCPClient 259 | from langgraph.prebuilt import create_react_agent 260 | 261 | async def make_graph(): 262 | client = MultiServerMCPClient( 263 | { 264 | "math": { 265 | "command": "python", 266 | # Make sure to update to the full absolute path to your math_server.py file 267 | "args": ["/path/to/math_server.py"], 268 | "transport": "stdio", 269 | }, 270 | "weather": { 271 | # make sure you start your weather server on port 8000 272 | "url": "http://localhost:8000/mcp", 273 | "transport": "streamable_http", 274 | } 275 | } 276 | ) 277 | tools = await client.get_tools() 278 | agent = create_react_agent("openai:gpt-4.1", tools) 279 | return agent 280 | ``` 281 | 282 | In your [`langgraph.json`](https://langchain-ai.github.io/langgraph/cloud/reference/cli/#configuration-file) make sure to specify `make_graph` as your graph entrypoint: 283 | 284 | ```json 285 | { 286 | "dependencies": ["."], 287 | "graphs": { 288 | "agent": "./graph.py:make_graph" 289 | } 290 | } 291 | ``` 292 | 293 | ## Add LangChain tools to a FastMCP server 294 | 295 | Use `to_fastmcp` to convert LangChain tools to FastMCP, and then add them to the `FastMCP` server via the initializer: 296 | 297 | > [!NOTE] 298 | > `tools` argument is only available in FastMCP as of `mcp >= 1.9.1` 299 | 300 | ```python 301 | from langchain_core.tools import tool 302 | from langchain_mcp_adapters.tools import to_fastmcp 303 | from mcp.server.fastmcp import FastMCP 304 | 305 | 306 | @tool 307 | def add(a: int, b: int) -> int: 308 | """Add two numbers""" 309 | return a + b 310 | 311 | 312 | fastmcp_tool = to_fastmcp(add) 313 | 314 | mcp = FastMCP("Math", tools=[fastmcp_tool]) 315 | mcp.run(transport="stdio") 316 | ``` 317 | -------------------------------------------------------------------------------- /examples/servers/streamable-http-stateless/README.md: -------------------------------------------------------------------------------- 1 | # MCP Simple StreamableHttp Stateless Server Example 2 | 3 | > Adapted from the [official Python MCP SDK example](https://github.com/modelcontextprotocol/python-sdk/tree/main/examples/servers/simple-streamablehttp-stateless) 4 | 5 | A stateless MCP server example demonstrating the StreamableHttp transport without maintaining session state. This example is ideal for understanding how to deploy MCP servers in multi-node environments where requests can be routed to any instance. 
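For a quick end-to-end check, here is a minimal client sketch that mirrors the streamable HTTP examples in the main repository README. It assumes the server is already running locally on the default port 3000 (see Usage below); the `main` wrapper is illustrative and not part of this package.

```python
import asyncio

from mcp import ClientSession
from mcp.client.streamable_http import streamablehttp_client

from langchain_mcp_adapters.tools import load_mcp_tools


async def main() -> None:
    # Each request to this stateless server gets a fresh, ephemeral session
    async with streamablehttp_client("http://localhost:3000/mcp") as (read, write, _):
        async with ClientSession(read, write) as session:
            # Initialize the MCP connection
            await session.initialize()

            # Convert the server's MCP tools into LangChain tools
            tools = await load_mcp_tools(session)
            print([tool.name for tool in tools])  # expected: ['add', 'multiply']


if __name__ == "__main__":
    asyncio.run(main())
```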
6 | 7 | ## Features 8 | 9 | - Uses the StreamableHTTP transport in stateless mode (mcp_session_id=None) 10 | - Each request creates a new ephemeral connection 11 | - No session state maintained between requests 12 | - Task lifecycle scoped to individual requests 13 | - Suitable for deployment in multi-node environments 14 | 15 | 16 | ## Usage 17 | 18 | Start the server: 19 | 20 | ```bash 21 | # Using default port 3000 22 | uv run mcp-simple-streamablehttp-stateless 23 | 24 | # Using custom port 25 | uv run mcp-simple-streamablehttp-stateless --port 3000 26 | 27 | # Custom logging level 28 | uv run mcp-simple-streamablehttp-stateless --log-level DEBUG 29 | 30 | # Enable JSON responses instead of SSE streams 31 | uv run mcp-simple-streamablehttp-stateless --json-response 32 | ``` 33 | 34 | The server exposes two tools, `add` and `multiply`. Each accepts two arguments: 35 | 36 | - `a`: The first number (e.g., 3) 37 | - `b`: The second number (e.g., 5) 38 | 39 | Both tools return the result as plain text content. 40 | 41 | ## Client 42 | 43 | You can connect to this server with any MCP client that supports streamable HTTP, such as the Python MCP SDK's `streamablehttp_client` or this repository's `MultiServerMCPClient` (see the sketch in the introduction above, and the main repository README), or you can use [Inspector](https://github.com/modelcontextprotocol/inspector) for testing. -------------------------------------------------------------------------------- /examples/servers/streamable-http-stateless/mcp_simple_streamablehttp_stateless/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/langchain-ai/langchain-mcp-adapters/0f825d7876b961371d7dd985a5914f628d655f41/examples/servers/streamable-http-stateless/mcp_simple_streamablehttp_stateless/__init__.py -------------------------------------------------------------------------------- /examples/servers/streamable-http-stateless/mcp_simple_streamablehttp_stateless/__main__.py: -------------------------------------------------------------------------------- 1 | from .server import main 2 | 3 | if __name__ == "__main__": 4 | main() 5 | -------------------------------------------------------------------------------- /examples/servers/streamable-http-stateless/mcp_simple_streamablehttp_stateless/server.py: -------------------------------------------------------------------------------- 1 | import contextlib 2 | import logging 3 | from collections.abc import AsyncIterator 4 | 5 | import anyio 6 | import click 7 | import mcp.types as types 8 | from mcp.server.lowlevel import Server 9 | from mcp.server.streamable_http_manager import StreamableHTTPSessionManager 10 | from starlette.applications import Starlette 11 | from starlette.routing import Mount 12 | from starlette.types import Receive, Scope, Send 13 | 14 | logger = logging.getLogger(__name__) 15 | 16 | 17 | @click.command() 18 | @click.option("--port", default=3000, help="Port to listen on for HTTP") 19 | @click.option( 20 | "--log-level", 21 | default="INFO", 22 | help="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", 23 | ) 24 | @click.option( 25 | "--json-response", 26 | is_flag=True, 27 | default=False, 28 | help="Enable JSON responses instead of SSE streams", 29 | ) 30 | def main( 31 | port: int, 32 | log_level: str, 33 | json_response: bool, 34 | ) -> int: 35 | # Configure logging 36 | logging.basicConfig( 37 | level=getattr(logging, log_level.upper()), 38 | format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", 39 | ) 40 | 41 | app = Server("mcp-streamable-http-stateless-demo")
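    # The decorated handlers below register this demo's two tools:
    # `call_tool` dispatches "add" / "multiply" requests to plain Python arithmetic,
    # and `list_tools` advertises their JSON Schemas to connecting clients.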
42 | 43 | @app.call_tool() 44 | async def call_tool( 45 | name: str, arguments: dict 46 | ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: 47 | if name == "add": 48 | return [ 49 | types.TextContent( 50 | type="text", 51 | text=str(arguments["a"] + arguments["b"]) 52 | ) 53 | ] 54 | elif name == "multiply": 55 | return [ 56 | types.TextContent( 57 | type="text", 58 | text=str(arguments["a"] * arguments["b"]) 59 | ) 60 | ] 61 | else: 62 | raise ValueError(f"Tool {name} not found") 63 | 64 | @app.list_tools() 65 | async def list_tools() -> list[types.Tool]: 66 | return [ 67 | types.Tool( 68 | name="add", 69 | description="Adds two numbers", 70 | inputSchema={ 71 | "type": "object", 72 | "required": ["a", "b"], 73 | "properties": { 74 | "a": { 75 | "type": "number", 76 | "description": "First number to add", 77 | }, 78 | "b": { 79 | "type": "number", 80 | "description": "Second number to add", 81 | }, 82 | }, 83 | }, 84 | ), 85 | types.Tool( 86 | name="multiply", 87 | description="Multiplies two numbers", 88 | inputSchema={ 89 | "type": "object", 90 | "required": ["a", "b"], 91 | "properties": { 92 | "a": { 93 | "type": "number", 94 | "description": "First number to multiply", 95 | }, 96 | "b": { 97 | "type": "number", 98 | "description": "Second number to multiply", 99 | }, 100 | }, 101 | }, 102 | ) 103 | ] 104 | 105 | # Create the session manager with true stateless mode 106 | session_manager = StreamableHTTPSessionManager( 107 | app=app, 108 | event_store=None, 109 | json_response=json_response, 110 | stateless=True, 111 | ) 112 | 113 | async def handle_streamable_http( 114 | scope: Scope, receive: Receive, send: Send 115 | ) -> None: 116 | await session_manager.handle_request(scope, receive, send) 117 | 118 | @contextlib.asynccontextmanager 119 | async def lifespan(app: Starlette) -> AsyncIterator[None]: 120 | """Context manager for session manager.""" 121 | async with session_manager.run(): 122 | logger.info("Application started with StreamableHTTP session manager!") 123 | try: 124 | yield 125 | finally: 126 | logger.info("Application shutting down...") 127 | 128 | # Create an ASGI application using the transport 129 | starlette_app = Starlette( 130 | debug=True, 131 | routes=[ 132 | Mount("/mcp", app=handle_streamable_http), 133 | ], 134 | lifespan=lifespan, 135 | ) 136 | 137 | import uvicorn 138 | 139 | uvicorn.run(starlette_app, host="0.0.0.0", port=port) 140 | 141 | return 0 142 | -------------------------------------------------------------------------------- /examples/servers/streamable-http-stateless/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "mcp-simple-streamablehttp-stateless" 3 | version = "0.1.0" 4 | description = "A simple MCP server exposing a StreamableHttp transport in stateless mode" 5 | readme = "README.md" 6 | requires-python = ">=3.10" 7 | authors = [{ name = "Anthropic, PBC." 
}] 8 | keywords = ["mcp", "llm", "automation", "web", "fetch", "http", "streamable", "stateless"] 9 | license = { text = "MIT" } 10 | dependencies = ["anyio>=4.5", "click>=8.1.0", "httpx>=0.27", "mcp", "starlette", "uvicorn"] 11 | 12 | [project.scripts] 13 | mcp-simple-streamablehttp-stateless = "mcp_simple_streamablehttp_stateless.server:main" 14 | 15 | [build-system] 16 | requires = ["hatchling"] 17 | build-backend = "hatchling.build" 18 | 19 | [tool.hatch.build.targets.wheel] 20 | packages = ["mcp_simple_streamablehttp_stateless"] 21 | 22 | [tool.pyright] 23 | include = ["mcp_simple_streamablehttp_stateless"] 24 | venvPath = "." 25 | venv = ".venv" 26 | 27 | [tool.ruff.lint] 28 | select = ["E", "F", "I"] 29 | ignore = [] 30 | 31 | [tool.ruff] 32 | line-length = 88 33 | target-version = "py310" 34 | 35 | [tool.uv] 36 | dev-dependencies = ["pyright>=1.1.378", "pytest>=8.3.3", "ruff>=0.6.9"] -------------------------------------------------------------------------------- /examples/servers/streamable-http-stateless/uv.lock: -------------------------------------------------------------------------------- 1 | version = 1 2 | revision = 1 3 | requires-python = ">=3.10" 4 | 5 | [[package]] 6 | name = "annotated-types" 7 | version = "0.7.0" 8 | source = { registry = "https://pypi.org/simple" } 9 | sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } 10 | wheels = [ 11 | { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, 12 | ] 13 | 14 | [[package]] 15 | name = "anyio" 16 | version = "4.9.0" 17 | source = { registry = "https://pypi.org/simple" } 18 | dependencies = [ 19 | { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, 20 | { name = "idna" }, 21 | { name = "sniffio" }, 22 | { name = "typing-extensions", marker = "python_full_version < '3.13'" }, 23 | ] 24 | sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949 } 25 | wheels = [ 26 | { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916 }, 27 | ] 28 | 29 | [[package]] 30 | name = "certifi" 31 | version = "2025.4.26" 32 | source = { registry = "https://pypi.org/simple" } 33 | sdist = { url = "https://files.pythonhosted.org/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", size = 160705 } 34 | wheels = [ 35 | { url = "https://files.pythonhosted.org/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3", size = 159618 }, 36 | ] 37 | 38 | [[package]] 39 | name = "click" 40 | version = "8.1.8" 41 | source = { registry = "https://pypi.org/simple" } 42 | dependencies = [ 43 | { name = "colorama", 
marker = "sys_platform == 'win32'" }, 44 | ] 45 | sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 } 46 | wheels = [ 47 | { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188 }, 48 | ] 49 | 50 | [[package]] 51 | name = "colorama" 52 | version = "0.4.6" 53 | source = { registry = "https://pypi.org/simple" } 54 | sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } 55 | wheels = [ 56 | { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, 57 | ] 58 | 59 | [[package]] 60 | name = "exceptiongroup" 61 | version = "1.2.2" 62 | source = { registry = "https://pypi.org/simple" } 63 | sdist = { url = "https://files.pythonhosted.org/packages/09/35/2495c4ac46b980e4ca1f6ad6db102322ef3ad2410b79fdde159a4b0f3b92/exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc", size = 28883 } 64 | wheels = [ 65 | { url = "https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", size = 16453 }, 66 | ] 67 | 68 | [[package]] 69 | name = "h11" 70 | version = "0.16.0" 71 | source = { registry = "https://pypi.org/simple" } 72 | sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250 } 73 | wheels = [ 74 | { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515 }, 75 | ] 76 | 77 | [[package]] 78 | name = "httpcore" 79 | version = "1.0.9" 80 | source = { registry = "https://pypi.org/simple" } 81 | dependencies = [ 82 | { name = "certifi" }, 83 | { name = "h11" }, 84 | ] 85 | sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484 } 86 | wheels = [ 87 | { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784 }, 88 | ] 89 | 90 | [[package]] 91 | name = "httpx" 92 | version = "0.28.1" 93 | source = { registry = "https://pypi.org/simple" } 94 | dependencies = [ 95 | { name = "anyio" }, 96 | { name = "certifi" }, 97 | { name = "httpcore" }, 98 | { name = "idna" }, 99 | ] 100 | sdist = { url = 
"https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 } 101 | wheels = [ 102 | { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 }, 103 | ] 104 | 105 | [[package]] 106 | name = "httpx-sse" 107 | version = "0.4.0" 108 | source = { registry = "https://pypi.org/simple" } 109 | sdist = { url = "https://files.pythonhosted.org/packages/4c/60/8f4281fa9bbf3c8034fd54c0e7412e66edbab6bc74c4996bd616f8d0406e/httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721", size = 12624 } 110 | wheels = [ 111 | { url = "https://files.pythonhosted.org/packages/e1/9b/a181f281f65d776426002f330c31849b86b31fc9d848db62e16f03ff739f/httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f", size = 7819 }, 112 | ] 113 | 114 | [[package]] 115 | name = "idna" 116 | version = "3.10" 117 | source = { registry = "https://pypi.org/simple" } 118 | sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } 119 | wheels = [ 120 | { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, 121 | ] 122 | 123 | [[package]] 124 | name = "iniconfig" 125 | version = "2.1.0" 126 | source = { registry = "https://pypi.org/simple" } 127 | sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793 } 128 | wheels = [ 129 | { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050 }, 130 | ] 131 | 132 | [[package]] 133 | name = "mcp" 134 | version = "1.8.0" 135 | source = { registry = "https://pypi.org/simple" } 136 | dependencies = [ 137 | { name = "anyio" }, 138 | { name = "httpx" }, 139 | { name = "httpx-sse" }, 140 | { name = "pydantic" }, 141 | { name = "pydantic-settings" }, 142 | { name = "python-multipart" }, 143 | { name = "sse-starlette" }, 144 | { name = "starlette" }, 145 | { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, 146 | ] 147 | sdist = { url = "https://files.pythonhosted.org/packages/ff/97/0a3e08559557b0ac5799f9fb535fbe5a4e4dcdd66ce9d32e7a74b4d0534d/mcp-1.8.0.tar.gz", hash = "sha256:263dfb700540b726c093f0c3e043f66aded0730d0b51f04eb0a3eb90055fe49b", size = 264641 } 148 | wheels = [ 149 | { url = "https://files.pythonhosted.org/packages/b2/b2/4ac3bd17b1fdd65658f18de4eb0c703517ee0b483dc5f56467802a9197e0/mcp-1.8.0-py3-none-any.whl", hash = "sha256:889d9d3b4f12b7da59e7a3933a0acadae1fce498bfcd220defb590aa291a1334", size = 119544 }, 150 | ] 151 | 152 | [[package]] 153 | name = "mcp-simple-streamablehttp-stateless" 154 | version = 
"0.1.0" 155 | source = { editable = "." } 156 | dependencies = [ 157 | { name = "anyio" }, 158 | { name = "click" }, 159 | { name = "httpx" }, 160 | { name = "mcp" }, 161 | { name = "starlette" }, 162 | { name = "uvicorn" }, 163 | ] 164 | 165 | [package.dev-dependencies] 166 | dev = [ 167 | { name = "pyright" }, 168 | { name = "pytest" }, 169 | { name = "ruff" }, 170 | ] 171 | 172 | [package.metadata] 173 | requires-dist = [ 174 | { name = "anyio", specifier = ">=4.5" }, 175 | { name = "click", specifier = ">=8.1.0" }, 176 | { name = "httpx", specifier = ">=0.27" }, 177 | { name = "mcp" }, 178 | { name = "starlette" }, 179 | { name = "uvicorn" }, 180 | ] 181 | 182 | [package.metadata.requires-dev] 183 | dev = [ 184 | { name = "pyright", specifier = ">=1.1.378" }, 185 | { name = "pytest", specifier = ">=8.3.3" }, 186 | { name = "ruff", specifier = ">=0.6.9" }, 187 | ] 188 | 189 | [[package]] 190 | name = "nodeenv" 191 | version = "1.9.1" 192 | source = { registry = "https://pypi.org/simple" } 193 | sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437 } 194 | wheels = [ 195 | { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 }, 196 | ] 197 | 198 | [[package]] 199 | name = "packaging" 200 | version = "25.0" 201 | source = { registry = "https://pypi.org/simple" } 202 | sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727 } 203 | wheels = [ 204 | { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469 }, 205 | ] 206 | 207 | [[package]] 208 | name = "pluggy" 209 | version = "1.5.0" 210 | source = { registry = "https://pypi.org/simple" } 211 | sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 } 212 | wheels = [ 213 | { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, 214 | ] 215 | 216 | [[package]] 217 | name = "pydantic" 218 | version = "2.11.4" 219 | source = { registry = "https://pypi.org/simple" } 220 | dependencies = [ 221 | { name = "annotated-types" }, 222 | { name = "pydantic-core" }, 223 | { name = "typing-extensions" }, 224 | { name = "typing-inspection" }, 225 | ] 226 | sdist = { url = "https://files.pythonhosted.org/packages/77/ab/5250d56ad03884ab5efd07f734203943c8a8ab40d551e208af81d0257bf2/pydantic-2.11.4.tar.gz", hash = "sha256:32738d19d63a226a52eed76645a98ee07c1f410ee41d93b4afbfa85ed8111c2d", size = 786540 } 227 | wheels = [ 228 | { url = 
"https://files.pythonhosted.org/packages/e7/12/46b65f3534d099349e38ef6ec98b1a5a81f42536d17e0ba382c28c67ba67/pydantic-2.11.4-py3-none-any.whl", hash = "sha256:d9615eaa9ac5a063471da949c8fc16376a84afb5024688b3ff885693506764eb", size = 443900 }, 229 | ] 230 | 231 | [[package]] 232 | name = "pydantic-core" 233 | version = "2.33.2" 234 | source = { registry = "https://pypi.org/simple" } 235 | dependencies = [ 236 | { name = "typing-extensions" }, 237 | ] 238 | sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195 } 239 | wheels = [ 240 | { url = "https://files.pythonhosted.org/packages/e5/92/b31726561b5dae176c2d2c2dc43a9c5bfba5d32f96f8b4c0a600dd492447/pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8", size = 2028817 }, 241 | { url = "https://files.pythonhosted.org/packages/a3/44/3f0b95fafdaca04a483c4e685fe437c6891001bf3ce8b2fded82b9ea3aa1/pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d", size = 1861357 }, 242 | { url = "https://files.pythonhosted.org/packages/30/97/e8f13b55766234caae05372826e8e4b3b96e7b248be3157f53237682e43c/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d", size = 1898011 }, 243 | { url = "https://files.pythonhosted.org/packages/9b/a3/99c48cf7bafc991cc3ee66fd544c0aae8dc907b752f1dad2d79b1b5a471f/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572", size = 1982730 }, 244 | { url = "https://files.pythonhosted.org/packages/de/8e/a5b882ec4307010a840fb8b58bd9bf65d1840c92eae7534c7441709bf54b/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02", size = 2136178 }, 245 | { url = "https://files.pythonhosted.org/packages/e4/bb/71e35fc3ed05af6834e890edb75968e2802fe98778971ab5cba20a162315/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b", size = 2736462 }, 246 | { url = "https://files.pythonhosted.org/packages/31/0d/c8f7593e6bc7066289bbc366f2235701dcbebcd1ff0ef8e64f6f239fb47d/pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2", size = 2005652 }, 247 | { url = "https://files.pythonhosted.org/packages/d2/7a/996d8bd75f3eda405e3dd219ff5ff0a283cd8e34add39d8ef9157e722867/pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a", size = 2113306 }, 248 | { url = "https://files.pythonhosted.org/packages/ff/84/daf2a6fb2db40ffda6578a7e8c5a6e9c8affb251a05c233ae37098118788/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac", size = 2073720 }, 249 | { url = 
"https://files.pythonhosted.org/packages/77/fb/2258da019f4825128445ae79456a5499c032b55849dbd5bed78c95ccf163/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a", size = 2244915 }, 250 | { url = "https://files.pythonhosted.org/packages/d8/7a/925ff73756031289468326e355b6fa8316960d0d65f8b5d6b3a3e7866de7/pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b", size = 2241884 }, 251 | { url = "https://files.pythonhosted.org/packages/0b/b0/249ee6d2646f1cdadcb813805fe76265745c4010cf20a8eba7b0e639d9b2/pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22", size = 1910496 }, 252 | { url = "https://files.pythonhosted.org/packages/66/ff/172ba8f12a42d4b552917aa65d1f2328990d3ccfc01d5b7c943ec084299f/pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640", size = 1955019 }, 253 | { url = "https://files.pythonhosted.org/packages/3f/8d/71db63483d518cbbf290261a1fc2839d17ff89fce7089e08cad07ccfce67/pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7", size = 2028584 }, 254 | { url = "https://files.pythonhosted.org/packages/24/2f/3cfa7244ae292dd850989f328722d2aef313f74ffc471184dc509e1e4e5a/pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246", size = 1855071 }, 255 | { url = "https://files.pythonhosted.org/packages/b3/d3/4ae42d33f5e3f50dd467761304be2fa0a9417fbf09735bc2cce003480f2a/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f", size = 1897823 }, 256 | { url = "https://files.pythonhosted.org/packages/f4/f3/aa5976e8352b7695ff808599794b1fba2a9ae2ee954a3426855935799488/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc", size = 1983792 }, 257 | { url = "https://files.pythonhosted.org/packages/d5/7a/cda9b5a23c552037717f2b2a5257e9b2bfe45e687386df9591eff7b46d28/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de", size = 2136338 }, 258 | { url = "https://files.pythonhosted.org/packages/2b/9f/b8f9ec8dd1417eb9da784e91e1667d58a2a4a7b7b34cf4af765ef663a7e5/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a", size = 2730998 }, 259 | { url = "https://files.pythonhosted.org/packages/47/bc/cd720e078576bdb8255d5032c5d63ee5c0bf4b7173dd955185a1d658c456/pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef", size = 2003200 }, 260 | { url = "https://files.pythonhosted.org/packages/ca/22/3602b895ee2cd29d11a2b349372446ae9727c32e78a94b3d588a40fdf187/pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e", size = 2113890 }, 261 | { url = 
"https://files.pythonhosted.org/packages/ff/e6/e3c5908c03cf00d629eb38393a98fccc38ee0ce8ecce32f69fc7d7b558a7/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d", size = 2073359 }, 262 | { url = "https://files.pythonhosted.org/packages/12/e7/6a36a07c59ebefc8777d1ffdaf5ae71b06b21952582e4b07eba88a421c79/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30", size = 2245883 }, 263 | { url = "https://files.pythonhosted.org/packages/16/3f/59b3187aaa6cc0c1e6616e8045b284de2b6a87b027cce2ffcea073adf1d2/pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf", size = 2241074 }, 264 | { url = "https://files.pythonhosted.org/packages/e0/ed/55532bb88f674d5d8f67ab121a2a13c385df382de2a1677f30ad385f7438/pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51", size = 1910538 }, 265 | { url = "https://files.pythonhosted.org/packages/fe/1b/25b7cccd4519c0b23c2dd636ad39d381abf113085ce4f7bec2b0dc755eb1/pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab", size = 1952909 }, 266 | { url = "https://files.pythonhosted.org/packages/49/a9/d809358e49126438055884c4366a1f6227f0f84f635a9014e2deb9b9de54/pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65", size = 1897786 }, 267 | { url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000 }, 268 | { url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996 }, 269 | { url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957 }, 270 | { url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199 }, 271 | { url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296 }, 272 | { url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109 }, 273 | { url = 
"https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028 }, 274 | { url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044 }, 275 | { url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881 }, 276 | { url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034 }, 277 | { url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187 }, 278 | { url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628 }, 279 | { url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866 }, 280 | { url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894 }, 281 | { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688 }, 282 | { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808 }, 283 | { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580 }, 284 | { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859 }, 285 | { url = 
"https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810 }, 286 | { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498 }, 287 | { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611 }, 288 | { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924 }, 289 | { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196 }, 290 | { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389 }, 291 | { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223 }, 292 | { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473 }, 293 | { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269 }, 294 | { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921 }, 295 | { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162 }, 296 | { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560 }, 297 | { url = 
"https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777 }, 298 | { url = "https://files.pythonhosted.org/packages/30/68/373d55e58b7e83ce371691f6eaa7175e3a24b956c44628eb25d7da007917/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa", size = 2023982 }, 299 | { url = "https://files.pythonhosted.org/packages/a4/16/145f54ac08c96a63d8ed6442f9dec17b2773d19920b627b18d4f10a061ea/pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29", size = 1858412 }, 300 | { url = "https://files.pythonhosted.org/packages/41/b1/c6dc6c3e2de4516c0bb2c46f6a373b91b5660312342a0cf5826e38ad82fa/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d", size = 1892749 }, 301 | { url = "https://files.pythonhosted.org/packages/12/73/8cd57e20afba760b21b742106f9dbdfa6697f1570b189c7457a1af4cd8a0/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e", size = 2067527 }, 302 | { url = "https://files.pythonhosted.org/packages/e3/d5/0bb5d988cc019b3cba4a78f2d4b3854427fc47ee8ec8e9eaabf787da239c/pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c", size = 2108225 }, 303 | { url = "https://files.pythonhosted.org/packages/f1/c5/00c02d1571913d496aabf146106ad8239dc132485ee22efe08085084ff7c/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec", size = 2069490 }, 304 | { url = "https://files.pythonhosted.org/packages/22/a8/dccc38768274d3ed3a59b5d06f59ccb845778687652daa71df0cab4040d7/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052", size = 2237525 }, 305 | { url = "https://files.pythonhosted.org/packages/d4/e7/4f98c0b125dda7cf7ccd14ba936218397b44f50a56dd8c16a3091df116c3/pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c", size = 2238446 }, 306 | { url = "https://files.pythonhosted.org/packages/ce/91/2ec36480fdb0b783cd9ef6795753c1dea13882f2e68e73bce76ae8c21e6a/pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808", size = 2066678 }, 307 | { url = "https://files.pythonhosted.org/packages/7b/27/d4ae6487d73948d6f20dddcd94be4ea43e74349b56eba82e9bdee2d7494c/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8", size = 2025200 }, 308 | { url = "https://files.pythonhosted.org/packages/f1/b8/b3cb95375f05d33801024079b9392a5ab45267a63400bf1866e7ce0f0de4/pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593", size = 1859123 }, 309 | { url = 
"https://files.pythonhosted.org/packages/05/bc/0d0b5adeda59a261cd30a1235a445bf55c7e46ae44aea28f7bd6ed46e091/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612", size = 1892852 }, 310 | { url = "https://files.pythonhosted.org/packages/3e/11/d37bdebbda2e449cb3f519f6ce950927b56d62f0b84fd9cb9e372a26a3d5/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7", size = 2067484 }, 311 | { url = "https://files.pythonhosted.org/packages/8c/55/1f95f0a05ce72ecb02a8a8a1c3be0579bbc29b1d5ab68f1378b7bebc5057/pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e", size = 2108896 }, 312 | { url = "https://files.pythonhosted.org/packages/53/89/2b2de6c81fa131f423246a9109d7b2a375e83968ad0800d6e57d0574629b/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8", size = 2069475 }, 313 | { url = "https://files.pythonhosted.org/packages/b8/e9/1f7efbe20d0b2b10f6718944b5d8ece9152390904f29a78e68d4e7961159/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf", size = 2239013 }, 314 | { url = "https://files.pythonhosted.org/packages/3c/b2/5309c905a93811524a49b4e031e9851a6b00ff0fb668794472ea7746b448/pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb", size = 2238715 }, 315 | { url = "https://files.pythonhosted.org/packages/32/56/8a7ca5d2cd2cda1d245d34b1c9a942920a718082ae8e54e5f3e5a58b7add/pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1", size = 2066757 }, 316 | ] 317 | 318 | [[package]] 319 | name = "pydantic-settings" 320 | version = "2.9.1" 321 | source = { registry = "https://pypi.org/simple" } 322 | dependencies = [ 323 | { name = "pydantic" }, 324 | { name = "python-dotenv" }, 325 | { name = "typing-inspection" }, 326 | ] 327 | sdist = { url = "https://files.pythonhosted.org/packages/67/1d/42628a2c33e93f8e9acbde0d5d735fa0850f3e6a2f8cb1eb6c40b9a732ac/pydantic_settings-2.9.1.tar.gz", hash = "sha256:c509bf79d27563add44e8446233359004ed85066cd096d8b510f715e6ef5d268", size = 163234 } 328 | wheels = [ 329 | { url = "https://files.pythonhosted.org/packages/b6/5f/d6d641b490fd3ec2c4c13b4244d68deea3a1b970a97be64f34fb5504ff72/pydantic_settings-2.9.1-py3-none-any.whl", hash = "sha256:59b4f431b1defb26fe620c71a7d3968a710d719f5f4cdbbdb7926edeb770f6ef", size = 44356 }, 330 | ] 331 | 332 | [[package]] 333 | name = "pyright" 334 | version = "1.1.400" 335 | source = { registry = "https://pypi.org/simple" } 336 | dependencies = [ 337 | { name = "nodeenv" }, 338 | { name = "typing-extensions" }, 339 | ] 340 | sdist = { url = "https://files.pythonhosted.org/packages/6c/cb/c306618a02d0ee8aed5fb8d0fe0ecfed0dbf075f71468f03a30b5f4e1fe0/pyright-1.1.400.tar.gz", hash = "sha256:b8a3ba40481aa47ba08ffb3228e821d22f7d391f83609211335858bf05686bdb", size = 3846546 } 341 | wheels = [ 342 | { url = "https://files.pythonhosted.org/packages/c8/a5/5d285e4932cf149c90e3c425610c5efaea005475d5f96f1bfdb452956c62/pyright-1.1.400-py3-none-any.whl", hash = 
"sha256:c80d04f98b5a4358ad3a35e241dbf2a408eee33a40779df365644f8054d2517e", size = 5563460 }, 343 | ] 344 | 345 | [[package]] 346 | name = "pytest" 347 | version = "8.3.5" 348 | source = { registry = "https://pypi.org/simple" } 349 | dependencies = [ 350 | { name = "colorama", marker = "sys_platform == 'win32'" }, 351 | { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, 352 | { name = "iniconfig" }, 353 | { name = "packaging" }, 354 | { name = "pluggy" }, 355 | { name = "tomli", marker = "python_full_version < '3.11'" }, 356 | ] 357 | sdist = { url = "https://files.pythonhosted.org/packages/ae/3c/c9d525a414d506893f0cd8a8d0de7706446213181570cdbd766691164e40/pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845", size = 1450891 } 358 | wheels = [ 359 | { url = "https://files.pythonhosted.org/packages/30/3d/64ad57c803f1fa1e963a7946b6e0fea4a70df53c1a7fed304586539c2bac/pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820", size = 343634 }, 360 | ] 361 | 362 | [[package]] 363 | name = "python-dotenv" 364 | version = "1.1.0" 365 | source = { registry = "https://pypi.org/simple" } 366 | sdist = { url = "https://files.pythonhosted.org/packages/88/2c/7bb1416c5620485aa793f2de31d3df393d3686aa8a8506d11e10e13c5baf/python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5", size = 39920 } 367 | wheels = [ 368 | { url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256 }, 369 | ] 370 | 371 | [[package]] 372 | name = "python-multipart" 373 | version = "0.0.20" 374 | source = { registry = "https://pypi.org/simple" } 375 | sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158 } 376 | wheels = [ 377 | { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546 }, 378 | ] 379 | 380 | [[package]] 381 | name = "ruff" 382 | version = "0.11.8" 383 | source = { registry = "https://pypi.org/simple" } 384 | sdist = { url = "https://files.pythonhosted.org/packages/52/f6/adcf73711f31c9f5393862b4281c875a462d9f639f4ccdf69dc368311c20/ruff-0.11.8.tar.gz", hash = "sha256:6d742d10626f9004b781f4558154bb226620a7242080e11caeffab1a40e99df8", size = 4086399 } 385 | wheels = [ 386 | { url = "https://files.pythonhosted.org/packages/9f/60/c6aa9062fa518a9f86cb0b85248245cddcd892a125ca00441df77d79ef88/ruff-0.11.8-py3-none-linux_armv6l.whl", hash = "sha256:896a37516c594805e34020c4a7546c8f8a234b679a7716a3f08197f38913e1a3", size = 10272473 }, 387 | { url = "https://files.pythonhosted.org/packages/a0/e4/0325e50d106dc87c00695f7bcd5044c6d252ed5120ebf423773e00270f50/ruff-0.11.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ab86d22d3d721a40dd3ecbb5e86ab03b2e053bc93c700dc68d1c3346b36ce835", size = 11040862 }, 388 | { url = "https://files.pythonhosted.org/packages/e6/27/b87ea1a7be37fef0adbc7fd987abbf90b6607d96aa3fc67e2c5b858e1e53/ruff-0.11.8-py3-none-macosx_11_0_arm64.whl", hash = 
"sha256:258f3585057508d317610e8a412788cf726efeefa2fec4dba4001d9e6f90d46c", size = 10385273 }, 389 | { url = "https://files.pythonhosted.org/packages/d3/f7/3346161570d789045ed47a86110183f6ac3af0e94e7fd682772d89f7f1a1/ruff-0.11.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:727d01702f7c30baed3fc3a34901a640001a2828c793525043c29f7614994a8c", size = 10578330 }, 390 | { url = "https://files.pythonhosted.org/packages/c6/c3/327fb950b4763c7b3784f91d3038ef10c13b2d42322d4ade5ce13a2f9edb/ruff-0.11.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3dca977cc4fc8f66e89900fa415ffe4dbc2e969da9d7a54bfca81a128c5ac219", size = 10122223 }, 391 | { url = "https://files.pythonhosted.org/packages/de/c7/ba686bce9adfeb6c61cb1bbadc17d58110fe1d602f199d79d4c880170f19/ruff-0.11.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c657fa987d60b104d2be8b052d66da0a2a88f9bd1d66b2254333e84ea2720c7f", size = 11697353 }, 392 | { url = "https://files.pythonhosted.org/packages/53/8e/a4fb4a1ddde3c59e73996bb3ac51844ff93384d533629434b1def7a336b0/ruff-0.11.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f2e74b021d0de5eceb8bd32919f6ff8a9b40ee62ed97becd44993ae5b9949474", size = 12375936 }, 393 | { url = "https://files.pythonhosted.org/packages/ad/a1/9529cb1e2936e2479a51aeb011307e7229225df9ac64ae064d91ead54571/ruff-0.11.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9b5ef39820abc0f2c62111f7045009e46b275f5b99d5e59dda113c39b7f4f38", size = 11850083 }, 394 | { url = "https://files.pythonhosted.org/packages/3e/94/8f7eac4c612673ae15a4ad2bc0ee62e03c68a2d4f458daae3de0e47c67ba/ruff-0.11.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1dba3135ca503727aa4648152c0fa67c3b1385d3dc81c75cd8a229c4b2a1458", size = 14005834 }, 395 | { url = "https://files.pythonhosted.org/packages/1e/7c/6f63b46b2be870cbf3f54c9c4154d13fac4b8827f22fa05ac835c10835b2/ruff-0.11.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f024d32e62faad0f76b2d6afd141b8c171515e4fb91ce9fd6464335c81244e5", size = 11503713 }, 396 | { url = "https://files.pythonhosted.org/packages/3a/91/57de411b544b5fe072779678986a021d87c3ee5b89551f2ca41200c5d643/ruff-0.11.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d365618d3ad747432e1ae50d61775b78c055fee5936d77fb4d92c6f559741948", size = 10457182 }, 397 | { url = "https://files.pythonhosted.org/packages/01/49/cfe73e0ce5ecdd3e6f1137bf1f1be03dcc819d1bfe5cff33deb40c5926db/ruff-0.11.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4d9aaa91035bdf612c8ee7266153bcf16005c7c7e2f5878406911c92a31633cb", size = 10101027 }, 398 | { url = "https://files.pythonhosted.org/packages/56/21/a5cfe47c62b3531675795f38a0ef1c52ff8de62eaddf370d46634391a3fb/ruff-0.11.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:0eba551324733efc76116d9f3a0d52946bc2751f0cd30661564117d6fd60897c", size = 11111298 }, 399 | { url = "https://files.pythonhosted.org/packages/36/98/f76225f87e88f7cb669ae92c062b11c0a1e91f32705f829bd426f8e48b7b/ruff-0.11.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:161eb4cff5cfefdb6c9b8b3671d09f7def2f960cee33481dd898caf2bcd02304", size = 11566884 }, 400 | { url = "https://files.pythonhosted.org/packages/de/7e/fff70b02e57852fda17bd43f99dda37b9bcf3e1af3d97c5834ff48d04715/ruff-0.11.8-py3-none-win32.whl", hash = "sha256:5b18caa297a786465cc511d7f8be19226acf9c0a1127e06e736cd4e1878c3ea2", size = 10451102 }, 401 | { url = 
"https://files.pythonhosted.org/packages/7b/a9/eaa571eb70648c9bde3120a1d5892597de57766e376b831b06e7c1e43945/ruff-0.11.8-py3-none-win_amd64.whl", hash = "sha256:6e70d11043bef637c5617297bdedec9632af15d53ac1e1ba29c448da9341b0c4", size = 11597410 }, 402 | { url = "https://files.pythonhosted.org/packages/cd/be/f6b790d6ae98f1f32c645f8540d5c96248b72343b0a56fab3a07f2941897/ruff-0.11.8-py3-none-win_arm64.whl", hash = "sha256:304432e4c4a792e3da85b7699feb3426a0908ab98bf29df22a31b0cdd098fac2", size = 10713129 }, 403 | ] 404 | 405 | [[package]] 406 | name = "sniffio" 407 | version = "1.3.1" 408 | source = { registry = "https://pypi.org/simple" } 409 | sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } 410 | wheels = [ 411 | { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, 412 | ] 413 | 414 | [[package]] 415 | name = "sse-starlette" 416 | version = "2.3.4" 417 | source = { registry = "https://pypi.org/simple" } 418 | dependencies = [ 419 | { name = "anyio" }, 420 | { name = "starlette" }, 421 | ] 422 | sdist = { url = "https://files.pythonhosted.org/packages/43/be/7e776a29b5f712b5bd13c571256a2470fcf345c562c7b2359f2ee15d9355/sse_starlette-2.3.4.tar.gz", hash = "sha256:0ffd6bed217cdbb74a84816437c609278003998b4991cd2e6872d0b35130e4d5", size = 17522 } 423 | wheels = [ 424 | { url = "https://files.pythonhosted.org/packages/43/a4/ee4a20f0b5ff34c391f3685eff7cdba1178a487766e31b04efb51bbddd87/sse_starlette-2.3.4-py3-none-any.whl", hash = "sha256:b8100694f3f892b133d0f7483acb7aacfcf6ed60f863b31947664b6dc74e529f", size = 10232 }, 425 | ] 426 | 427 | [[package]] 428 | name = "starlette" 429 | version = "0.46.2" 430 | source = { registry = "https://pypi.org/simple" } 431 | dependencies = [ 432 | { name = "anyio" }, 433 | ] 434 | sdist = { url = "https://files.pythonhosted.org/packages/ce/20/08dfcd9c983f6a6f4a1000d934b9e6d626cff8d2eeb77a89a68eef20a2b7/starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5", size = 2580846 } 435 | wheels = [ 436 | { url = "https://files.pythonhosted.org/packages/8b/0c/9d30a4ebeb6db2b25a841afbb80f6ef9a854fc3b41be131d249a977b4959/starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35", size = 72037 }, 437 | ] 438 | 439 | [[package]] 440 | name = "tomli" 441 | version = "2.2.1" 442 | source = { registry = "https://pypi.org/simple" } 443 | sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175 } 444 | wheels = [ 445 | { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077 }, 446 | { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429 }, 447 | { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067 }, 448 | { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030 }, 449 | { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898 }, 450 | { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894 }, 451 | { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319 }, 452 | { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273 }, 453 | { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310 }, 454 | { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309 }, 455 | { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762 }, 456 | { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453 }, 457 | { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486 }, 458 | { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349 }, 459 | { url = 
"https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159 }, 460 | { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243 }, 461 | { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645 }, 462 | { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584 }, 463 | { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875 }, 464 | { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418 }, 465 | { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708 }, 466 | { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582 }, 467 | { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543 }, 468 | { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691 }, 469 | { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170 }, 470 | { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530 }, 471 | { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666 }, 472 | { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954 }, 473 | { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724 }, 474 | { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383 }, 475 | { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257 }, 476 | ] 477 | 478 | [[package]] 479 | name = "typing-extensions" 480 | version = "4.13.2" 481 | source = { registry = "https://pypi.org/simple" } 482 | sdist = { url = "https://files.pythonhosted.org/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef", size = 106967 } 483 | wheels = [ 484 | { url = "https://files.pythonhosted.org/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c", size = 45806 }, 485 | ] 486 | 487 | [[package]] 488 | name = "typing-inspection" 489 | version = "0.4.0" 490 | source = { registry = "https://pypi.org/simple" } 491 | dependencies = [ 492 | { name = "typing-extensions" }, 493 | ] 494 | sdist = { url = "https://files.pythonhosted.org/packages/82/5c/e6082df02e215b846b4b8c0b887a64d7d08ffaba30605502639d44c06b82/typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122", size = 76222 } 495 | wheels = [ 496 | { url = "https://files.pythonhosted.org/packages/31/08/aa4fdfb71f7de5176385bd9e90852eaf6b5d622735020ad600f2bab54385/typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f", size = 14125 }, 497 | ] 498 | 499 | [[package]] 500 | name = "uvicorn" 501 | version = "0.34.2" 502 | source = { registry = "https://pypi.org/simple" } 503 | dependencies = [ 504 | { name = "click" }, 505 | { name = "h11" }, 506 | { name = "typing-extensions", marker = "python_full_version < '3.11'" }, 507 | ] 508 | sdist = { url = "https://files.pythonhosted.org/packages/a6/ae/9bbb19b9e1c450cf9ecaef06463e40234d98d95bf572fab11b4f19ae5ded/uvicorn-0.34.2.tar.gz", hash = "sha256:0e929828f6186353a80b58ea719861d2629d766293b6d19baf086ba31d4f3328", size = 76815 } 509 | wheels = [ 510 | { url = "https://files.pythonhosted.org/packages/b1/4b/4cef6ce21a2aaca9d852a6e84ef4f135d99fcd74fa75105e2fc0c8308acd/uvicorn-0.34.2-py3-none-any.whl", hash = "sha256:deb49af569084536d269fe0a6d67e3754f104cf03aba7c11c40f01aadf33c403", size = 62483 }, 511 | ] 512 | -------------------------------------------------------------------------------- /langchain_mcp_adapters/__init__.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/langchain-ai/langchain-mcp-adapters/0f825d7876b961371d7dd985a5914f628d655f41/langchain_mcp_adapters/__init__.py -------------------------------------------------------------------------------- /langchain_mcp_adapters/client.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from contextlib import asynccontextmanager 3 | from types import TracebackType 4 | from typing import Any, AsyncIterator 5 | 6 | from langchain_core.documents.base import Blob 7 | from langchain_core.messages import AIMessage, HumanMessage 8 | from langchain_core.tools import BaseTool 9 | from mcp import ClientSession 10 | 11 | from langchain_mcp_adapters.prompts import load_mcp_prompt 12 | from langchain_mcp_adapters.resources import load_mcp_resources 13 | from langchain_mcp_adapters.sessions import ( 14 | Connection, 15 | SSEConnection, 16 | StdioConnection, 17 | StreamableHttpConnection, 18 | WebsocketConnection, 19 | create_session, 20 | ) 21 | from langchain_mcp_adapters.tools import load_mcp_tools 22 | 23 | ASYNC_CONTEXT_MANAGER_ERROR = ( 24 | "As of langchain-mcp-adapters 0.1.0, MultiServerMCPClient cannot be used as a context manager (e.g., async with MultiServerMCPClient(...)). " 25 | "Instead, you can do one of the following:\n" 26 | "1. client = MultiServerMCPClient(...)\n" 27 | " tools = await client.get_tools()\n" 28 | "2. client = MultiServerMCPClient(...)\n" 29 | " async with client.session(server_name) as session:\n" 30 | " tools = await load_mcp_tools(session)" 31 | ) 32 | 33 | 34 | class MultiServerMCPClient: 35 | """Client for connecting to multiple MCP servers and loading LangChain-compatible tools, prompts and resources from them.""" 36 | 37 | def __init__( 38 | self, 39 | connections: dict[str, Connection] | None = None, 40 | ) -> None: 41 | """Initialize a MultiServerMCPClient with MCP servers connections. 42 | 43 | Args: 44 | connections: A dictionary mapping server names to connection configurations. 45 | If None, no initial connections are established. 46 | 47 | Example: basic usage (starting a new session on each tool call) 48 | 49 | ```python 50 | from langchain_mcp_adapters.client import MultiServerMCPClient 51 | 52 | client = MultiServerMCPClient( 53 | { 54 | "math": { 55 | "command": "python", 56 | # Make sure to update to the full absolute path to your math_server.py file 57 | "args": ["/path/to/math_server.py"], 58 | "transport": "stdio", 59 | }, 60 | "weather": { 61 | # make sure you start your weather server on port 8000 62 | "url": "http://localhost:8000/mcp", 63 | "transport": "streamable_http", 64 | } 65 | } 66 | ) 67 | all_tools = await client.get_tools() 68 | ``` 69 | 70 | Example: explicitly starting a session 71 | 72 | ```python 73 | from langchain_mcp_adapters.client import MultiServerMCPClient 74 | from langchain_mcp_adapters.tools import load_mcp_tools 75 | 76 | client = MultiServerMCPClient({...}) 77 | async with client.session("math") as session: 78 | tools = await load_mcp_tools(session) 79 | ``` 80 | """ 81 | self.connections: dict[str, Connection] = connections if connections is not None else {} 82 | 83 | @asynccontextmanager 84 | async def session( 85 | self, 86 | server_name: str, 87 | *, 88 | auto_initialize: bool = True, 89 | ) -> AsyncIterator[ClientSession]: 90 | """Connect to an MCP server and initialize a session. 
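        The connection configuration is looked up by server_name in the
        connections dict passed to the constructor, and the underlying
        transport and ClientSession are closed automatically when the
        context manager exits.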
91 | 92 | Args: 93 | server_name: Name to identify this server connection 94 | auto_initialize: Whether to automatically initialize the session 95 | 96 | Raises: 97 | ValueError: If the server name is not found in the connections 98 | 99 | Yields: 100 | An initialized ClientSession 101 | """ 102 | if server_name not in self.connections: 103 | raise ValueError( 104 | f"Couldn't find a server with name '{server_name}', expected one of '{list(self.connections.keys())}'" 105 | ) 106 | 107 | async with create_session(self.connections[server_name]) as session: 108 | if auto_initialize: 109 | await session.initialize() 110 | yield session 111 | 112 | async def get_tools(self, *, server_name: str | None = None) -> list[BaseTool]: 113 | """Get a list of all tools from all connected servers. 114 | 115 | Args: 116 | server_name: Optional name of the server to get tools from. 117 | If None, all tools from all servers will be returned (default). 118 | 119 | NOTE: a new session will be created for each tool call 120 | 121 | Returns: 122 | A list of LangChain tools 123 | """ 124 | if server_name is not None: 125 | if server_name not in self.connections: 126 | raise ValueError( 127 | f"Couldn't find a server with name '{server_name}', expected one of '{list(self.connections.keys())}'" 128 | ) 129 | return await load_mcp_tools(None, connection=self.connections[server_name]) 130 | 131 | all_tools: list[BaseTool] = [] 132 | load_mcp_tool_tasks = [] 133 | for connection in self.connections.values(): 134 | load_mcp_tool_task = asyncio.create_task(load_mcp_tools(None, connection=connection)) 135 | load_mcp_tool_tasks.append(load_mcp_tool_task) 136 | tools_list = await asyncio.gather(*load_mcp_tool_tasks) 137 | for tools in tools_list: 138 | all_tools.extend(tools) 139 | return all_tools 140 | 141 | async def get_prompt( 142 | self, server_name: str, prompt_name: str, *, arguments: dict[str, Any] | None = None 143 | ) -> list[HumanMessage | AIMessage]: 144 | """Get a prompt from a given MCP server.""" 145 | async with self.session(server_name) as session: 146 | prompt = await load_mcp_prompt(session, prompt_name, arguments=arguments) 147 | return prompt 148 | 149 | async def get_resources( 150 | self, server_name: str, *, uris: str | list[str] | None = None 151 | ) -> list[Blob]: 152 | """Get resources from a given MCP server. 153 | 154 | Args: 155 | server_name: Name of the server to get resources from 156 | uris: Optional resource URI or list of URIs to load. If not provided, all resources will be loaded. 
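                Text resources are returned as text Blobs; binary resources
                are base64-decoded into bytes before being wrapped in a Blob.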
157 | 158 | Returns: 159 | A list of LangChain Blobs 160 | """ 161 | async with self.session(server_name) as session: 162 | resources = await load_mcp_resources(session, uris=uris) 163 | return resources 164 | 165 | async def __aenter__(self) -> "MultiServerMCPClient": 166 | raise NotImplementedError(ASYNC_CONTEXT_MANAGER_ERROR) 167 | 168 | def __aexit__( 169 | self, 170 | exc_type: type[BaseException] | None, 171 | exc_val: BaseException | None, 172 | exc_tb: TracebackType | None, 173 | ) -> None: 174 | raise NotImplementedError(ASYNC_CONTEXT_MANAGER_ERROR) 175 | 176 | 177 | __all__ = [ 178 | "MultiServerMCPClient", 179 | "SSEConnection", 180 | "StdioConnection", 181 | "StreamableHttpConnection", 182 | "WebsocketConnection", 183 | ] 184 | -------------------------------------------------------------------------------- /langchain_mcp_adapters/prompts.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | 3 | from langchain_core.messages import AIMessage, HumanMessage 4 | from mcp import ClientSession 5 | from mcp.types import PromptMessage 6 | 7 | 8 | def convert_mcp_prompt_message_to_langchain_message( 9 | message: PromptMessage, 10 | ) -> HumanMessage | AIMessage: 11 | """Convert an MCP prompt message to a LangChain message. 12 | 13 | Args: 14 | message: MCP prompt message to convert 15 | 16 | Returns: 17 | a LangChain message 18 | """ 19 | if message.content.type == "text": 20 | if message.role == "user": 21 | return HumanMessage(content=message.content.text) 22 | elif message.role == "assistant": 23 | return AIMessage(content=message.content.text) 24 | else: 25 | raise ValueError(f"Unsupported prompt message role: {message.role}") 26 | 27 | raise ValueError(f"Unsupported prompt message content type: {message.content.type}") 28 | 29 | 30 | async def load_mcp_prompt( 31 | session: ClientSession, name: str, *, arguments: dict[str, Any] | None = None 32 | ) -> list[HumanMessage | AIMessage]: 33 | """Load MCP prompt and convert to LangChain messages.""" 34 | response = await session.get_prompt(name, arguments) 35 | return [ 36 | convert_mcp_prompt_message_to_langchain_message(message) for message in response.messages 37 | ] 38 | -------------------------------------------------------------------------------- /langchain_mcp_adapters/resources.py: -------------------------------------------------------------------------------- 1 | import base64 2 | 3 | from langchain_core.documents.base import Blob 4 | from mcp import ClientSession 5 | from mcp.types import BlobResourceContents, ResourceContents, TextResourceContents 6 | 7 | 8 | def convert_mcp_resource_to_langchain_blob( 9 | resource_uri: str, 10 | contents: ResourceContents, 11 | ) -> Blob: 12 | """Convert an MCP resource content to a LangChain Blob. 13 | 14 | Args: 15 | resource_uri: URI of the resource 16 | contents: The resource contents 17 | 18 | Returns: 19 | A LangChain Blob 20 | """ 21 | if isinstance(contents, TextResourceContents): 22 | data = contents.text 23 | elif isinstance(contents, BlobResourceContents): 24 | data = base64.b64decode(contents.blob) 25 | else: 26 | raise ValueError(f"Unsupported content type for URI {resource_uri}") 27 | 28 | return Blob.from_data( 29 | data=data, 30 | mime_type=contents.mimeType, 31 | metadata={"uri": resource_uri}, 32 | ) 33 | 34 | 35 | async def get_mcp_resource(session: ClientSession, uri: str) -> list[Blob]: 36 | """Fetch a single MCP resource and convert it to LangChain Blobs. 
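    A single read_resource call can return more than one content item, so the
    result is always returned as a list of Blobs.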
37 | 38 | Args: 39 | session: MCP client session 40 | uri: URI of the resource to fetch 41 | 42 | Returns: 43 | A list of LangChain Blobs 44 | """ 45 | contents_result = await session.read_resource(uri) 46 | if not contents_result.contents or len(contents_result.contents) == 0: 47 | return [] 48 | 49 | return [ 50 | convert_mcp_resource_to_langchain_blob(uri, content) for content in contents_result.contents 51 | ] 52 | 53 | 54 | async def load_mcp_resources( 55 | session: ClientSession, 56 | *, 57 | uris: str | list[str] | None = None, 58 | ) -> list[Blob]: 59 | """Load MCP resources and convert them to LangChain Blobs. 60 | 61 | Args: 62 | session: MCP client session 63 | uris: List of URIs to load. 64 | If None, all resources will be loaded. 65 | NOTE: if you specify None, dynamic resources will NOT be loaded, 66 | as they need the parameters to be provided, 67 | and are ignored by MCP SDK's session.list_resources() method. 68 | 69 | Returns: 70 | A list of LangChain Blobs 71 | """ 72 | blobs = [] 73 | 74 | if uris is None: 75 | resources_list = await session.list_resources() 76 | uri_list = [r.uri for r in resources_list.resources] 77 | elif isinstance(uris, str): 78 | uri_list = [uris] 79 | else: 80 | uri_list = uris 81 | 82 | for uri in uri_list: 83 | try: 84 | resource_blobs = await get_mcp_resource(session, uri) 85 | blobs.extend(resource_blobs) 86 | except Exception as e: 87 | raise RuntimeError(f"Error fetching resource {uri}") from e 88 | 89 | return blobs 90 | -------------------------------------------------------------------------------- /langchain_mcp_adapters/sessions.py: -------------------------------------------------------------------------------- 1 | import os 2 | from contextlib import asynccontextmanager 3 | from datetime import timedelta 4 | from pathlib import Path 5 | from typing import Any, AsyncIterator, Literal, TypedDict 6 | 7 | from mcp import ClientSession, StdioServerParameters 8 | from mcp.client.sse import sse_client 9 | from mcp.client.stdio import stdio_client 10 | from mcp.client.streamable_http import streamablehttp_client 11 | 12 | EncodingErrorHandler = Literal["strict", "ignore", "replace"] 13 | 14 | DEFAULT_ENCODING = "utf-8" 15 | DEFAULT_ENCODING_ERROR_HANDLER: EncodingErrorHandler = "strict" 16 | 17 | DEFAULT_HTTP_TIMEOUT = 5 18 | DEFAULT_SSE_READ_TIMEOUT = 60 * 5 19 | 20 | DEFAULT_STREAMABLE_HTTP_TIMEOUT = timedelta(seconds=30) 21 | DEFAULT_STREAMABLE_HTTP_SSE_READ_TIMEOUT = timedelta(seconds=60 * 5) 22 | 23 | 24 | class StdioConnection(TypedDict): 25 | transport: Literal["stdio"] 26 | 27 | command: str 28 | """The executable to run to start the server.""" 29 | 30 | args: list[str] 31 | """Command line arguments to pass to the executable.""" 32 | 33 | env: dict[str, str] | None 34 | """The environment to use when spawning the process.""" 35 | 36 | cwd: str | Path | None 37 | """The working directory to use when spawning the process.""" 38 | 39 | encoding: str 40 | """The text encoding used when sending/receiving messages to the server.""" 41 | 42 | encoding_error_handler: EncodingErrorHandler 43 | """ 44 | The text encoding error handler. 
45 | 46 | See https://docs.python.org/3/library/codecs.html#codec-base-classes for 47 | explanations of possible values 48 | """ 49 | 50 | session_kwargs: dict[str, Any] | None 51 | """Additional keyword arguments to pass to the ClientSession""" 52 | 53 | 54 | class SSEConnection(TypedDict): 55 | transport: Literal["sse"] 56 | 57 | url: str 58 | """The URL of the SSE endpoint to connect to.""" 59 | 60 | headers: dict[str, Any] | None 61 | """HTTP headers to send to the SSE endpoint""" 62 | 63 | timeout: float 64 | """HTTP timeout""" 65 | 66 | sse_read_timeout: float 67 | """SSE read timeout""" 68 | 69 | session_kwargs: dict[str, Any] | None 70 | """Additional keyword arguments to pass to the ClientSession""" 71 | 72 | 73 | class StreamableHttpConnection(TypedDict): 74 | transport: Literal["streamable_http"] 75 | 76 | url: str 77 | """The URL of the endpoint to connect to.""" 78 | 79 | headers: dict[str, Any] | None 80 | """HTTP headers to send to the endpoint.""" 81 | 82 | timeout: timedelta 83 | """HTTP timeout.""" 84 | 85 | sse_read_timeout: timedelta 86 | """How long (in seconds) the client will wait for a new event before disconnecting. 87 | All other HTTP operations are controlled by `timeout`.""" 88 | 89 | terminate_on_close: bool 90 | """Whether to terminate the session on close""" 91 | 92 | session_kwargs: dict[str, Any] | None 93 | """Additional keyword arguments to pass to the ClientSession""" 94 | 95 | 96 | class WebsocketConnection(TypedDict): 97 | transport: Literal["websocket"] 98 | 99 | url: str 100 | """The URL of the Websocket endpoint to connect to.""" 101 | 102 | session_kwargs: dict[str, Any] | None 103 | """Additional keyword arguments to pass to the ClientSession""" 104 | 105 | 106 | Connection = StdioConnection | SSEConnection | StreamableHttpConnection | WebsocketConnection 107 | 108 | 109 | @asynccontextmanager 110 | async def _create_stdio_session( 111 | *, 112 | command: str, 113 | args: list[str], 114 | env: dict[str, str] | None = None, 115 | cwd: str | Path | None = None, 116 | encoding: str = DEFAULT_ENCODING, 117 | encoding_error_handler: Literal["strict", "ignore", "replace"] = DEFAULT_ENCODING_ERROR_HANDLER, 118 | session_kwargs: dict[str, Any] | None = None, 119 | ) -> AsyncIterator[ClientSession]: 120 | """Create a new session to an MCP server using stdio 121 | 122 | Args: 123 | command: Command to execute 124 | args: Arguments for the command 125 | env: Environment variables for the command 126 | cwd: Working directory for the command 127 | encoding: Character encoding 128 | encoding_error_handler: How to handle encoding errors 129 | session_kwargs: Additional keyword arguments to pass to the ClientSession 130 | """ 131 | # NOTE: execution commands (e.g., `uvx` / `npx`) require PATH envvar to be set. 132 | # To address this, we automatically inject existing PATH envvar into the `env` value, 133 | # if it's not already set. 
134 | env = env or {} 135 | if "PATH" not in env: 136 | env["PATH"] = os.environ.get("PATH", "") 137 | 138 | server_params = StdioServerParameters( 139 | command=command, 140 | args=args, 141 | env=env, 142 | cwd=cwd, 143 | encoding=encoding, 144 | encoding_error_handler=encoding_error_handler, 145 | ) 146 | 147 | # Create and store the connection 148 | async with stdio_client(server_params) as (read, write): 149 | async with ClientSession(read, write, **(session_kwargs or {})) as session: 150 | yield session 151 | 152 | 153 | @asynccontextmanager 154 | async def _create_sse_session( 155 | *, 156 | url: str, 157 | headers: dict[str, Any] | None = None, 158 | timeout: float = DEFAULT_HTTP_TIMEOUT, 159 | sse_read_timeout: float = DEFAULT_SSE_READ_TIMEOUT, 160 | session_kwargs: dict[str, Any] | None = None, 161 | ) -> AsyncIterator[ClientSession]: 162 | """Create a new session to an MCP server using SSE 163 | 164 | Args: 165 | url: URL of the SSE server 166 | headers: HTTP headers to send to the SSE endpoint 167 | timeout: HTTP timeout 168 | sse_read_timeout: SSE read timeout 169 | session_kwargs: Additional keyword arguments to pass to the ClientSession 170 | """ 171 | # Create and store the connection 172 | async with sse_client(url, headers, timeout, sse_read_timeout) as (read, write): 173 | async with ClientSession(read, write, **(session_kwargs or {})) as session: 174 | yield session 175 | 176 | 177 | @asynccontextmanager 178 | async def _create_streamable_http_session( 179 | *, 180 | url: str, 181 | headers: dict[str, Any] | None = None, 182 | timeout: timedelta = DEFAULT_STREAMABLE_HTTP_TIMEOUT, 183 | sse_read_timeout: timedelta = DEFAULT_STREAMABLE_HTTP_SSE_READ_TIMEOUT, 184 | terminate_on_close: bool = True, 185 | session_kwargs: dict[str, Any] | None = None, 186 | ) -> AsyncIterator[ClientSession]: 187 | """Create a new session to an MCP server using Streamable HTTP 188 | 189 | Args: 190 | url: URL of the endpoint to connect to 191 | headers: HTTP headers to send to the endpoint 192 | timeout: HTTP timeout 193 | sse_read_timeout: How long (in seconds) the client will wait for a new event before disconnecting. 194 | terminate_on_close: Whether to terminate the session on close 195 | session_kwargs: Additional keyword arguments to pass to the ClientSession 196 | """ 197 | # Create and store the connection 198 | async with streamablehttp_client( 199 | url, headers, timeout, sse_read_timeout, terminate_on_close 200 | ) as (read, write, _): 201 | async with ClientSession(read, write, **(session_kwargs or {})) as session: 202 | yield session 203 | 204 | 205 | @asynccontextmanager 206 | async def _create_websocket_session( 207 | *, 208 | url: str, 209 | session_kwargs: dict[str, Any] | None = None, 210 | ) -> AsyncIterator[ClientSession]: 211 | """Create a new session to an MCP server using Websockets 212 | 213 | Args: 214 | url: URL of the Websocket endpoint 215 | session_kwargs: Additional keyword arguments to pass to the ClientSession 216 | 217 | Raises: 218 | ImportError: If websockets package is not installed 219 | """ 220 | try: 221 | from mcp.client.websocket import websocket_client 222 | except ImportError: 223 | raise ImportError( 224 | "Could not import websocket_client. 
", 225 | "To use Websocket connections, please install the required dependency with: ", 226 | "'pip install mcp[ws]' or 'pip install websockets'", 227 | ) from None 228 | 229 | async with websocket_client(url) as (read, write): 230 | async with ClientSession(read, write, **(session_kwargs or {})) as session: 231 | yield session 232 | 233 | 234 | @asynccontextmanager 235 | async def create_session( 236 | connection: Connection, 237 | ) -> AsyncIterator[ClientSession]: 238 | """Create a new session to an MCP server. 239 | 240 | Args: 241 | connection: Connection config to use to connect to the server 242 | 243 | Raises: 244 | ValueError: If transport is not recognized 245 | ValueError: If required parameters for the specified transport are missing 246 | 247 | Yields: 248 | A ClientSession 249 | """ 250 | transport = connection["transport"] 251 | if transport == "sse": 252 | if "url" not in connection: 253 | raise ValueError("'url' parameter is required for SSE connection") 254 | async with _create_sse_session( 255 | url=connection["url"], 256 | headers=connection.get("headers"), 257 | timeout=connection.get("timeout", DEFAULT_HTTP_TIMEOUT), 258 | sse_read_timeout=connection.get("sse_read_timeout", DEFAULT_SSE_READ_TIMEOUT), 259 | session_kwargs=connection.get("session_kwargs"), 260 | ) as session: 261 | yield session 262 | elif transport == "streamable_http": 263 | if "url" not in connection: 264 | raise ValueError("'url' parameter is required for Streamable HTTP connection") 265 | async with _create_streamable_http_session( 266 | url=connection["url"], 267 | headers=connection.get("headers"), 268 | timeout=connection.get("timeout", DEFAULT_STREAMABLE_HTTP_TIMEOUT), 269 | sse_read_timeout=connection.get( 270 | "sse_read_timeout", DEFAULT_STREAMABLE_HTTP_SSE_READ_TIMEOUT 271 | ), 272 | session_kwargs=connection.get("session_kwargs"), 273 | ) as session: 274 | yield session 275 | elif transport == "stdio": 276 | if "command" not in connection: 277 | raise ValueError("'command' parameter is required for stdio connection") 278 | if "args" not in connection: 279 | raise ValueError("'args' parameter is required for stdio connection") 280 | async with _create_stdio_session( 281 | command=connection["command"], 282 | args=connection["args"], 283 | env=connection.get("env"), 284 | cwd=connection.get("cwd"), 285 | encoding=connection.get("encoding", DEFAULT_ENCODING), 286 | encoding_error_handler=connection.get( 287 | "encoding_error_handler", DEFAULT_ENCODING_ERROR_HANDLER 288 | ), 289 | session_kwargs=connection.get("session_kwargs"), 290 | ) as session: 291 | yield session 292 | elif transport == "websocket": 293 | if "url" not in connection: 294 | raise ValueError("'url' parameter is required for Websocket connection") 295 | async with _create_websocket_session( 296 | url=connection["url"], 297 | session_kwargs=connection.get("session_kwargs"), 298 | ) as session: 299 | yield session 300 | else: 301 | raise ValueError( 302 | f"Unsupported transport: {transport}. 
Must be one of: 'stdio', 'sse', 'websocket', 'streamable_http'" 303 | ) 304 | -------------------------------------------------------------------------------- /langchain_mcp_adapters/tools.py: -------------------------------------------------------------------------------- 1 | from typing import Any, cast, get_args 2 | 3 | from langchain_core.tools import BaseTool, InjectedToolArg, StructuredTool, ToolException 4 | from langchain_core.tools.base import get_all_basemodel_annotations 5 | from mcp import ClientSession 6 | from mcp.server.fastmcp.tools import Tool as FastMCPTool 7 | from mcp.server.fastmcp.utilities.func_metadata import ArgModelBase, FuncMetadata 8 | from mcp.types import ( 9 | CallToolResult, 10 | EmbeddedResource, 11 | ImageContent, 12 | TextContent, 13 | ) 14 | from mcp.types import ( 15 | Tool as MCPTool, 16 | ) 17 | from pydantic import BaseModel, create_model 18 | 19 | from langchain_mcp_adapters.sessions import Connection, create_session 20 | 21 | NonTextContent = ImageContent | EmbeddedResource 22 | 23 | 24 | def _convert_call_tool_result( 25 | call_tool_result: CallToolResult, 26 | ) -> tuple[str | list[str], list[NonTextContent] | None]: 27 | text_contents: list[TextContent] = [] 28 | non_text_contents = [] 29 | for content in call_tool_result.content: 30 | if isinstance(content, TextContent): 31 | text_contents.append(content) 32 | else: 33 | non_text_contents.append(content) 34 | 35 | tool_content: str | list[str] = [content.text for content in text_contents] 36 | if not text_contents: 37 | tool_content = "" 38 | elif len(text_contents) == 1: 39 | tool_content = tool_content[0] 40 | 41 | if call_tool_result.isError: 42 | raise ToolException(tool_content) 43 | 44 | return tool_content, non_text_contents or None 45 | 46 | 47 | def convert_mcp_tool_to_langchain_tool( 48 | session: ClientSession | None, 49 | tool: MCPTool, 50 | *, 51 | connection: Connection | None = None, 52 | ) -> BaseTool: 53 | """Convert an MCP tool to a LangChain tool. 54 | 55 | NOTE: this tool can be executed only in a context of an active MCP client session. 56 | 57 | Args: 58 | session: MCP client session 59 | tool: MCP tool to convert 60 | connection: Optional connection config to use to create a new session 61 | if a `session` is not provided 62 | 63 | Returns: 64 | a LangChain tool 65 | """ 66 | if session is None and connection is None: 67 | raise ValueError("Either a session or a connection config must be provided") 68 | 69 | async def call_tool( 70 | **arguments: dict[str, Any], 71 | ) -> tuple[str | list[str], list[NonTextContent] | None]: 72 | if session is None: 73 | # If a session is not provided, we will create one on the fly 74 | async with create_session(connection) as tool_session: 75 | await tool_session.initialize() 76 | call_tool_result = await cast(ClientSession, tool_session).call_tool( 77 | tool.name, arguments 78 | ) 79 | else: 80 | call_tool_result = await session.call_tool(tool.name, arguments) 81 | return _convert_call_tool_result(call_tool_result) 82 | 83 | return StructuredTool( 84 | name=tool.name, 85 | description=tool.description or "", 86 | args_schema=tool.inputSchema, 87 | coroutine=call_tool, 88 | response_format="content_and_artifact", 89 | metadata=tool.annotations.model_dump() if tool.annotations else None, 90 | ) 91 | 92 | 93 | async def load_mcp_tools( 94 | session: ClientSession | None, 95 | *, 96 | connection: Connection | None = None, 97 | ) -> list[BaseTool]: 98 | """Load all available MCP tools and convert them to LangChain tools. 
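
    Args:
        session: An active MCP client session. If None, a temporary session is
            created from connection to list the available tools.
        connection: Optional connection config used to create sessions when
            session is not provided (both for listing tools and for each
            subsequent tool invocation).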
99 | 100 | Returns: 101 | list of LangChain tools. Tool annotations are returned as part 102 | of the tool metadata object. 103 | """ 104 | if session is None and connection is None: 105 | raise ValueError("Either a session or a connection config must be provided") 106 | 107 | if session is None: 108 | # If a session is not provided, we will create one on the fly 109 | async with create_session(connection) as tool_session: 110 | await tool_session.initialize() 111 | tools = await tool_session.list_tools() 112 | else: 113 | tools = await session.list_tools() 114 | 115 | converted_tools = [ 116 | convert_mcp_tool_to_langchain_tool(session, tool, connection=connection) 117 | for tool in tools.tools 118 | ] 119 | return converted_tools 120 | 121 | 122 | def _get_injected_args(tool: BaseTool) -> list[str]: 123 | def _is_injected_arg_type(type_: type) -> bool: 124 | return any( 125 | isinstance(arg, InjectedToolArg) 126 | or (isinstance(arg, type) and issubclass(arg, InjectedToolArg)) 127 | for arg in get_args(type_)[1:] 128 | ) 129 | 130 | injected_args = [ 131 | field 132 | for field, field_info in get_all_basemodel_annotations(tool.args_schema).items() 133 | if _is_injected_arg_type(field_info) 134 | ] 135 | return injected_args 136 | 137 | 138 | def to_fastmcp(tool: BaseTool) -> FastMCPTool: 139 | """Convert a LangChain tool to a FastMCP tool.""" 140 | if not issubclass(tool.args_schema, BaseModel): 141 | raise ValueError( 142 | "Tool args_schema must be a subclass of pydantic.BaseModel. " 143 | "Tools with dict args schema are not supported." 144 | ) 145 | 146 | parameters = tool.tool_call_schema.model_json_schema() 147 | field_definitions = { 148 | field: (field_info.annotation, field_info) 149 | for field, field_info in tool.tool_call_schema.model_fields.items() 150 | } 151 | arg_model = create_model( 152 | f"{tool.name}Arguments", 153 | **field_definitions, 154 | __base__=ArgModelBase, 155 | ) 156 | fn_metadata = FuncMetadata(arg_model=arg_model) 157 | 158 | async def fn(**arguments: dict[str, Any]) -> Any: 159 | return await tool.ainvoke(arguments) 160 | 161 | injected_args = _get_injected_args(tool) 162 | if len(injected_args) > 0: 163 | raise NotImplementedError("LangChain tools with injected arguments are not supported") 164 | 165 | fastmcp_tool = FastMCPTool( 166 | fn=fn, 167 | name=tool.name, 168 | description=tool.description, 169 | parameters=parameters, 170 | fn_metadata=fn_metadata, 171 | is_async=True, 172 | ) 173 | return fastmcp_tool 174 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["pdm-backend"] 3 | build-backend = "pdm.backend" 4 | 5 | [project] 6 | name = "langchain-mcp-adapters" 7 | version = "0.1.4" 8 | description = "Make Anthropic Model Context Protocol (MCP) tools compatible with LangChain and LangGraph agents." 
9 | authors = [
10 |     { name = "Vadym Barda", email = "19161700+vbarda@users.noreply.github.com" },
11 | ]
12 | license = "MIT"
13 | repository = "https://www.github.com/langchain-ai/langchain-mcp-adapters"
14 | readme = "README.md"
15 | requires-python = ">=3.10"
16 | dependencies = [
17 |     "langchain-core>=0.3.36,<0.4",
18 |     "mcp>=1.9.1",
19 | ]
20 | 
21 | [dependency-groups]
22 | test = [
23 |     "pytest>=8.0.0",
24 |     "ruff>=0.9.4",
25 |     "mypy>=1.8.0",
26 |     "pytest-socket>=0.7.0",
27 |     "pytest-asyncio>=0.26.0",
28 |     "types-setuptools>=69.0.0",
29 |     "websockets>=15.0.1"
30 | ]
31 | 
32 | [tool.pytest.ini_options]
33 | minversion = "8.0"
34 | # -ra: Report all extra test outcomes (passed, skipped, failed, etc.)
35 | # -q: Enable quiet mode for less cluttered output
36 | # -v: Enable verbose output to display detailed test names and statuses
37 | # --durations=5: Show the 5 slowest tests after the run (useful for performance tuning)
38 | addopts = "-ra -q -v --durations=5"
39 | testpaths = ["tests"]
40 | python_files = ["test_*.py"]
41 | python_functions = ["test_*"]
42 | asyncio_mode = "auto"
43 | asyncio_default_fixture_loop_scope = "function"
44 | 
45 | [tool.ruff]
46 | line-length = 100
47 | target-version = "py310"
48 | 
49 | [tool.ruff.lint]
50 | select = [
51 |     "E", # pycodestyle errors
52 |     "W", # pycodestyle warnings
53 |     "F", # pyflakes
54 |     "I", # isort
55 |     "B", # flake8-bugbear
56 | ]
57 | ignore = [
58 |     "E501", # line-length
59 | ]
60 | 
61 | 
62 | [tool.mypy]
63 | python_version = "3.11"
64 | warn_return_any = true
65 | warn_unused_configs = true
66 | disallow_untyped_defs = true
67 | check_untyped_defs = true
68 | 
--------------------------------------------------------------------------------
/static/img/mcp.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/langchain-ai/langchain-mcp-adapters/0f825d7876b961371d7dd985a5914f628d655f41/static/img/mcp.png
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/langchain-ai/langchain-mcp-adapters/0f825d7876b961371d7dd985a5914f628d655f41/tests/__init__.py
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import multiprocessing
2 | import socket
3 | import time
4 | from collections.abc import Generator
5 | 
6 | import pytest
7 | 
8 | from tests.utils import run_server
9 | 
10 | 
11 | @pytest.fixture
12 | def websocket_server_port() -> int:
13 |     with socket.socket() as s:
14 |         s.bind(("127.0.0.1", 0))
15 |         return s.getsockname()[1]
16 |     raise ValueError("Free port not found!")
17 | 
18 | 
19 | @pytest.fixture()
20 | def websocket_server(websocket_server_port: int) -> Generator[None, None, None]:
21 |     proc = multiprocessing.Process(
22 |         target=run_server, kwargs={"server_port": websocket_server_port}, daemon=True
23 |     )
24 |     proc.start()
25 | 
26 |     # Wait for server to be running
27 |     max_attempts = 20
28 |     attempt = 0
29 | 
30 |     while attempt < max_attempts:
31 |         try:
32 |             with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
33 |                 s.connect(("127.0.0.1", websocket_server_port))
34 |                 break
35 |         except ConnectionRefusedError:
36 |             time.sleep(0.1)
37 |             attempt += 1
38 |     else:
39 |         raise RuntimeError(f"Server failed to start after {max_attempts} attempts")
40 | 
41 |     yield
42 | 
43 |     #
Signal the server to stop 44 | proc.kill() 45 | proc.join(timeout=2) 46 | if proc.is_alive(): 47 | raise RuntimeError("Server process is still alive after attempting to terminate it") 48 | 49 | 50 | @pytest.fixture 51 | def socket_enabled(): 52 | """Temporarily enable socket connections for websocket tests.""" 53 | try: 54 | import pytest_socket 55 | 56 | pytest_socket.enable_socket() 57 | previous_state = pytest_socket.socket_allow_hosts() 58 | # Only allow connections to localhost 59 | pytest_socket.socket_allow_hosts(["127.0.0.1", "localhost"], allow_unix_socket=True) 60 | yield 61 | finally: 62 | # Restore previous state 63 | pytest_socket.socket_allow_hosts(previous_state) 64 | -------------------------------------------------------------------------------- /tests/servers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/langchain-ai/langchain-mcp-adapters/0f825d7876b961371d7dd985a5914f628d655f41/tests/servers/__init__.py -------------------------------------------------------------------------------- /tests/servers/math_server.py: -------------------------------------------------------------------------------- 1 | from mcp.server.fastmcp import FastMCP 2 | 3 | mcp = FastMCP("Math") 4 | 5 | 6 | @mcp.tool() 7 | def add(a: int, b: int) -> int: 8 | """Add two numbers""" 9 | return a + b 10 | 11 | 12 | @mcp.tool() 13 | def multiply(a: int, b: int) -> int: 14 | """Multiply two numbers""" 15 | return a * b 16 | 17 | 18 | @mcp.prompt() 19 | def configure_assistant(skills: str) -> list[dict]: 20 | return [ 21 | { 22 | "role": "assistant", 23 | "content": f"You are a helpful assistant. You have the following skills: {skills}. Always use only one tool at a time.", 24 | } 25 | ] 26 | 27 | 28 | if __name__ == "__main__": 29 | mcp.run(transport="stdio") 30 | -------------------------------------------------------------------------------- /tests/servers/time_server.py: -------------------------------------------------------------------------------- 1 | from mcp.server.fastmcp import FastMCP 2 | 3 | mcp = FastMCP("time") 4 | 5 | 6 | @mcp.tool() 7 | def get_time() -> str: 8 | """Get current time""" 9 | return "5:20:00 PM EST" 10 | 11 | 12 | if __name__ == "__main__": 13 | mcp.run() 14 | -------------------------------------------------------------------------------- /tests/servers/weather_server.py: -------------------------------------------------------------------------------- 1 | from mcp.server.fastmcp import FastMCP 2 | 3 | mcp = FastMCP("Weather") 4 | 5 | 6 | @mcp.tool() 7 | async def get_weather(location: str) -> str: 8 | """Get weather for location.""" 9 | return f"It's always sunny in {location}" 10 | 11 | 12 | if __name__ == "__main__": 13 | mcp.run(transport="stdio") 14 | -------------------------------------------------------------------------------- /tests/test_client.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pathlib import Path 3 | 4 | import pytest 5 | from langchain_core.messages import AIMessage 6 | from langchain_core.tools import BaseTool 7 | 8 | from langchain_mcp_adapters.client import MultiServerMCPClient 9 | from langchain_mcp_adapters.tools import load_mcp_tools 10 | 11 | 12 | @pytest.mark.asyncio 13 | async def test_multi_server_mcp_client( 14 | socket_enabled, 15 | websocket_server, 16 | websocket_server_port: int, 17 | ): 18 | """Test that the MultiServerMCPClient can connect to multiple servers and load tools.""" 19 | 20 | # Get 
the absolute path to the server scripts
21 |     current_dir = Path(__file__).parent
22 |     math_server_path = os.path.join(current_dir, "servers/math_server.py")
23 |     weather_server_path = os.path.join(current_dir, "servers/weather_server.py")
24 | 
25 |     client = MultiServerMCPClient(
26 |         {
27 |             "math": {
28 |                 "command": "python",
29 |                 "args": [math_server_path],
30 |                 "transport": "stdio",
31 |             },
32 |             "weather": {
33 |                 "command": "python",
34 |                 "args": [weather_server_path],
35 |                 "transport": "stdio",
36 |             },
37 |             "time": {
38 |                 "url": f"ws://127.0.0.1:{websocket_server_port}/ws",
39 |                 "transport": "websocket",
40 |             },
41 |         }
42 |     )
43 |     # Check that we have tools from all three servers
44 |     all_tools = await client.get_tools()
45 | 
46 |     # Should have 4 tools (add, multiply, get_weather, get_time)
47 |     assert len(all_tools) == 4
48 | 
49 |     # Check that tools are BaseTool instances
50 |     for tool in all_tools:
51 |         assert isinstance(tool, BaseTool)
52 | 
53 |     # Verify tool names
54 |     tool_names = {tool.name for tool in all_tools}
55 |     assert tool_names == {"add", "multiply", "get_weather", "get_time"}
56 | 
57 |     # Check math server tools
58 |     math_tools = await client.get_tools(server_name="math")
59 |     assert len(math_tools) == 2
60 |     math_tool_names = {tool.name for tool in math_tools}
61 |     assert math_tool_names == {"add", "multiply"}
62 | 
63 |     # Check weather server tools
64 |     weather_tools = await client.get_tools(server_name="weather")
65 |     assert len(weather_tools) == 1
66 |     assert weather_tools[0].name == "get_weather"
67 | 
68 |     # Check time server tools
69 |     time_tools = await client.get_tools(server_name="time")
70 |     assert len(time_tools) == 1
71 |     assert time_tools[0].name == "get_time"
72 | 
73 |     # Test that we can call a math tool
74 |     add_tool = next(tool for tool in all_tools if tool.name == "add")
75 |     result = await add_tool.ainvoke({"a": 2, "b": 3})
76 |     assert result == "5"
77 | 
78 |     # Test that we can call a weather tool
79 |     weather_tool = next(tool for tool in all_tools if tool.name == "get_weather")
80 |     result = await weather_tool.ainvoke({"location": "London"})
81 |     assert result == "It's always sunny in London"
82 | 
83 |     # Test the multiply tool
84 |     multiply_tool = next(tool for tool in all_tools if tool.name == "multiply")
85 |     result = await multiply_tool.ainvoke({"a": 4, "b": 5})
86 |     assert result == "20"
87 | 
88 |     # Test that we can call a time tool
89 |     time_tool = next(tool for tool in all_tools if tool.name == "get_time")
90 |     result = await time_tool.ainvoke({"args": ""})
91 |     assert result == "5:20:00 PM EST"
92 | 
93 | 
94 | @pytest.mark.asyncio
95 | async def test_multi_server_connect_methods(
96 |     socket_enabled,
97 |     websocket_server,
98 |     websocket_server_port: int,
99 | ):
100 |     """Test the different connect methods for MultiServerMCPClient."""
101 | 
102 |     # Get the absolute path to the server scripts
103 |     current_dir = Path(__file__).parent
104 |     math_server_path = os.path.join(current_dir, "servers/math_server.py")
105 | 
106 |     # Initialize client without initial connections
107 |     client = MultiServerMCPClient(
108 |         {
109 |             "math": {
110 |                 "command": "python",
111 |                 "args": [math_server_path],
112 |                 "transport": "stdio",
113 |             },
114 |             "time": {
115 |                 "url": f"ws://127.0.0.1:{websocket_server_port}/ws",
116 |                 "transport": "websocket",
117 |             },
118 |         }
119 |     )
120 |     tool_names = set()
121 |     async with client.session("math") as session:
122 |         tools = await load_mcp_tools(session)
123 |         assert len(tools) == 2
124 |         result = await tools[0].ainvoke({"a": 2, "b": 3})
125 | 
assert result == "5" 126 | 127 | for tool in tools: 128 | tool_names.add(tool.name) 129 | 130 | async with client.session("time") as session: 131 | tools = await load_mcp_tools(session) 132 | assert len(tools) == 1 133 | result = await tools[0].ainvoke({"args": ""}) 134 | assert result == "5:20:00 PM EST" 135 | 136 | for tool in tools: 137 | tool_names.add(tool.name) 138 | 139 | assert tool_names == {"add", "multiply", "get_time"} 140 | 141 | 142 | @pytest.mark.asyncio 143 | async def test_get_prompt(): 144 | """Test retrieving prompts from MCP servers.""" 145 | 146 | # Get the absolute path to the server scripts 147 | current_dir = Path(__file__).parent 148 | math_server_path = os.path.join(current_dir, "servers/math_server.py") 149 | 150 | client = MultiServerMCPClient( 151 | { 152 | "math": { 153 | "command": "python", 154 | "args": [math_server_path], 155 | "transport": "stdio", 156 | } 157 | } 158 | ) 159 | # Test getting a prompt from the math server 160 | messages = await client.get_prompt( 161 | "math", "configure_assistant", arguments={"skills": "math, addition, multiplication"} 162 | ) 163 | 164 | # Check that we got an AIMessage back 165 | assert len(messages) == 1 166 | assert isinstance(messages[0], AIMessage) 167 | assert "You are a helpful assistant" in messages[0].content 168 | assert "math, addition, multiplication" in messages[0].content 169 | -------------------------------------------------------------------------------- /tests/test_import.py: -------------------------------------------------------------------------------- 1 | def test_import() -> None: 2 | """Test that the code can be imported""" 3 | from langchain_mcp_adapters import client, prompts, resources, tools # noqa: F401 4 | -------------------------------------------------------------------------------- /tests/test_prompts.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import AsyncMock 2 | 3 | import pytest 4 | from langchain_core.messages import AIMessage, HumanMessage 5 | from mcp.types import ( 6 | EmbeddedResource, 7 | ImageContent, 8 | PromptMessage, 9 | TextContent, 10 | TextResourceContents, 11 | ) 12 | 13 | from langchain_mcp_adapters.prompts import ( 14 | convert_mcp_prompt_message_to_langchain_message, 15 | load_mcp_prompt, 16 | ) 17 | 18 | 19 | @pytest.mark.parametrize( 20 | "role,text,expected_cls", 21 | [ 22 | ("assistant", "Hello", AIMessage), 23 | ("user", "Hello", HumanMessage), 24 | ], 25 | ) 26 | def test_convert_mcp_prompt_message_to_langchain_message_with_text_content( 27 | role: str, text: str, expected_cls: type 28 | ): 29 | message = PromptMessage(role=role, content=TextContent(type="text", text=text)) 30 | result = convert_mcp_prompt_message_to_langchain_message(message) 31 | assert isinstance(result, expected_cls) 32 | assert result.content == text 33 | 34 | 35 | @pytest.mark.parametrize("role", ["assistant", "user"]) 36 | def test_convert_mcp_prompt_message_to_langchain_message_with_resource_content(role: str): 37 | message = PromptMessage( 38 | role=role, 39 | content=EmbeddedResource( 40 | type="resource", 41 | resource=TextResourceContents( 42 | uri="message://greeting", mimeType="text/plain", text="hi" 43 | ), 44 | ), 45 | ) 46 | with pytest.raises(ValueError): 47 | convert_mcp_prompt_message_to_langchain_message(message) 48 | 49 | 50 | @pytest.mark.parametrize("role", ["assistant", "user"]) 51 | def test_convert_mcp_prompt_message_to_langchain_message_with_image_content(role: str): 52 | message = PromptMessage( 53 | 
role=role, content=ImageContent(type="image", mimeType="image/png", data="base64data") 54 | ) 55 | with pytest.raises(ValueError): 56 | convert_mcp_prompt_message_to_langchain_message(message) 57 | 58 | 59 | @pytest.mark.asyncio 60 | async def test_load_mcp_prompt(): 61 | session = AsyncMock() 62 | session.get_prompt = AsyncMock( 63 | return_value=AsyncMock( 64 | messages=[ 65 | PromptMessage(role="user", content=TextContent(type="text", text="Hello")), 66 | PromptMessage(role="assistant", content=TextContent(type="text", text="Hi")), 67 | ] 68 | ) 69 | ) 70 | result = await load_mcp_prompt(session, "test_prompt") 71 | assert len(result) == 2 72 | assert isinstance(result[0], HumanMessage) 73 | assert result[0].content == "Hello" 74 | assert isinstance(result[1], AIMessage) 75 | assert result[1].content == "Hi" 76 | -------------------------------------------------------------------------------- /tests/test_resources.py: -------------------------------------------------------------------------------- 1 | import base64 2 | from unittest.mock import AsyncMock 3 | 4 | import pytest 5 | from langchain_core.documents.base import Blob 6 | from mcp.types import ( 7 | BlobResourceContents, 8 | ListResourcesResult, 9 | ReadResourceResult, 10 | Resource, 11 | ResourceContents, 12 | TextResourceContents, 13 | ) 14 | 15 | from langchain_mcp_adapters.resources import ( 16 | convert_mcp_resource_to_langchain_blob, 17 | get_mcp_resource, 18 | load_mcp_resources, 19 | ) 20 | 21 | 22 | def test_convert_mcp_resource_to_langchain_blob_with_text(): 23 | uri = "file:///test.txt" 24 | contents = TextResourceContents(uri=uri, mimeType="text/plain", text="Hello, world!") 25 | 26 | blob = convert_mcp_resource_to_langchain_blob(uri, contents) 27 | 28 | assert isinstance(blob, Blob) 29 | assert blob.data == "Hello, world!" 
30 | assert blob.mimetype == "text/plain" 31 | assert blob.metadata["uri"] == uri 32 | 33 | 34 | def test_convert_mcp_resource_to_langchain_blob(): 35 | uri = "file:///test.png" 36 | original_data = b"binary-image-data" 37 | base64_blob = base64.b64encode(original_data).decode() 38 | 39 | contents = BlobResourceContents(uri=uri, mimeType="image/png", blob=base64_blob) 40 | 41 | blob = convert_mcp_resource_to_langchain_blob(uri, contents) 42 | 43 | assert isinstance(blob, Blob) 44 | assert blob.data == original_data 45 | assert blob.mimetype == "image/png" 46 | assert blob.metadata["uri"] == uri 47 | 48 | 49 | def test_convert_mcp_resource_to_langchain_blob_with_invalid_type(): 50 | class DummyContent(ResourceContents): 51 | pass 52 | 53 | with pytest.raises(ValueError): 54 | convert_mcp_resource_to_langchain_blob("file:///dummy", DummyContent()) 55 | 56 | 57 | @pytest.mark.asyncio 58 | async def test_get_mcp_resource_with_contents(): 59 | session = AsyncMock() 60 | uri = "file:///test.txt" 61 | 62 | session.read_resource = AsyncMock( 63 | return_value=ReadResourceResult( 64 | contents=[ 65 | TextResourceContents(uri=uri, mimeType="text/plain", text="Content 1"), 66 | TextResourceContents(uri=uri, mimeType="text/plain", text="Content 2"), 67 | ] 68 | ) 69 | ) 70 | 71 | blobs = await get_mcp_resource(session, uri) 72 | 73 | assert len(blobs) == 2 74 | assert all(isinstance(d, Blob) for d in blobs) 75 | assert blobs[0].data == "Content 1" 76 | assert blobs[1].data == "Content 2" 77 | 78 | 79 | @pytest.mark.asyncio 80 | async def test_get_mcp_resource_with_text_and_blob(): 81 | session = AsyncMock() 82 | uri = "file:///mixed" 83 | 84 | original_data = b"some-binary-content" 85 | base64_blob = base64.b64encode(original_data).decode() 86 | 87 | session.read_resource = AsyncMock( 88 | return_value=ReadResourceResult( 89 | contents=[ 90 | TextResourceContents(uri=uri, mimeType="text/plain", text="Hello Text"), 91 | BlobResourceContents( 92 | uri=uri, mimeType="application/octet-stream", blob=base64_blob 93 | ), 94 | ] 95 | ) 96 | ) 97 | 98 | results = await get_mcp_resource(session, uri) 99 | 100 | assert len(results) == 2 101 | 102 | assert isinstance(results[0], Blob) 103 | assert results[0].data == "Hello Text" 104 | assert results[0].mimetype == "text/plain" 105 | 106 | assert isinstance(results[1], Blob) 107 | assert results[1].data == original_data 108 | assert results[1].mimetype == "application/octet-stream" 109 | 110 | 111 | @pytest.mark.asyncio 112 | async def test_get_mcp_resource_with_empty_contents(): 113 | session = AsyncMock() 114 | uri = "file:///empty.txt" 115 | 116 | session.read_resource = AsyncMock(return_value=ReadResourceResult(contents=[])) 117 | 118 | blobs = await get_mcp_resource(session, uri) 119 | 120 | assert len(blobs) == 0 121 | session.read_resource.assert_called_once_with(uri) 122 | 123 | 124 | @pytest.mark.asyncio 125 | async def test_load_mcp_resources_with_list_of_uris(): 126 | session = AsyncMock() 127 | uri1 = "file:///test1.txt" 128 | uri2 = "file:///test2.txt" 129 | 130 | session.read_resource = AsyncMock() 131 | session.read_resource.side_effect = [ 132 | ReadResourceResult( 133 | contents=[ 134 | TextResourceContents(uri=uri1, mimeType="text/plain", text="Content from test1") 135 | ] 136 | ), 137 | ReadResourceResult( 138 | contents=[ 139 | TextResourceContents(uri=uri2, mimeType="text/plain", text="Content from test2") 140 | ] 141 | ), 142 | ] 143 | 144 | blobs = await load_mcp_resources(session, uris=[uri1, uri2]) 145 | 146 | assert len(blobs) == 2 147 | 
assert all(isinstance(d, Blob) for d in blobs) 148 | assert blobs[0].data == "Content from test1" 149 | assert blobs[1].data == "Content from test2" 150 | assert blobs[0].metadata["uri"] == uri1 151 | assert blobs[1].metadata["uri"] == uri2 152 | assert session.read_resource.call_count == 2 153 | 154 | 155 | @pytest.mark.asyncio 156 | async def test_load_mcp_resources_with_single_uri_string(): 157 | session = AsyncMock() 158 | uri = "file:///test.txt" 159 | 160 | session.read_resource = AsyncMock( 161 | return_value=ReadResourceResult( 162 | contents=[ 163 | TextResourceContents(uri=uri, mimeType="text/plain", text="Content from test") 164 | ] 165 | ) 166 | ) 167 | 168 | blobs = await load_mcp_resources(session, uris=uri) 169 | 170 | assert len(blobs) == 1 171 | assert isinstance(blobs[0], Blob) 172 | assert blobs[0].data == "Content from test" 173 | assert blobs[0].metadata["uri"] == uri 174 | session.read_resource.assert_called_once_with(uri) 175 | 176 | 177 | @pytest.mark.asyncio 178 | async def test_load_mcp_resources_with_all_resources(): 179 | session = AsyncMock() 180 | 181 | session.list_resources = AsyncMock( 182 | return_value=ListResourcesResult( 183 | resources=[ 184 | Resource(uri="file:///test1.txt", name="test1.txt", mimeType="text/plain"), 185 | Resource(uri="file:///test2.txt", name="test2.txt", mimeType="text/plain"), 186 | ] 187 | ) 188 | ) 189 | 190 | session.read_resource = AsyncMock() 191 | session.read_resource.side_effect = [ 192 | ReadResourceResult( 193 | contents=[ 194 | TextResourceContents( 195 | uri="file:///test1.txt", mimeType="text/plain", text="Content from test1" 196 | ) 197 | ] 198 | ), 199 | ReadResourceResult( 200 | contents=[ 201 | TextResourceContents( 202 | uri="file:///test2.txt", mimeType="text/plain", text="Content from test2" 203 | ) 204 | ] 205 | ), 206 | ] 207 | 208 | blobs = await load_mcp_resources(session) 209 | 210 | assert len(blobs) == 2 211 | assert blobs[0].data == "Content from test1" 212 | assert blobs[1].data == "Content from test2" 213 | assert session.list_resources.called 214 | assert session.read_resource.call_count == 2 215 | 216 | 217 | @pytest.mark.asyncio 218 | async def test_load_mcp_resources_with_error_handling(): 219 | session = AsyncMock() 220 | uri1 = "file:///valid.txt" 221 | uri2 = "file:///error.txt" 222 | 223 | session.read_resource = AsyncMock() 224 | session.read_resource.side_effect = [ 225 | ReadResourceResult( 226 | contents=[TextResourceContents(uri=uri1, mimeType="text/plain", text="Valid content")] 227 | ), 228 | Exception("Resource not found"), 229 | ] 230 | 231 | with pytest.raises(RuntimeError) as exc_info: 232 | await load_mcp_resources(session, uris=[uri1, uri2]) 233 | 234 | assert "Error fetching resource" in str(exc_info.value) 235 | 236 | 237 | @pytest.mark.asyncio 238 | async def test_load_mcp_resources_with_blob_content(): 239 | session = AsyncMock() 240 | uri = "file:///with_blob" 241 | original_data = b"binary data" 242 | base64_blob = base64.b64encode(original_data).decode() 243 | 244 | session.read_resource = AsyncMock( 245 | return_value=ReadResourceResult( 246 | contents=[ 247 | BlobResourceContents(uri=uri, mimeType="application/octet-stream", blob=base64_blob) 248 | ] 249 | ) 250 | ) 251 | 252 | blobs = await load_mcp_resources(session, uris=uri) 253 | 254 | assert len(blobs) == 1 255 | assert isinstance(blobs[0], Blob) 256 | assert blobs[0].data == original_data 257 | assert blobs[0].mimetype == "application/octet-stream" 258 | 
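The resource tests above only exercise load_mcp_resources against mocked sessions. For reference, here is a minimal sketch of the same helpers pointed at a live server; the connection dict, the server script path, and the assumption that the server registers any resources at all are hypothetical rather than taken from this repository.

import asyncio

from langchain_mcp_adapters.resources import load_mcp_resources
from langchain_mcp_adapters.sessions import create_session


async def main() -> None:
    # Hypothetical stdio connection; any MCP server that registers resources would work here.
    connection = {
        "transport": "stdio",
        "command": "python",
        "args": ["path/to/your_resource_server.py"],
    }
    async with create_session(connection) as session:
        # Sessions must be initialized before issuing MCP requests.
        await session.initialize()
        # With no `uris` argument, every resource listed by the server is read.
        blobs = await load_mcp_resources(session)
        for blob in blobs:
            print(blob.metadata["uri"], blob.mimetype)


asyncio.run(main())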
-------------------------------------------------------------------------------- /tests/test_tools.py: -------------------------------------------------------------------------------- 1 | from typing import Annotated 2 | from unittest.mock import AsyncMock, MagicMock 3 | 4 | import pytest 5 | from langchain_core.callbacks import CallbackManagerForToolRun 6 | from langchain_core.messages import ToolMessage 7 | from langchain_core.tools import ArgsSchema, BaseTool, InjectedToolArg, ToolException, tool 8 | from mcp.types import ( 9 | CallToolResult, 10 | EmbeddedResource, 11 | ImageContent, 12 | TextContent, 13 | TextResourceContents, 14 | ) 15 | from mcp.types import Tool as MCPTool 16 | from pydantic import BaseModel 17 | 18 | from langchain_mcp_adapters.client import MultiServerMCPClient 19 | from langchain_mcp_adapters.tools import ( 20 | _convert_call_tool_result, 21 | convert_mcp_tool_to_langchain_tool, 22 | load_mcp_tools, 23 | to_fastmcp, 24 | ) 25 | from tests.utils import run_streamable_http 26 | 27 | 28 | def test_convert_empty_text_content(): 29 | # Test with a single text content 30 | result = CallToolResult( 31 | content=[], 32 | isError=False, 33 | ) 34 | 35 | text_content, non_text_content = _convert_call_tool_result(result) 36 | 37 | assert text_content == "" 38 | assert non_text_content is None 39 | 40 | 41 | def test_convert_single_text_content(): 42 | # Test with a single text content 43 | result = CallToolResult( 44 | content=[TextContent(type="text", text="test result")], 45 | isError=False, 46 | ) 47 | 48 | text_content, non_text_content = _convert_call_tool_result(result) 49 | 50 | assert text_content == "test result" 51 | assert non_text_content is None 52 | 53 | 54 | def test_convert_multiple_text_contents(): 55 | # Test with multiple text contents 56 | result = CallToolResult( 57 | content=[ 58 | TextContent(type="text", text="result 1"), 59 | TextContent(type="text", text="result 2"), 60 | ], 61 | isError=False, 62 | ) 63 | 64 | text_content, non_text_content = _convert_call_tool_result(result) 65 | 66 | assert text_content == ["result 1", "result 2"] 67 | assert non_text_content is None 68 | 69 | 70 | def test_convert_with_non_text_content(): 71 | # Test with non-text content 72 | image_content = ImageContent(type="image", mimeType="image/png", data="base64data") 73 | resource_content = EmbeddedResource( 74 | type="resource", 75 | resource=TextResourceContents(uri="resource://test", mimeType="text/plain", text="hi"), 76 | ) 77 | 78 | result = CallToolResult( 79 | content=[ 80 | TextContent(type="text", text="text result"), 81 | image_content, 82 | resource_content, 83 | ], 84 | isError=False, 85 | ) 86 | 87 | text_content, non_text_content = _convert_call_tool_result(result) 88 | 89 | assert text_content == "text result" 90 | assert non_text_content == [image_content, resource_content] 91 | 92 | 93 | def test_convert_with_error(): 94 | # Test with error 95 | result = CallToolResult( 96 | content=[TextContent(type="text", text="error message")], 97 | isError=True, 98 | ) 99 | 100 | with pytest.raises(ToolException) as exc_info: 101 | _convert_call_tool_result(result) 102 | 103 | assert str(exc_info.value) == "error message" 104 | 105 | 106 | @pytest.mark.asyncio 107 | async def test_convert_mcp_tool_to_langchain_tool(): 108 | tool_input_schema = { 109 | "properties": { 110 | "param1": {"title": "Param1", "type": "string"}, 111 | "param2": {"title": "Param2", "type": "integer"}, 112 | }, 113 | "required": ["param1", "param2"], 114 | "title": "ToolSchema", 115 | 
"type": "object", 116 | } 117 | # Mock session and MCP tool 118 | session = AsyncMock() 119 | session.call_tool.return_value = CallToolResult( 120 | content=[TextContent(type="text", text="tool result")], 121 | isError=False, 122 | ) 123 | 124 | mcp_tool = MCPTool( 125 | name="test_tool", 126 | description="Test tool description", 127 | inputSchema=tool_input_schema, 128 | ) 129 | 130 | # Convert MCP tool to LangChain tool 131 | lc_tool = convert_mcp_tool_to_langchain_tool(session, mcp_tool) 132 | 133 | # Verify the converted tool 134 | assert lc_tool.name == "test_tool" 135 | assert lc_tool.description == "Test tool description" 136 | assert lc_tool.args_schema == tool_input_schema 137 | 138 | # Test calling the tool 139 | result = await lc_tool.ainvoke( 140 | {"args": {"param1": "test", "param2": 42}, "id": "1", "type": "tool_call"} 141 | ) 142 | 143 | # Verify session.call_tool was called with correct arguments 144 | session.call_tool.assert_called_once_with("test_tool", {"param1": "test", "param2": 42}) 145 | 146 | # Verify result 147 | assert result == ToolMessage(content="tool result", name="test_tool", tool_call_id="1") 148 | 149 | 150 | @pytest.mark.asyncio 151 | async def test_load_mcp_tools(): 152 | tool_input_schema = { 153 | "properties": { 154 | "param1": {"title": "Param1", "type": "string"}, 155 | "param2": {"title": "Param2", "type": "integer"}, 156 | }, 157 | "required": ["param1", "param2"], 158 | "title": "ToolSchema", 159 | "type": "object", 160 | } 161 | # Mock session and list_tools response 162 | session = AsyncMock() 163 | mcp_tools = [ 164 | MCPTool( 165 | name="tool1", 166 | description="Tool 1 description", 167 | inputSchema=tool_input_schema, 168 | ), 169 | MCPTool( 170 | name="tool2", 171 | description="Tool 2 description", 172 | inputSchema=tool_input_schema, 173 | ), 174 | ] 175 | session.list_tools.return_value = MagicMock(tools=mcp_tools) 176 | 177 | # Mock call_tool to return different results for different tools 178 | async def mock_call_tool(tool_name, arguments): 179 | if tool_name == "tool1": 180 | return CallToolResult( 181 | content=[TextContent(type="text", text=f"tool1 result with {arguments}")], 182 | isError=False, 183 | ) 184 | else: 185 | return CallToolResult( 186 | content=[TextContent(type="text", text=f"tool2 result with {arguments}")], 187 | isError=False, 188 | ) 189 | 190 | session.call_tool.side_effect = mock_call_tool 191 | 192 | # Load MCP tools 193 | tools = await load_mcp_tools(session) 194 | 195 | # Verify the tools 196 | assert len(tools) == 2 197 | assert all(isinstance(tool, BaseTool) for tool in tools) 198 | assert tools[0].name == "tool1" 199 | assert tools[1].name == "tool2" 200 | 201 | # Test calling the first tool 202 | result1 = await tools[0].ainvoke( 203 | {"args": {"param1": "test1", "param2": 1}, "id": "1", "type": "tool_call"} 204 | ) 205 | assert result1 == ToolMessage( 206 | content="tool1 result with {'param1': 'test1', 'param2': 1}", name="tool1", tool_call_id="1" 207 | ) 208 | 209 | # Test calling the second tool 210 | result2 = await tools[1].ainvoke( 211 | {"args": {"param1": "test2", "param2": 2}, "id": "2", "type": "tool_call"} 212 | ) 213 | assert result2 == ToolMessage( 214 | content="tool2 result with {'param1': 'test2', 'param2': 2}", name="tool2", tool_call_id="2" 215 | ) 216 | 217 | 218 | @pytest.mark.asyncio 219 | async def test_load_mcp_tools_with_annotations( 220 | socket_enabled, 221 | ) -> None: 222 | """Test load mcp tools with annotations.""" 223 | from mcp.server import FastMCP 224 | from 
mcp.types import ToolAnnotations 225 | 226 | server = FastMCP(port=8181) 227 | 228 | @server.tool( 229 | annotations=ToolAnnotations(title="Get Time", readOnlyHint=True, idempotentHint=False) 230 | ) 231 | def get_time() -> str: 232 | """Get current time""" 233 | return "5:20:00 PM EST" 234 | 235 | async with run_streamable_http(server): 236 | # Initialize client without initial connections 237 | client = MultiServerMCPClient( 238 | { 239 | "time": { 240 | "url": "http://localhost:8181/mcp/", 241 | "transport": "streamable_http", 242 | }, 243 | } 244 | ) 245 | # pass 246 | tools = await client.get_tools(server_name="time") 247 | assert len(tools) == 1 248 | tool = tools[0] 249 | assert tool.name == "get_time" 250 | assert tool.metadata == { 251 | "title": "Get Time", 252 | "readOnlyHint": True, 253 | "idempotentHint": False, 254 | "destructiveHint": None, 255 | "openWorldHint": None, 256 | } 257 | 258 | 259 | @tool 260 | def add(a: int, b: int) -> int: 261 | """Add two numbers""" 262 | return a + b 263 | 264 | 265 | class AddInput(BaseModel): 266 | """Add two numbers""" 267 | 268 | a: int 269 | b: int 270 | 271 | 272 | @tool("add", args_schema=AddInput) 273 | def add_with_schema(a: int, b: int) -> int: 274 | return a + b 275 | 276 | 277 | @tool("add") 278 | def add_with_injection(a: int, b: int, injected_arg: Annotated[str, InjectedToolArg()]) -> int: 279 | """Add two numbers""" 280 | return a + b 281 | 282 | 283 | class AddTool(BaseTool): 284 | name: str = "add" 285 | description: str = "Add two numbers" 286 | args_schema: ArgsSchema | None = AddInput 287 | 288 | def _run(self, a: int, b: int, run_manager: CallbackManagerForToolRun | None = None) -> int: 289 | """Use the tool.""" 290 | return a + b 291 | 292 | async def _arun( 293 | self, a: int, b: int, run_manager: CallbackManagerForToolRun | None = None 294 | ) -> int: 295 | """Use the tool.""" 296 | return self._run(a, b, run_manager=run_manager) 297 | 298 | 299 | @pytest.mark.parametrize( 300 | "tool_instance", 301 | [ 302 | add, 303 | add_with_schema, 304 | AddTool(), 305 | ], 306 | ids=["tool", "tool_with_schema", "tool_class"], 307 | ) 308 | async def test_convert_langchain_tool_to_fastmcp_tool(tool_instance): 309 | fastmcp_tool = to_fastmcp(tool_instance) 310 | assert fastmcp_tool.name == "add" 311 | assert fastmcp_tool.description == "Add two numbers" 312 | assert fastmcp_tool.parameters == { 313 | "description": "Add two numbers", 314 | "properties": { 315 | "a": {"title": "A", "type": "integer"}, 316 | "b": {"title": "B", "type": "integer"}, 317 | }, 318 | "required": ["a", "b"], 319 | "title": "add", 320 | "type": "object", 321 | } 322 | assert fastmcp_tool.fn_metadata.arg_model.model_json_schema() == { 323 | "properties": { 324 | "a": {"title": "A", "type": "integer"}, 325 | "b": {"title": "B", "type": "integer"}, 326 | }, 327 | "required": ["a", "b"], 328 | "title": "addArguments", 329 | "type": "object", 330 | } 331 | 332 | arguments = {"a": 1, "b": 2} 333 | assert await fastmcp_tool.run(arguments=arguments) == 3 334 | 335 | 336 | def test_convert_langchain_tool_to_fastmcp_tool_with_injection(): 337 | with pytest.raises(NotImplementedError): 338 | to_fastmcp(add_with_injection) 339 | -------------------------------------------------------------------------------- /tests/utils.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import contextlib 3 | import time 4 | from typing import AsyncGenerator 5 | 6 | import uvicorn 7 | from mcp.server.fastmcp import FastMCP 8 | 
from mcp.server.websocket import websocket_server
9 | from starlette.applications import Starlette
10 | from starlette.routing import WebSocketRoute
11 | 
12 | from tests.servers.time_server import mcp as time_mcp
13 | 
14 | 
15 | def make_server_app() -> Starlette:
16 |     server = time_mcp._mcp_server
17 | 
18 |     async def handle_ws(websocket):
19 |         async with websocket_server(websocket.scope, websocket.receive, websocket.send) as streams:
20 |             await server.run(streams[0], streams[1], server.create_initialization_options())
21 | 
22 |     app = Starlette(
23 |         routes=[
24 |             WebSocketRoute("/ws", endpoint=handle_ws),
25 |         ]
26 |     )
27 | 
28 |     return app
29 | 
30 | 
31 | def run_server(server_port: int) -> None:
32 |     app = make_server_app()
33 |     server = uvicorn.Server(
34 |         config=uvicorn.Config(app=app, host="127.0.0.1", port=server_port, log_level="error")
35 |     )
36 |     server.run()
37 | 
38 |     # NOTE: server.run() blocks until the process is terminated, so nothing after it
39 |     # executes here; the websocket_server fixture in conftest.py polls the port
40 |     # to detect when the server is ready.
41 | 
42 | 
43 | @contextlib.asynccontextmanager
44 | async def run_streamable_http(server: FastMCP) -> AsyncGenerator[None, None]:
45 |     """Run the server in a separate task exposing a streamable HTTP endpoint.
46 | 
47 |     The endpoint will be available at `http://localhost:{server.settings.port}/mcp/`.
48 |     """
49 |     app = server.streamable_http_app()
50 |     config = uvicorn.Config(
51 |         app,
52 |         host="localhost",
53 |         port=server.settings.port,
54 |     )
55 |     http_server = uvicorn.Server(config)
56 |     serve_task = asyncio.create_task(http_server.serve())
57 | 
58 |     while not http_server.started:
59 |         await asyncio.sleep(0.1)
60 | 
61 |     try:
62 |         yield
63 |     finally:
64 |         http_server.should_exit = True
65 |         await serve_task
66 | 
--------------------------------------------------------------------------------
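Taken together, the adapters above can be driven end to end as follows. This is a minimal usage sketch rather than code from the repository: it assumes it is executed from the repository root so that tests/servers/math_server.py resolves, and it uses only the MultiServerMCPClient methods exercised by the tests (get_tools and get_prompt).

import asyncio

from langchain_mcp_adapters.client import MultiServerMCPClient


async def main() -> None:
    client = MultiServerMCPClient(
        {
            "math": {
                "command": "python",
                "args": ["tests/servers/math_server.py"],
                "transport": "stdio",
            },
        }
    )

    # Every MCP tool on the configured servers becomes a LangChain BaseTool.
    tools = await client.get_tools()
    add_tool = next(tool for tool in tools if tool.name == "add")
    print(await add_tool.ainvoke({"a": 2, "b": 3}))  # prints "5"

    # MCP prompts are converted to LangChain messages.
    messages = await client.get_prompt(
        "math", "configure_assistant", arguments={"skills": "addition"}
    )
    print(messages[0].content)


asyncio.run(main())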