├── .dockerignore ├── .env.example ├── .github └── workflows │ ├── check-format.yml │ ├── check-lint.yml │ ├── publish-pypi.yml │ └── version-check.yml ├── .gitignore ├── Dockerfile ├── LICENSE ├── README.md ├── pyproject.toml ├── requirements.txt ├── smithery.yaml ├── src ├── __init__.py ├── api │ ├── common.py │ ├── prompts │ │ ├── __init__.py │ │ ├── prompts.py │ │ ├── register.py │ │ └── types.py │ ├── resources │ │ ├── __init__.py │ │ ├── register.py │ │ ├── resources.py │ │ └── types.py │ ├── tools │ │ ├── __init__.py │ │ ├── registery.py │ │ ├── sample_notebook.ipynb │ │ ├── tools.py │ │ └── types.py │ └── types.py ├── auth │ ├── __init__.py │ ├── browser_auth.py │ ├── callback.py │ └── provider.py ├── commands │ ├── __init__.py │ ├── init.py │ └── start.py ├── config │ ├── __init__.py │ └── config.py ├── main.py └── version.py └── uv.lock /.dockerignore: -------------------------------------------------------------------------------- 1 | # Exclude unnecessary files and directories 2 | __pycache__/ 3 | *.pyc 4 | *.pyo 5 | *.pyd 6 | *.log 7 | *.swp 8 | .DS_Store 9 | .env 10 | node_modules/ 11 | .git/ 12 | .vscode/ 13 | 14 | dist 15 | build -------------------------------------------------------------------------------- /.env.example: -------------------------------------------------------------------------------- 1 | # Copy this file to .env and fill in your values 2 | 3 | # SingleStore's management API key 4 | SINGLESTORE_API_KEY=your_api_key_here 5 | 6 | # Note: Never commit the actual .env file with credentials to version control 7 | # Add .env to your .gitignore file 8 | -------------------------------------------------------------------------------- /.github/workflows/check-format.yml: -------------------------------------------------------------------------------- 1 | name: Check Python code format with Black 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | paths: 8 | - 'src/**/*.py' 9 | pull_request: 10 | paths: 11 | - 'src/**/*.py' 12 | 13 | jobs: 
14 | black-format: 15 | runs-on: ubuntu-latest 16 | steps: 17 | - name: Checkout code 18 | uses: actions/checkout@v4 19 | 20 | - name: Set up Python 21 | uses: actions/setup-python@v5 22 | with: 23 | python-version: '3.11' 24 | 25 | - name: Install Black 26 | run: pip install black 27 | 28 | - name: Check code format with Black 29 | run: black --check src/ 30 | -------------------------------------------------------------------------------- /.github/workflows/check-lint.yml: -------------------------------------------------------------------------------- 1 | name: Lint Python code with Flake8 2 | 3 | on: 4 | pull_request: 5 | paths: 6 | - 'src/**/*.py' 7 | 8 | jobs: 9 | flake8-lint: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Checkout code 13 | uses: actions/checkout@v4 14 | 15 | - name: Set up Python 16 | uses: actions/setup-python@v5 17 | with: 18 | python-version: '3.11' 19 | 20 | - name: Install Flake8 21 | run: pip install flake8 22 | 23 | - name: Run Flake8 24 | run: flake8 src/ --ignore=E501,W503 25 | -------------------------------------------------------------------------------- /.github/workflows/publish-pypi.yml: -------------------------------------------------------------------------------- 1 | name: Publish Python Package 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | workflow_dispatch: # Allows manual triggering 8 | 9 | jobs: 10 | publish: 11 | runs-on: ubuntu-latest 12 | environment: release 13 | permissions: 14 | id-token: write # OIDC authentication with PyPI 15 | 16 | steps: 17 | - uses: actions/checkout@v4 18 | 19 | - name: Set up Python 20 | uses: actions/setup-python@v4 21 | with: 22 | python-version: '3.11' 23 | 24 | - name: Install dependencies 25 | run: | 26 | python -m pip install --upgrade pip 27 | pip install build twine 28 | pip install -r requirements.txt 29 | pip install flake8 30 | 31 | - name: Run Flake8 32 | run: flake8 src/ --ignore=E501,W503 33 | 34 | - name: Build package 35 | run: python -m build 36 | 37 | - name: 
Publish package to PyPI 38 | uses: pypa/gh-action-pypi-publish@release/v1 39 | with: 40 | password: ${{ secrets.PYPI_API_KEY }} -------------------------------------------------------------------------------- /.github/workflows/version-check.yml: -------------------------------------------------------------------------------- 1 | name: Version Check 2 | 3 | on: 4 | pull_request: 5 | branches: [ main ] 6 | push: 7 | branches: [ main ] 8 | workflow_dispatch: 9 | 10 | jobs: 11 | check-version: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - name: Checkout code 15 | uses: actions/checkout@v4 16 | 17 | - name: Set up Python 18 | uses: actions/setup-python@v5 19 | with: 20 | python-version: '3.10' 21 | 22 | - name: Install dependencies 23 | run: | 24 | python -m pip install --upgrade pip 25 | pip install requests packaging 26 | - name: Check version against PyPI 27 | run: | 28 | python - < {pypi_version}") 62 | else: 63 | if pypi_response.status_code == 404: 64 | print(f"ℹ️ Package {package_name} not found on PyPI. 
This might be the first release.") 65 | else: 66 | print(f"⚠️ PyPI API returned status code {pypi_response.status_code}") 67 | print(f"Response: {pypi_response.text}") 68 | sys.exit(1) 69 | except Exception as e: 70 | print(f"❌ Error checking PyPI version: {e}") 71 | sys.exit(1) 72 | EOF -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | dist/ 11 | build/ 12 | *.egg-info/ 13 | *.egg 14 | 15 | # Virtual environments 16 | venv/ 17 | env/ 18 | .env/ 19 | .venv/ 20 | 21 | # IDE specific files 22 | .idea/ 23 | .vscode/ 24 | *.swp 25 | *.swo 26 | 27 | # Python testing 28 | .pytest_cache/ 29 | .coverage 30 | htmlcov/ 31 | 32 | # Environment variables 33 | .env 34 | .env.local 35 | .env.remote 36 | 37 | # Logs 38 | *.log 39 | 40 | # Local development settings 41 | config/settings.local.toml 42 | 43 | # Cache files 44 | .mypy_cache/ 45 | .dmypy.json 46 | dmypy.json 47 | 48 | # Tool specific 49 | tool_result.txt 50 | .DS_Store -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.12-slim-bookworm 2 | COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/ 3 | 4 | # Copy the project into the image 5 | ADD . 
/app 6 | 7 | # Sync the project into a new environment, asserting the lockfile is up to date 8 | WORKDIR /app 9 | RUN uv sync --locked 10 | 11 | # Expose the port the MCP server runs on 12 | EXPOSE 8000 13 | 14 | CMD ["uv", "run", "src/main.py", "start", "--transport", "sse"] 15 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 - 2025 SingleStore Inc. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # SingleStore MCP Server 2 | 3 | [![MIT License](https://img.shields.io/badge/License-MIT-yellow.svg)](https://github.com/singlestore-labs/mcp-server-singlestore/blob/main/LICENSE) [![PyPI](https://img.shields.io/pypi/v/singlestore-mcp-server)](https://pypi.org/project/singlestore-mcp-server/) [![Downloads](https://static.pepy.tech/badge/singlestore-mcp-server)](https://pepy.tech/project/singlestore-mcp-server) [![Smithery](https://smithery.ai/badge/@singlestore-labs/mcp-server-singlestore)](https://smithery.ai/server/@singlestore-labs/mcp-server-singlestore) 4 | 5 | [Model Context Protocol](https://modelcontextprotocol.io/introduction) (MCP) is a standardized protocol designed to manage context between large language models (LLMs) and external systems. This repository provides an installer and an MCP Server for SingleStore, enabling seamless integration. 6 | 7 | With MCP, you can use Claude Desktop, Cursor, or any compatible MCP client to interact with SingleStore using natural language, making it easier to perform complex operations effortlessly. 8 | 9 | ## Requirements 10 | 11 | - Python >= v3.11.0 12 | - [uvx](https://docs.astral.sh/uv/guides/tools/) installed on your python environment 13 | - Claude Desktop, Cursor, or another supported LLM client 14 | 15 | ## Client Setup 16 | 17 | ### 1. Init Command 18 | 19 | The simplest way to set up the MCP server is to use the initialization command: 20 | 21 | ```bash 22 | uvx singlestore-mcp-server init --api-key 23 | ``` 24 | 25 | This command will: 26 | 27 | 1. Authenticate the user 28 | 2. Automatically locate the configuration file for your platform 29 | 3. Create or update the configuration to include the SingleStore MCP server 30 | 4.
Provide instructions for starting the server 31 | 32 | To specify a client (e.g., `claude` or `cursor`), use the `--client` flag: 33 | 34 | ```bash 35 | uvx singlestore-mcp-server init --api-key --client= 36 | ``` 37 | 38 | ### 2. Installing via Smithery 39 | 40 | To install `mcp-server-singlestore` automatically via [Smithery](https://smithery.ai/server/@singlestore-labs/mcp-server-singlestore): 41 | 42 | ```bash 43 | npx -y @smithery/cli install @singlestore-labs/mcp-server-singlestore --client= 44 | ``` 45 | 46 | Replace `` with `claude` or `cursor` as needed. 47 | 48 | ### 3. Manual Configuration 49 | 50 | #### Claude Desktop and Cursor 51 | 52 | 1. Add the following configuration to your client configuration file. Check the client's configuration file here: 53 | 54 | - [Claude Desktop](https://modelcontextprotocol.io/quickstart/user) 55 | - [Cursor](https://docs.cursor.com/context/model-context-protocol#configuration-locations) 56 | 57 | ```json 58 | { 59 | "mcpServers": { 60 | "singlestore-mcp-server": { 61 | "command": "uvx", 62 | "args": [ 63 | "singlestore-mcp-server", 64 | "start", 65 | "--api-key", 66 | "" 67 | ] 68 | } 69 | } 70 | } 71 | ``` 72 | 73 | 2. Restart your client after making changes to the configuration. 
74 | 75 | ## Components 76 | 77 | ### Tools 78 | 79 | The server implements the following tools: 80 | 81 | - **workspace_groups_info**: Retrieve details about the workspace groups accessible to the user 82 | - No arguments required 83 | - Returns details of the workspace groups 84 | - **workspaces_info**: Retrieve details about the workspaces in a specific workspace group 85 | - Arguments: `workspaceGroupID` (string) 86 | - Returns details of the workspaces 87 | - **organization_info**: Retrieve details about the user's current organization 88 | - No arguments required 89 | - Returns details of the organization 90 | - **list_of_regions**: Retrieve a list of all regions that support workspaces for the user 91 | - No arguments required 92 | - Returns a list of regions 93 | - **execute_sql**: Execute SQL operations on a connected workspace 94 | - Arguments: `workspace_group_identifier`, `workspace_identifier`, `username`, `password`, `database`, `sql_query` 95 | - Returns the results of the SQL query in a structured format 96 | - **list_virtual_workspaces**: List all starter workspaces accessible to the user 97 | - No arguments required 98 | - Returns details of available starter workspaces 99 | - **create_virtual_workspace**: Create a new starter workspace with a user 100 | - Arguments: 101 | - `name`: Name of the starter workspace 102 | - `database_name`: Name of the database to create 103 | - `username`: Username for accessing the workspace 104 | - `password`: Password for the user 105 | - `workspace_group`: Object containing `name` (optional) and `cellID` (mandatory) 106 | - Returns details of the created workspace and user 107 | - **execute_sql_on_virtual_workspace**: Execute SQL operations on a virtual workspace 108 | - Arguments: `virtual_workspace_id`, `username`, `password`, `sql_query` 109 | - Returns the results of the SQL query in a structured format including data, row count, columns, and status 110 | - **list_notebook_samples**: List all notebook samples 
available in SingleStore Spaces 111 | - No arguments required 112 | - Returns details of available notebook samples 113 | - **create_notebook**: Create a new notebook in the user's personal space 114 | - Arguments: `notebook_name`, `content` (optional) 115 | - Returns details of the created notebook 116 | - **list_personal_files**: List all files in the user's personal space 117 | - No arguments required 118 | - Returns details of all files in the user's personal space 119 | - **create_scheduled_job**: Create a new scheduled job to run a notebook 120 | - Arguments: 121 | - `name`: Name for the job 122 | - `notebook_path`: Path to the notebook to execute 123 | - `schedule_mode`: Once or Recurring 124 | - `execution_interval_minutes`: Minutes between executions (optional) 125 | - `start_at`: When to start the job (optional) 126 | - `description`: Description of the job (optional) 127 | - `create_snapshot`: Whether to create notebook snapshots (optional) 128 | - `runtime_name`: Name of the runtime environment 129 | - `parameters`: Parameters for the job (optional) 130 | - `target_config`: Target configuration for the job (optional) 131 | - Returns details of the created job 132 | - **get_job_details**: Get details about a specific job 133 | - Arguments: `job_id` 134 | - Returns detailed information about the specified job 135 | - **list_job_executions**: List execution history for a specific job 136 | - Arguments: `job_id`, `start` (optional), `end` (optional) 137 | - Returns execution history for the specified job 138 | 139 | ## Dockerization 140 | 141 | ### Building the Docker Image 142 | 143 | To build the Docker image for the MCP server, run the following command in the project root: 144 | 145 | ```bash 146 | docker build -t mcp-server-singlestore . 
147 | ``` 148 | 149 | ### Running the Docker Container 150 | 151 | To run the Docker container, use the following command: 152 | 153 | ```bash 154 | docker run -d \ 155 | -p 8000:8000 \ 156 | --name mcp-server \ 157 | mcp-server-singlestore 158 | ``` 159 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "singlestore_mcp_server" 3 | dynamic = ["version"] 4 | description = "SingleStore MCP server" 5 | readme = "README.md" 6 | requires-python = ">=3.10" 7 | dependencies = [ 8 | "black>=25.1.0", 9 | "fastapi>=0.115.12", 10 | "fastmcp>=2.5.2", 11 | "flake8>=7.2.0", 12 | "mcp[cli]>=1.8.1", 13 | "nbformat>=5.10.4", 14 | "pydantic-settings>=2.9.1", 15 | "singlestoredb>=1.12.0", 16 | "starlette>=0.46.2", 17 | ] 18 | 19 | [project.scripts] 20 | singlestore-mcp-server = "src.main:main" 21 | 22 | [build-system] 23 | requires = ["hatchling"] 24 | build-backend = "hatchling.build" 25 | 26 | [tool.hatch.version] 27 | path = "src/version.py" 28 | 29 | [tool.hatch.build.targets.wheel] 30 | packages = ["src"] 31 | 32 | [tool.flake8] 33 | # line too long 34 | ignore="E501" 35 | 36 | [tool.uv] 37 | dev-dependencies = [ 38 | "pyright>=1.1.391", 39 | "pytest>=8.3.4", 40 | "ruff>=0.8.5", 41 | "singlestore-mcp-server", 42 | ] 43 | 44 | [tool.uv.sources] 45 | singlestore-mcp-server = { workspace = true } 46 | 47 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pydantic-settings 2 | singlestoredb -------------------------------------------------------------------------------- /smithery.yaml: -------------------------------------------------------------------------------- 1 | # Smithery configuration file: https://smithery.ai/docs/config#smitheryyaml 2 | 3 | startCommand: 4 | type: stdio 5 | 
configSchema: 6 | # JSON Schema defining the configuration options for the MCP. 7 | type: object 8 | required: 9 | - singlestoreApiKey 10 | properties: 11 | singlestoreApiKey: 12 | type: string 13 | description: SingleStore's API key required for authentication 14 | # A JS function that produces the CLI command based on the given config to start the MCP on stdio. 15 | commandFunction: |- 16 | (config) => { return { command: 'python', args: ['src/main.py', 'start'], env: { SINGLESTORE_API_KEY: config.singlestoreApiKey } }; } 17 | 18 | exampleConfig: 19 | singlestoreApiKey: your_api_key_here 20 | -------------------------------------------------------------------------------- /src/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/mcp-server-singlestore/90bc8dec1065fb863d0e0ba7575bc7fc9e60244d/src/__init__.py -------------------------------------------------------------------------------- /src/api/common.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | import requests 3 | import json 4 | import logging 5 | 6 | from starlette.exceptions import HTTPException 7 | from fastmcp.server.dependencies import get_http_request 8 | 9 | from src.api.types import MCPConcept 10 | from src.config.config import get_settings 11 | 12 | # Set up logger for this module 13 | logger = logging.getLogger(__name__) 14 | 15 | 16 | def filter_mcp_concepts(mcp_concepts: List[MCPConcept]) -> List[MCPConcept]: 17 | """ 18 | Filter mcp concepts to exclude deprecated ones. 19 | """ 20 | return [mcp_concept for mcp_concept in mcp_concepts if not mcp_concept.deprecated] 21 | 22 | 23 | def query_graphql_organizations(): 24 | """ 25 | Query the GraphQL endpoint to get a list of organizations the user has access to.
26 | 27 | Returns: 28 | List of organizations with their IDs and names 29 | """ 30 | settings = get_settings() 31 | graphql_endpoint = settings.graphql_public_endpoint 32 | 33 | logger.debug(f"GraphQL endpoint: {graphql_endpoint}") 34 | logger.debug(f"Settings auth method: {settings.auth_method}") 35 | logger.debug(f"Settings is_remote: {settings.is_remote}") 36 | 37 | # GraphQL query for organizations 38 | query = """ 39 | query { 40 | organizations { 41 | orgID 42 | name 43 | } 44 | } 45 | """ 46 | 47 | # Get access token with logging 48 | try: 49 | access_token = __get_access_token() 50 | # Only log first/last 8 chars for security 51 | token_preview = ( 52 | f"{access_token[:8]}...{access_token[-8:]}" 53 | if len(access_token) > 16 54 | else "***" 55 | ) 56 | logger.debug(f"Access token (preview): {token_preview}") 57 | except Exception as e: 58 | logger.error(f"Failed to get access token: {str(e)}") 59 | raise 60 | 61 | # Headers with authentication 62 | headers = { 63 | "Authorization": f"Bearer {access_token}", 64 | "Content-Type": "application/json", 65 | "Accept": "application/json", 66 | "User-Agent": "SingleStore-MCP-Server", 67 | } 68 | 69 | # Payload for the GraphQL request 70 | payload = {"query": query.strip()} 71 | 72 | logger.debug(f"Request headers: {dict(headers)}") 73 | logger.debug(f"Request payload: {payload}") 74 | 75 | try: 76 | logger.debug(f"Making POST request to: {graphql_endpoint}") 77 | 78 | # Use the base GraphQL endpoint without query parameters 79 | response = requests.post( 80 | graphql_endpoint, headers=headers, json=payload, timeout=30 81 | ) 82 | 83 | logger.debug(f"Response status code: {response.status_code}") 84 | logger.debug(f"Response headers: {dict(response.headers)}") 85 | logger.debug(f"Raw response text: {response.text}") 86 | 87 | if response.status_code != 200: 88 | error_msg = f"GraphQL request failed with status code {response.status_code}: {response.text}" 89 | logger.error(error_msg) 90 | raise 
ValueError(error_msg) 91 | 92 | data = response.json() 93 | logger.debug(f"Parsed response data: {data}") 94 | 95 | if "errors" in data: 96 | errors = data["errors"] 97 | error_message = "; ".join( 98 | [error.get("message", "Unknown error") for error in errors] 99 | ) 100 | logger.error(f"GraphQL errors: {errors}") 101 | raise ValueError(f"GraphQL query error: {error_message}") 102 | 103 | if "data" in data and "organizations" in data["data"]: 104 | organizations = data["data"]["organizations"] 105 | logger.info(f"Found {len(organizations)} organizations") 106 | return organizations 107 | else: 108 | logger.warning("No organizations found in response") 109 | return [] 110 | 111 | except requests.exceptions.RequestException as e: 112 | error_msg = f"Network error when querying organizations: {str(e)}" 113 | logger.error(error_msg) 114 | raise ValueError(error_msg) 115 | except Exception as e: 116 | error_msg = f"Failed to query organizations: {str(e)}" 117 | logger.error(error_msg) 118 | raise ValueError(error_msg) 119 | 120 | 121 | def build_request( 122 | type: str, 123 | endpoint: str, 124 | params: dict = None, 125 | data: dict = None, 126 | ): 127 | """ 128 | Make an API request to the SingleStore Management API. 
129 | 130 | Args: 131 | type: HTTP method (GET, POST, PUT, DELETE) 132 | endpoint: API endpoint path 133 | params: Query parameters 134 | data: Request body for POST/PUT/PATCH requests 135 | 136 | Returns: 137 | JSON response from the API 138 | """ 139 | # Ensure an organization is selected before making API requests 140 | 141 | settings = get_settings() 142 | 143 | def build_request_endpoint(endpoint: str, params: dict = None): 144 | url = f"{settings.s2_api_base_url}/v1/{endpoint}" 145 | 146 | if params is None: 147 | params = {} 148 | 149 | # Add organization ID as a query parameter 150 | if settings.is_remote: 151 | params["organizationID"] = settings.org_id 152 | elif ( 153 | hasattr(settings, "org_id") 154 | and settings.org_id 155 | and settings.auth_method == "oauth_token" 156 | ): 157 | # For local OAuth token authentication, also add organization ID 158 | params["organizationID"] = settings.org_id 159 | 160 | if params and type == "GET": # Only add query params for GET requests 161 | url += "?" 
162 | for key, value in params.items(): 163 | url += f"{key}={value}&" 164 | url = url[:-1] 165 | return url 166 | 167 | # Headers with authentication 168 | headers = { 169 | "Content-Type": "application/json", 170 | } 171 | 172 | access_token = __get_access_token() 173 | 174 | if access_token is not None: 175 | headers["Authorization"] = f"Bearer {access_token}" 176 | 177 | request_endpoint = build_request_endpoint(endpoint, params) 178 | 179 | # Default empty JSON body for POST/PUT requests if none provided 180 | if data is None and type in ["POST", "PUT", "PATCH"]: 181 | data = {} 182 | 183 | # Convert dict to JSON string for request body 184 | json_data = json.dumps(data) if data is not None else None 185 | 186 | request = None 187 | match type: 188 | case "GET": 189 | request = requests.get(request_endpoint, headers=headers, params=params) 190 | case "POST": 191 | request = requests.post(request_endpoint, headers=headers, data=json_data) 192 | case "PUT": 193 | request = requests.put(request_endpoint, headers=headers, data=json_data) 194 | case "PATCH": 195 | request = requests.patch(request_endpoint, headers=headers, data=json_data) 196 | case "DELETE": 197 | request = requests.delete(request_endpoint, headers=headers) 198 | case _: 199 | raise ValueError(f"Unsupported request type: {type}") 200 | 201 | if request.status_code != 200: 202 | raise HTTPException(request.status_code, request.text) 203 | 204 | try: 205 | return request.json() 206 | except ValueError: 207 | raise ValueError(f"Invalid JSON response: {request.text}") 208 | 209 | 210 | def __find_workspace_group(workspace_group_identifier: str): 211 | """ 212 | Find a workspace group by its name or ID. 
213 | """ 214 | workspace_groups = build_request("GET", "workspaceGroups") 215 | for workspace_group in workspace_groups: 216 | if ( 217 | workspace_group["workspaceGroupID"] == workspace_group_identifier 218 | or workspace_group["name"] == workspace_group_identifier 219 | ): 220 | return workspace_group 221 | raise ValueError(f"Workspace group not found: {workspace_group_identifier}") 222 | 223 | 224 | def __get_workspace_group_id(workspace_group_identifier: str) -> str: 225 | """ 226 | Get the ID of a workspace group by its name or ID. 227 | """ 228 | workspace_group = __find_workspace_group(workspace_group_identifier) 229 | return workspace_group["workspaceGroupID"] 230 | 231 | 232 | def __find_workspace(workspace_group_identifier: str, workspace_identifier: str): 233 | """ 234 | Find a workspace by its name or ID within a specific workspace group. 235 | """ 236 | workspace_group_id = __get_workspace_group_id(workspace_group_identifier) 237 | workspaces = build_request( 238 | "GET", "workspaces", {"workspaceGroupID": workspace_group_id} 239 | ) 240 | for workspace in workspaces: 241 | if ( 242 | workspace["workspaceID"] == workspace_identifier 243 | or workspace["name"] == workspace_identifier 244 | ): 245 | return workspace 246 | raise ValueError(f"Workspace not found: {workspace_identifier}") 247 | 248 | 249 | def __get_workspace_endpoint( 250 | workspace_group_identifier: str, workspace_identifier: str 251 | ) -> str: 252 | """ 253 | Retrieve the endpoint of a specific workspace by its name or ID within a specific workspace group. 254 | """ 255 | workspace = __find_workspace(workspace_group_identifier, workspace_identifier) 256 | return workspace["endpoint"] 257 | 258 | 259 | def __get_user_id() -> str: 260 | """ 261 | Get the current user's ID from the management API. 
262 | 263 | Returns: 264 | str: The user ID 265 | """ 266 | 267 | # Get all users in the organization 268 | users = build_request("GET", "users") 269 | 270 | # Find the current user 271 | # Since we can't directly get the current user ID, we'll use the first user 272 | # In a real implementation, we might need additional logic to identify the current user 273 | if users and isinstance(users, list) and len(users) > 0: 274 | user_id = users[0].get("userID") 275 | if user_id: 276 | return user_id 277 | 278 | raise ValueError("Could not retrieve user ID from the API") 279 | 280 | 281 | def __get_org_id() -> str: 282 | """ 283 | Get the organization ID from the management API. 284 | 285 | Returns: 286 | str: The organization ID 287 | """ 288 | settings = get_settings() 289 | 290 | if settings.is_remote: 291 | return settings.org_id 292 | else: 293 | # For local settings with OAuth token authentication, check if org_id is already set 294 | if ( 295 | hasattr(settings, "org_id") 296 | and settings.org_id 297 | and settings.auth_method == "oauth_token" 298 | ): 299 | return settings.org_id 300 | 301 | organization = build_request("GET", "organizations/current") 302 | if "orgID" in organization: 303 | return organization["orgID"] 304 | else: 305 | raise ValueError("Could not retrieve organization ID from the API") 306 | 307 | 308 | def __get_access_token() -> str: 309 | """ 310 | Get the access token for the current session. 
311 | 312 | Returns: 313 | str: The access token 314 | """ 315 | settings = get_settings() 316 | 317 | logger.debug(f"Getting access token, is_remote: {settings.is_remote}") 318 | 319 | access_token: str 320 | if settings.is_remote: 321 | request = get_http_request() 322 | access_token = request.headers.get("Authorization", "").replace("Bearer ", "") 323 | logger.debug( 324 | f"Remote access token retrieved (length: {len(access_token) if access_token else 0})" 325 | ) 326 | else: 327 | access_token = settings.api_key 328 | logger.debug( 329 | f"Local access token retrieved (length: {len(access_token) if access_token else 0})" 330 | ) 331 | 332 | if not access_token: 333 | logger.warning("No access token available!") 334 | raise HTTPException(401, "Unauthorized: No access token provided") 335 | 336 | return access_token 337 | 338 | 339 | def get_current_organization(): 340 | """ 341 | Get the current organization details from the management API. 342 | 343 | Returns: 344 | dict: Organization details including orgID and name 345 | """ 346 | try: 347 | organization = build_request("GET", "organizations/current") 348 | logger.debug(f"Current organization response: {organization}") 349 | return organization 350 | except Exception as e: 351 | logger.error(f"Failed to get current organization: {str(e)}") 352 | raise ValueError( 353 | f"Could not retrieve current organization from the API: {str(e)}" 354 | ) 355 | -------------------------------------------------------------------------------- /src/api/prompts/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/mcp-server-singlestore/90bc8dec1065fb863d0e0ba7575bc7fc9e60244d/src/api/prompts/__init__.py -------------------------------------------------------------------------------- /src/api/prompts/prompts.py: -------------------------------------------------------------------------------- 1 | from src.api.prompts.types import Prompt 
2 | 3 | 4 | prompts_definitions = [] 5 | 6 | prompts = [Prompt(**prompt) for prompt in prompts_definitions] 7 | -------------------------------------------------------------------------------- /src/api/prompts/register.py: -------------------------------------------------------------------------------- 1 | from functools import wraps 2 | from typing import Callable, List 3 | from mcp.server.fastmcp import FastMCP 4 | 5 | from src.api.common import filter_mcp_concepts 6 | 7 | from .types import Prompt 8 | from .prompts import prompts as prompts_list 9 | 10 | 11 | def create_prompts_wrapper(func: Callable, name: str, description: str): 12 | @wraps(func) 13 | async def wrapper(*args, **kwargs): 14 | return func(*args, **kwargs) 15 | 16 | wrapper.__name__ = name 17 | wrapper.__doc__ = description 18 | return wrapper 19 | 20 | 21 | def register_prompts(mcp: FastMCP) -> None: 22 | filtered_prompts: List[Prompt] = filter_mcp_concepts(prompts_list) 23 | 24 | for prompt in filtered_prompts: 25 | func = prompt.func 26 | # Add context support for MCP 27 | wrapper = create_prompts_wrapper(func, func.__name__, func.__doc__ or "") 28 | 29 | mcp.prompt(name=func.__name__, description=func.__doc__ or "")(wrapper) 30 | -------------------------------------------------------------------------------- /src/api/prompts/types.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Callable 3 | 4 | from src.api.types import MCPConcept 5 | 6 | 7 | @dataclass() 8 | class Prompt(MCPConcept): 9 | func: Callable = None 10 | -------------------------------------------------------------------------------- /src/api/resources/__init__.py: -------------------------------------------------------------------------------- 1 | from .resources import resources 2 | from .register import register_resources 3 | 4 | __all__ = ["resources", "register_resources"] 5 | 
def create_resources_wrapper(func: Callable, name: str, description: str, uri: str):
    """
    Wrap *func* in an async callable carrying the resource's registration
    metadata (name, description and URI) so FastMCP can expose it.
    """

    @wraps(func)
    async def _resource_entry(*args, **kwargs):
        return func(*args, **kwargs)

    _resource_entry.__name__ = name
    _resource_entry.__doc__ = description
    _resource_entry.uri = uri
    return _resource_entry


def register_resources(mcp: "FastMCP") -> None:
    """Register every resource that passes concept filtering with *mcp*."""
    for resource in filter_mcp_concepts(resources_list):
        handler = resource.func
        wrapped = create_resources_wrapper(
            handler, handler.__name__, handler.__doc__ or "", resource.uri
        )
        mcp.resource(
            uri=resource.uri,
            name=handler.__name__,
            description=handler.__doc__ or "",
        )(wrapped)


# Resource definitions exposed by the server. Currently empty; add dicts
# matching the Resource dataclass fields to expose new resources.
resources_definitions = []

resources = [Resource(**resource) for resource in resources_definitions]
def create_tool_wrapper(func: Callable, name: str, description: str):
    """
    Build an async wrapper around *func* for FastMCP tool registration.

    FastMCP expects an async callable; this factory normalises sync and
    async tool implementations to a single async shape while preserving
    the original function's metadata via functools.wraps, then pins the
    registration ``name``/``description`` on the wrapper.

    Fix over the previous version: it branched on both "is async" and
    "has a ctx parameter", and an *async* tool WITHOUT a ``ctx`` parameter
    fell into the generic sync branch whose wrapper returned the coroutine
    object un-awaited. We now branch purely on whether *func* is a
    coroutine function and await it when it is; sync functions (with or
    without ctx — the two old branches were identical) are called directly.
    FastMCP still handles Context injection through the preserved signature.
    """
    if inspect.iscoroutinefunction(func):

        @wraps(func)
        async def wrapper(*args, **kwargs):
            return await func(*args, **kwargs)

    else:

        @wraps(func)
        async def wrapper(*args, **kwargs):
            return func(*args, **kwargs)

    wrapper.__name__ = name
    wrapper.__doc__ = description
    return wrapper


def register_tools(mcp: "FastMCP") -> None:
    """Register every tool that passes concept filtering with *mcp*."""
    filtered_tool: List[Tool] = filter_mcp_concepts(tool_list)

    for tool in filtered_tool:
        func = tool.func
        # Wrap so FastMCP always receives an awaitable entry point.
        wrapper = create_tool_wrapper(func, func.__name__, func.__doc__ or "")

        mcp.tool(name=func.__name__, description=func.__doc__ or "")(wrapper)
def __execute_sql(
    workspace_group_identifier: str,
    workspace_identifier: str,
    username: str,
    password: str,
    database: str,
    sql_query: str,
) -> dict:
    """
    Execute SQL operations on a connected workspace.

    Args:
        workspace_group_identifier: ID/name of the workspace group.
        workspace_identifier: ID/name of the workspace to connect to.
        username: Database username (required outside the portal).
        password: Database password (required outside the portal).
        database: Database to run the query against.
        sql_query: SQL text to execute.

    Returns:
        dict with "data" (list of row dicts), "row_count", "columns" and
        "status".

    Raises:
        ValueError: If the workspace endpoint cannot be resolved or
            credentials are missing.
    """
    endpoint = __get_workspace_endpoint(
        workspace_group_identifier, workspace_identifier
    )
    if not endpoint:
        raise ValueError(f"Endpoint not found for workspace: {workspace_identifier}")

    # These are required parameters when not running within singlestore portal
    if not username or not password:
        raise ValueError("Singlestore Database username and password must be provided")

    connection = s2.connect(
        host=endpoint,
        user=username,
        password=password,
        database=database,
    )
    # FIX: close cursor and connection even when execute() raises; the
    # previous version leaked the connection on any query error.
    try:
        cursor = connection.cursor()
        try:
            cursor.execute(sql_query)
            columns = (
                [desc[0] for desc in cursor.description] if cursor.description else []
            )
            rows = cursor.fetchall()
        finally:
            cursor.close()
    finally:
        connection.close()

    # Format results as a list of column-name -> value dictionaries.
    results = [dict(zip(columns, row)) for row in rows]

    return {
        "data": results,
        "row_count": len(rows),
        "columns": columns,
        "status": "Success",
    }


def __get_virtual_workspace(virtual_workspace_id: str):
    """Return details of one virtual (starter) workspace from the API."""
    return build_request("GET", f"sharedtier/virtualWorkspaces/{virtual_workspace_id}")


def __create_virtual_workspace(name: str, database_name: str, workspace_group=None):
    """
    Create a new virtual workspace with the specified name and database name.

    workspace_group may be a dict containing 'name' and 'cellID', a JSON
    string encoding such a dict, or a bare group name; it is normalised to
    a dict before the request. Falsy values default to {"name": "DEFAULT"}.
    """
    if not workspace_group:
        workspace_group = {"name": "DEFAULT"}

    # Accept a JSON string (or a plain name) for caller convenience.
    if isinstance(workspace_group, str):
        try:
            workspace_group = json.loads(workspace_group)
        except json.JSONDecodeError:
            # Not valid JSON -> treat the string as the group name.
            workspace_group = {"name": workspace_group}

    if not isinstance(workspace_group, dict):
        raise ValueError(
            "workspace_group must be a dictionary with 'name' and 'cellID' keys"
        )

    payload = {
        "name": name,
        "databaseName": database_name,
        "workspaceGroup": workspace_group,
    }

    return build_request("POST", "sharedtier/virtualWorkspaces", data=payload)


def __create_virtual_workspace_user(
    virtual_workspace_id: str, username: str, password: str
):
    """Create a database user on the given virtual workspace."""
    payload = {"userName": username, "password": password}
    return build_request(
        "POST",
        f"sharedtier/virtualWorkspaces/{virtual_workspace_id}/users",
        data=payload,
    )
def __execute_sql_on_virtual_workspace(
    virtual_workspace_id: str,
    username: str,
    password: str,
    sql_query: str,
) -> dict:
    """
    Execute SQL operations on a connected virtual workspace.

    Returns:
        dict with "data", "row_count", "columns" and "status" on success,
        or {"status": "Failed", "error": ...} on any failure inside the
        lookup/connect/query pipeline.

    Raises:
        ValueError: If any required parameter is missing.
    """
    if not virtual_workspace_id:
        raise ValueError("Missing required parameter: virtual_workspace_id")
    if not username:
        raise ValueError("Missing required parameter: username")
    if not password:
        raise ValueError("Missing required parameter: password")
    if not sql_query:
        raise ValueError("Missing required parameter: sql_query")

    try:
        # Resolve the connection endpoint from the workspace metadata.
        workspace_info = __get_virtual_workspace(virtual_workspace_id)

        endpoint = workspace_info.get("endpoint")
        port = workspace_info.get("mysqlDmlPort", 3333)
        database = workspace_info.get("databaseName")

        if not endpoint or not database:
            raise ValueError(
                "Could not retrieve connection information for the virtual workspace"
            )

        connection = s2.connect(
            host=endpoint,
            port=port,
            user=username,
            password=password,
            database=database,
        )
        # FIX: always release cursor/connection; the previous version leaked
        # the connection whenever the query itself failed.
        try:
            cursor = connection.cursor()
            try:
                cursor.execute(sql_query)
                columns = (
                    [desc[0] for desc in cursor.description]
                    if cursor.description
                    else []
                )
                rows = cursor.fetchall()
            finally:
                cursor.close()
        finally:
            connection.close()

        results = [dict(zip(columns, row)) for row in rows]

        return {
            "data": results,
            "row_count": len(rows),
            "columns": columns,
            "status": "Success",
        }
    except Exception as e:
        return {"status": "Failed", "error": str(e)}


def camel_to_snake(s: Optional[str]) -> Optional[str]:
    """Convert camel-case to snake-case (None is passed through)."""
    if s is None:
        return None
    out = re.sub(r"([A-Z]+)", r"_\1", s).lower()
    # Drop the artificial leading underscore a capitalised start produces.
    return out[1:] if out.startswith("_") else out


class Mode(Enum):
    """Scheduling mode for notebook jobs."""

    ONCE = "Once"
    RECURRING = "Recurring"

    @classmethod
    def from_str(cls, s: str) -> "Mode":
        """Parse a mode from its camel-case API spelling (e.g. 'Once')."""
        try:
            return cls[str(camel_to_snake(s)).upper()]
        except KeyError:
            raise ValueError(f"Unknown Mode: {s}")

    def __str__(self) -> str:
        """Return string representation."""
        return self.value

    def __repr__(self) -> str:
        """Return string representation."""
        return str(self)


def __create_scheduled_job(
    notebook_path: str, mode: str, create_snapshot: bool, access_token: str = None
):
    """
    Create a new scheduled job for running a notebook periodically.

    Args:
        notebook_path: Path to the notebook to be executed.
        mode: Schedule mode, "Once" or "Recurring".
        create_snapshot: Whether to snapshot the notebook before execution.
        access_token: Token used to authenticate with the management API.

    Returns:
        The created job object, or {"status": "error", "message": ...} if
        scheduling fails.

    (Docstring fixed: the previous version documented nine parameters —
    name, schedule_mode, execution_interval_minutes, start_at, etc. — that
    this function does not accept.)
    """
    mode_enum = Mode.from_str(mode)

    settings = get_settings()

    try:
        jobs_manager = s2.manage_workspaces(
            access_token=access_token,
            base_url=settings.s2_api_base_url,
        ).organizations.current.jobs
        return jobs_manager.schedule(
            notebook_path=notebook_path,
            mode=mode_enum,
            create_snapshot=create_snapshot,
        )
    except Exception as e:
        return {"status": "error", "message": str(e)}
def execute_sql(
    workspace_group_identifier: str,
    workspace_identifier: str,
    database: str,
    sql_query: str,
    username: str = None,
    password: str = None,
    access_token: str = None,
) -> Dict[str, Any]:
    """
    Execute SQL operations on a database attached to workspace within a workspace group and receive formatted results.

    Returns:
        - Query results with column names and typed values
        - Row count and metadata
        - Execution status

    ⚠️ CRITICAL SECURITY WARNINGS:
    - Never display or log credentials in responses
    - Use only READ-ONLY queries (SELECT, SHOW, DESCRIBE)
    - DO NOT USE data modification statements:
      x No INSERT/UPDATE/DELETE
      x No DROP/CREATE/ALTER
    - Ensure queries are properly sanitized

    Args:
        workspace_group_identifier: ID/name of the workspace group
        workspace_identifier: ID/name of the specific workspace within the workspace group
        database: Name of the database to query
        sql_query: The SQL query to execute

    Returns:
        Dictionary with query results and metadata
    """

    settings = get_settings()

    if settings.is_remote:
        # JWT auth: the user id is the username and the bearer token the
        # password; credentials supplied by the caller are ignored.
        username: str = __get_user_id()
        password: str = access_token
    elif not username or not password:
        # Local API-key auth cannot run SQL on the user's behalf, so the
        # caller must supply explicit database credentials.
        return {
            "status": "error",
            "message": (
                f"API key authentication is not supported for executing SQL queries. Please ask the user to provide their username and password for database {database}."
            ),
        }
    # FIX: the previous version had an unconditional `else` branch returning
    # "No authentication method set", which rejected locally supplied
    # username/password and made credentialed local execution unreachable.
    # Caller-supplied credentials now fall through to execution.

    return __execute_sql(
        workspace_group_identifier,
        workspace_identifier,
        username,
        password,
        database,
        sql_query,
    )
# Default workspace-group target for starter workspaces; copied per call so
# the shared constant can never be mutated by downstream normalisation.
_DEFAULT_WORKSPACE_GROUP = {"cellID": "452cc4b1-df20-4130-9e2f-e72ba79e3d46"}


def create_virtual_workspace(
    name: str,
    database_name: str,
    username: str,
    password: str,
    workspace_group: Dict[str, str] = None,
) -> Dict[str, Any]:
    """
    Create a new starter (virtual) workspace in SingleStore and set up user access.

    Process:
    1. Creates a virtual workspace with specified name and database
    2. Creates a user account for accessing the workspace
    3. Returns both workspace details and access credentials

    Args:
        name: Unique name for the new starter workspace
        database_name: Name of the database to create in the starter workspace
        username: Username for accessing the new starter workspace
        password: Password for accessing the new starter workspace
        workspace_group: Optional workspace group configuration (defaults to
            the standard shared-tier cell)

    Returns:
        Dictionary with workspace and user creation details
    """
    # FIX: the previous signature used a mutable dict literal as the default
    # argument, shared across all calls; use a None sentinel and substitute a
    # fresh copy of the same default instead.
    if workspace_group is None:
        workspace_group = dict(_DEFAULT_WORKSPACE_GROUP)

    workspace_data = __create_virtual_workspace(name, database_name, workspace_group)
    return {
        "workspace": workspace_data,
        "user": __create_virtual_workspace_user(
            workspace_data.get("virtualWorkspaceID"),
            username,
            password,
        ),
    }
def execute_sql_on_virtual_workspace(
    virtual_workspace_id: str,
    sql_query: str,
    username: str = None,
    password: str = None,
    access_token: str = None,
) -> Dict[str, Any]:
    """
    Execute SQL operations on a virtual (starter) workspace and receive formatted results.

    Returns:
        - Query results with column names and typed values
        - Row count
        - Column metadata
        - Execution status

    ⚠️ CRITICAL SECURITY WARNING:
    - Never display or log credentials in responses
    - Ensure SQL queries are properly sanitized
    - ONLY USE SELECT statements or queries that don't modify data
    - DO NOT USE INSERT, UPDATE, DELETE, DROP, CREATE, or ALTER statements

    Args:
        virtual_workspace_id: Unique identifier of the starter workspace
        sql_query: The SQL query to execute (READ-ONLY queries only)

    Returns:
        Dictionary with query results and metadata
    """

    settings = get_settings()

    if settings.is_remote:
        # JWT auth: the user id is the username and the bearer token the
        # password; credentials supplied by the caller are ignored.
        username: str = __get_user_id()
        password: str = access_token
    elif not username or not password:
        # Local API-key auth cannot run SQL on the user's behalf, so the
        # caller must supply explicit database credentials.
        return {
            "status": "error",
            "message": (
                f"API key authentication is not supported for executing SQL queries. Please ask the user to provide their username and password for virtual workspace {virtual_workspace_id}."
            ),
        }
    # FIX: removed the unconditional `else` branch that returned "No
    # authentication method set" even when the caller DID supply a local
    # username and password; supplied credentials now proceed to execution.

    return __execute_sql_on_virtual_workspace(
        virtual_workspace_id,
        username,
        password,
        sql_query,
    )
def __create_file_in_shared_space(
    path: str, content: Optional[Dict[str, Any]] = None, access_token: str = None
) -> Dict[str, Any]:
    """
    Create a new file (such as a notebook) in the user's shared space.

    Args:
        path: Path to the file to create
        content: Optional JSON object with a 'cells' field containing an array of objects.
                Each object must have 'type' (markdown or code) and 'content' fields.
                If None, a sample notebook will be created for .ipynb files.
        access_token: Token used to authenticate with the management API.

    Returns:
        dict with "status", "message", "path", "type" and "format".
    """
    import tempfile

    settings = get_settings()

    org_id = __get_org_id()

    file_manager = s2.manage_files(
        access_token=access_token,
        base_url=settings.s2_api_base_url,
        organization_id=org_id,
    )

    # FIX: build the upload payload in a private temporary file. The previous
    # version wrote over the packaged sample_notebook.ipynb asset, clobbering
    # a shipped file and racing any concurrent calls sharing that one path.
    fd, local_path = tempfile.mkstemp(
        suffix=".ipynb" if path.endswith(".ipynb") else ""
    )
    os.close(fd)
    try:
        if path.endswith(".ipynb"):
            nb = nbfv4.new_notebook()
            nb["cells"] = []

            if content and "cells" in content:
                for cell in content["cells"]:
                    if cell["type"] == "markdown":
                        nb["cells"].append(nbfv4.new_markdown_cell(cell["content"]))
                    elif cell["type"] == "code":
                        nb["cells"].append(nbfv4.new_code_cell(cell["content"]))
                    else:
                        raise ValueError(
                            f"Invalid cell type: {cell['type']}. Only 'markdown' and 'code' are supported."
                        )
            else:
                # Create a sample notebook with SingleStore connectivity example
                nb["cells"] = [
                    nbfv4.new_markdown_cell(
                        "# SingleStore Sample Notebook\n\nThis notebook demonstrates how to connect to a SingleStore database and run queries."
                    ),
                    nbfv4.new_code_cell(
                        "import singlestoredb as s2\n\n# Connect to your database\nconn = s2.connect('hostname', user='username', password='password', database='database')"
                    ),
                    nbfv4.new_code_cell(
                        "result = conn.execute('SELECT * FROM your_table LIMIT 10')\n\nfor row in result:\n print(row)"
                    ),
                    nbfv4.new_code_cell("conn.close()"),
                ]

            with open(local_path, "w") as f:
                nbf.write(nb, f)
        else:
            # For non-notebook files, just write an empty file
            with open(local_path, "w") as f:
                f.write("")

        # Upload the file using the SDK method
        file_info = file_manager.shared_space.upload_file(local_path, path)
    finally:
        os.unlink(local_path)

    return {
        "status": "success",
        "message": f"File {path} created successfully",
        "path": file_info.path,
        "type": file_info.type,
        "format": file_info.format,
    }
def check_if_file_exists(file_name: str, access_token: str = None) -> Dict[str, Any]:
    """
    Check if a file (notebook) exists in the user's shared space.

    Args:
        file_name: Name of the file to check (with or without .ipynb extension)
        access_token: Token used to authenticate with the management API.

    Returns:
        JSON object with the file existence status
        {
          "exists": True/False,
          "message": "File exists" or "File does not exist"
        }
    """
    settings = get_settings()
    org_id = __get_org_id()

    manager = s2.manage_files(
        access_token=access_token,
        base_url=settings.s2_api_base_url,
        organization_id=org_id,
    )

    found = manager.shared_space.exists(file_name)

    return {
        "exists": found,
        "message": (f"File {file_name} {'exists' if found else 'does not exist'}"),
    }
def create_notebook(
    notebook_name: str, content: Optional[Dict[str, Any]] = None
) -> Dict[str, Any]:
    """
    Create a new Jupyter notebook in your personal space. Only supports python and markdown.

    Parameters:
    - notebook_name (required): Name for the new notebook
      - Can include or omit .ipynb extension
      - Must be unique in your personal space

    - content (optional): JSON object with the following structure:
        {
          "cells": [
            {"type": "markdown", "content": "Markdown content here"},
            {"type": "code", "content": "Python code here"}
          ]
        }
      - 'type' must be either 'markdown' or 'code'
      - 'content' is the text content of the cell
      IMPORTANT: The content must be valid JSON.

    How to use:
    - Before creating the notebook, call check_if_file_exists tool to verify if the notebook already exists.
    - Always install the dependencies on the first cell. Example:
        {
          "cells": [
            {"type": "code", "content": "!pip install singlestoredb --quiet"},
            // other cells...
          ]
        }
    - To connect to the database, use the variable "connection_url" that already exists in the notebook platform. Example:
        {
          "cells": [
            {"type": "code", "content": "conn = s2.connect(connection_url)"},
            // other cells...
          ]
        }
    """
    # Normalise the name so the stored file always carries the extension.
    filename = notebook_name
    if not filename.endswith(".ipynb"):
        filename = f"{filename}.ipynb"

    return __create_file_in_shared_space(filename, content)


def create_scheduled_job(
    notebook_path: str, mode: str, create_snapshot: bool = True
) -> Dict[str, Any]:
    """
    Create an automated job to execute a SingleStore notebook on a schedule.

    Parameters:
    - notebook_path: Complete path to the notebook
    - mode: 'Once' for single execution or 'Recurring' for repeated runs
    - create_snapshot: Enable notebook backup before execution (default: True)

    Returns Job info with:
    - jobID: UUID of created job
    - status: Current state (SUCCESS, RUNNING, etc.)
    - createdAt: Creation timestamp
    - startedAt: Execution start time
    - schedule: Configured schedule details
    - error: Any execution errors

    Common Use Cases:
    1. Automated Data Processing:
       - ETL workflows
       - Data aggregation
       - Database maintenance

    2. Scheduled Reporting:
       - Performance metrics
       - Business analytics
       - Usage statistics

    3. Maintenance Tasks:
       - Health checks
       - Backup operations
       - Clean-up routines

    Related Operations:
    - get_job_details: Monitor job
    - list_job_executions: View job execution history
    """
    return __create_scheduled_job(notebook_path, mode, create_snapshot)
def __get_notebook_path_by_name(notebook_name: str, location: str = "personal") -> str:
    """
    Find a notebook by its name and return its full path.

    Args:
        notebook_name: The name of the notebook to find (with or without .ipynb extension)
        location: Where to look for the notebook - 'personal' or 'shared'

    Returns:
        The full path of the notebook if found. Personal-space paths are
        prefixed with the internal personal-space layout; other paths are
        returned exactly as listed by the API.

    Raises:
        ValueError: If the location is invalid, the API response has an
            unexpected shape, or no notebook with the given name is found.
    """
    # Make sure we look for the right extension
    if not notebook_name.endswith(".ipynb"):
        search_name = f"{notebook_name}.ipynb"
    else:
        search_name = notebook_name

    # Get all files from the specified location
    loc = location.lower()
    if loc == "personal":
        files_response = build_request("GET", "files/fs/personal")
    elif loc == "shared":
        files_response = build_request("GET", "files/fs/shared")
    else:
        raise ValueError(
            f"Invalid location: {location}. Must be 'personal' or 'shared'"
        )

    # The API might return either an array of files or an object with a
    # "content" property — handle both shapes.
    if isinstance(files_response, dict) and "content" in files_response:
        files = files_response["content"]
    elif isinstance(files_response, list):
        files = files_response
    else:
        raise ValueError(
            f"Unexpected response format from file listing API: {type(files_response)}"
        )

    # Filter to find notebooks matching the name (case insensitive)
    matching_notebooks = []
    for file in files:
        if not isinstance(file, dict):
            continue
        path = file.get("path")
        # Skip entries that are not notebooks or lack a usable path.
        if not isinstance(path, str) or not path.endswith(".ipynb"):
            continue
        # Compare just the filename portion.
        if path.split("/")[-1].lower() == search_name.lower():
            matching_notebooks.append(file)

    if not matching_notebooks:
        raise ValueError(
            f"No notebook with name '{notebook_name}' found in {location} space"
        )

    # With an exact name match, multiple hits are unlikely — take the first.
    notebook_path = matching_notebooks[0]["path"]

    if loc == "personal":
        user_id = __get_user_id()
        # Format for personal space: _internal-s2-personal/{userID}/{path}
        return f"_internal-s2-personal/{user_id}/{notebook_path}"

    # FIX: removed a stray triple-quoted string literal (commented-out
    # shared-space handling) that executed as a useless no-op expression
    # between the branches above and this return.
    return notebook_path
def workspace_groups_info() -> List[Dict[str, Any]]:
    """
    List all workspace groups accessible to the user in SingleStore.

    Returns detailed information for each group:
    - name: Display name of the workspace group
    - deploymentType: Type of deployment (e.g., 'PRODUCTION')
    - state: Current status (e.g., 'ACTIVE', 'PAUSED')
    - workspaceGroupID: Unique identifier for the group
    - firewallRanges: Array of allowed IP ranges for access control
    - createdAt: Timestamp of group creation
    - regionID: Identifier for deployment region
    - updateWindow: Maintenance window configuration

    Use this tool to:
    1. Get workspace group IDs for other operations
    2. Plan maintenance windows

    Related operations:
    - Use workspaces_info to list workspaces within a group
    - Use execute_sql to run queries on workspaces in a group
    """
    groups = build_request("GET", "workspaceGroups")

    summaries = []
    for group in groups:
        summaries.append(
            {
                "name": group["name"],
                "deploymentType": group["deploymentType"],
                "state": group["state"],
                "workspaceGroupID": group["workspaceGroupID"],
                "firewallRanges": group.get("firewallRanges", []),
                "createdAt": group["createdAt"],
                "regionID": group["regionID"],
                "updateWindow": group["updateWindow"],
            }
        )
    return summaries
def workspaces_info(workspace_group_id: str) -> List[Dict[str, Any]]:
    """
    List all workspaces within a specified workspace group in SingleStore.

    Returns detailed information for each workspace:
    - createdAt: Timestamp of workspace creation
    - deploymentType: Type of deployment (e.g., 'PRODUCTION')
    - endpoint: Connection URL for database access
    - name: Display name of the workspace
    - size: Compute and storage configuration
    - state: Current status (e.g., 'ACTIVE', 'PAUSED')
    - terminatedAt: End timestamp if applicable
    - workspaceGroupID: Workspacegroup identifier
    - workspaceID: Unique workspace identifier

    Args:
        workspace_group_id: Unique identifier of the workspace group

    Returns:
        List of workspace information dictionaries
    """
    workspaces = build_request(
        "GET",
        "workspaces",
        {"workspaceGroupID": workspace_group_id},
    )

    summaries = []
    for workspace in workspaces:
        summaries.append(
            {
                "createdAt": workspace["createdAt"],
                "deploymentType": workspace.get("deploymentType", ""),
                "endpoint": workspace.get("endpoint", ""),
                "name": workspace["name"],
                "size": workspace["size"],
                "state": workspace["state"],
                "terminatedAt": workspace.get("terminatedAt", False),
                "workspaceGroupID": workspace["workspaceGroupID"],
                "workspaceID": workspace["workspaceID"],
            }
        )
    return summaries


def organization_info() -> Dict[str, Any]:
    """
    Retrieve information about the current user's organization in SingleStore.

    Returns organization details including:
    - orgID: Unique identifier for the organization
    - name: Organization display name
    """
    return build_request("GET", "organizations/current")


def list_of_regions() -> List[Dict[str, Any]]:
    """
    List all available deployment regions where SingleStore workspaces can be deployed for the user.

    Returns region information including:
    - regionID: Unique identifier for the region
    - provider: Cloud provider (AWS, GCP, or Azure)
    - name: Human-readable region name (e.g., Europe West 2 (London), US West 2 (Oregon))

    Use this tool to:
    1. Select optimal deployment regions based on:
       - Geographic proximity to users
       - Compliance requirements
       - Cost considerations
       - Available cloud providers
    2. Plan multi-region deployments
    """
    return build_request("GET", "regions")
Obtain connection details for database access 860 | """ 861 | return build_request("GET", "sharedtier/virtualWorkspaces") 862 | 863 | 864 | def organization_billing_usage( 865 | start_time: str, end_time: str, aggregate_type: str 866 | ) -> Dict[str, Any]: 867 | """ 868 | Retrieve detailed billing and usage metrics for your organization over a specified time period. 869 | 870 | Returns compute and storage usage data, aggregated by your chosen time interval 871 | (hourly, daily, or monthly). This tool is essential for: 872 | 1. Monitoring resource consumption patterns 873 | 2. Analyzing cost trends 874 | 875 | Args: 876 | start_time: Beginning of the usage period (UTC ISO 8601 format, e.g., '2023-07-30T18:30:00Z') 877 | end_time: End of the usage period (UTC ISO 8601 format) 878 | aggregate_type: Time interval for data grouping ('hour', 'day', or 'month') 879 | 880 | Returns: 881 | Usage metrics and billing information 882 | """ 883 | return build_request( 884 | "GET", 885 | "billing/usage", 886 | { 887 | "startTime": start_time, 888 | "endTime": end_time, 889 | "aggregateBy": aggregate_type, 890 | }, 891 | ) 892 | 893 | 894 | def list_notebook_samples() -> List[Dict[str, Any]]: 895 | """ 896 | Retrieve a catalog of pre-built notebook templates available in SingleStore Spaces. 897 | 898 | Returns for each notebook: 899 | - name: Template name and title 900 | - description: Detailed explanation of the notebook's purpose 901 | - contentURL: Direct download link for the notebook 902 | - likes: Number of user endorsements 903 | - views: Number of times viewed 904 | - downloads: Number of times downloaded 905 | - tags: List of Notebook tags 906 | 907 | Common template categories include: 908 | 1. Getting Started guides 909 | 2. Data loading and ETL patterns 910 | 3. Query optimization examples 911 | 4. Machine learning integrations 912 | 5. Performance monitoring 913 | 6. 
Best practices demonstrations 914 | """ 915 | return build_request("GET", "spaces/notebooks") 916 | 917 | 918 | def list_shared_files() -> Dict[str, Any]: 919 | """ 920 | List all files and notebooks in your shared SingleStore space. 921 | 922 | Returns file object meta data for each file: 923 | - name: Name of the file (e.g., 'analysis.ipynb') 924 | - path: Full path in shared space (e.g., 'folder/analysis.ipynb') 925 | - content: File content 926 | - created: Creation timestamp (ISO 8601) 927 | - last_modified: Last modification timestamp (ISO 8601) 928 | - format: File format if applicable ('json', null) 929 | - mimetype: MIME type of the file 930 | - size: File size in bytes 931 | - type: Object type ('', 'json', 'directory') 932 | - writable: Boolean indicating write permission 933 | 934 | Use this tool to: 935 | 1. List workspace contents and structure 936 | 2. Verify file existence before operations 937 | 3. Check file timestamps and sizes 938 | 4. Determine file permissions 939 | """ 940 | return build_request("GET", "files/fs/shared") 941 | 942 | 943 | def get_job_details(job_id: str) -> Dict[str, Any]: 944 | """ 945 | Retrieve comprehensive information about a scheduled notebook job. 
946 | 947 | Returns: 948 | - jobID: Unique identifier (UUID format) 949 | - name: Display name of the job 950 | - description: Human-readable job description 951 | - createdAt: Creation timestamp (ISO 8601) 952 | - terminatedAt: End timestamp if completed 953 | - completedExecutionsCount: Number of successful runs 954 | - enqueuedBy: User ID who created the job 955 | - executionConfig: Notebook path and runtime settings 956 | - schedule: Mode, interval, and start time 957 | - targetConfig: Database and workspace settings 958 | - jobMetadata: Execution statistics and status 959 | 960 | Args: 961 | job_id: UUID of the scheduled job to retrieve details for 962 | 963 | Returns: 964 | Dictionary with job details 965 | """ 966 | return build_request("GET", f"jobs/{job_id}") 967 | 968 | 969 | def list_job_executions(job_id: str, start: int = 1, end: int = 10) -> Dict[str, Any]: 970 | """ 971 | Retrieve execution history and performance metrics for a scheduled notebook job. 972 | 973 | Returns: 974 | - executions: Array of execution records containing: 975 | - executionID: Unique identifier for the execution 976 | - executionNumber: Sequential number of the run 977 | - jobID: Parent job identifier 978 | - status: Current state (Scheduled, Running, Completed, Failed) 979 | - startedAt: Execution start time (ISO 8601) 980 | - finishedAt: Execution end time (ISO 8601) 981 | - scheduledStartTime: Planned start time 982 | - snapshotNotebookPath: Backup notebook path if enabled 983 | 984 | Args: 985 | job_id: UUID of the scheduled job 986 | start: First execution number to retrieve (default: 1) 987 | end: Last execution number to retrieve (default: 10) 988 | 989 | Returns: 990 | Dictionary with execution records 991 | """ 992 | return build_request( 993 | "GET", 994 | f"jobs/{job_id}/executions", 995 | params={"start": start, "end": end}, 996 | ) 997 | 998 | 999 | def get_notebook_path(notebook_name: str, location: str = "personal") -> str: 1000 | """ 1001 | Find the complete 
def get_notebook_path(notebook_name: str, location: str = "personal") -> str:
    """
    Find the complete path of a notebook by its name and generate the
    properly formatted path for API operations.

    Args:
        notebook_name: Name of the notebook (with or without .ipynb extension)
        location: Where to look for the notebook - 'personal' or 'shared'

    Returns:
        Properly formatted path including project ID and user ID where needed

    Required for:
        - Creating scheduled jobs (use returned path as notebook_path parameter)
    """
    return __get_notebook_path_by_name(notebook_name, location)


async def get_organizations(ctx: Context) -> str:
    """
    List all available SingleStore organizations your account has access to.

    After logging in, this tool must be called first to identify which
    organization your queries should run against. Returns a list of
    organizations with:

    - orgID: Unique identifier for the organization
    - name: Display name of the organization

    Use this tool when:
    1. Starting a new session to see available organizations
    2. To verify permissions across multiple organizations
    3. Before switching context to a different organization

    Selection rules: only one org -> selected automatically; several orgs ->
    the user must pick one via `set_organization` (which can also be used to
    switch later); no orgs -> an error message is returned.
    """
    settings = get_settings()

    logger.debug("get_organizations called")
    logger.debug(f"Auth method: {settings.auth_method}")
    logger.debug(f"Is remote: {settings.is_remote}")

    # Only show organization selection for OAuth token authentication
    if settings.auth_method != "oauth_token":
        logger.debug("Skipping org selection - not OAuth token auth")
        return "✅ Organization selection is only required for OAuth token authentication. Your current authentication method doesn't require this step."

    try:
        logger.debug("Calling query_graphql_organizations...")
        # Get the list of organizations via GraphQL
        organizations = query_graphql_organizations()
        logger.debug(f"Retrieved {len(organizations)} organizations")

        if not organizations:
            logger.warning("No organizations available")
            return "❌ No organizations available for your account. Please check your access permissions."

        # Always return the list for user to choose from
        org_list = "\n".join(
            [f"- {org['name']} (ID: {org['orgID']})" for org in organizations]
        )

        logger.info(f"Found {len(organizations)} available organizations")

        return f"""📋 **Available SingleStore Organizations:**

{org_list}

✅ To select an organization, please use the `set_organization` tool with either the organization name or ID.

**Example:**
- `set_organization("your-org-name")`
- `set_organization("org-id-12345")`

Once you select an organization, all subsequent API calls will use that organization."""

    except Exception as e:
        logger.error(f"Error retrieving organizations: {str(e)}")
        # BUG FIX: the original returned f"...{e.with_traceback(None)}...",
        # which mutates the exception's traceback and renders the exception's
        # repr rather than a readable message. Return the message itself.
        return f"Error retrieving organizations: {str(e)}"


async def set_organization(orgID: str, ctx: Context) -> str:
    """
    Select which SingleStore organization to use for all subsequent API calls.

    This tool must be called after logging in and before making other API
    requests. Once set, all API calls target the selected organization until
    changed.

    Args:
        orgID: Name or ID of the organization to select
            (use get_organizations to see available options)

    Returns:
        Success message with the selected organization details

    Usage:
        - Call get_organizations first to see available options
        - Then call this tool with either the organization's name or ID
        - Call again anytime to switch to a different organization
    """
    settings = get_settings()

    logger.debug(f"set_organization called with orgID: {orgID}")
    logger.debug(f"Auth method: {settings.auth_method}")
    logger.debug(f"Is remote: {settings.is_remote}")

    try:
        # For OAuth token authentication, get the list of available
        # organizations and validate that the provided orgID is one the
        # user has access to.
        if settings.auth_method == "oauth_token":
            logger.debug("Getting available organizations for validation...")
            available_orgs = query_graphql_organizations()

            # Find the organization by ID or (case-insensitive) name
            selected_org = None
            for org in available_orgs:
                if orgID == org["orgID"] or orgID.lower() == org["name"].lower():
                    selected_org = org
                    break

            if not selected_org:
                available_names = [
                    f"{org['name']} (ID: {org['orgID']})" for org in available_orgs
                ]
                return (
                    f"Organization '{orgID}' not found. Available organizations:\n"
                    + "\n".join(available_names)
                )

            # Update the settings with the organization ID. Plain attribute
            # assignment covers both settings variants — the original's
            # hasattr/setattr branches were identical in effect.
            logger.debug(f"Setting org_id to: {selected_org['orgID']}")
            settings.org_id = selected_org["orgID"]

            logger.info(f"Settings updated with org_id: {selected_org['orgID']}")
            return f"Successfully selected organization: {selected_org['name']} (ID: {selected_org['orgID']})"

        else:
            # For other authentication methods, validate against the single
            # current organization instead of a selectable list.
            logger.debug("Getting current organization...")
            current_org = get_current_organization()

            if not current_org:
                logger.error("Unable to get current organization")
                return "Unable to get current organization information."

            current_org_id = current_org.get("orgID") or current_org.get("id")
            current_org_name = current_org.get("name")

            logger.debug(f"Current org: {current_org_name} (ID: {current_org_id})")

            # Check if the provided orgID matches the current organization
            if orgID != current_org_id and orgID.lower() != current_org_name.lower():
                logger.warning(
                    f"Org mismatch: provided '{orgID}' vs current '{current_org_id}'"
                )
                return f"Organization '{orgID}' does not match your current organization '{current_org_name}' (ID: {current_org_id})"

            logger.debug("Organization match confirmed, updating settings...")
            # Same simplification as above: one assignment replaces the
            # duplicated hasattr/setattr branches.
            settings.org_id = current_org_id

            logger.info(f"Settings updated with org_id: {current_org_id}")
            return f"Successfully selected organization: {current_org_name} (ID: {current_org_id})"

    except Exception as e:
        logger.error(f"Error in set_organization: {str(e)}")
        return f"Error setting organization: {str(e)}"


def get_user_id(ctx: Context) -> str:
    """
    Retrieve the current user's unique identifier.

    Returns:
        str: UUID format identifier for the current user

    Required for:
        - Constructing paths or references to personal resources

    Performance Tip:
        Cache the returned ID when making multiple API calls.
    """
    return __get_user_id()


# Registry of all tools exposed by this module; each entry wraps a function
# (plus optional flags such as `deprecated`) into a Tool instance below.
tools_definition = [
    {"func": get_user_id, "deprecated": True},
    {"func": workspace_groups_info},
    {"func": workspaces_info},
    {"func": organization_info},
    {"func": list_of_regions},
    {"func": list_virtual_workspaces},
    {"func": organization_billing_usage},
    {"func": list_notebook_samples},
    {"func": list_shared_files},
    {"func": create_notebook},
    {"func": check_if_file_exists},
    {"func": create_scheduled_job},
    {"func": get_job_details},
    {"func": list_job_executions},
    {"func": get_notebook_path},
    {"func": get_organizations},
    {"func": set_organization},
]

# Export the tools
tools = [Tool(**tool) for tool in tools_definition]

# ---------------------------------------------------------------------------
# /src/api/tools/types.py
# ---------------------------------------------------------------------------
from dataclasses import dataclass
from typing import Callable, Optional

from src.api.types import MCPConcept


@dataclass()
class Tool(MCPConcept):
    # The callable implementing the tool. Optional annotation added: the
    # default is None, so `Callable = None` was a mistyped annotation.
    func: Optional[Callable] = None

# ---------------------------------------------------------------------------
# /src/api/types.py
# ---------------------------------------------------------------------------
from dataclasses import dataclass


@dataclass
class MCPConcept:
    """
    Represents an MCP concept (Tool, Resource, ...etc).

    Attributes:
        deprecated: Whether the concept is deprecated.
    """

    deprecated: bool = False

# ---------------------------------------------------------------------------
# /src/auth/__init__.py  (empty file)
# ---------------------------------------------------------------------------

# ---------------------------------------------------------------------------
# /src/auth/browser_auth.py
# ---------------------------------------------------------------------------
"""Browser-based OAuth authentication for local MCP server."""

import os
import json
import time
import webbrowser
import secrets
import base64
import hashlib
import http.server
import socketserver
import urllib.parse
import requests
from pathlib import Path
from typing import Optional, Dict, Any, Tuple
from datetime import datetime


# Scopes that are always required
ALWAYS_PRESENT_SCOPES = [
    "openid",
    "profile",
    "email",
    "phone",
    "address",
    "offline_access",
]

# Credential file path
CREDENTIALS_FILE = Path.home() / ".singlestore-mcp-credentials.json"

# Default OAuth configuration
DEFAULT_OAUTH_HOST = "https://authsvc.singlestore.com"
DEFAULT_CLIENT_ID = "b7dbf19e-d140-4334-bae4-e8cd03614485"
DEFAULT_AUTH_TIMEOUT = 300  # 5 minutes


class TokenSet:
    """Class representing an OAuth token set"""

    def __init__(self, data: Dict[str, Any]):
        self.access_token = data.get("access_token")
        self.token_type = data.get("token_type")
        self.id_token = data.get("id_token")
        self.refresh_token = data.get("refresh_token")
        self.expires_at = data.get("expires_at")
        # Keep the raw payload so to_dict() round-trips losslessly.
        self.raw_data = data

    def is_expired(self) -> bool:
        """Check if the access token is expired (missing expiry counts as expired)."""
        if not self.expires_at:
            return True
        return datetime.now().timestamp() >= self.expires_at

    def to_dict(self) -> Dict[str, Any]:
        """Convert token set to dictionary for serialization"""
        return self.raw_data
class AuthCallbackHandler(http.server.SimpleHTTPRequestHandler):
    """HTTP request handler to capture the OAuth redirect callback."""

    def __init__(self, *args, **kwargs):
        # Must be set before super().__init__, which dispatches the request
        # during construction.
        self.callback_params = None
        super().__init__(*args, **kwargs)

    def log_message(self, format, *args):
        """Silence the default logging"""
        pass

    def do_OPTIONS(self):
        """Handle OPTIONS requests (CORS preflight)"""
        self.send_response(200)
        self.send_header("Access-Control-Allow-Origin", "*")
        self.send_header("Access-Control-Allow-Methods", "GET, POST")
        self.send_header("Access-Control-Allow-Headers", "Content-Type")
        self.end_headers()

    def do_GET(self):
        """Handle GET requests: only /callback is served; everything else 404s."""
        if not self.path.startswith("/callback"):
            self.send_response(404)
            self.end_headers()
            return

        parsed_path = urllib.parse.urlparse(self.path)
        self.callback_params = urllib.parse.parse_qs(parsed_path.query)

        # Convert multi-value dict to single value dict for auth process
        self.server.callback_params = {k: v[0] for k, v in self.callback_params.items()}

        # Send a simple response
        self.send_response(200)
        self.send_header("Content-type", "text/html")
        self.end_headers()

        # Success response HTML.
        # NOTE(review): this markup was reconstructed from a garbled source;
        # confirm the exact styling/script against the original file.
        response = """
        <html>
        <head>
            <title>Authentication Successful</title>
            <style>
                body { font-family: sans-serif; text-align: center; padding-top: 4em; }
            </style>
        </head>
        <body>
            <h1>&#10003; Authentication Successful</h1>
            <p>You have successfully authenticated with SingleStore.</p>
            <p>You can close this window now and return to your terminal.</p>
            <script>
                setTimeout(function() { window.close(); }, 3000);
            </script>
        </body>
        </html>
        """

        self.wfile.write(response.encode())

        # Signal that we've received the callback
        self.server.received_callback = True


def generate_code_verifier() -> str:
    """Generate a PKCE code verifier (43-128 URL-safe characters)."""
    code_verifier = secrets.token_urlsafe(64)
    # Trim to appropriate length (43-128 chars)
    if len(code_verifier) > 128:
        code_verifier = code_verifier[:128]
    return code_verifier


def generate_code_challenge(code_verifier: str) -> str:
    """Generate an S256 code challenge: base64url(sha256(verifier)), unpadded."""
    code_challenge = hashlib.sha256(code_verifier.encode()).digest()
    code_challenge = base64.urlsafe_b64encode(code_challenge).decode().rstrip("=")
    return code_challenge


def generate_state() -> str:
    """Generate a random state parameter for the OAuth flow (CSRF protection)."""
    return secrets.token_urlsafe(32)


def discover_oauth_server(oauth_host: str) -> Dict[str, Any]:
    """Discover OAuth server endpoints via OIDC discovery.

    Raises:
        Exception: If the discovery document cannot be fetched.
    """
    discovery_url = (
        f"{oauth_host}/auth/oidc/op/Customer/.well-known/openid-configuration"
    )
    try:
        response = requests.get(discovery_url, timeout=10)
        response.raise_for_status()
        return response.json()
    except requests.RequestException as e:
        raise Exception(f"Failed to discover OAuth endpoints: {e}")


def save_credentials(token_set: TokenSet) -> None:
    """
    Save authentication token to the credentials file with 0600 permissions.

    Args:
        token_set: OAuth token set
    """
    # Create credential data structure
    creds = {
        "token_set": token_set.to_dict(),
        "timestamp": time.time(),
    }

    # Ensure directory exists
    CREDENTIALS_FILE.parent.mkdir(parents=True, exist_ok=True)

    # SECURITY FIX: the original wrote the file and only then chmod'ed it to
    # 0o600, leaving a window where the token was readable by other users.
    # Create the file with restrictive permissions from the start.
    fd = os.open(CREDENTIALS_FILE, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600)
    with os.fdopen(fd, "w") as f:
        json.dump(creds, f, indent=2)

    # Also tighten permissions on a pre-existing file (os.open's mode only
    # applies when the file is created).
    os.chmod(CREDENTIALS_FILE, 0o600)
    print(f"Credentials saved to {CREDENTIALS_FILE}")


def load_credentials() -> Optional[Dict[str, Any]]:
    """
    Load authentication credentials from file.

    Returns:
        Dict containing credentials or None if not available
    """
    if not CREDENTIALS_FILE.exists():
        return None

    try:
        with open(CREDENTIALS_FILE, "r") as f:
            return json.load(f)
    except (json.JSONDecodeError, IOError) as e:
        print(f"Failed to load credentials: {e}")
        return None
def refresh_token(
    token_set: TokenSet,
    client_id: str = DEFAULT_CLIENT_ID,
    oauth_host: str = DEFAULT_OAUTH_HOST,
) -> Optional[TokenSet]:
    """
    Refresh an OAuth token using the refresh token.

    Args:
        token_set: The token set containing the refresh token
        client_id: OAuth client ID
        oauth_host: OAuth server host

    Returns:
        A new token set or None if refresh failed
    """
    if not token_set.refresh_token:
        print("No refresh token available")
        return None

    try:
        # Discover OAuth server endpoints
        oauth_config = discover_oauth_server(oauth_host)
        token_endpoint = oauth_config.get("token_endpoint")

        if not token_endpoint:
            print("Invalid OAuth server configuration")
            return None

        # Prepare refresh token request
        data = {
            "grant_type": "refresh_token",
            "refresh_token": token_set.refresh_token,
            "client_id": client_id,
        }

        # Send refresh token request
        response = requests.post(
            token_endpoint,
            data=data,
            headers={"Content-Type": "application/x-www-form-urlencoded"},
            timeout=10,
        )
        response.raise_for_status()

        # Parse token response
        token_data = response.json()

        # Derive an absolute expiry when the server only sent a relative one
        if "expires_in" in token_data and "expires_at" not in token_data:
            token_data["expires_at"] = (
                datetime.now().timestamp() + token_data["expires_in"]
            )

        # Create new token set and persist it for future sessions
        new_token_set = TokenSet(token_data)
        save_credentials(new_token_set)

        print("Token refreshed successfully")
        return new_token_set

    except Exception as e:
        print(f"Token refresh failed: {e}")
        return None


def authenticate(
    client_id: str = DEFAULT_CLIENT_ID,
    oauth_host: str = DEFAULT_OAUTH_HOST,
    auth_timeout: int = DEFAULT_AUTH_TIMEOUT,
) -> Tuple[bool, Optional[TokenSet]]:
    """
    Launch browser authentication flow and capture OAuth token.

    Args:
        client_id: OAuth client ID to use for authentication
        oauth_host: OAuth server host
        auth_timeout: Timeout in seconds for authentication

    Returns:
        Tuple of (success: bool, token_set: Optional[TokenSet])
    """
    try:
        # Discover OAuth server endpoints
        print("Discovering OAuth server endpoints...")
        oauth_config = discover_oauth_server(oauth_host)
        authorization_endpoint = oauth_config.get("authorization_endpoint")
        token_endpoint = oauth_config.get("token_endpoint")

        if not authorization_endpoint or not token_endpoint:
            print("Invalid OAuth server configuration")
            return False, None

        # Generate PKCE code verifier and challenge
        code_verifier = generate_code_verifier()
        code_challenge = generate_code_challenge(code_verifier)

        # Generate state for security
        state = generate_state()

        # Find an available port for the redirect server.
        # NOTE(review): there is a small race between closing this probe
        # socket and re-binding below; acceptable for a local one-shot flow.
        with socketserver.TCPServer(("127.0.0.1", 0), None) as s:
            port = s.server_address[1]

        # Redirect URI
        redirect_uri = f"http://127.0.0.1:{port}/callback"

        # Create server class with additional attributes
        class CallbackServer(socketserver.TCPServer):
            def __init__(self, *args, **kwargs):
                super().__init__(*args, **kwargs)
                self.received_callback = False
                self.callback_params = None

        # Create a custom handler factory
        def handler(*args, **kwargs):
            return AuthCallbackHandler(*args, **kwargs)

        # Start a temporary web server to capture the callback
        with CallbackServer(("127.0.0.1", port), handler) as httpd:
            print(f"Starting temporary authentication server on port {port}...")

            # Prepare authorization URL
            scopes = " ".join(ALWAYS_PRESENT_SCOPES)
            auth_params = {
                "client_id": client_id,
                "redirect_uri": redirect_uri,
                "response_type": "code",
                "scope": scopes,
                "state": state,
                "code_challenge": code_challenge,
                "code_challenge_method": "S256",
            }

            auth_url = f"{authorization_endpoint}?{urllib.parse.urlencode(auth_params)}"

            print(f"\n{'=' * 60}")
            print("🚀 Starting browser authentication...")
            print(f"Client ID: {client_id}")
            print(f"Redirect URI: {redirect_uri}")
            print(f"{'=' * 60}")

            # Open browser to auth URL
            print("Opening browser for SingleStore authentication...")
            webbrowser.open(auth_url)

            print("If the browser doesn't open automatically, please visit:")
            print(f"{auth_url}")

            # Set timeout for each request
            httpd.timeout = 1

            # Serve until callback is received or timeout
            start_time = time.time()
            print(f"\nWaiting for authentication (timeout: {auth_timeout}s)...")

            # BUG FIX: the original used `int(elapsed) % 30 == 0 and elapsed > 0`,
            # which printed the progress message repeatedly during seconds
            # 0, 30, 60, ... (the loop iterates roughly once per second).
            # Use an advancing threshold so it prints exactly once per 30s.
            next_progress = 30.0
            while not httpd.received_callback:
                httpd.handle_request()
                elapsed = time.time() - start_time

                if elapsed >= next_progress:
                    remaining = auth_timeout - elapsed
                    if remaining > 0:
                        print(f"Still waiting... ({remaining:.0f}s remaining)")
                    next_progress += 30.0

                if elapsed > auth_timeout:
                    print("\n❌ Authentication timed out")
                    print(
                        "Please try again or check your browser for any blocked popups."
                    )
                    return False, None

            # Process callback parameters
            if not httpd.callback_params:
                print("❌ No callback parameters received")
                return False, None

            # Check state parameter
            if httpd.callback_params.get("state") != state:
                print("❌ State parameter mismatch, possible CSRF attack")
                return False, None

            # Extract authorization code
            code = httpd.callback_params.get("code")
            if not code:
                error = httpd.callback_params.get("error")
                error_description = httpd.callback_params.get(
                    "error_description", "Unknown error"
                )
                print(f"❌ Authorization failed: {error} - {error_description}")
                return False, None

            print("✅ Authorization code received, exchanging for tokens...")

            # Exchange code for tokens
            token_data = {
                "grant_type": "authorization_code",
                "code": code,
                "redirect_uri": redirect_uri,
                "client_id": client_id,
                "code_verifier": code_verifier,
            }

            # Send token request
            response = requests.post(
                token_endpoint,
                data=token_data,
                headers={"Content-Type": "application/x-www-form-urlencoded"},
                timeout=10,
            )

            if response.status_code != 200:
                print(f"❌ Token exchange failed: {response.status_code}")
                print(f"Response: {response.text}")
                return False, None

            # Parse token response
            token_response = response.json()

            if "error" in token_response:
                print(
                    f"❌ Token exchange error: {token_response.get('error_description', token_response['error'])}"
                )
                return False, None

            # Add expires_at if we got expires_in
            if "expires_in" in token_response and "expires_at" not in token_response:
                token_response["expires_at"] = (
                    datetime.now().timestamp() + token_response["expires_in"]
                )

            # Create token set and persist it
            token_set = TokenSet(token_response)
            save_credentials(token_set)

            print("✅ Authentication successful!")
            return True, token_set

    except Exception as e:
        print(f"❌ Authentication failed: {e}")
        return False, None


def get_authentication_token(
    client_id: str = DEFAULT_CLIENT_ID,
    oauth_host: str = DEFAULT_OAUTH_HOST,
    auth_timeout: int = DEFAULT_AUTH_TIMEOUT,
    force_reauth: bool = False,
) -> Optional[str]:
    """
    Get authentication token for local MCP server.
    Checks saved credentials first, then launches browser auth if needed.

    Args:
        client_id: OAuth client ID to use for authentication
        oauth_host: OAuth server host
        auth_timeout: Timeout in seconds for authentication
        force_reauth: Force re-authentication even if valid token exists

    Returns:
        Access token if available, None otherwise
    """
    if not force_reauth:
        # Check saved credentials file
        credentials = load_credentials()
        if credentials and "token_set" in credentials:
            token_set = TokenSet(credentials["token_set"])

            # If token is expired, try to refresh it
            if token_set.is_expired() and token_set.refresh_token:
                print("Access token expired, attempting to refresh...")
                refreshed_token_set = refresh_token(token_set, client_id, oauth_host)
                if refreshed_token_set:
                    print("✅ Token refreshed successfully")
                    return refreshed_token_set.access_token
                else:
                    print("Token refresh failed, proceeding to re-authentication")

            # If we have a valid token, use it
            if not token_set.is_expired() and token_set.access_token:
                print("✅ Using saved authentication token")
                return token_set.access_token

    # If no valid credentials found, launch browser authentication
    print("\n🔐 No valid authentication token found")
    print("Starting browser-based authentication with SingleStore...")

    success, token_set = authenticate(client_id, oauth_host, auth_timeout)

    if success and token_set and token_set.access_token:
        print("🎉 Authentication completed successfully!")
        return token_set.access_token
    else:
        print("❌ Authentication failed")
        print("Please try again or check your network connection.")
        return None


def clear_credentials() -> bool:
    """
    Clear saved credentials.

    Returns:
        True if credentials were cleared, False if no credentials exist
    """
    if CREDENTIALS_FILE.exists():
        try:
            CREDENTIALS_FILE.unlink()
            print(f"✅ Credentials cleared from {CREDENTIALS_FILE}")
            return True
        except Exception as e:
            print(f"❌ Failed to clear credentials: {e}")
            return False
    else:
        print("No saved credentials found")
        return False

# ---------------------------------------------------------------------------
# /src/auth/callback.py
# ---------------------------------------------------------------------------
import logging
from src.auth.provider import SingleStoreOAuthProvider
from starlette.exceptions import HTTPException
from starlette.requests import Request
from starlette.responses import JSONResponse, RedirectResponse, Response


def make_auth_callback_handler(oauth_provider: SingleStoreOAuthProvider):
    """Build a Starlette handler that completes the SingleStore OAuth flow."""

    async def auth_callback_handler(request: Request) -> Response:
        code = request.query_params.get("code")
        state = request.query_params.get("state")

        if not code:
            raise HTTPException(400, "Missing code parameter")
        if not state:
            raise HTTPException(400, "Missing state parameter")

        try:
            redirect_uri = await oauth_provider.handle_singlestore_callback(code, state)
            return RedirectResponse(status_code=302, url=redirect_uri)
        except HTTPException:
            # Propagate deliberate HTTP errors unchanged
            raise
        except Exception as e:
            logging.error("Unexpected error", exc_info=e)
            return JSONResponse(
                status_code=500,
                content={
                    "error": "server_error",
                    "error_description": "Unexpected error",
                },
            )

    return auth_callback_handler
import base64
import hashlib
import logging
import secrets
import time

from mcp.server.auth.provider import (
    AccessToken,
    AuthorizationCode,
    AuthorizationParams,
    OAuthAuthorizationServerProvider,
    RefreshToken,
    construct_redirect_uri,
)
from mcp.shared._httpx_utils import create_mcp_http_client
from mcp.shared.auth import OAuthClientInformationFull, OAuthToken
from pydantic import AnyHttpUrl
from starlette.exceptions import HTTPException
from urllib.parse import urlencode

from src.config.config import RemoteSettings


class SingleStoreOAuthProvider(OAuthAuthorizationServerProvider):
    """OAuth authorization-server provider that proxies SingleStore OAuth.

    Bridges two flows:

    1. An MCP client authorizes against this server
       (``authorize`` / ``exchange_authorization_code``).
    2. This server runs its own PKCE authorization-code flow against the
       SingleStore OAuth server and hands the resulting access token back
       to the MCP client.

    All state (clients, codes, tokens) is held in memory only, so it is
    lost on restart and not shared between processes.
    """

    def __init__(self, settings: RemoteSettings):
        self.settings = settings
        # Registered OAuth clients, keyed by client_id so that get_client()
        # lookups succeed. (Previously the predefined entry was keyed under
        # an unrelated UUID, making it unreachable via its own client_id.)
        self.clients: dict[str, OAuthClientInformationFull] = {
            # Predefined client for the SingleStore MCP server
            # (Claude Desktop client).
            "b7dbf19e-d140-4334-bae4-e8cd03614485": OAuthClientInformationFull(
                client_id="b7dbf19e-d140-4334-bae4-e8cd03614485",
                client_name="Simple SingleStore MCP Server",
                redirect_uris=[AnyHttpUrl("http://localhost:18089/oauth/callback")],
                response_types=["code"],
                grant_types=["authorization_code", "refresh_token"],
            )
        }
        # Pending MCP authorization codes, keyed by code value.
        self.auth_codes: dict[str, AuthorizationCode] = {}
        # Issued MCP access tokens, keyed by token value.
        self.tokens: dict[str, AccessToken] = {}
        # OAuth "state" -> details of the originating /authorize request.
        self.state_mapping: dict[str, dict[str, str]] = {}
        # Store SingleStore tokens with MCP tokens using the format:
        # {"mcp_token": "singlestore_token"}
        self.token_mapping: dict[str, str] = {}
        # PKCE code verifier for the in-flight SingleStore flow. Kept as an
        # empty string when no flow is active so later reads never raise
        # AttributeError (the old code `del`eted the attribute after use).
        self.singlestore_code_verifier: str = ""

    async def get_client(self, client_id: str) -> OAuthClientInformationFull | None:
        """Return the registered client for *client_id*, or None."""
        return self.clients.get(client_id)

    async def register_client(self, client_info: OAuthClientInformationFull):
        """Register (or replace) an OAuth client under its client_id."""
        self.clients[client_info.client_id] = client_info

    def _generate_code_verifier(self) -> str:
        """Generate and remember a PKCE code verifier (43-128 chars)."""
        # token_urlsafe(64) yields ~86 chars; trim defensively to the
        # RFC 7636 maximum of 128.
        code_verifier = secrets.token_urlsafe(64)[:128]
        self.singlestore_code_verifier = code_verifier
        return code_verifier

    def _generate_code_challenge(self, code_verifier: str) -> str:
        """Derive the S256 PKCE code challenge for *code_verifier*."""
        digest = hashlib.sha256(code_verifier.encode()).digest()
        return base64.urlsafe_b64encode(digest).decode().rstrip("=")

    async def authorize(
        self, client: OAuthClientInformationFull, params: AuthorizationParams
    ) -> str:
        """Build the SingleStore authorization URL for an MCP /authorize request.

        The MCP client's request details are stashed under the OAuth state
        so they can be recovered in handle_singlestore_callback().
        """
        state = params.state or secrets.token_hex(16)

        # Store the state mapping
        self.state_mapping[state] = {
            "code": state,  # Temporarily use state as code
            "state": state,
            "redirect_uri": str(params.redirect_uri),
            "code_challenge": params.code_challenge,
            "redirect_uri_provided_explicitly": str(
                params.redirect_uri_provided_explicitly
            ),
            "client_id": client.client_id,
        }

        # Generate PKCE code verifier and challenge for our own leg of the
        # flow against the SingleStore OAuth server.
        code_verifier = self._generate_code_verifier()
        code_challenge = self._generate_code_challenge(code_verifier)

        auth_params = {
            "client_id": self.settings.client_id,
            "redirect_uri": self.settings.callback_path,  # Our server's callback endpoint
            "response_type": "code",
            "scope": self.settings.required_scopes[0],  # Use the first scope
            "state": state,
            "code_challenge": code_challenge,
            "code_challenge_method": "S256",
        }

        # Create the authorization URL for SingleStore OAuth
        return f"{self.settings.singlestore_auth_url}?{urlencode(auth_params)}"

    async def handle_singlestore_callback(self, code: str, state: str) -> str:
        """Handle the SingleStore OAuth callback.

        Recovers the original MCP request from the state mapping, records an
        MCP authorization code, and returns the redirect URI for the client.

        Raises:
            HTTPException: 400 when the state is unknown.
        """
        state_data = self.state_mapping.get(state)
        if not state_data:
            raise HTTPException(400, "Invalid state parameter")

        redirect_uri = state_data["redirect_uri"]
        code_challenge = state_data["code_challenge"]
        redirect_uri_provided_explicitly = (
            state_data["redirect_uri_provided_explicitly"] == "True"
        )
        client_id = state_data["client_id"]

        # Create MCP authorization code (reuses SingleStore's code value).
        new_code = code
        auth_code = AuthorizationCode(
            code=new_code,
            client_id=client_id,
            redirect_uri=AnyHttpUrl(redirect_uri),
            redirect_uri_provided_explicitly=redirect_uri_provided_explicitly,
            expires_at=time.time() + 300,  # 5-minute validity window
            scopes=[self.settings.required_scopes[0]],  # Use the first scope
            code_challenge=code_challenge,
        )
        self.auth_codes[new_code] = auth_code

        del self.state_mapping[state]
        return construct_redirect_uri(redirect_uri, code=new_code, state=state)

    async def load_authorization_code(
        self, client: OAuthClientInformationFull, authorization_code: str
    ) -> AuthorizationCode | None:
        """Load an authorization code by value, or None when unknown."""
        return self.auth_codes.get(authorization_code)

    async def exchange_authorization_code(
        self, client: OAuthClientInformationFull, authorization_code: AuthorizationCode
    ) -> OAuthToken:
        """Exchange an MCP authorization code for tokens.

        Performs the token request against the SingleStore token endpoint
        using the PKCE verifier generated in authorize().

        Raises:
            ValueError: if the authorization code is unknown.
            HTTPException: 400 when the SingleStore exchange fails or the
                response is malformed.
        """
        if authorization_code.code not in self.auth_codes:
            raise ValueError("Invalid authorization code")

        # Get the S2 token from the S2 authentication server
        async with create_mcp_http_client() as http_client:
            response = await http_client.post(
                self.settings.singlestore_token_url,
                params={
                    "grant_type": "authorization_code",
                    "code_verifier": self.singlestore_code_verifier,
                    "client_id": self.settings.client_id,
                },
                data={
                    "code": authorization_code.code,
                    "redirect_uri": self.settings.callback_path,
                },
                headers={"Content-Type": "application/x-www-form-urlencoded"},
            )

            if response.status_code != 200:
                # Log instead of print; the request parameters (e.g. the
                # PKCE verifier) are deliberately not echoed.
                logging.error(
                    "Failed to exchange code for token: %s - %s",
                    response.status_code,
                    response.text,
                )
                raise HTTPException(400, "Failed to exchange code for token")

            data = response.json()

            if "error" in data:
                raise HTTPException(400, data.get("error_description", data["error"]))

            # Use .get(): the old data["access_token"] raised KeyError before
            # the explicit missing-token check could run.
            mcp_token = data.get("access_token")
            if not mcp_token:
                raise HTTPException(400, "No access token received from SingleStore")

            expires_in = data.get("expires_in", 3600)
            if expires_in <= 0:
                raise HTTPException(
                    400, "Invalid expiration time received from SingleStore"
                )

            # RFC 6749 §5.1: token_type is case-insensitive.
            token_type = data.get("token_type") or ""
            if token_type.lower() != "bearer":
                raise HTTPException(400, "Unsupported token type received from SingleStore")

            # Store MCP token
            self.tokens[mcp_token] = AccessToken(
                token=mcp_token,
                client_id=client.client_id,
                scopes=authorization_code.scopes,
                expires_at=int(time.time()) + expires_in,
            )

            # Find SingleStore token for this client.
            # NOTE(review): the just-stored MCP token also satisfies this
            # search, so the mapping can end up pointing a token at itself —
            # confirm the intended source of the SingleStore token.
            singlestore_token = next(
                (
                    token
                    for token, token_info in self.tokens.items()
                    if token_info.client_id == client.client_id
                ),
                None,
            )

            # Store mapping between MCP token and SingleStore token.
            # (Debug print of the mapping removed: it wrote bearer tokens
            # to stdout.)
            if singlestore_token:
                self.token_mapping[mcp_token] = singlestore_token

            # Reset (not `del`) so a later exchange sees "" instead of
            # raising AttributeError.
            self.singlestore_code_verifier = ""
            del self.auth_codes[authorization_code.code]

            return OAuthToken(
                access_token=mcp_token,
                token_type="bearer",
                expires_in=expires_in,
                scope=" ".join(authorization_code.scopes),
            )

    async def load_access_token(self, token: str) -> AccessToken | None:
        """Load and validate an access token.

        Expired tokens are evicted and treated as unknown. (Debug print of
        the raw token removed: it leaked credentials to stdout.)
        """
        access_token = self.tokens.get(token)
        if not access_token:
            return None

        # Check if expired
        if access_token.expires_at and access_token.expires_at < time.time():
            del self.tokens[token]
            return None

        return access_token

    async def load_refresh_token(
        self, client: OAuthClientInformationFull, refresh_token: str
    ) -> RefreshToken | None:
        """Load a refresh token - not supported."""
        return None

    async def exchange_refresh_token(
        self,
        client: OAuthClientInformationFull,
        refresh_token: RefreshToken,
        scopes: list[str],
    ) -> OAuthToken:
        """Exchange refresh token - not supported."""
        raise NotImplementedError("Not supported")

    async def revoke_token(
        self, token: str, token_type_hint: str | None = None
    ) -> None:
        """Revoke a token by removing it from the in-memory store."""
        if token in self.tokens:
            del self.tokens[token]
-------------------------------------------------------------------------------- /src/commands/init.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | import sys 4 | from pathlib import Path 5 | from typing import Optional, Literal 6 | 7 | # Supported client types 8 | ClientType = Literal["claude", "cursor"] 9 | 10 | # Client config file paths (platform-dependent) 11 | CLIENT_CONFIG_PATHS = { 12 | "claude": { 13 | "darwin": ("~/Library/Application Support/Claude/claude_desktop_config.json"), 14 | "win32": "%APPDATA%\\Claude\\claude_desktop_config.json", 15 | "linux": "~/.config/Claude/claude_desktop_config.json", 16 | }, 17 | "cursor": { 18 | "darwin": "~/.cursor/mcp.json", 19 | "win32": "~/.cursor/mcp.json", 20 | "linux": "~/.cursor/mcp.json", 21 | }, 22 | } 23 | 24 | # Client-specific config templates 25 | CLIENT_CONFIG_TEMPLATES = { 26 | "claude": { 27 | "mcpServers": { 28 | "singlestore-mcp-server": { 29 | "command": "uvx", 30 | "args": [ 31 | "singlestore-mcp-server", 32 | "start", 33 | "--api-key", 34 | "{api_key}", 35 | ], 36 | } 37 | } 38 | }, 39 | "cursor": { 40 | "mcpServers": { 41 | "singlestore-mcp-server": { 42 | "command": "uvx", 43 | "args": ["singlestore-mcp-server", "start", "--api-key", "{api_key}"], 44 | } 45 | } 46 | }, 47 | } 48 | 49 | 50 | def get_config_path(client: ClientType) -> Optional[Path]: 51 | """ 52 | Get the platform-specific config path for the client. 
53 | 54 | Args: 55 | client: The LLM client name 56 | 57 | Returns: 58 | Path to the config file or None if unsupported platform 59 | """ 60 | platform = sys.platform 61 | if platform not in CLIENT_CONFIG_PATHS[client]: 62 | print(f"Unsupported platform: {platform} for client: {client}") 63 | return None 64 | 65 | # Get the raw path and expand environment variables and user directory 66 | raw_path = CLIENT_CONFIG_PATHS[client][platform] 67 | if platform == "win32": 68 | # Windows-specific environment variable expansion 69 | for env_var in os.environ: 70 | placeholder = f"%{env_var}%" 71 | if placeholder in raw_path: 72 | raw_path = raw_path.replace(placeholder, os.environ[env_var]) 73 | return Path(raw_path) 74 | else: 75 | # Unix-like systems 76 | return Path(os.path.expanduser(raw_path)) 77 | 78 | 79 | def create_config_directory(config_path: Path) -> bool: 80 | """ 81 | Create the directory for the config file if it doesn't exist. 82 | 83 | Args: 84 | config_path: Path to the config file 85 | 86 | Returns: 87 | True if successful, False otherwise 88 | """ 89 | try: 90 | config_path.parent.mkdir(parents=True, exist_ok=True) 91 | return True 92 | except Exception as e: 93 | print(f"Error creating config directory: {e}") 94 | return False 95 | 96 | 97 | def update_client_config(client: ClientType, api_key: str) -> bool: 98 | """ 99 | Update the client configuration file to use the SingleStore MCP server. 
100 | 101 | Args: 102 | client: The LLM client name 103 | api_key: SingleStore API key 104 | 105 | Returns: 106 | True if successful, False otherwise 107 | """ 108 | config_path = get_config_path(client) 109 | if not config_path: 110 | return False 111 | 112 | # Create directory if it doesn't exist 113 | if not create_config_directory(config_path): 114 | return False 115 | 116 | # Prepare the config data 117 | template = CLIENT_CONFIG_TEMPLATES[client] 118 | 119 | # Fill in the API key 120 | config_str = json.dumps(template, indent=2) 121 | config_str = config_str.replace('"{api_key}"', f'"{api_key}"') 122 | config_data = json.loads(config_str) 123 | 124 | try: 125 | # Read existing config if available 126 | if config_path.exists(): 127 | with open(config_path, "r") as f: 128 | try: 129 | existing_config = json.load(f) 130 | # Merge the configs based on client type 131 | if client in ["claude", "cursor"]: 132 | if "mcpServers" not in existing_config: 133 | existing_config["mcpServers"] = {} 134 | existing_config["mcpServers"]["singlestore-mcp-server"] = ( 135 | config_data["mcpServers"]["singlestore-mcp-server"] 136 | ) 137 | 138 | config_data = existing_config 139 | except json.JSONDecodeError: 140 | # If the file exists but is invalid JSON, use our template 141 | print( 142 | f"Warning: Existing config file at {config_path} is not valid JSON. Creating a new file." 143 | ) 144 | 145 | # Write the updated config 146 | with open(config_path, "w") as f: 147 | json.dump(config_data, indent=2, fp=f) 148 | 149 | print( 150 | f"Successfully configured {client.capitalize()} to use SingleStore MCP server." 151 | ) 152 | print(f"Config updated at: {config_path}") 153 | return True 154 | 155 | except Exception as e: 156 | print(f"Error updating client config: {e}") 157 | return False 158 | 159 | 160 | def init_command( 161 | api_key: str, 162 | client: str = "claude", 163 | ) -> int: 164 | """ 165 | Initialize the SingleStore MCP server for a specific client. 
166 | 167 | Args: 168 | api_key: SingleStore API key 169 | client: Name of the LLM client (claude, cursor) 170 | 171 | Returns: 172 | Exit code (0 for success, 1 for failure) 173 | """ 174 | client = client.lower() 175 | valid_clients = list(CLIENT_CONFIG_TEMPLATES.keys()) 176 | 177 | if client not in valid_clients: 178 | print(f"Error: Unsupported client '{client}'") 179 | print(f"Supported clients: {', '.join(valid_clients)}") 180 | return 1 181 | 182 | print(f"Initializing SingleStore MCP server for {client.capitalize()}...") 183 | # Update the client configuration 184 | if update_client_config(client, api_key): 185 | print("\nSetup complete! You can now use the MCP server with your LLM client.") 186 | print("Restart your LLM client to apply the changes.") 187 | return 0 188 | else: 189 | print("\nSetup failed. Please check the error messages above.") 190 | return 1 191 | -------------------------------------------------------------------------------- /src/commands/start.py: -------------------------------------------------------------------------------- 1 | from fastmcp import FastMCP 2 | from mcp.server.auth.settings import AuthSettings, ClientRegistrationOptions 3 | import logging 4 | 5 | from src.auth.callback import make_auth_callback_handler 6 | from src.api.tools import register_tools 7 | from src.auth.provider import SingleStoreOAuthProvider 8 | from src.api.resources.register import register_resources 9 | from src.auth.browser_auth import get_authentication_token 10 | import src.config.config as config 11 | 12 | # Configure logging to enable debug messages 13 | logging.basicConfig( 14 | level=logging.DEBUG, 15 | format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", 16 | handlers=[logging.StreamHandler()], 17 | ) 18 | 19 | 20 | def start_command(transport, api_key): 21 | # Handle browser authentication for stdio mode when no API key is provided 22 | if transport == config.Transport.STDIO and not api_key: 23 | print("No API key provided for stdio 
def start_command(transport, api_key):
    """Boot the SingleStore MCP server on the requested transport.

    For stdio without an API key, a browser-based OAuth flow supplies the
    credential first. Remote transports additionally wire up the OAuth
    provider and its /callback route before serving.
    """
    needs_browser_auth = transport == config.Transport.STDIO and not api_key

    if needs_browser_auth:
        print("No API key provided for stdio mode. Starting browser authentication...")
        oauth_token = get_authentication_token()
        if not oauth_token:
            print(
                "❌ Authentication failed. Cannot start MCP server without valid credentials."
            )
            return
        print("✅ Authentication successful. Starting MCP server...")

        # Build settings around the freshly obtained OAuth token.
        settings = config.init_settings(transport=transport, api_key=oauth_token)
        if isinstance(settings, config.LocalSettings):
            settings.set_oauth_token(oauth_token)
    else:
        settings = config.init_settings(transport=transport, api_key=api_key)

    server_kwargs = {
        "name": "SingleStore MCP Server",
    }

    oauth_provider = None
    if settings.is_remote:
        server_kwargs["auth"] = AuthSettings(
            issuer_url=settings.server_url,
            required_scopes=settings.required_scopes,
            client_registration_options=ClientRegistrationOptions(
                enabled=True,
                valid_scopes=settings.required_scopes,
                default_scopes=settings.required_scopes,
            ),
        )
        oauth_provider = SingleStoreOAuthProvider(settings=settings)
        server_kwargs["auth_server_provider"] = oauth_provider

    server = FastMCP(**server_kwargs)

    register_tools(server)
    register_resources(server)

    if not settings.is_remote:
        server.run(transport=transport)
        return

    # Remote mode: register the OAuth callback route, then serve over the network.
    server.custom_route("/callback", methods=["GET"])(
        make_auth_callback_handler(oauth_provider)
    )
    server.run(transport=transport, host=settings.host, port=settings.port)
-------------------------------------------------------------------------------- 1 | from typing import List 2 | import requests 3 | 4 | from abc import ABC 5 | from contextvars import ContextVar 6 | from enum import Enum 7 | from pydantic import AnyHttpUrl 8 | from pydantic_settings import BaseSettings, SettingsConfigDict 9 | 10 | 11 | class Transport(str, Enum): 12 | STDIO = "stdio" 13 | SSE = "sse" 14 | HTTP = "streamable-http" 15 | 16 | 17 | class AuthMethod(str, Enum): 18 | API_KEY = "api_key" 19 | OAUTH_TOKEN = "oauth_token" 20 | 21 | 22 | class Settings(ABC, BaseSettings): 23 | host: str = "localhost" 24 | port: int = 8000 25 | s2_api_base_url: str = "https://api.singlestore.com" 26 | graphql_public_endpoint: str = "https://backend.singlestore.com/public" 27 | transport: Transport 28 | is_remote: bool 29 | 30 | 31 | class LocalSettings(Settings): 32 | api_key: str | None = None 33 | org_id: str | None = ( 34 | None # Added to support organization selection for OAuth token auth 35 | ) 36 | auth_method: AuthMethod = AuthMethod.API_KEY 37 | transport: Transport = Transport.STDIO 38 | is_remote: bool = False 39 | 40 | model_config = SettingsConfigDict(env_prefix="MCP_", env_file=".env.local") 41 | 42 | def set_oauth_token(self, token: str) -> None: 43 | """Set OAuth token as the authentication method""" 44 | self.api_key = token 45 | self.auth_method = AuthMethod.OAUTH_TOKEN 46 | 47 | 48 | class RemoteSettings(Settings): 49 | host: str 50 | 51 | org_id: str 52 | 53 | is_remote: bool = True 54 | 55 | issuer_url: str 56 | required_scopes: List[str] 57 | 58 | server_url: AnyHttpUrl | None = None 59 | 60 | client_id: str 61 | callback_path: AnyHttpUrl | None = None 62 | 63 | # SingleStore OAuth URLs 64 | singlestore_auth_url: str | None = None 65 | singlestore_token_url: str | None = None 66 | 67 | # Stores temporarily generated code verifier for PKCE. Will be deleted after use. 
68 | singlestore_code_verifier: str = "" 69 | 70 | model_config = SettingsConfigDict(env_prefix="MCP_", env_file=".env.remote") 71 | 72 | def __init__(self, **data): 73 | """Initialize settings with values from environment variables.""" 74 | super().__init__(**data) 75 | 76 | self.server_url = AnyHttpUrl(f"http://{self.host}:{self.port}") 77 | self.callback_path = AnyHttpUrl(f"http://{self.host}:{self.port}/callback") 78 | 79 | self.singlestore_auth_url, self.singlestore_token_url = ( 80 | self.discover_oauth_server() 81 | ) 82 | 83 | def discover_oauth_server(self) -> tuple[str, str]: 84 | """Discover OAuth server endpoints""" 85 | discovery_url = f"{self.issuer_url}/.well-known/openid-configuration" 86 | response = requests.get(discovery_url, timeout=10) 87 | response.raise_for_status() 88 | 89 | authorization_endpoint: str = response.json().get("authorization_endpoint") 90 | 91 | if not authorization_endpoint: 92 | raise ValueError("Failed to discover OAuth endpoints") 93 | 94 | token_endpoint: str = response.json().get("token_endpoint") 95 | if not token_endpoint: 96 | raise ValueError("Failed to discover OAuth endpoints") 97 | 98 | return authorization_endpoint, token_endpoint 99 | 100 | 101 | _settings_ctx: ContextVar[Settings] = ContextVar("settings", default=None) 102 | 103 | 104 | def init_settings( 105 | transport: Transport, api_key: str | None = None 106 | ) -> RemoteSettings | LocalSettings: 107 | match transport: 108 | case Transport.HTTP: 109 | settings = RemoteSettings(transport=Transport.HTTP) 110 | case Transport.SSE: 111 | settings = RemoteSettings(transport=Transport.SSE) 112 | case Transport.STDIO: 113 | settings = LocalSettings(api_key=api_key) 114 | case _: 115 | raise ValueError(f"Unsupported transport mode: {transport}") 116 | 117 | _settings_ctx.set(settings) 118 | return settings 119 | 120 | 121 | def get_settings() -> RemoteSettings | LocalSettings: 122 | settings = _settings_ctx.get() 123 | if settings is None: 124 | raise 
import sys
import click
import logging

import src.config.config as config
from src.commands import init_command, start_command


@click.group()
def cli():
    """SingleStore MCP server command-line interface."""
    pass


@cli.command()
@click.option(
    "--transport",
    type=click.Choice(["stdio", "sse", "http"], case_sensitive=True),
    required=False,
    default="stdio",
    help="Transport mode: stdio (local) or sse/http (remote)",
)
@click.option(
    "--api-key",
    type=str,
    default=None,
    help="API key for authentication on stdio transport (optional - will use browser authentication if not provided)",
)
def start(transport: str, api_key: str | None):
    """
    Start the MCP server with the specified transport. Available transports:
    - stdio: Local transport for development
    - sse: Server-Sent Events for remote connections
    - http: HTTP transport for remote connections

    If no API key is provided for stdio transport, it will trigger browser authentication.
    """
    # Normalize the CLI string to the Transport enum. The alias "http" maps
    # to Transport.HTTP ("streamable-http"); passing the raw "http" string
    # through made init_settings() raise "Unsupported transport mode: http".
    transport_mode = (
        config.Transport.HTTP if transport == "http" else config.Transport(transport)
    )
    logging.info(f"Starting MCP server with transport={transport_mode}")
    start_command(transport_mode, api_key)


@cli.command()
@click.option(
    "--api-key",
    type=str,
    required=True,
    help="API key for authentication on stdio transport",
)
@click.option(
    "--client",
    type=click.Choice(["claude", "cursor"], case_sensitive=False),
    required=False,
    default="claude",
    help="LLM client to configure (default: claude)",
)
def init(api_key: str, client: str):
    """
    Configures the SingleStore MCP server for a specific LLM client. Available clients:
    - claude: Configure for Anthropic's Claude
    - cursor: Configure for Cursor's LLM
    """
    logging.info(f"Configuring SingleStore MCP server for {client}")
    sys.exit(init_command(api_key, client))


@cli.command()
def clear_auth():
    """
    Clear saved authentication credentials.
    """
    # Imported lazily so the CLI group loads without the auth stack.
    from src.auth.browser_auth import clear_credentials

    if clear_credentials():
        print("✅ Authentication credentials cleared successfully.")
    else:
        print("No credentials found to clear.")


@cli.command()
def test_auth():
    """
    Test browser authentication without starting the server.
    """
    from src.auth.browser_auth import get_authentication_token

    print("Testing browser authentication...")
    token = get_authentication_token()

    if token:
        print("✅ Authentication successful!")
        print(f"Token received (first 20 chars): {token[:20]}...")
    else:
        print("❌ Authentication failed!")


def main():
    """
    Main entry point for the MCP server CLI.
    """
    cli()


if __name__ == "__main__":
    main()