├── .github
│   └── pre-commit.yml
├── .gitignore
├── .pre-commit-config.yaml
├── LICENSE
├── README.md
├── images
│   ├── Cursor.gif
│   ├── fast-agent.gif
│   └── fast-agent.mov
├── mcp_server_snowflake
│   ├── __init__.py
│   ├── server.py
│   ├── tools.py
│   └── utils.py
├── pyproject.toml
├── services
│   └── service_config.yaml
└── uv.lock
/.github/pre-commit.yml:
--------------------------------------------------------------------------------
1 | name: pre-commit
2 |
3 | on:
4 | pull_request:
5 | push:
6 | branches: [main]
7 |
8 | jobs:
9 | pre-commit:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - uses: actions/checkout@v4
13 | - uses: actions/setup-python@v5
14 | - uses: pre-commit/action@v3.0.1
15 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # UV
98 | # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
99 | # This is especially recommended for binary packages to ensure reproducibility, and is more
100 | # commonly ignored for libraries.
101 | #uv.lock
102 |
103 | # poetry
104 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
105 | # This is especially recommended for binary packages to ensure reproducibility, and is more
106 | # commonly ignored for libraries.
107 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
108 | #poetry.lock
109 |
110 | # pdm
111 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
112 | #pdm.lock
113 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
114 | # in version control.
115 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control
116 | .pdm.toml
117 | .pdm-python
118 | .pdm-build/
119 |
120 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
121 | __pypackages__/
122 |
123 | # Celery stuff
124 | celerybeat-schedule
125 | celerybeat.pid
126 |
127 | # SageMath parsed files
128 | *.sage.py
129 |
130 | # Environments
131 | .env
132 | .venv
133 | env/
134 | venv/
135 | ENV/
136 | env.bak/
137 | venv.bak/
138 |
139 | # Spyder project settings
140 | .spyderproject
141 | .spyproject
142 |
143 | # Rope project settings
144 | .ropeproject
145 |
146 | # mkdocs documentation
147 | /site
148 |
149 | # mypy
150 | .mypy_cache/
151 | .dmypy.json
152 | dmypy.json
153 |
154 | # Pyre type checker
155 | .pyre/
156 |
157 | # pytype static type analyzer
158 | .pytype/
159 |
160 | # Cython debug symbols
161 | cython_debug/
162 |
163 | # PyCharm
164 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
165 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
166 | # and can be added to the global gitignore or merged into this file. For a more nuclear
167 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
168 | #.idea/
169 |
170 | # Ruff stuff:
171 | .ruff_cache/
172 |
173 | # PyPI configuration file
174 | .pypirc
175 |
176 | # fast-agent files
177 | fastagent.secrets.yaml
178 | *.jsonl
179 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/pre-commit/pre-commit-hooks
3 | rev: v5.0.0
4 | hooks:
5 | - id: check-toml
6 | - id: end-of-file-fixer
7 | - id: trailing-whitespace
8 | - repo: https://github.com/astral-sh/ruff-pre-commit
9 | rev: v0.11.9
10 | hooks:
11 | - id: ruff
12 | - id: ruff-format
13 | - repo: https://github.com/astral-sh/uv-pre-commit
14 | rev: 0.7.3
15 | hooks:
16 | - id: uv-lock
17 | - repo: https://github.com/codespell-project/codespell
18 | rev: v2.4.0
19 | hooks:
20 | - id: codespell
21 | additional_dependencies:
22 | - tomli
23 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Snowflake Cortex AI Model Context Protocol (MCP) Server
2 |
3 |
4 |
5 |
6 |
7 | This Snowflake MCP server provides tooling for Snowflake Cortex AI features, bringing these capabilities to the MCP ecosystem. When connected to an MCP Client (e.g. [Claude for Desktop](https://claude.ai/download), [fast-agent](https://fast-agent.ai/), [Agentic Orchestration Framework](https://github.com/Snowflake-Labs/orchestration-framework/blob/main/README.md)), users can leverage these Cortex AI features.
8 |
9 | The MCP server currently supports the following Cortex AI capabilities:
10 | - **[Cortex Search](https://docs.snowflake.com/en/user-guide/snowflake-cortex/cortex-search/cortex-search-overview)**: Query unstructured data in Snowflake as commonly used in Retrieval Augmented Generation (RAG) applications.
11 | - **[Cortex Analyst](https://docs.snowflake.com/en/user-guide/snowflake-cortex/cortex-analyst)**: Query structured data in Snowflake via rich semantic modeling.
12 | - **[Cortex Complete](https://docs.snowflake.com/en/sql-reference/functions/complete-snowflake-cortex)**: Simple chat-completion with optional parameters using a number of available LLMs
13 | - **[Cortex Agent](https://docs.snowflake.com/en/user-guide/snowflake-cortex/cortex-agents)**: (**Coming Soon**) Agentic orchestrator across structured and unstructured data retrieval
14 |
15 | # Getting Started
16 |
17 | ## Service Configuration
18 |
19 | A simple configuration file is used to create tooling for the various Cortex AI features. An example can be seen at [services/service_config.yaml](services/service_config.yaml) and a template is below. Many Cortex Search and Cortex Analyst services can be added. Ideal descriptions are both highly descriptive and mutually exclusive. The path to this configuration file will be passed to the server and the contents used to create MCP server tools at startup.
20 |
21 | ```
22 | cortex_complete: # Set default model if one is not specified by user in Cortex Complete tool
23 | default_model: "snowflake-llama-3.3-70b"
24 | search_services: # List all Cortex Search services
25 | - service_name: ""
26 | description: > # Should start with "Search service that ..."
27 | ""
28 | database_name: ""
29 | schema_name: ""
30 | - service_name: ""
31 | description: > # Should start with "Search service that ..."
32 | ""
33 | database_name: ""
34 | schema_name: ""
35 | analyst_services: # List all Cortex Analyst semantic models/views
36 | - service_name: "" # Create descriptive name for the service
37 | semantic_model: "" # Fully-qualify semantic YAML model or Semantic View
38 | description: > # Should start with "Analyst service that ..."
39 | ""
40 | - service_name: "" # Create descriptive name for the service
41 | semantic_model: "" # Fully-qualify semantic YAML model or Semantic View
42 | description: > # Should start with "Analyst service that ..."
43 | ""
44 | ```
45 |
46 | ## Snowflake Account Identifier
47 |
48 | A Snowflake username and account identifier will be necessary to connect. From Snowsight, select your user name and [Connect a tool to Snowflake](https://docs.snowflake.com/user-guide/gen-conn-config#using-sf-web-interface-to-get-connection-settings) to obtain your Snowflake account identifier. This will be passed to the server at startup.
49 |
50 | ## Programmatic Access Token Authentication
51 |
52 | The MCP server uses [Snowflake Programmatic Access Token (PAT)](https://docs.snowflake.com/en/user-guide/programmatic-access-tokens) for authentication. Follow the [instructions](https://docs.snowflake.com/en/user-guide/programmatic-access-tokens#generating-a-programmatic-access-token) to generate a new PAT for a given user. Be sure to copy the token - it will be passed to the server at startup.
53 |
54 | > [!IMPORTANT]
55 | > PATs do not use secondary roles. Either select a specific role that has access to all desired services and their related objects OR select Any of my roles.
56 |
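Before wiring the server into an MCP client, you can sanity-check the account identifier, username, and PAT with a short connectivity test. The sketch below is illustrative only (placeholder values, not real credentials) and assumes the `snowflake-connector-python` package is installed; it authenticates the same way the server does, passing the PAT as the password.

```
# Hedged connectivity check with placeholder values - replace before running.
from snowflake.connector import connect

with connect(
    account="<account-identifier>",
    user="<username>",
    password="<programmatic-access-token>",  # the PAT is supplied as the password
) as con, con.cursor() as cur:
    print(cur.execute("SELECT CURRENT_REGION()").fetchone())
```
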
57 | # Using with MCP Clients
58 |
59 | The MCP server is client-agnostic and will work with most MCP Clients that support basic functionality for MCP tools and (optionally) resources. Below are some examples.
60 |
61 | ## [Claude Desktop](https://support.anthropic.com/en/articles/10065433-installing-claude-for-desktop)
62 | To integrate this server with Claude Desktop as the MCP Client, add the following to your app's server configuration. By default, this is located at
63 | - macOS: ~/Library/Application Support/Claude/claude_desktop_config.json
64 | - Windows: %APPDATA%\Claude\claude_desktop_config.json
65 |
66 | Set the path to the service configuration file and values for environment variables SNOWFLAKE_PAT, SNOWFLAKE_ACCOUNT, and SNOWFLAKE_USER.
67 |
68 | ```
69 | {
70 | "mcpServers": {
71 | "mcp-server-snowflake": {
72 | "command": "uvx",
73 | "args": [
74 | "--from",
75 | "git+https://github.com/Snowflake-Labs/mcp",
76 | "mcp-server-snowflake",
77 | "--service-config-file",
78 | "/service_config.yaml"
79 | ],
80 | "env": {
81 | "SNOWFLAKE_PAT": "",
82 | "SNOWFLAKE_ACCOUNT": "",
83 | "SNOWFLAKE_USER": ""
84 | }
85 | }
86 | }
87 | }
88 | ```
89 | ## [Cursor](https://www.cursor.com/)
90 | Register the MCP server in Cursor by opening Cursor and navigating to Settings -> Cursor Settings -> MCP. Add the configuration below.
91 | ```
92 | {
93 | "mcpServers": {
94 | "mcp-server-snowflake": {
95 | "command": "uvx",
96 | "args": [
97 | "--from",
98 | "git+https://github.com/Snowflake-Labs/mcp",
99 | "mcp-server-snowflake",
100 | "--service-config-file",
101 | "/service_config.yaml",
102 | "--account-identifier",
103 | "",
104 | "--username",
105 | "",
106 | "--pat",
107 | ""
108 | ]
109 | }
110 | }
111 | }
112 | ```
113 |
114 | Add the MCP server as context in the chat.
115 |
116 |
117 |
118 | For troubleshooting Cursor server issues, view the logs by opening the Output panel and selecting Cursor MCP from the dropdown menu.
119 |
120 | ## [fast-agent](https://fast-agent.ai/)
121 |
122 | Update the `fastagent.config.yaml` mcp servers section with the path to your service configuration file.
123 | ```
124 | # MCP Servers
125 | mcp:
126 | servers:
127 | mcp-server-snowflake:
128 | command: "uvx"
129 | args: ["--from", "git+https://github.com/Snowflake-Labs/mcp", "mcp-server-snowflake", "--service-config-file", "/service_config.yaml"]
130 | ```
131 |
132 | Update the `fastagent.secrets.yaml` mcp server section with environment variables.
133 | ```
134 | mcp:
135 | servers:
136 | mcp-server-snowflake:
137 | env:
138 | SNOWFLAKE_PAT:
139 | SNOWFLAKE_ACCOUNT:
140 | SNOWFLAKE_USER:
141 | ```
142 |
143 |
144 |
145 |
146 | # Troubleshooting
147 |
148 | ## Running MCP Inspector
149 |
150 | MCP Inspector is suggested for troubleshooting the MCP server. Run the command below to launch the inspector. Be sure that the service config file path and the SNOWFLAKE_ACCOUNT, SNOWFLAKE_USER, and SNOWFLAKE_PAT values are set accordingly.
151 |
152 | `npx @modelcontextprotocol/inspector uvx --from "git+https://github.com/Snowflake-Labs/mcp" mcp-server-snowflake --service-config-file "/service_config.yaml" --account-identifier $SNOWFLAKE_ACCOUNT --username $SNOWFLAKE_USER --pat $SNOWFLAKE_PAT`
153 |
154 | # FAQs
155 |
156 | #### How do I try this?
157 |
158 | - The MCP server is intended to be used as one part of the MCP ecosystem. Think of it as a collection of tools. You'll need an MCP Client to act as an orchestrator. See the [MCP Introduction](https://modelcontextprotocol.io/introduction) for more information.
159 |
160 | #### Where is this deployed? Is this in Snowpark Container Services?
161 |
162 | - All tools in this MCP server are managed services, accessible via REST API. No separate remote service deployment is necessary. Instead, the current version of the server is intended to be started by the MCP client, such as Claude Desktop, Cursor, or fast-agent. By configuring these MCP clients with the server, the application will spin up the server for you. Future versions of the MCP server may be deployed as a remote service.
163 |
164 | #### I'm receiving permission errors from my tool calls.
165 |
166 | - Programmatic Access Tokens do not evaluate secondary roles. When creating them, please select a single role that has access to all services and their underlying objects OR select "Any of my roles". A new PAT will need to be created to alter this property.
167 |
168 | #### How many Cortex Search or Cortex Analyst services can I add?
169 |
170 | - You may add multiple instances of both services. The MCP Client will determine the appropriate one(s) to use based on the user's prompt.
171 |
172 | # Bug Reports, Feedback, or Other Questions
173 |
174 | Please add issues to the GitHub repository.
175 |
--------------------------------------------------------------------------------
/images/Cursor.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Snowflake-Labs/mcp/bd6093e19616864392bef64b7c04298bf81c265c/images/Cursor.gif
--------------------------------------------------------------------------------
/images/fast-agent.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Snowflake-Labs/mcp/bd6093e19616864392bef64b7c04298bf81c265c/images/fast-agent.gif
--------------------------------------------------------------------------------
/images/fast-agent.mov:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Snowflake-Labs/mcp/bd6093e19616864392bef64b7c04298bf81c265c/images/fast-agent.mov
--------------------------------------------------------------------------------
/mcp_server_snowflake/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright 2025 Snowflake Inc.
2 | # SPDX-License-Identifier: Apache-2.0
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | # http://www.apache.org/licenses/LICENSE-2.0
7 | # Unless required by applicable law or agreed to in writing, software
8 | # distributed under the License is distributed on an "AS IS" BASIS,
9 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10 | # See the License for the specific language governing permissions and
11 | # limitations under the License.
12 | """
13 | Snowflake MCP Server Package.
14 |
15 | This package provides a Model Context Protocol (MCP) server implementation for
16 | interacting with Snowflake's Cortex AI services. The server enables seamless
17 | integration with Snowflake's machine learning and AI capabilities through a
18 | standardized protocol interface.
19 |
20 | The package supports:
21 | - Cortex Complete: Large language model completions and chat
22 | - Cortex Search: Semantic search across Snowflake data
23 | - Cortex Analyst: Natural language to SQL query generation
24 | - Model discovery: Identify available models in region
25 |
26 | The server can be configured through command-line arguments or environment
27 | variables and uses a YAML configuration file to define service specifications.
28 |
29 | Environment Variables
30 | ---------------------
31 | SNOWFLAKE_ACCOUNT : str
32 | Snowflake account identifier (alternative to --account-identifier)
33 | SNOWFLAKE_USER : str
34 | Snowflake username (alternative to --username)
35 | SNOWFLAKE_PAT : str
36 | Programmatic Access Token (alternative to --pat)
37 | SERVICE_CONFIG_FILE : str
38 | Path to service configuration file (alternative to --service-config-file)
39 |
40 | """
41 |
42 | import asyncio
43 | import argparse
44 | import os
45 |
46 | from . import server
47 | from mcp_server_snowflake.utils import MissingArgumentsException
48 |
49 |
50 | def get_var(var_name: str, env_var_name: str, args) -> str | None:
51 | """
52 | Retrieve variable value from command line arguments or environment variables.
53 |
54 | Checks for a variable value first in command line arguments, then falls back
55 | to environment variables. This provides flexible configuration options for
56 | the MCP server.
57 |
58 | Parameters
59 | ----------
60 | var_name : str
61 | The attribute name to check in the command line arguments object
62 | env_var_name : str
63 | The environment variable name to check if command line arg is not provided
64 | args : argparse.Namespace
65 | Parsed command line arguments object
66 |
67 | Returns
68 | -------
69 | str | None
70 | The variable value if found in either source, None otherwise
71 |
72 | Examples
73 | --------
74 | Get account identifier from args or environment:
75 |
76 | >>> args = parser.parse_args(['--account-identifier', 'myaccount'])
77 | >>> get_var('account_identifier', 'SNOWFLAKE_ACCOUNT', args)
78 | 'myaccount'
79 |
80 | >>> os.environ['SNOWFLAKE_ACCOUNT'] = 'myaccount'
81 | >>> args = parser.parse_args([])
82 | >>> get_var('account_identifier', 'SNOWFLAKE_ACCOUNT', args)
83 | 'myaccount'
84 | """
85 |
86 | if getattr(args, var_name):
87 | return getattr(args, var_name)
88 | elif env_var_name in os.environ:
89 | return os.environ[env_var_name]
90 | else:
91 | return None
92 |
93 |
94 | def main():
95 | """
96 | Main entry point for the Snowflake MCP server package.
97 |
98 | Parses command line arguments, retrieves configuration from arguments or
99 | environment variables, validates required parameters, and starts the
100 | asyncio-based MCP server. The server handles Model Context Protocol
101 | communications over stdin/stdout streams.
102 |
103 | The function sets up argument parsing for Snowflake connection parameters
104 | and service configuration, then delegates to the main server implementation.
105 |
106 | Raises
107 | ------
108 | MissingArgumentsException
109 | If required parameters (account_identifier and pat) are not provided
110 | through either command line arguments or environment variables
111 | SystemExit
112 | If argument parsing fails or help is requested
113 |
114 | Notes
115 | -----
116 | The server requires these minimum parameters:
117 | - account_identifier: Snowflake account identifier
118 | - username: Snowflake username
119 | - pat: Programmatic Access Token for authentication
120 | - service-config-file: Path to service configuration file
121 |
122 | """
123 | parser = argparse.ArgumentParser(description="Snowflake MCP Server")
124 |
125 | parser.add_argument(
126 | "--account-identifier", required=False, help="Snowflake account identifier"
127 | )
128 | parser.add_argument(
129 | "--username", required=False, help="Username for Snowflake account"
130 | )
131 | parser.add_argument(
132 | "--pat", required=False, help="Programmatic Access Token (PAT) for Snowflake"
133 | )
134 | parser.add_argument(
135 | "--service-config-file",
136 | required=False,
137 | help="Path to service specification file",
138 | )
139 |
140 | args = parser.parse_args()
141 | account_identifier = get_var("account_identifier", "SNOWFLAKE_ACCOUNT", args)
142 | username = get_var("username", "SNOWFLAKE_USER", args)
143 | pat = get_var("pat", "SNOWFLAKE_PAT", args)
144 | service_config_file = get_var("service_config_file", "SERVICE_CONFIG_FILE", args)
145 |
146 | parameters = dict(
147 | account_identifier=account_identifier,
148 | username=username,
149 | pat=pat,
150 | service_config_file=service_config_file,
151 | )
152 |
153 | if not all(parameters.values()):
154 | raise MissingArgumentsException(
155 | missing=[k for k, v in parameters.items() if not v]
156 | ) from None
157 | asyncio.run(
158 | server.main(
159 | account_identifier=account_identifier,
160 | username=username,
161 | pat=pat,
162 | config_path=service_config_file,
163 | )
164 | )
165 |
166 |
167 | # Optionally expose other important items at package level
168 | __all__ = ["main", "server"]
169 |
--------------------------------------------------------------------------------
/mcp_server_snowflake/server.py:
--------------------------------------------------------------------------------
1 | # Copyright 2025 Snowflake Inc.
2 | # SPDX-License-Identifier: Apache-2.0
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | # http://www.apache.org/licenses/LICENSE-2.0
7 | # Unless required by applicable law or agreed to in writing, software
8 | # distributed under the License is distributed on an "AS IS" BASIS,
9 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10 | # See the License for the specific language governing permissions and
11 | # limitations under the License.
12 | import logging
13 | from typing import Optional
14 | from pydantic import AnyUrl
15 | import yaml
16 | import json
17 | from pathlib import Path
18 |
19 | from mcp.server import Server, NotificationOptions
20 | import mcp.types as types
21 | import mcp.server.stdio
22 | from mcp.server.models import InitializationOptions
23 | from snowflake.connector import connect
24 |
25 | import mcp_server_snowflake.tools as tools
26 |
27 | config_file_uri = Path(__file__).parent.parent / "services" / "service_config.yaml"
28 | server_name = "mcp-server-snowflake"
29 | server_version = "0.0.1"
30 | tag_major_version = 1
31 | tag_minor_version = 0
32 |
33 | logger = logging.getLogger(server_name)
34 |
35 |
36 | class SnowflakeService:
37 | """
38 | Snowflake service configuration and management.
39 |
40 | This class handles the configuration and setup of Snowflake Cortex services
41 | including search, analyst, and agent services. It loads service specifications
42 | from a YAML configuration file and provides access to service parameters.
43 |
44 | Parameters
45 | ----------
46 | account_identifier : str, optional
47 | Snowflake account identifier
48 | username : str, optional
49 | Snowflake username for authentication
50 | pat : str, optional
51 | Programmatic Access Token for Snowflake authentication
52 | config_path : str, optional
53 | Path to the service configuration YAML file
54 |
55 | Attributes
56 | ----------
57 | account_identifier : str
58 | Snowflake account identifier
59 | username : str
60 | Snowflake username
61 | pat : str
62 | Programmatic Access Token
63 | config_path : str
64 | Path to configuration file
65 | default_complete_model : str
66 | Default model for Cortex Complete operations
67 | search_services : list
68 | List of configured search service specifications
69 | analyst_services : list
70 | List of configured analyst service specifications
71 | agent_services : list
72 | List of configured agent service specifications
73 | """
74 |
75 | def __init__(
76 | self,
77 | account_identifier: Optional[str] = None,
78 | username: Optional[str] = None,
79 | pat: Optional[str] = None,
80 | config_path: Optional[str] = None,
81 | ):
82 | self.account_identifier = account_identifier
83 | self.username = username
84 | self.pat = pat
85 | self.config_path = config_path
86 | self.default_complete_model = None
87 | self.search_services = []
88 | self.analyst_services = []
89 | self.agent_services = []
90 | self.unpack_service_specs()
91 | self.set_query_tag(
92 | major_version=tag_major_version, minor_version=tag_minor_version
93 | )
94 |
95 | def unpack_service_specs(self) -> None:
96 | """
97 | Load and parse service specifications from configuration file.
98 |
99 | Reads the YAML configuration file and extracts service specifications
100 | for search, analyst, and agent services. Also sets the default
101 | completion model.
102 | """
103 | try:
104 | # Load the service configuration from a YAML file
105 | with open(self.config_path, "r") as file:
106 | service_config = yaml.safe_load(file)
107 | except FileNotFoundError:
108 | logger.error(f"Service configuration file not found: {self.config_path}")
109 | raise
110 | except yaml.YAMLError as e:
111 | logger.error(f"Error parsing YAML file: {e}")
112 | raise
113 | except Exception as e:
114 | logger.error(f"Unexpected error loading service config: {e}")
115 | raise
116 |
117 | # Extract the service specifications
118 | try:
119 | self.search_services = service_config.get("search_services", [])
120 | self.analyst_services = service_config.get("analyst_services", [])
121 | self.agent_services = service_config.get(
122 | "agent_services", []
123 | ) # Not supported yet
124 | self.default_complete_model = service_config.get("cortex_complete", {}).get(
125 | "default_model", None
126 | )
127 | except Exception as e:
128 | logger.error(f"Error extracting service specifications: {e}")
129 | raise
130 |
131 | if self.default_complete_model is None:
132 | logger.warning(
133 | "No default model found in the service specification. Using snowflake-llama-3.3-70b as default."
134 | )
135 |
136 | def set_query_tag(
137 | self,
138 | query_tag: dict[str, str] = {"origin": "sf_sit", "name": "mcp_server"},
139 | major_version: Optional[int] = None,
140 | minor_version: Optional[int] = None,
141 | ) -> None:
142 | """
143 | Set the query tag for the Snowflake service.
144 |
145 | Parameters
146 | ----------
147 | query_tag : dict[str, str], optional
148 | Query tag dictionary
149 | major_version : int, optional
150 | Major version of the query tag
151 | minor_version : int, optional
152 | Minor version of the query tag
153 | """
154 | if major_version is not None and minor_version is not None:
155 | query_tag["version"] = {"major": major_version, "minor": minor_version}
156 |
157 | try:
158 | with (
159 | connect(
160 | account=self.account_identifier,
161 | user=self.username,
162 | password=self.pat,
163 | session_parameters={
164 | "QUERY_TAG": json.dumps(query_tag),
165 | },
166 | ) as con,
167 | con.cursor() as cur,
168 | ):
169 | cur.execute("SELECT 1").fetchone()
170 | except Exception as e:
171 | logger.warning(f"Error setting query tag: {e}")
172 |
173 |
174 | async def load_service_config_resource(file_path: str) -> str:
175 | """
176 | Load service configuration from YAML file as JSON string.
177 |
178 | Parameters
179 | ----------
180 | file_path : str
181 | Path to the YAML configuration file
182 |
183 | Returns
184 | -------
185 | str
186 | JSON string representation of the configuration
187 |
188 | Raises
189 | ------
190 | FileNotFoundError
191 | If the configuration file cannot be found
192 | yaml.YAMLError
193 | If the YAML file is malformed
194 | """
195 | with open(file_path, "r") as file:
196 | service_config = yaml.safe_load(file)
197 |
198 | return json.dumps(service_config)
199 |
200 |
201 | async def main(account_identifier: str, username: str, pat: str, config_path: str):
202 | """
203 | Main server setup and execution function.
204 |
205 | Initializes the Snowflake MCP server with the provided credentials and
206 | configuration. Sets up resource handlers, tool handlers, and starts
207 | the server using stdio streams.
208 |
209 | Parameters
210 | ----------
211 | account_identifier : str
212 | Snowflake account identifier
213 | username : str
214 | Snowflake username for authentication
215 | pat : str
216 | Programmatic Access Token for Snowflake authentication
217 | config_path : str
218 | Path to the service configuration YAML file
219 |
220 | Raises
221 | ------
222 | ValueError
223 | If required parameters are missing or invalid
224 | ConnectionError
225 | If unable to connect to Snowflake services
226 | """
227 | snowflake_service = SnowflakeService(
228 | account_identifier=account_identifier,
229 | username=username,
230 | pat=pat,
231 | config_path=config_path,
232 | ) # noqa F841
233 | server = Server("snowflake") # noqa F841
234 |
235 | # For DEBUGGING
236 | logger.info("Starting Snowflake MCP server")
237 |
238 | @server.list_resources()
239 | async def list_resources() -> list[types.Resource]:
240 | """
241 | List available resources.
242 |
243 | Returns
244 | -------
245 | list[types.Resource]
246 | List of available resources including service configuration
247 | """
248 | return [
249 | types.Resource(
250 | uri=config_file_uri.as_uri(),
251 | name="Service Specification Configuration",
252 | description="Service Specification Configuration",
253 | mimeType="application/yaml",
254 | )
255 | ]
256 |
257 | @server.read_resource()
258 | async def read_resource(uri: AnyUrl) -> str:
259 | """
260 | Read resource content by URI.
261 |
262 | Parameters
263 | ----------
264 | uri : AnyUrl
265 | URI of the resource to read
266 |
267 | Returns
268 | -------
269 | str
270 | Resource content as string
271 |
272 | Raises
273 | ------
274 | ValueError
275 | If the requested resource URI is not found
276 | """
277 | if str(uri) == config_file_uri.as_uri():
278 | service_config = await load_service_config_resource(
279 | snowflake_service.config_path
280 | )
281 |
282 | return service_config
283 |
284 | @server.list_tools()
285 | async def handle_list_tools() -> list[types.Tool]:
286 | """
287 | List available tools.
288 |
289 | Returns all available tools including base tools (complete, models,
290 | specification) and dynamically generated tools from service
291 | configurations (search and analyst services).
292 |
293 | Returns
294 | -------
295 | list[types.Tool]
296 | List of all available tools
297 | """
298 | # Define tool types for Cortex Search Service
299 | search_tools_types = tools.get_cortex_search_tool_types(
300 | snowflake_service.search_services
301 | )
302 | # Define tool types for Cortex Analyst Service
303 | analyst_tools_types = tools.get_cortex_analyst_tool_types(
304 | snowflake_service.analyst_services
305 | )
306 | # Tools that are not dynamically instantiated based on config file
307 | base_tools = [
308 | # Cortex Complete Tool Type
309 | tools.get_cortex_complete_tool_type(),
310 | # Get model cards
311 | tools.get_cortex_models_tool_type(),
312 | # Get spec config file
313 | types.Tool(
314 | name="get-specification-resource",
315 | description="""Retrieves the service specification resource""",
316 | inputSchema={"type": "object", "properties": {}, "required": []},
317 | ),
318 | ]
319 |
320 | return base_tools + search_tools_types + analyst_tools_types
321 |
322 | @server.call_tool()
323 | async def handle_call_tool(
324 | name: str, arguments: dict | None
325 | ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]:
326 | """
327 | Handle tool execution requests.
328 |
329 | Routes tool calls to appropriate handlers based on tool name.
330 | Supports specification retrieval, model management, completion,
331 | search, and analyst tools.
332 |
333 | Parameters
334 | ----------
335 | name : str
336 | Name of the tool to execute
337 | arguments : dict, optional
338 | Tool-specific arguments
339 |
340 | Returns
341 | -------
342 | list[types.TextContent | types.ImageContent | types.EmbeddedResource]
343 | Tool execution results
344 |
345 | Raises
346 | ------
347 | ValueError
348 | If required parameters are missing or tool is not found
349 | """
350 | if name == "get-specification-resource":
351 | spec = await read_resource(config_file_uri.as_uri())
352 | return [
353 | types.EmbeddedResource(
354 | type="resource",
355 | resource=types.TextResourceContents(
356 | text=spec,
357 | uri=config_file_uri.as_uri(),
358 | mimeType="application/json",
359 | ),
360 | )
361 | ]
362 |
363 | if name == "get-model-cards":
364 | # Call the cortex_complete function
365 | response = await tools.get_cortex_models(
366 | account_identifier=snowflake_service.account_identifier,
367 | username=snowflake_service.username,
368 | PAT=snowflake_service.pat,
369 | )
370 |
371 | if response:
372 | return [types.TextContent(type="text", text=json.dumps(response))]
373 | else:
374 | raise ValueError("No model cards found.")
375 |
376 | if name == "cortex-complete":
377 | # Validate required parameters
378 | prompt = arguments.get("prompt")
379 | if not prompt:
380 | raise ValueError("Missing required parameters")
381 |
382 | model = arguments.get("model")
383 | if not model:
384 | model = snowflake_service.default_complete_model
385 |
386 | response_format = arguments.get("response_format")
387 |
388 | # Call the cortex_complete function
389 | response = await tools.cortex_complete(
390 | prompt=prompt,
391 | model=model,
392 | account_identifier=snowflake_service.account_identifier,
393 | PAT=snowflake_service.pat,
394 | response_format=response_format,
395 | )
396 |
397 | return [types.TextContent(type="text", text=str(response))]
398 |
399 | if name in [
400 | spec.get("service_name") for spec in snowflake_service.search_services
401 | ]:
402 | # Find the corresponding service specification
403 | service_spec = next(
404 | (
405 | spec
406 | for spec in snowflake_service.search_services
407 | if spec.get("service_name") == name
408 | ),
409 | None,
410 | )
411 | if not service_spec:
412 | raise ValueError(f"Service specification for {name} not found")
413 |
414 | # Extract parameters from the service specification
415 | database_name = service_spec.get("database_name")
416 | schema_name = service_spec.get("schema_name")
417 |
418 | # Validate required parameters
419 | query = arguments.get("query")
420 | columns = arguments.get("columns", [])
421 | filter_query = arguments.get("filter_query", None)
422 | if not query:
423 | raise ValueError("Missing required parameters")
424 |
425 | # Call the query_cortex_search function
426 | response = await tools.query_cortex_search(
427 | account_identifier=snowflake_service.account_identifier,
428 | service_name=name,
429 | database_name=database_name,
430 | schema_name=schema_name,
431 | query=query,
432 | PAT=snowflake_service.pat,
433 | columns=columns,
434 | filter_query=filter_query,
435 | )
436 |
437 | return [types.TextContent(type="text", text=str(response))]
438 |
439 | if name in [
440 | spec.get("service_name") for spec in snowflake_service.analyst_services
441 | ]:
442 | # Find the corresponding service specification
443 | service_spec = next(
444 | (
445 | spec
446 | for spec in snowflake_service.analyst_services
447 | if spec.get("service_name") == name
448 | ),
449 | None,
450 | )
451 | if not service_spec:
452 | raise ValueError(f"Service specification for {name} not found")
453 |
454 | # Extract parameters from the service specification
455 | semantic_model = service_spec.get("semantic_model")
456 |
457 | # Validate required parameters
458 | query = arguments.get("query")
459 | if not query:
460 | raise ValueError("Missing required parameters")
461 |
462 | # Call the query_cortex_analyst function
463 | response = await tools.query_cortex_analyst(
464 | account_identifier=snowflake_service.account_identifier,
465 | semantic_model=semantic_model,
466 | query=query,
467 | username=snowflake_service.username,
468 | PAT=snowflake_service.pat,
469 | )
470 |
471 | return [types.TextContent(type="text", text=str(response))]
472 |
473 | # Run the server using stdin/stdout streams
474 | async with mcp.server.stdio.stdio_server() as (read_stream, write_stream):
475 | await server.run(
476 | read_stream,
477 | write_stream,
478 | InitializationOptions(
479 | server_name=server_name,
480 | server_version=server_version,
481 | capabilities=server.get_capabilities(
482 | notification_options=NotificationOptions(),
483 | experimental_capabilities={},
484 | ),
485 | ),
486 | )
487 |
--------------------------------------------------------------------------------
/mcp_server_snowflake/tools.py:
--------------------------------------------------------------------------------
1 | # Copyright 2025 Snowflake Inc.
2 | # SPDX-License-Identifier: Apache-2.0
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | # http://www.apache.org/licenses/LICENSE-2.0
7 | # Unless required by applicable law or agreed to in writing, software
8 | # distributed under the License is distributed on an "AS IS" BASIS,
9 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10 | # See the License for the specific language governing permissions and
11 | # limitations under the License.
12 | import requests
13 | from typing import Optional
14 | from collections import OrderedDict
15 |
16 | import mcp.types as types
17 | from bs4 import BeautifulSoup
18 | from snowflake.connector import DictCursor
19 | from snowflake.connector import connect
20 |
21 | from mcp_server_snowflake.utils import SnowflakeResponse, SnowflakeException
22 |
23 |
24 | sfse = SnowflakeResponse() # For parsing Snowflake responses
25 |
26 |
27 | # Cortex Search Service
28 | @sfse.snowflake_response(api="search")
29 | async def query_cortex_search(
30 | account_identifier: str,
31 | service_name: str,
32 | database_name: str,
33 | schema_name: str,
34 | query: str,
35 | PAT: str,
36 | columns: Optional[list[str]] = None,
37 | filter_query: Optional[dict] = {},
38 | ) -> dict:
39 | """
40 | Query a Cortex Search Service using the REST API.
41 |
42 | Performs semantic search against a configured Cortex Search service using
43 | Snowflake's REST API. Supports filtering and column selection for refined
44 | search results.
45 |
46 | Parameters
47 | ----------
48 | account_identifier : str
49 | Snowflake account identifier
50 | service_name : str
51 | Name of the Cortex Search Service
52 | database_name : str
53 | Target database containing the search service
54 | schema_name : str
55 | Target schema containing the search service
56 | query : str
57 | The search query string to submit to Cortex Search
58 | PAT : str
59 | Programmatic Access Token for authentication
60 | columns : list[str], optional
61 | List of columns to return for each relevant result, by default None
62 | filter_query : dict, optional
63 | Filter query to apply to search results, by default {}
64 |
65 | Returns
66 | -------
67 | dict
68 | JSON response from the Cortex Search API containing search results
69 |
70 | Raises
71 | ------
72 | SnowflakeException
73 | If the API request fails or returns an error status code
74 |
75 | References
76 | ----------
77 | Snowflake Cortex Search REST API:
78 | https://docs.snowflake.com/developer-guide/snowflake-rest-api/reference/cortex-search-service
79 | """
80 | base_url = f"https://{account_identifier}.snowflakecomputing.com/api/v2/databases/{database_name}/schemas/{schema_name}/cortex-search-services/{service_name}:query"
81 |
82 | headers = {
83 | "X-Snowflake-Authorization-Token-Type": "PROGRAMMATIC_ACCESS_TOKEN",
84 | "Authorization": f"Bearer {PAT}",
85 | "Content-Type": "application/json",
86 | "Accept": "application/json, text/event-stream",
87 | }
88 |
89 | payload = {
90 | "query": query,
91 | "filter": filter_query,
92 | }
93 |
94 | if isinstance(columns, list) and len(columns) > 0:
95 | payload["columns"] = columns
96 |
97 | response = requests.post(base_url, headers=headers, json=payload)
98 |
99 | if response.status_code == 200:
100 | return response
101 | else:
102 | raise SnowflakeException(
103 | tool="Cortex Search",
104 | status_code=response.status_code,
105 | message=response.text,
106 | )
107 |
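# Usage sketch (illustrative only, not part of the module): calling
# query_cortex_search directly with a filter. The service, database, schema,
# and column names below are hypothetical placeholders; the filter uses the
# @eq operator described in the tool schema generated further down.
#
#   import asyncio
#
#   results = asyncio.run(
#       query_cortex_search(
#           account_identifier="<account-identifier>",
#           service_name="product_docs_search",
#           database_name="DOCS_DB",
#           schema_name="PUBLIC",
#           query="how do I rotate a programmatic access token?",
#           PAT="<programmatic-access-token>",
#           columns=["title", "chunk"],
#           filter_query={"@eq": {"language": "en"}},
#       )
#   )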
108 |
109 | def get_cortex_search_tool_types(search_services: list[dict]) -> list[types.Tool]:
110 | """
111 | Generate MCP tool definitions for configured search services.
112 |
113 | Creates tool specifications for each configured Cortex Search service,
114 | including input schemas with query parameters, column selection, and
115 | filtering options.
116 |
117 | Parameters
118 | ----------
119 | search_services : list[dict]
120 | List of search service configuration dictionaries containing
121 | service_name, description, and other service metadata
122 |
123 | Returns
124 | -------
125 | list[types.Tool]
126 | List of MCP Tool objects with complete input schemas for search operations
127 |
128 | Notes
129 | -----
130 | The generated tools support advanced filtering with operators:
131 | - @eq: Equality matching for text/numeric values
132 | - @contains: Array contains matching
133 | - @gte/@lte: Numeric/date range filtering
134 | - @and/@or/@not: Logical operators for complex filters
135 | """
136 |
137 | return [
138 | types.Tool(
139 | name=x.get("service_name"),
140 | description=x.get("description"),
141 | inputSchema={
142 | "type": "object",
143 | "properties": {
144 | "query": {
145 | "type": "string",
146 | "description": "User query to search in search service",
147 | },
148 | "columns": {
149 | "type": "array",
150 | "description": "Optional list of columns to return for each relevant result in the response.",
151 | },
152 | "filter_query": {
153 | "type": "object",
154 | "description": """Cortex Search supports filtering on the ATTRIBUTES columns specified in the CREATE CORTEX SEARCH SERVICE command.
155 |
156 | Cortex Search supports four matching operators:
157 |
158 | 1. TEXT or NUMERIC equality: @eq
159 | 2. ARRAY contains: @contains
160 | 3. NUMERIC or DATE/TIMESTAMP greater than or equal to: @gte
161 | 4. NUMERIC or DATE/TIMESTAMP less than or equal to: @lte
162 |
163 | These matching operators can be composed with various logical operators:
164 |
165 | - @and
166 | - @or
167 | - @not
168 |
169 | The following usage notes apply:
170 |
171 | Matching against NaN ('not a number') values in the source query is handled as
172 | described in Special values. Fixed-point numeric values with more than 19 digits (not
173 | including leading zeroes) do not work with @eq, @gte, or @lte and will not be returned
174 | by these operators (although they could still be returned by the overall query with the
175 | use of @not).
176 |
177 | TIMESTAMP and DATE filters accept values of the form: YYYY-MM-DD and, for timezone
178 | aware dates: YYYY-MM-DD+HH:MM. If the timezone offset is not specified, the date is
179 | interpreted in UTC.
180 |
181 | These operators can be combined into a single filter object.
182 |
183 | Example:
184 | Filtering on rows where NUMERIC column numeric_col is between 10.5 and 12.5 (inclusive):
185 |
186 | { "@and": [
187 | { "@gte": { "numeric_col": 10.5 } },
188 | { "@lte": { "numeric_col": 12.5 } }
189 | ]}""",
190 | },
191 | },
192 | "required": ["query"],
193 | },
194 | )
195 | for x in search_services
196 | ]
197 |
198 |
199 | # Cortex Complete Service
200 | @sfse.snowflake_response(api="complete")
201 | async def cortex_complete(
202 | prompt: str,
203 | model: str,
204 | account_identifier: str,
205 | PAT: str,
206 | response_format: Optional[dict] = None,
207 | ) -> dict:
208 | """
209 | Generate text completions using Snowflake Cortex Complete API.
210 |
211 | Sends a chat completion request to Snowflake's Cortex Complete service
212 | using the specified language model. Supports structured JSON responses
213 | when a response format is provided.
214 |
215 | Parameters
216 | ----------
217 | prompt : str
218 | User prompt message to send to the language model
219 | model : str
220 | Snowflake Cortex LLM model name to use for completion
221 | account_identifier : str
222 | Snowflake account identifier
223 | PAT : str
224 | Programmatic Access Token for authentication
225 | response_format : dict, optional
226 | JSON schema for structured response format, by default None
227 |
228 | Returns
229 | -------
230 | dict
231 | JSON response from the Cortex Complete API containing the generated text
232 |
233 | Raises
234 | ------
235 | SnowflakeException
236 | If the API request fails or returns an error status code
237 |
238 | Notes
239 | -----
240 | The temperature is set to 0.0 for deterministic responses. The response_format
241 | parameter allows for structured JSON outputs following a provided schema.
242 | """
243 | base_url = f"https://{account_identifier}.snowflakecomputing.com/api/v2/cortex/inference:complete"
244 |
245 | headers = {
246 | "X-Snowflake-Authorization-Token-Type": "PROGRAMMATIC_ACCESS_TOKEN",
247 | "Authorization": f"Bearer {PAT}",
248 | "Content-Type": "application/json",
249 | "Accept": "application/json, text/event-stream",
250 | }
251 | payload = {
252 | "model": model,
253 | "messages": [{"role": "user", "content": prompt}],
254 | "temperature": 0.0,
255 | }
256 |
257 | # Add response_format to payload if provided
258 | if response_format is not None:
259 | payload["response_format"] = response_format
260 |
261 | response = requests.post(base_url, headers=headers, json=payload)
262 |
263 | if response.status_code == 200:
264 | return response
265 | else:
266 | raise SnowflakeException(
267 | tool="Cortex Complete",
268 | status_code=response.status_code,
269 | message=response.text,
270 | )
271 |
272 |
273 | def get_cortex_complete_tool_type():
274 | """
275 | Generate MCP tool definition for Cortex Complete service.
276 |
277 | Creates a tool specification for the Cortex Complete LLM service with
278 | support for prompt input, model selection, and structured JSON responses.
279 |
280 | Returns
281 | -------
282 | types.Tool
283 | MCP Tool object with complete input schema for LLM completion operations
284 |
285 | Notes
286 | -----
287 | The tool supports optional structured JSON responses through the response_format
288 | parameter, which accepts a JSON schema defining the expected output structure.
289 | """
290 | return types.Tool(
291 | name="cortex-complete",
292 | description="""Simple LLM chat completion API using Cortex Complete""",
293 | inputSchema={
294 | "type": "object",
295 | "properties": {
296 | "prompt": {
297 | "type": "string",
298 | "description": "User prompt message to send to the LLM",
299 | },
300 | "model": {
301 | "type": "string",
302 | "description": "Optional Snowflake Cortex LLM Model name to use.",
303 | },
304 | "response_format": {
305 | "type": "object",
306 | "description": """Optional JSON response format to use for the LLM response.
307 | Type must be 'json' and schema must be a valid JSON schema.
308 | Example:
309 | {
310 | "type": "json",
311 | "schema": {
312 | "type": "object",
313 | "properties": {
314 | "people": {
315 | "type": "array",
316 | "items": {
317 | "type": "object",
318 | "properties": {
319 | "name": {
320 | "type": "string"
321 | },
322 | "age": {
323 | "type": "number"
324 | }
325 | },
326 | "required": ["name", "age"]
327 | }
328 | }
329 | },
330 | "required": ["people"]
331 | }
332 | }
333 | """,
334 | },
335 | },
336 | "required": ["prompt"],
337 | },
338 | )
339 |
340 |
341 | def get_region(
342 | account_identifier: str,
343 | username: str,
344 | PAT: str,
345 | ) -> str:
346 | """
347 | Retrieve the current region of the Snowflake account.
348 |
349 | Executes a SQL query to determine the region where the Snowflake
350 | account is located using the CURRENT_REGION() function.
351 |
352 | Parameters
353 | ----------
354 | account_identifier : str
355 | Snowflake account identifier
356 | username : str
357 | Snowflake username for authentication
358 | PAT : str
359 | Programmatic Access Token for authentication
360 |
361 | Returns
362 | -------
363 | str
364 | The region name where the Snowflake account is located
365 |
366 | Raises
367 | ------
368 | snowflake.connector.errors.Error
369 | If connection to Snowflake fails or query execution fails
370 | """
371 |
372 | statement = "SELECT CURRENT_REGION()"
373 | with (
374 | connect(
375 | account=account_identifier,
376 | user=username,
377 | password=PAT,
378 | ) as con,
379 | con.cursor(DictCursor) as cur,
380 | ):
381 | cur.execute(statement)
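        # With DictCursor, the single row is keyed by the selected expression text,
        # i.e. "CURRENT_REGION()".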
382 | return cur.fetchone().get("CURRENT_REGION()")
383 |
384 |
385 | async def get_cortex_models(
386 | account_identifier: str,
387 | username: str,
388 | PAT: str,
389 | url: str = "https://docs.snowflake.com/en/user-guide/snowflake-cortex/cortex-llm-rest-api#model-availability",
390 | ) -> str | dict[str, list[dict[str, str]] | str]:
391 | """
392 | Retrieve available Cortex Complete model information from Snowflake documentation.
393 |
394 | Scrapes the Snowflake documentation to get current model availability
395 | information specifically for the REST API and combines it with the account's region
396 | information.
397 |
398 | Parameters
399 | ----------
400 | account_identifier : str
401 | Snowflake account identifier
402 | username : str
403 | Snowflake username for authentication
404 | PAT : str
405 | Programmatic Access Token for authentication
406 | url : str, optional
407 | URL to Snowflake Cortex model documentation, by default official docs URL
408 |
409 | Returns
410 | -------
411 | str | dict[str, list[dict[str, str]] | str]
412 | Either an error message string or a dictionary containing:
413 | - 'current_region': The account's region
414 | - 'model_availability': List of available models with their details
415 | """
416 |
417 | # Send HTTP request
418 | response = requests.get(url)
419 | if response.status_code != 200:
420 |         return f"Failed to retrieve the page {url} (status code {response.status_code})"
421 |
422 | # Parse HTML
423 | soup = BeautifulSoup(response.content, "html.parser")
424 |
425 |     # Find the model availability section (could be a table or other format)
426 |     section = soup.find(id="model-availability")
427 |     if not section and (heading := soup.find(string="Model availability")):
428 |         section = heading.find_parent("section")
429 |
430 | if not section:
431 | return (
432 | f"Failed to retrieve model availability from the docs. Please visit {url}."
433 | )
434 |
435 | else:
436 | # Process the specific section if found
437 | tables = section.find_all("table")
438 | if tables:
439 | model_data = []
440 | table = tables[0]
441 |
442 | # Get headers
443 | headers = []
444 | for th in table.find_all("th"):
445 | headers.append(th.text.strip())
446 |
447 | # Extract rows
448 | for row in table.find_all("tr")[1:]: # Skip header row
449 | cells = row.find_all(["td", "th"])
450 | if cells:
451 | row_data = {}
452 | for i, cell in enumerate(cells):
453 | if i < len(headers):
454 | row_data[headers[i]] = cell.text.strip()
455 | model_data.append(row_data)
456 |
457 | return OrderedDict(
458 | [
459 | ("current_region", get_region(account_identifier, username, PAT)),
460 | ("model_availability", model_data),
461 | ]
462 | )
463 | else:
464 | return f"No model availability table found at {url}."
465 |
466 |
467 | def get_cortex_models_tool_type():
468 | """
469 | Generate MCP tool definition for retrieving Cortex model information.
470 |
471 | Creates a tool specification for fetching available Cortex Complete
472 | models and their regional availability.
473 |
474 | Returns
475 | -------
476 | types.Tool
477 | MCP Tool object for retrieving model cards and availability information
478 | """
479 | return types.Tool(
480 | name="get-model-cards",
481 | description="""Retrieves available model cards in Snowflake Cortex REST API""",
482 | inputSchema={"type": "object", "properties": {}, "required": []},
483 | )
484 |
485 |
486 | # Cortex Analyst Service
487 | @sfse.snowflake_response(api="analyst")
488 | async def query_cortex_analyst(
489 | account_identifier: str,
490 | semantic_model: str,
491 | query: str,
492 | username: str,
493 | PAT: str,
494 | ) -> dict:
495 | """
496 | Query Snowflake Cortex Analyst service for natural language to SQL conversion.
497 |
498 | Sends a natural language query to the Cortex Analyst service, which
499 | interprets the query against a semantic model and generates appropriate
500 | SQL responses with explanations.
501 |
502 | Parameters
503 | ----------
504 | account_identifier : str
505 | Snowflake account identifier
506 | semantic_model : str
507 | Fully qualified path to YAML semantic file or Snowflake Semantic View.
508 | Examples:
509 | - "@my_db.my_schema.my_stage/my_semantic_model.yaml"
510 | - "MY_DB.MY_SCH.MY_SEMANTIC_VIEW"
511 | query : str
512 | Natural language query string to submit to Cortex Analyst
513 | username : str
514 | Snowflake username for authentication
515 | PAT : str
516 | Programmatic Access Token for authentication
517 |
518 | Returns
519 | -------
520 | dict
521 | JSON response from the Cortex Analyst API containing generated SQL,
522 | explanations, and query results
523 |
524 | Raises
525 | ------
526 | SnowflakeException
527 | If the API request fails or returns an error status code
528 |
529 | Notes
530 | -----
531 | The function automatically detects whether the semantic_model parameter
532 | refers to a YAML file (starts with @ and ends with .yaml) or a semantic view.
533 | Currently configured for non-streaming responses.
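
    Examples
    --------
    Hypothetical call against a placeholder semantic view; all identifiers below are
    illustrative only:

    >>> await query_cortex_analyst(
    ...     account_identifier="myorg-myaccount",
    ...     semantic_model="MY_DB.MY_SCH.MY_SEMANTIC_VIEW",
    ...     query="What was total revenue by region last quarter?",
    ...     username="MY_USER",
    ...     PAT="<programmatic-access-token>",
    ... )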
534 | """
535 | base_url = f"https://{account_identifier}.snowflakecomputing.com/api/v2/cortex/analyst/message"
536 |
537 | headers = {
538 | "X-Snowflake-Authorization-Token-Type": "PROGRAMMATIC_ACCESS_TOKEN",
539 | "Authorization": f"Bearer {PAT}",
540 | "Content-Type": "application/json",
541 | "Accept": "application/json, text/event-stream",
542 | }
543 |
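    # A stage path of the form "@db.schema.stage/model.yaml" selects a semantic model
    # file; any other value is treated as a Snowflake Semantic View.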
544 | if semantic_model.startswith("@") and semantic_model.endswith(".yaml"):
545 | semantic_type = "semantic_model_file"
546 | else:
547 | semantic_type = "semantic_model_view"
548 |
549 | payload = {
550 | "messages": [
551 | {
552 | "role": "user",
553 | "content": [
554 | {
555 | "type": "text",
556 | "text": query,
557 | }
558 | ],
559 | }
560 | ],
561 | semantic_type: semantic_model,
562 | "stream": False,
563 | }
564 |
565 | response = requests.post(base_url, headers=headers, json=payload)
566 |
567 | if response.status_code == 200:
568 | return response
569 |
570 | else:
571 | raise SnowflakeException(
572 | tool="Cortex Analyst",
573 | status_code=response.status_code,
574 | message=response.text,
575 | )
576 |
577 |
578 | def get_cortex_analyst_tool_types(analyst_services: list[dict]) -> list[types.Tool]:
579 | """
580 | Generate MCP tool definitions for configured Cortex Analyst services.
581 |
582 | Creates tool specifications for each configured Cortex Analyst service,
583 | enabling natural language querying against semantic models.
584 |
585 | Parameters
586 | ----------
587 | analyst_services : list[dict]
588 | List of analyst service configuration dictionaries containing
589 | service_name, description, and semantic model references
590 |
591 | Returns
592 | -------
593 | list[types.Tool]
594 | List of MCP Tool objects with input schemas for natural language queries
595 | """
596 |
597 | return [
598 | types.Tool(
599 | name=x.get("service_name"),
600 | description=x.get("description"),
601 | inputSchema={
602 | "type": "object",
603 | "properties": {
604 | "query": {
605 | "type": "string",
606 | "description": "A rephrased natural language prompt from the user.",
607 | },
608 | },
609 | "required": ["query"],
610 | },
611 | )
612 | for x in analyst_services
613 | ]
614 |
--------------------------------------------------------------------------------
/mcp_server_snowflake/utils.py:
--------------------------------------------------------------------------------
1 | # Copyright 2025 Snowflake Inc.
2 | # SPDX-License-Identifier: Apache-2.0
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | # http://www.apache.org/licenses/LICENSE-2.0
7 | # Unless required by applicable law or agreed to in writing, software
8 | # distributed under the License is distributed on an "AS IS" BASIS,
9 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
10 | # See the License for the specific language governing permissions and
11 | # limitations under the License.
12 | import requests
13 | from functools import wraps
14 | from typing import Awaitable, Callable, TypeVar, Optional, Union
15 | from typing_extensions import ParamSpec
16 | import json
17 | from snowflake.connector import DictCursor
18 | from snowflake.connector import connect
19 | from pydantic import BaseModel
20 | import ast
21 | from textwrap import dedent
22 |
23 | P = ParamSpec("P")
24 | R = TypeVar("R")
25 |
26 |
27 | class AnalystResponse(BaseModel):
28 | """
29 | Response model for Cortex Analyst API results.
30 |
31 | Represents the structured response from Cortex Analyst containing
32 | natural language text, generated SQL, and query execution results.
33 |
34 | Attributes
35 | ----------
36 | text : str
37 | Natural language response text from the analyst
38 | sql : str, optional
39 | Generated SQL query, by default None
40 | results : dict | list, optional
41 | Query execution results if SQL was executed, by default None
42 | """
43 |
44 | text: str
45 | sql: Optional[str] = None
46 | results: Optional[Union[dict, list]] = None
47 |
48 |
49 | class SearchResponse(BaseModel):
50 | """
51 | Response model for Cortex Search API results.
52 |
53 | Represents the structured response from Cortex Search containing
54 | search results and metadata.
55 |
56 | Attributes
57 | ----------
58 | results : str | dict | list
59 | Search results in various formats depending on query and configuration
60 | """
61 |
62 | results: Union[str, dict, list]
63 |
64 |
65 | class CompleteResponse(BaseModel):
66 | """
67 | Response model for Cortex Complete API results.
68 |
69 | Represents the response from Cortex Complete for unstructured text generation.
70 |
71 | Attributes
72 | ----------
73 | results : str | dict | list
74 | Generated text or content from the language model
75 | """
76 |
77 | results: Union[str, dict, list]
78 |
79 |
80 | class CompleteResponseStructured(BaseModel):
81 | """
82 | Response model for structured Cortex Complete API results.
83 |
84 | Represents the response from Cortex Complete when using structured
85 | JSON output with a defined schema.
86 |
87 | Attributes
88 | ----------
89 | results : dict | list
90 | Structured data conforming to the provided JSON schema
91 | """
92 |
93 | results: Union[dict, list]
94 |
95 |
96 | class SnowflakeResponse:
97 | """
98 | Response parser and decorator provider for Snowflake Cortex APIs.
99 |
100 | This class provides decorators and parsing methods for handling responses
101 | from different Snowflake Cortex services. It processes Server-Sent Events (SSE),
102 | executes SQL queries, and formats responses consistently across all services.
103 |
104 | The class supports three main API types:
105 | - complete: Language model completion responses
106 | - analyst: Cortex Analyst responses
107 | - search: Cortex search responses
108 |
109 | Examples
110 | --------
111 | Basic usage with decorator:
112 |
113 | >>> sfse = SnowflakeResponse()
114 | >>> @sfse.snowflake_response(api="complete")
115 | ... async def my_complete_function():
116 | ... # Function implementation
117 | ... pass
118 |
119 | Methods
120 | -------
121 | fetch_results(statement, **kwargs)
122 | Execute SQL statement and fetch results
123 | parse_analyst_response(response, **kwargs)
124 | Parse Cortex Analyst API responses
125 | parse_search_response(response)
126 | Parse Cortex Search API responses
127 | parse_llm_response(response, structured=False)
128 | Parse Cortex Complete API responses
129 | snowflake_response(api)
130 | Decorator factory for response parsing
131 | """
132 |
133 | def fetch_results(self, statement: str, **kwargs):
134 | """
135 | Execute SQL statement and fetch all results using Snowflake connector.
136 |
137 | Establishes a connection to Snowflake, executes the provided SQL statement,
138 | and returns all results using a dictionary cursor for easier data access.
139 |
140 | Parameters
141 | ----------
142 | statement : str
143 | SQL statement to execute
144 | **kwargs
145 | Connection parameters including account, user, password
146 |
147 | Returns
148 | -------
149 | list[dict]
150 | List of dictionaries containing query results with column names as keys
151 |
152 | Raises
153 | ------
154 | snowflake.connector.errors.Error
155 | If connection fails or SQL execution encounters an error
156 | """
157 | with (
158 | connect(**kwargs) as con,
159 | con.cursor(DictCursor) as cur,
160 | ):
161 | cur.execute(statement)
162 | return cur.fetchall()
163 |
164 | def parse_analyst_response(
165 | self, response: requests.Response | dict, **kwargs
166 | ) -> str:
167 | """
168 | Parse Cortex Analyst API response and execute any generated SQL.
169 |
170 | Processes the analyst response to extract natural language text and
171 | SQL statements. If SQL is present, executes it against Snowflake
172 | and includes the results in the parsed response.
173 |
174 | Parameters
175 | ----------
176 | response : requests.Response | dict
177 | Raw response from Cortex Analyst API
178 | **kwargs
179 | Connection parameters for SQL execution (account, user, password)
180 |
181 | Returns
182 | -------
183 | str
184 | JSON string containing parsed analyst response with text, SQL, and results
185 | """
186 | content = response.json().get("message", {"content": []}).get("content", [])
187 | res = {}
188 | for item in content:
189 | if item.get("type") == "text":
190 | res["text"] = item.get("text", "")
191 |
192 | elif item.get("type") == "sql":
193 | res["sql"] = item.get("statement", "")
194 | if item.get("statement"):
195 | res["results"] = self.fetch_results(statement=res["sql"], **kwargs)
196 | response = AnalystResponse(**res)
197 | return response.model_dump_json()
198 |
199 | def parse_search_response(self, response: requests.Response | dict) -> str:
200 | """
201 | Parse Cortex Search API response into structured format.
202 |
203 | Extracts search results from the API response and formats them
204 | using the SearchResponse model for consistent output structure.
205 |
206 | Parameters
207 | ----------
208 | response : requests.Response | dict
209 | Raw response from Cortex Search API
210 |
211 | Returns
212 | -------
213 | str
214 | JSON string containing formatted search results
215 | """
216 | content = response.json()
217 | ret = SearchResponse(results=content.get("results", []))
218 | return ret.model_dump_json()
219 |
220 | def parse_llm_response(
221 | self, response: requests.models.Response | dict, structured: bool = False
222 | ) -> str | list | dict:
223 | """
224 | Parse Cortex Complete LLM API response from Server-Sent Events.
225 |
226 | Processes streaming SSE response from the Cortex Complete API,
227 | extracting text content and optionally parsing structured JSON
228 | responses based on provided schemas.
229 |
230 | Parameters
231 | ----------
232 | response : requests.models.Response | dict
233 | Raw streaming response from Cortex Complete API
234 | structured : bool, optional
235 | Whether to parse response as structured JSON, by default False
236 |
237 | Returns
238 | -------
239 | str | list | dict
240 | JSON string containing either plain text or structured data
241 | depending on the structured parameter
242 |
243 | Raises
244 | ------
245 | json.JSONDecodeError
246 | If SSE event data cannot be parsed as JSON
247 | SyntaxError
248 | If structured response cannot be parsed as valid Python literal
249 | """
250 | sse_events = dict(events=[])
251 | content_text = []
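        # Each streamed SSE line has the form b'data: {json}'; decode it and collect
        # the JSON payloads as events.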
252 | for event in response.iter_lines():
253 | if bool(event.strip()):
254 | if event.decode("utf-8").startswith("data: "):
255 | event_row = event.decode("utf-8").removeprefix("data: ")
256 | try:
257 | sse_events["events"].append(json.loads(event_row))
258 | except json.JSONDecodeError as JDE:
259 | raise (JDE)
260 |
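        # Concatenate the text deltas carried by the first choice of each event.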
261 | for event in sse_events["events"]:
262 | delta = event.get("choices")[0].get("delta", {})
263 | if delta.get("type") == "text":
264 | if content := delta.get("content"):
265 | content_text.append(content)
266 |
267 | if structured:
268 | ret = CompleteResponseStructured(
269 | results=ast.literal_eval("".join(content_text))
270 | )
271 | else:
272 | ret = CompleteResponse(results="".join(content_text))
273 |
274 | return ret.model_dump_json()
275 |
276 | def snowflake_response(
277 | self,
278 | api: str,
279 | ) -> Callable[[Callable[P, Awaitable[R]]], Callable[P, Awaitable[R]]]:
280 | """
281 | Decorator factory for consistent response parsing across Cortex APIs.
282 |
283 | Creates a decorator that automatically parses responses from different
284 | Cortex API endpoints based on the specified API type. The decorator
285 | handles the raw API response and returns formatted, structured data.
286 |
287 | Parameters
288 | ----------
289 | api : str
290 | API type to handle. Must be one of: "complete", "analyst", "search"
291 |
292 | Returns
293 | -------
294 | Callable
295 | Decorator function that wraps async functions to provide response parsing
296 |
297 | Examples
298 | --------
299 | Decorating a function for Cortex Complete API:
300 |
301 | >>> @sfse.snowflake_response(api="complete")
302 | ... async def my_completion_function(prompt, **kwargs):
303 | ... # Make API call
304 | ... return raw_response
305 | """
306 |
307 | def cortex_wrapper(
308 | func: Callable[P, Awaitable[R]],
309 | ) -> Callable[P, Awaitable[R]]:
310 | @wraps(func)
311 | async def response_parsers(*args: P.args, **kwargs: P.kwargs) -> R:
312 | raw_sse = await func(*args, **kwargs)
313 | conn_kwargs = dict(
314 | account=kwargs.get("account_identifier", ""),
315 | user=kwargs.get("username", ""),
316 | password=kwargs.get("PAT", ""),
317 | )
318 | match api:
319 | case "complete":
320 | structured = kwargs.get("response_format", {})
321 | parsed = self.parse_llm_response(
322 | response=raw_sse, structured=bool(structured)
323 | )
324 | case "analyst":
325 | parsed = self.parse_analyst_response(
326 | response=raw_sse, **conn_kwargs
327 | )
328 | case "search":
329 | parsed = self.parse_search_response(response=raw_sse)
330 | return parsed
331 |
332 | return response_parsers
333 |
334 | return cortex_wrapper
335 |
336 |
337 | class SnowflakeException(Exception):
338 | """
339 | Custom exception class for Snowflake API errors.
340 |
341 | Provides enhanced error handling for Snowflake Cortex API operations
342 | with specific error messages based on HTTP status codes and error types.
343 |
344 | Parameters
345 | ----------
346 | tool : str
347 | Name of the Cortex tool that generated the error
348 | message : str
349 | Raw error message from the API
350 | status_code : int, optional
351 | HTTP status code from the API response, by default None
352 |
353 | Attributes
354 | ----------
355 | tool : str
356 | The Cortex service that generated the error
357 | message : str
358 | Original error message from the API
359 | status_code : int
360 | HTTP status code associated with the error
361 |
362 | Methods
363 | -------
364 | __str__()
365 | Returns formatted error message based on status code and content
366 |
367 | Examples
368 | --------
369 | Raising a Snowflake exception:
370 |
371 | >>> raise SnowflakeException(
372 | ... tool="Cortex Complete",
373 | ... message="Model not found",
374 | ... status_code=400
375 | ... )
376 | """
377 |
378 | def __init__(self, tool: str, message, status_code: Optional[int] = None):
379 | self.message = message
380 | self.status_code = status_code
381 | super().__init__(message)
382 | self.tool = tool
383 |
384 | def __str__(self):
385 | """
386 | Format error message based on status code and error content.
387 |
388 | Provides user-friendly error messages with specific guidance
389 | based on common HTTP status codes and error patterns.
390 |
391 | Returns
392 | -------
393 | str
394 | Formatted error message with tool name, description, and guidance
395 |
396 | Notes
397 | -----
398 | Status code handling:
399 | - 400: Bad request errors with model validation
400 | - 401: Authorization/authentication errors
401 | - Other codes: Generic error with status code
402 | """
403 | if self.status_code == 400:
404 | if "unknown model" in self.message:
405 | return f"{self.tool} Error: Selected model not available or invalid.\n\nError Message: {self.message} "
406 | else:
407 | return f"{self.tool} Error: The resource cannot be found.\n\nError Message: {self.message} "
408 |
409 | elif self.status_code == 401:
410 | return f"{self.tool} Error: An authorization error occurred.\n\nError Message: {self.message} "
411 | else:
412 | return f"{self.tool} Error: An error has occurred.\n\nError Message: {self.message} \n Code: {self.status_code}"
413 |
414 |
415 | class MissingArgumentsException(Exception):
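    """
    Exception raised when required arguments are not supplied.

    Parameters
    ----------
    missing : list
        Names of the required arguments that were not provided via the
        command line or environment variables
    """
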
416 | def __init__(self, missing: list):
417 | self.missing = missing
418 | super().__init__(missing)
419 |
420 | def __str__(self):
421 | missing_str = "\n\t\t".join(["--" + i for i in self.missing])
422 | message = f"""
423 | -----------------------------------------------------------------------------------
424 | Required arguments missing:
425 | \t{missing_str}
426 | These values must be specified as command-line arguments or environment variables
427 | -----------------------------------------------------------------------------------"""
428 |
429 | return dedent(message)
430 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "mcp-server-snowflake"
3 | version = "0.1.0"
4 | description = "MCP server for Snowflake"
5 | readme = "README.md"
6 | requires-python = ">=3.11"
7 | authors = [
8 | {name = "Carlos Serrano"},
9 | {name = "Jason Summer"},
10 | {name = "Tyler White"},
11 | ]
12 | dependencies = [
13 | "beautifulsoup4>=4.13.4",
14 | "mcp[cli]>=1.8.0",
15 | "pydantic>=2.11.4",
16 | "pyyaml>=6.0.2",
17 | "requests>=2.32.3",
18 | "snowflake-connector-python>=3.15.0",
19 | ]
20 |
21 | [dependency-groups]
22 | dev = [
23 | "fast-agent-mcp>=0.2.24",
24 | "pre-commit>=4.2.0",
25 | "pytest>=8.3.5",
26 | "python-dotenv>=1.1.0",
27 | "ruff>=0.11.8",
28 | ]
29 |
30 | [build-system]
31 | requires = ["hatchling>=1.0.0"]
32 | build-backend = "hatchling.build"
33 |
34 | [project.scripts]
35 | mcp-server-snowflake = "mcp_server_snowflake:main"
36 |
--------------------------------------------------------------------------------
/services/service_config.yaml:
--------------------------------------------------------------------------------
1 | cortex_complete: # Set default model if one is not specified by user in Cortex Complete tool
2 | default_model: "snowflake-llama-3.3-70b"
3 | search_services: # List all Cortex Search services
4 | - service_name: ""
5 | description: > # Should start with "Search service that ..."
6 | ""
7 | database_name: ""
8 | schema_name: ""
9 | - service_name: ""
10 | description: > # Should start with "Search service that ..."
11 | ""
12 | database_name: ""
13 | schema_name: ""
14 | analyst_services: # List all Cortex Analyst semantic models/views
15 | - service_name: "" # Create descriptive name for the service
16 | semantic_model: "" # Fully-qualify semantic YAML model or Semantic View
17 | description: > # Should start with "Analyst service that ..."
18 | ""
19 | - service_name: "" # Create descriptive name for the service
20 | semantic_model: "" # Fully-qualify semantic YAML model or Semantic View
21 | description: > # Should start with "Analyst service that ..."
22 | ""
23 |
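# Illustrative, fully hypothetical example of a completed configuration (all names
# below are placeholders, not real Snowflake objects):
# search_services:
#   - service_name: "support_docs_search"
#     description: >
#       "Search service that searches product support documentation."
#     database_name: "SUPPORT_DB"
#     schema_name: "PUBLIC"
# analyst_services:
#   - service_name: "sales_analyst"
#     semantic_model: "SALES_DB.ANALYTICS.SALES_SEMANTIC_VIEW"
#     description: >
#       "Analyst service that answers questions about sales performance."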
--------------------------------------------------------------------------------