├── .python-version
├── .DS_Store
├── assets
│   ├── hammer-icon.png
│   └── addon-instructions.png
├── .gitignore
├── main.py
├── src
│   └── blender_mcp
│       ├── __init__.py
│       └── server.py
├── pyproject.toml
├── LICENSE
├── README.md
├── uv.lock
└── addon.py
/.python-version:
--------------------------------------------------------------------------------
1 | 3.13.2
2 |
--------------------------------------------------------------------------------
/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nanashi1526/blender-mcp/HEAD/.DS_Store
--------------------------------------------------------------------------------
/assets/hammer-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nanashi1526/blender-mcp/HEAD/assets/hammer-icon.png
--------------------------------------------------------------------------------
/assets/addon-instructions.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Nanashi1526/blender-mcp/HEAD/assets/addon-instructions.png
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Python-generated files
2 | __pycache__/
3 | *.py[oc]
4 | build/
5 | dist/
6 | wheels/
7 | *.egg-info
8 |
9 | # Virtual environments
10 | .venv
11 |
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
1 | from blender_mcp.server import main as server_main
2 |
3 | def main():
4 | """Entry point for the blender-mcp package"""
5 | server_main()
6 |
7 | if __name__ == "__main__":
8 | main()
9 |
--------------------------------------------------------------------------------
/src/blender_mcp/__init__.py:
--------------------------------------------------------------------------------
1 | """Blender integration through the Model Context Protocol."""
2 |
3 | __version__ = "0.1.0"
4 |
5 | # Expose key classes and functions for easier imports
6 | from .server import BlenderConnection, get_blender_connection
7 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "blender-mcp"
3 | version = "1.2"
4 | description = "Blender integration through the Model Context Protocol"
5 | readme = "README.md"
6 | requires-python = ">=3.10"
7 | authors = [
8 | {name = "Your Name", email = "your.email@example.com"}
9 | ]
10 | license = {text = "MIT"}
11 | classifiers = [
12 | "Programming Language :: Python :: 3",
13 | "License :: OSI Approved :: MIT License",
14 | "Operating System :: OS Independent",
15 | ]
16 | dependencies = [
17 | "mcp[cli]>=1.3.0",
18 | ]
19 |
20 | [project.scripts]
21 | blender-mcp = "blender_mcp.server:main"
22 |
23 | [build-system]
24 | requires = ["setuptools>=61.0", "wheel"]
25 | build-backend = "setuptools.build_meta"
26 |
27 | [tool.setuptools]
28 | package-dir = {"" = "src"}
29 |
30 | [project.urls]
31 | "Homepage" = "https://github.com/yourusername/blender-mcp"
32 | "Bug Tracker" = "https://github.com/yourusername/blender-mcp/issues"
33 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2025 Siddharth Ahuja
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | # BlenderMCP - Blender Model Context Protocol Integration
4 |
5 | BlenderMCP connects Blender to Claude AI through the Model Context Protocol (MCP), allowing Claude to directly interact with and control Blender. This integration enables prompt-assisted 3D modeling, scene creation, and manipulation.
6 |
7 | [Full tutorial](https://www.youtube.com/watch?v=lCyQ717DuzQ)
8 |
9 | ### Join the Community
10 |
11 | Give feedback, get inspired, and build on top of the MCP: [Discord](https://discord.gg/z5apgR8TFU)
12 |
13 | ### Supporters
14 |
15 |
16 | Special thanks to:
17 |
18 |
19 |
20 |
21 |
22 |
23 | ### [Warp, the intelligent terminal for developers](https://www.warp.dev/blender-mcp)
24 | [Available for MacOS, Linux, & Windows](https://www.warp.dev/blender-mcp)
25 |
26 |
27 |
28 |
29 | **Other supporters:**
30 |
31 | [CodeRabbit](https://www.coderabbit.ai/)
32 |
33 | **All supporters:**
34 |
35 | [Support this project](https://github.com/sponsors/ahujasid)
36 |
37 | ## Release notes (1.2.0)
38 | - View screenshots of the Blender viewport to better understand the scene
39 | - Search and download Sketchfab models
40 |
41 |
42 | ### Previously added features:
43 | - Support for Poly Haven assets through their API
44 | - Support to generate 3D models using Hyper3D Rodin
45 | - Newcomers can go straight to Installation. Existing users, see the points below:
46 | - Download the latest addon.py file and replace the older one, then add it to Blender
47 | - Delete the MCP server from Claude and add it back again, and you should be good to go!
48 |
49 | ## Features
50 |
51 | - **Two-way communication**: Connect Claude AI to Blender through a socket-based server
52 | - **Object manipulation**: Create, modify, and delete 3D objects in Blender
53 | - **Material control**: Apply and modify materials and colors
54 | - **Scene inspection**: Get detailed information about the current Blender scene
55 | - **Code execution**: Run arbitrary Python code in Blender from Claude
56 |
57 | ## Components
58 |
59 | The system consists of two main components:
60 |
61 | 1. **Blender Addon (`addon.py`)**: A Blender addon that creates a socket server within Blender to receive and execute commands
62 | 2. **MCP Server (`src/blender_mcp/server.py`)**: A Python server that implements the Model Context Protocol and connects to the Blender addon (see the sketch below)
63 |
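As a rough illustration of how these two pieces talk to each other, the `BlenderConnection` class from `src/blender_mcp/server.py` can be driven directly. This is a sketch only; the host and port are illustrative and must match whatever the addon is actually listening on, and `get_scene_info` is just one example of the command types defined by the addon.

```python
# Sketch: driving the addon from the server side via BlenderConnection.
# Assumes the Blender addon's socket server is running locally; the port
# number here is illustrative -- use the one shown in the addon UI.
from blender_mcp.server import BlenderConnection

conn = BlenderConnection(host="localhost", port=9876)
if conn.connect():                                   # opens the TCP socket to the addon
    scene = conn.send_command("get_scene_info")      # returns the "result" payload
    print(scene)
    conn.disconnect()
```
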
64 | ## Installation
65 |
66 |
67 | ### Prerequisites
68 |
69 | - Blender 3.0 or newer
70 | - Python 3.10 or newer
71 | - uv package manager:
72 |
73 | **If you're on Mac, install uv with:**
74 | ```bash
75 | brew install uv
76 | ```
77 | **On Windows**
78 | ```bash
79 | powershell -c "irm https://astral.sh/uv/install.ps1 | iex"
80 | ```
81 | and then add uv to your PATH for the current session:
82 | ```bash
83 | set Path=%USERPROFILE%\.local\bin;%Path%
84 | ```
85 |
86 | Otherwise installation instructions are on their website: [Install uv](https://docs.astral.sh/uv/getting-started/installation/)
87 |
88 | **⚠️ Do not proceed before installing UV**
89 |
90 |
91 | ### Claude for Desktop Integration
92 |
93 | [Watch the setup instruction video](https://www.youtube.com/watch?v=neoK_WMq92g) (Assuming you have already installed uv)
94 |
95 | Go to Claude > Settings > Developer > Edit Config and add the following to `claude_desktop_config.json`:
96 |
97 | ```json
98 | {
99 | "mcpServers": {
100 | "blender": {
101 | "command": "uvx",
102 | "args": [
103 | "blender-mcp"
104 | ]
105 | }
106 | }
107 | }
108 | ```
109 |
110 | ### Cursor integration
111 |
112 | [Install the blender MCP server in Cursor](https://cursor.com/install-mcp?name=blender&config=eyJjb21tYW5kIjoidXZ4IGJsZW5kZXItbWNwIn0%3D)
113 |
114 | For Mac users, go to Settings > MCP and paste the following
115 |
116 | - To use as a global server, use "add new global MCP server" button and paste
117 | - To use as a project specific server, create `.cursor/mcp.json` in the root of the project and paste
118 |
119 |
120 | ```json
121 | {
122 | "mcpServers": {
123 | "blender": {
124 | "command": "uvx",
125 | "args": [
126 | "blender-mcp"
127 | ]
128 | }
129 | }
130 | }
131 | ```
132 |
133 | For Windows users, go to Settings > MCP > Add Server, add a new server with the following settings:
134 |
135 | ```json
136 | {
137 | "mcpServers": {
138 | "blender": {
139 | "command": "cmd",
140 | "args": [
141 | "/c",
142 | "uvx",
143 | "blender-mcp"
144 | ]
145 | }
146 | }
147 | }
148 | ```
149 |
150 | [Cursor setup video](https://www.youtube.com/watch?v=wgWsJshecac)
151 |
152 | **⚠️ Only run one instance of the MCP server (either on Cursor or Claude Desktop), not both**
153 |
154 | ### Installing the Blender Addon
155 |
156 | 1. Download the `addon.py` file from this repo
157 | 2. Open Blender
158 | 3. Go to Edit > Preferences > Add-ons
159 | 4. Click "Install..." and select the `addon.py` file
160 | 5. Enable the addon by checking the box next to "Interface: Blender MCP"
161 |
162 |
163 | ## Usage
164 |
165 | ### Starting the Connection
166 | 
167 |
168 | 1. In Blender, go to the 3D View sidebar (press N if not visible)
169 | 2. Find the "BlenderMCP" tab
170 | 3. Turn on the Poly Haven checkbox if you want assets from their API (optional)
171 | 4. Click "Connect to Claude"
172 | 5. Make sure the MCP server is running in your terminal
173 |
174 | ### Using with Claude
175 |
176 | Once the config file has been set in Claude and the addon is running in Blender, you will see a hammer icon with tools for the Blender MCP.
177 |
178 | 
179 |
180 | #### Capabilities
181 |
182 | - Get scene and object information
183 | - Create, delete and modify shapes
184 | - Apply or create materials for objects
185 | - Execute any Python code in Blender
186 | - Download the right models, assets and HDRIs through [Poly Haven](https://polyhaven.com/)
187 | - AI generated 3D models through [Hyper3D Rodin](https://hyper3d.ai/)
188 |
189 |
190 | ### Example Commands
191 |
192 | Here are some examples of what you can ask Claude to do:
193 |
194 | - "Create a low poly scene in a dungeon, with a dragon guarding a pot of gold" [Demo](https://www.youtube.com/watch?v=DqgKuLYUv00)
195 | - "Create a beach vibe using HDRIs, textures, and models like rocks and vegetation from Poly Haven" [Demo](https://www.youtube.com/watch?v=I29rn92gkC4)
196 | - Give a reference image, and create a Blender scene out of it [Demo](https://www.youtube.com/watch?v=FDRb03XPiRo)
197 | - "Generate a 3D model of a garden gnome through Hyper3D"
198 | - "Get information about the current scene, and make a threejs sketch from it" [Demo](https://www.youtube.com/watch?v=jxbNI5L7AH8)
199 | - "Make this car red and metallic"
200 | - "Create a sphere and place it above the cube"
201 | - "Make the lighting like a studio"
202 | - "Point the camera at the scene, and make it isometric"
203 |
204 | ## Hyper3D integration
205 |
206 | Hyper3D's free trial key allows you to generate a limited number of models per day. If the daily limit is reached, you can wait for the next day's reset or obtain your own key from hyper3d.ai and fal.ai.
207 |
208 | ## Troubleshooting
209 |
210 | - **Connection issues**: Make sure the Blender addon server is running and the MCP server is configured in Claude. Do not run the `uvx` command in the terminal yourself. Sometimes the first command won't go through, but it starts working after that.
211 | - **Timeout errors**: Try simplifying your requests or breaking them into smaller steps
212 | - **Poly Haven integration**: Claude is sometimes erratic with its behaviour
213 | - **Have you tried turning it off and on again?**: If you're still having connection errors, try restarting both Claude and the Blender server
214 |
215 |
216 | ## Technical Details
217 |
218 | ### Communication Protocol
219 |
220 | The system uses a simple JSON-based protocol over TCP sockets (a minimal example exchange is sketched after the list):
221 |
222 | - **Commands** are sent as JSON objects with a `type` and optional `params`
223 | - **Responses** are JSON objects with a `status` and `result` or `message`
224 |
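A minimal sketch of one such exchange, using only the standard library. The command name, host, and port are illustrative; real values come from the addon and the tools defined in `server.py`, and `receive_full_response` in the server reads in a loop until the JSON parses rather than relying on a single `recv`.

```python
# Hypothetical raw exchange with the addon's socket server, for illustration only.
import json
import socket

command = {"type": "get_scene_info", "params": {}}   # "params" is optional

with socket.create_connection(("localhost", 9876), timeout=15.0) as sock:
    sock.sendall(json.dumps(command).encode("utf-8"))
    raw = sock.recv(8192)        # real code should keep reading until the JSON parses

response = json.loads(raw.decode("utf-8"))
# Expected shape: {"status": "success", "result": {...}}
# or, on failure:  {"status": "error", "message": "..."}
print(response.get("status"), response.get("result") or response.get("message"))
```
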
225 | ## Limitations & Security Considerations
226 |
227 | - The `execute_blender_code` tool allows running arbitrary Python code in Blender, which can be powerful but potentially dangerous. Use with caution in production environments. ALWAYS save your work before using it.
228 | - The Poly Haven integration downloads models, textures, and HDRI images from the internet. If you do not want to use it, turn off its checkbox in Blender.
229 | - Complex operations might need to be broken down into smaller steps
230 |
231 |
232 | ## Contributing
233 |
234 | Contributions are welcome! Please feel free to submit a Pull Request.
235 |
236 | ## Disclaimer
237 |
238 | This is a third-party integration and not made by Blender. Made by [Siddharth](https://x.com/sidahuj)
239 |
--------------------------------------------------------------------------------
/uv.lock:
--------------------------------------------------------------------------------
1 | version = 1
2 | revision = 1
3 | requires-python = ">=3.10"
4 |
5 | [[package]]
6 | name = "annotated-types"
7 | version = "0.7.0"
8 | source = { registry = "https://pypi.org/simple" }
9 | sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 }
10 | wheels = [
11 | { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 },
12 | ]
13 |
14 | [[package]]
15 | name = "anyio"
16 | version = "4.8.0"
17 | source = { registry = "https://pypi.org/simple" }
18 | dependencies = [
19 | { name = "exceptiongroup", marker = "python_full_version < '3.11'" },
20 | { name = "idna" },
21 | { name = "sniffio" },
22 | { name = "typing-extensions", marker = "python_full_version < '3.13'" },
23 | ]
24 | sdist = { url = "https://files.pythonhosted.org/packages/a3/73/199a98fc2dae33535d6b8e8e6ec01f8c1d76c9adb096c6b7d64823038cde/anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a", size = 181126 }
25 | wheels = [
26 | { url = "https://files.pythonhosted.org/packages/46/eb/e7f063ad1fec6b3178a3cd82d1a3c4de82cccf283fc42746168188e1cdd5/anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a", size = 96041 },
27 | ]
28 |
29 | [[package]]
30 | name = "blender-mcp"
31 | version = "1.2"
32 | source = { editable = "." }
33 | dependencies = [
34 | { name = "mcp", extra = ["cli"] },
35 | ]
36 |
37 | [package.metadata]
38 | requires-dist = [{ name = "mcp", extras = ["cli"], specifier = ">=1.3.0" }]
39 |
40 | [[package]]
41 | name = "certifi"
42 | version = "2025.1.31"
43 | source = { registry = "https://pypi.org/simple" }
44 | sdist = { url = "https://files.pythonhosted.org/packages/1c/ab/c9f1e32b7b1bf505bf26f0ef697775960db7932abeb7b516de930ba2705f/certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651", size = 167577 }
45 | wheels = [
46 | { url = "https://files.pythonhosted.org/packages/38/fc/bce832fd4fd99766c04d1ee0eead6b0ec6486fb100ae5e74c1d91292b982/certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe", size = 166393 },
47 | ]
48 |
49 | [[package]]
50 | name = "click"
51 | version = "8.1.8"
52 | source = { registry = "https://pypi.org/simple" }
53 | dependencies = [
54 | { name = "colorama", marker = "sys_platform == 'win32'" },
55 | ]
56 | sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 }
57 | wheels = [
58 | { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188 },
59 | ]
60 |
61 | [[package]]
62 | name = "colorama"
63 | version = "0.4.6"
64 | source = { registry = "https://pypi.org/simple" }
65 | sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 }
66 | wheels = [
67 | { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 },
68 | ]
69 |
70 | [[package]]
71 | name = "exceptiongroup"
72 | version = "1.2.2"
73 | source = { registry = "https://pypi.org/simple" }
74 | sdist = { url = "https://files.pythonhosted.org/packages/09/35/2495c4ac46b980e4ca1f6ad6db102322ef3ad2410b79fdde159a4b0f3b92/exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc", size = 28883 }
75 | wheels = [
76 | { url = "https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", size = 16453 },
77 | ]
78 |
79 | [[package]]
80 | name = "h11"
81 | version = "0.14.0"
82 | source = { registry = "https://pypi.org/simple" }
83 | sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418 }
84 | wheels = [
85 | { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259 },
86 | ]
87 |
88 | [[package]]
89 | name = "httpcore"
90 | version = "1.0.7"
91 | source = { registry = "https://pypi.org/simple" }
92 | dependencies = [
93 | { name = "certifi" },
94 | { name = "h11" },
95 | ]
96 | sdist = { url = "https://files.pythonhosted.org/packages/6a/41/d7d0a89eb493922c37d343b607bc1b5da7f5be7e383740b4753ad8943e90/httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c", size = 85196 }
97 | wheels = [
98 | { url = "https://files.pythonhosted.org/packages/87/f5/72347bc88306acb359581ac4d52f23c0ef445b57157adedb9aee0cd689d2/httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd", size = 78551 },
99 | ]
100 |
101 | [[package]]
102 | name = "httpx"
103 | version = "0.28.1"
104 | source = { registry = "https://pypi.org/simple" }
105 | dependencies = [
106 | { name = "anyio" },
107 | { name = "certifi" },
108 | { name = "httpcore" },
109 | { name = "idna" },
110 | ]
111 | sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 }
112 | wheels = [
113 | { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 },
114 | ]
115 |
116 | [[package]]
117 | name = "httpx-sse"
118 | version = "0.4.0"
119 | source = { registry = "https://pypi.org/simple" }
120 | sdist = { url = "https://files.pythonhosted.org/packages/4c/60/8f4281fa9bbf3c8034fd54c0e7412e66edbab6bc74c4996bd616f8d0406e/httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721", size = 12624 }
121 | wheels = [
122 | { url = "https://files.pythonhosted.org/packages/e1/9b/a181f281f65d776426002f330c31849b86b31fc9d848db62e16f03ff739f/httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f", size = 7819 },
123 | ]
124 |
125 | [[package]]
126 | name = "idna"
127 | version = "3.10"
128 | source = { registry = "https://pypi.org/simple" }
129 | sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 }
130 | wheels = [
131 | { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 },
132 | ]
133 |
134 | [[package]]
135 | name = "markdown-it-py"
136 | version = "3.0.0"
137 | source = { registry = "https://pypi.org/simple" }
138 | dependencies = [
139 | { name = "mdurl" },
140 | ]
141 | sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 }
142 | wheels = [
143 | { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 },
144 | ]
145 |
146 | [[package]]
147 | name = "mcp"
148 | version = "1.3.0"
149 | source = { registry = "https://pypi.org/simple" }
150 | dependencies = [
151 | { name = "anyio" },
152 | { name = "httpx" },
153 | { name = "httpx-sse" },
154 | { name = "pydantic" },
155 | { name = "pydantic-settings" },
156 | { name = "sse-starlette" },
157 | { name = "starlette" },
158 | { name = "uvicorn" },
159 | ]
160 | sdist = { url = "https://files.pythonhosted.org/packages/6b/b6/81e5f2490290351fc97bf46c24ff935128cb7d34d68e3987b522f26f7ada/mcp-1.3.0.tar.gz", hash = "sha256:f409ae4482ce9d53e7ac03f3f7808bcab735bdfc0fba937453782efb43882d45", size = 150235 }
161 | wheels = [
162 | { url = "https://files.pythonhosted.org/packages/d0/d2/a9e87b506b2094f5aa9becc1af5178842701b27217fa43877353da2577e3/mcp-1.3.0-py3-none-any.whl", hash = "sha256:2829d67ce339a249f803f22eba5e90385eafcac45c94b00cab6cef7e8f217211", size = 70672 },
163 | ]
164 |
165 | [package.optional-dependencies]
166 | cli = [
167 | { name = "python-dotenv" },
168 | { name = "typer" },
169 | ]
170 |
171 | [[package]]
172 | name = "mdurl"
173 | version = "0.1.2"
174 | source = { registry = "https://pypi.org/simple" }
175 | sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 }
176 | wheels = [
177 | { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 },
178 | ]
179 |
180 | [[package]]
181 | name = "pydantic"
182 | version = "2.10.6"
183 | source = { registry = "https://pypi.org/simple" }
184 | dependencies = [
185 | { name = "annotated-types" },
186 | { name = "pydantic-core" },
187 | { name = "typing-extensions" },
188 | ]
189 | sdist = { url = "https://files.pythonhosted.org/packages/b7/ae/d5220c5c52b158b1de7ca89fc5edb72f304a70a4c540c84c8844bf4008de/pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236", size = 761681 }
190 | wheels = [
191 | { url = "https://files.pythonhosted.org/packages/f4/3c/8cc1cc84deffa6e25d2d0c688ebb80635dfdbf1dbea3e30c541c8cf4d860/pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584", size = 431696 },
192 | ]
193 |
194 | [[package]]
195 | name = "pydantic-core"
196 | version = "2.27.2"
197 | source = { registry = "https://pypi.org/simple" }
198 | dependencies = [
199 | { name = "typing-extensions" },
200 | ]
201 | sdist = { url = "https://files.pythonhosted.org/packages/fc/01/f3e5ac5e7c25833db5eb555f7b7ab24cd6f8c322d3a3ad2d67a952dc0abc/pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39", size = 413443 }
202 | wheels = [
203 | { url = "https://files.pythonhosted.org/packages/3a/bc/fed5f74b5d802cf9a03e83f60f18864e90e3aed7223adaca5ffb7a8d8d64/pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa", size = 1895938 },
204 | { url = "https://files.pythonhosted.org/packages/71/2a/185aff24ce844e39abb8dd680f4e959f0006944f4a8a0ea372d9f9ae2e53/pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c", size = 1815684 },
205 | { url = "https://files.pythonhosted.org/packages/c3/43/fafabd3d94d159d4f1ed62e383e264f146a17dd4d48453319fd782e7979e/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a", size = 1829169 },
206 | { url = "https://files.pythonhosted.org/packages/a2/d1/f2dfe1a2a637ce6800b799aa086d079998959f6f1215eb4497966efd2274/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5", size = 1867227 },
207 | { url = "https://files.pythonhosted.org/packages/7d/39/e06fcbcc1c785daa3160ccf6c1c38fea31f5754b756e34b65f74e99780b5/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c", size = 2037695 },
208 | { url = "https://files.pythonhosted.org/packages/7a/67/61291ee98e07f0650eb756d44998214231f50751ba7e13f4f325d95249ab/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7", size = 2741662 },
209 | { url = "https://files.pythonhosted.org/packages/32/90/3b15e31b88ca39e9e626630b4c4a1f5a0dfd09076366f4219429e6786076/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a", size = 1993370 },
210 | { url = "https://files.pythonhosted.org/packages/ff/83/c06d333ee3a67e2e13e07794995c1535565132940715931c1c43bfc85b11/pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236", size = 1996813 },
211 | { url = "https://files.pythonhosted.org/packages/7c/f7/89be1c8deb6e22618a74f0ca0d933fdcb8baa254753b26b25ad3acff8f74/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962", size = 2005287 },
212 | { url = "https://files.pythonhosted.org/packages/b7/7d/8eb3e23206c00ef7feee17b83a4ffa0a623eb1a9d382e56e4aa46fd15ff2/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9", size = 2128414 },
213 | { url = "https://files.pythonhosted.org/packages/4e/99/fe80f3ff8dd71a3ea15763878d464476e6cb0a2db95ff1c5c554133b6b83/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af", size = 2155301 },
214 | { url = "https://files.pythonhosted.org/packages/2b/a3/e50460b9a5789ca1451b70d4f52546fa9e2b420ba3bfa6100105c0559238/pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4", size = 1816685 },
215 | { url = "https://files.pythonhosted.org/packages/57/4c/a8838731cb0f2c2a39d3535376466de6049034d7b239c0202a64aaa05533/pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31", size = 1982876 },
216 | { url = "https://files.pythonhosted.org/packages/c2/89/f3450af9d09d44eea1f2c369f49e8f181d742f28220f88cc4dfaae91ea6e/pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc", size = 1893421 },
217 | { url = "https://files.pythonhosted.org/packages/9e/e3/71fe85af2021f3f386da42d291412e5baf6ce7716bd7101ea49c810eda90/pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7", size = 1814998 },
218 | { url = "https://files.pythonhosted.org/packages/a6/3c/724039e0d848fd69dbf5806894e26479577316c6f0f112bacaf67aa889ac/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15", size = 1826167 },
219 | { url = "https://files.pythonhosted.org/packages/2b/5b/1b29e8c1fb5f3199a9a57c1452004ff39f494bbe9bdbe9a81e18172e40d3/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306", size = 1865071 },
220 | { url = "https://files.pythonhosted.org/packages/89/6c/3985203863d76bb7d7266e36970d7e3b6385148c18a68cc8915fd8c84d57/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99", size = 2036244 },
221 | { url = "https://files.pythonhosted.org/packages/0e/41/f15316858a246b5d723f7d7f599f79e37493b2e84bfc789e58d88c209f8a/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459", size = 2737470 },
222 | { url = "https://files.pythonhosted.org/packages/a8/7c/b860618c25678bbd6d1d99dbdfdf0510ccb50790099b963ff78a124b754f/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048", size = 1992291 },
223 | { url = "https://files.pythonhosted.org/packages/bf/73/42c3742a391eccbeab39f15213ecda3104ae8682ba3c0c28069fbcb8c10d/pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d", size = 1994613 },
224 | { url = "https://files.pythonhosted.org/packages/94/7a/941e89096d1175d56f59340f3a8ebaf20762fef222c298ea96d36a6328c5/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b", size = 2002355 },
225 | { url = "https://files.pythonhosted.org/packages/6e/95/2359937a73d49e336a5a19848713555605d4d8d6940c3ec6c6c0ca4dcf25/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474", size = 2126661 },
226 | { url = "https://files.pythonhosted.org/packages/2b/4c/ca02b7bdb6012a1adef21a50625b14f43ed4d11f1fc237f9d7490aa5078c/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6", size = 2153261 },
227 | { url = "https://files.pythonhosted.org/packages/72/9d/a241db83f973049a1092a079272ffe2e3e82e98561ef6214ab53fe53b1c7/pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c", size = 1812361 },
228 | { url = "https://files.pythonhosted.org/packages/e8/ef/013f07248041b74abd48a385e2110aa3a9bbfef0fbd97d4e6d07d2f5b89a/pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc", size = 1982484 },
229 | { url = "https://files.pythonhosted.org/packages/10/1c/16b3a3e3398fd29dca77cea0a1d998d6bde3902fa2706985191e2313cc76/pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4", size = 1867102 },
230 | { url = "https://files.pythonhosted.org/packages/d6/74/51c8a5482ca447871c93e142d9d4a92ead74de6c8dc5e66733e22c9bba89/pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0", size = 1893127 },
231 | { url = "https://files.pythonhosted.org/packages/d3/f3/c97e80721735868313c58b89d2de85fa80fe8dfeeed84dc51598b92a135e/pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef", size = 1811340 },
232 | { url = "https://files.pythonhosted.org/packages/9e/91/840ec1375e686dbae1bd80a9e46c26a1e0083e1186abc610efa3d9a36180/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7", size = 1822900 },
233 | { url = "https://files.pythonhosted.org/packages/f6/31/4240bc96025035500c18adc149aa6ffdf1a0062a4b525c932065ceb4d868/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934", size = 1869177 },
234 | { url = "https://files.pythonhosted.org/packages/fa/20/02fbaadb7808be578317015c462655c317a77a7c8f0ef274bc016a784c54/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6", size = 2038046 },
235 | { url = "https://files.pythonhosted.org/packages/06/86/7f306b904e6c9eccf0668248b3f272090e49c275bc488a7b88b0823444a4/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c", size = 2685386 },
236 | { url = "https://files.pythonhosted.org/packages/8d/f0/49129b27c43396581a635d8710dae54a791b17dfc50c70164866bbf865e3/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2", size = 1997060 },
237 | { url = "https://files.pythonhosted.org/packages/0d/0f/943b4af7cd416c477fd40b187036c4f89b416a33d3cc0ab7b82708a667aa/pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4", size = 2004870 },
238 | { url = "https://files.pythonhosted.org/packages/35/40/aea70b5b1a63911c53a4c8117c0a828d6790483f858041f47bab0b779f44/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3", size = 1999822 },
239 | { url = "https://files.pythonhosted.org/packages/f2/b3/807b94fd337d58effc5498fd1a7a4d9d59af4133e83e32ae39a96fddec9d/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4", size = 2130364 },
240 | { url = "https://files.pythonhosted.org/packages/fc/df/791c827cd4ee6efd59248dca9369fb35e80a9484462c33c6649a8d02b565/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57", size = 2158303 },
241 | { url = "https://files.pythonhosted.org/packages/9b/67/4e197c300976af185b7cef4c02203e175fb127e414125916bf1128b639a9/pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc", size = 1834064 },
242 | { url = "https://files.pythonhosted.org/packages/1f/ea/cd7209a889163b8dcca139fe32b9687dd05249161a3edda62860430457a5/pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9", size = 1989046 },
243 | { url = "https://files.pythonhosted.org/packages/bc/49/c54baab2f4658c26ac633d798dab66b4c3a9bbf47cff5284e9c182f4137a/pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b", size = 1885092 },
244 | { url = "https://files.pythonhosted.org/packages/41/b1/9bc383f48f8002f99104e3acff6cba1231b29ef76cfa45d1506a5cad1f84/pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b", size = 1892709 },
245 | { url = "https://files.pythonhosted.org/packages/10/6c/e62b8657b834f3eb2961b49ec8e301eb99946245e70bf42c8817350cbefc/pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154", size = 1811273 },
246 | { url = "https://files.pythonhosted.org/packages/ba/15/52cfe49c8c986e081b863b102d6b859d9defc63446b642ccbbb3742bf371/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9", size = 1823027 },
247 | { url = "https://files.pythonhosted.org/packages/b1/1c/b6f402cfc18ec0024120602bdbcebc7bdd5b856528c013bd4d13865ca473/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9", size = 1868888 },
248 | { url = "https://files.pythonhosted.org/packages/bd/7b/8cb75b66ac37bc2975a3b7de99f3c6f355fcc4d89820b61dffa8f1e81677/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1", size = 2037738 },
249 | { url = "https://files.pythonhosted.org/packages/c8/f1/786d8fe78970a06f61df22cba58e365ce304bf9b9f46cc71c8c424e0c334/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a", size = 2685138 },
250 | { url = "https://files.pythonhosted.org/packages/a6/74/d12b2cd841d8724dc8ffb13fc5cef86566a53ed358103150209ecd5d1999/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e", size = 1997025 },
251 | { url = "https://files.pythonhosted.org/packages/a0/6e/940bcd631bc4d9a06c9539b51f070b66e8f370ed0933f392db6ff350d873/pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4", size = 2004633 },
252 | { url = "https://files.pythonhosted.org/packages/50/cc/a46b34f1708d82498c227d5d80ce615b2dd502ddcfd8376fc14a36655af1/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27", size = 1999404 },
253 | { url = "https://files.pythonhosted.org/packages/ca/2d/c365cfa930ed23bc58c41463bae347d1005537dc8db79e998af8ba28d35e/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee", size = 2130130 },
254 | { url = "https://files.pythonhosted.org/packages/f4/d7/eb64d015c350b7cdb371145b54d96c919d4db516817f31cd1c650cae3b21/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1", size = 2157946 },
255 | { url = "https://files.pythonhosted.org/packages/a4/99/bddde3ddde76c03b65dfd5a66ab436c4e58ffc42927d4ff1198ffbf96f5f/pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130", size = 1834387 },
256 | { url = "https://files.pythonhosted.org/packages/71/47/82b5e846e01b26ac6f1893d3c5f9f3a2eb6ba79be26eef0b759b4fe72946/pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee", size = 1990453 },
257 | { url = "https://files.pythonhosted.org/packages/51/b2/b2b50d5ecf21acf870190ae5d093602d95f66c9c31f9d5de6062eb329ad1/pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b", size = 1885186 },
258 | { url = "https://files.pythonhosted.org/packages/46/72/af70981a341500419e67d5cb45abe552a7c74b66326ac8877588488da1ac/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e", size = 1891159 },
259 | { url = "https://files.pythonhosted.org/packages/ad/3d/c5913cccdef93e0a6a95c2d057d2c2cba347815c845cda79ddd3c0f5e17d/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8", size = 1768331 },
260 | { url = "https://files.pythonhosted.org/packages/f6/f0/a3ae8fbee269e4934f14e2e0e00928f9346c5943174f2811193113e58252/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3", size = 1822467 },
261 | { url = "https://files.pythonhosted.org/packages/d7/7a/7bbf241a04e9f9ea24cd5874354a83526d639b02674648af3f350554276c/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f", size = 1979797 },
262 | { url = "https://files.pythonhosted.org/packages/4f/5f/4784c6107731f89e0005a92ecb8a2efeafdb55eb992b8e9d0a2be5199335/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133", size = 1987839 },
263 | { url = "https://files.pythonhosted.org/packages/6d/a7/61246562b651dff00de86a5f01b6e4befb518df314c54dec187a78d81c84/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc", size = 1998861 },
264 | { url = "https://files.pythonhosted.org/packages/86/aa/837821ecf0c022bbb74ca132e117c358321e72e7f9702d1b6a03758545e2/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50", size = 2116582 },
265 | { url = "https://files.pythonhosted.org/packages/81/b0/5e74656e95623cbaa0a6278d16cf15e10a51f6002e3ec126541e95c29ea3/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9", size = 2151985 },
266 | { url = "https://files.pythonhosted.org/packages/63/37/3e32eeb2a451fddaa3898e2163746b0cffbbdbb4740d38372db0490d67f3/pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151", size = 2004715 },
267 | ]
268 |
269 | [[package]]
270 | name = "pydantic-settings"
271 | version = "2.8.1"
272 | source = { registry = "https://pypi.org/simple" }
273 | dependencies = [
274 | { name = "pydantic" },
275 | { name = "python-dotenv" },
276 | ]
277 | sdist = { url = "https://files.pythonhosted.org/packages/88/82/c79424d7d8c29b994fb01d277da57b0a9b09cc03c3ff875f9bd8a86b2145/pydantic_settings-2.8.1.tar.gz", hash = "sha256:d5c663dfbe9db9d5e1c646b2e161da12f0d734d422ee56f567d0ea2cee4e8585", size = 83550 }
278 | wheels = [
279 | { url = "https://files.pythonhosted.org/packages/0b/53/a64f03044927dc47aafe029c42a5b7aabc38dfb813475e0e1bf71c4a59d0/pydantic_settings-2.8.1-py3-none-any.whl", hash = "sha256:81942d5ac3d905f7f3ee1a70df5dfb62d5569c12f51a5a647defc1c3d9ee2e9c", size = 30839 },
280 | ]
281 |
282 | [[package]]
283 | name = "pygments"
284 | version = "2.19.1"
285 | source = { registry = "https://pypi.org/simple" }
286 | sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581 }
287 | wheels = [
288 | { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293 },
289 | ]
290 |
291 | [[package]]
292 | name = "python-dotenv"
293 | version = "1.0.1"
294 | source = { registry = "https://pypi.org/simple" }
295 | sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115 }
296 | wheels = [
297 | { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863 },
298 | ]
299 |
300 | [[package]]
301 | name = "rich"
302 | version = "13.9.4"
303 | source = { registry = "https://pypi.org/simple" }
304 | dependencies = [
305 | { name = "markdown-it-py" },
306 | { name = "pygments" },
307 | { name = "typing-extensions", marker = "python_full_version < '3.11'" },
308 | ]
309 | sdist = { url = "https://files.pythonhosted.org/packages/ab/3a/0316b28d0761c6734d6bc14e770d85506c986c85ffb239e688eeaab2c2bc/rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098", size = 223149 }
310 | wheels = [
311 | { url = "https://files.pythonhosted.org/packages/19/71/39c7c0d87f8d4e6c020a393182060eaefeeae6c01dab6a84ec346f2567df/rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90", size = 242424 },
312 | ]
313 |
314 | [[package]]
315 | name = "shellingham"
316 | version = "1.5.4"
317 | source = { registry = "https://pypi.org/simple" }
318 | sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310 }
319 | wheels = [
320 | { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755 },
321 | ]
322 |
323 | [[package]]
324 | name = "sniffio"
325 | version = "1.3.1"
326 | source = { registry = "https://pypi.org/simple" }
327 | sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 }
328 | wheels = [
329 | { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 },
330 | ]
331 |
332 | [[package]]
333 | name = "sse-starlette"
334 | version = "2.2.1"
335 | source = { registry = "https://pypi.org/simple" }
336 | dependencies = [
337 | { name = "anyio" },
338 | { name = "starlette" },
339 | ]
340 | sdist = { url = "https://files.pythonhosted.org/packages/71/a4/80d2a11af59fe75b48230846989e93979c892d3a20016b42bb44edb9e398/sse_starlette-2.2.1.tar.gz", hash = "sha256:54470d5f19274aeed6b2d473430b08b4b379ea851d953b11d7f1c4a2c118b419", size = 17376 }
341 | wheels = [
342 | { url = "https://files.pythonhosted.org/packages/d9/e0/5b8bd393f27f4a62461c5cf2479c75a2cc2ffa330976f9f00f5f6e4f50eb/sse_starlette-2.2.1-py3-none-any.whl", hash = "sha256:6410a3d3ba0c89e7675d4c273a301d64649c03a5ef1ca101f10b47f895fd0e99", size = 10120 },
343 | ]
344 |
345 | [[package]]
346 | name = "starlette"
347 | version = "0.46.0"
348 | source = { registry = "https://pypi.org/simple" }
349 | dependencies = [
350 | { name = "anyio" },
351 | ]
352 | sdist = { url = "https://files.pythonhosted.org/packages/44/b6/fb9a32e3c5d59b1e383c357534c63c2d3caa6f25bf3c59dd89d296ecbaec/starlette-0.46.0.tar.gz", hash = "sha256:b359e4567456b28d473d0193f34c0de0ed49710d75ef183a74a5ce0499324f50", size = 2575568 }
353 | wheels = [
354 | { url = "https://files.pythonhosted.org/packages/41/94/8af675a62e3c91c2dee47cf92e602cfac86e8767b1a1ac3caf1b327c2ab0/starlette-0.46.0-py3-none-any.whl", hash = "sha256:913f0798bd90ba90a9156383bcf1350a17d6259451d0d8ee27fc0cf2db609038", size = 71991 },
355 | ]
356 |
357 | [[package]]
358 | name = "typer"
359 | version = "0.15.2"
360 | source = { registry = "https://pypi.org/simple" }
361 | dependencies = [
362 | { name = "click" },
363 | { name = "rich" },
364 | { name = "shellingham" },
365 | { name = "typing-extensions" },
366 | ]
367 | sdist = { url = "https://files.pythonhosted.org/packages/8b/6f/3991f0f1c7fcb2df31aef28e0594d8d54b05393a0e4e34c65e475c2a5d41/typer-0.15.2.tar.gz", hash = "sha256:ab2fab47533a813c49fe1f16b1a370fd5819099c00b119e0633df65f22144ba5", size = 100711 }
368 | wheels = [
369 | { url = "https://files.pythonhosted.org/packages/7f/fc/5b29fea8cee020515ca82cc68e3b8e1e34bb19a3535ad854cac9257b414c/typer-0.15.2-py3-none-any.whl", hash = "sha256:46a499c6107d645a9c13f7ee46c5d5096cae6f5fc57dd11eccbbb9ae3e44ddfc", size = 45061 },
370 | ]
371 |
372 | [[package]]
373 | name = "typing-extensions"
374 | version = "4.12.2"
375 | source = { registry = "https://pypi.org/simple" }
376 | sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 }
377 | wheels = [
378 | { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 },
379 | ]
380 |
381 | [[package]]
382 | name = "uvicorn"
383 | version = "0.34.0"
384 | source = { registry = "https://pypi.org/simple" }
385 | dependencies = [
386 | { name = "click" },
387 | { name = "h11" },
388 | { name = "typing-extensions", marker = "python_full_version < '3.11'" },
389 | ]
390 | sdist = { url = "https://files.pythonhosted.org/packages/4b/4d/938bd85e5bf2edeec766267a5015ad969730bb91e31b44021dfe8b22df6c/uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9", size = 76568 }
391 | wheels = [
392 | { url = "https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl", hash = "sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4", size = 62315 },
393 | ]
394 |
--------------------------------------------------------------------------------
/src/blender_mcp/server.py:
--------------------------------------------------------------------------------
1 | # blender_mcp_server.py
2 | from mcp.server.fastmcp import FastMCP, Context, Image
3 | import socket
4 | import json
5 | import asyncio
6 | import logging
7 | import tempfile
8 | from dataclasses import dataclass
9 | from contextlib import asynccontextmanager
10 | from typing import AsyncIterator, Dict, Any, List
11 | import os
12 | from pathlib import Path
13 | import base64
14 | from urllib.parse import urlparse
15 |
16 | # Configure logging
17 | logging.basicConfig(level=logging.INFO,
18 | format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
19 | logger = logging.getLogger("BlenderMCPServer")
20 |
21 | @dataclass
22 | class BlenderConnection:
23 | host: str
24 | port: int
25 | sock: socket.socket = None # Changed from 'socket' to 'sock' to avoid naming conflict
26 |
27 | def connect(self) -> bool:
28 | """Connect to the Blender addon socket server"""
29 | if self.sock:
30 | return True
31 |
32 | try:
33 | self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
34 | self.sock.connect((self.host, self.port))
35 | logger.info(f"Connected to Blender at {self.host}:{self.port}")
36 | return True
37 | except Exception as e:
38 | logger.error(f"Failed to connect to Blender: {str(e)}")
39 | self.sock = None
40 | return False
41 |
42 | def disconnect(self):
43 | """Disconnect from the Blender addon"""
44 | if self.sock:
45 | try:
46 | self.sock.close()
47 | except Exception as e:
48 | logger.error(f"Error disconnecting from Blender: {str(e)}")
49 | finally:
50 | self.sock = None
51 |
52 | def receive_full_response(self, sock, buffer_size=8192):
53 | """Receive the complete response, potentially in multiple chunks"""
54 | chunks = []
55 | # Use a consistent timeout value that matches the addon's timeout
56 | sock.settimeout(15.0) # Match the addon's timeout
57 |
58 | try:
59 | while True:
60 | try:
61 | chunk = sock.recv(buffer_size)
62 | if not chunk:
63 | # If we get an empty chunk, the connection might be closed
64 | if not chunks: # If we haven't received anything yet, this is an error
65 | raise Exception("Connection closed before receiving any data")
66 | break
67 |
68 | chunks.append(chunk)
69 |
70 | # Check if we've received a complete JSON object
71 | try:
72 | data = b''.join(chunks)
73 | json.loads(data.decode('utf-8'))
74 | # If we get here, it parsed successfully
75 | logger.info(f"Received complete response ({len(data)} bytes)")
76 | return data
77 | except json.JSONDecodeError:
78 | # Incomplete JSON, continue receiving
79 | continue
80 | except socket.timeout:
81 | # If we hit a timeout during receiving, break the loop and try to use what we have
82 | logger.warning("Socket timeout during chunked receive")
83 | break
84 | except (ConnectionError, BrokenPipeError, ConnectionResetError) as e:
85 | logger.error(f"Socket connection error during receive: {str(e)}")
86 | raise # Re-raise to be handled by the caller
87 | except socket.timeout:
88 | logger.warning("Socket timeout during chunked receive")
89 | except Exception as e:
90 | logger.error(f"Error during receive: {str(e)}")
91 | raise
92 |
93 | # If we get here, we either timed out or broke out of the loop
94 | # Try to use what we have
95 | if chunks:
96 | data = b''.join(chunks)
97 | logger.info(f"Returning data after receive completion ({len(data)} bytes)")
98 | try:
99 | # Try to parse what we have
100 | json.loads(data.decode('utf-8'))
101 | return data
102 | except json.JSONDecodeError:
103 | # If we can't parse it, it's incomplete
104 | raise Exception("Incomplete JSON response received")
105 | else:
106 | raise Exception("No data received")
107 |
108 | def send_command(self, command_type: str, params: Dict[str, Any] = None) -> Dict[str, Any]:
109 | """Send a command to Blender and return the response"""
110 | if not self.sock and not self.connect():
111 | raise ConnectionError("Not connected to Blender")
112 |
113 | command = {
114 | "type": command_type,
115 | "params": params or {}
116 | }
117 |
118 | try:
119 | # Log the command being sent
120 | logger.info(f"Sending command: {command_type} with params: {params}")
121 |
122 | # Send the command
123 | self.sock.sendall(json.dumps(command).encode('utf-8'))
124 | logger.info(f"Command sent, waiting for response...")
125 |
126 | # Set a timeout for receiving - use the same timeout as in receive_full_response
127 | self.sock.settimeout(15.0) # Match the addon's timeout
128 |
129 | # Receive the response using the improved receive_full_response method
130 | response_data = self.receive_full_response(self.sock)
131 | logger.info(f"Received {len(response_data)} bytes of data")
132 |
133 | response = json.loads(response_data.decode('utf-8'))
134 | logger.info(f"Response parsed, status: {response.get('status', 'unknown')}")
135 |
136 | if response.get("status") == "error":
137 | logger.error(f"Blender error: {response.get('message')}")
138 | raise Exception(response.get("message", "Unknown error from Blender"))
139 |
140 | return response.get("result", {})
141 | except socket.timeout:
142 | logger.error("Socket timeout while waiting for response from Blender")
143 | # Don't try to reconnect here - let the get_blender_connection handle reconnection
144 | # Just invalidate the current socket so it will be recreated next time
145 | self.sock = None
146 | raise Exception("Timeout waiting for Blender response - try simplifying your request")
147 | except (ConnectionError, BrokenPipeError, ConnectionResetError) as e:
148 | logger.error(f"Socket connection error: {str(e)}")
149 | self.sock = None
150 | raise Exception(f"Connection to Blender lost: {str(e)}")
151 | except json.JSONDecodeError as e:
152 | logger.error(f"Invalid JSON response from Blender: {str(e)}")
153 | # Try to log what was received
154 | if 'response_data' in locals() and response_data:
155 | logger.error(f"Raw response (first 200 bytes): {response_data[:200]}")
156 | raise Exception(f"Invalid response from Blender: {str(e)}")
157 | except Exception as e:
158 | logger.error(f"Error communicating with Blender: {str(e)}")
159 | # Don't try to reconnect here - let the get_blender_connection handle reconnection
160 | self.sock = None
161 | raise Exception(f"Communication error with Blender: {str(e)}")
162 |
163 | @asynccontextmanager
164 | async def server_lifespan(server: FastMCP) -> AsyncIterator[Dict[str, Any]]:
165 | """Manage server startup and shutdown lifecycle"""
166 | # We don't need to create a connection here since we're using the global connection
167 | # for resources and tools
168 |
169 | try:
170 | # Just log that we're starting up
171 | logger.info("BlenderMCP server starting up")
172 |
173 | # Try to connect to Blender on startup to verify it's available
174 | try:
175 | # This will initialize the global connection if needed
176 | blender = get_blender_connection()
177 | logger.info("Successfully connected to Blender on startup")
178 | except Exception as e:
179 | logger.warning(f"Could not connect to Blender on startup: {str(e)}")
180 | logger.warning("Make sure the Blender addon is running before using Blender resources or tools")
181 |
182 | # Return an empty context - we're using the global connection
183 | yield {}
184 | finally:
185 | # Clean up the global connection on shutdown
186 | global _blender_connection
187 | if _blender_connection:
188 | logger.info("Disconnecting from Blender on shutdown")
189 | _blender_connection.disconnect()
190 | _blender_connection = None
191 | logger.info("BlenderMCP server shut down")
192 |
193 | # Create the MCP server with lifespan support
194 | mcp = FastMCP(
195 | "BlenderMCP",
196 | description="Blender integration through the Model Context Protocol",
197 | lifespan=server_lifespan
198 | )
199 |
200 | # Resource endpoints
201 |
202 | # Global connection for resources (since resources can't access context)
203 | _blender_connection = None
204 | _polyhaven_enabled = False  # Tracks whether the PolyHaven integration is enabled in the addon
205 |
206 | def get_blender_connection():
207 | """Get or create a persistent Blender connection"""
208 |     global _blender_connection, _polyhaven_enabled
209 |
210 | # If we have an existing connection, check if it's still valid
211 | if _blender_connection is not None:
212 | try:
213 | # First check if PolyHaven is enabled by sending a ping command
214 | result = _blender_connection.send_command("get_polyhaven_status")
215 | # Store the PolyHaven status globally
216 | _polyhaven_enabled = result.get("enabled", False)
217 | return _blender_connection
218 | except Exception as e:
219 | # Connection is dead, close it and create a new one
220 | logger.warning(f"Existing connection is no longer valid: {str(e)}")
221 | try:
222 | _blender_connection.disconnect()
223 | except:
224 | pass
225 | _blender_connection = None
226 |
227 | # Create a new connection if needed
228 | if _blender_connection is None:
229 | _blender_connection = BlenderConnection(host="localhost", port=9876)
230 | if not _blender_connection.connect():
231 | logger.error("Failed to connect to Blender")
232 | _blender_connection = None
233 | raise Exception("Could not connect to Blender. Make sure the Blender addon is running.")
234 | logger.info("Created new persistent connection to Blender")
235 |
236 | return _blender_connection
237 |
238 |
239 | @mcp.tool()
240 | def get_scene_info(ctx: Context) -> str:
241 | """Get detailed information about the current Blender scene"""
242 | try:
243 | blender = get_blender_connection()
244 | result = blender.send_command("get_scene_info")
245 |
246 | # Just return the JSON representation of what Blender sent us
247 | return json.dumps(result, indent=2)
248 | except Exception as e:
249 | logger.error(f"Error getting scene info from Blender: {str(e)}")
250 | return f"Error getting scene info: {str(e)}"
251 |
252 | @mcp.tool()
253 | def get_object_info(ctx: Context, object_name: str) -> str:
254 | """
255 | Get detailed information about a specific object in the Blender scene.
256 |
257 | Parameters:
258 | - object_name: The name of the object to get information about
259 | """
260 | try:
261 | blender = get_blender_connection()
262 | result = blender.send_command("get_object_info", {"name": object_name})
263 |
264 | # Just return the JSON representation of what Blender sent us
265 | return json.dumps(result, indent=2)
266 | except Exception as e:
267 | logger.error(f"Error getting object info from Blender: {str(e)}")
268 | return f"Error getting object info: {str(e)}"
269 |
270 | @mcp.tool()
271 | def get_viewport_screenshot(ctx: Context, max_size: int = 800) -> Image:
272 | """
273 | Capture a screenshot of the current Blender 3D viewport.
274 |
275 | Parameters:
276 | - max_size: Maximum size in pixels for the largest dimension (default: 800)
277 |
278 | Returns the screenshot as an Image.
279 | """
280 | try:
281 | blender = get_blender_connection()
282 |
283 | # Create temp file path
284 | temp_dir = tempfile.gettempdir()
285 | temp_path = os.path.join(temp_dir, f"blender_screenshot_{os.getpid()}.png")
286 |
287 | result = blender.send_command("get_viewport_screenshot", {
288 | "max_size": max_size,
289 | "filepath": temp_path,
290 | "format": "png"
291 | })
292 |
293 | if "error" in result:
294 | raise Exception(result["error"])
295 |
296 | if not os.path.exists(temp_path):
297 | raise Exception("Screenshot file was not created")
298 |
299 | # Read the file
300 | with open(temp_path, 'rb') as f:
301 | image_bytes = f.read()
302 |
303 | # Delete the temp file
304 | os.remove(temp_path)
305 |
306 | return Image(data=image_bytes, format="png")
307 |
308 | except Exception as e:
309 | logger.error(f"Error capturing screenshot: {str(e)}")
310 | raise Exception(f"Screenshot failed: {str(e)}")
311 |
312 |
313 | @mcp.tool()
314 | def execute_blender_code(ctx: Context, code: str) -> str:
315 | """
316 | Execute arbitrary Python code in Blender. Make sure to do it step-by-step by breaking it into smaller chunks.
317 |
318 | Parameters:
319 | - code: The Python code to execute
320 | """
321 | try:
322 | # Get the global connection
323 | blender = get_blender_connection()
324 | result = blender.send_command("execute_code", {"code": code})
325 | return f"Code executed successfully: {result.get('result', '')}"
326 | except Exception as e:
327 | logger.error(f"Error executing code: {str(e)}")
328 | return f"Error executing code: {str(e)}"
329 |
330 | @mcp.tool()
331 | def get_polyhaven_categories(ctx: Context, asset_type: str = "hdris") -> str:
332 | """
333 | Get a list of categories for a specific asset type on Polyhaven.
334 |
335 | Parameters:
336 | - asset_type: The type of asset to get categories for (hdris, textures, models, all)
337 | """
338 | try:
339 | blender = get_blender_connection()
340 | if not _polyhaven_enabled:
341 | return "PolyHaven integration is disabled. Select it in the sidebar in BlenderMCP, then run it again."
342 | result = blender.send_command("get_polyhaven_categories", {"asset_type": asset_type})
343 |
344 | if "error" in result:
345 | return f"Error: {result['error']}"
346 |
347 | # Format the categories in a more readable way
348 | categories = result["categories"]
349 | formatted_output = f"Categories for {asset_type}:\n\n"
350 |
351 | # Sort categories by count (descending)
352 | sorted_categories = sorted(categories.items(), key=lambda x: x[1], reverse=True)
353 |
354 | for category, count in sorted_categories:
355 | formatted_output += f"- {category}: {count} assets\n"
356 |
357 | return formatted_output
358 | except Exception as e:
359 | logger.error(f"Error getting Polyhaven categories: {str(e)}")
360 | return f"Error getting Polyhaven categories: {str(e)}"
361 |
362 | @mcp.tool()
363 | def search_polyhaven_assets(
364 | ctx: Context,
365 | asset_type: str = "all",
366 | categories: str = None
367 | ) -> str:
368 | """
369 | Search for assets on Polyhaven with optional filtering.
370 |
371 | Parameters:
372 | - asset_type: Type of assets to search for (hdris, textures, models, all)
373 | - categories: Optional comma-separated list of categories to filter by
374 |
375 | Returns a list of matching assets with basic information.
376 | """
377 | try:
378 | blender = get_blender_connection()
379 | result = blender.send_command("search_polyhaven_assets", {
380 | "asset_type": asset_type,
381 | "categories": categories
382 | })
383 |
384 | if "error" in result:
385 | return f"Error: {result['error']}"
386 |
387 | # Format the assets in a more readable way
388 | assets = result["assets"]
389 | total_count = result["total_count"]
390 | returned_count = result["returned_count"]
391 |
392 | formatted_output = f"Found {total_count} assets"
393 | if categories:
394 | formatted_output += f" in categories: {categories}"
395 | formatted_output += f"\nShowing {returned_count} assets:\n\n"
396 |
397 | # Sort assets by download count (popularity)
398 | sorted_assets = sorted(assets.items(), key=lambda x: x[1].get("download_count", 0), reverse=True)
399 |
400 | for asset_id, asset_data in sorted_assets:
401 | formatted_output += f"- {asset_data.get('name', asset_id)} (ID: {asset_id})\n"
402 | formatted_output += f" Type: {['HDRI', 'Texture', 'Model'][asset_data.get('type', 0)]}\n"
403 | formatted_output += f" Categories: {', '.join(asset_data.get('categories', []))}\n"
404 | formatted_output += f" Downloads: {asset_data.get('download_count', 'Unknown')}\n\n"
405 |
406 | return formatted_output
407 | except Exception as e:
408 | logger.error(f"Error searching Polyhaven assets: {str(e)}")
409 | return f"Error searching Polyhaven assets: {str(e)}"
410 |
411 | @mcp.tool()
412 | def download_polyhaven_asset(
413 | ctx: Context,
414 | asset_id: str,
415 | asset_type: str,
416 | resolution: str = "1k",
417 | file_format: str = None
418 | ) -> str:
419 | """
420 | Download and import a Polyhaven asset into Blender.
421 |
422 | Parameters:
423 | - asset_id: The ID of the asset to download
424 | - asset_type: The type of asset (hdris, textures, models)
425 | - resolution: The resolution to download (e.g., 1k, 2k, 4k)
426 | - file_format: Optional file format (e.g., hdr, exr for HDRIs; jpg, png for textures; gltf, fbx for models)
427 |
428 | Returns a message indicating success or failure.
429 | """
430 | try:
431 | blender = get_blender_connection()
432 | result = blender.send_command("download_polyhaven_asset", {
433 | "asset_id": asset_id,
434 | "asset_type": asset_type,
435 | "resolution": resolution,
436 | "file_format": file_format
437 | })
438 |
439 | if "error" in result:
440 | return f"Error: {result['error']}"
441 |
442 | if result.get("success"):
443 | message = result.get("message", "Asset downloaded and imported successfully")
444 |
445 | # Add additional information based on asset type
446 | if asset_type == "hdris":
447 | return f"{message}. The HDRI has been set as the world environment."
448 | elif asset_type == "textures":
449 | material_name = result.get("material", "")
450 | maps = ", ".join(result.get("maps", []))
451 | return f"{message}. Created material '{material_name}' with maps: {maps}."
452 | elif asset_type == "models":
453 | return f"{message}. The model has been imported into the current scene."
454 | else:
455 | return message
456 | else:
457 | return f"Failed to download asset: {result.get('message', 'Unknown error')}"
458 | except Exception as e:
459 | logger.error(f"Error downloading Polyhaven asset: {str(e)}")
460 | return f"Error downloading Polyhaven asset: {str(e)}"
461 |
462 | @mcp.tool()
463 | def set_texture(
464 | ctx: Context,
465 | object_name: str,
466 | texture_id: str
467 | ) -> str:
468 | """
469 | Apply a previously downloaded Polyhaven texture to an object.
470 |
471 | Parameters:
472 | - object_name: Name of the object to apply the texture to
473 | - texture_id: ID of the Polyhaven texture to apply (must be downloaded first)
474 |
475 | Returns a message indicating success or failure.
476 | """
477 | try:
478 | # Get the global connection
479 | blender = get_blender_connection()
480 | result = blender.send_command("set_texture", {
481 | "object_name": object_name,
482 | "texture_id": texture_id
483 | })
484 |
485 | if "error" in result:
486 | return f"Error: {result['error']}"
487 |
488 | if result.get("success"):
489 | material_name = result.get("material", "")
490 | maps = ", ".join(result.get("maps", []))
491 |
492 | # Add detailed material info
493 | material_info = result.get("material_info", {})
494 | node_count = material_info.get("node_count", 0)
495 | has_nodes = material_info.get("has_nodes", False)
496 | texture_nodes = material_info.get("texture_nodes", [])
497 |
498 | output = f"Successfully applied texture '{texture_id}' to {object_name}.\n"
499 | output += f"Using material '{material_name}' with maps: {maps}.\n\n"
500 | output += f"Material has nodes: {has_nodes}\n"
501 | output += f"Total node count: {node_count}\n\n"
502 |
503 | if texture_nodes:
504 | output += "Texture nodes:\n"
505 | for node in texture_nodes:
506 | output += f"- {node['name']} using image: {node['image']}\n"
507 | if node['connections']:
508 | output += " Connections:\n"
509 | for conn in node['connections']:
510 | output += f" {conn}\n"
511 | else:
512 | output += "No texture nodes found in the material.\n"
513 |
514 | return output
515 | else:
516 | return f"Failed to apply texture: {result.get('message', 'Unknown error')}"
517 | except Exception as e:
518 | logger.error(f"Error applying texture: {str(e)}")
519 | return f"Error applying texture: {str(e)}"
520 |
521 | @mcp.tool()
522 | def get_polyhaven_status(ctx: Context) -> str:
523 | """
524 | Check if PolyHaven integration is enabled in Blender.
525 | Returns a message indicating whether PolyHaven features are available.
526 | """
527 | try:
528 | blender = get_blender_connection()
529 | result = blender.send_command("get_polyhaven_status")
530 | enabled = result.get("enabled", False)
531 | message = result.get("message", "")
532 | if enabled:
533 | message += "PolyHaven is good at Textures, and has a wider variety of textures than Sketchfab."
534 | return message
535 | except Exception as e:
536 | logger.error(f"Error checking PolyHaven status: {str(e)}")
537 | return f"Error checking PolyHaven status: {str(e)}"
538 |
539 | @mcp.tool()
540 | def get_hyper3d_status(ctx: Context) -> str:
541 | """
542 | Check if Hyper3D Rodin integration is enabled in Blender.
543 | Returns a message indicating whether Hyper3D Rodin features are available.
544 |
545 |     Don't emphasize the key type in the returned message, but silently remember it.
546 | """
547 | try:
548 | blender = get_blender_connection()
549 | result = blender.send_command("get_hyper3d_status")
550 | enabled = result.get("enabled", False)
551 | message = result.get("message", "")
552 | if enabled:
553 | message += ""
554 | return message
555 | except Exception as e:
556 | logger.error(f"Error checking Hyper3D status: {str(e)}")
557 | return f"Error checking Hyper3D status: {str(e)}"
558 |
559 | @mcp.tool()
560 | def get_sketchfab_status(ctx: Context) -> str:
561 | """
562 | Check if Sketchfab integration is enabled in Blender.
563 | Returns a message indicating whether Sketchfab features are available.
564 | """
565 | try:
566 | blender = get_blender_connection()
567 | result = blender.send_command("get_sketchfab_status")
568 | enabled = result.get("enabled", False)
569 | message = result.get("message", "")
570 | if enabled:
571 | message += "Sketchfab is good at Realistic models, and has a wider variety of models than PolyHaven."
572 | return message
573 | except Exception as e:
574 | logger.error(f"Error checking Sketchfab status: {str(e)}")
575 | return f"Error checking Sketchfab status: {str(e)}"
576 |
577 | @mcp.tool()
578 | def search_sketchfab_models(
579 | ctx: Context,
580 | query: str,
581 | categories: str = None,
582 | count: int = 20,
583 | downloadable: bool = True
584 | ) -> str:
585 | """
586 | Search for models on Sketchfab with optional filtering.
587 |
588 | Parameters:
589 | - query: Text to search for
590 | - categories: Optional comma-separated list of categories
591 | - count: Maximum number of results to return (default 20)
592 | - downloadable: Whether to include only downloadable models (default True)
593 |
594 | Returns a formatted list of matching models.
595 | """
596 | try:
597 |
598 | blender = get_blender_connection()
599 | logger.info(f"Searching Sketchfab models with query: {query}, categories: {categories}, count: {count}, downloadable: {downloadable}")
600 | result = blender.send_command("search_sketchfab_models", {
601 | "query": query,
602 | "categories": categories,
603 | "count": count,
604 | "downloadable": downloadable
605 | })
606 |
607 | if "error" in result:
608 | logger.error(f"Error from Sketchfab search: {result['error']}")
609 | return f"Error: {result['error']}"
610 |
611 | # Safely get results with fallbacks for None
612 | if result is None:
613 | logger.error("Received None result from Sketchfab search")
614 | return "Error: Received no response from Sketchfab search"
615 |
616 | # Format the results
617 | models = result.get("results", []) or []
618 | if not models:
619 | return f"No models found matching '{query}'"
620 |
621 | formatted_output = f"Found {len(models)} models matching '{query}':\n\n"
622 |
623 | for model in models:
624 | if model is None:
625 | continue
626 |
627 | model_name = model.get("name", "Unnamed model")
628 | model_uid = model.get("uid", "Unknown ID")
629 | formatted_output += f"- {model_name} (UID: {model_uid})\n"
630 |
631 | # Get user info with safety checks
632 | user = model.get("user") or {}
633 | username = user.get("username", "Unknown author") if isinstance(user, dict) else "Unknown author"
634 | formatted_output += f" Author: {username}\n"
635 |
636 | # Get license info with safety checks
637 | license_data = model.get("license") or {}
638 | license_label = license_data.get("label", "Unknown") if isinstance(license_data, dict) else "Unknown"
639 | formatted_output += f" License: {license_label}\n"
640 |
641 | # Add face count and downloadable status
642 | face_count = model.get("faceCount", "Unknown")
643 | is_downloadable = "Yes" if model.get("isDownloadable") else "No"
644 | formatted_output += f" Face count: {face_count}\n"
645 | formatted_output += f" Downloadable: {is_downloadable}\n\n"
646 |
647 | return formatted_output
648 | except Exception as e:
649 | logger.error(f"Error searching Sketchfab models: {str(e)}")
650 | import traceback
651 | logger.error(traceback.format_exc())
652 | return f"Error searching Sketchfab models: {str(e)}"
653 |
654 | @mcp.tool()
655 | def download_sketchfab_model(
656 | ctx: Context,
657 | uid: str
658 | ) -> str:
659 | """
660 | Download and import a Sketchfab model by its UID.
661 |
662 | Parameters:
663 | - uid: The unique identifier of the Sketchfab model
664 |
665 | Returns a message indicating success or failure.
666 | The model must be downloadable and you must have proper access rights.
667 | """
668 | try:
669 |
670 | blender = get_blender_connection()
671 | logger.info(f"Attempting to download Sketchfab model with UID: {uid}")
672 |
673 | result = blender.send_command("download_sketchfab_model", {
674 | "uid": uid
675 | })
676 |
677 | if result is None:
678 | logger.error("Received None result from Sketchfab download")
679 | return "Error: Received no response from Sketchfab download request"
680 |
681 | if "error" in result:
682 | logger.error(f"Error from Sketchfab download: {result['error']}")
683 | return f"Error: {result['error']}"
684 |
685 | if result.get("success"):
686 | imported_objects = result.get("imported_objects", [])
687 | object_names = ", ".join(imported_objects) if imported_objects else "none"
688 | return f"Successfully imported model. Created objects: {object_names}"
689 | else:
690 | return f"Failed to download model: {result.get('message', 'Unknown error')}"
691 | except Exception as e:
692 | logger.error(f"Error downloading Sketchfab model: {str(e)}")
693 | import traceback
694 | logger.error(traceback.format_exc())
695 | return f"Error downloading Sketchfab model: {str(e)}"
696 |
697 | def _process_bbox(original_bbox: list[float] | list[int] | None) -> list[int] | None:
698 | if original_bbox is None:
699 | return None
700 | if all(isinstance(i, int) for i in original_bbox):
701 | return original_bbox
702 |     if any(i <= 0 for i in original_bbox):
703 |         raise ValueError("Incorrect number range: all bbox values must be greater than zero!")
704 | return [int(float(i) / max(original_bbox) * 100) for i in original_bbox] if original_bbox else None
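    |     # Worked example (hypothetical values): a bbox of [1.0, 2.0, 4.0] is scaled to
    |     # [int(1.0 / 4.0 * 100), int(2.0 / 4.0 * 100), int(4.0 / 4.0 * 100)] == [25, 50, 100],
    |     # while an all-int bbox such as [25, 50, 100] is returned unchanged.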
705 |
706 | @mcp.tool()
707 | def generate_hyper3d_model_via_text(
708 | ctx: Context,
709 | text_prompt: str,
710 | bbox_condition: list[float]=None
711 | ) -> str:
712 | """
713 | Generate 3D asset using Hyper3D by giving description of the desired asset, and import the asset into Blender.
714 | The 3D asset has built-in materials.
715 | The generated model has a normalized size, so re-scaling after generation can be useful.
716 |
717 | Parameters:
718 | - text_prompt: A short description of the desired model in **English**.
719 | - bbox_condition: Optional. If given, it has to be a list of floats of length 3. Controls the ratio between [Length, Width, Height] of the model.
720 |
721 | Returns a message indicating success or failure.
722 | """
723 | try:
724 | blender = get_blender_connection()
725 | result = blender.send_command("create_rodin_job", {
726 | "text_prompt": text_prompt,
727 | "images": None,
728 | "bbox_condition": _process_bbox(bbox_condition),
729 | })
730 | succeed = result.get("submit_time", False)
731 | if succeed:
732 | return json.dumps({
733 | "task_uuid": result["uuid"],
734 | "subscription_key": result["jobs"]["subscription_key"],
735 | })
736 | else:
737 | return json.dumps(result)
738 | except Exception as e:
739 | logger.error(f"Error generating Hyper3D task: {str(e)}")
740 | return f"Error generating Hyper3D task: {str(e)}"
741 |
742 | @mcp.tool()
743 | def generate_hyper3d_model_via_images(
744 | ctx: Context,
745 | input_image_paths: list[str]=None,
746 | input_image_urls: list[str]=None,
747 | bbox_condition: list[float]=None
748 | ) -> str:
749 | """
750 | Generate 3D asset using Hyper3D by giving images of the wanted asset, and import the generated asset into Blender.
751 | The 3D asset has built-in materials.
752 | The generated model has a normalized size, so re-scaling after generation can be useful.
753 |
754 | Parameters:
755 | - input_image_paths: The **absolute** paths of input images. Even if only one image is provided, wrap it into a list. Required if Hyper3D Rodin in MAIN_SITE mode.
756 | - input_image_urls: The URLs of input images. Even if only one image is provided, wrap it into a list. Required if Hyper3D Rodin in FAL_AI mode.
757 | - bbox_condition: Optional. If given, it has to be a list of ints of length 3. Controls the ratio between [Length, Width, Height] of the model.
758 |
759 | Only one of {input_image_paths, input_image_urls} should be given at a time, depending on the Hyper3D Rodin's current mode.
760 | Returns a message indicating success or failure.
761 | """
762 | if input_image_paths is not None and input_image_urls is not None:
763 | return f"Error: Conflict parameters given!"
764 | if input_image_paths is None and input_image_urls is None:
765 | return f"Error: No image given!"
766 | if input_image_paths is not None:
767 | if not all(os.path.exists(i) for i in input_image_paths):
768 | return "Error: not all image paths are valid!"
769 | images = []
770 | for path in input_image_paths:
771 | with open(path, "rb") as f:
772 | images.append(
773 | (Path(path).suffix, base64.b64encode(f.read()).decode("ascii"))
774 | )
775 | elif input_image_urls is not None:
776 |         if not all(urlparse(i) for i in input_image_urls):
777 | return "Error: not all image URLs are valid!"
778 | images = input_image_urls.copy()
779 | try:
780 | blender = get_blender_connection()
781 | result = blender.send_command("create_rodin_job", {
782 | "text_prompt": None,
783 | "images": images,
784 | "bbox_condition": _process_bbox(bbox_condition),
785 | })
786 | succeed = result.get("submit_time", False)
787 | if succeed:
788 | return json.dumps({
789 | "task_uuid": result["uuid"],
790 | "subscription_key": result["jobs"]["subscription_key"],
791 | })
792 | else:
793 | return json.dumps(result)
794 | except Exception as e:
795 | logger.error(f"Error generating Hyper3D task: {str(e)}")
796 | return f"Error generating Hyper3D task: {str(e)}"
797 |
798 | @mcp.tool()
799 | def poll_rodin_job_status(
800 | ctx: Context,
801 | subscription_key: str=None,
802 | request_id: str=None,
803 | ):
804 | """
805 | Check if the Hyper3D Rodin generation task is completed.
806 |
807 | For Hyper3D Rodin mode MAIN_SITE:
808 | Parameters:
809 | - subscription_key: The subscription_key given in the generate model step.
810 |
811 |         Returns a list of statuses. The task is done if all statuses are "Done".
812 |         If "Failed" shows up, the generation process failed.
813 |         This is a polling API, so only proceed once the statuses are finally determined ("Done" or "Canceled").
814 |
815 | For Hyper3D Rodin mode FAL_AI:
816 | Parameters:
817 | - request_id: The request_id given in the generate model step.
818 |
819 |         Returns the generation task status. The task is done if the status is "COMPLETED".
820 |         The task is in progress if the status is "IN_PROGRESS".
821 |         If a status other than "COMPLETED", "IN_PROGRESS", or "IN_QUEUE" shows up, the generation process might have failed.
822 |         This is a polling API, so only proceed once the status is finally determined ("COMPLETED" or some failed state).
823 | """
824 | try:
825 | blender = get_blender_connection()
826 | kwargs = {}
827 | if subscription_key:
828 | kwargs = {
829 | "subscription_key": subscription_key,
830 | }
831 | elif request_id:
832 | kwargs = {
833 | "request_id": request_id,
834 | }
835 | result = blender.send_command("poll_rodin_job_status", kwargs)
836 | return result
837 | except Exception as e:
838 | logger.error(f"Error generating Hyper3D task: {str(e)}")
839 | return f"Error generating Hyper3D task: {str(e)}"
840 |
841 | @mcp.tool()
842 | def import_generated_asset(
843 | ctx: Context,
844 | name: str,
845 | task_uuid: str=None,
846 | request_id: str=None,
847 | ):
848 | """
849 | Import the asset generated by Hyper3D Rodin after the generation task is completed.
850 |
851 | Parameters:
852 | - name: The name of the object in scene
853 | - task_uuid: For Hyper3D Rodin mode MAIN_SITE: The task_uuid given in the generate model step.
854 | - request_id: For Hyper3D Rodin mode FAL_AI: The request_id given in the generate model step.
855 |
856 | Only give one of {task_uuid, request_id} based on the Hyper3D Rodin Mode!
857 |     Returns whether the asset was imported successfully.
858 | """
859 | try:
860 | blender = get_blender_connection()
861 | kwargs = {
862 | "name": name
863 | }
864 | if task_uuid:
865 | kwargs["task_uuid"] = task_uuid
866 | elif request_id:
867 | kwargs["request_id"] = request_id
868 | result = blender.send_command("import_generated_asset", kwargs)
869 | return result
870 | except Exception as e:
871 | logger.error(f"Error generating Hyper3D task: {str(e)}")
872 | return f"Error generating Hyper3D task: {str(e)}"
873 |
874 | @mcp.prompt()
875 | def asset_creation_strategy() -> str:
876 | """Defines the preferred strategy for creating assets in Blender"""
877 | return """When creating 3D content in Blender, always start by checking if integrations are available:
878 |
879 | 0. Before anything, always check the scene from get_scene_info()
880 |     1. First, use these tools to check which of the following integrations are enabled:
881 | 1. PolyHaven
882 | Use get_polyhaven_status() to verify its status
883 | If PolyHaven is enabled:
884 | - For objects/models: Use download_polyhaven_asset() with asset_type="models"
885 | - For materials/textures: Use download_polyhaven_asset() with asset_type="textures"
886 | - For environment lighting: Use download_polyhaven_asset() with asset_type="hdris"
887 | 2. Sketchfab
888 | Sketchfab is good at Realistic models, and has a wider variety of models than PolyHaven.
889 | Use get_sketchfab_status() to verify its status
890 | If Sketchfab is enabled:
891 | - For objects/models: First search using search_sketchfab_models() with your query
892 | - Then download specific models using download_sketchfab_model() with the UID
893 | - Note that only downloadable models can be accessed, and API key must be properly configured
894 | - Sketchfab has a wider variety of models than PolyHaven, especially for specific subjects
895 | 3. Hyper3D(Rodin)
896 |         Hyper3D Rodin is good at generating 3D models for single items.
897 |         So don't try to:
898 |         1. Generate the whole scene in one shot
899 |         2. Generate the ground using Hyper3D
900 |         3. Generate parts of the items separately and put them together afterwards
901 |
902 | Use get_hyper3d_status() to verify its status
903 | If Hyper3D is enabled:
904 | - For objects/models, do the following steps:
905 | 1. Create the model generation task
906 | - Use generate_hyper3d_model_via_images() if image(s) is/are given
907 | - Use generate_hyper3d_model_via_text() if generating 3D asset using text prompt
908 |                 If the key type is free_trial and an insufficient-balance error is returned, tell the user that the free trial key can only generate a limited number of models per day, and that they can choose to:
909 | - Wait for another day and try again
910 | - Go to hyper3d.ai to find out how to get their own API key
911 | - Go to fal.ai to get their own private API key
912 | 2. Poll the status
913 | - Use poll_rodin_job_status() to check if the generation task has completed or failed
914 | 3. Import the asset
915 |                 - Use import_generated_asset() to import the generated GLB asset
916 |             4. After importing the asset, ALWAYS check the world_bounding_box of the imported mesh
917 |                 Adjust the imported mesh's location, scale, and rotation so that the mesh is in the right spot.
918 |
919 |     You can reuse previously generated assets by running Python code to duplicate the object, without creating another generation task.
920 |
921 |     2. Always check the world_bounding_box for each item so that:
922 |         - Objects that should not be clipping are not clipping.
923 |         - Items have the right spatial relationship.
924 |
925 |     3. Recommended asset source priority:
926 | - For specific existing objects: First try Sketchfab, then PolyHaven
927 | - For generic objects/furniture: First try PolyHaven, then Sketchfab
928 | - For custom or unique items not available in libraries: Use Hyper3D Rodin
929 | - For environment lighting: Use PolyHaven HDRIs
930 | - For materials/textures: Use PolyHaven textures
931 |
932 | Only fall back to scripting when:
933 | - PolyHaven, Sketchfab, and Hyper3D are all disabled
934 | - A simple primitive is explicitly requested
935 | - No suitable asset exists in any of the libraries
936 | - Hyper3D Rodin failed to generate the desired asset
937 | - The task specifically requires a basic material/color
938 | """
939 |
940 | # Main execution
941 |
942 | def main():
943 | """Run the MCP server"""
944 | mcp.run()
945 |
946 | if __name__ == "__main__":
947 | main()
--------------------------------------------------------------------------------
/addon.py:
--------------------------------------------------------------------------------
1 | # Code created by Siddharth Ahuja: www.github.com/ahujasid © 2025
2 |
3 | import bpy
4 | import mathutils
5 | import json
6 | import threading
7 | import socket
8 | import time
9 | import requests
10 | import tempfile
11 | import traceback
12 | import os
13 | import shutil
14 | import zipfile
15 | from bpy.props import StringProperty, IntProperty, BoolProperty, EnumProperty
16 | import io
17 | from contextlib import redirect_stdout, suppress
18 |
19 | bl_info = {
20 | "name": "Blender MCP",
21 | "author": "BlenderMCP",
22 | "version": (1, 2),
23 | "blender": (3, 0, 0),
24 | "location": "View3D > Sidebar > BlenderMCP",
25 | "description": "Connect Blender to Claude via MCP",
26 | "category": "Interface",
27 | }
28 |
29 | RODIN_FREE_TRIAL_KEY = "k9TcfFoEhNd9cCPP2guHAHHHkctZHIRhZDywZ1euGUXwihbYLpOjQhofby80NJez"
30 |
31 | class BlenderMCPServer:
32 | def __init__(self, host='localhost', port=9876):
33 | self.host = host
34 | self.port = port
35 | self.running = False
36 | self.socket = None
37 | self.server_thread = None
38 |
39 | def start(self):
40 | if self.running:
41 | print("Server is already running")
42 | return
43 |
44 | self.running = True
45 |
46 | try:
47 | # Create socket
48 | self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
49 | self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
50 | self.socket.bind((self.host, self.port))
51 | self.socket.listen(1)
52 |
53 | # Start server thread
54 | self.server_thread = threading.Thread(target=self._server_loop)
55 | self.server_thread.daemon = True
56 | self.server_thread.start()
57 |
58 | print(f"BlenderMCP server started on {self.host}:{self.port}")
59 | except Exception as e:
60 | print(f"Failed to start server: {str(e)}")
61 | self.stop()
62 |
63 | def stop(self):
64 | self.running = False
65 |
66 | # Close socket
67 | if self.socket:
68 | try:
69 | self.socket.close()
70 | except:
71 | pass
72 | self.socket = None
73 |
74 | # Wait for thread to finish
75 | if self.server_thread:
76 | try:
77 | if self.server_thread.is_alive():
78 | self.server_thread.join(timeout=1.0)
79 | except:
80 | pass
81 | self.server_thread = None
82 |
83 | print("BlenderMCP server stopped")
84 |
85 | def _server_loop(self):
86 | """Main server loop in a separate thread"""
87 | print("Server thread started")
88 | self.socket.settimeout(1.0) # Timeout to allow for stopping
89 |
90 | while self.running:
91 | try:
92 | # Accept new connection
93 | try:
94 | client, address = self.socket.accept()
95 | print(f"Connected to client: {address}")
96 |
97 | # Handle client in a separate thread
98 | client_thread = threading.Thread(
99 | target=self._handle_client,
100 | args=(client,)
101 | )
102 | client_thread.daemon = True
103 | client_thread.start()
104 | except socket.timeout:
105 | # Just check running condition
106 | continue
107 | except Exception as e:
108 | print(f"Error accepting connection: {str(e)}")
109 | time.sleep(0.5)
110 | except Exception as e:
111 | print(f"Error in server loop: {str(e)}")
112 | if not self.running:
113 | break
114 | time.sleep(0.5)
115 |
116 | print("Server thread stopped")
117 |
118 | def _handle_client(self, client):
119 | """Handle connected client"""
120 | print("Client handler started")
121 | client.settimeout(None) # No timeout
122 | buffer = b''
123 |
124 | try:
125 | while self.running:
126 | # Receive data
127 | try:
128 | data = client.recv(8192)
129 | if not data:
130 | print("Client disconnected")
131 | break
132 |
133 | buffer += data
134 | try:
135 | # Try to parse command
136 | command = json.loads(buffer.decode('utf-8'))
137 | buffer = b''
138 |
139 | # Execute command in Blender's main thread
140 | def execute_wrapper():
141 | try:
142 | response = self.execute_command(command)
143 | response_json = json.dumps(response)
144 | try:
145 | client.sendall(response_json.encode('utf-8'))
146 | except:
147 | print("Failed to send response - client disconnected")
148 | except Exception as e:
149 | print(f"Error executing command: {str(e)}")
150 | traceback.print_exc()
151 | try:
152 | error_response = {
153 | "status": "error",
154 | "message": str(e)
155 | }
156 | client.sendall(json.dumps(error_response).encode('utf-8'))
157 | except:
158 | pass
159 |                             return None  # returning None unregisters this bpy.app.timers callback, so it runs only once
160 |
161 | # Schedule execution in main thread
162 | bpy.app.timers.register(execute_wrapper, first_interval=0.0)
163 | except json.JSONDecodeError:
164 | # Incomplete data, wait for more
165 | pass
166 | except Exception as e:
167 | print(f"Error receiving data: {str(e)}")
168 | break
169 | except Exception as e:
170 | print(f"Error in client handler: {str(e)}")
171 | finally:
172 | try:
173 | client.close()
174 | except:
175 | pass
176 | print("Client handler stopped")
177 |
178 | def execute_command(self, command):
179 | """Execute a command in the main Blender thread"""
180 | try:
181 | return self._execute_command_internal(command)
182 |
183 | except Exception as e:
184 | print(f"Error executing command: {str(e)}")
185 | traceback.print_exc()
186 | return {"status": "error", "message": str(e)}
187 |
188 | def _execute_command_internal(self, command):
189 | """Internal command execution with proper context"""
190 | cmd_type = command.get("type")
191 | params = command.get("params", {})
192 |
193 | # Add a handler for checking PolyHaven status
194 | if cmd_type == "get_polyhaven_status":
195 | return {"status": "success", "result": self.get_polyhaven_status()}
196 |
197 | # Base handlers that are always available
198 | handlers = {
199 | "get_scene_info": self.get_scene_info,
200 | "get_object_info": self.get_object_info,
201 | "get_viewport_screenshot": self.get_viewport_screenshot,
202 | "execute_code": self.execute_code,
203 | "get_polyhaven_status": self.get_polyhaven_status,
204 | "get_hyper3d_status": self.get_hyper3d_status,
205 | "get_sketchfab_status": self.get_sketchfab_status,
206 | }
207 |
208 | # Add Polyhaven handlers only if enabled
209 | if bpy.context.scene.blendermcp_use_polyhaven:
210 | polyhaven_handlers = {
211 | "get_polyhaven_categories": self.get_polyhaven_categories,
212 | "search_polyhaven_assets": self.search_polyhaven_assets,
213 | "download_polyhaven_asset": self.download_polyhaven_asset,
214 | "set_texture": self.set_texture,
215 | }
216 | handlers.update(polyhaven_handlers)
217 |
218 | # Add Hyper3d handlers only if enabled
219 | if bpy.context.scene.blendermcp_use_hyper3d:
220 |             hyper3d_handlers = {
221 |                 "create_rodin_job": self.create_rodin_job,
222 |                 "poll_rodin_job_status": self.poll_rodin_job_status,
223 |                 "import_generated_asset": self.import_generated_asset,
224 |             }
225 |             handlers.update(hyper3d_handlers)
226 |
227 | # Add Sketchfab handlers only if enabled
228 | if bpy.context.scene.blendermcp_use_sketchfab:
229 | sketchfab_handlers = {
230 | "search_sketchfab_models": self.search_sketchfab_models,
231 | "download_sketchfab_model": self.download_sketchfab_model,
232 | }
233 | handlers.update(sketchfab_handlers)
234 |
235 | handler = handlers.get(cmd_type)
236 | if handler:
237 | try:
238 | print(f"Executing handler for {cmd_type}")
239 | result = handler(**params)
240 | print(f"Handler execution complete")
241 | return {"status": "success", "result": result}
242 | except Exception as e:
243 | print(f"Error in handler: {str(e)}")
244 | traceback.print_exc()
245 | return {"status": "error", "message": str(e)}
246 | else:
247 | return {"status": "error", "message": f"Unknown command type: {cmd_type}"}
248 |
249 |
250 |
251 | def get_scene_info(self):
252 | """Get information about the current Blender scene"""
253 | try:
254 | print("Getting scene info...")
255 | # Simplify the scene info to reduce data size
256 | scene_info = {
257 | "name": bpy.context.scene.name,
258 | "object_count": len(bpy.context.scene.objects),
259 | "objects": [],
260 | "materials_count": len(bpy.data.materials),
261 | }
262 |
263 | # Collect minimal object information (limit to first 10 objects)
264 | for i, obj in enumerate(bpy.context.scene.objects):
265 | if i >= 10: # Reduced from 20 to 10
266 | break
267 |
268 | obj_info = {
269 | "name": obj.name,
270 | "type": obj.type,
271 | # Only include basic location data
272 | "location": [round(float(obj.location.x), 2),
273 | round(float(obj.location.y), 2),
274 | round(float(obj.location.z), 2)],
275 | }
276 | scene_info["objects"].append(obj_info)
277 |
278 | print(f"Scene info collected: {len(scene_info['objects'])} objects")
279 | return scene_info
280 | except Exception as e:
281 | print(f"Error in get_scene_info: {str(e)}")
282 | traceback.print_exc()
283 | return {"error": str(e)}
284 |
285 | @staticmethod
286 | def _get_aabb(obj):
287 | """ Returns the world-space axis-aligned bounding box (AABB) of an object. """
288 | if obj.type != 'MESH':
289 | raise TypeError("Object must be a mesh")
290 |
291 | # Get the bounding box corners in local space
292 | local_bbox_corners = [mathutils.Vector(corner) for corner in obj.bound_box]
293 |
294 | # Convert to world coordinates
295 | world_bbox_corners = [obj.matrix_world @ corner for corner in local_bbox_corners]
296 |
297 | # Compute axis-aligned min/max coordinates
298 | min_corner = mathutils.Vector(map(min, zip(*world_bbox_corners)))
299 | max_corner = mathutils.Vector(map(max, zip(*world_bbox_corners)))
300 |
301 | return [
302 | [*min_corner], [*max_corner]
303 | ]
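    |         # Returned shape: [[min_x, min_y, min_z], [max_x, max_y, max_z]] in world-space coordinates.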
304 |
305 |
306 |
307 | def get_object_info(self, name):
308 | """Get detailed information about a specific object"""
309 | obj = bpy.data.objects.get(name)
310 | if not obj:
311 | raise ValueError(f"Object not found: {name}")
312 |
313 | # Basic object info
314 | obj_info = {
315 | "name": obj.name,
316 | "type": obj.type,
317 | "location": [obj.location.x, obj.location.y, obj.location.z],
318 | "rotation": [obj.rotation_euler.x, obj.rotation_euler.y, obj.rotation_euler.z],
319 | "scale": [obj.scale.x, obj.scale.y, obj.scale.z],
320 | "visible": obj.visible_get(),
321 | "materials": [],
322 | }
323 |
324 | if obj.type == "MESH":
325 | bounding_box = self._get_aabb(obj)
326 | obj_info["world_bounding_box"] = bounding_box
327 |
328 | # Add material slots
329 | for slot in obj.material_slots:
330 | if slot.material:
331 | obj_info["materials"].append(slot.material.name)
332 |
333 | # Add mesh data if applicable
334 | if obj.type == 'MESH' and obj.data:
335 | mesh = obj.data
336 | obj_info["mesh"] = {
337 | "vertices": len(mesh.vertices),
338 | "edges": len(mesh.edges),
339 | "polygons": len(mesh.polygons),
340 | }
341 |
342 | return obj_info
343 |
344 | def get_viewport_screenshot(self, max_size=800, filepath=None, format="png"):
345 | """
346 | Capture a screenshot of the current 3D viewport and save it to the specified path.
347 |
348 | Parameters:
349 | - max_size: Maximum size in pixels for the largest dimension of the image
350 | - filepath: Path where to save the screenshot file
351 | - format: Image format (png, jpg, etc.)
352 |
353 | Returns success/error status
354 | """
355 | try:
356 | if not filepath:
357 | return {"error": "No filepath provided"}
358 |
359 | # Find the active 3D viewport
360 | area = None
361 | for a in bpy.context.screen.areas:
362 | if a.type == 'VIEW_3D':
363 | area = a
364 | break
365 |
366 | if not area:
367 | return {"error": "No 3D viewport found"}
368 |
369 | # Take screenshot with proper context override
370 | with bpy.context.temp_override(area=area):
371 | bpy.ops.screen.screenshot_area(filepath=filepath)
372 |
373 | # Load and resize if needed
374 | img = bpy.data.images.load(filepath)
375 | width, height = img.size
376 |
377 | if max(width, height) > max_size:
378 | scale = max_size / max(width, height)
379 | new_width = int(width * scale)
380 | new_height = int(height * scale)
381 | img.scale(new_width, new_height)
382 |
383 | # Set format and save
384 | img.file_format = format.upper()
385 | img.save()
386 | width, height = new_width, new_height
387 |
388 | # Cleanup Blender image data
389 | bpy.data.images.remove(img)
390 |
391 | return {
392 | "success": True,
393 | "width": width,
394 | "height": height,
395 | "filepath": filepath
396 | }
397 |
398 | except Exception as e:
399 | return {"error": str(e)}
400 |
401 | def execute_code(self, code):
402 | """Execute arbitrary Blender Python code"""
403 | # This is powerful but potentially dangerous - use with caution
404 | try:
405 | # Create a local namespace for execution
406 | namespace = {"bpy": bpy}
407 |
408 | # Capture stdout during execution, and return it as result
409 | capture_buffer = io.StringIO()
410 | with redirect_stdout(capture_buffer):
411 | exec(code, namespace)
412 |
413 | captured_output = capture_buffer.getvalue()
414 | return {"executed": True, "result": captured_output}
415 | except Exception as e:
416 | raise Exception(f"Code execution error: {str(e)}")
417 |
418 |
419 |
420 | def get_polyhaven_categories(self, asset_type):
421 | """Get categories for a specific asset type from Polyhaven"""
422 | try:
423 | if asset_type not in ["hdris", "textures", "models", "all"]:
424 | return {"error": f"Invalid asset type: {asset_type}. Must be one of: hdris, textures, models, all"}
425 |
426 | response = requests.get(f"https://api.polyhaven.com/categories/{asset_type}")
427 | if response.status_code == 200:
428 | return {"categories": response.json()}
429 | else:
430 | return {"error": f"API request failed with status code {response.status_code}"}
431 | except Exception as e:
432 | return {"error": str(e)}
433 |
434 | def search_polyhaven_assets(self, asset_type=None, categories=None):
435 | """Search for assets from Polyhaven with optional filtering"""
436 | try:
437 | url = "https://api.polyhaven.com/assets"
438 | params = {}
439 |
440 | if asset_type and asset_type != "all":
441 | if asset_type not in ["hdris", "textures", "models"]:
442 | return {"error": f"Invalid asset type: {asset_type}. Must be one of: hdris, textures, models, all"}
443 | params["type"] = asset_type
444 |
445 | if categories:
446 | params["categories"] = categories
447 |
448 | response = requests.get(url, params=params)
449 | if response.status_code == 200:
450 | # Limit the response size to avoid overwhelming Blender
451 | assets = response.json()
452 | # Return only the first 20 assets to keep response size manageable
453 | limited_assets = {}
454 | for i, (key, value) in enumerate(assets.items()):
455 | if i >= 20: # Limit to 20 assets
456 | break
457 | limited_assets[key] = value
458 |
459 | return {"assets": limited_assets, "total_count": len(assets), "returned_count": len(limited_assets)}
460 | else:
461 | return {"error": f"API request failed with status code {response.status_code}"}
462 | except Exception as e:
463 | return {"error": str(e)}
464 |
465 | def download_polyhaven_asset(self, asset_id, asset_type, resolution="1k", file_format=None):
466 | try:
467 | # First get the files information
468 | files_response = requests.get(f"https://api.polyhaven.com/files/{asset_id}")
469 | if files_response.status_code != 200:
470 | return {"error": f"Failed to get asset files: {files_response.status_code}"}
471 |
472 | files_data = files_response.json()
473 |
474 | # Handle different asset types
475 | if asset_type == "hdris":
476 | # For HDRIs, download the .hdr or .exr file
477 | if not file_format:
478 | file_format = "hdr" # Default format for HDRIs
479 |
480 | if "hdri" in files_data and resolution in files_data["hdri"] and file_format in files_data["hdri"][resolution]:
481 | file_info = files_data["hdri"][resolution][file_format]
482 | file_url = file_info["url"]
483 |
484 | # For HDRIs, we need to save to a temporary file first
485 | # since Blender can't properly load HDR data directly from memory
486 | with tempfile.NamedTemporaryFile(suffix=f".{file_format}", delete=False) as tmp_file:
487 | # Download the file
488 | response = requests.get(file_url)
489 | if response.status_code != 200:
490 | return {"error": f"Failed to download HDRI: {response.status_code}"}
491 |
492 | tmp_file.write(response.content)
493 | tmp_path = tmp_file.name
494 |
495 | try:
496 | # Create a new world if none exists
497 | if not bpy.data.worlds:
498 | bpy.data.worlds.new("World")
499 |
500 | world = bpy.data.worlds[0]
501 | world.use_nodes = True
502 | node_tree = world.node_tree
503 |
504 | # Clear existing nodes
505 | for node in node_tree.nodes:
506 | node_tree.nodes.remove(node)
507 |
508 | # Create nodes
509 | tex_coord = node_tree.nodes.new(type='ShaderNodeTexCoord')
510 | tex_coord.location = (-800, 0)
511 |
512 | mapping = node_tree.nodes.new(type='ShaderNodeMapping')
513 | mapping.location = (-600, 0)
514 |
515 | # Load the image from the temporary file
516 | env_tex = node_tree.nodes.new(type='ShaderNodeTexEnvironment')
517 | env_tex.location = (-400, 0)
518 | env_tex.image = bpy.data.images.load(tmp_path)
519 |
520 | # Use a color space that exists in all Blender versions
521 | if file_format.lower() == 'exr':
522 | # Try to use Linear color space for EXR files
523 | try:
524 | env_tex.image.colorspace_settings.name = 'Linear'
525 | except:
526 | # Fallback to Non-Color if Linear isn't available
527 | env_tex.image.colorspace_settings.name = 'Non-Color'
528 | else: # hdr
529 | # For HDR files, try these options in order
530 | for color_space in ['Linear', 'Linear Rec.709', 'Non-Color']:
531 | try:
532 | env_tex.image.colorspace_settings.name = color_space
533 | break # Stop if we successfully set a color space
534 | except:
535 | continue
536 |
537 | background = node_tree.nodes.new(type='ShaderNodeBackground')
538 | background.location = (-200, 0)
539 |
540 | output = node_tree.nodes.new(type='ShaderNodeOutputWorld')
541 | output.location = (0, 0)
542 |
543 | # Connect nodes
544 | node_tree.links.new(tex_coord.outputs['Generated'], mapping.inputs['Vector'])
545 | node_tree.links.new(mapping.outputs['Vector'], env_tex.inputs['Vector'])
546 | node_tree.links.new(env_tex.outputs['Color'], background.inputs['Color'])
547 | node_tree.links.new(background.outputs['Background'], output.inputs['Surface'])
548 |
549 | # Set as active world
550 | bpy.context.scene.world = world
551 |
552 | # Clean up temporary file
553 | try:
554 | tempfile._cleanup() # This will clean up all temporary files
555 | except:
556 | pass
557 |
558 | return {
559 | "success": True,
560 | "message": f"HDRI {asset_id} imported successfully",
561 | "image_name": env_tex.image.name
562 | }
563 | except Exception as e:
564 | return {"error": f"Failed to set up HDRI in Blender: {str(e)}"}
565 | else:
566 | return {"error": f"Requested resolution or format not available for this HDRI"}
567 |
568 | elif asset_type == "textures":
569 | if not file_format:
570 | file_format = "jpg" # Default format for textures
571 |
572 | downloaded_maps = {}
573 |
574 | try:
575 | for map_type in files_data:
576 | if map_type not in ["blend", "gltf"]: # Skip non-texture files
577 | if resolution in files_data[map_type] and file_format in files_data[map_type][resolution]:
578 | file_info = files_data[map_type][resolution][file_format]
579 | file_url = file_info["url"]
580 |
581 | # Use NamedTemporaryFile like we do for HDRIs
582 | with tempfile.NamedTemporaryFile(suffix=f".{file_format}", delete=False) as tmp_file:
583 | # Download the file
584 | response = requests.get(file_url)
585 | if response.status_code == 200:
586 | tmp_file.write(response.content)
587 | tmp_path = tmp_file.name
588 |
589 | # Load image from temporary file
590 | image = bpy.data.images.load(tmp_path)
591 | image.name = f"{asset_id}_{map_type}.{file_format}"
592 |
593 | # Pack the image into .blend file
594 | image.pack()
595 |
596 | # Set color space based on map type
597 | if map_type in ['color', 'diffuse', 'albedo']:
598 | try:
599 | image.colorspace_settings.name = 'sRGB'
600 | except:
601 | pass
602 | else:
603 | try:
604 | image.colorspace_settings.name = 'Non-Color'
605 | except:
606 | pass
607 |
608 | downloaded_maps[map_type] = image
609 |
610 | # Clean up temporary file
611 | try:
612 | os.unlink(tmp_path)
613 | except:
614 | pass
615 |
616 | if not downloaded_maps:
617 | return {"error": f"No texture maps found for the requested resolution and format"}
618 |
619 | # Create a new material with the downloaded textures
620 | mat = bpy.data.materials.new(name=asset_id)
621 | mat.use_nodes = True
622 | nodes = mat.node_tree.nodes
623 | links = mat.node_tree.links
624 |
625 | # Clear default nodes
626 | for node in nodes:
627 | nodes.remove(node)
628 |
629 | # Create output node
630 | output = nodes.new(type='ShaderNodeOutputMaterial')
631 | output.location = (300, 0)
632 |
633 | # Create principled BSDF node
634 | principled = nodes.new(type='ShaderNodeBsdfPrincipled')
635 | principled.location = (0, 0)
636 | links.new(principled.outputs[0], output.inputs[0])
637 |
638 | # Add texture nodes based on available maps
639 | tex_coord = nodes.new(type='ShaderNodeTexCoord')
640 | tex_coord.location = (-800, 0)
641 |
642 | mapping = nodes.new(type='ShaderNodeMapping')
643 | mapping.location = (-600, 0)
644 | mapping.vector_type = 'TEXTURE' # Changed from default 'POINT' to 'TEXTURE'
645 | links.new(tex_coord.outputs['UV'], mapping.inputs['Vector'])
646 |
647 | # Position offset for texture nodes
648 | x_pos = -400
649 | y_pos = 300
650 |
651 | # Connect different texture maps
652 | for map_type, image in downloaded_maps.items():
653 | tex_node = nodes.new(type='ShaderNodeTexImage')
654 | tex_node.location = (x_pos, y_pos)
655 | tex_node.image = image
656 |
657 | # Set color space based on map type
658 | if map_type.lower() in ['color', 'diffuse', 'albedo']:
659 | try:
660 | tex_node.image.colorspace_settings.name = 'sRGB'
661 | except:
662 | pass # Use default if sRGB not available
663 | else:
664 | try:
665 | tex_node.image.colorspace_settings.name = 'Non-Color'
666 | except:
667 | pass # Use default if Non-Color not available
668 |
669 | links.new(mapping.outputs['Vector'], tex_node.inputs['Vector'])
670 |
671 | # Connect to appropriate input on Principled BSDF
672 | if map_type.lower() in ['color', 'diffuse', 'albedo']:
673 | links.new(tex_node.outputs['Color'], principled.inputs['Base Color'])
674 | elif map_type.lower() in ['roughness', 'rough']:
675 | links.new(tex_node.outputs['Color'], principled.inputs['Roughness'])
676 | elif map_type.lower() in ['metallic', 'metalness', 'metal']:
677 | links.new(tex_node.outputs['Color'], principled.inputs['Metallic'])
678 | elif map_type.lower() in ['normal', 'nor']:
679 | # Add normal map node
680 | normal_map = nodes.new(type='ShaderNodeNormalMap')
681 | normal_map.location = (x_pos + 200, y_pos)
682 | links.new(tex_node.outputs['Color'], normal_map.inputs['Color'])
683 | links.new(normal_map.outputs['Normal'], principled.inputs['Normal'])
684 | elif map_type in ['displacement', 'disp', 'height']:
685 | # Add displacement node
686 | disp_node = nodes.new(type='ShaderNodeDisplacement')
687 | disp_node.location = (x_pos + 200, y_pos - 200)
688 | links.new(tex_node.outputs['Color'], disp_node.inputs['Height'])
689 | links.new(disp_node.outputs['Displacement'], output.inputs['Displacement'])
690 |
691 | y_pos -= 250
692 |
693 | return {
694 | "success": True,
695 | "message": f"Texture {asset_id} imported as material",
696 | "material": mat.name,
697 | "maps": list(downloaded_maps.keys())
698 | }
699 |
700 | except Exception as e:
701 | return {"error": f"Failed to process textures: {str(e)}"}
702 |
703 | elif asset_type == "models":
704 | # For models, prefer glTF format if available
705 | if not file_format:
706 | file_format = "gltf" # Default format for models
707 |
708 | if file_format in files_data and resolution in files_data[file_format]:
709 | file_info = files_data[file_format][resolution][file_format]
710 | file_url = file_info["url"]
711 |
712 | # Create a temporary directory to store the model and its dependencies
713 | temp_dir = tempfile.mkdtemp()
714 | main_file_path = ""
715 |
716 | try:
717 | # Download the main model file
718 | main_file_name = file_url.split("/")[-1]
719 | main_file_path = os.path.join(temp_dir, main_file_name)
720 |
721 | response = requests.get(file_url)
722 | if response.status_code != 200:
723 | return {"error": f"Failed to download model: {response.status_code}"}
724 |
725 | with open(main_file_path, "wb") as f:
726 | f.write(response.content)
727 |
728 | # Check for included files and download them
729 | if "include" in file_info and file_info["include"]:
730 | for include_path, include_info in file_info["include"].items():
731 |                         # Get the URL for the included file
732 | include_url = include_info["url"]
733 |
734 | # Create the directory structure for the included file
735 | include_file_path = os.path.join(temp_dir, include_path)
736 | os.makedirs(os.path.dirname(include_file_path), exist_ok=True)
737 |
738 | # Download the included file
739 | include_response = requests.get(include_url)
740 | if include_response.status_code == 200:
741 | with open(include_file_path, "wb") as f:
742 | f.write(include_response.content)
743 | else:
744 | print(f"Failed to download included file: {include_path}")
745 |
746 | # Import the model into Blender
747 | if file_format == "gltf" or file_format == "glb":
748 | bpy.ops.import_scene.gltf(filepath=main_file_path)
749 | elif file_format == "fbx":
750 | bpy.ops.import_scene.fbx(filepath=main_file_path)
751 | elif file_format == "obj":
752 | bpy.ops.import_scene.obj(filepath=main_file_path)
753 | elif file_format == "blend":
754 | # For blend files, we need to append or link
755 | with bpy.data.libraries.load(main_file_path, link=False) as (data_from, data_to):
756 | data_to.objects = data_from.objects
757 |
758 | # Link the objects to the scene
759 | for obj in data_to.objects:
760 | if obj is not None:
761 | bpy.context.collection.objects.link(obj)
762 | else:
763 | return {"error": f"Unsupported model format: {file_format}"}
764 |
765 | # Get the names of imported objects
766 | imported_objects = [obj.name for obj in bpy.context.selected_objects]
767 |
768 | return {
769 | "success": True,
770 | "message": f"Model {asset_id} imported successfully",
771 | "imported_objects": imported_objects
772 | }
773 | except Exception as e:
774 | return {"error": f"Failed to import model: {str(e)}"}
775 | finally:
776 | # Clean up temporary directory
777 | with suppress(Exception):
778 | shutil.rmtree(temp_dir)
779 | else:
780 |                 return {"error": f"Format '{file_format}' at resolution '{resolution}' is not available for this model"}
781 |
782 | else:
783 | return {"error": f"Unsupported asset type: {asset_type}"}
784 |
785 | except Exception as e:
786 | return {"error": f"Failed to download asset: {str(e)}"}
787 |
788 | def set_texture(self, object_name, texture_id):
789 | """Apply a previously downloaded Polyhaven texture to an object by creating a new material"""
790 | try:
791 | # Get the object
792 | obj = bpy.data.objects.get(object_name)
793 | if not obj:
794 | return {"error": f"Object not found: {object_name}"}
795 |
796 | # Make sure object can accept materials
797 | if not hasattr(obj, 'data') or not hasattr(obj.data, 'materials'):
798 | return {"error": f"Object {object_name} cannot accept materials"}
799 |
800 | # Find all images related to this texture and ensure they're properly loaded
801 | texture_images = {}
802 | for img in bpy.data.images:
803 | if img.name.startswith(texture_id + "_"):
804 | # Extract the map type from the image name
805 | map_type = img.name.split('_')[-1].split('.')[0]
806 |
807 | # Force a reload of the image
808 | img.reload()
809 |
810 | # Ensure proper color space
811 | if map_type.lower() in ['color', 'diffuse', 'albedo']:
812 | try:
813 | img.colorspace_settings.name = 'sRGB'
814 |                     except Exception:
815 | pass
816 | else:
817 | try:
818 | img.colorspace_settings.name = 'Non-Color'
819 |                     except Exception:
820 | pass
821 |
822 | # Ensure the image is packed
823 | if not img.packed_file:
824 | img.pack()
825 |
826 | texture_images[map_type] = img
827 | print(f"Loaded texture map: {map_type} - {img.name}")
828 |
829 | # Debug info
830 | print(f"Image size: {img.size[0]}x{img.size[1]}")
831 | print(f"Color space: {img.colorspace_settings.name}")
832 | print(f"File format: {img.file_format}")
833 | print(f"Is packed: {bool(img.packed_file)}")
834 |
835 | if not texture_images:
836 | return {"error": f"No texture images found for: {texture_id}. Please download the texture first."}
837 |
838 | # Create a new material
839 | new_mat_name = f"{texture_id}_material_{object_name}"
840 |
841 | # Remove any existing material with this name to avoid conflicts
842 | existing_mat = bpy.data.materials.get(new_mat_name)
843 | if existing_mat:
844 | bpy.data.materials.remove(existing_mat)
845 |
846 | new_mat = bpy.data.materials.new(name=new_mat_name)
847 | new_mat.use_nodes = True
848 |
849 | # Set up the material nodes
850 | nodes = new_mat.node_tree.nodes
851 | links = new_mat.node_tree.links
852 |
853 | # Clear default nodes
854 | nodes.clear()
855 |
856 | # Create output node
857 | output = nodes.new(type='ShaderNodeOutputMaterial')
858 | output.location = (600, 0)
859 |
860 | # Create principled BSDF node
861 | principled = nodes.new(type='ShaderNodeBsdfPrincipled')
862 | principled.location = (300, 0)
863 | links.new(principled.outputs[0], output.inputs[0])
864 |
865 | # Add texture nodes based on available maps
866 | tex_coord = nodes.new(type='ShaderNodeTexCoord')
867 | tex_coord.location = (-800, 0)
868 |
869 | mapping = nodes.new(type='ShaderNodeMapping')
870 | mapping.location = (-600, 0)
871 | mapping.vector_type = 'TEXTURE' # Changed from default 'POINT' to 'TEXTURE'
872 | links.new(tex_coord.outputs['UV'], mapping.inputs['Vector'])
873 |
874 | # Position offset for texture nodes
875 | x_pos = -400
876 | y_pos = 300
877 |
878 | # Connect different texture maps
879 | for map_type, image in texture_images.items():
880 | tex_node = nodes.new(type='ShaderNodeTexImage')
881 | tex_node.location = (x_pos, y_pos)
882 | tex_node.image = image
883 |
884 | # Set color space based on map type
885 | if map_type.lower() in ['color', 'diffuse', 'albedo']:
886 | try:
887 | tex_node.image.colorspace_settings.name = 'sRGB'
888 |                     except Exception:
889 | pass # Use default if sRGB not available
890 | else:
891 | try:
892 | tex_node.image.colorspace_settings.name = 'Non-Color'
893 |                     except Exception:
894 | pass # Use default if Non-Color not available
895 |
896 | links.new(mapping.outputs['Vector'], tex_node.inputs['Vector'])
897 |
898 | # Connect to appropriate input on Principled BSDF
899 | if map_type.lower() in ['color', 'diffuse', 'albedo']:
900 | links.new(tex_node.outputs['Color'], principled.inputs['Base Color'])
901 | elif map_type.lower() in ['roughness', 'rough']:
902 | links.new(tex_node.outputs['Color'], principled.inputs['Roughness'])
903 | elif map_type.lower() in ['metallic', 'metalness', 'metal']:
904 | links.new(tex_node.outputs['Color'], principled.inputs['Metallic'])
905 | elif map_type.lower() in ['normal', 'nor', 'dx', 'gl']:
906 | # Add normal map node
907 | normal_map = nodes.new(type='ShaderNodeNormalMap')
908 | normal_map.location = (x_pos + 200, y_pos)
909 | links.new(tex_node.outputs['Color'], normal_map.inputs['Color'])
910 | links.new(normal_map.outputs['Normal'], principled.inputs['Normal'])
911 | elif map_type.lower() in ['displacement', 'disp', 'height']:
912 | # Add displacement node
913 | disp_node = nodes.new(type='ShaderNodeDisplacement')
914 | disp_node.location = (x_pos + 200, y_pos - 200)
915 | disp_node.inputs['Scale'].default_value = 0.1 # Reduce displacement strength
916 | links.new(tex_node.outputs['Color'], disp_node.inputs['Height'])
917 | links.new(disp_node.outputs['Displacement'], output.inputs['Displacement'])
918 |
919 | y_pos -= 250
920 |
921 |             # Second pass: rebuild the connections from the texture nodes so packed ARM/AO maps can be handled (links made here supersede the direct links created above)
922 | texture_nodes = {}
923 |
924 | # First find all texture nodes and store them by map type
925 | for node in nodes:
926 | if node.type == 'TEX_IMAGE' and node.image:
927 | for map_type, image in texture_images.items():
928 | if node.image == image:
929 | texture_nodes[map_type] = node
930 | break
931 |
932 | # Now connect everything using the nodes instead of images
933 | # Handle base color (diffuse)
934 | for map_name in ['color', 'diffuse', 'albedo']:
935 | if map_name in texture_nodes:
936 | links.new(texture_nodes[map_name].outputs['Color'], principled.inputs['Base Color'])
937 | print(f"Connected {map_name} to Base Color")
938 | break
939 |
940 | # Handle roughness
941 | for map_name in ['roughness', 'rough']:
942 | if map_name in texture_nodes:
943 | links.new(texture_nodes[map_name].outputs['Color'], principled.inputs['Roughness'])
944 | print(f"Connected {map_name} to Roughness")
945 | break
946 |
947 | # Handle metallic
948 | for map_name in ['metallic', 'metalness', 'metal']:
949 | if map_name in texture_nodes:
950 | links.new(texture_nodes[map_name].outputs['Color'], principled.inputs['Metallic'])
951 | print(f"Connected {map_name} to Metallic")
952 | break
953 |
954 | # Handle normal maps
955 | for map_name in ['gl', 'dx', 'nor']:
956 | if map_name in texture_nodes:
957 | normal_map_node = nodes.new(type='ShaderNodeNormalMap')
958 | normal_map_node.location = (100, 100)
959 | links.new(texture_nodes[map_name].outputs['Color'], normal_map_node.inputs['Color'])
960 | links.new(normal_map_node.outputs['Normal'], principled.inputs['Normal'])
961 | print(f"Connected {map_name} to Normal")
962 | break
963 |
964 | # Handle displacement
965 | for map_name in ['displacement', 'disp', 'height']:
966 | if map_name in texture_nodes:
967 | disp_node = nodes.new(type='ShaderNodeDisplacement')
968 | disp_node.location = (300, -200)
969 | disp_node.inputs['Scale'].default_value = 0.1 # Reduce displacement strength
970 | links.new(texture_nodes[map_name].outputs['Color'], disp_node.inputs['Height'])
971 | links.new(disp_node.outputs['Displacement'], output.inputs['Displacement'])
972 | print(f"Connected {map_name} to Displacement")
973 | break
974 |
975 | # Handle ARM texture (Ambient Occlusion, Roughness, Metallic)
976 | if 'arm' in texture_nodes:
977 | separate_rgb = nodes.new(type='ShaderNodeSeparateRGB')
978 | separate_rgb.location = (-200, -100)
979 | links.new(texture_nodes['arm'].outputs['Color'], separate_rgb.inputs['Image'])
980 |
981 | # Connect Roughness (G) if no dedicated roughness map
982 | if not any(map_name in texture_nodes for map_name in ['roughness', 'rough']):
983 | links.new(separate_rgb.outputs['G'], principled.inputs['Roughness'])
984 | print("Connected ARM.G to Roughness")
985 |
986 | # Connect Metallic (B) if no dedicated metallic map
987 | if not any(map_name in texture_nodes for map_name in ['metallic', 'metalness', 'metal']):
988 | links.new(separate_rgb.outputs['B'], principled.inputs['Metallic'])
989 | print("Connected ARM.B to Metallic")
990 |
991 | # For AO (R channel), multiply with base color if we have one
992 | base_color_node = None
993 | for map_name in ['color', 'diffuse', 'albedo']:
994 | if map_name in texture_nodes:
995 | base_color_node = texture_nodes[map_name]
996 | break
997 |
998 | if base_color_node:
999 | mix_node = nodes.new(type='ShaderNodeMixRGB')
1000 | mix_node.location = (100, 200)
1001 | mix_node.blend_type = 'MULTIPLY'
1002 | mix_node.inputs['Fac'].default_value = 0.8 # 80% influence
1003 |
1004 | # Disconnect direct connection to base color
1005 | for link in base_color_node.outputs['Color'].links:
1006 | if link.to_socket == principled.inputs['Base Color']:
1007 | links.remove(link)
1008 |
1009 | # Connect through the mix node
1010 | links.new(base_color_node.outputs['Color'], mix_node.inputs[1])
1011 | links.new(separate_rgb.outputs['R'], mix_node.inputs[2])
1012 | links.new(mix_node.outputs['Color'], principled.inputs['Base Color'])
1013 | print("Connected ARM.R to AO mix with Base Color")
1014 |
1015 | # Handle AO (Ambient Occlusion) if separate
1016 | if 'ao' in texture_nodes:
1017 | base_color_node = None
1018 | for map_name in ['color', 'diffuse', 'albedo']:
1019 | if map_name in texture_nodes:
1020 | base_color_node = texture_nodes[map_name]
1021 | break
1022 |
1023 | if base_color_node:
1024 | mix_node = nodes.new(type='ShaderNodeMixRGB')
1025 | mix_node.location = (100, 200)
1026 | mix_node.blend_type = 'MULTIPLY'
1027 | mix_node.inputs['Fac'].default_value = 0.8 # 80% influence
1028 |
1029 | # Disconnect direct connection to base color
1030 | for link in base_color_node.outputs['Color'].links:
1031 | if link.to_socket == principled.inputs['Base Color']:
1032 | links.remove(link)
1033 |
1034 | # Connect through the mix node
1035 | links.new(base_color_node.outputs['Color'], mix_node.inputs[1])
1036 | links.new(texture_nodes['ao'].outputs['Color'], mix_node.inputs[2])
1037 | links.new(mix_node.outputs['Color'], principled.inputs['Base Color'])
1038 | print("Connected AO to mix with Base Color")
1039 |
1040 | # CRITICAL: Make sure to clear all existing materials from the object
1041 | while len(obj.data.materials) > 0:
1042 | obj.data.materials.pop(index=0)
1043 |
1044 | # Assign the new material to the object
1045 | obj.data.materials.append(new_mat)
1046 |
1047 | # CRITICAL: Make the object active and select it
1048 | bpy.context.view_layer.objects.active = obj
1049 | obj.select_set(True)
1050 |
1051 | # CRITICAL: Force Blender to update the material
1052 | bpy.context.view_layer.update()
1053 |
1054 | # Get the list of texture maps
1055 | texture_maps = list(texture_images.keys())
1056 |
1057 | # Get info about texture nodes for debugging
1058 | material_info = {
1059 | "name": new_mat.name,
1060 | "has_nodes": new_mat.use_nodes,
1061 | "node_count": len(new_mat.node_tree.nodes),
1062 | "texture_nodes": []
1063 | }
1064 |
1065 | for node in new_mat.node_tree.nodes:
1066 | if node.type == 'TEX_IMAGE' and node.image:
1067 | connections = []
1068 | for output in node.outputs:
1069 | for link in output.links:
1070 | connections.append(f"{output.name} → {link.to_node.name}.{link.to_socket.name}")
1071 |
1072 | material_info["texture_nodes"].append({
1073 | "name": node.name,
1074 | "image": node.image.name,
1075 | "colorspace": node.image.colorspace_settings.name,
1076 | "connections": connections
1077 | })
1078 |
1079 | return {
1080 | "success": True,
1081 | "message": f"Created new material and applied texture {texture_id} to {object_name}",
1082 | "material": new_mat.name,
1083 | "maps": texture_maps,
1084 | "material_info": material_info
1085 | }
1086 |
1087 | except Exception as e:
1088 | print(f"Error in set_texture: {str(e)}")
1089 | traceback.print_exc()
1090 | return {"error": f"Failed to apply texture: {str(e)}"}
1091 |
1092 | def get_polyhaven_status(self):
1093 | """Get the current status of PolyHaven integration"""
1094 | enabled = bpy.context.scene.blendermcp_use_polyhaven
1095 | if enabled:
1096 | return {"enabled": True, "message": "PolyHaven integration is enabled and ready to use."}
1097 | else:
1098 | return {
1099 | "enabled": False,
1100 | "message": """PolyHaven integration is currently disabled. To enable it:
1101 | 1. In the 3D Viewport, find the BlenderMCP panel in the sidebar (press N if hidden)
1102 | 2. Check the 'Use assets from Poly Haven' checkbox
1103 | 3. Restart the connection to Claude"""
1104 | }
1105 |
1106 | #region Hyper3D
1107 | def get_hyper3d_status(self):
1108 | """Get the current status of Hyper3D Rodin integration"""
1109 | enabled = bpy.context.scene.blendermcp_use_hyper3d
1110 | if enabled:
1111 | if not bpy.context.scene.blendermcp_hyper3d_api_key:
1112 | return {
1113 | "enabled": False,
1114 |                     "message": """Hyper3D Rodin integration is currently enabled, but no API key has been set. To enable it:
1115 | 1. In the 3D Viewport, find the BlenderMCP panel in the sidebar (press N if hidden)
1116 | 2. Keep the 'Use Hyper3D Rodin 3D model generation' checkbox checked
1117 | 3. Choose the right platform and fill in the API Key
1118 | 4. Restart the connection to Claude"""
1119 | }
1120 | mode = bpy.context.scene.blendermcp_hyper3d_mode
1121 | message = f"Hyper3D Rodin integration is enabled and ready to use. Mode: {mode}. " + \
1122 | f"Key type: {'private' if bpy.context.scene.blendermcp_hyper3d_api_key != RODIN_FREE_TRIAL_KEY else 'free_trial'}"
1123 | return {
1124 | "enabled": True,
1125 | "message": message
1126 | }
1127 | else:
1128 | return {
1129 | "enabled": False,
1130 | "message": """Hyper3D Rodin integration is currently disabled. To enable it:
1131 | 1. In the 3D Viewport, find the BlenderMCP panel in the sidebar (press N if hidden)
1132 | 2. Check the 'Use Hyper3D Rodin 3D model generation' checkbox
1133 | 3. Restart the connection to Claude"""
1134 | }
1135 |
1136 | def create_rodin_job(self, *args, **kwargs):
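     |         """Dispatch job creation to the Rodin backend selected in the add-on settings (hyper3d.ai main site or fal.ai)."""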
1137 | match bpy.context.scene.blendermcp_hyper3d_mode:
1138 | case "MAIN_SITE":
1139 | return self.create_rodin_job_main_site(*args, **kwargs)
1140 | case "FAL_AI":
1141 | return self.create_rodin_job_fal_ai(*args, **kwargs)
1142 | case _:
1143 |                 return "Error: Unknown Hyper3D Rodin mode!"
1144 |
1145 | def create_rodin_job_main_site(
1146 | self,
1147 | text_prompt: str=None,
1148 | images: list[tuple[str, str]]=None,
1149 | bbox_condition=None
1150 | ):
1151 |         """Call Rodin API, get the job uuid and subscription key"""
1152 |         try:
1153 |             if images is None:
1154 |                 images = []
1155 |             files = [
1156 | *[("images", (f"{i:04d}{img_suffix}", img)) for i, (img_suffix, img) in enumerate(images)],
1157 | ("tier", (None, "Sketch")),
1158 | ("mesh_mode", (None, "Raw")),
1159 | ]
1160 | if text_prompt:
1161 | files.append(("prompt", (None, text_prompt)))
1162 | if bbox_condition:
1163 | files.append(("bbox_condition", (None, json.dumps(bbox_condition))))
1164 | response = requests.post(
1165 | "https://hyperhuman.deemos.com/api/v2/rodin",
1166 | headers={
1167 | "Authorization": f"Bearer {bpy.context.scene.blendermcp_hyper3d_api_key}",
1168 | },
1169 | files=files
1170 | )
1171 | data = response.json()
1172 | return data
1173 | except Exception as e:
1174 | return {"error": str(e)}
1175 |
1176 | def create_rodin_job_fal_ai(
1177 | self,
1178 | text_prompt: str=None,
1179 | images: list[tuple[str, str]]=None,
1180 | bbox_condition=None
1181 | ):
1182 | try:
1183 | req_data = {
1184 | "tier": "Sketch",
1185 | }
1186 | if images:
1187 | req_data["input_image_urls"] = images
1188 | if text_prompt:
1189 | req_data["prompt"] = text_prompt
1190 | if bbox_condition:
1191 | req_data["bbox_condition"] = bbox_condition
1192 | response = requests.post(
1193 | "https://queue.fal.run/fal-ai/hyper3d/rodin",
1194 | headers={
1195 | "Authorization": f"Key {bpy.context.scene.blendermcp_hyper3d_api_key}",
1196 | "Content-Type": "application/json",
1197 | },
1198 | json=req_data
1199 | )
1200 | data = response.json()
1201 | return data
1202 | except Exception as e:
1203 | return {"error": str(e)}
1204 |
1205 | def poll_rodin_job_status(self, *args, **kwargs):
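     |         """Dispatch job-status polling to the Rodin backend selected in the add-on settings."""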
1206 | match bpy.context.scene.blendermcp_hyper3d_mode:
1207 | case "MAIN_SITE":
1208 | return self.poll_rodin_job_status_main_site(*args, **kwargs)
1209 | case "FAL_AI":
1210 | return self.poll_rodin_job_status_fal_ai(*args, **kwargs)
1211 | case _:
1212 |                 return "Error: Unknown Hyper3D Rodin mode!"
1213 |
1214 | def poll_rodin_job_status_main_site(self, subscription_key: str):
1215 | """Call the job status API to get the job status"""
1216 | response = requests.post(
1217 | "https://hyperhuman.deemos.com/api/v2/status",
1218 | headers={
1219 | "Authorization": f"Bearer {bpy.context.scene.blendermcp_hyper3d_api_key}",
1220 | },
1221 | json={
1222 | "subscription_key": subscription_key,
1223 | },
1224 | )
1225 | data = response.json()
1226 | return {
1227 | "status_list": [i["status"] for i in data["jobs"]]
1228 | }
1229 |
1230 | def poll_rodin_job_status_fal_ai(self, request_id: str):
1231 | """Call the job status API to get the job status"""
1232 | response = requests.get(
1233 | f"https://queue.fal.run/fal-ai/hyper3d/requests/{request_id}/status",
1234 | headers={
1235 |                 "Authorization": f"Key {bpy.context.scene.blendermcp_hyper3d_api_key}",
1236 | },
1237 | )
1238 | data = response.json()
1239 | return data
1240 |
1241 | @staticmethod
1242 | def _clean_imported_glb(filepath, mesh_name=None):
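     |         """Import a GLB file, unwrap a single Empty-plus-mesh hierarchy if present, optionally rename the mesh, and return it."""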
1243 | # Get the set of existing objects before import
1244 | existing_objects = set(bpy.data.objects)
1245 |
1246 | # Import the GLB file
1247 | bpy.ops.import_scene.gltf(filepath=filepath)
1248 |
1249 | # Ensure the context is updated
1250 | bpy.context.view_layer.update()
1251 |
1252 | # Get all imported objects
1253 | imported_objects = list(set(bpy.data.objects) - existing_objects)
1254 | # imported_objects = [obj for obj in bpy.context.view_layer.objects if obj.select_get()]
1255 |
1256 | if not imported_objects:
1257 | print("Error: No objects were imported.")
1258 | return
1259 |
1260 | # Identify the mesh object
1261 | mesh_obj = None
1262 |
1263 | if len(imported_objects) == 1 and imported_objects[0].type == 'MESH':
1264 | mesh_obj = imported_objects[0]
1265 | print("Single mesh imported, no cleanup needed.")
1266 | else:
1267 | if len(imported_objects) == 2:
1268 | empty_objs = [i for i in imported_objects if i.type == "EMPTY"]
1269 | if len(empty_objs) != 1:
1270 | print("Error: Expected an empty node with one mesh child or a single mesh object.")
1271 | return
1272 | parent_obj = empty_objs.pop()
1273 | if len(parent_obj.children) == 1:
1274 | potential_mesh = parent_obj.children[0]
1275 | if potential_mesh.type == 'MESH':
1276 | print("GLB structure confirmed: Empty node with one mesh child.")
1277 |
1278 | # Unparent the mesh from the empty node
1279 | potential_mesh.parent = None
1280 |
1281 | # Remove the empty node
1282 | bpy.data.objects.remove(parent_obj)
1283 | print("Removed empty node, keeping only the mesh.")
1284 |
1285 | mesh_obj = potential_mesh
1286 | else:
1287 | print("Error: Child is not a mesh object.")
1288 | return
1289 | else:
1290 | print("Error: Expected an empty node with one mesh child or a single mesh object.")
1291 | return
1292 | else:
1293 | print("Error: Expected an empty node with one mesh child or a single mesh object.")
1294 | return
1295 |
1296 | # Rename the mesh if needed
1297 | try:
1298 |             if mesh_obj and mesh_name:
1299 |                 mesh_obj.name = mesh_name
1300 |                 if mesh_obj.data:
1301 |                     mesh_obj.data.name = mesh_name
1302 |                 print(f"Mesh renamed to: {mesh_name}")
1303 |         except Exception as e:
1304 |             print(f"Renaming failed ({e}); keeping the imported name.")
1305 |
1306 | return mesh_obj
1307 |
1308 | def import_generated_asset(self, *args, **kwargs):
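     |         """Dispatch generated-asset import to the Rodin backend selected in the add-on settings."""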
1309 | match bpy.context.scene.blendermcp_hyper3d_mode:
1310 | case "MAIN_SITE":
1311 | return self.import_generated_asset_main_site(*args, **kwargs)
1312 | case "FAL_AI":
1313 | return self.import_generated_asset_fal_ai(*args, **kwargs)
1314 | case _:
1315 |                 return "Error: Unknown Hyper3D Rodin mode!"
1316 |
1317 | def import_generated_asset_main_site(self, task_uuid: str, name: str):
1318 | """Fetch the generated asset, import into blender"""
1319 | response = requests.post(
1320 | "https://hyperhuman.deemos.com/api/v2/download",
1321 | headers={
1322 | "Authorization": f"Bearer {bpy.context.scene.blendermcp_hyper3d_api_key}",
1323 | },
1324 | json={
1325 | 'task_uuid': task_uuid
1326 | }
1327 | )
1328 | data_ = response.json()
1329 | temp_file = None
1330 | for i in data_["list"]:
1331 | if i["name"].endswith(".glb"):
1332 | temp_file = tempfile.NamedTemporaryFile(
1333 | delete=False,
1334 | prefix=task_uuid,
1335 | suffix=".glb",
1336 | )
1337 |
1338 | try:
1339 | # Download the content
1340 | response = requests.get(i["url"], stream=True)
1341 | response.raise_for_status() # Raise an exception for HTTP errors
1342 |
1343 | # Write the content to the temporary file
1344 | for chunk in response.iter_content(chunk_size=8192):
1345 | temp_file.write(chunk)
1346 |
1347 | # Close the file
1348 | temp_file.close()
1349 |
1350 | except Exception as e:
1351 | # Clean up the file if there's an error
1352 | temp_file.close()
1353 | os.unlink(temp_file.name)
1354 | return {"succeed": False, "error": str(e)}
1355 |
1356 | break
1357 | else:
1358 |             return {"succeed": False, "error": "No .glb file found in the task output. Make sure all jobs of the task have finished, then try again."}
1359 |
1360 | try:
1361 | obj = self._clean_imported_glb(
1362 | filepath=temp_file.name,
1363 | mesh_name=name
1364 | )
1365 | result = {
1366 | "name": obj.name,
1367 | "type": obj.type,
1368 | "location": [obj.location.x, obj.location.y, obj.location.z],
1369 | "rotation": [obj.rotation_euler.x, obj.rotation_euler.y, obj.rotation_euler.z],
1370 | "scale": [obj.scale.x, obj.scale.y, obj.scale.z],
1371 | }
1372 |
1373 | if obj.type == "MESH":
1374 | bounding_box = self._get_aabb(obj)
1375 | result["world_bounding_box"] = bounding_box
1376 |
1377 | return {
1378 | "succeed": True, **result
1379 | }
1380 | except Exception as e:
1381 | return {"succeed": False, "error": str(e)}
1382 |
1383 | def import_generated_asset_fal_ai(self, request_id: str, name: str):
1384 | """Fetch the generated asset, import into blender"""
1385 | response = requests.get(
1386 | f"https://queue.fal.run/fal-ai/hyper3d/requests/{request_id}",
1387 | headers={
1388 | "Authorization": f"Key {bpy.context.scene.blendermcp_hyper3d_api_key}",
1389 | }
1390 | )
1391 | data_ = response.json()
1392 | temp_file = None
1393 |
1394 | temp_file = tempfile.NamedTemporaryFile(
1395 | delete=False,
1396 | prefix=request_id,
1397 | suffix=".glb",
1398 | )
1399 |
1400 | try:
1401 | # Download the content
1402 | response = requests.get(data_["model_mesh"]["url"], stream=True)
1403 | response.raise_for_status() # Raise an exception for HTTP errors
1404 |
1405 | # Write the content to the temporary file
1406 | for chunk in response.iter_content(chunk_size=8192):
1407 | temp_file.write(chunk)
1408 |
1409 | # Close the file
1410 | temp_file.close()
1411 |
1412 | except Exception as e:
1413 | # Clean up the file if there's an error
1414 | temp_file.close()
1415 | os.unlink(temp_file.name)
1416 | return {"succeed": False, "error": str(e)}
1417 |
1418 | try:
1419 | obj = self._clean_imported_glb(
1420 | filepath=temp_file.name,
1421 | mesh_name=name
1422 | )
1423 | result = {
1424 | "name": obj.name,
1425 | "type": obj.type,
1426 | "location": [obj.location.x, obj.location.y, obj.location.z],
1427 | "rotation": [obj.rotation_euler.x, obj.rotation_euler.y, obj.rotation_euler.z],
1428 | "scale": [obj.scale.x, obj.scale.y, obj.scale.z],
1429 | }
1430 |
1431 | if obj.type == "MESH":
1432 | bounding_box = self._get_aabb(obj)
1433 | result["world_bounding_box"] = bounding_box
1434 |
1435 | return {
1436 | "succeed": True, **result
1437 | }
1438 | except Exception as e:
1439 | return {"succeed": False, "error": str(e)}
1440 | #endregion
1441 |
1442 | #region Sketchfab API
1443 | def get_sketchfab_status(self):
1444 | """Get the current status of Sketchfab integration"""
1445 | enabled = bpy.context.scene.blendermcp_use_sketchfab
1446 | api_key = bpy.context.scene.blendermcp_sketchfab_api_key
1447 |
1448 |         # Test the API key only if the integration is enabled and a key is present
1449 |         if enabled and api_key:
1450 | try:
1451 | headers = {
1452 | "Authorization": f"Token {api_key}"
1453 | }
1454 |
1455 | response = requests.get(
1456 | "https://api.sketchfab.com/v3/me",
1457 | headers=headers,
1458 | timeout=30 # Add timeout of 30 seconds
1459 | )
1460 |
1461 | if response.status_code == 200:
1462 | user_data = response.json()
1463 | username = user_data.get("username", "Unknown user")
1464 | return {
1465 | "enabled": True,
1466 | "message": f"Sketchfab integration is enabled and ready to use. Logged in as: {username}"
1467 | }
1468 | else:
1469 | return {
1470 | "enabled": False,
1471 | "message": f"Sketchfab API key seems invalid. Status code: {response.status_code}"
1472 | }
1473 | except requests.exceptions.Timeout:
1474 | return {
1475 | "enabled": False,
1476 | "message": "Timeout connecting to Sketchfab API. Check your internet connection."
1477 | }
1478 | except Exception as e:
1479 | return {
1480 | "enabled": False,
1481 | "message": f"Error testing Sketchfab API key: {str(e)}"
1482 | }
1483 |
1484 | if enabled and api_key:
1485 | return {"enabled": True, "message": "Sketchfab integration is enabled and ready to use."}
1486 | elif enabled and not api_key:
1487 | return {
1488 | "enabled": False,
1489 |                 "message": """Sketchfab integration is currently enabled, but no API key has been set. To enable it:
1490 | 1. In the 3D Viewport, find the BlenderMCP panel in the sidebar (press N if hidden)
1491 | 2. Keep the 'Use assets from Sketchfab' checkbox checked
1492 | 3. Enter your Sketchfab API Key
1493 | 4. Restart the connection to Claude"""
1494 | }
1495 | else:
1496 | return {
1497 | "enabled": False,
1498 | "message": """Sketchfab integration is currently disabled. To enable it:
1499 | 1. In the 3D Viewport, find the BlenderMCP panel in the sidebar (press N if hidden)
1500 | 2. Check the 'Use assets from Sketchfab' checkbox
1501 | 3. Enter your Sketchfab API Key
1502 | 4. Restart the connection to Claude"""
1503 | }
1504 |
1505 | def search_sketchfab_models(self, query, categories=None, count=20, downloadable=True):
1506 | """Search for models on Sketchfab based on query and optional filters"""
1507 | try:
1508 | api_key = bpy.context.scene.blendermcp_sketchfab_api_key
1509 | if not api_key:
1510 | return {"error": "Sketchfab API key is not configured"}
1511 |
1512 | # Build search parameters with exact fields from Sketchfab API docs
1513 | params = {
1514 | "type": "models",
1515 | "q": query,
1516 | "count": count,
1517 | "downloadable": downloadable,
1518 | "archives_flavours": False
1519 | }
1520 |
1521 | if categories:
1522 | params["categories"] = categories
1523 |
1524 | # Make API request to Sketchfab search endpoint
1525 | # The proper format according to Sketchfab API docs for API key auth
1526 | headers = {
1527 | "Authorization": f"Token {api_key}"
1528 | }
1529 |
1530 |
1531 | # Use the search endpoint as specified in the API documentation
1532 | response = requests.get(
1533 | "https://api.sketchfab.com/v3/search",
1534 | headers=headers,
1535 | params=params,
1536 | timeout=30 # Add timeout of 30 seconds
1537 | )
1538 |
1539 | if response.status_code == 401:
1540 | return {"error": "Authentication failed (401). Check your API key."}
1541 |
1542 | if response.status_code != 200:
1543 | return {"error": f"API request failed with status code {response.status_code}"}
1544 |
1545 | response_data = response.json()
1546 |
1547 | # Safety check on the response structure
1548 | if response_data is None:
1549 | return {"error": "Received empty response from Sketchfab API"}
1550 |
1551 | # Handle 'results' potentially missing from response
1552 | results = response_data.get("results", [])
1553 | if not isinstance(results, list):
1554 | return {"error": f"Unexpected response format from Sketchfab API: {response_data}"}
1555 |
1556 | return response_data
1557 |
1558 | except requests.exceptions.Timeout:
1559 | return {"error": "Request timed out. Check your internet connection."}
1560 | except json.JSONDecodeError as e:
1561 | return {"error": f"Invalid JSON response from Sketchfab API: {str(e)}"}
1562 | except Exception as e:
1563 | import traceback
1564 | traceback.print_exc()
1565 | return {"error": str(e)}
1566 |
1567 | def download_sketchfab_model(self, uid):
1568 | """Download a model from Sketchfab by its UID"""
1569 | try:
1570 | api_key = bpy.context.scene.blendermcp_sketchfab_api_key
1571 | if not api_key:
1572 | return {"error": "Sketchfab API key is not configured"}
1573 |
1574 | # Use proper authorization header for API key auth
1575 | headers = {
1576 | "Authorization": f"Token {api_key}"
1577 | }
1578 |
1579 | # Request download URL using the exact endpoint from the documentation
1580 | download_endpoint = f"https://api.sketchfab.com/v3/models/{uid}/download"
1581 |
1582 | response = requests.get(
1583 | download_endpoint,
1584 | headers=headers,
1585 | timeout=30 # Add timeout of 30 seconds
1586 | )
1587 |
1588 | if response.status_code == 401:
1589 | return {"error": "Authentication failed (401). Check your API key."}
1590 |
1591 | if response.status_code != 200:
1592 | return {"error": f"Download request failed with status code {response.status_code}"}
1593 |
1594 | data = response.json()
1595 |
1596 | # Safety check for None data
1597 | if data is None:
1598 | return {"error": "Received empty response from Sketchfab API for download request"}
1599 |
1600 | # Extract download URL with safety checks
1601 | gltf_data = data.get("gltf")
1602 | if not gltf_data:
1603 | return {"error": "No gltf download URL available for this model. Response: " + str(data)}
1604 |
1605 | download_url = gltf_data.get("url")
1606 | if not download_url:
1607 | return {"error": "No download URL available for this model. Make sure the model is downloadable and you have access."}
1608 |
1609 | # Download the model (already has timeout)
1610 | model_response = requests.get(download_url, timeout=60) # 60 second timeout
1611 |
1612 | if model_response.status_code != 200:
1613 | return {"error": f"Model download failed with status code {model_response.status_code}"}
1614 |
1615 | # Save to temporary file
1616 | temp_dir = tempfile.mkdtemp()
1617 | zip_file_path = os.path.join(temp_dir, f"{uid}.zip")
1618 |
1619 | with open(zip_file_path, "wb") as f:
1620 | f.write(model_response.content)
1621 |
1622 | # Extract the zip file with enhanced security
1623 | with zipfile.ZipFile(zip_file_path, 'r') as zip_ref:
1624 | # More secure zip slip prevention
1625 | for file_info in zip_ref.infolist():
1626 | # Get the path of the file
1627 | file_path = file_info.filename
1628 |
1629 | # Convert directory separators to the current OS style
1630 | # This handles both / and \ in zip entries
1631 | target_path = os.path.join(temp_dir, os.path.normpath(file_path))
1632 |
1633 | # Get absolute paths for comparison
1634 | abs_temp_dir = os.path.abspath(temp_dir)
1635 | abs_target_path = os.path.abspath(target_path)
1636 |
1637 |                     # Ensure the resolved path stays inside the extraction directory
1638 |                     if os.path.commonpath([abs_temp_dir, abs_target_path]) != abs_temp_dir:
1639 | with suppress(Exception):
1640 | shutil.rmtree(temp_dir)
1641 | return {"error": "Security issue: Zip contains files with path traversal attempt"}
1642 |
1643 | # Additional explicit check for directory traversal
1644 | if ".." in file_path:
1645 | with suppress(Exception):
1646 | shutil.rmtree(temp_dir)
1647 | return {"error": "Security issue: Zip contains files with directory traversal sequence"}
1648 |
1649 | # If all files passed security checks, extract them
1650 | zip_ref.extractall(temp_dir)
1651 |
1652 | # Find the main glTF file
1653 | gltf_files = [f for f in os.listdir(temp_dir) if f.endswith('.gltf') or f.endswith('.glb')]
1654 |
1655 | if not gltf_files:
1656 | with suppress(Exception):
1657 | shutil.rmtree(temp_dir)
1658 | return {"error": "No glTF file found in the downloaded model"}
1659 |
1660 | main_file = os.path.join(temp_dir, gltf_files[0])
1661 |
1662 | # Import the model
1663 | bpy.ops.import_scene.gltf(filepath=main_file)
1664 |
1665 | # Get the names of imported objects
1666 | imported_objects = [obj.name for obj in bpy.context.selected_objects]
1667 |
1668 | # Clean up temporary files
1669 | with suppress(Exception):
1670 | shutil.rmtree(temp_dir)
1671 |
1672 | return {
1673 | "success": True,
1674 |                 "message": f"Sketchfab model {uid} imported successfully",
1675 | "imported_objects": imported_objects
1676 | }
1677 |
1678 | except requests.exceptions.Timeout:
1679 | return {"error": "Request timed out. Check your internet connection and try again with a simpler model."}
1680 | except json.JSONDecodeError as e:
1681 | return {"error": f"Invalid JSON response from Sketchfab API: {str(e)}"}
1682 | except Exception as e:
1683 | import traceback
1684 | traceback.print_exc()
1685 | return {"error": f"Failed to download model: {str(e)}"}
1686 | #endregion
1687 |
1688 | # Blender UI Panel
1689 | class BLENDERMCP_PT_Panel(bpy.types.Panel):
1690 | bl_label = "Blender MCP"
1691 | bl_idname = "BLENDERMCP_PT_Panel"
1692 | bl_space_type = 'VIEW_3D'
1693 | bl_region_type = 'UI'
1694 | bl_category = 'BlenderMCP'
1695 |
1696 | def draw(self, context):
1697 | layout = self.layout
1698 | scene = context.scene
1699 |
1700 | layout.prop(scene, "blendermcp_port")
1701 | layout.prop(scene, "blendermcp_use_polyhaven", text="Use assets from Poly Haven")
1702 |
1703 | layout.prop(scene, "blendermcp_use_hyper3d", text="Use Hyper3D Rodin 3D model generation")
1704 | if scene.blendermcp_use_hyper3d:
1705 | layout.prop(scene, "blendermcp_hyper3d_mode", text="Rodin Mode")
1706 | layout.prop(scene, "blendermcp_hyper3d_api_key", text="API Key")
1707 | layout.operator("blendermcp.set_hyper3d_free_trial_api_key", text="Set Free Trial API Key")
1708 |
1709 | layout.prop(scene, "blendermcp_use_sketchfab", text="Use assets from Sketchfab")
1710 | if scene.blendermcp_use_sketchfab:
1711 | layout.prop(scene, "blendermcp_sketchfab_api_key", text="API Key")
1712 |
1713 | if not scene.blendermcp_server_running:
1714 | layout.operator("blendermcp.start_server", text="Connect to MCP server")
1715 | else:
1716 | layout.operator("blendermcp.stop_server", text="Disconnect from MCP server")
1717 | layout.label(text=f"Running on port {scene.blendermcp_port}")
1718 |
1719 | # Operator to set Hyper3D API Key
1720 | class BLENDERMCP_OT_SetFreeTrialHyper3DAPIKey(bpy.types.Operator):
1721 | bl_idname = "blendermcp.set_hyper3d_free_trial_api_key"
1722 | bl_label = "Set Free Trial API Key"
1723 |
1724 | def execute(self, context):
1725 | context.scene.blendermcp_hyper3d_api_key = RODIN_FREE_TRIAL_KEY
1726 | context.scene.blendermcp_hyper3d_mode = 'MAIN_SITE'
1727 | self.report({'INFO'}, "API Key set successfully!")
1728 | return {'FINISHED'}
1729 |
1730 | # Operator to start the server
1731 | class BLENDERMCP_OT_StartServer(bpy.types.Operator):
1732 | bl_idname = "blendermcp.start_server"
1733 | bl_label = "Connect to Claude"
1734 | bl_description = "Start the BlenderMCP server to connect with Claude"
1735 |
1736 | def execute(self, context):
1737 | scene = context.scene
1738 |
1739 | # Create a new server instance
1740 | if not hasattr(bpy.types, "blendermcp_server") or not bpy.types.blendermcp_server:
1741 | bpy.types.blendermcp_server = BlenderMCPServer(port=scene.blendermcp_port)
1742 |
1743 | # Start the server
1744 | bpy.types.blendermcp_server.start()
1745 | scene.blendermcp_server_running = True
1746 |
1747 | return {'FINISHED'}
1748 |
1749 | # Operator to stop the server
1750 | class BLENDERMCP_OT_StopServer(bpy.types.Operator):
1751 | bl_idname = "blendermcp.stop_server"
1752 | bl_label = "Stop the connection to Claude"
1753 | bl_description = "Stop the connection to Claude"
1754 |
1755 | def execute(self, context):
1756 | scene = context.scene
1757 |
1758 | # Stop the server if it exists
1759 | if hasattr(bpy.types, "blendermcp_server") and bpy.types.blendermcp_server:
1760 | bpy.types.blendermcp_server.stop()
1761 | del bpy.types.blendermcp_server
1762 |
1763 | scene.blendermcp_server_running = False
1764 |
1765 | return {'FINISHED'}
1766 |
1767 | # Registration functions
1768 | def register():
1769 |     bpy.types.Scene.blendermcp_port = bpy.props.IntProperty(
1770 | name="Port",
1771 | description="Port for the BlenderMCP server",
1772 | default=9876,
1773 | min=1024,
1774 | max=65535
1775 | )
1776 |
1777 | bpy.types.Scene.blendermcp_server_running = bpy.props.BoolProperty(
1778 | name="Server Running",
1779 | default=False
1780 | )
1781 |
1782 | bpy.types.Scene.blendermcp_use_polyhaven = bpy.props.BoolProperty(
1783 | name="Use Poly Haven",
1784 | description="Enable Poly Haven asset integration",
1785 | default=False
1786 | )
1787 |
1788 | bpy.types.Scene.blendermcp_use_hyper3d = bpy.props.BoolProperty(
1789 | name="Use Hyper3D Rodin",
1790 |         description="Enable Hyper3D Rodin generation integration",
1791 | default=False
1792 | )
1793 |
1794 | bpy.types.Scene.blendermcp_hyper3d_mode = bpy.props.EnumProperty(
1795 | name="Rodin Mode",
1796 | description="Choose the platform used to call Rodin APIs",
1797 | items=[
1798 | ("MAIN_SITE", "hyper3d.ai", "hyper3d.ai"),
1799 | ("FAL_AI", "fal.ai", "fal.ai"),
1800 | ],
1801 | default="MAIN_SITE"
1802 | )
1803 |
1804 | bpy.types.Scene.blendermcp_hyper3d_api_key = bpy.props.StringProperty(
1805 | name="Hyper3D API Key",
1806 | subtype="PASSWORD",
1807 | description="API Key provided by Hyper3D",
1808 | default=""
1809 | )
1810 |
1811 | bpy.types.Scene.blendermcp_use_sketchfab = bpy.props.BoolProperty(
1812 | name="Use Sketchfab",
1813 | description="Enable Sketchfab asset integration",
1814 | default=False
1815 | )
1816 |
1817 | bpy.types.Scene.blendermcp_sketchfab_api_key = bpy.props.StringProperty(
1818 | name="Sketchfab API Key",
1819 | subtype="PASSWORD",
1820 | description="API Key provided by Sketchfab",
1821 | default=""
1822 | )
1823 |
1824 | bpy.utils.register_class(BLENDERMCP_PT_Panel)
1825 | bpy.utils.register_class(BLENDERMCP_OT_SetFreeTrialHyper3DAPIKey)
1826 | bpy.utils.register_class(BLENDERMCP_OT_StartServer)
1827 | bpy.utils.register_class(BLENDERMCP_OT_StopServer)
1828 |
1829 | print("BlenderMCP addon registered")
1830 |
1831 | def unregister():
1832 | # Stop the server if it's running
1833 | if hasattr(bpy.types, "blendermcp_server") and bpy.types.blendermcp_server:
1834 | bpy.types.blendermcp_server.stop()
1835 | del bpy.types.blendermcp_server
1836 |
1837 | bpy.utils.unregister_class(BLENDERMCP_PT_Panel)
1838 | bpy.utils.unregister_class(BLENDERMCP_OT_SetFreeTrialHyper3DAPIKey)
1839 | bpy.utils.unregister_class(BLENDERMCP_OT_StartServer)
1840 | bpy.utils.unregister_class(BLENDERMCP_OT_StopServer)
1841 |
1842 | del bpy.types.Scene.blendermcp_port
1843 | del bpy.types.Scene.blendermcp_server_running
1844 | del bpy.types.Scene.blendermcp_use_polyhaven
1845 | del bpy.types.Scene.blendermcp_use_hyper3d
1846 | del bpy.types.Scene.blendermcp_hyper3d_mode
1847 | del bpy.types.Scene.blendermcp_hyper3d_api_key
1848 | del bpy.types.Scene.blendermcp_use_sketchfab
1849 | del bpy.types.Scene.blendermcp_sketchfab_api_key
1850 |
1851 | print("BlenderMCP addon unregistered")
1852 |
1853 | if __name__ == "__main__":
1854 | register()
1855 |
--------------------------------------------------------------------------------