├── .difyignore
├── .env.example
├── .gitignore
├── GUIDE.md
├── LICENSE
├── PRIVACY.md
├── README.md
├── _assets
│   ├── arxiv_mcp_server_test.png
│   ├── asAgentStrategiesNode.png
│   ├── everything_mcp_server_test_resource.png
│   ├── icon.svg
│   ├── initial_mcp_plugin_settings.png
│   ├── plugin_install_offline.png
│   └── plugin_install_online.png
├── main.py
├── manifest.yaml
├── output_parser
│   └── cot_output_parser.py
├── prompt
│   └── template.py
├── provider
│   ├── agent.py
│   └── agent.yaml
├── requirements.txt
├── strategies
│   ├── ReAct.py
│   ├── ReAct.yaml
│   ├── function_calling.py
│   ├── function_calling.yaml
│   ├── mcpReAct.py
│   └── mcpReAct.yaml
└── test
    ├── chatflow
    │   ├── README.md
    │   ├── mcp_multi_sse_chatflow_v0.0.3.yml
    │   ├── mcp_sse_chatflow_v0.0.2.yml
    │   └── mcp_stdio_chatflow_v0.0.1.yml
    └── result
        ├── everything_mcp_server_prompt_log.txt
        ├── everything_mcp_server_resource_log.txt
        └── everything_mcp_server_tool_log.txt
/.difyignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # Distribution / packaging
7 | .Python
8 | build/
9 | develop-eggs/
10 | dist/
11 | downloads/
12 | eggs/
13 | .eggs/
14 | lib/
15 | lib64/
16 | parts/
17 | sdist/
18 | var/
19 | wheels/
20 | share/python-wheels/
21 | *.egg-info/
22 | .installed.cfg
23 | *.egg
24 | MANIFEST
25 |
26 | # PyInstaller
27 | # Usually these files are written by a python script from a template
28 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
29 | *.manifest
30 | *.spec
31 |
32 | # Installer logs
33 | pip-log.txt
34 | pip-delete-this-directory.txt
35 |
36 | # Unit test / coverage reports
37 | htmlcov/
38 | .tox/
39 | .nox/
40 | .coverage
41 | .coverage.*
42 | .cache
43 | nosetests.xml
44 | coverage.xml
45 | *.cover
46 | *.py,cover
47 | .hypothesis/
48 | .pytest_cache/
49 | cover/
50 |
51 | # Translations
52 | *.mo
53 | *.pot
54 |
55 | # Django stuff:
56 | *.log
57 | local_settings.py
58 | db.sqlite3
59 | db.sqlite3-journal
60 |
61 | # Flask stuff:
62 | instance/
63 | .webassets-cache
64 |
65 | # Scrapy stuff:
66 | .scrapy
67 |
68 | # Sphinx documentation
69 | docs/_build/
70 |
71 | # PyBuilder
72 | .pybuilder/
73 | target/
74 |
75 | # Jupyter Notebook
76 | .ipynb_checkpoints
77 |
78 | # IPython
79 | profile_default/
80 | ipython_config.py
81 |
82 | # pyenv
83 | # For a library or package, you might want to ignore these files since the code is
84 | # intended to run in multiple environments; otherwise, check them in:
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | Pipfile.lock
93 |
94 | # UV
95 | # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
96 | # This is especially recommended for binary packages to ensure reproducibility, and is more
97 | # commonly ignored for libraries.
98 | uv.lock
99 |
100 | # poetry
101 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
102 | # This is especially recommended for binary packages to ensure reproducibility, and is more
103 | # commonly ignored for libraries.
104 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
105 | poetry.lock
106 |
107 | # pdm
108 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
109 | #pdm.lock
110 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
111 | # in version control.
112 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control
113 | .pdm.toml
114 | .pdm-python
115 | .pdm-build/
116 |
117 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
118 | __pypackages__/
119 |
120 | # Celery stuff
121 | celerybeat-schedule
122 | celerybeat.pid
123 |
124 | # SageMath parsed files
125 | *.sage.py
126 |
127 | # Environments
128 | .env
129 | .venv
130 | env/
131 | venv/
132 | ENV/
133 | env.bak/
134 | venv.bak/
135 |
136 | # Spyder project settings
137 | .spyderproject
138 | .spyproject
139 |
140 | # Rope project settings
141 | .ropeproject
142 |
143 | # mkdocs documentation
144 | /site
145 |
146 | # mypy
147 | .mypy_cache/
148 | .dmypy.json
149 | dmypy.json
150 |
151 | # Pyre type checker
152 | .pyre/
153 |
154 | # pytype static type analyzer
155 | .pytype/
156 |
157 | # Cython debug symbols
158 | cython_debug/
159 |
160 | # PyCharm
161 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
162 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
163 | # and can be added to the global gitignore or merged into this file. For a more nuclear
164 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
165 | .idea/
166 |
167 | # Vscode
168 | .vscode/
169 |
170 | # Git
171 | .git/
172 | .gitignore
173 |
174 | # Mac
175 | .DS_Store
176 |
177 | # Windows
178 | Thumbs.db
179 |
--------------------------------------------------------------------------------
/.env.example:
--------------------------------------------------------------------------------
1 | INSTALL_METHOD=remote
2 | REMOTE_INSTALL_HOST=debug.dify.ai
3 | REMOTE_INSTALL_PORT=5003
4 | REMOTE_INSTALL_KEY=xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
5 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | *.pyc
3 | venv/
4 | .venv/
5 | .vscode/
6 | mhelper.py
7 | logs
8 | .env
9 | .idea
10 | .venv/
11 | .scripts/
12 |
--------------------------------------------------------------------------------
/GUIDE.md:
--------------------------------------------------------------------------------
1 | ## User Guide of how to develop a Dify Plugin
2 |
3 | Hi there, looks like you have already created a Plugin, now let's get you started with the development!
4 |
5 | ### Choose a Plugin type you want to develop
6 |
7 | Before you start, you need some basic knowledge about Plugin types. Plugins can extend the following abilities in Dify:
8 | - **Tool**: Tool providers like Google Search, Stable Diffusion, etc.; they can be used to perform specific tasks.
9 | - **Model**: Model providers like OpenAI, Anthropic, etc.; you can use their models to enhance the AI capabilities.
10 | - **Endpoint**: Like the Service API in Dify and Ingress in Kubernetes, you can expose an HTTP service as an endpoint and control its logic with your own code.
11 |
12 | Based on the ability you want to extend, we have divided the Plugin into three types: **Tool**, **Model**, and **Extension**.
13 |
14 | - **Tool**: A tool provider, but not limited to tools; you can also implement an endpoint there. For example, if you are building a Discord Bot you need both `Sending Message` and `Receiving Message`, so **Tool** and **Endpoint** are both required.
15 | - **Model**: Just a model provider; extending anything else is not allowed.
16 | - **Extension**: Other times you may only need a simple HTTP service to extend the functionality; **Extension** is the right choice for you.
17 |
18 | You have probably chosen the right type for your Plugin while creating it; if not, you can change it later by modifying the `manifest.yaml` file.
19 |
20 | ### Manifest
21 |
22 | Now you can edit the `manifest.yaml` file to describe your Plugin. Here is its basic structure:
23 |
24 | - version(version, required):Plugin's version
25 | - type(type, required):Plugin's type; currently only `plugin` is supported, with `bundle` planned for the future
26 | - author(string, required):Author; it is the organization name in the Marketplace and should also equal the owner of the repository
27 | - label(label, required):Multi-language name
28 | - created_at(RFC3339, required):Creation time; the Marketplace requires that it be earlier than the current time
29 | - icon(asset, required):Icon path
30 | - resource (object):Resources to be applied
31 | - memory (int64):Maximum memory usage, mainly related to resource application on SaaS for serverless, unit bytes
32 | - permission(object):Permission application
33 | - tool(object):Reverse call tool permission
34 | - enabled (bool)
35 | - model(object):Reverse call model permission
36 | - enabled(bool)
37 | - llm(bool)
38 | - text_embedding(bool)
39 | - rerank(bool)
40 | - tts(bool)
41 | - speech2text(bool)
42 | - moderation(bool)
43 | - node(object):Reverse call node permission
44 | - enabled(bool)
45 | - endpoint(object):Allow to register endpoint permission
46 | - enabled(bool)
47 | - app(object):Reverse call app permission
48 | - enabled(bool)
49 | - storage(object):Apply for persistent storage permission
50 | - enabled(bool)
51 | - size(int64):Maximum allowed persistent memory, unit bytes
52 | - plugins(object, required):List of YAML files declaring the plugin's extension-specific abilities, given as absolute paths within the plugin package. For example, if you need to extend a model, you must define a file like openai.yaml and fill in its path here; the file at that path must exist, otherwise packaging will fail.
53 | - Format
54 | - tools(list[string]): Extended tool providers; for the detailed format, please refer to the [Tool Guide](https://docs.dify.ai/docs/plugins/standard/tool_provider)
55 | - models(list[string]):Extended model providers; for the detailed format, please refer to the [Model Guide](https://docs.dify.ai/docs/plugins/standard/model_provider)
56 | - endpoints(list[string]):Extended endpoint providers; for the detailed format, please refer to the [Endpoint Guide](https://docs.dify.ai/docs/plugins/standard/endpoint_group)
57 | - Restrictions
58 | - Not allowed to extend both tools and models
59 | - Not allowed to have no extension
60 | - Not allowed to extend both models and endpoints
61 | - Currently supports at most one provider of each type of extension
62 | - meta(object)
63 | - version(version, required):manifest format version, initial version 0.0.1
64 | - arch(list[string], required):Supported architectures, currently only supports amd64 arm64
65 | - runner(object, required):Runtime configuration
66 | - language(string):Currently only supports python
67 | - version(string):Language version, currently only supports 3.12
68 | - entrypoint(string):Program entry, in python it should be main
69 |
70 | ### Install Dependencies
71 |
72 | - First of all, you need a Python 3.11+ environment, as our SDK requires it.
73 | - Then, install the dependencies:
74 | ```bash
75 | pip install -r requirements.txt
76 | ```
77 | - If you want to add more dependencies, add them to the `requirements.txt` file. Once you have set the runner to python in the `manifest.yaml` file, `requirements.txt` will automatically be used for packaging and deployment.
78 |
79 | ### Implement the Plugin
80 |
81 | Now you can start implementing your Plugin. By following these examples, you can quickly understand how to implement your own Plugin (see also the minimal sketch after this list):
82 |
83 | - [OpenAI](https://github.com/langgenius/dify-plugin-sdks/tree/main/python/examples/openai): best practice for model provider
84 | - [Google Search](https://github.com/langgenius/dify-plugin-sdks/tree/main/python/examples/google): a simple example for tool provider
85 | - [Neko](https://github.com/langgenius/dify-plugin-sdks/tree/main/python/examples/neko): a funny example for endpoint group
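
Since this repository is an agent-strategy plugin, here is a minimal sketch of that shape. It is my own illustration based on `strategies/ReAct.py`, not an official example; the class name is hypothetical and the `create_text_message` helper is assumed to be available on `AgentStrategy` as in the official cot_agent code.

```python
# A hedged, minimal sketch of an agent-strategy class (this repository's plugin type).
# See strategies/ReAct.py for a full implementation.
from collections.abc import Generator
from typing import Any

from dify_plugin.entities.agent import AgentInvokeMessage
from dify_plugin.interfaces.agent import AgentStrategy


class EchoAgentStrategy(AgentStrategy):
    def _invoke(self, parameters: dict[str, Any]) -> Generator[AgentInvokeMessage]:
        # Echo the incoming query back as a single text message.
        query = parameters.get("query", "")
        yield self.create_text_message(f"You said: {query}")
```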
86 |
87 | ### Test and Debug the Plugin
88 |
89 | You may have already noticed the `.env.example` file in the root directory of your Plugin. Just copy it to `.env` and fill in the corresponding values; these are the environment variables you need to set if you want to debug your Plugin locally.
90 |
91 | - `INSTALL_METHOD`: Set this to `remote`; your plugin will connect to a Dify instance through the network.
92 | - `REMOTE_INSTALL_HOST`: The host of your Dify instance. You can use our SaaS instance `https://debug.dify.ai` or a self-hosted Dify instance.
93 | - `REMOTE_INSTALL_PORT`: The port of your Dify instance; the default is 5003.
94 | - `REMOTE_INSTALL_KEY`: Get your debugging key from the Dify instance you use. At the top right of the plugin management page, you will see a button with a `debug` icon; click it to get the key.
95 |
96 | Run the following command to start your Plugin:
97 |
98 | ```bash
99 | python -m main
100 | ```
101 |
102 | Refresh the page of your Dify instance and you should now see your Plugin in the list, marked as `debugging`. You can use it normally, but it is not recommended for production.
103 |
104 | ### Package the Plugin
105 |
106 | Finally, package your Plugin by running the following command:
107 |
108 | ```bash
109 | dify-plugin plugin package ./ROOT_DIRECTORY_OF_YOUR_PLUGIN
110 | ```
111 |
112 | You will get a `plugin.difypkg` file; that's all. You can submit it to the Marketplace now and look forward to your Plugin being listed!
113 |
114 |
115 | ## User Privacy Policy
116 |
117 | Please fill in the privacy policy of the plugin if you want it published on the Marketplace; refer to [PRIVACY.md](PRIVACY.md) for more details.
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/PRIVACY.md:
--------------------------------------------------------------------------------
1 | ## Privacy
2 | This plugin is released under the Apache License 2.0. Users are responsible for protecting their own privacy and securing their data according to the guidelines of their chosen LLM API platform. Please read the README for correct usage, as the default implementation does not include a Human-In-The-Loop mechanism.
3 |
4 | ### Q1: What data does the plugin access?
5 | ### A1:
6 | The plugin itself does not actively collect personal information. However, the data it accesses depends on the connected MCP server. Depending on the server’s configuration, it may temporarily access user-related data (e.g., user names, file paths, etc.). Once the execution of the mcpReAct agent strategy node is complete, all such data is discarded.
7 |
8 | ### Q2: Does the plugin collect or store personal information?
9 | ### A2:
10 | No. As an individual developer, I do not implement any persistent collection of personal data. The plugin is designed to run transiently, and any accessed information is not stored beyond the lifetime of a single operation.
11 |
12 | ### Q3: What about privacy in relation to LLM APIs?
13 | ### A3:
14 | Since users choose their own LLM API platform, the privacy and security of data exchanged with those APIs are determined by the respective platforms. Please review and comply with the privacy policies of your chosen LLM provider.
15 |
16 | ### Q4: What is the user’s responsibility?
17 | ### A4:
18 | Read the Documentation: Ensure you follow the usage guidelines provided in the README.
19 | Trusted MCP Servers: Only connect to MCP servers you trust.
20 | Privacy Management: Under Apache License 2.0, this plugin comes with no warranty regarding privacy or security. You are solely responsible for protecting your own data.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # dify-mcp-client
2 | `MCP Client` as Agent Strategy Plugin.
3 | > [!IMPORTANT]
4 | > Dify is not `MCP Server` but `MCP Host`.
5 |
6 | 
7 |
8 | ## How it works
9 | Each `MCP client` (ReAct Agent) node can connect to `MCP servers`.
10 | 1. `Tool`, `Resource`, and `Prompt` lists are converted into Dify Tools.
11 | 2. Your selected LLM can see their `name`, `description`, and `argument type`.
12 | 3. The LLM calls Tools based on the ReAct loop (Reason → Act → Observe); a minimal connection sketch follows this list.
13 |
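A minimal sketch of this connection for a single SSE server (not the plugin's actual code), using the `mcp` SDK from `requirements.txt`. The URL and the `echo` tool (from the Everything MCP server) are placeholders.

```python
# Sketch: connect to one SSE MCP server, list its tools, and call one of them.
import asyncio

from mcp import ClientSession
from mcp.client.sse import sse_client


async def main() -> None:
    async with sse_client("http://host.docker.internal:8080/sse") as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()

            # 1. These listings are what gets converted into Dify Tools.
            tools = await session.list_tools()
            for tool in tools.tools:
                print(tool.name, "-", tool.description)

            # 3. In the ReAct loop, the LLM chooses the tool name and arguments.
            result = await session.call_tool("echo", arguments={"message": "hello"})
            print(result.content)


asyncio.run(main())
```
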
14 | > [!NOTE]
15 | > Most of the code in this repository is based on the following files.
16 | > #### Dify Official Plugins / Agent Strategies
17 | > https://github.com/langgenius/dify-official-plugins/tree/main/agent-strategies/cot_agent
18 |
19 | ## ✅ What I did
20 | - Copied `ReAct.py` and renamed the file to `mcpReAct.py`
21 | - Added a `config_json` GUI input field by editing `mcpReAct.yaml` and `class mcpReActParams()`
22 |
23 | ### In mcpReAct.py, I added
24 | - 12 new functions for MCP
25 | - `__init__()` for initializing `AsyncExitStack` and the `event loop`
26 | - Some code in `_handle_invoke_action()` for MCP
27 | - MCP setup and cleanup in `_invoke()` (see the sketch below)
28 |
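A rough sketch of the setup/cleanup idea (my illustration, not the actual `mcpReAct.py` code): every SSE session is entered onto one `AsyncExitStack`, so a single `aclose()` tears all connections down at the end of `_invoke()`.

```python
# Sketch: open one MCP session per configured server on a shared AsyncExitStack.
from contextlib import AsyncExitStack

from mcp import ClientSession
from mcp.client.sse import sse_client


async def open_sessions(config: dict) -> tuple[AsyncExitStack, dict[str, ClientSession]]:
    stack = AsyncExitStack()
    sessions: dict[str, ClientSession] = {}
    for name, server in config["mcpServers"].items():
        read, write = await stack.enter_async_context(sse_client(server["url"]))
        session = await stack.enter_async_context(ClientSession(read, write))
        await session.initialize()
        sessions[name] = session
    return stack, sessions  # cleanup: await stack.aclose()
```
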
29 | > [!IMPORTANT]
30 | > The ReAct while loop is left as it is.
31 |
32 | ## 🔄 Update history
33 | - Add SSE MCP client (v0.0.2)
34 | - Support multi SSE servers (v0.0.3)
35 | - Update python module and simplify its dependency (v0.0.4)
36 | - mcp(v1.1.2→v1.6.0+)
37 | - dify_plugin(0.0.1b72→v0.1.0)
38 |
39 | ## ⚠️ Caution and Limitation
40 | > [!CAUTION]
41 | > This plugin does **not** implement a **human-in-the-loop** mechanism by default, so connect to **reliable MCP servers only**.
42 | > To mitigate this, decrease `max iterations` (default: `3`) to `1`, and use this Agent node repeatedly in a Chatflow.
43 | > However, agent memory is reset at the end of the Workflow.
44 | > Use a `Conversation Variable` to save history and pass it to QUERY.
45 | > Don't forget to add a phrase such as
46 | > *"ask for user's permission when calling tools"* in INSTRUCTION.
47 |
48 | # How to use this plugin
49 |
50 | ## 🛜Install the plugin from GitHub
51 | - Enter the following GitHub repository name
52 | ```
53 | https://github.com/3dify-project/dify-mcp-client/
54 | ```
55 | - Dify > PLUGINS > + Install plugin > INSTALL FROM > GitHub
56 | 
57 |
58 | ## ⬇️Install the plugin from .difypkg file
59 | - Go to Releases https://github.com/3dify-project/dify-mcp-client/releases
60 | - Select suitable version of `.difypkg`
61 | - Dify > PLUGINS > + Install plugin > INSTALL FROM > Local Package File
62 | 
63 |
64 | ## How to handle errors when installing plugins?
65 |
66 | **Issue**: You encounter the error message `plugin verification has been enabled, and the plugin you want to install has a bad signature`. How do you handle it?
67 | **Solution**: Open `/docker/.env` and change the value from `true` to `false`:
68 | ```
69 | FORCE_VERIFYING_SIGNATURE=false
70 | ```
71 | Run the following commands to restart the Dify service:
72 | ```bash
73 | cd docker
74 | docker compose down
75 | docker compose up -d
76 | ```
77 | Once this field is added, the Dify platform will allow the installation of all plugins that are not listed (and thus not verified) in the Dify Marketplace.
78 | > [!TIP]
79 | > The Marketplace needs approval. If the stars⭐ reach 100, I'll consider making a PR for them.
80 |
81 | ## Where does this plugin show up?
82 | - It takes a few minutes to install
83 | - Once installed, you can use it in any workflow as an Agent node
84 | - Select the "mcpReAct" strategy (otherwise no MCP)
85 | 
86 |
87 | ## Config
88 | The MCP Agent Plugin node requires a config_json like this, giving the command or URL used to connect to MCP servers:
89 | ```
90 | {
91 |   "mcpServers": {
92 |     "name_of_server1": {
93 |       "url": "http://host.docker.internal:8080/sse"
94 |     },
95 |     "name_of_server2": {
96 |       "url": "http://host.docker.internal:8008/sse"
97 |     }
98 |   }
99 | }
100 | ```
101 | > [!WARNING]
102 | > - Each server's port number should be different, like 8080, 8008, ...
103 | > - If you want to use a stdio MCP server, there are 3 ways:
104 | >   1. Convert it to an SSE MCP server: https://github.com/3dify-project/dify-mcp-client/edit/main/README.md#how-to-convert-stdio-mcp-server-into-sse-mcp-server
105 | >   2. Deploy with the source code (**NOT** via a .difypkg or GitHub repository name install): https://github.com/3dify-project/dify-mcp-client/edit/main/README.md#-how-to-develop-and-deploy-plugin
106 | > 3. Pre-install Node.js inside dify-plugin docker (issue:https://github.com/3dify-project/dify-mcp-client/issues/10) guide: https://github.com/tangyoha/tangyoha-bili/tree/master/dify/mcp/map_mcp
107 |
108 | ## Chatflow Example
109 | 
110 | > [!WARNING]
111 | > - The Tools field should not be left blank, so **select Dify tools** such as "current time".
112 | #### I provide this Dify ChatFlow `.yml` for testing this plugin.
113 | https://github.com/3dify-project/dify-mcp-client/tree/main/test/chatflow
114 | #### After downloading the DSL (yml) file, import it into Dify and you can test MCP using the "Everything MCP server"
115 | https://github.com/modelcontextprotocol/servers/tree/main/src/everything
116 |
117 | # How to convert `stdio` MCP server into SSE MCP server
118 | ## option1️⃣: Edit MCP server's code
119 | If it is a FastMCP server, change it like this:
120 | ```diff
121 | if __name__ == "__main__":
122 | - mcp.run(transport="stdio")
123 | + mcp.run(transport="sse")
124 | ```
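
For context, a complete minimal FastMCP server run over SSE could look like the sketch below. The tool and the `port` setting are my own placeholders, so check the FastMCP settings of your `mcp` version.

```python
# Sketch: one-tool FastMCP server served over SSE (placeholder name, tool, and port).
from mcp.server.fastmcp import FastMCP

mcp = FastMCP("demo", port=8080)


@mcp.tool()
def add(a: int, b: int) -> int:
    """Add two numbers."""
    return a + b


if __name__ == "__main__":
    mcp.run(transport="sse")  # serves SSE at /sse, e.g. http://127.0.0.1:8080/sse
```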
125 |
126 | ## option2️⃣: via mcp-proxy
127 | ```
128 | \mcp-proxy>uv venv -p 3.12
129 | .venv\Scripts\activate
130 | uv tool install mcp-proxy
131 | ```
132 | ### Check that Node.js is installed and find the npx(.cmd) path
133 | (Mac/Linux)
134 | ```
135 | which npx
136 | ```
137 | (Windows)
138 | ```
139 | where npx
140 | ```
141 | result
142 | ```
143 | C:\Program Files\nodejs\npx
144 | C:\Program Files\nodejs\npx.cmd
145 | C:\Users\USER_NAME\AppData\Roaming\npm\npx
146 | C:\Users\USER_NAME\AppData\Roaming\npm\npx.cmd
147 | ```
148 |
149 | If claude_desktop_config.json follows this schema,
150 | ```
151 | {
152 |   "mcpServers": {
153 |     "SERVER_NAME": {
154 |       "command": CMD_NAME_OR_PATH,
155 |       "args": [VALUE1, VALUE2]
156 |     }
157 |   }
158 | }
159 | ```
160 | ### Wake up stdio MCP server by this command
161 | ```
162 | mcp-proxy --sse-port=8080 --pass-environment -- CMD_NAME_OR_PATH --arg1 VALUE1 --arg2 VALUE2 ...
163 | ```
164 | If your OS is Windows, use npx.cmd instead of npx. The following is an example command that converts the stdio "everything MCP server" to SSE via mcp-proxy.
165 | ```
166 | mcp-proxy --sse-port=8080 --pass-environment -- C:\Program Files\nodejs\npx.cmd --arg1 -y --arg2 @modelcontextprotocol/server-everything
167 | ```
168 |
169 | Similarly, on another command line (if you use the sample Chatflow for v0.0.3):
170 | ```
171 | pip install mcp-simple-arxiv
172 | mcp-proxy --sse-port=8008 --pass-environment -- C:\Users\USER_NAME\AppData\Local\Programs\Python\Python310\python.exe -m mcp_simple_arxiv
173 | ```
174 |
175 | The following is an mcp-proxy setup log.
176 | ```
177 | (mcp_proxy) C:\User\USER_NAME\mcp-proxy>mcp-proxy --sse-port=8080 --pass-environment -- C:\Program Files\nodejs\npx.cmd --arg1 -y --arg2 @modelcontextprotocol/server-everything
178 | DEBUG:root:Starting stdio client and SSE server
179 | DEBUG:asyncio:Using proactor: IocpProactor
180 | DEBUG:mcp.server.lowlevel.server:Initializing server 'example-servers/everything'
181 | DEBUG:mcp.server.sse:SseServerTransport initialized with endpoint: /messages/
182 | INFO: Started server process [53104]
183 | INFO: Waiting for application startup.
184 | INFO: Application startup complete.
185 | INFO: Uvicorn running on http://127.0.0.1:8080 (Press CTRL+C to quit)
186 | ```
187 |
188 | # 🔨 How to develop and deploy plugin
189 |
190 | ### Official plugin dev guide
191 | https://github.com/3dify-project/dify-mcp-client/blob/main/GUIDE.md
192 |
193 | ### Dify plugin SDK daemon
194 | If your OS is Windows and your CPU is Intel or AMD, you need to download `dify-plugin-windows-amd64.exe` (v0.0.7).
195 | Choose your OS-compatible version here:
196 | https://github.com/langgenius/dify-plugin-daemon/releases
197 | 1. Rename it to dify.exe for convenience
198 | 2. mkdir "C:\Users\USER_NAME\.local\bin" (Windows) and add it to the system PATH
199 | 3. Copy `dify.exe` into dify-mcp-client/
200 | > [!TIP]
201 | > The following guide is helpful.
202 | > https://docs.dify.ai/plugins/quick-start/develop-plugins/initialize-development-tools
203 |
204 | ### Reference
205 | https://docs.dify.ai/plugins/quick-start/develop-plugins/initialize-development-tools
206 |
207 | > [!NOTE]
208 | > You can skip this stage if you pull or download the code of this repo
209 | > ```
210 | > dify plugin init
211 | > ```
212 | > Initial settings are as follows
213 | > 
214 |
215 | ### Change directory
216 | ```
217 | cd dify-mcp-client
218 | ```
219 |
220 | ### Install python module
221 | Python 3.12+ is supported. Using `venv` and `uv` is not required, but recommended.
222 | ```
223 | uv venv -p 3.12
224 | .venv\Scripts\activate
225 | ```
226 | Install python modules for plugin development
227 | ```
228 | uv pip install -r requirements.txt
229 | ```
230 |
231 | ### Duplicate `.env.example` and rename it to `.env`
232 | I changed `REMOTE_INSTALL_HOST` from `debug.dify.ai` to `localhost`
233 | (Docker Compose environment).
234 | Click the 🪲 bug icon button to see this information.
235 |
236 | ### Activate Dify plugin
237 | ```
238 | python -m main
239 | ```
240 | (ctrl+C to stop)
241 | > [!TIP]
242 | > The REMOTE_INSTALL_KEY in .env changes often.
243 | > If you encounter error messages like `handshake failed, invalid key`, renew it.
244 |
245 | ### Package into .difypkg
246 | `./dify-mcp-client` is my default root name
247 | ```
248 | dify plugin package ./ROOT_OF_YOUR_PROJECT
249 | ```
250 |
251 | ## Useful GitHub repositories for developers
252 |
253 | #### Dify Plugin SDKs
254 | https://github.com/langgenius/dify-plugin-sdks
255 |
256 | #### MCP Python SDK
257 | https://github.com/modelcontextprotocol/python-sdk
258 |
259 |
260 | > [!TIP]
261 | > MCP client example
262 | > https://github.com/modelcontextprotocol/python-sdk/blob/main/examples/clients/simple-chatbot/mcp_simple_chatbot/main.py
263 |
264 | > [!NOTE]
265 | > A Dify plugin has a `requirements.txt` that automatically installs Python modules.
266 | > I include the latest `mcp` in it, so you don't need to download the MCP SDK separately.
267 |
--------------------------------------------------------------------------------
/_assets/arxiv_mcp_server_test.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/3dify-project/dify-mcp-client/cd6d27dd49f2f17b10cc71814f50f755c5766d91/_assets/arxiv_mcp_server_test.png
--------------------------------------------------------------------------------
/_assets/asAgentStrategiesNode.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/3dify-project/dify-mcp-client/cd6d27dd49f2f17b10cc71814f50f755c5766d91/_assets/asAgentStrategiesNode.png
--------------------------------------------------------------------------------
/_assets/everything_mcp_server_test_resource.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/3dify-project/dify-mcp-client/cd6d27dd49f2f17b10cc71814f50f755c5766d91/_assets/everything_mcp_server_test_resource.png
--------------------------------------------------------------------------------
/_assets/icon.svg:
--------------------------------------------------------------------------------
1 |
7 |
--------------------------------------------------------------------------------
/_assets/initial_mcp_plugin_settings.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/3dify-project/dify-mcp-client/cd6d27dd49f2f17b10cc71814f50f755c5766d91/_assets/initial_mcp_plugin_settings.png
--------------------------------------------------------------------------------
/_assets/plugin_install_offline.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/3dify-project/dify-mcp-client/cd6d27dd49f2f17b10cc71814f50f755c5766d91/_assets/plugin_install_offline.png
--------------------------------------------------------------------------------
/_assets/plugin_install_online.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/3dify-project/dify-mcp-client/cd6d27dd49f2f17b10cc71814f50f755c5766d91/_assets/plugin_install_online.png
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
1 | from dify_plugin import Plugin, DifyPluginEnv
2 |
3 | plugin = Plugin(DifyPluginEnv(MAX_REQUEST_TIMEOUT=120))
4 |
5 | if __name__ == '__main__':
6 | plugin.run()
7 |
--------------------------------------------------------------------------------
/manifest.yaml:
--------------------------------------------------------------------------------
1 | version: 0.0.4
2 | type: plugin
3 | author: 3dify-project
4 | name: mcp_client
5 | label:
6 | en_US: mcp_client
7 | ja_JP: mcp_client
8 | zh_Hans: mcp_client
9 | pt_BR: mcp_client
10 | description:
11 | en_US: Claude MCP Client as Dify plugin. This maintains 1:1 connections with MCP servers. By default, list of {tool, resource, prompt} on the server is passed to LLM. Dify is regarded as Host (not MCP server).
12 | ja_JP: Claude MCP Client as Dify plugin. This maintains 1:1 connections with MCP servers. By default, list of {tool, resource, prompt} on the server is passed to LLM. Dify is regarded as Host (not MCP server).
13 | zh_Hans: Claude MCP Client as Dify plugin. This maintains 1:1 connections with MCP servers. By default, list of {tool, resource, prompt} on the server is passed to LLM. Dify is regarded as Host (not MCP server).
14 | pt_BR: Claude MCP Client as Dify plugin. This maintains 1:1 connections with MCP servers. By default, list of {tool, resource, prompt} on the server is passed to LLM. Dify is regarded as Host (not MCP server).
15 | icon: icon.svg
16 | resource:
17 | memory: 268435456
18 | permission:
19 | tool:
20 | enabled: true
21 | model:
22 | enabled: true
23 | llm: true
24 | text_embedding: false
25 | rerank: false
26 | tts: false
27 | speech2text: false
28 | moderation: false
29 | endpoint:
30 | enabled: true
31 | app:
32 | enabled: true
33 | storage:
34 | enabled: true
35 | size: 1048576
36 | plugins:
37 | agent_strategies:
38 | - provider/agent.yaml
39 | meta:
40 | version: 0.0.4
41 | arch:
42 | - amd64
43 | - arm64
44 | runner:
45 | language: python
46 | version: "3.12"
47 | entrypoint: main
48 | created_at: 2025-03-04T16:50:45.7131549+09:00
49 | privacy: PRIVACY.md
50 | verified: false
51 |
--------------------------------------------------------------------------------
/output_parser/cot_output_parser.py:
--------------------------------------------------------------------------------
1 | import json
2 | import re
3 | from collections.abc import Generator
4 | from typing import Union
5 |
6 | from dify_plugin.entities.model.llm import LLMResultChunk
7 | from dify_plugin.interfaces.agent import AgentScratchpadUnit
8 |
9 |
10 | class CotAgentOutputParser:
11 | @classmethod
12 | def handle_react_stream_output(
13 | cls, llm_response: Generator[LLMResultChunk, None, None], usage_dict: dict
14 | ) -> Generator[Union[str, AgentScratchpadUnit.Action], None, None]:
15 | def parse_action(json_str):
16 | try:
17 | action = json.loads(json_str, strict=False)
18 | action_name = None
19 | action_input = None
20 |
21 | # cohere always returns a list
22 | if isinstance(action, list) and len(action) == 1:
23 | action = action[0]
24 |
25 | for key, value in action.items():
26 | if "input" in key.lower():
27 | action_input = value
28 | else:
29 | action_name = value
30 |
31 | if action_name is not None and action_input is not None:
32 | return AgentScratchpadUnit.Action(
33 | action_name=action_name,
34 | action_input=action_input,
35 | )
36 | else:
37 | return json_str or ""
38 | except:
39 | return json_str or ""
40 |
41 | def extra_json_from_code_block(
42 | code_block,
43 | ) -> Generator[Union[str, AgentScratchpadUnit.Action], None, None]:
44 | code_blocks = re.findall(r"```(.*?)```", code_block, re.DOTALL)
45 | if not code_blocks:
46 | return
47 | for block in code_blocks:
48 | json_text = re.sub(
49 | r"^[a-zA-Z]+\n", "", block.strip(), flags=re.MULTILINE
50 | )
51 | yield parse_action(json_text)
52 |
53 | code_block_cache = ""
54 | code_block_delimiter_count = 0
55 | in_code_block = False
56 | json_cache = ""
57 | json_quote_count = 0
58 | in_json = False
59 | got_json = False
60 |
61 | action_cache = ""
62 | action_str = "action:"
63 | action_idx = 0
64 |
65 | thought_cache = ""
66 | thought_str = "thought:"
67 | thought_idx = 0
68 |
69 | last_character = ""
70 |
71 | for response in llm_response:
72 | if response.delta.usage:
73 | usage_dict["usage"] = response.delta.usage
74 | response_content = response.delta.message.content
75 | if not isinstance(response_content, str):
76 | continue
77 |
78 | # stream
79 | index = 0
80 | while index < len(response_content):
81 | steps = 1
82 | delta = response_content[index : index + steps]
83 | yield_delta = False
84 |
85 | if delta == "`":
86 | last_character = delta
87 | code_block_cache += delta
88 | code_block_delimiter_count += 1
89 | else:
90 | if not in_code_block:
91 | if code_block_delimiter_count > 0:
92 | last_character = delta
93 | yield code_block_cache
94 | code_block_cache = ""
95 | else:
96 | last_character = delta
97 | code_block_cache += delta
98 | code_block_delimiter_count = 0
99 |
100 | if not in_code_block and not in_json:
101 | if delta.lower() == action_str[action_idx] and action_idx == 0:
102 | if last_character not in {"\n", " ", ""}:
103 | yield_delta = True
104 | else:
105 | last_character = delta
106 | action_cache += delta
107 | action_idx += 1
108 | if action_idx == len(action_str):
109 | action_cache = ""
110 | action_idx = 0
111 | index += steps
112 | continue
113 | elif delta.lower() == action_str[action_idx] and action_idx > 0:
114 | last_character = delta
115 | action_cache += delta
116 | action_idx += 1
117 | if action_idx == len(action_str):
118 | action_cache = ""
119 | action_idx = 0
120 | index += steps
121 | continue
122 | else:
123 | if action_cache:
124 | last_character = delta
125 | yield action_cache
126 | action_cache = ""
127 | action_idx = 0
128 |
129 | if delta.lower() == thought_str[thought_idx] and thought_idx == 0:
130 | if last_character not in {"\n", " ", ""}:
131 | yield_delta = True
132 | else:
133 | last_character = delta
134 | thought_cache += delta
135 | thought_idx += 1
136 | if thought_idx == len(thought_str):
137 | thought_cache = ""
138 | thought_idx = 0
139 | index += steps
140 | continue
141 | elif delta.lower() == thought_str[thought_idx] and thought_idx > 0:
142 | last_character = delta
143 | thought_cache += delta
144 | thought_idx += 1
145 | if thought_idx == len(thought_str):
146 | thought_cache = ""
147 | thought_idx = 0
148 | index += steps
149 | continue
150 | else:
151 | if thought_cache:
152 | last_character = delta
153 | yield thought_cache
154 | thought_cache = ""
155 | thought_idx = 0
156 |
157 | if yield_delta:
158 | index += steps
159 | last_character = delta
160 | yield delta
161 | continue
162 |
163 | if code_block_delimiter_count == 3:
164 | if in_code_block:
165 | last_character = delta
166 | yield from extra_json_from_code_block(code_block_cache)
167 | code_block_cache = ""
168 |
169 | in_code_block = not in_code_block
170 | code_block_delimiter_count = 0
171 |
172 | if not in_code_block:
173 | # handle single json
174 | if delta == "{":
175 | json_quote_count += 1
176 | in_json = True
177 | last_character = delta
178 | json_cache += delta
179 | elif delta == "}":
180 | last_character = delta
181 | json_cache += delta
182 | if json_quote_count > 0:
183 | json_quote_count -= 1
184 | if json_quote_count == 0:
185 | in_json = False
186 | got_json = True
187 | index += steps
188 | continue
189 | else:
190 | if in_json:
191 | last_character = delta
192 | json_cache += delta
193 |
194 | if got_json:
195 | got_json = False
196 | last_character = delta
197 | yield parse_action(json_cache)
198 | json_cache = ""
199 | json_quote_count = 0
200 | in_json = False
201 |
202 | if not in_code_block and not in_json:
203 | last_character = delta
204 | yield delta.replace("`", "")
205 |
206 | index += steps
207 |
208 | if code_block_cache:
209 | yield code_block_cache
210 |
211 | if json_cache:
212 | yield parse_action(json_cache)
213 |
--------------------------------------------------------------------------------
/prompt/template.py:
--------------------------------------------------------------------------------
1 | ENGLISH_REACT_COMPLETION_PROMPT_TEMPLATES = """Respond to the human as helpfully and accurately as possible.
2 |
3 | {{instruction}}
4 |
5 | You have access to the following tools:
6 |
7 | {{tools}}
8 |
9 | Use a json blob to specify a tool by providing an action key (tool name) and an action_input key (tool input).
10 | Valid "action" values: "Final Answer" or {{tool_names}}
11 |
12 | Provide only ONE action per $JSON_BLOB, as shown:
13 |
14 | ```
15 | {
16 | "action": $TOOL_NAME,
17 | "action_input": $ACTION_INPUT
18 | }
19 | ```
20 |
21 | Follow this format:
22 |
23 | Question: input question to answer
24 | Thought: consider previous and subsequent steps
25 | Action:
26 | ```
27 | $JSON_BLOB
28 | ```
29 | Observation: action result
30 | ... (repeat Thought/Action/Observation N times)
31 | Thought: I know what to respond
32 | Action:
33 | ```
34 | {
35 | "action": "Final Answer",
36 | "action_input": "Final response to human"
37 | }
38 | ```
39 |
40 | Begin! Reminder to ALWAYS respond with a valid json blob of a single action. Use tools if necessary. Respond directly if appropriate. Format is Action:```$JSON_BLOB```then Observation:.
41 | {{historic_messages}}
42 | Question: {{query}}
43 | {{agent_scratchpad}}
44 | Thought:""" # noqa: E501
45 |
46 |
47 | ENGLISH_REACT_COMPLETION_AGENT_SCRATCHPAD_TEMPLATES = """Observation: {{observation}}
48 | Thought:"""
49 |
50 | ENGLISH_REACT_CHAT_PROMPT_TEMPLATES = """Respond to the human as helpfully and accurately as possible.
51 |
52 | {{instruction}}
53 |
54 | You have access to the following tools:
55 |
56 | {{tools}}
57 |
58 | Use a json blob to specify a tool by providing an action key (tool name) and an action_input key (tool input).
59 | Valid "action" values: "Final Answer" or {{tool_names}}
60 |
61 | Provide only ONE action per $JSON_BLOB, as shown:
62 |
63 | ```
64 | {
65 | "action": $TOOL_NAME,
66 | "action_input": $ACTION_INPUT
67 | }
68 | ```
69 |
70 | Follow this format:
71 |
72 | Question: input question to answer
73 | Thought: consider previous and subsequent steps
74 | Action:
75 | ```
76 | $JSON_BLOB
77 | ```
78 | Observation: action result
79 | ... (repeat Thought/Action/Observation N times)
80 | Thought: I know what to respond
81 | Action:
82 | ```
83 | {
84 | "action": "Final Answer",
85 | "action_input": "Final response to human"
86 | }
87 | ```
88 |
89 | Begin! Reminder to ALWAYS respond with a valid json blob of a single action. Use tools if necessary. Respond directly if appropriate. Format is Action:```$JSON_BLOB```then Observation:.
90 | """ # noqa: E501
91 |
92 |
93 | ENGLISH_REACT_CHAT_AGENT_SCRATCHPAD_TEMPLATES = ""
94 |
95 | REACT_PROMPT_TEMPLATES = {
96 | "english": {
97 | "chat": {
98 | "prompt": ENGLISH_REACT_CHAT_PROMPT_TEMPLATES,
99 | "agent_scratchpad": ENGLISH_REACT_CHAT_AGENT_SCRATCHPAD_TEMPLATES,
100 | },
101 | "completion": {
102 | "prompt": ENGLISH_REACT_COMPLETION_PROMPT_TEMPLATES,
103 | "agent_scratchpad": ENGLISH_REACT_COMPLETION_AGENT_SCRATCHPAD_TEMPLATES,
104 | },
105 | }
106 | }
107 |
--------------------------------------------------------------------------------
/provider/agent.py:
--------------------------------------------------------------------------------
1 | from dify_plugin.interfaces.agent import AgentProvider
2 |
3 |
4 | class LanggeniusAgentProvider(AgentProvider):
5 | pass
6 |
--------------------------------------------------------------------------------
/provider/agent.yaml:
--------------------------------------------------------------------------------
1 | identity:
2 | author: langgenius
3 | name: agent
4 | label:
5 | en_US: Agent
6 | zh_Hans: Agent
7 | pt_BR: Agent
8 | description:
9 | en_US: Agent
10 | zh_Hans: Agent
11 | pt_BR: Agent
12 | icon: icon.svg
13 | strategies:
14 | - strategies/function_calling.yaml
15 | - strategies/ReAct.yaml
16 | - strategies/mcpReAct.yaml
17 | extra:
18 | python:
19 | source: provider/agent.py
20 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | dify_plugin==0.1.0
2 | mcp>=1.6.0
--------------------------------------------------------------------------------
/strategies/ReAct.py:
--------------------------------------------------------------------------------
1 | import json
2 | import time
3 | from collections.abc import Generator, Mapping
4 | from typing import Any, Optional, cast
5 |
6 | from dify_plugin.entities.agent import AgentInvokeMessage
7 | from dify_plugin.entities.model.llm import LLMModelConfig, LLMUsage
8 | from dify_plugin.entities.model.message import (
9 | AssistantPromptMessage,
10 | PromptMessage,
11 | SystemPromptMessage,
12 | ToolPromptMessage,
13 | UserPromptMessage,
14 | )
15 | from dify_plugin.entities.tool import (
16 | LogMetadata,
17 | ToolInvokeMessage,
18 | ToolParameter,
19 | ToolProviderType,
20 | )
21 | from dify_plugin.interfaces.agent import (
22 | AgentModelConfig,
23 | AgentScratchpadUnit,
24 | AgentStrategy,
25 | ToolEntity,
26 | )
27 | from output_parser.cot_output_parser import CotAgentOutputParser
28 | from prompt.template import REACT_PROMPT_TEMPLATES
29 | from pydantic import BaseModel, Field
30 |
31 | ignore_observation_providers = ["wenxin"]
32 |
33 |
34 | class ReActParams(BaseModel):
35 | query: str
36 | instruction: str | None
37 | model: AgentModelConfig
38 | tools: list[ToolEntity] | None
39 | inputs: dict[str, Any] = {}
40 | maximum_iterations: int = 3
41 |
42 |
43 | class AgentPromptEntity(BaseModel):
44 | """
45 | Agent Prompt Entity.
46 | """
47 |
48 | first_prompt: str
49 | next_iteration: str
50 |
51 |
52 | class ToolInvokeMeta(BaseModel):
53 | """
54 | Tool invoke meta
55 | """
56 |
57 | time_cost: float = Field(..., description="The time cost of the tool invoke")
58 | error: Optional[str] = None
59 | tool_config: Optional[dict] = None
60 |
61 | @classmethod
62 | def empty(cls) -> "ToolInvokeMeta":
63 | """
64 | Get an empty instance of ToolInvokeMeta
65 | """
66 | return cls(time_cost=0.0, error=None, tool_config={})
67 |
68 | @classmethod
69 | def error_instance(cls, error: str) -> "ToolInvokeMeta":
70 | """
71 | Get an instance of ToolInvokeMeta with error
72 | """
73 | return cls(time_cost=0.0, error=error, tool_config={})
74 |
75 | def to_dict(self) -> dict:
76 | return {
77 | "time_cost": self.time_cost,
78 | "error": self.error,
79 | "tool_config": self.tool_config,
80 | }
81 |
82 |
83 | class ReActAgentStrategy(AgentStrategy):
84 | def _invoke(self, parameters: dict[str, Any]) -> Generator[AgentInvokeMessage]:
85 | react_params = ReActParams(**parameters)
86 | query = react_params.query
87 | model = react_params.model
88 | agent_scratchpad = []
89 | history_prompt_messages: list[PromptMessage] = []
90 | current_session_messages = []
91 | self._organize_historic_prompt_messages(
92 | history_prompt_messages, current_session_messages=current_session_messages
93 | )
94 | tools = react_params.tools
95 | tool_instances = {tool.identity.name: tool for tool in tools} if tools else {}
96 | react_params.model.completion_params = (
97 | react_params.model.completion_params or {}
98 | )
99 | # check model mode
100 | stop = (
101 | react_params.model.completion_params.get("stop", [])
102 | if react_params.model.completion_params
103 | else []
104 | )
105 |
106 | if (
107 | "Observation" not in stop
108 | and model.provider not in ignore_observation_providers
109 | ):
110 | stop.append("Observation")
111 | # init instruction
112 | inputs = react_params.inputs
113 | instruction = react_params.instruction or ""
114 | self._instruction = self._fill_in_inputs_from_external_data_tools(
115 | instruction, inputs
116 | )
117 |
118 | iteration_step = 1
119 | max_iteration_steps = react_params.maximum_iterations
120 |
121 | # convert tools into ModelRuntime Tool format
122 | prompt_messages_tools = self._init_prompt_tools(tools)
123 | self._prompt_messages_tools = prompt_messages_tools
124 |
125 | run_agent_state = True
126 | llm_usage: dict[str, Optional[LLMUsage]] = {"usage": None}
127 | final_answer = ""
128 | prompt_messages = []
129 | while run_agent_state and iteration_step <= max_iteration_steps:
130 | # continue to run until there is not any tool call
131 | run_agent_state = False
132 | round_started_at = time.perf_counter()
133 | round_log = self.create_log_message(
134 | label=f"ROUND {iteration_step}",
135 | data={},
136 | metadata={
137 | LogMetadata.STARTED_AT: round_started_at,
138 | },
139 | status=ToolInvokeMessage.LogMessage.LogStatus.START,
140 | )
141 | yield round_log
142 | if iteration_step == max_iteration_steps:
143 | # the last iteration, remove all tools
144 | self._prompt_messages_tools = []
145 |
146 | message_file_ids: list[str] = []
147 |
148 | # recalc llm max tokens
149 | prompt_messages = self._organize_prompt_messages(agent_scratchpad, query)
150 | if model.completion_params:
151 | self.recalc_llm_max_tokens(
152 | model.entity, prompt_messages, model.completion_params
153 | )
154 | # invoke model
155 | chunks = self.session.model.llm.invoke(
156 | model_config=LLMModelConfig(**model.model_dump(mode="json")),
157 | prompt_messages=prompt_messages,
158 | stream=True,
159 | stop=stop,
160 | )
161 |
162 | usage_dict = {}
163 | react_chunks = CotAgentOutputParser.handle_react_stream_output(
164 | chunks, usage_dict
165 | )
166 | scratchpad = AgentScratchpadUnit(
167 | agent_response="",
168 | thought="",
169 | action_str="",
170 | observation="",
171 | action=None,
172 | )
173 |
174 | model_started_at = time.perf_counter()
175 | model_log = self.create_log_message(
176 | label=f"{model.model} Thought",
177 | data={},
178 | metadata={
179 | LogMetadata.STARTED_AT: model_started_at,
180 | LogMetadata.PROVIDER: model.provider,
181 | },
182 | parent=round_log,
183 | status=ToolInvokeMessage.LogMessage.LogStatus.START,
184 | )
185 | yield model_log
186 |
187 | for chunk in react_chunks:
188 | if isinstance(chunk, AgentScratchpadUnit.Action):
189 | action = chunk
190 | # detect action
191 | assert scratchpad.agent_response is not None
192 | scratchpad.agent_response += json.dumps(chunk.model_dump())
193 |
194 | scratchpad.action_str = json.dumps(chunk.model_dump())
195 | scratchpad.action = action
196 | else:
197 | scratchpad.agent_response = scratchpad.agent_response or ""
198 | scratchpad.thought = scratchpad.thought or ""
199 | scratchpad.agent_response += chunk
200 | scratchpad.thought += chunk
201 | scratchpad.thought = (
202 | scratchpad.thought.strip()
203 | if scratchpad.thought
204 | else "I am thinking about how to help you"
205 | )
206 | agent_scratchpad.append(scratchpad)
207 |
208 | # get llm usage
209 | if "usage" in usage_dict:
210 | if usage_dict["usage"] is not None:
211 | self.increase_usage(llm_usage, usage_dict["usage"])
212 | else:
213 | usage_dict["usage"] = LLMUsage.empty_usage()
214 |
215 | action = (
216 | scratchpad.action.to_dict()
217 | if scratchpad.action
218 | else {"action": scratchpad.agent_response}
219 | )
220 |
221 | yield self.finish_log_message(
222 | log=model_log,
223 | data={"thought": scratchpad.thought, **action},
224 | metadata={
225 | LogMetadata.STARTED_AT: model_started_at,
226 | LogMetadata.FINISHED_AT: time.perf_counter(),
227 | LogMetadata.ELAPSED_TIME: time.perf_counter() - model_started_at,
228 | LogMetadata.PROVIDER: model.provider,
229 | LogMetadata.TOTAL_PRICE: usage_dict["usage"].total_price
230 | if usage_dict["usage"]
231 | else 0,
232 | LogMetadata.CURRENCY: usage_dict["usage"].currency
233 | if usage_dict["usage"]
234 | else "",
235 | LogMetadata.TOTAL_TOKENS: usage_dict["usage"].total_tokens
236 | if usage_dict["usage"]
237 | else 0,
238 | },
239 | )
240 | if not scratchpad.action:
241 | final_answer = scratchpad.thought
242 | else:
243 | if scratchpad.action.action_name.lower() == "final answer":
244 | # action is final answer, return final answer directly
245 | try:
246 | if isinstance(scratchpad.action.action_input, dict):
247 | final_answer = json.dumps(scratchpad.action.action_input)
248 | elif isinstance(scratchpad.action.action_input, str):
249 | final_answer = scratchpad.action.action_input
250 | else:
251 | final_answer = f"{scratchpad.action.action_input}"
252 | except json.JSONDecodeError:
253 | final_answer = f"{scratchpad.action.action_input}"
254 | else:
255 | run_agent_state = True
256 | # action is tool call, invoke tool
257 | tool_call_started_at = time.perf_counter()
258 | tool_name = scratchpad.action.action_name
259 | tool_call_log = self.create_log_message(
260 | label=f"CALL {tool_name}",
261 | data={},
262 | metadata={
263 | LogMetadata.STARTED_AT: time.perf_counter(),
264 | LogMetadata.PROVIDER: tool_instances[
265 | tool_name
266 | ].identity.provider
267 | if tool_instances.get(tool_name)
268 | else "",
269 | },
270 | parent=round_log,
271 | status=ToolInvokeMessage.LogMessage.LogStatus.START,
272 | )
273 | yield tool_call_log
274 | tool_invoke_response, tool_invoke_parameters = (
275 | self._handle_invoke_action(
276 | action=scratchpad.action,
277 | tool_instances=tool_instances,
278 | message_file_ids=message_file_ids,
279 | )
280 | )
281 | scratchpad.observation = tool_invoke_response
282 | scratchpad.agent_response = tool_invoke_response
283 | yield self.finish_log_message(
284 | log=tool_call_log,
285 | data={
286 | "tool_name": tool_name,
287 | "tool_call_args": tool_invoke_parameters,
288 | "output": tool_invoke_response,
289 | },
290 | metadata={
291 | LogMetadata.STARTED_AT: tool_call_started_at,
292 | LogMetadata.PROVIDER: tool_instances[
293 | tool_name
294 | ].identity.provider
295 | if tool_instances.get(tool_name)
296 | else "",
297 | LogMetadata.FINISHED_AT: time.perf_counter(),
298 | LogMetadata.ELAPSED_TIME: time.perf_counter()
299 | - tool_call_started_at,
300 | },
301 | )
302 |
303 | # update prompt tool message
304 | for prompt_tool in self._prompt_messages_tools:
305 | self.update_prompt_message_tool(
306 | tool_instances[prompt_tool.name], prompt_tool
307 | )
308 | yield self.finish_log_message(
309 | log=round_log,
310 | data={
311 | "action_name": scratchpad.action.action_name
312 | if scratchpad.action
313 | else "",
314 | "action_input": scratchpad.action.action_input
315 | if scratchpad.action
316 | else "",
317 | "thought": scratchpad.thought,
318 | "observation": scratchpad.observation,
319 | },
320 | metadata={
321 | LogMetadata.STARTED_AT: round_started_at,
322 | LogMetadata.FINISHED_AT: time.perf_counter(),
323 | LogMetadata.ELAPSED_TIME: time.perf_counter() - round_started_at,
324 | LogMetadata.TOTAL_PRICE: usage_dict["usage"].total_price
325 | if usage_dict["usage"]
326 | else 0,
327 | LogMetadata.CURRENCY: usage_dict["usage"].currency
328 | if usage_dict["usage"]
329 | else "",
330 | LogMetadata.TOTAL_TOKENS: usage_dict["usage"].total_tokens
331 | if usage_dict["usage"]
332 | else 0,
333 | },
334 | )
335 | iteration_step += 1
336 |
337 | yield self.create_text_message(final_answer)
338 | yield self.create_json_message(
339 | {
340 | "execution_metadata": {
341 | LogMetadata.TOTAL_PRICE: llm_usage["usage"].total_price
342 | if llm_usage["usage"] is not None
343 | else 0,
344 | LogMetadata.CURRENCY: llm_usage["usage"].currency
345 | if llm_usage["usage"] is not None
346 | else "",
347 | LogMetadata.TOTAL_TOKENS: llm_usage["usage"].total_tokens
348 | if llm_usage["usage"] is not None
349 | else 0,
350 | }
351 | }
352 | )
353 |
354 | def _organize_system_prompt(self) -> SystemPromptMessage:
355 | """
356 | Organize system prompt
357 | """
358 |
359 | prompt_entity = AgentPromptEntity(
360 | first_prompt=REACT_PROMPT_TEMPLATES["english"]["chat"]["prompt"],
361 | next_iteration=REACT_PROMPT_TEMPLATES["english"]["chat"][
362 | "agent_scratchpad"
363 | ],
364 | )
365 | if not prompt_entity:
366 | raise ValueError("Agent prompt configuration is not set")
367 | first_prompt = prompt_entity.first_prompt
368 |
369 | system_prompt = (
370 | first_prompt.replace("{{instruction}}", self._instruction)
371 | .replace(
372 | "{{tools}}",
373 | json.dumps(
374 | [
375 | tool.model_dump(mode="json")
376 | for tool in self._prompt_messages_tools
377 | ]
378 | ),
379 | )
380 | .replace(
381 | "{{tool_names}}",
382 | ", ".join([tool.name for tool in self._prompt_messages_tools]),
383 | )
384 | )
385 |
386 | return SystemPromptMessage(content=system_prompt)
387 |
388 | def _organize_user_query(
389 | self, query, prompt_messages: list[PromptMessage]
390 | ) -> list[PromptMessage]:
391 | """
392 | Organize user query
393 | """
394 | prompt_messages.append(UserPromptMessage(content=query))
395 |
396 | return prompt_messages
397 |
398 | def _organize_prompt_messages(
399 | self, agent_scratchpad: list, query: str
400 | ) -> list[PromptMessage]:
401 | """
402 |         Organize prompt messages
403 | """
404 | # organize system prompt
405 | system_message = self._organize_system_prompt()
406 |
407 | # organize current assistant messages
408 | agent_scratchpad = agent_scratchpad
409 | if not agent_scratchpad:
410 | assistant_messages = []
411 | else:
412 | assistant_message = AssistantPromptMessage(content="")
413 | assistant_message.content = (
414 | "" # FIXME: type check tell mypy that assistant_message.content is str
415 | )
416 | for unit in agent_scratchpad:
417 | if unit.is_final():
418 | assert isinstance(assistant_message.content, str)
419 | assistant_message.content += f"Final Answer: {unit.agent_response}"
420 | else:
421 | assert isinstance(assistant_message.content, str)
422 | assistant_message.content += f"Thought: {unit.thought}\n\n"
423 | if unit.action_str:
424 | assistant_message.content += f"Action: {unit.action_str}\n\n"
425 | if unit.observation:
426 | assistant_message.content += (
427 | f"Observation: {unit.observation}\n\n"
428 | )
429 |
430 | assistant_messages = [assistant_message]
431 |
432 | # query messages
433 | query_messages = self._organize_user_query(query, [])
434 |
435 | if assistant_messages:
436 | # organize historic prompt messages
437 | historic_messages = self._organize_historic_prompt_messages(
438 | [
439 | system_message,
440 | *query_messages,
441 | *assistant_messages,
442 | UserPromptMessage(content="continue"),
443 | ]
444 | )
445 | messages = [
446 | system_message,
447 | *historic_messages,
448 | *query_messages,
449 | *assistant_messages,
450 | UserPromptMessage(content="continue"),
451 | ]
452 | else:
453 | # organize historic prompt messages
454 | historic_messages = self._organize_historic_prompt_messages(
455 | [system_message, *query_messages]
456 | )
457 | messages = [system_message, *historic_messages, *query_messages]
458 |
459 | # join all messages
460 | return messages
461 |
462 | def _handle_invoke_action(
463 | self,
464 | action: AgentScratchpadUnit.Action,
465 | tool_instances: Mapping[str, ToolEntity],
466 | message_file_ids: list[str],
467 | ) -> tuple[str, dict[str, Any] | str]:
468 | """
469 | handle invoke action
470 | :param action: action
471 | :param tool_instances: tool instances
472 | :param message_file_ids: message file ids
473 |         :return: tuple of (observation, tool invoke parameters)
474 |
475 | """
476 | # action is tool call, invoke tool
477 | tool_call_name = action.action_name
478 | tool_call_args = action.action_input
479 | tool_instance = tool_instances.get(tool_call_name)
480 |
481 | if not tool_instance:
482 |             answer = f"there is no tool named {tool_call_name}"
483 | return answer, tool_call_args
484 |
485 | if isinstance(tool_call_args, str):
486 | try:
487 | tool_call_args = json.loads(tool_call_args)
488 | except json.JSONDecodeError as e:
489 | params = [
490 | param.name
491 | for param in tool_instance.parameters
492 | if param.form == ToolParameter.ToolParameterForm.LLM
493 | ]
494 | if len(params) > 1:
495 | raise ValueError("tool call args is not a valid json string") from e
496 | tool_call_args = {params[0]: tool_call_args} if len(params) == 1 else {}
497 |
498 | tool_invoke_parameters = {**tool_instance.runtime_parameters, **tool_call_args}
499 | try:
500 | tool_invoke_responses = self.session.tool.invoke(
501 | provider_type=ToolProviderType(tool_instance.provider_type),
502 | provider=tool_instance.identity.provider,
503 | tool_name=tool_instance.identity.name,
504 | parameters=tool_invoke_parameters,
505 | )
506 | result = ""
507 | for response in tool_invoke_responses:
508 | if response.type == ToolInvokeMessage.MessageType.TEXT:
509 | result += cast(ToolInvokeMessage.TextMessage, response.message).text
510 | elif response.type == ToolInvokeMessage.MessageType.LINK:
511 | result += (
512 | f"result link: {cast(ToolInvokeMessage.TextMessage, response.message).text}."
513 | + " please tell user to check it."
514 | )
515 | elif response.type in {
516 | ToolInvokeMessage.MessageType.IMAGE_LINK,
517 | ToolInvokeMessage.MessageType.IMAGE,
518 | }:
519 | result += (
520 | "image has been created and sent to user already, "
521 | + "you do not need to create it, just tell the user to check it now."
522 | )
523 | elif response.type == ToolInvokeMessage.MessageType.JSON:
524 | text = json.dumps(
525 | cast(
526 | ToolInvokeMessage.JsonMessage, response.message
527 | ).json_object,
528 | ensure_ascii=False,
529 | )
530 | result += f"tool response: {text}."
531 | else:
532 | result += f"tool response: {response.message!r}."
533 | except Exception as e:
534 | result = f"tool invoke error: {str(e)}"
535 |
536 | return result, tool_invoke_parameters
537 |
538 | def _convert_dict_to_action(self, action: dict) -> AgentScratchpadUnit.Action:
539 | """
540 | convert dict to action
541 | """
542 | return AgentScratchpadUnit.Action(
543 | action_name=action["action"], action_input=action["action_input"]
544 | )
545 |
546 | def _fill_in_inputs_from_external_data_tools(
547 | self, instruction: str, inputs: Mapping[str, Any]
548 | ) -> str:
549 | """
550 | fill in inputs from external data tools
551 | """
552 | for key, value in inputs.items():
553 | try:
554 | instruction = instruction.replace(f"{{{{{key}}}}}", str(value))
555 | except Exception:
556 | continue
557 |
558 | return instruction
559 |
560 | def _format_assistant_message(
561 | self, agent_scratchpad: list[AgentScratchpadUnit]
562 | ) -> str:
563 | """
564 | format assistant message
565 | """
566 | message = ""
567 | for scratchpad in agent_scratchpad:
568 | if scratchpad.is_final():
569 | message += f"Final Answer: {scratchpad.agent_response}"
570 | else:
571 | message += f"Thought: {scratchpad.thought}\n\n"
572 | if scratchpad.action_str:
573 | message += f"Action: {scratchpad.action_str}\n\n"
574 | if scratchpad.observation:
575 | message += f"Observation: {scratchpad.observation}\n\n"
576 |
577 | return message
578 |
579 | def _organize_historic_prompt_messages(
580 | self,
581 | history_prompt_messages: list[PromptMessage],
582 | current_session_messages: list[PromptMessage] | None = None,
583 | ) -> list[PromptMessage]:
584 | """
585 | organize historic prompt messages
586 | """
587 | result: list[PromptMessage] = []
588 | scratchpads: list[AgentScratchpadUnit] = []
589 | current_scratchpad: AgentScratchpadUnit | None = None
590 |
591 | for message in history_prompt_messages:
592 | if isinstance(message, AssistantPromptMessage):
593 | if not current_scratchpad:
594 | assert isinstance(message.content, str)
595 | current_scratchpad = AgentScratchpadUnit(
596 | agent_response=message.content,
597 | thought=message.content
598 | or "I am thinking about how to help you",
599 | action_str="",
600 | action=None,
601 | observation=None,
602 | )
603 | scratchpads.append(current_scratchpad)
604 | if message.tool_calls:
605 | try:
606 | current_scratchpad.action = AgentScratchpadUnit.Action(
607 | action_name=message.tool_calls[0].function.name,
608 | action_input=json.loads(
609 | message.tool_calls[0].function.arguments
610 | ),
611 | )
612 | current_scratchpad.action_str = json.dumps(
613 | current_scratchpad.action.to_dict()
614 | )
615 | except Exception:
616 | pass
617 | elif isinstance(message, ToolPromptMessage):
618 | if current_scratchpad:
619 | assert isinstance(message.content, str)
620 | current_scratchpad.observation = message.content
621 | else:
622 | raise NotImplementedError("expected str type")
623 | elif isinstance(message, UserPromptMessage):
624 | if scratchpads:
625 | result.append(
626 | AssistantPromptMessage(
627 | content=self._format_assistant_message(scratchpads)
628 | )
629 | )
630 | scratchpads = []
631 | current_scratchpad = None
632 |
633 | result.append(message)
634 |
635 | if scratchpads:
636 | result.append(
637 | AssistantPromptMessage(
638 | content=self._format_assistant_message(scratchpads)
639 | )
640 | )
641 |
642 | return current_session_messages or []
643 |
--------------------------------------------------------------------------------
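
A note on the ReAct round format used above: each loop iteration is captured in an AgentScratchpadUnit and later replayed as plain "Thought / Action / Observation" text by _format_assistant_message, with the action serialized to the {"action": ..., "action_input": ...} shape that _convert_dict_to_action expects. Below is a minimal, self-contained sketch of that formatting; the tool name and arguments are made up and the plugin classes are not imported:

import json

# Minimal sketch of how one ReAct round is replayed as assistant text
# (mirrors _format_assistant_message in strategies/ReAct.py; the tool name
# and arguments below are illustrative only).
round_unit = {
    "thought": "I should call the weather tool first.",
    "action_str": json.dumps({"action": "weather", "action_input": {"city": "Tokyo"}}),
    "observation": "Sunny, 23 degrees Celsius",
}

message = f"Thought: {round_unit['thought']}\n\n"
if round_unit["action_str"]:
    message += f"Action: {round_unit['action_str']}\n\n"
if round_unit["observation"]:
    message += f"Observation: {round_unit['observation']}\n\n"

print(message)
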
/strategies/ReAct.yaml:
--------------------------------------------------------------------------------
1 | identity:
2 | name: ReAct
3 | author: Dify
4 | label:
5 | en_US: ReAct
6 | zh_Hans: ReAct
7 | pt_BR: ReAct
8 | description:
9 |     en_US: ReAct is a basic agent strategy; the model will use the provided tools to perform the task.
10 | zh_Hans: ReAct 是一个基本的 Agent 策略,模型将使用提供的工具来执行任务。
11 |     pt_BR: ReAct is a basic agent strategy; the model will use the provided tools to perform the task.
12 | parameters:
13 | - name: model
14 | type: model-selector
15 | scope: tool-call&llm
16 | required: true
17 | label:
18 | en_US: Model
19 | zh_Hans: 模型
20 | pt_BR: Model
21 | - name: tools
22 | type: array[tools]
23 | required: true
24 | label:
25 | en_US: Tools list
26 | zh_Hans: 工具列表
27 | pt_BR: Tools list
28 | - name: instruction
29 | type: string
30 | required: true
31 | label:
32 | en_US: Instruction
33 | zh_Hans: 指令
34 | pt_BR: Instruction
35 | auto_generate:
36 | type: prompt_instruction
37 | template:
38 | enabled: true
39 | - name: query
40 | type: string
41 | required: true
42 | label:
43 | en_US: Query
44 | zh_Hans: 查询
45 | pt_BR: Query
46 | - name: maximum_iterations
47 | type: number
48 | required: true
49 | label:
50 |     en_US: Maximum Iterations
51 |     zh_Hans: 最大迭代次数
52 |     pt_BR: Maximum Iterations
53 | default: 3
54 | min: 1
55 | max: 30
56 | extra:
57 | python:
58 | source: strategies/ReAct.py
59 |
--------------------------------------------------------------------------------
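
The parameters declared in ReAct.yaml above arrive in the strategy's _invoke as a plain dict and are validated by ReActParams. A rough, illustrative payload is sketched below; the values are made up, the model block follows the shape used in test/chatflow/*.yml later in this repo, and the exact fields accepted by AgentModelConfig and ToolEntity may differ:

# Illustrative parameters payload for the ReAct strategy (values are made up;
# the model block mirrors test/chatflow/mcp_multi_sse_chatflow_v0.0.3.yml, and
# the AgentModelConfig / ToolEntity schemas may require additional fields).
parameters = {
    "query": "What tools do you have?",
    "instruction": "You are helpful LLM Agent",
    "model": {
        "provider": "langgenius/gemini/google",
        "model": "gemini-2.0-flash-exp",
        "model_type": "llm",
        "mode": "chat",
        "completion_params": {},
    },
    "tools": [],               # array[tools]; Dify fills this from the node config
    "maximum_iterations": 3,
}
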
/strategies/function_calling.py:
--------------------------------------------------------------------------------
1 | import json
2 | import time
3 | from collections.abc import Generator
4 | from copy import deepcopy
5 | from typing import Any, Optional, cast
6 |
7 | from dify_plugin.entities.agent import AgentInvokeMessage
8 | from dify_plugin.entities.model.llm import (
9 | LLMModelConfig,
10 | LLMResult,
11 | LLMResultChunk,
12 | LLMUsage,
13 | )
14 | from dify_plugin.entities.model.message import (
15 | AssistantPromptMessage,
16 | PromptMessage,
17 | PromptMessageContentType,
18 | PromptMessageRole,
19 | SystemPromptMessage,
20 | ToolPromptMessage,
21 | UserPromptMessage,
22 | )
23 | from dify_plugin.entities.tool import LogMetadata, ToolInvokeMessage, ToolProviderType
24 | from dify_plugin.interfaces.agent import AgentModelConfig, AgentStrategy, ToolEntity
25 | from pydantic import BaseModel, Field
26 |
27 |
28 | class FunctionCallingParams(BaseModel):
29 | query: str
30 | instruction: str | None
31 | model: AgentModelConfig
32 | tools: list[ToolEntity] | None
33 | maximum_iterations: int = 3
34 |
35 |
36 | class ToolInvokeMeta(BaseModel):
37 | """
38 | Tool invoke meta
39 | """
40 |
41 | time_cost: float = Field(..., description="The time cost of the tool invoke")
42 | error: Optional[str] = None
43 | tool_config: Optional[dict] = None
44 |
45 | @classmethod
46 | def empty(cls) -> "ToolInvokeMeta":
47 | """
48 | Get an empty instance of ToolInvokeMeta
49 | """
50 | return cls(time_cost=0.0, error=None, tool_config={})
51 |
52 | @classmethod
53 | def error_instance(cls, error: str) -> "ToolInvokeMeta":
54 | """
55 | Get an instance of ToolInvokeMeta with error
56 | """
57 | return cls(time_cost=0.0, error=error, tool_config={})
58 |
59 | def to_dict(self) -> dict:
60 | return {
61 | "time_cost": self.time_cost,
62 | "error": self.error,
63 | "tool_config": self.tool_config,
64 | }
65 |
66 |
67 | class FunctionCallingAgentStrategy(AgentStrategy):
68 | def __init__(self, session):
69 | super().__init__(session)
70 | self.query = ""
71 |
72 | def _invoke(self, parameters: dict[str, Any]) -> Generator[AgentInvokeMessage]:
73 | """
74 | Run FunctionCall agent application
75 | """
76 | fc_params = FunctionCallingParams(**parameters)
77 | query = fc_params.query
78 | self.query = query
79 | instruction = fc_params.instruction
80 | init_prompt_messages = [
81 | PromptMessage(role=PromptMessageRole.SYSTEM, content=instruction)
82 | ]
83 | tools = fc_params.tools
84 | tool_instances = {tool.identity.name: tool for tool in tools} if tools else {}
85 | model = fc_params.model
86 | stop = (
87 | fc_params.model.completion_params.get("stop", [])
88 | if fc_params.model.completion_params
89 | else []
90 | )
91 | # convert tools into ModelRuntime Tool format
92 | prompt_messages_tools = self._init_prompt_tools(tools)
93 |
94 | iteration_step = 1
95 | max_iteration_steps = fc_params.maximum_iterations
96 | current_thoughts: list[PromptMessage] = []
97 |         # continue to run until there are no more tool calls
98 | function_call_state = True
99 | llm_usage: dict[str, Optional[LLMUsage]] = {"usage": None}
100 | final_answer = ""
101 |
102 | while function_call_state and iteration_step <= max_iteration_steps:
103 | function_call_state = False
104 | round_started_at = time.perf_counter()
105 | round_log = self.create_log_message(
106 | label=f"ROUND {iteration_step}",
107 | data={},
108 | metadata={
109 | LogMetadata.STARTED_AT: round_started_at,
110 | },
111 | status=ToolInvokeMessage.LogMessage.LogStatus.START,
112 | )
113 | yield round_log
114 | if iteration_step == max_iteration_steps:
115 | # the last iteration, remove all tools
116 | prompt_messages_tools = []
117 |
118 | # recalc llm max tokens
119 | prompt_messages = self._organize_prompt_messages(
120 | history_prompt_messages=init_prompt_messages,
121 | current_thoughts=current_thoughts,
122 | )
123 | if model.completion_params:
124 | self.recalc_llm_max_tokens(
125 | model.entity, prompt_messages, model.completion_params
126 | )
127 | # invoke model
128 | model_started_at = time.perf_counter()
129 | model_log = self.create_log_message(
130 | label=f"{model.model} Thought",
131 | data={},
132 | metadata={
133 | LogMetadata.STARTED_AT: model_started_at,
134 | LogMetadata.PROVIDER: model.provider,
135 | },
136 | parent=round_log,
137 | status=ToolInvokeMessage.LogMessage.LogStatus.START,
138 | )
139 | yield model_log
140 | chunks: Generator[LLMResultChunk, None, None] | LLMResult = (
141 | self.session.model.llm.invoke(
142 | model_config=LLMModelConfig(**model.model_dump(mode="json")),
143 | prompt_messages=prompt_messages,
144 | stream=True,
145 | stop=stop,
146 | tools=prompt_messages_tools,
147 | )
148 | )
149 |
150 | tool_calls: list[tuple[str, str, dict[str, Any]]] = []
151 |
152 | # save full response
153 | response = ""
154 |
155 | # save tool call names and inputs
156 | tool_call_names = ""
157 |
158 | current_llm_usage = None
159 |
160 | if isinstance(chunks, Generator):
161 | for chunk in chunks:
162 | # check if there is any tool call
163 | if self.check_tool_calls(chunk):
164 | function_call_state = True
165 | tool_calls.extend(self.extract_tool_calls(chunk) or [])
166 | tool_call_names = ";".join(
167 | [tool_call[1] for tool_call in tool_calls]
168 | )
169 |
170 | if chunk.delta.message and chunk.delta.message.content:
171 | if isinstance(chunk.delta.message.content, list):
172 | for content in chunk.delta.message.content:
173 | response += content.data
174 | if (
175 | not function_call_state
176 | or iteration_step == max_iteration_steps
177 | ):
178 | yield self.create_text_message(content.data)
179 | else:
180 | response += str(chunk.delta.message.content)
181 | if (
182 | not function_call_state
183 | or iteration_step == max_iteration_steps
184 | ):
185 | yield self.create_text_message(
186 | str(chunk.delta.message.content)
187 | )
188 |
189 | if chunk.delta.usage:
190 | self.increase_usage(llm_usage, chunk.delta.usage)
191 | current_llm_usage = chunk.delta.usage
192 |
193 | else:
194 | result = chunks
195 | # check if there is any tool call
196 | if self.check_blocking_tool_calls(result):
197 | function_call_state = True
198 | tool_calls.extend(self.extract_blocking_tool_calls(result) or [])
199 | tool_call_names = ";".join(
200 | [tool_call[1] for tool_call in tool_calls]
201 | )
202 |
203 | if result.usage:
204 | self.increase_usage(llm_usage, result.usage)
205 | current_llm_usage = result.usage
206 |
207 | if result.message and result.message.content:
208 | if isinstance(result.message.content, list):
209 | for content in result.message.content:
210 | response += content.data
211 | else:
212 | response += str(result.message.content)
213 |
214 | if not result.message.content:
215 | result.message.content = ""
216 | yield self.finish_log_message(
217 | log=model_log,
218 | data={
219 | "output": response,
220 | "tool_name": tool_call_names,
221 | "tool_input": {
222 | tool_call[1]: tool_call[2] for tool_call in tool_calls
223 | },
224 | },
225 | metadata={
226 | LogMetadata.STARTED_AT: model_started_at,
227 | LogMetadata.FINISHED_AT: time.perf_counter(),
228 | LogMetadata.ELAPSED_TIME: time.perf_counter() - model_started_at,
229 | LogMetadata.PROVIDER: model.provider,
230 | LogMetadata.TOTAL_PRICE: current_llm_usage.total_price
231 | if current_llm_usage
232 | else 0,
233 | LogMetadata.CURRENCY: current_llm_usage.currency
234 | if current_llm_usage
235 | else "",
236 | LogMetadata.TOTAL_TOKENS: current_llm_usage.total_tokens
237 | if current_llm_usage
238 | else 0,
239 | },
240 | )
241 | assistant_message = AssistantPromptMessage(content="", tool_calls=[])
242 | if tool_calls:
243 | assistant_message.tool_calls = [
244 | AssistantPromptMessage.ToolCall(
245 | id=tool_call[0],
246 | type="function",
247 | function=AssistantPromptMessage.ToolCall.ToolCallFunction(
248 | name=tool_call[1],
249 | arguments=json.dumps(tool_call[2], ensure_ascii=False),
250 | ),
251 | )
252 | for tool_call in tool_calls
253 | ]
254 | else:
255 | assistant_message.content = response
256 |
257 | current_thoughts.append(assistant_message)
258 |
259 | final_answer += response + "\n"
260 |
261 | # call tools
262 | tool_responses = []
263 | for tool_call_id, tool_call_name, tool_call_args in tool_calls:
264 | tool_instance = tool_instances[tool_call_name]
265 | tool_call_started_at = time.perf_counter()
266 | tool_call_log = self.create_log_message(
267 | label=f"CALL {tool_call_name}",
268 | data={},
269 | metadata={
270 | LogMetadata.STARTED_AT: time.perf_counter(),
271 | LogMetadata.PROVIDER: tool_instance.identity.provider,
272 | },
273 | parent=round_log,
274 | status=ToolInvokeMessage.LogMessage.LogStatus.START,
275 | )
276 | yield tool_call_log
277 | if not tool_instance:
278 | tool_response = {
279 | "tool_call_id": tool_call_id,
280 | "tool_call_name": tool_call_name,
281 |                         "tool_response": f"there is no tool named {tool_call_name}",
282 |                         "meta": ToolInvokeMeta.error_instance(
283 |                             f"there is no tool named {tool_call_name}"
284 | ).to_dict(),
285 | }
286 | else:
287 | # invoke tool
288 | try:
289 | tool_invoke_responses = self.session.tool.invoke(
290 | provider_type=ToolProviderType(tool_instance.provider_type),
291 | provider=tool_instance.identity.provider,
292 | tool_name=tool_instance.identity.name,
293 | parameters={
294 | **tool_instance.runtime_parameters,
295 | **tool_call_args,
296 | },
297 | )
298 | result = ""
299 | for response in tool_invoke_responses:
300 | if response.type == ToolInvokeMessage.MessageType.TEXT:
301 | result += cast(
302 | ToolInvokeMessage.TextMessage, response.message
303 | ).text
304 | elif response.type == ToolInvokeMessage.MessageType.LINK:
305 | result += (
306 | f"result link: {cast(ToolInvokeMessage.TextMessage, response.message).text}."
307 | + " please tell user to check it."
308 | )
309 | elif response.type in {
310 | ToolInvokeMessage.MessageType.IMAGE_LINK,
311 | ToolInvokeMessage.MessageType.IMAGE,
312 | }:
313 | result += (
314 | "image has been created and sent to user already, "
315 | + "you do not need to create it, just tell the user to check it now."
316 | )
317 | elif response.type == ToolInvokeMessage.MessageType.JSON:
318 | text = json.dumps(
319 | cast(
320 | ToolInvokeMessage.JsonMessage, response.message
321 | ).json_object,
322 | ensure_ascii=False,
323 | )
324 | result += f"tool response: {text}."
325 | else:
326 | result += f"tool response: {response.message!r}."
327 | except Exception as e:
328 | result = f"tool invoke error: {str(e)}"
329 | tool_response = {
330 | "tool_call_id": tool_call_id,
331 | "tool_call_name": tool_call_name,
332 | "tool_call_input": {
333 | **tool_instance.runtime_parameters,
334 | **tool_call_args,
335 | },
336 | "tool_response": result,
337 | }
338 |
339 | yield self.finish_log_message(
340 | log=tool_call_log,
341 | data={
342 | "output": tool_response,
343 | },
344 | metadata={
345 | LogMetadata.STARTED_AT: tool_call_started_at,
346 | LogMetadata.PROVIDER: tool_instance.identity.provider,
347 | LogMetadata.FINISHED_AT: time.perf_counter(),
348 | LogMetadata.ELAPSED_TIME: time.perf_counter()
349 | - tool_call_started_at,
350 | },
351 | )
352 | tool_responses.append(tool_response)
353 | if tool_response["tool_response"] is not None:
354 | current_thoughts.append(
355 | ToolPromptMessage(
356 | content=str(tool_response["tool_response"]),
357 | tool_call_id=tool_call_id,
358 | name=tool_call_name,
359 | )
360 | )
361 |
362 | # update prompt tool
363 | for prompt_tool in prompt_messages_tools:
364 | self.update_prompt_message_tool(
365 | tool_instances[prompt_tool.name], prompt_tool
366 | )
367 | yield self.finish_log_message(
368 | log=round_log,
369 | data={
370 | "output": {
371 | "llm_response": response,
372 | "tool_responses": tool_responses,
373 | },
374 | },
375 | metadata={
376 | LogMetadata.STARTED_AT: round_started_at,
377 | LogMetadata.FINISHED_AT: time.perf_counter(),
378 | LogMetadata.ELAPSED_TIME: time.perf_counter() - round_started_at,
379 | LogMetadata.TOTAL_PRICE: current_llm_usage.total_price
380 | if current_llm_usage
381 | else 0,
382 | LogMetadata.CURRENCY: current_llm_usage.currency
383 | if current_llm_usage
384 | else "",
385 | LogMetadata.TOTAL_TOKENS: current_llm_usage.total_tokens
386 | if current_llm_usage
387 | else 0,
388 | },
389 | )
390 | iteration_step += 1
391 |
392 | yield self.create_json_message(
393 | {
394 | "execution_metadata": {
395 | LogMetadata.TOTAL_PRICE: llm_usage["usage"].total_price
396 | if llm_usage["usage"] is not None
397 | else 0,
398 | LogMetadata.CURRENCY: llm_usage["usage"].currency
399 | if llm_usage["usage"] is not None
400 | else "",
401 | LogMetadata.TOTAL_TOKENS: llm_usage["usage"].total_tokens
402 | if llm_usage["usage"] is not None
403 | else 0,
404 | }
405 | }
406 | )
407 |
408 | def check_tool_calls(self, llm_result_chunk: LLMResultChunk) -> bool:
409 | """
410 | Check if there is any tool call in llm result chunk
411 | """
412 | return bool(llm_result_chunk.delta.message.tool_calls)
413 |
414 | def check_blocking_tool_calls(self, llm_result: LLMResult) -> bool:
415 | """
416 | Check if there is any blocking tool call in llm result
417 | """
418 | return bool(llm_result.message.tool_calls)
419 |
420 | def extract_tool_calls(
421 | self, llm_result_chunk: LLMResultChunk
422 | ) -> list[tuple[str, str, dict[str, Any]]]:
423 | """
424 | Extract tool calls from llm result chunk
425 |
426 | Returns:
427 | List[Tuple[str, str, Dict[str, Any]]]: [(tool_call_id, tool_call_name, tool_call_args)]
428 | """
429 | tool_calls = []
430 | for prompt_message in llm_result_chunk.delta.message.tool_calls:
431 | args = {}
432 | if prompt_message.function.arguments != "":
433 | args = json.loads(prompt_message.function.arguments)
434 |
435 | tool_calls.append(
436 | (
437 | prompt_message.id,
438 | prompt_message.function.name,
439 | args,
440 | )
441 | )
442 |
443 | return tool_calls
444 |
445 | def extract_blocking_tool_calls(
446 | self, llm_result: LLMResult
447 | ) -> list[tuple[str, str, dict[str, Any]]]:
448 | """
449 | Extract blocking tool calls from llm result
450 |
451 | Returns:
452 | List[Tuple[str, str, Dict[str, Any]]]: [(tool_call_id, tool_call_name, tool_call_args)]
453 | """
454 | tool_calls = []
455 | for prompt_message in llm_result.message.tool_calls:
456 | args = {}
457 | if prompt_message.function.arguments != "":
458 | args = json.loads(prompt_message.function.arguments)
459 |
460 | tool_calls.append(
461 | (
462 | prompt_message.id,
463 | prompt_message.function.name,
464 | args,
465 | )
466 | )
467 |
468 | return tool_calls
469 |
470 | def _init_system_message(
471 | self, prompt_template: str, prompt_messages: list[PromptMessage]
472 | ) -> list[PromptMessage]:
473 | """
474 | Initialize system message
475 | """
476 | if not prompt_messages and prompt_template:
477 | return [
478 | SystemPromptMessage(content=prompt_template),
479 | ]
480 |
481 | if (
482 | prompt_messages
483 | and not isinstance(prompt_messages[0], SystemPromptMessage)
484 | and prompt_template
485 | ):
486 | prompt_messages.insert(0, SystemPromptMessage(content=prompt_template))
487 |
488 | return prompt_messages or []
489 |
490 | def _organize_user_query(
491 | self, query: str, prompt_messages: list[PromptMessage]
492 | ) -> list[PromptMessage]:
493 | """
494 | Organize user query
495 | """
496 |
497 | prompt_messages.append(UserPromptMessage(content=query))
498 |
499 | return prompt_messages
500 |
501 | def _clear_user_prompt_image_messages(
502 | self, prompt_messages: list[PromptMessage]
503 | ) -> list[PromptMessage]:
504 | """
505 |         For now, GPT supports both function calling and vision only at the first iteration,
506 |         so we remove the image messages from the prompt messages after the first iteration.
507 | """
508 | prompt_messages = deepcopy(prompt_messages)
509 |
510 | for prompt_message in prompt_messages:
511 | if isinstance(prompt_message, UserPromptMessage) and isinstance(
512 | prompt_message.content, list
513 | ):
514 | prompt_message.content = "\n".join(
515 | [
516 | content.data
517 | if content.type == PromptMessageContentType.TEXT
518 | else "[image]"
519 | if content.type == PromptMessageContentType.IMAGE
520 | else "[file]"
521 | for content in prompt_message.content
522 | ]
523 | )
524 |
525 | return prompt_messages
526 |
527 | def _organize_prompt_messages(
528 | self,
529 | current_thoughts: list[PromptMessage],
530 | history_prompt_messages: list[PromptMessage],
531 | ) -> list[PromptMessage]:
532 | prompt_template = ""
533 | history_prompt_messages = self._init_system_message(
534 | prompt_template, history_prompt_messages
535 | )
536 | query_prompt_messages = self._organize_user_query(self.query or "", [])
537 |
538 | prompt_messages = [
539 | *history_prompt_messages,
540 | *query_prompt_messages,
541 | *current_thoughts,
542 | ]
543 | if len(current_thoughts) != 0:
544 |             # clear image messages after the first iteration
545 | prompt_messages = self._clear_user_prompt_image_messages(prompt_messages)
546 | return prompt_messages
547 |
--------------------------------------------------------------------------------
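
The function-calling strategy above keeps tool calls as (tool_call_id, tool_call_name, tool_call_args) tuples, as returned by extract_tool_calls / extract_blocking_tool_calls, and serializes the arguments back to JSON when rebuilding the assistant message. A small self-contained sketch of that round trip follows; the tool name and arguments are made up:

import json

# Illustrative round trip for the (id, name, args) tuples used by
# FunctionCallingAgentStrategy; the tool name and arguments are made up.
tool_calls = [
    ("call_0", "current_time", {"timezone": "Asia/Tokyo"}),
]

# Arguments are serialized to a JSON string when the assistant message is
# rebuilt (mirrors ToolCallFunction(arguments=json.dumps(..., ensure_ascii=False))).
serialized = [
    {"id": call_id, "name": name, "arguments": json.dumps(args, ensure_ascii=False)}
    for call_id, name, args in tool_calls
]

# ...and parsed again with the same empty-string guard used in extract_tool_calls.
for entry in serialized:
    args = json.loads(entry["arguments"]) if entry["arguments"] != "" else {}
    print(entry["name"], args)
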
/strategies/function_calling.yaml:
--------------------------------------------------------------------------------
1 | identity:
2 | name: function_calling
3 | author: Dify
4 | label:
5 | en_US: FunctionCalling
6 | zh_Hans: FunctionCalling
7 | pt_BR: FunctionCalling
8 | description:
9 |     en_US: Function Calling is a basic agent strategy; the model will use the provided tools to perform the task.
10 | zh_Hans: Function Calling 是一个基本的 Agent 策略,模型将使用提供的工具来执行任务。
11 |     pt_BR: Function Calling is a basic agent strategy; the model will use the provided tools to perform the task.
12 | parameters:
13 | - name: model
14 | type: model-selector
15 | scope: tool-call&llm
16 | required: true
17 | label:
18 | en_US: Model
19 | zh_Hans: 模型
20 | pt_BR: Model
21 | - name: tools
22 | type: array[tools]
23 | required: true
24 | label:
25 | en_US: Tools list
26 | zh_Hans: 工具列表
27 | pt_BR: Tools list
28 | - name: instruction
29 | type: string
30 | required: true
31 | label:
32 | en_US: Instruction
33 | zh_Hans: 指令
34 | pt_BR: Instruction
35 | auto_generate:
36 | type: prompt_instruction
37 | template:
38 | enabled: true
39 | - name: query
40 | type: string
41 | required: true
42 | label:
43 | en_US: Query
44 | zh_Hans: 查询
45 | pt_BR: Query
46 | - name: maximum_iterations
47 | type: number
48 | required: true
49 | label:
50 |     en_US: Maximum Iterations
51 |     zh_Hans: 最大迭代次数
52 |     pt_BR: Maximum Iterations
53 | default: 3
54 | max: 30
55 | min: 1
56 | extra:
57 | python:
58 | source: strategies/function_calling.py
59 |
--------------------------------------------------------------------------------
/strategies/mcpReAct.yaml:
--------------------------------------------------------------------------------
1 | identity:
2 | name: mcpReAct
3 | author: 3dify
4 | label:
5 | en_US: mcpReAct
6 | zh_Hans: mcpReAct
7 | pt_BR: mcpReAct
8 | description:
9 |     en_US: mcpReAct is a basic agent strategy; the model will use the provided tools to perform the task. This node is an MCP client that can connect to an MCP server; Dify acts as the Host (not the MCP server).
10 | zh_Hans: mcpReAct 是一个基本的 Agent 策略,模型将使用提供的工具来执行任务。
11 |     pt_BR: mcpReAct is a basic agent strategy; the model will use the provided tools to perform the task. This node is an MCP client that can connect to an MCP server; Dify acts as the Host (not the MCP server).
12 | parameters:
13 | - name: model
14 | type: model-selector
15 | scope: tool-call&llm
16 | required: true
17 | label:
18 | en_US: Model
19 | zh_Hans: 模型
20 | pt_BR: Model
21 | - name: config_json
22 | type: string
23 | required: true
24 | label:
25 |     en_US: Commands to launch each MCP server (claude_desktop_config.json)
26 | zh_Hans: 唤醒每个 MCP 服务器的命令(claude_desktop_config.json)
27 |     pt_BR: Commands to launch each MCP server (claude_desktop_config.json)
28 | - name: tools
29 | type: array[tools]
30 | required: false
31 | label:
32 | en_US: Tools list
33 | zh_Hans: 工具列表
34 | pt_BR: Tools list
35 | - name: instruction
36 | type: string
37 | required: true
38 | label:
39 | en_US: Instruction
40 | zh_Hans: 指令
41 | pt_BR: Instruction
42 | auto_generate:
43 | type: prompt_instruction
44 | template:
45 | enabled: true
46 | - name: query
47 | type: string
48 | required: true
49 | label:
50 | en_US: Query
51 | zh_Hans: 查询
52 | pt_BR: Query
53 | - name: maximum_iterations
54 | type: number
55 | required: true
56 | label:
57 |     en_US: Maximum Iterations
58 |     zh_Hans: 最大迭代次数
59 |     pt_BR: Maximum Iterations
60 | default: 3
61 | min: 1
62 | max: 30
63 | extra:
64 | python:
65 | source: strategies/mcpReAct.py
66 |
--------------------------------------------------------------------------------
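
The config_json parameter of mcpReAct expects a claude_desktop_config.json-style object describing how to reach each MCP server. A hedged example is sketched below: the SSE "url" entries mirror the test/chatflow workflows in this repo, while the stdio "command"/"args" entry follows the usual claude_desktop_config.json convention and the npx package name is an assumption, not something this file defines:

import json

# Illustrative config_json value for the mcpReAct strategy. The "url" (SSE)
# form mirrors test/chatflow/*.yml; the "command"/"args" (stdio) form follows
# the claude_desktop_config.json convention, and the package name below is an
# assumption rather than something defined in this repo.
config_json = json.dumps(
    {
        "mcpServers": {
            "everything": {"url": "http://host.docker.internal:8080/sse"},
            "everything-stdio": {
                "command": "npx",
                "args": ["-y", "@modelcontextprotocol/server-everything"],
            },
        }
    },
    indent=2,
)
print(config_json)
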
/test/chatflow/README.md:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/test/chatflow/mcp_multi_sse_chatflow_v0.0.3.yml:
--------------------------------------------------------------------------------
1 | app:
2 |   description: 'Test for the MCP client as a Dify Agent Strategy plugin.
3 |
4 |     Used the "Everything MCP server".
5 |
6 |     Resource and Prompt are working correctly.
7 |
8 |     Tools also succeeded, except for "samplingLLM" and "annotatedMessage"'
9 | icon: 🤖
10 | icon_background: '#FFEAD5'
11 | mode: advanced-chat
12 | name: multi MCP server test
13 | use_icon_as_answer_icon: true
14 | dependencies: []
15 | kind: app
16 | version: 0.1.5
17 | workflow:
18 | conversation_variables: []
19 | environment_variables: []
20 | features:
21 | file_upload:
22 | allowed_file_extensions: []
23 | allowed_file_types:
24 | - image
25 | allowed_file_upload_methods:
26 | - remote_url
27 | - local_file
28 | enabled: true
29 | fileUploadConfig:
30 | audio_file_size_limit: 50
31 | batch_count_limit: 5
32 | file_size_limit: 15
33 | image_file_size_limit: 10
34 | video_file_size_limit: 100
35 | workflow_file_upload_limit: 10
36 | image:
37 | enabled: false
38 | number_limits: 3
39 | transfer_methods:
40 | - local_file
41 | - remote_url
42 | number_limits: 1
43 | opening_statement: ''
44 | retriever_resource:
45 | enabled: true
46 | sensitive_word_avoidance:
47 | enabled: false
48 | speech_to_text:
49 | enabled: false
50 | suggested_questions: []
51 | suggested_questions_after_answer:
52 | enabled: false
53 | text_to_speech:
54 | enabled: false
55 | language: ''
56 | voice: ''
57 | graph:
58 | edges:
59 | - data:
60 | isInIteration: false
61 | sourceType: start
62 | targetType: agent
63 | id: 1741367649354-source-1742356283732-target
64 | source: '1741367649354'
65 | sourceHandle: source
66 | target: '1742356283732'
67 | targetHandle: target
68 | type: custom
69 | zIndex: 0
70 | - data:
71 | isInIteration: false
72 | sourceType: agent
73 | targetType: answer
74 | id: 1742356283732-source-answer-target
75 | source: '1742356283732'
76 | sourceHandle: source
77 | target: answer
78 | targetHandle: target
79 | type: custom
80 | zIndex: 0
81 | nodes:
82 | - data:
83 | desc: ''
84 | selected: false
85 | title: Start
86 | type: start
87 | variables: []
88 | height: 53
89 | id: '1741367649354'
90 | position:
91 | x: 87.25729350925093
92 | y: 282
93 | positionAbsolute:
94 | x: 87.25729350925093
95 | y: 282
96 | selected: false
97 | sourcePosition: right
98 | targetPosition: left
99 | type: custom
100 | width: 243
101 | - data:
102 | answer: '{{#1742356283732.text#}}'
103 | desc: ''
104 | selected: false
105 | title: Answer
106 | type: answer
107 | variables: []
108 | height: 101
109 | id: answer
110 | position:
111 | x: 629.2361265621643
112 | y: 282
113 | positionAbsolute:
114 | x: 629.2361265621643
115 | y: 282
116 | selected: false
117 | sourcePosition: right
118 | targetPosition: left
119 | type: custom
120 | width: 243
121 | - data:
122 | author: 3Dify-developer
123 | desc: ''
124 | height: 194
125 | selected: false
126 | showAuthor: true
127 | text: '{"root":{"children":[{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
128 | 16px;","text":"User prompt base","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
129 | 16px;","text":"I''m developing MCP client as Dify Agent Strategy Plugin.","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
130 | 16px;","text":"- Some features haven''t implemented and might not work as
131 | I expected.","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
132 | 16px;","text":"- I found good MCP server for testing.","type":"text","version":1},{"type":"linebreak","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
133 | 16px;","text":"- You should ignore \"Current Time Tool\". ","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
134 | theme: blue
135 | title: ''
136 | type: ''
137 | width: 569
138 | height: 194
139 | id: '1741624678131'
140 | position:
141 | x: 87.25729350925093
142 | y: 508.8200615915433
143 | positionAbsolute:
144 | x: 87.25729350925093
145 | y: 508.8200615915433
146 | selected: false
147 | sourcePosition: right
148 | targetPosition: left
149 | type: custom-note
150 | width: 569
151 | - data:
152 | author: 3Dify-developer
153 | desc: ''
154 | height: 88
155 | selected: false
156 | showAuthor: false
157 | text: '{"root":{"children":[{"children":[{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
158 | 16px;","text":"Sample MCP server for testing","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"link","version":1,"rel":"noreferrer","target":null,"title":null,"url":""}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":"font-size:
159 | 16px;"},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":"https://github.com/modelcontextprotocol/servers/tree/main/src/everything","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
160 | theme: cyan
161 | title: ''
162 | type: ''
163 | width: 449
164 | height: 88
165 | id: '1741626728667'
166 | position:
167 | x: 690.5542795067578
168 | y: 763.3257650174893
169 | positionAbsolute:
170 | x: 690.5542795067578
171 | y: 763.3257650174893
172 | selected: false
173 | sourcePosition: right
174 | targetPosition: left
175 | type: custom-note
176 | width: 449
177 | - data:
178 | author: 3Dify-developer
179 | desc: ''
180 | height: 151
181 | selected: false
182 | showAuthor: true
183 | text: '{"root":{"children":[{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
184 | 16px;","text":"MCP Tool Test prompt","type":"text","version":1},{"type":"linebreak","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
185 | 16px;","text":"Try mcp tools one-by-one","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
186 | 16px;","text":"- judge result in \"thought\" phase (I want to know at least
187 | Fail or Success)","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
188 | 16px;","text":"- avoid \"sampleLLM\" which is not compatible feature yet.
189 | ","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
190 | theme: green
191 | title: ''
192 | type: ''
193 | width: 570
194 | height: 151
195 | id: '1741669149712'
196 | position:
197 | x: 87.25729350925093
198 | y: 718.2693074279789
199 | positionAbsolute:
200 | x: 87.25729350925093
201 | y: 718.2693074279789
202 | selected: false
203 | sourcePosition: right
204 | targetPosition: left
205 | type: custom-note
206 | width: 570
207 | - data:
208 | author: 3Dify-developer
209 | desc: ''
210 | height: 140
211 | selected: false
212 | showAuthor: true
213 | text: '{"root":{"children":[{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
214 | 16px;","text":"MCP Resource Test prompt","type":"text","version":1},{"type":"linebreak","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
215 | 16px;","text":"Try mcp resources one-by-one","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":"font-size:
216 | 16px;"},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
217 | 16px;","text":"- judge result in \"thought\" phase (I want to know at least
218 | Fail or Success)","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
219 | theme: green
220 | title: ''
221 | type: ''
222 | width: 568
223 | height: 140
224 | id: '1741669231167'
225 | position:
226 | x: 88.65225939320432
227 | y: 879.6113451588606
228 | positionAbsolute:
229 | x: 88.65225939320432
230 | y: 879.6113451588606
231 | selected: false
232 | sourcePosition: right
233 | targetPosition: left
234 | type: custom-note
235 | width: 568
236 | - data:
237 | author: 3Dify-developer
238 | desc: ''
239 | height: 128
240 | selected: false
241 | showAuthor: true
242 | text: '{"root":{"children":[{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
243 | 16px;","text":"MCP Prompt Test prompt","type":"text","version":1},{"type":"linebreak","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
244 | 16px;","text":"Try mcp prompts one-by-one","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
245 | 16px;","text":"- judge result in \"thought\" phase (I want to know at least
246 | Fail or Success)","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
247 | theme: green
248 | title: ''
249 | type: ''
250 | width: 568
251 | height: 128
252 | id: '1741669352719'
253 | position:
254 | x: 87.25729350925093
255 | y: 1033.9889783833733
256 | positionAbsolute:
257 | x: 87.25729350925093
258 | y: 1033.9889783833733
259 | selected: false
260 | sourcePosition: right
261 | targetPosition: left
262 | type: custom-note
263 | width: 568
264 | - data:
265 | author: 3Dify-developer
266 | desc: ''
267 | height: 264
268 | selected: false
269 | showAuthor: true
270 | text: '{"root":{"children":[{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
271 | 16px;","text":"config.json example ","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
272 | 16px;","text":"(","type":"text","version":1},{"detail":0,"format":1,"mode":"normal","style":"font-size:
273 | 16px;","text":"Cloud?","type":"text","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
274 | 16px;","text":")","type":"text","version":1},{"type":"linebreak","version":1},{"detail":0,"format":0,"mode":"normal","style":"","text":"{","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"mcpServers\":
275 | {","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"everything\":
276 | {","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"url\":
277 | \"http://127.0.0.1:8080/sse\"","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" },","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"simple-arxiv\":
278 | {","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"url\":
279 | \"http://127.0.0.1:8008/sse\"","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" }","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" }","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":"}","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
280 | theme: pink
281 | title: ''
282 | type: ''
283 | width: 283
284 | height: 264
285 | id: '1741671860068'
286 | position:
287 | x: 1027.6182377664975
288 | y: 988.1065026922056
289 | positionAbsolute:
290 | x: 1027.6182377664975
291 | y: 988.1065026922056
292 | selected: false
293 | sourcePosition: right
294 | targetPosition: left
295 | type: custom-note
296 | width: 283
297 | - data:
298 | author: 3Dify-developer
299 | desc: ''
300 | height: 93
301 | selected: false
302 | showAuthor: true
303 | text: '{"root":{"children":[{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
304 | 14px;","text":"TOOL LIST should not blank. Choose some plugin.","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":"font-size:
305 | 14px;"},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
306 | 14px;","text":"I recommend official Dify plugin like \"Current Time\".","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":"font-size:
307 | 14px;"}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
308 | theme: yellow
309 | title: ''
310 | type: ''
311 | width: 363
312 | height: 93
313 | id: '1741766414431'
314 | position:
315 | x: 289.33163665746144
316 | y: 178.12817056680163
317 | positionAbsolute:
318 | x: 289.33163665746144
319 | y: 178.12817056680163
320 | selected: false
321 | sourcePosition: right
322 | targetPosition: left
323 | type: custom-note
324 | width: 363
325 | - data:
326 | author: 3Dify-developer
327 | desc: ''
328 | height: 362
329 | selected: false
330 | showAuthor: true
331 | text: '{"root":{"children":[{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
332 | 16px;","text":"Example query","type":"text","version":1},{"type":"linebreak","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
333 | 16px;","text":"I''m developing MCP client as Dify Agent Strategy Plugin.","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
334 | 16px;","text":"- Some features haven''t implemented and might not work as
335 | I expected.","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
336 | 16px;","text":"- I found good MCP server for testing.","type":"text","version":1},{"type":"linebreak","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
337 | 16px;","text":"- You should ignore \"Current Time Tool\". ","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
338 | 16px;","text":"At first, tell me all tools you are available in ","type":"text","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
339 | 14px;","text":"\"thought\".","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
340 | 16px;","text":"Try mcp tools one-by-one","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
341 | 16px;","text":"- judge result in \"thought\" phase (I want to know at least
342 | Fail or Success)","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
343 | 16px;","text":"- avoid \"sampleLLM\" which is not compatible feature yet.
344 | ","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
345 | theme: blue
346 | title: ''
347 | type: ''
348 | width: 502
349 | height: 362
350 | id: '1741772186537'
351 | position:
352 | x: 673.1166803440168
353 | y: 375.05515998236154
354 | positionAbsolute:
355 | x: 673.1166803440168
356 | y: 375.05515998236154
357 | selected: false
358 | sourcePosition: right
359 | targetPosition: left
360 | type: custom-note
361 | width: 502
362 | - data:
363 | author: 3Dify-developer
364 | desc: ''
365 | height: 97
366 | selected: false
367 | showAuthor: true
368 | text: '{"root":{"children":[{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
369 | 16px;","text":"Instruction","type":"text","version":1},{"type":"linebreak","version":1},{"detail":0,"format":0,"mode":"normal","style":"","text":"You
370 | are a helpful LLM Agent","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
371 | theme: violet
372 | title: ''
373 | type: ''
374 | width: 240
375 | height: 97
376 | id: '1741802957289'
377 | position:
378 | x: 87.25729350925093
379 | y: 375.05515998236154
380 | positionAbsolute:
381 | x: 87.25729350925093
382 | y: 375.05515998236154
383 | selected: false
384 | sourcePosition: right
385 | targetPosition: left
386 | type: custom-note
387 | width: 240
388 | - data:
389 | agent_parameters:
390 | config_json:
391 | type: constant
392 | value: "{\n \"mcpServers\": {\n \"everything\": {\n \"url\"\
393 | : \"http://host.docker.internal:8080/sse\"\n },\n \"simple-arxiv\"\
394 | : {\n \"url\": \"http://host.docker.internal:8008/sse\"\n \
395 | \ }\n }\n}"
396 | instruction:
397 | type: constant
398 |           value: You are a helpful LLM Agent
399 | maximum_iterations:
400 | type: constant
401 | value: 3
402 | model:
403 | type: constant
404 | value:
405 | completion_params: {}
406 | mode: chat
407 | model: gemini-2.0-flash-exp
408 | model_type: llm
409 | provider: langgenius/gemini/google
410 | type: model-selector
411 | query:
412 | type: constant
413 | value: '{{#sys.query#}}'
414 | tools:
415 | type: constant
416 | value:
417 | - enabled: true
418 | extra:
419 | description: ''
420 | parameters: {}
421 | provider_name: time
422 | schemas:
423 | - auto_generate: null
424 | default: '%Y-%m-%d %H:%M:%S'
425 | form: form
426 | human_description:
427 | en_US: Time format in strftime standard.
428 | ja_JP: Time format in strftime standard.
429 | pt_BR: Time format in strftime standard.
430 | zh_Hans: strftime 标准的时间格式。
431 | label:
432 | en_US: Format
433 | ja_JP: Format
434 | pt_BR: Format
435 | zh_Hans: 格式
436 | llm_description: null
437 | max: null
438 | min: null
439 | name: format
440 | options: []
441 | placeholder: null
442 | precision: null
443 | required: false
444 | scope: null
445 | template: null
446 | type: string
447 | - auto_generate: null
448 | default: UTC
449 | form: form
450 | human_description:
451 | en_US: Timezone
452 | ja_JP: Timezone
453 | pt_BR: Timezone
454 | zh_Hans: 时区
455 | label:
456 | en_US: Timezone
457 | ja_JP: Timezone
458 | pt_BR: Timezone
459 | zh_Hans: 时区
460 | llm_description: null
461 | max: null
462 | min: null
463 | name: timezone
464 | options:
465 | - label:
466 | en_US: UTC
467 | ja_JP: UTC
468 | pt_BR: UTC
469 | zh_Hans: UTC
470 | value: UTC
471 | - label:
472 | en_US: America/New_York
473 | ja_JP: America/New_York
474 | pt_BR: America/New_York
475 | zh_Hans: 美洲/纽约
476 | value: America/New_York
477 | - label:
478 | en_US: America/Los_Angeles
479 | ja_JP: America/Los_Angeles
480 | pt_BR: America/Los_Angeles
481 | zh_Hans: 美洲/洛杉矶
482 | value: America/Los_Angeles
483 | - label:
484 | en_US: America/Chicago
485 | ja_JP: America/Chicago
486 | pt_BR: America/Chicago
487 | zh_Hans: 美洲/芝加哥
488 | value: America/Chicago
489 | - label:
490 | en_US: America/Sao_Paulo
491 | ja_JP: America/Sao_Paulo
492 | pt_BR: América/São Paulo
493 | zh_Hans: 美洲/圣保罗
494 | value: America/Sao_Paulo
495 | - label:
496 | en_US: Asia/Shanghai
497 | ja_JP: Asia/Shanghai
498 | pt_BR: Asia/Shanghai
499 | zh_Hans: 亚洲/上海
500 | value: Asia/Shanghai
501 | - label:
502 | en_US: Asia/Ho_Chi_Minh
503 | ja_JP: Asia/Ho_Chi_Minh
504 | pt_BR: Ásia/Ho Chi Minh
505 | zh_Hans: 亚洲/胡志明市
506 | value: Asia/Ho_Chi_Minh
507 | - label:
508 | en_US: Asia/Tokyo
509 | ja_JP: Asia/Tokyo
510 | pt_BR: Asia/Tokyo
511 | zh_Hans: 亚洲/东京
512 | value: Asia/Tokyo
513 | - label:
514 | en_US: Asia/Dubai
515 | ja_JP: Asia/Dubai
516 | pt_BR: Asia/Dubai
517 | zh_Hans: 亚洲/迪拜
518 | value: Asia/Dubai
519 | - label:
520 | en_US: Asia/Kolkata
521 | ja_JP: Asia/Kolkata
522 | pt_BR: Asia/Kolkata
523 | zh_Hans: 亚洲/加尔各答
524 | value: Asia/Kolkata
525 | - label:
526 | en_US: Asia/Seoul
527 | ja_JP: Asia/Seoul
528 | pt_BR: Asia/Seoul
529 | zh_Hans: 亚洲/首尔
530 | value: Asia/Seoul
531 | - label:
532 | en_US: Asia/Singapore
533 | ja_JP: Asia/Singapore
534 | pt_BR: Asia/Singapore
535 | zh_Hans: 亚洲/新加坡
536 | value: Asia/Singapore
537 | - label:
538 | en_US: Europe/London
539 | ja_JP: Europe/London
540 | pt_BR: Europe/London
541 | zh_Hans: 欧洲/伦敦
542 | value: Europe/London
543 | - label:
544 | en_US: Europe/Berlin
545 | ja_JP: Europe/Berlin
546 | pt_BR: Europe/Berlin
547 | zh_Hans: 欧洲/柏林
548 | value: Europe/Berlin
549 | - label:
550 | en_US: Europe/Moscow
551 | ja_JP: Europe/Moscow
552 | pt_BR: Europe/Moscow
553 | zh_Hans: 欧洲/莫斯科
554 | value: Europe/Moscow
555 | - label:
556 | en_US: Australia/Sydney
557 | ja_JP: Australia/Sydney
558 | pt_BR: Australia/Sydney
559 | zh_Hans: 澳大利亚/悉尼
560 | value: Australia/Sydney
561 | - label:
562 | en_US: Pacific/Auckland
563 | ja_JP: Pacific/Auckland
564 | pt_BR: Pacific/Auckland
565 | zh_Hans: 太平洋/奥克兰
566 | value: Pacific/Auckland
567 | - label:
568 | en_US: Africa/Cairo
569 | ja_JP: Africa/Cairo
570 | pt_BR: Africa/Cairo
571 | zh_Hans: 非洲/开罗
572 | value: Africa/Cairo
573 | placeholder: null
574 | precision: null
575 | required: false
576 | scope: null
577 | template: null
578 | type: select
579 | settings:
580 | format:
581 | value: '%Y-%m-%d %H:%M:%S'
582 | timezone:
583 | value: UTC
584 | tool_label: Current Time
585 | tool_name: current_time
586 | type: builtin
587 | agent_strategy_label: mcpReAct
588 | agent_strategy_name: mcpReAct
589 | agent_strategy_provider_name: 3dify-project/mcp_client/agent
590 | desc: ''
591 | output_schema: null
592 | plugin_unique_identifier: 3dify-project/mcp_client:0.0.2@0a8f8d04a49549adc0fbe6118da99f846239aad74b5ede932e8bb40cec531491
593 | selected: false
594 | title: Agent
595 | type: agent
596 | height: 197
597 | id: '1742356283732'
598 | position:
599 | x: 361.24246556756697
600 | y: 282
601 | positionAbsolute:
602 | x: 361.24246556756697
603 | y: 282
604 | selected: true
605 | sourcePosition: right
606 | targetPosition: left
607 | type: custom
608 | width: 243
609 | - data:
610 | author: 3Dify-developer
611 | desc: ''
612 | height: 264
613 | selected: false
614 | showAuthor: false
615 | text: '{"root":{"children":[{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
616 | 16px;","text":"config.json example ","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
617 | 16px;","text":"(","type":"text","version":1},{"detail":0,"format":1,"mode":"normal","style":"font-size:
618 | 16px;","text":"Docker Compose self deploy","type":"text","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
619 | 16px;","text":")","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":"{","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"mcpServers\":
620 | {","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"everything\":
621 | {","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"url\":
622 | \"http://host.docker.internal:8080/sse\"","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" },","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"simple-arxiv\":
623 | {","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"url\":
624 | \"http://host.docker.internal:8008/sse\"","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" }","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" }","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":"}","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
625 | theme: pink
626 | title: ''
627 | type: ''
628 | width: 315
629 | height: 264
630 | id: '1742367595361'
631 | position:
632 | x: 690.5542795067578
633 | y: 988.1065026922056
634 | positionAbsolute:
635 | x: 690.5542795067578
636 | y: 988.1065026922056
637 | selected: false
638 | sourcePosition: right
639 | targetPosition: left
640 | type: custom-note
641 | width: 315
642 | - data:
643 | author: 3Dify-developer
644 | desc: ''
645 | height: 97
646 | selected: false
647 | showAuthor: false
648 | text: '{"root":{"children":[{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
649 | 16px;","text":"How to convert \"stdio\" to \"SSE\" MCP server","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":"README
650 | of my GitHub repository","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":"https://github.com/3dify-project/dify-mcp-client","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"link","version":1,"rel":"noreferrer","target":null,"title":null,"url":"https://github.com/3dify-project/dify-mcp-client"}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
651 | theme: cyan
652 | title: ''
653 | type: ''
654 | width: 447
655 | height: 97
656 | id: '1742452980902'
657 | position:
658 | x: 690.5542795067578
659 | y: 863.8984826162116
660 | positionAbsolute:
661 | x: 690.5542795067578
662 | y: 863.8984826162116
663 | selected: false
664 | sourcePosition: right
665 | targetPosition: left
666 | type: custom-note
667 | width: 447
668 | viewport:
669 | x: 73.56256265549337
670 | y: -104.78783867265224
671 | zoom: 0.8487039665217165
672 |
--------------------------------------------------------------------------------
/test/chatflow/mcp_sse_chatflow_v0.0.2.yml:
--------------------------------------------------------------------------------
1 | app:
2 |   description: 'Test for the MCP client as a Dify Agent Strategy plugin.
3 |
4 |     Used the "Everything MCP server".
5 |
6 | Resource and Prompt are working correctly.
7 |
8 |     Tools also succeeded, except for "sampleLLM" and "annotatedMessage"'
9 | icon: 🤖
10 | icon_background: '#FFEAD5'
11 | mode: advanced-chat
12 | name: SSE MCP test
13 | use_icon_as_answer_icon: true
14 | dependencies: []
15 | kind: app
16 | version: 0.1.5
17 | workflow:
18 | conversation_variables: []
19 | environment_variables: []
20 | features:
21 | file_upload:
22 | allowed_file_extensions:
23 | - .JPG
24 | - .JPEG
25 | - .PNG
26 | - .GIF
27 | - .WEBP
28 | - .SVG
29 | allowed_file_types:
30 | - image
31 | allowed_file_upload_methods:
32 | - local_file
33 | - remote_url
34 | enabled: false
35 | fileUploadConfig:
36 | audio_file_size_limit: 50
37 | batch_count_limit: 5
38 | file_size_limit: 15
39 | image_file_size_limit: 10
40 | video_file_size_limit: 100
41 | workflow_file_upload_limit: 10
42 | image:
43 | enabled: false
44 | number_limits: 3
45 | transfer_methods:
46 | - local_file
47 | - remote_url
48 | number_limits: 3
49 | opening_statement: ''
50 | retriever_resource:
51 | enabled: true
52 | sensitive_word_avoidance:
53 | enabled: false
54 | speech_to_text:
55 | enabled: false
56 | suggested_questions: []
57 | suggested_questions_after_answer:
58 | enabled: false
59 | text_to_speech:
60 | enabled: false
61 | language: ''
62 | voice: ''
63 | graph:
64 | edges:
65 | - data:
66 | isInIteration: false
67 | sourceType: start
68 | targetType: agent
69 | id: 1741367649354-source-1742356283732-target
70 | source: '1741367649354'
71 | sourceHandle: source
72 | target: '1742356283732'
73 | targetHandle: target
74 | type: custom
75 | zIndex: 0
76 | - data:
77 | isInIteration: false
78 | sourceType: agent
79 | targetType: answer
80 | id: 1742356283732-source-answer-target
81 | source: '1742356283732'
82 | sourceHandle: source
83 | target: answer
84 | targetHandle: target
85 | type: custom
86 | zIndex: 0
87 | nodes:
88 | - data:
89 | desc: ''
90 | selected: false
91 | title: Start
92 | type: start
93 | variables: []
94 | height: 53
95 | id: '1741367649354'
96 | position:
97 | x: 87.25729350925093
98 | y: 282
99 | positionAbsolute:
100 | x: 87.25729350925093
101 | y: 282
102 | selected: false
103 | sourcePosition: right
104 | targetPosition: left
105 | type: custom
106 | width: 243
107 | - data:
108 | answer: '{{#1742356283732.text#}}'
109 | desc: ''
110 | selected: false
111 | title: Answer
112 | type: answer
113 | variables: []
114 | height: 101
115 | id: answer
116 | position:
117 | x: 629.2361265621643
118 | y: 282
119 | positionAbsolute:
120 | x: 629.2361265621643
121 | y: 282
122 | selected: false
123 | sourcePosition: right
124 | targetPosition: left
125 | type: custom
126 | width: 243
127 | - data:
128 | author: 3Dify-developer
129 | desc: ''
130 | height: 194
131 | selected: false
132 | showAuthor: true
133 | text: '{"root":{"children":[{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
134 | 16px;","text":"User prompt base","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
135 | 16px;","text":"I''m developing an MCP client as a Dify Agent Strategy Plugin.","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
136 | 16px;","text":"- Some features haven''t been implemented and might not work as
137 | I expected.","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
138 | 16px;","text":"- I found a good MCP server for testing.","type":"text","version":1},{"type":"linebreak","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
139 | 16px;","text":"- You should ignore \"Current Time Tool\". ","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
140 | theme: blue
141 | title: ''
142 | type: ''
143 | width: 569
144 | height: 194
145 | id: '1741624678131'
146 | position:
147 | x: 87.25729350925093
148 | y: 508.8200615915433
149 | positionAbsolute:
150 | x: 87.25729350925093
151 | y: 508.8200615915433
152 | selected: false
153 | sourcePosition: right
154 | targetPosition: left
155 | type: custom-note
156 | width: 569
157 | - data:
158 | author: 3Dify-developer
159 | desc: ''
160 | height: 88
161 | selected: false
162 | showAuthor: false
163 | text: '{"root":{"children":[{"children":[{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
164 | 16px;","text":"Sample MCP server for testing","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"link","version":1,"rel":"noreferrer","target":null,"title":null,"url":""}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":"font-size:
165 | 16px;"},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":"https://github.com/modelcontextprotocol/servers/tree/main/src/everything","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
166 | theme: cyan
167 | title: ''
168 | type: ''
169 | width: 449
170 | height: 88
171 | id: '1741626728667'
172 | position:
173 | x: 690.5542795067578
174 | y: 763.3257650174893
175 | positionAbsolute:
176 | x: 690.5542795067578
177 | y: 763.3257650174893
178 | selected: false
179 | sourcePosition: right
180 | targetPosition: left
181 | type: custom-note
182 | width: 449
183 | - data:
184 | author: 3Dify-developer
185 | desc: ''
186 | height: 151
187 | selected: false
188 | showAuthor: true
189 | text: '{"root":{"children":[{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
190 | 16px;","text":"MCP Tool Test prompt","type":"text","version":1},{"type":"linebreak","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
191 | 16px;","text":"Try mcp tools one-by-one","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
192 | 16px;","text":"- judge the result in the \"thought\" phase (I want to know at least
193 | Fail or Success)","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
194 | 16px;","text":"- avoid \"sampleLLM\", which is not a compatible feature yet.
195 | ","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
196 | theme: green
197 | title: ''
198 | type: ''
199 | width: 570
200 | height: 151
201 | id: '1741669149712'
202 | position:
203 | x: 87.25729350925093
204 | y: 718.2693074279789
205 | positionAbsolute:
206 | x: 87.25729350925093
207 | y: 718.2693074279789
208 | selected: false
209 | sourcePosition: right
210 | targetPosition: left
211 | type: custom-note
212 | width: 570
213 | - data:
214 | author: 3Dify-developer
215 | desc: ''
216 | height: 140
217 | selected: false
218 | showAuthor: true
219 | text: '{"root":{"children":[{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
220 | 16px;","text":"MCP Resource Test prompt","type":"text","version":1},{"type":"linebreak","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
221 | 16px;","text":"Try mcp resources one-by-one","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":"font-size:
222 | 16px;"},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
223 | 16px;","text":"- judge the result in the \"thought\" phase (I want to know at least
224 | Fail or Success)","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
225 | theme: green
226 | title: ''
227 | type: ''
228 | width: 568
229 | height: 140
230 | id: '1741669231167'
231 | position:
232 | x: 88.65225939320432
233 | y: 879.6113451588606
234 | positionAbsolute:
235 | x: 88.65225939320432
236 | y: 879.6113451588606
237 | selected: false
238 | sourcePosition: right
239 | targetPosition: left
240 | type: custom-note
241 | width: 568
242 | - data:
243 | author: 3Dify-developer
244 | desc: ''
245 | height: 128
246 | selected: false
247 | showAuthor: true
248 | text: '{"root":{"children":[{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
249 | 16px;","text":"MCP Prompt Test prompt","type":"text","version":1},{"type":"linebreak","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
250 | 16px;","text":"Try mcp prompts one-by-one","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
251 | 16px;","text":"- judge the result in the \"thought\" phase (I want to know at least
252 | Fail or Success)","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
253 | theme: green
254 | title: ''
255 | type: ''
256 | width: 568
257 | height: 128
258 | id: '1741669352719'
259 | position:
260 | x: 87.25729350925093
261 | y: 1033.9889783833733
262 | positionAbsolute:
263 | x: 87.25729350925093
264 | y: 1033.9889783833733
265 | selected: false
266 | sourcePosition: right
267 | targetPosition: left
268 | type: custom-note
269 | width: 568
270 | - data:
271 | author: 3Dify-developer
272 | desc: ''
273 | height: 228
274 | selected: false
275 | showAuthor: true
276 | text: '{"root":{"children":[{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
277 | 16px;","text":"config.json example ","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":"font-size:
278 | 16px;"},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
279 | 16px;","text":"(","type":"text","version":1},{"detail":0,"format":1,"mode":"normal","style":"font-size:
280 | 16px;","text":"Cloud?","type":"text","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
281 | 16px;","text":")","type":"text","version":1},{"type":"linebreak","version":1},{"detail":0,"format":0,"mode":"normal","style":"","text":"{","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":"font-size:
282 | 16px;"},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"mcpServers\":
283 | {","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"everything\":
284 | {","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"url\":
285 | \"http://127.0.0.1:8080/sse\"","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" }","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" }","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":"}","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
286 | theme: pink
287 | title: ''
288 | type: ''
289 | width: 243
290 | height: 228
291 | id: '1741671860068'
292 | position:
293 | x: 1060.1016025290248
294 | y: 969.1578732473977
295 | positionAbsolute:
296 | x: 1060.1016025290248
297 | y: 969.1578732473977
298 | selected: false
299 | sourcePosition: right
300 | targetPosition: left
301 | type: custom-note
302 | width: 243
303 | - data:
304 | author: 3Dify-developer
305 | desc: ''
306 | height: 93
307 | selected: false
308 | showAuthor: true
309 | text: '{"root":{"children":[{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
310 | 14px;","text":"TOOL LIST should not be blank. Choose some plugin.","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":"font-size:
311 | 14px;"},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
312 | 14px;","text":"I recommend official Dify plugin like \"Current Time\".","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":"font-size:
313 | 14px;"}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
314 | theme: yellow
315 | title: ''
316 | type: ''
317 | width: 363
318 | height: 93
319 | id: '1741766414431'
320 | position:
321 | x: 289.33163665746144
322 | y: 178.12817056680163
323 | positionAbsolute:
324 | x: 289.33163665746144
325 | y: 178.12817056680163
326 | selected: false
327 | sourcePosition: right
328 | targetPosition: left
329 | type: custom-note
330 | width: 363
331 | - data:
332 | author: 3Dify-developer
333 | desc: ''
334 | height: 362
335 | selected: false
336 | showAuthor: true
337 | text: '{"root":{"children":[{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
338 | 16px;","text":"Example query","type":"text","version":1},{"type":"linebreak","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
339 | 16px;","text":"I''m developing an MCP client as a Dify Agent Strategy Plugin.","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":"font-size:
340 | 16px;"},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
341 | 16px;","text":"- Some features haven''t been implemented and might not work as
342 | I expected.","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
343 | 16px;","text":"- I found a good MCP server for testing.","type":"text","version":1},{"type":"linebreak","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
344 | 16px;","text":"- You should ignore \"Current Time Tool\". ","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
345 | 16px;","text":"At first tell me what tools are available","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
346 | 16px;","text":"Try mcp tools one-by-one","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
347 | 16px;","text":"- judge the result in the \"thought\" phase (I want to know at least
348 | Fail or Success)","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
349 | 16px;","text":"- avoid \"sampleLLM\", which is not a compatible feature yet.
350 | ","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
351 | theme: blue
352 | title: ''
353 | type: ''
354 | width: 502
355 | height: 362
356 | id: '1741772186537'
357 | position:
358 | x: 690.5542795067578
359 | y: 397.44223574668086
360 | positionAbsolute:
361 | x: 690.5542795067578
362 | y: 397.44223574668086
363 | selected: false
364 | sourcePosition: right
365 | targetPosition: left
366 | type: custom-note
367 | width: 502
368 | - data:
369 | author: 3Dify-developer
370 | desc: ''
371 | height: 97
372 | selected: false
373 | showAuthor: true
374 | text: '{"root":{"children":[{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
375 | 16px;","text":"Instruction","type":"text","version":1},{"type":"linebreak","version":1},{"detail":0,"format":0,"mode":"normal","style":"","text":"You
376 | are a helpful LLM Agent","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
377 | theme: violet
378 | title: ''
379 | type: ''
380 | width: 240
381 | height: 97
382 | id: '1741802957289'
383 | position:
384 | x: 87.25729350925093
385 | y: 375.05515998236154
386 | positionAbsolute:
387 | x: 87.25729350925093
388 | y: 375.05515998236154
389 | selected: true
390 | sourcePosition: right
391 | targetPosition: left
392 | type: custom-note
393 | width: 240
394 | - data:
395 | agent_parameters:
396 | config_json:
397 | type: constant
398 | value: "{\n \"mcpServers\": {\n \"everything\": {\n \"url\"\
399 | : \"http://host.docker.internal:8080/sse\"\n }\n }\n}"
400 | instruction:
401 | type: constant
402 |           value: You are a helpful LLM Agent
403 | maximum_iterations:
404 | type: constant
405 | value: 3
406 | model:
407 | type: constant
408 | value:
409 | completion_params: {}
410 | mode: chat
411 | model: gemini-2.0-flash-exp
412 | model_type: llm
413 | provider: langgenius/gemini/google
414 | type: model-selector
415 | query:
416 | type: constant
417 | value: '{{#sys.query#}}'
418 | tools:
419 | type: constant
420 | value:
421 | - enabled: true
422 | extra:
423 | description: ''
424 | parameters: {}
425 | provider_name: time
426 | schemas:
427 | - auto_generate: null
428 | default: '%Y-%m-%d %H:%M:%S'
429 | form: form
430 | human_description:
431 | en_US: Time format in strftime standard.
432 | ja_JP: Time format in strftime standard.
433 | pt_BR: Time format in strftime standard.
434 | zh_Hans: strftime 标准的时间格式。
435 | label:
436 | en_US: Format
437 | ja_JP: Format
438 | pt_BR: Format
439 | zh_Hans: 格式
440 | llm_description: null
441 | max: null
442 | min: null
443 | name: format
444 | options: []
445 | placeholder: null
446 | precision: null
447 | required: false
448 | scope: null
449 | template: null
450 | type: string
451 | - auto_generate: null
452 | default: UTC
453 | form: form
454 | human_description:
455 | en_US: Timezone
456 | ja_JP: Timezone
457 | pt_BR: Timezone
458 | zh_Hans: 时区
459 | label:
460 | en_US: Timezone
461 | ja_JP: Timezone
462 | pt_BR: Timezone
463 | zh_Hans: 时区
464 | llm_description: null
465 | max: null
466 | min: null
467 | name: timezone
468 | options:
469 | - label:
470 | en_US: UTC
471 | ja_JP: UTC
472 | pt_BR: UTC
473 | zh_Hans: UTC
474 | value: UTC
475 | - label:
476 | en_US: America/New_York
477 | ja_JP: America/New_York
478 | pt_BR: America/New_York
479 | zh_Hans: 美洲/纽约
480 | value: America/New_York
481 | - label:
482 | en_US: America/Los_Angeles
483 | ja_JP: America/Los_Angeles
484 | pt_BR: America/Los_Angeles
485 | zh_Hans: 美洲/洛杉矶
486 | value: America/Los_Angeles
487 | - label:
488 | en_US: America/Chicago
489 | ja_JP: America/Chicago
490 | pt_BR: America/Chicago
491 | zh_Hans: 美洲/芝加哥
492 | value: America/Chicago
493 | - label:
494 | en_US: America/Sao_Paulo
495 | ja_JP: America/Sao_Paulo
496 | pt_BR: América/São Paulo
497 | zh_Hans: 美洲/圣保罗
498 | value: America/Sao_Paulo
499 | - label:
500 | en_US: Asia/Shanghai
501 | ja_JP: Asia/Shanghai
502 | pt_BR: Asia/Shanghai
503 | zh_Hans: 亚洲/上海
504 | value: Asia/Shanghai
505 | - label:
506 | en_US: Asia/Ho_Chi_Minh
507 | ja_JP: Asia/Ho_Chi_Minh
508 | pt_BR: Ásia/Ho Chi Minh
509 | zh_Hans: 亚洲/胡志明市
510 | value: Asia/Ho_Chi_Minh
511 | - label:
512 | en_US: Asia/Tokyo
513 | ja_JP: Asia/Tokyo
514 | pt_BR: Asia/Tokyo
515 | zh_Hans: 亚洲/东京
516 | value: Asia/Tokyo
517 | - label:
518 | en_US: Asia/Dubai
519 | ja_JP: Asia/Dubai
520 | pt_BR: Asia/Dubai
521 | zh_Hans: 亚洲/迪拜
522 | value: Asia/Dubai
523 | - label:
524 | en_US: Asia/Kolkata
525 | ja_JP: Asia/Kolkata
526 | pt_BR: Asia/Kolkata
527 | zh_Hans: 亚洲/加尔各答
528 | value: Asia/Kolkata
529 | - label:
530 | en_US: Asia/Seoul
531 | ja_JP: Asia/Seoul
532 | pt_BR: Asia/Seoul
533 | zh_Hans: 亚洲/首尔
534 | value: Asia/Seoul
535 | - label:
536 | en_US: Asia/Singapore
537 | ja_JP: Asia/Singapore
538 | pt_BR: Asia/Singapore
539 | zh_Hans: 亚洲/新加坡
540 | value: Asia/Singapore
541 | - label:
542 | en_US: Europe/London
543 | ja_JP: Europe/London
544 | pt_BR: Europe/London
545 | zh_Hans: 欧洲/伦敦
546 | value: Europe/London
547 | - label:
548 | en_US: Europe/Berlin
549 | ja_JP: Europe/Berlin
550 | pt_BR: Europe/Berlin
551 | zh_Hans: 欧洲/柏林
552 | value: Europe/Berlin
553 | - label:
554 | en_US: Europe/Moscow
555 | ja_JP: Europe/Moscow
556 | pt_BR: Europe/Moscow
557 | zh_Hans: 欧洲/莫斯科
558 | value: Europe/Moscow
559 | - label:
560 | en_US: Australia/Sydney
561 | ja_JP: Australia/Sydney
562 | pt_BR: Australia/Sydney
563 | zh_Hans: 澳大利亚/悉尼
564 | value: Australia/Sydney
565 | - label:
566 | en_US: Pacific/Auckland
567 | ja_JP: Pacific/Auckland
568 | pt_BR: Pacific/Auckland
569 | zh_Hans: 太平洋/奥克兰
570 | value: Pacific/Auckland
571 | - label:
572 | en_US: Africa/Cairo
573 | ja_JP: Africa/Cairo
574 | pt_BR: Africa/Cairo
575 | zh_Hans: 非洲/开罗
576 | value: Africa/Cairo
577 | placeholder: null
578 | precision: null
579 | required: false
580 | scope: null
581 | template: null
582 | type: select
583 | settings:
584 | format:
585 | value: '%Y-%m-%d %H:%M:%S'
586 | timezone:
587 | value: UTC
588 | tool_label: Current Time
589 | tool_name: current_time
590 | type: builtin
591 | agent_strategy_label: mcpReAct
592 | agent_strategy_name: mcpReAct
593 | agent_strategy_provider_name: 3dify-project/mcp_client/agent
594 | desc: ''
595 | output_schema: null
596 | plugin_unique_identifier: 3dify-project/mcp_client:0.0.2@0a8f8d04a49549adc0fbe6118da99f846239aad74b5ede932e8bb40cec531491
597 | selected: false
598 | title: Agent
599 | type: agent
600 | height: 197
601 | id: '1742356283732'
602 | position:
603 | x: 361.24246556756697
604 | y: 282
605 | positionAbsolute:
606 | x: 361.24246556756697
607 | y: 282
608 | selected: false
609 | sourcePosition: right
610 | targetPosition: left
611 | type: custom
612 | width: 243
613 | - data:
614 | author: 3Dify-developer
615 | desc: ''
616 | height: 231
617 | selected: false
618 | showAuthor: true
619 | text: '{"root":{"children":[{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
620 | 16px;","text":"config.json example ","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":"font-size:
621 | 16px;"},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
622 | 16px;","text":"(","type":"text","version":1},{"detail":0,"format":1,"mode":"normal","style":"font-size:
623 | 16px;","text":"Docker Compose self deploy","type":"text","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
624 | 16px;","text":")","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":"font-size:
625 | 16px;"},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":"{","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"mcpServers\":
626 | {","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"everything\":
627 | {","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"url\":
628 | \"http://host.docker.internal:8080/sse\"","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" }","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" }","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":"}","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
629 | theme: pink
630 | title: ''
631 | type: ''
632 | width: 357
633 | height: 231
634 | id: '1742367595361'
635 | position:
636 | x: 690.5542795067578
637 | y: 969.1578732473977
638 | positionAbsolute:
639 | x: 690.5542795067578
640 | y: 969.1578732473977
641 | selected: false
642 | sourcePosition: right
643 | targetPosition: left
644 | type: custom-note
645 | width: 357
646 | - data:
647 | author: 3Dify-developer
648 | desc: ''
649 | height: 97
650 | selected: false
651 | showAuthor: false
652 | text: '{"root":{"children":[{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
653 | 16px;","text":"How to convert \"stdio\" to \"SSE\" MCP server","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":"README
654 | of my GitHub repository","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":"https://github.com/3dify-project/dify-mcp-client","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"link","version":1,"rel":"noreferrer","target":null,"title":null,"url":"https://github.com/3dify-project/dify-mcp-client"}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
655 | theme: cyan
656 | title: ''
657 | type: ''
658 | width: 447
659 | height: 97
660 | id: '1742452980902'
661 | position:
662 | x: 690.5542795067578
663 | y: 863.8984826162116
664 | positionAbsolute:
665 | x: 690.5542795067578
666 | y: 863.8984826162116
667 | selected: false
668 | sourcePosition: right
669 | targetPosition: left
670 | type: custom-note
671 | width: 447
672 | viewport:
673 | x: -33.95272842445752
674 | y: -104.19406886847042
675 | zoom: 0.8487039665217158
676 |
--------------------------------------------------------------------------------
/test/chatflow/mcp_stdio_chatflow_v0.0.1.yml:
--------------------------------------------------------------------------------
1 | app:
2 |   description: 'Test for the MCP client as a Dify Agent Strategy plugin.
3 |
4 |     Used the "Everything MCP server".
5 |
6 | Resource and Prompt are working correctly.
7 |
8 |     Tools also succeeded, except for "sampleLLM" and "annotatedMessage"'
9 | icon: 🤖
10 | icon_background: '#FFEAD5'
11 | mode: advanced-chat
12 | name: MCP test
13 | use_icon_as_answer_icon: true
14 | dependencies: []
15 | kind: app
16 | version: 0.1.5
17 | workflow:
18 | conversation_variables: []
19 | environment_variables: []
20 | features:
21 | file_upload:
22 | allowed_file_extensions:
23 | - .JPG
24 | - .JPEG
25 | - .PNG
26 | - .GIF
27 | - .WEBP
28 | - .SVG
29 | allowed_file_types:
30 | - image
31 | allowed_file_upload_methods:
32 | - local_file
33 | - remote_url
34 | enabled: false
35 | fileUploadConfig:
36 | audio_file_size_limit: 50
37 | batch_count_limit: 5
38 | file_size_limit: 15
39 | image_file_size_limit: 10
40 | video_file_size_limit: 100
41 | workflow_file_upload_limit: 10
42 | image:
43 | enabled: false
44 | number_limits: 3
45 | transfer_methods:
46 | - local_file
47 | - remote_url
48 | number_limits: 3
49 | opening_statement: ''
50 | retriever_resource:
51 | enabled: true
52 | sensitive_word_avoidance:
53 | enabled: false
54 | speech_to_text:
55 | enabled: false
56 | suggested_questions: []
57 | suggested_questions_after_answer:
58 | enabled: false
59 | text_to_speech:
60 | enabled: false
61 | language: ''
62 | voice: ''
63 | graph:
64 | edges:
65 | - data:
66 | isInIteration: false
67 | sourceType: start
68 | targetType: agent
69 | id: 1741367649354-source-1741765826830-target
70 | source: '1741367649354'
71 | sourceHandle: source
72 | target: '1741765826830'
73 | targetHandle: target
74 | type: custom
75 | zIndex: 0
76 | - data:
77 | isInIteration: false
78 | sourceType: agent
79 | targetType: answer
80 | id: 1741765826830-source-answer-target
81 | source: '1741765826830'
82 | sourceHandle: source
83 | target: answer
84 | targetHandle: target
85 | type: custom
86 | zIndex: 0
87 | nodes:
88 | - data:
89 | desc: ''
90 | selected: false
91 | title: Start
92 | type: start
93 | variables: []
94 | height: 53
95 | id: '1741367649354'
96 | position:
97 | x: 87.25729350925093
98 | y: 282
99 | positionAbsolute:
100 | x: 87.25729350925093
101 | y: 282
102 | selected: false
103 | sourcePosition: right
104 | targetPosition: left
105 | type: custom
106 | width: 243
107 | - data:
108 | answer: '{{#1741367691327.text#}}'
109 | desc: ''
110 | selected: false
111 | title: Answer
112 | type: answer
113 | variables: []
114 | height: 101
115 | id: answer
116 | position:
117 | x: 627.4767826811337
118 | y: 282
119 | positionAbsolute:
120 | x: 627.4767826811337
121 | y: 282
122 | selected: false
123 | sourcePosition: right
124 | targetPosition: left
125 | type: custom
126 | width: 243
127 | - data:
128 | author: 3Dify-developer
129 | desc: ''
130 | height: 351
131 | selected: false
132 | showAuthor: true
133 | text: '{"root":{"children":[{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
134 | 16px;","text":"Mac ","type":"text","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
135 | 16px;","text":"config.json example","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":"font-size:
136 | 16px;"},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":"{","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"mcpServers\":
137 | {","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"everything\":
138 | {","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"command\":
139 | \"npx\",","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"args\":
140 | [","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"-y\",","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"@modelcontextprotocol/server-everything\"","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" ]","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" }","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" }","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":"}","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
141 | theme: pink
142 | title: ''
143 | type: ''
144 | width: 289
145 | height: 351
146 | id: '1741579814015'
147 | position:
148 | x: 973.6650449303346
149 | y: 610.9646610182883
150 | positionAbsolute:
151 | x: 973.6650449303346
152 | y: 610.9646610182883
153 | selected: false
154 | sourcePosition: right
155 | targetPosition: left
156 | type: custom-note
157 | width: 289
158 | - data:
159 | author: 3Dify-developer
160 | desc: ''
161 | height: 194
162 | selected: false
163 | showAuthor: true
164 | text: '{"root":{"children":[{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
165 | 16px;","text":"User prompt base","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
166 | 16px;","text":"I''m developing an MCP client as a Dify Agent Strategy Plugin.","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
167 | 16px;","text":"- Some features haven''t been implemented and might not work as
168 | I expected.","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
169 | 16px;","text":"- I found a good MCP server for testing.","type":"text","version":1},{"type":"linebreak","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
170 | 16px;","text":"- You should ignore \"Current Time Tool\". ","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":"font-size:
171 | 16px;"},{"children":[],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
172 | theme: blue
173 | title: ''
174 | type: ''
175 | width: 569
176 | height: 194
177 | id: '1741624678131'
178 | position:
179 | x: 87.25729350925093
180 | y: 508.8200615915433
181 | positionAbsolute:
182 | x: 87.25729350925093
183 | y: 508.8200615915433
184 | selected: false
185 | sourcePosition: right
186 | targetPosition: left
187 | type: custom-note
188 | width: 569
189 | - data:
190 | author: 3Dify-developer
191 | desc: ''
192 | height: 88
193 | selected: false
194 | showAuthor: true
195 | text: '{"root":{"children":[{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
196 | 16px;","text":"Sample MCP server for testing","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":"font-size:
197 | 16px;"},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":"https://github.com/modelcontextprotocol/servers/tree/main/src/everything","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
198 | theme: cyan
199 | title: ''
200 | type: ''
201 | width: 449
202 | height: 88
203 | id: '1741626728667'
204 | position:
205 | x: 737.2683807703559
206 | y: 508.8200615915433
207 | positionAbsolute:
208 | x: 737.2683807703559
209 | y: 508.8200615915433
210 | selected: false
211 | sourcePosition: right
212 | targetPosition: left
213 | type: custom-note
214 | width: 449
215 | - data:
216 | author: 3Dify-developer
217 | desc: ''
218 | height: 151
219 | selected: false
220 | showAuthor: true
221 | text: '{"root":{"children":[{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
222 | 16px;","text":"MCP Tool Test prompt","type":"text","version":1},{"type":"linebreak","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
223 | 16px;","text":"Try mcp tools one-by-one","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":"font-size:
224 | 16px;"},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
225 | 16px;","text":"- judge the result in the \"thought\" phase (I want to know at least
226 | Fail or Success)","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
227 | 16px;","text":"- avoid \"sampleLLM\", which is not a compatible feature yet.
228 | ","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
229 | theme: violet
230 | title: ''
231 | type: ''
232 | width: 570
233 | height: 151
234 | id: '1741669149712'
235 | position:
236 | x: 87.25729350925093
237 | y: 718.2693074279789
238 | positionAbsolute:
239 | x: 87.25729350925093
240 | y: 718.2693074279789
241 | selected: false
242 | sourcePosition: right
243 | targetPosition: left
244 | type: custom-note
245 | width: 570
246 | - data:
247 | author: 3Dify-developer
248 | desc: ''
249 | height: 140
250 | selected: false
251 | showAuthor: true
252 | text: '{"root":{"children":[{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
253 | 16px;","text":"MCP Resource Test prompt","type":"text","version":1},{"type":"linebreak","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
254 | 16px;","text":"Try mcp resources one-by-one","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":"font-size:
255 | 16px;"},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
256 | 16px;","text":"- judge the result in the \"thought\" phase (I want to know at least
257 | Fail or Success)","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
258 | theme: yellow
259 | title: ''
260 | type: ''
261 | width: 568
262 | height: 140
263 | id: '1741669231167'
264 | position:
265 | x: 88.65225939320432
266 | y: 879.6113451588606
267 | positionAbsolute:
268 | x: 88.65225939320432
269 | y: 879.6113451588606
270 | selected: false
271 | sourcePosition: right
272 | targetPosition: left
273 | type: custom-note
274 | width: 568
275 | - data:
276 | author: 3Dify-developer
277 | desc: ''
278 | height: 128
279 | selected: false
280 | showAuthor: true
281 | text: '{"root":{"children":[{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
282 | 16px;","text":"MCP Prompt Test prompt","type":"text","version":1},{"type":"linebreak","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
283 | 16px;","text":"Try mcp prompts one-by-one","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":"font-size:
284 | 16px;"},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
285 | 16px;","text":"- judge the result in the \"thought\" phase (I want to know at least
286 | Fail or Success)","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
287 | theme: green
288 | title: ''
289 | type: ''
290 | width: 568
291 | height: 128
292 | id: '1741669352719'
293 | position:
294 | x: 87.25729350925093
295 | y: 1033.9889783833733
296 | positionAbsolute:
297 | x: 87.25729350925093
298 | y: 1033.9889783833733
299 | selected: false
300 | sourcePosition: right
301 | targetPosition: left
302 | type: custom-note
303 | width: 568
304 | - data:
305 | author: 3Dify-developer
306 | desc: ''
307 | height: 354
308 | selected: false
309 | showAuthor: true
310 | text: '{"root":{"children":[{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
311 | 16px;","text":"Windows ","type":"text","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
312 | 16px;","text":"config.json example","type":"text","version":1},{"type":"linebreak","version":1},{"detail":0,"format":0,"mode":"normal","style":"","text":"{","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"mcpServers\":
313 | {","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"everything\":
314 | {","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"command\":
315 | \"npx.cmd\",","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"args\":
316 | [","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"-y\",","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" \"@modelcontextprotocol/server-everything\"","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" ]","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" }","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":" }","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"","text":"}","type":"text","version":1}],"direction":null,"format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""},{"children":[{"detail":0,"format":1,"mode":"normal","style":"font-size:
317 | 16px;","text":"warnning","type":"text","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
318 | 16px;","text":": use ","type":"text","version":1},{"detail":0,"format":1,"mode":"normal","style":"font-size:
319 | 16px;","text":"npx.cmd","type":"text","version":1},{"detail":0,"format":0,"mode":"normal","style":"font-size:
320 | 16px;","text":" ","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":1,"textStyle":""},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
321 | 16px;","text":" instead of npx","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":""}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
322 | theme: pink
323 | title: ''
324 | type: ''
325 | width: 288
326 | height: 354
327 | id: '1741671860068'
328 | position:
329 | x: 675.9865130991246
330 | y: 610.9646610182883
331 | positionAbsolute:
332 | x: 675.9865130991246
333 | y: 610.9646610182883
334 | selected: false
335 | sourcePosition: right
336 | targetPosition: left
337 | type: custom-note
338 | width: 288
339 | - data:
340 | agent_parameters:
341 | config_json:
342 | type: constant
343 | value: "{\n \"mcpServers\": {\n \"everything\": {\n \"command\"\
344 | : \"npx.cmd\",\n \"args\": [\n \"-y\",\n \"@modelcontextprotocol/server-everything\"\
345 | \n ]\n }\n }\n}"
346 | instruction:
347 | type: constant
348 | value: You are a helpful LLM agent.
349 | maximum_iterations:
350 | type: constant
351 | value: 3
352 | model:
353 | type: constant
354 | value:
355 | completion_params: {}
356 | mode: chat
357 | model: gemini-2.0-flash-exp
358 | model_type: llm
359 | provider: langgenius/gemini/google
360 | type: model-selector
361 | query:
362 | type: constant
363 | value: '{{#sys.query#}}'
364 | tools:
365 | type: constant
366 | value: []
367 | agent_strategy_label: mcpReAct
368 | agent_strategy_name: mcpReAct
369 | agent_strategy_provider_name: 3dify-project/mcp_client/agent
370 | desc: ''
371 | output_schema: null
372 | plugin_unique_identifier: 3dify-project/mcp_client:0.0.1@eac12d97acd3c57819aae9ee561591b60708e1cf09bdaa3ba38fce60221ad50d
373 | selected: false
374 | title: Agent
375 | type: agent
376 | height: 145
377 | id: '1741765826830'
378 | position:
379 | x: 347.36834087720297
380 | y: 282
381 | positionAbsolute:
382 | x: 347.36834087720297
383 | y: 282
384 | selected: false
385 | sourcePosition: right
386 | targetPosition: left
387 | type: custom
388 | width: 243
389 | - data:
390 | author: 3Dify-developer
391 | desc: ''
392 | height: 93
393 | selected: false
394 | showAuthor: true
395 | text: '{"root":{"children":[{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
396 | 14px;","text":"TOOL LIST should not blank. Choose some plugin.","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":"font-size:
397 | 14px;"},{"children":[{"detail":0,"format":0,"mode":"normal","style":"font-size:
398 | 14px;","text":"I recommend official Dify plugin like \"Current Time\".","type":"text","version":1}],"direction":"ltr","format":"","indent":0,"type":"paragraph","version":1,"textFormat":0,"textStyle":"font-size:
399 | 14px;"}],"direction":"ltr","format":"","indent":0,"type":"root","version":1}}'
400 | theme: yellow
401 | title: ''
402 | type: ''
403 | width: 363
404 | height: 93
405 | id: '1741766414431'
406 | position:
407 | x: 289.33163665746144
408 | y: 178.12817056680163
409 | positionAbsolute:
410 | x: 289.33163665746144
411 | y: 178.12817056680163
412 | selected: true
413 | sourcePosition: right
414 | targetPosition: left
415 | type: custom-note
416 | width: 363
417 | viewport:
418 | x: 184.66257640044716
419 | y: -96.37762447627853
420 | zoom: 0.8615237662477786
421 |
--------------------------------------------------------------------------------
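The `config_json` value in the agent node above is the same Windows stdio configuration that the pink note spells out: it launches `@modelcontextprotocol/server-everything` through `npx.cmd` (not `npx`) because the chatflow targets Windows. Below is a minimal Python sketch that rebuilds that value for pasting into the `config_json` field; the dict contents are taken from the chatflow, while the script itself is only illustrative:

```python
import json

# Stdio MCP server config used by the mcpReAct agent node above.
# On Windows the command must be "npx.cmd" rather than "npx",
# as the note in the chatflow warns.
config = {
    "mcpServers": {
        "everything": {
            "command": "npx.cmd",
            "args": ["-y", "@modelcontextprotocol/server-everything"],
        }
    }
}

# Paste the printed JSON string into the agent's config_json parameter.
print(json.dumps(config, indent=2))
```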
/test/result/everything_mcp_server_prompt_log.txt:
--------------------------------------------------------------------------------
1 | ROUND 1
2 | {
3 | "action_input": {},
4 | "action_name": "mcp_prompt_simple_prompt",
5 | "observation": "user: This is a simple prompt without arguments.",
6 | "thought": "Okay, I understand. I will try the MCP prompts one by one and judge the result in the \"thought\" phase, indicating either \"Success\" or \"Fail\" based on whether the tool call was successful and returned a value. I will ignore the \"Current Time Tool.\"\n\nFirst, I will try the `mcp_prompt_simple_prompt`."
7 | }
8 |
9 | ROUND 2
10 | {
11 | "action_input": {
12 | "style": "verbose",
13 | "temperature": "0.7"
14 | },
15 | "action_name": "mcp_prompt_complex_prompt",
16 | "observation": "user: This is a complex prompt with arguments: temperature=0.7, style=verbose\nassistant: I understand. You've provided a complex prompt with temperature and style arguments. How would you like me to proceed?\nuser: type='image' data='iVBORw0KGgoAAAANSUhEUgAAABQAAAAUCAYAAACNiR0NAAAKsGlDQ1BJQ0MgUHJvZmlsZQAASImVlwdUU+kSgOfe9JDQEiIgJfQmSCeAlBBaAAXpYCMkAUKJMRBU7MriClZURLCs6KqIgo0idizYFsWC3QVZBNR1sWDDlXeBQ9jdd9575805c+a7c+efmf+e/z9nLgCdKZDJMlF1gCxpjjwyyI8dn5DIJvUABRiY0kBdIMyWcSMiwgCTUft3+dgGyJC9YzuU69/f/1fREImzhQBIBMbJomxhFsbHMe0TyuQ5ALg9mN9kbo5siK9gzJRjDWL8ZIhTR7hviJOHGY8fjomO5GGsDUCmCQTyVACaKeZn5wpTsTw0f4ztpSKJFGPsGbyzsmaLMMbqgiUWI8N4KD8n+S95Uv+WM1mZUyBIVfLIXoaF7C/JlmUK5v+fn+N/S1amYrSGOaa0NHlwJGaxvpAHGbNDlSxNnhI+yhLRcPwwpymCY0ZZmM1LHGWRwD9UuTZzStgop0gC+co8OfzoURZnB0SNsnx2pLJWipzHHWWBfKyuIiNG6U8T85X589Ki40Y5VxI7ZZSzM6JCx2J4Sr9cEansXywN8hurG6jce1b2X/Yr4SvX5qRFByv3LhjrXyzljuXMjlf2JhL7B4zFxCjjZTl+ylqyzAhlvDgzSOnPzo1Srs3BDuTY2gjlN0wXhESMMoRBELAhBjIhB+QggECQgBTEOeJ5Q2cUeLNl8+WS1LQcNhe7ZWI2Xyq0m8B2tHd0Bhi6syNH4j1r+C4irGtjvhWVAF4nBgcHT475Qm4BHEkCoNaO+SxnAKh3A1w5JVTIc0d8Q9cJCEAFNWCCDhiACViCLTiCK3iCLwRACIRDNCTATBBCGmRhnc+FhbAMCqAI1sNmKIOdsBv2wyE4CvVwCs7DZbgOt+AePIZ26IJX0AcfYQBBEBJCRxiIDmKImCE2iCPCQbyRACQMiUQSkCQkFZEiCmQhsgIpQoqRMmQXUokcQU4g55GrSCvyEOlAepF3yFcUh9JQJqqPmqMTUQ7KRUPRaHQGmorOQfPQfHQtWopWoAfROvQ8eh29h7ajr9B+HOBUcCycEc4Wx8HxcOG4RFwKTo5bjCvEleAqcNW4Rlwz7g6uHfca9wVPxDPwbLwt3hMfjI/BC/Fz8Ivxq/Fl+P34OvxF/B18B74P/51AJ+gRbAgeBD4hnpBKmEsoIJQQ9hJqCZcI9whdhI9EIpFFtCC6EYOJCcR04gLiauJ2Yg3xHLGV2EnsJ5FIOiQbkhcpnCQg5ZAKSFtJB0lnSbdJXaTPZBWyIdmRHEhOJEvJy8kl5APkM+Tb5G7yAEWdYkbxoIRTRJT5lHWUPZRGyk1KF2WAqkG1oHpRo6np1GXUUmo19RL1CfW9ioqKsYq7ylQVicpSlVKVwypXVDpUvtA0adY0Hm06TUFbS9tHO0d7SHtPp9PN6b70RHoOfS29kn6B/oz+WZWhaqfKVxWpLlEtV61Tva36Ro2iZqbGVZuplqdWonZM7abaa3WKurk6T12gvli9XP2E+n31fg2GhoNGuEaWxmqNAxpXNXo0SZrmmgGaIs18zd2aFzQ7GTiGCYPHEDJWMPYwLjG6mESmBZPPTGcWMQ8xW5h9WppazlqxWvO0yrVOa7WzcCxzFp+VyVrHOspqY30dpz+OO048btW46nG3x33SHq/tqy3WLtSu0b6n/VWHrROgk6GzQade56kuXtdad6ruXN0dupd0X49njvccLxxfOP7o+Ed6qJ61XqTeAr3dejf0+vUN9IP0Zfpb9S/ovzZgGfgapBtsMjhj0GvIMPQ2lBhuMjxr+JKtxeayM9ml7IvsPiM9o2AjhdEuoxajAWML4xjj5cY1xk9NqCYckxSTTSZNJn2mhqaTTReaVpk+MqOYcczSzLaYNZt9MrcwjzNfaV5v3mOhbcG3yLOosnhiSbf0sZxjWWF514poxbHKsNpudcsatXaxTrMut75pg9q42khsttu0TiBMcJ8gnVAx4b4tzZZrm2tbZdthx7ILs1tuV2/3ZqLpxMSJGyY2T/xu72Kfab/H/rGDpkOIw3KHRod3jtaOQsdyx7tOdKdApyVODU5vnW2cxc47nB+4MFwmu6x0aXL509XNVe5a7drrZuqW5LbN7T6HyYngrOZccSe4+7kvcT/l/sXD1SPH46jHH562nhmeBzx7JllMEk/aM6nTy9hL4LXLq92b7Z3k/ZN3u4+Rj8Cnwue5r4mvyHevbzfXipvOPch942fvJ/er9fvE8+At4p3zx/kH+Rf6twRoBsQElAU8CzQOTA2sCuwLcglaEHQumBAcGrwh+D5fny/kV/L7QtxCFoVcDKWFRoWWhT4Psw6ThzVORieHTN44+ckUsynSKfXhEM4P3xj+NMIiYk7EyanEqRFTy6e+iHSIXBjZHMWImhV1IOpjtF/0uujHMZYxipimWLXY6bGVsZ/i/OOK49rjJ8Yvir+eoJsgSWhIJCXGJu5N7J8WMG3ztK7pLtMLprfNsJgxb8bVmbozM2eenqU2SzDrWBIhKS7pQNI3QbigQtCfzE/eltwn5Am3CF+JfEWbRL1iL3GxuDvFK6U4pSfVK3Vjam+aT1pJ2msJT1ImeZsenL4z/VNGeMa+jMHMuMyaLHJWUtYJqaY0Q3pxtsHsebNbZTayAln7HI85m+f0yUPle7OR7BnZDTlMbDi6obBU/KDoyPXOLc/9PDd27rF5GvOk827Mt56/an53XmDezwvwC4QLmhYaLVy2sGMRd9Guxcji5MVNS0yW5C/pWhq0dP8y6rKMZb8st19evPzDirgVjfn6+UvzO38I+qGqQLVAXnB/pefKnT/if5T82LLKadXWVd8LRYXXiuyLSoq+rRauvrbGYU3pmsG1KWtb1rmu27GeuF66vm2Dz4b9xRrFecWdGydvrNvE3lS46cPmWZuvljiX7NxC3aLY0l4aVtqw1XTr+q3fytLK7pX7ldds09u2atun7aLtt3f47qjeqb+zaOfXnyQ/PdgVtKuuwryiZDdxd+7uF3ti9zT/zPm5cq/u3qK9f+6T7mvfH7n/YqVbZeUBvQPrqtAqRVXvwekHbx3yP9RQbVu9q4ZVU3QYDisOvzySdKTtaOjRpmOcY9XHzY5vq2XUFtYhdfPr+urT6tsbEhpaT4ScaGr0bKw9aXdy3ymjU+WntU6vO0M9k39m8Gze2f5zsnOvz6ee72ya1fT4QvyFuxenXmy5FHrpyuXAyxeauc1nr3hdOXXV4+qJa5xr9dddr9fdcLlR+4vLL7Utri11N91
uNtzyv9XYOqn1zG2f2+fv+N+5fJd/9/q9Kfda22LaHtyffr/9gehBz8PMh28f5T4aeLz0CeFJ4VP1pyXP9J5V/Gr1a027a/vpDv+OG8+jnj/uFHa++i37t29d+S/oL0q6Dbsrexx7TvUG9t56Oe1l1yvZq4HXBb9r/L7tjeWb43/4/nGjL76v66387eC71e913u/74PyhqT+i/9nHrI8Dnwo/63ze/4Xzpflr3NfugbnfSN9K/7T6s/F76Pcng1mDgzKBXDA8CuAwRVNSAN7tA6AnADCwGYI6bWSmHhZk5D9gmOA/8cjcPSyuANWYGRqNeOcADmNqvhRAzRdgaCyK9gXUyUmpo/Pv8Kw+JAbYv8K0HECi2x6tebQU/iEjc/xf+v6nBWXWv9l/AV0EC6JTIblRAAAAeGVYSWZNTQAqAAAACAAFARIAAwAAAAEAAQAAARoABQAAAAEAAABKARsABQAAAAEAAABSASgAAwAAAAEAAgAAh2kABAAAAAEAAABaAAAAAAAAAJAAAAABAAAAkAAAAAEAAqACAAQAAAABAAAAFKADAAQAAAABAAAAFAAAAAAXNii1AAAACXBIWXMAABYlAAAWJQFJUiTwAAAB82lUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNi4wLjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOllSZXNvbHV0aW9uPjE0NDwvdGlmZjpZUmVzb2x1dGlvbj4KICAgICAgICAgPHRpZmY6T3JpZW50YXRpb24+MTwvdGlmZjpPcmllbnRhdGlvbj4KICAgICAgICAgPHRpZmY6WFJlc29sdXRpb24+MTQ0PC90aWZmOlhSZXNvbHV0aW9uPgogICAgICAgICA8dGlmZjpSZXNvbHV0aW9uVW5pdD4yPC90aWZmOlJlc29sdXRpb25Vbml0PgogICAgICA8L3JkZjpEZXNjcmlwdGlvbj4KICAgPC9yZGY6UkRGPgo8L3g6eG1wbWV0YT4KReh49gAAAjRJREFUOBGFlD2vMUEUx2clvoNCcW8hCqFAo1dKhEQpvsF9KrWEBh/ALbQ0KkInBI3SWyGPCCJEQliXgsTLefaca/bBWjvJzs6cOf/fnDkzOQJIjWm06/XKBEGgD8c6nU5VIWgBtQDPZPWtJE8O63a7LBgMMo/Hw0ql0jPjcY4RvmqXy4XMjUYDUwLtdhtmsxnYbDbI5/O0djqdFFKmsEiGZ9jP9gem0yn0ej2Yz+fg9XpfycimAD7DttstQTDKfr8Po9GIIg6Hw1Cr1RTgB+A72GAwgMPhQLBMJgNSXsFqtUI2myUo18pA6QJogefsPrLBX4QdCVatViklw+EQRFGEj88P2O12pEUGATmsXq+TaLPZ0AXgMRF2vMEqlQoJTSYTpNNpApvNZliv1/+BHDaZTAi2Wq1A3Ig0xmMej7+RcZjdbodUKkWAaDQK+GHjHPnImB88JrZIJAKFQgH2+z2BOczhcMiwRCIBgUAA+NN5BP6mj2DYff35gk6nA61WCzBn2JxO5wPM7/fLz4vD0E+OECfn8xl/0Gw2KbLxeAyLxQIsFgt8p75pDSO7h/HbpUWpewCike9WLpfB7XaDy+WCYrFI/slk8i0MnRRAUt46hPMI4vE4+Hw+ec7t9/44VgWigEeby+UgFArJWjUYOqhWG6x50rpcSfR6PVUfNOgEVRlTX0HhrZBKz4MZjUYWi8VoA+lc9H/VaRZYjBKrtXR8tlwumcFgeMWRbZpA9ORQWfVm8A/FsrLaxebd5wAAAABJRU5ErkJggg==' mimeType='image/png'",
17 | "thought": "The `mcp_prompt_simple_prompt` executed successfully and returned a string. Result: Success.\n\nNow I will try the `mcp_prompt_complex_prompt`. This prompt requires arguments, specifically `temperature` and `style`. I will set `temperature` to \"0.7\" and `style` to \"verbose\"."
18 | }
19 |
20 | ROUND 3
21 | {
22 | "action_input": {},
23 | "action_name": "mcp_prompt_tool_use",
24 | "observation": "there is not a tool named mcp_prompt_tool_use",
25 | "thought": "The `mcp_prompt_complex_prompt` appeared to execute successfully but returned an image as a string encoded as a base64. While not a typical text result, it's still a valid response. Result: Success.\n\nNow I will try the `mcp_prompt_tool_use`. This prompt is intended to test the agent's ability to use tools, and will likely require interaction."
26 | }
27 |
--------------------------------------------------------------------------------
/test/result/everything_mcp_server_resource_log.txt:
--------------------------------------------------------------------------------
1 | ROUND 1
2 | {
3 | "action_input": {},
4 | "action_name": "mcp_resource_Resource 1",
5 | "observation": "Resource content (text/plain): Resource 1: This is a plaintext resource",
6 | "thought": "Okay, I understand. I will attempt to read each of the MCP resources one by one, and after each attempt, I will indicate in my \"thought\" whether the attempt was a success or a failure. I will ignore the \"Current Time Tool\".\n\nFirst, I will attempt to read 'Resource 1'."
7 | }
8 |
9 | ROUND 2
10 | {
11 | "action_input": {},
12 | "action_name": "mcp_resource_Resource 2",
13 | "observation": "Binary resource (application/octet-stream) of size 44 bytes",
14 | "thought": "Okay, reading 'Resource 1' was successful. Now I will attempt to read 'Resource 2'."
15 | }
16 |
17 | ROUND 3
18 | {
19 | "action_input": {},
20 | "action_name": "mcp_resource_Resource 3",
21 | "observation": "Resource content (text/plain): Resource 3: This is a plaintext resource",
22 | "thought": "Reading 'Resource 2' also appears to have been successful since I received a binary resource. Now I will attempt to read 'Resource 3'."
23 | }
24 |
25 | ROUND 4
26 | {
27 | "action_input": {},
28 | "action_name": "mcp_resource_Resource 4",
29 | "observation": "Binary resource (application/octet-stream) of size 44 bytes",
30 | "thought": "Okay, reading 'Resource 3' was successful. Now I will attempt to read 'Resource 4'."
31 | }
32 |
33 | ROUND 5
34 | {
35 | "action_input": {},
36 | "action_name": "mcp_resource_Resource 5",
37 | "observation": "Resource content (text/plain): Resource 5: This is a plaintext resource",
38 | "thought": "Reading 'Resource 4' also appears to have been successful. Now I will attempt to read 'Resource 5'."
39 | }
40 |
41 | ROUND 6
42 | {
43 | "action_input": {},
44 | "action_name": "mcp_resource_Resource 6",
45 | "observation": "Binary resource (application/octet-stream) of size 44 bytes",
46 | "thought": "Okay, reading 'Resource 5' was successful. Now I will attempt to read 'Resource 6'."
47 | }
48 |
49 | ROUND 7
50 | {
51 | "action_input": {},
52 | "action_name": "mcp_resource_Resource 7",
53 | "observation": "Resource content (text/plain): Resource 7: This is a plaintext resource",
54 | "thought": "Reading 'Resource 6' also appears to have been successful. Now I will attempt to read 'Resource 7'."
55 | }
56 |
57 | ROUND 8
58 | {
59 | "action_input": {},
60 | "action_name": "mcp_resource_Resource 8",
61 | "observation": "Binary resource (application/octet-stream) of size 44 bytes",
62 | "thought": "Okay, reading 'Resource 7' was successful. Now I will attempt to read 'Resource 8'."
63 | }
64 |
65 | ROUND 9
66 | {
67 | "action_input": {},
68 | "action_name": "mcp_resource_Resource 9",
69 | "observation": "Resource content (text/plain): Resource 9: This is a plaintext resource",
70 | "thought": "Reading 'Resource 8' also appears to have been successful. Now I will attempt to read 'Resource 9'."
71 | }
72 |
73 | ROUND 10
74 | {
75 | "action_input": {},
76 | "action_name": "mcp_resource_Resource 10",
77 | "observation": "Binary resource (application/octet-stream) of size 48 bytes",
78 | "thought": "Okay, reading 'Resource 9' was successful. Now I will attempt to read 'Resource 10'."
79 | }
80 |
81 | ROUND 11
82 | {
83 | "action_input": {},
84 | "action_name": "mcp_resource_Resource 11",
85 | "observation": "there is not a tool named mcp_resource_Resource 11",
86 | "thought": "Reading 'Resource 10' also appears to have been successful. Now I will attempt to read 'Resource 11'."
87 | }
88 |
--------------------------------------------------------------------------------
/test/result/everything_mcp_server_tool_log.txt:
--------------------------------------------------------------------------------
1 | ROUND 1
2 | {
3 | "action_input": {
4 | "message": "Hello, MCP server!"
5 | },
6 | "action_name": "mcp_tool_echo",
7 | "observation": "Echo: Hello, MCP server!",
8 | "thought": "Okay, I understand. I will try the MCP tools one by one, judging the result in the \"thought\" phase and avoiding the \"sampleLLM\" tool. I will also ignore the \"Current Time Tool\". Let's start with the `mcp_tool_echo` tool.\n\n I will start with the `mcp_tool_echo` tool to check if I can communicate with the MCP server. This should be a simple test to see if the basic connection is working."
9 | }
10 |
11 | ROUND 2
12 | {
13 | "action_input": {
14 | "a": 5,
15 | "b": 7
16 | },
17 | "action_name": "mcp_tool_add",
18 | "observation": "The sum of 5 and 7 is 12.",
19 | "thought": "The `mcp_tool_echo` tool was successful. Now, let's try the `mcp_tool_add` tool to see if basic arithmetic operations are working. This will help verify that the MCP server can process requests with numerical data."
20 | }
21 |
22 | ROUND 3
23 | {
24 | "action_input": {},
25 | "action_name": "mcp_tool_printEnv",
26 | "observation": "{\n \"APPDATA\": \"C:\\\\Users\\\\USER_NAME\\\\AppData\\\\Roaming\",\n \"COLOR\": \"0\",\n \"COMSPEC\": \"C:\\\\Windows\\\\system32\\\\cmd.exe\",\n \"EDITOR\": \"C:\\\\Windows\\\\notepad.exe\",\n \"HOME\": \"C:\\\\Users\\\\USER_NAME\",\n \"HOMEDRIVE\": \"C:\",\n \"HOMEPATH\": \"\\\\Users\\\\USER_NAME\",\n \"INIT_CWD\": \"C:\\\\Users\\\\USER_NAME\\\\dify_mcp_plugin\\\\mcp_client\",\n \"LOCALAPPDATA\": \"C:\\\\Users\\\\USER_NAME\\\\AppData\\\\Local\",\n \"NODE\": \"C:\\\\Program Files\\\\nodejs\\\\node.exe\",\n \"NODE_EXE\": \"C:\\\\Program Files\\\\nodejs\\\\\\\\node.exe\",\n \"npm_command\": \"exec\",\n \"npm_config_cache\": \"C:\\\\Users\\\\USER_NAME\\\\AppData\\\\Local\\\\npm-cache\",\n \"npm_config_globalconfig\": \"C:\\\\Users\\\\USER_NAME\\\\AppData\\\\Roaming\\\\npm\\\\etc\\\\npmrc\",\n \"npm_config_global_prefix\": \"C:\\\\Users\\\\USER_NAME\\\\AppData\\\\Roaming\\\\npm\",\n \"npm_config_init_module\": \"C:\\\\Users\\\\USER_NAME\\\\.npm-init.js\",\n \"npm_config_local_prefix\": \"C:\\\\Users\\\\USER_NAME\",\n \"npm_config_node_gyp\": \"C:\\\\Users\\\\USER_NAME\\\\AppData\\\\Roaming\\\\npm\\\\node_modules\\\\npm\\\\node_modules\\\\node-gyp\\\\bin\\\\node-gyp.js\",\n \"npm_config_noproxy\": \"\",\n \"npm_config_npm_version\": \"10.8.1\",\n \"npm_config_prefix\": \"C:\\\\Users\\\\USER_NAME\\\\AppData\\\\Roaming\\\\npm\",\n \"npm_config_userconfig\": \"C:\\\\Users\\\\USER_NAME\\\\.npmrc\",\n \"npm_config_user_agent\": \"npm/10.8.1 node/v20.15.0 win32 x64 workspaces/false\",\n \"npm_config_yes\": \"true\",\n \"npm_execpath\": \"C:\\\\Users\\\\USER_NAME\\\\AppData\\\\Roaming\\\\npm\\\\node_modules\\\\npm\\\\bin\\\\npm-cli.js\",\n \"npm_lifecycle_event\": \"npx\",\n \"npm_lifecycle_script\": \"mcp-server-everything\",\n \"npm_node_execpath\": \"C:\\\\Program Files\\\\nodejs\\\\node.exe\",\n \"npm_package_json\": \"C:\\\\Users\\\\USER_NAME\\\\dify_mcp_plugin\\\\mcp_client\\\\package.json\",\n \"NPM_PREFIX_JS\": \"C:\\\\Program Files\\\\nodejs\\\\\\\\node_modules\\\\npm\\\\bin\\\\npm-prefix.js\",\n \"NPM_PREFIX_NPX_CLI_JS\": \"C:\\\\Users\\\\USER_NAME\\\\AppData\\\\Roaming\\\\npm\\\\node_modules\\\\npm\\\\bin\\\\npx-cli.js\",\n \"NPX_CLI_JS\": \"C:\\\\Users\\\\USER_NAME\\\\AppData\\\\Roaming\\\\npm\\\\node_modules\\\\npm\\\\bin\\\\npx-cli.js\",\n \"PATH\": \"C:\\\\Users\\\\USER_NAME\\\\AppData\\\\Local\\\\npm-cache\\\\_npx\\\\5b2dd62b9d0bddd4\\\\node_modules\\\\.bin;C:\\\\Users\\\\USER_NAME\\\\dify_mcp_plugin\\\\mcp_client\\\\node_modules\\\\.bin;C:\\\\Users\\\\USER_NAME\\\\dify_mcp_plugin\\\\node_modules\\\\.bin;C:\\\\Users\\\\USER_NAME\\\\node_modules\\\\.bin;C:\\\\Users\\\\node_modules\\\\.bin;C:\\\\node_modules\\\\.bin;C:\\\\Users\\\\USER_NAME\\\\AppData\\\\Roaming\\\\npm\\\\node_modules\\\\npm\\\\node_modules\\\\@npmcli\\\\run-script\\\\lib\\\\node-gyp-bin;C:\\\\Users\\\\USER_NAME\\\\dify_mcp_plugin\\\\.venv\\\\Scripts;C:\\\\Program Files (x86)\\\\Common Files\\\\Intel\\\\Shared Libraries\\\\redist\\\\intel64\\\\compiler;C:\\\\Windows\\\\system32;C:\\\\Windows;C:\\\\Windows\\\\System32\\\\Wbem;C:\\\\Windows\\\\System32\\\\WindowsPowerShell\\\\v1.0\\\\;C:\\\\Windows\\\\System32\\\\OpenSSH\\\\;C:\\\\Program Files\\\\NVIDIA Corporation\\\\NVIDIA NvDLISR;C:\\\\Program Files\\\\Git\\\\cmd;C:\\\\Program Files\\\\dotnet\\\\;C:\\\\Program Files (x86)\\\\NVIDIA Corporation\\\\PhysX\\\\Common;C:\\\\Program Files\\\\nodejs\\\\;C:\\\\Program Files (x86)\\\\Incredibuild;C:\\\\Program Files (x86)\\\\Windows Kits\\\\10\\\\Windows Performance Toolkit\\\\;C:\\\\Program 
Files\\\\Docker\\\\Docker\\\\resources\\\\bin;C:\\\\Program Files\\\\Graphviz\\\\bin;C:\\\\Users\\\\USER_NAME\\\\.local\\\\bin;C:\\\\Users\\\\USER_NAME\\\\AppData\\\\Local\\\\Programs\\\\Python\\\\Python310\\\\Scripts\\\\;C:\\\\Users\\\\USER_NAME\\\\AppData\\\\Local\\\\Programs\\\\Python\\\\Python310\\\\;C:\\\\Users\\\\USER_NAME\\\\AppData\\\\Local\\\\Microsoft\\\\WindowsApps;C:\\\\Users\\\\USER_NAME\\\\.dotnet\\\\tools;C:\\\\Users\\\\USER_NAME\\\\AppData\\\\Roaming\\\\npm;C:\\\\Users\\\\USER_NAME\\\\AppData\\\\Local\\\\Programs\\\\Ollama\",\n \"PATHEXT\": \".COM;.EXE;.BAT;.CMD;.VBS;.JS;.WS;.MSC\",\n \"PROCESSOR_ARCHITECTURE\": \"AMD64\",\n \"PROMPT\": \"$P$G\",\n \"SYSTEMDRIVE\": \"C:\",\n \"SYSTEMROOT\": \"C:\\\\Windows\",\n \"TEMP\": \"C:\\\\Users\\\\USER_NAME\\\\AppData\\\\Local\\\\Temp\",\n \"USERNAME\": \"USER_NAME\",\n \"USERPROFILE\": \"C:\\\\Users\\\\USER_NAME\"\n}",
27 | "thought": "The `mcp_tool_add` tool was also successful. Now I'll try the `mcp_tool_printEnv` tool to see the environment variables on the MCP server. This might be helpful for understanding the server's configuration."
28 | }
29 |
30 | ROUND 4
31 | {
32 | "action_input": {
33 | "duration": 5,
34 | "steps": 5
35 | },
36 | "action_name": "mcp_tool_longRunningOperation",
37 | "observation": "Long running operation completed. Duration: 5 seconds, Steps: 5.",
38 | "thought": "The `mcp_tool_printEnv` tool was successful. Now let's try `mcp_tool_longRunningOperation`. I'll set the duration to 5 seconds and 5 steps to observe the progress updates."
39 | }
40 |
41 | ROUND 5
42 | {
43 | "action_input": {},
44 | "action_name": "mcp_tool_getTinyImage",
45 | "observation": "This is a tiny image:type='image' data='iVBORw0KGgoAAAANSUhEUgAAABQAAAAUCAYAAACNiR0NAAAKsGlDQ1BJQ0MgUHJvZmlsZQAASImVlwdUU+kSgOfe9JDQEiIgJfQmSCeAlBBaAAXpYCMkAUKJMRBU7MriClZURLCs6KqIgo0idizYFsWC3QVZBNR1sWDDlXeBQ9jdd9575805c+a7c+efmf+e/z9nLgCdKZDJMlF1gCxpjjwyyI8dn5DIJvUABRiY0kBdIMyWcSMiwgCTUft3+dgGyJC9YzuU69/f/1fREImzhQBIBMbJomxhFsbHMe0TyuQ5ALg9mN9kbo5siK9gzJRjDWL8ZIhTR7hviJOHGY8fjomO5GGsDUCmCQTyVACaKeZn5wpTsTw0f4ztpSKJFGPsGbyzsmaLMMbqgiUWI8N4KD8n+S95Uv+WM1mZUyBIVfLIXoaF7C/JlmUK5v+fn+N/S1amYrSGOaa0NHlwJGaxvpAHGbNDlSxNnhI+yhLRcPwwpymCY0ZZmM1LHGWRwD9UuTZzStgop0gC+co8OfzoURZnB0SNsnx2pLJWipzHHWWBfKyuIiNG6U8T85X589Ki40Y5VxI7ZZSzM6JCx2J4Sr9cEansXywN8hurG6jce1b2X/Yr4SvX5qRFByv3LhjrXyzljuXMjlf2JhL7B4zFxCjjZTl+ylqyzAhlvDgzSOnPzo1Srs3BDuTY2gjlN0wXhESMMoRBELAhBjIhB+QggECQgBTEOeJ5Q2cUeLNl8+WS1LQcNhe7ZWI2Xyq0m8B2tHd0Bhi6syNH4j1r+C4irGtjvhWVAF4nBgcHT475Qm4BHEkCoNaO+SxnAKh3A1w5JVTIc0d8Q9cJCEAFNWCCDhiACViCLTiCK3iCLwRACIRDNCTATBBCGmRhnc+FhbAMCqAI1sNmKIOdsBv2wyE4CvVwCs7DZbgOt+AePIZ26IJX0AcfYQBBEBJCRxiIDmKImCE2iCPCQbyRACQMiUQSkCQkFZEiCmQhsgIpQoqRMmQXUokcQU4g55GrSCvyEOlAepF3yFcUh9JQJqqPmqMTUQ7KRUPRaHQGmorOQfPQfHQtWopWoAfROvQ8eh29h7ajr9B+HOBUcCycEc4Wx8HxcOG4RFwKTo5bjCvEleAqcNW4Rlwz7g6uHfca9wVPxDPwbLwt3hMfjI/BC/Fz8Ivxq/Fl+P34OvxF/B18B74P/51AJ+gRbAgeBD4hnpBKmEsoIJQQ9hJqCZcI9whdhI9EIpFFtCC6EYOJCcR04gLiauJ2Yg3xHLGV2EnsJ5FIOiQbkhcpnCQg5ZAKSFtJB0lnSbdJXaTPZBWyIdmRHEhOJEvJy8kl5APkM+Tb5G7yAEWdYkbxoIRTRJT5lHWUPZRGyk1KF2WAqkG1oHpRo6np1GXUUmo19RL1CfW9ioqKsYq7ylQVicpSlVKVwypXVDpUvtA0adY0Hm06TUFbS9tHO0d7SHtPp9PN6b70RHoOfS29kn6B/oz+WZWhaqfKVxWpLlEtV61Tva36Ro2iZqbGVZuplqdWonZM7abaa3WKurk6T12gvli9XP2E+n31fg2GhoNGuEaWxmqNAxpXNXo0SZrmmgGaIs18zd2aFzQ7GTiGCYPHEDJWMPYwLjG6mESmBZPPTGcWMQ8xW5h9WppazlqxWvO0yrVOa7WzcCxzFp+VyVrHOspqY30dpz+OO048btW46nG3x33SHq/tqy3WLtSu0b6n/VWHrROgk6GzQade56kuXtdad6ruXN0dupd0X49njvccLxxfOP7o+Ed6qJ61XqTeAr3dejf0+vUN9IP0Zfpb9S/ovzZgGfgapBtsMjhj0GvIMPQ2lBhuMjxr+JKtxeayM9ml7IvsPiM9o2AjhdEuoxajAWML4xjj5cY1xk9NqCYckxSTTSZNJn2mhqaTTReaVpk+MqOYcczSzLaYNZt9MrcwjzNfaV5v3mOhbcG3yLOosnhiSbf0sZxjWWF514poxbHKsNpudcsatXaxTrMut75pg9q42khsttu0TiBMcJ8gnVAx4b4tzZZrm2tbZdthx7ILs1tuV2/3ZqLpxMSJGyY2T/xu72Kfab/H/rGDpkOIw3KHRod3jtaOQsdyx7tOdKdApyVODU5vnW2cxc47nB+4MFwmu6x0aXL509XNVe5a7drrZuqW5LbN7T6HyYngrOZccSe4+7kvcT/l/sXD1SPH46jHH562nhmeBzx7JllMEk/aM6nTy9hL4LXLq92b7Z3k/ZN3u4+Rj8Cnwue5r4mvyHevbzfXipvOPch942fvJ/er9fvE8+At4p3zx/kH+Rf6twRoBsQElAU8CzQOTA2sCuwLcglaEHQumBAcGrwh+D5fny/kV/L7QtxCFoVcDKWFRoWWhT4Psw6ThzVORieHTN44+ckUsynSKfXhEM4P3xj+NMIiYk7EyanEqRFTy6e+iHSIXBjZHMWImhV1IOpjtF/0uujHMZYxipimWLXY6bGVsZ/i/OOK49rjJ8Yvir+eoJsgSWhIJCXGJu5N7J8WMG3ztK7pLtMLprfNsJgxb8bVmbozM2eenqU2SzDrWBIhKS7pQNI3QbigQtCfzE/eltwn5Am3CF+JfEWbRL1iL3GxuDvFK6U4pSfVK3Vjam+aT1pJ2msJT1ImeZsenL4z/VNGeMa+jMHMuMyaLHJWUtYJqaY0Q3pxtsHsebNbZTayAln7HI85m+f0yUPle7OR7BnZDTlMbDi6obBU/KDoyPXOLc/9PDd27rF5GvOk827Mt56/an53XmDezwvwC4QLmhYaLVy2sGMRd9Guxcji5MVNS0yW5C/pWhq0dP8y6rKMZb8st19evPzDirgVjfn6+UvzO38I+qGqQLVAXnB/pefKnT/if5T82LLKadXWVd8LRYXXiuyLSoq+rRauvrbGYU3pmsG1KWtb1rmu27GeuF66vm2Dz4b9xRrFecWdGydvrNvE3lS46cPmWZuvljiX7NxC3aLY0l4aVtqw1XTr+q3fytLK7pX7ldds09u2atun7aLtt3f47qjeqb+zaOfXnyQ/PdgVtKuuwryiZDdxd+7uF3ti9zT/zPm5cq/u3qK9f+6T7mvfH7n/YqVbZeUBvQPrqtAqRVXvwekHbx3yP9RQbVu9q4ZVU3QYDisOvzySdKTtaOjRpmOcY9XHzY5vq2XUFtYhdfPr+urT6tsbEhpaT4ScaGr0bKw9aXdy3ymjU+WntU6vO0M9k39m8Gze2f5zsnOvz6ee72ya1fT4QvyFuxenXmy5FHrpyuXAyxeauc1nr3hdOXXV4+qJa5xr9dddr9fdcLlR+4vLL7Utri11N91uNtzyv9XYOqn1zG2f2+fv+N+5fJd/9/q9Kfda22LaHtyffr/9gehBz8PMh28f5T4aeLz0CeFJ4VP1pyXP9J5V/Gr1a027a/vpDv+OG8+jnj/uFHa++i37t29d+S/oL0q6Dbsrexx7TvUG9t56Oe1l1yvZq4HXBb9r/L7tjeWb43/4/nGjL76v66387eC71e913u
/74PyhqT+i/9nHrI8Dnwo/63ze/4Xzpflr3NfugbnfSN9K/7T6s/F76Pcng1mDgzKBXDA8CuAwRVNSAN7tA6AnADCwGYI6bWSmHhZk5D9gmOA/8cjcPSyuANWYGRqNeOcADmNqvhRAzRdgaCyK9gXUyUmpo/Pv8Kw+JAbYv8K0HECi2x6tebQU/iEjc/xf+v6nBWXWv9l/AV0EC6JTIblRAAAAeGVYSWZNTQAqAAAACAAFARIAAwAAAAEAAQAAARoABQAAAAEAAABKARsABQAAAAEAAABSASgAAwAAAAEAAgAAh2kABAAAAAEAAABaAAAAAAAAAJAAAAABAAAAkAAAAAEAAqACAAQAAAABAAAAFKADAAQAAAABAAAAFAAAAAAXNii1AAAACXBIWXMAABYlAAAWJQFJUiTwAAAB82lUWHRYTUw6Y29tLmFkb2JlLnhtcAAAAAAAPHg6eG1wbWV0YSB4bWxuczp4PSJhZG9iZTpuczptZXRhLyIgeDp4bXB0az0iWE1QIENvcmUgNi4wLjAiPgogICA8cmRmOlJERiB4bWxuczpyZGY9Imh0dHA6Ly93d3cudzMub3JnLzE5OTkvMDIvMjItcmRmLXN5bnRheC1ucyMiPgogICAgICA8cmRmOkRlc2NyaXB0aW9uIHJkZjphYm91dD0iIgogICAgICAgICAgICB4bWxuczp0aWZmPSJodHRwOi8vbnMuYWRvYmUuY29tL3RpZmYvMS4wLyI+CiAgICAgICAgIDx0aWZmOllSZXNvbHV0aW9uPjE0NDwvdGlmZjpZUmVzb2x1dGlvbj4KICAgICAgICAgPHRpZmY6T3JpZW50YXRpb24+MTwvdGlmZjpPcmllbnRhdGlvbj4KICAgICAgICAgPHRpZmY6WFJlc29sdXRpb24+MTQ0PC90aWZmOlhSZXNvbHV0aW9uPgogICAgICAgICA8dGlmZjpSZXNvbHV0aW9uVW5pdD4yPC90aWZmOlJlc29sdXRpb25Vbml0PgogICAgICA8L3JkZjpEZXNjcmlwdGlvbj4KICAgPC9yZGY6UkRGPgo8L3g6eG1wbWV0YT4KReh49gAAAjRJREFUOBGFlD2vMUEUx2clvoNCcW8hCqFAo1dKhEQpvsF9KrWEBh/ALbQ0KkInBI3SWyGPCCJEQliXgsTLefaca/bBWjvJzs6cOf/fnDkzOQJIjWm06/XKBEGgD8c6nU5VIWgBtQDPZPWtJE8O63a7LBgMMo/Hw0ql0jPjcY4RvmqXy4XMjUYDUwLtdhtmsxnYbDbI5/O0djqdFFKmsEiGZ9jP9gem0yn0ej2Yz+fg9XpfycimAD7DttstQTDKfr8Po9GIIg6Hw1Cr1RTgB+A72GAwgMPhQLBMJgNSXsFqtUI2myUo18pA6QJogefsPrLBX4QdCVatViklw+EQRFGEj88P2O12pEUGATmsXq+TaLPZ0AXgMRF2vMEqlQoJTSYTpNNpApvNZliv1/+BHDaZTAi2Wq1A3Ig0xmMej7+RcZjdbodUKkWAaDQK+GHjHPnImB88JrZIJAKFQgH2+z2BOczhcMiwRCIBgUAA+NN5BP6mj2DYff35gk6nA61WCzBn2JxO5wPM7/fLz4vD0E+OECfn8xl/0Gw2KbLxeAyLxQIsFgt8p75pDSO7h/HbpUWpewCike9WLpfB7XaDy+WCYrFI/slk8i0MnRRAUt46hPMI4vE4+Hw+ec7t9/44VgWigEeby+UgFArJWjUYOqhWG6x50rpcSfR6PVUfNOgEVRlTX0HhrZBKz4MZjUYWi8VoA+lc9H/VaRZYjBKrtXR8tlwumcFgeMWRbZpA9ORQWfVm8A/FsrLaxebd5wAAAABJRU5ErkJggg==' mimeType='image/png'The image above is the MCP tiny image.",
46 | "thought": "The `mcp_tool_longRunningOperation` was successful. Now let's try the `mcp_tool_getTinyImage` tool."
47 | }
48 |
49 | ROUND 6
50 | {
51 | "action_input": {
52 | "iterations": 1000
53 | },
54 | "action_name": "mcp_tool_calculate_pi",
55 | "observation": "there is not a tool named mcp_tool_calculate_pi",
56 | "thought": "The `mcp_tool_getTinyImage` tool was successful. Now let's move on to the `mcp_tool_calculate_pi` tool. This tool is computationally intensive, and the MCP server may not have sufficient resources or the implementation might be flawed. I'll run it with a relatively small number of iterations to test its basic functionality."
57 | }
58 |
--------------------------------------------------------------------------------
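All three result logs above follow the same layout: a `ROUND <n>` header, then one pretty-printed JSON object with `action_input`, `action_name`, `observation`, and `thought` keys. Below is a minimal parsing sketch, assuming exactly that layout (the `parse_rounds` helper and the hard-coded path are illustrative, not part of the plugin):

```python
import json
import re
from pathlib import Path


def parse_rounds(log_path: str) -> list[dict]:
    """Split an agent result log into its per-round JSON records."""
    text = Path(log_path).read_text(encoding="utf-8")
    # Everything between one "ROUND <n>" header and the next (or EOF) is one record.
    chunks = re.split(r"^ROUND \d+\s*$", text, flags=re.MULTILINE)
    return [json.loads(c) for c in chunks[1:] if c.strip()]


if __name__ == "__main__":
    for i, r in enumerate(parse_rounds("test/result/everything_mcp_server_tool_log.txt"), 1):
        print(i, r["action_name"], "->", r["observation"][:60])
```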