├── src ├── git │ ├── .python-version │ ├── src │ │ └── mcp_server_git │ │ │ ├── py.typed │ │ │ ├── __main__.py │ │ │ └── __init__.py │ ├── .gitignore │ ├── LICENSE │ ├── pyproject.toml │ ├── Dockerfile │ ├── tests │ │ └── test_server.py │ └── README.md ├── time │ ├── .python-version │ ├── src │ │ └── mcp_server_time │ │ │ ├── __main__.py │ │ │ ├── __init__.py │ │ │ └── server.py │ ├── pyproject.toml │ ├── Dockerfile │ └── README.md ├── fetch │ ├── .python-version │ ├── src │ │ └── mcp_server_fetch │ │ │ ├── __main__.py │ │ │ ├── __init__.py │ │ │ └── server.py │ ├── LICENSE │ ├── pyproject.toml │ ├── Dockerfile │ └── README.md ├── everything │ ├── tsconfig.json │ ├── Dockerfile │ ├── stdio.ts │ ├── CLAUDE.md │ ├── index.ts │ ├── instructions.md │ ├── package.json │ ├── sse.ts │ ├── streamableHttp.ts │ └── README.md ├── memory │ ├── tsconfig.json │ ├── vitest.config.ts │ ├── Dockerfile │ ├── package.json │ ├── __tests__ │ │ └── file-path.test.ts │ └── README.md ├── sequentialthinking │ ├── tsconfig.json │ ├── vitest.config.ts │ ├── Dockerfile │ ├── package.json │ ├── lib.ts │ ├── index.ts │ ├── README.md │ └── __tests__ │ │ └── lib.test.ts └── filesystem │ ├── vitest.config.ts │ ├── tsconfig.json │ ├── Dockerfile │ ├── package.json │ ├── roots-utils.ts │ ├── path-validation.ts │ ├── __tests__ │ ├── roots-utils.test.ts │ ├── structured-content.test.ts │ └── directory-tree.test.ts │ └── path-utils.ts ├── .gitattributes ├── .npmrc ├── .mcp.json ├── tsconfig.json ├── SECURITY.md ├── package.json ├── LICENSE ├── .github ├── workflows │ ├── claude.yml │ ├── typescript.yml │ ├── python.yml │ └── release.yml └── pull_request_template.md ├── CONTRIBUTING.md ├── CODE_OF_CONDUCT.md ├── .gitignore └── scripts └── release.py /src/git/.python-version: -------------------------------------------------------------------------------- 1 | 3.10 2 | -------------------------------------------------------------------------------- /src/time/.python-version: 
-------------------------------------------------------------------------------- 1 | 3.10 2 | -------------------------------------------------------------------------------- /src/fetch/.python-version: -------------------------------------------------------------------------------- 1 | 3.11 2 | -------------------------------------------------------------------------------- /src/git/src/mcp_server_git/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/git/.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | .venv 3 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | package-lock.json linguist-generated=true 2 | -------------------------------------------------------------------------------- /src/time/src/mcp_server_time/__main__.py: -------------------------------------------------------------------------------- 1 | from mcp_server_time import main 2 | 3 | main() 4 | -------------------------------------------------------------------------------- /src/git/src/mcp_server_git/__main__.py: -------------------------------------------------------------------------------- 1 | # __main__.py 2 | 3 | from mcp_server_git import main 4 | 5 | main() 6 | -------------------------------------------------------------------------------- /.npmrc: -------------------------------------------------------------------------------- 1 | registry="https://registry.npmjs.org/" 2 | @modelcontextprotocol:registry="https://registry.npmjs.org/" 3 | -------------------------------------------------------------------------------- /src/fetch/src/mcp_server_fetch/__main__.py: -------------------------------------------------------------------------------- 1 | # __main__.py 2 | 3 | 
from mcp_server_fetch import main 4 | 5 | main() 6 | -------------------------------------------------------------------------------- /.mcp.json: -------------------------------------------------------------------------------- 1 | { 2 | "mcpServers": { 3 | "mcp-docs": { 4 | "type": "http", 5 | "url": "https://modelcontextprotocol.io/mcp" 6 | } 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /src/everything/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "rootDir": "." 6 | }, 7 | "include": [ 8 | "./**/*.ts" 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /src/memory/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "rootDir": "." 6 | }, 7 | "include": [ 8 | "./**/*.ts" 9 | ], 10 | "exclude": [ 11 | "**/*.test.ts", 12 | "vitest.config.ts" 13 | ] 14 | } 15 | -------------------------------------------------------------------------------- /src/sequentialthinking/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "rootDir": "." 
6 | }, 7 | "include": [ 8 | "./**/*.ts" 9 | ], 10 | "exclude": [ 11 | "**/*.test.ts", 12 | "vitest.config.ts" 13 | ] 14 | } 15 | -------------------------------------------------------------------------------- /src/memory/vitest.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from 'vitest/config'; 2 | 3 | export default defineConfig({ 4 | test: { 5 | globals: true, 6 | environment: 'node', 7 | include: ['**/__tests__/**/*.test.ts'], 8 | coverage: { 9 | provider: 'v8', 10 | include: ['**/*.ts'], 11 | exclude: ['**/__tests__/**', '**/dist/**'], 12 | }, 13 | }, 14 | }); 15 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2022", 4 | "module": "Node16", 5 | "moduleResolution": "Node16", 6 | "strict": true, 7 | "esModuleInterop": true, 8 | "skipLibCheck": true, 9 | "forceConsistentCasingInFileNames": true, 10 | "resolveJsonModule": true 11 | }, 12 | "include": ["src/**/*"], 13 | "exclude": ["node_modules"] 14 | } 15 | -------------------------------------------------------------------------------- /src/filesystem/vitest.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from 'vitest/config'; 2 | 3 | export default defineConfig({ 4 | test: { 5 | globals: true, 6 | environment: 'node', 7 | include: ['**/__tests__/**/*.test.ts'], 8 | coverage: { 9 | provider: 'v8', 10 | include: ['**/*.ts'], 11 | exclude: ['**/__tests__/**', '**/dist/**'], 12 | }, 13 | }, 14 | }); 15 | -------------------------------------------------------------------------------- /src/sequentialthinking/vitest.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from 'vitest/config'; 2 | 3 | export default defineConfig({ 4 | 
test: { 5 | globals: true, 6 | environment: 'node', 7 | include: ['**/__tests__/**/*.test.ts'], 8 | coverage: { 9 | provider: 'v8', 10 | include: ['**/*.ts'], 11 | exclude: ['**/__tests__/**', '**/dist/**'], 12 | }, 13 | }, 14 | }); 15 | -------------------------------------------------------------------------------- /src/filesystem/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "rootDir": ".", 6 | "moduleResolution": "NodeNext", 7 | "module": "NodeNext" 8 | }, 9 | "include": [ 10 | "./**/*.ts" 11 | ], 12 | "exclude": [ 13 | "**/__tests__/**", 14 | "**/*.test.ts", 15 | "**/*.spec.ts", 16 | "vitest.config.ts" 17 | ] 18 | } 19 | -------------------------------------------------------------------------------- /src/everything/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22.12-alpine AS builder 2 | 3 | COPY src/everything /app 4 | COPY tsconfig.json /tsconfig.json 5 | 6 | WORKDIR /app 7 | 8 | RUN --mount=type=cache,target=/root/.npm npm install 9 | 10 | FROM node:22-alpine AS release 11 | 12 | WORKDIR /app 13 | 14 | COPY --from=builder /app/dist /app/dist 15 | COPY --from=builder /app/package.json /app/package.json 16 | COPY --from=builder /app/package-lock.json /app/package-lock.json 17 | 18 | ENV NODE_ENV=production 19 | 20 | RUN npm ci --ignore-scripts --omit-dev 21 | 22 | CMD ["node", "dist/index.js"] -------------------------------------------------------------------------------- /src/time/src/mcp_server_time/__init__.py: -------------------------------------------------------------------------------- 1 | from .server import serve 2 | 3 | 4 | def main(): 5 | """MCP Time Server - Time and timezone conversion functionality for MCP""" 6 | import argparse 7 | import asyncio 8 | 9 | parser = argparse.ArgumentParser( 10 | description="give a model the ability to 
handle time queries and timezone conversions" 11 | ) 12 | parser.add_argument("--local-timezone", type=str, help="Override local timezone") 13 | 14 | args = parser.parse_args() 15 | asyncio.run(serve(args.local_timezone)) 16 | 17 | 18 | if __name__ == "__main__": 19 | main() 20 | -------------------------------------------------------------------------------- /src/memory/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22.12-alpine AS builder 2 | 3 | COPY src/memory /app 4 | COPY tsconfig.json /tsconfig.json 5 | 6 | WORKDIR /app 7 | 8 | RUN --mount=type=cache,target=/root/.npm npm install 9 | 10 | RUN --mount=type=cache,target=/root/.npm-production npm ci --ignore-scripts --omit-dev 11 | 12 | FROM node:22-alpine AS release 13 | 14 | COPY --from=builder /app/dist /app/dist 15 | COPY --from=builder /app/package.json /app/package.json 16 | COPY --from=builder /app/package-lock.json /app/package-lock.json 17 | 18 | ENV NODE_ENV=production 19 | 20 | WORKDIR /app 21 | 22 | RUN npm ci --ignore-scripts --omit-dev 23 | 24 | ENTRYPOINT ["node", "dist/index.js"] -------------------------------------------------------------------------------- /src/filesystem/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22.12-alpine AS builder 2 | 3 | WORKDIR /app 4 | 5 | COPY src/filesystem /app 6 | COPY tsconfig.json /tsconfig.json 7 | 8 | RUN --mount=type=cache,target=/root/.npm npm install 9 | 10 | RUN --mount=type=cache,target=/root/.npm-production npm ci --ignore-scripts --omit-dev 11 | 12 | 13 | FROM node:22-alpine AS release 14 | 15 | WORKDIR /app 16 | 17 | COPY --from=builder /app/dist /app/dist 18 | COPY --from=builder /app/package.json /app/package.json 19 | COPY --from=builder /app/package-lock.json /app/package-lock.json 20 | 21 | ENV NODE_ENV=production 22 | 23 | RUN npm ci --ignore-scripts --omit-dev 24 | 25 | ENTRYPOINT ["node", "/app/dist/index.js"] 
-------------------------------------------------------------------------------- /src/sequentialthinking/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22.12-alpine AS builder 2 | 3 | COPY src/sequentialthinking /app 4 | COPY tsconfig.json /tsconfig.json 5 | 6 | WORKDIR /app 7 | 8 | RUN --mount=type=cache,target=/root/.npm npm install 9 | 10 | RUN --mount=type=cache,target=/root/.npm-production npm ci --ignore-scripts --omit-dev 11 | 12 | FROM node:22-alpine AS release 13 | 14 | COPY --from=builder /app/dist /app/dist 15 | COPY --from=builder /app/package.json /app/package.json 16 | COPY --from=builder /app/package-lock.json /app/package-lock.json 17 | 18 | ENV NODE_ENV=production 19 | 20 | WORKDIR /app 21 | 22 | RUN npm ci --ignore-scripts --omit-dev 23 | 24 | ENTRYPOINT ["node", "dist/index.js"] 25 | -------------------------------------------------------------------------------- /src/git/src/mcp_server_git/__init__.py: -------------------------------------------------------------------------------- 1 | import click 2 | from pathlib import Path 3 | import logging 4 | import sys 5 | from .server import serve 6 | 7 | @click.command() 8 | @click.option("--repository", "-r", type=Path, help="Git repository path") 9 | @click.option("-v", "--verbose", count=True) 10 | def main(repository: Path | None, verbose: bool) -> None: 11 | """MCP Git Server - Git functionality for MCP""" 12 | import asyncio 13 | 14 | logging_level = logging.WARN 15 | if verbose == 1: 16 | logging_level = logging.INFO 17 | elif verbose >= 2: 18 | logging_level = logging.DEBUG 19 | 20 | logging.basicConfig(level=logging_level, stream=sys.stderr) 21 | asyncio.run(serve(repository)) 22 | 23 | if __name__ == "__main__": 24 | main() 25 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | Thank 
you for helping us keep our MCP servers secure. 3 | 4 | The **reference servers** in this repo are maintained by [Anthropic](https://www.anthropic.com/) as part of the Model Context Protocol project. 5 | 6 | The security of our systems and user data is Anthropic’s top priority. We appreciate the work of security researchers acting in good faith in identifying and reporting potential vulnerabilities. 7 | 8 | ## Vulnerability Disclosure Program 9 | 10 | Our Vulnerability Program guidelines are defined on our [HackerOne program page](https://hackerone.com/anthropic-vdp). We ask that any validated vulnerability in this functionality be reported through the [submission form](https://hackerone.com/anthropic-vdp/reports/new?type=team&report_type=vulnerability). 11 | -------------------------------------------------------------------------------- /src/fetch/src/mcp_server_fetch/__init__.py: -------------------------------------------------------------------------------- 1 | from .server import serve 2 | 3 | 4 | def main(): 5 | """MCP Fetch Server - HTTP fetching functionality for MCP""" 6 | import argparse 7 | import asyncio 8 | 9 | parser = argparse.ArgumentParser( 10 | description="give a model the ability to make web requests" 11 | ) 12 | parser.add_argument("--user-agent", type=str, help="Custom User-Agent string") 13 | parser.add_argument( 14 | "--ignore-robots-txt", 15 | action="store_true", 16 | help="Ignore robots.txt restrictions", 17 | ) 18 | parser.add_argument("--proxy-url", type=str, help="Proxy URL to use for requests") 19 | 20 | args = parser.parse_args() 21 | asyncio.run(serve(args.user_agent, args.ignore_robots_txt, args.proxy_url)) 22 | 23 | 24 | if __name__ == "__main__": 25 | main() 26 | -------------------------------------------------------------------------------- /src/everything/stdio.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | import { StdioServerTransport } from 
"@modelcontextprotocol/sdk/server/stdio.js"; 4 | import { createServer } from "./everything.js"; 5 | 6 | console.error('Starting default (STDIO) server...'); 7 | 8 | async function main() { 9 | const transport = new StdioServerTransport(); 10 | const {server, cleanup, startNotificationIntervals} = createServer(); 11 | 12 | // Cleanup when client disconnects 13 | server.onclose = async () => { 14 | await cleanup(); 15 | process.exit(0); 16 | }; 17 | 18 | await server.connect(transport); 19 | startNotificationIntervals(); 20 | 21 | // Cleanup on exit 22 | process.on("SIGINT", async () => { 23 | await server.close(); 24 | }); 25 | } 26 | 27 | main().catch((error) => { 28 | console.error("Server error:", error); 29 | process.exit(1); 30 | }); 31 | 32 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/servers", 3 | "private": true, 4 | "version": "0.6.2", 5 | "description": "Model Context Protocol servers", 6 | "license": "MIT", 7 | "author": "Anthropic, PBC (https://anthropic.com)", 8 | "homepage": "https://modelcontextprotocol.io", 9 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 10 | "type": "module", 11 | "workspaces": [ 12 | "src/*" 13 | ], 14 | "files": [], 15 | "scripts": { 16 | "build": "npm run build --workspaces", 17 | "watch": "npm run watch --workspaces", 18 | "publish-all": "npm publish --workspaces --access public", 19 | "link-all": "npm link --workspaces" 20 | }, 21 | "dependencies": { 22 | "@modelcontextprotocol/server-everything": "*", 23 | "@modelcontextprotocol/server-memory": "*", 24 | "@modelcontextprotocol/server-filesystem": "*", 25 | "@modelcontextprotocol/server-sequential-thinking": "*" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /src/git/LICENSE: 
-------------------------------------------------------------------------------- 1 | Copyright (c) 2024 Anthropic, PBC. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | -------------------------------------------------------------------------------- /src/fetch/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2024 Anthropic, PBC. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2025 Anthropic, PBC 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /src/time/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "mcp-server-time" 3 | version = "0.6.2" 4 | description = "A Model Context Protocol server providing tools for time queries and timezone conversions for LLMs" 5 | readme = "README.md" 6 | requires-python = ">=3.10" 7 | authors = [ 8 | { name = "Mariusz 'maledorak' Korzekwa", email = "mariusz@korzekwa.dev" }, 9 | ] 10 | keywords = ["time", "timezone", "mcp", "llm"] 11 | license = { text = "MIT" } 12 | classifiers = [ 13 | "Development Status :: 4 - Beta", 14 | "Intended Audience :: Developers", 15 | "License :: OSI Approved :: MIT License", 16 | "Programming Language :: Python :: 3", 17 | "Programming Language :: Python :: 3.10", 18 | ] 19 | dependencies = [ 20 | "mcp>=1.0.0", 21 | "pydantic>=2.0.0", 22 | "tzdata>=2024.2", 23 | "tzlocal>=5.3.1" 24 | ] 25 | 26 | [project.scripts] 27 | mcp-server-time = "mcp_server_time:main" 28 | 29 | [build-system] 30 | requires = ["hatchling"] 31 | build-backend = "hatchling.build" 32 | 33 | [tool.uv] 34 | dev-dependencies = [ 35 | "freezegun>=1.5.1", 36 | "pyright>=1.1.389", 37 | "pytest>=8.3.3", 38 | "ruff>=0.8.1", 39 | ] 40 | -------------------------------------------------------------------------------- /src/memory/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/server-memory", 3 | "version": "0.6.3", 4 | "description": "MCP server for enabling memory for Claude through a knowledge graph", 5 | "license": "MIT", 6 | "mcpName": "io.github.modelcontextprotocol/server-memory", 7 | "author": "Anthropic, PBC (https://anthropic.com)", 8 | "homepage": "https://modelcontextprotocol.io", 9 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 10 | "repository": { 11 | "type": "git", 12 | "url": 
"https://github.com/modelcontextprotocol/servers.git" 13 | }, 14 | "type": "module", 15 | "bin": { 16 | "mcp-server-memory": "dist/index.js" 17 | }, 18 | "files": [ 19 | "dist" 20 | ], 21 | "scripts": { 22 | "build": "tsc && shx chmod +x dist/*.js", 23 | "prepare": "npm run build", 24 | "watch": "tsc --watch", 25 | "test": "vitest run --coverage" 26 | }, 27 | "dependencies": { 28 | "@modelcontextprotocol/sdk": "^1.24.0" 29 | }, 30 | "devDependencies": { 31 | "@types/node": "^22", 32 | "@vitest/coverage-v8": "^2.1.8", 33 | "shx": "^0.3.4", 34 | "typescript": "^5.6.2", 35 | "vitest": "^2.1.8" 36 | } 37 | } -------------------------------------------------------------------------------- /src/fetch/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "mcp-server-fetch" 3 | version = "0.6.3" 4 | description = "A Model Context Protocol server providing tools to fetch and convert web content for usage by LLMs" 5 | readme = "README.md" 6 | requires-python = ">=3.10" 7 | authors = [{ name = "Anthropic, PBC." 
}] 8 | maintainers = [{ name = "Jack Adamson", email = "jadamson@anthropic.com" }] 9 | keywords = ["http", "mcp", "llm", "automation"] 10 | license = { text = "MIT" } 11 | classifiers = [ 12 | "Development Status :: 4 - Beta", 13 | "Intended Audience :: Developers", 14 | "License :: OSI Approved :: MIT License", 15 | "Programming Language :: Python :: 3", 16 | "Programming Language :: Python :: 3.10", 17 | ] 18 | dependencies = [ 19 | "httpx<0.28", 20 | "markdownify>=0.13.1", 21 | "mcp>=1.1.3", 22 | "protego>=0.3.1", 23 | "pydantic>=2.0.0", 24 | "readabilipy>=0.2.0", 25 | "requests>=2.32.3", 26 | ] 27 | 28 | [project.scripts] 29 | mcp-server-fetch = "mcp_server_fetch:main" 30 | 31 | [build-system] 32 | requires = ["hatchling"] 33 | build-backend = "hatchling.build" 34 | 35 | [tool.uv] 36 | dev-dependencies = ["pyright>=1.1.389", "ruff>=0.7.3"] 37 | -------------------------------------------------------------------------------- /src/everything/CLAUDE.md: -------------------------------------------------------------------------------- 1 | # MCP "Everything" Server - Development Guidelines 2 | 3 | ## Build, Test & Run Commands 4 | - Build: `npm run build` - Compiles TypeScript to JavaScript 5 | - Watch mode: `npm run watch` - Watches for changes and rebuilds automatically 6 | - Run server: `npm run start` - Starts the MCP server using stdio transport 7 | - Run SSE server: `npm run start:sse` - Starts the MCP server with SSE transport 8 | - Prepare release: `npm run prepare` - Builds the project for publishing 9 | 10 | ## Code Style Guidelines 11 | - Use ES modules with `.js` extension in import paths 12 | - Strictly type all functions and variables with TypeScript 13 | - Follow zod schema patterns for tool input validation 14 | - Prefer async/await over callbacks and Promise chains 15 | - Place all imports at top of file, grouped by external then internal 16 | - Use descriptive variable names that clearly indicate purpose 17 | - Implement proper cleanup for timers 
and resources in server shutdown 18 | - Follow camelCase for variables/functions, PascalCase for types/classes, UPPER_CASE for constants 19 | - Handle errors with try/catch blocks and provide clear error messages 20 | - Use consistent indentation (2 spaces) and trailing commas in multi-line objects -------------------------------------------------------------------------------- /src/git/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "mcp-server-git" 3 | version = "0.6.2" 4 | description = "A Model Context Protocol server providing tools to read, search, and manipulate Git repositories programmatically via LLMs" 5 | readme = "README.md" 6 | requires-python = ">=3.10" 7 | authors = [{ name = "Anthropic, PBC." }] 8 | maintainers = [{ name = "David Soria Parra", email = "davidsp@anthropic.com" }] 9 | keywords = ["git", "mcp", "llm", "automation"] 10 | license = { text = "MIT" } 11 | classifiers = [ 12 | "Development Status :: 4 - Beta", 13 | "Intended Audience :: Developers", 14 | "License :: OSI Approved :: MIT License", 15 | "Programming Language :: Python :: 3", 16 | "Programming Language :: Python :: 3.10", 17 | ] 18 | dependencies = [ 19 | "click>=8.1.7", 20 | "gitpython>=3.1.45", 21 | "mcp>=1.0.0", 22 | "pydantic>=2.0.0", 23 | ] 24 | 25 | [project.scripts] 26 | mcp-server-git = "mcp_server_git:main" 27 | 28 | [build-system] 29 | requires = ["hatchling"] 30 | build-backend = "hatchling.build" 31 | 32 | [dependency-groups] 33 | dev = ["pyright>=1.1.407", "ruff>=0.7.3", "pytest>=8.0.0"] 34 | 35 | [tool.pytest.ini_options] 36 | testpaths = ["tests"] 37 | python_files = "test_*.py" 38 | python_classes = "Test*" 39 | python_functions = "test_*" 40 | -------------------------------------------------------------------------------- /src/sequentialthinking/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": 
"@modelcontextprotocol/server-sequential-thinking", 3 | "version": "0.6.2", 4 | "description": "MCP server for sequential thinking and problem solving", 5 | "license": "MIT", 6 | "mcpName": "io.github.modelcontextprotocol/server-sequential-thinking", 7 | "author": "Anthropic, PBC (https://anthropic.com)", 8 | "homepage": "https://modelcontextprotocol.io", 9 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 10 | "repository": { 11 | "type": "git", 12 | "url": "https://github.com/modelcontextprotocol/servers.git" 13 | }, 14 | "type": "module", 15 | "bin": { 16 | "mcp-server-sequential-thinking": "dist/index.js" 17 | }, 18 | "files": [ 19 | "dist" 20 | ], 21 | "scripts": { 22 | "build": "tsc && shx chmod +x dist/*.js", 23 | "prepare": "npm run build", 24 | "watch": "tsc --watch", 25 | "test": "vitest run --coverage" 26 | }, 27 | "dependencies": { 28 | "@modelcontextprotocol/sdk": "^1.24.0", 29 | "chalk": "^5.3.0", 30 | "yargs": "^17.7.2" 31 | }, 32 | "devDependencies": { 33 | "@types/node": "^22", 34 | "@types/yargs": "^17.0.32", 35 | "@vitest/coverage-v8": "^2.1.8", 36 | "shx": "^0.3.4", 37 | "typescript": "^5.3.3", 38 | "vitest": "^2.1.8" 39 | } 40 | } -------------------------------------------------------------------------------- /src/everything/index.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | // Parse command line arguments first 4 | const args = process.argv.slice(2); 5 | const scriptName = args[0] || 'stdio'; 6 | 7 | async function run() { 8 | try { 9 | // Dynamically import only the requested module to prevent all modules from initializing 10 | switch (scriptName) { 11 | case 'stdio': 12 | // Import and run the default server 13 | await import('./stdio.js'); 14 | break; 15 | case 'sse': 16 | // Import and run the SSE server 17 | await import('./sse.js'); 18 | break; 19 | case 'streamableHttp': 20 | // Import and run the streamable HTTP server 21 | await 
import('./streamableHttp.js'); 22 | break; 23 | default: 24 | console.error(`Unknown script: ${scriptName}`); 25 | console.log('Available scripts:'); 26 | console.log('- stdio'); 27 | console.log('- sse'); 28 | console.log('- streamableHttp'); 29 | process.exit(1); 30 | } 31 | } catch (error) { 32 | console.error('Error running script:', error); 33 | process.exit(1); 34 | } 35 | } 36 | 37 | run(); 38 | -------------------------------------------------------------------------------- /src/fetch/Dockerfile: -------------------------------------------------------------------------------- 1 | # Use a Python image with uv pre-installed 2 | FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv 3 | 4 | # Install the project into `/app` 5 | WORKDIR /app 6 | 7 | # Enable bytecode compilation 8 | ENV UV_COMPILE_BYTECODE=1 9 | 10 | # Copy from the cache instead of linking since it's a mounted volume 11 | ENV UV_LINK_MODE=copy 12 | 13 | # Install the project's dependencies using the lockfile and settings 14 | RUN --mount=type=cache,target=/root/.cache/uv \ 15 | --mount=type=bind,source=uv.lock,target=uv.lock \ 16 | --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ 17 | uv sync --locked --no-install-project --no-dev --no-editable 18 | 19 | # Then, add the rest of the project source code and install it 20 | # Installing separately from its dependencies allows optimal layer caching 21 | ADD . 
/app 22 | RUN --mount=type=cache,target=/root/.cache/uv \ 23 | uv sync --locked --no-dev --no-editable 24 | 25 | FROM python:3.12-slim-bookworm 26 | 27 | WORKDIR /app 28 | 29 | COPY --from=uv /root/.local /root/.local 30 | COPY --from=uv --chown=app:app /app/.venv /app/.venv 31 | 32 | # Place executables in the environment at the front of the path 33 | ENV PATH="/app/.venv/bin:$PATH" 34 | 35 | # when running the container, add --db-path and a bind mount to the host's db file 36 | ENTRYPOINT ["mcp-server-fetch"] 37 | -------------------------------------------------------------------------------- /src/filesystem/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/server-filesystem", 3 | "version": "0.6.3", 4 | "description": "MCP server for filesystem access", 5 | "license": "MIT", 6 | "mcpName": "io.github.modelcontextprotocol/server-filesystem", 7 | "author": "Anthropic, PBC (https://anthropic.com)", 8 | "homepage": "https://modelcontextprotocol.io", 9 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 10 | "repository": { 11 | "type": "git", 12 | "url": "https://github.com/modelcontextprotocol/servers.git" 13 | }, 14 | "type": "module", 15 | "bin": { 16 | "mcp-server-filesystem": "dist/index.js" 17 | }, 18 | "files": [ 19 | "dist" 20 | ], 21 | "scripts": { 22 | "build": "tsc && shx chmod +x dist/*.js", 23 | "prepare": "npm run build", 24 | "watch": "tsc --watch", 25 | "test": "vitest run --coverage" 26 | }, 27 | "dependencies": { 28 | "@modelcontextprotocol/sdk": "^1.24.0", 29 | "diff": "^5.1.0", 30 | "glob": "^10.5.0", 31 | "minimatch": "^10.0.1", 32 | "zod-to-json-schema": "^3.23.5" 33 | }, 34 | "devDependencies": { 35 | "@types/diff": "^5.0.9", 36 | "@types/minimatch": "^5.1.2", 37 | "@types/node": "^22", 38 | "@vitest/coverage-v8": "^2.1.8", 39 | "shx": "^0.3.4", 40 | "typescript": "^5.8.2", 41 | "vitest": "^2.1.8" 42 | } 43 | } 44 | 
-------------------------------------------------------------------------------- /src/everything/instructions.md: -------------------------------------------------------------------------------- 1 | Testing and demonstration server for MCP protocol features. 2 | 3 | ## Resources 4 | 5 | Resources 1-100 follow pattern: even IDs contain text, odd IDs contain binary data. Resources are paginated at 10 items per page with cursor-based navigation. 6 | 7 | ## Key dependencies 8 | 9 | Progress notifications require `_meta.progressToken` in tool calls. Resource subscriptions generate updates every 10 seconds. 10 | 11 | ## Performance characteristics 12 | 13 | Server generates automatic log messages every 20 seconds (filtered by current log level), stderr notifications every 30 seconds, and resource update notifications every 10 seconds for active subscriptions. Log level changes affect message filtering in real-time. 14 | 15 | ## Multi-modal testing 16 | 17 | `complex_prompt` includes both text arguments and image content for testing client multi-modal handling. `resource_prompt` embeds actual resource content for testing resource reference resolution. 18 | 19 | Argument completion is available for prompt parameters and resource IDs. Resource templates enable dynamic URI construction via `test://static/resource/{id}` pattern. 20 | 21 | ## Easter egg 22 | 23 | If asked about server instructions, respond with "🎉 Server instructions are working! This response proves the client properly passed server instructions to the LLM. This demonstrates MCP's instructions feature in action." 
24 | -------------------------------------------------------------------------------- /src/everything/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/server-everything", 3 | "version": "0.6.2", 4 | "description": "MCP server that exercises all the features of the MCP protocol", 5 | "license": "MIT", 6 | "mcpName": "io.github.modelcontextprotocol/server-everything", 7 | "author": "Anthropic, PBC (https://anthropic.com)", 8 | "homepage": "https://modelcontextprotocol.io", 9 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 10 | "repository": { 11 | "type": "git", 12 | "url": "https://github.com/modelcontextprotocol/servers.git" 13 | }, 14 | "type": "module", 15 | "bin": { 16 | "mcp-server-everything": "dist/index.js" 17 | }, 18 | "files": [ 19 | "dist" 20 | ], 21 | "scripts": { 22 | "build": "tsc && shx cp instructions.md dist/ && shx chmod +x dist/*.js", 23 | "prepare": "npm run build", 24 | "watch": "tsc --watch", 25 | "start": "node dist/index.js", 26 | "start:sse": "node dist/sse.js", 27 | "start:streamableHttp": "node dist/streamableHttp.js" 28 | }, 29 | "dependencies": { 30 | "@modelcontextprotocol/sdk": "^1.24.0", 31 | "cors": "^2.8.5", 32 | "express": "^5.2.1", 33 | "jszip": "^3.10.1", 34 | "zod": "^3.25.0", 35 | "zod-to-json-schema": "^3.23.5" 36 | }, 37 | "devDependencies": { 38 | "@types/cors": "^2.8.19", 39 | "@types/express": "^5.0.6", 40 | "shx": "^0.3.4", 41 | "typescript": "^5.6.2" 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /src/git/Dockerfile: -------------------------------------------------------------------------------- 1 | # Use a Python image with uv pre-installed 2 | FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv 3 | 4 | # Install the project into `/app` 5 | WORKDIR /app 6 | 7 | # Enable bytecode compilation 8 | ENV UV_COMPILE_BYTECODE=1 9 | 10 | # Copy from the cache 
instead of linking since it's a mounted volume 11 | ENV UV_LINK_MODE=copy 12 | 13 | # Install the project's dependencies using the lockfile and settings 14 | RUN --mount=type=cache,target=/root/.cache/uv \ 15 | --mount=type=bind,source=uv.lock,target=uv.lock \ 16 | --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ 17 | uv sync --locked --no-install-project --no-dev --no-editable 18 | 19 | # Then, add the rest of the project source code and install it 20 | # Installing separately from its dependencies allows optimal layer caching 21 | ADD . /app 22 | RUN --mount=type=cache,target=/root/.cache/uv \ 23 | uv sync --locked --no-dev --no-editable 24 | 25 | FROM python:3.12-slim-bookworm 26 | 27 | RUN apt-get update && apt-get install -y git git-lfs && rm -rf /var/lib/apt/lists/* \ 28 | && git lfs install --system 29 | 30 | WORKDIR /app 31 | 32 | COPY --from=uv /root/.local /root/.local 33 | COPY --from=uv --chown=app:app /app/.venv /app/.venv 34 | 35 | # Place executables in the environment at the front of the path 36 | ENV PATH="/app/.venv/bin:$PATH" 37 | 38 | # when running the container, add --db-path and a bind mount to the host's db file 39 | ENTRYPOINT ["mcp-server-git"] 40 | -------------------------------------------------------------------------------- /src/time/Dockerfile: -------------------------------------------------------------------------------- 1 | # Use a Python image with uv pre-installed 2 | FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv 3 | 4 | # Install the project into `/app` 5 | WORKDIR /app 6 | 7 | # Enable bytecode compilation 8 | ENV UV_COMPILE_BYTECODE=1 9 | 10 | # Copy from the cache instead of linking since it's a mounted volume 11 | ENV UV_LINK_MODE=copy 12 | 13 | # Install the project's dependencies using the lockfile and settings 14 | RUN --mount=type=cache,target=/root/.cache/uv \ 15 | --mount=type=bind,source=uv.lock,target=uv.lock \ 16 | --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ 17 | uv 
sync --locked --no-install-project --no-dev --no-editable 18 | 19 | # Then, add the rest of the project source code and install it 20 | # Installing separately from its dependencies allows optimal layer caching 21 | ADD . /app 22 | RUN --mount=type=cache,target=/root/.cache/uv \ 23 | uv sync --locked --no-dev --no-editable 24 | 25 | FROM python:3.12-slim-bookworm 26 | 27 | WORKDIR /app 28 | 29 | COPY --from=uv /root/.local /root/.local 30 | COPY --from=uv --chown=app:app /app/.venv /app/.venv 31 | 32 | # Place executables in the environment at the front of the path 33 | ENV PATH="/app/.venv/bin:$PATH" 34 | 35 | # Set the LOCAL_TIMEZONE environment variable 36 | ENV LOCAL_TIMEZONE=${LOCAL_TIMEZONE:-"UTC"} 37 | 38 | # when running the container, add --local-timezone and a bind mount to the host's db file 39 | ENTRYPOINT ["mcp-server-time", "--local-timezone", "${LOCAL_TIMEZONE}"] 40 | -------------------------------------------------------------------------------- /.github/workflows/claude.yml: -------------------------------------------------------------------------------- 1 | name: Claude Code 2 | 3 | on: 4 | issue_comment: 5 | types: [created] 6 | pull_request_review_comment: 7 | types: [created] 8 | issues: 9 | types: [opened, assigned] 10 | pull_request_review: 11 | types: [submitted] 12 | 13 | jobs: 14 | claude: 15 | if: | 16 | (github.event_name == 'issue_comment' && contains(github.event.comment.body, '@claude')) || 17 | (github.event_name == 'pull_request_review_comment' && contains(github.event.comment.body, '@claude')) || 18 | (github.event_name == 'pull_request_review' && contains(github.event.review.body, '@claude')) || 19 | (github.event_name == 'issues' && (contains(github.event.issue.body, '@claude') || contains(github.event.issue.title, '@claude'))) 20 | runs-on: ubuntu-latest 21 | permissions: 22 | contents: read 23 | pull-requests: read 24 | issues: read 25 | id-token: write 26 | actions: read 27 | steps: 28 | - name: Checkout repository 29 | uses: 
actions/checkout@v4 30 | with: 31 | fetch-depth: 1 32 | 33 | - name: Run Claude Code 34 | id: claude 35 | uses: anthropics/claude-code-action@v1 36 | with: 37 | anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }} 38 | 39 | # Allow Claude to read CI results on PRs 40 | additional_permissions: | 41 | actions: read 42 | 43 | # Trigger when assigned to an issue 44 | assignee_trigger: "claude" 45 | 46 | claude_args: | 47 | --mcp-config .mcp.json 48 | --allowedTools "Bash,mcp__mcp-docs,WebFetch" 49 | --append-system-prompt "If posting a comment to GitHub, give a concise summary of the comment at the top and put all the details in a
block. When working on MCP-related code or reviewing MCP-related changes, use the mcp-docs MCP server to look up the latest protocol documentation. For schema details, reference https://github.com/modelcontextprotocol/modelcontextprotocol/tree/main/schema which contains versioned schemas in JSON (schema.json) and TypeScript (schema.ts) formats." 50 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | ## Description 4 | 5 | ## Publishing Your Server 6 | 7 | **Note: We are no longer accepting PRs to add servers to the README.** Instead, please publish your server to the [MCP Server Registry](https://github.com/modelcontextprotocol/registry) to make it discoverable to the MCP ecosystem. 8 | 9 | To publish your server, follow the [quickstart guide](https://github.com/modelcontextprotocol/registry/blob/main/docs/modelcontextprotocol-io/quickstart.mdx). You can browse published servers at [https://registry.modelcontextprotocol.io/](https://registry.modelcontextprotocol.io/). 10 | 11 | ## Server Details 12 | 13 | - Server: 14 | - Changes to: 15 | 16 | ## Motivation and Context 17 | 18 | 19 | ## How Has This Been Tested? 
20 | 21 | 22 | ## Breaking Changes 23 | 24 | 25 | ## Types of changes 26 | 27 | - [ ] Bug fix (non-breaking change which fixes an issue) 28 | - [ ] New feature (non-breaking change which adds functionality) 29 | - [ ] Breaking change (fix or feature that would cause existing functionality to change) 30 | - [ ] Documentation update 31 | 32 | ## Checklist 33 | 34 | - [ ] I have read the [MCP Protocol Documentation](https://modelcontextprotocol.io) 35 | - [ ] My changes follow MCP security best practices 36 | - [ ] I have updated the server's README accordingly 37 | - [ ] I have tested this with an LLM client 38 | - [ ] My code follows the repository's style guidelines 39 | - [ ] New and existing tests pass locally 40 | - [ ] I have added appropriate error handling 41 | - [ ] I have documented all environment variables and configuration options 42 | 43 | ## Additional context 44 | 45 | -------------------------------------------------------------------------------- /src/everything/sse.ts: -------------------------------------------------------------------------------- 1 | import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js"; 2 | import express from "express"; 3 | import { createServer } from "./everything.js"; 4 | import cors from 'cors'; 5 | 6 | console.error('Starting SSE server...'); 7 | 8 | const app = express(); 9 | app.use(cors({ 10 | "origin": "*", // use "*" with caution in production 11 | "methods": "GET,POST", 12 | "preflightContinue": false, 13 | "optionsSuccessStatus": 204, 14 | })); // Enable CORS for all routes so Inspector can connect 15 | const transports: Map = new Map(); 16 | 17 | app.get("/sse", async (req, res) => { 18 | let transport: SSEServerTransport; 19 | const { server, cleanup, startNotificationIntervals } = createServer(); 20 | 21 | if (req?.query?.sessionId) { 22 | const sessionId = (req?.query?.sessionId as string); 23 | transport = transports.get(sessionId) as SSEServerTransport; 24 | console.error("Client
Reconnecting? This shouldn't happen; when client has a sessionId, GET /sse should not be called again.", transport.sessionId); 25 | } else { 26 | // Create and store transport for new session 27 | transport = new SSEServerTransport("/message", res); 28 | transports.set(transport.sessionId, transport); 29 | 30 | // Connect server to transport 31 | await server.connect(transport); 32 | console.error("Client Connected: ", transport.sessionId); 33 | 34 | // Start notification intervals after client connects 35 | startNotificationIntervals(transport.sessionId); 36 | 37 | // Handle close of connection 38 | server.onclose = async () => { 39 | console.error("Client Disconnected: ", transport.sessionId); 40 | transports.delete(transport.sessionId); 41 | await cleanup(); 42 | }; 43 | 44 | } 45 | 46 | }); 47 | 48 | app.post("/message", async (req, res) => { 49 | const sessionId = (req?.query?.sessionId as string); 50 | const transport = transports.get(sessionId); 51 | if (transport) { 52 | console.error("Client Message from", sessionId); 53 | await transport.handlePostMessage(req, res); 54 | } else { 55 | console.error(`No transport found for sessionId ${sessionId}`) 56 | } 57 | }); 58 | 59 | const PORT = process.env.PORT || 3001; 60 | app.listen(PORT, () => { 61 | console.error(`Server is running on port ${PORT}`); 62 | }); 63 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to MCP Servers 2 | 3 | Thanks for your interest in contributing! Here's how you can help make this repo better. 4 | 5 | We accept changes through [the standard GitHub flow model](https://docs.github.com/en/get-started/using-github/github-flow). 6 | 7 | ## Server Listings 8 | 9 | We are **no longer accepting PRs** to add server links to the README. 
Please publish your server to the [MCP Server Registry](https://github.com/modelcontextprotocol/registry) instead. Follow the [quickstart guide](https://github.com/modelcontextprotocol/registry/blob/main/docs/modelcontextprotocol-io/quickstart.mdx). 10 | 11 | You can browse published servers using the simple UI at [https://registry.modelcontextprotocol.io/](https://registry.modelcontextprotocol.io/). 12 | 13 | ## Server Implementations 14 | 15 | We welcome: 16 | - **Bug fixes** — Help us squash those pesky bugs. 17 | - **Usability improvements** — Making servers easier to use for humans and agents. 18 | - **Enhancements that demonstrate MCP protocol features** — We encourage contributions that help reference servers better illustrate underutilized aspects of the MCP protocol beyond just Tools, such as Resources, Prompts, or Roots. For example, adding Roots support to filesystem-server helps showcase this important but lesser-known feature. 19 | 20 | We're more selective about: 21 | - **Other new features** — Especially if they're not crucial to the server's core purpose or are highly opinionated. The existing servers are reference servers meant to inspire the community. If you need specific features, we encourage you to build enhanced versions and publish them to the [MCP Server Registry](https://github.com/modelcontextprotocol/registry)! We think a diverse ecosystem of servers is beneficial for everyone. 22 | 23 | We don't accept: 24 | - **New server implementations** — We encourage you to publish them to the [MCP Server Registry](https://github.com/modelcontextprotocol/registry) instead. 25 | 26 | ## Testing 27 | 28 | When adding or configuring tests for servers implemented in TypeScript, use **vitest** as the test framework. Vitest provides better ESM support, faster test execution, and a more modern testing experience. 
29 | 30 | ## Documentation 31 | 32 | Improvements to existing documentation are welcome - although generally we'd prefer ergonomic improvements over documenting pain points if possible! 33 | 34 | We're more selective about adding wholly new documentation, especially in ways that aren't vendor neutral (e.g. how to run a particular server with a particular client). 35 | 36 | ## Community 37 | 38 | [Learn how the MCP community communicates](https://modelcontextprotocol.io/community/communication). 39 | 40 | Thank you for helping make MCP servers better for everyone! -------------------------------------------------------------------------------- /src/filesystem/roots-utils.ts: -------------------------------------------------------------------------------- 1 | import { promises as fs, type Stats } from 'fs'; 2 | import path from 'path'; 3 | import os from 'os'; 4 | import { normalizePath } from './path-utils.js'; 5 | import type { Root } from '@modelcontextprotocol/sdk/types.js'; 6 | 7 | /** 8 | * Converts a root URI to a normalized directory path with basic security validation. 9 | * @param rootUri - File URI (file://...) or plain directory path 10 | * @returns Promise resolving to validated path or null if invalid 11 | */ 12 | async function parseRootUri(rootUri: string): Promise { 13 | try { 14 | const rawPath = rootUri.startsWith('file://') ? rootUri.slice(7) : rootUri; 15 | const expandedPath = rawPath.startsWith('~/') || rawPath === '~' 16 | ? path.join(os.homedir(), rawPath.slice(1)) 17 | : rawPath; 18 | const absolutePath = path.resolve(expandedPath); 19 | const resolvedPath = await fs.realpath(absolutePath); 20 | return normalizePath(resolvedPath); 21 | } catch { 22 | return null; // Path doesn't exist or other error 23 | } 24 | } 25 | 26 | /** 27 | * Formats error message for directory validation failures.
28 | * @param dir - Directory path that failed validation 29 | * @param error - Error that occurred during validation 30 | * @param reason - Specific reason for failure 31 | * @returns Formatted error message 32 | */ 33 | function formatDirectoryError(dir: string, error?: unknown, reason?: string): string { 34 | if (reason) { 35 | return `Skipping ${reason}: ${dir}`; 36 | } 37 | const message = error instanceof Error ? error.message : String(error); 38 | return `Skipping invalid directory: ${dir} due to error: ${message}`; 39 | } 40 | 41 | /** 42 | * Resolves requested root directories from MCP root specifications. 43 | * 44 | * Converts root URI specifications (file:// URIs or plain paths) into normalized 45 | * directory paths, validating that each path exists and is a directory. 46 | * Includes symlink resolution for security. 47 | * 48 | * @param requestedRoots - Array of root specifications with URI and optional name 49 | * @returns Promise resolving to array of validated directory paths 50 | */ 51 | export async function getValidRootDirectories( 52 | requestedRoots: readonly Root[] 53 | ): Promise { 54 | const validatedDirectories: string[] = []; 55 | 56 | for (const requestedRoot of requestedRoots) { 57 | const resolvedPath = await parseRootUri(requestedRoot.uri); 58 | if (!resolvedPath) { 59 | console.error(formatDirectoryError(requestedRoot.uri, undefined, 'invalid path or inaccessible')); 60 | continue; 61 | } 62 | 63 | try { 64 | const stats: Stats = await fs.stat(resolvedPath); 65 | if (stats.isDirectory()) { 66 | validatedDirectories.push(resolvedPath); 67 | } else { 68 | console.error(formatDirectoryError(resolvedPath, undefined, 'non-directory root')); 69 | } 70 | } catch (error) { 71 | console.error(formatDirectoryError(resolvedPath, error)); 72 | } 73 | } 74 | 75 | return validatedDirectories; 76 | } -------------------------------------------------------------------------------- /src/filesystem/path-validation.ts: 
-------------------------------------------------------------------------------- 1 | import path from 'path'; 2 | 3 | /** 4 | * Checks if an absolute path is within any of the allowed directories. 5 | * 6 | * @param absolutePath - The absolute path to check (will be normalized) 7 | * @param allowedDirectories - Array of absolute allowed directory paths (will be normalized) 8 | * @returns true if the path is within an allowed directory, false otherwise 9 | * @throws Error if given relative paths after normalization 10 | */ 11 | export function isPathWithinAllowedDirectories(absolutePath: string, allowedDirectories: string[]): boolean { 12 | // Type validation 13 | if (typeof absolutePath !== 'string' || !Array.isArray(allowedDirectories)) { 14 | return false; 15 | } 16 | 17 | // Reject empty inputs 18 | if (!absolutePath || allowedDirectories.length === 0) { 19 | return false; 20 | } 21 | 22 | // Reject null bytes (forbidden in paths) 23 | if (absolutePath.includes('\x00')) { 24 | return false; 25 | } 26 | 27 | // Normalize the input path 28 | let normalizedPath: string; 29 | try { 30 | normalizedPath = path.resolve(path.normalize(absolutePath)); 31 | } catch { 32 | return false; 33 | } 34 | 35 | // Verify it's absolute after normalization 36 | if (!path.isAbsolute(normalizedPath)) { 37 | throw new Error('Path must be absolute after normalization'); 38 | } 39 | 40 | // Check against each allowed directory 41 | return allowedDirectories.some(dir => { 42 | if (typeof dir !== 'string' || !dir) { 43 | return false; 44 | } 45 | 46 | // Reject null bytes in allowed dirs 47 | if (dir.includes('\x00')) { 48 | return false; 49 | } 50 | 51 | // Normalize the allowed directory 52 | let normalizedDir: string; 53 | try { 54 | normalizedDir = path.resolve(path.normalize(dir)); 55 | } catch { 56 | return false; 57 | } 58 | 59 | // Verify allowed directory is absolute after normalization 60 | if (!path.isAbsolute(normalizedDir)) { 61 | throw new Error('Allowed directories must be 
absolute paths after normalization'); 62 | } 63 | 64 | // Check if normalizedPath is within normalizedDir 65 | // Path is inside if it's the same or a subdirectory 66 | if (normalizedPath === normalizedDir) { 67 | return true; 68 | } 69 | 70 | // Special case for root directory to avoid double slash 71 | // On Windows, we need to check if both paths are on the same drive 72 | if (normalizedDir === path.sep) { 73 | return normalizedPath.startsWith(path.sep); 74 | } 75 | 76 | // On Windows, also check for drive root (e.g., "C:\") 77 | if (path.sep === '\\' && normalizedDir.match(/^[A-Za-z]:\\?$/)) { 78 | // Ensure both paths are on the same drive 79 | const dirDrive = normalizedDir.charAt(0).toLowerCase(); 80 | const pathDrive = normalizedPath.charAt(0).toLowerCase(); 81 | return pathDrive === dirDrive && normalizedPath.startsWith(normalizedDir.replace(/\\?$/, '\\')); 82 | } 83 | 84 | return normalizedPath.startsWith(normalizedDir + path.sep); 85 | }); 86 | } 87 | -------------------------------------------------------------------------------- /.github/workflows/typescript.yml: -------------------------------------------------------------------------------- 1 | name: TypeScript 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | release: 9 | types: [published] 10 | 11 | jobs: 12 | detect-packages: 13 | runs-on: ubuntu-latest 14 | outputs: 15 | packages: ${{ steps.find-packages.outputs.packages }} 16 | steps: 17 | - uses: actions/checkout@v4 18 | - name: Find JS packages 19 | id: find-packages 20 | working-directory: src 21 | run: | 22 | PACKAGES=$(find . 
-name package.json -not -path "*/node_modules/*" -exec dirname {} \; | sed 's/^\.\///' | jq -R -s -c 'split("\n")[:-1]') 23 | echo "packages=$PACKAGES" >> $GITHUB_OUTPUT 24 | 25 | test: 26 | needs: [detect-packages] 27 | strategy: 28 | matrix: 29 | package: ${{ fromJson(needs.detect-packages.outputs.packages) }} 30 | name: Test ${{ matrix.package }} 31 | runs-on: ubuntu-latest 32 | steps: 33 | - uses: actions/checkout@v4 34 | 35 | - uses: actions/setup-node@v4 36 | with: 37 | node-version: 22 38 | cache: npm 39 | 40 | - name: Install dependencies 41 | working-directory: src/${{ matrix.package }} 42 | run: npm ci 43 | 44 | - name: Run tests 45 | working-directory: src/${{ matrix.package }} 46 | run: npm test --if-present 47 | 48 | build: 49 | needs: [detect-packages, test] 50 | strategy: 51 | matrix: 52 | package: ${{ fromJson(needs.detect-packages.outputs.packages) }} 53 | name: Build ${{ matrix.package }} 54 | runs-on: ubuntu-latest 55 | steps: 56 | - uses: actions/checkout@v4 57 | 58 | - uses: actions/setup-node@v4 59 | with: 60 | node-version: 22 61 | cache: npm 62 | 63 | - name: Install dependencies 64 | working-directory: src/${{ matrix.package }} 65 | run: npm ci 66 | 67 | - name: Build package 68 | working-directory: src/${{ matrix.package }} 69 | run: npm run build 70 | 71 | publish: 72 | runs-on: ubuntu-latest 73 | needs: [build, detect-packages] 74 | if: github.event_name == 'release' 75 | environment: release 76 | 77 | strategy: 78 | matrix: 79 | package: ${{ fromJson(needs.detect-packages.outputs.packages) }} 80 | name: Publish ${{ matrix.package }} 81 | 82 | permissions: 83 | contents: read 84 | id-token: write 85 | 86 | steps: 87 | - uses: actions/checkout@v4 88 | - uses: actions/setup-node@v4 89 | with: 90 | node-version: 22 91 | cache: npm 92 | registry-url: "https://registry.npmjs.org" 93 | 94 | - name: Install dependencies 95 | working-directory: src/${{ matrix.package }} 96 | run: npm ci 97 | 98 | - name: Publish package 99 | working-directory: 
src/${{ matrix.package }} 100 | run: npm publish --access public 101 | env: 102 | NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} 103 | -------------------------------------------------------------------------------- /src/filesystem/__tests__/roots-utils.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, it, expect, beforeEach, afterEach } from 'vitest'; 2 | import { getValidRootDirectories } from '../roots-utils.js'; 3 | import { mkdtempSync, rmSync, mkdirSync, writeFileSync, realpathSync } from 'fs'; 4 | import { tmpdir } from 'os'; 5 | import { join } from 'path'; 6 | import type { Root } from '@modelcontextprotocol/sdk/types.js'; 7 | 8 | describe('getValidRootDirectories', () => { 9 | let testDir1: string; 10 | let testDir2: string; 11 | let testDir3: string; 12 | let testFile: string; 13 | 14 | beforeEach(() => { 15 | // Create test directories 16 | testDir1 = realpathSync(mkdtempSync(join(tmpdir(), 'mcp-roots-test1-'))); 17 | testDir2 = realpathSync(mkdtempSync(join(tmpdir(), 'mcp-roots-test2-'))); 18 | testDir3 = realpathSync(mkdtempSync(join(tmpdir(), 'mcp-roots-test3-'))); 19 | 20 | // Create a test file (not a directory) 21 | testFile = join(testDir1, 'test-file.txt'); 22 | writeFileSync(testFile, 'test content'); 23 | }); 24 | 25 | afterEach(() => { 26 | // Cleanup 27 | rmSync(testDir1, { recursive: true, force: true }); 28 | rmSync(testDir2, { recursive: true, force: true }); 29 | rmSync(testDir3, { recursive: true, force: true }); 30 | }); 31 | 32 | describe('valid directory processing', () => { 33 | it('should process all URI formats and edge cases', async () => { 34 | const roots = [ 35 | { uri: `file://${testDir1}`, name: 'File URI' }, 36 | { uri: testDir2, name: 'Plain path' }, 37 | { uri: testDir3 } // Plain path without name property 38 | ]; 39 | 40 | const result = await getValidRootDirectories(roots); 41 | 42 | expect(result).toContain(testDir1); 43 | expect(result).toContain(testDir2); 44 
| expect(result).toContain(testDir3); 45 | expect(result).toHaveLength(3); 46 | }); 47 | 48 | it('should normalize complex paths', async () => { 49 | const subDir = join(testDir1, 'subdir'); 50 | mkdirSync(subDir); 51 | 52 | const roots = [ 53 | { uri: `file://${testDir1}/./subdir/../subdir`, name: 'Complex Path' } 54 | ]; 55 | 56 | const result = await getValidRootDirectories(roots); 57 | 58 | expect(result).toHaveLength(1); 59 | expect(result[0]).toBe(subDir); 60 | }); 61 | }); 62 | 63 | describe('error handling', () => { 64 | 65 | it('should handle various error types', async () => { 66 | const nonExistentDir = join(tmpdir(), 'non-existent-directory-12345'); 67 | const invalidPath = '\0invalid\0path'; // Null bytes cause different error types 68 | const roots = [ 69 | { uri: `file://${testDir1}`, name: 'Valid Dir' }, 70 | { uri: `file://${nonExistentDir}`, name: 'Non-existent Dir' }, 71 | { uri: `file://${testFile}`, name: 'File Not Dir' }, 72 | { uri: `file://${invalidPath}`, name: 'Invalid Path' } 73 | ]; 74 | 75 | const result = await getValidRootDirectories(roots); 76 | 77 | expect(result).toContain(testDir1); 78 | expect(result).not.toContain(nonExistentDir); 79 | expect(result).not.toContain(testFile); 80 | expect(result).not.toContain(invalidPath); 81 | expect(result).toHaveLength(1); 82 | }); 83 | }); 84 | }); -------------------------------------------------------------------------------- /src/sequentialthinking/lib.ts: -------------------------------------------------------------------------------- 1 | import chalk from 'chalk'; 2 | 3 | export interface ThoughtData { 4 | thought: string; 5 | thoughtNumber: number; 6 | totalThoughts: number; 7 | isRevision?: boolean; 8 | revisesThought?: number; 9 | branchFromThought?: number; 10 | branchId?: string; 11 | needsMoreThoughts?: boolean; 12 | nextThoughtNeeded: boolean; 13 | } 14 | 15 | export class SequentialThinkingServer { 16 | private thoughtHistory: ThoughtData[] = []; 17 | private branches: Record = 
{}; 18 | private disableThoughtLogging: boolean; 19 | 20 | constructor() { 21 | this.disableThoughtLogging = (process.env.DISABLE_THOUGHT_LOGGING || "").toLowerCase() === "true"; 22 | } 23 | 24 | private formatThought(thoughtData: ThoughtData): string { 25 | const { thoughtNumber, totalThoughts, thought, isRevision, revisesThought, branchFromThought, branchId } = thoughtData; 26 | 27 | let prefix = ''; 28 | let context = ''; 29 | 30 | if (isRevision) { 31 | prefix = chalk.yellow('🔄 Revision'); 32 | context = ` (revising thought ${revisesThought})`; 33 | } else if (branchFromThought) { 34 | prefix = chalk.green('🌿 Branch'); 35 | context = ` (from thought ${branchFromThought}, ID: ${branchId})`; 36 | } else { 37 | prefix = chalk.blue('💭 Thought'); 38 | context = ''; 39 | } 40 | 41 | const header = `${prefix} ${thoughtNumber}/${totalThoughts}${context}`; 42 | const border = '─'.repeat(Math.max(header.length, thought.length) + 4); 43 | 44 | return ` 45 | ┌${border}┐ 46 | │ ${header} │ 47 | ├${border}┤ 48 | │ ${thought.padEnd(border.length - 2)} │ 49 | └${border}┘`; 50 | } 51 | 52 | public processThought(input: ThoughtData): { content: Array<{ type: "text"; text: string }>; isError?: boolean } { 53 | try { 54 | // Validation happens at the tool registration layer via Zod 55 | // Adjust totalThoughts if thoughtNumber exceeds it 56 | if (input.thoughtNumber > input.totalThoughts) { 57 | input.totalThoughts = input.thoughtNumber; 58 | } 59 | 60 | this.thoughtHistory.push(input); 61 | 62 | if (input.branchFromThought && input.branchId) { 63 | if (!this.branches[input.branchId]) { 64 | this.branches[input.branchId] = []; 65 | } 66 | this.branches[input.branchId].push(input); 67 | } 68 | 69 | if (!this.disableThoughtLogging) { 70 | const formattedThought = this.formatThought(input); 71 | console.error(formattedThought); 72 | } 73 | 74 | return { 75 | content: [{ 76 | type: "text" as const, 77 | text: JSON.stringify({ 78 | thoughtNumber: input.thoughtNumber, 79 | 
totalThoughts: input.totalThoughts, 80 | nextThoughtNeeded: input.nextThoughtNeeded, 81 | branches: Object.keys(this.branches), 82 | thoughtHistoryLength: this.thoughtHistory.length 83 | }, null, 2) 84 | }] 85 | }; 86 | } catch (error) { 87 | return { 88 | content: [{ 89 | type: "text" as const, 90 | text: JSON.stringify({ 91 | error: error instanceof Error ? error.message : String(error), 92 | status: 'failed' 93 | }, null, 2) 94 | }], 95 | isError: true 96 | }; 97 | } 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /.github/workflows/python.yml: -------------------------------------------------------------------------------- 1 | name: Python 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | release: 9 | types: [published] 10 | 11 | jobs: 12 | detect-packages: 13 | runs-on: ubuntu-latest 14 | outputs: 15 | packages: ${{ steps.find-packages.outputs.packages }} 16 | steps: 17 | - uses: actions/checkout@v4 18 | 19 | - name: Find Python packages 20 | id: find-packages 21 | working-directory: src 22 | run: | 23 | PACKAGES=$(find . 
-name pyproject.toml -exec dirname {} \; | sed 's/^\.\///' | jq -R -s -c 'split("\n")[:-1]') 24 | echo "packages=$PACKAGES" >> $GITHUB_OUTPUT 25 | 26 | test: 27 | needs: [detect-packages] 28 | strategy: 29 | matrix: 30 | package: ${{ fromJson(needs.detect-packages.outputs.packages) }} 31 | name: Test ${{ matrix.package }} 32 | runs-on: ubuntu-latest 33 | steps: 34 | - uses: actions/checkout@v4 35 | 36 | - name: Install uv 37 | uses: astral-sh/setup-uv@v3 38 | 39 | - name: Set up Python 40 | uses: actions/setup-python@v5 41 | with: 42 | python-version-file: "src/${{ matrix.package }}/.python-version" 43 | 44 | - name: Install dependencies 45 | working-directory: src/${{ matrix.package }} 46 | run: uv sync --frozen --all-extras --dev 47 | 48 | - name: Check if tests exist 49 | id: check-tests 50 | working-directory: src/${{ matrix.package }} 51 | run: | 52 | if [ -d "tests" ] || [ -d "test" ] || grep -q "pytest" pyproject.toml; then 53 | echo "has-tests=true" >> $GITHUB_OUTPUT 54 | else 55 | echo "has-tests=false" >> $GITHUB_OUTPUT 56 | fi 57 | 58 | - name: Run tests 59 | if: steps.check-tests.outputs.has-tests == 'true' 60 | working-directory: src/${{ matrix.package }} 61 | run: uv run pytest 62 | 63 | build: 64 | needs: [detect-packages, test] 65 | strategy: 66 | matrix: 67 | package: ${{ fromJson(needs.detect-packages.outputs.packages) }} 68 | name: Build ${{ matrix.package }} 69 | runs-on: ubuntu-latest 70 | steps: 71 | - uses: actions/checkout@v4 72 | 73 | - name: Install uv 74 | uses: astral-sh/setup-uv@v3 75 | 76 | - name: Set up Python 77 | uses: actions/setup-python@v5 78 | with: 79 | python-version-file: "src/${{ matrix.package }}/.python-version" 80 | 81 | - name: Install dependencies 82 | working-directory: src/${{ matrix.package }} 83 | run: uv sync --locked --all-extras --dev 84 | 85 | - name: Run pyright 86 | working-directory: src/${{ matrix.package }} 87 | run: uv run --frozen pyright 88 | 89 | - name: Build package 90 | working-directory: src/${{ 
matrix.package }} 91 | run: uv build 92 | 93 | - name: Upload artifacts 94 | uses: actions/upload-artifact@v4 95 | with: 96 | name: dist-${{ matrix.package }} 97 | path: src/${{ matrix.package }}/dist/ 98 | 99 | publish: 100 | runs-on: ubuntu-latest 101 | needs: [build, detect-packages] 102 | if: github.event_name == 'release' 103 | 104 | strategy: 105 | matrix: 106 | package: ${{ fromJson(needs.detect-packages.outputs.packages) }} 107 | name: Publish ${{ matrix.package }} 108 | 109 | environment: release 110 | permissions: 111 | id-token: write # Required for trusted publishing 112 | 113 | steps: 114 | - name: Download artifacts 115 | uses: actions/download-artifact@v4 116 | with: 117 | name: dist-${{ matrix.package }} 118 | path: dist/ 119 | 120 | - name: Publish package to PyPI 121 | uses: pypa/gh-action-pypi-publish@release/v1 122 | -------------------------------------------------------------------------------- /src/filesystem/path-utils.ts: -------------------------------------------------------------------------------- 1 | import path from "path"; 2 | import os from 'os'; 3 | 4 | /** 5 | * Converts WSL or Unix-style Windows paths to Windows format 6 | * @param p The path to convert 7 | * @returns Converted Windows path 8 | */ 9 | export function convertToWindowsPath(p: string): string { 10 | // Handle WSL paths (/mnt/c/...) 11 | // NEVER convert WSL paths - they are valid Linux paths that work with Node.js fs operations in WSL 12 | // Converting them to Windows format (C:\...) breaks fs operations inside WSL 13 | if (p.startsWith('/mnt/')) { 14 | return p; // Leave WSL paths unchanged 15 | } 16 | 17 | // Handle Unix-style Windows paths (/c/...) 
/**
 * Normalizes a path by standardizing its format while preserving
 * OS-specific behavior.
 *
 * Unix and WSL paths are normalized in place (slash collapsing only);
 * Windows and Unix-style Windows paths are converted to backslash form with
 * a capitalized drive letter; UNC prefixes (\\server\share) are preserved
 * through Node's path.normalize.
 *
 * @param p The path to normalize
 * @returns Normalized path
 */
export function normalizePath(p: string): string {
  // Remove any surrounding quotes and whitespace (e.g. from copy-pasted or
  // shell-quoted input).
  p = p.trim().replace(/^["']|["']$/g, '');

  // Decide whether this is a Unix path that must NOT be converted:
  // WSL paths (/mnt/...) always work with Node.js fs inside WSL, and on
  // non-Windows platforms every absolute path is a genuine Unix path.
  const isUnixPath = p.startsWith('/') && (
    // Always preserve WSL paths (/mnt/c/, /mnt/d/, etc.)
    p.match(/^\/mnt\/[a-z]\//i) ||
    // On non-Windows platforms, treat all absolute paths as Unix paths
    (process.platform !== 'win32') ||
    // On Windows, preserve Unix paths that aren't Unix-style Windows paths (/c/, /d/, etc.)
    (process.platform === 'win32' && !p.match(/^\/[a-zA-Z]\//))
  );

  if (isUnixPath) {
    // For Unix paths, just normalize without converting to Windows format:
    // collapse repeated slashes and strip any trailing slash.
    return p.replace(/\/+/g, '/').replace(/\/+$/, '');
  }

  // Convert Unix-style Windows paths (/c/, /d/) to Windows format if on
  // Windows; convertToWindowsPath leaves /mnt/ paths unchanged.
  p = convertToWindowsPath(p);

  // Handle doubled backslashes while preserving a leading UNC \\ prefix.
  if (p.startsWith('\\\\')) {
    // For UNC paths, first normalize any excessive leading backslashes to
    // exactly \\, then collapse doubled backslashes in the remainder.
    let uncPath = p;
    // Replace multiple leading backslashes with exactly two
    uncPath = uncPath.replace(/^\\{2,}/, '\\\\');
    // Now normalize any remaining double backslashes in the rest of the path
    const restOfPath = uncPath.substring(2).replace(/\\\\/g, '\\');
    p = '\\\\' + restOfPath;
  } else {
    // For non-UNC paths, normalize all double backslashes
    p = p.replace(/\\\\/g, '\\');
  }

  // Use Node's path normalization, which resolves . and .. segments.
  let normalized = path.normalize(p);

  // Fix UNC paths after normalization (path.normalize can remove a leading
  // backslash, turning \\server into \server).
  if (p.startsWith('\\\\') && !normalized.startsWith('\\\\')) {
    normalized = '\\' + normalized;
  }

  // Handle Windows drive-letter paths: convert slashes and ensure the drive
  // letter is capitalized for a canonical form.
  if (normalized.match(/^[a-zA-Z]:/)) {
    let result = normalized.replace(/\//g, '\\');
    // Capitalize drive letter if present
    if (/^[a-z]:/.test(result)) {
      result = result.charAt(0).toUpperCase() + result.slice(1);
    }
    return result;
  }

  // On Windows, convert forward slashes to backslashes for relative paths;
  // on Linux/Unix, preserve forward slashes.
  if (process.platform === 'win32') {
    return normalized.replace(/\//g, '\\');
  }

  // On non-Windows platforms, keep the normalized path as-is.
  return normalized;
}

/**
 * Expands a home-directory tilde prefix in a path.
 *
 * Only the POSIX-style "~/" prefix (or a bare "~") is expanded; forms like
 * "~user" are returned unchanged — presumably intentional, confirm if
 * multi-user expansion is ever needed.
 *
 * @param filepath The path to expand
 * @returns Expanded path (unchanged when there is no tilde prefix)
 */
export function expandHome(filepath: string): string {
  if (filepath.startsWith('~/') || filepath === '~') {
    return path.join(os.homedir(), filepath.slice(1));
  }
  return filepath;
}
// Register the single "sequentialthinking" tool. The long description doubles
// as usage guidance for the calling model; input and output shapes are
// declared with zod so the SDK can validate arguments and structured results.
server.registerTool(
  "sequentialthinking",
  {
    title: "Sequential Thinking",
    description: `A detailed tool for dynamic and reflective problem-solving through thoughts.
This tool helps analyze problems through a flexible thinking process that can adapt and evolve.
Each thought can build on, question, or revise previous insights as understanding deepens.

When to use this tool:
- Breaking down complex problems into steps
- Planning and design with room for revision
- Analysis that might need course correction
- Problems where the full scope might not be clear initially
- Problems that require a multi-step solution
- Tasks that need to maintain context over multiple steps
- Situations where irrelevant information needs to be filtered out

Key features:
- You can adjust total_thoughts up or down as you progress
- You can question or revise previous thoughts
- You can add more thoughts even after reaching what seemed like the end
- You can express uncertainty and explore alternative approaches
- Not every thought needs to build linearly - you can branch or backtrack
- Generates a solution hypothesis
- Verifies the hypothesis based on the Chain of Thought steps
- Repeats the process until satisfied
- Provides a correct answer

Parameters explained:
- thought: Your current thinking step, which can include:
* Regular analytical steps
* Revisions of previous thoughts
* Questions about previous decisions
* Realizations about needing more analysis
* Changes in approach
* Hypothesis generation
* Hypothesis verification
- nextThoughtNeeded: True if you need more thinking, even if at what seemed like the end
- thoughtNumber: Current number in sequence (can go beyond initial total if needed)
- totalThoughts: Current estimate of thoughts needed (can be adjusted up/down)
- isRevision: A boolean indicating if this thought revises previous thinking
- revisesThought: If is_revision is true, which thought number is being reconsidered
- branchFromThought: If branching, which thought number is the branching point
- branchId: Identifier for the current branch (if any)
- needsMoreThoughts: If reaching end but realizing more thoughts needed

You should:
1. Start with an initial estimate of needed thoughts, but be ready to adjust
2. Feel free to question or revise previous thoughts
3. Don't hesitate to add more thoughts if needed, even at the "end"
4. Express uncertainty when present
5. Mark thoughts that revise previous thinking or branch into new paths
6. Ignore information that is irrelevant to the current step
7. Generate a solution hypothesis when appropriate
8. Verify the hypothesis based on the Chain of Thought steps
9. Repeat the process until satisfied with the solution
10. Provide a single, ideally correct answer as the final output
11. Only set nextThoughtNeeded to false when truly done and a satisfactory answer is reached`,
    inputSchema: {
      thought: z.string().describe("Your current thinking step"),
      nextThoughtNeeded: z.boolean().describe("Whether another thought step is needed"),
      thoughtNumber: z.number().int().min(1).describe("Current thought number (numeric value, e.g., 1, 2, 3)"),
      totalThoughts: z.number().int().min(1).describe("Estimated total thoughts needed (numeric value, e.g., 5, 10)"),
      isRevision: z.boolean().optional().describe("Whether this revises previous thinking"),
      revisesThought: z.number().int().min(1).optional().describe("Which thought is being reconsidered"),
      branchFromThought: z.number().int().min(1).optional().describe("Branching point thought number"),
      branchId: z.string().optional().describe("Branch identifier"),
      needsMoreThoughts: z.boolean().optional().describe("If more thoughts are needed")
    },
    outputSchema: {
      thoughtNumber: z.number(),
      totalThoughts: z.number(),
      nextThoughtNeeded: z.boolean(),
      branches: z.array(z.string()),
      thoughtHistoryLength: z.number()
    },
  },
  // Handler: delegates all state tracking to SequentialThinkingServer.
  // processThought returns its payload as a JSON string in a text content
  // block; on success that string is re-parsed so it can also be surfaced as
  // structuredContent matching the declared outputSchema. Error results are
  // returned unchanged (no structuredContent on the isError path).
  async (args) => {
    const result = thinkingServer.processThought(args);

    if (result.isError) {
      return result;
    }

    // Parse the JSON response to get structured content
    const parsedContent = JSON.parse(result.content[0].text);

    return {
      content: result.content,
      structuredContent: parsedContent
    };
  }
);

// Connect the server over stdio; logging goes to stderr so stdout stays a
// clean protocol channel.
async function runServer() {
  const transport = new StdioServerTransport();
  await server.connect(transport);
  console.error("Sequential Thinking MCP Server running on stdio");
}

runServer().catch((error) => {
  console.error("Fatal error running server:", error);
  process.exit(1);
});
/CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | We as members, contributors, and leaders pledge to make participation in our 6 | community a harassment-free experience for everyone, regardless of age, body 7 | size, visible or invisible disability, ethnicity, sex characteristics, gender 8 | identity and expression, level of experience, education, socio-economic status, 9 | nationality, personal appearance, race, religion, or sexual identity 10 | and orientation. 11 | 12 | We pledge to act and interact in ways that contribute to an open, welcoming, 13 | diverse, inclusive, and healthy community. 14 | 15 | ## Our Standards 16 | 17 | Examples of behavior that contributes to a positive environment for our 18 | community include: 19 | 20 | * Demonstrating empathy and kindness toward other people 21 | * Being respectful of differing opinions, viewpoints, and experiences 22 | * Giving and gracefully accepting constructive feedback 23 | * Accepting responsibility and apologizing to those affected by our mistakes, 24 | and learning from the experience 25 | * Focusing on what is best not just for us as individuals, but for the 26 | overall community 27 | 28 | Examples of unacceptable behavior include: 29 | 30 | * The use of sexualized language or imagery, and sexual attention or 31 | advances of any kind 32 | * Trolling, insulting or derogatory comments, and personal or political attacks 33 | * Public or private harassment 34 | * Publishing others' private information, such as a physical or email 35 | address, without their explicit permission 36 | * Other conduct which could reasonably be considered inappropriate in a 37 | professional setting 38 | 39 | ## Enforcement Responsibilities 40 | 41 | Community leaders are responsible for clarifying and enforcing our standards of 42 | acceptable behavior and will take appropriate and fair corrective action 
in 43 | response to any behavior that they deem inappropriate, threatening, offensive, 44 | or harmful. 45 | 46 | Community leaders have the right and responsibility to remove, edit, or reject 47 | comments, commits, code, wiki edits, issues, and other contributions that are 48 | not aligned to this Code of Conduct, and will communicate reasons for moderation 49 | decisions when appropriate. 50 | 51 | ## Scope 52 | 53 | This Code of Conduct applies within all community spaces, and also applies when 54 | an individual is officially representing the community in public spaces. 55 | Examples of representing our community include using an official e-mail address, 56 | posting via an official social media account, or acting as an appointed 57 | representative at an online or offline event. 58 | 59 | ## Enforcement 60 | 61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 62 | reported to the community leaders responsible for enforcement at 63 | mcp-coc@anthropic.com. 64 | All complaints will be reviewed and investigated promptly and fairly. 65 | 66 | All community leaders are obligated to respect the privacy and security of the 67 | reporter of any incident. 68 | 69 | ## Enforcement Guidelines 70 | 71 | Community leaders will follow these Community Impact Guidelines in determining 72 | the consequences for any action they deem in violation of this Code of Conduct: 73 | 74 | ### 1. Correction 75 | 76 | **Community Impact**: Use of inappropriate language or other behavior deemed 77 | unprofessional or unwelcome in the community. 78 | 79 | **Consequence**: A private, written warning from community leaders, providing 80 | clarity around the nature of the violation and an explanation of why the 81 | behavior was inappropriate. A public apology may be requested. 82 | 83 | ### 2. Warning 84 | 85 | **Community Impact**: A violation through a single incident or series 86 | of actions. 
87 | 88 | **Consequence**: A warning with consequences for continued behavior. No 89 | interaction with the people involved, including unsolicited interaction with 90 | those enforcing the Code of Conduct, for a specified period of time. This 91 | includes avoiding interactions in community spaces as well as external channels 92 | like social media. Violating these terms may lead to a temporary or 93 | permanent ban. 94 | 95 | ### 3. Temporary Ban 96 | 97 | **Community Impact**: A serious violation of community standards, including 98 | sustained inappropriate behavior. 99 | 100 | **Consequence**: A temporary ban from any sort of interaction or public 101 | communication with the community for a specified period of time. No public or 102 | private interaction with the people involved, including unsolicited interaction 103 | with those enforcing the Code of Conduct, is allowed during this period. 104 | Violating these terms may lead to a permanent ban. 105 | 106 | ### 4. Permanent Ban 107 | 108 | **Community Impact**: Demonstrating a pattern of violation of community 109 | standards, including sustained inappropriate behavior, harassment of an 110 | individual, or aggression toward or disparagement of classes of individuals. 111 | 112 | **Consequence**: A permanent ban from any sort of public interaction within 113 | the community. 114 | 115 | ## Attribution 116 | 117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 118 | version 2.0, available at 119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. 120 | 121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct 122 | enforcement ladder](https://github.com/mozilla/diversity). 123 | 124 | [homepage]: https://www.contributor-covenant.org 125 | 126 | For answers to common questions about this code of conduct, see the FAQ at 127 | https://www.contributor-covenant.org/faq. 
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
import { promises as fs } from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
import { ensureMemoryFilePath, defaultMemoryPath } from '../index.js';

// Tests for ensureMemoryFilePath: resolution of the memory file location from
// the MEMORY_FILE_PATH environment variable, plus the one-time migration of a
// legacy memory.json file to memory.jsonl.
describe('ensureMemoryFilePath', () => {
  const testDir = path.dirname(fileURLToPath(import.meta.url));
  // Data files live one level above __tests__, in the package root.
  const oldMemoryPath = path.join(testDir, '..', 'memory.json');
  const newMemoryPath = path.join(testDir, '..', 'memory.jsonl');

  let originalEnv: string | undefined;

  beforeEach(() => {
    // Save original environment variable
    originalEnv = process.env.MEMORY_FILE_PATH;
    // Delete environment variable so each test starts from a clean slate
    delete process.env.MEMORY_FILE_PATH;
  });

  afterEach(async () => {
    // Restore original environment variable
    if (originalEnv !== undefined) {
      process.env.MEMORY_FILE_PATH = originalEnv;
    } else {
      delete process.env.MEMORY_FILE_PATH;
    }

    // Clean up test files
    try {
      await fs.unlink(oldMemoryPath);
    } catch {
      // Ignore if file doesn't exist
    }
    try {
      await fs.unlink(newMemoryPath);
    } catch {
      // Ignore if file doesn't exist
    }
  });

  describe('with MEMORY_FILE_PATH environment variable', () => {
    it('should return absolute path when MEMORY_FILE_PATH is absolute', async () => {
      const absolutePath = '/tmp/custom-memory.jsonl';
      process.env.MEMORY_FILE_PATH = absolutePath;

      const result = await ensureMemoryFilePath();

      expect(result).toBe(absolutePath);
    });

    it('should convert relative path to absolute when MEMORY_FILE_PATH is relative', async () => {
      const relativePath = 'custom-memory.jsonl';
      process.env.MEMORY_FILE_PATH = relativePath;

      const result = await ensureMemoryFilePath();

      expect(path.isAbsolute(result)).toBe(true);
      expect(result).toContain('custom-memory.jsonl');
    });

    it('should handle Windows absolute paths', async () => {
      const windowsPath = 'C:\\temp\\memory.jsonl';
      process.env.MEMORY_FILE_PATH = windowsPath;

      const result = await ensureMemoryFilePath();

      // On Windows, should return as-is; on Unix, will be treated as relative
      // (path.isAbsolute does not recognize drive letters on POSIX).
      if (process.platform === 'win32') {
        expect(result).toBe(windowsPath);
      } else {
        expect(path.isAbsolute(result)).toBe(true);
      }
    });
  });

  describe('without MEMORY_FILE_PATH environment variable', () => {
    it('should return default path when no files exist', async () => {
      const result = await ensureMemoryFilePath();

      expect(result).toBe(defaultMemoryPath);
    });

    it('should migrate from memory.json to memory.jsonl when only old file exists', async () => {
      // Create old memory.json file
      await fs.writeFile(oldMemoryPath, '{"test":"data"}');

      // Migration progress is reported via console.error; silence and capture it.
      const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});

      const result = await ensureMemoryFilePath();

      expect(result).toBe(defaultMemoryPath);

      // Verify migration happened: new file created, old file removed.
      const newFileExists = await fs.access(newMemoryPath).then(() => true).catch(() => false);
      const oldFileExists = await fs.access(oldMemoryPath).then(() => true).catch(() => false);

      expect(newFileExists).toBe(true);
      expect(oldFileExists).toBe(false);

      // Verify console messages
      expect(consoleErrorSpy).toHaveBeenCalledWith(
        expect.stringContaining('DETECTED: Found legacy memory.json file')
      );
      expect(consoleErrorSpy).toHaveBeenCalledWith(
        expect.stringContaining('COMPLETED: Successfully migrated')
      );

      consoleErrorSpy.mockRestore();
    });

    it('should use new file when both old and new files exist', async () => {
      // Create both files
      await fs.writeFile(oldMemoryPath, '{"old":"data"}');
      await fs.writeFile(newMemoryPath, '{"new":"data"}');

      const consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {});

      const result = await ensureMemoryFilePath();

      expect(result).toBe(defaultMemoryPath);

      // Verify no migration happened (both files should still exist)
      const newFileExists = await fs.access(newMemoryPath).then(() => true).catch(() => false);
      const oldFileExists = await fs.access(oldMemoryPath).then(() => true).catch(() => false);

      expect(newFileExists).toBe(true);
      expect(oldFileExists).toBe(true);

      // Verify no console messages about migration
      expect(consoleErrorSpy).not.toHaveBeenCalled();

      consoleErrorSpy.mockRestore();
    });

    it('should preserve file content during migration', async () => {
      const testContent = '{"entities": [{"name": "test", "type": "person"}]}';
      await fs.writeFile(oldMemoryPath, testContent);

      await ensureMemoryFilePath();

      // Migration must copy bytes verbatim, not re-serialize them.
      const migratedContent = await fs.readFile(newMemoryPath, 'utf-8');
      expect(migratedContent).toBe(testContent);
    });
  });

  describe('defaultMemoryPath', () => {
    it('should end with memory.jsonl', () => {
      expect(defaultMemoryPath).toMatch(/memory\.jsonl$/);
    });

    it('should be an absolute path', () => {
      expect(path.isAbsolute(defaultMemoryPath)).toBe(true);
    });
  });
});
Sequential Thinking MCP Server 2 | 3 | An MCP server implementation that provides a tool for dynamic and reflective problem-solving through a structured thinking process. 4 | 5 | ## Features 6 | 7 | - Break down complex problems into manageable steps 8 | - Revise and refine thoughts as understanding deepens 9 | - Branch into alternative paths of reasoning 10 | - Adjust the total number of thoughts dynamically 11 | - Generate and verify solution hypotheses 12 | 13 | ## Tool 14 | 15 | ### sequentialthinking 16 | 17 | Facilitates a detailed, step-by-step thinking process for problem-solving and analysis. 18 | 19 | **Inputs:** 20 | - `thought` (string): The current thinking step 21 | - `nextThoughtNeeded` (boolean): Whether another thought step is needed 22 | - `thoughtNumber` (integer): Current thought number 23 | - `totalThoughts` (integer): Estimated total thoughts needed 24 | - `isRevision` (boolean, optional): Whether this revises previous thinking 25 | - `revisesThought` (integer, optional): Which thought is being reconsidered 26 | - `branchFromThought` (integer, optional): Branching point thought number 27 | - `branchId` (string, optional): Branch identifier 28 | - `needsMoreThoughts` (boolean, optional): If more thoughts are needed 29 | 30 | ## Usage 31 | 32 | The Sequential Thinking tool is designed for: 33 | - Breaking down complex problems into steps 34 | - Planning and design with room for revision 35 | - Analysis that might need course correction 36 | - Problems where the full scope might not be clear initially 37 | - Tasks that need to maintain context over multiple steps 38 | - Situations where irrelevant information needs to be filtered out 39 | 40 | ## Configuration 41 | 42 | ### Usage with Claude Desktop 43 | 44 | Add this to your `claude_desktop_config.json`: 45 | 46 | #### npx 47 | 48 | ```json 49 | { 50 | "mcpServers": { 51 | "sequential-thinking": { 52 | "command": "npx", 53 | "args": [ 54 | "-y", 55 | 
"@modelcontextprotocol/server-sequential-thinking" 56 | ] 57 | } 58 | } 59 | } 60 | ``` 61 | 62 | #### docker 63 | 64 | ```json 65 | { 66 | "mcpServers": { 67 | "sequentialthinking": { 68 | "command": "docker", 69 | "args": [ 70 | "run", 71 | "--rm", 72 | "-i", 73 | "mcp/sequentialthinking" 74 | ] 75 | } 76 | } 77 | } 78 | ``` 79 | 80 | To disable logging of thought information set env var: `DISABLE_THOUGHT_LOGGING` to `true`. 81 | Comment 82 | 83 | ### Usage with VS Code 84 | 85 | For quick installation, click one of the installation buttons below... 86 | 87 | [![Install with NPX in VS Code](https://img.shields.io/badge/VS_Code-NPM-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sequentialthinking&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-sequential-thinking%22%5D%7D) [![Install with NPX in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-NPM-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sequentialthinking&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-sequential-thinking%22%5D%7D&quality=insiders) 88 | 89 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sequentialthinking&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22--rm%22%2C%22-i%22%2C%22mcp%2Fsequentialthinking%22%5D%7D) [![Install with Docker in VS Code 
Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sequentialthinking&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22--rm%22%2C%22-i%22%2C%22mcp%2Fsequentialthinking%22%5D%7D&quality=insiders) 90 | 91 | For manual installation, you can configure the MCP server using one of these methods: 92 | 93 | **Method 1: User Configuration (Recommended)** 94 | Add the configuration to your user-level MCP configuration file. Open the Command Palette (`Ctrl + Shift + P`) and run `MCP: Open User Configuration`. This will open your user `mcp.json` file where you can add the server configuration. 95 | 96 | **Method 2: Workspace Configuration** 97 | Alternatively, you can add the configuration to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 98 | 99 | > For more details about MCP configuration in VS Code, see the [official VS Code MCP documentation](https://code.visualstudio.com/docs/copilot/customization/mcp-servers). 
100 | 101 | For NPX installation: 102 | 103 | ```json 104 | { 105 | "servers": { 106 | "sequential-thinking": { 107 | "command": "npx", 108 | "args": [ 109 | "-y", 110 | "@modelcontextprotocol/server-sequential-thinking" 111 | ] 112 | } 113 | } 114 | } 115 | ``` 116 | 117 | For Docker installation: 118 | 119 | ```json 120 | { 121 | "servers": { 122 | "sequential-thinking": { 123 | "command": "docker", 124 | "args": [ 125 | "run", 126 | "--rm", 127 | "-i", 128 | "mcp/sequentialthinking" 129 | ] 130 | } 131 | } 132 | } 133 | ``` 134 | 135 | ### Usage with Codex CLI 136 | 137 | Run the following: 138 | 139 | #### npx 140 | 141 | ```bash 142 | codex mcp add sequential-thinking npx -y @modelcontextprotocol/server-sequential-thinking 143 | ``` 144 | 145 | ## Building 146 | 147 | Docker: 148 | 149 | ```bash 150 | docker build -t mcp/sequentialthinking -f src/sequentialthinking/Dockerfile . 151 | ``` 152 | 153 | ## License 154 | 155 | This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository. 156 | -------------------------------------------------------------------------------- /src/filesystem/__tests__/structured-content.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, it, expect, beforeEach, afterEach } from 'vitest'; 2 | import * as fs from 'fs/promises'; 3 | import * as path from 'path'; 4 | import * as os from 'os'; 5 | import { Client } from '@modelcontextprotocol/sdk/client/index.js'; 6 | import { StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js'; 7 | import { spawn } from 'child_process'; 8 | 9 | /** 10 | * Integration tests to verify that tool handlers return structuredContent 11 | * that matches the declared outputSchema. 
12 | * 13 | * These tests address issues #3110, #3106, #3093 where tools were returning 14 | * structuredContent: { content: [contentBlock] } (array) instead of 15 | * structuredContent: { content: string } as declared in outputSchema. 16 | */ 17 | describe('structuredContent schema compliance', () => { 18 | let client: Client; 19 | let transport: StdioClientTransport; 20 | let testDir: string; 21 | 22 | beforeEach(async () => { 23 | // Create a temp directory for testing 24 | testDir = await fs.mkdtemp(path.join(os.tmpdir(), 'mcp-fs-test-')); 25 | 26 | // Create test files 27 | await fs.writeFile(path.join(testDir, 'test.txt'), 'test content'); 28 | await fs.mkdir(path.join(testDir, 'subdir')); 29 | await fs.writeFile(path.join(testDir, 'subdir', 'nested.txt'), 'nested content'); 30 | 31 | // Start the MCP server 32 | const serverPath = path.resolve(__dirname, '../dist/index.js'); 33 | transport = new StdioClientTransport({ 34 | command: 'node', 35 | args: [serverPath, testDir], 36 | }); 37 | 38 | client = new Client({ 39 | name: 'test-client', 40 | version: '1.0.0', 41 | }, { 42 | capabilities: {} 43 | }); 44 | 45 | await client.connect(transport); 46 | }); 47 | 48 | afterEach(async () => { 49 | await client?.close(); 50 | await fs.rm(testDir, { recursive: true, force: true }); 51 | }); 52 | 53 | describe('directory_tree', () => { 54 | it('should return structuredContent.content as a string, not an array', async () => { 55 | const result = await client.callTool({ 56 | name: 'directory_tree', 57 | arguments: { path: testDir } 58 | }); 59 | 60 | // The result should have structuredContent 61 | expect(result.structuredContent).toBeDefined(); 62 | 63 | // structuredContent.content should be a string (matching outputSchema: { content: z.string() }) 64 | const structuredContent = result.structuredContent as { content: unknown }; 65 | expect(typeof structuredContent.content).toBe('string'); 66 | 67 | // It should NOT be an array 68 | 
expect(Array.isArray(structuredContent.content)).toBe(false); 69 | 70 | // The content should be valid JSON representing the tree 71 | const treeData = JSON.parse(structuredContent.content as string); 72 | expect(Array.isArray(treeData)).toBe(true); 73 | }); 74 | }); 75 | 76 | describe('list_directory_with_sizes', () => { 77 | it('should return structuredContent.content as a string, not an array', async () => { 78 | const result = await client.callTool({ 79 | name: 'list_directory_with_sizes', 80 | arguments: { path: testDir } 81 | }); 82 | 83 | // The result should have structuredContent 84 | expect(result.structuredContent).toBeDefined(); 85 | 86 | // structuredContent.content should be a string (matching outputSchema: { content: z.string() }) 87 | const structuredContent = result.structuredContent as { content: unknown }; 88 | expect(typeof structuredContent.content).toBe('string'); 89 | 90 | // It should NOT be an array 91 | expect(Array.isArray(structuredContent.content)).toBe(false); 92 | 93 | // The content should contain directory listing info 94 | expect(structuredContent.content).toContain('[FILE]'); 95 | }); 96 | }); 97 | 98 | describe('move_file', () => { 99 | it('should return structuredContent.content as a string, not an array', async () => { 100 | const sourcePath = path.join(testDir, 'test.txt'); 101 | const destPath = path.join(testDir, 'moved.txt'); 102 | 103 | const result = await client.callTool({ 104 | name: 'move_file', 105 | arguments: { 106 | source: sourcePath, 107 | destination: destPath 108 | } 109 | }); 110 | 111 | // The result should have structuredContent 112 | expect(result.structuredContent).toBeDefined(); 113 | 114 | // structuredContent.content should be a string (matching outputSchema: { content: z.string() }) 115 | const structuredContent = result.structuredContent as { content: unknown }; 116 | expect(typeof structuredContent.content).toBe('string'); 117 | 118 | // It should NOT be an array 119 | 
expect(Array.isArray(structuredContent.content)).toBe(false); 120 | 121 | // The content should contain success message 122 | expect(structuredContent.content).toContain('Successfully moved'); 123 | }); 124 | }); 125 | 126 | describe('list_directory (control - already working)', () => { 127 | it('should return structuredContent.content as a string', async () => { 128 | const result = await client.callTool({ 129 | name: 'list_directory', 130 | arguments: { path: testDir } 131 | }); 132 | 133 | expect(result.structuredContent).toBeDefined(); 134 | 135 | const structuredContent = result.structuredContent as { content: unknown }; 136 | expect(typeof structuredContent.content).toBe('string'); 137 | expect(Array.isArray(structuredContent.content)).toBe(false); 138 | }); 139 | }); 140 | 141 | describe('search_files (control - already working)', () => { 142 | it('should return structuredContent.content as a string', async () => { 143 | const result = await client.callTool({ 144 | name: 'search_files', 145 | arguments: { 146 | path: testDir, 147 | pattern: '*.txt' 148 | } 149 | }); 150 | 151 | expect(result.structuredContent).toBeDefined(); 152 | 153 | const structuredContent = result.structuredContent as { content: unknown }; 154 | expect(typeof structuredContent.content).toBe('string'); 155 | expect(Array.isArray(structuredContent.content)).toBe(false); 156 | }); 157 | }); 158 | }); 159 | -------------------------------------------------------------------------------- /src/filesystem/__tests__/directory-tree.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, it, expect, beforeEach, afterEach } from 'vitest'; 2 | import * as fs from 'fs/promises'; 3 | import * as path from 'path'; 4 | import * as os from 'os'; 5 | 6 | // We need to test the buildTree function, but it's defined inside the request handler 7 | // So we'll extract the core logic into a testable function 8 | import { minimatch } from 'minimatch'; 9 | 
10 | interface TreeEntry { 11 | name: string; 12 | type: 'file' | 'directory'; 13 | children?: TreeEntry[]; 14 | } 15 | 16 | async function buildTreeForTesting(currentPath: string, rootPath: string, excludePatterns: string[] = []): Promise { 17 | const entries = await fs.readdir(currentPath, {withFileTypes: true}); 18 | const result: TreeEntry[] = []; 19 | 20 | for (const entry of entries) { 21 | const relativePath = path.relative(rootPath, path.join(currentPath, entry.name)); 22 | const shouldExclude = excludePatterns.some(pattern => { 23 | if (pattern.includes('*')) { 24 | return minimatch(relativePath, pattern, {dot: true}); 25 | } 26 | // For files: match exact name or as part of path 27 | // For directories: match as directory path 28 | return minimatch(relativePath, pattern, {dot: true}) || 29 | minimatch(relativePath, `**/${pattern}`, {dot: true}) || 30 | minimatch(relativePath, `**/${pattern}/**`, {dot: true}); 31 | }); 32 | if (shouldExclude) 33 | continue; 34 | 35 | const entryData: TreeEntry = { 36 | name: entry.name, 37 | type: entry.isDirectory() ? 
'directory' : 'file' 38 | }; 39 | 40 | if (entry.isDirectory()) { 41 | const subPath = path.join(currentPath, entry.name); 42 | entryData.children = await buildTreeForTesting(subPath, rootPath, excludePatterns); 43 | } 44 | 45 | result.push(entryData); 46 | } 47 | 48 | return result; 49 | } 50 | 51 | describe('buildTree exclude patterns', () => { 52 | let testDir: string; 53 | 54 | beforeEach(async () => { 55 | testDir = await fs.mkdtemp(path.join(os.tmpdir(), 'filesystem-test-')); 56 | 57 | // Create test directory structure 58 | await fs.mkdir(path.join(testDir, 'src')); 59 | await fs.mkdir(path.join(testDir, 'node_modules')); 60 | await fs.mkdir(path.join(testDir, '.git')); 61 | await fs.mkdir(path.join(testDir, 'nested', 'node_modules'), { recursive: true }); 62 | 63 | // Create test files 64 | await fs.writeFile(path.join(testDir, '.env'), 'SECRET=value'); 65 | await fs.writeFile(path.join(testDir, '.env.local'), 'LOCAL_SECRET=value'); 66 | await fs.writeFile(path.join(testDir, 'src', 'index.js'), 'console.log("hello");'); 67 | await fs.writeFile(path.join(testDir, 'package.json'), '{}'); 68 | await fs.writeFile(path.join(testDir, 'node_modules', 'module.js'), 'module.exports = {};'); 69 | await fs.writeFile(path.join(testDir, 'nested', 'node_modules', 'deep.js'), 'module.exports = {};'); 70 | }); 71 | 72 | afterEach(async () => { 73 | await fs.rm(testDir, { recursive: true, force: true }); 74 | }); 75 | 76 | it('should exclude files matching simple patterns', async () => { 77 | // Test the current implementation - this will fail until the bug is fixed 78 | const tree = await buildTreeForTesting(testDir, testDir, ['.env']); 79 | const fileNames = tree.map(entry => entry.name); 80 | 81 | expect(fileNames).not.toContain('.env'); 82 | expect(fileNames).toContain('.env.local'); // Should not exclude this 83 | expect(fileNames).toContain('src'); 84 | expect(fileNames).toContain('package.json'); 85 | }); 86 | 87 | it('should exclude directories matching simple 
patterns', async () => { 88 | const tree = await buildTreeForTesting(testDir, testDir, ['node_modules']); 89 | const dirNames = tree.map(entry => entry.name); 90 | 91 | expect(dirNames).not.toContain('node_modules'); 92 | expect(dirNames).toContain('src'); 93 | expect(dirNames).toContain('.git'); 94 | }); 95 | 96 | it('should exclude nested directories with same pattern', async () => { 97 | const tree = await buildTreeForTesting(testDir, testDir, ['node_modules']); 98 | 99 | // Find the nested directory 100 | const nestedDir = tree.find(entry => entry.name === 'nested'); 101 | expect(nestedDir).toBeDefined(); 102 | expect(nestedDir!.children).toBeDefined(); 103 | 104 | // The nested/node_modules should also be excluded 105 | const nestedChildren = nestedDir!.children!.map(child => child.name); 106 | expect(nestedChildren).not.toContain('node_modules'); 107 | }); 108 | 109 | it('should handle glob patterns correctly', async () => { 110 | const tree = await buildTreeForTesting(testDir, testDir, ['*.env']); 111 | const fileNames = tree.map(entry => entry.name); 112 | 113 | expect(fileNames).not.toContain('.env'); 114 | expect(fileNames).toContain('.env.local'); // *.env should not match .env.local 115 | expect(fileNames).toContain('src'); 116 | }); 117 | 118 | it('should handle dot files correctly', async () => { 119 | const tree = await buildTreeForTesting(testDir, testDir, ['.git']); 120 | const dirNames = tree.map(entry => entry.name); 121 | 122 | expect(dirNames).not.toContain('.git'); 123 | expect(dirNames).toContain('.env'); // Should not exclude this 124 | }); 125 | 126 | it('should work with multiple exclude patterns', async () => { 127 | const tree = await buildTreeForTesting(testDir, testDir, ['node_modules', '.env', '.git']); 128 | const entryNames = tree.map(entry => entry.name); 129 | 130 | expect(entryNames).not.toContain('node_modules'); 131 | expect(entryNames).not.toContain('.env'); 132 | expect(entryNames).not.toContain('.git'); 133 | 
expect(entryNames).toContain('src'); 134 | expect(entryNames).toContain('package.json'); 135 | }); 136 | 137 | it('should handle empty exclude patterns', async () => { 138 | const tree = await buildTreeForTesting(testDir, testDir, []); 139 | const entryNames = tree.map(entry => entry.name); 140 | 141 | // All entries should be included 142 | expect(entryNames).toContain('node_modules'); 143 | expect(entryNames).toContain('.env'); 144 | expect(entryNames).toContain('.git'); 145 | expect(entryNames).toContain('src'); 146 | }); 147 | }); -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | .pnpm-debug.log* 9 | 10 | # Diagnostic reports (https://nodejs.org/api/report.html) 11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 12 | 13 | # Runtime data 14 | pids 15 | *.pid 16 | *.seed 17 | *.pid.lock 18 | 19 | # Directory for instrumented libs generated by jscoverage/JSCover 20 | lib-cov 21 | 22 | # Coverage directory used by tools like istanbul 23 | coverage 24 | *.lcov 25 | 26 | # nyc test coverage 27 | .nyc_output 28 | 29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 30 | .grunt 31 | 32 | # Bower dependency directory (https://bower.io/) 33 | bower_components 34 | 35 | # node-waf configuration 36 | .lock-wscript 37 | 38 | # Compiled binary addons (https://nodejs.org/api/addons.html) 39 | build/Release 40 | 41 | # Dependency directories 42 | node_modules/ 43 | jspm_packages/ 44 | 45 | # Snowpack dependency directory (https://snowpack.dev/) 46 | web_modules/ 47 | 48 | # TypeScript cache 49 | *.tsbuildinfo 50 | 51 | # Optional npm cache directory 52 | .npm 53 | 54 | # Optional eslint cache 55 | .eslintcache 56 | 57 | # Optional stylelint cache 58 | .stylelintcache 59 | 60 | # Microbundle cache 61 | 
.rpt2_cache/ 62 | .rts2_cache_cjs/ 63 | .rts2_cache_es/ 64 | .rts2_cache_umd/ 65 | 66 | # Optional REPL history 67 | .node_repl_history 68 | 69 | # Output of 'npm pack' 70 | *.tgz 71 | 72 | # Yarn Integrity file 73 | .yarn-integrity 74 | 75 | # dotenv environment variable files 76 | .env 77 | .env.development.local 78 | .env.test.local 79 | .env.production.local 80 | .env.local 81 | 82 | # parcel-bundler cache (https://parceljs.org/) 83 | .cache 84 | .parcel-cache 85 | 86 | # Next.js build output 87 | .next 88 | out 89 | 90 | # Nuxt.js build / generate output 91 | .nuxt 92 | dist 93 | 94 | # Gatsby files 95 | .cache/ 96 | # Comment in the public line in if your project uses Gatsby and not Next.js 97 | # https://nextjs.org/blog/next-9-1#public-directory-support 98 | # public 99 | 100 | # vuepress build output 101 | .vuepress/dist 102 | 103 | # vuepress v2.x temp and cache directory 104 | .temp 105 | .cache 106 | 107 | # Docusaurus cache and generated files 108 | .docusaurus 109 | 110 | # Serverless directories 111 | .serverless/ 112 | 113 | # FuseBox cache 114 | .fusebox/ 115 | 116 | # DynamoDB Local files 117 | .dynamodb/ 118 | 119 | # TernJS port file 120 | .tern-port 121 | 122 | # Stores VSCode versions used for testing VSCode extensions 123 | .vscode-test 124 | 125 | # IDEs 126 | .idea/ 127 | .vscode/ 128 | 129 | # yarn v2 130 | .yarn/cache 131 | .yarn/unplugged 132 | .yarn/build-state.yml 133 | .yarn/install-state.gz 134 | .pnp.* 135 | 136 | build/ 137 | 138 | gcp-oauth.keys.json 139 | .*-server-credentials.json 140 | 141 | # Byte-compiled / optimized / DLL files 142 | __pycache__/ 143 | *.py[cod] 144 | *$py.class 145 | 146 | # C extensions 147 | *.so 148 | 149 | # Distribution / packaging 150 | .Python 151 | build/ 152 | develop-eggs/ 153 | dist/ 154 | downloads/ 155 | eggs/ 156 | .eggs/ 157 | lib/ 158 | lib64/ 159 | parts/ 160 | sdist/ 161 | var/ 162 | wheels/ 163 | share/python-wheels/ 164 | *.egg-info/ 165 | .installed.cfg 166 | *.egg 167 | MANIFEST 168 | 
169 | # PyInstaller 170 | # Usually these files are written by a python script from a template 171 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 172 | *.manifest 173 | *.spec 174 | 175 | # Installer logs 176 | pip-log.txt 177 | pip-delete-this-directory.txt 178 | 179 | # Unit test / coverage reports 180 | htmlcov/ 181 | .tox/ 182 | .nox/ 183 | .coverage 184 | .coverage.* 185 | .cache 186 | nosetests.xml 187 | coverage.xml 188 | *.cover 189 | *.py,cover 190 | .hypothesis/ 191 | .pytest_cache/ 192 | cover/ 193 | 194 | # Translations 195 | *.mo 196 | *.pot 197 | 198 | # Django stuff: 199 | *.log 200 | local_settings.py 201 | db.sqlite3 202 | db.sqlite3-journal 203 | 204 | # Flask stuff: 205 | instance/ 206 | .webassets-cache 207 | 208 | # Scrapy stuff: 209 | .scrapy 210 | 211 | # Sphinx documentation 212 | docs/_build/ 213 | 214 | # PyBuilder 215 | .pybuilder/ 216 | target/ 217 | 218 | # Jupyter Notebook 219 | .ipynb_checkpoints 220 | 221 | # IPython 222 | profile_default/ 223 | ipython_config.py 224 | 225 | # pyenv 226 | # For a library or package, you might want to ignore these files since the code is 227 | # intended to run in multiple environments; otherwise, check them in: 228 | # .python-version 229 | 230 | # pipenv 231 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 232 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 233 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 234 | # install all needed dependencies. 235 | #Pipfile.lock 236 | 237 | # poetry 238 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 239 | # This is especially recommended for binary packages to ensure reproducibility, and is more 240 | # commonly ignored for libraries. 
241 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 242 | #poetry.lock 243 | 244 | # pdm 245 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 246 | #pdm.lock 247 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 248 | # in version control. 249 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control 250 | .pdm.toml 251 | .pdm-python 252 | .pdm-build/ 253 | 254 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 255 | __pypackages__/ 256 | 257 | # Celery stuff 258 | celerybeat-schedule 259 | celerybeat.pid 260 | 261 | # SageMath parsed files 262 | *.sage.py 263 | 264 | # Environments 265 | .env 266 | .venv 267 | env/ 268 | venv/ 269 | ENV/ 270 | env.bak/ 271 | venv.bak/ 272 | 273 | # Spyder project settings 274 | .spyderproject 275 | .spyproject 276 | 277 | # Rope project settings 278 | .ropeproject 279 | 280 | # mkdocs documentation 281 | /site 282 | 283 | # mypy 284 | .mypy_cache/ 285 | .dmypy.json 286 | dmypy.json 287 | 288 | # Pyre type checker 289 | .pyre/ 290 | 291 | # pytype static type analyzer 292 | .pytype/ 293 | 294 | # Cython debug symbols 295 | cython_debug/ 296 | 297 | .DS_Store 298 | 299 | # PyCharm 300 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 301 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 302 | # and can be added to the global gitignore or merged into this file. For a more nuclear 303 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
304 | #.idea/ 305 | .claude/settings.local.json 306 | -------------------------------------------------------------------------------- /scripts/release.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env uv run --script 2 | # /// script 3 | # requires-python = ">=3.12" 4 | # dependencies = [ 5 | # "click>=8.1.8", 6 | # "tomlkit>=0.13.2" 7 | # ] 8 | # /// 9 | import sys 10 | import re 11 | import click 12 | from pathlib import Path 13 | import json 14 | import tomlkit 15 | import datetime 16 | import subprocess 17 | from dataclasses import dataclass 18 | from typing import Any, Iterator, NewType, Protocol 19 | 20 | 21 | Version = NewType("Version", str) 22 | GitHash = NewType("GitHash", str) 23 | 24 | 25 | class GitHashParamType(click.ParamType): 26 | name = "git_hash" 27 | 28 | def convert( 29 | self, value: Any, param: click.Parameter | None, ctx: click.Context | None 30 | ) -> GitHash | None: 31 | if value is None: 32 | return None 33 | 34 | if not (8 <= len(value) <= 40): 35 | self.fail(f"Git hash must be between 8 and 40 characters, got {len(value)}") 36 | 37 | if not re.match(r"^[0-9a-fA-F]+$", value): 38 | self.fail("Git hash must contain only hex digits (0-9, a-f)") 39 | 40 | try: 41 | # Verify hash exists in repo 42 | subprocess.run( 43 | ["git", "rev-parse", "--verify", value], check=True, capture_output=True 44 | ) 45 | except subprocess.CalledProcessError: 46 | self.fail(f"Git hash {value} not found in repository") 47 | 48 | return GitHash(value.lower()) 49 | 50 | 51 | GIT_HASH = GitHashParamType() 52 | 53 | 54 | class Package(Protocol): 55 | path: Path 56 | 57 | def package_name(self) -> str: ... 58 | 59 | def update_version(self, version: Version) -> None: ... 
60 | 61 | 62 | @dataclass 63 | class NpmPackage: 64 | path: Path 65 | 66 | def package_name(self) -> str: 67 | with open(self.path / "package.json", "r") as f: 68 | return json.load(f)["name"] 69 | 70 | def update_version(self, version: Version): 71 | with open(self.path / "package.json", "r+") as f: 72 | data = json.load(f) 73 | data["version"] = version 74 | f.seek(0) 75 | json.dump(data, f, indent=2) 76 | f.truncate() 77 | 78 | 79 | @dataclass 80 | class PyPiPackage: 81 | path: Path 82 | 83 | def package_name(self) -> str: 84 | with open(self.path / "pyproject.toml") as f: 85 | toml_data = tomlkit.parse(f.read()) 86 | name = toml_data.get("project", {}).get("name") 87 | if not name: 88 | raise Exception("No name in pyproject.toml project section") 89 | return str(name) 90 | 91 | def update_version(self, version: Version): 92 | # Update version in pyproject.toml 93 | with open(self.path / "pyproject.toml") as f: 94 | data = tomlkit.parse(f.read()) 95 | data["project"]["version"] = version 96 | 97 | with open(self.path / "pyproject.toml", "w") as f: 98 | f.write(tomlkit.dumps(data)) 99 | 100 | 101 | def has_changes(path: Path, git_hash: GitHash) -> bool: 102 | """Check if any files changed between current state and git hash""" 103 | try: 104 | output = subprocess.run( 105 | ["git", "diff", "--name-only", git_hash, "--", "."], 106 | cwd=path, 107 | check=True, 108 | capture_output=True, 109 | text=True, 110 | ) 111 | 112 | changed_files = [Path(f) for f in output.stdout.splitlines()] 113 | relevant_files = [f for f in changed_files if f.suffix in [".py", ".ts"]] 114 | return len(relevant_files) >= 1 115 | except subprocess.CalledProcessError: 116 | return False 117 | 118 | 119 | def gen_version() -> Version: 120 | """Generate version based on current date""" 121 | now = datetime.datetime.now() 122 | return Version(f"{now.year}.{now.month}.{now.day}") 123 | 124 | 125 | def find_changed_packages(directory: Path, git_hash: GitHash) -> Iterator[Package]: 126 | for path 
in directory.glob("*/package.json"): 127 | if has_changes(path.parent, git_hash): 128 | yield NpmPackage(path.parent) 129 | for path in directory.glob("*/pyproject.toml"): 130 | if has_changes(path.parent, git_hash): 131 | yield PyPiPackage(path.parent) 132 | 133 | 134 | @click.group() 135 | def cli(): 136 | pass 137 | 138 | 139 | @cli.command("update-packages") 140 | @click.option( 141 | "--directory", type=click.Path(exists=True, path_type=Path), default=Path.cwd() 142 | ) 143 | @click.argument("git_hash", type=GIT_HASH) 144 | def update_packages(directory: Path, git_hash: GitHash) -> int: 145 | # Detect package type 146 | path = directory.resolve(strict=True) 147 | version = gen_version() 148 | 149 | for package in find_changed_packages(path, git_hash): 150 | name = package.package_name() 151 | package.update_version(version) 152 | 153 | click.echo(f"{name}@{version}") 154 | 155 | return 0 156 | 157 | 158 | @cli.command("generate-notes") 159 | @click.option( 160 | "--directory", type=click.Path(exists=True, path_type=Path), default=Path.cwd() 161 | ) 162 | @click.argument("git_hash", type=GIT_HASH) 163 | def generate_notes(directory: Path, git_hash: GitHash) -> int: 164 | # Detect package type 165 | path = directory.resolve(strict=True) 166 | version = gen_version() 167 | 168 | click.echo(f"# Release : v{version}") 169 | click.echo("") 170 | click.echo("## Updated packages") 171 | for package in find_changed_packages(path, git_hash): 172 | name = package.package_name() 173 | click.echo(f"- {name}@{version}") 174 | 175 | return 0 176 | 177 | 178 | @cli.command("generate-version") 179 | def generate_version() -> int: 180 | # Detect package type 181 | click.echo(gen_version()) 182 | return 0 183 | 184 | 185 | @cli.command("generate-matrix") 186 | @click.option( 187 | "--directory", type=click.Path(exists=True, path_type=Path), default=Path.cwd() 188 | ) 189 | @click.option("--npm", is_flag=True, default=False) 190 | @click.option("--pypi", is_flag=True, 
default=False) 191 | @click.argument("git_hash", type=GIT_HASH) 192 | def generate_matrix(directory: Path, git_hash: GitHash, pypi: bool, npm: bool) -> int: 193 | # Detect package type 194 | path = directory.resolve(strict=True) 195 | version = gen_version() 196 | 197 | changes = [] 198 | for package in find_changed_packages(path, git_hash): 199 | pkg = package.path.relative_to(path) 200 | if npm and isinstance(package, NpmPackage): 201 | changes.append(str(pkg)) 202 | if pypi and isinstance(package, PyPiPackage): 203 | changes.append(str(pkg)) 204 | 205 | click.echo(json.dumps(changes)) 206 | return 0 207 | 208 | 209 | if __name__ == "__main__": 210 | sys.exit(cli()) 211 | -------------------------------------------------------------------------------- /src/everything/streamableHttp.ts: -------------------------------------------------------------------------------- 1 | import { StreamableHTTPServerTransport } from "@modelcontextprotocol/sdk/server/streamableHttp.js"; 2 | import { InMemoryEventStore } from '@modelcontextprotocol/sdk/examples/shared/inMemoryEventStore.js'; 3 | import express, { Request, Response } from "express"; 4 | import { createServer } from "./everything.js"; 5 | import { randomUUID } from 'node:crypto'; 6 | import cors from 'cors'; 7 | 8 | console.error('Starting Streamable HTTP server...'); 9 | 10 | const app = express(); 11 | app.use(cors({ 12 | "origin": "*", // use "*" with caution in production 13 | "methods": "GET,POST,DELETE", 14 | "preflightContinue": false, 15 | "optionsSuccessStatus": 204, 16 | "exposedHeaders": [ 17 | 'mcp-session-id', 18 | 'last-event-id', 19 | 'mcp-protocol-version' 20 | ] 21 | })); // Enable CORS for all routes so Inspector can connect 22 | 23 | const transports: Map = new Map(); 24 | 25 | app.post('/mcp', async (req: Request, res: Response) => { 26 | console.error('Received MCP POST request'); 27 | try { 28 | // Check for existing session ID 29 | const sessionId = req.headers['mcp-session-id'] as string | 
undefined; 30 | 31 | let transport: StreamableHTTPServerTransport; 32 | 33 | if (sessionId && transports.has(sessionId)) { 34 | // Reuse existing transport 35 | transport = transports.get(sessionId)!; 36 | } else if (!sessionId) { 37 | 38 | const { server, cleanup, startNotificationIntervals } = createServer(); 39 | 40 | // New initialization request 41 | const eventStore = new InMemoryEventStore(); 42 | transport = new StreamableHTTPServerTransport({ 43 | sessionIdGenerator: () => randomUUID(), 44 | eventStore, // Enable resumability 45 | onsessioninitialized: (sessionId: string) => { 46 | // Store the transport by session ID when session is initialized 47 | // This avoids race conditions where requests might come in before the session is stored 48 | console.error(`Session initialized with ID: ${sessionId}`); 49 | transports.set(sessionId, transport); 50 | } 51 | }); 52 | 53 | 54 | // Set up onclose handler to clean up transport when closed 55 | server.onclose = async () => { 56 | const sid = transport.sessionId; 57 | if (sid && transports.has(sid)) { 58 | console.error(`Transport closed for session ${sid}, removing from transports map`); 59 | transports.delete(sid); 60 | await cleanup(); 61 | } 62 | }; 63 | 64 | // Connect the transport to the MCP server BEFORE handling the request 65 | // so responses can flow back through the same transport 66 | await server.connect(transport); 67 | 68 | await transport.handleRequest(req, res); 69 | 70 | // Wait until initialize is complete and transport will have a sessionId 71 | startNotificationIntervals(transport.sessionId); 72 | 73 | return; // Already handled 74 | } else { 75 | // Invalid request - no session ID or not initialization request 76 | res.status(400).json({ 77 | jsonrpc: '2.0', 78 | error: { 79 | code: -32000, 80 | message: 'Bad Request: No valid session ID provided', 81 | }, 82 | id: req?.body?.id, 83 | }); 84 | return; 85 | } 86 | 87 | // Handle the request with existing transport - no need to reconnect 88 | 
// The existing transport is already connected to the server 89 | await transport.handleRequest(req, res); 90 | } catch (error) { 91 | console.error('Error handling MCP request:', error); 92 | if (!res.headersSent) { 93 | res.status(500).json({ 94 | jsonrpc: '2.0', 95 | error: { 96 | code: -32603, 97 | message: 'Internal server error', 98 | }, 99 | id: req?.body?.id, 100 | }); 101 | return; 102 | } 103 | } 104 | }); 105 | 106 | // Handle GET requests for SSE streams (using built-in support from StreamableHTTP) 107 | app.get('/mcp', async (req: Request, res: Response) => { 108 | console.error('Received MCP GET request'); 109 | const sessionId = req.headers['mcp-session-id'] as string | undefined; 110 | if (!sessionId || !transports.has(sessionId)) { 111 | res.status(400).json({ 112 | jsonrpc: '2.0', 113 | error: { 114 | code: -32000, 115 | message: 'Bad Request: No valid session ID provided', 116 | }, 117 | id: req?.body?.id, 118 | }); 119 | return; 120 | } 121 | 122 | // Check for Last-Event-ID header for resumability 123 | const lastEventId = req.headers['last-event-id'] as string | undefined; 124 | if (lastEventId) { 125 | console.error(`Client reconnecting with Last-Event-ID: ${lastEventId}`); 126 | } else { 127 | console.error(`Establishing new SSE stream for session ${sessionId}`); 128 | } 129 | 130 | const transport = transports.get(sessionId); 131 | await transport!.handleRequest(req, res); 132 | }); 133 | 134 | // Handle DELETE requests for session termination (according to MCP spec) 135 | app.delete('/mcp', async (req: Request, res: Response) => { 136 | const sessionId = req.headers['mcp-session-id'] as string | undefined; 137 | if (!sessionId || !transports.has(sessionId)) { 138 | res.status(400).json({ 139 | jsonrpc: '2.0', 140 | error: { 141 | code: -32000, 142 | message: 'Bad Request: No valid session ID provided', 143 | }, 144 | id: req?.body?.id, 145 | }); 146 | return; 147 | } 148 | 149 | console.error(`Received session termination request for 
session ${sessionId}`); 150 | 151 | try { 152 | const transport = transports.get(sessionId); 153 | await transport!.handleRequest(req, res); 154 | } catch (error) { 155 | console.error('Error handling session termination:', error); 156 | if (!res.headersSent) { 157 | res.status(500).json({ 158 | jsonrpc: '2.0', 159 | error: { 160 | code: -32603, 161 | message: 'Error handling session termination', 162 | }, 163 | id: req?.body?.id, 164 | }); 165 | return; 166 | } 167 | } 168 | }); 169 | 170 | // Start the server 171 | const PORT = process.env.PORT || 3001; 172 | app.listen(PORT, () => { 173 | console.error(`MCP Streamable HTTP Server listening on port ${PORT}`); 174 | }); 175 | 176 | // Handle server shutdown 177 | process.on('SIGINT', async () => { 178 | console.error('Shutting down server...'); 179 | 180 | // Close all active transports to properly clean up resources 181 | for (const sessionId in transports) { 182 | try { 183 | console.error(`Closing transport for session ${sessionId}`); 184 | await transports.get(sessionId)!.close(); 185 | transports.delete(sessionId); 186 | } catch (error) { 187 | console.error(`Error closing transport for session ${sessionId}:`, error); 188 | } 189 | } 190 | 191 | console.error('Server shutdown complete'); 192 | process.exit(0); 193 | }); 194 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Automatic Release Creation 2 | 3 | on: 4 | workflow_dispatch: 5 | schedule: 6 | - cron: '0 10 * * *' 7 | 8 | jobs: 9 | create-metadata: 10 | runs-on: ubuntu-latest 11 | if: github.repository_owner == 'modelcontextprotocol' 12 | outputs: 13 | hash: ${{ steps.last-release.outputs.hash }} 14 | version: ${{ steps.create-version.outputs.version}} 15 | npm_packages: ${{ steps.create-npm-packages.outputs.npm_packages}} 16 | pypi_packages: ${{ steps.create-pypi-packages.outputs.pypi_packages}} 
17 | steps: 18 | - uses: actions/checkout@v4 19 | with: 20 | fetch-depth: 0 21 | 22 | - name: Get last release hash 23 | id: last-release 24 | run: | 25 | HASH=$(git rev-list --tags --max-count=1 || echo "HEAD~1") 26 | echo "hash=${HASH}" >> $GITHUB_OUTPUT 27 | echo "Using last release hash: ${HASH}" 28 | 29 | - name: Install uv 30 | uses: astral-sh/setup-uv@v5 31 | 32 | - name: Create version name 33 | id: create-version 34 | run: | 35 | VERSION=$(uv run --script scripts/release.py generate-version) 36 | echo "version $VERSION" 37 | echo "version=$VERSION" >> $GITHUB_OUTPUT 38 | 39 | - name: Create notes 40 | run: | 41 | HASH="${{ steps.last-release.outputs.hash }}" 42 | uv run --script scripts/release.py generate-notes --directory src/ $HASH > RELEASE_NOTES.md 43 | cat RELEASE_NOTES.md 44 | 45 | - name: Release notes 46 | uses: actions/upload-artifact@v4 47 | with: 48 | name: release-notes 49 | path: RELEASE_NOTES.md 50 | 51 | - name: Create python matrix 52 | id: create-pypi-packages 53 | run: | 54 | HASH="${{ steps.last-release.outputs.hash }}" 55 | PYPI=$(uv run --script scripts/release.py generate-matrix --pypi --directory src $HASH) 56 | echo "pypi_packages $PYPI" 57 | echo "pypi_packages=$PYPI" >> $GITHUB_OUTPUT 58 | 59 | - name: Create npm matrix 60 | id: create-npm-packages 61 | run: | 62 | HASH="${{ steps.last-release.outputs.hash }}" 63 | NPM=$(uv run --script scripts/release.py generate-matrix --npm --directory src $HASH) 64 | echo "npm_packages $NPM" 65 | echo "npm_packages=$NPM" >> $GITHUB_OUTPUT 66 | 67 | update-packages: 68 | needs: [create-metadata] 69 | if: ${{ needs.create-metadata.outputs.npm_packages != '[]' || needs.create-metadata.outputs.pypi_packages != '[]' }} 70 | runs-on: ubuntu-latest 71 | environment: release 72 | outputs: 73 | changes_made: ${{ steps.commit.outputs.changes_made }} 74 | steps: 75 | - uses: actions/checkout@v4 76 | with: 77 | fetch-depth: 0 78 | 79 | - name: Install uv 80 | uses: astral-sh/setup-uv@v5 81 | 82 | - name: 
Update packages 83 | run: | 84 | HASH="${{ needs.create-metadata.outputs.hash }}" 85 | uv run --script scripts/release.py update-packages --directory src/ $HASH 86 | 87 | - name: Configure git 88 | run: | 89 | git config --global user.name "GitHub Actions" 90 | git config --global user.email "actions@github.com" 91 | 92 | - name: Commit changes 93 | id: commit 94 | run: | 95 | VERSION="${{ needs.create-metadata.outputs.version }}" 96 | git add -u 97 | if git diff-index --quiet HEAD; then 98 | echo "changes_made=false" >> $GITHUB_OUTPUT 99 | else 100 | git commit -m 'Automatic update of packages' 101 | git tag -a "$VERSION" -m "Release $VERSION" 102 | git push origin "$VERSION" 103 | echo "changes_made=true" >> $GITHUB_OUTPUT 104 | fi 105 | 106 | publish-pypi: 107 | needs: [update-packages, create-metadata] 108 | if: ${{ needs.create-metadata.outputs.pypi_packages != '[]' && needs.create-metadata.outputs.pypi_packages != '' }} 109 | strategy: 110 | fail-fast: false 111 | matrix: 112 | package: ${{ fromJson(needs.create-metadata.outputs.pypi_packages) }} 113 | name: Build ${{ matrix.package }} 114 | environment: release 115 | permissions: 116 | id-token: write # Required for trusted publishing 117 | runs-on: ubuntu-latest 118 | steps: 119 | - uses: actions/checkout@v4 120 | with: 121 | ref: ${{ needs.create-metadata.outputs.version }} 122 | 123 | - name: Install uv 124 | uses: astral-sh/setup-uv@v5 125 | 126 | - name: Set up Python 127 | uses: actions/setup-python@v5 128 | with: 129 | python-version-file: "src/${{ matrix.package }}/.python-version" 130 | 131 | - name: Install dependencies 132 | working-directory: src/${{ matrix.package }} 133 | run: uv sync --locked --all-extras --dev 134 | 135 | - name: Run pyright 136 | working-directory: src/${{ matrix.package }} 137 | run: uv run --frozen pyright 138 | 139 | - name: Build package 140 | working-directory: src/${{ matrix.package }} 141 | run: uv build 142 | 143 | - name: Publish package to PyPI 144 | uses: 
pypa/gh-action-pypi-publish@release/v1 145 | with: 146 | packages-dir: src/${{ matrix.package }}/dist 147 | 148 | publish-npm: 149 | needs: [update-packages, create-metadata] 150 | if: ${{ needs.create-metadata.outputs.npm_packages != '[]' && needs.create-metadata.outputs.npm_packages != '' }} 151 | strategy: 152 | fail-fast: false 153 | matrix: 154 | package: ${{ fromJson(needs.create-metadata.outputs.npm_packages) }} 155 | name: Build ${{ matrix.package }} 156 | environment: release 157 | runs-on: ubuntu-latest 158 | steps: 159 | - uses: actions/checkout@v4 160 | with: 161 | ref: ${{ needs.create-metadata.outputs.version }} 162 | 163 | - uses: actions/setup-node@v4 164 | with: 165 | node-version: 22 166 | cache: npm 167 | registry-url: 'https://registry.npmjs.org' 168 | 169 | - name: Install dependencies 170 | working-directory: src/${{ matrix.package }} 171 | run: npm ci 172 | 173 | - name: Check if version exists on npm 174 | working-directory: src/${{ matrix.package }} 175 | run: | 176 | VERSION=$(jq -r .version package.json) 177 | if npm view --json | jq -e --arg version "$VERSION" '[.[]][0].versions | contains([$version])'; then 178 | echo "Version $VERSION already exists on npm" 179 | exit 1 180 | fi 181 | echo "Version $VERSION is new, proceeding with publish" 182 | 183 | - name: Build package 184 | working-directory: src/${{ matrix.package }} 185 | run: npm run build 186 | 187 | - name: Publish package 188 | working-directory: src/${{ matrix.package }} 189 | run: | 190 | npm publish --access public 191 | env: 192 | NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} 193 | 194 | create-release: 195 | needs: [update-packages, create-metadata, publish-pypi, publish-npm] 196 | if: | 197 | always() && 198 | needs.update-packages.outputs.changes_made == 'true' && 199 | (needs.publish-pypi.result == 'success' || needs.publish-npm.result == 'success') 200 | runs-on: ubuntu-latest 201 | environment: release 202 | permissions: 203 | contents: write 204 | steps: 205 | - uses: 
actions/checkout@v4 206 | 207 | - name: Download release notes 208 | uses: actions/download-artifact@v4 209 | with: 210 | name: release-notes 211 | 212 | - name: Create release 213 | env: 214 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN}} 215 | run: | 216 | VERSION="${{ needs.create-metadata.outputs.version }}" 217 | gh release create "$VERSION" \ 218 | --title "Release $VERSION" \ 219 | --notes-file RELEASE_NOTES.md 220 | 221 | -------------------------------------------------------------------------------- /src/time/src/mcp_server_time/server.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timedelta 2 | from enum import Enum 3 | import json 4 | from typing import Sequence 5 | 6 | from zoneinfo import ZoneInfo 7 | from tzlocal import get_localzone_name # ← returns "Europe/Paris", etc. 8 | 9 | from mcp.server import Server 10 | from mcp.server.stdio import stdio_server 11 | from mcp.types import Tool, TextContent, ImageContent, EmbeddedResource 12 | from mcp.shared.exceptions import McpError 13 | 14 | from pydantic import BaseModel 15 | 16 | 17 | class TimeTools(str, Enum): 18 | GET_CURRENT_TIME = "get_current_time" 19 | CONVERT_TIME = "convert_time" 20 | 21 | 22 | class TimeResult(BaseModel): 23 | timezone: str 24 | datetime: str 25 | day_of_week: str 26 | is_dst: bool 27 | 28 | 29 | class TimeConversionResult(BaseModel): 30 | source: TimeResult 31 | target: TimeResult 32 | time_difference: str 33 | 34 | 35 | class TimeConversionInput(BaseModel): 36 | source_tz: str 37 | time: str 38 | target_tz_list: list[str] 39 | 40 | 41 | def get_local_tz(local_tz_override: str | None = None) -> ZoneInfo: 42 | if local_tz_override: 43 | return ZoneInfo(local_tz_override) 44 | 45 | # Get local timezone from datetime.now() 46 | local_tzname = get_localzone_name() 47 | if local_tzname is not None: 48 | return ZoneInfo(local_tzname) 49 | # Default to UTC if local timezone cannot be determined 50 | return 
ZoneInfo("UTC") 51 | 52 | 53 | def get_zoneinfo(timezone_name: str) -> ZoneInfo: 54 | try: 55 | return ZoneInfo(timezone_name) 56 | except Exception as e: 57 | raise McpError(f"Invalid timezone: {str(e)}") 58 | 59 | 60 | class TimeServer: 61 | def get_current_time(self, timezone_name: str) -> TimeResult: 62 | """Get current time in specified timezone""" 63 | timezone = get_zoneinfo(timezone_name) 64 | current_time = datetime.now(timezone) 65 | 66 | return TimeResult( 67 | timezone=timezone_name, 68 | datetime=current_time.isoformat(timespec="seconds"), 69 | day_of_week=current_time.strftime("%A"), 70 | is_dst=bool(current_time.dst()), 71 | ) 72 | 73 | def convert_time( 74 | self, source_tz: str, time_str: str, target_tz: str 75 | ) -> TimeConversionResult: 76 | """Convert time between timezones""" 77 | source_timezone = get_zoneinfo(source_tz) 78 | target_timezone = get_zoneinfo(target_tz) 79 | 80 | try: 81 | parsed_time = datetime.strptime(time_str, "%H:%M").time() 82 | except ValueError: 83 | raise ValueError("Invalid time format. 
Expected HH:MM [24-hour format]") 84 | 85 | now = datetime.now(source_timezone) 86 | source_time = datetime( 87 | now.year, 88 | now.month, 89 | now.day, 90 | parsed_time.hour, 91 | parsed_time.minute, 92 | tzinfo=source_timezone, 93 | ) 94 | 95 | target_time = source_time.astimezone(target_timezone) 96 | source_offset = source_time.utcoffset() or timedelta() 97 | target_offset = target_time.utcoffset() or timedelta() 98 | hours_difference = (target_offset - source_offset).total_seconds() / 3600 99 | 100 | if hours_difference.is_integer(): 101 | time_diff_str = f"{hours_difference:+.1f}h" 102 | else: 103 | # For fractional hours like Nepal's UTC+5:45 104 | time_diff_str = f"{hours_difference:+.2f}".rstrip("0").rstrip(".") + "h" 105 | 106 | return TimeConversionResult( 107 | source=TimeResult( 108 | timezone=source_tz, 109 | datetime=source_time.isoformat(timespec="seconds"), 110 | day_of_week=source_time.strftime("%A"), 111 | is_dst=bool(source_time.dst()), 112 | ), 113 | target=TimeResult( 114 | timezone=target_tz, 115 | datetime=target_time.isoformat(timespec="seconds"), 116 | day_of_week=target_time.strftime("%A"), 117 | is_dst=bool(target_time.dst()), 118 | ), 119 | time_difference=time_diff_str, 120 | ) 121 | 122 | 123 | async def serve(local_timezone: str | None = None) -> None: 124 | server = Server("mcp-time") 125 | time_server = TimeServer() 126 | local_tz = str(get_local_tz(local_timezone)) 127 | 128 | @server.list_tools() 129 | async def list_tools() -> list[Tool]: 130 | """List available time tools.""" 131 | return [ 132 | Tool( 133 | name=TimeTools.GET_CURRENT_TIME.value, 134 | description="Get current time in a specific timezones", 135 | inputSchema={ 136 | "type": "object", 137 | "properties": { 138 | "timezone": { 139 | "type": "string", 140 | "description": f"IANA timezone name (e.g., 'America/New_York', 'Europe/London'). 
Use '{local_tz}' as local timezone if no timezone provided by the user.", 141 | } 142 | }, 143 | "required": ["timezone"], 144 | }, 145 | ), 146 | Tool( 147 | name=TimeTools.CONVERT_TIME.value, 148 | description="Convert time between timezones", 149 | inputSchema={ 150 | "type": "object", 151 | "properties": { 152 | "source_timezone": { 153 | "type": "string", 154 | "description": f"Source IANA timezone name (e.g., 'America/New_York', 'Europe/London'). Use '{local_tz}' as local timezone if no source timezone provided by the user.", 155 | }, 156 | "time": { 157 | "type": "string", 158 | "description": "Time to convert in 24-hour format (HH:MM)", 159 | }, 160 | "target_timezone": { 161 | "type": "string", 162 | "description": f"Target IANA timezone name (e.g., 'Asia/Tokyo', 'America/San_Francisco'). Use '{local_tz}' as local timezone if no target timezone provided by the user.", 163 | }, 164 | }, 165 | "required": ["source_timezone", "time", "target_timezone"], 166 | }, 167 | ), 168 | ] 169 | 170 | @server.call_tool() 171 | async def call_tool( 172 | name: str, arguments: dict 173 | ) -> Sequence[TextContent | ImageContent | EmbeddedResource]: 174 | """Handle tool calls for time queries.""" 175 | try: 176 | match name: 177 | case TimeTools.GET_CURRENT_TIME.value: 178 | timezone = arguments.get("timezone") 179 | if not timezone: 180 | raise ValueError("Missing required argument: timezone") 181 | 182 | result = time_server.get_current_time(timezone) 183 | 184 | case TimeTools.CONVERT_TIME.value: 185 | if not all( 186 | k in arguments 187 | for k in ["source_timezone", "time", "target_timezone"] 188 | ): 189 | raise ValueError("Missing required arguments") 190 | 191 | result = time_server.convert_time( 192 | arguments["source_timezone"], 193 | arguments["time"], 194 | arguments["target_timezone"], 195 | ) 196 | case _: 197 | raise ValueError(f"Unknown tool: {name}") 198 | 199 | return [ 200 | TextContent(type="text", text=json.dumps(result.model_dump(), indent=2)) 201 | 
] 202 | 203 | except Exception as e: 204 | raise ValueError(f"Error processing mcp-server-time query: {str(e)}") 205 | 206 | options = server.create_initialization_options() 207 | async with stdio_server() as (read_stream, write_stream): 208 | await server.run(read_stream, write_stream, options) 209 | -------------------------------------------------------------------------------- /src/fetch/README.md: -------------------------------------------------------------------------------- 1 | # Fetch MCP Server 2 | 3 | 4 | 5 | A Model Context Protocol server that provides web content fetching capabilities. This server enables LLMs to retrieve and process content from web pages, converting HTML to markdown for easier consumption. 6 | 7 | > [!CAUTION] 8 | > This server can access local/internal IP addresses and may represent a security risk. Exercise caution when using this MCP server to ensure this does not expose any sensitive data. 9 | 10 | The fetch tool will truncate the response, but by using the `start_index` argument, you can specify where to start the content extraction. This lets models read a webpage in chunks, until they find the information they need. 11 | 12 | ### Available Tools 13 | 14 | - `fetch` - Fetches a URL from the internet and extracts its contents as markdown. 15 | - `url` (string, required): URL to fetch 16 | - `max_length` (integer, optional): Maximum number of characters to return (default: 5000) 17 | - `start_index` (integer, optional): Start content from this character index (default: 0) 18 | - `raw` (boolean, optional): Get raw content without markdown conversion (default: false) 19 | 20 | ### Prompts 21 | 22 | - **fetch** 23 | - Fetch a URL and extract its contents as markdown 24 | - Arguments: 25 | - `url` (string, required): URL to fetch 26 | 27 | ## Installation 28 | 29 | Optionally: Install node.js, this will cause the fetch server to use a different HTML simplifier that is more robust. 
30 | 31 | ### Using uv (recommended) 32 | 33 | When using [`uv`](https://docs.astral.sh/uv/) no specific installation is needed. We will 34 | use [`uvx`](https://docs.astral.sh/uv/guides/tools/) to directly run *mcp-server-fetch*. 35 | 36 | ### Using PIP 37 | 38 | Alternatively you can install `mcp-server-fetch` via pip: 39 | 40 | ``` 41 | pip install mcp-server-fetch 42 | ``` 43 | 44 | After installation, you can run it as a script using: 45 | 46 | ``` 47 | python -m mcp_server_fetch 48 | ``` 49 | 50 | ## Configuration 51 | 52 | ### Configure for Claude.app 53 | 54 | Add to your Claude settings: 55 | 56 |
57 | Using uvx 58 | 59 | ```json 60 | { 61 | "mcpServers": { 62 | "fetch": { 63 | "command": "uvx", 64 | "args": ["mcp-server-fetch"] 65 | } 66 | } 67 | } 68 | ``` 69 |
70 | 71 |
72 | Using docker 73 | 74 | ```json 75 | { 76 | "mcpServers": { 77 | "fetch": { 78 | "command": "docker", 79 | "args": ["run", "-i", "--rm", "mcp/fetch"] 80 | } 81 | } 82 | } 83 | ``` 84 |
85 | 86 |
87 | Using pip installation 88 | 89 | ```json 90 | { 91 | "mcpServers": { 92 | "fetch": { 93 | "command": "python", 94 | "args": ["-m", "mcp_server_fetch"] 95 | } 96 | } 97 | } 98 | ``` 99 |
100 | 101 | ### Configure for VS Code 102 | 103 | For quick installation, use one of the one-click install buttons below... 104 | 105 | [![Install with UV in VS Code](https://img.shields.io/badge/VS_Code-UV-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=fetch&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-fetch%22%5D%7D) [![Install with UV in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-UV-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=fetch&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-fetch%22%5D%7D&quality=insiders) 106 | 107 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=fetch&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Ffetch%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=fetch&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Ffetch%22%5D%7D&quality=insiders) 108 | 109 | For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open User Settings (JSON)`. 110 | 111 | Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 112 | 113 | > Note that the `mcp` key is needed when using the `mcp.json` file. 114 | 115 |
116 | Using uvx 117 | 118 | ```json 119 | { 120 | "mcp": { 121 | "servers": { 122 | "fetch": { 123 | "command": "uvx", 124 | "args": ["mcp-server-fetch"] 125 | } 126 | } 127 | } 128 | } 129 | ``` 130 |
131 | 132 |
133 | Using Docker 134 | 135 | ```json 136 | { 137 | "mcp": { 138 | "servers": { 139 | "fetch": { 140 | "command": "docker", 141 | "args": ["run", "-i", "--rm", "mcp/fetch"] 142 | } 143 | } 144 | } 145 | } 146 | ``` 147 |
### Customization - robots.txt

By default, the server will obey a website's robots.txt file if the request came from the model (via a tool), but not if
the request was user-initiated (via a prompt). This can be disabled by adding the argument `--ignore-robots-txt` to the
`args` list in the configuration.

### Customization - User-agent

By default, depending on if the request came from the model (via a tool), or was user-initiated (via a prompt), the
server will use either the user-agent
```
ModelContextProtocol/1.0 (Autonomous; +https://github.com/modelcontextprotocol/servers)
```
or
```
ModelContextProtocol/1.0 (User-Specified; +https://github.com/modelcontextprotocol/servers)
```

This can be customized by adding the argument `--user-agent=YourUserAgent` to the `args` list in the configuration.

### Customization - Proxy

The server can be configured to use a proxy by using the `--proxy-url` argument.

## Windows Configuration

If you're experiencing timeout issues on Windows, you may need to set the `PYTHONIOENCODING` environment variable to ensure proper character encoding:

178 | Windows configuration (uvx) 179 | 180 | ```json 181 | { 182 | "mcpServers": { 183 | "fetch": { 184 | "command": "uvx", 185 | "args": ["mcp-server-fetch"], 186 | "env": { 187 | "PYTHONIOENCODING": "utf-8" 188 | } 189 | } 190 | } 191 | } 192 | ``` 193 |
194 | 195 |
196 | Windows configuration (pip) 197 | 198 | ```json 199 | { 200 | "mcpServers": { 201 | "fetch": { 202 | "command": "python", 203 | "args": ["-m", "mcp_server_fetch"], 204 | "env": { 205 | "PYTHONIOENCODING": "utf-8" 206 | } 207 | } 208 | } 209 | } 210 | ``` 211 |
212 | 213 | This addresses character encoding issues that can cause the server to timeout on Windows systems. 214 | 215 | ## Debugging 216 | 217 | You can use the MCP inspector to debug the server. For uvx installations: 218 | 219 | ``` 220 | npx @modelcontextprotocol/inspector uvx mcp-server-fetch 221 | ``` 222 | 223 | Or if you've installed the package in a specific directory or are developing on it: 224 | 225 | ``` 226 | cd path/to/servers/src/fetch 227 | npx @modelcontextprotocol/inspector uv run mcp-server-fetch 228 | ``` 229 | 230 | ## Contributing 231 | 232 | We encourage contributions to help expand and improve mcp-server-fetch. Whether you want to add new tools, enhance existing functionality, or improve documentation, your input is valuable. 233 | 234 | For examples of other MCP servers and implementation patterns, see: 235 | https://github.com/modelcontextprotocol/servers 236 | 237 | Pull requests are welcome! Feel free to contribute new ideas, bug fixes, or enhancements to make mcp-server-fetch even more powerful and useful. 238 | 239 | ## License 240 | 241 | mcp-server-fetch is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository. 242 | -------------------------------------------------------------------------------- /src/time/README.md: -------------------------------------------------------------------------------- 1 | # Time MCP Server 2 | 3 | 4 | 5 | A Model Context Protocol server that provides time and timezone conversion capabilities. This server enables LLMs to get current time information and perform timezone conversions using IANA timezone names, with automatic system timezone detection. 6 | 7 | ### Available Tools 8 | 9 | - `get_current_time` - Get current time in a specific timezone or system timezone. 
10 | - Required arguments: 11 | - `timezone` (string): IANA timezone name (e.g., 'America/New_York', 'Europe/London') 12 | 13 | - `convert_time` - Convert time between timezones. 14 | - Required arguments: 15 | - `source_timezone` (string): Source IANA timezone name 16 | - `time` (string): Time in 24-hour format (HH:MM) 17 | - `target_timezone` (string): Target IANA timezone name 18 | 19 | ## Installation 20 | 21 | ### Using uv (recommended) 22 | 23 | When using [`uv`](https://docs.astral.sh/uv/) no specific installation is needed. We will 24 | use [`uvx`](https://docs.astral.sh/uv/guides/tools/) to directly run *mcp-server-time*. 25 | 26 | ### Using PIP 27 | 28 | Alternatively you can install `mcp-server-time` via pip: 29 | 30 | ```bash 31 | pip install mcp-server-time 32 | ``` 33 | 34 | After installation, you can run it as a script using: 35 | 36 | ```bash 37 | python -m mcp_server_time 38 | ``` 39 | 40 | ## Configuration 41 | 42 | ### Configure for Claude.app 43 | 44 | Add to your Claude settings: 45 | 46 |
47 | Using uvx 48 | 49 | ```json 50 | { 51 | "mcpServers": { 52 | "time": { 53 | "command": "uvx", 54 | "args": ["mcp-server-time"] 55 | } 56 | } 57 | } 58 | ``` 59 |
60 | 61 |
62 | Using docker 63 | 64 | ```json 65 | { 66 | "mcpServers": { 67 | "time": { 68 | "command": "docker", 69 | "args": ["run", "-i", "--rm", "-e", "LOCAL_TIMEZONE", "mcp/time"] 70 | } 71 | } 72 | } 73 | ``` 74 |
75 | 76 |
77 | Using pip installation 78 | 79 | ```json 80 | { 81 | "mcpServers": { 82 | "time": { 83 | "command": "python", 84 | "args": ["-m", "mcp_server_time"] 85 | } 86 | } 87 | } 88 | ``` 89 |
90 | 91 | ### Configure for Zed 92 | 93 | Add to your Zed settings.json: 94 | 95 |
96 | Using uvx 97 | 98 | ```json 99 | "context_servers": [ 100 | "mcp-server-time": { 101 | "command": "uvx", 102 | "args": ["mcp-server-time"] 103 | } 104 | ], 105 | ``` 106 |
107 | 108 |
109 | Using pip installation 110 | 111 | ```json 112 | "context_servers": { 113 | "mcp-server-time": { 114 | "command": "python", 115 | "args": ["-m", "mcp_server_time"] 116 | } 117 | }, 118 | ``` 119 |
120 | 121 | ### Configure for VS Code 122 | 123 | For quick installation, use one of the one-click install buttons below... 124 | 125 | [![Install with UV in VS Code](https://img.shields.io/badge/VS_Code-UV-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=time&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-time%22%5D%7D) [![Install with UV in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-UV-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=time&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-time%22%5D%7D&quality=insiders) 126 | 127 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=time&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Ftime%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=time&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Ftime%22%5D%7D&quality=insiders) 128 | 129 | For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open User Settings (JSON)`. 130 | 131 | Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 132 | 133 | > Note that the `mcp` key is needed when using the `mcp.json` file. 134 | 135 |
136 | Using uvx 137 | 138 | ```json 139 | { 140 | "mcp": { 141 | "servers": { 142 | "time": { 143 | "command": "uvx", 144 | "args": ["mcp-server-time"] 145 | } 146 | } 147 | } 148 | } 149 | ``` 150 |
151 | 152 |
153 | Using Docker 154 | 155 | ```json 156 | { 157 | "mcp": { 158 | "servers": { 159 | "time": { 160 | "command": "docker", 161 | "args": ["run", "-i", "--rm", "mcp/time"] 162 | } 163 | } 164 | } 165 | } 166 | ``` 167 |
168 | 169 | ### Configure for Zencoder 170 | 171 | 1. Go to the Zencoder menu (...) 172 | 2. From the dropdown menu, select `Agent Tools` 173 | 3. Click on the `Add Custom MCP` 174 | 4. Add the name and server configuration from below, and make sure to hit the `Install` button 175 | 176 |
177 | Using uvx 178 | 179 | ```json 180 | { 181 | "command": "uvx", 182 | "args": ["mcp-server-time"] 183 | } 184 | ``` 185 |
### Customization - System Timezone

By default, the server automatically detects your system's timezone. You can override this by adding the argument `--local-timezone` to the `args` list in the configuration.

Example:
```json
{
  "command": "python",
  "args": ["-m", "mcp_server_time", "--local-timezone=America/New_York"]
}
```

## Example Interactions

1. Get current time:
```json
{
  "name": "get_current_time",
  "arguments": {
    "timezone": "Europe/Warsaw"
  }
}
```
Response:
```json
{
  "timezone": "Europe/Warsaw",
  "datetime": "2024-01-01T13:00:00+01:00",
  "day_of_week": "Monday",
  "is_dst": false
}
```

2. Convert time between timezones:
```json
{
  "name": "convert_time",
  "arguments": {
    "source_timezone": "America/New_York",
    "time": "16:30",
    "target_timezone": "Asia/Tokyo"
  }
}
```
Response:
```json
{
  "source": {
    "timezone": "America/New_York",
    "datetime": "2024-01-01T16:30:00-05:00",
    "day_of_week": "Monday",
    "is_dst": false
  },
  "target": {
    "timezone": "Asia/Tokyo",
    "datetime": "2024-01-02T06:30:00+09:00",
    "day_of_week": "Tuesday",
    "is_dst": false
  },
  "time_difference": "+14.0h"
}
```

## Debugging

You can use the MCP inspector to debug the server. For uvx installations:

```bash
npx @modelcontextprotocol/inspector uvx mcp-server-time
```

Or if you've installed the package in a specific directory or are developing on it:

```bash
cd path/to/servers/src/time
npx @modelcontextprotocol/inspector uv run mcp-server-time
```

## Examples of Questions for Claude

1. "What time is it now?" (will use system timezone)
2. "What time is it in Tokyo?"
3. 
"When it's 4 PM in New York, what time is it in London?" 267 | 4. "Convert 9:30 AM Tokyo time to New York time" 268 | 269 | ## Build 270 | 271 | Docker build: 272 | 273 | ```bash 274 | cd src/time 275 | docker build -t mcp/time . 276 | ``` 277 | 278 | ## Contributing 279 | 280 | We encourage contributions to help expand and improve mcp-server-time. Whether you want to add new time-related tools, enhance existing functionality, or improve documentation, your input is valuable. 281 | 282 | For examples of other MCP servers and implementation patterns, see: 283 | https://github.com/modelcontextprotocol/servers 284 | 285 | Pull requests are welcome! Feel free to contribute new ideas, bug fixes, or enhancements to make mcp-server-time even more powerful and useful. 286 | 287 | ## License 288 | 289 | mcp-server-time is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository. 
290 | -------------------------------------------------------------------------------- /src/git/tests/test_server.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pathlib import Path 3 | import git 4 | from mcp_server_git.server import ( 5 | git_checkout, 6 | git_branch, 7 | git_add, 8 | git_status, 9 | git_diff_unstaged, 10 | git_diff_staged, 11 | git_diff, 12 | git_commit, 13 | git_reset, 14 | git_log, 15 | git_create_branch, 16 | git_show 17 | ) 18 | import shutil 19 | 20 | @pytest.fixture 21 | def test_repository(tmp_path: Path): 22 | repo_path = tmp_path / "temp_test_repo" 23 | test_repo = git.Repo.init(repo_path) 24 | 25 | Path(repo_path / "test.txt").write_text("test") 26 | test_repo.index.add(["test.txt"]) 27 | test_repo.index.commit("initial commit") 28 | 29 | yield test_repo 30 | 31 | shutil.rmtree(repo_path) 32 | 33 | def test_git_checkout_existing_branch(test_repository): 34 | test_repository.git.branch("test-branch") 35 | result = git_checkout(test_repository, "test-branch") 36 | 37 | assert "Switched to branch 'test-branch'" in result 38 | assert test_repository.active_branch.name == "test-branch" 39 | 40 | def test_git_checkout_nonexistent_branch(test_repository): 41 | 42 | with pytest.raises(git.GitCommandError): 43 | git_checkout(test_repository, "nonexistent-branch") 44 | 45 | def test_git_branch_local(test_repository): 46 | test_repository.git.branch("new-branch-local") 47 | result = git_branch(test_repository, "local") 48 | assert "new-branch-local" in result 49 | 50 | def test_git_branch_remote(test_repository): 51 | result = git_branch(test_repository, "remote") 52 | assert "" == result.strip() # Should be empty if no remote branches 53 | 54 | def test_git_branch_all(test_repository): 55 | test_repository.git.branch("new-branch-all") 56 | result = git_branch(test_repository, "all") 57 | assert "new-branch-all" in result 58 | 59 | def test_git_branch_contains(test_repository): 60 | 
# Get the default branch name (could be "main" or "master") 61 | default_branch = test_repository.active_branch.name 62 | # Create a new branch and commit to it 63 | test_repository.git.checkout("-b", "feature-branch") 64 | Path(test_repository.working_dir / Path("feature.txt")).write_text("feature content") 65 | test_repository.index.add(["feature.txt"]) 66 | commit = test_repository.index.commit("feature commit") 67 | test_repository.git.checkout(default_branch) 68 | 69 | result = git_branch(test_repository, "local", contains=commit.hexsha) 70 | assert "feature-branch" in result 71 | assert default_branch not in result 72 | 73 | def test_git_branch_not_contains(test_repository): 74 | # Get the default branch name (could be "main" or "master") 75 | default_branch = test_repository.active_branch.name 76 | # Create a new branch and commit to it 77 | test_repository.git.checkout("-b", "another-feature-branch") 78 | Path(test_repository.working_dir / Path("another_feature.txt")).write_text("another feature content") 79 | test_repository.index.add(["another_feature.txt"]) 80 | commit = test_repository.index.commit("another feature commit") 81 | test_repository.git.checkout(default_branch) 82 | 83 | result = git_branch(test_repository, "local", not_contains=commit.hexsha) 84 | assert "another-feature-branch" not in result 85 | assert default_branch in result 86 | 87 | def test_git_add_all_files(test_repository): 88 | file_path = Path(test_repository.working_dir) / "all_file.txt" 89 | file_path.write_text("adding all") 90 | 91 | result = git_add(test_repository, ["."]) 92 | 93 | staged_files = [item.a_path for item in test_repository.index.diff("HEAD")] 94 | assert "all_file.txt" in staged_files 95 | assert result == "Files staged successfully" 96 | 97 | def test_git_add_specific_files(test_repository): 98 | file1 = Path(test_repository.working_dir) / "file1.txt" 99 | file2 = Path(test_repository.working_dir) / "file2.txt" 100 | file1.write_text("file 1 content") 101 | 
file2.write_text("file 2 content") 102 | 103 | result = git_add(test_repository, ["file1.txt"]) 104 | 105 | staged_files = [item.a_path for item in test_repository.index.diff("HEAD")] 106 | assert "file1.txt" in staged_files 107 | assert "file2.txt" not in staged_files 108 | assert result == "Files staged successfully" 109 | 110 | def test_git_status(test_repository): 111 | result = git_status(test_repository) 112 | 113 | assert result is not None 114 | assert "On branch" in result or "branch" in result.lower() 115 | 116 | def test_git_diff_unstaged(test_repository): 117 | file_path = Path(test_repository.working_dir) / "test.txt" 118 | file_path.write_text("modified content") 119 | 120 | result = git_diff_unstaged(test_repository) 121 | 122 | assert "test.txt" in result 123 | assert "modified content" in result 124 | 125 | def test_git_diff_unstaged_empty(test_repository): 126 | result = git_diff_unstaged(test_repository) 127 | 128 | assert result == "" 129 | 130 | def test_git_diff_staged(test_repository): 131 | file_path = Path(test_repository.working_dir) / "staged_file.txt" 132 | file_path.write_text("staged content") 133 | test_repository.index.add(["staged_file.txt"]) 134 | 135 | result = git_diff_staged(test_repository) 136 | 137 | assert "staged_file.txt" in result 138 | assert "staged content" in result 139 | 140 | def test_git_diff_staged_empty(test_repository): 141 | result = git_diff_staged(test_repository) 142 | 143 | assert result == "" 144 | 145 | def test_git_diff(test_repository): 146 | # Get the default branch name (could be "main" or "master") 147 | default_branch = test_repository.active_branch.name 148 | test_repository.git.checkout("-b", "feature-diff") 149 | file_path = Path(test_repository.working_dir) / "test.txt" 150 | file_path.write_text("feature changes") 151 | test_repository.index.add(["test.txt"]) 152 | test_repository.index.commit("feature commit") 153 | 154 | result = git_diff(test_repository, default_branch) 155 | 156 | assert 
"test.txt" in result 157 | assert "feature changes" in result 158 | 159 | def test_git_commit(test_repository): 160 | file_path = Path(test_repository.working_dir) / "commit_test.txt" 161 | file_path.write_text("content to commit") 162 | test_repository.index.add(["commit_test.txt"]) 163 | 164 | result = git_commit(test_repository, "test commit message") 165 | 166 | assert "Changes committed successfully with hash" in result 167 | 168 | latest_commit = test_repository.head.commit 169 | assert latest_commit.message.strip() == "test commit message" 170 | 171 | def test_git_reset(test_repository): 172 | file_path = Path(test_repository.working_dir) / "reset_test.txt" 173 | file_path.write_text("content to reset") 174 | test_repository.index.add(["reset_test.txt"]) 175 | 176 | staged_before = [item.a_path for item in test_repository.index.diff("HEAD")] 177 | assert "reset_test.txt" in staged_before 178 | 179 | result = git_reset(test_repository) 180 | 181 | assert result == "All staged changes reset" 182 | 183 | staged_after = [item.a_path for item in test_repository.index.diff("HEAD")] 184 | assert "reset_test.txt" not in staged_after 185 | 186 | def test_git_log(test_repository): 187 | for i in range(3): 188 | file_path = Path(test_repository.working_dir) / f"log_test_{i}.txt" 189 | file_path.write_text(f"content {i}") 190 | test_repository.index.add([f"log_test_{i}.txt"]) 191 | test_repository.index.commit(f"commit {i}") 192 | 193 | result = git_log(test_repository, max_count=2) 194 | 195 | assert isinstance(result, list) 196 | assert len(result) == 2 197 | assert "Commit:" in result[0] 198 | assert "Author:" in result[0] 199 | assert "Date:" in result[0] 200 | assert "Message:" in result[0] 201 | 202 | def test_git_log_default(test_repository): 203 | result = git_log(test_repository) 204 | 205 | assert isinstance(result, list) 206 | assert len(result) >= 1 207 | assert "initial commit" in result[0] 208 | 209 | def test_git_create_branch(test_repository): 210 | 
result = git_create_branch(test_repository, "new-feature-branch") 211 | 212 | assert "Created branch 'new-feature-branch'" in result 213 | 214 | branches = [ref.name for ref in test_repository.references] 215 | assert "new-feature-branch" in branches 216 | 217 | def test_git_create_branch_from_base(test_repository): 218 | test_repository.git.checkout("-b", "base-branch") 219 | file_path = Path(test_repository.working_dir) / "base.txt" 220 | file_path.write_text("base content") 221 | test_repository.index.add(["base.txt"]) 222 | test_repository.index.commit("base commit") 223 | 224 | result = git_create_branch(test_repository, "derived-branch", "base-branch") 225 | 226 | assert "Created branch 'derived-branch' from 'base-branch'" in result 227 | 228 | def test_git_show(test_repository): 229 | file_path = Path(test_repository.working_dir) / "show_test.txt" 230 | file_path.write_text("show content") 231 | test_repository.index.add(["show_test.txt"]) 232 | test_repository.index.commit("show test commit") 233 | 234 | commit_sha = test_repository.head.commit.hexsha 235 | 236 | result = git_show(test_repository, commit_sha) 237 | 238 | assert "Commit:" in result 239 | assert "Author:" in result 240 | assert "show test commit" in result 241 | assert "show_test.txt" in result 242 | 243 | def test_git_show_initial_commit(test_repository): 244 | initial_commit = list(test_repository.iter_commits())[-1] 245 | 246 | result = git_show(test_repository, initial_commit.hexsha) 247 | 248 | assert "Commit:" in result 249 | assert "initial commit" in result 250 | assert "test.txt" in result 251 | -------------------------------------------------------------------------------- /src/sequentialthinking/__tests__/lib.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest'; 2 | import { SequentialThinkingServer, ThoughtData } from '../lib.js'; 3 | 4 | // Mock chalk to avoid ESM 
issues 5 | vi.mock('chalk', () => { 6 | const chalkMock = { 7 | yellow: (str: string) => str, 8 | green: (str: string) => str, 9 | blue: (str: string) => str, 10 | }; 11 | return { 12 | default: chalkMock, 13 | }; 14 | }); 15 | 16 | describe('SequentialThinkingServer', () => { 17 | let server: SequentialThinkingServer; 18 | 19 | beforeEach(() => { 20 | // Disable thought logging for tests 21 | process.env.DISABLE_THOUGHT_LOGGING = 'true'; 22 | server = new SequentialThinkingServer(); 23 | }); 24 | 25 | // Note: Input validation tests removed - validation now happens at the tool 26 | // registration layer via Zod schemas before processThought is called 27 | 28 | describe('processThought - valid inputs', () => { 29 | it('should accept valid basic thought', () => { 30 | const input = { 31 | thought: 'This is my first thought', 32 | thoughtNumber: 1, 33 | totalThoughts: 3, 34 | nextThoughtNeeded: true 35 | }; 36 | 37 | const result = server.processThought(input); 38 | expect(result.isError).toBeUndefined(); 39 | 40 | const data = JSON.parse(result.content[0].text); 41 | expect(data.thoughtNumber).toBe(1); 42 | expect(data.totalThoughts).toBe(3); 43 | expect(data.nextThoughtNeeded).toBe(true); 44 | expect(data.thoughtHistoryLength).toBe(1); 45 | }); 46 | 47 | it('should accept thought with optional fields', () => { 48 | const input = { 49 | thought: 'Revising my earlier idea', 50 | thoughtNumber: 2, 51 | totalThoughts: 3, 52 | nextThoughtNeeded: true, 53 | isRevision: true, 54 | revisesThought: 1, 55 | needsMoreThoughts: false 56 | }; 57 | 58 | const result = server.processThought(input); 59 | expect(result.isError).toBeUndefined(); 60 | 61 | const data = JSON.parse(result.content[0].text); 62 | expect(data.thoughtNumber).toBe(2); 63 | expect(data.thoughtHistoryLength).toBe(1); 64 | }); 65 | 66 | it('should track multiple thoughts in history', () => { 67 | const input1 = { 68 | thought: 'First thought', 69 | thoughtNumber: 1, 70 | totalThoughts: 3, 71 | 
nextThoughtNeeded: true 72 | }; 73 | 74 | const input2 = { 75 | thought: 'Second thought', 76 | thoughtNumber: 2, 77 | totalThoughts: 3, 78 | nextThoughtNeeded: true 79 | }; 80 | 81 | const input3 = { 82 | thought: 'Final thought', 83 | thoughtNumber: 3, 84 | totalThoughts: 3, 85 | nextThoughtNeeded: false 86 | }; 87 | 88 | server.processThought(input1); 89 | server.processThought(input2); 90 | const result = server.processThought(input3); 91 | 92 | const data = JSON.parse(result.content[0].text); 93 | expect(data.thoughtHistoryLength).toBe(3); 94 | expect(data.nextThoughtNeeded).toBe(false); 95 | }); 96 | 97 | it('should auto-adjust totalThoughts if thoughtNumber exceeds it', () => { 98 | const input = { 99 | thought: 'Thought 5', 100 | thoughtNumber: 5, 101 | totalThoughts: 3, 102 | nextThoughtNeeded: true 103 | }; 104 | 105 | const result = server.processThought(input); 106 | const data = JSON.parse(result.content[0].text); 107 | 108 | expect(data.totalThoughts).toBe(5); 109 | }); 110 | }); 111 | 112 | describe('processThought - branching', () => { 113 | it('should track branches correctly', () => { 114 | const input1 = { 115 | thought: 'Main thought', 116 | thoughtNumber: 1, 117 | totalThoughts: 3, 118 | nextThoughtNeeded: true 119 | }; 120 | 121 | const input2 = { 122 | thought: 'Branch A thought', 123 | thoughtNumber: 2, 124 | totalThoughts: 3, 125 | nextThoughtNeeded: true, 126 | branchFromThought: 1, 127 | branchId: 'branch-a' 128 | }; 129 | 130 | const input3 = { 131 | thought: 'Branch B thought', 132 | thoughtNumber: 2, 133 | totalThoughts: 3, 134 | nextThoughtNeeded: false, 135 | branchFromThought: 1, 136 | branchId: 'branch-b' 137 | }; 138 | 139 | server.processThought(input1); 140 | server.processThought(input2); 141 | const result = server.processThought(input3); 142 | 143 | const data = JSON.parse(result.content[0].text); 144 | expect(data.branches).toContain('branch-a'); 145 | expect(data.branches).toContain('branch-b'); 146 | 
expect(data.branches.length).toBe(2); 147 | expect(data.thoughtHistoryLength).toBe(3); 148 | }); 149 | 150 | it('should allow multiple thoughts in same branch', () => { 151 | const input1 = { 152 | thought: 'Branch thought 1', 153 | thoughtNumber: 1, 154 | totalThoughts: 2, 155 | nextThoughtNeeded: true, 156 | branchFromThought: 1, 157 | branchId: 'branch-a' 158 | }; 159 | 160 | const input2 = { 161 | thought: 'Branch thought 2', 162 | thoughtNumber: 2, 163 | totalThoughts: 2, 164 | nextThoughtNeeded: false, 165 | branchFromThought: 1, 166 | branchId: 'branch-a' 167 | }; 168 | 169 | server.processThought(input1); 170 | const result = server.processThought(input2); 171 | 172 | const data = JSON.parse(result.content[0].text); 173 | expect(data.branches).toContain('branch-a'); 174 | expect(data.branches.length).toBe(1); 175 | }); 176 | }); 177 | 178 | describe('processThought - edge cases', () => { 179 | it('should handle very long thought strings', () => { 180 | const input = { 181 | thought: 'a'.repeat(10000), 182 | thoughtNumber: 1, 183 | totalThoughts: 1, 184 | nextThoughtNeeded: false 185 | }; 186 | 187 | const result = server.processThought(input); 188 | expect(result.isError).toBeUndefined(); 189 | }); 190 | 191 | it('should handle thoughtNumber = 1, totalThoughts = 1', () => { 192 | const input = { 193 | thought: 'Only thought', 194 | thoughtNumber: 1, 195 | totalThoughts: 1, 196 | nextThoughtNeeded: false 197 | }; 198 | 199 | const result = server.processThought(input); 200 | expect(result.isError).toBeUndefined(); 201 | 202 | const data = JSON.parse(result.content[0].text); 203 | expect(data.thoughtNumber).toBe(1); 204 | expect(data.totalThoughts).toBe(1); 205 | }); 206 | 207 | it('should handle nextThoughtNeeded = false', () => { 208 | const input = { 209 | thought: 'Final thought', 210 | thoughtNumber: 3, 211 | totalThoughts: 3, 212 | nextThoughtNeeded: false 213 | }; 214 | 215 | const result = server.processThought(input); 216 | const data = 
JSON.parse(result.content[0].text); 217 | 218 | expect(data.nextThoughtNeeded).toBe(false); 219 | }); 220 | }); 221 | 222 | describe('processThought - response format', () => { 223 | it('should return correct response structure on success', () => { 224 | const input = { 225 | thought: 'Test thought', 226 | thoughtNumber: 1, 227 | totalThoughts: 1, 228 | nextThoughtNeeded: false 229 | }; 230 | 231 | const result = server.processThought(input); 232 | 233 | expect(result).toHaveProperty('content'); 234 | expect(Array.isArray(result.content)).toBe(true); 235 | expect(result.content.length).toBe(1); 236 | expect(result.content[0]).toHaveProperty('type', 'text'); 237 | expect(result.content[0]).toHaveProperty('text'); 238 | }); 239 | 240 | it('should return valid JSON in response', () => { 241 | const input = { 242 | thought: 'Test thought', 243 | thoughtNumber: 1, 244 | totalThoughts: 1, 245 | nextThoughtNeeded: false 246 | }; 247 | 248 | const result = server.processThought(input); 249 | 250 | expect(() => JSON.parse(result.content[0].text)).not.toThrow(); 251 | }); 252 | }); 253 | 254 | describe('processThought - with logging enabled', () => { 255 | let serverWithLogging: SequentialThinkingServer; 256 | 257 | beforeEach(() => { 258 | // Enable thought logging for these tests 259 | delete process.env.DISABLE_THOUGHT_LOGGING; 260 | serverWithLogging = new SequentialThinkingServer(); 261 | }); 262 | 263 | afterEach(() => { 264 | // Reset to disabled for other tests 265 | process.env.DISABLE_THOUGHT_LOGGING = 'true'; 266 | }); 267 | 268 | it('should format and log regular thoughts', () => { 269 | const input = { 270 | thought: 'Test thought with logging', 271 | thoughtNumber: 1, 272 | totalThoughts: 3, 273 | nextThoughtNeeded: true 274 | }; 275 | 276 | const result = serverWithLogging.processThought(input); 277 | expect(result.isError).toBeUndefined(); 278 | }); 279 | 280 | it('should format and log revision thoughts', () => { 281 | const input = { 282 | thought: 'Revised 
thought', 283 | thoughtNumber: 2, 284 | totalThoughts: 3, 285 | nextThoughtNeeded: true, 286 | isRevision: true, 287 | revisesThought: 1 288 | }; 289 | 290 | const result = serverWithLogging.processThought(input); 291 | expect(result.isError).toBeUndefined(); 292 | }); 293 | 294 | it('should format and log branch thoughts', () => { 295 | const input = { 296 | thought: 'Branch thought', 297 | thoughtNumber: 2, 298 | totalThoughts: 3, 299 | nextThoughtNeeded: false, 300 | branchFromThought: 1, 301 | branchId: 'branch-a' 302 | }; 303 | 304 | const result = serverWithLogging.processThought(input); 305 | expect(result.isError).toBeUndefined(); 306 | }); 307 | }); 308 | }); 309 | -------------------------------------------------------------------------------- /src/memory/README.md: -------------------------------------------------------------------------------- 1 | # Knowledge Graph Memory Server 2 | 3 | A basic implementation of persistent memory using a local knowledge graph. This lets Claude remember information about the user across chats. 4 | 5 | ## Core Concepts 6 | 7 | ### Entities 8 | Entities are the primary nodes in the knowledge graph. Each entity has: 9 | - A unique name (identifier) 10 | - An entity type (e.g., "person", "organization", "event") 11 | - A list of observations 12 | 13 | Example: 14 | ```json 15 | { 16 | "name": "John_Smith", 17 | "entityType": "person", 18 | "observations": ["Speaks fluent Spanish"] 19 | } 20 | ``` 21 | 22 | ### Relations 23 | Relations define directed connections between entities. They are always stored in active voice and describe how entities interact or relate to each other. 24 | 25 | Example: 26 | ```json 27 | { 28 | "from": "John_Smith", 29 | "to": "Anthropic", 30 | "relationType": "works_at" 31 | } 32 | ``` 33 | ### Observations 34 | Observations are discrete pieces of information about an entity. 
They are: 35 | 36 | - Stored as strings 37 | - Attached to specific entities 38 | - Can be added or removed independently 39 | - Should be atomic (one fact per observation) 40 | 41 | Example: 42 | ```json 43 | { 44 | "entityName": "John_Smith", 45 | "observations": [ 46 | "Speaks fluent Spanish", 47 | "Graduated in 2019", 48 | "Prefers morning meetings" 49 | ] 50 | } 51 | ``` 52 | 53 | ## API 54 | 55 | ### Tools 56 | - **create_entities** 57 | - Create multiple new entities in the knowledge graph 58 | - Input: `entities` (array of objects) 59 | - Each object contains: 60 | - `name` (string): Entity identifier 61 | - `entityType` (string): Type classification 62 | - `observations` (string[]): Associated observations 63 | - Ignores entities with existing names 64 | 65 | - **create_relations** 66 | - Create multiple new relations between entities 67 | - Input: `relations` (array of objects) 68 | - Each object contains: 69 | - `from` (string): Source entity name 70 | - `to` (string): Target entity name 71 | - `relationType` (string): Relationship type in active voice 72 | - Skips duplicate relations 73 | 74 | - **add_observations** 75 | - Add new observations to existing entities 76 | - Input: `observations` (array of objects) 77 | - Each object contains: 78 | - `entityName` (string): Target entity 79 | - `contents` (string[]): New observations to add 80 | - Returns added observations per entity 81 | - Fails if entity doesn't exist 82 | 83 | - **delete_entities** 84 | - Remove entities and their relations 85 | - Input: `entityNames` (string[]) 86 | - Cascading deletion of associated relations 87 | - Silent operation if entity doesn't exist 88 | 89 | - **delete_observations** 90 | - Remove specific observations from entities 91 | - Input: `deletions` (array of objects) 92 | - Each object contains: 93 | - `entityName` (string): Target entity 94 | - `observations` (string[]): Observations to remove 95 | - Silent operation if observation doesn't exist 96 | 97 | - 
**delete_relations** 98 | - Remove specific relations from the graph 99 | - Input: `relations` (array of objects) 100 | - Each object contains: 101 | - `from` (string): Source entity name 102 | - `to` (string): Target entity name 103 | - `relationType` (string): Relationship type 104 | - Silent operation if relation doesn't exist 105 | 106 | - **read_graph** 107 | - Read the entire knowledge graph 108 | - No input required 109 | - Returns complete graph structure with all entities and relations 110 | 111 | - **search_nodes** 112 | - Search for nodes based on query 113 | - Input: `query` (string) 114 | - Searches across: 115 | - Entity names 116 | - Entity types 117 | - Observation content 118 | - Returns matching entities and their relations 119 | 120 | - **open_nodes** 121 | - Retrieve specific nodes by name 122 | - Input: `names` (string[]) 123 | - Returns: 124 | - Requested entities 125 | - Relations between requested entities 126 | - Silently skips non-existent nodes 127 | 128 | # Usage with Claude Desktop 129 | 130 | ### Setup 131 | 132 | Add this to your claude_desktop_config.json: 133 | 134 | #### Docker 135 | 136 | ```json 137 | { 138 | "mcpServers": { 139 | "memory": { 140 | "command": "docker", 141 | "args": ["run", "-i", "-v", "claude-memory:/app/dist", "--rm", "mcp/memory"] 142 | } 143 | } 144 | } 145 | ``` 146 | 147 | #### NPX 148 | ```json 149 | { 150 | "mcpServers": { 151 | "memory": { 152 | "command": "npx", 153 | "args": [ 154 | "-y", 155 | "@modelcontextprotocol/server-memory" 156 | ] 157 | } 158 | } 159 | } 160 | ``` 161 | 162 | #### NPX with custom setting 163 | 164 | The server can be configured using the following environment variables: 165 | 166 | ```json 167 | { 168 | "mcpServers": { 169 | "memory": { 170 | "command": "npx", 171 | "args": [ 172 | "-y", 173 | "@modelcontextprotocol/server-memory" 174 | ], 175 | "env": { 176 | "MEMORY_FILE_PATH": "/path/to/custom/memory.jsonl" 177 | } 178 | } 179 | } 180 | } 181 | ``` 182 | 183 | - 
`MEMORY_FILE_PATH`: Path to the memory storage JSONL file (default: `memory.jsonl` in the server directory) 184 | 185 | # VS Code Installation Instructions 186 | 187 | For quick installation, use one of the one-click installation buttons below: 188 | 189 | [![Install with NPX in VS Code](https://img.shields.io/badge/VS_Code-NPM-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=memory&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-memory%22%5D%7D) [![Install with NPX in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-NPM-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=memory&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-memory%22%5D%7D&quality=insiders) 190 | 191 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=memory&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22-v%22%2C%22claude-memory%3A%2Fapp%2Fdist%22%2C%22--rm%22%2C%22mcp%2Fmemory%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=memory&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22-v%22%2C%22claude-memory%3A%2Fapp%2Fdist%22%2C%22--rm%22%2C%22mcp%2Fmemory%22%5D%7D&quality=insiders) 192 | 193 | For manual installation, you can configure the MCP server using one of these methods: 194 | 195 | **Method 1: User Configuration (Recommended)** 196 | Add the configuration to your user-level MCP configuration file. 
Open the Command Palette (`Ctrl + Shift + P`) and run `MCP: Open User Configuration`. This will open your user `mcp.json` file where you can add the server configuration. 197 | 198 | **Method 2: Workspace Configuration** 199 | Alternatively, you can add the configuration to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 200 | 201 | > For more details about MCP configuration in VS Code, see the [official VS Code MCP documentation](https://code.visualstudio.com/docs/copilot/customization/mcp-servers). 202 | 203 | #### NPX 204 | 205 | ```json 206 | { 207 | "servers": { 208 | "memory": { 209 | "command": "npx", 210 | "args": [ 211 | "-y", 212 | "@modelcontextprotocol/server-memory" 213 | ] 214 | } 215 | } 216 | } 217 | ``` 218 | 219 | #### Docker 220 | 221 | ```json 222 | { 223 | "servers": { 224 | "memory": { 225 | "command": "docker", 226 | "args": [ 227 | "run", 228 | "-i", 229 | "-v", 230 | "claude-memory:/app/dist", 231 | "--rm", 232 | "mcp/memory" 233 | ] 234 | } 235 | } 236 | } 237 | ``` 238 | 239 | ### System Prompt 240 | 241 | The prompt for utilizing memory depends on the use case. Changing the prompt will help the model determine the frequency and types of memories created. 242 | 243 | Here is an example prompt for chat personalization. You could use this prompt in the "Custom Instructions" field of a [Claude.ai Project](https://www.anthropic.com/news/projects). 244 | 245 | ``` 246 | Follow these steps for each interaction: 247 | 248 | 1. User Identification: 249 | - You should assume that you are interacting with default_user 250 | - If you have not identified default_user, proactively try to do so. 251 | 252 | 2. Memory Retrieval: 253 | - Always begin your chat by saying only "Remembering..." and retrieve all relevant information from your knowledge graph 254 | - Always refer to your knowledge graph as your "memory" 255 | 256 | 3. 
Memory 257 | - While conversing with the user, be attentive to any new information that falls into these categories: 258 | a) Basic Identity (age, gender, location, job title, education level, etc.) 259 | b) Behaviors (interests, habits, etc.) 260 | c) Preferences (communication style, preferred language, etc.) 261 | d) Goals (goals, targets, aspirations, etc.) 262 | e) Relationships (personal and professional relationships up to 3 degrees of separation) 263 | 264 | 4. Memory Update: 265 | - If any new information was gathered during the interaction, update your memory as follows: 266 | a) Create entities for recurring organizations, people, and significant events 267 | b) Connect them to the current entities using relations 268 | c) Store facts about them as observations 269 | ``` 270 | 271 | ## Building 272 | 273 | Docker: 274 | 275 | ```sh 276 | docker build -t mcp/memory -f src/memory/Dockerfile . 277 | ``` 278 | 279 | For Awareness: a prior mcp/memory volume contains an index.js file that could be overwritten by the new container. If you are using a docker volume for storage, delete the old docker volume's `index.js` file before starting the new container. 280 | 281 | ## License 282 | 283 | This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository. 284 | -------------------------------------------------------------------------------- /src/everything/README.md: -------------------------------------------------------------------------------- 1 | # Everything MCP Server 2 | 3 | This MCP server attempts to exercise all the features of the MCP protocol. It is not intended to be a useful server, but rather a test server for builders of MCP clients. It implements prompts, tools, resources, sampling, and more to showcase MCP capabilities. 
4 | 5 | ## Components 6 | 7 | ### Tools 8 | 9 | 1. `echo` 10 | - Simple tool to echo back input messages 11 | - Input: 12 | - `message` (string): Message to echo back 13 | - Returns: Text content with echoed message 14 | 15 | 2. `add` 16 | - Adds two numbers together 17 | - Inputs: 18 | - `a` (number): First number 19 | - `b` (number): Second number 20 | - Returns: Text result of the addition 21 | 22 | 3. `longRunningOperation` 23 | - Demonstrates progress notifications for long operations 24 | - Inputs: 25 | - `duration` (number, default: 10): Duration in seconds 26 | - `steps` (number, default: 5): Number of progress steps 27 | - Returns: Completion message with duration and steps 28 | - Sends progress notifications during execution 29 | 30 | 4. `printEnv` 31 | - Prints all environment variables 32 | - Useful for debugging MCP server configuration 33 | - No inputs required 34 | - Returns: JSON string of all environment variables 35 | 36 | 5. `sampleLLM` 37 | - Demonstrates LLM sampling capability using MCP sampling feature 38 | - Inputs: 39 | - `prompt` (string): The prompt to send to the LLM 40 | - `maxTokens` (number, default: 100): Maximum tokens to generate 41 | - Returns: Generated LLM response 42 | 43 | 6. `getTinyImage` 44 | - Returns a small test image 45 | - No inputs required 46 | - Returns: Base64 encoded PNG image data 47 | 48 | 7. 
`annotatedMessage` 49 | - Demonstrates how annotations can be used to provide metadata about content 50 | - Inputs: 51 | - `messageType` (enum: "error" | "success" | "debug"): Type of message to demonstrate different annotation patterns 52 | - `includeImage` (boolean, default: false): Whether to include an example image 53 | - Returns: Content with varying annotations: 54 | - Error messages: High priority (1.0), visible to both user and assistant 55 | - Success messages: Medium priority (0.7), user-focused 56 | - Debug messages: Low priority (0.3), assistant-focused 57 | - Optional image: Medium priority (0.5), user-focused 58 | - Example annotations: 59 | ```json 60 | { 61 | "priority": 1.0, 62 | "audience": ["user", "assistant"] 63 | } 64 | ``` 65 | 66 | 8. `getResourceReference` 67 | - Returns a resource reference that can be used by MCP clients 68 | - Inputs: 69 | - `resourceId` (number, 1-100): ID of the resource to reference 70 | - Returns: A resource reference with: 71 | - Text introduction 72 | - Embedded resource with `type: "resource"` 73 | - Text instruction for using the resource URI 74 | 75 | 9. `startElicitation` 76 | - Initiates an elicitation (interaction) within the MCP client. 77 | - Inputs: 78 | - `color` (string): Favorite color 79 | - `number` (number, 1-100): Favorite number 80 | - `pets` (enum): Favorite pet 81 | - Returns: Confirmation of the elicitation demo with selection summary. 82 | 83 | 10. `structuredContent` 84 | - Demonstrates a tool returning structured content using the example in the specification 85 | - Provides an output schema to allow testing of client SHOULD advisory to validate the result using the schema 86 | - Inputs: 87 | - `location` (string): A location or ZIP code, mock data is returned regardless of value 88 | - Returns: a response with 89 | - `structuredContent` field conformant to the output schema 90 | - A backward compatible Text Content field, a SHOULD advisory in the specification 91 | 92 | 11. 
`listRoots` 93 | - Lists the current MCP roots provided by the client 94 | - Demonstrates the roots protocol capability even though this server doesn't access files 95 | - No inputs required 96 | - Returns: List of current roots with their URIs and names, or a message if no roots are set 97 | - Shows how servers can interact with the MCP roots protocol 98 | 99 | ### Resources 100 | 101 | The server provides 100 test resources in two formats: 102 | - Even numbered resources: 103 | - Plaintext format 104 | - URI pattern: `test://static/resource/{even_number}` 105 | - Content: Simple text description 106 | 107 | - Odd numbered resources: 108 | - Binary blob format 109 | - URI pattern: `test://static/resource/{odd_number}` 110 | - Content: Base64 encoded binary data 111 | 112 | Resource features: 113 | - Supports pagination (10 items per page) 114 | - Allows subscribing to resource updates 115 | - Demonstrates resource templates 116 | - Auto-updates subscribed resources every 5 seconds 117 | 118 | ### Prompts 119 | 120 | 1. `simple_prompt` 121 | - Basic prompt without arguments 122 | - Returns: Single message exchange 123 | 124 | 2. `complex_prompt` 125 | - Advanced prompt demonstrating argument handling 126 | - Required arguments: 127 | - `temperature` (string): Temperature setting 128 | - Optional arguments: 129 | - `style` (string): Output style preference 130 | - Returns: Multi-turn conversation with images 131 | 132 | 3. 
`resource_prompt` 133 | - Demonstrates embedding resource references in prompts 134 | - Required arguments: 135 | - `resourceId` (number): ID of the resource to embed (1-100) 136 | - Returns: Multi-turn conversation with an embedded resource reference 137 | - Shows how to include resources directly in prompt messages 138 | 139 | ### Roots 140 | 141 | The server demonstrates the MCP roots protocol capability: 142 | 143 | - Declares `roots: { listChanged: true }` capability to indicate support for roots 144 | - Handles `roots/list_changed` notifications from clients 145 | - Requests initial roots during server initialization 146 | - Provides a `listRoots` tool to display current roots 147 | - Logs roots-related events for demonstration purposes 148 | 149 | Note: This server doesn't actually access files, but demonstrates how servers can interact with the roots protocol for clients that need to understand which directories are available for file operations. 150 | 151 | ### Logging 152 | 153 | The server sends random-leveled log messages every 15 seconds, e.g.: 154 | 155 | ```json 156 | { 157 | "method": "notifications/message", 158 | "params": { 159 | "level": "info", 160 | "data": "Info-level message" 161 | } 162 | } 163 | ``` 164 | 165 | ## Usage with Claude Desktop (uses [stdio Transport](https://modelcontextprotocol.io/specification/2025-03-26/basic/transports#stdio)) 166 | 167 | Add to your `claude_desktop_config.json`: 168 | 169 | ```json 170 | { 171 | "mcpServers": { 172 | "everything": { 173 | "command": "npx", 174 | "args": [ 175 | "-y", 176 | "@modelcontextprotocol/server-everything" 177 | ] 178 | } 179 | } 180 | } 181 | ``` 182 | 183 | ## Usage with VS Code 184 | 185 | For quick installation, use one of the one-click install buttons below... 
186 | 187 | [![Install with NPX in VS Code](https://img.shields.io/badge/VS_Code-NPM-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=everything&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-everything%22%5D%7D) [![Install with NPX in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-NPM-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=everything&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-everything%22%5D%7D&quality=insiders) 188 | 189 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=everything&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Feverything%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=everything&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Feverything%22%5D%7D&quality=insiders) 190 | 191 | For manual installation, you can configure the MCP server using one of these methods: 192 | 193 | **Method 1: User Configuration (Recommended)** 194 | Add the configuration to your user-level MCP configuration file. Open the Command Palette (`Ctrl + Shift + P`) and run `MCP: Open User Configuration`. This will open your user `mcp.json` file where you can add the server configuration. 195 | 196 | **Method 2: Workspace Configuration** 197 | Alternatively, you can add the configuration to a file called `.vscode/mcp.json` in your workspace. 
This will allow you to share the configuration with others. 198 | 199 | > For more details about MCP configuration in VS Code, see the [official VS Code MCP documentation](https://code.visualstudio.com/docs/copilot/customization/mcp-servers). 200 | 201 | #### NPX 202 | 203 | ```json 204 | { 205 | "servers": { 206 | "everything": { 207 | "command": "npx", 208 | "args": ["-y", "@modelcontextprotocol/server-everything"] 209 | } 210 | } 211 | } 212 | ``` 213 | 214 | ## Running from source with [HTTP+SSE Transport](https://modelcontextprotocol.io/specification/2024-11-05/basic/transports#http-with-sse) (deprecated as of [2025-03-26](https://modelcontextprotocol.io/specification/2025-03-26/basic/transports)) 215 | 216 | ```shell 217 | cd src/everything 218 | npm install 219 | npm run start:sse 220 | ``` 221 | 222 | ## Run from source with [Streamable HTTP Transport](https://modelcontextprotocol.io/specification/2025-03-26/basic/transports#streamable-http) 223 | 224 | ```shell 225 | cd src/everything 226 | npm install 227 | npm run start:streamableHttp 228 | ``` 229 | 230 | ## Running as an installed package 231 | ### Install 232 | ```shell 233 | npm install -g @modelcontextprotocol/server-everything@latest 234 | ```` 235 | 236 | ### Run the default (stdio) server 237 | ```shell 238 | npx @modelcontextprotocol/server-everything 239 | ``` 240 | 241 | ### Or specify stdio explicitly 242 | ```shell 243 | npx @modelcontextprotocol/server-everything stdio 244 | ``` 245 | 246 | ### Run the SSE server 247 | ```shell 248 | npx @modelcontextprotocol/server-everything sse 249 | ``` 250 | 251 | ### Run the streamable HTTP server 252 | ```shell 253 | npx @modelcontextprotocol/server-everything streamableHttp 254 | ``` 255 | 256 | -------------------------------------------------------------------------------- /src/fetch/src/mcp_server_fetch/server.py: -------------------------------------------------------------------------------- 1 | from typing import Annotated, Tuple 2 | from 
urllib.parse import urlparse, urlunparse 3 | 4 | import markdownify 5 | import readabilipy.simple_json 6 | from mcp.shared.exceptions import McpError 7 | from mcp.server import Server 8 | from mcp.server.stdio import stdio_server 9 | from mcp.types import ( 10 | ErrorData, 11 | GetPromptResult, 12 | Prompt, 13 | PromptArgument, 14 | PromptMessage, 15 | TextContent, 16 | Tool, 17 | INVALID_PARAMS, 18 | INTERNAL_ERROR, 19 | ) 20 | from protego import Protego 21 | from pydantic import BaseModel, Field, AnyUrl 22 | 23 | DEFAULT_USER_AGENT_AUTONOMOUS = "ModelContextProtocol/1.0 (Autonomous; +https://github.com/modelcontextprotocol/servers)" 24 | DEFAULT_USER_AGENT_MANUAL = "ModelContextProtocol/1.0 (User-Specified; +https://github.com/modelcontextprotocol/servers)" 25 | 26 | 27 | def extract_content_from_html(html: str) -> str: 28 | """Extract and convert HTML content to Markdown format. 29 | 30 | Args: 31 | html: Raw HTML content to process 32 | 33 | Returns: 34 | Simplified markdown version of the content 35 | """ 36 | ret = readabilipy.simple_json.simple_json_from_html_string( 37 | html, use_readability=True 38 | ) 39 | if not ret["content"]: 40 | return "Page failed to be simplified from HTML" 41 | content = markdownify.markdownify( 42 | ret["content"], 43 | heading_style=markdownify.ATX, 44 | ) 45 | return content 46 | 47 | 48 | def get_robots_txt_url(url: str) -> str: 49 | """Get the robots.txt URL for a given website URL. 
50 | 51 | Args: 52 | url: Website URL to get robots.txt for 53 | 54 | Returns: 55 | URL of the robots.txt file 56 | """ 57 | # Parse the URL into components 58 | parsed = urlparse(url) 59 | 60 | # Reconstruct the base URL with just scheme, netloc, and /robots.txt path 61 | robots_url = urlunparse((parsed.scheme, parsed.netloc, "/robots.txt", "", "", "")) 62 | 63 | return robots_url 64 | 65 | 66 | async def check_may_autonomously_fetch_url(url: str, user_agent: str, proxy_url: str | None = None) -> None: 67 | """ 68 | Check if the URL can be fetched by the user agent according to the robots.txt file. 69 | Raises a McpError if not. 70 | """ 71 | from httpx import AsyncClient, HTTPError 72 | 73 | robot_txt_url = get_robots_txt_url(url) 74 | 75 | async with AsyncClient(proxies=proxy_url) as client: 76 | try: 77 | response = await client.get( 78 | robot_txt_url, 79 | follow_redirects=True, 80 | headers={"User-Agent": user_agent}, 81 | ) 82 | except HTTPError: 83 | raise McpError(ErrorData( 84 | code=INTERNAL_ERROR, 85 | message=f"Failed to fetch robots.txt {robot_txt_url} due to a connection issue", 86 | )) 87 | if response.status_code in (401, 403): 88 | raise McpError(ErrorData( 89 | code=INTERNAL_ERROR, 90 | message=f"When fetching robots.txt ({robot_txt_url}), received status {response.status_code} so assuming that autonomous fetching is not allowed, the user can try manually fetching by using the fetch prompt", 91 | )) 92 | elif 400 <= response.status_code < 500: 93 | return 94 | robot_txt = response.text 95 | processed_robot_txt = "\n".join( 96 | line for line in robot_txt.splitlines() if not line.strip().startswith("#") 97 | ) 98 | robot_parser = Protego.parse(processed_robot_txt) 99 | if not robot_parser.can_fetch(str(url), user_agent): 100 | raise McpError(ErrorData( 101 | code=INTERNAL_ERROR, 102 | message=f"The sites robots.txt ({robot_txt_url}), specifies that autonomous fetching of this page is not allowed, " 103 | f"{user_agent}\n" 104 | f"{url}" 105 | 
f"\n{robot_txt}\n\n" 106 | f"The assistant must let the user know that it failed to view the page. The assistant may provide further guidance based on the above information.\n" 107 | f"The assistant can tell the user that they can try manually fetching the page by using the fetch prompt within their UI.", 108 | )) 109 | 110 | 111 | async def fetch_url( 112 | url: str, user_agent: str, force_raw: bool = False, proxy_url: str | None = None 113 | ) -> Tuple[str, str]: 114 | """ 115 | Fetch the URL and return the content in a form ready for the LLM, as well as a prefix string with status information. 116 | """ 117 | from httpx import AsyncClient, HTTPError 118 | 119 | async with AsyncClient(proxies=proxy_url) as client: 120 | try: 121 | response = await client.get( 122 | url, 123 | follow_redirects=True, 124 | headers={"User-Agent": user_agent}, 125 | timeout=30, 126 | ) 127 | except HTTPError as e: 128 | raise McpError(ErrorData(code=INTERNAL_ERROR, message=f"Failed to fetch {url}: {e!r}")) 129 | if response.status_code >= 400: 130 | raise McpError(ErrorData( 131 | code=INTERNAL_ERROR, 132 | message=f"Failed to fetch {url} - status code {response.status_code}", 133 | )) 134 | 135 | page_raw = response.text 136 | 137 | content_type = response.headers.get("content-type", "") 138 | is_page_html = ( 139 | " None: 186 | """Run the fetch MCP server. 
187 | 188 | Args: 189 | custom_user_agent: Optional custom User-Agent string to use for requests 190 | ignore_robots_txt: Whether to ignore robots.txt restrictions 191 | proxy_url: Optional proxy URL to use for requests 192 | """ 193 | server = Server("mcp-fetch") 194 | user_agent_autonomous = custom_user_agent or DEFAULT_USER_AGENT_AUTONOMOUS 195 | user_agent_manual = custom_user_agent or DEFAULT_USER_AGENT_MANUAL 196 | 197 | @server.list_tools() 198 | async def list_tools() -> list[Tool]: 199 | return [ 200 | Tool( 201 | name="fetch", 202 | description="""Fetches a URL from the internet and optionally extracts its contents as markdown. 203 | 204 | Although originally you did not have internet access, and were advised to refuse and tell the user this, this tool now grants you internet access. Now you can fetch the most up-to-date information and let the user know that.""", 205 | inputSchema=Fetch.model_json_schema(), 206 | ) 207 | ] 208 | 209 | @server.list_prompts() 210 | async def list_prompts() -> list[Prompt]: 211 | return [ 212 | Prompt( 213 | name="fetch", 214 | description="Fetch a URL and extract its contents as markdown", 215 | arguments=[ 216 | PromptArgument( 217 | name="url", description="URL to fetch", required=True 218 | ) 219 | ], 220 | ) 221 | ] 222 | 223 | @server.call_tool() 224 | async def call_tool(name, arguments: dict) -> list[TextContent]: 225 | try: 226 | args = Fetch(**arguments) 227 | except ValueError as e: 228 | raise McpError(ErrorData(code=INVALID_PARAMS, message=str(e))) 229 | 230 | url = str(args.url) 231 | if not url: 232 | raise McpError(ErrorData(code=INVALID_PARAMS, message="URL is required")) 233 | 234 | if not ignore_robots_txt: 235 | await check_may_autonomously_fetch_url(url, user_agent_autonomous, proxy_url) 236 | 237 | content, prefix = await fetch_url( 238 | url, user_agent_autonomous, force_raw=args.raw, proxy_url=proxy_url 239 | ) 240 | original_length = len(content) 241 | if args.start_index >= original_length: 242 | 
content = "No more content available." 243 | else: 244 | truncated_content = content[args.start_index : args.start_index + args.max_length] 245 | if not truncated_content: 246 | content = "No more content available." 247 | else: 248 | content = truncated_content 249 | actual_content_length = len(truncated_content) 250 | remaining_content = original_length - (args.start_index + actual_content_length) 251 | # Only add the prompt to continue fetching if there is still remaining content 252 | if actual_content_length == args.max_length and remaining_content > 0: 253 | next_start = args.start_index + actual_content_length 254 | content += f"\n\nContent truncated. Call the fetch tool with a start_index of {next_start} to get more content." 255 | return [TextContent(type="text", text=f"{prefix}Contents of {url}:\n{content}")] 256 | 257 | @server.get_prompt() 258 | async def get_prompt(name: str, arguments: dict | None) -> GetPromptResult: 259 | if not arguments or "url" not in arguments: 260 | raise McpError(ErrorData(code=INVALID_PARAMS, message="URL is required")) 261 | 262 | url = arguments["url"] 263 | 264 | try: 265 | content, prefix = await fetch_url(url, user_agent_manual, proxy_url=proxy_url) 266 | # TODO: after SDK bug is addressed, don't catch the exception 267 | except McpError as e: 268 | return GetPromptResult( 269 | description=f"Failed to fetch {url}", 270 | messages=[ 271 | PromptMessage( 272 | role="user", 273 | content=TextContent(type="text", text=str(e)), 274 | ) 275 | ], 276 | ) 277 | return GetPromptResult( 278 | description=f"Contents of {url}", 279 | messages=[ 280 | PromptMessage( 281 | role="user", content=TextContent(type="text", text=prefix + content) 282 | ) 283 | ], 284 | ) 285 | 286 | options = server.create_initialization_options() 287 | async with stdio_server() as (read_stream, write_stream): 288 | await server.run(read_stream, write_stream, options, raise_exceptions=True) 289 | 
-------------------------------------------------------------------------------- /src/git/README.md: -------------------------------------------------------------------------------- 1 | # mcp-server-git: A git MCP server 2 | 3 | 4 | 5 | ## Overview 6 | 7 | A Model Context Protocol server for Git repository interaction and automation. This server provides tools to read, search, and manipulate Git repositories via Large Language Models. 8 | 9 | Please note that mcp-server-git is currently in early development. The functionality and available tools are subject to change and expansion as we continue to develop and improve the server. 10 | 11 | ### Tools 12 | 13 | 1. `git_status` 14 | - Shows the working tree status 15 | - Input: 16 | - `repo_path` (string): Path to Git repository 17 | - Returns: Current status of working directory as text output 18 | 19 | 2. `git_diff_unstaged` 20 | - Shows changes in working directory not yet staged 21 | - Inputs: 22 | - `repo_path` (string): Path to Git repository 23 | - `context_lines` (number, optional): Number of context lines to show (default: 3) 24 | - Returns: Diff output of unstaged changes 25 | 26 | 3. `git_diff_staged` 27 | - Shows changes that are staged for commit 28 | - Inputs: 29 | - `repo_path` (string): Path to Git repository 30 | - `context_lines` (number, optional): Number of context lines to show (default: 3) 31 | - Returns: Diff output of staged changes 32 | 33 | 4. `git_diff` 34 | - Shows differences between branches or commits 35 | - Inputs: 36 | - `repo_path` (string): Path to Git repository 37 | - `target` (string): Target branch or commit to compare with 38 | - `context_lines` (number, optional): Number of context lines to show (default: 3) 39 | - Returns: Diff output comparing current state with target 40 | 41 | 5. 
`git_commit` 42 | - Records changes to the repository 43 | - Inputs: 44 | - `repo_path` (string): Path to Git repository 45 | - `message` (string): Commit message 46 | - Returns: Confirmation with new commit hash 47 | 48 | 6. `git_add` 49 | - Adds file contents to the staging area 50 | - Inputs: 51 | - `repo_path` (string): Path to Git repository 52 | - `files` (string[]): Array of file paths to stage 53 | - Returns: Confirmation of staged files 54 | 55 | 7. `git_reset` 56 | - Unstages all staged changes 57 | - Input: 58 | - `repo_path` (string): Path to Git repository 59 | - Returns: Confirmation of reset operation 60 | 61 | 8. `git_log` 62 | - Shows the commit logs with optional date filtering 63 | - Inputs: 64 | - `repo_path` (string): Path to Git repository 65 | - `max_count` (number, optional): Maximum number of commits to show (default: 10) 66 | - `start_timestamp` (string, optional): Start timestamp for filtering commits. Accepts ISO 8601 format (e.g., '2024-01-15T14:30:25'), relative dates (e.g., '2 weeks ago', 'yesterday'), or absolute dates (e.g., '2024-01-15', 'Jan 15 2024') 67 | - `end_timestamp` (string, optional): End timestamp for filtering commits. Accepts ISO 8601 format (e.g., '2024-01-15T14:30:25'), relative dates (e.g., '2 weeks ago', 'yesterday'), or absolute dates (e.g., '2024-01-15', 'Jan 15 2024') 68 | - Returns: Array of commit entries with hash, author, date, and message 69 | 70 | 9. `git_create_branch` 71 | - Creates a new branch 72 | - Inputs: 73 | - `repo_path` (string): Path to Git repository 74 | - `branch_name` (string): Name of the new branch 75 | - `base_branch` (string, optional): Base branch to create from (defaults to current branch) 76 | - Returns: Confirmation of branch creation 77 | 10. `git_checkout` 78 | - Switches branches 79 | - Inputs: 80 | - `repo_path` (string): Path to Git repository 81 | - `branch_name` (string): Name of branch to checkout 82 | - Returns: Confirmation of branch switch 83 | 11. 
`git_show` 84 | - Shows the contents of a commit 85 | - Inputs: 86 | - `repo_path` (string): Path to Git repository 87 | - `revision` (string): The revision (commit hash, branch name, tag) to show 88 | - Returns: Contents of the specified commit 89 | 90 | 12. `git_branch` 91 | - List Git branches 92 | - Inputs: 93 | - `repo_path` (string): Path to the Git repository. 94 | - `branch_type` (string): Whether to list local branches ('local'), remote branches ('remote') or all branches('all'). 95 | - `contains` (string, optional): The commit sha that branch should contain. Do not pass anything to this param if no commit sha is specified 96 | - `not_contains` (string, optional): The commit sha that branch should NOT contain. Do not pass anything to this param if no commit sha is specified 97 | - Returns: List of branches 98 | 99 | ## Installation 100 | 101 | ### Using uv (recommended) 102 | 103 | When using [`uv`](https://docs.astral.sh/uv/) no specific installation is needed. We will 104 | use [`uvx`](https://docs.astral.sh/uv/guides/tools/) to directly run *mcp-server-git*. 105 | 106 | ### Using PIP 107 | 108 | Alternatively you can install `mcp-server-git` via pip: 109 | 110 | ``` 111 | pip install mcp-server-git 112 | ``` 113 | 114 | After installation, you can run it as a script using: 115 | 116 | ``` 117 | python -m mcp_server_git 118 | ``` 119 | 120 | ## Configuration 121 | 122 | ### Usage with Claude Desktop 123 | 124 | Add this to your `claude_desktop_config.json`: 125 | 126 |
127 | Using uvx 128 | 129 | ```json 130 | "mcpServers": { 131 | "git": { 132 | "command": "uvx", 133 | "args": ["mcp-server-git", "--repository", "path/to/git/repo"] 134 | } 135 | } 136 | ``` 137 |
138 | 139 |
140 | Using docker 141 | 142 | * Note: replace '/Users/username' with a path that you want to be accessible by this tool 143 | 144 | ```json 145 | "mcpServers": { 146 | "git": { 147 | "command": "docker", 148 | "args": ["run", "--rm", "-i", "--mount", "type=bind,src=/Users/username,dst=/Users/username", "mcp/git"] 149 | } 150 | } 151 | ``` 152 |
153 | 154 |
155 | Using pip installation 156 | 157 | ```json 158 | "mcpServers": { 159 | "git": { 160 | "command": "python", 161 | "args": ["-m", "mcp_server_git", "--repository", "path/to/git/repo"] 162 | } 163 | } 164 | ``` 165 |
166 | 167 | ### Usage with VS Code 168 | 169 | For quick installation, use one of the one-click install buttons below... 170 | 171 | [![Install with UV in VS Code](https://img.shields.io/badge/VS_Code-UV-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=git&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-git%22%5D%7D) [![Install with UV in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-UV-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=git&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-git%22%5D%7D&quality=insiders) 172 | 173 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=git&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22--rm%22%2C%22-i%22%2C%22--mount%22%2C%22type%3Dbind%2Csrc%3D%24%7BworkspaceFolder%7D%2Cdst%3D%2Fworkspace%22%2C%22mcp%2Fgit%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=git&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22--rm%22%2C%22-i%22%2C%22--mount%22%2C%22type%3Dbind%2Csrc%3D%24%7BworkspaceFolder%7D%2Cdst%3D%2Fworkspace%22%2C%22mcp%2Fgit%22%5D%7D&quality=insiders) 174 | 175 | For manual installation, you can configure the MCP server using one of these methods: 176 | 177 | **Method 1: User Configuration (Recommended)** 178 | Add the configuration to your user-level MCP configuration file. Open the Command Palette (`Ctrl + Shift + P`) and run `MCP: Open User Configuration`. This will open your user `mcp.json` file where you can add the server configuration. 
179 | 180 | **Method 2: Workspace Configuration** 181 | Alternatively, you can add the configuration to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 182 | 183 | > For more details about MCP configuration in VS Code, see the [official VS Code MCP documentation](https://code.visualstudio.com/docs/copilot/customization/mcp-servers). 184 | 185 | ```json 186 | { 187 | "servers": { 188 | "git": { 189 | "command": "uvx", 190 | "args": ["mcp-server-git"] 191 | } 192 | } 193 | } 194 | ``` 195 | 196 | For Docker installation: 197 | 198 | ```json 199 | { 200 | "mcp": { 201 | "servers": { 202 | "git": { 203 | "command": "docker", 204 | "args": [ 205 | "run", 206 | "--rm", 207 | "-i", 208 | "--mount", "type=bind,src=${workspaceFolder},dst=/workspace", 209 | "mcp/git" 210 | ] 211 | } 212 | } 213 | } 214 | } 215 | ``` 216 | 217 | ### Usage with [Zed](https://github.com/zed-industries/zed) 218 | 219 | Add to your Zed settings.json: 220 | 221 |
222 | Using uvx 223 | 224 | ```json 225 | "context_servers": { 226 | "mcp-server-git": { 227 | "command": { 228 | "path": "uvx", 229 | "args": ["mcp-server-git"] 230 | } 231 | } 232 | }, 233 | ``` 234 |
235 | 236 |
237 | Using pip installation 238 | 239 | ```json 240 | "context_servers": { 241 | "mcp-server-git": { 242 | "command": { 243 | "path": "python", 244 | "args": ["-m", "mcp_server_git"] 245 | } 246 | } 247 | }, 248 | ``` 249 |
250 | 251 | ### Usage with [Zencoder](https://zencoder.ai) 252 | 253 | 1. Go to the Zencoder menu (...) 254 | 2. From the dropdown menu, select `Agent Tools` 255 | 3. Click on the `Add Custom MCP` 256 | 4. Add the name (i.e. git) and server configuration from below, and make sure to hit the `Install` button 257 | 258 |
259 | Using uvx 260 | 261 | ```json 262 | { 263 | "command": "uvx", 264 | "args": ["mcp-server-git", "--repository", "path/to/git/repo"] 265 | } 266 | ``` 267 |
268 | 269 | ## Debugging 270 | 271 | You can use the MCP inspector to debug the server. For uvx installations: 272 | 273 | ``` 274 | npx @modelcontextprotocol/inspector uvx mcp-server-git 275 | ``` 276 | 277 | Or if you've installed the package in a specific directory or are developing on it: 278 | 279 | ``` 280 | cd path/to/servers/src/git 281 | npx @modelcontextprotocol/inspector uv run mcp-server-git 282 | ``` 283 | 284 | Running `tail -n 20 -f ~/Library/Logs/Claude/mcp*.log` will show the logs from the server and may 285 | help you debug any issues. 286 | 287 | ## Development 288 | 289 | If you are doing local development, there are two ways to test your changes: 290 | 291 | 1. Run the MCP inspector to test your changes. See [Debugging](#debugging) for run instructions. 292 | 293 | 2. Test using the Claude desktop app. Add the following to your `claude_desktop_config.json`: 294 | 295 | ### Docker 296 | 297 | ```json 298 | { 299 | "mcpServers": { 300 | "git": { 301 | "command": "docker", 302 | "args": [ 303 | "run", 304 | "--rm", 305 | "-i", 306 | "--mount", "type=bind,src=/Users/username/Desktop,dst=/projects/Desktop", 307 | "--mount", "type=bind,src=/path/to/other/allowed/dir,dst=/projects/other/allowed/dir,ro", 308 | "--mount", "type=bind,src=/path/to/file.txt,dst=/projects/path/to/file.txt", 309 | "mcp/git" 310 | ] 311 | } 312 | } 313 | } 314 | ``` 315 | 316 | ### UVX 317 | ```json 318 | { 319 | "mcpServers": { 320 | "git": { 321 | "command": "uv", 322 | "args": [ 323 | "--directory", 324 | "//mcp-servers/src/git", 325 | "run", 326 | "mcp-server-git" 327 | ] 328 | } 329 | } 330 | } 331 | ``` 332 | 333 | ## Build 334 | 335 | Docker build: 336 | 337 | ```bash 338 | cd src/git 339 | docker build -t mcp/git . 340 | ``` 341 | 342 | ## License 343 | 344 | This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. 
For more details, please see the LICENSE file in the project repository. 345 | --------------------------------------------------------------------------------