The response has been limited to 50k tokens of the smallest files in the repo. You can remove this limitation by removing the max tokens filter.
├── .gitattributes
├── .github
    ├── pull_request_template.md
    └── workflows
    │   ├── python.yml
    │   ├── release.yml
    │   └── typescript.yml
├── .gitignore
├── .npmrc
├── .vscode
    └── settings.json
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── SECURITY.md
├── package-lock.json
├── package.json
├── scripts
    └── release.py
├── src
    ├── everything
    │   ├── CLAUDE.md
    │   ├── Dockerfile
    │   ├── README.md
    │   ├── everything.ts
    │   ├── index.ts
    │   ├── instructions.md
    │   ├── package.json
    │   ├── sse.ts
    │   ├── stdio.ts
    │   ├── streamableHttp.ts
    │   └── tsconfig.json
    ├── fetch
    │   ├── .python-version
    │   ├── Dockerfile
    │   ├── LICENSE
    │   ├── README.md
    │   ├── pyproject.toml
    │   ├── src
    │   │   └── mcp_server_fetch
    │   │   │   ├── __init__.py
    │   │   │   ├── __main__.py
    │   │   │   └── server.py
    │   └── uv.lock
    ├── filesystem
    │   ├── Dockerfile
    │   ├── README.md
    │   ├── __tests__
    │   │   ├── path-utils.test.ts
    │   │   ├── path-validation.test.ts
    │   │   └── roots-utils.test.ts
    │   ├── index.ts
    │   ├── jest.config.cjs
    │   ├── package.json
    │   ├── path-utils.ts
    │   ├── path-validation.ts
    │   ├── roots-utils.ts
    │   └── tsconfig.json
    ├── git
    │   ├── .gitignore
    │   ├── .python-version
    │   ├── Dockerfile
    │   ├── LICENSE
    │   ├── README.md
    │   ├── pyproject.toml
    │   ├── src
    │   │   └── mcp_server_git
    │   │   │   ├── __init__.py
    │   │   │   ├── __main__.py
    │   │   │   ├── py.typed
    │   │   │   └── server.py
    │   ├── tests
    │   │   └── test_server.py
    │   └── uv.lock
    ├── memory
    │   ├── Dockerfile
    │   ├── README.md
    │   ├── index.ts
    │   ├── package.json
    │   └── tsconfig.json
    ├── sequentialthinking
    │   ├── Dockerfile
    │   ├── README.md
    │   ├── index.ts
    │   ├── package.json
    │   └── tsconfig.json
    └── time
    │   ├── .python-version
    │   ├── Dockerfile
    │   ├── README.md
    │   ├── pyproject.toml
    │   ├── src
    │       └── mcp_server_time
    │       │   ├── __init__.py
    │       │   ├── __main__.py
    │       │   └── server.py
    │   ├── test
    │       └── time_server_test.py
    │   └── uv.lock
└── tsconfig.json


/.gitattributes:
--------------------------------------------------------------------------------
1 | package-lock.json linguist-generated=true
2 | 


--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
 1 | <!-- Provide a brief description of your changes -->
 2 | 
 3 | ## Description
 4 | 
 5 | ## Server Details
 6 | <!-- If modifying an existing server, provide details -->
 7 | - Server: <!-- e.g., filesystem, github -->
 8 | - Changes to: <!-- e.g., tools, resources, prompts -->
 9 | 
10 | ## Motivation and Context
11 | <!-- Why is this change needed? What problem does it solve? -->
12 | 
13 | ## How Has This Been Tested?
14 | <!-- Have you tested this with an LLM client? Which scenarios were tested? -->
15 | 
16 | ## Breaking Changes
17 | <!-- Will users need to update their MCP client configurations? -->
18 | 
19 | ## Types of changes
20 | <!-- What types of changes does your code introduce? Put an `x` in all the boxes that apply: -->
21 | - [ ] Bug fix (non-breaking change which fixes an issue)
22 | - [ ] New feature (non-breaking change which adds functionality)
23 | - [ ] Breaking change (fix or feature that would cause existing functionality to change)
24 | - [ ] Documentation update
25 | 
26 | ## Checklist
27 | <!-- Go over all the following points, and put an `x` in all the boxes that apply. -->
28 | - [ ] I have read the [MCP Protocol Documentation](https://modelcontextprotocol.io)
 29 | - [ ] My changes follow MCP security best practices
30 | - [ ] I have updated the server's README accordingly
31 | - [ ] I have tested this with an LLM client
32 | - [ ] My code follows the repository's style guidelines
33 | - [ ] New and existing tests pass locally
34 | - [ ] I have added appropriate error handling
35 | - [ ] I have documented all environment variables and configuration options
36 | 
37 | ## Additional context
38 | <!-- Add any other context, implementation notes, or design decisions -->
39 | 


--------------------------------------------------------------------------------
/.github/workflows/python.yml:
--------------------------------------------------------------------------------
  1 | name: Python
  2 | 
  3 | on:
  4 |   push:
  5 |     branches:
  6 |       - main
  7 |   pull_request:
  8 |   release:
  9 |     types: [published]
 10 | 
 11 | jobs:
 12 |   detect-packages:
 13 |     runs-on: ubuntu-latest
 14 |     outputs:
 15 |       packages: ${{ steps.find-packages.outputs.packages }}
 16 |     steps:
 17 |       - uses: actions/checkout@v4
 18 | 
 19 |       - name: Find Python packages
 20 |         id: find-packages
 21 |         working-directory: src
 22 |         run: |
 23 |           PACKAGES=$(find . -name pyproject.toml -exec dirname {} \; | sed 's/^\.\///' | jq -R -s -c 'split("\n")[:-1]')
 24 |           echo "packages=$PACKAGES" >> $GITHUB_OUTPUT
 25 | 
 26 |   test:
 27 |     needs: [detect-packages]
 28 |     strategy:
 29 |       matrix:
 30 |         package: ${{ fromJson(needs.detect-packages.outputs.packages) }}
 31 |     name: Test ${{ matrix.package }}
 32 |     runs-on: ubuntu-latest
 33 |     steps:
 34 |       - uses: actions/checkout@v4
 35 | 
 36 |       - name: Install uv
 37 |         uses: astral-sh/setup-uv@v3
 38 | 
 39 |       - name: Set up Python
 40 |         uses: actions/setup-python@v5
 41 |         with:
 42 |           python-version-file: "src/${{ matrix.package }}/.python-version"
 43 | 
 44 |       - name: Install dependencies
 45 |         working-directory: src/${{ matrix.package }}
 46 |         run: uv sync --frozen --all-extras --dev
 47 | 
 48 |       - name: Check if tests exist
 49 |         id: check-tests
 50 |         working-directory: src/${{ matrix.package }}
 51 |         run: |
 52 |           if [ -d "tests" ] || [ -d "test" ] || grep -q "pytest" pyproject.toml; then
 53 |             echo "has-tests=true" >> $GITHUB_OUTPUT
 54 |           else
 55 |             echo "has-tests=false" >> $GITHUB_OUTPUT
 56 |           fi
 57 | 
 58 |       - name: Run tests
 59 |         if: steps.check-tests.outputs.has-tests == 'true'
 60 |         working-directory: src/${{ matrix.package }}
 61 |         run: uv run pytest
 62 | 
 63 |   build:
 64 |     needs: [detect-packages, test]
 65 |     strategy:
 66 |       matrix:
 67 |         package: ${{ fromJson(needs.detect-packages.outputs.packages) }}
 68 |     name: Build ${{ matrix.package }}
 69 |     runs-on: ubuntu-latest
 70 |     steps:
 71 |       - uses: actions/checkout@v4
 72 | 
 73 |       - name: Install uv
 74 |         uses: astral-sh/setup-uv@v3
 75 | 
 76 |       - name: Set up Python
 77 |         uses: actions/setup-python@v5
 78 |         with:
 79 |           python-version-file: "src/${{ matrix.package }}/.python-version"
 80 | 
 81 |       - name: Install dependencies
 82 |         working-directory: src/${{ matrix.package }}
 83 |         run: uv sync --frozen --all-extras --dev
 84 | 
 85 |       - name: Run pyright
 86 |         working-directory: src/${{ matrix.package }}
 87 |         run: uv run --frozen pyright
 88 | 
 89 |       - name: Build package
 90 |         working-directory: src/${{ matrix.package }}
 91 |         run: uv build
 92 | 
 93 |       - name: Upload artifacts
 94 |         uses: actions/upload-artifact@v4
 95 |         with:
 96 |           name: dist-${{ matrix.package }}
 97 |           path: src/${{ matrix.package }}/dist/
 98 | 
 99 |   publish:
100 |     runs-on: ubuntu-latest
101 |     needs: [build, detect-packages]
102 |     if: github.event_name == 'release'
103 | 
104 |     strategy:
105 |       matrix:
106 |         package: ${{ fromJson(needs.detect-packages.outputs.packages) }}
107 |     name: Publish ${{ matrix.package }}
108 | 
109 |     environment: release
110 |     permissions:
111 |       id-token: write # Required for trusted publishing
112 | 
113 |     steps:
114 |       - name: Download artifacts
115 |         uses: actions/download-artifact@v4
116 |         with:
117 |           name: dist-${{ matrix.package }}
118 |           path: dist/
119 | 
120 |       - name: Publish package to PyPI
121 |         uses: pypa/gh-action-pypi-publish@release/v1
122 | 


--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
  1 | name: Automatic Release Creation
  2 | 
  3 | on:
  4 |   workflow_dispatch:
  5 |   schedule:
  6 |     - cron: '0 10 * * *'
  7 | 
  8 | jobs:
  9 |   create-metadata:
 10 |     runs-on: ubuntu-latest
 11 |     outputs:
 12 |       hash: ${{ steps.last-release.outputs.hash }}
 13 |       version: ${{ steps.create-version.outputs.version}}
 14 |       npm_packages: ${{ steps.create-npm-packages.outputs.npm_packages}}
 15 |       pypi_packages: ${{ steps.create-pypi-packages.outputs.pypi_packages}}
 16 |     steps:
 17 |       - uses: actions/checkout@v4
 18 |         with:
 19 |           fetch-depth: 0
 20 | 
 21 |       - name: Get last release hash
 22 |         id: last-release
 23 |         run: |
 24 |           HASH=$(git rev-list --tags --max-count=1); HASH="${HASH:-HEAD~1}"  # rev-list exits 0 with empty output when no tags exist, so an `||` fallback never fires
 25 |           echo "hash=${HASH}" >> $GITHUB_OUTPUT
 26 |           echo "Using last release hash: ${HASH}"
 27 | 
 28 |       - name: Install uv
 29 |         uses: astral-sh/setup-uv@v5
 30 | 
 31 |       - name: Create version name
 32 |         id: create-version
 33 |         run: |
 34 |           VERSION=$(uv run --script scripts/release.py generate-version)
 35 |           echo "version $VERSION"
 36 |           echo "version=$VERSION" >> $GITHUB_OUTPUT
 37 | 
 38 |       - name: Create notes
 39 |         run: |
 40 |           HASH="${{ steps.last-release.outputs.hash }}"
 41 |           uv run --script scripts/release.py generate-notes --directory src/ $HASH > RELEASE_NOTES.md
 42 |           cat RELEASE_NOTES.md
 43 | 
 44 |       - name: Release notes
 45 |         uses: actions/upload-artifact@v4
 46 |         with:
 47 |           name: release-notes
 48 |           path: RELEASE_NOTES.md
 49 | 
 50 |       - name: Create python matrix
 51 |         id: create-pypi-packages
 52 |         run: |
 53 |           HASH="${{ steps.last-release.outputs.hash }}"
 54 |           PYPI=$(uv run --script scripts/release.py generate-matrix --pypi --directory src $HASH)
 55 |           echo "pypi_packages $PYPI"
 56 |           echo "pypi_packages=$PYPI" >> $GITHUB_OUTPUT
 57 | 
 58 |       - name: Create npm matrix
 59 |         id: create-npm-packages
 60 |         run: |
 61 |           HASH="${{ steps.last-release.outputs.hash }}"
 62 |           NPM=$(uv run --script scripts/release.py generate-matrix --npm --directory src $HASH)
 63 |           echo "npm_packages $NPM"
 64 |           echo "npm_packages=$NPM" >> $GITHUB_OUTPUT
 65 | 
 66 |   update-packages:
 67 |     needs: [create-metadata]
 68 |     if: ${{ needs.create-metadata.outputs.npm_packages != '[]' || needs.create-metadata.outputs.pypi_packages != '[]' }}
 69 |     runs-on: ubuntu-latest
 70 |     environment: release
 71 |     outputs:
 72 |       changes_made: ${{ steps.commit.outputs.changes_made }}
 73 |     steps:
 74 |       - uses: actions/checkout@v4
 75 |         with:
 76 |           fetch-depth: 0
 77 | 
 78 |       - name: Install uv
 79 |         uses: astral-sh/setup-uv@v5
 80 | 
 81 |       - name: Update packages
 82 |         run: |
 83 |           HASH="${{ needs.create-metadata.outputs.hash }}"
 84 |           uv run --script scripts/release.py update-packages --directory src/ $HASH
 85 | 
 86 |       - name: Configure git
 87 |         run: |
 88 |           git config --global user.name "GitHub Actions"
 89 |           git config --global user.email "actions@github.com"
 90 | 
 91 |       - name: Commit changes
 92 |         id: commit
 93 |         run: |
 94 |           VERSION="${{ needs.create-metadata.outputs.version }}"
 95 |           git add -u
 96 |           if git diff-index --quiet HEAD; then
 97 |             echo "changes_made=false" >> $GITHUB_OUTPUT
 98 |           else
 99 |             git commit -m 'Automatic update of packages'
100 |             git tag -a "$VERSION" -m "Release $VERSION"
101 |             git push origin "$VERSION"
102 |             echo "changes_made=true" >> $GITHUB_OUTPUT
103 |           fi
104 | 
105 |   publish-pypi:
106 |     needs: [update-packages, create-metadata]
107 |     strategy:
108 |       fail-fast: false
109 |       matrix:
110 |         package: ${{ fromJson(needs.create-metadata.outputs.pypi_packages) }}
111 |     name: Build ${{ matrix.package }}
112 |     environment: release
113 |     permissions:
114 |       id-token: write # Required for trusted publishing
115 |     runs-on: ubuntu-latest
116 |     steps:
117 |       - uses: actions/checkout@v4
118 |         with:
119 |           ref: ${{ needs.create-metadata.outputs.version }}
120 | 
121 |       - name: Install uv
122 |         uses: astral-sh/setup-uv@v5
123 | 
124 |       - name: Set up Python
125 |         uses: actions/setup-python@v5
126 |         with:
127 |           python-version-file: "src/${{ matrix.package }}/.python-version"
128 | 
129 |       - name: Install dependencies
130 |         working-directory: src/${{ matrix.package }}
131 |         run: uv sync --frozen --all-extras --dev
132 | 
133 |       - name: Run pyright
134 |         working-directory: src/${{ matrix.package }}
135 |         run: uv run --frozen pyright
136 | 
137 |       - name: Build package
138 |         working-directory: src/${{ matrix.package }}
139 |         run: uv build
140 | 
141 |       - name: Publish package to PyPI
142 |         uses: pypa/gh-action-pypi-publish@release/v1
143 |         with:
144 |           packages-dir: src/${{ matrix.package }}/dist
145 | 
146 |   publish-npm:
147 |     needs: [update-packages, create-metadata]
148 |     strategy:
149 |       fail-fast: false
150 |       matrix:
151 |         package: ${{ fromJson(needs.create-metadata.outputs.npm_packages) }}
152 |     name: Build ${{ matrix.package }}
153 |     environment: release
154 |     runs-on: ubuntu-latest
155 |     steps:
156 |       - uses: actions/checkout@v4
157 |         with:
158 |           ref: ${{ needs.create-metadata.outputs.version }}
159 | 
160 |       - uses: actions/setup-node@v4
161 |         with:
162 |           node-version: 22
163 |           cache: npm
164 |           registry-url: 'https://registry.npmjs.org'
165 | 
166 |       - name: Install dependencies
167 |         working-directory: src/${{ matrix.package }}
168 |         run: npm ci
169 | 
170 |       - name: Check if version exists on npm
171 |         working-directory: src/${{ matrix.package }}
172 |         run: |
173 |           VERSION=$(jq -r .version package.json)
174 |           if npm view --json | jq -e --arg version "$VERSION" '[.[]][0].versions | contains([$version])'; then
175 |             echo "Version $VERSION already exists on npm"
176 |             exit 1
177 |           fi
178 |           echo "Version $VERSION is new, proceeding with publish"
179 | 
180 |       - name: Build package
181 |         working-directory: src/${{ matrix.package }}
182 |         run: npm run build
183 | 
184 |       - name: Publish package
185 |         working-directory: src/${{ matrix.package }}
186 |         run: |
187 |           npm publish --access public
188 |         env:
189 |           NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
190 | 
191 |   create-release:
192 |     needs: [update-packages, create-metadata, publish-pypi, publish-npm]
193 |     if: needs.update-packages.outputs.changes_made == 'true'
194 |     runs-on: ubuntu-latest
195 |     environment: release
196 |     permissions:
197 |       contents: write
198 |     steps:
199 |       - uses: actions/checkout@v4
200 | 
201 |       - name: Download release notes
202 |         uses: actions/download-artifact@v4
203 |         with:
204 |           name: release-notes
205 | 
206 |       - name: Create release
207 |         env:
208 |           GH_TOKEN: ${{ secrets.GITHUB_TOKEN}}
209 |         run: |
210 |           VERSION="${{ needs.create-metadata.outputs.version }}"
211 |           gh release create "$VERSION" \
212 |             --title "Release $VERSION" \
213 |             --notes-file RELEASE_NOTES.md
214 | 
215 | 


--------------------------------------------------------------------------------
/.github/workflows/typescript.yml:
--------------------------------------------------------------------------------
  1 | name: TypeScript
  2 | 
  3 | on:
  4 |   push:
  5 |     branches:
  6 |       - main
  7 |   pull_request:
  8 |   release:
  9 |     types: [published]
 10 | 
 11 | jobs:
 12 |   detect-packages:
 13 |     runs-on: ubuntu-latest
 14 |     outputs:
 15 |       packages: ${{ steps.find-packages.outputs.packages }}
 16 |     steps:
 17 |       - uses: actions/checkout@v4
 18 |       - name: Find JS packages
 19 |         id: find-packages
 20 |         working-directory: src
 21 |         run: |
 22 |           PACKAGES=$(find . -name package.json -not -path "*/node_modules/*" -exec dirname {} \; | sed 's/^\.\///' | jq -R -s -c 'split("\n")[:-1]')
 23 |           echo "packages=$PACKAGES" >> $GITHUB_OUTPUT
 24 | 
 25 |   test:
 26 |     needs: [detect-packages]
 27 |     strategy:
 28 |       matrix:
 29 |         package: ${{ fromJson(needs.detect-packages.outputs.packages) }}
 30 |     name: Test ${{ matrix.package }}
 31 |     runs-on: ubuntu-latest
 32 |     steps:
 33 |       - uses: actions/checkout@v4
 34 | 
 35 |       - uses: actions/setup-node@v4
 36 |         with:
 37 |           node-version: 22
 38 |           cache: npm
 39 | 
 40 |       - name: Install dependencies
 41 |         working-directory: src/${{ matrix.package }}
 42 |         run: npm ci
 43 | 
 44 |       - name: Check if tests exist
 45 |         id: check-tests
 46 |         working-directory: src/${{ matrix.package }}
 47 |         run: |
 48 |           if jq -e '.scripts.test' package.json >/dev/null; then  # probe for a test script WITHOUT running it; executing `npm run test` here let failing suites report has-tests=false and skip CI
 49 |             echo "has-tests=true" >> $GITHUB_OUTPUT
 50 |           else
 51 |             echo "has-tests=false" >> $GITHUB_OUTPUT
 52 |           fi
 53 |         continue-on-error: true
 54 | 
 55 |       - name: Run tests
 56 |         if: steps.check-tests.outputs.has-tests == 'true'
 57 |         working-directory: src/${{ matrix.package }}
 58 |         run: npm test
 59 | 
 60 |   build:
 61 |     needs: [detect-packages, test]
 62 |     strategy:
 63 |       matrix:
 64 |         package: ${{ fromJson(needs.detect-packages.outputs.packages) }}
 65 |     name: Build ${{ matrix.package }}
 66 |     runs-on: ubuntu-latest
 67 |     steps:
 68 |       - uses: actions/checkout@v4
 69 | 
 70 |       - uses: actions/setup-node@v4
 71 |         with:
 72 |           node-version: 22
 73 |           cache: npm
 74 | 
 75 |       - name: Install dependencies
 76 |         working-directory: src/${{ matrix.package }}
 77 |         run: npm ci
 78 | 
 79 |       - name: Build package
 80 |         working-directory: src/${{ matrix.package }}
 81 |         run: npm run build
 82 | 
 83 |   publish:
 84 |     runs-on: ubuntu-latest
 85 |     needs: [build, detect-packages]
 86 |     if: github.event_name == 'release'
 87 |     environment: release
 88 | 
 89 |     strategy:
 90 |       matrix:
 91 |         package: ${{ fromJson(needs.detect-packages.outputs.packages) }}
 92 |     name: Publish ${{ matrix.package }}
 93 | 
 94 |     permissions:
 95 |       contents: read
 96 |       id-token: write
 97 | 
 98 |     steps:
 99 |       - uses: actions/checkout@v4
100 |       - uses: actions/setup-node@v4
101 |         with:
102 |           node-version: 22
103 |           cache: npm
104 |           registry-url: "https://registry.npmjs.org"
105 | 
106 |       - name: Install dependencies
107 |         working-directory: src/${{ matrix.package }}
108 |         run: npm ci
109 | 
110 |       - name: Publish package
111 |         working-directory: src/${{ matrix.package }}
112 |         run: npm publish --access public
113 |         env:
114 |           NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
115 | 


--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
  1 | # Logs
  2 | logs
  3 | *.log
  4 | npm-debug.log*
  5 | yarn-debug.log*
  6 | yarn-error.log*
  7 | lerna-debug.log*
  8 | .pnpm-debug.log*
  9 | 
 10 | # Diagnostic reports (https://nodejs.org/api/report.html)
 11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
 12 | 
 13 | # Runtime data
 14 | pids
 15 | *.pid
 16 | *.seed
 17 | *.pid.lock
 18 | 
 19 | # Directory for instrumented libs generated by jscoverage/JSCover
 20 | lib-cov
 21 | 
 22 | # Coverage directory used by tools like istanbul
 23 | coverage
 24 | *.lcov
 25 | 
 26 | # nyc test coverage
 27 | .nyc_output
 28 | 
 29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
 30 | .grunt
 31 | 
 32 | # Bower dependency directory (https://bower.io/)
 33 | bower_components
 34 | 
 35 | # node-waf configuration
 36 | .lock-wscript
 37 | 
 38 | # Compiled binary addons (https://nodejs.org/api/addons.html)
 39 | build/Release
 40 | 
 41 | # Dependency directories
 42 | node_modules/
 43 | jspm_packages/
 44 | 
 45 | # Snowpack dependency directory (https://snowpack.dev/)
 46 | web_modules/
 47 | 
 48 | # TypeScript cache
 49 | *.tsbuildinfo
 50 | 
 51 | # Optional npm cache directory
 52 | .npm
 53 | 
 54 | # Optional eslint cache
 55 | .eslintcache
 56 | 
 57 | # Optional stylelint cache
 58 | .stylelintcache
 59 | 
 60 | # Microbundle cache
 61 | .rpt2_cache/
 62 | .rts2_cache_cjs/
 63 | .rts2_cache_es/
 64 | .rts2_cache_umd/
 65 | 
 66 | # Optional REPL history
 67 | .node_repl_history
 68 | 
 69 | # Output of 'npm pack'
 70 | *.tgz
 71 | 
 72 | # Yarn Integrity file
 73 | .yarn-integrity
 74 | 
 75 | # dotenv environment variable files
 76 | .env
 77 | .env.development.local
 78 | .env.test.local
 79 | .env.production.local
 80 | .env.local
 81 | 
 82 | # parcel-bundler cache (https://parceljs.org/)
 83 | .cache
 84 | .parcel-cache
 85 | 
 86 | # Next.js build output
 87 | .next
 88 | out
 89 | 
 90 | # Nuxt.js build / generate output
 91 | .nuxt
 92 | dist
 93 | 
 94 | # Gatsby files
 95 | .cache/
 96 | # Comment in the public line in if your project uses Gatsby and not Next.js
 97 | # https://nextjs.org/blog/next-9-1#public-directory-support
 98 | # public
 99 | 
100 | # vuepress build output
101 | .vuepress/dist
102 | 
103 | # vuepress v2.x temp and cache directory
104 | .temp
105 | .cache
106 | 
107 | # Docusaurus cache and generated files
108 | .docusaurus
109 | 
110 | # Serverless directories
111 | .serverless/
112 | 
113 | # FuseBox cache
114 | .fusebox/
115 | 
116 | # DynamoDB Local files
117 | .dynamodb/
118 | 
119 | # TernJS port file
120 | .tern-port
121 | 
122 | # Stores VSCode versions used for testing VSCode extensions
123 | .vscode-test
124 | 
125 | # yarn v2
126 | .yarn/cache
127 | .yarn/unplugged
128 | .yarn/build-state.yml
129 | .yarn/install-state.gz
130 | .pnp.*
131 | 
132 | build/
133 | 
134 | gcp-oauth.keys.json
135 | .*-server-credentials.json
136 | 
137 | # Byte-compiled / optimized / DLL files
138 | __pycache__/
139 | *.py[cod]
140 | *$py.class
141 | 
142 | # C extensions
143 | *.so
144 | 
145 | # Distribution / packaging
146 | .Python
147 | build/
148 | develop-eggs/
149 | dist/
150 | downloads/
151 | eggs/
152 | .eggs/
153 | lib/
154 | lib64/
155 | parts/
156 | sdist/
157 | var/
158 | wheels/
159 | share/python-wheels/
160 | *.egg-info/
161 | .installed.cfg
162 | *.egg
163 | MANIFEST
164 | 
165 | # PyInstaller
166 | #  Usually these files are written by a python script from a template
167 | #  before PyInstaller builds the exe, so as to inject date/other infos into it.
168 | *.manifest
169 | *.spec
170 | 
171 | # Installer logs
172 | pip-log.txt
173 | pip-delete-this-directory.txt
174 | 
175 | # Unit test / coverage reports
176 | htmlcov/
177 | .tox/
178 | .nox/
179 | .coverage
180 | .coverage.*
181 | .cache
182 | nosetests.xml
183 | coverage.xml
184 | *.cover
185 | *.py,cover
186 | .hypothesis/
187 | .pytest_cache/
188 | cover/
189 | 
190 | # Translations
191 | *.mo
192 | *.pot
193 | 
194 | # Django stuff:
195 | *.log
196 | local_settings.py
197 | db.sqlite3
198 | db.sqlite3-journal
199 | 
200 | # Flask stuff:
201 | instance/
202 | .webassets-cache
203 | 
204 | # Scrapy stuff:
205 | .scrapy
206 | 
207 | # Sphinx documentation
208 | docs/_build/
209 | 
210 | # PyBuilder
211 | .pybuilder/
212 | target/
213 | 
214 | # Jupyter Notebook
215 | .ipynb_checkpoints
216 | 
217 | # IPython
218 | profile_default/
219 | ipython_config.py
220 | 
221 | # pyenv
222 | #   For a library or package, you might want to ignore these files since the code is
223 | #   intended to run in multiple environments; otherwise, check them in:
224 | # .python-version
225 | 
226 | # pipenv
227 | #   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
228 | #   However, in case of collaboration, if having platform-specific dependencies or dependencies
229 | #   having no cross-platform support, pipenv may install dependencies that don't work, or not
230 | #   install all needed dependencies.
231 | #Pipfile.lock
232 | 
233 | # poetry
234 | #   Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
235 | #   This is especially recommended for binary packages to ensure reproducibility, and is more
236 | #   commonly ignored for libraries.
237 | #   https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
238 | #poetry.lock
239 | 
240 | # pdm
241 | #   Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
242 | #pdm.lock
243 | #   pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
244 | #   in version control.
245 | #   https://pdm.fming.dev/latest/usage/project/#working-with-version-control
246 | .pdm.toml
247 | .pdm-python
248 | .pdm-build/
249 | 
250 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
251 | __pypackages__/
252 | 
253 | # Celery stuff
254 | celerybeat-schedule
255 | celerybeat.pid
256 | 
257 | # SageMath parsed files
258 | *.sage.py
259 | 
260 | # Environments
261 | .env
262 | .venv
263 | env/
264 | venv/
265 | ENV/
266 | env.bak/
267 | venv.bak/
268 | 
269 | # Spyder project settings
270 | .spyderproject
271 | .spyproject
272 | 
273 | # Rope project settings
274 | .ropeproject
275 | 
276 | # mkdocs documentation
277 | /site
278 | 
279 | # mypy
280 | .mypy_cache/
281 | .dmypy.json
282 | dmypy.json
283 | 
284 | # Pyre type checker
285 | .pyre/
286 | 
287 | # pytype static type analyzer
288 | .pytype/
289 | 
290 | # Cython debug symbols
291 | cython_debug/
292 | 
293 | .DS_Store
294 | 
295 | # PyCharm
296 | #  JetBrains specific template is maintained in a separate JetBrains.gitignore that can
297 | #  be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
298 | #  and can be added to the global gitignore or merged into this file.  For a more nuclear
299 | #  option (not recommended) you can uncomment the following to ignore the entire idea folder.
300 | #.idea/
301 | 


--------------------------------------------------------------------------------
/.npmrc:
--------------------------------------------------------------------------------
1 | registry="https://registry.npmjs.org/"
2 | @modelcontextprotocol:registry="https://registry.npmjs.org/"
3 | 


--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {}


--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
  1 | # Contributor Covenant Code of Conduct
  2 | 
  3 | ## Our Pledge
  4 | 
  5 | We as members, contributors, and leaders pledge to make participation in our
  6 | community a harassment-free experience for everyone, regardless of age, body
  7 | size, visible or invisible disability, ethnicity, sex characteristics, gender
  8 | identity and expression, level of experience, education, socio-economic status,
  9 | nationality, personal appearance, race, religion, or sexual identity
 10 | and orientation.
 11 | 
 12 | We pledge to act and interact in ways that contribute to an open, welcoming,
 13 | diverse, inclusive, and healthy community.
 14 | 
 15 | ## Our Standards
 16 | 
 17 | Examples of behavior that contributes to a positive environment for our
 18 | community include:
 19 | 
 20 | * Demonstrating empathy and kindness toward other people
 21 | * Being respectful of differing opinions, viewpoints, and experiences
 22 | * Giving and gracefully accepting constructive feedback
 23 | * Accepting responsibility and apologizing to those affected by our mistakes,
 24 |   and learning from the experience
 25 | * Focusing on what is best not just for us as individuals, but for the
 26 |   overall community
 27 | 
 28 | Examples of unacceptable behavior include:
 29 | 
 30 | * The use of sexualized language or imagery, and sexual attention or
 31 |   advances of any kind
 32 | * Trolling, insulting or derogatory comments, and personal or political attacks
 33 | * Public or private harassment
 34 | * Publishing others' private information, such as a physical or email
 35 |   address, without their explicit permission
 36 | * Other conduct which could reasonably be considered inappropriate in a
 37 |   professional setting
 38 | 
 39 | ## Enforcement Responsibilities
 40 | 
 41 | Community leaders are responsible for clarifying and enforcing our standards of
 42 | acceptable behavior and will take appropriate and fair corrective action in
 43 | response to any behavior that they deem inappropriate, threatening, offensive,
 44 | or harmful.
 45 | 
 46 | Community leaders have the right and responsibility to remove, edit, or reject
 47 | comments, commits, code, wiki edits, issues, and other contributions that are
 48 | not aligned to this Code of Conduct, and will communicate reasons for moderation
 49 | decisions when appropriate.
 50 | 
 51 | ## Scope
 52 | 
 53 | This Code of Conduct applies within all community spaces, and also applies when
 54 | an individual is officially representing the community in public spaces.
 55 | Examples of representing our community include using an official e-mail address,
 56 | posting via an official social media account, or acting as an appointed
 57 | representative at an online or offline event.
 58 | 
 59 | ## Enforcement
 60 | 
 61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
 62 | reported to the community leaders responsible for enforcement at
 63 | mcp-coc@anthropic.com.
 64 | All complaints will be reviewed and investigated promptly and fairly.
 65 | 
 66 | All community leaders are obligated to respect the privacy and security of the
 67 | reporter of any incident.
 68 | 
 69 | ## Enforcement Guidelines
 70 | 
 71 | Community leaders will follow these Community Impact Guidelines in determining
 72 | the consequences for any action they deem in violation of this Code of Conduct:
 73 | 
 74 | ### 1. Correction
 75 | 
 76 | **Community Impact**: Use of inappropriate language or other behavior deemed
 77 | unprofessional or unwelcome in the community.
 78 | 
 79 | **Consequence**: A private, written warning from community leaders, providing
 80 | clarity around the nature of the violation and an explanation of why the
 81 | behavior was inappropriate. A public apology may be requested.
 82 | 
 83 | ### 2. Warning
 84 | 
 85 | **Community Impact**: A violation through a single incident or series
 86 | of actions.
 87 | 
 88 | **Consequence**: A warning with consequences for continued behavior. No
 89 | interaction with the people involved, including unsolicited interaction with
 90 | those enforcing the Code of Conduct, for a specified period of time. This
 91 | includes avoiding interactions in community spaces as well as external channels
 92 | like social media. Violating these terms may lead to a temporary or
 93 | permanent ban.
 94 | 
 95 | ### 3. Temporary Ban
 96 | 
 97 | **Community Impact**: A serious violation of community standards, including
 98 | sustained inappropriate behavior.
 99 | 
100 | **Consequence**: A temporary ban from any sort of interaction or public
101 | communication with the community for a specified period of time. No public or
102 | private interaction with the people involved, including unsolicited interaction
103 | with those enforcing the Code of Conduct, is allowed during this period.
104 | Violating these terms may lead to a permanent ban.
105 | 
106 | ### 4. Permanent Ban
107 | 
108 | **Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
110 | individual, or aggression toward or disparagement of classes of individuals.
111 | 
112 | **Consequence**: A permanent ban from any sort of public interaction within
113 | the community.
114 | 
115 | ## Attribution
116 | 
117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
118 | version 2.0, available at
119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
120 | 
121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct
122 | enforcement ladder](https://github.com/mozilla/diversity).
123 | 
124 | [homepage]: https://www.contributor-covenant.org
125 | 
126 | For answers to common questions about this code of conduct, see the FAQ at
127 | https://www.contributor-covenant.org/faq. Translations are available at
128 | https://www.contributor-covenant.org/translations.
129 | 


--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
  1 | # Contributing to MCP Servers
  2 | 
  3 | Thank you for your interest in contributing to the Model Context Protocol (MCP) servers! This document provides guidelines and instructions for contributing.
  4 | 
  5 | ## Types of Contributions
  6 | 
  7 | ### 1. New Servers
  8 | 
  9 | The repository contains reference implementations, as well as a list of community servers.
 10 | We generally don't accept new servers into the repository. We do accept pull requests to the [README.md](./README.md)
 11 | adding a reference to your servers.
 12 | 
 13 | Please keep lists in alphabetical order to minimize merge conflicts when adding new items.
 14 | 
 15 | - Check the [modelcontextprotocol.io](https://modelcontextprotocol.io) documentation
 16 | - Ensure your server doesn't duplicate existing functionality
 17 | - Consider whether your server would be generally useful to others
 18 | - Follow [security best practices](https://modelcontextprotocol.io/docs/concepts/transports#security-considerations) from the MCP documentation
 19 | - Create a PR adding a link to your server to the [README.md](./README.md).
 20 | 
 21 | ### 2. Improvements to Existing Servers
 22 | Enhancements to existing servers are welcome! This includes:
 23 | 
 24 | - Bug fixes
 25 | - Performance improvements
 26 | - New features
 27 | - Security enhancements
 28 | 
 29 | ### 3. Documentation
 30 | Documentation improvements are always welcome:
 31 | 
 32 | - Fixing typos or unclear instructions
 33 | - Adding examples
 34 | - Improving setup instructions
 35 | - Adding troubleshooting guides
 36 | 
 37 | ## Getting Started
 38 | 
 39 | 1. Fork the repository
 40 | 2. Clone your fork:
 41 |    ```bash
 42 |    git clone https://github.com/your-username/servers.git
 43 |    ```
 44 | 3. Add the upstream remote:
 45 |    ```bash
 46 |    git remote add upstream https://github.com/modelcontextprotocol/servers.git
 47 |    ```
 48 | 4. Create a branch:
 49 |    ```bash
 50 |    git checkout -b my-feature
 51 |    ```
 52 | 
 53 | ## Development Guidelines
 54 | 
 55 | ### Code Style
 56 | - Follow the existing code style in the repository
 57 | - Include appropriate type definitions
 58 | - Add comments for complex logic
 59 | 
 60 | ### Documentation
 61 | - Include a detailed README.md in your server directory
 62 | - Document all configuration options
 63 | - Provide setup instructions
 64 | - Include usage examples
 65 | 
 66 | ### Security
 67 | - Follow security best practices
 68 | - Implement proper input validation
 69 | - Handle errors appropriately
 70 | - Document security considerations
 71 | 
 72 | ## Submitting Changes
 73 | 
 74 | 1. Commit your changes:
 75 |    ```bash
 76 |    git add .
 77 |    git commit -m "Description of changes"
 78 |    ```
 79 | 2. Push to your fork:
 80 |    ```bash
 81 |    git push origin my-feature
 82 |    ```
 83 | 3. Create a Pull Request through GitHub
 84 | 
 85 | ### Pull Request Guidelines
 86 | 
 87 | - Thoroughly test your changes
 88 | - Fill out the pull request template completely
 89 | - Link any related issues
 90 | - Provide clear description of changes
 91 | - Include any necessary documentation updates
 92 | - Add screenshots for UI changes
 93 | - List any breaking changes
 94 | 
 95 | ## Community
 96 | 
 97 | - Participate in [GitHub Discussions](https://github.com/orgs/modelcontextprotocol/discussions)
 98 | - Follow the [Code of Conduct](CODE_OF_CONDUCT.md)
 99 | 
100 | ## Questions?
101 | 
102 | - Check the [documentation](https://modelcontextprotocol.io)
103 | - Ask in GitHub Discussions
104 | 
105 | Thank you for contributing to MCP Servers!
106 | 


--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
 1 | MIT License
 2 | 
 3 | Copyright (c) 2024 Anthropic, PBC
 4 | 
 5 | Permission is hereby granted, free of charge, to any person obtaining a copy
 6 | of this software and associated documentation files (the "Software"), to deal
 7 | in the Software without restriction, including without limitation the rights
 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | 


--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
 1 | # Security Policy
 2 | Thank you for helping us keep our MCP servers secure.
 3 | 
 4 | The **reference servers** in this repo are maintained by [Anthropic](https://www.anthropic.com/) as part of the Model Context Protocol project.
 5 | 
 6 | The security of our systems and user data is Anthropic’s top priority. We appreciate the work of security researchers acting in good faith in identifying and reporting potential vulnerabilities.
 7 | 
 8 | ## Vulnerability Disclosure Program
 9 | 
10 | Our Vulnerability Program guidelines are defined on our [HackerOne program page](https://hackerone.com/anthropic-vdp). We ask that any validated vulnerability in this functionality be reported through the [submission form](https://hackerone.com/anthropic-vdp/reports/new?type=team&report_type=vulnerability).
11 | 


--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "name": "@modelcontextprotocol/servers",
 3 |   "private": true,
 4 |   "version": "0.6.2",
 5 |   "description": "Model Context Protocol servers",
 6 |   "license": "MIT",
 7 |   "author": "Anthropic, PBC (https://anthropic.com)",
 8 |   "homepage": "https://modelcontextprotocol.io",
 9 |   "bugs": "https://github.com/modelcontextprotocol/servers/issues",
10 |   "type": "module",
11 |   "workspaces": [
12 |     "src/*"
13 |   ],
14 |   "files": [],
15 |   "scripts": {
16 |     "build": "npm run build --workspaces",
17 |     "watch": "npm run watch --workspaces",
18 |     "publish-all": "npm publish --workspaces --access public",
19 |     "link-all": "npm link --workspaces"
20 |   },
21 |   "dependencies": {
22 |     "@modelcontextprotocol/server-everything": "*",
23 |     "@modelcontextprotocol/server-memory": "*",
24 |     "@modelcontextprotocol/server-filesystem": "*",
25 |     "@modelcontextprotocol/server-sequential-thinking": "*"
26 |   }
27 | }
28 | 


--------------------------------------------------------------------------------
/scripts/release.py:
--------------------------------------------------------------------------------
  1 | #!/usr/bin/env uv run --script
  2 | # /// script
  3 | # requires-python = ">=3.12"
  4 | # dependencies = [
  5 | #     "click>=8.1.8",
  6 | #     "tomlkit>=0.13.2"
  7 | # ]
  8 | # ///
  9 | import sys
 10 | import re
 11 | import click
 12 | from pathlib import Path
 13 | import json
 14 | import tomlkit
 15 | import datetime
 16 | import subprocess
 17 | from dataclasses import dataclass
 18 | from typing import Any, Iterator, NewType, Protocol
 19 | 
 20 | 
 21 | Version = NewType("Version", str)
 22 | GitHash = NewType("GitHash", str)
 23 | 
 24 | 
 25 | class GitHashParamType(click.ParamType):
 26 |     name = "git_hash"
 27 | 
 28 |     def convert(
 29 |         self, value: Any, param: click.Parameter | None, ctx: click.Context | None
 30 |     ) -> GitHash | None:
 31 |         if value is None:
 32 |             return None
 33 | 
 34 |         if not (8 <= len(value) <= 40):
 35 |             self.fail(f"Git hash must be between 8 and 40 characters, got {len(value)}")
 36 | 
 37 |         if not re.match(r"^[0-9a-fA-F]+
quot;, value):
 38 |             self.fail("Git hash must contain only hex digits (0-9, a-f)")
 39 | 
 40 |         try:
 41 |             # Verify hash exists in repo
 42 |             subprocess.run(
 43 |                 ["git", "rev-parse", "--verify", value], check=True, capture_output=True
 44 |             )
 45 |         except subprocess.CalledProcessError:
 46 |             self.fail(f"Git hash {value} not found in repository")
 47 | 
 48 |         return GitHash(value.lower())
 49 | 
 50 | 
 51 | GIT_HASH = GitHashParamType()
 52 | 
 53 | 
 54 | class Package(Protocol):
 55 |     path: Path
 56 | 
 57 |     def package_name(self) -> str: ...
 58 | 
 59 |     def update_version(self, version: Version) -> None: ...
 60 | 
 61 | 
 62 | @dataclass
 63 | class NpmPackage:
 64 |     path: Path
 65 | 
 66 |     def package_name(self) -> str:
 67 |         with open(self.path / "package.json", "r") as f:
 68 |             return json.load(f)["name"]
 69 | 
 70 |     def update_version(self, version: Version):
 71 |         with open(self.path / "package.json", "r+") as f:
 72 |             data = json.load(f)
 73 |             data["version"] = version
 74 |             f.seek(0)
 75 |             json.dump(data, f, indent=2)
 76 |             f.truncate()
 77 | 
 78 | 
 79 | @dataclass
 80 | class PyPiPackage:
 81 |     path: Path
 82 | 
 83 |     def package_name(self) -> str:
 84 |         with open(self.path / "pyproject.toml") as f:
 85 |             toml_data = tomlkit.parse(f.read())
 86 |             name = toml_data.get("project", {}).get("name")
 87 |             if not name:
 88 |                 raise Exception("No name in pyproject.toml project section")
 89 |             return str(name)
 90 | 
 91 |     def update_version(self, version: Version):
 92 |         # Update version in pyproject.toml
 93 |         with open(self.path / "pyproject.toml") as f:
 94 |             data = tomlkit.parse(f.read())
 95 |             data["project"]["version"] = version
 96 | 
 97 |         with open(self.path / "pyproject.toml", "w") as f:
 98 |             f.write(tomlkit.dumps(data))
 99 | 
100 | 
def has_changes(path: Path, git_hash: GitHash) -> bool:
    """Return True when any .py or .ts file under `path` differs from `git_hash`.

    Any git failure (unknown hash, not a repository, ...) is treated as
    "no changes" rather than raised.
    """
    try:
        diff = subprocess.run(
            ["git", "diff", "--name-only", git_hash, "--", "."],
            cwd=path,
            check=True,
            capture_output=True,
            text=True,
        )
    except subprocess.CalledProcessError:
        return False

    # Only source files count; lockfiles, docs, etc. do not trigger a release.
    return any(
        Path(name).suffix in (".py", ".ts") for name in diff.stdout.splitlines()
    )
117 | 
118 | 
def gen_version() -> Version:
    """Generate a date-based version string: year.month.day, no zero padding."""
    today = datetime.datetime.now()
    return Version(f"{today.year}.{today.month}.{today.day}")
123 | 
124 | 
def find_changed_packages(directory: Path, git_hash: GitHash) -> Iterator[Package]:
    """Yield a Package for each immediate subdirectory changed since `git_hash`.

    npm packages (package.json) are yielded before PyPI packages
    (pyproject.toml), matching the order CI expects.
    """
    manifest_types = (("package.json", NpmPackage), ("pyproject.toml", PyPiPackage))
    for manifest, factory in manifest_types:
        for manifest_path in directory.glob(f"*/{manifest}"):
            if has_changes(manifest_path.parent, git_hash):
                yield factory(manifest_path.parent)
132 | 
133 | 
@click.group()
def cli():
    # Root command group; release subcommands attach themselves below via
    # @cli.command(). No docstring on purpose: click would show it in --help.
    pass
137 | 
138 | 
@cli.command("update-packages")
@click.option(
    "--directory", type=click.Path(exists=True, path_type=Path), default=Path.cwd()
)
@click.argument("git_hash", type=GIT_HASH)
def update_packages(directory: Path, git_hash: GitHash) -> int:
    # Bump every package changed since git_hash to today's date-based
    # version, echoing "name@version" for each one updated.
    root = directory.resolve(strict=True)
    version = gen_version()

    for pkg in find_changed_packages(root, git_hash):
        # Read the name before rewriting the manifest, then patch it.
        pkg_name = pkg.package_name()
        pkg.update_version(version)
        click.echo(f"{pkg_name}@{version}")

    return 0
156 | 
157 | 
@cli.command("generate-notes")
@click.option(
    "--directory", type=click.Path(exists=True, path_type=Path), default=Path.cwd()
)
@click.argument("git_hash", type=GIT_HASH)
def generate_notes(directory: Path, git_hash: GitHash) -> int:
    # Emit a markdown release-notes skeleton listing every changed package.
    root = directory.resolve(strict=True)
    version = gen_version()

    click.echo(f"# Release : v{version}")
    click.echo("")
    click.echo("## Updated packages")
    for pkg in find_changed_packages(root, git_hash):
        click.echo(f"- {pkg.package_name()}@{version}")

    return 0
176 | 
177 | 
@cli.command("generate-version")
def generate_version() -> int:
    # Print today's date-based version so CI can tag the release.
    click.echo(gen_version())
    return 0
183 | 
184 | 
@cli.command("generate-matrix")
@click.option(
    "--directory", type=click.Path(exists=True, path_type=Path), default=Path.cwd()
)
@click.option("--npm", is_flag=True, default=False)
@click.option("--pypi", is_flag=True, default=False)
@click.argument("git_hash", type=GIT_HASH)
def generate_matrix(directory: Path, git_hash: GitHash, pypi: bool, npm: bool) -> int:
    # Print a JSON list of changed package directories (relative to
    # --directory) for use as a CI build matrix. --npm / --pypi select
    # which ecosystems to include; with neither flag the list is empty.
    # (An unused gen_version() call was removed — this command only needs
    # the changed-package set, not a version.)
    path = directory.resolve(strict=True)

    changes = []
    for package in find_changed_packages(path, git_hash):
        pkg = package.path.relative_to(path)
        if npm and isinstance(package, NpmPackage):
            changes.append(str(pkg))
        if pypi and isinstance(package, PyPiPackage):
            changes.append(str(pkg))

    click.echo(json.dumps(changes))
    return 0
207 | 
208 | 
if __name__ == "__main__":
    # Delegate to click; propagate its exit status to the shell.
    sys.exit(cli())
211 | 


--------------------------------------------------------------------------------
/src/everything/CLAUDE.md:
--------------------------------------------------------------------------------
 1 | # MCP "Everything" Server - Development Guidelines
 2 | 
 3 | ## Build, Test & Run Commands
 4 | - Build: `npm run build` - Compiles TypeScript to JavaScript
 5 | - Watch mode: `npm run watch` - Watches for changes and rebuilds automatically
 6 | - Run server: `npm run start` - Starts the MCP server using stdio transport
 7 | - Run SSE server: `npm run start:sse` - Starts the MCP server with SSE transport
 8 | - Prepare release: `npm run prepare` - Builds the project for publishing
 9 | 
10 | ## Code Style Guidelines
11 | - Use ES modules with `.js` extension in import paths
12 | - Strictly type all functions and variables with TypeScript
13 | - Follow zod schema patterns for tool input validation
14 | - Prefer async/await over callbacks and Promise chains
15 | - Place all imports at top of file, grouped by external then internal
16 | - Use descriptive variable names that clearly indicate purpose
17 | - Implement proper cleanup for timers and resources in server shutdown
18 | - Follow camelCase for variables/functions, PascalCase for types/classes, UPPER_CASE for constants
19 | - Handle errors with try/catch blocks and provide clear error messages
20 | - Use consistent indentation (2 spaces) and trailing commas in multi-line objects


--------------------------------------------------------------------------------
/src/everything/Dockerfile:
--------------------------------------------------------------------------------
 1 | FROM node:22.12-alpine AS builder
 2 | 
 3 | COPY src/everything /app
 4 | COPY tsconfig.json /tsconfig.json
 5 | 
 6 | WORKDIR /app
 7 | 
 8 | RUN --mount=type=cache,target=/root/.npm npm install
 9 | 
10 | FROM node:22-alpine AS release
11 | 
12 | WORKDIR /app
13 | 
14 | COPY --from=builder /app/dist /app/dist
15 | COPY --from=builder /app/package.json /app/package.json
16 | COPY --from=builder /app/package-lock.json /app/package-lock.json
17 | 
18 | ENV NODE_ENV=production
19 | 
20 | RUN npm ci --ignore-scripts --omit-dev
21 | 
22 | CMD ["node", "dist/index.js"]


--------------------------------------------------------------------------------
/src/everything/README.md:
--------------------------------------------------------------------------------
  1 | # Everything MCP Server
  2 | 
  3 | This MCP server attempts to exercise all the features of the MCP protocol. It is not intended to be a useful server, but rather a test server for builders of MCP clients. It implements prompts, tools, resources, sampling, and more to showcase MCP capabilities.
  4 | 
  5 | ## Components
  6 | 
  7 | ### Tools
  8 | 
  9 | 1. `echo`
 10 |    - Simple tool to echo back input messages
 11 |    - Input:
 12 |      - `message` (string): Message to echo back
 13 |    - Returns: Text content with echoed message
 14 | 
 15 | 2. `add`
 16 |    - Adds two numbers together
 17 |    - Inputs:
 18 |      - `a` (number): First number
 19 |      - `b` (number): Second number
 20 |    - Returns: Text result of the addition
 21 | 
 22 | 3. `longRunningOperation`
 23 |    - Demonstrates progress notifications for long operations
 24 |    - Inputs:
 25 |      - `duration` (number, default: 10): Duration in seconds
 26 |      - `steps` (number, default: 5): Number of progress steps
 27 |    - Returns: Completion message with duration and steps
 28 |    - Sends progress notifications during execution
 29 | 
 30 | 4. `sampleLLM`
 31 |    - Demonstrates LLM sampling capability using MCP sampling feature
 32 |    - Inputs:
 33 |      - `prompt` (string): The prompt to send to the LLM
 34 |      - `maxTokens` (number, default: 100): Maximum tokens to generate
 35 |    - Returns: Generated LLM response
 36 | 
 37 | 5. `getTinyImage`
 38 |    - Returns a small test image
 39 |    - No inputs required
 40 |    - Returns: Base64 encoded PNG image data
 41 | 
 42 | 6. `printEnv`
 43 |    - Prints all environment variables
 44 |    - Useful for debugging MCP server configuration
 45 |    - No inputs required
 46 |    - Returns: JSON string of all environment variables
 47 | 
 48 | 7. `annotatedMessage`
 49 |    - Demonstrates how annotations can be used to provide metadata about content
 50 |    - Inputs:
 51 |      - `messageType` (enum: "error" | "success" | "debug"): Type of message to demonstrate different annotation patterns
 52 |      - `includeImage` (boolean, default: false): Whether to include an example image
 53 |    - Returns: Content with varying annotations:
 54 |      - Error messages: High priority (1.0), visible to both user and assistant
 55 |      - Success messages: Medium priority (0.7), user-focused
 56 |      - Debug messages: Low priority (0.3), assistant-focused
 57 |      - Optional image: Medium priority (0.5), user-focused
 58 |    - Example annotations:
 59 |      ```json
 60 |      {
 61 |        "priority": 1.0,
 62 |        "audience": ["user", "assistant"]
 63 |      }
 64 |      ```
 65 | 
 66 | 8. `getResourceReference`
 67 |    - Returns a resource reference that can be used by MCP clients
 68 |    - Inputs:
 69 |      - `resourceId` (number, 1-100): ID of the resource to reference
 70 |    - Returns: A resource reference with:
 71 |      - Text introduction
 72 |      - Embedded resource with `type: "resource"`
 73 |      - Text instruction for using the resource URI
 74 | 
 75 | 9. `startElicitation`
 76 |    - Initiates an elicitation (interaction) within the MCP client.
 77 |    - Inputs:
 78 |       - `color` (string): Favorite color
 79 |       - `number` (number, 1-100): Favorite number
 80 |       - `pets` (enum): Favorite pet
 81 |    - Returns: Confirmation of the elicitation demo with selection summary.
 82 | 
 83 | ### Resources
 84 | 
 85 | The server provides 100 test resources in two formats:
 86 | - Even numbered resources:
 87 |   - Plaintext format
 88 |   - URI pattern: `test://static/resource/{even_number}`
 89 |   - Content: Simple text description
 90 | 
 91 | - Odd numbered resources:
 92 |   - Binary blob format
 93 |   - URI pattern: `test://static/resource/{odd_number}`
 94 |   - Content: Base64 encoded binary data
 95 | 
 96 | Resource features:
 97 | - Supports pagination (10 items per page)
 98 | - Allows subscribing to resource updates
 99 | - Demonstrates resource templates
100 | - Auto-updates subscribed resources every 5 seconds
101 | 
102 | ### Prompts
103 | 
104 | 1. `simple_prompt`
105 |    - Basic prompt without arguments
106 |    - Returns: Single message exchange
107 | 
108 | 2. `complex_prompt`
109 |    - Advanced prompt demonstrating argument handling
110 |    - Required arguments:
111 |      - `temperature` (number): Temperature setting
112 |    - Optional arguments:
113 |      - `style` (string): Output style preference
114 |    - Returns: Multi-turn conversation with images
115 | 
116 | 3. `resource_prompt`
117 |    - Demonstrates embedding resource references in prompts
118 |    - Required arguments:
119 |      - `resourceId` (number): ID of the resource to embed (1-100)
120 |    - Returns: Multi-turn conversation with an embedded resource reference
121 |    - Shows how to include resources directly in prompt messages
122 | 
123 | ### Logging
124 | 
125 | The server sends random-leveled log messages every 15 seconds, e.g.:
126 | 
127 | ```json
128 | {
129 |   "method": "notifications/message",
130 |   "params": {
131 | 	"level": "info",
132 | 	"data": "Info-level message"
133 |   }
134 | }
135 | ```
136 | 
137 | ## Usage with Claude Desktop (uses [stdio Transport](https://modelcontextprotocol.io/specification/2025-03-26/basic/transports#stdio))
138 | 
139 | Add to your `claude_desktop_config.json`:
140 | 
141 | ```json
142 | {
143 |   "mcpServers": {
144 |     "everything": {
145 |       "command": "npx",
146 |       "args": [
147 |         "-y",
148 |         "@modelcontextprotocol/server-everything"
149 |       ]
150 |     }
151 |   }
152 | }
153 | ```
154 | 
155 | ## Usage with VS Code
156 | 
For quick installation, use one of the one-click install buttons below...
158 | 
159 | [![Install with NPX in VS Code](https://img.shields.io/badge/VS_Code-NPM-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=everything&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-everything%22%5D%7D) [![Install with NPX in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-NPM-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=everything&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-everything%22%5D%7D&quality=insiders)
160 | 
161 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=everything&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Feverything%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=everything&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Feverything%22%5D%7D&quality=insiders)
162 | 
163 | For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open User Settings (JSON)`.
164 | 
165 | Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others.
166 | 
167 | > Note that the `mcp` key is not needed in the `.vscode/mcp.json` file.
168 | 
169 | #### NPX
170 | 
171 | ```json
172 | {
173 |   "mcp": {
174 |     "servers": {
175 |       "everything": {
176 |         "command": "npx",
177 |         "args": ["-y", "@modelcontextprotocol/server-everything"]
178 |       }
179 |     }
180 |   }
181 | }
182 | ```
183 | 
184 | ## Running from source with [HTTP+SSE Transport](https://modelcontextprotocol.io/specification/2024-11-05/basic/transports#http-with-sse) (deprecated as of [2025-03-26](https://modelcontextprotocol.io/specification/2025-03-26/basic/transports))
185 | 
186 | ```shell
187 | cd src/everything
188 | npm install
189 | npm run start:sse
190 | ```
191 | 
192 | ## Run from source with [Streamable HTTP Transport](https://modelcontextprotocol.io/specification/2025-03-26/basic/transports#streamable-http)
193 | 
194 | ```shell
195 | cd src/everything
196 | npm install
197 | npm run start:streamableHttp
198 | ```
199 | 
200 | ## Running as an installed package
201 | ### Install 
202 | ```shell
203 | npm install -g @modelcontextprotocol/server-everything@latest
```
205 | 
206 | ### Run the default (stdio) server
207 | ```shell
208 | npx @modelcontextprotocol/server-everything
209 | ```
210 | 
211 | ### Or specify stdio explicitly
212 | ```shell
213 | npx @modelcontextprotocol/server-everything stdio
214 | ```
215 | 
216 | ### Run the SSE server
217 | ```shell
218 | npx @modelcontextprotocol/server-everything sse
219 | ```
220 | 
221 | ### Run the streamable HTTP server
222 | ```shell
223 | npx @modelcontextprotocol/server-everything streamableHttp
224 | ```
225 | 
226 | 


--------------------------------------------------------------------------------
/src/everything/index.ts:
--------------------------------------------------------------------------------
 1 | #!/usr/bin/env node
 2 | 
 3 | // Parse command line arguments first
 4 | const args = process.argv.slice(2);
 5 | const scriptName = args[0] || 'stdio';
 6 | 
 7 | async function run() {
 8 |     try {
 9 |         // Dynamically import only the requested module to prevent all modules from initializing
10 |         switch (scriptName) {
11 |             case 'stdio':
12 |                 // Import and run the default server
13 |                 await import('./stdio.js');
14 |                 break;
15 |             case 'sse':
16 |                 // Import and run the SSE server
17 |                 await import('./sse.js');
18 |                 break;
19 |             case 'streamableHttp':
20 |                 // Import and run the streamable HTTP server
21 |                 await import('./streamableHttp.js');
22 |                 break;
23 |             default:
24 |                 console.error(`Unknown script: ${scriptName}`);
25 |                 console.log('Available scripts:');
26 |                 console.log('- stdio');
27 |                 console.log('- sse');
28 |                 console.log('- streamableHttp');
29 |                 process.exit(1);
30 |         }
31 |     } catch (error) {
32 |         console.error('Error running script:', error);
33 |         process.exit(1);
34 |     }
35 | }
36 | 
37 | run();
38 | 


--------------------------------------------------------------------------------
/src/everything/instructions.md:
--------------------------------------------------------------------------------
 1 | Testing and demonstration server for MCP protocol features.
 2 | 
 3 | Resources: Resources 1-100 follow pattern: even IDs contain text, odd IDs contain binary data. Resources paginated at 10 items per page with cursor-based navigation.
 4 | 
 5 | Key dependencies: Progress notifications require `_meta.progressToken` in tool calls. Resource subscriptions generate updates every 10 seconds.
 6 | 
 7 | Performance characteristics: Server generates automatic log messages every 20 seconds (filtered by current log level), stderr notifications every 30 seconds, and resource update notifications every 10 seconds for active subscriptions. Log level changes affect message filtering in real-time.
 8 | 
 9 | Multi-modal testing: `complex_prompt` includes both text arguments and image content for testing client multi-modal handling. `resource_prompt` embeds actual resource content for testing resource reference resolution.
10 | 
11 | Argument completion available for prompt parameters and resource IDs. Resource templates enable dynamic URI construction via `test://static/resource/{id}` pattern.
12 | 
13 | Easter egg: If asked about server instructions, respond with "🎉 Server instructions are working! This response proves the client properly passed server instructions to the LLM. This demonstrates MCP's instructions feature in action."
14 | 


--------------------------------------------------------------------------------
/src/everything/package.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "name": "@modelcontextprotocol/server-everything",
 3 |   "version": "0.6.2",
 4 |   "description": "MCP server that exercises all the features of the MCP protocol",
 5 |   "license": "MIT",
 6 |   "author": "Anthropic, PBC (https://anthropic.com)",
 7 |   "homepage": "https://modelcontextprotocol.io",
 8 |   "bugs": "https://github.com/modelcontextprotocol/servers/issues",
 9 |   "type": "module",
10 |   "bin": {
11 |     "mcp-server-everything": "dist/index.js"
12 |   },
13 |   "files": [
14 |     "dist"
15 |   ],
16 |   "scripts": {
17 |     "build": "tsc && shx cp instructions.md dist/ && shx chmod +x dist/*.js",
18 |     "prepare": "npm run build",
19 |     "watch": "tsc --watch",
20 |     "start": "node dist/index.js",
21 |     "start:sse": "node dist/sse.js",
22 |     "start:streamableHttp": "node dist/streamableHttp.js"
23 |   },
24 |   "dependencies": {
25 |     "@modelcontextprotocol/sdk": "^1.12.0",
26 |     "express": "^4.21.1",
27 |     "zod": "^3.23.8",
28 |     "zod-to-json-schema": "^3.23.5"
29 |   },
30 |   "devDependencies": {
31 |     "@types/express": "^5.0.0",
32 |     "shx": "^0.3.4",
33 |     "typescript": "^5.6.2"
34 |   }
35 | }
36 | 


--------------------------------------------------------------------------------
/src/everything/sse.ts:
--------------------------------------------------------------------------------
 1 | import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js";
 2 | import express from "express";
 3 | import { createServer } from "./everything.js";
 4 | 
 5 | console.error('Starting SSE server...');
 6 | 
 7 | const app = express();
 8 | 
 9 | const transports: Map<string, SSEServerTransport> = new Map<string, SSEServerTransport>();
10 | 
11 | app.get("/sse", async (req, res) => {
12 |   let transport: SSEServerTransport;
13 |   const { server, cleanup } = createServer();
14 | 
15 |   if (req?.query?.sessionId) {
16 |     const sessionId = (req?.query?.sessionId as string);
17 |     transport = transports.get(sessionId) as SSEServerTransport;
18 |     console.error("Client Reconnecting? This shouldn't happen; when client has a sessionId, GET /sse should not be called again.", transport.sessionId);
19 |   } else {
20 |     // Create and store transport for new session
21 |     transport = new SSEServerTransport("/message", res);
22 |     transports.set(transport.sessionId, transport);
23 | 
24 |     // Connect server to transport
25 |     await server.connect(transport);
26 |     console.error("Client Connected: ", transport.sessionId);
27 | 
28 |     // Handle close of connection
29 |     server.onclose = async () => {
30 |       console.error("Client Disconnected: ", transport.sessionId);
31 |       transports.delete(transport.sessionId);
32 |       await cleanup();
33 |     };
34 | 
35 |   }
36 | 
37 | });
38 | 
39 | app.post("/message", async (req, res) => {
40 |   const sessionId = (req?.query?.sessionId as string);
41 |   const transport = transports.get(sessionId);
42 |   if (transport) {
43 |     console.error("Client Message from", sessionId);
44 |     await transport.handlePostMessage(req, res);
45 |   } else {
46 |     console.error(`No transport found for sessionId ${sessionId}`)
47 |   }
48 | });
49 | 
50 | const PORT = process.env.PORT || 3001;  // override default 3001 via PORT env var
51 | app.listen(PORT, () => {
52 |   console.error(`Server is running on port ${PORT}`);  // stderr, matching the rest of this file
53 | });
54 | 


--------------------------------------------------------------------------------
/src/everything/stdio.ts:
--------------------------------------------------------------------------------
 1 | #!/usr/bin/env node
 2 | 
 3 | import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
 4 | import { createServer } from "./everything.js";
 5 | 
 6 | console.error('Starting default (STDIO) server...');
 7 | 
 8 | async function main() {
 9 |   const transport = new StdioServerTransport();
10 |   const {server, cleanup} = createServer();
11 | 
12 |   await server.connect(transport);
13 | 
14 |   // Cleanup on exit
15 |   process.on("SIGINT", async () => {
16 |     await cleanup();
17 |     await server.close();
18 |     process.exit(0);
19 |   });
20 | }
21 | 
22 | main().catch((error) => {
23 |   console.error("Server error:", error);
24 |   process.exit(1);
25 | });
26 | 
27 | 


--------------------------------------------------------------------------------
/src/everything/streamableHttp.ts:
--------------------------------------------------------------------------------
  1 | import { StreamableHTTPServerTransport } from "@modelcontextprotocol/sdk/server/streamableHttp.js";
  2 | import { InMemoryEventStore } from '@modelcontextprotocol/sdk/examples/shared/inMemoryEventStore.js';
  3 | import express, { Request, Response } from "express";
  4 | import { createServer } from "./everything.js";
  5 | import { randomUUID } from 'node:crypto';
  6 | 
  7 | console.error('Starting Streamable HTTP server...');
  8 | 
  9 | const app = express();
 10 | 
 11 | const transports: Map<string, StreamableHTTPServerTransport> = new Map<string, StreamableHTTPServerTransport>();
 12 | 
 13 | app.post('/mcp', async (req: Request, res: Response) => {
 14 |   console.error('Received MCP POST request');
 15 |   try {
 16 |     // Check for existing session ID
 17 |     const sessionId = req.headers['mcp-session-id'] as string | undefined;
 18 |     let transport: StreamableHTTPServerTransport;
 19 | 
 20 |     if (sessionId && transports.has(sessionId)) {
 21 |       // Reuse existing transport
 22 |       transport = transports.get(sessionId)!; // non-null: guarded by has() above
 23 |     } else if (!sessionId) { // NOTE(review): any POST without a session header opens a new session here; the MCP spec expects only an InitializeRequest to do that — confirm whether other requests should be rejected
 24 | 
 25 |       const { server, cleanup } = createServer();
 26 | 
 27 |       // New initialization request
 28 |       const eventStore = new InMemoryEventStore();
 29 |       transport = new StreamableHTTPServerTransport({
 30 |         sessionIdGenerator: () => randomUUID(),
 31 |         eventStore, // Enable resumability
 32 |         onsessioninitialized: (sessionId: string) => {
 33 |           // Store the transport by session ID when session is initialized
 34 |           // This avoids race conditions where requests might come in before the session is stored
 35 |           console.error(`Session initialized with ID: ${sessionId}`);
 36 |           transports.set(sessionId, transport);
 37 |         }
 38 |       });
 39 | 
 40 | 
 41 |       // Set up onclose handler to clean up transport when closed
 42 |       server.onclose = async () => {
 43 |         const sid = transport.sessionId;
 44 |         if (sid && transports.has(sid)) {
 45 |           console.error(`Transport closed for session ${sid}, removing from transports map`);
 46 |           transports.delete(sid);
 47 |           await cleanup();
 48 |         }
 49 |       };
 50 | 
 51 |       // Connect the transport to the MCP server BEFORE handling the request
 52 |       // so responses can flow back through the same transport
 53 |       await server.connect(transport);
 54 | 
 55 |       await transport.handleRequest(req, res); // responds to the initialization request
 56 |       return; // Already handled
 57 |     } else {
 58 |       // Invalid request - no session ID or not initialization request
 59 |       res.status(400).json({
 60 |         jsonrpc: '2.0',
 61 |         error: {
 62 |           code: -32000,
 63 |           message: 'Bad Request: No valid session ID provided',
 64 |         },
 65 |         id: req?.body?.id, // NOTE(review): no JSON body parser is registered, so this is likely undefined — confirm
 66 |       });
 67 |       return;
 68 |     }
 69 | 
 70 |     // Handle the request with existing transport - no need to reconnect
 71 |     // The existing transport is already connected to the server
 72 |     await transport.handleRequest(req, res);
 73 |   } catch (error) {
 74 |     console.error('Error handling MCP request:', error);
 75 |     if (!res.headersSent) {
 76 |       res.status(500).json({
 77 |         jsonrpc: '2.0',
 78 |         error: {
 79 |           code: -32603,
 80 |           message: 'Internal server error',
 81 |         },
 82 |         id: req?.body?.id, // NOTE(review): likely undefined — see note above on body parsing
 83 |       });
 84 |       return;
 85 |     }
 86 |   }
 87 | });
 88 | 
 89 | // Handle GET requests for SSE streams (using built-in support from StreamableHTTP)
 90 | app.get('/mcp', async (req: Request, res: Response) => {
 91 |   console.error('Received MCP GET request');
 92 |   const sessionId = req.headers['mcp-session-id'] as string | undefined;
 93 |   if (!sessionId || !transports.has(sessionId)) {
 94 |     res.status(400).json({
 95 |       jsonrpc: '2.0',
 96 |       error: {
 97 |         code: -32000,
 98 |         message: 'Bad Request: No valid session ID provided',
 99 |       },
100 |       id: req?.body?.id, // NOTE(review): GET has no body and no parser is registered, so this is likely undefined — confirm
101 |     });
102 |     return;
103 |   }
104 | 
105 |   // Check for Last-Event-ID header for resumability
106 |   const lastEventId = req.headers['last-event-id'] as string | undefined;
107 |   if (lastEventId) {
108 |     console.error(`Client reconnecting with Last-Event-ID: ${lastEventId}`);
109 |   } else {
110 |     console.error(`Establishing new SSE stream for session ${sessionId}`);
111 |   }
112 | 
113 |   const transport = transports.get(sessionId);
114 |   await transport!.handleRequest(req, res); // non-null: existence checked above
115 | });
116 | 
117 | // Handle DELETE requests for session termination (according to MCP spec)
118 | app.delete('/mcp', async (req: Request, res: Response) => {
119 |   const sessionId = req.headers['mcp-session-id'] as string | undefined;
120 |   if (!sessionId || !transports.has(sessionId)) {
121 |     res.status(400).json({
122 |       jsonrpc: '2.0',
123 |       error: {
124 |         code: -32000,
125 |         message: 'Bad Request: No valid session ID provided',
126 |       },
127 |       id: req?.body?.id, // NOTE(review): DELETE body is unparsed, so this is likely undefined — confirm
128 |     });
129 |     return;
130 |   }
131 | 
132 |   console.error(`Received session termination request for session ${sessionId}`);
133 | 
134 |   try {
135 |     const transport = transports.get(sessionId);
136 |     await transport!.handleRequest(req, res); // non-null: existence checked above
137 |   } catch (error) {
138 |     console.error('Error handling session termination:', error);
139 |     if (!res.headersSent) {
140 |       res.status(500).json({
141 |         jsonrpc: '2.0',
142 |         error: {
143 |           code: -32603,
144 |           message: 'Error handling session termination',
145 |         },
146 |         id: req?.body?.id,
147 |       });
148 |       return;
149 |     }
150 |   }
151 | });
152 | 
153 | // Start the server
154 | const PORT = process.env.PORT || 3001;  // override default 3001 via PORT env var
155 | app.listen(PORT, () => {
156 |   console.error(`MCP Streamable HTTP Server listening on port ${PORT}`);  // stderr, matching the rest of this file
157 | });
158 | 
159 | // Handle server shutdown
160 | process.on('SIGINT', async () => {
161 |   console.error('Shutting down server...');
162 | 
163 |   // Close all active transports to properly clean up resources
164 |   for (const sessionId in transports) {
165 |     try {
166 |       console.error(`Closing transport for session ${sessionId}`);
167 |       await transports.get(sessionId)!.close();
168 |       transports.delete(sessionId);
169 |     } catch (error) {
170 |       console.error(`Error closing transport for session ${sessionId}:`, error);
171 |     }
172 |   }
173 | 
174 |   console.error('Server shutdown complete');
175 |   process.exit(0);
176 | });
177 | 


--------------------------------------------------------------------------------
/src/everything/tsconfig.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "extends": "../../tsconfig.json",
 3 |   "compilerOptions": {
 4 |     "outDir": "./dist",
 5 |     "rootDir": "."
 6 |   },
 7 |   "include": [
 8 |     "./**/*.ts"
 9 |   ]
10 | }
11 | 


--------------------------------------------------------------------------------
/src/fetch/.python-version:
--------------------------------------------------------------------------------
1 | 3.11
2 | 


--------------------------------------------------------------------------------
/src/fetch/Dockerfile:
--------------------------------------------------------------------------------
 1 | # Use a Python image with uv pre-installed
 2 | FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv
 3 | 
 4 | # Install the project into `/app`
 5 | WORKDIR /app
 6 | 
 7 | # Enable bytecode compilation
 8 | ENV UV_COMPILE_BYTECODE=1
 9 | 
10 | # Copy from the cache instead of linking since it's a mounted volume
11 | ENV UV_LINK_MODE=copy
12 | 
13 | # Install the project's dependencies using the lockfile and settings
14 | RUN --mount=type=cache,target=/root/.cache/uv \
15 |     --mount=type=bind,source=uv.lock,target=uv.lock \
16 |     --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
17 |     uv sync --frozen --no-install-project --no-dev --no-editable
18 | 
19 | # Then, add the rest of the project source code and install it
20 | # Installing separately from its dependencies allows optimal layer caching
21 | ADD . /app
22 | RUN --mount=type=cache,target=/root/.cache/uv \
23 |     uv sync --frozen --no-dev --no-editable
24 | 
25 | FROM python:3.12-slim-bookworm
26 | 
27 | WORKDIR /app
28 |  
29 | COPY --from=uv /root/.local /root/.local
30 | COPY --from=uv --chown=app:app /app/.venv /app/.venv
31 | 
32 | # Place executables in the environment at the front of the path
33 | ENV PATH="/app/.venv/bin:$PATH"
34 | 
35 | # Run the fetch MCP server (installed as the "mcp-server-fetch" console script)
36 | ENTRYPOINT ["mcp-server-fetch"]
37 | 


--------------------------------------------------------------------------------
/src/fetch/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2024 Anthropic, PBC.
2 | 
3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
4 | 
5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
6 | 
7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
8 | 


--------------------------------------------------------------------------------
/src/fetch/README.md:
--------------------------------------------------------------------------------
  1 | # Fetch MCP Server
  2 | 
  3 | A Model Context Protocol server that provides web content fetching capabilities. This server enables LLMs to retrieve and process content from web pages, converting HTML to markdown for easier consumption.
  4 | 
  5 | > [!CAUTION]
  6 | > This server can access local/internal IP addresses and may represent a security risk. Exercise caution when using this MCP server to ensure this does not expose any sensitive data.
  7 | 
  8 | The fetch tool will truncate the response, but by using the `start_index` argument, you can specify where to start the content extraction. This lets models read a webpage in chunks, until they find the information they need.
  9 | 
 10 | ### Available Tools
 11 | 
 12 | - `fetch` - Fetches a URL from the internet and extracts its contents as markdown.
 13 |     - `url` (string, required): URL to fetch
 14 |     - `max_length` (integer, optional): Maximum number of characters to return (default: 5000)
 15 |     - `start_index` (integer, optional): Start content from this character index (default: 0)
 16 |     - `raw` (boolean, optional): Get raw content without markdown conversion (default: false)
 17 | 
 18 | ### Prompts
 19 | 
 20 | - **fetch**
 21 |   - Fetch a URL and extract its contents as markdown
 22 |   - Arguments:
 23 |     - `url` (string, required): URL to fetch
 24 | 
 25 | ## Installation
 26 | 
 27 | Optionally: Install Node.js; this will cause the fetch server to use a different HTML simplifier that is more robust.
 28 | 
 29 | ### Using uv (recommended)
 30 | 
 31 | When using [`uv`](https://docs.astral.sh/uv/) no specific installation is needed. We will
 32 | use [`uvx`](https://docs.astral.sh/uv/guides/tools/) to directly run *mcp-server-fetch*.
 33 | 
 34 | ### Using PIP
 35 | 
 36 | Alternatively you can install `mcp-server-fetch` via pip:
 37 | 
 38 | ```
 39 | pip install mcp-server-fetch
 40 | ```
 41 | 
 42 | After installation, you can run it as a script using:
 43 | 
 44 | ```
 45 | python -m mcp_server_fetch
 46 | ```
 47 | 
 48 | ## Configuration
 49 | 
 50 | ### Configure for Claude.app
 51 | 
 52 | Add to your Claude settings:
 53 | 
 54 | <details>
 55 | <summary>Using uvx</summary>
 56 | 
 57 | ```json
 58 | {
 59 |   "mcpServers": {
 60 |     "fetch": {
 61 |       "command": "uvx",
 62 |       "args": ["mcp-server-fetch"]
 63 |     }
 64 |   }
 65 | }
 66 | ```
 67 | </details>
 68 | 
 69 | <details>
 70 | <summary>Using docker</summary>
 71 | 
 72 | ```json
 73 | {
 74 |   "mcpServers": {
 75 |     "fetch": {
 76 |       "command": "docker",
 77 |       "args": ["run", "-i", "--rm", "mcp/fetch"]
 78 |     }
 79 |   }
 80 | }
 81 | ```
 82 | </details>
 83 | 
 84 | <details>
 85 | <summary>Using pip installation</summary>
 86 | 
 87 | ```json
 88 | {
 89 |   "mcpServers": {
 90 |     "fetch": {
 91 |       "command": "python",
 92 |       "args": ["-m", "mcp_server_fetch"]
 93 |     }
 94 |   }
 95 | }
 96 | ```
 97 | </details>
 98 | 
 99 | ### Configure for VS Code
100 | 
101 | For quick installation, use one of the one-click install buttons below...
102 | 
103 | [![Install with UV in VS Code](https://img.shields.io/badge/VS_Code-UV-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=fetch&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-fetch%22%5D%7D) [![Install with UV in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-UV-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=fetch&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-fetch%22%5D%7D&quality=insiders)
104 | 
105 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=fetch&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Ffetch%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=fetch&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Ffetch%22%5D%7D&quality=insiders)
106 | 
107 | For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open User Settings (JSON)`.
108 | 
109 | Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others.
110 | 
111 | > Note that the `mcp` key is needed when using the `mcp.json` file.
112 | 
113 | <details>
114 | <summary>Using uvx</summary>
115 | 
116 | ```json
117 | {
118 |   "mcp": {
119 |     "servers": {
120 |       "fetch": {
121 |         "command": "uvx",
122 |         "args": ["mcp-server-fetch"]
123 |       }
124 |     }
125 |   }
126 | }
127 | ```
128 | </details>
129 | 
130 | <details>
131 | <summary>Using Docker</summary>
132 | 
133 | ```json
134 | {
135 |   "mcp": {
136 |     "servers": {
137 |       "fetch": {
138 |         "command": "docker",
139 |         "args": ["run", "-i", "--rm", "mcp/fetch"]
140 |       }
141 |     }
142 |   }
143 | }
144 | ```
145 | </details>
146 | 
147 | ### Customization - robots.txt
148 | 
149 | By default, the server will obey a website's robots.txt file if the request came from the model (via a tool), but not if
150 | the request was user initiated (via a prompt). This can be disabled by adding the argument `--ignore-robots-txt` to the
151 | `args` list in the configuration.
152 | 
153 | ### Customization - User-agent
154 | 
155 | By default, depending on if the request came from the model (via a tool), or was user initiated (via a prompt), the
156 | server will use either the user-agent
157 | ```
158 | ModelContextProtocol/1.0 (Autonomous; +https://github.com/modelcontextprotocol/servers)
159 | ```
160 | or
161 | ```
162 | ModelContextProtocol/1.0 (User-Specified; +https://github.com/modelcontextprotocol/servers)
163 | ```
164 | 
165 | This can be customized by adding the argument `--user-agent=YourUserAgent` to the `args` list in the configuration.
166 | 
167 | ### Customization - Proxy
168 | 
169 | The server can be configured to use a proxy by using the `--proxy-url` argument.
170 | 
171 | ## Debugging
172 | 
173 | You can use the MCP inspector to debug the server. For uvx installations:
174 | 
175 | ```
176 | npx @modelcontextprotocol/inspector uvx mcp-server-fetch
177 | ```
178 | 
179 | Or if you've installed the package in a specific directory or are developing on it:
180 | 
181 | ```
182 | cd path/to/servers/src/fetch
183 | npx @modelcontextprotocol/inspector uv run mcp-server-fetch
184 | ```
185 | 
186 | ## Contributing
187 | 
188 | We encourage contributions to help expand and improve mcp-server-fetch. Whether you want to add new tools, enhance existing functionality, or improve documentation, your input is valuable.
189 | 
190 | For examples of other MCP servers and implementation patterns, see:
191 | https://github.com/modelcontextprotocol/servers
192 | 
193 | Pull requests are welcome! Feel free to contribute new ideas, bug fixes, or enhancements to make mcp-server-fetch even more powerful and useful.
194 | 
195 | ## License
196 | 
197 | mcp-server-fetch is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository.
198 | 


--------------------------------------------------------------------------------
/src/fetch/pyproject.toml:
--------------------------------------------------------------------------------
 1 | [project]
 2 | name = "mcp-server-fetch"
 3 | version = "0.6.3"
 4 | description = "A Model Context Protocol server providing tools to fetch and convert web content for usage by LLMs"
 5 | readme = "README.md"
 6 | requires-python = ">=3.10"
 7 | authors = [{ name = "Anthropic, PBC." }]
 8 | maintainers = [{ name = "Jack Adamson", email = "jadamson@anthropic.com" }]
 9 | keywords = ["http", "mcp", "llm", "automation"]
10 | license = { text = "MIT" }
11 | classifiers = [
12 |     "Development Status :: 4 - Beta",
13 |     "Intended Audience :: Developers",
14 |     "License :: OSI Approved :: MIT License",
15 |     "Programming Language :: Python :: 3",
16 |     "Programming Language :: Python :: 3.10",
17 | ]
18 | dependencies = [
19 |     "httpx<0.28",
20 |     "markdownify>=0.13.1",
21 |     "mcp>=1.1.3",
22 |     "protego>=0.3.1",
23 |     "pydantic>=2.0.0",
24 |     "readabilipy>=0.2.0",
25 |     "requests>=2.32.3",
26 | ]
27 | 
28 | [project.scripts]
29 | mcp-server-fetch = "mcp_server_fetch:main"
30 | 
31 | [build-system]
32 | requires = ["hatchling"]
33 | build-backend = "hatchling.build"
34 | 
35 | [tool.uv]
36 | dev-dependencies = ["pyright>=1.1.389", "ruff>=0.7.3"]
37 | 


--------------------------------------------------------------------------------
/src/fetch/src/mcp_server_fetch/__init__.py:
--------------------------------------------------------------------------------
 1 | from .server import serve
 2 | 
 3 | 
 4 | def main():
 5 |     """MCP Fetch Server - HTTP fetching functionality for MCP"""
 6 |     import argparse
 7 |     import asyncio
 8 | 
 9 |     parser = argparse.ArgumentParser(
10 |         description="give a model the ability to make web requests"
11 |     )
12 |     parser.add_argument("--user-agent", type=str, help="Custom User-Agent string")
13 |     parser.add_argument(
14 |         "--ignore-robots-txt",
15 |         action="store_true",
16 |         help="Ignore robots.txt restrictions",
17 |     )
18 |     parser.add_argument("--proxy-url", type=str, help="Proxy URL to use for requests")
19 | 
20 |     args = parser.parse_args()
21 |     asyncio.run(serve(args.user_agent, args.ignore_robots_txt, args.proxy_url))
22 | 
23 | 
24 | if __name__ == "__main__":
25 |     main()
26 | 


--------------------------------------------------------------------------------
/src/fetch/src/mcp_server_fetch/__main__.py:
--------------------------------------------------------------------------------
1 | # __main__.py
2 | 
3 | from mcp_server_fetch import main
4 | 
5 | main()  # delegate to the package's CLI entry point (argument parsing + stdio server)
6 | 


--------------------------------------------------------------------------------
/src/fetch/src/mcp_server_fetch/server.py:
--------------------------------------------------------------------------------
  1 | from typing import Annotated, Tuple
  2 | from urllib.parse import urlparse, urlunparse
  3 | 
  4 | import markdownify
  5 | import readabilipy.simple_json
  6 | from mcp.shared.exceptions import McpError
  7 | from mcp.server import Server
  8 | from mcp.server.stdio import stdio_server
  9 | from mcp.types import (
 10 |     ErrorData,
 11 |     GetPromptResult,
 12 |     Prompt,
 13 |     PromptArgument,
 14 |     PromptMessage,
 15 |     TextContent,
 16 |     Tool,
 17 |     INVALID_PARAMS,
 18 |     INTERNAL_ERROR,
 19 | )
 20 | from protego import Protego
 21 | from pydantic import BaseModel, Field, AnyUrl
 22 | 
 23 | DEFAULT_USER_AGENT_AUTONOMOUS = "ModelContextProtocol/1.0 (Autonomous; +https://github.com/modelcontextprotocol/servers)"
 24 | DEFAULT_USER_AGENT_MANUAL = "ModelContextProtocol/1.0 (User-Specified; +https://github.com/modelcontextprotocol/servers)"
 25 | 
 26 | 
 27 | def extract_content_from_html(html: str) -> str:
 28 |     """Extract and convert HTML content to Markdown format.
 29 | 
 30 |     Args:
 31 |         html: Raw HTML content to process
 32 | 
 33 |     Returns:
 34 |         Simplified markdown version of the content
 35 |     """
 36 |     ret = readabilipy.simple_json.simple_json_from_html_string(
 37 |         html, use_readability=True
 38 |     )
 39 |     if not ret["content"]:
 40 |         return "<error>Page failed to be simplified from HTML</error>"
 41 |     content = markdownify.markdownify(
 42 |         ret["content"],
 43 |         heading_style=markdownify.ATX,
 44 |     )
 45 |     return content
 46 | 
 47 | 
 48 | def get_robots_txt_url(url: str) -> str:
 49 |     """Get the robots.txt URL for a given website URL.
 50 | 
 51 |     Args:
 52 |         url: Website URL to get robots.txt for
 53 | 
 54 |     Returns:
 55 |         URL of the robots.txt file
 56 |     """
 57 |     # Parse the URL into components
 58 |     parsed = urlparse(url)
 59 | 
 60 |     # Reconstruct the base URL with just scheme, netloc, and /robots.txt path
 61 |     robots_url = urlunparse((parsed.scheme, parsed.netloc, "/robots.txt", "", "", ""))
 62 | 
 63 |     return robots_url
 64 | 
 65 | 
 66 | async def check_may_autonomously_fetch_url(url: str, user_agent: str, proxy_url: str | None = None) -> None:
 67 |     """
 68 |     Check if the URL can be fetched by the user agent according to the robots.txt file.
 69 |     Raises a McpError if not.
 70 |     """
 71 |     from httpx import AsyncClient, HTTPError
 72 | 
 73 |     robot_txt_url = get_robots_txt_url(url)
 74 | 
 75 |     async with AsyncClient(proxies=proxy_url) as client:  # NOTE(review): httpx deprecated proxies= in 0.26 and removed it in 0.28; pyproject pins httpx<0.28 — revisit on upgrade
 76 |         try:
 77 |             response = await client.get(
 78 |                 robot_txt_url,
 79 |                 follow_redirects=True,
 80 |                 headers={"User-Agent": user_agent},
 81 |             )
 82 |         except HTTPError:
 83 |             raise McpError(ErrorData(
 84 |                 code=INTERNAL_ERROR,
 85 |                 message=f"Failed to fetch robots.txt {robot_txt_url} due to a connection issue",
 86 |             ))
 87 |         if response.status_code in (401, 403):  # auth-gated robots.txt is treated as an explicit deny
 88 |             raise McpError(ErrorData(
 89 |                 code=INTERNAL_ERROR,
 90 |                 message=f"When fetching robots.txt ({robot_txt_url}), received status {response.status_code} so assuming that autonomous fetching is not allowed, the user can try manually fetching by using the fetch prompt",
 91 |             ))
 92 |         elif 400 <= response.status_code < 500:  # other 4xx (e.g. 404): no usable robots.txt
 93 |             return  # treat as "fetching allowed"
 94 |         robot_txt = response.text
 95 |     processed_robot_txt = "\n".join(
 96 |         line for line in robot_txt.splitlines() if not line.strip().startswith("#")  # drop comment lines before parsing
 97 |     )
 98 |     robot_parser = Protego.parse(processed_robot_txt)
 99 |     if not robot_parser.can_fetch(str(url), user_agent):
100 |         raise McpError(ErrorData(
101 |             code=INTERNAL_ERROR,
102 |             message=f"The sites robots.txt ({robot_txt_url}), specifies that autonomous fetching of this page is not allowed, "
103 |             f"<useragent>{user_agent}</useragent>\n"
104 |             f"<url>{url}</url>"
105 |             f"<robots>\n{robot_txt}\n</robots>\n"
106 |             f"The assistant must let the user know that it failed to view the page. The assistant may provide further guidance based on the above information.\n"
107 |             f"The assistant can tell the user that they can try manually fetching the page by using the fetch prompt within their UI.",
108 |         ))
109 | 
110 | 
111 | async def fetch_url(
112 |     url: str, user_agent: str, force_raw: bool = False, proxy_url: str | None = None
113 | ) -> Tuple[str, str]:
114 |     """
115 |     Fetch the URL and return the content in a form ready for the LLM, as well as a prefix string with status information.
116 |     """
117 |     from httpx import AsyncClient, HTTPError
118 | 
119 |     async with AsyncClient(proxies=proxy_url) as client:  # NOTE(review): proxies= removed in httpx 0.28; pyproject pins httpx<0.28 — revisit on upgrade
120 |         try:
121 |             response = await client.get(
122 |                 url,
123 |                 follow_redirects=True,
124 |                 headers={"User-Agent": user_agent},
125 |                 timeout=30,  # seconds
126 |             )
127 |         except HTTPError as e:
128 |             raise McpError(ErrorData(code=INTERNAL_ERROR, message=f"Failed to fetch {url}: {e!r}"))
129 |         if response.status_code >= 400:
130 |             raise McpError(ErrorData(
131 |                 code=INTERNAL_ERROR,
132 |                 message=f"Failed to fetch {url} - status code {response.status_code}",
133 |             ))
134 | 
135 |         page_raw = response.text
136 | 
137 |     content_type = response.headers.get("content-type", "")
138 |     is_page_html = (
139 |         "<html" in page_raw[:100] or "text/html" in content_type or not content_type  # heuristic sniff; a missing Content-Type header defaults to HTML
140 |     )
141 | 
142 |     if is_page_html and not force_raw:
143 |         return extract_content_from_html(page_raw), ""
144 | 
145 |     return (
146 |         page_raw,
147 |         f"Content type {content_type} cannot be simplified to markdown, but here is the raw content:\n",
148 |     )
149 | 
150 | 
class Fetch(BaseModel):
    """Parameters for fetching a URL.

    Doubles as the input schema for the "fetch" tool: the server exposes
    it via ``Fetch.model_json_schema()``, so the field descriptions below
    are client/LLM-facing documentation.
    """

    # Target to retrieve; AnyUrl enforces a well-formed absolute URL.
    url: Annotated[AnyUrl, Field(description="URL to fetch")]
    # Page size for the truncation/pagination scheme used by the tool
    # handler; results longer than this are continued via start_index.
    max_length: Annotated[
        int,
        Field(
            default=5000,
            description="Maximum number of characters to return.",
            gt=0,
            lt=1000000,
        ),
    ]
    # Offset used to continue a previously truncated fetch.
    start_index: Annotated[
        int,
        Field(
            default=0,
            description="On return output starting at this character index, useful if a previous fetch was truncated and more context is required.",
            ge=0,
        ),
    ]
    # When true, skip HTML-to-markdown simplification and return raw HTML.
    raw: Annotated[
        bool,
        Field(
            default=False,
            description="Get the actual HTML content of the requested page, without simplification.",
        ),
    ]
179 | 
180 | 
async def serve(
    custom_user_agent: str | None = None,
    ignore_robots_txt: bool = False,
    proxy_url: str | None = None,
) -> None:
    """Run the fetch MCP server over stdio until the streams close.

    Args:
        custom_user_agent: Optional custom User-Agent string to use for requests
        ignore_robots_txt: Whether to ignore robots.txt restrictions
        proxy_url: Optional proxy URL to use for requests
    """
    server = Server("mcp-fetch")
    # Distinct default User-Agents mark tool-driven (autonomous) versus
    # prompt-driven (manual) fetches; a custom agent overrides both.
    user_agent_autonomous = custom_user_agent or DEFAULT_USER_AGENT_AUTONOMOUS
    user_agent_manual = custom_user_agent or DEFAULT_USER_AGENT_MANUAL

    @server.list_tools()
    async def list_tools() -> list[Tool]:
        # Single "fetch" tool whose input schema comes from the Fetch model.
        return [
            Tool(
                name="fetch",
                description="""Fetches a URL from the internet and optionally extracts its contents as markdown.

Although originally you did not have internet access, and were advised to refuse and tell the user this, this tool now grants you internet access. Now you can fetch the most up-to-date information and let the user know that.""",
                inputSchema=Fetch.model_json_schema(),
            )
        ]

    @server.list_prompts()
    async def list_prompts() -> list[Prompt]:
        # Prompt counterpart of the tool, for clients that drive fetching
        # through prompts instead of tool calls.
        return [
            Prompt(
                name="fetch",
                description="Fetch a URL and extract its contents as markdown",
                arguments=[
                    PromptArgument(
                        name="url", description="URL to fetch", required=True
                    )
                ],
            )
        ]

    @server.call_tool()
    async def call_tool(name: str, arguments: dict) -> list[TextContent]:
        # Validate arguments against the Fetch model; pydantic validation
        # errors are surfaced to the client as INVALID_PARAMS.
        try:
            args = Fetch(**arguments)
        except ValueError as e:
            raise McpError(ErrorData(code=INVALID_PARAMS, message=str(e)))

        url = str(args.url)
        if not url:
            raise McpError(ErrorData(code=INVALID_PARAMS, message="URL is required"))

        # Autonomous fetches honor robots.txt unless explicitly disabled.
        if not ignore_robots_txt:
            await check_may_autonomously_fetch_url(url, user_agent_autonomous, proxy_url)

        content, prefix = await fetch_url(
            url, user_agent_autonomous, force_raw=args.raw, proxy_url=proxy_url
        )
        original_length = len(content)
        # Pagination: return at most max_length characters starting at
        # start_index, and tell the model how to request the next page.
        if args.start_index >= original_length:
            content = "<error>No more content available.</error>"
        else:
            truncated_content = content[args.start_index : args.start_index + args.max_length]
            if not truncated_content:
                content = "<error>No more content available.</error>"
            else:
                content = truncated_content
                actual_content_length = len(truncated_content)
                remaining_content = original_length - (args.start_index + actual_content_length)
                # Only add the prompt to continue fetching if there is still remaining content
                if actual_content_length == args.max_length and remaining_content > 0:
                    next_start = args.start_index + actual_content_length
                    content += f"\n\n<error>Content truncated. Call the fetch tool with a start_index of {next_start} to get more content.</error>"
        return [TextContent(type="text", text=f"{prefix}Contents of {url}:\n{content}")]

    @server.get_prompt()
    async def get_prompt(name: str, arguments: dict | None) -> GetPromptResult:
        if not arguments or "url" not in arguments:
            raise McpError(ErrorData(code=INVALID_PARAMS, message="URL is required"))

        url = arguments["url"]

        try:
            # Manual (prompt-driven) fetches skip the robots.txt check.
            content, prefix = await fetch_url(url, user_agent_manual, proxy_url=proxy_url)
            # TODO: after SDK bug is addressed, don't catch the exception
        except McpError as e:
            # Report the failure as prompt content instead of raising, so
            # the client still receives a usable prompt result.
            return GetPromptResult(
                description=f"Failed to fetch {url}",
                messages=[
                    PromptMessage(
                        role="user",
                        content=TextContent(type="text", text=str(e)),
                    )
                ],
            )
        return GetPromptResult(
            description=f"Contents of {url}",
            messages=[
                PromptMessage(
                    role="user", content=TextContent(type="text", text=prefix + content)
                )
            ],
        )

    options = server.create_initialization_options()
    # Serve over stdio; raise_exceptions=True propagates handler errors
    # instead of swallowing them.
    async with stdio_server() as (read_stream, write_stream):
        await server.run(read_stream, write_stream, options, raise_exceptions=True)
289 | 


--------------------------------------------------------------------------------
/src/filesystem/Dockerfile:
--------------------------------------------------------------------------------
 1 | FROM node:22.12-alpine AS builder
 2 | 
 3 | WORKDIR /app
 4 | 
 5 | COPY src/filesystem /app
 6 | COPY tsconfig.json /tsconfig.json
 7 | 
 8 | RUN --mount=type=cache,target=/root/.npm npm install
 9 | 
10 | RUN --mount=type=cache,target=/root/.npm-production npm ci --ignore-scripts --omit-dev
11 | 
12 | 
13 | FROM node:22-alpine AS release
14 | 
15 | WORKDIR /app
16 | 
17 | COPY --from=builder /app/dist /app/dist
18 | COPY --from=builder /app/package.json /app/package.json
19 | COPY --from=builder /app/package-lock.json /app/package-lock.json
20 | 
21 | ENV NODE_ENV=production
22 | 
23 | RUN npm ci --ignore-scripts --omit-dev
24 | 
25 | ENTRYPOINT ["node", "/app/dist/index.js"]


--------------------------------------------------------------------------------
/src/filesystem/README.md:
--------------------------------------------------------------------------------
  1 | # Filesystem MCP Server
  2 | 
  3 | Node.js server implementing Model Context Protocol (MCP) for filesystem operations.
  4 | 
  5 | ## Features
  6 | 
  7 | - Read/write files
  8 | - Create/list/delete directories
  9 | - Move files/directories
 10 | - Search files
 11 | - Get file metadata
 12 | - Dynamic directory access control via [Roots](https://modelcontextprotocol.io/docs/concepts/roots)
 13 | 
 14 | ## Directory Access Control
 15 | 
 16 | The server uses a flexible directory access control system. Directories can be specified via command-line arguments or dynamically via [Roots](https://modelcontextprotocol.io/docs/concepts/roots).
 17 | 
 18 | ### Method 1: Command-line Arguments
Specify the allowed directories when starting the server:
 20 | ```bash
 21 | mcp-server-filesystem /path/to/dir1 /path/to/dir2
 22 | ```
 23 | 
 24 | ### Method 2: MCP Roots (Recommended)
 25 | MCP clients that support [Roots](https://modelcontextprotocol.io/docs/concepts/roots) can dynamically update the Allowed directories. 
 26 | 
Roots supplied by the client to the server completely replace any server-side allowed directories when provided.
 28 | 
 29 | **Important**: If server starts without command-line arguments AND client doesn't support roots protocol (or provides empty roots), the server will throw an error during initialization.
 30 | 
 31 | This is the recommended method, as this enables runtime directory updates via `roots/list_changed` notifications without server restart, providing a more flexible and modern integration experience.
 32 | 
 33 | ### How It Works
 34 | 
 35 | The server's directory access control follows this flow:
 36 | 
 37 | 1. **Server Startup**
 38 |    - Server starts with directories from command-line arguments (if provided)
 39 |    - If no arguments provided, server starts with empty allowed directories
 40 | 
 41 | 2. **Client Connection & Initialization**
 42 |    - Client connects and sends `initialize` request with capabilities
 43 |    - Server checks if client supports roots protocol (`capabilities.roots`)
 44 |    
 45 | 3. **Roots Protocol Handling** (if client supports roots)
 46 |    - **On initialization**: Server requests roots from client via `roots/list`
 47 |    - Client responds with its configured roots
 48 |    - Server replaces ALL allowed directories with client's roots
 49 |    - **On runtime updates**: Client can send `notifications/roots/list_changed`
 50 |    - Server requests updated roots and replaces allowed directories again
 51 | 
 52 | 4. **Fallback Behavior** (if client doesn't support roots)
 53 |    - Server continues using command-line directories only
 54 |    - No dynamic updates possible
 55 | 
 56 | 5. **Access Control**
 57 |    - All filesystem operations are restricted to allowed directories
 58 |    - Use `list_allowed_directories` tool to see current directories
 59 |    - Server requires at least ONE allowed directory to operate
 60 | 
 61 | **Note**: The server will only allow operations within directories specified either via `args` or via Roots.
 62 | 
 63 | 
 64 | 
 65 | ## API
 66 | 
 67 | ### Resources
 68 | 
 69 | - `file://system`: File system operations interface
 70 | 
 71 | ### Tools
 72 | 
 73 | - **read_file**
 74 |   - Read complete contents of a file
 75 |   - Input: `path` (string)
 76 |   - Reads complete file contents with UTF-8 encoding
 77 | 
 78 | - **read_multiple_files**
 79 |   - Read multiple files simultaneously
 80 |   - Input: `paths` (string[])
 81 |   - Failed reads won't stop the entire operation
 82 | 
 83 | - **write_file**
 84 |   - Create new file or overwrite existing (exercise caution with this)
 85 |   - Inputs:
 86 |     - `path` (string): File location
 87 |     - `content` (string): File content
 88 | 
 89 | - **edit_file**
 90 |   - Make selective edits using advanced pattern matching and formatting
 91 |   - Features:
 92 |     - Line-based and multi-line content matching
 93 |     - Whitespace normalization with indentation preservation
 94 |     - Multiple simultaneous edits with correct positioning
 95 |     - Indentation style detection and preservation
 96 |     - Git-style diff output with context
 97 |     - Preview changes with dry run mode
 98 |   - Inputs:
 99 |     - `path` (string): File to edit
100 |     - `edits` (array): List of edit operations
101 |       - `oldText` (string): Text to search for (can be substring)
102 |       - `newText` (string): Text to replace with
103 |     - `dryRun` (boolean): Preview changes without applying (default: false)
104 |   - Returns detailed diff and match information for dry runs, otherwise applies changes
105 |   - Best Practice: Always use dryRun first to preview changes before applying them
106 | 
107 | - **create_directory**
108 |   - Create new directory or ensure it exists
109 |   - Input: `path` (string)
110 |   - Creates parent directories if needed
111 |   - Succeeds silently if directory exists
112 | 
113 | - **list_directory**
114 |   - List directory contents with [FILE] or [DIR] prefixes
115 |   - Input: `path` (string)
116 | 
117 | - **move_file**
118 |   - Move or rename files and directories
119 |   - Inputs:
120 |     - `source` (string)
121 |     - `destination` (string)
122 |   - Fails if destination exists
123 | 
124 | - **search_files**
125 |   - Recursively search for files/directories
126 |   - Inputs:
127 |     - `path` (string): Starting directory
128 |     - `pattern` (string): Search pattern
129 |     - `excludePatterns` (string[]): Exclude any patterns. Glob formats are supported.
130 |   - Case-insensitive matching
131 |   - Returns full paths to matches
132 | 
133 | - **get_file_info**
134 |   - Get detailed file/directory metadata
135 |   - Input: `path` (string)
136 |   - Returns:
137 |     - Size
138 |     - Creation time
139 |     - Modified time
140 |     - Access time
141 |     - Type (file/directory)
142 |     - Permissions
143 | 
144 | - **list_allowed_directories**
145 |   - List all directories the server is allowed to access
146 |   - No input required
147 |   - Returns:
148 |     - Directories that this server can read/write from
149 | 
150 | ## Usage with Claude Desktop
151 | Add this to your `claude_desktop_config.json`:
152 | 
Note: you can provide sandboxed directories to the server by mounting them to `/projects`. Adding the `ro` flag makes the directory read-only to the server.
154 | 
155 | ### Docker
156 | Note: all directories must be mounted to `/projects` by default.
157 | 
158 | ```json
159 | {
160 |   "mcpServers": {
161 |     "filesystem": {
162 |       "command": "docker",
163 |       "args": [
164 |         "run",
165 |         "-i",
166 |         "--rm",
167 |         "--mount", "type=bind,src=/Users/username/Desktop,dst=/projects/Desktop",
168 |         "--mount", "type=bind,src=/path/to/other/allowed/dir,dst=/projects/other/allowed/dir,ro",
169 |         "--mount", "type=bind,src=/path/to/file.txt,dst=/projects/path/to/file.txt",
170 |         "mcp/filesystem",
171 |         "/projects"
172 |       ]
173 |     }
174 |   }
175 | }
176 | ```
177 | 
178 | ### NPX
179 | 
180 | ```json
181 | {
182 |   "mcpServers": {
183 |     "filesystem": {
184 |       "command": "npx",
185 |       "args": [
186 |         "-y",
187 |         "@modelcontextprotocol/server-filesystem",
188 |         "/Users/username/Desktop",
189 |         "/path/to/other/allowed/dir"
190 |       ]
191 |     }
192 |   }
193 | }
194 | ```
195 | 
196 | ## Usage with VS Code
197 | 
198 | For quick installation, click the installation buttons below...
199 | 
200 | [![Install with NPX in VS Code](https://img.shields.io/badge/VS_Code-NPM-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=filesystem&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-filesystem%22%2C%22%24%7BworkspaceFolder%7D%22%5D%7D) [![Install with NPX in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-NPM-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=filesystem&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-filesystem%22%2C%22%24%7BworkspaceFolder%7D%22%5D%7D&quality=insiders)
201 | 
202 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=filesystem&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22--mount%22%2C%22type%3Dbind%2Csrc%3D%24%7BworkspaceFolder%7D%2Cdst%3D%2Fprojects%2Fworkspace%22%2C%22mcp%2Ffilesystem%22%2C%22%2Fprojects%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=filesystem&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22--mount%22%2C%22type%3Dbind%2Csrc%3D%24%7BworkspaceFolder%7D%2Cdst%3D%2Fprojects%2Fworkspace%22%2C%22mcp%2Ffilesystem%22%2C%22%2Fprojects%22%5D%7D&quality=insiders)
203 | 
204 | For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open Settings (JSON)`.
205 | 
206 | Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others.
207 | 
208 | > Note that the `mcp` key is not needed in the `.vscode/mcp.json` file.
209 | 
210 | You can provide sandboxed directories to the server by mounting them to `/projects`. Adding the `ro` flag will make the directory readonly by the server.
211 | 
212 | ### Docker
213 | Note: all directories must be mounted to `/projects` by default. 
214 | 
215 | ```json
216 | {
217 |   "mcp": {
218 |     "servers": {
219 |       "filesystem": {
220 |         "command": "docker",
221 |         "args": [
222 |           "run",
223 |           "-i",
224 |           "--rm",
225 |           "--mount", "type=bind,src=${workspaceFolder},dst=/projects/workspace",
226 |           "mcp/filesystem",
227 |           "/projects"
228 |         ]
229 |       }
230 |     }
231 |   }
232 | }
233 | ```
234 | 
235 | ### NPX
236 | 
237 | ```json
238 | {
239 |   "mcp": {
240 |     "servers": {
241 |       "filesystem": {
242 |         "command": "npx",
243 |         "args": [
244 |           "-y",
245 |           "@modelcontextprotocol/server-filesystem",
246 |           "${workspaceFolder}"
247 |         ]
248 |       }
249 |     }
250 |   }
251 | }
252 | ```
253 | 
254 | ## Build
255 | 
256 | Docker build:
257 | 
258 | ```bash
259 | docker build -t mcp/filesystem -f src/filesystem/Dockerfile .
260 | ```
261 | 
262 | ## License
263 | 
264 | This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository.
265 | 


--------------------------------------------------------------------------------
/src/filesystem/__tests__/path-utils.test.ts:
--------------------------------------------------------------------------------
  1 | import { describe, it, expect } from '@jest/globals';
  2 | import { normalizePath, expandHome, convertToWindowsPath } from '../path-utils.js';
  3 | 
// Unit tests for the helpers in ../path-utils.js:
//   - convertToWindowsPath: WSL (/mnt/c/...) and Unix-style (/c/...) paths -> drive-letter form
//   - normalizePath: quote/whitespace cleanup plus separator normalization
//   - expandHome: leading ~ expansion to the home directory
// NOTE(review): the expectations assume Windows-style output (backslashes)
// regardless of host platform — confirm against the path-utils implementation.
describe('Path Utilities', () => {
  describe('convertToWindowsPath', () => {
    it('leaves Unix paths unchanged', () => {
      expect(convertToWindowsPath('/usr/local/bin'))
        .toBe('/usr/local/bin');
      expect(convertToWindowsPath('/home/user/some path'))
        .toBe('/home/user/some path');
    });

    it('converts WSL paths to Windows format', () => {
      expect(convertToWindowsPath('/mnt/c/NS/MyKindleContent'))
        .toBe('C:\\NS\\MyKindleContent');
    });

    it('converts Unix-style Windows paths to Windows format', () => {
      expect(convertToWindowsPath('/c/NS/MyKindleContent'))
        .toBe('C:\\NS\\MyKindleContent');
    });

    it('leaves Windows paths unchanged but ensures backslashes', () => {
      expect(convertToWindowsPath('C:\\NS\\MyKindleContent'))
        .toBe('C:\\NS\\MyKindleContent');
      expect(convertToWindowsPath('C:/NS/MyKindleContent'))
        .toBe('C:\\NS\\MyKindleContent');
    });

    it('handles Windows paths with spaces', () => {
      expect(convertToWindowsPath('C:\\Program Files\\Some App'))
        .toBe('C:\\Program Files\\Some App');
      expect(convertToWindowsPath('C:/Program Files/Some App'))
        .toBe('C:\\Program Files\\Some App');
    });

    it('handles uppercase and lowercase drive letters', () => {
      expect(convertToWindowsPath('/mnt/d/some/path'))
        .toBe('D:\\some\\path');
      expect(convertToWindowsPath('/d/some/path'))
        .toBe('D:\\some\\path');
    });
  });

  describe('normalizePath', () => {
    it('preserves Unix paths', () => {
      expect(normalizePath('/usr/local/bin'))
        .toBe('/usr/local/bin');
      expect(normalizePath('/home/user/some path'))
        .toBe('/home/user/some path');
      // Surrounding quotes and a trailing slash are both stripped.
      expect(normalizePath('"/usr/local/some app/"'))
        .toBe('/usr/local/some app');
    });

    it('removes surrounding quotes', () => {
      expect(normalizePath('"C:\\NS\\My Kindle Content"'))
        .toBe('C:\\NS\\My Kindle Content');
    });

    it('normalizes backslashes', () => {
      // Doubled backslashes collapse to single separators.
      expect(normalizePath('C:\\\\NS\\\\MyKindleContent'))
        .toBe('C:\\NS\\MyKindleContent');
    });

    it('converts forward slashes to backslashes on Windows', () => {
      expect(normalizePath('C:/NS/MyKindleContent'))
        .toBe('C:\\NS\\MyKindleContent');
    });

    it('handles WSL paths', () => {
      expect(normalizePath('/mnt/c/NS/MyKindleContent'))
        .toBe('C:\\NS\\MyKindleContent');
    });

    it('handles Unix-style Windows paths', () => {
      expect(normalizePath('/c/NS/MyKindleContent'))
        .toBe('C:\\NS\\MyKindleContent');
    });

    it('handles paths with spaces and mixed slashes', () => {
      expect(normalizePath('C:/NS/My Kindle Content'))
        .toBe('C:\\NS\\My Kindle Content');
      expect(normalizePath('/mnt/c/NS/My Kindle Content'))
        .toBe('C:\\NS\\My Kindle Content');
      expect(normalizePath('C:\\Program Files (x86)\\App Name'))
        .toBe('C:\\Program Files (x86)\\App Name');
      expect(normalizePath('"C:\\Program Files\\App Name"'))
        .toBe('C:\\Program Files\\App Name');
      // Leading/trailing whitespace is trimmed.
      expect(normalizePath('  C:\\Program Files\\App Name  '))
        .toBe('C:\\Program Files\\App Name');
    });

    it('preserves spaces in all path formats', () => {
      expect(normalizePath('/mnt/c/Program Files/App Name'))
        .toBe('C:\\Program Files\\App Name');
      expect(normalizePath('/c/Program Files/App Name'))
        .toBe('C:\\Program Files\\App Name');
      expect(normalizePath('C:/Program Files/App Name'))
        .toBe('C:\\Program Files\\App Name');
    });

    it('handles special characters in paths', () => {
      // Test ampersand in path
      expect(normalizePath('C:\\NS\\Sub&Folder'))
        .toBe('C:\\NS\\Sub&Folder');
      expect(normalizePath('C:/NS/Sub&Folder'))
        .toBe('C:\\NS\\Sub&Folder');
      expect(normalizePath('/mnt/c/NS/Sub&Folder'))
        .toBe('C:\\NS\\Sub&Folder');
      
      // Test tilde in path (short names in Windows)
      expect(normalizePath('C:\\NS\\MYKIND~1'))
        .toBe('C:\\NS\\MYKIND~1');
      // A ~ that is not the leading character must not be home-expanded.
      expect(normalizePath('/Users/NEMANS~1/FOLDER~2/SUBFO~1/Public/P12PST~1'))
        .toBe('/Users/NEMANS~1/FOLDER~2/SUBFO~1/Public/P12PST~1');
      
      // Test other special characters
      expect(normalizePath('C:\\Path with #hash'))
        .toBe('C:\\Path with #hash');
      expect(normalizePath('C:\\Path with (parentheses)'))
        .toBe('C:\\Path with (parentheses)');
      expect(normalizePath('C:\\Path with [brackets]'))
        .toBe('C:\\Path with [brackets]');
      expect(normalizePath('C:\\Path with @at+plus$dollar%percent'))
        .toBe('C:\\Path with @at+plus$dollar%percent');
    });

    it('capitalizes lowercase drive letters for Windows paths', () => {
      expect(normalizePath('c:/windows/system32'))
        .toBe('C:\\windows\\system32');
      expect(normalizePath('/mnt/d/my/folder')) // WSL path with lowercase drive
        .toBe('D:\\my\\folder');
      expect(normalizePath('/e/another/folder')) // Unix-style Windows path with lowercase drive
        .toBe('E:\\another\\folder');
    });

    it('handles UNC paths correctly', () => {
      // UNC paths should preserve the leading double backslash
      const uncPath = '\\\\SERVER\\share\\folder';
      expect(normalizePath(uncPath)).toBe('\\\\SERVER\\share\\folder');
      
      // Test UNC path with double backslashes that need normalization
      const uncPathWithDoubles = '\\\\\\\\SERVER\\\\share\\\\folder';
      expect(normalizePath(uncPathWithDoubles)).toBe('\\\\SERVER\\share\\folder');
    });

    it('returns normalized non-Windows/WSL/Unix-style Windows paths as is after basic normalization', () => {
      // Relative path
      const relativePath = 'some/relative/path';
      expect(normalizePath(relativePath)).toBe(relativePath.replace(/\//g, '\\'));

      // A path that looks somewhat absolute but isn't a drive or recognized Unix root for Windows conversion
      const otherAbsolutePath = '\\someserver\\share\\file';
      expect(normalizePath(otherAbsolutePath)).toBe(otherAbsolutePath);
    });
  });

  describe('expandHome', () => {
    it('expands ~ to home directory', () => {
      // Only shape is asserted (contains 'test', no '~' left) since the
      // home directory differs per machine.
      const result = expandHome('~/test');
      expect(result).toContain('test');
      expect(result).not.toContain('~');
    });

    it('leaves other paths unchanged', () => {
      expect(expandHome('C:/test')).toBe('C:/test');
    });
  });
});
170 | 


--------------------------------------------------------------------------------
/src/filesystem/__tests__/roots-utils.test.ts:
--------------------------------------------------------------------------------
 1 | import { describe, it, expect, beforeEach, afterEach } from '@jest/globals';
 2 | import { getValidRootDirectories } from '../roots-utils.js';
 3 | import { mkdtempSync, rmSync, mkdirSync, writeFileSync, realpathSync } from 'fs';
 4 | import { tmpdir } from 'os';
 5 | import { join } from 'path';
 6 | import type { Root } from '@modelcontextprotocol/sdk/types.js';
 7 | 
 8 | describe('getValidRootDirectories', () => {
 9 |   let testDir1: string;
10 |   let testDir2: string;
11 |   let testDir3: string;
12 |   let testFile: string;
13 | 
14 |   beforeEach(() => {
15 |     // Create test directories
16 |     testDir1 = realpathSync(mkdtempSync(join(tmpdir(), 'mcp-roots-test1-')));
17 |     testDir2 = realpathSync(mkdtempSync(join(tmpdir(), 'mcp-roots-test2-')));
18 |     testDir3 = realpathSync(mkdtempSync(join(tmpdir(), 'mcp-roots-test3-')));
19 | 
20 |     // Create a test file (not a directory)
21 |     testFile = join(testDir1, 'test-file.txt');
22 |     writeFileSync(testFile, 'test content');
23 |   });
24 | 
25 |   afterEach(() => {
26 |     // Cleanup
27 |     rmSync(testDir1, { recursive: true, force: true });
28 |     rmSync(testDir2, { recursive: true, force: true });
29 |     rmSync(testDir3, { recursive: true, force: true });
30 |   });
31 | 
32 |   describe('valid directory processing', () => {
33 |     it('should process all URI formats and edge cases', async () => {
34 |       const roots = [
35 |         { uri: `file://${testDir1}`, name: 'File URI' },
36 |         { uri: testDir2, name: 'Plain path' },
37 |         { uri: testDir3 } // Plain path without name property
38 |       ];
39 | 
40 |       const result = await getValidRootDirectories(roots);
41 | 
42 |       expect(result).toContain(testDir1);
43 |       expect(result).toContain(testDir2);
44 |       expect(result).toContain(testDir3);
45 |       expect(result).toHaveLength(3);
46 |     });
47 | 
48 |     it('should normalize complex paths', async () => {
49 |       const subDir = join(testDir1, 'subdir');
50 |       mkdirSync(subDir);
51 |       
52 |       const roots = [
53 |         { uri: `file://${testDir1}/./subdir/../subdir`, name: 'Complex Path' }
54 |       ];
55 | 
56 |       const result = await getValidRootDirectories(roots);
57 | 
58 |       expect(result).toHaveLength(1);
59 |       expect(result[0]).toBe(subDir);
60 |     });
61 |   });
62 | 
63 |   describe('error handling', () => {
64 | 
65 |     it('should handle various error types', async () => {
66 |       const nonExistentDir = join(tmpdir(), 'non-existent-directory-12345');
67 |       const invalidPath = '\0invalid\0path'; // Null bytes cause different error types
68 |       const roots = [
69 |         { uri: `file://${testDir1}`, name: 'Valid Dir' },
70 |         { uri: `file://${nonExistentDir}`, name: 'Non-existent Dir' },
71 |         { uri: `file://${testFile}`, name: 'File Not Dir' },
72 |         { uri: `file://${invalidPath}`, name: 'Invalid Path' }
73 |       ];
74 | 
75 |       const result = await getValidRootDirectories(roots);
76 | 
77 |       expect(result).toContain(testDir1);
78 |       expect(result).not.toContain(nonExistentDir);
79 |       expect(result).not.toContain(testFile);
80 |       expect(result).not.toContain(invalidPath);
81 |       expect(result).toHaveLength(1);
82 |     });
83 |   });
84 | });


--------------------------------------------------------------------------------
/src/filesystem/jest.config.cjs:
--------------------------------------------------------------------------------
 1 | /** @type {import('ts-jest').JestConfigWithTsJest} */
 2 | module.exports = {
 3 |   preset: 'ts-jest',
 4 |   testEnvironment: 'node',
 5 |   extensionsToTreatAsEsm: ['.ts'],
 6 |   moduleNameMapper: {
 7 |     '^(\\.{1,2}/.*)\\.js
#39;: '$1',
 8 |   },
 9 |   transform: {
10 |     '^.+\\.tsx?$': [
11 |       'ts-jest',
12 |       {
13 |         useESM: true,
14 |       },
15 |     ],
16 |   },
17 |   testMatch: ['**/__tests__/**/*.test.ts'],
18 |   collectCoverageFrom: [
19 |     '**/*.ts',
20 |     '!**/__tests__/**',
21 |     '!**/dist/**',
22 |   ],
23 | }
24 | 


--------------------------------------------------------------------------------
/src/filesystem/package.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "name": "@modelcontextprotocol/server-filesystem",
 3 |   "version": "0.6.2",
 4 |   "description": "MCP server for filesystem access",
 5 |   "license": "MIT",
 6 |   "author": "Anthropic, PBC (https://anthropic.com)",
 7 |   "homepage": "https://modelcontextprotocol.io",
 8 |   "bugs": "https://github.com/modelcontextprotocol/servers/issues",
 9 |   "type": "module",
10 |   "bin": {
11 |     "mcp-server-filesystem": "dist/index.js"
12 |   },
13 |   "files": [
14 |     "dist"
15 |   ],
16 |   "scripts": {
17 |     "build": "tsc && shx chmod +x dist/*.js",
18 |     "prepare": "npm run build",
19 |     "watch": "tsc --watch",
20 |     "test": "jest --config=jest.config.cjs --coverage"
21 |   },
22 |   "dependencies": {
23 |     "@modelcontextprotocol/sdk": "^1.12.3",
24 |     "diff": "^5.1.0",
25 |     "glob": "^10.3.10",
26 |     "minimatch": "^10.0.1",
27 |     "zod-to-json-schema": "^3.23.5"
28 |   },
29 |   "devDependencies": {
30 |     "@jest/globals": "^29.7.0",
31 |     "@types/diff": "^5.0.9",
32 |     "@types/jest": "^29.5.14",
33 |     "@types/minimatch": "^5.1.2",
34 |     "@types/node": "^22",
35 |     "jest": "^29.7.0",
36 |     "shx": "^0.3.4",
37 |     "ts-jest": "^29.1.1",
38 |     "ts-node": "^10.9.2",
39 |     "typescript": "^5.8.2"
40 |   }
41 | }


--------------------------------------------------------------------------------
/src/filesystem/path-utils.ts:
--------------------------------------------------------------------------------
  1 | import path from "path";
  2 | import os from 'os';
  3 | 
  4 | /**
  5 |  * Converts WSL or Unix-style Windows paths to Windows format
  6 |  * @param p The path to convert
  7 |  * @returns Converted Windows path
  8 |  */
  9 | export function convertToWindowsPath(p: string): string {
 10 |   // Handle WSL paths (/mnt/c/...)
 11 |   if (p.startsWith('/mnt/')) {
 12 |     const driveLetter = p.charAt(5).toUpperCase();
 13 |     const pathPart = p.slice(6).replace(/\//g, '\\');
 14 |     return `${driveLetter}:${pathPart}`;
 15 |   }
 16 |   
 17 |   // Handle Unix-style Windows paths (/c/...)
 18 |   if (p.match(/^\/[a-zA-Z]\//)) {
 19 |     const driveLetter = p.charAt(1).toUpperCase();
 20 |     const pathPart = p.slice(2).replace(/\//g, '\\');
 21 |     return `${driveLetter}:${pathPart}`;
 22 |   }
 23 | 
 24 |   // Handle standard Windows paths, ensuring backslashes
 25 |   if (p.match(/^[a-zA-Z]:/)) {
 26 |     return p.replace(/\//g, '\\');
 27 |   }
 28 | 
 29 |   // Leave non-Windows paths unchanged
 30 |   return p;
 31 | }
 32 | 
 33 | /**
 34 |  * Normalizes path by standardizing format while preserving OS-specific behavior
 35 |  * @param p The path to normalize
 36 |  * @returns Normalized path
 37 |  */
/**
 * Normalizes path by standardizing format while preserving OS-specific behavior.
 *
 * Pipeline:
 *  1. Strip surrounding quotes and whitespace.
 *  2. Pure Unix paths (leading '/', not WSL `/mnt/x/` or drive-style `/x/`)
 *     are collapsed and returned without any Windows conversion.
 *  3. Everything else is converted to Windows form, de-doubled, run through
 *     path.normalize, and returned with backslash separators and a
 *     capitalized drive letter.
 *
 * @param p The path to normalize
 * @returns Normalized path
 */
export function normalizePath(p: string): string {
  // Remove any surrounding quotes and whitespace
  p = p.trim().replace(/^["']|["']$/g, '');

  // Check if this is a Unix path (starts with / but not a Windows or WSL path)
  const isUnixPath = p.startsWith('/') && 
                    !p.match(/^\/mnt\/[a-z]\//i) && 
                    !p.match(/^\/[a-zA-Z]\//);

  if (isUnixPath) {
    // For Unix paths, just normalize without converting to Windows format
    // Replace double slashes with single slashes and remove trailing slashes
    return p.replace(/\/+/g, '/').replace(/\/+$/, '');
  }

  // Convert WSL or Unix-style Windows paths to Windows format
  p = convertToWindowsPath(p);

  // Handle double backslashes, preserving leading UNC \\
  if (p.startsWith('\\\\')) {
    // For UNC paths, first normalize any excessive leading backslashes to exactly \\
    // Then normalize double backslashes in the rest of the path
    let uncPath = p;
    // Replace multiple leading backslashes with exactly two
    uncPath = uncPath.replace(/^\\{2,}/, '\\\\');
    // Now normalize any remaining double backslashes in the rest of the path
    const restOfPath = uncPath.substring(2).replace(/\\\\/g, '\\');
    p = '\\\\' + restOfPath;
  } else {
    // For non-UNC paths, normalize all double backslashes
    p = p.replace(/\\\\/g, '\\');
  }

  // Use Node's path normalization, which handles . and .. segments.
  // NOTE(review): path.normalize is platform-dependent — on POSIX hosts it
  // does not treat backslashes as separators; confirm intended behavior when
  // this runs on non-Windows systems.
  let normalized = path.normalize(p);

  // Fix UNC paths after normalization (path.normalize can remove a leading backslash)
  if (p.startsWith('\\\\') && !normalized.startsWith('\\\\')) {
    normalized = '\\' + normalized;
  }

  // Handle Windows paths: convert slashes and ensure drive letter is capitalized
  if (normalized.match(/^[a-zA-Z]:/)) {
    let result = normalized.replace(/\//g, '\\');
    // Capitalize drive letter if present
    if (/^[a-z]:/.test(result)) {
      result = result.charAt(0).toUpperCase() + result.slice(1);
    }
    return result;
  }

  // For all other paths (including relative paths), convert forward slashes to backslashes
  // This ensures relative paths like "some/relative/path" become "some\\relative\\path"
  return normalized.replace(/\//g, '\\');
}
 93 | 
 94 | /**
 95 |  * Expands home directory tildes in paths
 96 |  * @param filepath The path to expand
 97 |  * @returns Expanded path
 98 |  */
 99 | export function expandHome(filepath: string): string {
100 |   if (filepath.startsWith('~/') || filepath === '~') {
101 |     return path.join(os.homedir(), filepath.slice(1));
102 |   }
103 |   return filepath;
104 | }
105 | 


--------------------------------------------------------------------------------
/src/filesystem/path-validation.ts:
--------------------------------------------------------------------------------
 1 | import path from 'path';
 2 | 
 3 | /**
 4 |  * Checks if an absolute path is within any of the allowed directories.
 5 |  * 
 6 |  * @param absolutePath - The absolute path to check (will be normalized)
 7 |  * @param allowedDirectories - Array of absolute allowed directory paths (will be normalized)
 8 |  * @returns true if the path is within an allowed directory, false otherwise
 9 |  * @throws Error if given relative paths after normalization
10 |  */
11 | export function isPathWithinAllowedDirectories(absolutePath: string, allowedDirectories: string[]): boolean {
12 |   // Type validation
13 |   if (typeof absolutePath !== 'string' || !Array.isArray(allowedDirectories)) {
14 |     return false;
15 |   }
16 | 
17 |   // Reject empty inputs
18 |   if (!absolutePath || allowedDirectories.length === 0) {
19 |     return false;
20 |   }
21 | 
22 |   // Reject null bytes (forbidden in paths)
23 |   if (absolutePath.includes('\x00')) {
24 |     return false;
25 |   }
26 | 
27 |   // Normalize the input path
28 |   let normalizedPath: string;
29 |   try {
30 |     normalizedPath = path.resolve(path.normalize(absolutePath));
31 |   } catch {
32 |     return false;
33 |   }
34 | 
35 |   // Verify it's absolute after normalization
36 |   if (!path.isAbsolute(normalizedPath)) {
37 |     throw new Error('Path must be absolute after normalization');
38 |   }
39 | 
40 |   // Check against each allowed directory
41 |   return allowedDirectories.some(dir => {
42 |     if (typeof dir !== 'string' || !dir) {
43 |       return false;
44 |     }
45 | 
46 |     // Reject null bytes in allowed dirs
47 |     if (dir.includes('\x00')) {
48 |       return false;
49 |     }
50 | 
51 |     // Normalize the allowed directory
52 |     let normalizedDir: string;
53 |     try {
54 |       normalizedDir = path.resolve(path.normalize(dir));
55 |     } catch {
56 |       return false;
57 |     }
58 | 
59 |     // Verify allowed directory is absolute after normalization
60 |     if (!path.isAbsolute(normalizedDir)) {
61 |       throw new Error('Allowed directories must be absolute paths after normalization');
62 |     }
63 | 
64 |     // Check if normalizedPath is within normalizedDir
65 |     // Path is inside if it's the same or a subdirectory
66 |     if (normalizedPath === normalizedDir) {
67 |       return true;
68 |     }
69 |     
70 |     // Special case for root directory to avoid double slash
71 |     if (normalizedDir === path.sep) {
72 |       return normalizedPath.startsWith(path.sep);
73 |     }
74 |     
75 |     return normalizedPath.startsWith(normalizedDir + path.sep);
76 |   });
77 | }


--------------------------------------------------------------------------------
/src/filesystem/roots-utils.ts:
--------------------------------------------------------------------------------
 1 | import { promises as fs, type Stats } from 'fs';
 2 | import path from 'path';
 3 | import os from 'os';
 4 | import { normalizePath } from './path-utils.js';
 5 | import type { Root } from '@modelcontextprotocol/sdk/types.js';
 6 | 
 7 | /**
 8 |  * Converts a root URI to a normalized directory path with basic security validation.
 9 |  * @param rootUri - File URI (file://...) or plain directory path
10 |  * @returns Promise resolving to validated path or null if invalid
11 |  */
12 | async function parseRootUri(rootUri: string): Promise<string | null> {
13 |   try {
14 |     const rawPath = rootUri.startsWith('file://') ? rootUri.slice(7) : rootUri;
15 |     const expandedPath = rawPath.startsWith('~/') || rawPath === '~' 
16 |       ? path.join(os.homedir(), rawPath.slice(1)) 
17 |       : rawPath;
18 |     const absolutePath = path.resolve(expandedPath);
19 |     const resolvedPath = await fs.realpath(absolutePath);
20 |     return normalizePath(resolvedPath);
21 |   } catch {
22 |     return null; // Path doesn't exist or other error
23 |   }
24 | }
25 | 
26 | /**
27 |  * Formats error message for directory validation failures.
28 |  * @param dir - Directory path that failed validation
29 |  * @param error - Error that occurred during validation
30 |  * @param reason - Specific reason for failure
31 |  * @returns Formatted error message
32 |  */
33 | function formatDirectoryError(dir: string, error?: unknown, reason?: string): string {
34 |   if (reason) {
35 |     return `Skipping ${reason}: ${dir}`;
36 |   }
37 |   const message = error instanceof Error ? error.message : String(error);
38 |   return `Skipping invalid directory: ${dir} due to error: ${message}`;
39 | }
40 | 
41 | /**
42 |  * Resolves requested root directories from MCP root specifications.
43 |  * 
44 |  * Converts root URI specifications (file:// URIs or plain paths) into normalized
45 |  * directory paths, validating that each path exists and is a directory.
46 |  * Includes symlink resolution for security.
47 |  * 
48 |  * @param requestedRoots - Array of root specifications with URI and optional name
49 |  * @returns Promise resolving to array of validated directory paths
50 |  */
51 | export async function getValidRootDirectories(
52 |   requestedRoots: readonly Root[]
53 | ): Promise<string[]> {
54 |   const validatedDirectories: string[] = [];
55 |   
56 |   for (const requestedRoot of requestedRoots) {
57 |     const resolvedPath = await parseRootUri(requestedRoot.uri);
58 |     if (!resolvedPath) {
59 |       console.error(formatDirectoryError(requestedRoot.uri, undefined, 'invalid path or inaccessible'));
60 |       continue;
61 |     }
62 |     
63 |     try {
64 |       const stats: Stats = await fs.stat(resolvedPath);
65 |       if (stats.isDirectory()) {
66 |         validatedDirectories.push(resolvedPath);
67 |       } else {
68 |         console.error(formatDirectoryError(resolvedPath, undefined, 'non-directory root'));
69 |       }
70 |     } catch (error) {
71 |       console.error(formatDirectoryError(resolvedPath, error));
72 |     }
73 |   }
74 |   
75 |   return validatedDirectories;
76 | }


--------------------------------------------------------------------------------
/src/filesystem/tsconfig.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "extends": "../../tsconfig.json",
 3 |   "compilerOptions": {
 4 |     "outDir": "./dist",
 5 |     "rootDir": ".",
 6 |     "moduleResolution": "NodeNext",
 7 |     "module": "NodeNext"
 8 |   },
 9 |   "include": [
10 |     "./**/*.ts"
11 |   ]
12 | }
13 | 


--------------------------------------------------------------------------------
/src/git/.gitignore:
--------------------------------------------------------------------------------
1 | __pycache__
2 | .venv
3 | 


--------------------------------------------------------------------------------
/src/git/.python-version:
--------------------------------------------------------------------------------
1 | 3.10
2 | 


--------------------------------------------------------------------------------
/src/git/Dockerfile:
--------------------------------------------------------------------------------
# Use a Python image with uv pre-installed
FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv

# Install the project into `/app`
WORKDIR /app

# Enable bytecode compilation
ENV UV_COMPILE_BYTECODE=1

# Copy from the cache instead of linking since it's a mounted volume
ENV UV_LINK_MODE=copy

# Install the project's dependencies using the lockfile and settings
RUN --mount=type=cache,target=/root/.cache/uv \
    --mount=type=bind,source=uv.lock,target=uv.lock \
    --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
    uv sync --frozen --no-install-project --no-dev --no-editable

# Then, add the rest of the project source code and install it
# Installing separately from its dependencies allows optimal layer caching
ADD . /app
RUN --mount=type=cache,target=/root/.cache/uv \
    uv sync --frozen --no-dev --no-editable

FROM python:3.12-slim-bookworm

# git binary is required at runtime (the server manipulates repositories)
RUN apt-get update && apt-get install -y git && rm -rf /var/lib/apt/lists/*

WORKDIR /app

# NOTE(review): the uv build stage installs into /app/.venv; nothing visible
# here writes to /root/.local — confirm this COPY is actually needed.
COPY --from=uv /root/.local /root/.local
# NOTE(review): no 'app' user/group is created in this image, so
# --chown=app:app looks wrong — confirm, or add a matching user.
COPY --from=uv --chown=app:app /app/.venv /app/.venv

# Place executables in the environment at the front of the path
ENV PATH="/app/.venv/bin:$PATH"

# when running the container, bind-mount the host repository path(s) the
# server should be able to access
ENTRYPOINT ["mcp-server-git"]
39 | 


--------------------------------------------------------------------------------
/src/git/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2024 Anthropic, PBC.
2 | 
3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
4 | 
5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
6 | 
7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
8 | 


--------------------------------------------------------------------------------
/src/git/README.md:
--------------------------------------------------------------------------------
  1 | # mcp-server-git: A git MCP server
  2 | 
  3 | ## Overview
  4 | 
  5 | A Model Context Protocol server for Git repository interaction and automation. This server provides tools to read, search, and manipulate Git repositories via Large Language Models.
  6 | 
  7 | Please note that mcp-server-git is currently in early development. The functionality and available tools are subject to change and expansion as we continue to develop and improve the server.
  8 | 
  9 | ### Tools
 10 | 
 11 | 1. `git_status`
 12 |    - Shows the working tree status
 13 |    - Input:
 14 |      - `repo_path` (string): Path to Git repository
 15 |    - Returns: Current status of working directory as text output
 16 | 
 17 | 2. `git_diff_unstaged`
 18 |    - Shows changes in working directory not yet staged
 19 |    - Inputs:
 20 |      - `repo_path` (string): Path to Git repository
 21 |      - `context_lines` (number, optional): Number of context lines to show (default: 3)
 22 |    - Returns: Diff output of unstaged changes
 23 | 
 24 | 3. `git_diff_staged`
 25 |    - Shows changes that are staged for commit
 26 |    - Inputs:
 27 |      - `repo_path` (string): Path to Git repository
 28 |      - `context_lines` (number, optional): Number of context lines to show (default: 3)
 29 |    - Returns: Diff output of staged changes
 30 | 
 31 | 4. `git_diff`
 32 |    - Shows differences between branches or commits
 33 |    - Inputs:
 34 |      - `repo_path` (string): Path to Git repository
 35 |      - `target` (string): Target branch or commit to compare with
 36 |      - `context_lines` (number, optional): Number of context lines to show (default: 3)
 37 |    - Returns: Diff output comparing current state with target
 38 | 
 39 | 5. `git_commit`
 40 |    - Records changes to the repository
 41 |    - Inputs:
 42 |      - `repo_path` (string): Path to Git repository
 43 |      - `message` (string): Commit message
 44 |    - Returns: Confirmation with new commit hash
 45 | 
 46 | 6. `git_add`
 47 |    - Adds file contents to the staging area
 48 |    - Inputs:
 49 |      - `repo_path` (string): Path to Git repository
 50 |      - `files` (string[]): Array of file paths to stage
 51 |    - Returns: Confirmation of staged files
 52 | 
 53 | 7. `git_reset`
 54 |    - Unstages all staged changes
 55 |    - Input:
 56 |      - `repo_path` (string): Path to Git repository
 57 |    - Returns: Confirmation of reset operation
 58 | 
 59 | 8. `git_log`
 60 |    - Shows the commit logs
 61 |    - Inputs:
 62 |      - `repo_path` (string): Path to Git repository
 63 |      - `max_count` (number, optional): Maximum number of commits to show (default: 10)
 64 |    - Returns: Array of commit entries with hash, author, date, and message
 65 | 
 66 | 9. `git_create_branch`
 67 |    - Creates a new branch
 68 |    - Inputs:
 69 |      - `repo_path` (string): Path to Git repository
 70 |      - `branch_name` (string): Name of the new branch
 71 |      - `start_point` (string, optional): Starting point for the new branch
 72 |    - Returns: Confirmation of branch creation
 73 | 10. `git_checkout`
 74 |    - Switches branches
 75 |    - Inputs:
 76 |      - `repo_path` (string): Path to Git repository
 77 |      - `branch_name` (string): Name of branch to checkout
 78 |    - Returns: Confirmation of branch switch
 79 | 11. `git_show`
 80 |    - Shows the contents of a commit
 81 |    - Inputs:
 82 |      - `repo_path` (string): Path to Git repository
 83 |      - `revision` (string): The revision (commit hash, branch name, tag) to show
 84 |    - Returns: Contents of the specified commit
 85 | 12. `git_init`
 86 |    - Initializes a Git repository
 87 |    - Inputs:
 88 |      - `repo_path` (string): Path to directory to initialize git repo
 89 |    - Returns: Confirmation of repository initialization
 90 | 
 91 | 13. `git_branch`
 92 |    - List Git branches
 93 |    - Inputs:
 94 |      - `repo_path` (string): Path to the Git repository.
 95 |      - `branch_type` (string): Whether to list local branches ('local'), remote branches ('remote') or all branches('all').
 96 |      - `contains` (string, optional): The commit sha that branch should contain. Do not pass anything to this param if no commit sha is specified
 97 |      - `not_contains` (string, optional): The commit sha that branch should NOT contain. Do not pass anything to this param if no commit sha is specified
 98 |    - Returns: List of branches
 99 | 
100 | ## Installation
101 | 
102 | ### Using uv (recommended)
103 | 
104 | When using [`uv`](https://docs.astral.sh/uv/) no specific installation is needed. We will
105 | use [`uvx`](https://docs.astral.sh/uv/guides/tools/) to directly run *mcp-server-git*.
106 | 
107 | ### Using PIP
108 | 
109 | Alternatively you can install `mcp-server-git` via pip:
110 | 
111 | ```
112 | pip install mcp-server-git
113 | ```
114 | 
115 | After installation, you can run it as a script using:
116 | 
117 | ```
118 | python -m mcp_server_git
119 | ```
120 | 
121 | ## Configuration
122 | 
123 | ### Usage with Claude Desktop
124 | 
125 | Add this to your `claude_desktop_config.json`:
126 | 
127 | <details>
128 | <summary>Using uvx</summary>
129 | 
130 | ```json
131 | "mcpServers": {
132 |   "git": {
133 |     "command": "uvx",
134 |     "args": ["mcp-server-git", "--repository", "path/to/git/repo"]
135 |   }
136 | }
137 | ```
138 | </details>
139 | 
140 | <details>
141 | <summary>Using docker</summary>
142 | 
143 | * Note: replace '/Users/username' with a path that you want to be accessible by this tool
144 | 
145 | ```json
146 | "mcpServers": {
147 |   "git": {
148 |     "command": "docker",
149 |     "args": ["run", "--rm", "-i", "--mount", "type=bind,src=/Users/username,dst=/Users/username", "mcp/git"]
150 |   }
151 | }
152 | ```
153 | </details>
154 | 
155 | <details>
156 | <summary>Using pip installation</summary>
157 | 
158 | ```json
159 | "mcpServers": {
160 |   "git": {
161 |     "command": "python",
162 |     "args": ["-m", "mcp_server_git", "--repository", "path/to/git/repo"]
163 |   }
164 | }
165 | ```
166 | </details>
167 | 
168 | ### Usage with VS Code
169 | 
170 | For quick installation, use one of the one-click install buttons below...
171 | 
172 | [![Install with UV in VS Code](https://img.shields.io/badge/VS_Code-UV-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=git&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-git%22%5D%7D) [![Install with UV in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-UV-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=git&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-git%22%5D%7D&quality=insiders)
173 | 
174 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=git&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22--rm%22%2C%22-i%22%2C%22--mount%22%2C%22type%3Dbind%2Csrc%3D%24%7BworkspaceFolder%7D%2Cdst%3D%2Fworkspace%22%2C%22mcp%2Fgit%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=git&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22--rm%22%2C%22-i%22%2C%22--mount%22%2C%22type%3Dbind%2Csrc%3D%24%7BworkspaceFolder%7D%2Cdst%3D%2Fworkspace%22%2C%22mcp%2Fgit%22%5D%7D&quality=insiders)
175 | 
176 | For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open Settings (JSON)`.
177 | 
178 | Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 
179 | 
180 | > Note that the `mcp` key is not needed in the `.vscode/mcp.json` file.
181 | 
182 | ```json
183 | {
184 |   "mcp": {
185 |     "servers": {
186 |       "git": {
187 |         "command": "uvx",
188 |         "args": ["mcp-server-git"]
189 |       }
190 |     }
191 |   }
192 | }
193 | ```
194 | 
195 | For Docker installation:
196 | 
197 | ```json
198 | {
199 |   "mcp": {
200 |     "servers": {
201 |       "git": {
202 |         "command": "docker",
203 |         "args": [
204 |           "run",
205 |           "--rm",
206 |           "-i",
207 |           "--mount", "type=bind,src=${workspaceFolder},dst=/workspace",
208 |           "mcp/git"
209 |         ]
210 |       }
211 |     }
212 |   }
213 | }
214 | ```
215 | 
216 | ### Usage with [Zed](https://github.com/zed-industries/zed)
217 | 
218 | Add to your Zed settings.json:
219 | 
220 | <details>
221 | <summary>Using uvx</summary>
222 | 
223 | ```json
224 | "context_servers": {
225 |   "mcp-server-git": {
226 |     "command": {
227 |       "path": "uvx",
228 |       "args": ["mcp-server-git"]
229 |     }
230 |   }
231 | },
232 | ```
233 | </details>
234 | 
235 | <details>
236 | <summary>Using pip installation</summary>
237 | 
238 | ```json
239 | "context_servers": {
240 |   "mcp-server-git": {
241 |     "command": {
242 |       "path": "python",
243 |       "args": ["-m", "mcp_server_git"]
244 |     }
245 |   }
246 | },
247 | ```
248 | </details>
249 | 
250 | ### Usage with [Zencoder](https://zencoder.ai)
251 | 
252 | 1. Go to the Zencoder menu (...)
253 | 2. From the dropdown menu, select `Agent Tools`
254 | 3. Click on the `Add Custom MCP`
255 | 4. Add the name (i.e. git) and server configuration from below, and make sure to hit the `Install` button
256 | 
257 | <details>
258 | <summary>Using uvx</summary>
259 | 
260 | ```json
261 | {
262 |     "command": "uvx",
263 |     "args": ["mcp-server-git", "--repository", "path/to/git/repo"]
264 | }
265 | ```
266 | </details>
267 | 
268 | ## Debugging
269 | 
270 | You can use the MCP inspector to debug the server. For uvx installations:
271 | 
272 | ```
273 | npx @modelcontextprotocol/inspector uvx mcp-server-git
274 | ```
275 | 
276 | Or if you've installed the package in a specific directory or are developing on it:
277 | 
278 | ```
279 | cd path/to/servers/src/git
280 | npx @modelcontextprotocol/inspector uv run mcp-server-git
281 | ```
282 | 
283 | Running `tail -n 20 -f ~/Library/Logs/Claude/mcp*.log` will show the logs from the server and may
284 | help you debug any issues.
285 | 
286 | ## Development
287 | 
288 | If you are doing local development, there are two ways to test your changes:
289 | 
290 | 1. Run the MCP inspector to test your changes. See [Debugging](#debugging) for run instructions.
291 | 
292 | 2. Test using the Claude desktop app. Add the following to your `claude_desktop_config.json`:
293 | 
294 | ### Docker
295 | 
296 | ```json
297 | {
298 |   "mcpServers": {
299 |     "git": {
300 |       "command": "docker",
301 |       "args": [
302 |         "run",
303 |         "--rm",
304 |         "-i",
305 |         "--mount", "type=bind,src=/Users/username/Desktop,dst=/projects/Desktop",
306 |         "--mount", "type=bind,src=/path/to/other/allowed/dir,dst=/projects/other/allowed/dir,ro",
307 |         "--mount", "type=bind,src=/path/to/file.txt,dst=/projects/path/to/file.txt",
308 |         "mcp/git"
309 |       ]
310 |     }
311 |   }
312 | }
313 | ```
314 | 
315 | ### UVX
316 | ```json
317 | {
318 | "mcpServers": {
319 |   "git": {
320 |     "command": "uv",
321 |     "args": [
322 |       "--directory",
323 |       "/<path to mcp-servers>/mcp-servers/src/git",
324 |       "run",
325 |       "mcp-server-git"
326 |     ]
327 |     }
328 |   }
329 | }
330 | ```
331 | 
332 | ## Build
333 | 
334 | Docker build:
335 | 
336 | ```bash
337 | cd src/git
338 | docker build -t mcp/git .
339 | ```
340 | 
341 | ## License
342 | 
343 | This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository.
344 | 


--------------------------------------------------------------------------------
/src/git/pyproject.toml:
--------------------------------------------------------------------------------
 1 | [project]
 2 | name = "mcp-server-git"
 3 | version = "0.6.2"
 4 | description = "A Model Context Protocol server providing tools to read, search, and manipulate Git repositories programmatically via LLMs"
 5 | readme = "README.md"
 6 | requires-python = ">=3.10"
 7 | authors = [{ name = "Anthropic, PBC." }]
 8 | maintainers = [{ name = "David Soria Parra", email = "davidsp@anthropic.com" }]
 9 | keywords = ["git", "mcp", "llm", "automation"]
10 | license = { text = "MIT" }
11 | classifiers = [
12 |     "Development Status :: 4 - Beta",
13 |     "Intended Audience :: Developers",
14 |     "License :: OSI Approved :: MIT License",
15 |     "Programming Language :: Python :: 3",
16 |     "Programming Language :: Python :: 3.10",
17 | ]
18 | dependencies = [
19 |     "click>=8.1.7",
20 |     "gitpython>=3.1.43",
21 |     "mcp>=1.0.0",
22 |     "pydantic>=2.0.0",
23 | ]
24 | 
25 | [project.scripts]
26 | mcp-server-git = "mcp_server_git:main"
27 | 
28 | [build-system]
29 | requires = ["hatchling"]
30 | build-backend = "hatchling.build"
31 | 
32 | [tool.uv]
33 | dev-dependencies = ["pyright>=1.1.389", "ruff>=0.7.3", "pytest>=8.0.0"]
34 | 
35 | [tool.pytest.ini_options]
36 | testpaths = ["tests"]
37 | python_files = "test_*.py"
38 | python_classes = "Test*"
39 | python_functions = "test_*"
40 | 


--------------------------------------------------------------------------------
/src/git/src/mcp_server_git/__init__.py:
--------------------------------------------------------------------------------
import click
from pathlib import Path
import logging
import sys
from .server import serve

@click.command()
@click.option("--repository", "-r", type=Path, help="Git repository path")
@click.option("-v", "--verbose", count=True)
def main(repository: Path | None, verbose: int) -> None:
    """MCP Git Server - Git functionality for MCP"""
    import asyncio

    # click's count=True yields the number of -v flags as an int (0, 1, 2, ...).
    logging_level = logging.WARN
    if verbose == 1:
        logging_level = logging.INFO
    elif verbose >= 2:
        logging_level = logging.DEBUG

    # Log to stderr so stdout stays free for the MCP stdio transport.
    logging.basicConfig(level=logging_level, stream=sys.stderr)
    asyncio.run(serve(repository))

if __name__ == "__main__":
    main()
25 | 


--------------------------------------------------------------------------------
/src/git/src/mcp_server_git/__main__.py:
--------------------------------------------------------------------------------
# __main__.py
# Entry point so the server can be launched with `python -m mcp_server_git`.

from mcp_server_git import main

main()
6 | 


--------------------------------------------------------------------------------
/src/git/src/mcp_server_git/py.typed:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/modelcontextprotocol/servers/5dd2182c21457d6eea20f639e5aa0d96e03e5fc0/src/git/src/mcp_server_git/py.typed


--------------------------------------------------------------------------------
/src/git/src/mcp_server_git/server.py:
--------------------------------------------------------------------------------
  1 | import logging
  2 | from pathlib import Path
  3 | from typing import Sequence, Optional
  4 | from mcp.server import Server
  5 | from mcp.server.session import ServerSession
  6 | from mcp.server.stdio import stdio_server
  7 | from mcp.types import (
  8 |     ClientCapabilities,
  9 |     TextContent,
 10 |     Tool,
 11 |     ListRootsResult,
 12 |     RootsCapability,
 13 | )
 14 | from enum import Enum
 15 | import git
 16 | from pydantic import BaseModel, Field
 17 | 
 18 | # Default number of context lines to show in diff output
 19 | DEFAULT_CONTEXT_LINES = 3
 20 | 
# --- Tool argument schemas --------------------------------------------------
# Each model's model_json_schema() is used as the inputSchema of the matching
# MCP tool registered in serve() below. Fields are documented with comments
# rather than docstrings: a class docstring would be folded into the generated
# JSON schema and change the advertised tool schemas.

class GitStatus(BaseModel):
    # Path to the repository whose working tree should be reported.
    repo_path: str

class GitDiffUnstaged(BaseModel):
    repo_path: str
    # Unified-diff context lines (git's --unified=N).
    context_lines: int = DEFAULT_CONTEXT_LINES

class GitDiffStaged(BaseModel):
    repo_path: str
    # Unified-diff context lines (git's --unified=N).
    context_lines: int = DEFAULT_CONTEXT_LINES

class GitDiff(BaseModel):
    repo_path: str
    # Branch or commit to diff the working tree against.
    target: str
    context_lines: int = DEFAULT_CONTEXT_LINES

class GitCommit(BaseModel):
    repo_path: str
    # Commit message for the staged changes.
    message: str

class GitAdd(BaseModel):
    repo_path: str
    # Paths to stage, as accepted by repo.index.add().
    files: list[str]

class GitReset(BaseModel):
    repo_path: str

class GitLog(BaseModel):
    repo_path: str
    # Maximum number of commits to return (newest first).
    max_count: int = 10

class GitCreateBranch(BaseModel):
    repo_path: str
    branch_name: str
    # Base for the new branch; falls back to the active branch when omitted.
    base_branch: str | None = None

class GitCheckout(BaseModel):
    repo_path: str
    branch_name: str

class GitShow(BaseModel):
    repo_path: str
    # Commit-ish to show (passed to repo.commit()).
    revision: str

class GitInit(BaseModel):
    repo_path: str

class GitBranch(BaseModel):
    repo_path: str = Field(
        ...,
        description="The path to the Git repository.",
    )
    branch_type: str = Field(
        ...,
        description="Whether to list local branches ('local'), remote branches ('remote') or all branches('all').",
    )
    contains: Optional[str] = Field(
        None,
        description="The commit sha that branch should contain. Do not pass anything to this param if no commit sha is specified",
    )
    not_contains: Optional[str] = Field(
        None,
        description="The commit sha that branch should NOT contain. Do not pass anything to this param if no commit sha is specified",
    )
 85 | 
class GitTools(str, Enum):
    """Wire names of the tools exposed by this server.

    Inheriting from str lets members compare equal to the raw tool-name
    strings received in call_tool().
    """
    STATUS = "git_status"
    DIFF_UNSTAGED = "git_diff_unstaged"
    DIFF_STAGED = "git_diff_staged"
    DIFF = "git_diff"
    COMMIT = "git_commit"
    ADD = "git_add"
    RESET = "git_reset"
    LOG = "git_log"
    CREATE_BRANCH = "git_create_branch"
    CHECKOUT = "git_checkout"
    SHOW = "git_show"
    INIT = "git_init"
    BRANCH = "git_branch"
100 | 
def git_status(repo: git.Repo) -> str:
    """Return the human-readable `git status` output for *repo*."""
    status_text = repo.git.status()
    return status_text
103 | 
def git_diff_unstaged(repo: git.Repo, context_lines: int = DEFAULT_CONTEXT_LINES) -> str:
    """Diff of working-tree changes that have not been staged yet."""
    unified_flag = f"--unified={context_lines}"
    return repo.git.diff(unified_flag)
106 | 
def git_diff_staged(repo: git.Repo, context_lines: int = DEFAULT_CONTEXT_LINES) -> str:
    """Diff of changes currently staged for commit (index vs HEAD)."""
    unified_flag = f"--unified={context_lines}"
    return repo.git.diff(unified_flag, "--cached")
109 | 
def git_diff(repo: git.Repo, target: str, context_lines: int = DEFAULT_CONTEXT_LINES) -> str:
    """Diff of the working tree against *target* (a branch or commit)."""
    unified_flag = f"--unified={context_lines}"
    return repo.git.diff(unified_flag, target)
112 | 
def git_commit(repo: git.Repo, message: str) -> str:
    """Commit whatever is staged in the index and report the new sha."""
    new_commit = repo.index.commit(message)
    return f"Changes committed successfully with hash {new_commit.hexsha}"
116 | 
def git_add(repo: git.Repo, files: list[str]) -> str:
    """Stage the given paths in the repository's index."""
    repo.index.add(files)
    return "Files staged successfully"
120 | 
def git_reset(repo: git.Repo) -> str:
    """Unstage everything (reset the index; the working tree is untouched)."""
    repo.index.reset()
    return "All staged changes reset"
124 | 
def git_log(repo: git.Repo, max_count: int = 10) -> list[str]:
    """Return formatted summaries of the most recent commits, newest first."""
    return [
        f"Commit: {commit.hexsha!r}\n"
        f"Author: {commit.author!r}\n"
        f"Date: {commit.authored_datetime}\n"
        f"Message: {commit.message!r}\n"
        for commit in repo.iter_commits(max_count=max_count)
    ]
136 | 
def git_create_branch(repo: git.Repo, branch_name: str, base_branch: str | None = None) -> str:
    """Create *branch_name* from *base_branch*, or from the active branch."""
    # A named base is looked up among refs; otherwise branch off HEAD's branch.
    base = repo.references[base_branch] if base_branch else repo.active_branch
    repo.create_head(branch_name, base)
    return f"Created branch '{branch_name}' from '{base.name}'"
145 | 
def git_checkout(repo: git.Repo, branch_name: str) -> str:
    """Check out *branch_name*; git errors propagate to the caller."""
    repo.git.checkout(branch_name)
    return f"Switched to branch '{branch_name}'"
149 | 
def git_init(repo_path: str) -> str:
    """Initialise a repository at *repo_path*, creating the directory if needed.

    Deliberately best-effort: failures are reported as text, never raised.
    """
    try:
        new_repo = git.Repo.init(path=repo_path, mkdir=True)
        return f"Initialized empty Git repository in {new_repo.git_dir}"
    except Exception as e:
        return f"Error initializing repository: {str(e)}"
156 | 
def git_show(repo: git.Repo, revision: str) -> str:
    """Render a commit's metadata plus its patch against its first parent.

    For a root commit (no parents) the patch is computed against the empty
    tree, so the initial content still shows up as additions.
    """
    commit = repo.commit(revision)
    output = [
        f"Commit: {commit.hexsha!r}\n"
        f"Author: {commit.author!r}\n"
        f"Date: {commit.authored_datetime!r}\n"
        f"Message: {commit.message!r}\n"
    ]
    if commit.parents:
        parent = commit.parents[0]
        diff = parent.diff(commit, create_patch=True)
    else:
        diff = commit.diff(git.NULL_TREE, create_patch=True)
    for d in diff:
        output.append(f"\n--- {d.a_path}\n+++ {d.b_path}\n")
        # Patches for binary or non-UTF-8 files must not crash the tool;
        # substitute undecodable bytes instead of raising UnicodeDecodeError.
        output.append(d.diff.decode("utf-8", errors="replace"))
    return "".join(output)
174 | 
def git_branch(repo: git.Repo, branch_type: str, contains: str | None = None, not_contains: str | None = None) -> str:
    """List branches, optionally filtered by commits they do or don't contain.

    branch_type must be 'local', 'remote' or 'all'; anything else yields an
    error string rather than raising.
    """
    contains_args = (None,) if contains is None else ("--contains", contains)
    not_contains_args = (None,) if not_contains is None else ("--no-contains", not_contains)

    scope_flags = {"local": None, "remote": "-r", "all": "-a"}
    if branch_type not in scope_flags:
        return f"Invalid branch type: {branch_type}"
    b_type = scope_flags[branch_type]

    # None value will be auto deleted by GitPython
    branch_info = repo.git.branch(b_type, *contains_args, *not_contains_args)

    return branch_info
202 | 
async def serve(repository: Path | None) -> None:
    """Run the MCP git server over stdio.

    If *repository* is given it is validated up front; an invalid path logs
    an error and aborts startup. Tool handlers are registered on the Server
    instance via decorators and invoked by the MCP framework.
    """
    logger = logging.getLogger(__name__)

    # Fail fast on a bad --repository argument instead of erroring per call.
    if repository is not None:
        try:
            git.Repo(repository)
            logger.info(f"Using repository at {repository}")
        except git.InvalidGitRepositoryError:
            logger.error(f"{repository} is not a valid Git repository")
            return

    server = Server("mcp-git")

    @server.list_tools()
    async def list_tools() -> list[Tool]:
        # One Tool per GitTools member; each inputSchema is generated from the
        # matching pydantic request model defined above.
        return [
            Tool(
                name=GitTools.STATUS,
                description="Shows the working tree status",
                inputSchema=GitStatus.model_json_schema(),
            ),
            Tool(
                name=GitTools.DIFF_UNSTAGED,
                description="Shows changes in the working directory that are not yet staged",
                inputSchema=GitDiffUnstaged.model_json_schema(),
            ),
            Tool(
                name=GitTools.DIFF_STAGED,
                description="Shows changes that are staged for commit",
                inputSchema=GitDiffStaged.model_json_schema(),
            ),
            Tool(
                name=GitTools.DIFF,
                description="Shows differences between branches or commits",
                inputSchema=GitDiff.model_json_schema(),
            ),
            Tool(
                name=GitTools.COMMIT,
                description="Records changes to the repository",
                inputSchema=GitCommit.model_json_schema(),
            ),
            Tool(
                name=GitTools.ADD,
                description="Adds file contents to the staging area",
                inputSchema=GitAdd.model_json_schema(),
            ),
            Tool(
                name=GitTools.RESET,
                description="Unstages all staged changes",
                inputSchema=GitReset.model_json_schema(),
            ),
            Tool(
                name=GitTools.LOG,
                description="Shows the commit logs",
                inputSchema=GitLog.model_json_schema(),
            ),
            Tool(
                name=GitTools.CREATE_BRANCH,
                description="Creates a new branch from an optional base branch",
                inputSchema=GitCreateBranch.model_json_schema(),
            ),
            Tool(
                name=GitTools.CHECKOUT,
                description="Switches branches",
                inputSchema=GitCheckout.model_json_schema(),
            ),
            Tool(
                name=GitTools.SHOW,
                description="Shows the contents of a commit",
                inputSchema=GitShow.model_json_schema(),
            ),
            Tool(
                name=GitTools.INIT,
                description="Initialize a new Git repository",
                inputSchema=GitInit.model_json_schema(),
            ),
            Tool(
                name=GitTools.BRANCH,
                description="List Git branches",
                inputSchema=GitBranch.model_json_schema(),
            )
        ]

    # Collects candidate repositories from client-advertised roots plus the
    # --repository CLI argument.
    # NOTE(review): list_repos is defined but not referenced anywhere in this
    # file — presumably retained for future use; confirm before removing.
    async def list_repos() -> Sequence[str]:
        async def by_roots() -> Sequence[str]:
            if not isinstance(server.request_context.session, ServerSession):
                raise TypeError("server.request_context.session must be a ServerSession")

            # Clients that don't advertise the roots capability contribute nothing.
            if not server.request_context.session.check_client_capability(
                ClientCapabilities(roots=RootsCapability())
            ):
                return []

            roots_result: ListRootsResult = await server.request_context.session.list_roots()
            logger.debug(f"Roots result: {roots_result}")
            repo_paths = []
            for root in roots_result.roots:
                path = root.uri.path
                try:
                    # Keep only roots that are actual git repositories.
                    git.Repo(path)
                    repo_paths.append(str(path))
                except git.InvalidGitRepositoryError:
                    pass
            return repo_paths

        def by_commandline() -> Sequence[str]:
            return [str(repository)] if repository is not None else []

        cmd_repos = by_commandline()
        root_repos = await by_roots()
        return [*root_repos, *cmd_repos]

    @server.call_tool()
    async def call_tool(name: str, arguments: dict) -> list[TextContent]:
        # Every tool schema requires repo_path; a missing key raises KeyError here.
        repo_path = Path(arguments["repo_path"])

        # Handle git init separately since it doesn't require an existing repo
        if name == GitTools.INIT:
            result = git_init(str(repo_path))
            return [TextContent(
                type="text",
                text=result
            )]

        # For all other commands, we need an existing repo
        repo = git.Repo(repo_path)

        # Dispatch on the wire tool name; GitTools members compare equal to str.
        match name:
            case GitTools.STATUS:
                status = git_status(repo)
                return [TextContent(
                    type="text",
                    text=f"Repository status:\n{status}"
                )]

            case GitTools.DIFF_UNSTAGED:
                diff = git_diff_unstaged(repo, arguments.get("context_lines", DEFAULT_CONTEXT_LINES))
                return [TextContent(
                    type="text",
                    text=f"Unstaged changes:\n{diff}"
                )]

            case GitTools.DIFF_STAGED:
                diff = git_diff_staged(repo, arguments.get("context_lines", DEFAULT_CONTEXT_LINES))
                return [TextContent(
                    type="text",
                    text=f"Staged changes:\n{diff}"
                )]

            case GitTools.DIFF:
                diff = git_diff(repo, arguments["target"], arguments.get("context_lines", DEFAULT_CONTEXT_LINES))
                return [TextContent(
                    type="text",
                    text=f"Diff with {arguments['target']}:\n{diff}"
                )]

            case GitTools.COMMIT:
                result = git_commit(repo, arguments["message"])
                return [TextContent(
                    type="text",
                    text=result
                )]

            case GitTools.ADD:
                result = git_add(repo, arguments["files"])
                return [TextContent(
                    type="text",
                    text=result
                )]

            case GitTools.RESET:
                result = git_reset(repo)
                return [TextContent(
                    type="text",
                    text=result
                )]

            case GitTools.LOG:
                log = git_log(repo, arguments.get("max_count", 10))
                return [TextContent(
                    type="text",
                    text="Commit history:\n" + "\n".join(log)
                )]

            case GitTools.CREATE_BRANCH:
                result = git_create_branch(
                    repo,
                    arguments["branch_name"],
                    arguments.get("base_branch")
                )
                return [TextContent(
                    type="text",
                    text=result
                )]

            case GitTools.CHECKOUT:
                result = git_checkout(repo, arguments["branch_name"])
                return [TextContent(
                    type="text",
                    text=result
                )]

            case GitTools.SHOW:
                result = git_show(repo, arguments["revision"])
                return [TextContent(
                    type="text",
                    text=result
                )]

            case GitTools.BRANCH:
                result = git_branch(
                    repo,
                    arguments.get("branch_type", 'local'),
                    arguments.get("contains", None),
                    arguments.get("not_contains", None),
                )
                return [TextContent(
                    type="text",
                    text=result
                )]

            case _:
                raise ValueError(f"Unknown tool: {name}")

    options = server.create_initialization_options()
    # Serve over stdio until the streams close.
    # NOTE(review): raise_exceptions=True appears to surface handler errors to
    # the transport layer rather than converting them to protocol errors —
    # confirm against the mcp.server documentation.
    async with stdio_server() as (read_stream, write_stream):
        await server.run(read_stream, write_stream, options, raise_exceptions=True)
430 | 


--------------------------------------------------------------------------------
/src/git/tests/test_server.py:
--------------------------------------------------------------------------------
 1 | import pytest
 2 | from pathlib import Path
 3 | import git
 4 | from mcp_server_git.server import git_checkout, git_branch
 5 | import shutil
 6 | 
@pytest.fixture
def test_repository(tmp_path: Path):
    # Fresh repository with a single committed file, torn down after the test.
    repo_path = tmp_path / "temp_test_repo"
    test_repo = git.Repo.init(repo_path)

    Path(repo_path / "test.txt").write_text("test")
    test_repo.index.add(["test.txt"])
    test_repo.index.commit("initial commit")

    yield test_repo

    # tmp_path is cleaned up by pytest anyway; explicit removal keeps it tidy.
    shutil.rmtree(repo_path)
19 | 
def test_git_checkout_existing_branch(test_repository):
    # Checking out an existing branch succeeds and moves HEAD to it.
    test_repository.git.branch("test-branch")
    result = git_checkout(test_repository, "test-branch")

    assert "Switched to branch 'test-branch'" in result
    assert test_repository.active_branch.name == "test-branch"

def test_git_checkout_nonexistent_branch(test_repository):
    # git_checkout does not catch git errors; GitCommandError propagates.
    with pytest.raises(git.GitCommandError):
        git_checkout(test_repository, "nonexistent-branch")
31 | 
def test_git_branch_local(test_repository):
    # A newly created local branch appears in the 'local' listing.
    test_repository.git.branch("new-branch-local")
    result = git_branch(test_repository, "local")
    assert "new-branch-local" in result

def test_git_branch_remote(test_repository):
    # GitPython does not easily support creating remote branches without a remote.
    # This test will check the behavior when 'remote' is specified without actual remotes.
    result = git_branch(test_repository, "remote")
    assert "" == result.strip()  # Should be empty if no remote branches

def test_git_branch_all(test_repository):
    # 'all' includes local branches (and would include remotes if any existed).
    test_repository.git.branch("new-branch-all")
    result = git_branch(test_repository, "all")
    assert "new-branch-all" in result
47 | 
def test_git_branch_contains(test_repository):
    # Create a new branch and commit to it
    test_repository.git.checkout("-b", "feature-branch")
    Path(test_repository.working_dir / Path("feature.txt")).write_text("feature content")
    test_repository.index.add(["feature.txt"])
    commit = test_repository.index.commit("feature commit")
    # NOTE(review): assumes `git init` named the default branch 'master' —
    # breaks where init.defaultBranch is set to e.g. 'main'; confirm CI config.
    test_repository.git.checkout("master")

    result = git_branch(test_repository, "local", contains=commit.hexsha)
    assert "feature-branch" in result
    assert "master" not in result

def test_git_branch_not_contains(test_repository):
    # Create a new branch and commit to it
    test_repository.git.checkout("-b", "another-feature-branch")
    Path(test_repository.working_dir / Path("another_feature.txt")).write_text("another feature content")
    test_repository.index.add(["another_feature.txt"])
    commit = test_repository.index.commit("another feature commit")
    # NOTE(review): same 'master' default-branch assumption as above.
    test_repository.git.checkout("master")

    result = git_branch(test_repository, "local", not_contains=commit.hexsha)
    assert "another-feature-branch" not in result
    assert "master" in result
71 | 


--------------------------------------------------------------------------------
/src/memory/Dockerfile:
--------------------------------------------------------------------------------
 1 | FROM node:22.12-alpine AS builder
 2 | 
 3 | COPY src/memory /app
 4 | COPY tsconfig.json /tsconfig.json
 5 | 
 6 | WORKDIR /app
 7 | 
 8 | RUN --mount=type=cache,target=/root/.npm npm install
 9 | 
10 | RUN --mount=type=cache,target=/root/.npm-production npm ci --ignore-scripts --omit-dev
11 | 
12 | FROM node:22-alpine AS release
13 | 
14 | COPY --from=builder /app/dist /app/dist
15 | COPY --from=builder /app/package.json /app/package.json
16 | COPY --from=builder /app/package-lock.json /app/package-lock.json
17 | 
18 | ENV NODE_ENV=production
19 | 
20 | WORKDIR /app
21 | 
22 | RUN npm ci --ignore-scripts --omit-dev
23 | 
24 | ENTRYPOINT ["node", "dist/index.js"]


--------------------------------------------------------------------------------
/src/memory/README.md:
--------------------------------------------------------------------------------
  1 | # Knowledge Graph Memory Server
  2 | 
  3 | A basic implementation of persistent memory using a local knowledge graph. This lets Claude remember information about the user across chats.
  4 | 
  5 | ## Core Concepts
  6 | 
  7 | ### Entities
  8 | Entities are the primary nodes in the knowledge graph. Each entity has:
  9 | - A unique name (identifier)
 10 | - An entity type (e.g., "person", "organization", "event")
 11 | - A list of observations
 12 | 
 13 | Example:
 14 | ```json
 15 | {
 16 |   "name": "John_Smith",
 17 |   "entityType": "person",
 18 |   "observations": ["Speaks fluent Spanish"]
 19 | }
 20 | ```
 21 | 
 22 | ### Relations
 23 | Relations define directed connections between entities. They are always stored in active voice and describe how entities interact or relate to each other.
 24 | 
 25 | Example:
 26 | ```json
 27 | {
 28 |   "from": "John_Smith",
 29 |   "to": "Anthropic",
 30 |   "relationType": "works_at"
 31 | }
 32 | ```

### Observations
 34 | Observations are discrete pieces of information about an entity. They are:
 35 | 
 36 | - Stored as strings
 37 | - Attached to specific entities
 38 | - Can be added or removed independently
 39 | - Should be atomic (one fact per observation)
 40 | 
 41 | Example:
 42 | ```json
 43 | {
 44 |   "entityName": "John_Smith",
 45 |   "observations": [
 46 |     "Speaks fluent Spanish",
 47 |     "Graduated in 2019",
 48 |     "Prefers morning meetings"
 49 |   ]
 50 | }
 51 | ```
 52 | 
 53 | ## API
 54 | 
 55 | ### Tools
 56 | - **create_entities**
 57 |   - Create multiple new entities in the knowledge graph
 58 |   - Input: `entities` (array of objects)
 59 |     - Each object contains:
 60 |       - `name` (string): Entity identifier
 61 |       - `entityType` (string): Type classification
 62 |       - `observations` (string[]): Associated observations
 63 |   - Ignores entities with existing names
 64 | 
 65 | - **create_relations**
 66 |   - Create multiple new relations between entities
 67 |   - Input: `relations` (array of objects)
 68 |     - Each object contains:
 69 |       - `from` (string): Source entity name
 70 |       - `to` (string): Target entity name
 71 |       - `relationType` (string): Relationship type in active voice
 72 |   - Skips duplicate relations
 73 | 
 74 | - **add_observations**
 75 |   - Add new observations to existing entities
 76 |   - Input: `observations` (array of objects)
 77 |     - Each object contains:
 78 |       - `entityName` (string): Target entity
 79 |       - `contents` (string[]): New observations to add
 80 |   - Returns added observations per entity
 81 |   - Fails if entity doesn't exist
 82 | 
 83 | - **delete_entities**
 84 |   - Remove entities and their relations
 85 |   - Input: `entityNames` (string[])
 86 |   - Cascading deletion of associated relations
 87 |   - Silent operation if entity doesn't exist
 88 | 
 89 | - **delete_observations**
 90 |   - Remove specific observations from entities
 91 |   - Input: `deletions` (array of objects)
 92 |     - Each object contains:
 93 |       - `entityName` (string): Target entity
 94 |       - `observations` (string[]): Observations to remove
 95 |   - Silent operation if observation doesn't exist
 96 | 
 97 | - **delete_relations**
 98 |   - Remove specific relations from the graph
 99 |   - Input: `relations` (array of objects)
100 |     - Each object contains:
101 |       - `from` (string): Source entity name
102 |       - `to` (string): Target entity name
103 |       - `relationType` (string): Relationship type
104 |   - Silent operation if relation doesn't exist
105 | 
106 | - **read_graph**
107 |   - Read the entire knowledge graph
108 |   - No input required
109 |   - Returns complete graph structure with all entities and relations
110 | 
111 | - **search_nodes**
112 |   - Search for nodes based on query
113 |   - Input: `query` (string)
114 |   - Searches across:
115 |     - Entity names
116 |     - Entity types
117 |     - Observation content
118 |   - Returns matching entities and their relations
119 | 
120 | - **open_nodes**
121 |   - Retrieve specific nodes by name
122 |   - Input: `names` (string[])
123 |   - Returns:
124 |     - Requested entities
125 |     - Relations between requested entities
126 |   - Silently skips non-existent nodes
127 | 
## Usage with Claude Desktop
129 | 
130 | ### Setup
131 | 
132 | Add this to your claude_desktop_config.json:
133 | 
134 | #### Docker
135 | 
136 | ```json
137 | {
138 |   "mcpServers": {
139 |     "memory": {
140 |       "command": "docker",
141 |       "args": ["run", "-i", "-v", "claude-memory:/app/dist", "--rm", "mcp/memory"]
142 |     }
143 |   }
144 | }
145 | ```
146 | 
147 | #### NPX
148 | ```json
149 | {
150 |   "mcpServers": {
151 |     "memory": {
152 |       "command": "npx",
153 |       "args": [
154 |         "-y",
155 |         "@modelcontextprotocol/server-memory"
156 |       ]
157 |     }
158 |   }
159 | }
160 | ```
161 | 
162 | #### NPX with custom setting
163 | 
164 | The server can be configured using the following environment variables:
165 | 
166 | ```json
167 | {
168 |   "mcpServers": {
169 |     "memory": {
170 |       "command": "npx",
171 |       "args": [
172 |         "-y",
173 |         "@modelcontextprotocol/server-memory"
174 |       ],
175 |       "env": {
176 |         "MEMORY_FILE_PATH": "/path/to/custom/memory.json"
177 |       }
178 |     }
179 |   }
180 | }
181 | ```
182 | 
183 | - `MEMORY_FILE_PATH`: Path to the memory storage JSON file (default: `memory.json` in the server directory)
184 | 
## VS Code Installation Instructions
186 | 
187 | For quick installation, use one of the one-click installation buttons below:
188 | 
189 | [![Install with NPX in VS Code](https://img.shields.io/badge/VS_Code-NPM-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=memory&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-memory%22%5D%7D) [![Install with NPX in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-NPM-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=memory&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-memory%22%5D%7D&quality=insiders)
190 | 
191 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=memory&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22-v%22%2C%22claude-memory%3A%2Fapp%2Fdist%22%2C%22--rm%22%2C%22mcp%2Fmemory%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=memory&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22-v%22%2C%22claude-memory%3A%2Fapp%2Fdist%22%2C%22--rm%22%2C%22mcp%2Fmemory%22%5D%7D&quality=insiders)
192 | 
193 | For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open Settings (JSON)`.
194 | 
195 | Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 
196 | 
197 | > Note that the `mcp` key is not needed in the `.vscode/mcp.json` file.
198 | 
199 | #### NPX
200 | 
201 | ```json
202 | {
203 |   "mcp": {
204 |     "servers": {
205 |       "memory": {
206 |         "command": "npx",
207 |         "args": [
208 |           "-y",
209 |           "@modelcontextprotocol/server-memory"
210 |         ]
211 |       }
212 |     }
213 |   }
214 | }
215 | ```
216 | 
217 | #### Docker
218 | 
219 | ```json
220 | {
221 |   "mcp": {
222 |     "servers": {
223 |       "memory": {
224 |         "command": "docker",
225 |         "args": [
226 |           "run",
227 |           "-i",
228 |           "-v",
229 |           "claude-memory:/app/dist",
230 |           "--rm",
231 |           "mcp/memory"
232 |         ]
233 |       }
234 |     }
235 |   }
236 | }
237 | ```
238 | 
239 | ### System Prompt
240 | 
241 | The prompt for utilizing memory depends on the use case. Changing the prompt will help the model determine the frequency and types of memories created.
242 | 
243 | Here is an example prompt for chat personalization. You could use this prompt in the "Custom Instructions" field of a [Claude.ai Project](https://www.anthropic.com/news/projects). 
244 | 
245 | ```
246 | Follow these steps for each interaction:
247 | 
248 | 1. User Identification:
249 |    - You should assume that you are interacting with default_user
250 |    - If you have not identified default_user, proactively try to do so.
251 | 
252 | 2. Memory Retrieval:
253 |    - Always begin your chat by saying only "Remembering..." and retrieve all relevant information from your knowledge graph
254 |    - Always refer to your knowledge graph as your "memory"
255 | 
256 | 3. Memory
257 |    - While conversing with the user, be attentive to any new information that falls into these categories:
258 |      a) Basic Identity (age, gender, location, job title, education level, etc.)
259 |      b) Behaviors (interests, habits, etc.)
260 |      c) Preferences (communication style, preferred language, etc.)
261 |      d) Goals (goals, targets, aspirations, etc.)
262 |      e) Relationships (personal and professional relationships up to 3 degrees of separation)
263 | 
264 | 4. Memory Update:
265 |    - If any new information was gathered during the interaction, update your memory as follows:
266 |      a) Create entities for recurring organizations, people, and significant events
267 |      b) Connect them to the current entities using relations
268 |      c) Store facts about them as observations
269 | ```
270 | 
271 | ## Building
272 | 
273 | Docker:
274 | 
275 | ```sh
276 | docker build -t mcp/memory -f src/memory/Dockerfile . 
277 | ```
278 | 
279 | ## License
280 | 
281 | This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository.
282 | 


--------------------------------------------------------------------------------
/src/memory/index.ts:
--------------------------------------------------------------------------------
  1 | #!/usr/bin/env node
  2 | 
  3 | import { Server } from "@modelcontextprotocol/sdk/server/index.js";
  4 | import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
  5 | import {
  6 |   CallToolRequestSchema,
  7 |   ListToolsRequestSchema,
  8 | } from "@modelcontextprotocol/sdk/types.js";
  9 | import { promises as fs } from 'fs';
 10 | import path from 'path';
 11 | import { fileURLToPath } from 'url';
 12 | 
// Define memory file path using environment variable with fallback
const defaultMemoryPath = path.join(path.dirname(fileURLToPath(import.meta.url)), 'memory.json');

// Resolve MEMORY_FILE_PATH: an absolute path is used as-is; a relative path
// (including a bare filename) is resolved against this script's directory,
// not the process working directory.
const MEMORY_FILE_PATH = process.env.MEMORY_FILE_PATH
  ? path.isAbsolute(process.env.MEMORY_FILE_PATH)
    ? process.env.MEMORY_FILE_PATH
    : path.join(path.dirname(fileURLToPath(import.meta.url)), process.env.MEMORY_FILE_PATH)
  : defaultMemoryPath;
 22 | 
 23 | // We are storing our memory using entities, relations, and observations in a graph structure
// A node in the graph: identified by name, typed, and carrying free-form
// textual observations.
interface Entity {
  name: string;
  entityType: string;
  observations: string[];
}

// A directed, typed edge between two entities, referenced by entity name.
interface Relation {
  from: string;
  to: string;
  relationType: string;
}

// The full persisted graph: every entity plus every relation between them.
interface KnowledgeGraph {
  entities: Entity[];
  relations: Relation[];
}
 40 | 
 41 | // The KnowledgeGraphManager class contains all operations to interact with the knowledge graph
 42 | class KnowledgeGraphManager {
 43 |   private async loadGraph(): Promise<KnowledgeGraph> {
 44 |     try {
 45 |       const data = await fs.readFile(MEMORY_FILE_PATH, "utf-8");
 46 |       const lines = data.split("\n").filter(line => line.trim() !== "");
 47 |       return lines.reduce((graph: KnowledgeGraph, line) => {
 48 |         const item = JSON.parse(line);
 49 |         if (item.type === "entity") graph.entities.push(item as Entity);
 50 |         if (item.type === "relation") graph.relations.push(item as Relation);
 51 |         return graph;
 52 |       }, { entities: [], relations: [] });
 53 |     } catch (error) {
 54 |       if (error instanceof Error && 'code' in error && (error as any).code === "ENOENT") {
 55 |         return { entities: [], relations: [] };
 56 |       }
 57 |       throw error;
 58 |     }
 59 |   }
 60 | 
 61 |   private async saveGraph(graph: KnowledgeGraph): Promise<void> {
 62 |     const lines = [
 63 |       ...graph.entities.map(e => JSON.stringify({ type: "entity", ...e })),
 64 |       ...graph.relations.map(r => JSON.stringify({ type: "relation", ...r })),
 65 |     ];
 66 |     await fs.writeFile(MEMORY_FILE_PATH, lines.join("\n"));
 67 |   }
 68 | 
 69 |   async createEntities(entities: Entity[]): Promise<Entity[]> {
 70 |     const graph = await this.loadGraph();
 71 |     const newEntities = entities.filter(e => !graph.entities.some(existingEntity => existingEntity.name === e.name));
 72 |     graph.entities.push(...newEntities);
 73 |     await this.saveGraph(graph);
 74 |     return newEntities;
 75 |   }
 76 | 
 77 |   async createRelations(relations: Relation[]): Promise<Relation[]> {
 78 |     const graph = await this.loadGraph();
 79 |     const newRelations = relations.filter(r => !graph.relations.some(existingRelation => 
 80 |       existingRelation.from === r.from && 
 81 |       existingRelation.to === r.to && 
 82 |       existingRelation.relationType === r.relationType
 83 |     ));
 84 |     graph.relations.push(...newRelations);
 85 |     await this.saveGraph(graph);
 86 |     return newRelations;
 87 |   }
 88 | 
 89 |   async addObservations(observations: { entityName: string; contents: string[] }[]): Promise<{ entityName: string; addedObservations: string[] }[]> {
 90 |     const graph = await this.loadGraph();
 91 |     const results = observations.map(o => {
 92 |       const entity = graph.entities.find(e => e.name === o.entityName);
 93 |       if (!entity) {
 94 |         throw new Error(`Entity with name ${o.entityName} not found`);
 95 |       }
 96 |       const newObservations = o.contents.filter(content => !entity.observations.includes(content));
 97 |       entity.observations.push(...newObservations);
 98 |       return { entityName: o.entityName, addedObservations: newObservations };
 99 |     });
100 |     await this.saveGraph(graph);
101 |     return results;
102 |   }
103 | 
104 |   async deleteEntities(entityNames: string[]): Promise<void> {
105 |     const graph = await this.loadGraph();
106 |     graph.entities = graph.entities.filter(e => !entityNames.includes(e.name));
107 |     graph.relations = graph.relations.filter(r => !entityNames.includes(r.from) && !entityNames.includes(r.to));
108 |     await this.saveGraph(graph);
109 |   }
110 | 
111 |   async deleteObservations(deletions: { entityName: string; observations: string[] }[]): Promise<void> {
112 |     const graph = await this.loadGraph();
113 |     deletions.forEach(d => {
114 |       const entity = graph.entities.find(e => e.name === d.entityName);
115 |       if (entity) {
116 |         entity.observations = entity.observations.filter(o => !d.observations.includes(o));
117 |       }
118 |     });
119 |     await this.saveGraph(graph);
120 |   }
121 | 
122 |   async deleteRelations(relations: Relation[]): Promise<void> {
123 |     const graph = await this.loadGraph();
124 |     graph.relations = graph.relations.filter(r => !relations.some(delRelation => 
125 |       r.from === delRelation.from && 
126 |       r.to === delRelation.to && 
127 |       r.relationType === delRelation.relationType
128 |     ));
129 |     await this.saveGraph(graph);
130 |   }
131 | 
132 |   async readGraph(): Promise<KnowledgeGraph> {
133 |     return this.loadGraph();
134 |   }
135 | 
136 |   // Very basic search function
137 |   async searchNodes(query: string): Promise<KnowledgeGraph> {
138 |     const graph = await this.loadGraph();
139 |     
140 |     // Filter entities
141 |     const filteredEntities = graph.entities.filter(e => 
142 |       e.name.toLowerCase().includes(query.toLowerCase()) ||
143 |       e.entityType.toLowerCase().includes(query.toLowerCase()) ||
144 |       e.observations.some(o => o.toLowerCase().includes(query.toLowerCase()))
145 |     );
146 |   
147 |     // Create a Set of filtered entity names for quick lookup
148 |     const filteredEntityNames = new Set(filteredEntities.map(e => e.name));
149 |   
150 |     // Filter relations to only include those between filtered entities
151 |     const filteredRelations = graph.relations.filter(r => 
152 |       filteredEntityNames.has(r.from) && filteredEntityNames.has(r.to)
153 |     );
154 |   
155 |     const filteredGraph: KnowledgeGraph = {
156 |       entities: filteredEntities,
157 |       relations: filteredRelations,
158 |     };
159 |   
160 |     return filteredGraph;
161 |   }
162 | 
163 |   async openNodes(names: string[]): Promise<KnowledgeGraph> {
164 |     const graph = await this.loadGraph();
165 |     
166 |     // Filter entities
167 |     const filteredEntities = graph.entities.filter(e => names.includes(e.name));
168 |   
169 |     // Create a Set of filtered entity names for quick lookup
170 |     const filteredEntityNames = new Set(filteredEntities.map(e => e.name));
171 |   
172 |     // Filter relations to only include those between filtered entities
173 |     const filteredRelations = graph.relations.filter(r => 
174 |       filteredEntityNames.has(r.from) && filteredEntityNames.has(r.to)
175 |     );
176 |   
177 |     const filteredGraph: KnowledgeGraph = {
178 |       entities: filteredEntities,
179 |       relations: filteredRelations,
180 |     };
181 |   
182 |     return filteredGraph;
183 |   }
184 | }
185 | 
186 | const knowledgeGraphManager = new KnowledgeGraphManager();
187 | 
188 | 
189 | // The server instance and tools exposed to Claude
190 | const server = new Server({
191 |   name: "memory-server",
192 |   version: "0.6.3",
193 | },    {
194 |     capabilities: {
195 |       tools: {},
196 |     },
197 |   },);
198 | 
// Advertise the toolset. Each tool's inputSchema mirrors the parameters of
// the corresponding KnowledgeGraphManager method.
server.setRequestHandler(ListToolsRequestSchema, async () => {
  return {
    tools: [
      // --- Mutating tools ---
      {
        name: "create_entities",
        description: "Create multiple new entities in the knowledge graph",
        inputSchema: {
          type: "object",
          properties: {
            entities: {
              type: "array",
              items: {
                type: "object",
                properties: {
                  name: { type: "string", description: "The name of the entity" },
                  entityType: { type: "string", description: "The type of the entity" },
                  observations: { 
                    type: "array", 
                    items: { type: "string" },
                    description: "An array of observation contents associated with the entity"
                  },
                },
                required: ["name", "entityType", "observations"],
              },
            },
          },
          required: ["entities"],
        },
      },
      {
        name: "create_relations",
        description: "Create multiple new relations between entities in the knowledge graph. Relations should be in active voice",
        inputSchema: {
          type: "object",
          properties: {
            relations: {
              type: "array",
              items: {
                type: "object",
                properties: {
                  from: { type: "string", description: "The name of the entity where the relation starts" },
                  to: { type: "string", description: "The name of the entity where the relation ends" },
                  relationType: { type: "string", description: "The type of the relation" },
                },
                required: ["from", "to", "relationType"],
              },
            },
          },
          required: ["relations"],
        },
      },
      {
        name: "add_observations",
        description: "Add new observations to existing entities in the knowledge graph",
        inputSchema: {
          type: "object",
          properties: {
            observations: {
              type: "array",
              items: {
                type: "object",
                properties: {
                  entityName: { type: "string", description: "The name of the entity to add the observations to" },
                  contents: { 
                    type: "array", 
                    items: { type: "string" },
                    description: "An array of observation contents to add"
                  },
                },
                required: ["entityName", "contents"],
              },
            },
          },
          required: ["observations"],
        },
      },
      {
        name: "delete_entities",
        description: "Delete multiple entities and their associated relations from the knowledge graph",
        inputSchema: {
          type: "object",
          properties: {
            entityNames: { 
              type: "array", 
              items: { type: "string" },
              description: "An array of entity names to delete" 
            },
          },
          required: ["entityNames"],
        },
      },
      {
        name: "delete_observations",
        description: "Delete specific observations from entities in the knowledge graph",
        inputSchema: {
          type: "object",
          properties: {
            deletions: {
              type: "array",
              items: {
                type: "object",
                properties: {
                  entityName: { type: "string", description: "The name of the entity containing the observations" },
                  observations: { 
                    type: "array", 
                    items: { type: "string" },
                    description: "An array of observations to delete"
                  },
                },
                required: ["entityName", "observations"],
              },
            },
          },
          required: ["deletions"],
        },
      },
      {
        name: "delete_relations",
        description: "Delete multiple relations from the knowledge graph",
        inputSchema: {
          type: "object",
          properties: {
            relations: { 
              type: "array", 
              items: {
                type: "object",
                properties: {
                  from: { type: "string", description: "The name of the entity where the relation starts" },
                  to: { type: "string", description: "The name of the entity where the relation ends" },
                  relationType: { type: "string", description: "The type of the relation" },
                },
                required: ["from", "to", "relationType"],
              },
              description: "An array of relations to delete" 
            },
          },
          required: ["relations"],
        },
      },
      // --- Read-only tools ---
      {
        name: "read_graph",
        description: "Read the entire knowledge graph",
        inputSchema: {
          type: "object",
          properties: {},
        },
      },
      {
        name: "search_nodes",
        description: "Search for nodes in the knowledge graph based on a query",
        inputSchema: {
          type: "object",
          properties: {
            query: { type: "string", description: "The search query to match against entity names, types, and observation content" },
          },
          required: ["query"],
        },
      },
      {
        name: "open_nodes",
        description: "Open specific nodes in the knowledge graph by their names",
        inputSchema: {
          type: "object",
          properties: {
            names: {
              type: "array",
              items: { type: "string" },
              description: "An array of entity names to retrieve",
            },
          },
          required: ["names"],
        },
      },
    ],
  };
});
375 | 
376 | server.setRequestHandler(CallToolRequestSchema, async (request) => {
377 |   const { name, arguments: args } = request.params;
378 | 
379 |   if (!args) {
380 |     throw new Error(`No arguments provided for tool: ${name}`);
381 |   }
382 | 
383 |   switch (name) {
384 |     case "create_entities":
385 |       return { content: [{ type: "text", text: JSON.stringify(await knowledgeGraphManager.createEntities(args.entities as Entity[]), null, 2) }] };
386 |     case "create_relations":
387 |       return { content: [{ type: "text", text: JSON.stringify(await knowledgeGraphManager.createRelations(args.relations as Relation[]), null, 2) }] };
388 |     case "add_observations":
389 |       return { content: [{ type: "text", text: JSON.stringify(await knowledgeGraphManager.addObservations(args.observations as { entityName: string; contents: string[] }[]), null, 2) }] };
390 |     case "delete_entities":
391 |       await knowledgeGraphManager.deleteEntities(args.entityNames as string[]);
392 |       return { content: [{ type: "text", text: "Entities deleted successfully" }] };
393 |     case "delete_observations":
394 |       await knowledgeGraphManager.deleteObservations(args.deletions as { entityName: string; observations: string[] }[]);
395 |       return { content: [{ type: "text", text: "Observations deleted successfully" }] };
396 |     case "delete_relations":
397 |       await knowledgeGraphManager.deleteRelations(args.relations as Relation[]);
398 |       return { content: [{ type: "text", text: "Relations deleted successfully" }] };
399 |     case "read_graph":
400 |       return { content: [{ type: "text", text: JSON.stringify(await knowledgeGraphManager.readGraph(), null, 2) }] };
401 |     case "search_nodes":
402 |       return { content: [{ type: "text", text: JSON.stringify(await knowledgeGraphManager.searchNodes(args.query as string), null, 2) }] };
403 |     case "open_nodes":
404 |       return { content: [{ type: "text", text: JSON.stringify(await knowledgeGraphManager.openNodes(args.names as string[]), null, 2) }] };
405 |     default:
406 |       throw new Error(`Unknown tool: ${name}`);
407 |   }
408 | });
409 | 
410 | async function main() {
411 |   const transport = new StdioServerTransport();
412 |   await server.connect(transport);
413 |   console.error("Knowledge Graph MCP Server running on stdio");
414 | }
415 | 
416 | main().catch((error) => {
417 |   console.error("Fatal error in main():", error);
418 |   process.exit(1);
419 | });
420 | 


--------------------------------------------------------------------------------
/src/memory/package.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "name": "@modelcontextprotocol/server-memory",
 3 |   "version": "0.6.3",
 4 |   "description": "MCP server for enabling memory for Claude through a knowledge graph",
 5 |   "license": "MIT",
 6 |   "author": "Anthropic, PBC (https://anthropic.com)",
 7 |   "homepage": "https://modelcontextprotocol.io",
 8 |   "bugs": "https://github.com/modelcontextprotocol/servers/issues",
 9 |   "type": "module",
10 |   "bin": {
11 |     "mcp-server-memory": "dist/index.js"
12 |   },
13 |   "files": [
14 |     "dist"
15 |   ],
16 |   "scripts": {
17 |     "build": "tsc && shx chmod +x dist/*.js",
18 |     "prepare": "npm run build",
19 |     "watch": "tsc --watch"
20 |   },
21 |   "dependencies": {
22 |     "@modelcontextprotocol/sdk": "1.0.1"
23 |   },
24 |   "devDependencies": {
25 |     "@types/node": "^22",
26 |     "shx": "^0.3.4",
27 |     "typescript": "^5.6.2"
28 |   }
29 | }


--------------------------------------------------------------------------------
/src/memory/tsconfig.json:
--------------------------------------------------------------------------------
 1 | {
 2 |     "extends": "../../tsconfig.json",
 3 |     "compilerOptions": {
 4 |       "outDir": "./dist",
 5 |       "rootDir": "."
 6 |     },
 7 |     "include": [
 8 |       "./**/*.ts"
 9 |     ]
10 |   }
11 |   


--------------------------------------------------------------------------------
/src/sequentialthinking/Dockerfile:
--------------------------------------------------------------------------------
 1 | FROM node:22.12-alpine AS builder
 2 | 
 3 | COPY src/sequentialthinking /app
 4 | COPY tsconfig.json /tsconfig.json
 5 | 
 6 | WORKDIR /app
 7 | 
 8 | RUN --mount=type=cache,target=/root/.npm npm install
 9 | 
10 | RUN --mount=type=cache,target=/root/.npm-production npm ci --ignore-scripts --omit-dev
11 | 
12 | FROM node:22-alpine AS release
13 | 
14 | COPY --from=builder /app/dist /app/dist
15 | COPY --from=builder /app/package.json /app/package.json
16 | COPY --from=builder /app/package-lock.json /app/package-lock.json
17 | 
18 | ENV NODE_ENV=production
19 | 
20 | WORKDIR /app
21 | 
22 | RUN npm ci --ignore-scripts --omit-dev
23 | 
24 | ENTRYPOINT ["node", "dist/index.js"]
25 | 


--------------------------------------------------------------------------------
/src/sequentialthinking/README.md:
--------------------------------------------------------------------------------
  1 | # Sequential Thinking MCP Server
  2 | 
  3 | An MCP server implementation that provides a tool for dynamic and reflective problem-solving through a structured thinking process.
  4 | 
  5 | ## Features
  6 | 
  7 | - Break down complex problems into manageable steps
  8 | - Revise and refine thoughts as understanding deepens
  9 | - Branch into alternative paths of reasoning
 10 | - Adjust the total number of thoughts dynamically
 11 | - Generate and verify solution hypotheses
 12 | 
 13 | ## Tool
 14 | 
 15 | ### sequential_thinking
 16 | 
 17 | Facilitates a detailed, step-by-step thinking process for problem-solving and analysis.
 18 | 
 19 | **Inputs:**
 20 | - `thought` (string): The current thinking step
 21 | - `nextThoughtNeeded` (boolean): Whether another thought step is needed
 22 | - `thoughtNumber` (integer): Current thought number
 23 | - `totalThoughts` (integer): Estimated total thoughts needed
 24 | - `isRevision` (boolean, optional): Whether this revises previous thinking
 25 | - `revisesThought` (integer, optional): Which thought is being reconsidered
 26 | - `branchFromThought` (integer, optional): Branching point thought number
 27 | - `branchId` (string, optional): Branch identifier
 28 | - `needsMoreThoughts` (boolean, optional): If more thoughts are needed
 29 | 
 30 | ## Usage
 31 | 
 32 | The Sequential Thinking tool is designed for:
 33 | - Breaking down complex problems into steps
 34 | - Planning and design with room for revision
 35 | - Analysis that might need course correction
 36 | - Problems where the full scope might not be clear initially
 37 | - Tasks that need to maintain context over multiple steps
 38 | - Situations where irrelevant information needs to be filtered out
 39 | 
 40 | ## Configuration
 41 | 
 42 | ### Usage with Claude Desktop
 43 | 
 44 | Add this to your `claude_desktop_config.json`:
 45 | 
 46 | #### npx
 47 | 
 48 | ```json
 49 | {
 50 |   "mcpServers": {
 51 |     "sequential-thinking": {
 52 |       "command": "npx",
 53 |       "args": [
 54 |         "-y",
 55 |         "@modelcontextprotocol/server-sequential-thinking"
 56 |       ]
 57 |     }
 58 |   }
 59 | }
 60 | ```
 61 | 
 62 | #### docker
 63 | 
 64 | ```json
 65 | {
 66 |   "mcpServers": {
 67 |     "sequentialthinking": {
 68 |       "command": "docker",
 69 |       "args": [
 70 |         "run",
 71 |         "--rm",
 72 |         "-i",
 73 |         "mcp/sequentialthinking"
 74 |       ]
 75 |     }
 76 |   }
 77 | }
 78 | ```
 79 | 
To disable logging of thought information, set the environment variable `DISABLE_THOUGHT_LOGGING` to `true`.
 82 | 
 83 | ### Usage with VS Code
 84 | 
 85 | For quick installation, click one of the installation buttons below...
 86 | 
 87 | [![Install with NPX in VS Code](https://img.shields.io/badge/VS_Code-NPM-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sequentialthinking&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-sequential-thinking%22%5D%7D) [![Install with NPX in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-NPM-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sequentialthinking&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-sequential-thinking%22%5D%7D&quality=insiders)
 88 | 
 89 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sequentialthinking&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22--rm%22%2C%22-i%22%2C%22mcp%2Fsequentialthinking%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sequentialthinking&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22--rm%22%2C%22-i%22%2C%22mcp%2Fsequentialthinking%22%5D%7D&quality=insiders)
 90 | 
 91 | For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open Settings (JSON)`.
 92 | 
 93 | Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others.
 94 | 
 95 | > Note that the `mcp` key is not needed in the `.vscode/mcp.json` file.
 96 | 
 97 | For NPX installation:
 98 | 
 99 | ```json
100 | {
101 |   "mcp": {
102 |     "servers": {
103 |       "sequential-thinking": {
104 |         "command": "npx",
105 |         "args": [
106 |           "-y",
107 |           "@modelcontextprotocol/server-sequential-thinking"
108 |         ]
109 |       }
110 |     }
111 |   }
112 | }
113 | ```
114 | 
115 | For Docker installation:
116 | 
117 | ```json
118 | {
119 |   "mcp": {
120 |     "servers": {
121 |       "sequential-thinking": {
122 |         "command": "docker",
123 |         "args": [
124 |           "run",
125 |           "--rm",
126 |           "-i",
127 |           "mcp/sequentialthinking"
128 |         ]
129 |       }
130 |     }
131 |   }
132 | }
133 | ```
134 | 
135 | ## Building
136 | 
137 | Docker:
138 | 
139 | ```bash
140 | docker build -t mcp/sequentialthinking -f src/sequentialthinking/Dockerfile .
141 | ```
142 | 
143 | ## License
144 | 
145 | This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository.
146 | 


--------------------------------------------------------------------------------
/src/sequentialthinking/index.ts:
--------------------------------------------------------------------------------
  1 | #!/usr/bin/env node
  2 | 
  3 | import { Server } from "@modelcontextprotocol/sdk/server/index.js";
  4 | import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
  5 | import {
  6 |   CallToolRequestSchema,
  7 |   ListToolsRequestSchema,
  8 |   Tool,
  9 | } from "@modelcontextprotocol/sdk/types.js";
 10 | // Fixed chalk import for ESM
 11 | import chalk from 'chalk';
 12 | 
// Shape of one validated sequentialthinking tool call.
interface ThoughtData {
  thought: string;               // the current thinking step text
  thoughtNumber: number;         // current thought number in the sequence
  totalThoughts: number;         // current estimate of total thoughts needed
  isRevision?: boolean;          // true when this revises previous thinking
  revisesThought?: number;       // which thought number is being reconsidered
  branchFromThought?: number;    // branching point thought number
  branchId?: string;             // identifier of the branch
  needsMoreThoughts?: boolean;   // set when more thoughts are needed at the end
  nextThoughtNeeded: boolean;    // whether another thought step should follow
}
 24 | 
class SequentialThinkingServer {
  // Every accepted thought, in arrival order (all branches included).
  private thoughtHistory: ThoughtData[] = [];
  // Thoughts grouped by branchId, for thoughts that declared a branch.
  private branches: Record<string, ThoughtData[]> = {};
  // When true, formatted thoughts are not echoed to stderr.
  private disableThoughtLogging: boolean;

  constructor() {
    this.disableThoughtLogging = (process.env.DISABLE_THOUGHT_LOGGING || "").toLowerCase() === "true";
  }

  /**
   * Validate an untyped tool input and narrow it to ThoughtData.
   * Throws a field-specific Error on the first invalid required field.
   * Note: the falsy checks also reject 0 for thoughtNumber/totalThoughts and
   * "" for thought; optional fields are cast without runtime checks.
   */
  private validateThoughtData(input: unknown): ThoughtData {
    const data = input as Record<string, unknown>;

    if (!data.thought || typeof data.thought !== 'string') {
      throw new Error('Invalid thought: must be a string');
    }
    if (!data.thoughtNumber || typeof data.thoughtNumber !== 'number') {
      throw new Error('Invalid thoughtNumber: must be a number');
    }
    if (!data.totalThoughts || typeof data.totalThoughts !== 'number') {
      throw new Error('Invalid totalThoughts: must be a number');
    }
    if (typeof data.nextThoughtNeeded !== 'boolean') {
      throw new Error('Invalid nextThoughtNeeded: must be a boolean');
    }

    return {
      thought: data.thought,
      thoughtNumber: data.thoughtNumber,
      totalThoughts: data.totalThoughts,
      nextThoughtNeeded: data.nextThoughtNeeded,
      isRevision: data.isRevision as boolean | undefined,
      revisesThought: data.revisesThought as number | undefined,
      branchFromThought: data.branchFromThought as number | undefined,
      branchId: data.branchId as string | undefined,
      needsMoreThoughts: data.needsMoreThoughts as boolean | undefined,
    };
  }

  /**
   * Render a thought as a boxed, color-coded block for stderr logging.
   * NOTE(review): header.length includes chalk's ANSI escape codes, so the
   * border may be wider than the visible header — confirm this is intended.
   */
  private formatThought(thoughtData: ThoughtData): string {
    const { thoughtNumber, totalThoughts, thought, isRevision, revisesThought, branchFromThought, branchId } = thoughtData;

    let prefix = '';
    let context = '';

    // Revision takes precedence over branch when both fields are present.
    if (isRevision) {
      prefix = chalk.yellow('🔄 Revision');
      context = ` (revising thought ${revisesThought})`;
    } else if (branchFromThought) {
      prefix = chalk.green('🌿 Branch');
      context = ` (from thought ${branchFromThought}, ID: ${branchId})`;
    } else {
      prefix = chalk.blue('💭 Thought');
      context = '';
    }

    const header = `${prefix} ${thoughtNumber}/${totalThoughts}${context}`;
    const border = '─'.repeat(Math.max(header.length, thought.length) + 4);

    return `
┌${border}┐
│ ${header} │
├${border}┤
│ ${thought.padEnd(border.length - 2)} │
└${border}┘`;
  }

  /**
   * Handle one sequentialthinking tool call: validate the input, record it
   * in history (and in its branch, if declared), optionally log it, and
   * return a JSON status summary. Validation errors are returned as an
   * isError result rather than thrown.
   */
  public processThought(input: unknown): { content: Array<{ type: string; text: string }>; isError?: boolean } {
    try {
      const validatedInput = this.validateThoughtData(input);

      // The caller may go past its own estimate; grow the estimate to match.
      if (validatedInput.thoughtNumber > validatedInput.totalThoughts) {
        validatedInput.totalThoughts = validatedInput.thoughtNumber;
      }

      this.thoughtHistory.push(validatedInput);

      if (validatedInput.branchFromThought && validatedInput.branchId) {
        if (!this.branches[validatedInput.branchId]) {
          this.branches[validatedInput.branchId] = [];
        }
        this.branches[validatedInput.branchId].push(validatedInput);
      }

      if (!this.disableThoughtLogging) {
        const formattedThought = this.formatThought(validatedInput);
        console.error(formattedThought);
      }

      return {
        content: [{
          type: "text",
          text: JSON.stringify({
            thoughtNumber: validatedInput.thoughtNumber,
            totalThoughts: validatedInput.totalThoughts,
            nextThoughtNeeded: validatedInput.nextThoughtNeeded,
            branches: Object.keys(this.branches),
            thoughtHistoryLength: this.thoughtHistory.length
          }, null, 2)
        }]
      };
    } catch (error) {
      return {
        content: [{
          type: "text",
          text: JSON.stringify({
            error: error instanceof Error ? error.message : String(error),
            status: 'failed'
          }, null, 2)
        }],
        isError: true
      };
    }
  }
}
139 | 
140 | const SEQUENTIAL_THINKING_TOOL: Tool = {
141 |   name: "sequentialthinking",
142 |   description: `A detailed tool for dynamic and reflective problem-solving through thoughts.
143 | This tool helps analyze problems through a flexible thinking process that can adapt and evolve.
144 | Each thought can build on, question, or revise previous insights as understanding deepens.
145 | 
146 | When to use this tool:
147 | - Breaking down complex problems into steps
148 | - Planning and design with room for revision
149 | - Analysis that might need course correction
150 | - Problems where the full scope might not be clear initially
151 | - Problems that require a multi-step solution
152 | - Tasks that need to maintain context over multiple steps
153 | - Situations where irrelevant information needs to be filtered out
154 | 
155 | Key features:
156 | - You can adjust total_thoughts up or down as you progress
157 | - You can question or revise previous thoughts
158 | - You can add more thoughts even after reaching what seemed like the end
159 | - You can express uncertainty and explore alternative approaches
160 | - Not every thought needs to build linearly - you can branch or backtrack
161 | - Generates a solution hypothesis
162 | - Verifies the hypothesis based on the Chain of Thought steps
163 | - Repeats the process until satisfied
164 | - Provides a correct answer
165 | 
166 | Parameters explained:
167 | - thought: Your current thinking step, which can include:
168 | * Regular analytical steps
169 | * Revisions of previous thoughts
170 | * Questions about previous decisions
171 | * Realizations about needing more analysis
172 | * Changes in approach
173 | * Hypothesis generation
174 | * Hypothesis verification
175 | - next_thought_needed: True if you need more thinking, even if at what seemed like the end
176 | - thought_number: Current number in sequence (can go beyond initial total if needed)
177 | - total_thoughts: Current estimate of thoughts needed (can be adjusted up/down)
178 | - is_revision: A boolean indicating if this thought revises previous thinking
179 | - revises_thought: If is_revision is true, which thought number is being reconsidered
180 | - branch_from_thought: If branching, which thought number is the branching point
181 | - branch_id: Identifier for the current branch (if any)
182 | - needs_more_thoughts: If reaching end but realizing more thoughts needed
183 | 
184 | You should:
185 | 1. Start with an initial estimate of needed thoughts, but be ready to adjust
186 | 2. Feel free to question or revise previous thoughts
187 | 3. Don't hesitate to add more thoughts if needed, even at the "end"
188 | 4. Express uncertainty when present
189 | 5. Mark thoughts that revise previous thinking or branch into new paths
190 | 6. Ignore information that is irrelevant to the current step
191 | 7. Generate a solution hypothesis when appropriate
192 | 8. Verify the hypothesis based on the Chain of Thought steps
193 | 9. Repeat the process until satisfied with the solution
194 | 10. Provide a single, ideally correct answer as the final output
195 | 11. Only set next_thought_needed to false when truly done and a satisfactory answer is reached`,
196 |   inputSchema: {
197 |     type: "object",
198 |     properties: {
199 |       thought: {
200 |         type: "string",
201 |         description: "Your current thinking step"
202 |       },
203 |       nextThoughtNeeded: {
204 |         type: "boolean",
205 |         description: "Whether another thought step is needed"
206 |       },
207 |       thoughtNumber: {
208 |         type: "integer",
209 |         description: "Current thought number",
210 |         minimum: 1
211 |       },
212 |       totalThoughts: {
213 |         type: "integer",
214 |         description: "Estimated total thoughts needed",
215 |         minimum: 1
216 |       },
217 |       isRevision: {
218 |         type: "boolean",
219 |         description: "Whether this revises previous thinking"
220 |       },
221 |       revisesThought: {
222 |         type: "integer",
223 |         description: "Which thought is being reconsidered",
224 |         minimum: 1
225 |       },
226 |       branchFromThought: {
227 |         type: "integer",
228 |         description: "Branching point thought number",
229 |         minimum: 1
230 |       },
231 |       branchId: {
232 |         type: "string",
233 |         description: "Branch identifier"
234 |       },
235 |       needsMoreThoughts: {
236 |         type: "boolean",
237 |         description: "If more thoughts are needed"
238 |       }
239 |     },
240 |     required: ["thought", "nextThoughtNeeded", "thoughtNumber", "totalThoughts"]
241 |   }
242 | };
243 | 
244 | const server = new Server(
245 |   {
246 |     name: "sequential-thinking-server",
247 |     version: "0.2.0",
248 |   },
249 |   {
250 |     capabilities: {
251 |       tools: {},
252 |     },
253 |   }
254 | );
255 | 
// Single stateful instance: it accumulates thought history and branches
// across tool calls for the lifetime of the process.
const thinkingServer = new SequentialThinkingServer();

// Advertise the one tool this server provides.
server.setRequestHandler(ListToolsRequestSchema, async () => ({
  tools: [SEQUENTIAL_THINKING_TOOL],
}));
261 | 
262 | server.setRequestHandler(CallToolRequestSchema, async (request) => {
263 |   if (request.params.name === "sequentialthinking") {
264 |     return thinkingServer.processThought(request.params.arguments);
265 |   }
266 | 
267 |   return {
268 |     content: [{
269 |       type: "text",
270 |       text: `Unknown tool: ${request.params.name}`
271 |     }],
272 |     isError: true
273 |   };
274 | });
275 | 
276 | async function runServer() {
277 |   const transport = new StdioServerTransport();
278 |   await server.connect(transport);
279 |   console.error("Sequential Thinking MCP Server running on stdio");
280 | }
281 | 
// Start the server; any startup failure is fatal and exits non-zero.
runServer().catch((error) => {
  console.error("Fatal error running server:", error);
  process.exit(1);
});
286 | 


--------------------------------------------------------------------------------
/src/sequentialthinking/package.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "name": "@modelcontextprotocol/server-sequential-thinking",
 3 |   "version": "0.6.2",
 4 |   "description": "MCP server for sequential thinking and problem solving",
 5 |   "license": "MIT",
 6 |   "author": "Anthropic, PBC (https://anthropic.com)",
 7 |   "homepage": "https://modelcontextprotocol.io",
 8 |   "bugs": "https://github.com/modelcontextprotocol/servers/issues",
 9 |   "type": "module",
10 |   "bin": {
11 |     "mcp-server-sequential-thinking": "dist/index.js"
12 |   },
13 |   "files": [
14 |     "dist"
15 |   ],
16 |   "scripts": {
17 |     "build": "tsc && shx chmod +x dist/*.js",
18 |     "prepare": "npm run build",
19 |     "watch": "tsc --watch"
20 |   },
21 |   "dependencies": {
22 |     "@modelcontextprotocol/sdk": "0.5.0",
23 |     "chalk": "^5.3.0",
24 |     "yargs": "^17.7.2"
25 |   },
26 |   "devDependencies": {
27 |     "@types/node": "^22",
28 |     "@types/yargs": "^17.0.32",
29 |     "shx": "^0.3.4",
30 |     "typescript": "^5.3.3"
31 |   }
32 | }


--------------------------------------------------------------------------------
/src/sequentialthinking/tsconfig.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "extends": "../../tsconfig.json",
 3 |   "compilerOptions": {
 4 |     "outDir": "./dist",
 5 |     "rootDir": ".",
 6 |     "moduleResolution": "NodeNext",
 7 |     "module": "NodeNext"
 8 |   },
 9 |   "include": ["./**/*.ts"]
10 | }
11 | 


--------------------------------------------------------------------------------
/src/time/.python-version:
--------------------------------------------------------------------------------
1 | 3.10
2 | 


--------------------------------------------------------------------------------
/src/time/Dockerfile:
--------------------------------------------------------------------------------
 1 | # Use a Python image with uv pre-installed
 2 | FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv
 3 | 
 4 | # Install the project into `/app`
 5 | WORKDIR /app
 6 | 
 7 | # Enable bytecode compilation
 8 | ENV UV_COMPILE_BYTECODE=1
 9 | 
10 | # Copy from the cache instead of linking since it's a mounted volume
11 | ENV UV_LINK_MODE=copy
12 | 
13 | # Install the project's dependencies using the lockfile and settings
14 | RUN --mount=type=cache,target=/root/.cache/uv \
15 |     --mount=type=bind,source=uv.lock,target=uv.lock \
16 |     --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
17 |     uv sync --frozen --no-install-project --no-dev --no-editable
18 | 
19 | # Then, add the rest of the project source code and install it
20 | # Installing separately from its dependencies allows optimal layer caching
21 | ADD . /app
22 | RUN --mount=type=cache,target=/root/.cache/uv \
23 |     uv sync --frozen --no-dev --no-editable
24 | 
25 | FROM python:3.12-slim-bookworm
26 | 
27 | WORKDIR /app
28 |  
29 | COPY --from=uv /root/.local /root/.local
30 | COPY --from=uv --chown=app:app /app/.venv /app/.venv
31 | 
32 | # Place executables in the environment at the front of the path
33 | ENV PATH="/app/.venv/bin:$PATH"
34 | 
35 | # when running the container, add --db-path and a bind mount to the host's db file
36 | ENTRYPOINT ["mcp-server-time"]
37 | 


--------------------------------------------------------------------------------
/src/time/README.md:
--------------------------------------------------------------------------------
  1 | # Time MCP Server
  2 | 
  3 | A Model Context Protocol server that provides time and timezone conversion capabilities. This server enables LLMs to get current time information and perform timezone conversions using IANA timezone names, with automatic system timezone detection.
  4 | 
  5 | ### Available Tools
  6 | 
  7 | - `get_current_time` - Get current time in a specific timezone or system timezone.
  8 |   - Required arguments:
  9 |     - `timezone` (string): IANA timezone name (e.g., 'America/New_York', 'Europe/London')
 10 | 
 11 | - `convert_time` - Convert time between timezones.
 12 |   - Required arguments:
 13 |     - `source_timezone` (string): Source IANA timezone name
 14 |     - `time` (string): Time in 24-hour format (HH:MM)
 15 |     - `target_timezone` (string): Target IANA timezone name
 16 | 
 17 | ## Installation
 18 | 
 19 | ### Using uv (recommended)
 20 | 
 21 | When using [`uv`](https://docs.astral.sh/uv/) no specific installation is needed. We will
 22 | use [`uvx`](https://docs.astral.sh/uv/guides/tools/) to directly run *mcp-server-time*.
 23 | 
 24 | ### Using PIP
 25 | 
 26 | Alternatively you can install `mcp-server-time` via pip:
 27 | 
 28 | ```bash
 29 | pip install mcp-server-time
 30 | ```
 31 | 
 32 | After installation, you can run it as a script using:
 33 | 
 34 | ```bash
 35 | python -m mcp_server_time
 36 | ```
 37 | 
 38 | ## Configuration
 39 | 
 40 | ### Configure for Claude.app
 41 | 
 42 | Add to your Claude settings:
 43 | 
 44 | <details>
 45 | <summary>Using uvx</summary>
 46 | 
 47 | ```json
 48 | {
 49 |   "mcpServers": {
 50 |     "time": {
 51 |       "command": "uvx",
 52 |       "args": ["mcp-server-time"]
 53 |     }
 54 |   }
 55 | }
 56 | ```
 57 | </details>
 58 | 
 59 | <details>
 60 | <summary>Using docker</summary>
 61 | 
 62 | ```json
 63 | {
 64 |   "mcpServers": {
 65 |     "time": {
 66 |       "command": "docker",
 67 |       "args": ["run", "-i", "--rm", "mcp/time"]
 68 |     }
 69 |   }
 70 | }
 71 | ```
 72 | </details>
 73 | 
 74 | <details>
 75 | <summary>Using pip installation</summary>
 76 | 
 77 | ```json
 78 | {
 79 |   "mcpServers": {
 80 |     "time": {
 81 |       "command": "python",
 82 |       "args": ["-m", "mcp_server_time"]
 83 |     }
 84 |   }
 85 | }
 86 | ```
 87 | </details>
 88 | 
 89 | ### Configure for Zed
 90 | 
 91 | Add to your Zed settings.json:
 92 | 
 93 | <details>
 94 | <summary>Using uvx</summary>
 95 | 
 96 | ```json
"context_servers": {
  "mcp-server-time": {
    "command": "uvx",
    "args": ["mcp-server-time"]
  }
},
103 | ```
104 | </details>
105 | 
106 | <details>
107 | <summary>Using pip installation</summary>
108 | 
109 | ```json
110 | "context_servers": {
111 |   "mcp-server-time": {
112 |     "command": "python",
113 |     "args": ["-m", "mcp_server_time"]
114 |   }
115 | },
116 | ```
117 | </details>
118 | 
119 | ### Configure for VS Code
120 | 
121 | For quick installation, use one of the one-click install buttons below...
122 | 
123 | [![Install with UV in VS Code](https://img.shields.io/badge/VS_Code-UV-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=time&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-time%22%5D%7D) [![Install with UV in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-UV-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=time&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-time%22%5D%7D&quality=insiders)
124 | 
125 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=time&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Ftime%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=time&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Ftime%22%5D%7D&quality=insiders)
126 | 
127 | For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open User Settings (JSON)`.
128 | 
129 | Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others.
130 | 
131 | > Note that the `mcp` key is needed when using the `mcp.json` file.
132 | 
133 | <details>
134 | <summary>Using uvx</summary>
135 | 
136 | ```json
137 | {
138 |   "mcp": {
139 |     "servers": {
140 |       "time": {
141 |         "command": "uvx",
142 |         "args": ["mcp-server-time"]
143 |       }
144 |     }
145 |   }
146 | }
147 | ```
148 | </details>
149 | 
150 | <details>
151 | <summary>Using Docker</summary>
152 | 
153 | ```json
154 | {
155 |   "mcp": {
156 |     "servers": {
157 |       "time": {
158 |         "command": "docker",
159 |         "args": ["run", "-i", "--rm", "mcp/time"]
160 |       }
161 |     }
162 |   }
163 | }
164 | ```
165 | </details>
166 | 
167 | ### Configure for Zencoder
168 | 
169 | 1. Go to the Zencoder menu (...)
170 | 2. From the dropdown menu, select `Agent Tools`
171 | 3. Click on the `Add Custom MCP`
172 | 4. Add the name and server configuration from below, and make sure to hit the `Install` button
173 | 
174 | <details>
175 | <summary>Using uvx</summary>
176 | 
177 | ```json
178 | {
179 |     "command": "uvx",
180 |     "args": ["mcp-server-time"]
181 |   }
182 | ```
183 | </details>
184 | 
185 | ### Customization - System Timezone
186 | 
187 | By default, the server automatically detects your system's timezone. You can override this by adding the argument `--local-timezone` to the `args` list in the configuration.
188 | 
189 | Example:
190 | ```json
191 | {
192 |   "command": "python",
193 |   "args": ["-m", "mcp_server_time", "--local-timezone=America/New_York"]
194 | }
195 | ```
196 | 
197 | ## Example Interactions
198 | 
199 | 1. Get current time:
200 | ```json
201 | {
202 |   "name": "get_current_time",
203 |   "arguments": {
204 |     "timezone": "Europe/Warsaw"
205 |   }
206 | }
207 | ```
208 | Response:
209 | ```json
210 | {
211 |   "timezone": "Europe/Warsaw",
212 |   "datetime": "2024-01-01T13:00:00+01:00",
213 |   "is_dst": false
214 | }
215 | ```
216 | 
217 | 2. Convert time between timezones:
218 | ```json
219 | {
220 |   "name": "convert_time",
221 |   "arguments": {
222 |     "source_timezone": "America/New_York",
223 |     "time": "16:30",
224 |     "target_timezone": "Asia/Tokyo"
225 |   }
226 | }
227 | ```
228 | Response:
229 | ```json
{
  "source": {
    "timezone": "America/New_York",
    "datetime": "2024-01-01T16:30:00-05:00",
    "is_dst": false
  },
  "target": {
    "timezone": "Asia/Tokyo",
    "datetime": "2024-01-02T06:30:00+09:00",
    "is_dst": false
  },
  "time_difference": "+14.0h"
}
243 | ```
244 | 
245 | ## Debugging
246 | 
247 | You can use the MCP inspector to debug the server. For uvx installations:
248 | 
249 | ```bash
250 | npx @modelcontextprotocol/inspector uvx mcp-server-time
251 | ```
252 | 
253 | Or if you've installed the package in a specific directory or are developing on it:
254 | 
255 | ```bash
256 | cd path/to/servers/src/time
257 | npx @modelcontextprotocol/inspector uv run mcp-server-time
258 | ```
259 | 
260 | ## Examples of Questions for Claude
261 | 
262 | 1. "What time is it now?" (will use system timezone)
263 | 2. "What time is it in Tokyo?"
264 | 3. "When it's 4 PM in New York, what time is it in London?"
265 | 4. "Convert 9:30 AM Tokyo time to New York time"
266 | 
267 | ## Build
268 | 
269 | Docker build:
270 | 
271 | ```bash
272 | cd src/time
273 | docker build -t mcp/time .
274 | ```
275 | 
276 | ## Contributing
277 | 
278 | We encourage contributions to help expand and improve mcp-server-time. Whether you want to add new time-related tools, enhance existing functionality, or improve documentation, your input is valuable.
279 | 
280 | For examples of other MCP servers and implementation patterns, see:
281 | https://github.com/modelcontextprotocol/servers
282 | 
283 | Pull requests are welcome! Feel free to contribute new ideas, bug fixes, or enhancements to make mcp-server-time even more powerful and useful.
284 | 
285 | ## License
286 | 
287 | mcp-server-time is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository.
288 | 


--------------------------------------------------------------------------------
/src/time/pyproject.toml:
--------------------------------------------------------------------------------
 1 | [project]
 2 | name = "mcp-server-time"
 3 | version = "0.6.2"
 4 | description = "A Model Context Protocol server providing tools for time queries and timezone conversions for LLMs"
 5 | readme = "README.md"
 6 | requires-python = ">=3.10"
 7 | authors = [
 8 |     { name = "Mariusz 'maledorak' Korzekwa", email = "mariusz@korzekwa.dev" },
 9 | ]
10 | keywords = ["time", "timezone", "mcp", "llm"]
11 | license = { text = "MIT" }
12 | classifiers = [
13 |     "Development Status :: 4 - Beta",
14 |     "Intended Audience :: Developers",
15 |     "License :: OSI Approved :: MIT License",
16 |     "Programming Language :: Python :: 3",
17 |     "Programming Language :: Python :: 3.10",
18 | ]
19 | dependencies = [
20 |     "mcp>=1.0.0",
21 |     "pydantic>=2.0.0",
22 |     "tzdata>=2024.2",
23 |     "tzlocal>=5.3.1"
24 | ]
25 | 
26 | [project.scripts]
27 | mcp-server-time = "mcp_server_time:main"
28 | 
29 | [build-system]
30 | requires = ["hatchling"]
31 | build-backend = "hatchling.build"
32 | 
33 | [tool.uv]
34 | dev-dependencies = [
35 |     "freezegun>=1.5.1",
36 |     "pyright>=1.1.389",
37 |     "pytest>=8.3.3",
38 |     "ruff>=0.8.1",
39 | ]
40 | 


--------------------------------------------------------------------------------
/src/time/src/mcp_server_time/__init__.py:
--------------------------------------------------------------------------------
 1 | from .server import serve
 2 | 
 3 | 
 4 | def main():
 5 |     """MCP Time Server - Time and timezone conversion functionality for MCP"""
 6 |     import argparse
 7 |     import asyncio
 8 | 
 9 |     parser = argparse.ArgumentParser(
10 |         description="give a model the ability to handle time queries and timezone conversions"
11 |     )
12 |     parser.add_argument("--local-timezone", type=str, help="Override local timezone")
13 | 
14 |     args = parser.parse_args()
15 |     asyncio.run(serve(args.local_timezone))
16 | 
17 | 
# Allow running this module directly (e.g. during development).
if __name__ == "__main__":
    main()
20 | 


--------------------------------------------------------------------------------
/src/time/src/mcp_server_time/__main__.py:
--------------------------------------------------------------------------------
# Entry point for `python -m mcp_server_time`.
from mcp_server_time import main

main()
4 | 


--------------------------------------------------------------------------------
/src/time/src/mcp_server_time/server.py:
--------------------------------------------------------------------------------
  1 | from datetime import datetime, timedelta
  2 | from enum import Enum
  3 | import json
  4 | from typing import Sequence
  5 | 
  6 | from zoneinfo import ZoneInfo
  7 | from tzlocal import get_localzone_name  # ← returns "Europe/Paris", etc.
  8 | 
  9 | from mcp.server import Server
 10 | from mcp.server.stdio import stdio_server
 11 | from mcp.types import Tool, TextContent, ImageContent, EmbeddedResource
 12 | from mcp.shared.exceptions import McpError
 13 | 
 14 | from pydantic import BaseModel
 15 | 
 16 | 
class TimeTools(str, Enum):
    """Names of the MCP tools exposed by this server."""

    GET_CURRENT_TIME = "get_current_time"
    CONVERT_TIME = "convert_time"
 20 | 
 21 | 
class TimeResult(BaseModel):
    """A point in time expressed in a single timezone."""

    timezone: str  # IANA timezone name, e.g. "Europe/Warsaw"
    datetime: str  # ISO 8601 timestamp with UTC offset, seconds precision
    is_dst: bool  # whether daylight saving time is in effect at that moment
 26 | 
 27 | 
class TimeConversionResult(BaseModel):
    """Result of converting a wall-clock time between two timezones."""

    source: TimeResult
    target: TimeResult
    time_difference: str  # signed offset delta, e.g. "+14.0h" or "+5.75h"
 32 | 
 33 | 
class TimeConversionInput(BaseModel):
    # NOTE(review): appears unused in this module — convert_time() takes plain
    # str arguments and a single target timezone; confirm before removing.
    source_tz: str
    time: str  # expected 24-hour "HH:MM"
    target_tz_list: list[str]
 38 | 
 39 | 
 40 | def get_local_tz(local_tz_override: str | None = None) -> ZoneInfo:
 41 |     if local_tz_override:
 42 |         return ZoneInfo(local_tz_override)
 43 | 
 44 |     # Get local timezone from datetime.now()
 45 |     local_tzname = get_localzone_name()
 46 |     if local_tzname is not None:
 47 |         return ZoneInfo(local_tzname)
 48 |     raise McpError("Could not determine local timezone - tzinfo is None")
 49 | 
 50 | 
 51 | def get_zoneinfo(timezone_name: str) -> ZoneInfo:
 52 |     try:
 53 |         return ZoneInfo(timezone_name)
 54 |     except Exception as e:
 55 |         raise McpError(f"Invalid timezone: {str(e)}")
 56 | 
 57 | 
 58 | class TimeServer:
 59 |     def get_current_time(self, timezone_name: str) -> TimeResult:
 60 |         """Get current time in specified timezone"""
 61 |         timezone = get_zoneinfo(timezone_name)
 62 |         current_time = datetime.now(timezone)
 63 | 
 64 |         return TimeResult(
 65 |             timezone=timezone_name,
 66 |             datetime=current_time.isoformat(timespec="seconds"),
 67 |             is_dst=bool(current_time.dst()),
 68 |         )
 69 | 
 70 |     def convert_time(
 71 |         self, source_tz: str, time_str: str, target_tz: str
 72 |     ) -> TimeConversionResult:
 73 |         """Convert time between timezones"""
 74 |         source_timezone = get_zoneinfo(source_tz)
 75 |         target_timezone = get_zoneinfo(target_tz)
 76 | 
 77 |         try:
 78 |             parsed_time = datetime.strptime(time_str, "%H:%M").time()
 79 |         except ValueError:
 80 |             raise ValueError("Invalid time format. Expected HH:MM [24-hour format]")
 81 | 
 82 |         now = datetime.now(source_timezone)
 83 |         source_time = datetime(
 84 |             now.year,
 85 |             now.month,
 86 |             now.day,
 87 |             parsed_time.hour,
 88 |             parsed_time.minute,
 89 |             tzinfo=source_timezone,
 90 |         )
 91 | 
 92 |         target_time = source_time.astimezone(target_timezone)
 93 |         source_offset = source_time.utcoffset() or timedelta()
 94 |         target_offset = target_time.utcoffset() or timedelta()
 95 |         hours_difference = (target_offset - source_offset).total_seconds() / 3600
 96 | 
 97 |         if hours_difference.is_integer():
 98 |             time_diff_str = f"{hours_difference:+.1f}h"
 99 |         else:
100 |             # For fractional hours like Nepal's UTC+5:45
101 |             time_diff_str = f"{hours_difference:+.2f}".rstrip("0").rstrip(".") + "h"
102 | 
103 |         return TimeConversionResult(
104 |             source=TimeResult(
105 |                 timezone=source_tz,
106 |                 datetime=source_time.isoformat(timespec="seconds"),
107 |                 is_dst=bool(source_time.dst()),
108 |             ),
109 |             target=TimeResult(
110 |                 timezone=target_tz,
111 |                 datetime=target_time.isoformat(timespec="seconds"),
112 |                 is_dst=bool(target_time.dst()),
113 |             ),
114 |             time_difference=time_diff_str,
115 |         )
116 | 
117 | 
118 | async def serve(local_timezone: str | None = None) -> None:
119 |     server = Server("mcp-time")
120 |     time_server = TimeServer()
121 |     local_tz = str(get_local_tz(local_timezone))
122 | 
123 |     @server.list_tools()
124 |     async def list_tools() -> list[Tool]:
125 |         """List available time tools."""
126 |         return [
127 |             Tool(
128 |                 name=TimeTools.GET_CURRENT_TIME.value,
129 |                 description="Get current time in a specific timezones",
130 |                 inputSchema={
131 |                     "type": "object",
132 |                     "properties": {
133 |                         "timezone": {
134 |                             "type": "string",
135 |                             "description": f"IANA timezone name (e.g., 'America/New_York', 'Europe/London'). Use '{local_tz}' as local timezone if no timezone provided by the user.",
136 |                         }
137 |                     },
138 |                     "required": ["timezone"],
139 |                 },
140 |             ),
141 |             Tool(
142 |                 name=TimeTools.CONVERT_TIME.value,
143 |                 description="Convert time between timezones",
144 |                 inputSchema={
145 |                     "type": "object",
146 |                     "properties": {
147 |                         "source_timezone": {
148 |                             "type": "string",
149 |                             "description": f"Source IANA timezone name (e.g., 'America/New_York', 'Europe/London'). Use '{local_tz}' as local timezone if no source timezone provided by the user.",
150 |                         },
151 |                         "time": {
152 |                             "type": "string",
153 |                             "description": "Time to convert in 24-hour format (HH:MM)",
154 |                         },
155 |                         "target_timezone": {
156 |                             "type": "string",
157 |                             "description": f"Target IANA timezone name (e.g., 'Asia/Tokyo', 'America/San_Francisco'). Use '{local_tz}' as local timezone if no target timezone provided by the user.",
158 |                         },
159 |                     },
160 |                     "required": ["source_timezone", "time", "target_timezone"],
161 |                 },
162 |             ),
163 |         ]
164 | 
165 |     @server.call_tool()
166 |     async def call_tool(
167 |         name: str, arguments: dict
168 |     ) -> Sequence[TextContent | ImageContent | EmbeddedResource]:
169 |         """Handle tool calls for time queries."""
170 |         try:
171 |             match name:
172 |                 case TimeTools.GET_CURRENT_TIME.value:
173 |                     timezone = arguments.get("timezone")
174 |                     if not timezone:
175 |                         raise ValueError("Missing required argument: timezone")
176 | 
177 |                     result = time_server.get_current_time(timezone)
178 | 
179 |                 case TimeTools.CONVERT_TIME.value:
180 |                     if not all(
181 |                         k in arguments
182 |                         for k in ["source_timezone", "time", "target_timezone"]
183 |                     ):
184 |                         raise ValueError("Missing required arguments")
185 | 
186 |                     result = time_server.convert_time(
187 |                         arguments["source_timezone"],
188 |                         arguments["time"],
189 |                         arguments["target_timezone"],
190 |                     )
191 |                 case _:
192 |                     raise ValueError(f"Unknown tool: {name}")
193 | 
194 |             return [
195 |                 TextContent(type="text", text=json.dumps(result.model_dump(), indent=2))
196 |             ]
197 | 
198 |         except Exception as e:
199 |             raise ValueError(f"Error processing mcp-server-time query: {str(e)}")
200 | 
201 |     options = server.create_initialization_options()
202 |     async with stdio_server() as (read_stream, write_stream):
203 |         await server.run(read_stream, write_stream, options)
204 | 


--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
 1 | {
 2 |   "compilerOptions": {
 3 |     "target": "ES2022",
 4 |     "module": "Node16",
 5 |     "moduleResolution": "Node16",
 6 |     "strict": true,
 7 |     "esModuleInterop": true,
 8 |     "skipLibCheck": true,
 9 |     "forceConsistentCasingInFileNames": true,
10 |     "resolveJsonModule": true
11 |   },
12 |   "include": ["src/**/*"],
13 |   "exclude": ["node_modules"]
14 | }
15 | 


--------------------------------------------------------------------------------