├── .gitattributes ├── .github ├── pull_request_template.md └── workflows │ ├── python.yml │ ├── release.yml │ ├── typescript.yml │ └── version-check.yml ├── .gitignore ├── .npmrc ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── SECURITY.md ├── package-lock.json ├── package.json ├── scripts └── release.py ├── src ├── aws-kb-retrieval-server │ ├── Dockerfile │ ├── README.md │ ├── index.ts │ ├── package.json │ └── tsconfig.json ├── brave-search │ ├── Dockerfile │ ├── README.md │ ├── index.ts │ ├── package.json │ └── tsconfig.json ├── everart │ ├── Dockerfile │ ├── README.md │ ├── index.ts │ ├── package.json │ └── tsconfig.json ├── everything │ ├── CLAUDE.md │ ├── Dockerfile │ ├── README.md │ ├── everything.ts │ ├── index.ts │ ├── package.json │ ├── sse.ts │ └── tsconfig.json ├── fetch │ ├── .python-version │ ├── Dockerfile │ ├── LICENSE │ ├── README.md │ ├── pyproject.toml │ ├── src │ │ └── mcp_server_fetch │ │ │ ├── __init__.py │ │ │ ├── __main__.py │ │ │ └── server.py │ └── uv.lock ├── filesystem │ ├── Dockerfile │ ├── README.md │ ├── index.ts │ ├── package.json │ └── tsconfig.json ├── gdrive │ ├── Dockerfile │ ├── README.md │ ├── index.ts │ ├── package.json │ ├── replace_open.sh │ └── tsconfig.json ├── git │ ├── .gitignore │ ├── .python-version │ ├── Dockerfile │ ├── LICENSE │ ├── README.md │ ├── pyproject.toml │ ├── src │ │ └── mcp_server_git │ │ │ ├── __init__.py │ │ │ ├── __main__.py │ │ │ └── server.py │ ├── tests │ │ └── test_server.py │ └── uv.lock ├── github │ ├── Dockerfile │ ├── README.md │ ├── common │ │ ├── errors.ts │ │ ├── types.ts │ │ ├── utils.ts │ │ └── version.ts │ ├── index.ts │ ├── operations │ │ ├── branches.ts │ │ ├── commits.ts │ │ ├── files.ts │ │ ├── issues.ts │ │ ├── pulls.ts │ │ ├── repository.ts │ │ └── search.ts │ ├── package.json │ └── tsconfig.json ├── gitlab │ ├── Dockerfile │ ├── README.md │ ├── index.ts │ ├── package.json │ ├── schemas.ts │ └── tsconfig.json ├── google-maps │ ├── Dockerfile │ ├── README.md │ ├── index.ts │ ├── package.json │ └── tsconfig.json ├── memory │ ├── Dockerfile │ ├── README.md │ ├── index.ts │ ├── package.json │ └── tsconfig.json ├── postgres │ ├── Dockerfile │ ├── README.md │ ├── index.ts │ ├── package.json │ └── tsconfig.json ├── puppeteer │ ├── Dockerfile │ ├── README.md │ ├── index.ts │ ├── package.json │ └── tsconfig.json ├── redis │ ├── Dockerfile │ ├── README.md │ ├── package.json │ ├── src │ │ └── index.ts │ └── tsconfig.json ├── sentry │ ├── .python-version │ ├── Dockerfile │ ├── README.md │ ├── pyproject.toml │ ├── src │ │ └── mcp_server_sentry │ │ │ ├── __init__.py │ │ │ ├── __main__.py │ │ │ └── server.py │ └── uv.lock ├── sequentialthinking │ ├── Dockerfile │ ├── README.md │ ├── index.ts │ ├── package.json │ └── tsconfig.json ├── slack │ ├── Dockerfile │ ├── README.md │ ├── index.ts │ ├── package.json │ └── tsconfig.json ├── sqlite │ ├── .python-version │ ├── Dockerfile │ ├── README.md │ ├── pyproject.toml │ ├── src │ │ └── mcp_server_sqlite │ │ │ ├── __init__.py │ │ │ └── server.py │ ├── test.db │ └── uv.lock └── time │ ├── .python-version │ ├── Dockerfile │ ├── README.md │ ├── pyproject.toml │ ├── src │ └── mcp_server_time │ │ ├── __init__.py │ │ ├── __main__.py │ │ └── server.py │ ├── test │ └── time_server_test.py │ └── uv.lock └── tsconfig.json /.gitattributes: -------------------------------------------------------------------------------- 1 | package-lock.json linguist-generated=true 2 | -------------------------------------------------------------------------------- 
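The single `.gitattributes` rule above marks `package-lock.json` as machine-generated, so GitHub collapses its diffs by default and leaves it out of the repository's language statistics. To confirm the attribute is picked up, a quick check from the repository root (a sketch; `git check-attr` simply reports what `.gitattributes` resolves to for that path):

```sh
git check-attr linguist-generated package-lock.json
# expected output: package-lock.json: linguist-generated: true
```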
/.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | ## Description 4 | 5 | ## Server Details 6 | 7 | - Server: 8 | - Changes to: 9 | 10 | ## Motivation and Context 11 | 12 | 13 | ## How Has This Been Tested? 14 | 15 | 16 | ## Breaking Changes 17 | 18 | 19 | ## Types of changes 20 | 21 | - [ ] Bug fix (non-breaking change which fixes an issue) 22 | - [ ] New feature (non-breaking change which adds functionality) 23 | - [ ] Breaking change (fix or feature that would cause existing functionality to change) 24 | - [ ] Documentation update 25 | 26 | ## Checklist 27 | 28 | - [ ] I have read the [MCP Protocol Documentation](https://modelcontextprotocol.io) 29 | - [ ] My changes follow MCP security best practices 30 | - [ ] I have updated the server's README accordingly 31 | - [ ] I have tested this with an LLM client 32 | - [ ] My code follows the repository's style guidelines 33 | - [ ] New and existing tests pass locally 34 | - [ ] I have added appropriate error handling 35 | - [ ] I have documented all environment variables and configuration options 36 | 37 | ## Additional context 38 | 39 | -------------------------------------------------------------------------------- /.github/workflows/python.yml: -------------------------------------------------------------------------------- 1 | name: Python 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | release: 9 | types: [published] 10 | 11 | jobs: 12 | detect-packages: 13 | runs-on: ubuntu-latest 14 | outputs: 15 | packages: ${{ steps.find-packages.outputs.packages }} 16 | steps: 17 | - uses: actions/checkout@v4 18 | 19 | - name: Find Python packages 20 | id: find-packages 21 | working-directory: src 22 | run: | 23 | PACKAGES=$(find . 
-name pyproject.toml -exec dirname {} \; | sed 's/^\.\///' | jq -R -s -c 'split("\n")[:-1]') 24 | echo "packages=$PACKAGES" >> $GITHUB_OUTPUT 25 | 26 | build: 27 | needs: [detect-packages] 28 | strategy: 29 | matrix: 30 | package: ${{ fromJson(needs.detect-packages.outputs.packages) }} 31 | name: Build ${{ matrix.package }} 32 | runs-on: ubuntu-latest 33 | steps: 34 | - uses: actions/checkout@v4 35 | 36 | - name: Install uv 37 | uses: astral-sh/setup-uv@v3 38 | 39 | - name: Set up Python 40 | uses: actions/setup-python@v5 41 | with: 42 | python-version-file: "src/${{ matrix.package }}/.python-version" 43 | 44 | - name: Install dependencies 45 | working-directory: src/${{ matrix.package }} 46 | run: uv sync --frozen --all-extras --dev 47 | 48 | - name: Run pyright 49 | working-directory: src/${{ matrix.package }} 50 | run: uv run --frozen pyright 51 | 52 | - name: Build package 53 | working-directory: src/${{ matrix.package }} 54 | run: uv build 55 | 56 | - name: Upload artifacts 57 | uses: actions/upload-artifact@v4 58 | with: 59 | name: dist-${{ matrix.package }} 60 | path: src/${{ matrix.package }}/dist/ 61 | 62 | publish: 63 | runs-on: ubuntu-latest 64 | needs: [build, detect-packages] 65 | if: github.event_name == 'release' 66 | 67 | strategy: 68 | matrix: 69 | package: ${{ fromJson(needs.detect-packages.outputs.packages) }} 70 | name: Publish ${{ matrix.package }} 71 | 72 | environment: release 73 | permissions: 74 | id-token: write # Required for trusted publishing 75 | 76 | steps: 77 | - name: Download artifacts 78 | uses: actions/download-artifact@v4 79 | with: 80 | name: dist-${{ matrix.package }} 81 | path: dist/ 82 | 83 | - name: Publish package to PyPI 84 | uses: pypa/gh-action-pypi-publish@release/v1 85 | -------------------------------------------------------------------------------- /.github/workflows/typescript.yml: -------------------------------------------------------------------------------- 1 | name: TypeScript 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | release: 9 | types: [published] 10 | 11 | jobs: 12 | detect-packages: 13 | runs-on: ubuntu-latest 14 | outputs: 15 | packages: ${{ steps.find-packages.outputs.packages }} 16 | steps: 17 | - uses: actions/checkout@v4 18 | - name: Find JS packages 19 | id: find-packages 20 | working-directory: src 21 | run: | 22 | PACKAGES=$(find . 
-name package.json -not -path "*/node_modules/*" -exec dirname {} \; | sed 's/^\.\///' | jq -R -s -c 'split("\n")[:-1]') 23 | echo "packages=$PACKAGES" >> $GITHUB_OUTPUT 24 | 25 | build: 26 | needs: [detect-packages] 27 | strategy: 28 | matrix: 29 | package: ${{ fromJson(needs.detect-packages.outputs.packages) }} 30 | name: Build ${{ matrix.package }} 31 | runs-on: ubuntu-latest 32 | steps: 33 | - uses: actions/checkout@v4 34 | 35 | - uses: actions/setup-node@v4 36 | with: 37 | node-version: 22 38 | cache: npm 39 | 40 | - name: Install dependencies 41 | working-directory: src/${{ matrix.package }} 42 | run: npm ci 43 | 44 | - name: Build package 45 | working-directory: src/${{ matrix.package }} 46 | run: npm run build 47 | 48 | publish: 49 | runs-on: ubuntu-latest 50 | needs: [build, detect-packages] 51 | if: github.event_name == 'release' 52 | environment: release 53 | 54 | strategy: 55 | matrix: 56 | package: ${{ fromJson(needs.detect-packages.outputs.packages) }} 57 | name: Publish ${{ matrix.package }} 58 | 59 | permissions: 60 | contents: read 61 | id-token: write 62 | 63 | steps: 64 | - uses: actions/checkout@v4 65 | - uses: actions/setup-node@v4 66 | with: 67 | node-version: 22 68 | cache: npm 69 | registry-url: "https://registry.npmjs.org" 70 | 71 | - name: Install dependencies 72 | working-directory: src/${{ matrix.package }} 73 | run: npm ci 74 | 75 | - name: Publish package 76 | working-directory: src/${{ matrix.package }} 77 | run: npm publish --access public 78 | env: 79 | NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} 80 | -------------------------------------------------------------------------------- /.github/workflows/version-check.yml: -------------------------------------------------------------------------------- 1 | name: Version Consistency Check 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | release: 9 | types: [published] 10 | 11 | jobs: 12 | github: 13 | name: Check GitHub server version consistency 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@v4 17 | 18 | - name: Check version consistency 19 | run: | 20 | PACKAGE_VERSION=$(node -p "require('./src/github/package.json').version") 21 | TS_VERSION=$(grep -o '".*"' ./src/github/common/version.ts | tr -d '"') 22 | 23 | if [ "$PACKAGE_VERSION" != "$TS_VERSION" ]; then 24 | echo "::error::Version mismatch detected!" 
25 | echo "::error::package.json version: $PACKAGE_VERSION" 26 | echo "::error::version.ts version: $TS_VERSION" 27 | exit 1 28 | else 29 | echo "✅ Versions match: $PACKAGE_VERSION" 30 | fi 31 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | .pnpm-debug.log* 9 | 10 | # Diagnostic reports (https://nodejs.org/api/report.html) 11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 12 | 13 | # Runtime data 14 | pids 15 | *.pid 16 | *.seed 17 | *.pid.lock 18 | 19 | # Directory for instrumented libs generated by jscoverage/JSCover 20 | lib-cov 21 | 22 | # Coverage directory used by tools like istanbul 23 | coverage 24 | *.lcov 25 | 26 | # nyc test coverage 27 | .nyc_output 28 | 29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 30 | .grunt 31 | 32 | # Bower dependency directory (https://bower.io/) 33 | bower_components 34 | 35 | # node-waf configuration 36 | .lock-wscript 37 | 38 | # Compiled binary addons (https://nodejs.org/api/addons.html) 39 | build/Release 40 | 41 | # Dependency directories 42 | node_modules/ 43 | jspm_packages/ 44 | 45 | # Snowpack dependency directory (https://snowpack.dev/) 46 | web_modules/ 47 | 48 | # TypeScript cache 49 | *.tsbuildinfo 50 | 51 | # Optional npm cache directory 52 | .npm 53 | 54 | # Optional eslint cache 55 | .eslintcache 56 | 57 | # Optional stylelint cache 58 | .stylelintcache 59 | 60 | # Microbundle cache 61 | .rpt2_cache/ 62 | .rts2_cache_cjs/ 63 | .rts2_cache_es/ 64 | .rts2_cache_umd/ 65 | 66 | # Optional REPL history 67 | .node_repl_history 68 | 69 | # Output of 'npm pack' 70 | *.tgz 71 | 72 | # Yarn Integrity file 73 | .yarn-integrity 74 | 75 | # dotenv environment variable files 76 | .env 77 | .env.development.local 78 | .env.test.local 79 | .env.production.local 80 | .env.local 81 | 82 | # parcel-bundler cache (https://parceljs.org/) 83 | .cache 84 | .parcel-cache 85 | 86 | # Next.js build output 87 | .next 88 | out 89 | 90 | # Nuxt.js build / generate output 91 | .nuxt 92 | dist 93 | 94 | # Gatsby files 95 | .cache/ 96 | # Comment in the public line in if your project uses Gatsby and not Next.js 97 | # https://nextjs.org/blog/next-9-1#public-directory-support 98 | # public 99 | 100 | # vuepress build output 101 | .vuepress/dist 102 | 103 | # vuepress v2.x temp and cache directory 104 | .temp 105 | .cache 106 | 107 | # Docusaurus cache and generated files 108 | .docusaurus 109 | 110 | # Serverless directories 111 | .serverless/ 112 | 113 | # FuseBox cache 114 | .fusebox/ 115 | 116 | # DynamoDB Local files 117 | .dynamodb/ 118 | 119 | # TernJS port file 120 | .tern-port 121 | 122 | # Stores VSCode versions used for testing VSCode extensions 123 | .vscode-test 124 | 125 | # yarn v2 126 | .yarn/cache 127 | .yarn/unplugged 128 | .yarn/build-state.yml 129 | .yarn/install-state.gz 130 | .pnp.* 131 | 132 | build/ 133 | 134 | gcp-oauth.keys.json 135 | .*-server-credentials.json 136 | 137 | # Byte-compiled / optimized / DLL files 138 | __pycache__/ 139 | *.py[cod] 140 | *$py.class 141 | 142 | # C extensions 143 | *.so 144 | 145 | # Distribution / packaging 146 | .Python 147 | build/ 148 | develop-eggs/ 149 | dist/ 150 | downloads/ 151 | eggs/ 152 | .eggs/ 153 | lib/ 154 | lib64/ 155 | parts/ 156 | sdist/ 157 | var/ 158 | wheels/ 159 | share/python-wheels/ 160 | *.egg-info/ 161 | 
.installed.cfg 162 | *.egg 163 | MANIFEST 164 | 165 | # PyInstaller 166 | # Usually these files are written by a python script from a template 167 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 168 | *.manifest 169 | *.spec 170 | 171 | # Installer logs 172 | pip-log.txt 173 | pip-delete-this-directory.txt 174 | 175 | # Unit test / coverage reports 176 | htmlcov/ 177 | .tox/ 178 | .nox/ 179 | .coverage 180 | .coverage.* 181 | .cache 182 | nosetests.xml 183 | coverage.xml 184 | *.cover 185 | *.py,cover 186 | .hypothesis/ 187 | .pytest_cache/ 188 | cover/ 189 | 190 | # Translations 191 | *.mo 192 | *.pot 193 | 194 | # Django stuff: 195 | *.log 196 | local_settings.py 197 | db.sqlite3 198 | db.sqlite3-journal 199 | 200 | # Flask stuff: 201 | instance/ 202 | .webassets-cache 203 | 204 | # Scrapy stuff: 205 | .scrapy 206 | 207 | # Sphinx documentation 208 | docs/_build/ 209 | 210 | # PyBuilder 211 | .pybuilder/ 212 | target/ 213 | 214 | # Jupyter Notebook 215 | .ipynb_checkpoints 216 | 217 | # IPython 218 | profile_default/ 219 | ipython_config.py 220 | 221 | # pyenv 222 | # For a library or package, you might want to ignore these files since the code is 223 | # intended to run in multiple environments; otherwise, check them in: 224 | # .python-version 225 | 226 | # pipenv 227 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 228 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 229 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 230 | # install all needed dependencies. 231 | #Pipfile.lock 232 | 233 | # poetry 234 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 235 | # This is especially recommended for binary packages to ensure reproducibility, and is more 236 | # commonly ignored for libraries. 237 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 238 | #poetry.lock 239 | 240 | # pdm 241 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 242 | #pdm.lock 243 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 244 | # in version control. 245 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control 246 | .pdm.toml 247 | .pdm-python 248 | .pdm-build/ 249 | 250 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 251 | __pypackages__/ 252 | 253 | # Celery stuff 254 | celerybeat-schedule 255 | celerybeat.pid 256 | 257 | # SageMath parsed files 258 | *.sage.py 259 | 260 | # Environments 261 | .env 262 | .venv 263 | env/ 264 | venv/ 265 | ENV/ 266 | env.bak/ 267 | venv.bak/ 268 | 269 | # Spyder project settings 270 | .spyderproject 271 | .spyproject 272 | 273 | # Rope project settings 274 | .ropeproject 275 | 276 | # mkdocs documentation 277 | /site 278 | 279 | # mypy 280 | .mypy_cache/ 281 | .dmypy.json 282 | dmypy.json 283 | 284 | # Pyre type checker 285 | .pyre/ 286 | 287 | # pytype static type analyzer 288 | .pytype/ 289 | 290 | # Cython debug symbols 291 | cython_debug/ 292 | 293 | .DS_Store 294 | 295 | # PyCharm 296 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 297 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 298 | # and can be added to the global gitignore or merged into this file. 
For a more nuclear 299 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 300 | #.idea/ 301 | -------------------------------------------------------------------------------- /.npmrc: -------------------------------------------------------------------------------- 1 | registry="https://registry.npmjs.org/" 2 | @modelcontextprotocol:registry="https://registry.npmjs.org/" 3 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | We as members, contributors, and leaders pledge to make participation in our 6 | community a harassment-free experience for everyone, regardless of age, body 7 | size, visible or invisible disability, ethnicity, sex characteristics, gender 8 | identity and expression, level of experience, education, socio-economic status, 9 | nationality, personal appearance, race, religion, or sexual identity 10 | and orientation. 11 | 12 | We pledge to act and interact in ways that contribute to an open, welcoming, 13 | diverse, inclusive, and healthy community. 14 | 15 | ## Our Standards 16 | 17 | Examples of behavior that contributes to a positive environment for our 18 | community include: 19 | 20 | * Demonstrating empathy and kindness toward other people 21 | * Being respectful of differing opinions, viewpoints, and experiences 22 | * Giving and gracefully accepting constructive feedback 23 | * Accepting responsibility and apologizing to those affected by our mistakes, 24 | and learning from the experience 25 | * Focusing on what is best not just for us as individuals, but for the 26 | overall community 27 | 28 | Examples of unacceptable behavior include: 29 | 30 | * The use of sexualized language or imagery, and sexual attention or 31 | advances of any kind 32 | * Trolling, insulting or derogatory comments, and personal or political attacks 33 | * Public or private harassment 34 | * Publishing others' private information, such as a physical or email 35 | address, without their explicit permission 36 | * Other conduct which could reasonably be considered inappropriate in a 37 | professional setting 38 | 39 | ## Enforcement Responsibilities 40 | 41 | Community leaders are responsible for clarifying and enforcing our standards of 42 | acceptable behavior and will take appropriate and fair corrective action in 43 | response to any behavior that they deem inappropriate, threatening, offensive, 44 | or harmful. 45 | 46 | Community leaders have the right and responsibility to remove, edit, or reject 47 | comments, commits, code, wiki edits, issues, and other contributions that are 48 | not aligned to this Code of Conduct, and will communicate reasons for moderation 49 | decisions when appropriate. 50 | 51 | ## Scope 52 | 53 | This Code of Conduct applies within all community spaces, and also applies when 54 | an individual is officially representing the community in public spaces. 55 | Examples of representing our community include using an official e-mail address, 56 | posting via an official social media account, or acting as an appointed 57 | representative at an online or offline event. 58 | 59 | ## Enforcement 60 | 61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 62 | reported to the community leaders responsible for enforcement at 63 | mcp-coc@anthropic.com. 
64 | All complaints will be reviewed and investigated promptly and fairly. 65 | 66 | All community leaders are obligated to respect the privacy and security of the 67 | reporter of any incident. 68 | 69 | ## Enforcement Guidelines 70 | 71 | Community leaders will follow these Community Impact Guidelines in determining 72 | the consequences for any action they deem in violation of this Code of Conduct: 73 | 74 | ### 1. Correction 75 | 76 | **Community Impact**: Use of inappropriate language or other behavior deemed 77 | unprofessional or unwelcome in the community. 78 | 79 | **Consequence**: A private, written warning from community leaders, providing 80 | clarity around the nature of the violation and an explanation of why the 81 | behavior was inappropriate. A public apology may be requested. 82 | 83 | ### 2. Warning 84 | 85 | **Community Impact**: A violation through a single incident or series 86 | of actions. 87 | 88 | **Consequence**: A warning with consequences for continued behavior. No 89 | interaction with the people involved, including unsolicited interaction with 90 | those enforcing the Code of Conduct, for a specified period of time. This 91 | includes avoiding interactions in community spaces as well as external channels 92 | like social media. Violating these terms may lead to a temporary or 93 | permanent ban. 94 | 95 | ### 3. Temporary Ban 96 | 97 | **Community Impact**: A serious violation of community standards, including 98 | sustained inappropriate behavior. 99 | 100 | **Consequence**: A temporary ban from any sort of interaction or public 101 | communication with the community for a specified period of time. No public or 102 | private interaction with the people involved, including unsolicited interaction 103 | with those enforcing the Code of Conduct, is allowed during this period. 104 | Violating these terms may lead to a permanent ban. 105 | 106 | ### 4. Permanent Ban 107 | 108 | **Community Impact**: Demonstrating a pattern of violation of community 109 | standards, including sustained inappropriate behavior, harassment of an 110 | individual, or aggression toward or disparagement of classes of individuals. 111 | 112 | **Consequence**: A permanent ban from any sort of public interaction within 113 | the community. 114 | 115 | ## Attribution 116 | 117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 118 | version 2.0, available at 119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. 120 | 121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct 122 | enforcement ladder](https://github.com/mozilla/diversity). 123 | 124 | [homepage]: https://www.contributor-covenant.org 125 | 126 | For answers to common questions about this code of conduct, see the FAQ at 127 | https://www.contributor-covenant.org/faq. Translations are available at 128 | https://www.contributor-covenant.org/translations. 129 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to MCP Servers 2 | 3 | Thank you for your interest in contributing to the Model Context Protocol (MCP) servers! This document provides guidelines and instructions for contributing. 4 | 5 | ## Types of Contributions 6 | 7 | ### 1. New Servers 8 | 9 | The repository contains reference implementations, as well as a list of community servers. 10 | We generally don't accept new servers into the repository. 
We do accept pull requests to the [README.md](./README.md) 11 | adding a reference to your servers. 12 | 13 | Please keep lists in alphabetical order to minimize merge conflicts when adding new items. 14 | 15 | - Check the [modelcontextprotocol.io](https://modelcontextprotocol.io) documentation 16 | - Ensure your server doesn't duplicate existing functionality 17 | - Consider whether your server would be generally useful to others 18 | - Follow [security best practices](https://modelcontextprotocol.io/docs/concepts/transports#security-considerations) from the MCP documentation 19 | - Create a PR adding a link to your server to the [README.md](./README.md). 20 | 21 | ### 2. Improvements to Existing Servers 22 | Enhancements to existing servers are welcome! This includes: 23 | 24 | - Bug fixes 25 | - Performance improvements 26 | - New features 27 | - Security enhancements 28 | 29 | ### 3. Documentation 30 | Documentation improvements are always welcome: 31 | 32 | - Fixing typos or unclear instructions 33 | - Adding examples 34 | - Improving setup instructions 35 | - Adding troubleshooting guides 36 | 37 | ## Getting Started 38 | 39 | 1. Fork the repository 40 | 2. Clone your fork: 41 | ```bash 42 | git clone https://github.com/your-username/servers.git 43 | ``` 44 | 3. Add the upstream remote: 45 | ```bash 46 | git remote add upstream https://github.com/modelcontextprotocol/servers.git 47 | ``` 48 | 4. Create a branch: 49 | ```bash 50 | git checkout -b my-feature 51 | ``` 52 | 53 | ## Development Guidelines 54 | 55 | ### Code Style 56 | - Follow the existing code style in the repository 57 | - Include appropriate type definitions 58 | - Add comments for complex logic 59 | 60 | ### Documentation 61 | - Include a detailed README.md in your server directory 62 | - Document all configuration options 63 | - Provide setup instructions 64 | - Include usage examples 65 | 66 | ### Security 67 | - Follow security best practices 68 | - Implement proper input validation 69 | - Handle errors appropriately 70 | - Document security considerations 71 | 72 | ## Submitting Changes 73 | 74 | 1. Commit your changes: 75 | ```bash 76 | git add . 77 | git commit -m "Description of changes" 78 | ``` 79 | 2. Push to your fork: 80 | ```bash 81 | git push origin my-feature 82 | ``` 83 | 3. Create a Pull Request through GitHub 84 | 85 | ### Pull Request Guidelines 86 | 87 | - Thoroughly test your changes 88 | - Fill out the pull request template completely 89 | - Link any related issues 90 | - Provide clear description of changes 91 | - Include any necessary documentation updates 92 | - Add screenshots for UI changes 93 | - List any breaking changes 94 | 95 | ## Community 96 | 97 | - Participate in [GitHub Discussions](https://github.com/orgs/modelcontextprotocol/discussions) 98 | - Follow the [Code of Conduct](CODE_OF_CONDUCT.md) 99 | 100 | ## Questions? 101 | 102 | - Check the [documentation](https://modelcontextprotocol.io) 103 | - Ask in GitHub Discussions 104 | 105 | Thank you for contributing to MCP Servers! 
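As the "New Servers" section above notes, a community server is contributed by a PR that only adds a link to the appropriate list in [README.md](./README.md), kept in alphabetical order. A minimal sketch of what such an entry might look like (the exact list format is defined in README.md itself; the server name and URL here are placeholders):

```markdown
- **[Foo Search](https://github.com/example/foo-search-mcp)** - MCP server exposing the hypothetical Foo search API
```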
106 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Anthropic, PBC 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | Thank you for helping us keep our MCP servers secure. 3 | 4 | These servers are maintained by [Anthropic](https://www.anthropic.com/) as part of the Model Context Protocol project. 5 | 6 | The security of our systems and user data is Anthropic’s top priority. We appreciate the work of security researchers acting in good faith in identifying and reporting potential vulnerabilities. 7 | 8 | ## Vulnerability Disclosure Program 9 | 10 | Our Vulnerability Program guidelines are defined on our [HackerOne program page](https://hackerone.com/anthropic-vdp). We ask that any validated vulnerability in this functionality be reported through the [submission form](https://hackerone.com/anthropic-vdp/reports/new?type=team&report_type=vulnerability). 
11 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/servers", 3 | "private": true, 4 | "version": "0.6.2", 5 | "description": "Model Context Protocol servers", 6 | "license": "MIT", 7 | "author": "Anthropic, PBC (https://anthropic.com)", 8 | "homepage": "https://modelcontextprotocol.io", 9 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 10 | "type": "module", 11 | "workspaces": [ 12 | "src/*" 13 | ], 14 | "files": [], 15 | "scripts": { 16 | "build": "npm run build --workspaces", 17 | "watch": "npm run watch --workspaces", 18 | "publish-all": "npm publish --workspaces --access public", 19 | "link-all": "npm link --workspaces" 20 | }, 21 | "dependencies": { 22 | "@modelcontextprotocol/server-everything": "*", 23 | "@modelcontextprotocol/server-gdrive": "*", 24 | "@modelcontextprotocol/server-postgres": "*", 25 | "@modelcontextprotocol/server-puppeteer": "*", 26 | "@modelcontextprotocol/server-slack": "*", 27 | "@modelcontextprotocol/server-brave-search": "*", 28 | "@modelcontextprotocol/server-memory": "*", 29 | "@modelcontextprotocol/server-filesystem": "*", 30 | "@modelcontextprotocol/server-everart": "*", 31 | "@modelcontextprotocol/server-sequential-thinking": "*" 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /scripts/release.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env uv run --script 2 | # /// script 3 | # requires-python = ">=3.12" 4 | # dependencies = [ 5 | # "click>=8.1.8", 6 | # "tomlkit>=0.13.2" 7 | # ] 8 | # /// 9 | import sys 10 | import re 11 | import click 12 | from pathlib import Path 13 | import json 14 | import tomlkit 15 | import datetime 16 | import subprocess 17 | from dataclasses import dataclass 18 | from typing import Any, Iterator, NewType, Protocol 19 | 20 | 21 | Version = NewType("Version", str) 22 | GitHash = NewType("GitHash", str) 23 | 24 | 25 | class GitHashParamType(click.ParamType): 26 | name = "git_hash" 27 | 28 | def convert( 29 | self, value: Any, param: click.Parameter | None, ctx: click.Context | None 30 | ) -> GitHash | None: 31 | if value is None: 32 | return None 33 | 34 | if not (8 <= len(value) <= 40): 35 | self.fail(f"Git hash must be between 8 and 40 characters, got {len(value)}") 36 | 37 | if not re.match(r"^[0-9a-fA-F]+$", value): 38 | self.fail("Git hash must contain only hex digits (0-9, a-f)") 39 | 40 | try: 41 | # Verify hash exists in repo 42 | subprocess.run( 43 | ["git", "rev-parse", "--verify", value], check=True, capture_output=True 44 | ) 45 | except subprocess.CalledProcessError: 46 | self.fail(f"Git hash {value} not found in repository") 47 | 48 | return GitHash(value.lower()) 49 | 50 | 51 | GIT_HASH = GitHashParamType() 52 | 53 | 54 | class Package(Protocol): 55 | path: Path 56 | 57 | def package_name(self) -> str: ... 58 | 59 | def update_version(self, version: Version) -> None: ... 
60 | 61 | 62 | @dataclass 63 | class NpmPackage: 64 | path: Path 65 | 66 | def package_name(self) -> str: 67 | with open(self.path / "package.json", "r") as f: 68 | return json.load(f)["name"] 69 | 70 | def update_version(self, version: Version): 71 | with open(self.path / "package.json", "r+") as f: 72 | data = json.load(f) 73 | data["version"] = version 74 | f.seek(0) 75 | json.dump(data, f, indent=2) 76 | f.truncate() 77 | 78 | 79 | @dataclass 80 | class PyPiPackage: 81 | path: Path 82 | 83 | def package_name(self) -> str: 84 | with open(self.path / "pyproject.toml") as f: 85 | toml_data = tomlkit.parse(f.read()) 86 | name = toml_data.get("project", {}).get("name") 87 | if not name: 88 | raise Exception("No name in pyproject.toml project section") 89 | return str(name) 90 | 91 | def update_version(self, version: Version): 92 | # Update version in pyproject.toml 93 | with open(self.path / "pyproject.toml") as f: 94 | data = tomlkit.parse(f.read()) 95 | data["project"]["version"] = version 96 | 97 | with open(self.path / "pyproject.toml", "w") as f: 98 | f.write(tomlkit.dumps(data)) 99 | 100 | 101 | def has_changes(path: Path, git_hash: GitHash) -> bool: 102 | """Check if any files changed between current state and git hash""" 103 | try: 104 | output = subprocess.run( 105 | ["git", "diff", "--name-only", git_hash, "--", "."], 106 | cwd=path, 107 | check=True, 108 | capture_output=True, 109 | text=True, 110 | ) 111 | 112 | changed_files = [Path(f) for f in output.stdout.splitlines()] 113 | relevant_files = [f for f in changed_files if f.suffix in [".py", ".ts"]] 114 | return len(relevant_files) >= 1 115 | except subprocess.CalledProcessError: 116 | return False 117 | 118 | 119 | def gen_version() -> Version: 120 | """Generate version based on current date""" 121 | now = datetime.datetime.now() 122 | return Version(f"{now.year}.{now.month}.{now.day}") 123 | 124 | 125 | def find_changed_packages(directory: Path, git_hash: GitHash) -> Iterator[Package]: 126 | for path in directory.glob("*/package.json"): 127 | if has_changes(path.parent, git_hash): 128 | yield NpmPackage(path.parent) 129 | for path in directory.glob("*/pyproject.toml"): 130 | if has_changes(path.parent, git_hash): 131 | yield PyPiPackage(path.parent) 132 | 133 | 134 | @click.group() 135 | def cli(): 136 | pass 137 | 138 | 139 | @cli.command("update-packages") 140 | @click.option( 141 | "--directory", type=click.Path(exists=True, path_type=Path), default=Path.cwd() 142 | ) 143 | @click.argument("git_hash", type=GIT_HASH) 144 | def update_packages(directory: Path, git_hash: GitHash) -> int: 145 | # Detect package type 146 | path = directory.resolve(strict=True) 147 | version = gen_version() 148 | 149 | for package in find_changed_packages(path, git_hash): 150 | name = package.package_name() 151 | package.update_version(version) 152 | 153 | click.echo(f"{name}@{version}") 154 | 155 | return 0 156 | 157 | 158 | @cli.command("generate-notes") 159 | @click.option( 160 | "--directory", type=click.Path(exists=True, path_type=Path), default=Path.cwd() 161 | ) 162 | @click.argument("git_hash", type=GIT_HASH) 163 | def generate_notes(directory: Path, git_hash: GitHash) -> int: 164 | # Detect package type 165 | path = directory.resolve(strict=True) 166 | version = gen_version() 167 | 168 | click.echo(f"# Release : v{version}") 169 | click.echo("") 170 | click.echo("## Updated packages") 171 | for package in find_changed_packages(path, git_hash): 172 | name = package.package_name() 173 | click.echo(f"- {name}@{version}") 174 | 175 | 
return 0 176 | 177 | 178 | @cli.command("generate-version") 179 | def generate_version() -> int: 180 | # Detect package type 181 | click.echo(gen_version()) 182 | return 0 183 | 184 | 185 | @cli.command("generate-matrix") 186 | @click.option( 187 | "--directory", type=click.Path(exists=True, path_type=Path), default=Path.cwd() 188 | ) 189 | @click.option("--npm", is_flag=True, default=False) 190 | @click.option("--pypi", is_flag=True, default=False) 191 | @click.argument("git_hash", type=GIT_HASH) 192 | def generate_matrix(directory: Path, git_hash: GitHash, pypi: bool, npm: bool) -> int: 193 | # Detect package type 194 | path = directory.resolve(strict=True) 195 | version = gen_version() 196 | 197 | changes = [] 198 | for package in find_changed_packages(path, git_hash): 199 | pkg = package.path.relative_to(path) 200 | if npm and isinstance(package, NpmPackage): 201 | changes.append(str(pkg)) 202 | if pypi and isinstance(package, PyPiPackage): 203 | changes.append(str(pkg)) 204 | 205 | click.echo(json.dumps(changes)) 206 | return 0 207 | 208 | 209 | if __name__ == "__main__": 210 | sys.exit(cli()) 211 | -------------------------------------------------------------------------------- /src/aws-kb-retrieval-server/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22.12-alpine AS builder 2 | 3 | COPY src/aws-kb-retrieval-server /app 4 | COPY tsconfig.json /tsconfig.json 5 | 6 | WORKDIR /app 7 | 8 | RUN --mount=type=cache,target=/root/.npm npm install 9 | 10 | FROM node:22-alpine AS release 11 | 12 | WORKDIR /app 13 | 14 | COPY --from=builder /app/dist /app/dist 15 | COPY --from=builder /app/package.json /app/package.json 16 | COPY --from=builder /app/package-lock.json /app/package-lock.json 17 | 18 | ENV NODE_ENV=production 19 | 20 | RUN npm ci --ignore-scripts --omit-dev 21 | 22 | ENTRYPOINT ["node", "dist/index.js"] -------------------------------------------------------------------------------- /src/aws-kb-retrieval-server/index.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import { Server } from "@modelcontextprotocol/sdk/server/index.js"; 3 | import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; 4 | import { 5 | CallToolRequestSchema, 6 | ListToolsRequestSchema, 7 | Tool, 8 | } from "@modelcontextprotocol/sdk/types.js"; 9 | import { 10 | BedrockAgentRuntimeClient, 11 | RetrieveCommand, 12 | RetrieveCommandInput, 13 | } from "@aws-sdk/client-bedrock-agent-runtime"; 14 | 15 | // AWS client initialization 16 | const bedrockClient = new BedrockAgentRuntimeClient({ 17 | region: process.env.AWS_REGION, 18 | credentials: { 19 | accessKeyId: process.env.AWS_ACCESS_KEY_ID!, 20 | secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!, 21 | }, 22 | }); 23 | 24 | interface RAGSource { 25 | id: string; 26 | fileName: string; 27 | snippet: string; 28 | score: number; 29 | } 30 | 31 | async function retrieveContext( 32 | query: string, 33 | knowledgeBaseId: string, 34 | n: number = 3 35 | ): Promise<{ 36 | context: string; 37 | isRagWorking: boolean; 38 | ragSources: RAGSource[]; 39 | }> { 40 | try { 41 | if (!knowledgeBaseId) { 42 | console.error("knowledgeBaseId is not provided"); 43 | return { 44 | context: "", 45 | isRagWorking: false, 46 | ragSources: [], 47 | }; 48 | } 49 | 50 | const input: RetrieveCommandInput = { 51 | knowledgeBaseId: knowledgeBaseId, 52 | retrievalQuery: { text: query }, 53 | retrievalConfiguration: { 54 | 
vectorSearchConfiguration: { numberOfResults: n }, 55 | }, 56 | }; 57 | 58 | const command = new RetrieveCommand(input); 59 | const response = await bedrockClient.send(command); 60 | const rawResults = response?.retrievalResults || []; 61 | const ragSources: RAGSource[] = rawResults 62 | .filter((res) => res?.content?.text) 63 | .map((result, index) => { 64 | const uri = result?.location?.s3Location?.uri || ""; 65 | const fileName = uri.split("/").pop() || `Source-${index}.txt`; 66 | return { 67 | id: (result.metadata?.["x-amz-bedrock-kb-chunk-id"] as string) || `chunk-${index}`, 68 | fileName: fileName.replace(/_/g, " ").replace(".txt", ""), 69 | snippet: result.content?.text || "", 70 | score: (result.score as number) || 0, 71 | }; 72 | }) 73 | .slice(0, 3); 74 | 75 | const context = rawResults 76 | .filter((res): res is { content: { text: string } } => res?.content?.text !== undefined) 77 | .map(res => res.content.text) 78 | .join("\n\n"); 79 | 80 | return { 81 | context, 82 | isRagWorking: true, 83 | ragSources, 84 | }; 85 | } catch (error) { 86 | console.error("RAG Error:", error); 87 | return { context: "", isRagWorking: false, ragSources: [] }; 88 | } 89 | } 90 | 91 | // Define the retrieval tool 92 | const RETRIEVAL_TOOL: Tool = { 93 | name: "retrieve_from_aws_kb", 94 | description: "Performs retrieval from the AWS Knowledge Base using the provided query and Knowledge Base ID.", 95 | inputSchema: { 96 | type: "object", 97 | properties: { 98 | query: { type: "string", description: "The query to perform retrieval on" }, 99 | knowledgeBaseId: { type: "string", description: "The ID of the AWS Knowledge Base" }, 100 | n: { type: "number", default: 3, description: "Number of results to retrieve" }, 101 | }, 102 | required: ["query", "knowledgeBaseId"], 103 | }, 104 | }; 105 | 106 | // Server setup 107 | const server = new Server( 108 | { 109 | name: "aws-kb-retrieval-server", 110 | version: "0.2.0", 111 | }, 112 | { 113 | capabilities: { 114 | tools: {}, 115 | }, 116 | }, 117 | ); 118 | 119 | // Request handlers 120 | server.setRequestHandler(ListToolsRequestSchema, async () => ({ 121 | tools: [RETRIEVAL_TOOL], 122 | })); 123 | 124 | server.setRequestHandler(CallToolRequestSchema, async (request) => { 125 | const { name, arguments: args } = request.params; 126 | 127 | if (name === "retrieve_from_aws_kb") { 128 | const { query, knowledgeBaseId, n = 3 } = args as Record<string, any>; 129 | try { 130 | const result = await retrieveContext(query, knowledgeBaseId, n); 131 | if (result.isRagWorking) { 132 | return { 133 | content: [ 134 | { type: "text", text: `Context: ${result.context}` }, 135 | { type: "text", text: `RAG Sources: ${JSON.stringify(result.ragSources)}` }, 136 | ], 137 | }; 138 | } else { 139 | return { 140 | content: [{ type: "text", text: "Retrieval failed or returned no results."
}], 141 | }; 142 | } 143 | } catch (error) { 144 | return { 145 | content: [{ type: "text", text: `Error occurred: ${error}` }], 146 | }; 147 | } 148 | } else { 149 | return { 150 | content: [{ type: "text", text: `Unknown tool: ${name}` }], 151 | isError: true, 152 | }; 153 | } 154 | }); 155 | 156 | // Server startup 157 | async function runServer() { 158 | const transport = new StdioServerTransport(); 159 | await server.connect(transport); 160 | console.error("AWS KB Retrieval Server running on stdio"); 161 | } 162 | 163 | runServer().catch((error) => { 164 | console.error("Fatal error running server:", error); 165 | process.exit(1); 166 | }); 167 | -------------------------------------------------------------------------------- /src/aws-kb-retrieval-server/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/server-aws-kb-retrieval", 3 | "version": "0.6.2", 4 | "description": "MCP server for AWS Knowledge Base retrieval using Bedrock Agent Runtime", 5 | "license": "MIT", 6 | "author": "Anthropic, PBC (https://anthropic.com)", 7 | "homepage": "https://modelcontextprotocol.io", 8 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 9 | "type": "module", 10 | "bin": { 11 | "mcp-server-aws-kb-retrieval": "dist/index.js" 12 | }, 13 | "files": [ 14 | "dist" 15 | ], 16 | "scripts": { 17 | "build": "tsc && shx chmod +x dist/*.js", 18 | "prepare": "npm run build", 19 | "watch": "tsc --watch" 20 | }, 21 | "dependencies": { 22 | "@modelcontextprotocol/sdk": "0.5.0", 23 | "@aws-sdk/client-bedrock-agent-runtime": "^3.0.0" 24 | }, 25 | "devDependencies": { 26 | "@types/node": "^22", 27 | "shx": "^0.3.4", 28 | "typescript": "^5.6.2" 29 | } 30 | } -------------------------------------------------------------------------------- /src/aws-kb-retrieval-server/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "rootDir": ".", 6 | "composite": true, 7 | "incremental": true, 8 | "tsBuildInfoFile": "./dist/.tsbuildinfo" 9 | }, 10 | "include": [ 11 | "./**/*.ts" 12 | ], 13 | "exclude": [ 14 | "node_modules", 15 | "dist" 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /src/brave-search/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22.12-alpine AS builder 2 | 3 | # Must be entire project because `prepare` script is run during `npm install` and requires all files. 4 | COPY src/brave-search /app 5 | COPY tsconfig.json /tsconfig.json 6 | 7 | WORKDIR /app 8 | 9 | RUN --mount=type=cache,target=/root/.npm npm install 10 | 11 | FROM node:22-alpine AS release 12 | 13 | WORKDIR /app 14 | 15 | COPY --from=builder /app/dist /app/dist 16 | COPY --from=builder /app/package.json /app/package.json 17 | COPY --from=builder /app/package-lock.json /app/package-lock.json 18 | 19 | ENV NODE_ENV=production 20 | 21 | RUN npm ci --ignore-scripts --omit-dev 22 | 23 | ENTRYPOINT ["node", "dist/index.js"] -------------------------------------------------------------------------------- /src/brave-search/README.md: -------------------------------------------------------------------------------- 1 | # Brave Search MCP Server 2 | 3 | An MCP server implementation that integrates the Brave Search API, providing both web and local search capabilities. 
4 | 5 | ## Features 6 | 7 | - **Web Search**: General queries, news, articles, with pagination and freshness controls 8 | - **Local Search**: Find businesses, restaurants, and services with detailed information 9 | - **Flexible Filtering**: Control result types, safety levels, and content freshness 10 | - **Smart Fallbacks**: Local search automatically falls back to web when no results are found 11 | 12 | ## Tools 13 | 14 | - **brave_web_search** 15 | 16 | - Execute web searches with pagination and filtering 17 | - Inputs: 18 | - `query` (string): Search terms 19 | - `count` (number, optional): Results per page (max 20) 20 | - `offset` (number, optional): Pagination offset (max 9) 21 | 22 | - **brave_local_search** 23 | - Search for local businesses and services 24 | - Inputs: 25 | - `query` (string): Local search terms 26 | - `count` (number, optional): Number of results (max 20) 27 | - Automatically falls back to web search if no local results found 28 | 29 | ## Configuration 30 | 31 | ### Getting an API Key 32 | 33 | 1. Sign up for a [Brave Search API account](https://brave.com/search/api/) 34 | 2. Choose a plan (Free tier available with 2,000 queries/month) 35 | 3. Generate your API key [from the developer dashboard](https://api-dashboard.search.brave.com/app/keys) 36 | 37 | ### Usage with Claude Desktop 38 | 39 | Add this to your `claude_desktop_config.json`: 40 | 41 | ### Docker 42 | 43 | ```json 44 | { 45 | "mcpServers": { 46 | "brave-search": { 47 | "command": "docker", 48 | "args": [ 49 | "run", 50 | "-i", 51 | "--rm", 52 | "-e", 53 | "BRAVE_API_KEY", 54 | "mcp/brave-search" 55 | ], 56 | "env": { 57 | "BRAVE_API_KEY": "YOUR_API_KEY_HERE" 58 | } 59 | } 60 | } 61 | } 62 | ``` 63 | 64 | ### NPX 65 | 66 | ```json 67 | { 68 | "mcpServers": { 69 | "brave-search": { 70 | "command": "npx", 71 | "args": [ 72 | "-y", 73 | "@modelcontextprotocol/server-brave-search" 74 | ], 75 | "env": { 76 | "BRAVE_API_KEY": "YOUR_API_KEY_HERE" 77 | } 78 | } 79 | } 80 | } 81 | ``` 82 | 83 | ### Usage with VS Code 84 | 85 | For quick installation, use the one-click installation buttons below... 
86 | 87 | [![Install with NPX in VS Code](https://img.shields.io/badge/VS_Code-NPM-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=brave&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22apiKey%22%7D%5D&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-brave-search%22%5D%2C%22env%22%3A%7B%22BRAVE_API_KEY%22%3A%22%24%7Binput%3Abrave_api_key%7D%22%7D%7D) [![Install with NPX in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-NPM-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=brave&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22apiKey%22%7D%5D&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-brave-search%22%5D%2C%22env%22%3A%7B%22BRAVE_API_KEY%22%3A%22%24%7Binput%3Abrave_api_key%7D%22%7D%7D&quality=insiders) 88 | 89 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=brave&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22apiKey%22%7D%5D&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22-e%22%2C%22BRAVE_API_KEY%22%2C%22mcp%2Fbrave-search%22%5D%2C%22env%22%3A%7B%22BRAVE_API_KEY%22%3A%22%24%7Binput%3Abrave_api_key%7D%22%7D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=brave&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22apiKey%22%7D%5D&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22-e%22%2C%22BRAVE_API_KEY%22%2C%22mcp%2Fbrave-search%22%5D%2C%22env%22%3A%7B%22BRAVE_API_KEY%22%3A%22%24%7Binput%3Abrave_api_key%7D%22%7D%7D&quality=insiders) 90 | 91 | For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open User Settings (JSON)`. 92 | 93 | Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 94 | 95 | > Note that the `mcp` key is not needed in the `.vscode/mcp.json` file. 
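For example, with the NPX setup the equivalent `.vscode/mcp.json` would contain the same `inputs` and `servers` entries shown below, just without the outer `mcp` wrapper (a sketch):

```json
{
  "inputs": [
    {
      "type": "promptString",
      "id": "brave_api_key",
      "description": "Brave Search API Key",
      "password": true
    }
  ],
  "servers": {
    "brave-search": {
      "command": "npx",
      "args": ["-y", "@modelcontextprotocol/server-brave-search"],
      "env": {
        "BRAVE_API_KEY": "${input:brave_api_key}"
      }
    }
  }
}
```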
96 | 97 | #### Docker 98 | 99 | ```json 100 | { 101 | "mcp": { 102 | "inputs": [ 103 | { 104 | "type": "promptString", 105 | "id": "brave_api_key", 106 | "description": "Brave Search API Key", 107 | "password": true 108 | } 109 | ], 110 | "servers": { 111 | "brave-search": { 112 | "command": "docker", 113 | "args": [ 114 | "run", 115 | "-i", 116 | "--rm", 117 | "-e", 118 | "BRAVE_API_KEY", 119 | "mcp/brave-search" 120 | ], 121 | "env": { 122 | "BRAVE_API_KEY": "${input:brave_api_key}" 123 | } 124 | } 125 | } 126 | } 127 | } 128 | ``` 129 | 130 | #### NPX 131 | 132 | ```json 133 | { 134 | "mcp": { 135 | "inputs": [ 136 | { 137 | "type": "promptString", 138 | "id": "brave_api_key", 139 | "description": "Brave Search API Key", 140 | "password": true 141 | } 142 | ], 143 | "servers": { 144 | "brave-search": { 145 | "command": "npx", 146 | "args": ["-y", "@modelcontextprotocol/server-brave-search"], 147 | "env": { 148 | "BRAVE_API_KEY": "${input:brave_api_key}" 149 | } 150 | } 151 | } 152 | } 153 | } 154 | ``` 155 | 156 | ## Build 157 | 158 | Docker build: 159 | 160 | ```bash 161 | docker build -t mcp/brave-search:latest -f src/brave-search/Dockerfile . 162 | ``` 163 | 164 | ## License 165 | 166 | This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository. 167 | -------------------------------------------------------------------------------- /src/brave-search/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/server-brave-search", 3 | "version": "0.6.2", 4 | "description": "MCP server for Brave Search API integration", 5 | "license": "MIT", 6 | "author": "Anthropic, PBC (https://anthropic.com)", 7 | "homepage": "https://modelcontextprotocol.io", 8 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 9 | "type": "module", 10 | "bin": { 11 | "mcp-server-brave-search": "dist/index.js" 12 | }, 13 | "files": [ 14 | "dist" 15 | ], 16 | "scripts": { 17 | "build": "tsc && shx chmod +x dist/*.js", 18 | "prepare": "npm run build", 19 | "watch": "tsc --watch" 20 | }, 21 | "dependencies": { 22 | "@modelcontextprotocol/sdk": "1.0.1" 23 | }, 24 | "devDependencies": { 25 | "@types/node": "^22", 26 | "shx": "^0.3.4", 27 | "typescript": "^5.6.2" 28 | } 29 | } -------------------------------------------------------------------------------- /src/brave-search/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "rootDir": "." 
6 | }, 7 | "include": [ 8 | "./**/*.ts" 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /src/everart/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22.12-alpine AS builder 2 | 3 | COPY src/everart /app 4 | COPY tsconfig.json /tsconfig.json 5 | 6 | WORKDIR /app 7 | 8 | RUN --mount=type=cache,target=/root/.npm npm install 9 | 10 | FROM node:22-alpine AS release 11 | 12 | WORKDIR /app 13 | 14 | COPY --from=builder /app/dist /app/dist 15 | COPY --from=builder /app/package.json /app/package.json 16 | COPY --from=builder /app/package-lock.json /app/package-lock.json 17 | 18 | ENV NODE_ENV=production 19 | 20 | RUN npm ci --ignore-scripts --omit-dev 21 | 22 | ENTRYPOINT ["node", "dist/index.js"] 23 | 24 | CMD ["node", "dist/index.js"] -------------------------------------------------------------------------------- /src/everart/README.md: -------------------------------------------------------------------------------- 1 | # EverArt MCP Server 2 | 3 | Image generation server for Claude Desktop using EverArt's API. 4 | 5 | ## Install 6 | 7 | ```bash 8 | npm install 9 | export EVERART_API_KEY=your_key_here 10 | ``` 11 | 12 | ## Config 13 | 14 | ### Usage with Claude Desktop 15 | 16 | Add to Claude Desktop config: 17 | 18 | #### Docker 19 | 20 | ```json 21 | { 22 | "mcpServers": { 23 | "everart": { 24 | "command": "docker", 25 | "args": ["run", "-i", "--rm", "-e", "EVERART_API_KEY", "mcp/everart"], 26 | "env": { 27 | "EVERART_API_KEY": "your_key_here" 28 | } 29 | } 30 | } 31 | } 32 | ``` 33 | 34 | #### NPX 35 | 36 | ```json 37 | { 38 | "mcpServers": { 39 | "everart": { 40 | "command": "npx", 41 | "args": ["-y", "@modelcontextprotocol/server-everart"], 42 | "env": { 43 | "EVERART_API_KEY": "your_key_here" 44 | } 45 | } 46 | } 47 | } 48 | ``` 49 | 50 | ### Usage with VS Code 51 | 52 | For quick installation, use the one-click installation buttons below... 
53 | 54 | [![Install with NPX in VS Code](https://img.shields.io/badge/VS_Code-NPM-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=everart&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22everart_api_key%22%2C%22description%22%3A%22EverArt%20API%20Key%22%2C%22password%22%3Atrue%7D%5D&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-everart%22%5D%2C%22env%22%3A%7B%22EVERART_API_KEY%22%3A%22%24%7Binput%3Aeverart_api_key%7D%22%7D%7D) [![Install with NPX in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-NPM-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=everart&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22everart_api_key%22%2C%22description%22%3A%22EverArt%20API%20Key%22%2C%22password%22%3Atrue%7D%5D&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-everart%22%5D%2C%22env%22%3A%7B%22EVERART_API_KEY%22%3A%22%24%7Binput%3Aeverart_api_key%7D%22%7D%7D&quality=insiders) 55 | 56 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=everart&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22everart_api_key%22%2C%22description%22%3A%22EverArt%20API%20Key%22%2C%22password%22%3Atrue%7D%5D&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Feverart%22%5D%2C%22env%22%3A%7B%22EVERART_API_KEY%22%3A%22%24%7Binput%3Aeverart_api_key%7D%22%7D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=everart&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22everart_api_key%22%2C%22description%22%3A%22EverArt%20API%20Key%22%2C%22password%22%3Atrue%7D%5D&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Feverart%22%5D%2C%22env%22%3A%7B%22EVERART_API_KEY%22%3A%22%24%7Binput%3Aeverart_api_key%7D%22%7D%7D&quality=insiders) 57 | 58 | For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open Settings (JSON)`. 59 | 60 | Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 61 | 62 | > Note that the `mcp` key is needed when using the `mcp.json` file. 
63 | 64 | #### Docker 65 | 66 | ```json 67 | { 68 | "mcp": { 69 | "inputs": [ 70 | { 71 | "type": "promptString", 72 | "id": "everart_api_key", 73 | "description": "EverArt API Key", 74 | "password": true 75 | } 76 | ], 77 | "servers": { 78 | "everart": { 79 | "command": "docker", 80 | "args": ["run", "-i", "--rm", "-e", "EVERART_API_KEY", "mcp/everart"], 81 | "env": { 82 | "EVERART_API_KEY": "${input:everart_api_key}" 83 | } 84 | } 85 | } 86 | } 87 | } 88 | ``` 89 | 90 | #### NPX 91 | 92 | ```json 93 | { 94 | "mcp": { 95 | "inputs": [ 96 | { 97 | "type": "promptString", 98 | "id": "everart_api_key", 99 | "description": "EverArt API Key", 100 | "password": true 101 | } 102 | ], 103 | "servers": { 104 | "everart": { 105 | "command": "npx", 106 | "args": ["-y", "@modelcontextprotocol/server-everart"], 107 | "env": { 108 | "EVERART_API_KEY": "${input:everart_api_key}" 109 | } 110 | } 111 | } 112 | } 113 | } 114 | ``` 115 | 116 | ## Tools 117 | 118 | ### generate_image 119 | 120 | Generates images with multiple model options. Opens result in browser and returns URL. 121 | 122 | Parameters: 123 | 124 | ```typescript 125 | { 126 | prompt: string, // Image description 127 | model?: string, // Model ID (default: "207910310772879360") 128 | image_count?: number // Number of images (default: 1) 129 | } 130 | ``` 131 | 132 | Models: 133 | 134 | - 5000: FLUX1.1 (standard) 135 | - 9000: FLUX1.1-ultra 136 | - 6000: SD3.5 137 | - 7000: Recraft-Real 138 | - 8000: Recraft-Vector 139 | 140 | All images generated at 1024x1024. 141 | 142 | Sample usage: 143 | 144 | ```javascript 145 | const result = await client.callTool({ 146 | name: "generate_image", 147 | arguments: { 148 | prompt: "A cat sitting elegantly", 149 | model: "7000", 150 | image_count: 1 151 | } 152 | }); 153 | ``` 154 | 155 | Response format: 156 | 157 | ``` 158 | Image generated successfully! 159 | The image has been opened in your default browser. 160 | 161 | Generation details: 162 | - Model: 7000 163 | - Prompt: "A cat sitting elegantly" 164 | - Image URL: https://storage.googleapis.com/... 165 | 166 | You can also click the URL above to view the image again. 167 | ``` 168 | 169 | ## Building w/ Docker 170 | 171 | ```sh 172 | docker build -t mcp/everart -f src/everart/Dockerfile . 
173 | ``` 174 | -------------------------------------------------------------------------------- /src/everart/index.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import EverArt from "everart"; 3 | import { Server } from "@modelcontextprotocol/sdk/server/index.js"; 4 | import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; 5 | import { 6 | CallToolRequestSchema, 7 | ListToolsRequestSchema, 8 | ListResourcesRequestSchema, 9 | ReadResourceRequestSchema, 10 | } from "@modelcontextprotocol/sdk/types.js"; 11 | import fetch from "node-fetch"; 12 | import open from "open"; 13 | 14 | const server = new Server( 15 | { 16 | name: "example-servers/everart", 17 | version: "0.2.0", 18 | }, 19 | { 20 | capabilities: { 21 | tools: {}, 22 | resources: {}, // Required for image resources 23 | }, 24 | }, 25 | ); 26 | 27 | if (!process.env.EVERART_API_KEY) { 28 | console.error("EVERART_API_KEY environment variable is not set"); 29 | process.exit(1); 30 | } 31 | 32 | const client = new EverArt.default(process.env.EVERART_API_KEY); 33 | 34 | server.setRequestHandler(ListToolsRequestSchema, async () => ({ 35 | tools: [ 36 | { 37 | name: "generate_image", 38 | description: 39 | "Generate images using EverArt Models and returns a clickable link to view the generated image. " + 40 | "The tool will return a URL that can be clicked to view the image in a browser. " + 41 | "Available models:\n" + 42 | "- 5000:FLUX1.1: Standard quality\n" + 43 | "- 9000:FLUX1.1-ultra: Ultra high quality\n" + 44 | "- 6000:SD3.5: Stable Diffusion 3.5\n" + 45 | "- 7000:Recraft-Real: Photorealistic style\n" + 46 | "- 8000:Recraft-Vector: Vector art style\n" + 47 | "\nThe response will contain a direct link to view the generated image.", 48 | inputSchema: { 49 | type: "object", 50 | properties: { 51 | prompt: { 52 | type: "string", 53 | description: "Text description of desired image", 54 | }, 55 | model: { 56 | type: "string", 57 | description: 58 | "Model ID (5000:FLUX1.1, 9000:FLUX1.1-ultra, 6000:SD3.5, 7000:Recraft-Real, 8000:Recraft-Vector)", 59 | default: "5000", 60 | }, 61 | image_count: { 62 | type: "number", 63 | description: "Number of images to generate", 64 | default: 1, 65 | }, 66 | }, 67 | required: ["prompt"], 68 | }, 69 | }, 70 | ], 71 | })); 72 | 73 | server.setRequestHandler(ListResourcesRequestSchema, async () => { 74 | return { 75 | resources: [ 76 | { 77 | uri: "everart://images", 78 | mimeType: "image/png", 79 | name: "Generated Images", 80 | }, 81 | ], 82 | }; 83 | }); 84 | 85 | server.setRequestHandler(ReadResourceRequestSchema, async (request) => { 86 | if (request.params.uri === "everart://images") { 87 | return { 88 | contents: [ 89 | { 90 | uri: "everart://images", 91 | mimeType: "image/png", 92 | blob: "", // Empty since this is just for listing 93 | }, 94 | ], 95 | }; 96 | } 97 | throw new Error("Resource not found"); 98 | }); 99 | 100 | server.setRequestHandler(CallToolRequestSchema, async (request) => { 101 | if (request.params.name === "generate_image") { 102 | try { 103 | const { 104 | prompt, 105 | model = "207910310772879360", 106 | image_count = 1, 107 | } = request.params.arguments as any; 108 | 109 | // Use correct EverArt API method 110 | const generation = await client.v1.generations.create( 111 | model, 112 | prompt, 113 | "txt2img", 114 | { 115 | imageCount: image_count, 116 | height: 1024, 117 | width: 1024, 118 | }, 119 | ); 120 | 121 | // Wait for generation to complete 122 | const completedGen = await 
client.v1.generations.fetchWithPolling( 123 | generation[0].id, 124 | ); 125 | 126 | const imgUrl = completedGen.image_url; 127 | if (!imgUrl) throw new Error("No image URL"); 128 | 129 | // Automatically open the image URL in the default browser 130 | await open(imgUrl); 131 | 132 | // Return a formatted message with the clickable link 133 | return { 134 | content: [ 135 | { 136 | type: "text", 137 | text: `Image generated successfully!\nThe image has been opened in your default browser.\n\nGeneration details:\n- Model: ${model}\n- Prompt: "${prompt}"\n- Image URL: ${imgUrl}\n\nYou can also click the URL above to view the image again.`, 138 | }, 139 | ], 140 | }; 141 | } catch (error: unknown) { 142 | console.error("Detailed error:", error); 143 | const errorMessage = 144 | error instanceof Error ? error.message : "Unknown error"; 145 | return { 146 | content: [{ type: "text", text: `Error: ${errorMessage}` }], 147 | isError: true, 148 | }; 149 | } 150 | } 151 | throw new Error(`Unknown tool: ${request.params.name}`); 152 | }); 153 | 154 | async function runServer() { 155 | const transport = new StdioServerTransport(); 156 | await server.connect(transport); 157 | console.error("EverArt MCP Server running on stdio"); 158 | } 159 | 160 | runServer().catch(console.error); 161 | -------------------------------------------------------------------------------- /src/everart/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/server-everart", 3 | "version": "0.6.2", 4 | "description": "MCP server for EverArt API integration", 5 | "license": "MIT", 6 | "author": "Anthropic, PBC (https://anthropic.com)", 7 | "homepage": "https://modelcontextprotocol.io", 8 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 9 | "type": "module", 10 | "bin": { 11 | "mcp-server-everart": "dist/index.js" 12 | }, 13 | "files": [ 14 | "dist" 15 | ], 16 | "scripts": { 17 | "build": "tsc && shx chmod +x dist/*.js", 18 | "prepare": "npm run build", 19 | "watch": "tsc --watch" 20 | }, 21 | "dependencies": { 22 | "@modelcontextprotocol/sdk": "0.5.0", 23 | "everart": "^1.0.0", 24 | "node-fetch": "^3.3.2", 25 | "open": "^9.1.0" 26 | }, 27 | "devDependencies": { 28 | "@types/node": "^22", 29 | "shx": "^0.3.4", 30 | "typescript": "^5.3.3" 31 | } 32 | } -------------------------------------------------------------------------------- /src/everart/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "rootDir": "." 
6 | }, 7 | "include": [ 8 | "./**/*.ts" 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /src/everything/CLAUDE.md: -------------------------------------------------------------------------------- 1 | # MCP "Everything" Server - Development Guidelines 2 | 3 | ## Build, Test & Run Commands 4 | - Build: `npm run build` - Compiles TypeScript to JavaScript 5 | - Watch mode: `npm run watch` - Watches for changes and rebuilds automatically 6 | - Run server: `npm run start` - Starts the MCP server using stdio transport 7 | - Run SSE server: `npm run start:sse` - Starts the MCP server with SSE transport 8 | - Prepare release: `npm run prepare` - Builds the project for publishing 9 | 10 | ## Code Style Guidelines 11 | - Use ES modules with `.js` extension in import paths 12 | - Strictly type all functions and variables with TypeScript 13 | - Follow zod schema patterns for tool input validation 14 | - Prefer async/await over callbacks and Promise chains 15 | - Place all imports at top of file, grouped by external then internal 16 | - Use descriptive variable names that clearly indicate purpose 17 | - Implement proper cleanup for timers and resources in server shutdown 18 | - Follow camelCase for variables/functions, PascalCase for types/classes, UPPER_CASE for constants 19 | - Handle errors with try/catch blocks and provide clear error messages 20 | - Use consistent indentation (2 spaces) and trailing commas in multi-line objects -------------------------------------------------------------------------------- /src/everything/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22.12-alpine AS builder 2 | 3 | COPY src/everything /app 4 | COPY tsconfig.json /tsconfig.json 5 | 6 | WORKDIR /app 7 | 8 | RUN --mount=type=cache,target=/root/.npm npm install 9 | 10 | FROM node:22-alpine AS release 11 | 12 | WORKDIR /app 13 | 14 | COPY --from=builder /app/dist /app/dist 15 | COPY --from=builder /app/package.json /app/package.json 16 | COPY --from=builder /app/package-lock.json /app/package-lock.json 17 | 18 | ENV NODE_ENV=production 19 | 20 | RUN npm ci --ignore-scripts --omit-dev 21 | 22 | CMD ["node", "dist/index.js"] -------------------------------------------------------------------------------- /src/everything/README.md: -------------------------------------------------------------------------------- 1 | # Everything MCP Server 2 | 3 | This MCP server attempts to exercise all the features of the MCP protocol. It is not intended to be a useful server, but rather a test server for builders of MCP clients. It implements prompts, tools, resources, sampling, and more to showcase MCP capabilities. 4 | 5 | ## Components 6 | 7 | ### Tools 8 | 9 | 1. `echo` 10 | - Simple tool to echo back input messages 11 | - Input: 12 | - `message` (string): Message to echo back 13 | - Returns: Text content with echoed message 14 | 15 | 2. `add` 16 | - Adds two numbers together 17 | - Inputs: 18 | - `a` (number): First number 19 | - `b` (number): Second number 20 | - Returns: Text result of the addition 21 | 22 | 3. `longRunningOperation` 23 | - Demonstrates progress notifications for long operations 24 | - Inputs: 25 | - `duration` (number, default: 10): Duration in seconds 26 | - `steps` (number, default: 5): Number of progress steps 27 | - Returns: Completion message with duration and steps 28 | - Sends progress notifications during execution 29 | 30 | 4. 
`sampleLLM` 31 | - Demonstrates LLM sampling capability using MCP sampling feature 32 | - Inputs: 33 | - `prompt` (string): The prompt to send to the LLM 34 | - `maxTokens` (number, default: 100): Maximum tokens to generate 35 | - Returns: Generated LLM response 36 | 37 | 5. `getTinyImage` 38 | - Returns a small test image 39 | - No inputs required 40 | - Returns: Base64 encoded PNG image data 41 | 42 | 6. `printEnv` 43 | - Prints all environment variables 44 | - Useful for debugging MCP server configuration 45 | - No inputs required 46 | - Returns: JSON string of all environment variables 47 | 48 | 7. `annotatedMessage` 49 | - Demonstrates how annotations can be used to provide metadata about content 50 | - Inputs: 51 | - `messageType` (enum: "error" | "success" | "debug"): Type of message to demonstrate different annotation patterns 52 | - `includeImage` (boolean, default: false): Whether to include an example image 53 | - Returns: Content with varying annotations: 54 | - Error messages: High priority (1.0), visible to both user and assistant 55 | - Success messages: Medium priority (0.7), user-focused 56 | - Debug messages: Low priority (0.3), assistant-focused 57 | - Optional image: Medium priority (0.5), user-focused 58 | - Example annotations: 59 | ```json 60 | { 61 | "priority": 1.0, 62 | "audience": ["user", "assistant"] 63 | } 64 | ``` 65 | 66 | 8. `getResourceReference` 67 | - Returns a resource reference that can be used by MCP clients 68 | - Inputs: 69 | - `resourceId` (number, 1-100): ID of the resource to reference 70 | - Returns: A resource reference with: 71 | - Text introduction 72 | - Embedded resource with `type: "resource"` 73 | - Text instruction for using the resource URI 74 | 75 | ### Resources 76 | 77 | The server provides 100 test resources in two formats: 78 | - Even numbered resources: 79 | - Plaintext format 80 | - URI pattern: `test://static/resource/{even_number}` 81 | - Content: Simple text description 82 | 83 | - Odd numbered resources: 84 | - Binary blob format 85 | - URI pattern: `test://static/resource/{odd_number}` 86 | - Content: Base64 encoded binary data 87 | 88 | Resource features: 89 | - Supports pagination (10 items per page) 90 | - Allows subscribing to resource updates 91 | - Demonstrates resource templates 92 | - Auto-updates subscribed resources every 5 seconds 93 | 94 | ### Prompts 95 | 96 | 1. `simple_prompt` 97 | - Basic prompt without arguments 98 | - Returns: Single message exchange 99 | 100 | 2. `complex_prompt` 101 | - Advanced prompt demonstrating argument handling 102 | - Required arguments: 103 | - `temperature` (number): Temperature setting 104 | - Optional arguments: 105 | - `style` (string): Output style preference 106 | - Returns: Multi-turn conversation with images 107 | 108 | 3. 
`resource_prompt` 109 | - Demonstrates embedding resource references in prompts 110 | - Required arguments: 111 | - `resourceId` (number): ID of the resource to embed (1-100) 112 | - Returns: Multi-turn conversation with an embedded resource reference 113 | - Shows how to include resources directly in prompt messages 114 | 115 | ### Logging 116 | 117 | The server sends random-leveled log messages every 15 seconds, e.g.: 118 | 119 | ```json 120 | { 121 | "method": "notifications/message", 122 | "params": { 123 | "level": "info", 124 | "data": "Info-level message" 125 | } 126 | } 127 | ``` 128 | 129 | ## Usage with Claude Desktop 130 | 131 | Add to your `claude_desktop_config.json`: 132 | 133 | ```json 134 | { 135 | "mcpServers": { 136 | "everything": { 137 | "command": "npx", 138 | "args": [ 139 | "-y", 140 | "@modelcontextprotocol/server-everything" 141 | ] 142 | } 143 | } 144 | } 145 | ``` 146 | 147 | ## Usage with VS Code 148 | 149 | For quick installation, use of of the one-click install buttons below... 150 | 151 | [![Install with NPX in VS Code](https://img.shields.io/badge/VS_Code-NPM-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=everything&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-everything%22%5D%7D) [![Install with NPX in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-NPM-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=everything&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-everything%22%5D%7D&quality=insiders) 152 | 153 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=everything&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Feverything%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=everything&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Feverything%22%5D%7D&quality=insiders) 154 | 155 | For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open User Settings (JSON)`. 156 | 157 | Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 158 | 159 | > Note that the `mcp` key is not needed in the `.vscode/mcp.json` file. 
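For example, a workspace `.vscode/mcp.json` for this server would contain just the `servers` block (a sketch of the same NPX configuration shown below, without the wrapping `mcp` key):

```json
{
  "servers": {
    "everything": {
      "command": "npx",
      "args": ["-y", "@modelcontextprotocol/server-everything"]
    }
  }
}
```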
160 | 161 | #### NPX 162 | 163 | ```json 164 | { 165 | "mcp": { 166 | "servers": { 167 | "everything": { 168 | "command": "npx", 169 | "args": ["-y", "@modelcontextprotocol/server-everything"] 170 | } 171 | } 172 | } 173 | } 174 | ``` 175 | -------------------------------------------------------------------------------- /src/everything/index.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; 4 | import { createServer } from "./everything.js"; 5 | 6 | async function main() { 7 | const transport = new StdioServerTransport(); 8 | const { server, cleanup } = createServer(); 9 | 10 | await server.connect(transport); 11 | 12 | // Cleanup on exit 13 | process.on("SIGINT", async () => { 14 | await cleanup(); 15 | await server.close(); 16 | process.exit(0); 17 | }); 18 | } 19 | 20 | main().catch((error) => { 21 | console.error("Server error:", error); 22 | process.exit(1); 23 | }); 24 | -------------------------------------------------------------------------------- /src/everything/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/server-everything", 3 | "version": "0.6.2", 4 | "description": "MCP server that exercises all the features of the MCP protocol", 5 | "license": "MIT", 6 | "author": "Anthropic, PBC (https://anthropic.com)", 7 | "homepage": "https://modelcontextprotocol.io", 8 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 9 | "type": "module", 10 | "bin": { 11 | "mcp-server-everything": "dist/index.js" 12 | }, 13 | "files": [ 14 | "dist" 15 | ], 16 | "scripts": { 17 | "build": "tsc && shx chmod +x dist/*.js", 18 | "prepare": "npm run build", 19 | "watch": "tsc --watch", 20 | "start": "node dist/index.js", 21 | "start:sse": "node dist/sse.js" 22 | }, 23 | "dependencies": { 24 | "@modelcontextprotocol/sdk": "^1.9.0", 25 | "express": "^4.21.1", 26 | "zod": "^3.23.8", 27 | "zod-to-json-schema": "^3.23.5" 28 | }, 29 | "devDependencies": { 30 | "@types/express": "^5.0.0", 31 | "shx": "^0.3.4", 32 | "typescript": "^5.6.2" 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/everything/sse.ts: -------------------------------------------------------------------------------- 1 | import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js"; 2 | import express from "express"; 3 | import { createServer } from "./everything.js"; 4 | 5 | const app = express(); 6 | 7 | const { server, cleanup } = createServer(); 8 | 9 | let transport: SSEServerTransport; 10 | 11 | app.get("/sse", async (req, res) => { 12 | console.log("Received connection"); 13 | transport = new SSEServerTransport("/message", res); 14 | await server.connect(transport); 15 | 16 | server.onclose = async () => { 17 | await cleanup(); 18 | await server.close(); 19 | process.exit(0); 20 | }; 21 | }); 22 | 23 | app.post("/message", async (req, res) => { 24 | console.log("Received message"); 25 | 26 | await transport.handlePostMessage(req, res); 27 | }); 28 | 29 | const PORT = process.env.PORT || 3001; 30 | app.listen(PORT, () => { 31 | console.log(`Server is running on port ${PORT}`); 32 | }); 33 | -------------------------------------------------------------------------------- /src/everything/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": 
"../../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "rootDir": "." 6 | }, 7 | "include": [ 8 | "./**/*.ts" 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /src/fetch/.python-version: -------------------------------------------------------------------------------- 1 | 3.11 2 | -------------------------------------------------------------------------------- /src/fetch/Dockerfile: -------------------------------------------------------------------------------- 1 | # Use a Python image with uv pre-installed 2 | FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv 3 | 4 | # Install the project into `/app` 5 | WORKDIR /app 6 | 7 | # Enable bytecode compilation 8 | ENV UV_COMPILE_BYTECODE=1 9 | 10 | # Copy from the cache instead of linking since it's a mounted volume 11 | ENV UV_LINK_MODE=copy 12 | 13 | # Install the project's dependencies using the lockfile and settings 14 | RUN --mount=type=cache,target=/root/.cache/uv \ 15 | --mount=type=bind,source=uv.lock,target=uv.lock \ 16 | --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ 17 | uv sync --frozen --no-install-project --no-dev --no-editable 18 | 19 | # Then, add the rest of the project source code and install it 20 | # Installing separately from its dependencies allows optimal layer caching 21 | ADD . /app 22 | RUN --mount=type=cache,target=/root/.cache/uv \ 23 | uv sync --frozen --no-dev --no-editable 24 | 25 | FROM python:3.12-slim-bookworm 26 | 27 | WORKDIR /app 28 | 29 | COPY --from=uv /root/.local /root/.local 30 | COPY --from=uv --chown=app:app /app/.venv /app/.venv 31 | 32 | # Place executables in the environment at the front of the path 33 | ENV PATH="/app/.venv/bin:$PATH" 34 | 35 | # when running the container, add --db-path and a bind mount to the host's db file 36 | ENTRYPOINT ["mcp-server-fetch"] 37 | -------------------------------------------------------------------------------- /src/fetch/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2024 Anthropic, PBC. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | -------------------------------------------------------------------------------- /src/fetch/README.md: -------------------------------------------------------------------------------- 1 | # Fetch MCP Server 2 | 3 | A Model Context Protocol server that provides web content fetching capabilities. 
This server enables LLMs to retrieve and process content from web pages, converting HTML to markdown for easier consumption. 4 | 5 | The fetch tool will truncate the response, but by using the `start_index` argument, you can specify where to start the content extraction. This lets models read a webpage in chunks until they find the information they need. 6 | 7 | ### Available Tools 8 | 9 | - `fetch` - Fetches a URL from the internet and extracts its contents as markdown. 10 | - `url` (string, required): URL to fetch 11 | - `max_length` (integer, optional): Maximum number of characters to return (default: 5000) 12 | - `start_index` (integer, optional): Start content from this character index (default: 0) 13 | - `raw` (boolean, optional): Get raw content without markdown conversion (default: false) 14 | 15 | ### Prompts 16 | 17 | - **fetch** 18 | - Fetch a URL and extract its contents as markdown 19 | - Arguments: 20 | - `url` (string, required): URL to fetch 21 | 22 | ## Installation 23 | 24 | Optionally: install Node.js. This will cause the fetch server to use a different, more robust HTML simplifier. 25 | 26 | ### Using uv (recommended) 27 | 28 | When using [`uv`](https://docs.astral.sh/uv/), no specific installation is needed. We will 29 | use [`uvx`](https://docs.astral.sh/uv/guides/tools/) to directly run *mcp-server-fetch*. 30 | 31 | ### Using PIP 32 | 33 | Alternatively, you can install `mcp-server-fetch` via pip: 34 | 35 | ``` 36 | pip install mcp-server-fetch 37 | ``` 38 | 39 | After installation, you can run it as a script using: 40 | 41 | ``` 42 | python -m mcp_server_fetch 43 | ``` 44 | 45 | ## Configuration 46 | 47 | ### Configure for Claude.app 48 | 49 | Add to your Claude settings: 50 | 51 |
52 | **Using uvx** 53 | 54 | ```json 55 | "mcpServers": { 56 | "fetch": { 57 | "command": "uvx", 58 | "args": ["mcp-server-fetch"] 59 | } 60 | } 61 | ``` 62 |
63 | 64 |
65 | **Using docker** 66 | 67 | ```json 68 | "mcpServers": { 69 | "fetch": { 70 | "command": "docker", 71 | "args": ["run", "-i", "--rm", "mcp/fetch"] 72 | } 73 | } 74 | ``` 75 |
76 | 77 |
78 | **Using pip installation** 79 | 80 | ```json 81 | "mcpServers": { 82 | "fetch": { 83 | "command": "python", 84 | "args": ["-m", "mcp_server_fetch"] 85 | } 86 | } 87 | ``` 88 |
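Any of the customization flags documented later in this README (for example `--ignore-robots-txt`, `--user-agent=...`, or `--proxy-url`) are passed by appending them to the `args` list of whichever variant you use. A minimal sketch using the uvx form (the user-agent value here is only a placeholder):

```json
"mcpServers": {
  "fetch": {
    "command": "uvx",
    "args": ["mcp-server-fetch", "--ignore-robots-txt", "--user-agent=MyAgent/1.0"]
  }
}
```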
89 | 90 | ### Configure for VS Code 91 | 92 | For quick installation, use one of the one-click install buttons below... 93 | 94 | [![Install with UV in VS Code](https://img.shields.io/badge/VS_Code-UV-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=fetch&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-fetch%22%5D%7D) [![Install with UV in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-UV-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=fetch&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-fetch%22%5D%7D&quality=insiders) 95 | 96 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=fetch&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Ffetch%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=fetch&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Ffetch%22%5D%7D&quality=insiders) 97 | 98 | For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open User Settings (JSON)`. 99 | 100 | Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 101 | 102 | > Note that the `mcp` key is needed when using the `mcp.json` file. 103 | 104 |
105 | **Using uvx** 106 | 107 | ```json 108 | { 109 | "mcp": { 110 | "servers": { 111 | "fetch": { 112 | "command": "uvx", 113 | "args": ["mcp-server-fetch"] 114 | } 115 | } 116 | } 117 | } 118 | ``` 119 |
120 | 121 |
122 | **Using Docker** 123 | 124 | ```json 125 | { 126 | "mcp": { 127 | "servers": { 128 | "fetch": { 129 | "command": "docker", 130 | "args": ["run", "-i", "--rm", "mcp/fetch"] 131 | } 132 | } 133 | } 134 | } 135 | ``` 136 |
137 | 138 | ### Customization - robots.txt 139 | 140 | By default, the server will obey a websites robots.txt file if the request came from the model (via a tool), but not if 141 | the request was user initiated (via a prompt). This can be disabled by adding the argument `--ignore-robots-txt` to the 142 | `args` list in the configuration. 143 | 144 | ### Customization - User-agent 145 | 146 | By default, depending on if the request came from the model (via a tool), or was user initiated (via a prompt), the 147 | server will use either the user-agent 148 | ``` 149 | ModelContextProtocol/1.0 (Autonomous; +https://github.com/modelcontextprotocol/servers) 150 | ``` 151 | or 152 | ``` 153 | ModelContextProtocol/1.0 (User-Specified; +https://github.com/modelcontextprotocol/servers) 154 | ``` 155 | 156 | This can be customized by adding the argument `--user-agent=YourUserAgent` to the `args` list in the configuration. 157 | 158 | ### Customization - Proxy 159 | 160 | The server can be configured to use a proxy by using the `--proxy-url` argument. 161 | 162 | ## Debugging 163 | 164 | You can use the MCP inspector to debug the server. For uvx installations: 165 | 166 | ``` 167 | npx @modelcontextprotocol/inspector uvx mcp-server-fetch 168 | ``` 169 | 170 | Or if you've installed the package in a specific directory or are developing on it: 171 | 172 | ``` 173 | cd path/to/servers/src/fetch 174 | npx @modelcontextprotocol/inspector uv run mcp-server-fetch 175 | ``` 176 | 177 | ## Contributing 178 | 179 | We encourage contributions to help expand and improve mcp-server-fetch. Whether you want to add new tools, enhance existing functionality, or improve documentation, your input is valuable. 180 | 181 | For examples of other MCP servers and implementation patterns, see: 182 | https://github.com/modelcontextprotocol/servers 183 | 184 | Pull requests are welcome! Feel free to contribute new ideas, bug fixes, or enhancements to make mcp-server-fetch even more powerful and useful. 185 | 186 | ## License 187 | 188 | mcp-server-fetch is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository. 189 | -------------------------------------------------------------------------------- /src/fetch/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "mcp-server-fetch" 3 | version = "0.6.3" 4 | description = "A Model Context Protocol server providing tools to fetch and convert web content for usage by LLMs" 5 | readme = "README.md" 6 | requires-python = ">=3.10" 7 | authors = [{ name = "Anthropic, PBC." 
}] 8 | maintainers = [{ name = "Jack Adamson", email = "jadamson@anthropic.com" }] 9 | keywords = ["http", "mcp", "llm", "automation"] 10 | license = { text = "MIT" } 11 | classifiers = [ 12 | "Development Status :: 4 - Beta", 13 | "Intended Audience :: Developers", 14 | "License :: OSI Approved :: MIT License", 15 | "Programming Language :: Python :: 3", 16 | "Programming Language :: Python :: 3.10", 17 | ] 18 | dependencies = [ 19 | "httpx<0.28", 20 | "markdownify>=0.13.1", 21 | "mcp>=1.1.3", 22 | "protego>=0.3.1", 23 | "pydantic>=2.0.0", 24 | "readabilipy>=0.2.0", 25 | "requests>=2.32.3", 26 | ] 27 | 28 | [project.scripts] 29 | mcp-server-fetch = "mcp_server_fetch:main" 30 | 31 | [build-system] 32 | requires = ["hatchling"] 33 | build-backend = "hatchling.build" 34 | 35 | [tool.uv] 36 | dev-dependencies = ["pyright>=1.1.389", "ruff>=0.7.3"] 37 | -------------------------------------------------------------------------------- /src/fetch/src/mcp_server_fetch/__init__.py: -------------------------------------------------------------------------------- 1 | from .server import serve 2 | 3 | 4 | def main(): 5 | """MCP Fetch Server - HTTP fetching functionality for MCP""" 6 | import argparse 7 | import asyncio 8 | 9 | parser = argparse.ArgumentParser( 10 | description="give a model the ability to make web requests" 11 | ) 12 | parser.add_argument("--user-agent", type=str, help="Custom User-Agent string") 13 | parser.add_argument( 14 | "--ignore-robots-txt", 15 | action="store_true", 16 | help="Ignore robots.txt restrictions", 17 | ) 18 | parser.add_argument("--proxy-url", type=str, help="Proxy URL to use for requests") 19 | 20 | args = parser.parse_args() 21 | asyncio.run(serve(args.user_agent, args.ignore_robots_txt, args.proxy_url)) 22 | 23 | 24 | if __name__ == "__main__": 25 | main() 26 | -------------------------------------------------------------------------------- /src/fetch/src/mcp_server_fetch/__main__.py: -------------------------------------------------------------------------------- 1 | # __main__.py 2 | 3 | from mcp_server_fetch import main 4 | 5 | main() 6 | -------------------------------------------------------------------------------- /src/filesystem/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22.12-alpine AS builder 2 | 3 | WORKDIR /app 4 | 5 | COPY src/filesystem /app 6 | COPY tsconfig.json /tsconfig.json 7 | 8 | RUN --mount=type=cache,target=/root/.npm npm install 9 | 10 | RUN --mount=type=cache,target=/root/.npm-production npm ci --ignore-scripts --omit-dev 11 | 12 | 13 | FROM node:22-alpine AS release 14 | 15 | WORKDIR /app 16 | 17 | COPY --from=builder /app/dist /app/dist 18 | COPY --from=builder /app/package.json /app/package.json 19 | COPY --from=builder /app/package-lock.json /app/package-lock.json 20 | 21 | ENV NODE_ENV=production 22 | 23 | RUN npm ci --ignore-scripts --omit-dev 24 | 25 | ENTRYPOINT ["node", "/app/dist/index.js"] -------------------------------------------------------------------------------- /src/filesystem/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/server-filesystem", 3 | "version": "0.6.2", 4 | "description": "MCP server for filesystem access", 5 | "license": "MIT", 6 | "author": "Anthropic, PBC (https://anthropic.com)", 7 | "homepage": "https://modelcontextprotocol.io", 8 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 9 | "type": "module", 10 | "bin": { 11 | 
"mcp-server-filesystem": "dist/index.js" 12 | }, 13 | "files": [ 14 | "dist" 15 | ], 16 | "scripts": { 17 | "build": "tsc && shx chmod +x dist/*.js", 18 | "prepare": "npm run build", 19 | "watch": "tsc --watch" 20 | }, 21 | "dependencies": { 22 | "@modelcontextprotocol/sdk": "0.5.0", 23 | "diff": "^5.1.0", 24 | "glob": "^10.3.10", 25 | "minimatch": "^10.0.1", 26 | "zod-to-json-schema": "^3.23.5" 27 | }, 28 | "devDependencies": { 29 | "@types/diff": "^5.0.9", 30 | "@types/minimatch": "^5.1.2", 31 | "@types/node": "^22", 32 | "shx": "^0.3.4", 33 | "typescript": "^5.3.3" 34 | } 35 | } -------------------------------------------------------------------------------- /src/filesystem/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "rootDir": ".", 6 | "moduleResolution": "NodeNext", 7 | "module": "NodeNext" 8 | }, 9 | "include": [ 10 | "./**/*.ts" 11 | ] 12 | } 13 | -------------------------------------------------------------------------------- /src/gdrive/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22.12-alpine AS builder 2 | 3 | COPY src/gdrive /app 4 | COPY tsconfig.json /tsconfig.json 5 | 6 | WORKDIR /app 7 | 8 | RUN --mount=type=cache,target=/root/.npm npm install 9 | 10 | RUN --mount=type=cache,target=/root/.npm-production npm ci --ignore-scripts --omit-dev 11 | 12 | FROM node:22-alpine AS release 13 | 14 | WORKDIR /app 15 | 16 | COPY --from=builder /app/dist /app/dist 17 | COPY --from=builder /app/package.json /app/package.json 18 | COPY --from=builder /app/package-lock.json /app/package-lock.json 19 | COPY src/gdrive/replace_open.sh /replace_open.sh 20 | 21 | ENV NODE_ENV=production 22 | 23 | RUN npm ci --ignore-scripts --omit-dev 24 | 25 | RUN sh /replace_open.sh 26 | 27 | RUN rm /replace_open.sh 28 | 29 | ENTRYPOINT ["node", "dist/index.js"] -------------------------------------------------------------------------------- /src/gdrive/README.md: -------------------------------------------------------------------------------- 1 | # Google Drive server 2 | 3 | This MCP server integrates with Google Drive to allow listing, reading, and searching over files. 4 | 5 | ## Components 6 | 7 | ### Tools 8 | 9 | - **search** 10 | - Search for files in Google Drive 11 | - Input: `query` (string): Search query 12 | - Returns file names and MIME types of matching files 13 | 14 | ### Resources 15 | 16 | The server provides access to Google Drive files: 17 | 18 | - **Files** (`gdrive:///`) 19 | - Supports all file types 20 | - Google Workspace files are automatically exported: 21 | - Docs → Markdown 22 | - Sheets → CSV 23 | - Presentations → Plain text 24 | - Drawings → PNG 25 | - Other files are provided in their native format 26 | 27 | ## Getting started 28 | 29 | 1. [Create a new Google Cloud project](https://console.cloud.google.com/projectcreate) 30 | 2. [Enable the Google Drive API](https://console.cloud.google.com/workspace-api/products) 31 | 3. [Configure an OAuth consent screen](https://console.cloud.google.com/apis/credentials/consent) ("internal" is fine for testing) 32 | 4. Add OAuth scope `https://www.googleapis.com/auth/drive.readonly` 33 | 5. [Create an OAuth Client ID](https://console.cloud.google.com/apis/credentials/oauthclient) for application type "Desktop App" 34 | 6. Download the JSON file of your client's OAuth keys 35 | 7. 
Rename the key file to `gcp-oauth.keys.json` and place into the root of this repo (i.e. `servers/gcp-oauth.keys.json`) 36 | 37 | Make sure to build the server with either `npm run build` or `npm run watch`. 38 | 39 | ### Authentication 40 | 41 | To authenticate and save credentials: 42 | 43 | 1. Run the server with the `auth` argument: `node ./dist auth` 44 | 2. This will open an authentication flow in your system browser 45 | 3. Complete the authentication process 46 | 4. Credentials will be saved in the root of this repo (i.e. `servers/.gdrive-server-credentials.json`) 47 | 48 | ### Usage with Desktop App 49 | 50 | To integrate this server with the desktop app, add the following to your app's server configuration: 51 | 52 | #### Docker 53 | 54 | Authentication: 55 | 56 | Assuming you have completed setting up the OAuth application on Google Cloud, you can now auth the server with the following command, replacing `/path/to/gcp-oauth.keys.json` with the path to your OAuth keys file: 57 | 58 | ```bash 59 | docker run -i --rm --mount type=bind,source=/path/to/gcp-oauth.keys.json,target=/gcp-oauth.keys.json -v mcp-gdrive:/gdrive-server -e GDRIVE_OAUTH_PATH=/gcp-oauth.keys.json -e "GDRIVE_CREDENTIALS_PATH=/gdrive-server/credentials.json" -p 3000:3000 mcp/gdrive auth 60 | ``` 61 | 62 | The command will print the URL to open in your browser. Open this URL in your browser and complete the authentication process. The credentials will be saved in the `mcp-gdrive` volume. 63 | 64 | Once authenticated, you can use the server in your app's server configuration: 65 | 66 | ```json 67 | { 68 | "mcpServers": { 69 | "gdrive": { 70 | "command": "docker", 71 | "args": ["run", "-i", "--rm", "-v", "mcp-gdrive:/gdrive-server", "-e", "GDRIVE_CREDENTIALS_PATH=/gdrive-server/credentials.json", "mcp/gdrive"] 72 | } 73 | } 74 | } 75 | ``` 76 | 77 | #### NPX 78 | 79 | ```json 80 | { 81 | "mcpServers": { 82 | "gdrive": { 83 | "command": "npx", 84 | "args": [ 85 | "-y", 86 | "@modelcontextprotocol/server-gdrive" 87 | ] 88 | } 89 | } 90 | } 91 | ``` 92 | 93 | ### Usage with VS Code 94 | 95 | For quick installation, use one of the one-click install buttons below.. 
96 | 97 | [![Install with NPX in VS Code](https://img.shields.io/badge/VS_Code-NPM-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=gdrive&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22credentials_path%22%2C%22description%22%3A%22Path%20to%20.gdrive-server-credentials.json%20file%22%7D%5D&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-gdrive%22%5D%2C%22env%22%3A%7B%22GDRIVE_CREDENTIALS_PATH%22%3A%22%24%7Binput%3Acredentials_path%7D%22%7D%7D) [![Install with NPX in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-NPM-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=gdrive&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22credentials_path%22%2C%22description%22%3A%22Path%20to%20.gdrive-server-credentials.json%20file%22%7D%5D&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-gdrive%22%5D%2C%22env%22%3A%7B%22GDRIVE_CREDENTIALS_PATH%22%3A%22%24%7Binput%3Acredentials_path%7D%22%7D%7D&quality=insiders) 98 | 99 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=gdrive&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22-v%22%2C%22mcp-gdrive%3A%2Fgdrive-server%22%2C%22-e%22%2C%22GDRIVE_CREDENTIALS_PATH%3D%2Fgdrive-server%2Fcredentials.json%22%2C%22mcp%2Fgdrive%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=gdrive&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22-v%22%2C%22mcp-gdrive%3A%2Fgdrive-server%22%2C%22-e%22%2C%22GDRIVE_CREDENTIALS_PATH%3D%2Fgdrive-server%2Fcredentials.json%22%2C%22mcp%2Fgdrive%22%5D%7D&quality=insiders) 100 | 101 | For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open User Settings (JSON)`. 102 | 103 | Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 104 | 105 | > Note that the `mcp` key is not needed in the `.vscode/mcp.json` file. 106 | 107 | #### NPX 108 | 109 | ```json 110 | { 111 | "mcp": { 112 | "servers": { 113 | "gdrive": { 114 | "command": "npx", 115 | "args": [ 116 | "-y", 117 | "@modelcontextprotocol/server-gdrive" 118 | ], 119 | "env": { 120 | "GDRIVE_CREDENTIALS_PATH": "/path/to/.gdrive-server-credentials.json" 121 | } 122 | } 123 | } 124 | } 125 | } 126 | ``` 127 | 128 | #### Docker 129 | 130 | ```json 131 | { 132 | "mcp": { 133 | "servers": { 134 | "gdrive": { 135 | "command": "docker", 136 | "args": [ 137 | "run", 138 | "-i", 139 | "--rm", 140 | "-v", 141 | "mcp-gdrive:/gdrive-server", 142 | "-e", 143 | "GDRIVE_CREDENTIALS_PATH=/gdrive-server/credentials.json", 144 | "mcp/gdrive" 145 | ] 146 | } 147 | } 148 | } 149 | } 150 | ``` 151 | 152 | ## License 153 | 154 | This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. 
For more details, please see the LICENSE file in the project repository. 155 | -------------------------------------------------------------------------------- /src/gdrive/index.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | import { authenticate } from "@google-cloud/local-auth"; 4 | import { Server } from "@modelcontextprotocol/sdk/server/index.js"; 5 | import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; 6 | import { 7 | CallToolRequestSchema, 8 | ListResourcesRequestSchema, 9 | ListToolsRequestSchema, 10 | ReadResourceRequestSchema, 11 | } from "@modelcontextprotocol/sdk/types.js"; 12 | import fs from "fs"; 13 | import { google } from "googleapis"; 14 | import path from "path"; 15 | import { fileURLToPath } from 'url'; 16 | 17 | const drive = google.drive("v3"); 18 | 19 | const server = new Server( 20 | { 21 | name: "example-servers/gdrive", 22 | version: "0.1.0", 23 | }, 24 | { 25 | capabilities: { 26 | resources: {}, 27 | tools: {}, 28 | }, 29 | }, 30 | ); 31 | 32 | server.setRequestHandler(ListResourcesRequestSchema, async (request) => { 33 | const pageSize = 10; 34 | const params: any = { 35 | pageSize, 36 | fields: "nextPageToken, files(id, name, mimeType)", 37 | }; 38 | 39 | if (request.params?.cursor) { 40 | params.pageToken = request.params.cursor; 41 | } 42 | 43 | const res = await drive.files.list(params); 44 | const files = res.data.files!; 45 | 46 | return { 47 | resources: files.map((file) => ({ 48 | uri: `gdrive:///${file.id}`, 49 | mimeType: file.mimeType, 50 | name: file.name, 51 | })), 52 | nextCursor: res.data.nextPageToken, 53 | }; 54 | }); 55 | 56 | server.setRequestHandler(ReadResourceRequestSchema, async (request) => { 57 | const fileId = request.params.uri.replace("gdrive:///", ""); 58 | 59 | // First get file metadata to check mime type 60 | const file = await drive.files.get({ 61 | fileId, 62 | fields: "mimeType", 63 | }); 64 | 65 | // For Google Docs/Sheets/etc we need to export 66 | if (file.data.mimeType?.startsWith("application/vnd.google-apps")) { 67 | let exportMimeType: string; 68 | switch (file.data.mimeType) { 69 | case "application/vnd.google-apps.document": 70 | exportMimeType = "text/markdown"; 71 | break; 72 | case "application/vnd.google-apps.spreadsheet": 73 | exportMimeType = "text/csv"; 74 | break; 75 | case "application/vnd.google-apps.presentation": 76 | exportMimeType = "text/plain"; 77 | break; 78 | case "application/vnd.google-apps.drawing": 79 | exportMimeType = "image/png"; 80 | break; 81 | default: 82 | exportMimeType = "text/plain"; 83 | } 84 | 85 | const res = await drive.files.export( 86 | { fileId, mimeType: exportMimeType }, 87 | { responseType: "text" }, 88 | ); 89 | 90 | return { 91 | contents: [ 92 | { 93 | uri: request.params.uri, 94 | mimeType: exportMimeType, 95 | text: res.data, 96 | }, 97 | ], 98 | }; 99 | } 100 | 101 | // For regular files download content 102 | const res = await drive.files.get( 103 | { fileId, alt: "media" }, 104 | { responseType: "arraybuffer" }, 105 | ); 106 | const mimeType = file.data.mimeType || "application/octet-stream"; 107 | if (mimeType.startsWith("text/") || mimeType === "application/json") { 108 | return { 109 | contents: [ 110 | { 111 | uri: request.params.uri, 112 | mimeType: mimeType, 113 | text: Buffer.from(res.data as ArrayBuffer).toString("utf-8"), 114 | }, 115 | ], 116 | }; 117 | } else { 118 | return { 119 | contents: [ 120 | { 121 | uri: request.params.uri, 122 | mimeType: mimeType, 123 
| blob: Buffer.from(res.data as ArrayBuffer).toString("base64"), 124 | }, 125 | ], 126 | }; 127 | } 128 | }); 129 | 130 | server.setRequestHandler(ListToolsRequestSchema, async () => { 131 | return { 132 | tools: [ 133 | { 134 | name: "search", 135 | description: "Search for files in Google Drive", 136 | inputSchema: { 137 | type: "object", 138 | properties: { 139 | query: { 140 | type: "string", 141 | description: "Search query", 142 | }, 143 | }, 144 | required: ["query"], 145 | }, 146 | }, 147 | ], 148 | }; 149 | }); 150 | 151 | server.setRequestHandler(CallToolRequestSchema, async (request) => { 152 | if (request.params.name === "search") { 153 | const userQuery = request.params.arguments?.query as string; 154 | const escapedQuery = userQuery.replace(/\\/g, "\\\\").replace(/'/g, "\\'"); 155 | const formattedQuery = `fullText contains '${escapedQuery}'`; 156 | 157 | const res = await drive.files.list({ 158 | q: formattedQuery, 159 | pageSize: 10, 160 | fields: "files(id, name, mimeType, modifiedTime, size)", 161 | }); 162 | 163 | const fileList = res.data.files 164 | ?.map((file: any) => `${file.name} (${file.mimeType})`) 165 | .join("\n"); 166 | return { 167 | content: [ 168 | { 169 | type: "text", 170 | text: `Found ${res.data.files?.length ?? 0} files:\n${fileList}`, 171 | }, 172 | ], 173 | isError: false, 174 | }; 175 | } 176 | throw new Error("Tool not found"); 177 | }); 178 | 179 | const credentialsPath = process.env.GDRIVE_CREDENTIALS_PATH || path.join( 180 | path.dirname(fileURLToPath(import.meta.url)), 181 | "../../../.gdrive-server-credentials.json", 182 | ); 183 | 184 | async function authenticateAndSaveCredentials() { 185 | console.log("Launching auth flow…"); 186 | const auth = await authenticate({ 187 | keyfilePath: process.env.GDRIVE_OAUTH_PATH || path.join( 188 | path.dirname(fileURLToPath(import.meta.url)), 189 | "../../../gcp-oauth.keys.json", 190 | ), 191 | scopes: ["https://www.googleapis.com/auth/drive.readonly"], 192 | }); 193 | fs.writeFileSync(credentialsPath, JSON.stringify(auth.credentials)); 194 | console.log("Credentials saved. You can now run the server."); 195 | } 196 | 197 | async function loadCredentialsAndRunServer() { 198 | if (!fs.existsSync(credentialsPath)) { 199 | console.error( 200 | "Credentials not found. Please run with 'auth' argument first.", 201 | ); 202 | process.exit(1); 203 | } 204 | 205 | const credentials = JSON.parse(fs.readFileSync(credentialsPath, "utf-8")); 206 | const auth = new google.auth.OAuth2(); 207 | auth.setCredentials(credentials); 208 | google.options({ auth }); 209 | 210 | console.error("Credentials loaded. 
Starting server."); 211 | const transport = new StdioServerTransport(); 212 | await server.connect(transport); 213 | } 214 | 215 | if (process.argv[2] === "auth") { 216 | authenticateAndSaveCredentials().catch(console.error); 217 | } else { 218 | loadCredentialsAndRunServer().catch(console.error); 219 | } 220 | -------------------------------------------------------------------------------- /src/gdrive/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/server-gdrive", 3 | "version": "0.6.2", 4 | "description": "MCP server for interacting with Google Drive", 5 | "license": "MIT", 6 | "author": "Anthropic, PBC (https://anthropic.com)", 7 | "homepage": "https://modelcontextprotocol.io", 8 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 9 | "type": "module", 10 | "bin": { 11 | "mcp-server-gdrive": "dist/index.js" 12 | }, 13 | "files": [ 14 | "dist" 15 | ], 16 | "scripts": { 17 | "build": "tsc && shx chmod +x dist/*.js", 18 | "prepare": "npm run build", 19 | "watch": "tsc --watch" 20 | }, 21 | "dependencies": { 22 | "@google-cloud/local-auth": "^3.0.1", 23 | "@modelcontextprotocol/sdk": "1.0.1", 24 | "googleapis": "^144.0.0" 25 | }, 26 | "devDependencies": { 27 | "@types/node": "^22", 28 | "shx": "^0.3.4", 29 | "typescript": "^5.6.2" 30 | } 31 | } -------------------------------------------------------------------------------- /src/gdrive/replace_open.sh: -------------------------------------------------------------------------------- 1 | #! /bin/bash 2 | 3 | # Basic script to replace opn(authorizeUrl, { wait: false }).then(cp => cp.unref()); with process.stdout.write(`Open this URL in your browser: ${authorizeUrl}`); 4 | 5 | sed -i 's/opn(authorizeUrl, { wait: false }).then(cp => cp.unref());/process.stderr.write(`Open this URL in your browser: ${authorizeUrl}\n`);/' node_modules/@google-cloud/local-auth/build/src/index.js 6 | -------------------------------------------------------------------------------- /src/gdrive/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "rootDir": "." 
6 | }, 7 | "include": [ 8 | "./**/*.ts" 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /src/git/.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | .venv 3 | -------------------------------------------------------------------------------- /src/git/.python-version: -------------------------------------------------------------------------------- 1 | 3.10 2 | -------------------------------------------------------------------------------- /src/git/Dockerfile: -------------------------------------------------------------------------------- 1 | # Use a Python image with uv pre-installed 2 | FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv 3 | 4 | # Install the project into `/app` 5 | WORKDIR /app 6 | 7 | # Enable bytecode compilation 8 | ENV UV_COMPILE_BYTECODE=1 9 | 10 | # Copy from the cache instead of linking since it's a mounted volume 11 | ENV UV_LINK_MODE=copy 12 | 13 | # Install the project's dependencies using the lockfile and settings 14 | RUN --mount=type=cache,target=/root/.cache/uv \ 15 | --mount=type=bind,source=uv.lock,target=uv.lock \ 16 | --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ 17 | uv sync --frozen --no-install-project --no-dev --no-editable 18 | 19 | # Then, add the rest of the project source code and install it 20 | # Installing separately from its dependencies allows optimal layer caching 21 | ADD . /app 22 | RUN --mount=type=cache,target=/root/.cache/uv \ 23 | uv sync --frozen --no-dev --no-editable 24 | 25 | FROM python:3.12-slim-bookworm 26 | 27 | RUN apt-get update && apt-get install -y git && rm -rf /var/lib/apt/lists/* 28 | 29 | WORKDIR /app 30 | 31 | COPY --from=uv /root/.local /root/.local 32 | COPY --from=uv --chown=app:app /app/.venv /app/.venv 33 | 34 | # Place executables in the environment at the front of the path 35 | ENV PATH="/app/.venv/bin:$PATH" 36 | 37 | # when running the container, add --db-path and a bind mount to the host's db file 38 | ENTRYPOINT ["mcp-server-git"] 39 | -------------------------------------------------------------------------------- /src/git/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2024 Anthropic, PBC. 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
8 | -------------------------------------------------------------------------------- /src/git/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "mcp-server-git" 3 | version = "0.6.2" 4 | description = "A Model Context Protocol server providing tools to read, search, and manipulate Git repositories programmatically via LLMs" 5 | readme = "README.md" 6 | requires-python = ">=3.10" 7 | authors = [{ name = "Anthropic, PBC." }] 8 | maintainers = [{ name = "David Soria Parra", email = "davidsp@anthropic.com" }] 9 | keywords = ["git", "mcp", "llm", "automation"] 10 | license = { text = "MIT" } 11 | classifiers = [ 12 | "Development Status :: 4 - Beta", 13 | "Intended Audience :: Developers", 14 | "License :: OSI Approved :: MIT License", 15 | "Programming Language :: Python :: 3", 16 | "Programming Language :: Python :: 3.10", 17 | ] 18 | dependencies = [ 19 | "click>=8.1.7", 20 | "gitpython>=3.1.43", 21 | "mcp>=1.0.0", 22 | "pydantic>=2.0.0", 23 | ] 24 | 25 | [project.scripts] 26 | mcp-server-git = "mcp_server_git:main" 27 | 28 | [build-system] 29 | requires = ["hatchling"] 30 | build-backend = "hatchling.build" 31 | 32 | [tool.uv] 33 | dev-dependencies = ["pyright>=1.1.389", "ruff>=0.7.3", "pytest>=8.0.0"] 34 | 35 | [tool.pytest.ini_options] 36 | testpaths = ["tests"] 37 | python_files = "test_*.py" 38 | python_classes = "Test*" 39 | python_functions = "test_*" -------------------------------------------------------------------------------- /src/git/src/mcp_server_git/__init__.py: -------------------------------------------------------------------------------- 1 | import click 2 | from pathlib import Path 3 | import logging 4 | import sys 5 | from .server import serve 6 | 7 | @click.command() 8 | @click.option("--repository", "-r", type=Path, help="Git repository path") 9 | @click.option("-v", "--verbose", count=True) 10 | def main(repository: Path | None, verbose: bool) -> None: 11 | """MCP Git Server - Git functionality for MCP""" 12 | import asyncio 13 | 14 | logging_level = logging.WARN 15 | if verbose == 1: 16 | logging_level = logging.INFO 17 | elif verbose >= 2: 18 | logging_level = logging.DEBUG 19 | 20 | logging.basicConfig(level=logging_level, stream=sys.stderr) 21 | asyncio.run(serve(repository)) 22 | 23 | if __name__ == "__main__": 24 | main() 25 | -------------------------------------------------------------------------------- /src/git/src/mcp_server_git/__main__.py: -------------------------------------------------------------------------------- 1 | # __main__.py 2 | 3 | from mcp_server_git import main 4 | 5 | main() 6 | -------------------------------------------------------------------------------- /src/git/tests/test_server.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from pathlib import Path 3 | import git 4 | from mcp_server_git.server import git_checkout 5 | import shutil 6 | 7 | @pytest.fixture 8 | def test_repository(tmp_path: Path): 9 | repo_path = tmp_path / "temp_test_repo" 10 | test_repo = git.Repo.init(repo_path) 11 | 12 | Path(repo_path / "test.txt").write_text("test") 13 | test_repo.index.add(["test.txt"]) 14 | test_repo.index.commit("initial commit") 15 | 16 | yield test_repo 17 | 18 | shutil.rmtree(repo_path) 19 | 20 | def test_git_checkout_existing_branch(test_repository): 21 | test_repository.git.branch("test-branch") 22 | result = git_checkout(test_repository, "test-branch") 23 | 24 | assert "Switched to branch 'test-branch'" in 
result 25 | assert test_repository.active_branch.name == "test-branch" 26 | 27 | def test_git_checkout_nonexistent_branch(test_repository): 28 | 29 | with pytest.raises(git.GitCommandError): 30 | git_checkout(test_repository, "nonexistent-branch") -------------------------------------------------------------------------------- /src/github/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22.12-alpine AS builder 2 | 3 | # Must be entire project because `prepare` script is run during `npm install` and requires all files. 4 | COPY src/github /app 5 | COPY tsconfig.json /tsconfig.json 6 | 7 | WORKDIR /app 8 | 9 | RUN --mount=type=cache,target=/root/.npm npm install 10 | 11 | FROM node:22.12-alpine AS release 12 | 13 | COPY --from=builder /app/dist /app/dist 14 | COPY --from=builder /app/package.json /app/package.json 15 | COPY --from=builder /app/package-lock.json /app/package-lock.json 16 | 17 | ENV NODE_ENV=production 18 | 19 | WORKDIR /app 20 | 21 | RUN npm ci --ignore-scripts --omit-dev 22 | 23 | ENTRYPOINT ["node", "dist/index.js"] -------------------------------------------------------------------------------- /src/github/common/errors.ts: -------------------------------------------------------------------------------- 1 | export class GitHubError extends Error { 2 | constructor( 3 | message: string, 4 | public readonly status: number, 5 | public readonly response: unknown 6 | ) { 7 | super(message); 8 | this.name = "GitHubError"; 9 | } 10 | } 11 | 12 | export class GitHubValidationError extends GitHubError { 13 | constructor(message: string, status: number, response: unknown) { 14 | super(message, status, response); 15 | this.name = "GitHubValidationError"; 16 | } 17 | } 18 | 19 | export class GitHubResourceNotFoundError extends GitHubError { 20 | constructor(resource: string) { 21 | super(`Resource not found: ${resource}`, 404, { message: `${resource} not found` }); 22 | this.name = "GitHubResourceNotFoundError"; 23 | } 24 | } 25 | 26 | export class GitHubAuthenticationError extends GitHubError { 27 | constructor(message = "Authentication failed") { 28 | super(message, 401, { message }); 29 | this.name = "GitHubAuthenticationError"; 30 | } 31 | } 32 | 33 | export class GitHubPermissionError extends GitHubError { 34 | constructor(message = "Insufficient permissions") { 35 | super(message, 403, { message }); 36 | this.name = "GitHubPermissionError"; 37 | } 38 | } 39 | 40 | export class GitHubRateLimitError extends GitHubError { 41 | constructor( 42 | message = "Rate limit exceeded", 43 | public readonly resetAt: Date 44 | ) { 45 | super(message, 429, { message, reset_at: resetAt.toISOString() }); 46 | this.name = "GitHubRateLimitError"; 47 | } 48 | } 49 | 50 | export class GitHubConflictError extends GitHubError { 51 | constructor(message: string) { 52 | super(message, 409, { message }); 53 | this.name = "GitHubConflictError"; 54 | } 55 | } 56 | 57 | export function isGitHubError(error: unknown): error is GitHubError { 58 | return error instanceof GitHubError; 59 | } 60 | 61 | export function createGitHubError(status: number, response: any): GitHubError { 62 | switch (status) { 63 | case 401: 64 | return new GitHubAuthenticationError(response?.message); 65 | case 403: 66 | return new GitHubPermissionError(response?.message); 67 | case 404: 68 | return new GitHubResourceNotFoundError(response?.message || "Resource"); 69 | case 409: 70 | return new GitHubConflictError(response?.message || "Conflict occurred"); 71 | case 422: 72 | 
return new GitHubValidationError( 73 | response?.message || "Validation failed", 74 | status, 75 | response 76 | ); 77 | case 429: 78 | return new GitHubRateLimitError( 79 | response?.message, 80 | new Date(response?.reset_at || Date.now() + 60000) 81 | ); 82 | default: 83 | return new GitHubError( 84 | response?.message || "GitHub API error", 85 | status, 86 | response 87 | ); 88 | } 89 | } -------------------------------------------------------------------------------- /src/github/common/utils.ts: -------------------------------------------------------------------------------- 1 | import { getUserAgent } from "universal-user-agent"; 2 | import { createGitHubError } from "./errors.js"; 3 | import { VERSION } from "./version.js"; 4 | 5 | type RequestOptions = { 6 | method?: string; 7 | body?: unknown; 8 | headers?: Record; 9 | } 10 | 11 | async function parseResponseBody(response: Response): Promise { 12 | const contentType = response.headers.get("content-type"); 13 | if (contentType?.includes("application/json")) { 14 | return response.json(); 15 | } 16 | return response.text(); 17 | } 18 | 19 | export function buildUrl(baseUrl: string, params: Record): string { 20 | const url = new URL(baseUrl); 21 | Object.entries(params).forEach(([key, value]) => { 22 | if (value !== undefined) { 23 | url.searchParams.append(key, value.toString()); 24 | } 25 | }); 26 | return url.toString(); 27 | } 28 | 29 | const USER_AGENT = `modelcontextprotocol/servers/github/v${VERSION} ${getUserAgent()}`; 30 | 31 | export async function githubRequest( 32 | url: string, 33 | options: RequestOptions = {} 34 | ): Promise { 35 | const headers: Record = { 36 | "Accept": "application/vnd.github.v3+json", 37 | "Content-Type": "application/json", 38 | "User-Agent": USER_AGENT, 39 | ...options.headers, 40 | }; 41 | 42 | if (process.env.GITHUB_PERSONAL_ACCESS_TOKEN) { 43 | headers["Authorization"] = `Bearer ${process.env.GITHUB_PERSONAL_ACCESS_TOKEN}`; 44 | } 45 | 46 | const response = await fetch(url, { 47 | method: options.method || "GET", 48 | headers, 49 | body: options.body ? 
JSON.stringify(options.body) : undefined, 50 | }); 51 | 52 | const responseBody = await parseResponseBody(response); 53 | 54 | if (!response.ok) { 55 | throw createGitHubError(response.status, responseBody); 56 | } 57 | 58 | return responseBody; 59 | } 60 | 61 | export function validateBranchName(branch: string): string { 62 | const sanitized = branch.trim(); 63 | if (!sanitized) { 64 | throw new Error("Branch name cannot be empty"); 65 | } 66 | if (sanitized.includes("..")) { 67 | throw new Error("Branch name cannot contain '..'"); 68 | } 69 | if (/[\s~^:?*[\\\]]/.test(sanitized)) { 70 | throw new Error("Branch name contains invalid characters"); 71 | } 72 | if (sanitized.startsWith("/") || sanitized.endsWith("/")) { 73 | throw new Error("Branch name cannot start or end with '/'"); 74 | } 75 | if (sanitized.endsWith(".lock")) { 76 | throw new Error("Branch name cannot end with '.lock'"); 77 | } 78 | return sanitized; 79 | } 80 | 81 | export function validateRepositoryName(name: string): string { 82 | const sanitized = name.trim().toLowerCase(); 83 | if (!sanitized) { 84 | throw new Error("Repository name cannot be empty"); 85 | } 86 | if (!/^[a-z0-9_.-]+$/.test(sanitized)) { 87 | throw new Error( 88 | "Repository name can only contain lowercase letters, numbers, hyphens, periods, and underscores" 89 | ); 90 | } 91 | if (sanitized.startsWith(".") || sanitized.endsWith(".")) { 92 | throw new Error("Repository name cannot start or end with a period"); 93 | } 94 | return sanitized; 95 | } 96 | 97 | export function validateOwnerName(owner: string): string { 98 | const sanitized = owner.trim().toLowerCase(); 99 | if (!sanitized) { 100 | throw new Error("Owner name cannot be empty"); 101 | } 102 | if (!/^[a-z0-9](?:[a-z0-9]|-(?=[a-z0-9])){0,38}$/.test(sanitized)) { 103 | throw new Error( 104 | "Owner name must start with a letter or number and can contain up to 39 characters" 105 | ); 106 | } 107 | return sanitized; 108 | } 109 | 110 | export async function checkBranchExists( 111 | owner: string, 112 | repo: string, 113 | branch: string 114 | ): Promise { 115 | try { 116 | await githubRequest( 117 | `https://api.github.com/repos/${owner}/${repo}/branches/${branch}` 118 | ); 119 | return true; 120 | } catch (error) { 121 | if (error && typeof error === "object" && "status" in error && error.status === 404) { 122 | return false; 123 | } 124 | throw error; 125 | } 126 | } 127 | 128 | export async function checkUserExists(username: string): Promise { 129 | try { 130 | await githubRequest(`https://api.github.com/users/${username}`); 131 | return true; 132 | } catch (error) { 133 | if (error && typeof error === "object" && "status" in error && error.status === 404) { 134 | return false; 135 | } 136 | throw error; 137 | } 138 | } -------------------------------------------------------------------------------- /src/github/common/version.ts: -------------------------------------------------------------------------------- 1 | // If the format of this file changes, so it doesn't simply export a VERSION constant, 2 | // this will break .github/workflows/version-check.yml. 
3 | export const VERSION = "0.6.2"; -------------------------------------------------------------------------------- /src/github/operations/branches.ts: -------------------------------------------------------------------------------- 1 | import { z } from "zod"; 2 | import { githubRequest } from "../common/utils.js"; 3 | import { GitHubReferenceSchema } from "../common/types.js"; 4 | 5 | // Schema definitions 6 | export const CreateBranchOptionsSchema = z.object({ 7 | ref: z.string(), 8 | sha: z.string(), 9 | }); 10 | 11 | export const CreateBranchSchema = z.object({ 12 | owner: z.string().describe("Repository owner (username or organization)"), 13 | repo: z.string().describe("Repository name"), 14 | branch: z.string().describe("Name for the new branch"), 15 | from_branch: z.string().optional().describe("Optional: source branch to create from (defaults to the repository's default branch)"), 16 | }); 17 | 18 | // Type exports 19 | export type CreateBranchOptions = z.infer; 20 | 21 | // Function implementations 22 | export async function getDefaultBranchSHA(owner: string, repo: string): Promise { 23 | try { 24 | const response = await githubRequest( 25 | `https://api.github.com/repos/${owner}/${repo}/git/refs/heads/main` 26 | ); 27 | const data = GitHubReferenceSchema.parse(response); 28 | return data.object.sha; 29 | } catch (error) { 30 | const masterResponse = await githubRequest( 31 | `https://api.github.com/repos/${owner}/${repo}/git/refs/heads/master` 32 | ); 33 | if (!masterResponse) { 34 | throw new Error("Could not find default branch (tried 'main' and 'master')"); 35 | } 36 | const data = GitHubReferenceSchema.parse(masterResponse); 37 | return data.object.sha; 38 | } 39 | } 40 | 41 | export async function createBranch( 42 | owner: string, 43 | repo: string, 44 | options: CreateBranchOptions 45 | ): Promise> { 46 | const fullRef = `refs/heads/${options.ref}`; 47 | 48 | const response = await githubRequest( 49 | `https://api.github.com/repos/${owner}/${repo}/git/refs`, 50 | { 51 | method: "POST", 52 | body: { 53 | ref: fullRef, 54 | sha: options.sha, 55 | }, 56 | } 57 | ); 58 | 59 | return GitHubReferenceSchema.parse(response); 60 | } 61 | 62 | export async function getBranchSHA( 63 | owner: string, 64 | repo: string, 65 | branch: string 66 | ): Promise { 67 | const response = await githubRequest( 68 | `https://api.github.com/repos/${owner}/${repo}/git/refs/heads/${branch}` 69 | ); 70 | 71 | const data = GitHubReferenceSchema.parse(response); 72 | return data.object.sha; 73 | } 74 | 75 | export async function createBranchFromRef( 76 | owner: string, 77 | repo: string, 78 | newBranch: string, 79 | fromBranch?: string 80 | ): Promise> { 81 | let sha: string; 82 | if (fromBranch) { 83 | sha = await getBranchSHA(owner, repo, fromBranch); 84 | } else { 85 | sha = await getDefaultBranchSHA(owner, repo); 86 | } 87 | 88 | return createBranch(owner, repo, { 89 | ref: newBranch, 90 | sha, 91 | }); 92 | } 93 | 94 | export async function updateBranch( 95 | owner: string, 96 | repo: string, 97 | branch: string, 98 | sha: string 99 | ): Promise> { 100 | const response = await githubRequest( 101 | `https://api.github.com/repos/${owner}/${repo}/git/refs/heads/${branch}`, 102 | { 103 | method: "PATCH", 104 | body: { 105 | sha, 106 | force: true, 107 | }, 108 | } 109 | ); 110 | 111 | return GitHubReferenceSchema.parse(response); 112 | } 113 | -------------------------------------------------------------------------------- /src/github/operations/commits.ts: 
-------------------------------------------------------------------------------- 1 | import { z } from "zod"; 2 | import { githubRequest, buildUrl } from "../common/utils.js"; 3 | 4 | export const ListCommitsSchema = z.object({ 5 | owner: z.string(), 6 | repo: z.string(), 7 | sha: z.string().optional(), 8 | page: z.number().optional(), 9 | perPage: z.number().optional() 10 | }); 11 | 12 | export async function listCommits( 13 | owner: string, 14 | repo: string, 15 | page?: number, 16 | perPage?: number, 17 | sha?: string 18 | ) { 19 | return githubRequest( 20 | buildUrl(`https://api.github.com/repos/${owner}/${repo}/commits`, { 21 | page: page?.toString(), 22 | per_page: perPage?.toString(), 23 | sha 24 | }) 25 | ); 26 | } -------------------------------------------------------------------------------- /src/github/operations/files.ts: -------------------------------------------------------------------------------- 1 | import { z } from "zod"; 2 | import { githubRequest } from "../common/utils.js"; 3 | import { 4 | GitHubContentSchema, 5 | GitHubAuthorSchema, 6 | GitHubTreeSchema, 7 | GitHubCommitSchema, 8 | GitHubReferenceSchema, 9 | GitHubFileContentSchema, 10 | } from "../common/types.js"; 11 | 12 | // Schema definitions 13 | export const FileOperationSchema = z.object({ 14 | path: z.string(), 15 | content: z.string(), 16 | }); 17 | 18 | export const CreateOrUpdateFileSchema = z.object({ 19 | owner: z.string().describe("Repository owner (username or organization)"), 20 | repo: z.string().describe("Repository name"), 21 | path: z.string().describe("Path where to create/update the file"), 22 | content: z.string().describe("Content of the file"), 23 | message: z.string().describe("Commit message"), 24 | branch: z.string().describe("Branch to create/update the file in"), 25 | sha: z.string().optional().describe("SHA of the file being replaced (required when updating existing files)"), 26 | }); 27 | 28 | export const GetFileContentsSchema = z.object({ 29 | owner: z.string().describe("Repository owner (username or organization)"), 30 | repo: z.string().describe("Repository name"), 31 | path: z.string().describe("Path to the file or directory"), 32 | branch: z.string().optional().describe("Branch to get contents from"), 33 | }); 34 | 35 | export const PushFilesSchema = z.object({ 36 | owner: z.string().describe("Repository owner (username or organization)"), 37 | repo: z.string().describe("Repository name"), 38 | branch: z.string().describe("Branch to push to (e.g., 'main' or 'master')"), 39 | files: z.array(FileOperationSchema).describe("Array of files to push"), 40 | message: z.string().describe("Commit message"), 41 | }); 42 | 43 | export const GitHubCreateUpdateFileResponseSchema = z.object({ 44 | content: GitHubFileContentSchema.nullable(), 45 | commit: z.object({ 46 | sha: z.string(), 47 | node_id: z.string(), 48 | url: z.string(), 49 | html_url: z.string(), 50 | author: GitHubAuthorSchema, 51 | committer: GitHubAuthorSchema, 52 | message: z.string(), 53 | tree: z.object({ 54 | sha: z.string(), 55 | url: z.string(), 56 | }), 57 | parents: z.array( 58 | z.object({ 59 | sha: z.string(), 60 | url: z.string(), 61 | html_url: z.string(), 62 | }) 63 | ), 64 | }), 65 | }); 66 | 67 | // Type exports 68 | export type FileOperation = z.infer; 69 | export type GitHubCreateUpdateFileResponse = z.infer; 70 | 71 | // Function implementations 72 | export async function getFileContents( 73 | owner: string, 74 | repo: string, 75 | path: string, 76 | branch?: string 77 | ) { 78 | let url = 
`https://api.github.com/repos/${owner}/${repo}/contents/${path}`; 79 | if (branch) { 80 | url += `?ref=${branch}`; 81 | } 82 | 83 | const response = await githubRequest(url); 84 | const data = GitHubContentSchema.parse(response); 85 | 86 | // If it's a file, decode the content 87 | if (!Array.isArray(data) && data.content) { 88 | data.content = Buffer.from(data.content, "base64").toString("utf8"); 89 | } 90 | 91 | return data; 92 | } 93 | 94 | export async function createOrUpdateFile( 95 | owner: string, 96 | repo: string, 97 | path: string, 98 | content: string, 99 | message: string, 100 | branch: string, 101 | sha?: string 102 | ) { 103 | const encodedContent = Buffer.from(content).toString("base64"); 104 | 105 | let currentSha = sha; 106 | if (!currentSha) { 107 | try { 108 | const existingFile = await getFileContents(owner, repo, path, branch); 109 | if (!Array.isArray(existingFile)) { 110 | currentSha = existingFile.sha; 111 | } 112 | } catch (error) { 113 | console.error("Note: File does not exist in branch, will create new file"); 114 | } 115 | } 116 | 117 | const url = `https://api.github.com/repos/${owner}/${repo}/contents/${path}`; 118 | const body = { 119 | message, 120 | content: encodedContent, 121 | branch, 122 | ...(currentSha ? { sha: currentSha } : {}), 123 | }; 124 | 125 | const response = await githubRequest(url, { 126 | method: "PUT", 127 | body, 128 | }); 129 | 130 | return GitHubCreateUpdateFileResponseSchema.parse(response); 131 | } 132 | 133 | async function createTree( 134 | owner: string, 135 | repo: string, 136 | files: FileOperation[], 137 | baseTree?: string 138 | ) { 139 | const tree = files.map((file) => ({ 140 | path: file.path, 141 | mode: "100644" as const, 142 | type: "blob" as const, 143 | content: file.content, 144 | })); 145 | 146 | const response = await githubRequest( 147 | `https://api.github.com/repos/${owner}/${repo}/git/trees`, 148 | { 149 | method: "POST", 150 | body: { 151 | tree, 152 | base_tree: baseTree, 153 | }, 154 | } 155 | ); 156 | 157 | return GitHubTreeSchema.parse(response); 158 | } 159 | 160 | async function createCommit( 161 | owner: string, 162 | repo: string, 163 | message: string, 164 | tree: string, 165 | parents: string[] 166 | ) { 167 | const response = await githubRequest( 168 | `https://api.github.com/repos/${owner}/${repo}/git/commits`, 169 | { 170 | method: "POST", 171 | body: { 172 | message, 173 | tree, 174 | parents, 175 | }, 176 | } 177 | ); 178 | 179 | return GitHubCommitSchema.parse(response); 180 | } 181 | 182 | async function updateReference( 183 | owner: string, 184 | repo: string, 185 | ref: string, 186 | sha: string 187 | ) { 188 | const response = await githubRequest( 189 | `https://api.github.com/repos/${owner}/${repo}/git/refs/${ref}`, 190 | { 191 | method: "PATCH", 192 | body: { 193 | sha, 194 | force: true, 195 | }, 196 | } 197 | ); 198 | 199 | return GitHubReferenceSchema.parse(response); 200 | } 201 | 202 | export async function pushFiles( 203 | owner: string, 204 | repo: string, 205 | branch: string, 206 | files: FileOperation[], 207 | message: string 208 | ) { 209 | const refResponse = await githubRequest( 210 | `https://api.github.com/repos/${owner}/${repo}/git/refs/heads/${branch}` 211 | ); 212 | 213 | const ref = GitHubReferenceSchema.parse(refResponse); 214 | const commitSha = ref.object.sha; 215 | 216 | const tree = await createTree(owner, repo, files, commitSha); 217 | const commit = await createCommit(owner, repo, message, tree.sha, [commitSha]); 218 | return await updateReference(owner, repo, 
`heads/${branch}`, commit.sha); 219 | } 220 | -------------------------------------------------------------------------------- /src/github/operations/issues.ts: -------------------------------------------------------------------------------- 1 | import { z } from "zod"; 2 | import { githubRequest, buildUrl } from "../common/utils.js"; 3 | 4 | export const GetIssueSchema = z.object({ 5 | owner: z.string(), 6 | repo: z.string(), 7 | issue_number: z.number(), 8 | }); 9 | 10 | export const IssueCommentSchema = z.object({ 11 | owner: z.string(), 12 | repo: z.string(), 13 | issue_number: z.number(), 14 | body: z.string(), 15 | }); 16 | 17 | export const CreateIssueOptionsSchema = z.object({ 18 | title: z.string(), 19 | body: z.string().optional(), 20 | assignees: z.array(z.string()).optional(), 21 | milestone: z.number().optional(), 22 | labels: z.array(z.string()).optional(), 23 | }); 24 | 25 | export const CreateIssueSchema = z.object({ 26 | owner: z.string(), 27 | repo: z.string(), 28 | ...CreateIssueOptionsSchema.shape, 29 | }); 30 | 31 | export const ListIssuesOptionsSchema = z.object({ 32 | owner: z.string(), 33 | repo: z.string(), 34 | direction: z.enum(["asc", "desc"]).optional(), 35 | labels: z.array(z.string()).optional(), 36 | page: z.number().optional(), 37 | per_page: z.number().optional(), 38 | since: z.string().optional(), 39 | sort: z.enum(["created", "updated", "comments"]).optional(), 40 | state: z.enum(["open", "closed", "all"]).optional(), 41 | }); 42 | 43 | export const UpdateIssueOptionsSchema = z.object({ 44 | owner: z.string(), 45 | repo: z.string(), 46 | issue_number: z.number(), 47 | title: z.string().optional(), 48 | body: z.string().optional(), 49 | assignees: z.array(z.string()).optional(), 50 | milestone: z.number().optional(), 51 | labels: z.array(z.string()).optional(), 52 | state: z.enum(["open", "closed"]).optional(), 53 | }); 54 | 55 | export async function getIssue(owner: string, repo: string, issue_number: number) { 56 | return githubRequest(`https://api.github.com/repos/${owner}/${repo}/issues/${issue_number}`); 57 | } 58 | 59 | export async function addIssueComment( 60 | owner: string, 61 | repo: string, 62 | issue_number: number, 63 | body: string 64 | ) { 65 | return githubRequest(`https://api.github.com/repos/${owner}/${repo}/issues/${issue_number}/comments`, { 66 | method: "POST", 67 | body: { body }, 68 | }); 69 | } 70 | 71 | export async function createIssue( 72 | owner: string, 73 | repo: string, 74 | options: z.infer 75 | ) { 76 | return githubRequest( 77 | `https://api.github.com/repos/${owner}/${repo}/issues`, 78 | { 79 | method: "POST", 80 | body: options, 81 | } 82 | ); 83 | } 84 | 85 | export async function listIssues( 86 | owner: string, 87 | repo: string, 88 | options: Omit, "owner" | "repo"> 89 | ) { 90 | const urlParams: Record = { 91 | direction: options.direction, 92 | labels: options.labels?.join(","), 93 | page: options.page?.toString(), 94 | per_page: options.per_page?.toString(), 95 | since: options.since, 96 | sort: options.sort, 97 | state: options.state 98 | }; 99 | 100 | return githubRequest( 101 | buildUrl(`https://api.github.com/repos/${owner}/${repo}/issues`, urlParams) 102 | ); 103 | } 104 | 105 | export async function updateIssue( 106 | owner: string, 107 | repo: string, 108 | issue_number: number, 109 | options: Omit, "owner" | "repo" | "issue_number"> 110 | ) { 111 | return githubRequest( 112 | `https://api.github.com/repos/${owner}/${repo}/issues/${issue_number}`, 113 | { 114 | method: "PATCH", 115 | body: options, 116 | } 
117 | ); 118 | } -------------------------------------------------------------------------------- /src/github/operations/repository.ts: -------------------------------------------------------------------------------- 1 | import { z } from "zod"; 2 | import { githubRequest } from "../common/utils.js"; 3 | import { GitHubRepositorySchema, GitHubSearchResponseSchema } from "../common/types.js"; 4 | 5 | // Schema definitions 6 | export const CreateRepositoryOptionsSchema = z.object({ 7 | name: z.string().describe("Repository name"), 8 | description: z.string().optional().describe("Repository description"), 9 | private: z.boolean().optional().describe("Whether the repository should be private"), 10 | autoInit: z.boolean().optional().describe("Initialize with README.md"), 11 | }); 12 | 13 | export const SearchRepositoriesSchema = z.object({ 14 | query: z.string().describe("Search query (see GitHub search syntax)"), 15 | page: z.number().optional().describe("Page number for pagination (default: 1)"), 16 | perPage: z.number().optional().describe("Number of results per page (default: 30, max: 100)"), 17 | }); 18 | 19 | export const ForkRepositorySchema = z.object({ 20 | owner: z.string().describe("Repository owner (username or organization)"), 21 | repo: z.string().describe("Repository name"), 22 | organization: z.string().optional().describe("Optional: organization to fork to (defaults to your personal account)"), 23 | }); 24 | 25 | // Type exports 26 | export type CreateRepositoryOptions = z.infer; 27 | 28 | // Function implementations 29 | export async function createRepository(options: CreateRepositoryOptions) { 30 | const response = await githubRequest("https://api.github.com/user/repos", { 31 | method: "POST", 32 | body: options, 33 | }); 34 | return GitHubRepositorySchema.parse(response); 35 | } 36 | 37 | export async function searchRepositories( 38 | query: string, 39 | page: number = 1, 40 | perPage: number = 30 41 | ) { 42 | const url = new URL("https://api.github.com/search/repositories"); 43 | url.searchParams.append("q", query); 44 | url.searchParams.append("page", page.toString()); 45 | url.searchParams.append("per_page", perPage.toString()); 46 | 47 | const response = await githubRequest(url.toString()); 48 | return GitHubSearchResponseSchema.parse(response); 49 | } 50 | 51 | export async function forkRepository( 52 | owner: string, 53 | repo: string, 54 | organization?: string 55 | ) { 56 | const url = organization 57 | ? 
`https://api.github.com/repos/${owner}/${repo}/forks?organization=${organization}` 58 | : `https://api.github.com/repos/${owner}/${repo}/forks`; 59 | 60 | const response = await githubRequest(url, { method: "POST" }); 61 | return GitHubRepositorySchema.extend({ 62 | parent: GitHubRepositorySchema, 63 | source: GitHubRepositorySchema, 64 | }).parse(response); 65 | } 66 | -------------------------------------------------------------------------------- /src/github/operations/search.ts: -------------------------------------------------------------------------------- 1 | import { z } from "zod"; 2 | import { githubRequest, buildUrl } from "../common/utils.js"; 3 | 4 | export const SearchOptions = z.object({ 5 | q: z.string(), 6 | order: z.enum(["asc", "desc"]).optional(), 7 | page: z.number().min(1).optional(), 8 | per_page: z.number().min(1).max(100).optional(), 9 | }); 10 | 11 | export const SearchUsersOptions = SearchOptions.extend({ 12 | sort: z.enum(["followers", "repositories", "joined"]).optional(), 13 | }); 14 | 15 | export const SearchIssuesOptions = SearchOptions.extend({ 16 | sort: z.enum([ 17 | "comments", 18 | "reactions", 19 | "reactions-+1", 20 | "reactions--1", 21 | "reactions-smile", 22 | "reactions-thinking_face", 23 | "reactions-heart", 24 | "reactions-tada", 25 | "interactions", 26 | "created", 27 | "updated", 28 | ]).optional(), 29 | }); 30 | 31 | export const SearchCodeSchema = SearchOptions; 32 | export const SearchUsersSchema = SearchUsersOptions; 33 | export const SearchIssuesSchema = SearchIssuesOptions; 34 | 35 | export async function searchCode(params: z.infer) { 36 | return githubRequest(buildUrl("https://api.github.com/search/code", params)); 37 | } 38 | 39 | export async function searchIssues(params: z.infer) { 40 | return githubRequest(buildUrl("https://api.github.com/search/issues", params)); 41 | } 42 | 43 | export async function searchUsers(params: z.infer) { 44 | return githubRequest(buildUrl("https://api.github.com/search/users", params)); 45 | } -------------------------------------------------------------------------------- /src/github/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/server-github", 3 | "version": "0.6.2", 4 | "description": "MCP server for using the GitHub API", 5 | "license": "MIT", 6 | "author": "Anthropic, PBC (https://anthropic.com)", 7 | "homepage": "https://modelcontextprotocol.io", 8 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 9 | "type": "module", 10 | "bin": { 11 | "mcp-server-github": "dist/index.js" 12 | }, 13 | "files": [ 14 | "dist" 15 | ], 16 | "scripts": { 17 | "build": "tsc && shx chmod +x dist/*.js", 18 | "prepare": "npm run build", 19 | "watch": "tsc --watch" 20 | }, 21 | "dependencies": { 22 | "@modelcontextprotocol/sdk": "1.0.1", 23 | "@types/node": "^22", 24 | "@types/node-fetch": "^2.6.12", 25 | "node-fetch": "^3.3.2", 26 | "universal-user-agent": "^7.0.2", 27 | "zod": "^3.22.4", 28 | "zod-to-json-schema": "^3.23.5" 29 | }, 30 | "devDependencies": { 31 | "shx": "^0.3.4", 32 | "typescript": "^5.6.2" 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/github/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "rootDir": "." 
6 | }, 7 | "include": [ 8 | "./**/*.ts" 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /src/gitlab/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22.12-alpine AS builder 2 | 3 | COPY src/gitlab /app 4 | COPY tsconfig.json /tsconfig.json 5 | 6 | WORKDIR /app 7 | 8 | RUN --mount=type=cache,target=/root/.npm npm install 9 | 10 | RUN --mount=type=cache,target=/root/.npm-production npm ci --ignore-scripts --omit-dev 11 | 12 | FROM node:22.12-alpine AS release 13 | 14 | WORKDIR /app 15 | 16 | COPY --from=builder /app/dist /app/dist 17 | COPY --from=builder /app/package.json /app/package.json 18 | COPY --from=builder /app/package-lock.json /app/package-lock.json 19 | 20 | ENV NODE_ENV=production 21 | 22 | RUN npm ci --ignore-scripts --omit-dev 23 | 24 | ENTRYPOINT ["node", "dist/index.js"] -------------------------------------------------------------------------------- /src/gitlab/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/server-gitlab", 3 | "version": "0.6.2", 4 | "description": "MCP server for using the GitLab API", 5 | "license": "MIT", 6 | "author": "Anthropic, PBC (https://anthropic.com)", 7 | "homepage": "https://modelcontextprotocol.io", 8 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 9 | "type": "module", 10 | "bin": { 11 | "mcp-server-gitlab": "dist/index.js" 12 | }, 13 | "files": [ 14 | "dist" 15 | ], 16 | "scripts": { 17 | "build": "tsc && shx chmod +x dist/*.js", 18 | "prepare": "npm run build", 19 | "watch": "tsc --watch" 20 | }, 21 | "dependencies": { 22 | "@modelcontextprotocol/sdk": "1.0.1", 23 | "@types/node-fetch": "^2.6.12", 24 | "node-fetch": "^3.3.2", 25 | "zod-to-json-schema": "^3.23.5" 26 | }, 27 | "devDependencies": { 28 | "shx": "^0.3.4", 29 | "typescript": "^5.6.2" 30 | } 31 | } -------------------------------------------------------------------------------- /src/gitlab/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "rootDir": "." 6 | }, 7 | "include": [ 8 | "./**/*.ts" 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /src/google-maps/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22.12-alpine AS builder 2 | 3 | # Must be entire project because `prepare` script is run during `npm install` and requires all files. 4 | COPY src/google-maps /app 5 | COPY tsconfig.json /tsconfig.json 6 | 7 | WORKDIR /app 8 | 9 | RUN --mount=type=cache,target=/root/.npm npm install 10 | 11 | RUN --mount=type=cache,target=/root/.npm-production npm ci --ignore-scripts --omit-dev 12 | 13 | FROM node:22-alpine AS release 14 | 15 | COPY --from=builder /app/dist /app/dist 16 | COPY --from=builder /app/package.json /app/package.json 17 | COPY --from=builder /app/package-lock.json /app/package-lock.json 18 | 19 | ENV NODE_ENV=production 20 | 21 | WORKDIR /app 22 | 23 | RUN npm ci --ignore-scripts --omit-dev 24 | 25 | ENTRYPOINT ["node", "dist/index.js"] -------------------------------------------------------------------------------- /src/google-maps/README.md: -------------------------------------------------------------------------------- 1 | # Google Maps MCP Server 2 | 3 | MCP Server for the Google Maps API.
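
As a rough illustration of how a client talks to this server, a geocoding request might be expressed as the abbreviated MCP `tools/call` payload sketched below (the `jsonrpc` and `id` fields are omitted, and the address is only an example value); the actual tool names and inputs are listed under Tools below:

```json
{
  "method": "tools/call",
  "params": {
    "name": "maps_geocode",
    "arguments": {
      "address": "1600 Amphitheatre Parkway, Mountain View, CA"
    }
  }
}
```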
4 | 5 | ## Tools 6 | 7 | 1. `maps_geocode` 8 | 9 | - Convert address to coordinates 10 | - Input: `address` (string) 11 | - Returns: location, formatted_address, place_id 12 | 13 | 2. `maps_reverse_geocode` 14 | 15 | - Convert coordinates to address 16 | - Inputs: 17 | - `latitude` (number) 18 | - `longitude` (number) 19 | - Returns: formatted_address, place_id, address_components 20 | 21 | 3. `maps_search_places` 22 | 23 | - Search for places using text query 24 | - Inputs: 25 | - `query` (string) 26 | - `location` (optional): { latitude: number, longitude: number } 27 | - `radius` (optional): number (meters, max 50000) 28 | - Returns: array of places with names, addresses, locations 29 | 30 | 4. `maps_place_details` 31 | 32 | - Get detailed information about a place 33 | - Input: `place_id` (string) 34 | - Returns: name, address, contact info, ratings, reviews, opening hours 35 | 36 | 5. `maps_distance_matrix` 37 | 38 | - Calculate distances and times between points 39 | - Inputs: 40 | - `origins` (string[]) 41 | - `destinations` (string[]) 42 | - `mode` (optional): "driving" | "walking" | "bicycling" | "transit" 43 | - Returns: distances and durations matrix 44 | 45 | 6. `maps_elevation` 46 | 47 | - Get elevation data for locations 48 | - Input: `locations` (array of {latitude, longitude}) 49 | - Returns: elevation data for each point 50 | 51 | 7. `maps_directions` 52 | - Get directions between points 53 | - Inputs: 54 | - `origin` (string) 55 | - `destination` (string) 56 | - `mode` (optional): "driving" | "walking" | "bicycling" | "transit" 57 | - Returns: route details with steps, distance, duration 58 | 59 | ## Setup 60 | 61 | ### API Key 62 | 63 | Get a Google Maps API key by following the instructions [here](https://developers.google.com/maps/documentation/javascript/get-api-key#create-api-keys). 64 | 65 | ### Usage with Claude Desktop 66 | 67 | Add the following to your `claude_desktop_config.json`: 68 | 69 | #### Docker 70 | 71 | ```json 72 | { 73 | "mcpServers": { 74 | "google-maps": { 75 | "command": "docker", 76 | "args": [ 77 | "run", 78 | "-i", 79 | "--rm", 80 | "-e", 81 | "GOOGLE_MAPS_API_KEY", 82 | "mcp/google-maps" 83 | ], 84 | "env": { 85 | "GOOGLE_MAPS_API_KEY": "" 86 | } 87 | } 88 | } 89 | } 90 | ``` 91 | 92 | ### NPX 93 | 94 | ```json 95 | { 96 | "mcpServers": { 97 | "google-maps": { 98 | "command": "npx", 99 | "args": ["-y", "@modelcontextprotocol/server-google-maps"], 100 | "env": { 101 | "GOOGLE_MAPS_API_KEY": "" 102 | } 103 | } 104 | } 105 | } 106 | ``` 107 | 108 | ### Usage with VS Code 109 | 110 | For quick installation, use one of the one-click install buttons below... 
111 | 112 | [![Install with NPX in VS Code](https://img.shields.io/badge/VS_Code-NPM-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=google-maps&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22maps_api_key%22%2C%22description%22%3A%22Google%20Maps%20API%20Key%22%2C%22password%22%3Atrue%7D%5D&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-google-maps%22%5D%2C%22env%22%3A%7B%22GOOGLE_MAPS_API_KEY%22%3A%22%24%7Binput%3Amaps_api_key%7D%22%7D%7D) [![Install with NPX in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-NPM-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=google-maps&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22maps_api_key%22%2C%22description%22%3A%22Google%20Maps%20API%20Key%22%2C%22password%22%3Atrue%7D%5D&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-google-maps%22%5D%2C%22env%22%3A%7B%22GOOGLE_MAPS_API_KEY%22%3A%22%24%7Binput%3Amaps_api_key%7D%22%7D%7D&quality=insiders) 113 | 114 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=google-maps&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22maps_api_key%22%2C%22description%22%3A%22Google%20Maps%20API%20Key%22%2C%22password%22%3Atrue%7D%5D&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Fgoogle-maps%22%5D%2C%22env%22%3A%7B%22GOOGLE_MAPS_API_KEY%22%3A%22%24%7Binput%3Amaps_api_key%7D%22%7D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=google-maps&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22maps_api_key%22%2C%22description%22%3A%22Google%20Maps%20API%20Key%22%2C%22password%22%3Atrue%7D%5D&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Fgoogle-maps%22%5D%2C%22env%22%3A%7B%22GOOGLE_MAPS_API_KEY%22%3A%22%24%7Binput%3Amaps_api_key%7D%22%7D%7D&quality=insiders) 115 | 116 | For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open User Settings (JSON)`. 117 | 118 | Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 119 | 120 | > Note that the `mcp` key is not needed in the `.vscode/mcp.json` file. 
121 | 122 | ```json 123 | { 124 | "mcp": { 125 | "inputs": [ 126 | { 127 | "type": "promptString", 128 | "id": "maps_api_key", 129 | "description": "Google Maps API Key", 130 | "password": true 131 | } 132 | ], 133 | "servers": { 134 | "google-maps": { 135 | "command": "npx", 136 | "args": ["-y", "@modelcontextprotocol/server-google-maps"], 137 | "env": { 138 | "GOOGLE_MAPS_API_KEY": "${input:maps_api_key}" 139 | } 140 | } 141 | } 142 | } 143 | } 144 | ``` 145 | 146 | For Docker installation: 147 | 148 | ```json 149 | { 150 | "mcp": { 151 | "inputs": [ 152 | { 153 | "type": "promptString", 154 | "id": "maps_api_key", 155 | "description": "Google Maps API Key", 156 | "password": true 157 | } 158 | ], 159 | "servers": { 160 | "google-maps": { 161 | "command": "docker", 162 | "args": ["run", "-i", "--rm", "mcp/google-maps"], 163 | "env": { 164 | "GOOGLE_MAPS_API_KEY": "${input:maps_api_key}" 165 | } 166 | } 167 | } 168 | } 169 | } 170 | ``` 171 | 172 | ## Build 173 | 174 | Docker build: 175 | 176 | ```bash 177 | docker build -t mcp/google-maps -f src/google-maps/Dockerfile . 178 | ``` 179 | 180 | ## License 181 | 182 | This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository. 183 | -------------------------------------------------------------------------------- /src/google-maps/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/server-google-maps", 3 | "version": "0.6.2", 4 | "description": "MCP server for using the Google Maps API", 5 | "license": "MIT", 6 | "author": "Anthropic, PBC (https://anthropic.com)", 7 | "homepage": "https://modelcontextprotocol.io", 8 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 9 | "type": "module", 10 | "bin": { 11 | "mcp-server-google-maps": "dist/index.js" 12 | }, 13 | "files": [ 14 | "dist" 15 | ], 16 | "scripts": { 17 | "build": "tsc && shx chmod +x dist/*.js", 18 | "prepare": "npm run build", 19 | "watch": "tsc --watch" 20 | }, 21 | "dependencies": { 22 | "@modelcontextprotocol/sdk": "1.0.1", 23 | "@types/node-fetch": "^2.6.12", 24 | "node-fetch": "^3.3.2" 25 | }, 26 | "devDependencies": { 27 | "shx": "^0.3.4", 28 | "typescript": "^5.6.2" 29 | } 30 | } -------------------------------------------------------------------------------- /src/google-maps/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "rootDir": "." 
6 | }, 7 | "include": [ 8 | "./**/*.ts" 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /src/memory/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22.12-alpine AS builder 2 | 3 | COPY src/memory /app 4 | COPY tsconfig.json /tsconfig.json 5 | 6 | WORKDIR /app 7 | 8 | RUN --mount=type=cache,target=/root/.npm npm install 9 | 10 | RUN --mount=type=cache,target=/root/.npm-production npm ci --ignore-scripts --omit-dev 11 | 12 | FROM node:22-alpine AS release 13 | 14 | COPY --from=builder /app/dist /app/dist 15 | COPY --from=builder /app/package.json /app/package.json 16 | COPY --from=builder /app/package-lock.json /app/package-lock.json 17 | 18 | ENV NODE_ENV=production 19 | 20 | WORKDIR /app 21 | 22 | RUN npm ci --ignore-scripts --omit-dev 23 | 24 | ENTRYPOINT ["node", "dist/index.js"] -------------------------------------------------------------------------------- /src/memory/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/server-memory", 3 | "version": "0.6.3", 4 | "description": "MCP server for enabling memory for Claude through a knowledge graph", 5 | "license": "MIT", 6 | "author": "Anthropic, PBC (https://anthropic.com)", 7 | "homepage": "https://modelcontextprotocol.io", 8 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 9 | "type": "module", 10 | "bin": { 11 | "mcp-server-memory": "dist/index.js" 12 | }, 13 | "files": [ 14 | "dist" 15 | ], 16 | "scripts": { 17 | "build": "tsc && shx chmod +x dist/*.js", 18 | "prepare": "npm run build", 19 | "watch": "tsc --watch" 20 | }, 21 | "dependencies": { 22 | "@modelcontextprotocol/sdk": "1.0.1" 23 | }, 24 | "devDependencies": { 25 | "@types/node": "^22", 26 | "shx": "^0.3.4", 27 | "typescript": "^5.6.2" 28 | } 29 | } -------------------------------------------------------------------------------- /src/memory/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "rootDir": "." 6 | }, 7 | "include": [ 8 | "./**/*.ts" 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /src/postgres/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22.12-alpine AS builder 2 | 3 | COPY src/postgres /app 4 | COPY tsconfig.json /tsconfig.json 5 | 6 | WORKDIR /app 7 | 8 | RUN --mount=type=cache,target=/root/.npm npm install 9 | 10 | RUN --mount=type=cache,target=/root/.npm-production npm ci --ignore-scripts --omit-dev 11 | 12 | FROM node:22-alpine AS release 13 | 14 | COPY --from=builder /app/dist /app/dist 15 | COPY --from=builder /app/package.json /app/package.json 16 | COPY --from=builder /app/package-lock.json /app/package-lock.json 17 | 18 | ENV NODE_ENV=production 19 | 20 | WORKDIR /app 21 | 22 | RUN npm ci --ignore-scripts --omit-dev 23 | 24 | ENTRYPOINT ["node", "dist/index.js"] -------------------------------------------------------------------------------- /src/postgres/README.md: -------------------------------------------------------------------------------- 1 | # PostgreSQL 2 | 3 | A Model Context Protocol server that provides read-only access to PostgreSQL databases. This server enables LLMs to inspect database schemas and execute read-only queries. 
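
As a rough sketch of what this looks like in practice, an MCP client can invoke the server's `query` tool (documented under Tools below) with a read-only statement; the `tools/call` payload is abbreviated here (`jsonrpc` and `id` fields omitted) and the SQL is only an example:

```json
{
  "method": "tools/call",
  "params": {
    "name": "query",
    "arguments": {
      "sql": "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'"
    }
  }
}
```

Each statement runs inside a READ ONLY transaction, so data-modifying queries will fail.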
4 | 5 | ## Components 6 | 7 | ### Tools 8 | 9 | - **query** 10 | - Execute read-only SQL queries against the connected database 11 | - Input: `sql` (string): The SQL query to execute 12 | - All queries are executed within a READ ONLY transaction 13 | 14 | ### Resources 15 | 16 | The server provides schema information for each table in the database: 17 | 18 | - **Table Schemas** (`postgres:////schema`) 19 | - JSON schema information for each table 20 | - Includes column names and data types 21 | - Automatically discovered from database metadata 22 | 23 | ## Configuration 24 | 25 | ### Usage with Claude Desktop 26 | 27 | To use this server with the Claude Desktop app, add the following configuration to the "mcpServers" section of your `claude_desktop_config.json`: 28 | 29 | ### Docker 30 | 31 | * when running docker on macos, use host.docker.internal if the server is running on the host network (eg localhost) 32 | * username/password can be added to the postgresql url with `postgresql://user:password@host:port/db-name` 33 | 34 | ```json 35 | { 36 | "mcpServers": { 37 | "postgres": { 38 | "command": "docker", 39 | "args": [ 40 | "run", 41 | "-i", 42 | "--rm", 43 | "mcp/postgres", 44 | "postgresql://host.docker.internal:5432/mydb"] 45 | } 46 | } 47 | } 48 | ``` 49 | 50 | ### NPX 51 | 52 | ```json 53 | { 54 | "mcpServers": { 55 | "postgres": { 56 | "command": "npx", 57 | "args": [ 58 | "-y", 59 | "@modelcontextprotocol/server-postgres", 60 | "postgresql://localhost/mydb" 61 | ] 62 | } 63 | } 64 | } 65 | ``` 66 | 67 | Replace `/mydb` with your database name. 68 | 69 | ### Usage with VS Code 70 | 71 | For quick installation, use one of the one-click install buttons below... 72 | 73 | [![Install with NPX in VS Code](https://img.shields.io/badge/VS_Code-NPM-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=postgres&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22pg_url%22%2C%22description%22%3A%22PostgreSQL%20URL%20(e.g.%20postgresql%3A%2F%2Fuser%3Apass%40localhost%3A5432%2Fmydb)%22%7D%5D&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-postgres%22%2C%22%24%7Binput%3Apg_url%7D%22%5D%7D) [![Install with NPX in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-NPM-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=postgres&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22pg_url%22%2C%22description%22%3A%22PostgreSQL%20URL%20(e.g.%20postgresql%3A%2F%2Fuser%3Apass%40localhost%3A5432%2Fmydb)%22%7D%5D&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-postgres%22%2C%22%24%7Binput%3Apg_url%7D%22%5D%7D&quality=insiders) 74 | 75 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=postgres&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22pg_url%22%2C%22description%22%3A%22PostgreSQL%20URL%20(e.g.%20postgresql%3A%2F%2Fuser%3Apass%40host.docker.internal%3A5432%2Fmydb)%22%7D%5D&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Fpostgres%22%2C%22%24%7Binput%3Apg_url%7D%22%5D%7D) [![Install with Docker in VS Code 
Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=postgres&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22pg_url%22%2C%22description%22%3A%22PostgreSQL%20URL%20(e.g.%20postgresql%3A%2F%2Fuser%3Apass%40host.docker.internal%3A5432%2Fmydb)%22%7D%5D&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Fpostgres%22%2C%22%24%7Binput%3Apg_url%7D%22%5D%7D&quality=insiders) 76 | 77 | For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open User Settings (JSON)`. 78 | 79 | Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 80 | 81 | > Note that the `mcp` key is not needed in the `.vscode/mcp.json` file. 82 | 83 | ### Docker 84 | 85 | **Note**: When using Docker and connecting to a PostgreSQL server on your host machine, use `host.docker.internal` instead of `localhost` in the connection URL. 86 | 87 | ```json 88 | { 89 | "mcp": { 90 | "inputs": [ 91 | { 92 | "type": "promptString", 93 | "id": "pg_url", 94 | "description": "PostgreSQL URL (e.g. postgresql://user:pass@host.docker.internal:5432/mydb)" 95 | } 96 | ], 97 | "servers": { 98 | "postgres": { 99 | "command": "docker", 100 | "args": [ 101 | "run", 102 | "-i", 103 | "--rm", 104 | "mcp/postgres", 105 | "${input:pg_url}" 106 | ] 107 | } 108 | } 109 | } 110 | } 111 | ``` 112 | 113 | ### NPX 114 | 115 | ```json 116 | { 117 | "mcp": { 118 | "inputs": [ 119 | { 120 | "type": "promptString", 121 | "id": "pg_url", 122 | "description": "PostgreSQL URL (e.g. postgresql://user:pass@localhost:5432/mydb)" 123 | } 124 | ], 125 | "servers": { 126 | "postgres": { 127 | "command": "npx", 128 | "args": [ 129 | "-y", 130 | "@modelcontextprotocol/server-postgres", 131 | "${input:pg_url}" 132 | ] 133 | } 134 | } 135 | } 136 | } 137 | ``` 138 | 139 | ## Building 140 | 141 | Docker: 142 | 143 | ```sh 144 | docker build -t mcp/postgres -f src/postgres/Dockerfile . 145 | ``` 146 | 147 | ## License 148 | 149 | This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository. 
150 | -------------------------------------------------------------------------------- /src/postgres/index.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | import { Server } from "@modelcontextprotocol/sdk/server/index.js"; 4 | import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; 5 | import { 6 | CallToolRequestSchema, 7 | ListResourcesRequestSchema, 8 | ListToolsRequestSchema, 9 | ReadResourceRequestSchema, 10 | } from "@modelcontextprotocol/sdk/types.js"; 11 | import pg from "pg"; 12 | 13 | const server = new Server( 14 | { 15 | name: "example-servers/postgres", 16 | version: "0.1.0", 17 | }, 18 | { 19 | capabilities: { 20 | resources: {}, 21 | tools: {}, 22 | }, 23 | }, 24 | ); 25 | 26 | const args = process.argv.slice(2); 27 | if (args.length === 0) { 28 | console.error("Please provide a database URL as a command-line argument"); 29 | process.exit(1); 30 | } 31 | 32 | const databaseUrl = args[0]; 33 | 34 | const resourceBaseUrl = new URL(databaseUrl); 35 | resourceBaseUrl.protocol = "postgres:"; 36 | resourceBaseUrl.password = ""; 37 | 38 | const pool = new pg.Pool({ 39 | connectionString: databaseUrl, 40 | }); 41 | 42 | const SCHEMA_PATH = "schema"; 43 | 44 | server.setRequestHandler(ListResourcesRequestSchema, async () => { 45 | const client = await pool.connect(); 46 | try { 47 | const result = await client.query( 48 | "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'", 49 | ); 50 | return { 51 | resources: result.rows.map((row) => ({ 52 | uri: new URL(`${row.table_name}/${SCHEMA_PATH}`, resourceBaseUrl).href, 53 | mimeType: "application/json", 54 | name: `"${row.table_name}" database schema`, 55 | })), 56 | }; 57 | } finally { 58 | client.release(); 59 | } 60 | }); 61 | 62 | server.setRequestHandler(ReadResourceRequestSchema, async (request) => { 63 | const resourceUrl = new URL(request.params.uri); 64 | 65 | const pathComponents = resourceUrl.pathname.split("/"); 66 | const schema = pathComponents.pop(); 67 | const tableName = pathComponents.pop(); 68 | 69 | if (schema !== SCHEMA_PATH) { 70 | throw new Error("Invalid resource URI"); 71 | } 72 | 73 | const client = await pool.connect(); 74 | try { 75 | const result = await client.query( 76 | "SELECT column_name, data_type FROM information_schema.columns WHERE table_name = $1", 77 | [tableName], 78 | ); 79 | 80 | return { 81 | contents: [ 82 | { 83 | uri: request.params.uri, 84 | mimeType: "application/json", 85 | text: JSON.stringify(result.rows, null, 2), 86 | }, 87 | ], 88 | }; 89 | } finally { 90 | client.release(); 91 | } 92 | }); 93 | 94 | server.setRequestHandler(ListToolsRequestSchema, async () => { 95 | return { 96 | tools: [ 97 | { 98 | name: "query", 99 | description: "Run a read-only SQL query", 100 | inputSchema: { 101 | type: "object", 102 | properties: { 103 | sql: { type: "string" }, 104 | }, 105 | }, 106 | }, 107 | ], 108 | }; 109 | }); 110 | 111 | server.setRequestHandler(CallToolRequestSchema, async (request) => { 112 | if (request.params.name === "query") { 113 | const sql = request.params.arguments?.sql as string; 114 | 115 | const client = await pool.connect(); 116 | try { 117 | await client.query("BEGIN TRANSACTION READ ONLY"); 118 | const result = await client.query(sql); 119 | return { 120 | content: [{ type: "text", text: JSON.stringify(result.rows, null, 2) }], 121 | isError: false, 122 | }; 123 | } catch (error) { 124 | throw error; 125 | } finally { 126 | client 127 | 
.query("ROLLBACK") 128 | .catch((error) => 129 | console.warn("Could not roll back transaction:", error), 130 | ); 131 | 132 | client.release(); 133 | } 134 | } 135 | throw new Error(`Unknown tool: ${request.params.name}`); 136 | }); 137 | 138 | async function runServer() { 139 | const transport = new StdioServerTransport(); 140 | await server.connect(transport); 141 | } 142 | 143 | runServer().catch(console.error); 144 | -------------------------------------------------------------------------------- /src/postgres/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/server-postgres", 3 | "version": "0.6.2", 4 | "description": "MCP server for interacting with PostgreSQL databases", 5 | "license": "MIT", 6 | "author": "Anthropic, PBC (https://anthropic.com)", 7 | "homepage": "https://modelcontextprotocol.io", 8 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 9 | "type": "module", 10 | "bin": { 11 | "mcp-server-postgres": "dist/index.js" 12 | }, 13 | "files": [ 14 | "dist" 15 | ], 16 | "scripts": { 17 | "build": "tsc && shx chmod +x dist/*.js", 18 | "prepare": "npm run build", 19 | "watch": "tsc --watch" 20 | }, 21 | "dependencies": { 22 | "@modelcontextprotocol/sdk": "1.0.1", 23 | "pg": "^8.13.0" 24 | }, 25 | "devDependencies": { 26 | "@types/pg": "^8.11.10", 27 | "shx": "^0.3.4", 28 | "typescript": "^5.6.2" 29 | } 30 | } -------------------------------------------------------------------------------- /src/postgres/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "rootDir": "." 6 | }, 7 | "include": [ 8 | "./**/*.ts" 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /src/puppeteer/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22-bookworm-slim 2 | 3 | ENV DEBIAN_FRONTEND noninteractive 4 | 5 | # for arm64 support we need to install chromium provided by debian 6 | # npm ERR! The chromium binary is not available for arm64. 7 | # https://github.com/puppeteer/puppeteer/issues/7740 8 | 9 | ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD true 10 | ENV PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium 11 | 12 | RUN apt-get update && \ 13 | apt-get install -y wget gnupg && \ 14 | apt-get install -y fonts-ipafont-gothic fonts-wqy-zenhei fonts-thai-tlwg fonts-kacst fonts-freefont-ttf libxss1 \ 15 | libgtk2.0-0 libnss3 libatk-bridge2.0-0 libdrm2 libxkbcommon0 libgbm1 libasound2 && \ 16 | apt-get install -y chromium && \ 17 | apt-get clean 18 | 19 | COPY src/puppeteer /project 20 | COPY tsconfig.json /tsconfig.json 21 | 22 | WORKDIR /project 23 | 24 | RUN npm install 25 | 26 | ENTRYPOINT ["node", "dist/index.js"] -------------------------------------------------------------------------------- /src/puppeteer/README.md: -------------------------------------------------------------------------------- 1 | # Puppeteer 2 | 3 | A Model Context Protocol server that provides browser automation capabilities using Puppeteer. This server enables LLMs to interact with web pages, take screenshots, and execute JavaScript in a real browser environment. 
4 | 5 | ## Components 6 | 7 | ### Tools 8 | 9 | - **puppeteer_navigate** 10 | 11 | - Navigate to any URL in the browser 12 | - Inputs: 13 | - `url` (string, required): URL to navigate to 14 | - `launchOptions` (object, optional): PuppeteerJS LaunchOptions. Default null. If changed and not null, browser restarts. Example: `{ headless: true, args: ['--user-data-dir="C:/Data"'] }` 15 | - `allowDangerous` (boolean, optional): Allow dangerous LaunchOptions that reduce security. When false, dangerous args like `--no-sandbox`, `--disable-web-security` will throw errors. Default false. 16 | 17 | - **puppeteer_screenshot** 18 | 19 | - Capture screenshots of the entire page or specific elements 20 | - Inputs: 21 | - `name` (string, required): Name for the screenshot 22 | - `selector` (string, optional): CSS selector for element to screenshot 23 | - `width` (number, optional, default: 800): Screenshot width 24 | - `height` (number, optional, default: 600): Screenshot height 25 | 26 | - **puppeteer_click** 27 | 28 | - Click elements on the page 29 | - Input: `selector` (string): CSS selector for element to click 30 | 31 | - **puppeteer_hover** 32 | 33 | - Hover over elements on the page 34 | - Input: `selector` (string): CSS selector for element to hover 35 | 36 | - **puppeteer_fill** 37 | 38 | - Fill out input fields 39 | - Inputs: 40 | - `selector` (string): CSS selector for input field 41 | - `value` (string): Value to fill 42 | 43 | - **puppeteer_select** 44 | 45 | - Select an element with a SELECT tag 46 | - Inputs: 47 | - `selector` (string): CSS selector for element to select 48 | - `value` (string): Value to select 49 | 50 | - **puppeteer_evaluate** 51 | - Execute JavaScript in the browser console 52 | - Input: `script` (string): JavaScript code to execute 53 | 54 | ### Resources 55 | 56 | The server provides access to two types of resources: 57 | 58 | 1. **Console Logs** (`console://logs`) 59 | 60 | - Browser console output in text format 61 | - Includes all console messages from the browser 62 | 63 | 2. **Screenshots** (`screenshot://<name>`) 64 | - PNG images of captured screenshots 65 | - Accessible via the screenshot name specified during capture 66 | 67 | ## Key Features 68 | 69 | - Browser automation 70 | - Console log monitoring 71 | - Screenshot capabilities 72 | - JavaScript execution 73 | - Basic web interaction (navigation, clicking, form filling) 74 | - Customizable Puppeteer launch options 75 | 76 | ## Configuration to use Puppeteer Server 77 | 78 | ### Usage with Claude Desktop 79 | 80 | Here's the Claude Desktop configuration to use the Puppeteer server: 81 | 82 | ### Docker 83 | 84 | **NOTE** The Docker implementation will use headless Chromium, whereas the NPX version will open a browser window. 85 | 86 | ```json 87 | { 88 | "mcpServers": { 89 | "puppeteer": { 90 | "command": "docker", 91 | "args": [ 92 | "run", 93 | "-i", 94 | "--rm", 95 | "--init", 96 | "-e", 97 | "DOCKER_CONTAINER=true", 98 | "mcp/puppeteer" 99 | ] 100 | } 101 | } 102 | } 103 | ``` 104 | 105 | ### NPX 106 | 107 | ```json 108 | { 109 | "mcpServers": { 110 | "puppeteer": { 111 | "command": "npx", 112 | "args": ["-y", "@modelcontextprotocol/server-puppeteer"] 113 | } 114 | } 115 | } 116 | ``` 117 | 118 | ### Usage with VS Code 119 | 120 | For quick installation, use one of the one-click install buttons below...
121 | 122 | [![Install with NPX in VS Code](https://img.shields.io/badge/VS_Code-NPM-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=puppeteer&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-puppeteer%22%5D%7D) [![Install with NPX in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-NPM-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=puppeteer&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-puppeteer%22%5D%7D&quality=insiders) 123 | 124 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=puppeteer&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22--init%22%2C%22-e%22%2C%22DOCKER_CONTAINER%3Dtrue%22%2C%22mcp%2Fpuppeteer%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=puppeteer&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22--init%22%2C%22-e%22%2C%22DOCKER_CONTAINER%3Dtrue%22%2C%22mcp%2Fpuppeteer%22%5D%7D&quality=insiders) 125 | 126 | For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open User Settings (JSON)`. 127 | 128 | Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 129 | 130 | > Note that the `mcp` key is not needed in the `.vscode/mcp.json` file. 131 | 132 | For NPX installation (opens a browser window): 133 | 134 | ```json 135 | { 136 | "mcp": { 137 | "servers": { 138 | "puppeteer": { 139 | "command": "npx", 140 | "args": ["-y", "@modelcontextprotocol/server-puppeteer"] 141 | } 142 | } 143 | } 144 | } 145 | ``` 146 | 147 | For Docker installation (uses headless chromium): 148 | 149 | ```json 150 | { 151 | "mcp": { 152 | "servers": { 153 | "puppeteer": { 154 | "command": "docker", 155 | "args": [ 156 | "run", 157 | "-i", 158 | "--rm", 159 | "--init", 160 | "-e", 161 | "DOCKER_CONTAINER=true", 162 | "mcp/puppeteer" 163 | ] 164 | } 165 | } 166 | } 167 | } 168 | ``` 169 | 170 | ### Launch Options 171 | 172 | You can customize Puppeteer's browser behavior in two ways: 173 | 174 | 1. **Environment Variable**: Set `PUPPETEER_LAUNCH_OPTIONS` with a JSON-encoded string in the MCP configuration's `env` parameter: 175 | 176 | ```json 177 | { 178 | "mcpServers": { 179 | "mcp-puppeteer": { 180 | "command": "npx", 181 | "args": ["-y", "@modelcontextprotocol/server-puppeteer"], 182 | "env": { 183 | "PUPPETEER_LAUNCH_OPTIONS": "{ \"headless\": false, \"executablePath\": \"C:/Program Files/Google/Chrome/Application/chrome.exe\", \"args\": [] }", 184 | "ALLOW_DANGEROUS": "true" 185 | } 186 | } 187 | } 188 | } 189 | ``` 190 | 191 | 2. 
**Tool Call Arguments**: Pass `launchOptions` and `allowDangerous` parameters to the `puppeteer_navigate` tool: 192 | 193 | ```json 194 | { 195 | "url": "https://example.com", 196 | "launchOptions": { 197 | "headless": false, 198 | "defaultViewport": { "width": 1280, "height": 720 } 199 | } 200 | } 201 | ``` 202 | 203 | ## Build 204 | 205 | Docker build: 206 | 207 | ```bash 208 | docker build -t mcp/puppeteer -f src/puppeteer/Dockerfile . 209 | ``` 210 | 211 | ## License 212 | 213 | This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository. 214 | -------------------------------------------------------------------------------- /src/puppeteer/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/server-puppeteer", 3 | "version": "0.6.2", 4 | "description": "MCP server for browser automation using Puppeteer", 5 | "license": "MIT", 6 | "author": "Anthropic, PBC (https://anthropic.com)", 7 | "homepage": "https://modelcontextprotocol.io", 8 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 9 | "type": "module", 10 | "bin": { 11 | "mcp-server-puppeteer": "dist/index.js" 12 | }, 13 | "files": [ 14 | "dist" 15 | ], 16 | "scripts": { 17 | "build": "tsc && shx chmod +x dist/*.js", 18 | "prepare": "npm run build", 19 | "watch": "tsc --watch" 20 | }, 21 | "dependencies": { 22 | "@modelcontextprotocol/sdk": "1.0.1", 23 | "puppeteer": "^23.4.0" 24 | }, 25 | "devDependencies": { 26 | "shx": "^0.3.4", 27 | "typescript": "^5.6.2" 28 | } 29 | } -------------------------------------------------------------------------------- /src/puppeteer/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "rootDir": "." 6 | }, 7 | "include": [ 8 | "./**/*.ts" 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /src/redis/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22.12-alpine as builder 2 | 3 | COPY src/redis /app 4 | 5 | WORKDIR /app 6 | 7 | RUN --mount=type=cache,target=/root/.npm npm install 8 | 9 | RUN npm run build 10 | 11 | FROM node:22-alpine AS release 12 | 13 | COPY --from=builder /app/build /app/build 14 | COPY --from=builder /app/package.json /app/package.json 15 | COPY --from=builder /app/package-lock.json /app/package-lock.json 16 | 17 | ENV NODE_ENV=production 18 | 19 | WORKDIR /app 20 | 21 | RUN npm ci --ignore-scripts --omit-dev 22 | 23 | ENTRYPOINT ["node", "build/index.js"] -------------------------------------------------------------------------------- /src/redis/README.md: -------------------------------------------------------------------------------- 1 | # Redis 2 | 3 | A Model Context Protocol server that provides access to Redis databases. This server enables LLMs to interact with Redis key-value stores through a set of standardized tools. 4 | 5 | ## Prerequisites 6 | 7 | 1. 
Redis server must be installed and running 8 | - [Download Redis](https://redis.io/download) 9 | - For Windows users: Use [Windows Subsystem for Linux (WSL)](https://redis.io/docs/getting-started/installation/install-redis-on-windows/) or [Memurai](https://www.memurai.com/) (Redis-compatible Windows server) 10 | - Default port: 6379 11 | 12 | ## Common Issues & Solutions 13 | 14 | ### Connection Errors 15 | 16 | **ECONNREFUSED** 17 | - **Cause**: Redis/Memurai server is not running or unreachable 18 | - **Solution**: 19 | - Verify server is running: 20 | - Redis: `redis-cli ping` should return "PONG" 21 | - Memurai (Windows): `memurai-cli ping` should return "PONG" 22 | - Check service status: 23 | - Linux: `systemctl status redis` 24 | - macOS: `brew services list` 25 | - Windows: Check Memurai in Services (services.msc) 26 | - Ensure correct port (default 6379) is not blocked by firewall 27 | - Verify Redis URL format: `redis://hostname:port` 28 | - If `redis://localhost:6379` fails with ECONNREFUSED, try using the explicit IP: `redis://127.0.0.1:6379` 29 | 30 | ### Server Behavior 31 | 32 | - The server implements exponential backoff with a maximum of 5 retries 33 | - Initial retry delay: 1 second, maximum delay: 30 seconds 34 | - Server will exit after max retries to prevent infinite reconnection loops 35 | 36 | ## Components 37 | 38 | ### Tools 39 | 40 | - **set** 41 | - Set a Redis key-value pair with optional expiration 42 | - Input: 43 | - `key` (string): Redis key 44 | - `value` (string): Value to store 45 | - `expireSeconds` (number, optional): Expiration time in seconds 46 | 47 | - **get** 48 | - Get value by key from Redis 49 | - Input: `key` (string): Redis key to retrieve 50 | 51 | - **delete** 52 | - Delete one or more keys from Redis 53 | - Input: `key` (string | string[]): Key or array of keys to delete 54 | 55 | - **list** 56 | - List Redis keys matching a pattern 57 | - Input: `pattern` (string, optional): Pattern to match keys (default: *) 58 | 59 | ## Usage with Claude Desktop 60 | 61 | To use this server with the Claude Desktop app, add the following configuration to the "mcpServers" section of your `claude_desktop_config.json`: 62 | 63 | ### Docker 64 | 65 | * When running Docker on macOS, use host.docker.internal if the server is running on the host network (e.g. localhost) 66 | * The Redis URL can be specified as an argument; it defaults to "redis://localhost:6379" (use "redis://127.0.0.1:6379" if localhost fails) 67 | 68 | ```json 69 | { 70 | "mcpServers": { 71 | "redis": { 72 | "command": "docker", 73 | "args": [ 74 | "run", 75 | "-i", 76 | "--rm", 77 | "mcp/redis", 78 | "redis://host.docker.internal:6379"] 79 | } 80 | } 81 | } 82 | ``` 83 | 84 | ### NPX 85 | 86 | ```json 87 | { 88 | "mcpServers": { 89 | "redis": { 90 | "command": "npx", 91 | "args": [ 92 | "-y", 93 | "@modelcontextprotocol/server-redis", 94 | "redis://localhost:6379" 95 | ] 96 | } 97 | } 98 | } 99 | ``` 100 | 101 | ## Usage with VS Code 102 | 103 | For quick installation, use one of the one-click install buttons below...
104 | 105 | [![Install with NPX in VS Code](https://img.shields.io/badge/VS_Code-NPM-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=redis&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22redis_url%22%2C%22description%22%3A%22Redis%20URL%20(e.g.%20redis%3A%2F%2Flocalhost%3A6379)%22%7D%5D&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-redis%22%5D%2C%22env%22%3A%7B%22REDIS_URL%22%3A%22%24%7Binput%3Aredis_url%7D%22%7D%7D) [![Install with NPX in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-NPM-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=redis&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22redis_url%22%2C%22description%22%3A%22Redis%20URL%20(e.g.%20redis%3A%2F%2Flocalhost%3A6379)%22%7D%5D&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-redis%22%5D%2C%22env%22%3A%7B%22REDIS_URL%22%3A%22%24%7Binput%3Aredis_url%7D%22%7D%7D&quality=insiders) 106 | 107 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=redis&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22redis_url%22%2C%22description%22%3A%22Redis%20URL%20(e.g.%20redis%3A%2F%2Fhost.docker.internal%3A6379)%22%7D%5D&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Fredis%22%5D%2C%22env%22%3A%7B%22REDIS_URL%22%3A%22%24%7Binput%3Aredis_url%7D%22%7D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=redis&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22redis_url%22%2C%22description%22%3A%22Redis%20URL%20(e.g.%20redis%3A%2F%2Fhost.docker.internal%3A6379)%22%7D%5D&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Fredis%22%5D%2C%22env%22%3A%7B%22REDIS_URL%22%3A%22%24%7Binput%3Aredis_url%7D%22%7D%7D&quality=insiders) 108 | 109 | For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open User Settings (JSON)`. 110 | 111 | Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 112 | 113 | > Note that the `mcp` key is not needed in the `.vscode/mcp.json` file. 114 | 115 | ```json 116 | { 117 | "mcp": { 118 | "inputs": [ 119 | { 120 | "type": "promptString", 121 | "id": "redis_url", 122 | "description": "Redis URL (e.g. redis://localhost:6379)" 123 | } 124 | ], 125 | "servers": { 126 | "redis": { 127 | "command": "npx", 128 | "args": ["-y", "@modelcontextprotocol/server-redis"], 129 | "env": { 130 | "REDIS_URL": "${input:redis_url}" 131 | } 132 | } 133 | } 134 | } 135 | } 136 | ``` 137 | 138 | For Docker installation: 139 | 140 | ```json 141 | { 142 | "mcp": { 143 | "inputs": [ 144 | { 145 | "type": "promptString", 146 | "id": "redis_url", 147 | "description": "Redis URL (e.g. 
redis://host.docker.internal:6379)" 148 | } 149 | ], 150 | "servers": { 151 | "redis": { 152 | "command": "docker", 153 | "args": ["run", "-i", "--rm", "mcp/redis"], 154 | "env": { 155 | "REDIS_URL": "${input:redis_url}" 156 | } 157 | } 158 | } 159 | } 160 | } 161 | ``` 162 | 163 | ## License 164 | 165 | This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository. 166 | -------------------------------------------------------------------------------- /src/redis/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/server-redis", 3 | "version": "0.1.1", 4 | "description": "MCP server for using Redis", 5 | "license": "MIT", 6 | "author": "Anthropic, PBC (https://anthropic.com)", 7 | "homepage": "https://modelcontextprotocol.io", 8 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 9 | "type": "module", 10 | "bin": { 11 | "mcp-server-redis": "dist/index.js" 12 | }, 13 | "files": [ 14 | "dist" 15 | ], 16 | "scripts": { 17 | "build": "tsc && shx chmod +x dist/*.js", 18 | "prepare": "npm run build", 19 | "watch": "tsc --watch" 20 | }, 21 | "dependencies": { 22 | "@modelcontextprotocol/sdk": "^1.7.0", 23 | "@types/node": "^22.10.2", 24 | "@types/redis": "^4.0.10", 25 | "redis": "^4.7.0" 26 | }, 27 | "devDependencies": { 28 | "shx": "^0.3.4", 29 | "typescript": "^5.7.2" 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/redis/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2022", 4 | "module": "Node16", 5 | "moduleResolution": "Node16", 6 | "outDir": "./dist", 7 | "rootDir": "./src", 8 | "strict": true, 9 | "esModuleInterop": true, 10 | "skipLibCheck": true, 11 | "forceConsistentCasingInFileNames": true 12 | }, 13 | "include": ["src/**/*"], 14 | "exclude": ["node_modules"] 15 | } 16 | -------------------------------------------------------------------------------- /src/sentry/.python-version: -------------------------------------------------------------------------------- 1 | 3.10 2 | -------------------------------------------------------------------------------- /src/sentry/Dockerfile: -------------------------------------------------------------------------------- 1 | # Use a Python image with uv pre-installed 2 | FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv 3 | 4 | # Install the project into `/app` 5 | WORKDIR /app 6 | 7 | # Enable bytecode compilation 8 | ENV UV_COMPILE_BYTECODE=1 9 | 10 | # Copy from the cache instead of linking since it's a mounted volume 11 | ENV UV_LINK_MODE=copy 12 | 13 | # Install the project's dependencies using the lockfile and settings 14 | RUN --mount=type=cache,target=/root/.cache/uv \ 15 | --mount=type=bind,source=uv.lock,target=uv.lock \ 16 | --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ 17 | uv sync --frozen --no-install-project --no-dev --no-editable 18 | 19 | # Then, add the rest of the project source code and install it 20 | # Installing separately from its dependencies allows optimal layer caching 21 | ADD . 
/app 22 | RUN --mount=type=cache,target=/root/.cache/uv \ 23 | uv sync --frozen --no-dev --no-editable 24 | 25 | FROM python:3.12-slim-bookworm 26 | 27 | WORKDIR /app 28 | 29 | COPY --from=uv /root/.local /root/.local 30 | COPY --from=uv --chown=app:app /app/.venv /app/.venv 31 | 32 | # Place executables in the environment at the front of the path 33 | ENV PATH="/app/.venv/bin:$PATH" 34 | 35 | # when running the container, add --db-path and a bind mount to the host's db file 36 | ENTRYPOINT ["mcp-server-sentry"] 37 | 38 | -------------------------------------------------------------------------------- /src/sentry/README.md: -------------------------------------------------------------------------------- 1 | # mcp-server-sentry: A Sentry MCP server 2 | 3 | ## Overview 4 | 5 | A Model Context Protocol server for retrieving and analyzing issues from Sentry.io. This server provides tools to inspect error reports, stacktraces, and other debugging information from your Sentry account. 6 | 7 | ### Tools 8 | 9 | 1. `get_sentry_issue` 10 | - Retrieve and analyze a Sentry issue by ID or URL 11 | - Input: 12 | - `issue_id_or_url` (string): Sentry issue ID or URL to analyze 13 | - Returns: Issue details including: 14 | - Title 15 | - Issue ID 16 | - Status 17 | - Level 18 | - First seen timestamp 19 | - Last seen timestamp 20 | - Event count 21 | - Full stacktrace 22 | 23 | ### Prompts 24 | 25 | 1. `sentry-issue` 26 | - Retrieve issue details from Sentry 27 | - Input: 28 | - `issue_id_or_url` (string): Sentry issue ID or URL 29 | - Returns: Formatted issue details as conversation context 30 | 31 | ## Installation 32 | 33 | ### Using uv (recommended) 34 | 35 | When using [`uv`](https://docs.astral.sh/uv/) no specific installation is needed. We will 36 | use [`uvx`](https://docs.astral.sh/uv/guides/tools/) to directly run *mcp-server-sentry*. 37 | 38 | ### Using PIP 39 | 40 | Alternatively you can install `mcp-server-sentry` via pip: 41 | 42 | ``` 43 | pip install mcp-server-sentry 44 | ``` 45 | 46 | After installation, you can run it as a script using: 47 | 48 | ``` 49 | python -m mcp_server_sentry 50 | ``` 51 | 52 | ## Configuration 53 | 54 | ### Usage with Claude Desktop 55 | 56 | Add this to your `claude_desktop_config.json`: 57 | 58 |
59 | Using uvx 60 | 61 | ```json 62 | "mcpServers": { 63 | "sentry": { 64 | "command": "uvx", 65 | "args": ["mcp-server-sentry", "--auth-token", "YOUR_SENTRY_TOKEN"] 66 | } 67 | } 68 | ``` 69 |
70 | 71 |
72 | 73 |
74 | Using docker 75 | 76 | ```json 77 | "mcpServers": { 78 | "sentry": { 79 | "command": "docker", 80 | "args": ["run", "-i", "--rm", "mcp/sentry", "--auth-token", "YOUR_SENTRY_TOKEN"] 81 | } 82 | } 83 | ``` 84 |
85 | 86 |
87 | 88 | Using pip installation 89 | 90 | ```json 91 | "mcpServers": { 92 | "sentry": { 93 | "command": "python", 94 | "args": ["-m", "mcp_server_sentry", "--auth-token", "YOUR_SENTRY_TOKEN"] 95 | } 96 | } 97 | ``` 98 |
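Whichever of the configurations above you use, the client ends up calling the `get_sentry_issue` tool described earlier. Its input is a single `issue_id_or_url` field, so a call's argument payload is as small as the following sketch (the issue ID is a made-up placeholder; a full Sentry issue URL works as well):

```json
{
  "issue_id_or_url": "1234567890"
}
```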
99 | 100 | ### Usage with VS Code 101 | 102 | For quick installation, use one of the one-click installation buttons below... 103 | 104 | [![Install with UV in VS Code](https://img.shields.io/badge/VS_Code-UV-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sentry&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22auth_token%22%2C%22description%22%3A%22Sentry%20Auth%20Token%22%2C%22password%22%3Atrue%7D%5D&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-sentry%22%5D%2C%22env%22%3A%7B%22SENTRY_AUTH_TOKEN%22%3A%22%24%7Binput%3Aauth_token%7D%22%7D%7D) [![Install with UV in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-UV-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sentry&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22auth_token%22%2C%22description%22%3A%22Sentry%20Auth%20Token%22%2C%22password%22%3Atrue%7D%5D&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-sentry%22%5D%2C%22env%22%3A%7B%22SENTRY_AUTH_TOKEN%22%3A%22%24%7Binput%3Aauth_token%7D%22%7D%7D&quality=insiders) 105 | 106 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sentry&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22auth_token%22%2C%22description%22%3A%22Sentry%20Auth%20Token%22%2C%22password%22%3Atrue%7D%5D&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Fsentry%22%5D%2C%22env%22%3A%7B%22SENTRY_AUTH_TOKEN%22%3A%22%24%7Binput%3Aauth_token%7D%22%7D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sentry&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22auth_token%22%2C%22description%22%3A%22Sentry%20Auth%20Token%22%2C%22password%22%3Atrue%7D%5D&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22mcp%2Fsentry%22%5D%2C%22env%22%3A%7B%22SENTRY_AUTH_TOKEN%22%3A%22%24%7Binput%3Aauth_token%7D%22%7D%7D&quality=insiders) 107 | 108 | For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open Settings (JSON)`. 109 | 110 | Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 111 | 112 | > Note that the `mcp` key is needed when using the `mcp.json` file. 113 | 114 |
115 | Using uvx 116 | 117 | ```json 118 | { 119 | "mcp": { 120 | "inputs": [ 121 | { 122 | "type": "promptString", 123 | "id": "auth_token", 124 | "description": "Sentry Auth Token", 125 | "password": true 126 | } 127 | ], 128 | "servers": { 129 | "sentry": { 130 | "command": "uvx", 131 | "args": ["mcp-server-sentry"], 132 | "env": { 133 | "SENTRY_AUTH_TOKEN": "${input:auth_token}" 134 | } 135 | } 136 | } 137 | } 138 | } 139 | ``` 140 |
141 | 142 |
143 | Using docker 144 | 145 | ```json 146 | { 147 | "mcp": { 148 | "inputs": [ 149 | { 150 | "type": "promptString", 151 | "id": "auth_token", 152 | "description": "Sentry Auth Token", 153 | "password": true 154 | } 155 | ], 156 | "servers": { 157 | "sentry": { 158 | "command": "docker", 159 | "args": ["run", "-i", "--rm", "mcp/sentry"], 160 | "env": { 161 | "SENTRY_AUTH_TOKEN": "${input:auth_token}" 162 | } 163 | } 164 | } 165 | } 166 | } 167 | ``` 168 |
169 | 170 | ### Usage with [Zed](https://github.com/zed-industries/zed) 171 | 172 | Add to your Zed settings.json: 173 | 174 |
175 | Using uvx 176 | 177 | ```json 178 | "context_servers": { 179 | "mcp-server-sentry": { 180 | "command": { 181 | "path": "uvx", 182 | "args": ["mcp-server-sentry", "--auth-token", "YOUR_SENTRY_TOKEN"] 183 | } 184 | } 185 | }, 186 | ```
188 | 189 |
190 | Using pip installation 191 | 192 | ```json 193 | "context_servers": { 194 | "mcp-server-sentry": { 195 | "command": "python", 196 | "args": ["-m", "mcp_server_sentry", "--auth-token", "YOUR_SENTRY_TOKEN"] 197 | } 198 | }, 199 | ``` 200 |
201 | 202 | ## Debugging 203 | 204 | You can use the MCP inspector to debug the server. For uvx installations: 205 | 206 | ``` 207 | npx @modelcontextprotocol/inspector uvx mcp-server-sentry --auth-token YOUR_SENTRY_TOKEN 208 | ``` 209 | 210 | Or if you've installed the package in a specific directory or are developing on it: 211 | 212 | ``` 213 | cd path/to/servers/src/sentry 214 | npx @modelcontextprotocol/inspector uv run mcp-server-sentry --auth-token YOUR_SENTRY_TOKEN 215 | ``` 216 | 217 | ## License 218 | 219 | This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository. 220 | -------------------------------------------------------------------------------- /src/sentry/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "mcp-server-sentry" 3 | version = "0.6.2" 4 | description = "MCP server for retrieving issues from sentry.io" 5 | readme = "README.md" 6 | requires-python = ">=3.10" 7 | dependencies = ["mcp>=1.0.0"] 8 | 9 | [build-system] 10 | requires = ["hatchling"] 11 | build-backend = "hatchling.build" 12 | 13 | [tool.uv] 14 | dev-dependencies = ["pyright>=1.1.389", "pytest>=8.3.3", "ruff>=0.8.0"] 15 | 16 | [project.scripts] 17 | mcp-server-sentry = "mcp_server_sentry:main" 18 | -------------------------------------------------------------------------------- /src/sentry/src/mcp_server_sentry/__init__.py: -------------------------------------------------------------------------------- 1 | from . import server 2 | import asyncio 3 | 4 | 5 | def main(): 6 | """Main entry point for the package.""" 7 | asyncio.run(server.main()) 8 | 9 | 10 | # Optionally expose other important items at package level 11 | __all__ = ["main", "server"] 12 | -------------------------------------------------------------------------------- /src/sentry/src/mcp_server_sentry/__main__.py: -------------------------------------------------------------------------------- 1 | from mcp_server_sentry.server import main 2 | 3 | if __name__ == "__main__": 4 | main() 5 | -------------------------------------------------------------------------------- /src/sequentialthinking/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22.12-alpine AS builder 2 | 3 | COPY src/sequentialthinking /app 4 | COPY tsconfig.json /tsconfig.json 5 | 6 | WORKDIR /app 7 | 8 | RUN --mount=type=cache,target=/root/.npm npm install 9 | 10 | RUN --mount=type=cache,target=/root/.npm-production npm ci --ignore-scripts --omit-dev 11 | 12 | FROM node:22-alpine AS release 13 | 14 | COPY --from=builder /app/dist /app/dist 15 | COPY --from=builder /app/package.json /app/package.json 16 | COPY --from=builder /app/package-lock.json /app/package-lock.json 17 | 18 | ENV NODE_ENV=production 19 | 20 | WORKDIR /app 21 | 22 | RUN npm ci --ignore-scripts --omit-dev 23 | 24 | ENTRYPOINT ["node", "dist/index.js"] 25 | -------------------------------------------------------------------------------- /src/sequentialthinking/README.md: -------------------------------------------------------------------------------- 1 | # Sequential Thinking MCP Server 2 | 3 | An MCP server implementation that provides a tool for dynamic and reflective problem-solving through a structured thinking process. 
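Concretely, each call to the `sequential_thinking` tool (documented below) submits one step of the thinking chain. A minimal argument payload looks roughly like the following sketch (the values are illustrative; every parameter is described under Tool further down):

```json
{
  "thought": "Break the problem into sub-problems and list the known constraints.",
  "thoughtNumber": 1,
  "totalThoughts": 5,
  "nextThoughtNeeded": true
}
```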
4 | 5 | ## Features 6 | 7 | - Break down complex problems into manageable steps 8 | - Revise and refine thoughts as understanding deepens 9 | - Branch into alternative paths of reasoning 10 | - Adjust the total number of thoughts dynamically 11 | - Generate and verify solution hypotheses 12 | 13 | ## Tool 14 | 15 | ### sequential_thinking 16 | 17 | Facilitates a detailed, step-by-step thinking process for problem-solving and analysis. 18 | 19 | **Inputs:** 20 | - `thought` (string): The current thinking step 21 | - `nextThoughtNeeded` (boolean): Whether another thought step is needed 22 | - `thoughtNumber` (integer): Current thought number 23 | - `totalThoughts` (integer): Estimated total thoughts needed 24 | - `isRevision` (boolean, optional): Whether this revises previous thinking 25 | - `revisesThought` (integer, optional): Which thought is being reconsidered 26 | - `branchFromThought` (integer, optional): Branching point thought number 27 | - `branchId` (string, optional): Branch identifier 28 | - `needsMoreThoughts` (boolean, optional): If more thoughts are needed 29 | 30 | ## Usage 31 | 32 | The Sequential Thinking tool is designed for: 33 | - Breaking down complex problems into steps 34 | - Planning and design with room for revision 35 | - Analysis that might need course correction 36 | - Problems where the full scope might not be clear initially 37 | - Tasks that need to maintain context over multiple steps 38 | - Situations where irrelevant information needs to be filtered out 39 | 40 | ## Configuration 41 | 42 | ### Usage with Claude Desktop 43 | 44 | Add this to your `claude_desktop_config.json`: 45 | 46 | #### npx 47 | 48 | ```json 49 | { 50 | "mcpServers": { 51 | "sequential-thinking": { 52 | "command": "npx", 53 | "args": [ 54 | "-y", 55 | "@modelcontextprotocol/server-sequential-thinking" 56 | ] 57 | } 58 | } 59 | } 60 | ``` 61 | 62 | #### docker 63 | 64 | ```json 65 | { 66 | "mcpServers": { 67 | "sequentialthinking": { 68 | "command": "docker", 69 | "args": [ 70 | "run", 71 | "--rm", 72 | "-i", 73 | "mcp/sequentialthinking" 74 | ] 75 | } 76 | } 77 | } 78 | ``` 79 | 80 | ### Usage with VS Code 81 | 82 | For quick installation, click one of the installation buttons below... 
83 | 84 | [![Install with NPX in VS Code](https://img.shields.io/badge/VS_Code-NPM-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sequentialthinking&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-sequential-thinking%22%5D%7D) [![Install with NPX in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-NPM-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sequentialthinking&config=%7B%22command%22%3A%22npx%22%2C%22args%22%3A%5B%22-y%22%2C%22%40modelcontextprotocol%2Fserver-sequential-thinking%22%5D%7D&quality=insiders) 85 | 86 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sequentialthinking&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22--rm%22%2C%22-i%22%2C%22mcp%2Fsequentialthinking%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sequentialthinking&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22--rm%22%2C%22-i%22%2C%22mcp%2Fsequentialthinking%22%5D%7D&quality=insiders) 87 | 88 | For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open Settings (JSON)`. 89 | 90 | Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 91 | 92 | > Note that the `mcp` key is not needed in the `.vscode/mcp.json` file. 93 | 94 | For NPX installation: 95 | 96 | ```json 97 | { 98 | "mcp": { 99 | "servers": { 100 | "sequential-thinking": { 101 | "command": "npx", 102 | "args": [ 103 | "-y", 104 | "@modelcontextprotocol/server-sequential-thinking" 105 | ] 106 | } 107 | } 108 | } 109 | } 110 | ``` 111 | 112 | For Docker installation: 113 | 114 | ```json 115 | { 116 | "mcp": { 117 | "servers": { 118 | "sequential-thinking": { 119 | "command": "docker", 120 | "args": [ 121 | "run", 122 | "--rm", 123 | "-i", 124 | "mcp/sequentialthinking" 125 | ] 126 | } 127 | } 128 | } 129 | } 130 | ``` 131 | 132 | ## Building 133 | 134 | Docker: 135 | 136 | ```bash 137 | docker build -t mcp/sequentialthinking -f src/sequentialthinking/Dockerfile . 138 | ``` 139 | 140 | ## License 141 | 142 | This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository. 
143 | -------------------------------------------------------------------------------- /src/sequentialthinking/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/server-sequential-thinking", 3 | "version": "0.6.2", 4 | "description": "MCP server for sequential thinking and problem solving", 5 | "license": "MIT", 6 | "author": "Anthropic, PBC (https://anthropic.com)", 7 | "homepage": "https://modelcontextprotocol.io", 8 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 9 | "type": "module", 10 | "bin": { 11 | "mcp-server-sequential-thinking": "dist/index.js" 12 | }, 13 | "files": [ 14 | "dist" 15 | ], 16 | "scripts": { 17 | "build": "tsc && shx chmod +x dist/*.js", 18 | "prepare": "npm run build", 19 | "watch": "tsc --watch" 20 | }, 21 | "dependencies": { 22 | "@modelcontextprotocol/sdk": "0.5.0", 23 | "chalk": "^5.3.0", 24 | "yargs": "^17.7.2" 25 | }, 26 | "devDependencies": { 27 | "@types/node": "^22", 28 | "@types/yargs": "^17.0.32", 29 | "shx": "^0.3.4", 30 | "typescript": "^5.3.3" 31 | } 32 | } -------------------------------------------------------------------------------- /src/sequentialthinking/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "rootDir": ".", 6 | "moduleResolution": "NodeNext", 7 | "module": "NodeNext" 8 | }, 9 | "include": ["./**/*.ts"] 10 | } 11 | -------------------------------------------------------------------------------- /src/slack/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:22.12-alpine AS builder 2 | 3 | # Must be entire project because `prepare` script is run during `npm install` and requires all files. 
4 | COPY src/slack /app 5 | COPY tsconfig.json /tsconfig.json 6 | 7 | WORKDIR /app 8 | 9 | RUN --mount=type=cache,target=/root/.npm npm install 10 | 11 | RUN --mount=type=cache,target=/root/.npm-production npm ci --ignore-scripts --omit-dev 12 | 13 | FROM node:22-alpine AS release 14 | 15 | COPY --from=builder /app/dist /app/dist 16 | COPY --from=builder /app/package.json /app/package.json 17 | COPY --from=builder /app/package-lock.json /app/package-lock.json 18 | 19 | ENV NODE_ENV=production 20 | 21 | WORKDIR /app 22 | 23 | RUN npm ci --ignore-scripts --omit-dev 24 | 25 | ENTRYPOINT ["node", "dist/index.js"] 26 | -------------------------------------------------------------------------------- /src/slack/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@modelcontextprotocol/server-slack", 3 | "version": "0.6.2", 4 | "description": "MCP server for interacting with Slack", 5 | "license": "MIT", 6 | "author": "Anthropic, PBC (https://anthropic.com)", 7 | "homepage": "https://modelcontextprotocol.io", 8 | "bugs": "https://github.com/modelcontextprotocol/servers/issues", 9 | "type": "module", 10 | "bin": { 11 | "mcp-server-slack": "dist/index.js" 12 | }, 13 | "files": [ 14 | "dist" 15 | ], 16 | "scripts": { 17 | "build": "tsc && shx chmod +x dist/*.js", 18 | "prepare": "npm run build", 19 | "watch": "tsc --watch" 20 | }, 21 | "dependencies": { 22 | "@modelcontextprotocol/sdk": "1.0.1" 23 | }, 24 | "devDependencies": { 25 | "@types/node": "^22", 26 | "shx": "^0.3.4", 27 | "typescript": "^5.6.2" 28 | } 29 | } -------------------------------------------------------------------------------- /src/slack/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../../tsconfig.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "rootDir": "." 6 | }, 7 | "include": [ 8 | "./**/*.ts" 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /src/sqlite/.python-version: -------------------------------------------------------------------------------- 1 | 3.10 2 | -------------------------------------------------------------------------------- /src/sqlite/Dockerfile: -------------------------------------------------------------------------------- 1 | # Use a Python image with uv pre-installed 2 | FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv 3 | 4 | # Install the project into `/app` 5 | WORKDIR /app 6 | 7 | # Enable bytecode compilation 8 | ENV UV_COMPILE_BYTECODE=1 9 | 10 | # Copy from the cache instead of linking since it's a mounted volume 11 | ENV UV_LINK_MODE=copy 12 | 13 | # Install the project's dependencies using the lockfile and settings 14 | RUN --mount=type=cache,target=/root/.cache/uv \ 15 | --mount=type=bind,source=uv.lock,target=uv.lock \ 16 | --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ 17 | uv sync --frozen --no-install-project --no-dev --no-editable 18 | 19 | # Then, add the rest of the project source code and install it 20 | # Installing separately from its dependencies allows optimal layer caching 21 | ADD . 
/app 22 | RUN --mount=type=cache,target=/root/.cache/uv \ 23 | uv sync --frozen --no-dev --no-editable 24 | 25 | FROM python:3.12-slim-bookworm 26 | 27 | WORKDIR /app 28 | 29 | COPY --from=uv /root/.local /root/.local 30 | COPY --from=uv --chown=app:app /app/.venv /app/.venv 31 | 32 | # Place executables in the environment at the front of the path 33 | ENV PATH="/app/.venv/bin:$PATH" 34 | 35 | # when running the container, add --db-path and a bind mount to the host's db file 36 | ENTRYPOINT ["mcp-server-sqlite"] 37 | 38 | -------------------------------------------------------------------------------- /src/sqlite/README.md: -------------------------------------------------------------------------------- 1 | # SQLite MCP Server 2 | 3 | ## Overview 4 | A Model Context Protocol (MCP) server implementation that provides database interaction and business intelligence capabilities through SQLite. This server enables running SQL queries, analyzing business data, and automatically generating business insight memos. 5 | 6 | ## Components 7 | 8 | ### Resources 9 | The server exposes a single dynamic resource: 10 | - `memo://insights`: A continuously updated business insights memo that aggregates discovered insights during analysis 11 | - Auto-updates as new insights are discovered via the append-insight tool 12 | 13 | ### Prompts 14 | The server provides a demonstration prompt: 15 | - `mcp-demo`: Interactive prompt that guides users through database operations 16 | - Required argument: `topic` - The business domain to analyze 17 | - Generates appropriate database schemas and sample data 18 | - Guides users through analysis and insight generation 19 | - Integrates with the business insights memo 20 | 21 | ### Tools 22 | The server offers six core tools: 23 | 24 | #### Query Tools 25 | - `read_query` 26 | - Execute SELECT queries to read data from the database 27 | - Input: 28 | - `query` (string): The SELECT SQL query to execute 29 | - Returns: Query results as array of objects 30 | 31 | - `write_query` 32 | - Execute INSERT, UPDATE, or DELETE queries 33 | - Input: 34 | - `query` (string): The SQL modification query 35 | - Returns: `{ affected_rows: number }` 36 | 37 | - `create_table` 38 | - Create new tables in the database 39 | - Input: 40 | - `query` (string): CREATE TABLE SQL statement 41 | - Returns: Confirmation of table creation 42 | 43 | #### Schema Tools 44 | - `list_tables` 45 | - Get a list of all tables in the database 46 | - No input required 47 | - Returns: Array of table names 48 | 49 | - `describe-table` 50 | - View schema information for a specific table 51 | - Input: 52 | - `table_name` (string): Name of table to describe 53 | - Returns: Array of column definitions with names and types 54 | 55 | #### Analysis Tools 56 | - `append_insight` 57 | - Add new business insights to the memo resource 58 | - Input: 59 | - `insight` (string): Business insight discovered from data analysis 60 | - Returns: Confirmation of insight addition 61 | - Triggers update of memo://insights resource 62 | 63 | 64 | ## Usage with Claude Desktop 65 | 66 | ### uv 67 | 68 | ```bash 69 | # Add the server to your claude_desktop_config.json 70 | "mcpServers": { 71 | "sqlite": { 72 | "command": "uv", 73 | "args": [ 74 | "--directory", 75 | "parent_of_servers_repo/servers/src/sqlite", 76 | "run", 77 | "mcp-server-sqlite", 78 | "--db-path", 79 | "~/test.db" 80 | ] 81 | } 82 | } 83 | ``` 84 | 85 | ### Docker 86 | 87 | ```json 88 | # Add the server to your claude_desktop_config.json 89 | "mcpServers": { 90 | "sqlite": 
{ 91 | "command": "docker", 92 | "args": [ 93 | "run", 94 | "--rm", 95 | "-i", 96 | "-v", 97 | "mcp-test:/mcp", 98 | "mcp/sqlite", 99 | "--db-path", 100 | "/mcp/test.db" 101 | ] 102 | } 103 | } 104 | ``` 105 | 106 | ## Usage with VS Code 107 | 108 | For quick installation, click the installation buttons below: 109 | 110 | [![Install with UV in VS Code](https://img.shields.io/badge/VS_Code-UV-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sqlite&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22db_path%22%2C%22description%22%3A%22SQLite%20Database%20Path%22%2C%22default%22%3A%22%24%7BworkspaceFolder%7D%2Fdb.sqlite%22%7D%5D&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-sqlite%22%2C%22--db-path%22%2C%22%24%7Binput%3Adb_path%7D%22%5D%7D) [![Install with UV in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-UV-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sqlite&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22db_path%22%2C%22description%22%3A%22SQLite%20Database%20Path%22%2C%22default%22%3A%22%24%7BworkspaceFolder%7D%2Fdb.sqlite%22%7D%5D&config=%7B%22command%22%3A%22uvx%22%2C%22args%22%3A%5B%22mcp-server-sqlite%22%2C%22--db-path%22%2C%22%24%7Binput%3Adb_path%7D%22%5D%7D&quality=insiders) 111 | 112 | [![Install with Docker in VS Code](https://img.shields.io/badge/VS_Code-Docker-0098FF?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sqlite&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22db_path%22%2C%22description%22%3A%22SQLite%20Database%20Path%20(within%20container)%22%2C%22default%22%3A%22%2Fmcp%2Fdb.sqlite%22%7D%5D&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22-v%22%2C%22mcp-sqlite%3A%2Fmcp%22%2C%22mcp%2Fsqlite%22%2C%22--db-path%22%2C%22%24%7Binput%3Adb_path%7D%22%5D%7D) [![Install with Docker in VS Code Insiders](https://img.shields.io/badge/VS_Code_Insiders-Docker-24bfa5?style=flat-square&logo=visualstudiocode&logoColor=white)](https://insiders.vscode.dev/redirect/mcp/install?name=sqlite&inputs=%5B%7B%22type%22%3A%22promptString%22%2C%22id%22%3A%22db_path%22%2C%22description%22%3A%22SQLite%20Database%20Path%20(within%20container)%22%2C%22default%22%3A%22%2Fmcp%2Fdb.sqlite%22%7D%5D&config=%7B%22command%22%3A%22docker%22%2C%22args%22%3A%5B%22run%22%2C%22-i%22%2C%22--rm%22%2C%22-v%22%2C%22mcp-sqlite%3A%2Fmcp%22%2C%22mcp%2Fsqlite%22%2C%22--db-path%22%2C%22%24%7Binput%3Adb_path%7D%22%5D%7D&quality=insiders) 113 | 114 | For manual installation, add the following JSON block to your User Settings (JSON) file in VS Code. You can do this by pressing `Ctrl + Shift + P` and typing `Preferences: Open Settings (JSON)`. 115 | 116 | Optionally, you can add it to a file called `.vscode/mcp.json` in your workspace. This will allow you to share the configuration with others. 117 | 118 | > Note that the `mcp` key is needed when using the `mcp.json` file. 
119 | 120 | ### uv 121 | 122 | ```json 123 | { 124 | "mcp": { 125 | "inputs": [ 126 | { 127 | "type": "promptString", 128 | "id": "db_path", 129 | "description": "SQLite Database Path", 130 | "default": "${workspaceFolder}/db.sqlite" 131 | } 132 | ], 133 | "servers": { 134 | "sqlite": { 135 | "command": "uvx", 136 | "args": [ 137 | "mcp-server-sqlite", 138 | "--db-path", 139 | "${input:db_path}" 140 | ] 141 | } 142 | } 143 | } 144 | } 145 | ``` 146 | 147 | ### Docker 148 | 149 | ```json 150 | { 151 | "mcp": { 152 | "inputs": [ 153 | { 154 | "type": "promptString", 155 | "id": "db_path", 156 | "description": "SQLite Database Path (within container)", 157 | "default": "/mcp/db.sqlite" 158 | } 159 | ], 160 | "servers": { 161 | "sqlite": { 162 | "command": "docker", 163 | "args": [ 164 | "run", 165 | "-i", 166 | "--rm", 167 | "-v", 168 | "mcp-sqlite:/mcp", 169 | "mcp/sqlite", 170 | "--db-path", 171 | "${input:db_path}" 172 | ] 173 | } 174 | } 175 | } 176 | } 177 | ``` 178 | 179 | ## Building 180 | 181 | Docker: 182 | 183 | ```bash 184 | docker build -t mcp/sqlite . 185 | ``` 186 | 187 | ## Test with MCP inspector 188 | 189 | ```bash 190 | uv add "mcp[cli]" 191 | mcp dev src/mcp_server_sqlite/server.py:wrapper 192 | ``` 193 | 194 | ## License 195 | 196 | This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository. 197 | -------------------------------------------------------------------------------- /src/sqlite/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "mcp-server-sqlite" 3 | version = "0.6.2" 4 | description = "A simple SQLite MCP server" 5 | readme = "README.md" 6 | requires-python = ">=3.10" 7 | dependencies = [ 8 | "mcp[cli]>=1.6.0", 9 | ] 10 | 11 | [build-system] 12 | requires = ["hatchling"] 13 | build-backend = "hatchling.build" 14 | 15 | [tool.uv] 16 | dev-dependencies = ["pyright>=1.1.389"] 17 | 18 | [project.scripts] 19 | mcp-server-sqlite = "mcp_server_sqlite:main" 20 | -------------------------------------------------------------------------------- /src/sqlite/src/mcp_server_sqlite/__init__.py: -------------------------------------------------------------------------------- 1 | from . 
import server 2 | import asyncio 3 | import argparse 4 | 5 | 6 | def main(): 7 | """Main entry point for the package.""" 8 | parser = argparse.ArgumentParser(description='SQLite MCP Server') 9 | parser.add_argument('--db-path', 10 | default="./sqlite_mcp_server.db", 11 | help='Path to SQLite database file') 12 | 13 | args = parser.parse_args() 14 | asyncio.run(server.main(args.db_path)) 15 | 16 | 17 | # Optionally expose other important items at package level 18 | __all__ = ["main", "server"] 19 | -------------------------------------------------------------------------------- /src/sqlite/test.db: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/modelcontextprotocol/servers/a0c7a31b0cbfb526ec02dfca73458eedccc38ec3/src/sqlite/test.db -------------------------------------------------------------------------------- /src/time/.python-version: -------------------------------------------------------------------------------- 1 | 3.10 2 | -------------------------------------------------------------------------------- /src/time/Dockerfile: -------------------------------------------------------------------------------- 1 | # Use a Python image with uv pre-installed 2 | FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv 3 | 4 | # Install the project into `/app` 5 | WORKDIR /app 6 | 7 | # Enable bytecode compilation 8 | ENV UV_COMPILE_BYTECODE=1 9 | 10 | # Copy from the cache instead of linking since it's a mounted volume 11 | ENV UV_LINK_MODE=copy 12 | 13 | # Install the project's dependencies using the lockfile and settings 14 | RUN --mount=type=cache,target=/root/.cache/uv \ 15 | --mount=type=bind,source=uv.lock,target=uv.lock \ 16 | --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ 17 | uv sync --frozen --no-install-project --no-dev --no-editable 18 | 19 | # Then, add the rest of the project source code and install it 20 | # Installing separately from its dependencies allows optimal layer caching 21 | ADD . 
/app 22 | RUN --mount=type=cache,target=/root/.cache/uv \ 23 | uv sync --frozen --no-dev --no-editable 24 | 25 | FROM python:3.12-slim-bookworm 26 | 27 | WORKDIR /app 28 | 29 | COPY --from=uv /root/.local /root/.local 30 | COPY --from=uv --chown=app:app /app/.venv /app/.venv 31 | 32 | # Place executables in the environment at the front of the path 33 | ENV PATH="/app/.venv/bin:$PATH" 34 | 35 | # when running the container, add --db-path and a bind mount to the host's db file 36 | ENTRYPOINT ["mcp-server-time"] 37 | -------------------------------------------------------------------------------- /src/time/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "mcp-server-time" 3 | version = "0.6.2" 4 | description = "A Model Context Protocol server providing tools for time queries and timezone conversions for LLMs" 5 | readme = "README.md" 6 | requires-python = ">=3.10" 7 | authors = [ 8 | { name = "Mariusz 'maledorak' Korzekwa", email = "mariusz@korzekwa.dev" }, 9 | ] 10 | keywords = ["time", "timezone", "mcp", "llm"] 11 | license = { text = "MIT" } 12 | classifiers = [ 13 | "Development Status :: 4 - Beta", 14 | "Intended Audience :: Developers", 15 | "License :: OSI Approved :: MIT License", 16 | "Programming Language :: Python :: 3", 17 | "Programming Language :: Python :: 3.10", 18 | ] 19 | dependencies = [ 20 | "mcp>=1.0.0", 21 | "pydantic>=2.0.0", 22 | "tzdata>=2024.2", 23 | ] 24 | 25 | [project.scripts] 26 | mcp-server-time = "mcp_server_time:main" 27 | 28 | [build-system] 29 | requires = ["hatchling"] 30 | build-backend = "hatchling.build" 31 | 32 | [tool.uv] 33 | dev-dependencies = [ 34 | "freezegun>=1.5.1", 35 | "pyright>=1.1.389", 36 | "pytest>=8.3.3", 37 | "ruff>=0.8.1", 38 | ] 39 | -------------------------------------------------------------------------------- /src/time/src/mcp_server_time/__init__.py: -------------------------------------------------------------------------------- 1 | from .server import serve 2 | 3 | 4 | def main(): 5 | """MCP Time Server - Time and timezone conversion functionality for MCP""" 6 | import argparse 7 | import asyncio 8 | 9 | parser = argparse.ArgumentParser( 10 | description="give a model the ability to handle time queries and timezone conversions" 11 | ) 12 | parser.add_argument("--local-timezone", type=str, help="Override local timezone") 13 | 14 | args = parser.parse_args() 15 | asyncio.run(serve(args.local_timezone)) 16 | 17 | 18 | if __name__ == "__main__": 19 | main() 20 | -------------------------------------------------------------------------------- /src/time/src/mcp_server_time/__main__.py: -------------------------------------------------------------------------------- 1 | from mcp_server_time import main 2 | 3 | main() 4 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2022", 4 | "module": "Node16", 5 | "moduleResolution": "Node16", 6 | "strict": true, 7 | "esModuleInterop": true, 8 | "skipLibCheck": true, 9 | "forceConsistentCasingInFileNames": true, 10 | "resolveJsonModule": true 11 | }, 12 | "include": ["src/**/*"], 13 | "exclude": ["node_modules"] 14 | } 15 | --------------------------------------------------------------------------------