├── .cursor ├── base.mdc ├── mcp.json └── test.mdc ├── .gitignore ├── .repomixignore ├── .vscode └── settings.json ├── CHANGELOG.md ├── CONTRIBUTING.md ├── Dockerfile ├── LICENSE ├── README.md ├── bin └── cli.mjs ├── build.config.ts ├── docs └── unbuild.md ├── eslint.config.js ├── logo.svg ├── package.json ├── pnpm-lock.yaml ├── public ├── banner.png ├── deepwiki.jpg ├── deepwiki_logo.png ├── inspect.jpg ├── mcp-sse-starter.jpg ├── starter2.jpg ├── stdio-mcp-starter.jpg └── streamable2.jpg ├── repomix.config.json ├── scripts └── release.ts ├── src ├── converter │ └── htmlToMarkdown.ts ├── index.ts ├── lib │ ├── httpCrawler.ts │ ├── linkRewrite.ts │ └── sanitizeSchema.ts ├── schemas │ └── deepwiki.ts ├── server.ts ├── tools │ ├── deepwiki.ts │ └── deepwikiSearch.ts ├── types.ts ├── utils.ts └── utils │ ├── extractKeyword.ts │ └── resolveRepoFetch.ts ├── tests ├── McpClient.ts ├── client.test.ts ├── crawler.test.ts └── server.test.ts └── vite.config.ts /.cursor/base.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: 3 | globs: 4 | alwaysApply: true 5 | --- 6 | 7 | - Always use "pnpm" (never use any other package manager like npm, bun, deno) 8 | - Always use typescript. 9 | - Tools are always added and build inside `src/tools/*.ts` folder. 10 | - Don't add too much comments. 
only on complex functions -------------------------------------------------------------------------------- /.cursor/mcp.json: -------------------------------------------------------------------------------- 1 | { 2 | "mcpServers": { 3 | "mcp-deepwiki": { 4 | "command": "node", 5 | "args": ["./bin/cli.mjs"] 6 | }, 7 | "mcp-deepwiki-live": { 8 | "command": "npx", 9 | "args": ["-y", "mcp-deepwiki@0.0.8"] 10 | } 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /.cursor/test.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: 3 | globs: tests/** 4 | alwaysApply: false 5 | --- 6 | - Always use vitest (never use jest or any other testing framework) 7 | - Always pin **Vitest ≥ 3.1** (or `latest`) in `devDependencies` so `vi.stubGlobal`, `unstubGlobals`, and other modern helpers exist. 8 | - Use `environment: 'node'` and **Node 22+** so the built-in `fetch` and `Headers` classes are available without polyfills. 9 | - Trim or normalise strings before comparing multi-line Markdown/HTML: `expect(result.trim())` rather than raw literals with trailing spaces. 10 | - Always place async fetch stubs **inside** each test (or `beforeEach`) so they do not bleed unexpected data into other cases. 
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Dependency directories 2 | node_modules/ 3 | 4 | # TypeScript output 5 | dist/ 6 | 7 | # Environment variables 8 | .env 9 | .env.local 10 | 11 | # Debug logs 12 | npm-debug.log* 13 | yarn-debug.log* 14 | yarn-error.log* 15 | 16 | # Editor directories and files 17 | .idea/ 18 | *.swp 19 | *.swo 20 | 21 | # OS specific 22 | .DS_Store 23 | Thumbs.db 24 | .hidden 25 | llms_*.txt 26 | 27 | .hidden 28 | repomix-output.xml -------------------------------------------------------------------------------- /.repomixignore: -------------------------------------------------------------------------------- 1 | # Add patterns to ignore here, one per line 2 | # Example: 3 | # *.log 4 | # tmp/ 5 | public 6 | prompts 7 | CHANGELOG.md 8 | CONTRIBUTING.md 9 | README.md 10 | LICENSE 11 | scripts 12 | .vscode 13 | docs 14 | DOCKERFILE 15 | tests/server.test.ts 16 | 17 | # MAYBE REMOVE THIS LATER ON 18 | .cursor -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "eslint.experimental.useFlatConfig": true, 3 | "editor.codeActionsOnSave": { 4 | "source.fixAll.eslint": "explicit" 5 | }, 6 | "editor.formatOnSave": true, // format after eslint fixes 7 | "eslint.validate": [ // add more languages if you need 8 | "javascript", 9 | "javascriptreact", 10 | "typescript", 11 | "typescriptreact" 12 | ] 13 | } 14 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | All notable changes to this project will be documented in this file. 
4 | 5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 6 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 7 | 8 | ## [Unreleased] 9 | 10 | ## [0.0.9] - 2025-05-12 11 | 12 | ### Changed 13 | - Updated domain again and changed back to `deepwiki.com` (by @KerneggerTim). 14 | 15 | ## [0.0.8] - 2025-05-09 16 | 17 | ### Changed 18 | - Updated to support `deepwiki.org` instead of `deepwiki.com` (by @KerneggerTim). 19 | 20 | ### Fixed 21 | - Preserve owner/repo format during URL normalization in `deepwiki_fetch` (related to `src/tools/deepwiki.ts`) (by @darinkishore). 22 | 23 | ## [0.0.7] - YYYY-MM-DD 24 | 25 | ### Added 26 | - **NLP-powered keyword extraction:** (Already documented in 0.0.6, but linked to `wink-nlp` dependency addition in this version) 27 | - **Automatic GitHub repo resolution:** (Already documented in 0.0.6, but relevant to README changes) 28 | 29 | ### Changed 30 | - Updated `README.md` to reflect that the `url` parameter in `deepwiki_fetch` now accepts single library keywords. 31 | - Added "Future Work" section to `README.md`. 32 | 33 | ### Dependencies 34 | - Added `wink-nlp` -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to Deepwiki-to-Markdown MCP Server 2 | 3 | Thank you for considering contributing to the Deepwiki-to-Markdown MCP Server! This document provides guidelines and instructions for contributing. 4 | 5 | ## Code of Conduct 6 | 7 | Please be respectful and considerate of others when contributing to this project. We aim to foster an inclusive and welcoming community. 
8 | 9 | ## How to Contribute 10 | 11 | ### Reporting Bugs 12 | 13 | If you find a bug, please open an issue with the following information: 14 | 15 | - A clear, descriptive title 16 | - Steps to reproduce the issue 17 | - Expected behavior 18 | - Actual behavior 19 | - Any relevant logs or error messages 20 | - Environment details (OS, Node.js version, etc.) 21 | 22 | ### Suggesting Enhancements 23 | 24 | For feature requests or enhancements: 25 | 26 | - Use a clear, descriptive title 27 | - Provide a detailed description of the proposed functionality 28 | - Explain why this enhancement would be useful 29 | - Consider including mockups or examples if applicable 30 | 31 | ### Pull Requests 32 | 33 | 1. Fork the repository 34 | 2. Create a new branch from `main` (`git checkout -b feature/your-feature-name`) 35 | 3. Make your changes 36 | 4. Run tests to ensure they pass (`npm test`) 37 | 5. Run linting to ensure code quality (`npm run lint`) 38 | 6. Update documentation as needed 39 | 7. Commit your changes with a clear message 40 | 8. Push to your fork 41 | 9. Submit a pull request to the `main` branch 42 | 43 | ### Pull Request Guidelines 44 | 45 | - Follow the coding style of the project 46 | - Include tests for new features 47 | - Update the README.md with details of changes if applicable 48 | - Update the CHANGELOG.md following the existing format 49 | - The pull request should work on the latest Node.js LTS version 50 | 51 | ## Development Workflow 52 | 53 | 1. Clone the repository: `git clone https://github.com/regenrek/mcp-deepwiki.git` 54 | 2. Install dependencies: `npm install` 55 | 3. Run in development mode: `npm run dev-stdio` (or `dev-http`/`dev-sse`) 56 | 4. Make your changes 57 | 5. Run tests: `npm test` 58 | 6. 
Run linting: `npm run lint` 59 | 60 | ## Project Structure 61 | 62 | ``` 63 | src/ 64 | ├── converter/ # HTML to Markdown conversion (htmlToMarkdown.ts) 65 | ├── lib/ # Crawling, link rewriting, sanitize schema 66 | ├── schemas/ # Zod schemas (deepwiki.ts) 67 | ├── tools/ # MCP tool definitions 68 | │   ├── deepwiki.ts # Deepwiki fetch tool 69 | │   └── deepwikiSearch.ts # Deepwiki search tool 70 | ├── utils/ # Keyword extraction, repo resolution 71 | ├── index.ts # Main entry point 72 | ├── server.ts # MCP server setup 73 | ├── types.ts # Core type definitions 74 | └── utils.ts # Utility functions 75 | tests/ # Vitest test suites 76 | ``` 77 | 78 | ## Testing 79 | 80 | Please ensure all tests pass before submitting a pull request: 81 | 82 | ```bash 83 | npm test 84 | ``` 85 | 86 | Write new tests for new features or bug fixes. We use Vitest for testing. 87 | 88 | ## Linting 89 | 90 | We use ESLint to maintain code quality: 91 | 92 | ```bash 93 | npm run lint 94 | ``` 95 | 96 | ## Documentation 97 | 98 | Please update the documentation when necessary: 99 | 100 | - README.md for user-facing changes 101 | - CHANGELOG.md for release notes 102 | - Code comments for complex logic 103 | 104 | ## Release Process 105 | 106 | 1. Update version in package.json 107 | 2. Update CHANGELOG.md 108 | 3. Commit changes 109 | 4. Create a tag for the release 110 | 5. Push to GitHub 111 | 6. Publish to npm 112 | 113 | ## Questions? 114 | 115 | If you have any questions, please open an issue or reach out to the maintainers. 116 | 117 | Thank you for contributing to the Deepwiki-to-Markdown MCP Server! 
118 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # Use pnpm official image for better caching and consistency 2 | FROM node:22.12-alpine AS builder 3 | 4 | # Install pnpm 5 | RUN npm install -g pnpm@9.14.4 6 | 7 | WORKDIR /app 8 | 9 | # Copy package manifests and install dependencies (including dev for build) 10 | COPY package.json pnpm-lock.yaml ./ 11 | RUN --mount=type=cache,target=/root/.local/share/pnpm/store pnpm install --frozen-lockfile 12 | 13 | # Copy the rest of the application code 14 | COPY . . 15 | 16 | # Build the application (creates the dist folder) 17 | RUN pnpm build 18 | 19 | # Remove dev dependencies after build 20 | RUN pnpm prune --prod 21 | 22 | # --- Release Stage --- 23 | FROM node:22-alpine AS release 24 | 25 | WORKDIR /app 26 | 27 | ENV NODE_ENV=production 28 | 29 | # Copy necessary artifacts from the builder stage 30 | COPY --from=builder /app/node_modules ./node_modules 31 | COPY --from=builder /app/dist ./dist 32 | COPY --from=builder /app/bin ./bin 33 | COPY --from=builder /app/package.json ./package.json 34 | 35 | # Set the entrypoint to your CLI script 36 | ENTRYPOINT ["node", "bin/cli.mjs"] -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) Kevin Kern 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 
11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Deepwiki MCP Server 2 | 3 | This is an **unofficial Deepwiki MCP Server** 4 | 5 | It takes a Deepwiki URL via MCP, crawls all relevant pages, converts them to Markdown, and returns either one document or a list by page. 6 | 7 | ## Features 8 | 9 | - 🔒 **Domain Safety**: Only processes URLs from deepwiki.com 10 | - 🧹 **HTML Sanitization**: Strips headers, footers, navigation, scripts, and ads 11 | - 🔗 **Link Rewriting**: Adjusts links to work in Markdown 12 | - 📄 **Multiple Output Formats**: Get one document or structured pages 13 | - 🚀 **Performance**: Fast crawling with adjustable concurrency and depth 14 | - **NLP**: It's to search just for the library name 15 | 16 | ## Usage 17 | 18 | Prompts you can use: 19 | 20 | ``` 21 | deepwiki fetch how can i use gpt-image-1 with "vercel ai" sdk 22 | ``` 23 | 24 | ``` 25 | deepwiki fetch how can i create new blocks in shadcn? 
26 | ``` 27 | 28 | ``` 29 | deepwiki fetch i want to understand how X works 30 | ``` 31 | 32 | Fetch complete Documentation (Default) 33 | ``` 34 | use deepwiki https://deepwiki.com/shadcn-ui/ui 35 | use deepwiki multiple pages https://deepwiki.com/shadcn-ui/ui 36 | ``` 37 | 38 | Single Page 39 | ``` 40 | use deepwiki fetch single page https://deepwiki.com/tailwindlabs/tailwindcss/2.2-theme-system 41 | ``` 42 | 43 | Get by shortform 44 | ``` 45 | use deepwiki fetch tailwindlabs/tailwindcss 46 | ``` 47 | 48 | ``` 49 | deepwiki fetch library 50 | 51 | deepwiki fetch url 52 | deepwiki fetch / 53 | 54 | deepwiki multiple pages ... 55 | deepwiki single page url ... 56 | ``` 57 | 58 | ## Cursor 59 | 60 | Add this to `.cursor/mcp.json` file. 61 | 62 | ``` 63 | { 64 | "mcpServers": { 65 | "mcp-deepwiki": { 66 | "command": "npx", 67 | "args": ["-y", "mcp-deepwiki@latest"] 68 | } 69 | } 70 | } 71 | ``` 72 | 73 | ![Deepwiki Logo](public/deepwiki.jpg) 74 | 75 | ### MCP Tool Integration 76 | 77 | The package registers a tool named `deepwiki_fetch` that you can use with any MCP-compatible client: 78 | 79 | ```json 80 | { 81 | "action": "deepwiki_fetch", 82 | "params": { 83 | "url": "https://deepwiki.com/user/repo", 84 | "mode": "aggregate", 85 | "maxDepth": "1" 86 | } 87 | } 88 | ``` 89 | 90 | #### Parameters 91 | 92 | - `url` (required): The starting URL of the Deepwiki repository 93 | - `mode` (optional): Output mode, either "aggregate" for a single Markdown document (default) or "pages" for structured page data 94 | - `maxDepth` (optional): Maximum depth of pages to crawl (default: 10) 95 | 96 | ### Response Format 97 | 98 | #### Success Response (Aggregate Mode) 99 | 100 | ```json 101 | { 102 | "status": "ok", 103 | "data": "# Page Title\n\nPage content...\n\n---\n\n# Another Page\n\nMore content...", 104 | "totalPages": 5, 105 | "totalBytes": 25000, 106 | "elapsedMs": 1200 107 | } 108 | ``` 109 | 110 | #### Success Response (Pages Mode) 111 | 112 | ```json 113 | { 114 | 
"status": "ok", 115 | "data": [ 116 | { 117 | "path": "index", 118 | "markdown": "# Home Page\n\nWelcome to the repository." 119 | }, 120 | { 121 | "path": "section/page1", 122 | "markdown": "# First Page\n\nThis is the first page content." 123 | } 124 | ], 125 | "totalPages": 2, 126 | "totalBytes": 12000, 127 | "elapsedMs": 800 128 | } 129 | ``` 130 | 131 | #### Error Response 132 | 133 | ```json 134 | { 135 | "status": "error", 136 | "code": "DOMAIN_NOT_ALLOWED", 137 | "message": "Only deepwiki.com domains are allowed" 138 | } 139 | ``` 140 | 141 | #### Partial Success Response 142 | 143 | ```json 144 | { 145 | "status": "partial", 146 | "data": "# Page Title\n\nPage content...", 147 | "errors": [ 148 | { 149 | "url": "https://deepwiki.com/user/repo/page2", 150 | "reason": "HTTP error: 404" 151 | } 152 | ], 153 | "totalPages": 1, 154 | "totalBytes": 5000, 155 | "elapsedMs": 950 156 | } 157 | ``` 158 | 159 | ### Progress Events 160 | 161 | When using the tool, you'll receive progress events during crawling: 162 | 163 | ``` 164 | Fetched https://deepwiki.com/user/repo: 12500 bytes in 450ms (status: 200) 165 | Fetched https://deepwiki.com/user/repo/page1: 8750 bytes in 320ms (status: 200) 166 | Fetched https://deepwiki.com/user/repo/page2: 6200 bytes in 280ms (status: 200) 167 | ``` 168 | 169 | ## Local Development - Installation 170 | 171 | ### Local Usage 172 | 173 | ``` 174 | { 175 | "mcpServers": { 176 | "mcp-deepwiki": { 177 | "command": "node", 178 | "args": ["./bin/cli.mjs"] 179 | } 180 | } 181 | } 182 | ``` 183 | 184 | ### From Source 185 | 186 | ```bash 187 | # Clone the repository 188 | git clone https://github.com/regenrek/deepwiki-mcp.git 189 | cd deepwiki-mcp 190 | 191 | # Install dependencies 192 | npm install 193 | 194 | # Build the package 195 | npm run build 196 | ``` 197 | 198 | #### Direct API Calls 199 | 200 | For HTTP transport, you can make direct API calls: 201 | 202 | ```bash 203 | curl -X POST http://localhost:3000/mcp \ 204 | -H 
"Content-Type: application/json" \ 205 | -d '{ 206 | "id": "req-1", 207 | "action": "deepwiki_fetch", 208 | "params": { 209 | "url": "https://deepwiki.com/user/repo", 210 | "mode": "aggregate" 211 | } 212 | }' 213 | ``` 214 | 215 | ## Configuration 216 | 217 | ### Environment Variables 218 | 219 | - `DEEPWIKI_MAX_CONCURRENCY`: Maximum concurrent requests (default: 5) 220 | - `DEEPWIKI_REQUEST_TIMEOUT`: Request timeout in milliseconds (default: 30000) 221 | - `DEEPWIKI_MAX_RETRIES`: Maximum retry attempts for failed requests (default: 3) 222 | - `DEEPWIKI_RETRY_DELAY`: Base delay for retry backoff in milliseconds (default: 250) 223 | 224 | To configure these, create a `.env` file in the project root: 225 | 226 | ``` 227 | DEEPWIKI_MAX_CONCURRENCY=10 228 | DEEPWIKI_REQUEST_TIMEOUT=60000 229 | DEEPWIKI_MAX_RETRIES=5 230 | DEEPWIKI_RETRY_DELAY=500 231 | ``` 232 | 233 | ## Docker Deployment (Untested) 234 | 235 | Build and run the Docker image: 236 | 237 | ```bash 238 | # Build the image 239 | docker build -t mcp-deepwiki . 240 | 241 | # Run with stdio transport (for development) 242 | docker run -it --rm mcp-deepwiki 243 | 244 | # Run with HTTP transport (for production) 245 | docker run -d -p 3000:3000 mcp-deepwiki --http --port 3000 246 | 247 | # Run with environment variables 248 | docker run -d -p 3000:3000 \ 249 | -e DEEPWIKI_MAX_CONCURRENCY=10 \ 250 | -e DEEPWIKI_REQUEST_TIMEOUT=60000 \ 251 | mcp-deepwiki --http --port 3000 252 | ``` 253 | 254 | ## Development 255 | 256 | ```bash 257 | # Install dependencies 258 | pnpm install 259 | 260 | # Run in development mode with stdio 261 | pnpm run dev-stdio 262 | 263 | # Run tests 264 | pnpm test 265 | 266 | # Run linter 267 | pnpm run lint 268 | 269 | # Build the package 270 | pnpm run build 271 | ``` 272 | 273 | ## Troubleshooting 274 | 275 | ### Common Issues 276 | 277 | 1. 
**Permission Denied**: If you get EACCES errors when running the CLI, make sure to make the binary executable: 278 | ```bash 279 | chmod +x ./node_modules/.bin/mcp-deepwiki 280 | ``` 281 | 282 | 2. **Connection Refused**: Make sure the port is available and not blocked by a firewall: 283 | ```bash 284 | # Check if port is in use 285 | lsof -i :3000 286 | ``` 287 | 288 | 3. **Timeout Errors**: For large repositories, consider increasing the timeout and concurrency: 289 | ``` 290 | DEEPWIKI_REQUEST_TIMEOUT=60000 DEEPWIKI_MAX_CONCURRENCY=10 npx mcp-deepwiki 291 | ``` 292 | 293 | ## Contributing 294 | 295 | We welcome contributions! Please see [CONTRIBUTING.md](CONTRIBUTING.md) for details. 296 | 297 | ## License 298 | 299 | MIT 300 | 301 | ## Links 302 | 303 | - X/Twitter: [@kregenrek](https://x.com/kregenrek) 304 | - Bluesky: [@kevinkern.dev](https://bsky.app/profile/kevinkern.dev) 305 | 306 | ## Courses 307 | - Learn Cursor AI: [Ultimate Cursor Course](https://www.instructa.ai/en/cursor-ai) 308 | - Learn to build software with AI: [instructa.ai](https://www.instructa.ai) 309 | 310 | ## See my other projects: 311 | 312 | * [AI Prompts](https://github.com/instructa/ai-prompts/blob/main/README.md) - Curated AI Prompts for Cursor AI, Cline, Windsurf and Github Copilot 313 | * [codefetch](https://github.com/regenrek/codefetch) - Turn code into Markdown for LLMs with one simple terminal command 314 | * [aidex](https://github.com/regenrek/aidex) - A CLI tool that provides detailed information about AI language models, helping developers choose the right model for their needs. 315 | -------------------------------------------------------------------------------- /bin/cli.mjs: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | import { fileURLToPath } from 'node:url' 4 | import { runMain } from '../dist/index.mjs' 5 | 6 | globalThis.__mcp_starter_cli__ = { 7 | startTime: Date.now(), 8 | entry: 
fileURLToPath(import.meta.url), 9 | } 10 | 11 | runMain() 12 | -------------------------------------------------------------------------------- /build.config.ts: -------------------------------------------------------------------------------- 1 | import { defineBuildConfig } from 'unbuild' 2 | 3 | export default defineBuildConfig({ 4 | entries: [ 5 | { input: 'src/index.ts' }, 6 | ], 7 | clean: true, 8 | rollup: { 9 | inlineDependencies: true, 10 | esbuild: { 11 | target: 'node16', 12 | minify: true, 13 | }, 14 | }, 15 | }) 16 | -------------------------------------------------------------------------------- /docs/unbuild.md: -------------------------------------------------------------------------------- 1 | # unbuild 2 | 3 | 4 | 5 | [![npm version](https://img.shields.io/npm/v/unbuild)](https://npmjs.com/package/unbuild) 6 | [![npm downloads](https://img.shields.io/npm/dm/unbuild)](https://npm.chart.dev/unbuild) 7 | 8 | 9 | 10 | > A unified JavaScript build system 11 | 12 | ### 📦 Optimized bundler 13 | 14 | Robust [rollup](https://rollupjs.org) based bundler that supports TypeScript and generates commonjs and module formats + type declarations. 15 | 16 | ### 🪄 Automated config 17 | 18 | Automagically infer build config and entries from `package.json`. 19 | 20 | ### 📁 Bundleless build 21 | 22 | Integration with [mkdist](https://github.com/unjs/mkdist) for generating bundleless dists with file-to-file transpilation. 23 | 24 | ### ✨ Passive watcher 25 | 26 | Stub `dist` once using `unbuild --stub` (powered by [jiti](https://github.com/unjs/jiti)) and you can try and link your project without needing to watch and rebuild during development. 27 | 28 | ### ✍ Untype Generator 29 | 30 | Integration with [untyped](https://github.com/unjs/untyped). 
31 | 32 | ### ✔️ Secure builds 33 | 34 | Automatically check for various build issues such as potential **missing** and **unused** [dependencies](https://docs.npmjs.com/cli/v7/configuring-npm/package-json#dependencies) and fail CI. 35 | 36 | CLI output also includes output size and exports for quick inspection. 37 | 38 | ## Usage 39 | 40 | Create `src/index.ts`: 41 | 42 | ```js 43 | export function log(...args) { 44 | console.log(...args) 45 | } 46 | ``` 47 | 48 | Update `package.json`: 49 | 50 | ```json 51 | { 52 | "type": "module", 53 | "scripts": { 54 | "build": "unbuild", 55 | "prepack": "unbuild" 56 | }, 57 | "exports": { 58 | ".": { 59 | "import": "./dist/index.mjs", 60 | "require": "./dist/index.cjs" 61 | } 62 | }, 63 | "main": "./dist/index.cjs", 64 | "types": "./dist/index.d.ts", 65 | "files": ["dist"] 66 | } 67 | ``` 68 | 69 | > **Note** 70 | > You can find a more complete example in [unjs/template](https://github.com/unjs/template) for project setup. 71 | 72 | Build with `unbuild`: 73 | 74 | ```sh 75 | npx unbuild 76 | ``` 77 | 78 | Configuration is automatically inferred from fields in `package.json` mapped to `src/` directory. For more control, continue with next section. 79 | 80 | ## Configuration 81 | 82 | Create `build.config.ts`: 83 | 84 | ```js 85 | export default { 86 | entries: ['./src/index'], 87 | } 88 | ``` 89 | 90 | You can either use `unbuild` key in `package.json` or `build.config.{js,cjs,mjs,ts,mts,cts,json}` to specify configuration. 91 | 92 | See options [here](./src/types.ts). 
93 | 94 | Example: 95 | 96 | ```js 97 | import { defineBuildConfig } from 'unbuild' 98 | 99 | export default defineBuildConfig({ 100 | // If entries is not provided, will be automatically inferred from package.json 101 | entries: [ 102 | // default 103 | './src/index', 104 | // mkdist builder transpiles file-to-file keeping original sources structure 105 | { 106 | builder: 'mkdist', 107 | input: './src/package/components/', 108 | outDir: './build/components', 109 | }, 110 | ], 111 | 112 | // Change outDir, default is 'dist' 113 | outDir: 'build', 114 | 115 | // Generates .d.ts declaration file 116 | declaration: true, 117 | }) 118 | ``` 119 | 120 | Or with multiple builds you can declare an array of configs: 121 | 122 | ```js 123 | import { defineBuildConfig } from 'unbuild' 124 | 125 | export default defineBuildConfig([ 126 | { 127 | // If entries is not provided, will be automatically inferred from package.json 128 | entries: [ 129 | // default 130 | './src/index', 131 | // mkdist builder transpiles file-to-file keeping original sources structure 132 | { 133 | builder: 'mkdist', 134 | input: './src/package/components/', 135 | outDir: './build/components', 136 | }, 137 | ], 138 | 139 | // Change outDir, default is 'dist' 140 | outDir: 'build', 141 | 142 | /** 143 | * `compatible` means "src/index.ts" will generate "dist/index.d.mts", "dist/index.d.cts" and "dist/index.d.ts". 144 | * `node16` means "src/index.ts" will generate "dist/index.d.mts" and "dist/index.d.cts". 145 | * `true` is equivalent to `compatible`. 146 | * `false` will disable declaration generation. 147 | * `undefined` will auto detect based on "package.json". If "package.json" has "types" field, it will be `"compatible"`, otherwise `false`. 
148 | */ 149 | declaration: 'compatible', 150 | }, 151 | { 152 | name: 'minified', 153 | entries: ['./src/index'], 154 | outDir: 'build/min', 155 | rollup: { 156 | esbuild: { 157 | minify: true, 158 | }, 159 | }, 160 | }, 161 | ]) 162 | ``` 163 | 164 | ## Recipes 165 | 166 | ### Decorators support 167 | 168 | In `build.config.ts` 169 | 170 | ```ts 171 | import { defineBuildConfig } from 'unbuild' 172 | 173 | export default defineBuildConfig({ 174 | rollup: { 175 | esbuild: { 176 | tsconfigRaw: { 177 | compilerOptions: { 178 | experimentalDecorators: true, 179 | }, 180 | }, 181 | }, 182 | }, 183 | }) 184 | ``` 185 | 186 | ### Generate sourcemaps 187 | 188 | ```ts 189 | import { defineBuildConfig } from 'unbuild' 190 | 191 | export default defineBuildConfig({ 192 | sourcemap: true, 193 | }) 194 | ``` 195 | 196 | ## 💻 Development 197 | 198 | - Clone this repository 199 | - Enable [Corepack](https://github.com/nodejs/corepack) using `corepack enable` (use `npm i -g corepack` for Node.js < 16.10) 200 | - Install dependencies using `pnpm install` 201 | - Run interactive tests using `pnpm dev` 202 | 203 | ## License 204 | 205 | [MIT](./LICENSE) 206 | 207 | 208 | 209 | [npm-version-src]: https://img.shields.io/npm/v/unbuild?style=flat-square 210 | [npm-version-href]: https://npmjs.com/package/unbuild 211 | [npm-downloads-src]: https://img.shields.io/npm/dm/unbuild?style=flat-square 212 | [npm-downloads-href]: https://npmjs.com/package/unbuild 213 | [github-actions-src]: https://img.shields.io/github/actions/workflow/status/unjs/unbuild/ci.yml?style=flat-square 214 | [github-actions-href]: https://github.com/unjs/unbuild/actions?query=workflow%3Aci 215 | [codecov-src]: https://img.shields.io/codecov/c/gh/unjs/unbuild/main?style=flat-square 216 | [codecov-href]: https://codecov.io/gh/unjs/unbuild 217 | 218 | # EXAMPLES 219 | 220 | import { defineBuildConfig } from "unbuild"; 221 | 222 | export default defineBuildConfig({ 223 | entries: [ 224 | "src/index.ts", 225 | { 226 | 
builder: "untyped", 227 | input: "src/index.ts", 228 | outDir: "schema", 229 | name: "schema", 230 | }, 231 | ], 232 | declaration: true, 233 | rollup: { 234 | emitCJS: true, 235 | }, 236 | }); 237 | 238 | import { defineBuildConfig } from "unbuild"; 239 | 240 | export default defineBuildConfig({ 241 | entries: [ 242 | "src/index.ts", 243 | { 244 | input: "src/plugins/", 245 | outDir: "dist/plugins/", 246 | format: "esm", 247 | }, 248 | { 249 | input: "src/plugins/", 250 | outDir: "dist/plugins/", 251 | format: "cjs", 252 | ext: "cjs", 253 | declaration: false, 254 | }, 255 | ], 256 | declaration: true, 257 | rollup: { 258 | emitCJS: true, 259 | }, 260 | }); 261 | 262 | import { defineBuildConfig } from 'unbuild' 263 | 264 | // Separeate config required for dev because mkdist + cli-entry doesn't work 265 | // with stub. It will create a .d.ts and .mjs file in the src folder 266 | const dev = defineBuildConfig({ 267 | entries: ['src/cli-entry'], 268 | outDir: 'dist', 269 | clean: true, 270 | declaration: true, 271 | rollup: { 272 | inlineDependencies: true, 273 | esbuild: { 274 | target: 'node18', 275 | minify: false, 276 | }, 277 | }, 278 | }) 279 | 280 | const prod = defineBuildConfig({ 281 | entries: [ 282 | { 283 | builder: 'mkdist', 284 | cleanDist: true, 285 | input: './src/', 286 | pattern: ['**/*.{ts,tsx}', '!**/template/**'], 287 | }, 288 | ], 289 | outDir: 'dist', 290 | clean: true, 291 | declaration: true, 292 | rollup: { 293 | inlineDependencies: true, 294 | esbuild: { 295 | target: 'node18', 296 | minify: false, 297 | }, 298 | }, 299 | }) 300 | 301 | const config = process.env.BUILD_ENV === 'production' ? 
prod : dev 302 | export default config 303 | 304 | import { defineBuildConfig } from 'unbuild' 305 | import { addRollupTimingsPlugin, stubOptions } from '../../debug/build-config' 306 | 307 | export default defineBuildConfig({ 308 | declaration: true, 309 | entries: [ 310 | 'src/index', 311 | ], 312 | stubOptions, 313 | hooks: { 314 | 'rollup:options' (ctx, options) { 315 | addRollupTimingsPlugin(options) 316 | }, 317 | }, 318 | externals: [ 319 | '@rspack/core', 320 | '@nuxt/schema', 321 | 'nitropack', 322 | 'nitro', 323 | 'webpack', 324 | 'vite', 325 | 'h3', 326 | ], 327 | }) 328 | 329 | import type { InputPluginOption } from 'rollup' 330 | import process from 'node:process' 331 | import { visualizer } from 'rollup-plugin-visualizer' 332 | import { defineBuildConfig } from 'unbuild' 333 | import { purgePolyfills } from 'unplugin-purge-polyfills' 334 | 335 | const isAnalysingSize = process.env.BUNDLE_SIZE === 'true' 336 | 337 | export default defineBuildConfig({ 338 | declaration: !isAnalysingSize, 339 | failOnWarn: !isAnalysingSize, 340 | hooks: { 341 | 'rollup:options': function (ctx, options) { 342 | const plugins = (options.plugins ||= []) as InputPluginOption[] 343 | plugins.push(purgePolyfills.rollup({ logLevel: 'verbose' })) 344 | if (isAnalysingSize) { 345 | plugins.unshift(visualizer({ template: 'raw-data' })) 346 | } 347 | }, 348 | }, 349 | rollup: { 350 | dts: { 351 | respectExternal: false, 352 | }, 353 | inlineDependencies: true, 354 | resolve: { 355 | exportConditions: ['production', 'node'], 356 | }, 357 | }, 358 | entries: ['src/index'], 359 | externals: [ 360 | '@nuxt/test-utils', 361 | 'fsevents', 362 | 'node:url', 363 | 'node:buffer', 364 | 'node:path', 365 | 'node:child_process', 366 | 'node:process', 367 | 'node:path', 368 | 'node:os', 369 | ], 370 | }) 371 | -------------------------------------------------------------------------------- /eslint.config.js: -------------------------------------------------------------------------------- 1 | // 
@ts-check 2 | import antfu from '@antfu/eslint-config' 3 | 4 | export default antfu( 5 | { 6 | type: 'app', 7 | pnpm: true, 8 | rules: { 9 | 'pnpm/json-enforce-catalog': 'off', 10 | 'no-console': 'warn', 11 | 'node/prefer-global/process': 'off', 12 | }, 13 | }, 14 | ) 15 | -------------------------------------------------------------------------------- /logo.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "mcp-deepwiki", 3 | "type": "module", 4 | "version": "0.0.10", 5 | "packageManager": "pnpm@9.14.4+sha512.c8180b3fbe4e4bca02c94234717896b5529740a6cbadf19fa78254270403ea2f27d4e1d46a08a0f56c89b63dc8ebfd3ee53326da720273794e6200fcf0d184ab", 6 | "description": "MCP server for fetch deepwiki.com and turn content into LLM readable markdown", 7 | "contributors": [ 8 | { 9 | "name": "Kevin Kern", 10 | "email": "kevin@instructa.org" 11 | } 12 | ], 13 | "license": "MIT", 14 | "homepage": "https://github.com/regenrek/deepwiki-mcp", 15 | "repository": { 16 | "type": "git", 17 | "url": "git+https://github.com/regenrek/deepwiki-mcp" 18 | }, 19 | "keywords": [ 20 | "mcp", 21 | "mcp-starter", 22 | "model-context-protocol" 23 | ], 24 | "exports": { 25 | ".": "./dist/index.mjs", 26 | "./cli": "./bin/cli.mjs" 27 | }, 28 | "bin": { 29 | "mcp-instruct": "./bin/cli.mjs" 30 | }, 31 | "files": [ 32 | "bin", 33 | "dist" 34 | ], 35 | "engines": { 36 | "node": ">=18.0.0" 37 | }, 38 | "scripts": { 39 | "build": "unbuild && npm run chmod-run", 40 | "chmod-run": "node -e \"fs.chmodSync('dist/index.mjs', '755'); if (require('fs').existsSync('dist/cli.mjs')) require('fs').chmodSync('dist/cli.mjs', '755');\"", 41 | "start": "nodemon --exec 'tsx src/index.ts'", 42 | "dev:prepare": "nr build", 43 | "inspect": "npx @modelcontextprotocol/inspector@latest", 44 | "dev": 
"npx concurrently 'unbuild --stub' 'npm run inspect'", 45 | "run-cli": "node bin/cli.mjs", 46 | "dev-stdio": "npx concurrently 'npm run run-cli' 'npm run inspect node ./bin/cli.mjs'", 47 | "dev-http": "npx concurrently 'npm run run-cli -- --http --port 4200' 'npm run inspect http://localhost:4200/mcp'", 48 | "dev-sse": "npx concurrently 'npm run run-cli -- --sse --port 4201' 'npm run inspect http://localhost:4201/sse'", 49 | "lint": "eslint", 50 | "lint:fix": "eslint --fix", 51 | "typecheck": "tsc --noEmit", 52 | "test": "vitest", 53 | "release": "tsx scripts/release.ts" 54 | }, 55 | "dependencies": { 56 | "@chatmcp/sdk": "^1.0.6", 57 | "@modelcontextprotocol/sdk": "^1.9.0", 58 | "@vitest/eslint-plugin": "^1.1.43", 59 | "citty": "^0.1.6", 60 | "eslint": "^9.25.1", 61 | "h3": "^1.15.1", 62 | "hast-util-from-html": "^2.0.3", 63 | "hast-util-sanitize": "^5.0.2", 64 | "linkedom": "^0.18.9", 65 | "mcp-deepwiki": "^0.0.9", 66 | "ofetch": "^1.4.1", 67 | "p-queue": "^8.1.0", 68 | "rehype-parse": "^9.0.1", 69 | "rehype-remark": "^10.0.1", 70 | "rehype-sanitize": "^6.0.0", 71 | "remark-gfm": "^4.0.1", 72 | "remark-stringify": "^11.0.0", 73 | "robots-parser": "^3.0.1", 74 | "undici": "^7.8.0", 75 | "unified": "^11.0.5", 76 | "unist-util-visit": "^5.0.0", 77 | "wink-eng-lite-web-model": "^1.0.0", 78 | "wink-nlp": "^1.10.0", 79 | "zod": "^3.24.3" 80 | }, 81 | "devDependencies": { 82 | "@antfu/eslint-config": "^4.12.0", 83 | "@types/node": "^22.14.1", 84 | "dotenv": "^16.5.0", 85 | "esbuild": "^0.25.2", 86 | "nodemon": "^3.1.9", 87 | "tsx": "^4.19.3", 88 | "typescript": "^5.8.3", 89 | "unbuild": "^3.5.0", 90 | "vite": "^6.3.1", 91 | "vitest": "^3.1.1" 92 | } 93 | } 94 | -------------------------------------------------------------------------------- /public/banner.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/regenrek/deepwiki-mcp/58bcb2f06279029dd26f17612b99a049dd416f11/public/banner.png 
-------------------------------------------------------------------------------- /public/deepwiki.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/regenrek/deepwiki-mcp/58bcb2f06279029dd26f17612b99a049dd416f11/public/deepwiki.jpg -------------------------------------------------------------------------------- /public/deepwiki_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/regenrek/deepwiki-mcp/58bcb2f06279029dd26f17612b99a049dd416f11/public/deepwiki_logo.png -------------------------------------------------------------------------------- /public/inspect.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/regenrek/deepwiki-mcp/58bcb2f06279029dd26f17612b99a049dd416f11/public/inspect.jpg -------------------------------------------------------------------------------- /public/mcp-sse-starter.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/regenrek/deepwiki-mcp/58bcb2f06279029dd26f17612b99a049dd416f11/public/mcp-sse-starter.jpg -------------------------------------------------------------------------------- /public/starter2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/regenrek/deepwiki-mcp/58bcb2f06279029dd26f17612b99a049dd416f11/public/starter2.jpg -------------------------------------------------------------------------------- /public/stdio-mcp-starter.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/regenrek/deepwiki-mcp/58bcb2f06279029dd26f17612b99a049dd416f11/public/stdio-mcp-starter.jpg -------------------------------------------------------------------------------- /public/streamable2.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/regenrek/deepwiki-mcp/58bcb2f06279029dd26f17612b99a049dd416f11/public/streamable2.jpg -------------------------------------------------------------------------------- /repomix.config.json: -------------------------------------------------------------------------------- 1 | { 2 | "output": { 3 | "filePath": "repomix-output.xml", 4 | "style": "xml", 5 | "parsableStyle": false, 6 | "fileSummary": true, 7 | "directoryStructure": true, 8 | "removeComments": false, 9 | "removeEmptyLines": false, 10 | "compress": false, 11 | "topFilesLength": 5, 12 | "showLineNumbers": false, 13 | "copyToClipboard": false, 14 | "git": { 15 | "sortByChanges": true, 16 | "sortByChangesMaxCommits": 100 17 | } 18 | }, 19 | "include": [], 20 | "ignore": { 21 | "useGitignore": true, 22 | "useDefaultPatterns": true, 23 | "customPatterns": [] 24 | }, 25 | "security": { 26 | "enableSecurityCheck": true 27 | }, 28 | "tokenCount": { 29 | "encoding": "o200k_base" 30 | } 31 | } -------------------------------------------------------------------------------- /scripts/release.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env tsx 2 | /** 3 | * Release Script 4 | * 5 | * This script automates the process of creating and publishing releases 6 | * for the current package. 
7 | * 8 | * Usage: 9 | * pnpm tsx scripts/release.ts [version-type] [--alpha] [--no-git] 10 | * 11 | * version-type: 'major', 'minor', 'patch', or specific version (default: 'patch') 12 | * --alpha: Create an alpha release 13 | * --no-git: Skip git commit and tag 14 | */ 15 | 16 | import { execSync } from 'node:child_process' 17 | import fs from 'node:fs' 18 | import path from 'node:path' 19 | 20 | // Parse command line arguments 21 | const args = process.argv.slice(2) 22 | const versionBumpArg = args.find(arg => !arg.startsWith('--')) || 'patch' 23 | const isAlpha = args.includes('--alpha') 24 | const skipGit = args.includes('--no-git') 25 | 26 | const rootPath = path.resolve('.') 27 | 28 | function run(command: string, cwd: string) { 29 | console.log(`Executing: ${command} in ${cwd}`) 30 | execSync(command, { stdio: 'inherit', cwd }) 31 | } 32 | 33 | /** 34 | * Bump version in package.json 35 | * @param pkgPath Path to the package directory (project root) 36 | * @param type Version bump type: 'major', 'minor', 'patch', or specific version 37 | * @param isAlpha Whether to create an alpha version 38 | * @returns The new version 39 | */ 40 | function bumpVersion(pkgPath: string, type: 'major' | 'minor' | 'patch' | string, isAlpha: boolean = false): string { 41 | const pkgJsonPath = path.join(pkgPath, 'package.json') 42 | const pkgJson = JSON.parse(fs.readFileSync(pkgJsonPath, 'utf-8')) 43 | const currentVersion = pkgJson.version 44 | let newVersion: string 45 | 46 | // Parse current version to check if it's already an alpha version 47 | const versionRegex = /^(\d+\.\d+\.\d+)(?:-alpha\.(\d+))?$/ 48 | const match = currentVersion.match(versionRegex) 49 | 50 | if (!match) { 51 | throw new Error(`Invalid version format: ${currentVersion}`) 52 | } 53 | 54 | let baseVersion = match[1] 55 | const currentAlphaVersion = match[2] ? 
Number.parseInt(match[2], 10) : -1 56 | 57 | // Handle version bumping 58 | if (type === 'major' || type === 'minor' || type === 'patch') { 59 | const [major, minor, patch] = baseVersion.split('.').map(Number) 60 | 61 | // Bump version according to type 62 | if (type === 'major') { 63 | baseVersion = `${major + 1}.0.0` 64 | } 65 | else if (type === 'minor') { 66 | baseVersion = `${major}.${minor + 1}.0` 67 | } 68 | else { // patch 69 | baseVersion = `${major}.${minor}.${patch + 1}` 70 | } 71 | } 72 | else if (type.match(/^\d+\.\d+\.\d+$/)) { 73 | // Use the provided version string directly as base version 74 | baseVersion = type 75 | } 76 | else { 77 | throw new Error(`Invalid version bump type: ${type}. Use 'major', 'minor', 'patch', or a specific version like '1.2.3'.`) 78 | } 79 | 80 | // Create final version string 81 | if (isAlpha) { 82 | // For alpha releases, always start at alpha.0 when base version changes 83 | // If the base version is the same, increment the alpha number. 84 | const alphaVersion = baseVersion === match[1] ? 
currentAlphaVersion + 1 : 0 85 | if (alphaVersion < 0) { 86 | throw new Error(`Cannot create alpha version from non-alpha version ${currentVersion} without bumping base version (major, minor, patch, or specific).`) 87 | } 88 | newVersion = `${baseVersion}-alpha.${alphaVersion}` 89 | } 90 | else { 91 | // If bumping from an alpha version to a stable version, use the current or bumped baseVersion 92 | newVersion = baseVersion 93 | } 94 | 95 | // Update package.json 96 | pkgJson.version = newVersion 97 | fs.writeFileSync(pkgJsonPath, `${JSON.stringify(pkgJson, null, 2)}\n`) 98 | 99 | console.log(`Bumped version from ${currentVersion} to ${newVersion} in ${pkgJsonPath}`) 100 | return newVersion 101 | } 102 | 103 | /** 104 | * Create a git commit and tag for the release 105 | * @param version The version to tag 106 | * @param isAlpha Whether this is an alpha release 107 | */ 108 | function createGitCommitAndTag(version: string, isAlpha: boolean = false) { 109 | console.log('Creating git commit and tag...') 110 | 111 | try { 112 | // Stage package.json and any other changes 113 | run('git add package.json', rootPath) // Specifically add package.json 114 | // Optional: Add other specific files if needed, or 'git add .' if all changes should be included 115 | 116 | // Create commit with version message 117 | const commitMsg = isAlpha 118 | ? `chore: alpha release v${version}` 119 | : `chore: release v${version}` 120 | run(`git commit -m "${commitMsg}"`, rootPath) 121 | 122 | // Create tag 123 | const tagMsg = isAlpha 124 | ? 
`Alpha Release v${version}` 125 | : `Release v${version}` 126 | run(`git tag -a v${version} -m "${tagMsg}"`, rootPath) 127 | 128 | // Push commit and tag to remote 129 | console.log('Pushing commit and tag to remote...') 130 | run('git push', rootPath) 131 | run('git push --tags', rootPath) 132 | 133 | console.log(`Successfully created and pushed git tag v${version}`) 134 | } 135 | catch (error) { 136 | console.error('Failed to create git commit and tag:', error) 137 | // Decide if we should proceed with publishing even if git fails 138 | // For now, let's throw to stop the process. 139 | throw error 140 | } 141 | } 142 | 143 | async function publishPackage() { 144 | console.log(`🚀 Starting ${isAlpha ? 'alpha' : ''} release process...`) 145 | console.log(`📝 Version bump: ${versionBumpArg}`) 146 | 147 | // Build package first (assuming a build script exists in package.json) 148 | console.log('🔨 Building package...') 149 | run('pnpm build', rootPath) // Use the build script from package.json 150 | 151 | // Bump the version in the root package.json 152 | const newVersion = bumpVersion(rootPath, versionBumpArg, isAlpha) 153 | 154 | // Create git commit and tag if not skipped 155 | if (!skipGit) { 156 | createGitCommitAndTag(newVersion, isAlpha) 157 | } 158 | 159 | // Publish the package to npm 160 | console.log(`📤 Publishing package@${newVersion} to npm...`) 161 | 162 | const publishCmd = isAlpha 163 | ? 'pnpm publish --tag alpha --no-git-checks --access public' 164 | : 'pnpm publish --no-git-checks --access public' // --no-git-checks is often needed if git tagging is manual or separate 165 | 166 | run(publishCmd, rootPath) 167 | 168 | console.log(`✅ Successfully completed ${isAlpha ? 
'alpha' : ''} release v${newVersion}!`) 169 | } 170 | 171 | // Run the publish process 172 | publishPackage().catch((error) => { 173 | console.error('❌ Error during release process:', error) 174 | process.exit(1) 175 | }) 176 | -------------------------------------------------------------------------------- /src/converter/htmlToMarkdown.ts: -------------------------------------------------------------------------------- 1 | import type { ModeEnum } from '../schemas/deepwiki' 2 | import { parseHTML } from 'linkedom' 3 | import rehypeParse from 'rehype-parse' 4 | import rehypeRemark from 'rehype-remark' 5 | import rehypeSanitize from 'rehype-sanitize' 6 | import remarkGfm from 'remark-gfm' 7 | import remarkStringify from 'remark-stringify' 8 | import { unified } from 'unified' 9 | import { rehypeRewriteLinks } from '../lib/linkRewrite' 10 | import { sanitizeSchema } from '../lib/sanitizeSchema' 11 | 12 | export async function htmlToMarkdown( 13 | html: string, 14 | mode: typeof ModeEnum._type, 15 | ): Promise { 16 | // Ensure a DOM is available for rehype-parse with LinkeDOM 17 | const { document } = parseHTML('') 18 | globalThis.document = document 19 | 20 | const file = await unified() 21 | .use(rehypeParse, { fragment: true }) 22 | .use(rehypeSanitize, sanitizeSchema) 23 | .use(rehypeRewriteLinks, { mode }) 24 | .use(rehypeRemark) 25 | .use(remarkGfm) 26 | .use(remarkStringify, { fences: true, bullet: '-', rule: '-' }) 27 | .process(html) 28 | 29 | return String(file) 30 | } 31 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | import type { McpToolContext } from './types' 4 | import { runMain as _runMain, defineCommand } from 'citty' 5 | import { version } from '../package.json' 6 | import { createServer, startServer, stopServer } from './server' 7 | import { deepwikiTool } from './tools/deepwiki' 8 | 
import { deepwikiSearchTool } from './tools/deepwikiSearch' 9 | 10 | const cli = defineCommand({ 11 | meta: { 12 | name: 'mcp-instruct', 13 | version, 14 | description: 'Run the MCP starter with stdio, http, or sse transport', 15 | }, 16 | args: { 17 | http: { type: 'boolean', description: 'Run with HTTP transport' }, 18 | sse: { type: 'boolean', description: 'Run with SSE transport' }, 19 | stdio: { type: 'boolean', description: 'Run with stdio transport (default)' }, 20 | port: { type: 'string', description: 'Port for http/sse (default 3000)', default: '3000' }, 21 | endpoint: { type: 'string', description: 'HTTP endpoint (default /mcp)', default: '/mcp' }, 22 | }, 23 | async run({ args }) { 24 | const mode = args.http ? 'http' : args.sse ? 'sse' : 'stdio' 25 | const mcp = createServer({ name: 'my-mcp-server', version }) 26 | 27 | process.on('SIGTERM', () => stopServer(mcp)) 28 | process.on('SIGINT', () => stopServer(mcp)) 29 | 30 | deepwikiTool({ mcp } as McpToolContext) 31 | // deepwikiSearchTool({ mcp } as McpToolContext) 32 | 33 | if (mode === 'http') { 34 | await startServer(mcp, { type: 'http', port: Number(args.port), endpoint: args.endpoint }) 35 | } 36 | else if (mode === 'sse') { 37 | console.log('Starting SSE server...') 38 | await startServer(mcp, { type: 'sse', port: Number(args.port) }) 39 | } 40 | else if (mode === 'stdio') { 41 | await startServer(mcp, { type: 'stdio' }) 42 | } 43 | }, 44 | }) 45 | 46 | export const runMain = () => _runMain(cli) 47 | -------------------------------------------------------------------------------- /src/lib/httpCrawler.ts: -------------------------------------------------------------------------------- 1 | import type { ProgressEvent } from '../schemas/deepwiki' 2 | import { Buffer } from 'node:buffer' 3 | import { performance } from 'node:perf_hooks' 4 | import { setTimeout } from 'node:timers/promises' 5 | import { URL } from 'node:url' 6 | import PQueue from 'p-queue' 7 | import robotsParser from 'robots-parser' 
8 | import { Agent, fetch } from 'undici' 9 | 10 | const MAX_CONCURRENCY = Number(process.env.DEEPWIKI_CONCURRENCY ?? 5) 11 | const RETRY_LIMIT = 3 12 | const BACKOFF_BASE_MS = 250 13 | 14 | export interface CrawlOptions { 15 | root: URL 16 | maxDepth: number 17 | emit: (e: ProgressEvent) => void 18 | verbose?: boolean 19 | } 20 | 21 | export interface CrawlResult { 22 | html: Record // key = path 23 | errors: { path: string, reason: string }[] 24 | bytes: number 25 | elapsedMs: number 26 | } 27 | 28 | /** 29 | * Breadth-first crawler with depth limiting, domain whitelist, 30 | * robots.txt respect, request throttling and retries. 31 | */ 32 | export async function crawl(options: CrawlOptions): Promise { 33 | const { root, maxDepth, emit, verbose } = options 34 | const queue = new PQueue({ concurrency: MAX_CONCURRENCY }) 35 | const agent = new Agent({ keepAliveTimeout: 5_000 }) 36 | const crawled = new Set() 37 | const html: Record = {} 38 | const errors: { path: string, reason: string }[] = [] 39 | let totalBytes = 0 40 | const t0 = performance.now() 41 | 42 | // Pre-fetch robots.txt and build allowlist 43 | const robotsUrl = new URL('/robots.txt', root) 44 | let robots: ReturnType | undefined 45 | try { 46 | const res = await fetch(robotsUrl) 47 | const body = await res.text() 48 | robots = robotsParser(robotsUrl.href, body) 49 | } 50 | catch { 51 | robots = undefined 52 | } 53 | 54 | async function enqueue(url: URL, depth: number) { 55 | // Skip non-HTML file extensions 56 | const nonHtmlExt = [ 57 | '.css', 58 | '.js', 59 | '.mjs', 60 | '.json', 61 | '.png', 62 | '.jpg', 63 | '.jpeg', 64 | '.gif', 65 | '.svg', 66 | '.webp', 67 | '.ico', 68 | '.woff', 69 | '.woff2', 70 | '.ttf', 71 | '.eot', 72 | '.otf', 73 | '.pdf', 74 | '.zip', 75 | '.tar', 76 | '.gz', 77 | '.mp4', 78 | '.mp3', 79 | '.avi', 80 | '.mov', 81 | '.wmv', 82 | '.flv', 83 | '.m4a', 84 | '.ogg', 85 | '.wav', 86 | '.bmp', 87 | '.tiff', 88 | '.psd', 89 | '.exe', 90 | '.dmg', 91 | '.apk', 92 | '.bin', 93 
| '.7z', 94 | '.rar', 95 | '.xml', 96 | '.rss', 97 | '.atom', 98 | '.map', 99 | '.txt', 100 | '.csv', 101 | '.md', 102 | '.yml', 103 | '.yaml', 104 | '.log', 105 | '.rtf', 106 | '.doc', 107 | '.docx', 108 | '.ppt', 109 | '.pptx', 110 | '.xls', 111 | '.xlsx', 112 | '.db', 113 | '.sqlite', 114 | '.bak', 115 | '.swf', 116 | '.dat', 117 | '.bak', 118 | '.bak1', 119 | '.bak2', 120 | '.bak3', 121 | '.bak4', 122 | '.bak5', 123 | '.bak6', 124 | '.bak7', 125 | '.bak8', 126 | '.bak9', 127 | '.bak10', 128 | '.bak11', 129 | '.bak12', 130 | '.bak13', 131 | '.bak14', 132 | '.bak15', 133 | '.bak16', 134 | '.bak17', 135 | '.bak18', 136 | '.bak19', 137 | '.bak20', 138 | '.bak21', 139 | '.bak22', 140 | '.bak23', 141 | '.bak24', 142 | '.bak25', 143 | '.bak26', 144 | '.bak27', 145 | '.bak28', 146 | '.bak29', 147 | '.bak30', 148 | '.bak31', 149 | '.bak32', 150 | '.bak33', 151 | '.bak34', 152 | '.bak35', 153 | '.bak36', 154 | '.bak37', 155 | '.bak38', 156 | '.bak39', 157 | '.bak40', 158 | '.bak41', 159 | '.bak42', 160 | '.bak43', 161 | '.bak44', 162 | '.bak45', 163 | '.bak46', 164 | '.bak47', 165 | '.bak48', 166 | '.bak49', 167 | '.bak50', 168 | '.bak51', 169 | '.bak52', 170 | '.bak53', 171 | '.bak54', 172 | '.bak55', 173 | '.bak56', 174 | '.bak57', 175 | '.bak58', 176 | '.bak59', 177 | '.bak60', 178 | '.bak61', 179 | '.bak62', 180 | '.bak63', 181 | '.bak64', 182 | '.bak65', 183 | '.bak66', 184 | '.bak67', 185 | '.bak68', 186 | '.bak69', 187 | '.bak70', 188 | '.bak71', 189 | '.bak72', 190 | '.bak73', 191 | '.bak74', 192 | '.bak75', 193 | '.bak76', 194 | '.bak77', 195 | '.bak78', 196 | '.bak79', 197 | '.bak80', 198 | '.bak81', 199 | '.bak82', 200 | '.bak83', 201 | '.bak84', 202 | '.bak85', 203 | '.bak86', 204 | '.bak87', 205 | '.bak88', 206 | '.bak89', 207 | '.bak90', 208 | '.bak91', 209 | '.bak92', 210 | '.bak93', 211 | '.bak94', 212 | '.bak95', 213 | '.bak96', 214 | '.bak97', 215 | '.bak98', 216 | '.bak99', 217 | '.bak100', 218 | ] 219 | const lowerPath = url.pathname.toLowerCase() 220 
| if (nonHtmlExt.some(ext => lowerPath.endsWith(ext))) { 221 | return 222 | } 223 | if (depth > maxDepth) 224 | return 225 | if (url.hostname !== root.hostname || url.pathname === '/robots.txt') 226 | return 227 | const key = url.pathname 228 | if (crawled.has(key)) 229 | return 230 | if (robots && !robots.isAllowed(url.href, '*')) 231 | return 232 | crawled.add(key) 233 | 234 | queue.add(async () => { 235 | const start = performance.now() 236 | let retries = 0 237 | while (true) { 238 | try { 239 | const res = await fetch(url, { dispatcher: agent }) 240 | // Check Content-Type header for HTML 241 | const contentType = res.headers.get('content-type') || '' 242 | if (!contentType.includes('text/html')) { 243 | return 244 | } 245 | const buf = await res.arrayBuffer() 246 | const bytes = buf.byteLength 247 | totalBytes += bytes 248 | const htmlStr = Buffer.from(buf).toString('utf8') 249 | html[key] = htmlStr 250 | 251 | const elapsedMs = Math.round(performance.now() - start) 252 | emit({ 253 | type: 'progress', 254 | url: url.href, 255 | bytes, 256 | elapsedMs, 257 | fetched: Object.keys(html).length, 258 | queued: queue.size + queue.pending, 259 | retries, 260 | } as any) 261 | 262 | // naïve link extraction via regex, replaced by DOM parse later 263 | const linkRe 264 | = /href="([^"#]+)(?:#[^"#]*)?"/gi 265 | let match: RegExpExecArray | null 266 | while (true) { 267 | match = linkRe.exec(htmlStr) 268 | if (!match) 269 | break 270 | try { 271 | const child = new URL(match[1], url) 272 | await enqueue(child, depth + 1) 273 | } 274 | catch {} 275 | } 276 | return 277 | } 278 | catch (err: any) { 279 | if (retries < RETRY_LIMIT) { 280 | retries++ 281 | await setTimeout(BACKOFF_BASE_MS * 2 ** (retries - 1)) 282 | continue 283 | } 284 | errors.push({ path: key, reason: String(err) }) 285 | return 286 | } 287 | } 288 | }) 289 | } 290 | 291 | await enqueue(new URL(root.href), 0) 292 | await queue.onIdle() 293 | 294 | const elapsedMs = Math.round(performance.now() - t0) 295 
| if (verbose) { 296 | console.error( 297 | `Crawl finished: ${Object.keys(html).length} ok, ${errors.length} failed, ${totalBytes} B, ${elapsedMs} ms`, 298 | ) 299 | } 300 | return { html, errors, bytes: totalBytes, elapsedMs } 301 | } 302 | -------------------------------------------------------------------------------- /src/lib/linkRewrite.ts: -------------------------------------------------------------------------------- 1 | import type { ModeEnum } from '../schemas/deepwiki' 2 | import { fromHtml } from 'hast-util-from-html' 3 | import { visit } from 'unist-util-visit' 4 | 5 | interface Opts { 6 | mode: typeof ModeEnum._type 7 | } 8 | 9 | /** rehype plugin to rewrite internal links to anchors or markdown files */ 10 | export function rehypeRewriteLinks(opts: Opts) { 11 | return function transformer(tree: any, file: any) { 12 | visit(tree, 'element', (node: any) => { 13 | if (node.tagName !== 'a') 14 | return 15 | const href: string | undefined = node.properties?.href 16 | if (!href || href.startsWith('http')) 17 | return 18 | if (opts.mode === 'aggregate') { 19 | node.properties.href = `#${href.replace(/^\//, '')}` 20 | } 21 | else { 22 | node.properties.href = `${href.replace(/^\//, '')}.md` 23 | } 24 | }) 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /src/lib/sanitizeSchema.ts: -------------------------------------------------------------------------------- 1 | import type { Options as SanitizeOptions } from 'rehype-sanitize' 2 | import { defaultSchema } from 'hast-util-sanitize' 3 | 4 | // Custom schema: drop img, script, style, header, footer, nav, ads 5 | export const sanitizeSchema: SanitizeOptions = { 6 | ...defaultSchema, 7 | tagNames: (defaultSchema.tagNames ?? []).filter( 8 | t => 9 | !['img', 'script', 'style', 'header', 'footer', 'nav'].includes(t), 10 | ), 11 | attributes: { 12 | ...defaultSchema.attributes, 13 | '*': (defaultSchema.attributes?.['*'] ?? 
[]).filter( 14 | attr => !['style', 'onload', 'onclick'].includes(attr), 15 | ), 16 | }, 17 | } 18 | -------------------------------------------------------------------------------- /src/schemas/deepwiki.ts: -------------------------------------------------------------------------------- 1 | import { z } from 'zod' 2 | 3 | /* ---------- enums ---------- */ 4 | 5 | export const ModeEnum = z.enum(['aggregate', 'pages']) 6 | 7 | /* ---------- request ---------- */ 8 | 9 | export const FetchRequest = z.object({ 10 | /** Deepwiki repo URL, eg https://deepwiki.com/user/repo */ 11 | url: z.string().describe('should be a URL, owner/repo name (e.g. "vercel/ai"), a two-word "owner repo" form (e.g. "vercel ai"), or a single library keyword'), 12 | /** Crawl depth limit: 0 means only the root page */ 13 | maxDepth: z.number().int().min(0).max(1).default(1).describe('Can fetch a single site => maxDepth 0 or multiple/all sites => maxDepth 1'), 14 | /** Conversion mode */ 15 | mode: ModeEnum.default('aggregate'), 16 | /** Verbose logging flag */ 17 | verbose: z.boolean().default(false), 18 | }) 19 | 20 | /* ---------- progress event ---------- */ 21 | 22 | export const ProgressEvent = z.object({ 23 | type: z.literal('progress'), 24 | url: z.string(), 25 | bytes: z.number().int().nonnegative(), 26 | elapsedMs: z.number().int().nonnegative(), 27 | fetched: z.number().int().nonnegative(), 28 | queued: z.number().int().nonnegative(), 29 | retries: z.number().int().nonnegative(), 30 | }) 31 | 32 | /* ---------- success / error envelopes ---------- */ 33 | 34 | export const PageObject = z.object({ 35 | path: z.string(), 36 | markdown: z.string(), 37 | }) 38 | 39 | export const FetchSuccess = z.object({ 40 | status: z.enum(['ok', 'partial']), 41 | mode: ModeEnum, 42 | pages: z.array(PageObject), 43 | totalBytes: z.number().int().nonnegative(), 44 | totalElapsedMs: z.number().int().nonnegative(), 45 | errors: z 46 | .array( 47 | z.object({ 48 | path: z.string(), 49 | reason: z.string(), 
50 | }), 51 | ) 52 | .optional(), 53 | }) 54 | 55 | export const ErrorEnvelope = z.object({ 56 | status: z.literal('error'), 57 | code: z.enum([ 58 | 'VALIDATION', 59 | 'DOMAIN_NOT_ALLOWED', 60 | 'FETCH_FAIL', 61 | ]), 62 | message: z.string(), 63 | details: z.unknown().optional(), 64 | }) 65 | 66 | export type TFetchRequest = z.infer 67 | export type TProgressEvent = z.infer 68 | export type TFetchSuccess = z.infer 69 | export type TErrorEnvelope = z.infer -------------------------------------------------------------------------------- /src/server.ts: -------------------------------------------------------------------------------- 1 | import type { McpServer } from '@modelcontextprotocol/sdk/server/mcp.js' 2 | import { createServer as createNodeServer } from 'node:http' 3 | import { RestServerTransport } from '@chatmcp/sdk/server/rest.js' 4 | import { McpServer as Server } from '@modelcontextprotocol/sdk/server/mcp.js' 5 | import { SSEServerTransport } from '@modelcontextprotocol/sdk/server/sse.js' 6 | import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js' 7 | import { createApp, createRouter, defineEventHandler, getQuery, setResponseStatus, toNodeListener } from 'h3' 8 | 9 | /** Create the bare MCP server instance */ 10 | export function createServer(options: { name: string, version: string }): McpServer { 11 | const { name, version } = options 12 | return new Server({ name, version }) 13 | } 14 | 15 | interface StdioOptions { type: 'stdio' } 16 | interface HttpOptions { type: 'http', port?: number, endpoint?: string } 17 | interface SseOptions { type: 'sse', port?: number } 18 | 19 | export type StartOptions = StdioOptions | HttpOptions | SseOptions 20 | 21 | /** 22 | * Starts the given MCP server with the selected transport. 23 | * Defaults to stdio when no options are provided. 
24 | */ 25 | export async function startServer( 26 | server: McpServer, 27 | options: StartOptions = { type: 'stdio' }, 28 | ): Promise { 29 | if (options.type === 'stdio') { 30 | const transport = new StdioServerTransport() 31 | await server.connect(transport) 32 | return 33 | } 34 | 35 | if (options.type === 'http') { 36 | const port = options.port ?? 3000 37 | const endpoint = options.endpoint ?? '/mcp' 38 | const transport = new RestServerTransport({ port, endpoint }) 39 | await server.connect(transport) 40 | await transport.startServer() 41 | console.log(`HTTP server listening → http://localhost:${port}${endpoint}`) 42 | return 43 | } 44 | 45 | // SSE 46 | const port = options.port ?? 3000 47 | const transports = new Map() 48 | 49 | // Create h3 app and router 50 | const app = createApp() 51 | const router = createRouter() 52 | 53 | // SSE endpoint 54 | router.get('/sse', defineEventHandler(async (event) => { 55 | const res = event.node.res 56 | const transport = new SSEServerTransport('/messages', res) 57 | transports.set(transport.sessionId, transport) 58 | res.on('close', () => transports.delete(transport.sessionId)) 59 | await server.connect(transport) 60 | })) 61 | 62 | // Messages endpoint 63 | router.post('/messages', defineEventHandler(async (event) => { 64 | const { sessionId } = getQuery(event) as { sessionId?: string } 65 | const transport = sessionId ? 
transports.get(sessionId) : undefined 66 | if (transport) { 67 | await transport.handlePostMessage(event.node.req, event.node.res) 68 | } 69 | else { 70 | setResponseStatus(event, 400) 71 | return 'No transport found for sessionId' 72 | } 73 | })) 74 | 75 | app.use(router) 76 | 77 | // Start Node server using h3's Node adapter 78 | const nodeServer = createNodeServer(toNodeListener(app)) 79 | nodeServer.listen(port) 80 | console.log(`SSE server listening → http://localhost:${port}/sse`) 81 | } 82 | 83 | export async function stopServer(server: McpServer) { 84 | try { 85 | await server.close() 86 | } 87 | catch (error) { 88 | console.error('Error occurred during server stop:', error) 89 | } 90 | finally { 91 | process.exit(0) 92 | } 93 | } 94 | -------------------------------------------------------------------------------- /src/tools/deepwiki.ts: -------------------------------------------------------------------------------- 1 | import type { z } from 'zod' 2 | import type { 3 | ErrorEnvelope, 4 | FetchSuccess, 5 | TProgressEvent, 6 | } from '../schemas/deepwiki' 7 | import type { McpToolContext } from '../types' 8 | import { htmlToMarkdown } from '../converter/htmlToMarkdown' 9 | import { resolveRepo } from '../utils/resolveRepoFetch' 10 | import { extractKeyword } from '../utils/extractKeyword' 11 | import { crawl } from '../lib/httpCrawler' 12 | import { FetchRequest } from '../schemas/deepwiki' 13 | 14 | export function deepwikiTool({ mcp }: McpToolContext) { 15 | mcp.tool( 16 | 'deepwiki_fetch', 17 | 'Fetch a deepwiki.com repo and return Markdown', 18 | FetchRequest.shape, 19 | async (input) => { 20 | // Normalize the URL to support short forms 21 | const normalizedInput = { ...input } 22 | if (typeof normalizedInput.url === 'string') { 23 | let url = normalizedInput.url.trim() 24 | 25 | // Only transform when it is not already an explicit HTTP(S) URL 26 | if (!/^https?:\/\//.test(url)) { 27 | // Check if the URL is already in the owner/repo format 28 | if 
(/^[^/]+\/[^/]+$/.test(url)) { 29 | // Already in owner/repo format, keep it as is 30 | // Just prefix with deepwiki.com 31 | } 32 | // Single word/term with no slash - process differently 33 | else if (/^[^/]+$/.test(url)) { 34 | // For single words, first try to extract a meaningful keyword if the input has spaces 35 | if (url.includes(' ')) { 36 | const extracted = extractKeyword(url) 37 | if (extracted) { 38 | url = extracted 39 | } 40 | } 41 | 42 | // Try to resolve the single term against GitHub 43 | try { 44 | const repo = await resolveRepo(url) // "owner/repo" 45 | url = repo 46 | } 47 | catch { 48 | // Fallback to previous behaviour for backward compatibility 49 | url = `defaultuser/${url}` // TODO: replace defaultuser logic 50 | } 51 | } 52 | // Other formats (phrases with slashes that don't match owner/repo) 53 | else { 54 | // Try to extract a library keyword from a free form phrase 55 | const extracted = extractKeyword(url) 56 | if (extracted) { 57 | // Resolve the extracted keyword 58 | try { 59 | const repo = await resolveRepo(extracted) 60 | url = repo 61 | } catch { 62 | url = `defaultuser/${extracted}` 63 | } 64 | } 65 | } 66 | 67 | // At this point url should be "owner/repo" 68 | url = `https://deepwiki.com/${url}` 69 | } 70 | 71 | normalizedInput.url = url 72 | } 73 | const parse = FetchRequest.safeParse(normalizedInput) 74 | if (!parse.success) { 75 | const err: z.infer = { 76 | status: 'error', 77 | code: 'VALIDATION', 78 | message: 'Request failed schema validation', 79 | details: parse.error.flatten(), 80 | } 81 | return err 82 | } 83 | 84 | const req = parse.data 85 | const root = new URL(req.url) 86 | 87 | if (req.maxDepth > 1) { 88 | const err: z.infer = { 89 | status: 'error', 90 | code: 'VALIDATION', 91 | message: 'maxDepth > 1 is not allowed', 92 | } 93 | return err 94 | } 95 | 96 | if (root.hostname !== 'deepwiki.com') { 97 | const err: z.infer = { 98 | status: 'error', 99 | code: 'DOMAIN_NOT_ALLOWED', 100 | message: 'Only 
deepwiki.com domains are allowed', 101 | } 102 | return err 103 | } 104 | 105 | // Progress emitter 106 | function emitProgress(e: any) { 107 | // Progress reporting is not supported in this context because McpServer does not have a sendEvent method. 108 | } 109 | 110 | const crawlResult = await crawl({ 111 | root, 112 | maxDepth: req.maxDepth, 113 | emit: emitProgress, 114 | verbose: req.verbose, 115 | }) 116 | 117 | // Convert each page 118 | const pages = await Promise.all( 119 | Object.entries(crawlResult.html).map(async ([path, html]) => ({ 120 | path, 121 | markdown: await htmlToMarkdown(html, req.mode), 122 | })), 123 | ) 124 | 125 | return { 126 | content: pages.map(page => ({ 127 | type: 'text', 128 | text: `# ${page.path}\n\n${page.markdown}`, 129 | })), 130 | } 131 | }, 132 | ) 133 | } -------------------------------------------------------------------------------- /src/tools/deepwikiSearch.ts: -------------------------------------------------------------------------------- 1 | import type { McpToolContext } from '../types' 2 | import { z } from 'zod' 3 | import { htmlToMarkdown } from '../converter/htmlToMarkdown' 4 | import { crawl } from '../lib/httpCrawler' 5 | import { FetchRequest } from '../schemas/deepwiki' 6 | 7 | /* ------------------------------------------------------------------ */ 8 | /* Schema */ 9 | /* ------------------------------------------------------------------ */ 10 | 11 | const SearchRequest = FetchRequest.extend({ 12 | /** Case-insensitive literal search term */ 13 | query: z.string().min(1, 'query cannot be empty'), 14 | /** Hard cap on number of snippets to return (default 10) */ 15 | maxMatches: z.number().int().positive().max(100).default(10), 16 | }) 17 | 18 | /* ------------------------------------------------------------------ */ 19 | /* Tool registration */ 20 | /* ------------------------------------------------------------------ */ 21 | 22 | export function deepwikiSearchTool({ mcp }: McpToolContext) { 23 | mcp.tool( 
24 | 'deepwiki_search', 25 | `Download pages from a deepwiki.com, look for a case-insensitive 26 | substring, and return up to maxMatches short snippets with the match 27 | wrapped in **bold**. 28 | 29 | Required: 30 | • url – root Deepwiki repo 31 | • query – plain text (literal string) 32 | 33 | Optional: 34 | • maxDepth – crawl depth (default 1, like deepwiki.fetch) 35 | • maxMatches – limit on snippets (default 10) 36 | • mode – "aggregate" | "pages" (affects link rewriting only) 37 | • verbose – log progress to stderr`, 38 | SearchRequest.shape, 39 | async (raw) => { 40 | /* Helper to wrap everything into the MCP chat-message envelope */ 41 | const toMsg = (body: unknown) => ({ 42 | content: [{ type: 'text', text: JSON.stringify(body) }], 43 | }) 44 | 45 | /* ---------- validate input ---------- */ 46 | const parsed = SearchRequest.safeParse(raw) 47 | if (!parsed.success) { 48 | return toMsg({ 49 | status: 'error', 50 | code: 'VALIDATION', 51 | details: parsed.error.flatten(), 52 | }) 53 | } 54 | 55 | const req = parsed.data 56 | const root = new URL(req.url) 57 | 58 | /* ---------- crawl ---------- */ 59 | const { html } = await crawl({ 60 | root, 61 | maxDepth: req.maxDepth, 62 | emit: () => {}, 63 | verbose: req.verbose, 64 | }) 65 | 66 | /* ---------- build regex ---------- */ 67 | const safe = req.query.replace(/[.*+?^${}()|[\]\\]/g, '\\$&') 68 | const re = new RegExp(safe, 'i') 69 | const matches: { path: string, snippet: string }[] = [] 70 | 71 | /* ---------- convert + search ---------- */ 72 | for (const [path, sourceHtml] of Object.entries(html)) { 73 | if (matches.length >= req.maxMatches) 74 | break 75 | 76 | const md = await htmlToMarkdown(sourceHtml, req.mode) 77 | let m: RegExpExecArray | null 78 | while ((m = re.exec(md)) !== null) { 79 | const start = Math.max(0, m.index - 80) 80 | const end = Math.min(md.length, m.index + m[0].length + 80) 81 | const rawSnippet = md.slice(start, end) 82 | const snippet = rawSnippet.replace(re, s => 
`**${s}**`) 83 | matches.push({ path, snippet }) 84 | if (matches.length >= req.maxMatches) 85 | break 86 | } 87 | } 88 | 89 | /* ---------- reply ---------- */ 90 | return toMsg({ 91 | status: 'ok', 92 | query: req.query, 93 | matches, 94 | totalSearchedPages: Object.keys(html).length, 95 | }) 96 | }, 97 | ) 98 | } 99 | -------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 | import type { McpServer } from '@modelcontextprotocol/sdk' 2 | 3 | export interface McpToolContext { 4 | mcp: McpServer 5 | } 6 | 7 | // Define the options type 8 | export interface McpServerOptions { 9 | name: string 10 | version: string 11 | } 12 | 13 | export type Tools = (context: McpToolContext) => void 14 | -------------------------------------------------------------------------------- /src/utils.ts: -------------------------------------------------------------------------------- 1 | import type { McpToolContext, Tools } from './types' // Assuming McpToolContext is defined in types.ts 2 | import fs from 'node:fs' 3 | import path from 'node:path' 4 | 5 | interface PackageJson { 6 | name?: string 7 | version: string 8 | [key: string]: any 9 | } 10 | 11 | export function getPackageJson(): PackageJson | null { 12 | try { 13 | const packageJsonPath = path.resolve(process.cwd(), 'package.json') 14 | if (!fs.existsSync(packageJsonPath)) { 15 | console.error('Error: package.json not found at', packageJsonPath) 16 | return null 17 | } 18 | const packageJsonContent = fs.readFileSync(packageJsonPath, 'utf-8') 19 | const packageJson: PackageJson = JSON.parse(packageJsonContent) 20 | 21 | if (!packageJson.version) { 22 | console.error('Error: package.json is missing the required \'version\' field.') 23 | return null 24 | } 25 | 26 | return packageJson 27 | } 28 | catch (error) { 29 | console.error('Error reading or parsing package.json:', error) 30 | return null // Return null 
on error 31 | } 32 | } 33 | 34 | export function registerTools(context: McpToolContext, tools: Tools[]): void { 35 | tools.forEach(register => register(context)) 36 | } 37 | -------------------------------------------------------------------------------- /src/utils/extractKeyword.ts: -------------------------------------------------------------------------------- 1 | import winkNLP from 'wink-nlp' 2 | import model from 'wink-eng-lite-web-model' 3 | 4 | const nlp = winkNLP(model) 5 | const its = nlp.its 6 | 7 | // Very small list of words we never want as a "library keyword”. 8 | const stopTerms = new Set([ 9 | 'how', 'what', 'when', 'where', 'upgrade', 'update', 'new', 'latest', 10 | 'can', 'i', 'to', 'in', 'for', 'with', 'the', 'a', 'an', 11 | ]) 12 | 13 | /** 14 | * Pull the most likely tech/library word from free-form user input. 15 | * Returns `undefined` if nothing useful is found. 16 | */ 17 | export function extractKeyword(text: string): string | undefined { 18 | const doc = nlp.readDoc(text) 19 | 20 | const candidates: string[] = [] 21 | doc.tokens().each((t) => { 22 | const pos = t.out(its.pos) // e.g. "NOUN", "PROPN" 23 | const value = t.out(its.normal) 24 | if ((pos === 'NOUN' || pos === 'PROPN') && !stopTerms.has(value)) { 25 | candidates.push(value) 26 | } 27 | }) 28 | 29 | return candidates[0] 30 | } -------------------------------------------------------------------------------- /src/utils/resolveRepoFetch.ts: -------------------------------------------------------------------------------- 1 | import { fetch } from 'undici' 2 | 3 | /** 4 | * Resolve a single-word keyword (library or project name) to "owner/repo" 5 | * by querying the GitHub Search API and returning the top match. 6 | * 7 | * @param keyword The library or project name to search for. 8 | * @throws If the GitHub request fails or no repository matches. 
9 | */ 10 | export async function resolveRepo(keyword: string): Promise { 11 | const url = 12 | `https://api.github.com/search/repositories?q=${encodeURIComponent( 13 | `${keyword} in:name`, 14 | )}&per_page=1` 15 | 16 | const headers: Record = { 17 | Accept: 'application/vnd.github+json', 18 | } 19 | 20 | // Optional authentication to lift the unauthenticated rate limit (10 req/min) 21 | if (process.env.GITHUB_TOKEN) 22 | headers.Authorization = `Bearer ${process.env.GITHUB_TOKEN}` 23 | 24 | const res = await fetch(url, { headers }) 25 | if (!res.ok) 26 | throw new Error(`GitHub error: ${res.status}`) 27 | 28 | const { items } = (await res.json()) as { 29 | items: { full_name: string }[] 30 | } 31 | 32 | if (!items?.length) 33 | throw new Error('no match') 34 | 35 | // "full_name" is "owner/repo" 36 | return items[0].full_name 37 | } -------------------------------------------------------------------------------- /tests/McpClient.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * @fileoverview MCP test client for testing the MCP server 3 | */ 4 | 5 | import { Client } from '@modelcontextprotocol/sdk/client/index.js' 6 | import { StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js' 7 | 8 | interface McpTestClientOptions { 9 | cliEntryPoint: string // Path to the cli-entry.mjs file 10 | } 11 | 12 | export class McpTestClient { 13 | private client: Client 14 | private transport: StdioClientTransport | undefined 15 | private options: McpTestClientOptions 16 | 17 | constructor(options: McpTestClientOptions) { 18 | this.options = options 19 | this.client = new Client( 20 | { 21 | name: 'devtools-mcp-test-client', 22 | version: '0.1.0', 23 | }, 24 | { 25 | capabilities: { 26 | prompts: {}, 27 | resources: {}, 28 | tools: { 29 | list: {}, 30 | call: {}, 31 | }, 32 | }, 33 | }, 34 | ) 35 | } 36 | 37 | /** 38 | * Start the MCP server with the given command line arguments 39 | * @param args Additional 
arguments to pass to the server 40 | */ 41 | async connect(args: string[] = []): Promise { 42 | // Create a new transport with the specified args 43 | 44 | console.log('Starting MCP server with args:', [ 45 | this.options.cliEntryPoint, 46 | ...args, 47 | ]) 48 | 49 | this.transport = new StdioClientTransport({ 50 | command: 'node', 51 | args: [this.options.cliEntryPoint, ...args], // Use the provided entry point 52 | }) 53 | 54 | // Connect the client to the transport 55 | await this.client.connect(this.transport) 56 | console.log('Connected to MCP server') 57 | } 58 | 59 | /** 60 | * Connect to the server with "server" as the first argument 61 | * @param args Additional arguments to pass to the server 62 | */ 63 | async connectServer(args: string[] = []): Promise { 64 | return this.connect(['server', ...args]) 65 | } 66 | 67 | /** 68 | * Close the connection to the server 69 | */ 70 | async close(): Promise { 71 | if (this.transport) { 72 | await this.transport.close() 73 | console.log('Disconnected from MCP server') 74 | this.transport = undefined 75 | } 76 | else { 77 | console.log('Transport not initialized, skipping close.') 78 | } 79 | } 80 | 81 | /** 82 | * List all available tools 83 | */ 84 | async listTools(): Promise { 85 | return await this.client.listTools() 86 | } 87 | 88 | /** 89 | * Call a tool by name with the given arguments 90 | * @param name Tool name 91 | * @param args Tool arguments (using Record for flexibility) 92 | */ 93 | async callTool(name: string, args: Record = {}): Promise { 94 | return await this.client.callTool({ 95 | name, 96 | arguments: args, 97 | }) 98 | } 99 | 100 | /** 101 | * Get the prompt for a specific tool or configuration; used for testing 102 | * @param name Name of the tool or configuration to get prompt for 103 | * @returns Promise containing the prompt text 104 | */ 105 | async getPrompt(name: string): Promise { 106 | // Call the tool with no arguments to get its prompt 107 | return await this.client.callTool({ 108 
| name, 109 | arguments: {}, 110 | }) 111 | } 112 | } 113 | -------------------------------------------------------------------------------- /tests/client.test.ts: -------------------------------------------------------------------------------- 1 | import path from 'node:path' 2 | import { fileURLToPath } from 'node:url' 3 | import { afterEach, beforeEach, describe, expect, it } from 'vitest' 4 | import { McpTestClient } from './McpClient.js' 5 | 6 | // Resolve the CLI entry point path dynamically 7 | const __filename = fileURLToPath(import.meta.url) 8 | const __dirname = path.dirname(__filename) 9 | // Go up three levels from test/ -> test-utils/ -> packages/ -> mcpn/ 10 | // Then down to packages/cli/bin/mcpn.mjs 11 | const cliEntryPointPath = path.resolve(__dirname, '../bin/cli.mjs') 12 | 13 | describe('mCP Client Tests', () => { 14 | let client: McpTestClient 15 | 16 | beforeEach(() => { 17 | console.log('cliEntryPointPath', cliEntryPointPath) 18 | 19 | client = new McpTestClient({ 20 | cliEntryPoint: cliEntryPointPath, // Use the dynamically resolved path 21 | }) 22 | }) 23 | 24 | afterEach(async () => { 25 | try { 26 | await client.close() 27 | } 28 | catch (error) { 29 | console.error('Error closing client:', error) 30 | } 31 | }) 32 | 33 | it('should connect to server with default configuration', async () => { 34 | await client.connectServer() 35 | const tools = await client.listTools() 36 | 37 | // When no args provided, default preset is "thinking", so it should include generate_thought 38 | expect(Array.isArray(tools.tools)).toBe(true) 39 | const toolNames = tools.tools.map((t: any) => t.name) 40 | console.log('Available tools:', toolNames) 41 | expect(toolNames).toContain('deepwiki.fetch') 42 | }) 43 | }) 44 | 45 | describe('deepWiki Tool Tests', () => { 46 | let client: McpTestClient 47 | 48 | beforeEach(() => { 49 | // Resolve the CLI entry point path dynamically 50 | const __filename = fileURLToPath(import.meta.url) 51 | const __dirname = 
path.dirname(__filename)
 52 | 
 53 |     client = new McpTestClient({
 54 |       cliEntryPoint: cliEntryPointPath,
 55 |     })
 56 |   })
 57 | 
 58 |   afterEach(async () => {
 59 |     try {
 60 |       await client.close()
 61 |     }
 62 |     catch (error) {
 63 |       console.error('Error closing client:', error)
 64 |     }
 65 |   })
 66 | 
 67 |   it('should fetch content from a deepwiki.com URL', async () => {
 68 |     await client.connectServer() // Connect with default settings
 69 | 
 70 |     // Verify the deepwiki_fetch tool is available (registered name in src/tools/deepwiki.ts)
 71 |     const tools = await client.listTools()
 72 |     const toolNames = tools.tools.map((t: any) => t.name)
 73 |     expect(toolNames).toContain('deepwiki_fetch')
 74 | 
 75 |     // Call the deepwiki_fetch tool (the old 'deepwiki.fetch' name is stale)
 76 |     const result = await client.callTool('deepwiki_fetch', {
 77 |       url: 'https://deepwiki.com/antiwork/gumroad/3.1-navigation-components',
 78 |       maxDepth: 1,
 79 |       mode: 'pages',
 80 |     })
 81 | 
 82 |     console.log('deepwiki.fetch result:', JSON.stringify(result, null, 2))
 83 | 
 84 |     expect(result.content[0].text).toMatch(/Navigation Components/)
 85 |   }, 30000) // Increase timeout for network request
 86 | 
 87 |   it('should return error for non-deepwiki.com URL', async () => {
 88 |     await client.connectServer()
 89 | 
 90 |     // Expect the call to reject with a specific error structure
 91 |     await expect(client.callTool('deepwiki_fetch', {
 92 |       url: 'https://example.com/some/path', // Use a non-deepwiki URL
 93 |       maxDepth: 0,
 94 |       // mode: 'pages' // Mode is irrelevant here, but keep it valid if needed
 95 |     })).rejects.toMatchObject({
 96 |       // Adjust based on the actual error structure returned by MCP client/server
 97 |       // It might be a generic RPC error code like -32602 for invalid params
 98 |       // or a custom error code if the tool handles it specifically.
99 | // Based on the logs, it seems to be -32602 100 | code: -32602, 101 | message: expect.stringContaining('Invalid arguments'), // Or a more specific message if available 102 | }) 103 | }) 104 | 105 | it('should return validation error for missing URL', async () => { 106 | await client.connectServer() 107 | 108 | // Expect the call to reject because validation fails 109 | await expect(client.callTool('deepwiki.fetch', { 110 | // url is missing 111 | maxDepth: 1, 112 | mode: 'pages', 113 | })).rejects.toMatchObject({ 114 | code: -32602, // MCP error code for invalid parameters 115 | message: expect.stringContaining('Invalid arguments'), // Check for a part of the error message 116 | // Optionally, check for more details if the error object provides them 117 | // data: expect.objectContaining({ /* ... details ... */ }) 118 | }) 119 | }) 120 | }) 121 | -------------------------------------------------------------------------------- /tests/crawler.test.ts: -------------------------------------------------------------------------------- 1 | import { once } from 'node:events' 2 | import { mkdirSync, readFileSync, writeFileSync } from 'node:fs' 3 | import { createServer } from 'node:http' 4 | import { join } from 'node:path' 5 | import { beforeAll, describe, expect, it } from 'vitest' 6 | import { htmlToMarkdown } from '../src/converter/htmlToMarkdown' 7 | import { crawl } from '../src/lib/httpCrawler' 8 | 9 | const OUTPUT_DIR = join(__dirname, 'output') 10 | const TARGET_URL = 'https://deepwiki.com/regenrek/codefetch' 11 | const ROOT_URL = new URL(TARGET_URL) 12 | const ROOT_PATH = ROOT_URL.pathname 13 | 14 | function saveMarkdown(filename: string, content: string) { 15 | writeFileSync(join(OUTPUT_DIR, filename), content) 16 | } 17 | 18 | function serve(html: string) { 19 | return new Promise<{ url: string, close: () => void }>((resolve) => { 20 | const srv = createServer((_, res) => { 21 | res.setHeader('content-type', 'text/html') 22 | res.end(html) 23 | }).listen(0, 
() => { 24 | const { port } = srv.address() as any 25 | resolve({ 26 | url: `http://localhost:${port}/index.html`, 27 | close: () => srv.close(), 28 | }) 29 | }) 30 | }) 31 | } 32 | 33 | describe('crawl', () => { 34 | beforeAll(() => { 35 | mkdirSync(OUTPUT_DIR, { recursive: true }) 36 | }) 37 | 38 | it('fetches one page and respects depth 0', async () => { 39 | const { url, close } = await serve('

Hello

') 40 | try { 41 | const { html } = await crawl({ 42 | root: new URL(url), 43 | maxDepth: 0, 44 | emit: () => {}, 45 | }) 46 | expect(Object.keys(html)).toEqual(['/index.html']) 47 | } 48 | finally { 49 | close() 50 | } 51 | }) 52 | 53 | it('crawls deepwiki.com (pages mode) and converts to markdown', async () => { 54 | try { 55 | const { html, errors } = await crawl({ 56 | root: ROOT_URL, 57 | maxDepth: 0, 58 | emit: () => {}, 59 | }) 60 | 61 | expect(errors).toEqual([]) 62 | expect(Object.keys(html)).toContain(ROOT_PATH) 63 | expect(html[ROOT_PATH]).toBeTypeOf('string') 64 | expect(html[ROOT_PATH]).not.toBe('') 65 | 66 | const markdown = await htmlToMarkdown(html[ROOT_PATH], 'pages') 67 | expect(markdown).toBeTypeOf('string') 68 | expect(markdown.length).toBeGreaterThan(50) 69 | saveMarkdown('crawl-pages.result.md', markdown) 70 | } 71 | catch (error) { 72 | expect.fail(`Test threw an error: ${error}`) 73 | } 74 | }, 30000) 75 | 76 | it('crawls deepwiki.com (aggregate mode) and converts to markdown', async () => { 77 | try { 78 | const { html, errors } = await crawl({ 79 | root: ROOT_URL, 80 | maxDepth: 0, 81 | emit: () => {}, 82 | }) 83 | 84 | expect(errors).toEqual([]) 85 | expect(Object.keys(html)).toContain(ROOT_PATH) 86 | expect(html[ROOT_PATH]).toBeTypeOf('string') 87 | expect(html[ROOT_PATH]).not.toBe('') 88 | 89 | const markdown = await htmlToMarkdown(html[ROOT_PATH], 'aggregate') 90 | expect(markdown).toBeTypeOf('string') 91 | expect(markdown.length).toBeGreaterThan(50) 92 | saveMarkdown('crawl-aggregate.result.md', markdown) 93 | } 94 | catch (error) { 95 | expect.fail(`Test threw an error: ${error}`) 96 | } 97 | }, 30000) 98 | }) 99 | 100 | describe('crawl depth', () => { 101 | beforeAll(() => { 102 | mkdirSync(OUTPUT_DIR, { recursive: true }) 103 | }) 104 | 105 | // it.each([0, 1, 2])('crawls deepwiki.com with maxDepth %i', async (maxDepth) => { 106 | // try { 107 | // const { html, errors } = await crawl({ 108 | // root: ROOT_URL, 109 | // maxDepth, 
110 | // emit: () => {}, 111 | // }) 112 | 113 | // expect(errors).toEqual([]) 114 | // expect(Object.keys(html).length).toBeGreaterThan(0) 115 | // expect(html).toHaveProperty(ROOT_PATH) 116 | // expect(html[ROOT_PATH]).toBeTypeOf('string') 117 | 118 | // const markdown = await htmlToMarkdown(html[ROOT_PATH], 'pages') 119 | // expect(markdown).toBeTypeOf('string') 120 | // saveMarkdown(`crawl-depth-${maxDepth}.result.md`, markdown) 121 | // } 122 | // catch (error) { 123 | // expect.fail(`Test (depth ${maxDepth}) threw an error: ${error}`) 124 | // } 125 | // }, 30000 * 5) 126 | it.each([0, 1, 2])('crawls deepwiki.com with maxDepth %i', async (maxDepth) => { 127 | try { 128 | const { html, errors } = await crawl({ 129 | root: ROOT_URL, 130 | maxDepth, 131 | emit: () => {}, 132 | }) 133 | 134 | expect(errors).toEqual([]) 135 | expect(Object.keys(html).length).toBeGreaterThan(0) 136 | // expect(html).toHaveProperty(ROOT_PATH) 137 | expect(html).toBeTypeOf('string') 138 | 139 | const markdown = await htmlToMarkdown(html, 'pages') 140 | expect(markdown).toBeTypeOf('string') 141 | saveMarkdown(`crawl-depth-${maxDepth}.result.md`, markdown) 142 | } 143 | catch (error) { 144 | expect.fail(`Test (depth ${maxDepth}) threw an error: ${error}`) 145 | } 146 | }, 30000 * 5) 147 | }) 148 | -------------------------------------------------------------------------------- /tests/server.test.ts: -------------------------------------------------------------------------------- 1 | import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' 2 | // Import mock internals at the top 3 | // Note: Vitest often handles hoisting, but dynamic import in loadModule might affect this. 4 | // We'll revisit using vi.mocked if these direct imports cause issues later. 
5 | import { __mocks as restMocks } from '@chatmcp/sdk/server/rest.js' 6 | import { McpServer as MockMcpServer, __spies as mcpSpies } from '@modelcontextprotocol/sdk/server/mcp.js' 7 | import { __mocks as sseMocks } from '@modelcontextprotocol/sdk/server/sse.js' 8 | import { __mocks as stdioMocks } from '@modelcontextprotocol/sdk/server/stdio.js' 9 | // Mock h3 internals needed for assertions 10 | import { __mocks as h3Mocks } from 'h3' 11 | 12 | // --------------------------------------------------------------------------- 13 | // Mocking external dependencies used by src/server.ts 14 | // --------------------------------------------------------------------------- 15 | 16 | /** 17 | * Mock the MCP server class so we can track calls to `connect`/`close` without 18 | * requiring the actual implementation provided by the SDK. 19 | */ 20 | vi.mock('@modelcontextprotocol/sdk/server/mcp.js', () => { 21 | const connectSpy = vi.fn().mockResolvedValue(undefined) 22 | const closeSpy = vi.fn().mockResolvedValue(undefined) 23 | let lastInstance: unknown 24 | 25 | // Simple mock class replicating the public surface we rely on 26 | class McpServer { 27 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 28 | constructor(public readonly opts: any) { 29 | lastInstance = this 30 | } 31 | 32 | connect = connectSpy 33 | 34 | close = closeSpy 35 | 36 | // Helper to access the last created instance for assertions 37 | static __getLastInstance = () => lastInstance 38 | } 39 | 40 | return { 41 | McpServer, 42 | /** spies exported for assertion purposes */ 43 | __spies: { connectSpy, closeSpy }, 44 | } 45 | }) 46 | 47 | /** 48 | * Mock STDIO transport 49 | */ 50 | vi.mock('@modelcontextprotocol/sdk/server/stdio.js', () => { 51 | let lastInstance: unknown 52 | class StdioServerTransport { 53 | constructor() { 54 | // Assign instance upon creation 55 | lastInstance = this 56 | } 57 | } 58 | return { 59 | StdioServerTransport, 60 | // Expose a way to get the last instance 61 | 
__mocks: { getLastInstance: () => lastInstance }, 62 | } 63 | }) 64 | 65 | /** 66 | * Mock REST (streamable HTTP) transport 67 | */ 68 | vi.mock('@chatmcp/sdk/server/rest.js', () => { 69 | let lastInstance: unknown 70 | const startServerSpy = vi.fn().mockResolvedValue(undefined) 71 | class RestServerTransport { 72 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 73 | constructor(public readonly opts: any) { 74 | // Assign instance upon creation 75 | lastInstance = this 76 | } 77 | 78 | startServer = startServerSpy 79 | } 80 | return { 81 | RestServerTransport, 82 | __mocks: { 83 | getLastInstance: () => lastInstance, 84 | startServerSpy, 85 | }, 86 | } 87 | }) 88 | 89 | /** 90 | * Mock SSE transport 91 | */ 92 | vi.mock('@modelcontextprotocol/sdk/server/sse.js', () => { 93 | let lastInstance: unknown 94 | class SSEServerTransport { 95 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 96 | constructor(public readonly path: string, public readonly res: any) { 97 | // Assign instance upon creation 98 | lastInstance = this 99 | } 100 | 101 | sessionId = 'mock-session-id' 102 | 103 | handlePostMessage = vi.fn().mockResolvedValue(undefined) 104 | } 105 | return { 106 | SSEServerTransport, 107 | // Expose a way to get the last instance 108 | __mocks: { getLastInstance: () => lastInstance }, 109 | } 110 | }) 111 | 112 | /** 113 | * Mock `h3` – we do not want to start a real HTTP server. We simply need 114 | * the API surface (`createApp`, `createRouter`, `defineEventHandler`, `listen`) that src/server.ts expects. 
115 | */ 116 | vi.mock('h3', () => { 117 | const appUseSpy = vi.fn() 118 | const routerGetSpy = vi.fn().mockReturnThis() 119 | const routerPostSpy = vi.fn().mockReturnThis() 120 | const routerUseSpy = vi.fn().mockReturnThis() 121 | 122 | const createAppMock = vi.fn(() => ({ 123 | use: appUseSpy, 124 | handler: vi.fn(), 125 | })) 126 | const createRouterMock = vi.fn(() => ({ 127 | get: routerGetSpy, 128 | post: routerPostSpy, 129 | use: routerUseSpy, 130 | handler: vi.fn(), 131 | })) 132 | // Ensure the event handler function itself is returned to be executed 133 | const defineEventHandlerMock = vi.fn(fn => fn) 134 | const listenMock = vi.fn() 135 | const toNodeListenerMock = vi.fn() 136 | 137 | return { 138 | createApp: createAppMock, 139 | createRouter: createRouterMock, 140 | defineEventHandler: defineEventHandlerMock, 141 | listen: listenMock, 142 | toNodeListener: toNodeListenerMock, 143 | // Expose spies for detailed assertions 144 | __mocks: { 145 | createAppMock, 146 | createRouterMock, 147 | defineEventHandlerMock, 148 | listenMock, 149 | toNodeListenerMock, 150 | appUseSpy, 151 | routerGetSpy, 152 | routerPostSpy, 153 | routerUseSpy, 154 | }, 155 | } 156 | }) 157 | 158 | 159 | // --------------------------------------------------------------------------- 160 | // Actual tests start here – we import the module under test AFTER the mocks. 
161 | // --------------------------------------------------------------------------- 162 | 163 | // Removed unused StartOptions import 164 | 165 | // Utility loader so we import the fresh module within each test after mocks 166 | async function loadModule() { 167 | return await import('../src/server') 168 | } 169 | 170 | // Imports for mock spies/helpers moved to the top 171 | 172 | beforeEach(() => { 173 | // Reset mocks before each test 174 | vi.clearAllMocks() 175 | }) 176 | 177 | afterEach(() => { 178 | // Restore mocks after each test 179 | vi.restoreAllMocks() 180 | }) 181 | 182 | /** 183 | * createServer tests 184 | */ 185 | describe('createServer', () => { 186 | it('should return an instance of McpServer with provided options', async () => { 187 | const { createServer } = await loadModule() 188 | const options = { name: 'test-server', version: '1.2.3' } 189 | const server = createServer(options) 190 | 191 | // Check if it's an instance of our *mocked* McpServer 192 | expect(server).toBeInstanceOf(MockMcpServer) 193 | // Check if the constructor was called with the correct options 194 | const lastInstance = MockMcpServer.__getLastInstance() as any 195 | expect(lastInstance?.opts).toEqual(options) 196 | }) 197 | }) 198 | 199 | /** 200 | * STDIO transport tests 201 | */ 202 | describe('startServer – stdio transport', () => { 203 | it('invokes StdioServerTransport and connects', async () => { 204 | const { createServer, startServer } = await loadModule() 205 | const server = createServer({ name: 'test', version: '1.0.0' }) 206 | 207 | await startServer(server, { type: 'stdio' }) 208 | 209 | const transportInstance = stdioMocks.getLastInstance() 210 | expect(transportInstance).toBeDefined() 211 | expect(mcpSpies.connectSpy).toHaveBeenCalledTimes(1) 212 | expect(mcpSpies.connectSpy).toHaveBeenCalledWith(transportInstance) 213 | }) 214 | }) 215 | 216 | /** 217 | * HTTP (REST) transport tests 218 | */ 219 | describe('startServer – streamable HTTP transport', () => 
{ 220 | it('invokes RestServerTransport with defaults and starts server', async () => { 221 | const { createServer, startServer } = await loadModule() 222 | const server = createServer({ name: 'test', version: '1.0.0' }) 223 | 224 | await startServer(server, { type: 'http' }) 225 | 226 | const transportInstance = restMocks.getLastInstance() as any 227 | expect(transportInstance).toBeDefined() 228 | expect(transportInstance.opts).toEqual({ port: 3000, endpoint: '/mcp' }) 229 | 230 | expect(mcpSpies.connectSpy).toHaveBeenCalledTimes(1) 231 | expect(mcpSpies.connectSpy).toHaveBeenCalledWith(transportInstance) 232 | expect(restMocks.startServerSpy).toHaveBeenCalledTimes(1) 233 | }) 234 | 235 | it('invokes RestServerTransport with custom options and starts server', async () => { 236 | const { createServer, startServer } = await loadModule() 237 | const server = createServer({ name: 'test', version: '1.0.0' }) 238 | const customOptions = { type: 'http' as const, port: 8080, endpoint: '/api/mcp' } 239 | 240 | await startServer(server, customOptions) 241 | 242 | const transportInstance = restMocks.getLastInstance() as any 243 | expect(transportInstance).toBeDefined() 244 | expect(transportInstance.opts).toEqual({ port: customOptions.port, endpoint: customOptions.endpoint }) 245 | 246 | expect(mcpSpies.connectSpy).toHaveBeenCalledTimes(1) 247 | expect(mcpSpies.connectSpy).toHaveBeenCalledWith(transportInstance) 248 | expect(restMocks.startServerSpy).toHaveBeenCalledTimes(1) 249 | }) 250 | }) 251 | 252 | /** 253 | * SSE transport tests 254 | */ 255 | describe('startServer – SSE transport', () => { 256 | it('sets up h3 server and listens on default port', async () => { 257 | const { createServer, startServer } = await loadModule() 258 | const server = createServer({ name: 'test', version: '1.0.0' }) 259 | 260 | await startServer(server, { type: 'sse' }) // Default port 3000 261 | 262 | expect(h3Mocks.createAppMock).toHaveBeenCalledTimes(1) 263 | 
expect(h3Mocks.createRouterMock).toHaveBeenCalledTimes(1) 264 | 265 | // Check router configuration 266 | expect(h3Mocks.routerGetSpy).toHaveBeenCalledWith('/sse', expect.any(Function)) 267 | expect(h3Mocks.routerPostSpy).toHaveBeenCalledWith('/messages', expect.any(Function)) 268 | expect(h3Mocks.appUseSpy).toHaveBeenCalledWith(expect.anything()) // Router passed to app.use 269 | 270 | // Check server listening (either via listen or toNodeListener) 271 | expect( 272 | h3Mocks.listenMock.mock.calls.length > 0 273 | || h3Mocks.toNodeListenerMock.mock.calls.length > 0, 274 | ).toBe(true) 275 | 276 | // If using toNodeListener (preferred), check listen was called on the node server 277 | if (h3Mocks.toNodeListenerMock.mock.calls.length > 0) { 278 | // We need to mock node:http createServer to check .listen() 279 | // This adds complexity, maybe checking toNodeListener is sufficient for this test level 280 | } 281 | }) 282 | 283 | it('sets up h3 server and listens on custom port', async () => { 284 | const { createServer, startServer } = await loadModule() 285 | const server = createServer({ name: 'test', version: '1.0.0' }) 286 | const customPort = 9000 287 | 288 | await startServer(server, { type: 'sse', port: customPort }) 289 | 290 | // We need a way to check the port passed to listen/createNodeServer 291 | // This requires mocking 'node:http' or refining the h3 mock further 292 | // For now, we assert the basic setup happened 293 | expect(h3Mocks.createAppMock).toHaveBeenCalledTimes(1) 294 | expect(h3Mocks.createRouterMock).toHaveBeenCalledTimes(1) 295 | expect( 296 | h3Mocks.listenMock.mock.calls.length > 0 297 | || h3Mocks.toNodeListenerMock.mock.calls.length > 0, 298 | ).toBe(true) 299 | }) 300 | 301 | // TODO: Add more detailed SSE tests: 302 | // - Simulate GET /sse -> check transport created, connect called, transport stored 303 | // - Simulate POST /messages -> check handlePostMessage called 304 | // - Simulate POST /messages (invalid session) -> check 400 
status 305 | // - Simulate client disconnect -> check transport removed 306 | }) 307 | 308 | 309 | /** 310 | * stopServer tests 311 | */ 312 | // TODO: Add stopServer tests 313 | // - Mock process.exit 314 | // - Assert server.close() is called 315 | // - Test error handling in server.close() 316 | // ... existing code ... 317 | // Example structure: 318 | // describe('stopServer', () => { 319 | // let exitSpy: MockInstance; 320 | 321 | // beforeEach(() => { 322 | // // Prevent tests from exiting 323 | // exitSpy = vi.spyOn(process, 'exit').mockImplementation((() => {}) as any); 324 | // }); 325 | 326 | // afterEach(() => { 327 | // exitSpy.mockRestore(); 328 | // }); 329 | 330 | // it('calls server.close and process.exit(0) on success', async () => { 331 | // const { createServer, stopServer } = await loadModule(); 332 | // const server = createServer({ name: 'test', version: '1.0.0' }); 333 | // // Ensure close resolves successfully 334 | // mcpSpies.closeSpy.mockResolvedValue(undefined); 335 | 336 | // await stopServer(server); 337 | 338 | // expect(mcpSpies.closeSpy).toHaveBeenCalledTimes(1); 339 | // expect(exitSpy).toHaveBeenCalledWith(0); 340 | // }); 341 | 342 | // it('calls process.exit(0) even if server.close rejects', async () => { 343 | // const { createServer, stopServer } = await loadModule(); 344 | // const server = createServer({ name: 'test', version: '1.0.0' }); 345 | // const closeError = new Error('Close failed'); 346 | // mcpSpies.closeSpy.mockRejectedValue(closeError); 347 | // // Mock console.error to suppress output during test 348 | // const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}); 349 | 350 | // await stopServer(server); 351 | 352 | // expect(mcpSpies.closeSpy).toHaveBeenCalledTimes(1); 353 | // expect(errorSpy).toHaveBeenCalledWith('Error occurred during server stop:', closeError); 354 | // expect(exitSpy).toHaveBeenCalledWith(0); 355 | 356 | // errorSpy.mockRestore(); 357 | // }); 358 | // }); 359 | // ... 
-------------------------------------------------------------------------------- /vite.config.ts: --------------------------------------------------------------------------------
// Import from 'vitest/config' so the `test` field below is part of the config
// type; 'vite' alone does not declare it.
import { defineConfig } from 'vitest/config'

export default defineConfig({
  build: {
    target: 'node18',
    ssr: true,
    outDir: 'dist',
    rollupOptions: {
      output: {
        format: 'esm',
        entryFileNames: 'server.mjs',
      },
    },
  },
  test: {
    environment: 'node',
  },
})
--------------------------------------------------------------------------------