├── test-source.txt ├── test-destination.txt ├── smithery.yaml ├── Dockerfile ├── .prettierrc ├── LICENSE ├── tsconfig.json ├── .gitignore ├── package.json ├── src ├── config │ └── defaults.ts ├── index.ts ├── services │ ├── RateLimiterService.ts │ ├── FileService.ts │ ├── WatchService.ts │ ├── DirectoryService.ts │ ├── ChangeTrackingService.ts │ ├── StreamProcessor.ts │ └── PatchService.ts ├── types.ts ├── httpServer.ts ├── types │ └── index.ts └── server.ts ├── DOCKER.md ├── examples └── http-client.html └── README.md /test-source.txt: -------------------------------------------------------------------------------- 1 | This is a test file to verify the file-operations server is working correctly. 2 | -------------------------------------------------------------------------------- /test-destination.txt: -------------------------------------------------------------------------------- 1 | This is a test file to verify the file-operations server is working correctly. 2 | -------------------------------------------------------------------------------- /smithery.yaml: -------------------------------------------------------------------------------- 1 | # Smithery configuration file: https://smithery.ai/docs/config#smitheryyaml 2 | 3 | startCommand: 4 | type: stdio 5 | configSchema: 6 | # JSON Schema defining the configuration options for the MCP. 7 | type: object 8 | properties: {} 9 | description: No configuration is required for the File Operations MCP Server. 10 | commandFunction: 11 | # A JS function that produces the CLI command based on the given config to start the MCP on stdio. 12 | |- 13 | (config) => ({ command: 'node', args: ['build/index.js'] }) 14 | exampleConfig: {} 15 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # Generated by https://smithery.ai. 
See: https://smithery.ai/docs/config#dockerfile 2 | FROM node:lts-alpine 3 | 4 | # Create app directory 5 | WORKDIR /app 6 | 7 | # Copy package files and tsconfig 8 | COPY package.json package-lock.json tsconfig.json ./ 9 | 10 | # Install dependencies without running scripts 11 | RUN npm install --ignore-scripts 12 | 13 | # Copy source code 14 | COPY . . 15 | 16 | # Build the project 17 | RUN npm run build 18 | 19 | # Expose HTTP port for streaming interface (3001 by default) 20 | EXPOSE 3001 21 | 22 | # Add environment variables for configuration 23 | ENV MCP_TRANSPORT=stdio 24 | ENV MCP_HTTP_PORT=3001 25 | 26 | # Create a startup script that handles both transports 27 | RUN echo '#!/bin/sh\nif [ "$MCP_TRANSPORT" = "http" ]; then\n exec npm run start:http\nelse\n exec npm start\nfi' > /app/start.sh && \ 28 | chmod +x /app/start.sh 29 | 30 | # Use the startup script as the default command 31 | CMD [ "/app/start.sh" ] 32 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "printWidth": 100, 3 | "tabWidth": 4, 4 | "useTabs": false, 5 | "semi": true, 6 | "singleQuote": true, 7 | "quoteProps": "as-needed", 8 | "jsxSingleQuote": false, 9 | "trailingComma": "es5", 10 | "bracketSpacing": true, 11 | "bracketSameLine": false, 12 | "arrowParens": "always", 13 | "endOfLine": "lf", 14 | "embeddedLanguageFormatting": "auto", 15 | "singleAttributePerLine": false, 16 | "proseWrap": "preserve", 17 | "htmlWhitespaceSensitivity": "css", 18 | "overrides": [ 19 | { 20 | "files": [ 21 | "*.ts", 22 | "*.tsx" 23 | ], 24 | "options": { 25 | "parser": "typescript" 26 | } 27 | }, 28 | { 29 | "files": [ 30 | "*.json", 31 | "*.jsonc" 32 | ], 33 | "options": { 34 | "parser": "json" 35 | } 36 | } 37 | ] 38 | } -------------------------------------------------------------------------------- /LICENSE: 
-------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 File Operations MCP Server Contributors 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2022", 4 | "module": "NodeNext", 5 | "moduleResolution": "NodeNext", 6 | "lib": [ 7 | "ES2022" 8 | ], 9 | "outDir": "./build", 10 | "rootDir": "./src", 11 | "strict": true, 12 | "declaration": true, 13 | "sourceMap": true, 14 | "esModuleInterop": true, 15 | "skipLibCheck": true, 16 | "forceConsistentCasingInFileNames": true, 17 | "resolveJsonModule": true, 18 | "allowJs": false, 19 | "checkJs": false, 20 | "removeComments": false, 21 | "noImplicitAny": true, 22 | "noImplicitThis": true, 23 | "noImplicitReturns": true, 24 | "noFallthroughCasesInSwitch": true, 25 | "noUnusedLocals": true, 26 | "noUnusedParameters": true, 27 | "allowSyntheticDefaultImports": true, 28 | "experimentalDecorators": true, 29 | "emitDecoratorMetadata": true, 30 | "pretty": true 31 | }, 32 | "include": [ 33 | "src/**/*" 34 | ], 35 | "exclude": [ 36 | "node_modules", 37 | "build", 38 | "**/*.test.ts" 39 | ] 40 | } -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Dependencies 2 | node_modules/ 3 | .pnp/ 4 | .pnp.js 5 | 6 | # Build output 7 | build/ 8 | dist/ 9 | out/ 10 | *.tsbuildinfo 11 | 12 | # Environment variables 13 | .env 14 | .env.local 15 | .env.*.local 16 | 17 | # IDE and editor files 18 | .idea/ 19 | .vscode/ 20 | *.swp 21 | *.swo 22 | *~ 23 | 24 | # Logs 25 | logs/ 26 | *.log 27 | npm-debug.log* 28 | yarn-debug.log* 29 | yarn-error.log* 30 | 31 | # Testing 32 | coverage/ 33 | .nyc_output/ 34 | 35 | # Operating System 36 | .DS_Store 37 | Thumbs.db 38 | 39 | # Temporary files 40 | *.tmp 41 | *.temp 42 | .cache/ 43 | 44 | # Debug files 45 | .debug/ 46 | *.debug 47 | 48 | # TypeScript source maps 49 | *.map 50 | 51 | # Optional 
npm cache directory 52 | .npm 53 | 54 | # Optional eslint cache 55 | .eslintcache 56 | 57 | # Optional REPL history 58 | .node_repl_history 59 | 60 | # Output of 'npm pack' 61 | *.tgz 62 | 63 | # Yarn Integrity file 64 | .yarn-integrity 65 | 66 | # dotenv environment variable files 67 | .env 68 | .env.test 69 | .env.production 70 | 71 | # parcel-bundler cache (https://parceljs.org/) 72 | .cache 73 | .parcel-cache 74 | 75 | # Next.js build output 76 | .next 77 | out 78 | 79 | # Nuxt.js build / generate output 80 | .nuxt 81 | dist 82 | 83 | # Gatsby files 84 | .cache/ 85 | # Comment in the public line in if your project uses Gatsby and not Next.js 86 | # public 87 | 88 | # vuepress build output 89 | .vuepress/dist 90 | 91 | # Serverless directories 92 | .serverless/ 93 | 94 | # FuseBox cache 95 | .fusebox/ 96 | 97 | # DynamoDB Local files 98 | .dynamodb/ 99 | 100 | # TernJS port file 101 | .tern-port 102 | 103 | # Stores VSCode versions used for testing VSCode extensions 104 | .vscode-test 105 | 106 | # yarn v2 107 | .yarn/cache 108 | .yarn/unplugged 109 | .yarn/build-state.yml 110 | .yarn/install-state.gz 111 | .pnp.* 112 | 113 | # Local backup files 114 | *.bak 115 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "file-operations-server", 3 | "version": "1.0.0", 4 | "description": "MCP server for file operations with enhanced capabilities", 5 | "type": "module", 6 | "main": "build/index.js", 7 | "scripts": { 8 | "build": "tsc", 9 | "start": "node build/index.js", 10 | "start:http": "node build/index.js --http", 11 | "dev": "ts-node-esm src/index.ts", 12 | "dev:http": "ts-node-esm src/index.ts --http", 13 | "watch": "tsc -w", 14 | "clean": "rimraf build", 15 | "lint": "eslint src --ext .ts", 16 | "format": "prettier --write \"src/**/*.ts\"", 17 | "test": "jest", 18 | "prepare": "npm run build" 19 | }, 20 | "keywords": [ 
21 | "mcp", 22 | "file-operations", 23 | "streaming", 24 | "patch", 25 | "watch" 26 | ], 27 | "author": "Brian W. Smith", 28 | "license": "MIT", 29 | "dependencies": { 30 | "@modelcontextprotocol/sdk": "^1.5.0", 31 | "diff": "^5.1.0", 32 | "express": "^4.19.2", 33 | "glob": "^10.3.10", 34 | "mime-types": "^2.1.35" 35 | }, 36 | "devDependencies": { 37 | "@types/diff": "^5.0.9", 38 | "@types/express": "^4.17.21", 39 | "@types/glob": "^8.1.0", 40 | "@types/mime-types": "^2.1.4", 41 | "@types/node": "^20.10.5", 42 | "@typescript-eslint/eslint-plugin": "^6.15.0", 43 | "@typescript-eslint/parser": "^6.15.0", 44 | "eslint": "^8.56.0", 45 | "eslint-config-prettier": "^9.1.0", 46 | "eslint-plugin-prettier": "^5.1.2", 47 | "jest": "^29.7.0", 48 | "prettier": "^3.1.1", 49 | "rimraf": "^5.0.5", 50 | "ts-jest": "^29.1.1", 51 | "ts-node": "^10.9.2", 52 | "typescript": "^5.3.3" 53 | }, 54 | "engines": { 55 | "node": ">=18.0.0" 56 | }, 57 | "types": "./build/index.d.ts", 58 | "repository": { 59 | "type": "git", 60 | "url": "git+https://github.com/bsmi021/file-operations-server.git" 61 | }, 62 | "bugs": { 63 | "url": "https://github.com/bsmi021/file-operations-server/issues" 64 | }, 65 | "homepage": "https://github.com/bsmi021/file-operations-server#readme" 66 | } -------------------------------------------------------------------------------- /src/config/defaults.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Default configurations for file operations 3 | */ 4 | 5 | export const DEFAULT_BATCH_CONFIG = { 6 | maxChunkSize: 1024 * 1024, // 1MB 7 | maxLinesPerChunk: 1000, 8 | parallel: false, 9 | maxParallelOps: 4, 10 | chunkDelay: 100 // ms 11 | } as const; 12 | 13 | 14 | export const DEFAULT_WHITESPACE_CONFIG = { 15 | preserveIndentation: true, 16 | preserveLineEndings: true, 17 | normalizeWhitespace: true, 18 | trimTrailingWhitespace: true, 19 | defaultIndentation: ' ', 20 | defaultLineEnding: '\n' 21 | } as const; 22 | 23 | /** 
24 | * File operation error codes 25 | */ 26 | export const FILE_ERROR_CODES = { 27 | FILE_NOT_FOUND: 'FILE_NOT_FOUND', 28 | PERMISSION_DENIED: 'PERMISSION_DENIED', 29 | INVALID_PATH: 'INVALID_PATH', 30 | FILE_TOO_LARGE: 'FILE_TOO_LARGE', 31 | OPERATION_FAILED: 'OPERATION_FAILED', 32 | UNKNOWN_ERROR: 'UNKNOWN_ERROR' 33 | } as const; 34 | 35 | /** 36 | * Default file operation settings 37 | */ 38 | export const FILE_OPERATION_DEFAULTS = { 39 | encoding: 'utf8', 40 | recursive: true, 41 | createBackup: false, 42 | overwrite: false, 43 | maxFileSize: 50 * 1024 * 1024, // 50MB 44 | maxSearchResults: 1000, 45 | contextLines: 2, // Number of lines before/after for search results 46 | watchDebounceTime: 100 // ms 47 | } as const; 48 | 49 | /** 50 | * Patch operation types 51 | */ 52 | export const PATCH_TYPES = { 53 | LINE: 'line', 54 | BLOCK: 'block', 55 | DIFF: 'diff', 56 | COMPLETE: 'complete' 57 | } as const; 58 | 59 | /** 60 | * Change tracking settings 61 | */ 62 | export const CHANGE_TRACKING_CONFIG = { 63 | maxChanges: 1000, 64 | persistChanges: true, 65 | changeTypes: { 66 | FILE_EDIT: 'file_edit', 67 | FILE_CREATE: 'file_create', 68 | FILE_DELETE: 'file_delete', 69 | FILE_MOVE: 'file_move', 70 | DIRECTORY_CREATE: 'directory_create', 71 | DIRECTORY_DELETE: 'directory_delete', 72 | PERMISSION_CHANGE: 'permission_change', 73 | WATCH_START: 'watch_start', 74 | WATCH_END: 'watch_end', 75 | PATCH_APPLY: 'patch_apply' 76 | } 77 | } as const; 78 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | import { FileOperationsServer } from './server.js'; 4 | import { FileOperationsHttpServer } from './httpServer.js'; 5 | 6 | /** 7 | * Main entry point for the File Operations MCP Server 8 | * Handles server initialization, error handling, and graceful shutdown 9 | * Supports both stdio and HTTP transports based on command 
line arguments 10 | */ 11 | async function main() { 12 | let server: FileOperationsServer | FileOperationsHttpServer | null = null; 13 | 14 | try { 15 | // Parse command line arguments 16 | const args = process.argv.slice(2); 17 | const useHttp = args.includes('--http') || args.includes('-h'); 18 | const port = getPortFromArgs(args) || 3001; 19 | 20 | // Set up error handlers 21 | process.on('uncaughtException', (error) => { 22 | console.error('[Uncaught Exception]', error); 23 | process.exit(1); 24 | }); 25 | 26 | process.on('unhandledRejection', (reason) => { 27 | console.error('[Unhandled Rejection]', reason); 28 | process.exit(1); 29 | }); 30 | 31 | // Handle termination signals 32 | const signals: NodeJS.Signals[] = ['SIGTERM', 'SIGINT', 'SIGUSR2']; 33 | signals.forEach((signal) => { 34 | process.once(signal, async () => { 35 | console.error(`\nReceived ${signal}, shutting down...`); 36 | if (server) { 37 | try { 38 | if (server instanceof FileOperationsHttpServer) { 39 | await server.stop(); 40 | } else { 41 | await server.cleanup(); 42 | } 43 | } catch (error) { 44 | console.error('Error during cleanup:', error); 45 | } 46 | } 47 | process.exit(0); 48 | }); 49 | }); 50 | 51 | // Initialize and run server based on transport type 52 | if (useHttp) { 53 | server = new FileOperationsHttpServer(); 54 | await server.start(port); 55 | } else { 56 | server = new FileOperationsServer(); 57 | await server.run(); 58 | } 59 | 60 | } catch (error) { 61 | console.error('[Fatal Error]', error); 62 | process.exit(1); 63 | } 64 | } 65 | 66 | /** 67 | * Extract port number from command line arguments 68 | */ 69 | function getPortFromArgs(args: string[]): number | null { 70 | const portIndex = args.findIndex(arg => arg === '--port' || arg === '-p'); 71 | if (portIndex !== -1 && portIndex + 1 < args.length) { 72 | const port = parseInt(args[portIndex + 1], 10); 73 | if (!isNaN(port) && port > 0 && port <= 65535) { 74 | return port; 75 | } 76 | } 77 | return null; 78 | } 79 | 80 
| // Start the server 81 | main().catch((error) => { 82 | console.error('[Startup Error]', error); 83 | process.exit(1); 84 | }); 85 | -------------------------------------------------------------------------------- /src/services/RateLimiterService.ts: -------------------------------------------------------------------------------- 1 | import { McpError, ErrorCode } from '@modelcontextprotocol/sdk/types.js'; 2 | 3 | interface RateLimit { 4 | maxRequests: number; // Maximum requests allowed in the window 5 | windowMs: number; // Time window in milliseconds 6 | } 7 | 8 | interface RequestTracker { 9 | count: number; 10 | resetTime: number; 11 | } 12 | 13 | /** 14 | * Service for rate limiting requests to protect against abuse 15 | */ 16 | export class RateLimiterService { 17 | private limits: Map; 18 | private requests: Map; 19 | 20 | constructor() { 21 | // Define rate limits for different operations 22 | this.limits = new Map([ 23 | ['tool', { maxRequests: 100, windowMs: 60 * 1000 }], // 100 requests per minute for tools 24 | ['resource', { maxRequests: 200, windowMs: 60 * 1000 }], // 200 requests per minute for resources 25 | ['watch', { maxRequests: 20, windowMs: 60 * 1000 }] // 20 watch operations per minute 26 | ]); 27 | this.requests = new Map(); 28 | } 29 | 30 | /** 31 | * Check if an operation should be rate limited 32 | * @param operationType Type of operation (tool, resource, watch) 33 | * @throws {McpError} If rate limit is exceeded 34 | */ 35 | public checkRateLimit(operationType: string): void { 36 | const limit = this.limits.get(operationType); 37 | if (!limit) return; // No rate limit for this operation type 38 | 39 | const now = Date.now(); 40 | const tracker = this.requests.get(operationType) || { count: 0, resetTime: now + limit.windowMs }; 41 | 42 | // Reset counter if window has expired 43 | if (now >= tracker.resetTime) { 44 | tracker.count = 0; 45 | tracker.resetTime = now + limit.windowMs; 46 | } 47 | 48 | // Check if limit is exceeded 49 | if 
(tracker.count >= limit.maxRequests) { 50 | const waitMs = tracker.resetTime - now; 51 | throw new McpError( 52 | ErrorCode.InvalidRequest, 53 | `Rate limit exceeded for ${operationType} operations. Please wait ${Math.ceil(waitMs / 1000)} seconds.` 54 | ); 55 | } 56 | 57 | // Update counter 58 | tracker.count++; 59 | this.requests.set(operationType, tracker); 60 | } 61 | 62 | /** 63 | * Get current rate limit status 64 | * @param operationType Type of operation 65 | * @returns Current count and reset time, or null if no limit exists 66 | */ 67 | public getStatus(operationType: string): { current: number; limit: number; resetsIn: number } | null { 68 | const limit = this.limits.get(operationType); 69 | const tracker = this.requests.get(operationType); 70 | 71 | if (!limit || !tracker) return null; 72 | 73 | return { 74 | current: tracker.count, 75 | limit: limit.maxRequests, 76 | resetsIn: Math.max(0, tracker.resetTime - Date.now()) 77 | }; 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Combined types for file operations server 3 | */ 4 | 5 | // File Context Types 6 | export type FileEncoding = 'utf8' | 'utf-8' | 'ascii' | 'binary' | 'base64' | 'hex' | 'latin1'; 7 | 8 | export interface FileMetadata { 9 | size: number; 10 | mimeType: string; 11 | modifiedTime: string; 12 | createdTime: string; 13 | isDirectory: boolean; 14 | } 15 | 16 | export interface FileEntry { 17 | path: string; 18 | name: string; 19 | metadata: FileMetadata; 20 | } 21 | 22 | export interface SearchOptions { 23 | recursive?: boolean; 24 | includeHidden?: boolean; 25 | maxDepth?: number; 26 | fileTypes?: string[]; 27 | } 28 | 29 | export interface ReadOptions { 30 | encoding?: FileEncoding; 31 | maxSize?: number; 32 | startLine?: number; 33 | endLine?: number; 34 | } 35 | 36 | export interface SearchResult { 37 | matches: Array<{ 38 | 
path: string; 39 | line: number; 40 | content: string; 41 | context: { 42 | before: string[]; 43 | after: string[]; 44 | }; 45 | }>; 46 | totalMatches: number; 47 | } 48 | 49 | export interface FileContent { 50 | content: string; 51 | metadata: FileMetadata; 52 | encoding: string; 53 | truncated: boolean; 54 | totalLines?: number; 55 | } 56 | 57 | export interface DirectoryContent { 58 | files: { 59 | [path: string]: FileContent; 60 | }; 61 | metadata: { 62 | totalFiles: number; 63 | totalSize: number; 64 | truncated: boolean; 65 | }; 66 | } 67 | 68 | // File Patch Types 69 | export type PatchType = 'line' | 'block' | 'diff' | 'complete'; 70 | 71 | export interface BasePatchOperation { 72 | type: PatchType; 73 | filePath: string; 74 | createBackup?: boolean; 75 | validate?: boolean; 76 | whitespaceConfig?: WhitespaceConfig; 77 | } 78 | 79 | export interface LinePatchOperation extends BasePatchOperation { 80 | type: 'line'; 81 | search: string | RegExp; 82 | replace?: string; 83 | lineNumbers?: number[]; 84 | context?: number; 85 | } 86 | 87 | export interface BlockPatchOperation extends BasePatchOperation { 88 | type: 'block'; 89 | search: string | RegExp; 90 | replace?: string; 91 | startDelimiter?: string; 92 | endDelimiter?: string; 93 | includeDelimiters?: boolean; 94 | } 95 | 96 | export interface CompleteUpdateOperation extends BasePatchOperation { 97 | type: 'complete'; 98 | content: string; 99 | preserveFormatting?: boolean; 100 | } 101 | 102 | export interface DiffPatchOperation extends BasePatchOperation { 103 | type: 'diff'; 104 | diff: string; 105 | context?: number; 106 | ignoreWhitespace?: boolean; 107 | } 108 | 109 | export interface BatchConfig { 110 | maxChunkSize?: number; 111 | maxLinesPerChunk?: number; 112 | parallel?: boolean; 113 | maxParallelOps?: number; 114 | chunkDelay?: number; 115 | } 116 | 117 | export interface ProgressInfo { 118 | currentChunk: number; 119 | totalChunks: number; 120 | bytesProcessed: number; 121 | totalBytes: number; 
122 | linesProcessed: number; 123 | totalLines: number; 124 | startTime: number; 125 | estimatedTimeRemaining?: number; 126 | } 127 | 128 | export type PatchOperation = LinePatchOperation | BlockPatchOperation | DiffPatchOperation | CompleteUpdateOperation; 129 | 130 | export interface PatchResult { 131 | success: boolean; 132 | filePath: string; 133 | type: PatchType; 134 | changesApplied: number; 135 | backupPath?: string; 136 | error?: string; 137 | modifiedLines?: number[]; 138 | originalContent?: string[]; 139 | newContent?: string[]; 140 | whitespaceChanges?: { 141 | indentationFixed: boolean; 142 | lineEndingsNormalized: boolean; 143 | trailingWhitespaceRemoved: boolean; 144 | }; 145 | } 146 | 147 | export interface WhitespaceConfig { 148 | preserveIndentation: boolean; 149 | preserveLineEndings: boolean; 150 | normalizeWhitespace: boolean; 151 | trimTrailingWhitespace: boolean; 152 | defaultIndentation?: string; 153 | defaultLineEnding?: string; 154 | } 155 | 156 | export interface NormalizedContent { 157 | normalized: string; 158 | lineEndings: string; 159 | indentation: string; 160 | hash: string; 161 | stats: { 162 | indentationSpaces: number; 163 | indentationTabs: number; 164 | trailingWhitespace: number; 165 | emptyLines: number; 166 | maxLineLength: number; 167 | }; 168 | } 169 | 170 | export type ScopeType = 'class' | 'method' | 'property' | 'unknown'; 171 | 172 | export interface ContentScope { 173 | type: ScopeType; 174 | start: number; 175 | end: number; 176 | context: string[]; 177 | indentationLevel: number; 178 | } 179 | 180 | export interface ChunkResult { 181 | success: boolean; 182 | chunkIndex: number; 183 | startLine: number; 184 | endLine: number; 185 | bytesProcessed: number; 186 | error?: string; 187 | } 188 | 189 | // Change Tracking Types 190 | export interface Change { 191 | id: string; 192 | timestamp: string; 193 | description: string; 194 | type: string; 195 | details?: Record; 196 | } 197 | 198 | // Error Types 199 | export enum 
FileErrorCode { 200 | FILE_NOT_FOUND = 'FILE_NOT_FOUND', 201 | PERMISSION_DENIED = 'PERMISSION_DENIED', 202 | INVALID_PATH = 'INVALID_PATH', 203 | FILE_TOO_LARGE = 'FILE_TOO_LARGE', 204 | ENCODING_ERROR = 'ENCODING_ERROR', 205 | OPERATION_FAILED = 'OPERATION_FAILED', 206 | UNKNOWN_ERROR = 'UNKNOWN_ERROR' 207 | } 208 | 209 | // File System Types 210 | export interface FileWatcherEvents { 211 | change: (eventType: 'rename' | 'change', filename: string | null) => void; 212 | error: (error: Error) => void; 213 | } 214 | 215 | export interface FileWatcher { 216 | on(event: E, listener: FileWatcherEvents[E]): this; 217 | close(): void; 218 | } 219 | 220 | export class FileOperationError extends Error { 221 | constructor( 222 | public code: FileErrorCode, 223 | message: string, 224 | public path?: string 225 | ) { 226 | super(message); 227 | this.name = 'FileOperationError'; 228 | } 229 | } 230 | -------------------------------------------------------------------------------- /src/httpServer.ts: -------------------------------------------------------------------------------- 1 | import express from 'express'; 2 | import { SSEServerTransport } from '@modelcontextprotocol/sdk/server/sse.js'; 3 | import { FileOperationsServer } from './server.js'; 4 | 5 | /** 6 | * HTTP server implementation with SSE support for the MCP File Operations Server 7 | * Provides a streamable HTTP interface as per MCP SDK v1.5 8 | */ 9 | export class FileOperationsHttpServer { 10 | private app: express.Application; 11 | private fileOpsServer: FileOperationsServer; 12 | private transports: Map = new Map(); 13 | 14 | constructor() { 15 | this.app = express(); 16 | this.fileOpsServer = new FileOperationsServer(); 17 | this.setupMiddleware(); 18 | this.setupRoutes(); 19 | } 20 | 21 | private setupMiddleware(): void { 22 | // Parse JSON bodies 23 | this.app.use(express.json()); 24 | 25 | // CORS headers for cross-origin requests 26 | this.app.use((_req, res, next) => { 27 | 
res.header('Access-Control-Allow-Origin', '*'); 28 | res.header('Access-Control-Allow-Headers', 'Origin, X-Requested-With, Content-Type, Accept'); 29 | res.header('Access-Control-Allow-Methods', 'GET, POST, OPTIONS'); 30 | next(); 31 | }); 32 | 33 | // Handle preflight requests 34 | this.app.options('*', (_req, res) => { 35 | res.sendStatus(200); 36 | }); 37 | } 38 | 39 | private setupRoutes(): void { 40 | // Health check endpoint 41 | this.app.get('/health', (_req, res) => { 42 | res.json({ 43 | status: 'healthy', 44 | service: 'mcp-file-operations-server', 45 | version: '1.0.0', 46 | transport: 'http-sse' 47 | }); 48 | }); 49 | 50 | // SSE endpoint for establishing the MCP connection 51 | this.app.get('/sse', async (_req, res) => { 52 | try { 53 | const transport = new SSEServerTransport('/messages', res); 54 | 55 | // Store transport for routing messages 56 | this.transports.set(transport.sessionId, transport); 57 | 58 | // Set up cleanup when connection closes 59 | transport.onclose = () => { 60 | this.transports.delete(transport.sessionId); 61 | console.error(`SSE connection closed for session ${transport.sessionId}`); 62 | }; 63 | 64 | transport.onerror = (error) => { 65 | this.transports.delete(transport.sessionId); 66 | console.error(`SSE connection error for session ${transport.sessionId}:`, error); 67 | }; 68 | 69 | // Connect the MCP server to this transport 70 | await this.fileOpsServer.getMcpServer().connect(transport); 71 | 72 | console.error(`SSE connection established for session ${transport.sessionId}`); 73 | } catch (error) { 74 | console.error('Error establishing SSE connection:', error); 75 | res.status(500).json({ error: 'Failed to establish SSE connection' }); 76 | } 77 | }); 78 | 79 | // Message endpoint for receiving client messages 80 | this.app.post('/messages', async (req, res) => { 81 | try { 82 | // Extract session ID from request (could be from headers, query params, or body) 83 | const sessionId = req.headers['x-session-id'] as string 
|| 84 | req.query.sessionId as string || 85 | req.body?.sessionId; 86 | 87 | if (!sessionId) { 88 | res.status(400).json({ error: 'Session ID required' }); 89 | return; 90 | } 91 | 92 | const transport = this.transports.get(sessionId); 93 | if (!transport) { 94 | res.status(404).json({ error: 'Session not found' }); 95 | return; 96 | } 97 | 98 | // Handle the message through the transport 99 | await transport.handlePostMessage(req, res, req.body); 100 | } catch (error) { 101 | console.error('Error handling message:', error); 102 | res.status(500).json({ error: 'Failed to handle message' }); 103 | } 104 | }); 105 | 106 | // List active sessions (for debugging) 107 | this.app.get('/sessions', (_req, res) => { 108 | const sessions = Array.from(this.transports.keys()); 109 | res.json({ 110 | activeSessions: sessions.length, 111 | sessions: sessions 112 | }); 113 | }); 114 | } 115 | 116 | /** 117 | * Start the HTTP server 118 | */ 119 | async start(port: number = 3001): Promise<void> { 120 | return new Promise((resolve, reject) => { 121 | // listen() reports bind failures (e.g. EADDRINUSE) asynchronously via the 122 | // server's 'error' event, so a synchronous try/catch here can never observe 123 | // them and the returned promise would hang; reject on 'error' instead. 124 | const listener = this.app.listen(port, () => { 125 | console.error(`File Operations MCP HTTP server running on port ${port}`); 126 | console.error(`SSE endpoint: http://localhost:${port}/sse`); 127 | console.error(`Messages endpoint: http://localhost:${port}/messages`); 128 | console.error(`Health check: http://localhost:${port}/health`); 129 | resolve(); 130 | }); 131 | listener.on('error', reject); 132 | }); 133 | } 134 | 135 | /** 136 | * Stop the HTTP server and clean up connections 137 | */ 138 | async stop(): Promise<void> { 139 | // Close all active transports 140 | for (const transport of this.transports.values()) { 141 | try { 142 | await transport.close(); 143 | } catch (error) { 144 | console.error('Error closing transport:', error); 145 | } 146 | } 147 | this.transports.clear(); 148 | 149 | // Clean up the file operations server 150 | await this.fileOpsServer.cleanup(); 151 | } 152 | 153 | /** 154 | * Get the underlying
Express app for advanced configuration 155 | */ 156 | getApp(): express.Application { 157 | return this.app; 158 | } 159 | } -------------------------------------------------------------------------------- /src/types/index.ts: -------------------------------------------------------------------------------- 1 | import { EventEmitter } from 'events'; 2 | import { PATCH_TYPES, FILE_ERROR_CODES } from '../config/defaults.js'; 3 | 4 | // Change Types 5 | export type ChangeType = 6 | | 'file_edit' 7 | | 'file_create' 8 | | 'file_delete' 9 | | 'file_move' 10 | | 'directory_create' 11 | | 'directory_delete' 12 | | 'directory_copy' 13 | | 'permission_change' 14 | | 'watch_start' 15 | | 'watch_end' 16 | | 'patch_apply'; 17 | 18 | // Merge Strategy Types 19 | export type MergeStrategy = 'overwrite' | 'merge' | 'smart'; 20 | 21 | // Conflict Resolution Types 22 | export type ConflictResolution = 'force' | 'revert' | 'manual'; 23 | 24 | // File System Types 25 | export interface FileWatcher extends EventEmitter { 26 | close(): void; 27 | } 28 | 29 | // File Context Types 30 | export interface FileMetadata { 31 | size: number; 32 | mimeType: string; 33 | modifiedTime: string; 34 | createdTime: string; 35 | isDirectory: boolean; 36 | } 37 | 38 | export interface FileEntry { 39 | path: string; 40 | name: string; 41 | metadata: FileMetadata; 42 | } 43 | 44 | export interface FileContent { 45 | content: string; 46 | metadata: FileMetadata; 47 | encoding: string; 48 | truncated: boolean; 49 | totalLines: number; 50 | } 51 | 52 | export interface DirectoryContent { 53 | files: { [path: string]: FileContent }; 54 | metadata: { 55 | totalFiles: number; 56 | totalSize: number; 57 | truncated: boolean; 58 | }; 59 | } 60 | 61 | // Search Types 62 | export interface SearchOptions { 63 | recursive?: boolean; 64 | includeHidden?: boolean; 65 | maxDepth?: number; 66 | fileTypes?: string[]; 67 | } 68 | 69 | export interface SearchMatch { 70 | path: string; 71 | line: number; 72 | content: 
string; 73 | context: { 74 | before: string[]; 75 | after: string[]; 76 | }; 77 | } 78 | 79 | export interface SearchResult { 80 | matches: SearchMatch[]; 81 | totalMatches: number; 82 | } 83 | 84 | // Patch Operation Types 85 | export type PatchType = typeof PATCH_TYPES[keyof typeof PATCH_TYPES]; 86 | 87 | export interface WhitespaceConfig { 88 | preserveIndentation?: boolean; 89 | preserveLineEndings?: boolean; 90 | normalizeWhitespace?: boolean; 91 | trimTrailingWhitespace?: boolean; 92 | defaultIndentation?: string; 93 | defaultLineEnding?: string; 94 | } 95 | 96 | export type BatchConfig = { 97 | maxChunkSize: number; 98 | maxLinesPerChunk: 1000; 99 | parallel: false; 100 | maxParallelOps: 4; 101 | chunkDelay: 100; 102 | }; 103 | 104 | export interface PatchOperation { 105 | type: PatchType; 106 | filePath: string; 107 | search?: string; 108 | searchPattern?: RegExp; 109 | replace?: string; 110 | lineNumbers?: number[]; 111 | content?: string; 112 | diff?: string; 113 | createBackup?: boolean; 114 | whitespaceConfig?: WhitespaceConfig; 115 | mergeStrategy?: MergeStrategy; 116 | conflictResolution?: ConflictResolution; 117 | } 118 | 119 | export interface PatchResult { 120 | success: boolean; 121 | filePath: string; 122 | type: PatchType; 123 | changesApplied: number; 124 | backupPath?: string; 125 | originalContent?: string[]; 126 | newContent?: string[]; 127 | error?: string; 128 | conflicts?: string[]; 129 | } 130 | 131 | export interface NormalizedContent { 132 | normalized: string; 133 | lineEndings: string; 134 | indentation: string; 135 | hash: string; 136 | stats: { 137 | indentationSpaces: number; 138 | indentationTabs: number; 139 | trailingWhitespace: number; 140 | emptyLines: number; 141 | maxLineLength: number; 142 | }; 143 | } 144 | 145 | // Progress Types 146 | export interface ProgressInfo { 147 | currentChunk: number; 148 | totalChunks: number; 149 | bytesProcessed: number; 150 | totalBytes: number; 151 | linesProcessed: number; 152 | 
totalLines: number; 153 | startTime: number; 154 | estimatedTimeRemaining: number; 155 | } 156 | 157 | export interface ChunkResult { 158 | success: boolean; 159 | chunkIndex: number; 160 | startLine: number; 161 | endLine: number; 162 | bytesProcessed: number; 163 | error?: string; 164 | } 165 | 166 | // Change Tracking Types 167 | export interface Change { 168 | id: string; 169 | timestamp: string; 170 | description: string; 171 | type: ChangeType; 172 | details?: Record; 173 | } 174 | 175 | // Error Types 176 | export type FileErrorCode = typeof FILE_ERROR_CODES[keyof typeof FILE_ERROR_CODES]; 177 | 178 | export class FileOperationError extends Error { 179 | constructor( 180 | public code: FileErrorCode, 181 | message: string, 182 | public path: string 183 | ) { 184 | super(message); 185 | this.name = 'FileOperationError'; 186 | } 187 | } 188 | 189 | // Service Types 190 | export interface FileService { 191 | readFile(path: string, encoding?: string): Promise; 192 | writeFile(path: string, content: string, encoding?: string): Promise; 193 | copyFile(source: string, destination: string, overwrite?: boolean): Promise; 194 | moveFile(source: string, destination: string, overwrite?: boolean): Promise; 195 | deleteFile(path: string): Promise; 196 | exists(path: string): Promise; 197 | getMetadata(path: string): Promise; 198 | } 199 | 200 | export interface DirectoryService { 201 | create(path: string, recursive?: boolean): Promise; 202 | remove(path: string, recursive?: boolean): Promise; 203 | copy(source: string, destination: string, overwrite?: boolean): Promise; 204 | list(path: string, recursive?: boolean): Promise; 205 | } 206 | 207 | export interface WatchService { 208 | watch(path: string, recursive?: boolean): Promise; 209 | unwatch(path: string): Promise; 210 | isWatching(path: string): boolean; 211 | } 212 | 213 | export interface PatchService { 214 | applyPatch(operation: PatchOperation): Promise; 215 | createBackup(path: string): Promise; 216 | 
normalizeContent(content: string, config?: WhitespaceConfig): NormalizedContent; 217 | } 218 | 219 | export interface ChangeTrackingService { 220 | addChange(change: Omit): Promise; 221 | getChanges(limit?: number, type?: ChangeType): Promise; 222 | clearChanges(): Promise; 223 | } 224 | 225 | // Stream Processing Types 226 | export interface StreamProcessor { 227 | processFile( 228 | filePath: string, 229 | processor: (chunk: string, chunkInfo: { start: number; end: number }) => Promise 230 | ): Promise; 231 | on(event: string, listener: (progress: ProgressInfo) => void): void; 232 | } 233 | 234 | // Content Scope Types 235 | export interface ContentScope { 236 | startLine: number; 237 | endLine: number; 238 | content: string; 239 | } 240 | 241 | // Utility Types 242 | export type DeepPartial = { 243 | [P in keyof T]?: T[P] extends object ? DeepPartial : T[P]; 244 | }; 245 | 246 | export type ValidationResult = { 247 | valid: boolean; 248 | errors?: string[]; 249 | }; 250 | -------------------------------------------------------------------------------- /src/services/FileService.ts: -------------------------------------------------------------------------------- 1 | import { promises as fs, existsSync } from 'fs'; 2 | import * as path from 'path'; 3 | import * as mime from 'mime-types'; 4 | import { 5 | FileService, 6 | FileMetadata, 7 | FileOperationError, 8 | FileErrorCode 9 | } from '../types/index.js'; 10 | import { FILE_OPERATION_DEFAULTS } from '../config/defaults.js'; 11 | 12 | /** 13 | * Implementation of FileService interface handling basic file operations 14 | * Follows SOLID principles: 15 | * - Single Responsibility: Handles only file-level operations 16 | * - Open/Closed: Extensible through inheritance 17 | * - Liskov Substitution: Implements FileService interface 18 | * - Interface Segregation: Focused file operation methods 19 | * - Dependency Inversion: Depends on abstractions (FileService interface) 20 | */ 21 | export class FileServiceImpl 
implements FileService { 22 | /** 23 | * Read file content with specified encoding 24 | * @param filePath Path to the file 25 | * @param encoding File encoding (defaults to utf8) 26 | */ 27 | async readFile(filePath: string, encoding: BufferEncoding = FILE_OPERATION_DEFAULTS.encoding): Promise { 28 | try { 29 | const content = await fs.readFile(filePath, encoding); 30 | return content; 31 | } catch (error) { 32 | throw new FileOperationError( 33 | 'FILE_NOT_FOUND' as FileErrorCode, 34 | `Failed to read file: ${error instanceof Error ? error.message : 'Unknown error'}`, 35 | filePath 36 | ); 37 | } 38 | } 39 | 40 | /** 41 | * Write content to file with specified encoding 42 | * @param filePath Path to write the file 43 | * @param content Content to write 44 | * @param encoding File encoding (defaults to utf8) 45 | */ 46 | async writeFile(filePath: string, content: string, encoding: BufferEncoding = FILE_OPERATION_DEFAULTS.encoding): Promise { 47 | try { 48 | // Ensure directory exists 49 | await fs.mkdir(path.dirname(filePath), { recursive: true }); 50 | await fs.writeFile(filePath, content, encoding); 51 | } catch (error) { 52 | throw new FileOperationError( 53 | 'OPERATION_FAILED' as FileErrorCode, 54 | `Failed to write file: ${error instanceof Error ? 
error.message : 'Unknown error'}`, 55 | filePath 56 | ); 57 | } 58 | } 59 | 60 | /** 61 | * Copy file from source to destination 62 | * @param source Source file path 63 | * @param destination Destination file path 64 | * @param overwrite Whether to overwrite existing file 65 | */ 66 | async copyFile(source: string, destination: string, overwrite = FILE_OPERATION_DEFAULTS.overwrite): Promise { 67 | try { 68 | // Ensure destination directory exists 69 | await fs.mkdir(path.dirname(destination), { recursive: true }); 70 | 71 | // Check if destination exists and overwrite is false 72 | if (!overwrite && existsSync(destination)) { 73 | throw new Error('Destination file already exists'); 74 | } 75 | 76 | await fs.copyFile(source, destination, overwrite ? 0 : fs.constants.COPYFILE_EXCL); 77 | } catch (error) { 78 | throw new FileOperationError( 79 | 'OPERATION_FAILED' as FileErrorCode, 80 | `Failed to copy file: ${error instanceof Error ? error.message : 'Unknown error'}`, 81 | source 82 | ); 83 | } 84 | } 85 | 86 | /** 87 | * Move/rename file from source to destination 88 | * @param source Source file path 89 | * @param destination Destination file path 90 | * @param overwrite Whether to overwrite existing file 91 | */ 92 | async moveFile(source: string, destination: string, overwrite = FILE_OPERATION_DEFAULTS.overwrite): Promise { 93 | try { 94 | // Ensure destination directory exists 95 | await fs.mkdir(path.dirname(destination), { recursive: true }); 96 | 97 | // Check if destination exists and overwrite is false 98 | if (!overwrite && existsSync(destination)) { 99 | throw new Error('Destination file already exists'); 100 | } 101 | 102 | await fs.rename(source, destination); 103 | } catch (error) { 104 | throw new FileOperationError( 105 | 'OPERATION_FAILED' as FileErrorCode, 106 | `Failed to move file: ${error instanceof Error ? 
error.message : 'Unknown error'}`, 107 | source 108 | ); 109 | } 110 | } 111 | 112 | /** 113 | * Delete file at specified path 114 | * @param filePath Path to the file to delete 115 | */ 116 | async deleteFile(filePath: string): Promise { 117 | try { 118 | await fs.unlink(filePath); 119 | } catch (error) { 120 | throw new FileOperationError( 121 | 'OPERATION_FAILED' as FileErrorCode, 122 | `Failed to delete file: ${error instanceof Error ? error.message : 'Unknown error'}`, 123 | filePath 124 | ); 125 | } 126 | } 127 | 128 | /** 129 | * Check if file exists at specified path 130 | * @param filePath Path to check 131 | */ 132 | async exists(filePath: string): Promise { 133 | try { 134 | await fs.access(filePath); 135 | return true; 136 | } catch { 137 | return false; 138 | } 139 | } 140 | 141 | /** 142 | * Get file metadata including size, type, and timestamps 143 | * @param filePath Path to the file 144 | */ 145 | async getMetadata(filePath: string): Promise { 146 | try { 147 | const stats = await fs.stat(filePath); 148 | return { 149 | size: stats.size, 150 | mimeType: (mime.lookup(filePath) || 'application/octet-stream') as string, 151 | modifiedTime: stats.mtime.toISOString(), 152 | createdTime: stats.birthtime.toISOString(), 153 | isDirectory: stats.isDirectory(), 154 | }; 155 | } catch (error) { 156 | throw new FileOperationError( 157 | 'FILE_NOT_FOUND' as FileErrorCode, 158 | `Failed to get metadata: ${error instanceof Error ? error.message : 'Unknown error'}`, 159 | filePath 160 | ); 161 | } 162 | } 163 | 164 | /** 165 | * Validate file path and ensure it's accessible 166 | * @param filePath Path to validate 167 | * @throws FileOperationError if path is invalid or inaccessible 168 | */ 169 | protected async validatePath(filePath: string): Promise { 170 | try { 171 | await fs.access(filePath); 172 | } catch (error) { 173 | throw new FileOperationError( 174 | 'INVALID_PATH' as FileErrorCode, 175 | `Invalid or inaccessible path: ${error instanceof Error ? 
error.message : 'Unknown error'}`, 176 | filePath 177 | ); 178 | } 179 | } 180 | 181 | /** 182 | * Ensure file size is within limits 183 | * @param filePath Path to check 184 | * @param maxSize Maximum allowed size in bytes 185 | * @throws FileOperationError if file is too large 186 | */ 187 | protected async validateFileSize(filePath: string, maxSize = FILE_OPERATION_DEFAULTS.maxFileSize): Promise { 188 | const stats = await fs.stat(filePath); 189 | if (stats.size > maxSize) { 190 | throw new FileOperationError( 191 | 'FILE_TOO_LARGE' as FileErrorCode, 192 | `File size ${stats.size} exceeds maximum ${maxSize}`, 193 | filePath 194 | ); 195 | } 196 | } 197 | } 198 | -------------------------------------------------------------------------------- /src/services/WatchService.ts: -------------------------------------------------------------------------------- 1 | import { promises as fs, watch } from 'fs'; 2 | import * as path from 'path'; 3 | import { EventEmitter } from 'events'; 4 | import { 5 | WatchService, 6 | FileWatcher, 7 | FileOperationError, 8 | FileErrorCode 9 | } from '../types/index.js'; 10 | import { FILE_OPERATION_DEFAULTS } from '../config/defaults.js'; 11 | 12 | /** 13 | * Implementation of WatchService interface handling file/directory watching operations 14 | * Follows SOLID principles: 15 | * - Single Responsibility: Handles only watching operations 16 | * - Open/Closed: Extensible through inheritance 17 | * - Liskov Substitution: Implements WatchService interface 18 | * - Interface Segregation: Focused watching methods 19 | * - Dependency Inversion: Depends on abstractions (WatchService interface) 20 | */ 21 | export class WatchServiceImpl implements WatchService { 22 | private watchers: Map; 23 | private watchEmitter: EventEmitter; 24 | private debounceTimers: Map; 25 | 26 | constructor() { 27 | this.watchers = new Map(); 28 | this.watchEmitter = new EventEmitter(); 29 | this.debounceTimers = new Map(); 30 | 31 | // Increase max listeners to 
handle multiple watch points 32 | this.watchEmitter.setMaxListeners(100); 33 | } 34 | 35 | /** 36 | * Start watching a file or directory for changes 37 | * @param watchPath Path to watch 38 | * @param recursive Whether to watch subdirectories recursively 39 | */ 40 | async watch(watchPath: string, recursive = FILE_OPERATION_DEFAULTS.recursive): Promise { 41 | try { 42 | // Validate path exists 43 | await fs.access(watchPath); 44 | 45 | // Check if already watching 46 | if (this.watchers.has(watchPath)) { 47 | throw new FileOperationError( 48 | 'OPERATION_FAILED' as FileErrorCode, 49 | 'Path is already being watched', 50 | watchPath 51 | ); 52 | } 53 | 54 | // Create watcher 55 | const watcher = watch(watchPath, { recursive }) as unknown as FileWatcher; 56 | 57 | // Wrap the native watcher in our own EventEmitter for better control 58 | const watchWrapper = new EventEmitter() as FileWatcher; 59 | watchWrapper.close = () => { 60 | watcher.close(); 61 | this.watchers.delete(watchPath); 62 | this.watchEmitter.emit('watchEnd', watchPath); 63 | }; 64 | 65 | // Handle watch events with debouncing 66 | watcher.on('change', (eventType: 'rename' | 'change', filename: string | null) => { 67 | const fullPath = filename ? 
path.join(watchPath, filename) : watchPath; 68 | 69 | // Clear existing timer for this path 70 | const existingTimer = this.debounceTimers.get(fullPath); 71 | if (existingTimer) { 72 | clearTimeout(existingTimer); 73 | } 74 | 75 | // Set new debounced event 76 | const timer = setTimeout(() => { 77 | this.handleWatchEvent(watchPath, eventType, filename, watchWrapper); 78 | this.debounceTimers.delete(fullPath); 79 | }, FILE_OPERATION_DEFAULTS.watchDebounceTime); 80 | 81 | this.debounceTimers.set(fullPath, timer); 82 | }); 83 | 84 | // Store watcher 85 | this.watchers.set(watchPath, watchWrapper); 86 | this.watchEmitter.emit('watchStart', watchPath); 87 | 88 | return watchWrapper; 89 | } catch (error) { 90 | if (error instanceof FileOperationError) throw error; 91 | throw new FileOperationError( 92 | 'OPERATION_FAILED' as FileErrorCode, 93 | `Failed to start watching: ${error instanceof Error ? error.message : 'Unknown error'}`, 94 | watchPath 95 | ); 96 | } 97 | } 98 | 99 | /** 100 | * Stop watching a path 101 | * @param watchPath Path to stop watching 102 | */ 103 | async unwatch(watchPath: string): Promise { 104 | const watcher = this.watchers.get(watchPath); 105 | if (!watcher) { 106 | throw new FileOperationError( 107 | 'OPERATION_FAILED' as FileErrorCode, 108 | 'Path is not being watched', 109 | watchPath 110 | ); 111 | } 112 | 113 | watcher.close(); 114 | } 115 | 116 | /** 117 | * Check if a path is currently being watched 118 | * @param watchPath Path to check 119 | */ 120 | isWatching(watchPath: string): boolean { 121 | return this.watchers.has(watchPath); 122 | } 123 | 124 | /** 125 | * Get all currently watched paths 126 | */ 127 | getWatchedPaths(): string[] { 128 | return Array.from(this.watchers.keys()); 129 | } 130 | 131 | /** 132 | * Add a listener for watch events 133 | * @param event Event type ('watchStart', 'watchEnd', 'change', 'rename') 134 | * @param listener Callback function 135 | */ 136 | on(event: string, listener: (...args: any[]) => void): 
void { 137 | this.watchEmitter.on(event, listener); 138 | } 139 | 140 | /** 141 | * Remove a listener for watch events 142 | * @param event Event type 143 | * @param listener Callback function 144 | */ 145 | off(event: string, listener: (...args: any[]) => void): void { 146 | this.watchEmitter.off(event, listener); 147 | } 148 | 149 | /** 150 | * Clean up all watchers 151 | */ 152 | async dispose(): Promise { 153 | for (const [path, watcher] of this.watchers) { 154 | try { 155 | watcher.close(); 156 | this.watchEmitter.emit('watchEnd', path); 157 | } catch (error) { 158 | console.error(`Error closing watcher for ${path}:`, error); 159 | } 160 | } 161 | this.watchers.clear(); 162 | this.watchEmitter.removeAllListeners(); 163 | } 164 | 165 | private async handleWatchEvent( 166 | watchPath: string, 167 | eventType: 'rename' | 'change', 168 | filename: string | null, 169 | watcher: FileWatcher 170 | ): Promise { 171 | try { 172 | const fullPath = filename ? path.join(watchPath, filename) : watchPath; 173 | 174 | // Check if path still exists 175 | const exists = await fs.access(fullPath).then(() => true).catch(() => false); 176 | 177 | const eventData = { 178 | type: eventType, 179 | path: fullPath, 180 | exists, 181 | timestamp: new Date().toISOString() 182 | }; 183 | 184 | // Emit specific event 185 | watcher.emit(eventType, eventData); 186 | 187 | // Emit generic change event 188 | watcher.emit('change', eventData); 189 | 190 | // Emit through main emitter 191 | this.watchEmitter.emit('watchEvent', eventData); 192 | } catch (error) { 193 | console.error('Error handling watch event:', error); 194 | // Emit error event but don't throw to keep watcher alive 195 | watcher.emit('error', error); 196 | } 197 | } 198 | 199 | /** 200 | * Validate a path for watching 201 | * @param watchPath Path to validate 202 | */ 203 | protected async validateWatchPath(watchPath: string): Promise { 204 | try { 205 | const stats = await fs.stat(watchPath); 206 | if (!stats.isDirectory() && 
!stats.isFile()) { 207 | throw new FileOperationError( 208 | 'INVALID_PATH' as FileErrorCode, 209 | 'Path must be a file or directory', 210 | watchPath 211 | ); 212 | } 213 | } catch (error) { 214 | if (error instanceof FileOperationError) throw error; 215 | throw new FileOperationError( 216 | 'INVALID_PATH' as FileErrorCode, 217 | `Invalid or inaccessible path: ${error instanceof Error ? error.message : 'Unknown error'}`, 218 | watchPath 219 | ); 220 | } 221 | } 222 | } 223 | -------------------------------------------------------------------------------- /src/services/DirectoryService.ts: -------------------------------------------------------------------------------- 1 | import { promises as fs, existsSync } from 'fs'; 2 | import * as path from 'path'; 3 | import { glob } from 'glob'; 4 | import { 5 | DirectoryService, 6 | FileEntry, 7 | FileOperationError, 8 | FileErrorCode 9 | } from '../types/index.js'; 10 | import { FILE_OPERATION_DEFAULTS } from '../config/defaults.js'; 11 | import { FileServiceImpl } from './FileService.js'; 12 | 13 | /** 14 | * Implementation of DirectoryService interface handling directory operations 15 | * Follows SOLID principles: 16 | * - Single Responsibility: Handles only directory-level operations 17 | * - Open/Closed: Extensible through inheritance 18 | * - Liskov Substitution: Implements DirectoryService interface 19 | * - Interface Segregation: Focused directory operation methods 20 | * - Dependency Inversion: Depends on abstractions (DirectoryService interface) 21 | */ 22 | export class DirectoryServiceImpl implements DirectoryService { 23 | private fileService: FileServiceImpl; 24 | 25 | constructor() { 26 | this.fileService = new FileServiceImpl(); 27 | } 28 | 29 | /** 30 | * Create a directory at the specified path 31 | * @param dirPath Path where to create the directory 32 | * @param recursive Whether to create parent directories if they don't exist 33 | */ 34 | async create(dirPath: string, recursive = 
FILE_OPERATION_DEFAULTS.recursive): Promise { 35 | try { 36 | await fs.mkdir(dirPath, { recursive }); 37 | } catch (error) { 38 | throw new FileOperationError( 39 | 'OPERATION_FAILED' as FileErrorCode, 40 | `Failed to create directory: ${error instanceof Error ? error.message : 'Unknown error'}`, 41 | dirPath 42 | ); 43 | } 44 | } 45 | 46 | /** 47 | * Remove a directory and optionally its contents 48 | * @param dirPath Path of directory to remove 49 | * @param recursive Whether to remove directory contents recursively 50 | */ 51 | async remove(dirPath: string, recursive = false): Promise { 52 | try { 53 | if (recursive) { 54 | // For recursive removal, we need to handle contents first 55 | const entries = await fs.readdir(dirPath, { withFileTypes: true }); 56 | for (const entry of entries) { 57 | const fullPath = path.join(dirPath, entry.name); 58 | if (entry.isDirectory()) { 59 | await this.remove(fullPath, true); 60 | } else { 61 | await fs.unlink(fullPath); 62 | } 63 | } 64 | } 65 | await fs.rmdir(dirPath); 66 | } catch (error) { 67 | throw new FileOperationError( 68 | 'OPERATION_FAILED' as FileErrorCode, 69 | `Failed to remove directory: ${error instanceof Error ? 
error.message : 'Unknown error'}`, 70 | dirPath 71 | ); 72 | } 73 | } 74 | 75 | /** 76 | * Copy a directory and its contents to a new location 77 | * @param source Source directory path 78 | * @param destination Destination directory path 79 | * @param overwrite Whether to overwrite existing files/directories 80 | */ 81 | async copy(source: string, destination: string, overwrite = FILE_OPERATION_DEFAULTS.overwrite): Promise { 82 | try { 83 | // Check if destination exists and overwrite is false 84 | if (!overwrite && existsSync(destination)) { 85 | throw new Error('Destination directory already exists'); 86 | } 87 | 88 | // Create destination directory 89 | await fs.mkdir(destination, { recursive: true }); 90 | 91 | // Read source directory contents 92 | const entries = await fs.readdir(source, { withFileTypes: true }); 93 | 94 | // Process each entry 95 | for (const entry of entries) { 96 | const srcPath = path.join(source, entry.name); 97 | const destPath = path.join(destination, entry.name); 98 | 99 | if (entry.isDirectory()) { 100 | // Recursively copy subdirectories 101 | await this.copy(srcPath, destPath, overwrite); 102 | } else { 103 | // Copy files 104 | await this.fileService.copyFile(srcPath, destPath, overwrite); 105 | } 106 | } 107 | } catch (error) { 108 | throw new FileOperationError( 109 | 'OPERATION_FAILED' as FileErrorCode, 110 | `Failed to copy directory: ${error instanceof Error ? 
error.message : 'Unknown error'}`, 111 | source 112 | ); 113 | } 114 | } 115 | 116 | /** 117 | * List contents of a directory with detailed metadata 118 | * @param dirPath Path of directory to list 119 | * @param recursive Whether to list contents recursively 120 | */ 121 | async list(dirPath: string, recursive = false): Promise { 122 | try { 123 | try { 124 | await fs.access(dirPath); 125 | } catch { 126 | throw new FileOperationError( 127 | 'INVALID_PATH' as FileErrorCode, 128 | 'Directory does not exist or is not accessible', 129 | dirPath 130 | ); 131 | } 132 | 133 | const pattern = recursive ? '**/*' : '*'; 134 | const files = await glob(path.join(dirPath, pattern), { 135 | dot: false, 136 | nodir: false, 137 | windowsPathsNoEscape: true 138 | }); 139 | 140 | const entries: FileEntry[] = []; 141 | 142 | for (const file of files) { 143 | try { 144 | const metadata = await this.fileService.getMetadata(file); 145 | entries.push({ 146 | path: file, 147 | name: path.basename(file), 148 | metadata 149 | }); 150 | } catch (error) { 151 | console.error(`Error getting metadata for ${file}:`, error); 152 | } 153 | } 154 | 155 | return entries; 156 | } catch (error) { 157 | if (error instanceof FileOperationError) throw error; 158 | throw new FileOperationError( 159 | 'OPERATION_FAILED' as FileErrorCode, 160 | `Failed to list directory: ${error instanceof Error ? 
error.message : 'Unknown error'}`, 161 | dirPath 162 | ); 163 | } 164 | } 165 | 166 | /** 167 | * Check if a path exists and is a directory 168 | * @param dirPath Path to check 169 | */ 170 | protected async validateDirectory(dirPath: string): Promise { 171 | try { 172 | const stats = await fs.stat(dirPath); 173 | if (!stats.isDirectory()) { 174 | throw new FileOperationError( 175 | 'INVALID_PATH' as FileErrorCode, 176 | 'Path exists but is not a directory', 177 | dirPath 178 | ); 179 | } 180 | } catch (error) { 181 | if (error instanceof FileOperationError) throw error; 182 | throw new FileOperationError( 183 | 'INVALID_PATH' as FileErrorCode, 184 | `Invalid or inaccessible directory: ${error instanceof Error ? error.message : 'Unknown error'}`, 185 | dirPath 186 | ); 187 | } 188 | } 189 | 190 | /** 191 | * Ensure a directory is empty 192 | * @param dirPath Path to check 193 | * @throws FileOperationError if directory is not empty 194 | */ 195 | protected async ensureEmpty(dirPath: string): Promise { 196 | const entries = await fs.readdir(dirPath); 197 | if (entries.length > 0) { 198 | throw new FileOperationError( 199 | 'OPERATION_FAILED' as FileErrorCode, 200 | 'Directory is not empty', 201 | dirPath 202 | ); 203 | } 204 | } 205 | 206 | /** 207 | * Calculate total size of a directory 208 | * @param dirPath Path to directory 209 | * @returns Total size in bytes 210 | */ 211 | protected async calculateSize(dirPath: string): Promise { 212 | let totalSize = 0; 213 | const entries = await fs.readdir(dirPath, { withFileTypes: true }); 214 | 215 | for (const entry of entries) { 216 | const fullPath = path.join(dirPath, entry.name); 217 | if (entry.isDirectory()) { 218 | totalSize += await this.calculateSize(fullPath); 219 | } else { 220 | const stats = await fs.stat(fullPath); 221 | totalSize += stats.size; 222 | } 223 | } 224 | 225 | return totalSize; 226 | } 227 | } 228 | -------------------------------------------------------------------------------- 
/src/services/ChangeTrackingService.ts: -------------------------------------------------------------------------------- 1 | import { promises as fs, existsSync, readFileSync } from 'fs'; 2 | import * as path from 'path'; 3 | import { 4 | ChangeTrackingService, 5 | Change, 6 | FileOperationError, 7 | FileErrorCode, 8 | ChangeType 9 | } from '../types/index.js'; 10 | import { CHANGE_TRACKING_CONFIG } from '../config/defaults.js'; 11 | 12 | /** 13 | * Implementation of ChangeTrackingService interface handling change history 14 | * Follows SOLID principles: 15 | * - Single Responsibility: Handles only change tracking operations 16 | * - Open/Closed: Extensible through inheritance 17 | * - Liskov Substitution: Implements ChangeTrackingService interface 18 | * - Interface Segregation: Focused change tracking methods 19 | * - Dependency Inversion: Depends on abstractions (ChangeTrackingService interface) 20 | */ 21 | export class ChangeTrackingServiceImpl implements ChangeTrackingService { 22 | private changes: Change[]; 23 | private changesFilePath: string; 24 | 25 | constructor(storageDir?: string) { 26 | // Use provided storage directory or default to user's home directory 27 | const baseDir = storageDir || process.env.USERPROFILE || process.env.HOME || '.'; 28 | this.changesFilePath = path.join(baseDir, '.cline-changes.json'); 29 | this.changes = this.loadChanges(); 30 | } 31 | 32 | /** 33 | * Add a new change to the history 34 | * @param change Change details (without id and timestamp) 35 | */ 36 | async addChange(change: Omit): Promise { 37 | const newChange: Change = { 38 | id: this.generateChangeId(), 39 | timestamp: new Date().toISOString(), 40 | description: change.description, 41 | type: change.type, 42 | details: change.details 43 | }; 44 | 45 | // Add to in-memory changes 46 | this.changes.push(newChange); 47 | 48 | // Trim if exceeding max changes 49 | if (this.changes.length > CHANGE_TRACKING_CONFIG.maxChanges) { 50 | this.changes = 
this.changes.slice(-CHANGE_TRACKING_CONFIG.maxChanges); 51 | } 52 | 53 | // Persist changes if enabled 54 | if (CHANGE_TRACKING_CONFIG.persistChanges) { 55 | await this.saveChanges(); 56 | } 57 | 58 | return newChange; 59 | } 60 | 61 | /** 62 | * Get changes with optional filtering 63 | * @param limit Maximum number of changes to return 64 | * @param type Filter by change type 65 | */ 66 | async getChanges(limit?: number, type?: ChangeType): Promise { 67 | let filteredChanges = [...this.changes]; 68 | 69 | // Apply type filter if specified 70 | if (type) { 71 | filteredChanges = filteredChanges.filter(change => change.type === type); 72 | } 73 | 74 | // Sort by timestamp descending (most recent first) 75 | filteredChanges.sort((a, b) => 76 | new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime() 77 | ); 78 | 79 | // Apply limit if specified 80 | if (limit && limit > 0) { 81 | filteredChanges = filteredChanges.slice(0, limit); 82 | } 83 | 84 | return filteredChanges; 85 | } 86 | 87 | /** 88 | * Clear all tracked changes 89 | */ 90 | async clearChanges(): Promise { 91 | this.changes = []; 92 | if (CHANGE_TRACKING_CONFIG.persistChanges) { 93 | await this.saveChanges(); 94 | } 95 | } 96 | 97 | /** 98 | * Get changes by time range 99 | * @param startTime Start of time range 100 | * @param endTime End of time range 101 | */ 102 | async getChangesByTimeRange(startTime: Date, endTime: Date): Promise { 103 | return this.changes.filter(change => { 104 | const changeTime = new Date(change.timestamp); 105 | return changeTime >= startTime && changeTime <= endTime; 106 | }); 107 | } 108 | 109 | /** 110 | * Get changes for a specific file path 111 | * @param filePath Path to file 112 | */ 113 | async getChangesByFile(filePath: string): Promise { 114 | return this.changes.filter(change => 115 | change.details && 116 | (change.details.filePath === filePath || 117 | (Array.isArray(change.details.files) && 118 | change.details.files.includes(filePath))) 119 | ); 120 | } 
121 | 122 | /** 123 | * Get summary of changes grouped by type 124 | */ 125 | async getChangeSummary(): Promise> { 126 | return this.changes.reduce((summary: Record, change) => { 127 | summary[change.type] = (summary[change.type] || 0) + 1; 128 | return summary; 129 | }, {}); 130 | } 131 | 132 | /** 133 | * Load changes from persistent storage 134 | */ 135 | private loadChanges(): Change[] { 136 | try { 137 | if (existsSync(this.changesFilePath)) { 138 | const data = readFileSync(this.changesFilePath, 'utf8'); 139 | return JSON.parse(data); 140 | } 141 | } catch (error) { 142 | console.error('Error loading changes:', error); 143 | } 144 | return []; 145 | } 146 | 147 | /** 148 | * Save changes to persistent storage 149 | */ 150 | private async saveChanges(): Promise { 151 | try { 152 | // Ensure directory exists 153 | await fs.mkdir(path.dirname(this.changesFilePath), { recursive: true }); 154 | 155 | // Write changes to file 156 | await fs.writeFile( 157 | this.changesFilePath, 158 | JSON.stringify(this.changes, null, 2), 159 | 'utf8' 160 | ); 161 | } catch (error) { 162 | throw new FileOperationError( 163 | 'OPERATION_FAILED' as FileErrorCode, 164 | `Failed to save changes: ${error instanceof Error ? 
error.message : 'Unknown error'}`, 165 | this.changesFilePath 166 | ); 167 | } 168 | } 169 | 170 | /** 171 | * Generate a unique change ID 172 | */ 173 | private generateChangeId(): string { 174 | return `${Date.now()}-${Math.random().toString(36).substring(2, 15)}`; 175 | } 176 | 177 | /** 178 | * Clean up old changes beyond retention period 179 | * @param retentionDays Number of days to retain changes 180 | */ 181 | protected async cleanupOldChanges(retentionDays: number): Promise { 182 | const cutoffDate = new Date(); 183 | cutoffDate.setDate(cutoffDate.getDate() - retentionDays); 184 | 185 | this.changes = this.changes.filter(change => 186 | new Date(change.timestamp) >= cutoffDate 187 | ); 188 | 189 | if (CHANGE_TRACKING_CONFIG.persistChanges) { 190 | await this.saveChanges(); 191 | } 192 | } 193 | 194 | /** 195 | * Export changes to a file 196 | * @param exportPath Path to export file 197 | */ 198 | protected async exportChanges(exportPath: string): Promise { 199 | try { 200 | const exportData = { 201 | exportDate: new Date().toISOString(), 202 | changes: this.changes 203 | }; 204 | await fs.writeFile(exportPath, JSON.stringify(exportData, null, 2), 'utf8'); 205 | } catch (error) { 206 | throw new FileOperationError( 207 | 'OPERATION_FAILED' as FileErrorCode, 208 | `Failed to export changes: ${error instanceof Error ? 
error.message : 'Unknown error'}`, 209 | exportPath 210 | ); 211 | } 212 | } 213 | 214 | /** 215 | * Import changes from a file 216 | * @param importPath Path to import file 217 | * @param merge Whether to merge with existing changes 218 | */ 219 | protected async importChanges(importPath: string, merge = false): Promise { 220 | try { 221 | const importData = JSON.parse(await fs.readFile(importPath, 'utf8')); 222 | 223 | if (!Array.isArray(importData.changes)) { 224 | throw new Error('Invalid import file format'); 225 | } 226 | 227 | if (merge) { 228 | // Merge with existing changes, avoiding duplicates by ID 229 | const existingIds = new Set(this.changes.map(c => c.id)); 230 | const newChanges = importData.changes.filter((c: Change) => !existingIds.has(c.id)); 231 | this.changes.push(...newChanges); 232 | } else { 233 | // Replace existing changes 234 | this.changes = importData.changes; 235 | } 236 | 237 | if (CHANGE_TRACKING_CONFIG.persistChanges) { 238 | await this.saveChanges(); 239 | } 240 | } catch (error) { 241 | throw new FileOperationError( 242 | 'OPERATION_FAILED' as FileErrorCode, 243 | `Failed to import changes: ${error instanceof Error ? error.message : 'Unknown error'}`, 244 | importPath 245 | ); 246 | } 247 | } 248 | } 249 | -------------------------------------------------------------------------------- /DOCKER.md: -------------------------------------------------------------------------------- 1 | # Docker Setup and Local Drive Mounting Guide 2 | 3 | This guide provides comprehensive instructions for running the MCP File Operations Server in Docker containers with local drive mounting support for both Windows and Linux systems. 
4 | 5 | ## Table of Contents 6 | 7 | - [Quick Start](#quick-start) 8 | - [Transport Modes](#transport-modes) 9 | - [Local Drive Mounting](#local-drive-mounting) 10 | - [Windows](#windows) 11 | - [Linux/macOS](#linuxmacos) 12 | - [Configuration](#configuration) 13 | - [Examples](#examples) 14 | - [Troubleshooting](#troubleshooting) 15 | 16 | ## Quick Start 17 | 18 | ### Build the Docker Image 19 | 20 | ```bash 21 | # Clone the repository 22 | git clone https://github.com/bsmi021/mcp-file-operations-server.git 23 | cd mcp-file-operations-server 24 | 25 | # Build the Docker image 26 | docker build -t mcp-file-operations-server . 27 | ``` 28 | 29 | ### Run with Default Settings (Stdio Transport) 30 | 31 | ```bash 32 | docker run -it --rm \ 33 | -v "$(pwd):/workspace" \ 34 | mcp-file-operations-server 35 | ``` 36 | 37 | ### Run with HTTP Transport 38 | 39 | ```bash 40 | docker run -it --rm \ 41 | -p 3001:3001 \ 42 | -v "$(pwd):/workspace" \ 43 | -e MCP_TRANSPORT=http \ 44 | mcp-file-operations-server 45 | ``` 46 | 47 | ## Transport Modes 48 | 49 | The MCP File Operations Server supports two transport modes: 50 | 51 | ### 1. Stdio Transport (Default) 52 | - **Use case**: Direct integration with MCP clients like Claude Desktop 53 | - **Communication**: Standard input/output streams 54 | - **Configuration**: No additional setup required 55 | - **Docker command**: Default behavior 56 | 57 | ### 2. 
HTTP Transport with Server-Sent Events (SSE) 58 | - **Use case**: Remote connections, web applications, development/testing 59 | - **Communication**: HTTP with SSE for streaming 60 | - **Configuration**: Requires port exposure (-p 3001:3001) 61 | - **Docker command**: Set `MCP_TRANSPORT=http` 62 | 63 | ## Local Drive Mounting 64 | 65 | ### Windows 66 | 67 | #### Basic Drive Mounting 68 | 69 | **Mount C: drive:** 70 | ```cmd 71 | docker run -it --rm ^ 72 | -v "C:\:/host-c" ^ 73 | -p 3001:3001 ^ 74 | -e MCP_TRANSPORT=http ^ 75 | mcp-file-operations-server 76 | ``` 77 | 78 | **Mount specific user directory:** 79 | ```cmd 80 | docker run -it --rm ^ 81 | -v "C:\Users\%USERNAME%\Documents:/workspace" ^ 82 | -p 3001:3001 ^ 83 | -e MCP_TRANSPORT=http ^ 84 | mcp-file-operations-server 85 | ``` 86 | 87 | **Mount multiple drives:** 88 | ```cmd 89 | docker run -it --rm ^ 90 | -v "C:\:/host-c" ^ 91 | -v "D:\:/host-d" ^ 92 | -v "E:\:/host-e" ^ 93 | -p 3001:3001 ^ 94 | -e MCP_TRANSPORT=http ^ 95 | mcp-file-operations-server 96 | ``` 97 | 98 | #### PowerShell Examples 99 | 100 | **Mount current directory:** 101 | ```powershell 102 | docker run -it --rm ` 103 | -v "${PWD}:/workspace" ` 104 | -p 3001:3001 ` 105 | -e MCP_TRANSPORT=http ` 106 | mcp-file-operations-server 107 | ``` 108 | 109 | **Mount with specific permissions:** 110 | ```powershell 111 | docker run -it --rm ` 112 | -v "C:\MyProject:/workspace:rw" ` 113 | -v "C:\ReadOnlyData:/readonly:ro" ` 114 | -p 3001:3001 ` 115 | -e MCP_TRANSPORT=http ` 116 | mcp-file-operations-server 117 | ``` 118 | 119 | #### Windows Subsystem for Linux (WSL) 120 | 121 | **Access WSL filesystem from Windows Docker:** 122 | ```cmd 123 | docker run -it --rm ^ 124 | -v "\\wsl$\Ubuntu\home\username\project:/workspace" ^ 125 | -p 3001:3001 ^ 126 | -e MCP_TRANSPORT=http ^ 127 | mcp-file-operations-server 128 | ``` 129 | 130 | ### Linux/macOS 131 | 132 | #### Basic Drive Mounting 133 | 134 | **Mount home directory:** 135 | ```bash 136 | docker run -it 
--rm \ 137 | -v "$HOME:/home-user" \ 138 | -p 3001:3001 \ 139 | -e MCP_TRANSPORT=http \ 140 | mcp-file-operations-server 141 | ``` 142 | 143 | **Mount current working directory:** 144 | ```bash 145 | docker run -it --rm \ 146 | -v "$(pwd):/workspace" \ 147 | -p 3001:3001 \ 148 | -e MCP_TRANSPORT=http \ 149 | mcp-file-operations-server 150 | ``` 151 | 152 | **Mount multiple directories:** 153 | ```bash 154 | docker run -it --rm \ 155 | -v "/home:/host-home" \ 156 | -v "/opt:/host-opt" \ 157 | -v "/var/log:/host-logs:ro" \ 158 | -p 3001:3001 \ 159 | -e MCP_TRANSPORT=http \ 160 | mcp-file-operations-server 161 | ``` 162 | 163 | #### Permission Management 164 | 165 | **Run with current user permissions:** 166 | ```bash 167 | docker run -it --rm \ 168 | --user "$(id -u):$(id -g)" \ 169 | -v "$HOME:/home-user" \ 170 | -v "/etc/passwd:/etc/passwd:ro" \ 171 | -v "/etc/group:/etc/group:ro" \ 172 | -p 3001:3001 \ 173 | -e MCP_TRANSPORT=http \ 174 | mcp-file-operations-server 175 | ``` 176 | 177 | **Mount with specific ownership:** 178 | ```bash 179 | # First, create a directory with proper permissions 180 | mkdir -p ./shared-data 181 | sudo chown 1000:1000 ./shared-data 182 | 183 | docker run -it --rm \ 184 | -v "$(pwd)/shared-data:/workspace" \ 185 | -p 3001:3001 \ 186 | -e MCP_TRANSPORT=http \ 187 | mcp-file-operations-server 188 | ``` 189 | 190 | ## Configuration 191 | 192 | ### Environment Variables 193 | 194 | | Variable | Default | Description | 195 | |----------|---------|-------------| 196 | | `MCP_TRANSPORT` | `stdio` | Transport mode: `stdio` or `http` | 197 | | `MCP_HTTP_PORT` | `3001` | Port for HTTP transport | 198 | 199 | ### Volume Mount Options 200 | 201 | | Option | Description | Example | 202 | |--------|-------------|---------| 203 | | `rw` | Read-write access (default) | `-v "/path:/container:rw"` | 204 | | `ro` | Read-only access | `-v "/path:/container:ro"` | 205 | | `z` | SELinux private label | `-v "/path:/container:z"` | 206 | | `Z` | SELinux shared 
label | `-v "/path:/container:Z"` | 207 | 208 | ## Examples 209 | 210 | ### Development Environment 211 | 212 | **Full development setup with code and data access:** 213 | 214 | ```bash 215 | # Linux/macOS 216 | docker run -it --rm \ 217 | --name mcp-file-ops-dev \ 218 | -v "$(pwd):/workspace" \ 219 | -v "$HOME/.ssh:/root/.ssh:ro" \ 220 | -v "$HOME/.gitconfig:/root/.gitconfig:ro" \ 221 | -p 3001:3001 \ 222 | -e MCP_TRANSPORT=http \ 223 | mcp-file-operations-server 224 | ``` 225 | 226 | ```cmd 227 | REM Windows 228 | docker run -it --rm ^ 229 | --name mcp-file-ops-dev ^ 230 | -v "%CD%:/workspace" ^ 231 | -v "%USERPROFILE%\.ssh:/root/.ssh:ro" ^ 232 | -v "%USERPROFILE%\.gitconfig:/root/.gitconfig:ro" ^ 233 | -p 3001:3001 ^ 234 | -e MCP_TRANSPORT=http ^ 235 | mcp-file-operations-server 236 | ``` 237 | 238 | ### Production Deployment 239 | 240 | **Secure production setup with limited access:** 241 | 242 | ```bash 243 | docker run -d \ 244 | --name mcp-file-ops-prod \ 245 | --restart unless-stopped \ 246 | -v "/opt/app-data:/workspace:rw" \ 247 | -v "/opt/app-config:/config:ro" \ 248 | -v "/var/log/mcp:/logs" \ 249 | -p 127.0.0.1:3001:3001 \ 250 | -e MCP_TRANSPORT=http \ 251 | -e MCP_HTTP_PORT=3001 \ 252 | --user 1000:1000 \ 253 | mcp-file-operations-server 254 | ``` 255 | 256 | ### Docker Compose 257 | 258 | **Create a `docker-compose.yml` file:** 259 | 260 | ```yaml 261 | version: '3.8' 262 | 263 | services: 264 | mcp-file-operations: 265 | build: . 
266 | container_name: mcp-file-operations-server 267 | environment: 268 | - MCP_TRANSPORT=http 269 | - MCP_HTTP_PORT=3001 270 | ports: 271 | - "3001:3001" 272 | volumes: 273 | - "./data:/workspace" 274 | - "./config:/config:ro" 275 | - "./logs:/logs" 276 | restart: unless-stopped 277 | user: "1000:1000" # Adjust to your user ID 278 | 279 | # Optional: Add a reverse proxy 280 | nginx: 281 | image: nginx:alpine 282 | container_name: mcp-nginx 283 | ports: 284 | - "80:80" 285 | - "443:443" 286 | volumes: 287 | - "./nginx.conf:/etc/nginx/nginx.conf:ro" 288 | - "./ssl:/etc/ssl:ro" 289 | depends_on: 290 | - mcp-file-operations 291 | restart: unless-stopped 292 | ``` 293 | 294 | **Run with Docker Compose:** 295 | 296 | ```bash 297 | docker-compose up -d 298 | ``` 299 | 300 | ## Troubleshooting 301 | 302 | ### Common Issues 303 | 304 | #### Permission Denied Errors 305 | 306 | **Problem**: Cannot read/write files in mounted volumes 307 | 308 | **Solution for Linux/macOS:** 309 | ```bash 310 | # Check file permissions 311 | ls -la /path/to/mounted/directory 312 | 313 | # Fix ownership 314 | sudo chown -R $(id -u):$(id -g) /path/to/mounted/directory 315 | 316 | # Run with user mapping 317 | docker run --user "$(id -u):$(id -g)" ... 318 | ``` 319 | 320 | **Solution for Windows:** 321 | ```cmd 322 | # Ensure Docker Desktop has access to the drive 323 | # Go to Docker Desktop > Settings > Resources > File Sharing 324 | # Add the drive/folder you want to mount 325 | ``` 326 | 327 | #### Port Already in Use 328 | 329 | **Problem**: Port 3001 is already in use 330 | 331 | **Solution:** 332 | ```bash 333 | # Use a different port 334 | docker run -p 3002:3001 -e MCP_HTTP_PORT=3001 ... 
335 | 336 | # Or find and stop the conflicting process 337 | lsof -i :3001 # Linux/macOS 338 | netstat -ano | findstr :3001 # Windows 339 | ``` 340 | 341 | #### Container Cannot Access Network 342 | 343 | **Problem**: HTTP transport not working 344 | 345 | **Solution:** 346 | ```bash 347 | # Check container networking 348 | docker network ls 349 | docker inspect 350 | 351 | # Test connectivity 352 | curl http://localhost:3001/health 353 | ``` 354 | 355 | #### SELinux Issues (Linux) 356 | 357 | **Problem**: Permission denied despite correct ownership 358 | 359 | **Solution:** 360 | ```bash 361 | # Add SELinux labels 362 | docker run -v "/path:/container:Z" ... 363 | 364 | # Or temporarily disable SELinux 365 | sudo setenforce 0 366 | ``` 367 | 368 | ### Testing Your Setup 369 | 370 | #### Test Stdio Transport 371 | 372 | ```bash 373 | echo '{"jsonrpc": "2.0", "id": 1, "method": "initialize", "params": {"protocolVersion": "2024-11-05", "capabilities": {}, "clientInfo": {"name": "test", "version": "1.0.0"}}}' | docker run -i --rm -v "$(pwd):/workspace" mcp-file-operations-server 374 | ``` 375 | 376 | #### Test HTTP Transport 377 | 378 | ```bash 379 | # Start the container 380 | docker run -d --name test-mcp -p 3001:3001 -v "$(pwd):/workspace" -e MCP_TRANSPORT=http mcp-file-operations-server 381 | 382 | # Test health endpoint 383 | curl http://localhost:3001/health 384 | 385 | # Test SSE endpoint (should receive SSE headers) 386 | curl -v http://localhost:3001/sse 387 | 388 | # Cleanup 389 | docker stop test-mcp && docker rm test-mcp 390 | ``` 391 | 392 | ### Performance Considerations 393 | 394 | - **Volume mounting**: Use bind mounts for better performance than named volumes for development 395 | - **Network**: Use host networking (`--network host`) for better performance in Linux 396 | - **Resources**: Allocate sufficient memory for large file operations 397 | - **Storage**: Use SSD storage for mounted volumes when possible 398 | 399 | ### Security Best Practices 
400 | 401 | 1. **Principle of least privilege**: Only mount directories that are needed 402 | 2. **Read-only mounts**: Use `:ro` for configuration and reference data 403 | 3. **User mapping**: Run containers with non-root users when possible 404 | 4. **Network isolation**: Bind HTTP transport to localhost in production 405 | 5. **Regular updates**: Keep the Docker image updated with latest security patches -------------------------------------------------------------------------------- /src/services/StreamProcessor.ts: -------------------------------------------------------------------------------- 1 | import { Transform, pipeline } from 'stream'; 2 | import { EventEmitter } from 'events'; 3 | import { promises as fs, createReadStream, createWriteStream } from 'fs'; 4 | import { promisify } from 'util'; 5 | 6 | const pipelineAsync = promisify(pipeline); 7 | import { 8 | StreamProcessor, 9 | ChunkResult, 10 | ProgressInfo, 11 | FileOperationError, 12 | FileErrorCode 13 | } from '../types/index.js'; 14 | import { DEFAULT_BATCH_CONFIG } from '../config/defaults.js'; 15 | 16 | /** 17 | * Implementation of StreamProcessor interface handling streaming operations 18 | * Follows SOLID principles: 19 | * - Single Responsibility: Handles only streaming operations 20 | * - Open/Closed: Extensible through inheritance 21 | * - Liskov Substitution: Implements StreamProcessor interface 22 | * - Interface Segregation: Focused streaming methods 23 | * - Dependency Inversion: Depends on abstractions (StreamProcessor interface) 24 | */ 25 | export class StreamProcessorImpl implements StreamProcessor { 26 | private config: typeof DEFAULT_BATCH_CONFIG; 27 | private buffer: string = ''; 28 | private chunkIndex: number = 0; 29 | private bytesProcessed: number = 0; 30 | private linesProcessed: number = 0; 31 | private startTime: number = Date.now(); 32 | private progressEmitter: EventEmitter; 33 | private transform: Transform; 34 | 35 | constructor(config = DEFAULT_BATCH_CONFIG) { 36 | 
this.config = config; 37 | this.progressEmitter = new EventEmitter(); 38 | this.progressEmitter.setMaxListeners(100); // Allow more listeners for large batch operations 39 | 40 | // Initialize transform stream 41 | this.transform = new Transform({ 42 | objectMode: true, 43 | transform: (chunk: any, _encoding: BufferEncoding, callback: (error?: Error | null, data?: any) => void) => { 44 | try { 45 | this.buffer += chunk.toString(); 46 | while (this.shouldProcessChunk()) { 47 | const { content, lineCount } = this.extractChunk(); 48 | this.bytesProcessed += content.length; 49 | this.linesProcessed += lineCount; 50 | this.transform.push({ 51 | content, 52 | metadata: { 53 | chunkIndex: this.chunkIndex++, 54 | startLine: this.linesProcessed - lineCount, 55 | endLine: this.linesProcessed, 56 | bytesProcessed: content.length 57 | } 58 | }); 59 | } 60 | callback(); 61 | } catch (error) { 62 | callback(error instanceof Error ? error : new Error(String(error))); 63 | } 64 | }, 65 | flush: (callback: (error?: Error | null, data?: any) => void) => { 66 | try { 67 | if (this.buffer.length > 0) { 68 | const { content, lineCount } = this.extractChunk(); 69 | this.bytesProcessed += content.length; 70 | this.linesProcessed += lineCount; 71 | this.transform.push({ 72 | content, 73 | metadata: { 74 | chunkIndex: this.chunkIndex++, 75 | startLine: this.linesProcessed - lineCount, 76 | endLine: this.linesProcessed, 77 | bytesProcessed: content.length 78 | } 79 | }); 80 | } 81 | callback(); 82 | } catch (error) { 83 | callback(error instanceof Error ? 
error : new Error(String(error))); 84 | } 85 | } 86 | }); 87 | } 88 | 89 | /** 90 | * Process a file using streaming with progress tracking 91 | * @param filePath Path to the file to process 92 | * @param processor Function to process each chunk 93 | */ 94 | async processFile( 95 | filePath: string, 96 | processor: (chunk: string, chunkInfo: { start: number; end: number }) => Promise 97 | ): Promise { 98 | const results: ChunkResult[] = []; 99 | const stats = await fs.stat(filePath); 100 | const maxChunkSize = this.config.maxChunkSize; 101 | const totalChunks = Math.ceil(stats.size / maxChunkSize); 102 | 103 | return new Promise((resolve, reject) => { 104 | const readStream = createReadStream(filePath, { encoding: 'utf8' }); 105 | const tempPath = `${filePath}.tmp`; 106 | const writeStream = createWriteStream(tempPath, { encoding: 'utf8' }); 107 | 108 | const processChunk = async (chunk: string): Promise => { 109 | this.buffer += chunk; 110 | let output = ''; 111 | 112 | while (this.shouldProcessChunk()) { 113 | const { content, lineCount } = this.extractChunk(); 114 | const startLine = this.linesProcessed; 115 | const endLine = startLine + lineCount; 116 | 117 | try { 118 | const processed = await processor(content, { start: startLine, end: endLine }); 119 | 120 | this.bytesProcessed += content.length; 121 | this.linesProcessed += lineCount; 122 | 123 | const result: ChunkResult = { 124 | success: true, 125 | chunkIndex: this.chunkIndex++, 126 | startLine, 127 | endLine, 128 | bytesProcessed: content.length 129 | }; 130 | results.push(result); 131 | 132 | this.emitProgress({ 133 | currentChunk: this.chunkIndex, 134 | totalChunks, 135 | bytesProcessed: this.bytesProcessed, 136 | totalBytes: stats.size, 137 | linesProcessed: this.linesProcessed, 138 | totalLines: -1, // Unknown until full processing 139 | startTime: this.startTime, 140 | estimatedTimeRemaining: this.calculateETA(stats.size) 141 | }); 142 | 143 | if (this.config.chunkDelay) { 144 | await new 
Promise(resolve => setTimeout(resolve, this.config.chunkDelay)); 145 | } 146 | 147 | output += processed; 148 | } catch (error) { 149 | const errorResult: ChunkResult = { 150 | success: false, 151 | chunkIndex: this.chunkIndex++, 152 | startLine, 153 | endLine, 154 | bytesProcessed: 0, 155 | error: error instanceof Error ? error.message : String(error) 156 | }; 157 | results.push(errorResult); 158 | } 159 | } 160 | 161 | return output; 162 | }; 163 | 164 | const transform = new Transform({ 165 | transform: (chunk, _encoding, callback) => { 166 | processChunk(chunk.toString()) 167 | .then(processed => callback(null, processed)) 168 | .catch(error => callback(error)); 169 | }, 170 | flush: (callback) => { 171 | if (this.buffer.length > 0) { 172 | processChunk(this.buffer) 173 | .then(processed => callback(null, processed)) 174 | .catch(error => callback(error)); 175 | } else { 176 | callback(); 177 | } 178 | } 179 | }); 180 | 181 | pipelineAsync(readStream, transform, writeStream).catch(async (error: unknown) => { 182 | await fs.unlink(tempPath).catch(() => { }); 183 | if (error) { 184 | reject(new FileOperationError( 185 | 'OPERATION_FAILED' as FileErrorCode, 186 | `Stream processing failed: ${error instanceof Error ? error.message : 'Unknown error'}`, 187 | filePath 188 | )); 189 | } else { 190 | try { 191 | await fs.rename(tempPath, filePath); 192 | resolve(results); 193 | } catch (renameError) { 194 | await fs.unlink(tempPath).catch(() => { }); 195 | reject(new FileOperationError( 196 | 'OPERATION_FAILED' as FileErrorCode, 197 | `Failed to rename temporary file: ${renameError instanceof Error ? 
renameError.message : 'Unknown error'}`, 198 | filePath 199 | )); 200 | } 201 | } 202 | }); 203 | }); 204 | } 205 | 206 | /** 207 | * Add event listener for progress updates 208 | * @param event Event name 209 | * @param listener Callback function 210 | */ 211 | on(event: string, listener: (...args: any[]) => void): this { 212 | if (event === 'progress') { 213 | this.progressEmitter.on(event, listener); 214 | } else { 215 | this.transform.on(event, listener); 216 | } 217 | return this; 218 | } 219 | 220 | /** 221 | * Remove event listener 222 | * @param event Event name 223 | * @param listener Callback function 224 | */ 225 | off(event: string, listener: (...args: any[]) => void): this { 226 | if (event === 'progress') { 227 | this.progressEmitter.off(event, listener); 228 | } else { 229 | this.transform.off(event, listener); 230 | } 231 | return this; 232 | } 233 | 234 | 235 | /** 236 | * Check if current buffer should be processed 237 | */ 238 | private shouldProcessChunk(): boolean { 239 | const currentSize = this.buffer.length; 240 | const lineCount = this.buffer.split('\n').length - 1; 241 | return currentSize >= this.config.maxChunkSize || lineCount >= this.config.maxLinesPerChunk; 242 | } 243 | 244 | /** 245 | * Extract a chunk from the buffer 246 | */ 247 | private extractChunk(): { content: string; lineCount: number } { 248 | const lines = this.buffer.split('\n'); 249 | const chunkLines = lines.slice(0, this.config.maxLinesPerChunk); 250 | const chunk = chunkLines.join('\n'); 251 | this.buffer = lines.slice(this.config.maxLinesPerChunk).join('\n'); 252 | return { content: chunk, lineCount: chunkLines.length }; 253 | } 254 | 255 | /** 256 | * Calculate estimated time remaining 257 | * @param totalBytes Total bytes to process 258 | */ 259 | private calculateETA(totalBytes: number): number { 260 | const elapsedMs = Date.now() - this.startTime; 261 | const bytesPerMs = this.bytesProcessed / elapsedMs; 262 | const remainingBytes = totalBytes - 
this.bytesProcessed; 263 | return remainingBytes / bytesPerMs; 264 | } 265 | 266 | /** 267 | * Emit progress event 268 | * @param progress Progress information 269 | */ 270 | private emitProgress(progress: ProgressInfo): void { 271 | this.progressEmitter.emit('progress', progress); 272 | } 273 | } 274 | -------------------------------------------------------------------------------- /examples/http-client.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | MCP File Operations Server - HTTP Client Example 7 | 61 | 62 | 63 |

MCP File Operations Server - HTTP Client Example

64 | 65 |
66 |

Connection

67 |
68 | 69 | 70 |
71 | 72 | 73 | 74 |
Disconnected
75 |
76 | 77 |
78 |
79 |

File Operations

80 | 81 |

Read File

82 | 83 | 84 | 85 |

Write File

86 | 87 | 88 | 89 | 90 |

List Directory

91 | 92 | 93 | 94 |
95 | 96 |
97 |

Server Information

98 | 99 | 100 | 101 | 102 |
103 |
104 | 105 |
106 |

Communication Log

107 | 108 |
109 |
110 | 111 | 322 | 323 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # File Operations MCP Server 2 | 3 | [![smithery badge](https://smithery.ai/badge/@bsmi021/mcp-file-operations-server)](https://smithery.ai/server/@bsmi021/mcp-file-operations-server) 4 | 5 | A Model Context Protocol (MCP) server that provides enhanced file operation capabilities with streaming, patching, and change tracking support. 6 | 7 | 8 | File Operations Server MCP server 9 | 10 | 11 | ## Features 12 | 13 | - **Basic File Operations**: Copy, read, write, move, and delete files 14 | - **Directory Operations**: Create, remove, and copy directories 15 | - **File Watching**: Monitor files and directories for changes 16 | - **Change Tracking**: Track and query file operation history 17 | - **Streaming Support**: Handle large files efficiently with streaming 18 | - **HTTP Interface**: Streamable HTTP interface with Server-Sent Events (SSE) 19 | - **Resource Support**: Access files and directories through MCP resources 20 | - **Progress Reporting**: Real-time progress updates for long operations 21 | - **Rate Limiting**: Protection against excessive requests 22 | - **Enhanced Security**: Path validation and input sanitization 23 | - **Robust Error Handling**: Comprehensive error handling and reporting 24 | - **Type Safety**: Full TypeScript support with strict type checking 25 | - **Docker Support**: Containerized deployment with volume mounting 26 | 27 | ## Installation 28 | 29 | ### Installing via Smithery 30 | 31 | To install File Operations Server for Claude Desktop automatically via [Smithery](https://smithery.ai/server/@bsmi021/mcp-file-operations-server): 32 | 33 | ```bash 34 | npx -y @smithery/cli install @bsmi021/mcp-file-operations-server --client claude 35 | ``` 36 | 37 | ### Manual Installation 38 | ```bash 39 | npm install 40 | ``` 41 | 42 | ### Docker 
Installation 43 | 44 | See [DOCKER.md](./DOCKER.md) for comprehensive Docker setup instructions including local drive mounting for Windows and Linux. 45 | 46 | **Quick Docker Start:** 47 | ```bash 48 | # Stdio transport (for MCP clients) 49 | docker run -it --rm -v "$(pwd):/workspace" ghcr.io/bsmi021/mcp-file-operations-server 50 | 51 | # HTTP transport (for web/remote access) 52 | docker run -it --rm -p 3001:3001 -v "$(pwd):/workspace" -e MCP_TRANSPORT=http ghcr.io/bsmi021/mcp-file-operations-server 53 | ``` 54 | 55 | ## Usage 56 | 57 | ### Transport Modes 58 | 59 | The server supports two transport modes: 60 | 61 | #### 1. Stdio Transport (Default) 62 | For direct integration with MCP clients like Claude Desktop: 63 | 64 | ```bash 65 | npm start 66 | ``` 67 | 68 | #### 2. HTTP Transport with SSE (New in v1.5) 69 | For remote connections and web applications: 70 | 71 | ```bash 72 | npm run start:http 73 | ``` 74 | 75 | The HTTP server provides: 76 | - **SSE Endpoint**: `GET http://localhost:3001/sse` - Establishes streaming connection 77 | - **Messages Endpoint**: `POST http://localhost:3001/messages` - Receives client messages 78 | - **Health Check**: `GET http://localhost:3001/health` - Server status 79 | - **Sessions**: `GET http://localhost:3001/sessions` - Active connection info 80 | 81 | ### Starting the Server 82 | 83 | #### Development Mode 84 | 85 | ```bash 86 | # Stdio transport with auto-reload 87 | npm run dev 88 | 89 | # HTTP transport with auto-reload 90 | npm run dev:http 91 | ``` 92 | 93 | #### Production Mode 94 | 95 | ```bash 96 | # Stdio transport 97 | npm start 98 | 99 | # HTTP transport 100 | npm run start:http 101 | 102 | # Custom port for HTTP 103 | npm run start:http -- --port 8080 104 | ``` 105 | 106 | ### Available Tools 107 | 108 | #### Basic File Operations 109 | 110 | - `copy_file`: Copy a file to a new location 111 | - `read_file`: Read content from a file 112 | - `write_file`: Write content to a file 113 | - `move_file`: Move/rename 
a file 114 | - `delete_file`: Delete a file 115 | - `append_file`: Append content to a file 116 | 117 | #### Directory Operations 118 | 119 | - `make_directory`: Create a directory 120 | - `remove_directory`: Remove a directory 121 | - `copy_directory`: Copy a directory recursively (with progress reporting) 122 | 123 | #### Watch Operations 124 | 125 | - `watch_directory`: Start watching a directory for changes 126 | - `unwatch_directory`: Stop watching a directory 127 | 128 | #### Change Tracking 129 | 130 | - `get_changes`: Get the list of recorded changes 131 | - `clear_changes`: Clear all recorded changes 132 | 133 | ### Available Resources 134 | 135 | #### Static Resources 136 | 137 | - `file:///recent-changes`: List of recent file system changes 138 | 139 | #### Resource Templates 140 | 141 | - `file://{path}`: Access file contents 142 | - `metadata://{path}`: Access file metadata 143 | - `directory://{path}`: List directory contents 144 | 145 | ### Example Usage 146 | 147 | #### Using Stdio Transport (MCP Clients) 148 | 149 | ```typescript 150 | // Copy a file 151 | await fileOperations.copyFile({ 152 | source: 'source.txt', 153 | destination: 'destination.txt', 154 | overwrite: false 155 | }); 156 | 157 | // Watch a directory 158 | await fileOperations.watchDirectory({ 159 | path: './watched-dir', 160 | recursive: true 161 | }); 162 | 163 | // Access file contents through resource 164 | const resource = await mcp.readResource('file:///path/to/file.txt'); 165 | console.log(resource.contents[0].text); 166 | 167 | // Copy directory with progress tracking 168 | const result = await fileOperations.copyDirectory({ 169 | source: './source-dir', 170 | destination: './dest-dir', 171 | overwrite: false 172 | }); 173 | // Progress token in result can be used to track progress 174 | console.log(result.progressToken); 175 | ``` 176 | 177 | #### Using HTTP Transport (Web/Remote) 178 | 179 | **Connecting via JavaScript:** 180 | 181 | ```javascript 182 | // Establish SSE 
connection 183 | const eventSource = new EventSource('http://localhost:3001/sse'); 184 | let sessionId = null; 185 | 186 | eventSource.onopen = function() { 187 | console.log('Connected to MCP server'); 188 | }; 189 | 190 | eventSource.onmessage = function(event) { 191 | const message = JSON.parse(event.data); 192 | 193 | // Extract session ID from first message 194 | if (!sessionId && message.sessionId) { 195 | sessionId = message.sessionId; 196 | } 197 | 198 | console.log('Received:', message); 199 | }; 200 | 201 | // Send a message to the server 202 | async function sendMessage(method, params) { 203 | const message = { 204 | jsonrpc: '2.0', 205 | id: Date.now(), 206 | method: method, 207 | params: params 208 | }; 209 | 210 | const response = await fetch('http://localhost:3001/messages', { 211 | method: 'POST', 212 | headers: { 213 | 'Content-Type': 'application/json', 214 | 'X-Session-ID': sessionId 215 | }, 216 | body: JSON.stringify(message) 217 | }); 218 | 219 | return response.json(); 220 | } 221 | 222 | // Example: List tools 223 | sendMessage('tools/list', {}); 224 | 225 | // Example: Read a file 226 | sendMessage('tools/call', { 227 | name: 'read_file', 228 | arguments: { path: '/workspace/example.txt' } 229 | }); 230 | ``` 231 | 232 | **Using curl for testing:** 233 | 234 | ```bash 235 | # Start SSE connection in background 236 | curl -N http://localhost:3001/sse & 237 | 238 | # Check server health 239 | curl http://localhost:3001/health 240 | 241 | # List active sessions 242 | curl http://localhost:3001/sessions 243 | ``` 244 | 245 | **Interactive Web Client:** 246 | 247 | A complete interactive example is available at [`examples/http-client.html`](./examples/http-client.html). Open this file in a web browser to test the HTTP interface with a user-friendly GUI. 
248 | 249 | ## What's New in v1.5 250 | 251 | ### MCP SDK v1.5 Upgrade 252 | - **Streamable HTTP Interface**: New HTTP transport with Server-Sent Events (SSE) 253 | - **Enhanced API**: Upgraded to MCP SDK v1.5 with improved zod-based schemas 254 | - **Multiple Connections**: Support for simultaneous HTTP connections with session management 255 | - **Better Type Safety**: Improved TypeScript integration and error handling 256 | 257 | ### Streaming Features 258 | - **Large File Support**: Efficient streaming for large file operations 259 | - **Real-time Progress**: Progress updates via SSE for long-running operations 260 | - **Session Management**: Multiple client connections with isolated sessions 261 | - **HTTP API**: RESTful endpoints alongside traditional MCP protocol 262 | 263 | ## Docker Support 264 | 265 | ### Quick Start with Docker 266 | 267 | ```bash 268 | # Build the image 269 | docker build -t mcp-file-operations-server . 270 | 271 | # Run with stdio (for MCP clients) 272 | docker run -it --rm -v "$(pwd):/workspace" mcp-file-operations-server 273 | 274 | # Run with HTTP interface 275 | docker run -it --rm -p 3001:3001 -v "$(pwd):/workspace" -e MCP_TRANSPORT=http mcp-file-operations-server 276 | ``` 277 | 278 | ### Volume Mounting 279 | 280 | **Windows:** 281 | ```cmd 282 | docker run -it --rm -v "C:\MyProject:/workspace" -p 3001:3001 -e MCP_TRANSPORT=http mcp-file-operations-server 283 | ``` 284 | 285 | **Linux/macOS:** 286 | ```bash 287 | docker run -it --rm -v "/home/user/project:/workspace" -p 3001:3001 -e MCP_TRANSPORT=http mcp-file-operations-server 288 | ``` 289 | 290 | For comprehensive Docker setup instructions including local drive mounting for Windows and Linux, see [DOCKER.md](./DOCKER.md). 
291 | 292 | ## Rate Limits 293 | 294 | The server implements rate limiting to prevent abuse: 295 | 296 | - **Tools**: 100 requests per minute 297 | - **Resources**: 200 requests per minute 298 | - **Watch Operations**: 20 operations per minute 299 | 300 | Rate limit errors include a retry-after period in the error message. 301 | 302 | ## Security Features 303 | 304 | ### Path Validation 305 | 306 | All file paths are validated to prevent directory traversal attacks: 307 | 308 | - No parent directory references (`../`) 309 | - Proper path normalization 310 | - Input sanitization 311 | 312 | ### Resource Protection 313 | 314 | - Rate limiting on all operations 315 | - Proper error handling and logging 316 | - Input validation on all parameters 317 | - Safe resource cleanup 318 | 319 | ## Progress Reporting 320 | 321 | Long-running operations like directory copying provide progress updates: 322 | 323 | ```typescript 324 | interface ProgressUpdate { 325 | token: string | number; 326 | message: string; 327 | percentage: number; 328 | } 329 | ``` 330 | 331 | Progress can be tracked through the progress token returned in the operation result. 
332 | 333 | ## Development 334 | 335 | ### Building 336 | 337 | ```bash 338 | npm run build 339 | ``` 340 | 341 | ### Linting 342 | 343 | ```bash 344 | npm run lint 345 | ``` 346 | 347 | ### Formatting 348 | 349 | ```bash 350 | npm run format 351 | ``` 352 | 353 | ### Testing 354 | 355 | ```bash 356 | npm test 357 | ``` 358 | 359 | ## Configuration 360 | 361 | ### Environment Variables 362 | 363 | | Variable | Default | Description | 364 | |----------|---------|-------------| 365 | | `MCP_TRANSPORT` | `stdio` | Transport mode: `stdio` or `http` | 366 | | `MCP_HTTP_PORT` | `3001` | Port for HTTP transport | 367 | 368 | ### Transport Selection 369 | 370 | - **Stdio**: Best for MCP clients like Claude Desktop, direct integration 371 | - **HTTP**: Best for web applications, remote access, development/testing 372 | 373 | The server can be configured through various settings: 374 | 375 | - **Rate Limiting**: Configure request limits and windows 376 | - **Progress Reporting**: Control update frequency and detail level 377 | - **Resource Access**: Configure resource permissions and limits 378 | - **Security Settings**: Configure path validation rules 379 | - **Change Tracking**: Set retention periods and storage options 380 | - **Watch Settings**: Configure debounce times and recursive watching 381 | 382 | ## Error Handling 383 | 384 | The server provides detailed error information through the `FileOperationError` class and MCP error codes: 385 | 386 | ### Standard MCP Error Codes 387 | 388 | - `InvalidRequest`: Invalid parameters or request format 389 | - `MethodNotFound`: Unknown tool or resource requested 390 | - `InvalidParams`: Invalid parameters (e.g., path validation failure) 391 | - `InternalError`: Server-side errors 392 | 393 | ### Custom Error Types 394 | 395 | - File operation failures 396 | - Rate limit exceeded 397 | - Path validation errors 398 | - Resource access errors 399 | 400 | Each error includes: 401 | 402 | - Specific error code 403 | - Detailed 
error message 404 | - Relevant metadata (file paths, limits, etc.) 405 | - Stack traces in development mode 406 | 407 | ## Contributing 408 | 409 | 1. Fork the repository 410 | 2. Create your feature branch (`git checkout -b feature/amazing-feature`) 411 | 3. Commit your changes (`git commit -m 'Add amazing feature'`) 412 | 4. Push to the branch (`git push origin feature/amazing-feature`) 413 | 5. Open a Pull Request 414 | 415 | ## License 416 | 417 | This project is licensed under the MIT License - see the LICENSE file for details. -------------------------------------------------------------------------------- /src/server.ts: -------------------------------------------------------------------------------- 1 | // Node.js built-ins 2 | import { Buffer } from 'node:buffer'; 3 | 4 | // MCP SDK imports 5 | import { McpServer, ResourceTemplate } from '@modelcontextprotocol/sdk/server/mcp.js'; 6 | import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js'; 7 | import { 8 | ErrorCode, 9 | McpError 10 | } from '@modelcontextprotocol/sdk/types.js'; 11 | import { z } from 'zod'; 12 | 13 | // Local service implementations 14 | import { FileServiceImpl } from './services/FileService.js'; 15 | import { DirectoryServiceImpl } from './services/DirectoryService.js'; 16 | import { WatchServiceImpl } from './services/WatchService.js'; 17 | import { ChangeTrackingServiceImpl } from './services/ChangeTrackingService.js'; 18 | import { RateLimiterService } from './services/RateLimiterService.js'; 19 | 20 | // Local types and constants 21 | import { 22 | ChangeType 23 | } from './types/index.js'; 24 | 25 | // Progress tracking types 26 | type ProgressToken = string | number; 27 | interface ProgressNotification { 28 | method: 'progress/update'; 29 | params: { 30 | token: ProgressToken; 31 | message: string; 32 | percentage: number; 33 | }; 34 | } 35 | type BufferEncoding = Parameters[1]; 36 | 37 | /** 38 | * Progress tracking helper class 39 | */ 40 | class 
ProgressTracker {
    private token: ProgressToken;
    private server: McpServer;
    private total: number;
    private current: number = 0;

    constructor(server: McpServer, total: number) {
        this.server = server;
        this.total = total;
        // Random token so clients can correlate notifications with the
        // operation result that returned this token.
        this.token = Math.random().toString(36).substring(2);
    }

    public getToken(): ProgressToken {
        return this.token;
    }

    /**
     * Advance progress by `increment` units and push a `progress/update`
     * notification to the connected client. Percentage is clamped to 100.
     */
    public async update(increment: number, message: string): Promise<void> {
        this.current += increment;
        const percentage = Math.min(Math.round((this.current / this.total) * 100), 100);

        const notification: ProgressNotification = {
            method: 'progress/update',
            params: {
                token: this.token,
                message,
                percentage
            }
        };

        await this.server.server.notification(notification);
    }
}

/**
 * Validate path to prevent directory traversal.
 *
 * Rejects any path containing a parent-directory reference (`../` or `..\`)
 * after normalizing backslashes to forward slashes.
 *
 * @throws McpError with `ErrorCode.InvalidParams` on a traversal attempt.
 */
function validatePath(path: string): void {
    const normalized = path.replace(/\\/g, '/');
    if (normalized.includes('../') || normalized.includes('..\\')) {
        throw new McpError(
            ErrorCode.InvalidParams,
            'Path traversal is not allowed'
        );
    }
}

/**
 * MCP server exposing file-system tools (copy/read/write, directory CRUD,
 * watching, change tracking) and read-only resources over stdio.
 */
export class FileOperationsServer {
    private mcpServer: McpServer;
    private fileService: FileServiceImpl;
    private directoryService: DirectoryServiceImpl;
    private watchService: WatchServiceImpl;
    private changeTrackingService: ChangeTrackingServiceImpl;
    private rateLimiter: RateLimiterService;

    constructor() {
        // Initialize MCP server with v1.5 API
        this.mcpServer = new McpServer(
            {
                name: 'file-operations-server',
                version: '1.0.0',
            },
            {
                capabilities: {
                    tools: {},
                    resources: {}
                },
            }
        );

        // Initialize services
        this.fileService = new FileServiceImpl();
        this.directoryService = new DirectoryServiceImpl();
        this.watchService = new WatchServiceImpl();
        this.changeTrackingService = new ChangeTrackingServiceImpl();
        this.rateLimiter = new RateLimiterService();

        // Set up tools and resources using new v1.5 API
        this.setupTools();
        this.setupResources();

        // Error handling: log transport-level errors; release watchers on SIGINT.
        this.mcpServer.server.onerror = (error): void => console.error('[MCP Error]', error);
        process.on('SIGINT', async () => {
            await this.cleanup();
            process.exit(0);
        });
    }

    /**
     * Set up MCP tools using v1.5 API.
     *
     * Every tool handler first consults the rate limiter (which throws when
     * the 'tool'/'watch' budget is exhausted) and validates incoming paths.
     */
    private setupTools(): void {
        // Record a change event in the change-tracking service.
        const trackChange = async (
            description: string,
            type: ChangeType,
            details?: Record<string, unknown>
        ): Promise<void> => {
            await this.changeTrackingService.addChange({
                description,
                type,
                details
            });
        };

        // Basic File Operations
        this.mcpServer.tool(
            'copy_file',
            {
                source: z.string().describe('Source file path'),
                destination: z.string().describe('Destination file path'),
                overwrite: z.boolean().default(false).describe('Whether to overwrite existing file')
            },
            async ({ source, destination, overwrite }) => {
                this.rateLimiter.checkRateLimit('tool');
                validatePath(source);
                validatePath(destination);

                // For now, use the default behavior - future enhancement could honor overwrite flag
                if (overwrite) {
                    console.warn('Overwrite parameter received but not yet implemented');
                }
                await this.fileService.copyFile(source, destination);
                await trackChange('Copied file', 'file_create', { source, destination });
                const metadata = await this.fileService.getMetadata(destination);

                return {
                    content: [{ type: 'text', text: JSON.stringify(metadata, null, 2) }]
                };
            }
        );

        this.mcpServer.tool(
            'read_file',
            {
                path: z.string().describe('Path to the file to read'),
                encoding: z.string().default('utf8').describe('File encoding (default: utf8)')
            },
            async ({ path, encoding }) => {
                this.rateLimiter.checkRateLimit('tool');
                validatePath(path);

                const content = await this.fileService.readFile(path, encoding as BufferEncoding);
                return {
                    content: [{ type: 'text', text: content }]
                };
            }
        );

        this.mcpServer.tool(
            'write_file',
            {
                path: z.string().describe('Path to write the file to'),
                content: z.string().describe('Content to write to the file'),
                encoding: z.string().default('utf8').describe('File encoding (default: utf8)')
            },
            async ({ path, content, encoding }) => {
                this.rateLimiter.checkRateLimit('tool');
                validatePath(path);

                await this.fileService.writeFile(path, content, encoding as BufferEncoding);
                await trackChange('Wrote file', 'file_edit', { path });
                const metadata = await this.fileService.getMetadata(path);

                return {
                    content: [{ type: 'text', text: JSON.stringify(metadata, null, 2) }]
                };
            }
        );

        // Directory Operations
        this.mcpServer.tool(
            'make_directory',
            {
                path: z.string().describe('Path to create the directory at'),
                recursive: z.boolean().default(true).describe('Create parent directories if they don\'t exist')
            },
            async ({ path, recursive }) => {
                this.rateLimiter.checkRateLimit('tool');
                validatePath(path);

                // BUGFIX: previously `recursive ? true : true` always passed true,
                // silently ignoring the caller's flag.
                await this.directoryService.create(path, recursive);
                await trackChange('Created directory', 'directory_create', { path });

                return {
                    content: [{ type: 'text', text: JSON.stringify({ success: true, path }, null, 2) }]
                };
            }
        );

        this.mcpServer.tool(
            'remove_directory',
            {
                path: z.string().describe('Path to the directory to remove'),
                recursive: z.boolean().default(false).describe('Remove directory contents recursively')
            },
            async ({ path, recursive }) => {
                this.rateLimiter.checkRateLimit('tool');
                validatePath(path);

                await this.directoryService.remove(path, recursive);
                await trackChange('Removed directory', 'directory_delete', { path });

                return {
                    content: [{ type: 'text', text: JSON.stringify({ success: true, path }, null, 2) }]
                };
            }
        );

        this.mcpServer.tool(
            'list_directory',
            {
                path: z.string().describe('Path of directory to list'),
                recursive: z.boolean().default(false).describe('Whether to list contents recursively')
            },
            async ({ path, recursive }) => {
                this.rateLimiter.checkRateLimit('tool');
                validatePath(path);

                const entries = await this.directoryService.list(path, recursive);
                return {
                    content: [{ type: 'text', text: JSON.stringify({ success: true, entries }, null, 2) }]
                };
            }
        );

        this.mcpServer.tool(
            'copy_directory',
            {
                source: z.string().describe('Source directory path'),
                destination: z.string().describe('Destination directory path'),
                overwrite: z.boolean().default(false).describe('Whether to overwrite existing files/directories')
            },
            async ({ source, destination, overwrite }) => {
                this.rateLimiter.checkRateLimit('tool');
                validatePath(source);
                validatePath(destination);

                // Count files up front so progress can be reported against a total.
                const entries = await this.directoryService.list(source, true);
                const totalFiles = entries.length;

                const progress = new ProgressTracker(
                    this.mcpServer,
                    totalFiles
                );

                // Copy with progress updates - for now use default behavior
                if (overwrite) {
                    console.warn('Overwrite parameter received but not yet implemented');
                }
                await this.directoryService.copy(source, destination);

                // NOTE(review): DirectoryService.copy exposes no per-file callback,
                // so updates are emitted after the copy completes. The previous
                // implementation re-listed the destination tree just to drive this
                // loop; the source count already known above is used instead.
                for (let copied = 1; copied <= totalFiles; copied++) {
                    await progress.update(1, `Copying directory ${source} to ${destination} (${copied}/${totalFiles})`);
                }

                await trackChange('Copied directory', 'directory_copy', { source, destination });
                return {
                    content: [{
                        type: 'text',
                        text: JSON.stringify({
                            success: true,
                            source,
                            destination,
                            progressToken: progress.getToken()
                        }, null, 2)
                    }]
                };
            }
        );

        // Watch Operations
        this.mcpServer.tool(
            'watch_directory',
            {
                path: z.string().describe('Path to the directory to watch'),
                recursive: z.boolean().default(true).describe('Watch subdirectories recursively')
            },
            async ({ path, recursive }) => {
                this.rateLimiter.checkRateLimit('tool');
                this.rateLimiter.checkRateLimit('watch');
                validatePath(path);

                // BUGFIX: previously `recursive ? true : true` always passed true,
                // silently ignoring the caller's flag.
                await this.watchService.watch(path, recursive);
                await trackChange('Started watching', 'watch_start', { path });

                return {
                    content: [{ type: 'text', text: JSON.stringify({ success: true, path }, null, 2) }]
                };
            }
        );

        this.mcpServer.tool(
            'unwatch_directory',
            {
                path: z.string().describe('Path to the directory to stop watching')
            },
            async ({ path }) => {
                this.rateLimiter.checkRateLimit('tool');
                this.rateLimiter.checkRateLimit('watch');
                validatePath(path);

                await this.watchService.unwatch(path);
                await trackChange('Stopped watching', 'watch_end', { path });

                return {
                    content: [{ type: 'text', text: JSON.stringify({ success: true, path }, null, 2) }]
                };
            }
        );

        this.mcpServer.tool(
            'is_watching',
            {
                path: z.string().describe('Path to check')
            },
            async ({ path }) => {
                this.rateLimiter.checkRateLimit('tool');
                validatePath(path);

                const isWatching = this.watchService.isWatching(path);
                return {
                    content: [{ type: 'text', text: JSON.stringify({ path, isWatching }, null, 2) }]
                };
            }
        );

        // Change Tracking Operations
        this.mcpServer.tool(
            'get_changes',
            {
                limit: z.number().optional().describe('Maximum number of changes to return'),
                type: z.string().optional().describe('Filter changes by type')
            },
            async ({ limit, type }) => {
                this.rateLimiter.checkRateLimit('tool');

                const changes = await this.changeTrackingService.getChanges(limit, type as ChangeType);
                return {
                    content: [{ type: 'text', text: JSON.stringify(changes, null, 2) }]
                };
            }
        );

        this.mcpServer.tool(
            'clear_changes',
            {},
            async () => {
                this.rateLimiter.checkRateLimit('tool');

                await this.changeTrackingService.clearChanges();
                return {
                    content: [{ type: 'text', text: JSON.stringify({ success: true }, null, 2) }]
                };
            }
        );
    }

    /**
     * Set up MCP resources using v1.5 API.
     *
     * One static resource (recent changes) plus three URI templates for file
     * contents, file metadata, and directory listings. All handlers are
     * rate-limited under the 'resource' budget and path-validated.
     */
    private setupResources(): void {
        // Static resource: recent changes
        this.mcpServer.resource(
            'Recent File Changes',
            'file:///recent-changes',
            {
                description: 'List of recent file system changes',
                mimeType: 'application/json'
            },
            async () => {
                this.rateLimiter.checkRateLimit('resource');
                const changes = await this.changeTrackingService.getChanges();
                return {
                    contents: [{
                        uri: 'file:///recent-changes',
                        mimeType: 'application/json',
                        text: JSON.stringify(changes, null, 2)
                    }]
                };
            }
        );

        // Resource templates for dynamic file access
        this.mcpServer.resource(
            'File Contents',
            new ResourceTemplate('file://{path}', { list: undefined }),
            async (uri: URL, variables: Record<string, string | string[]>) => {
                this.rateLimiter.checkRateLimit('resource');
                // Template variables may arrive as arrays; use the first value.
                const path = Array.isArray(variables.path) ? variables.path[0] : variables.path;
                validatePath(path);
                const content = await this.fileService.readFile(path);
                return {
                    contents: [{
                        uri: uri.href,
                        text: content
                    }]
                };
            }
        );

        this.mcpServer.resource(
            'File Metadata',
            new ResourceTemplate('metadata://{path}', { list: undefined }),
            {
                description: 'Metadata for a file at the specified path',
                mimeType: 'application/json'
            },
            async (uri: URL, variables: Record<string, string | string[]>) => {
                this.rateLimiter.checkRateLimit('resource');
                const path = Array.isArray(variables.path) ? variables.path[0] : variables.path;
                validatePath(path);
                const metadata = await this.fileService.getMetadata(path);
                return {
                    contents: [{
                        uri: uri.href,
                        mimeType: 'application/json',
                        text: JSON.stringify(metadata, null, 2)
                    }]
                };
            }
        );

        this.mcpServer.resource(
            'Directory Contents',
            new ResourceTemplate('directory://{path}', { list: undefined }),
            {
                description: 'List of files in a directory',
                mimeType: 'application/json'
            },
            async (uri: URL, variables: Record<string, string | string[]>) => {
                this.rateLimiter.checkRateLimit('resource');
                const path = Array.isArray(variables.path) ? variables.path[0] : variables.path;
                validatePath(path);
                const entries = await this.directoryService.list(path, false);
                return {
                    contents: [{
                        uri: uri.href,
                        mimeType: 'application/json',
                        text: JSON.stringify(entries, null, 2)
                    }]
                };
            }
        );
    }

    /**
     * Clean up resources before shutdown (currently: dispose all watchers).
     */
    public async cleanup(): Promise<void> {
        await this.watchService.dispose();
    }

    /**
     * Start the server with stdio transport.
     */
    async run(): Promise<void> {
        const transport = new StdioServerTransport();
        await this.mcpServer.connect(transport);
        console.error('File Operations MCP server running on stdio');
    }

    /**
     * Start the server with HTTP transport (SSE).
     *
     * Placeholder: logs the intended port and falls back to stdio until the
     * Express-based transport is implemented.
     */
    async runHttp(port: number = 3001): Promise<void> {
        console.error(`HTTP server would run on port ${port}`);
        await this.run();
    }

    /**
     * Get the underlying MCP server for advanced operations.
     */
    getMcpServer(): McpServer {
        return this.mcpServer;
    }
}
-------------------------------------------------------------------------------- /src/services/PatchService.ts: -------------------------------------------------------------------------------- 1 | import * as diff from 'diff'; 2 | import { 3 | PatchService, 4 | PatchOperation, 5 | PatchResult, 6 | WhitespaceConfig, 7 | NormalizedContent, 8 | FileOperationError, 9 | FileErrorCode, 10 | MergeStrategy, 11 | ConflictResolution 12 | } from '../types/index.js'; 13 | import { DEFAULT_WHITESPACE_CONFIG } from '../config/defaults.js'; 14 | import { FileServiceImpl } from './FileService.js'; 15 | 16 | /** 17 | * Enhanced implementation of PatchService interface handling file patching operations 18 | * Follows SOLID principles and implements advanced patching strategies: 19 | * - Single Responsibility: Handles only patching operations 20 | * - Open/Closed: Extensible through inheritance and strategy patterns 21 | * - Liskov Substitution: Implements PatchService interface 22 | * - Interface Segregation: Focused patching methods 23 | * - Dependency Inversion: Depends on abstractions 24 | */ 25 | export class PatchServiceImpl implements PatchService { 26 | private fileService: FileServiceImpl; 27 | private readonly CHUNK_SIZE = 100; // Size of chunks for token-based diff 28 | private readonly SIMILARITY_THRESHOLD = 0.8; // Threshold for fuzzy matching 29 | 30 | constructor() { 31 | this.fileService = new FileServiceImpl(); 32 | } 33 | 34 | /** 35 | * Apply a patch operation to a file with enhanced diff and merge capabilities 36 | * @param operation Patch operation details 37 | */ 38 | async applyPatch(operation: PatchOperation): Promise { 39 | try { 40 | // Create atomic operation context 41 | const context = await this.createAtomicContext(operation); 42 | 43 | try { 44 | const fileContent = await this.fileService.readFile(operation.filePath); 45 | const effectiveConfig = operation.whitespaceConfig || DEFAULT_WHITESPACE_CONFIG; 46 | const { normalized: content } = 
this.normalizeContent(fileContent, effectiveConfig); 47 | 48 | let newContent: string; 49 | let changesApplied = 0; 50 | let conflicts: string[] = []; 51 | 52 | switch (operation.type) { 53 | case 'complete': { 54 | if (!operation.content) { 55 | throw new Error('Content is required for complete replacement'); 56 | } 57 | const { normalized, conflicts: completeConflicts } = await this.handleCompleteReplacement( 58 | content, 59 | operation.content, 60 | effectiveConfig, 61 | operation.mergeStrategy 62 | ); 63 | newContent = normalized; 64 | changesApplied = 1; 65 | conflicts = completeConflicts; 66 | break; 67 | } 68 | case 'line': { 69 | const { content: lineContent, changes: lineChanges, conflicts: lineConflicts } = 70 | await this.handleLineOperation(content, operation, effectiveConfig); 71 | newContent = lineContent; 72 | changesApplied = lineChanges; 73 | conflicts = lineConflicts; 74 | break; 75 | } 76 | case 'block': { 77 | if (!operation.search || !operation.replace) { 78 | throw new Error('Search and replace are required for block replacement'); 79 | } 80 | const { content: blockContent, changes: blockChanges, conflicts: blockConflicts } = 81 | await this.handleBlockOperation(content, operation, effectiveConfig); 82 | newContent = blockContent; 83 | changesApplied = blockChanges; 84 | conflicts = blockConflicts; 85 | break; 86 | } 87 | case 'diff': { 88 | if (!operation.diff) { 89 | throw new Error('Diff content is required for diff patching'); 90 | } 91 | const { content: diffContent, changes: diffChanges, conflicts: diffConflicts } = 92 | await this.handleDiffOperation(content, operation); 93 | newContent = diffContent; 94 | changesApplied = diffChanges; 95 | conflicts = diffConflicts; 96 | break; 97 | } 98 | default: 99 | throw new Error(`Unsupported patch type: ${operation.type}`); 100 | } 101 | 102 | // Verify changes before applying 103 | if (!await this.verifyChanges(content, newContent, operation)) { 104 | throw new Error('Change verification 
failed'); 105 | } 106 | 107 | // Only write if changes were made and there are no conflicts 108 | if (changesApplied > 0 && conflicts.length === 0) { 109 | await this.fileService.writeFile(operation.filePath, newContent); 110 | await this.commitAtomicOperation(context); 111 | } else if (conflicts.length > 0) { 112 | await this.handleConflicts(context, conflicts, operation.conflictResolution); 113 | } 114 | 115 | return { 116 | success: true, 117 | filePath: operation.filePath, 118 | type: operation.type, 119 | changesApplied, 120 | backupPath: context.backupPath, 121 | originalContent: content.split('\n'), 122 | newContent: newContent.split('\n'), 123 | conflicts: conflicts.length > 0 ? conflicts : undefined 124 | }; 125 | } catch (error) { 126 | await this.rollbackAtomicOperation(context); 127 | throw error; 128 | } 129 | } catch (error) { 130 | return { 131 | success: false, 132 | filePath: operation.filePath, 133 | type: operation.type, 134 | changesApplied: 0, 135 | error: error instanceof Error ? 
error.message : String(error) 136 | }; 137 | } 138 | } 139 | 140 | /** 141 | * Create context for atomic operation 142 | * @param operation Patch operation 143 | */ 144 | private async createAtomicContext(operation: PatchOperation) { 145 | const context = { 146 | backupPath: undefined as string | undefined, 147 | tempPath: undefined as string | undefined 148 | }; 149 | 150 | if (operation.createBackup) { 151 | context.backupPath = await this.createBackup(operation.filePath); 152 | context.tempPath = operation.filePath; 153 | } 154 | 155 | return context; 156 | } 157 | 158 | /** 159 | * Handle complete file replacement with merge support 160 | * @param original Original content 161 | * @param replacement Replacement content 162 | * @param config Whitespace configuration 163 | * @param strategy Merge strategy 164 | */ 165 | private async handleCompleteReplacement( 166 | original: string, 167 | replacement: string, 168 | config: WhitespaceConfig, 169 | strategy?: MergeStrategy 170 | ): Promise<{ normalized: string; conflicts: string[] }> { 171 | const normalized = this.normalizeContent(replacement, config).normalized; 172 | 173 | if (!strategy || strategy === 'overwrite') { 174 | return { normalized, conflicts: [] }; 175 | } 176 | 177 | // Implement three-way merge for complete replacement 178 | const base = await this.findCommonAncestor(original, normalized); 179 | return this.performThreeWayMerge(base, original, normalized); 180 | } 181 | 182 | /** 183 | * Handle line-based operations with token matching 184 | * @param content Original content 185 | * @param operation Patch operation 186 | * @param config Whitespace configuration 187 | */ 188 | private async handleLineOperation( 189 | content: string, 190 | operation: PatchOperation, 191 | config: WhitespaceConfig 192 | ): Promise<{ content: string; changes: number; conflicts: string[] }> { 193 | const lines = content.split('\n'); 194 | let changes = 0; 195 | const conflicts: string[] = []; 196 | 197 | if 
(operation.lineNumbers) { 198 | // Enhanced line number based replacement with validation 199 | for (const lineNum of operation.lineNumbers) { 200 | if (lineNum > 0 && lineNum <= lines.length) { 201 | const originalLine = lines[lineNum - 1]; 202 | if (typeof operation.replace === 'string') { 203 | if (await this.validateLineChange(originalLine, operation.replace)) { 204 | lines[lineNum - 1] = operation.replace; 205 | changes++; 206 | } else { 207 | conflicts.push(`Line ${lineNum}: Invalid change detected`); 208 | } 209 | } else { 210 | if (await this.validateLineDeletion(originalLine)) { 211 | lines.splice(lineNum - 1, 1); 212 | changes++; 213 | } else { 214 | conflicts.push(`Line ${lineNum}: Cannot delete protected line`); 215 | } 216 | } 217 | } 218 | } 219 | } else if (operation.search || operation.searchPattern) { 220 | // Enhanced pattern based replacement with token matching 221 | const pattern = operation.searchPattern || 222 | (operation.search ? this.createTokenPattern(operation.search, config) : null); 223 | 224 | if (!pattern) { 225 | throw new Error('Either search or searchPattern must be provided'); 226 | } 227 | 228 | for (let i = 0; i < lines.length; i++) { 229 | const line = lines[i]; 230 | const normalizedLine = this.normalizeContent(line.toString(), config).normalized; 231 | if (this.matchesWithTokens(normalizedLine, pattern)) { 232 | if (typeof operation.replace === 'string') { 233 | if (await this.validateLineChange(line, operation.replace)) { 234 | lines[i] = operation.replace; 235 | changes++; 236 | } else { 237 | conflicts.push(`Line ${i + 1}: Invalid change detected`); 238 | } 239 | } else { 240 | if (await this.validateLineDeletion(line)) { 241 | lines.splice(i, 1); 242 | i--; // Adjust index after removal 243 | changes++; 244 | } else { 245 | conflicts.push(`Line ${i + 1}: Cannot delete protected line`); 246 | } 247 | } 248 | } 249 | } 250 | } 251 | 252 | return { content: lines.join('\n'), changes, conflicts }; 253 | } 254 | 255 | /** 256 
| * Handle block-based operations with improved matching 257 | * @param content Original content 258 | * @param operation Patch operation 259 | * @param config Whitespace configuration 260 | */ 261 | private async handleBlockOperation( 262 | content: string, 263 | operation: PatchOperation, 264 | config: WhitespaceConfig 265 | ): Promise<{ content: string; changes: number; conflicts: string[] }> { 266 | if (!operation.search || !operation.replace) { 267 | throw new Error('Search and replace are required for block replacement'); 268 | } 269 | 270 | const searchNormalized = this.normalizeContent(operation.search, config).normalized; 271 | const replaceNormalized = this.normalizeContent(operation.replace, config).normalized; 272 | 273 | // Create token-based pattern for block matching 274 | const pattern = this.createBlockTokenPattern(searchNormalized, config); 275 | const conflicts: string[] = []; 276 | 277 | // Use chunking for large blocks 278 | const chunks = this.splitIntoChunks(content, this.CHUNK_SIZE); 279 | let newContent = ''; 280 | let changes = 0; 281 | 282 | for (const chunk of chunks) { 283 | if (this.matchesWithTokens(chunk, pattern)) { 284 | if (await this.validateBlockChange(chunk, replaceNormalized)) { 285 | newContent += chunk.replace(pattern, replaceNormalized); 286 | changes++; 287 | } else { 288 | conflicts.push(`Block change validation failed`); 289 | newContent += chunk; 290 | } 291 | } else { 292 | newContent += chunk; 293 | } 294 | } 295 | 296 | return { content: newContent, changes, conflicts }; 297 | } 298 | 299 | /** 300 | * Handle diff-based operations with improved diff algorithm 301 | * @param content Original content 302 | * @param operation Patch operation 303 | */ 304 | private async handleDiffOperation( 305 | content: string, 306 | operation: PatchOperation 307 | ): Promise<{ content: string; changes: number; conflicts: string[] }> { 308 | const conflicts: string[] = []; 309 | 310 | try { 311 | const patches = 
diff.parsePatch(operation.diff!); 312 | let newContent = content; 313 | let changes = 0; 314 | 315 | for (const patch of patches) { 316 | const patchResult = diff.applyPatch(newContent, patch); 317 | if (patchResult === false) { 318 | conflicts.push(`Failed to apply patch: ${JSON.stringify(patch)}`); 319 | continue; 320 | } 321 | 322 | if (await this.validateDiffChange(newContent, patchResult)) { 323 | newContent = patchResult; 324 | changes++; 325 | } else { 326 | conflicts.push(`Diff change validation failed`); 327 | } 328 | } 329 | 330 | return { content: newContent, changes, conflicts }; 331 | } catch (error) { 332 | throw new Error(`Failed to apply diff patch: ${error instanceof Error ? error.message : 'Unknown error'}`); 333 | } 334 | } 335 | 336 | /** 337 | * Create a backup of a file with enhanced error handling 338 | * @param filePath Path to the file 339 | */ 340 | public async createBackup(filePath: string): Promise { 341 | const backupPath = `${filePath}.bak`; 342 | try { 343 | // Cast overwrite to allow true value 344 | await this.fileService.copyFile(filePath, backupPath, true as false); 345 | return backupPath; 346 | } catch (error) { 347 | throw new FileOperationError( 348 | 'BACKUP_FAILED' as FileErrorCode, 349 | `Failed to create backup: ${error instanceof Error ? error.message : 'Unknown error'}`, 350 | filePath 351 | ); 352 | } 353 | } 354 | 355 | /** 356 | * Normalize content with enhanced token handling 357 | * @param content Content to normalize 358 | * @param config Whitespace configuration 359 | */ 360 | public normalizeContent(content: string, config: WhitespaceConfig = DEFAULT_WHITESPACE_CONFIG): NormalizedContent { 361 | const lines = content.split(/\r\n|\r|\n/); 362 | const lineEndings = content.includes('\r\n') ? '\r\n' : 363 | content.includes('\r') ? '\r' : '\n'; 364 | 365 | const indentationMatch = content.match(/^[ \t]+/m); 366 | const indentation = (indentationMatch ? 
indentationMatch[0] : config.defaultIndentation) || ' '; 367 | 368 | let indentationSpaces = 0; 369 | let indentationTabs = 0; 370 | let trailingWhitespace = 0; 371 | let emptyLines = 0; 372 | let maxLineLength = 0; 373 | 374 | const processedLines = lines.map(line => { 375 | maxLineLength = Math.max(maxLineLength, line.length); 376 | if (line.trim().length === 0) emptyLines++; 377 | if (line.match(/[ \t]+$/)) trailingWhitespace++; 378 | 379 | const indent = line.match(/^[ \t]+/); 380 | if (indent) { 381 | indentationSpaces += (indent[0].match(/ /g) || []).length; 382 | indentationTabs += (indent[0].match(/\t/g) || []).length; 383 | } 384 | 385 | let processedLine = line; 386 | if (config.trimTrailingWhitespace) { 387 | processedLine = processedLine.replace(/[ \t]+$/, ''); 388 | } 389 | if (!config.preserveIndentation) { 390 | processedLine = processedLine.replace(/^[ \t]+/, config.defaultIndentation || ' '); 391 | } 392 | return processedLine; 393 | }); 394 | 395 | const normalized = processedLines.join( 396 | config.preserveLineEndings ? 
lineEndings : (config.defaultLineEnding || '\n') 397 | ); 398 | 399 | const hash = Buffer.from(normalized).toString('base64'); 400 | 401 | return { 402 | normalized, 403 | lineEndings, 404 | indentation, 405 | hash, 406 | stats: { 407 | indentationSpaces, 408 | indentationTabs, 409 | trailingWhitespace, 410 | emptyLines, 411 | maxLineLength 412 | } 413 | }; 414 | } 415 | 416 | /** 417 | * Create a token-based pattern for matching 418 | * @param pattern Original pattern 419 | * @param config Whitespace configuration 420 | */ 421 | private createTokenPattern(pattern: string, config: WhitespaceConfig): RegExp { 422 | const tokens = this.tokenize(pattern); 423 | let flexPattern = tokens.map(token => { 424 | // Escape regex special characters except those we want to keep flexible 425 | const escaped = token.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); 426 | 427 | // Make whitespace flexible based on config 428 | if (!config.preserveIndentation && /^\s+$/.test(token)) { 429 | return '\\s*'; 430 | } 431 | if (config.normalizeWhitespace && /\s/.test(token)) { 432 | return escaped.replace(/\s+/g, '\\s+'); 433 | } 434 | return escaped; 435 | }).join(''); 436 | 437 | return new RegExp(flexPattern); 438 | } 439 | 440 | /** 441 | * Create a token-based pattern for matching blocks 442 | * @param pattern Original pattern 443 | * @param config Whitespace configuration 444 | */ 445 | private createBlockTokenPattern(pattern: string, config: WhitespaceConfig): RegExp { 446 | const tokens = this.tokenize(pattern); 447 | let flexPattern = tokens.map(token => { 448 | const escaped = token.replace(/[.*+?^${}()|[\]\\]/g, '\\$&'); 449 | 450 | // Handle line endings 451 | if (!config.preserveLineEndings && /\r?\n/.test(token)) { 452 | return '\\r?\\n'; 453 | } 454 | 455 | // Handle whitespace 456 | if (!config.preserveIndentation && /^\s+$/.test(token)) { 457 | return '\\s*'; 458 | } 459 | if (config.normalizeWhitespace && /\s/.test(token)) { 460 | return escaped.replace(/\s+/g, '\\s+'); 461 | } 
462 | 463 | return escaped; 464 | }).join(''); 465 | 466 | return new RegExp(flexPattern, 'gm'); 467 | } 468 | 469 | /** 470 | * Split content into manageable chunks 471 | * @param content Content to split 472 | * @param size Chunk size 473 | */ 474 | private splitIntoChunks(content: string, size: number): string[] { 475 | const chunks: string[] = []; 476 | let index = 0; 477 | while (index < content.length) { 478 | chunks.push(content.slice(index, index + size)); 479 | index += size; 480 | } 481 | return chunks; 482 | } 483 | 484 | /** 485 | * Tokenize content for improved matching 486 | * @param content Content to tokenize 487 | */ 488 | private tokenize(content: string): string[] { 489 | // Split into meaningful tokens (words, whitespace, symbols) 490 | return content.match(/\s+|\w+|[^\s\w]+/g) || []; 491 | } 492 | 493 | /** 494 | * Check if content matches pattern using token-based comparison 495 | * @param content Content to check 496 | * @param pattern Pattern to match against 497 | */ 498 | private matchesWithTokens(content: string, pattern: RegExp): boolean { 499 | const contentTokens = this.tokenize(content); 500 | const patternStr = pattern.source; 501 | const patternTokens = this.tokenize(patternStr); 502 | 503 | // Use Levenshtein distance for fuzzy matching 504 | return this.calculateSimilarity(contentTokens, patternTokens) >= this.SIMILARITY_THRESHOLD; 505 | } 506 | 507 | /** 508 | * Calculate similarity between token arrays 509 | * @param tokens1 First token array 510 | * @param tokens2 Second token array 511 | */ 512 | private calculateSimilarity(tokens1: string[], tokens2: string[]): number { 513 | const matrix: number[][] = []; 514 | 515 | // Initialize matrix 516 | for (let i = 0; i <= tokens1.length; i++) { 517 | matrix[i] = [i]; 518 | } 519 | for (let j = 0; j <= tokens2.length; j++) { 520 | matrix[0][j] = j; 521 | } 522 | 523 | // Fill matrix 524 | for (let i = 1; i <= tokens1.length; i++) { 525 | for (let j = 1; j <= tokens2.length; j++) { 
526 | if (tokens1[i - 1] === tokens2[j - 1]) { 527 | matrix[i][j] = matrix[i - 1][j - 1]; 528 | } else { 529 | matrix[i][j] = Math.min( 530 | matrix[i - 1][j - 1] + 1, // substitution 531 | matrix[i][j - 1] + 1, // insertion 532 | matrix[i - 1][j] + 1 // deletion 533 | ); 534 | } 535 | } 536 | } 537 | 538 | const maxLength = Math.max(tokens1.length, tokens2.length); 539 | return 1 - matrix[tokens1.length][tokens2.length] / maxLength; 540 | } 541 | 542 | /** 543 | * Find common ancestor for three-way merge 544 | * @param content1 First content 545 | * @param content2 Second content 546 | */ 547 | private async findCommonAncestor(content1: string, content2: string): Promise { 548 | const tokens1 = this.tokenize(content1); 549 | const tokens2 = this.tokenize(content2); 550 | 551 | // Find longest common subsequence 552 | const lcs = this.findLCS(tokens1, tokens2); 553 | return lcs.join(''); 554 | } 555 | 556 | /** 557 | * Find longest common subsequence 558 | * @param tokens1 First token array 559 | * @param tokens2 Second token array 560 | */ 561 | private findLCS(tokens1: string[], tokens2: string[]): string[] { 562 | const matrix: number[][] = Array(tokens1.length + 1).fill(0) 563 | .map(() => Array(tokens2.length + 1).fill(0)); 564 | 565 | // Fill LCS matrix 566 | for (let i = 1; i <= tokens1.length; i++) { 567 | for (let j = 1; j <= tokens2.length; j++) { 568 | if (tokens1[i - 1] === tokens2[j - 1]) { 569 | matrix[i][j] = matrix[i - 1][j - 1] + 1; 570 | } else { 571 | matrix[i][j] = Math.max(matrix[i - 1][j], matrix[i][j - 1]); 572 | } 573 | } 574 | } 575 | 576 | // Reconstruct LCS 577 | const lcs: string[] = []; 578 | let i = tokens1.length; 579 | let j = tokens2.length; 580 | 581 | while (i > 0 && j > 0) { 582 | if (tokens1[i - 1] === tokens2[j - 1]) { 583 | lcs.unshift(tokens1[i - 1]); 584 | i--; 585 | j--; 586 | } else if (matrix[i - 1][j] > matrix[i][j - 1]) { 587 | i--; 588 | } else { 589 | j--; 590 | } 591 | } 592 | 593 | return lcs; 594 | } 595 | 596 | 
/** 597 | * Perform three-way merge 598 | * @param base Base content 599 | * @param current Current content 600 | * @param target Target content 601 | */ 602 | private async performThreeWayMerge( 603 | base: string, 604 | current: string, 605 | target: string 606 | ): Promise<{ normalized: string; conflicts: string[] }> { 607 | const baseTokens = this.tokenize(base); 608 | const currentTokens = this.tokenize(current); 609 | const targetTokens = this.tokenize(target); 610 | 611 | const conflicts: string[] = []; 612 | const merged: string[] = []; 613 | 614 | let i = 0, j = 0, k = 0; 615 | while (i < baseTokens.length || j < currentTokens.length || k < targetTokens.length) { 616 | if (currentTokens[j] === targetTokens[k]) { 617 | merged.push(currentTokens[j]); 618 | i++; j++; k++; 619 | } else if (currentTokens[j] === baseTokens[i]) { 620 | merged.push(targetTokens[k]); 621 | i++; j++; k++; 622 | } else if (targetTokens[k] === baseTokens[i]) { 623 | merged.push(currentTokens[j]); 624 | i++; j++; k++; 625 | } else { 626 | // Conflict detected 627 | conflicts.push(`Conflict at position ${merged.length}`); 628 | merged.push(currentTokens[j] || targetTokens[k]); 629 | i++; j++; k++; 630 | } 631 | } 632 | 633 | return { normalized: merged.join(''), conflicts }; 634 | } 635 | 636 | /** 637 | * Verify changes before applying 638 | * @param original Original content 639 | * @param modified Modified content 640 | * @param operation Patch operation 641 | */ 642 | private async verifyChanges( 643 | original: string, 644 | modified: string, 645 | operation: PatchOperation 646 | ): Promise { 647 | // Verify content hasn't been corrupted 648 | if (!this.isValidContent(modified)) { 649 | return false; 650 | } 651 | 652 | // Verify operation-specific constraints 653 | switch (operation.type) { 654 | case 'complete': 655 | return this.verifyCompleteReplacement(original, modified); 656 | case 'line': 657 | return this.verifyLineChanges(original, modified); 658 | case 'block': 659 | 
return this.verifyBlockChanges(original, modified); 660 | case 'diff': 661 | return this.verifyDiffChanges(original, modified); 662 | default: 663 | return false; 664 | } 665 | } 666 | 667 | /** 668 | * Verify complete replacement 669 | * @param original Original content 670 | * @param modified Modified content 671 | */ 672 | private verifyCompleteReplacement(original: string, modified: string): boolean { 673 | // Ensure basic file structure is maintained and content hasn't regressed 674 | return this.hasValidStructure(modified) && modified.length >= original.length * 0.5; 675 | } 676 | 677 | /** 678 | * Verify line changes 679 | * @param original Original content 680 | * @param modified Modified content 681 | */ 682 | private verifyLineChanges(original: string, modified: string): boolean { 683 | const originalLines = original.split('\n'); 684 | const modifiedLines = modified.split('\n'); 685 | 686 | // Verify line count hasn't changed unexpectedly 687 | return Math.abs(originalLines.length - modifiedLines.length) <= 688 | originalLines.filter(line => line.trim().length === 0).length; 689 | } 690 | 691 | /** 692 | * Verify block changes 693 | * @param original Original content 694 | * @param modified Modified content 695 | */ 696 | private verifyBlockChanges(original: string, modified: string): boolean { 697 | // Verify block structure and ensure content similarity 698 | return this.hasValidBlockStructure(modified) && 699 | this.calculateSimilarity(this.tokenize(original), this.tokenize(modified)) >= 0.3; 700 | } 701 | 702 | /** 703 | * Verify diff changes 704 | * @param original Original content 705 | * @param modified Modified content 706 | */ 707 | private verifyDiffChanges(original: string, modified: string): boolean { 708 | // Verify diff hasn't corrupted content and maintains reasonable similarity 709 | return this.isValidContent(modified) && 710 | this.hasValidStructure(modified) && 711 | this.calculateSimilarity(this.tokenize(original), 
this.tokenize(modified)) >= 0.5; 712 | } 713 | 714 | /** 715 | * Check if content is valid 716 | * @param content Content to check 717 | */ 718 | private isValidContent(content: string): boolean { 719 | // Basic validation 720 | return content.length > 0 && !content.includes('\0'); 721 | } 722 | 723 | /** 724 | * Check if content has valid structure 725 | * @param content Content to check 726 | */ 727 | private hasValidStructure(content: string): boolean { 728 | // Check basic file structure 729 | const lines = content.split('\n'); 730 | return lines.every(line => line.length <= 10000); // Arbitrary max line length 731 | } 732 | 733 | /** 734 | * Check if content has valid block structure 735 | * @param content Content to check 736 | */ 737 | private hasValidBlockStructure(content: string): boolean { 738 | // Check block structure (e.g., matching braces) 739 | const braces = content.match(/[{}]/g) || []; 740 | return braces.filter(b => b === '{').length === braces.filter(b => b === '}').length; 741 | } 742 | 743 | /** 744 | * Validate line change 745 | * @param original Original line 746 | * @param modified Modified line 747 | */ 748 | private async validateLineChange(original: string, modified: string): Promise { 749 | // Check for protected content 750 | if (original.includes('TODO') || original.includes('IMPORTANT')) { 751 | return false; 752 | } 753 | // Validate line-level changes 754 | return modified.length <= original.length * 2; // Arbitrary max growth factor 755 | } 756 | 757 | /** 758 | * Validate line deletion 759 | * @param line Line to delete 760 | */ 761 | private async validateLineDeletion(line: string): Promise { 762 | // Validate if line can be safely deleted 763 | return !line.includes('IMPORTANT') && !line.includes('TODO'); 764 | } 765 | 766 | /** 767 | * Validate block change 768 | * @param original Original block 769 | * @param modified Modified block 770 | */ 771 | private async validateBlockChange(original: string, modified: string): Promise 
{ 772 | // Validate block-level changes and content integrity 773 | return this.hasValidBlockStructure(modified) && 774 | modified.length >= original.length * 0.5 && 775 | modified.length <= original.length * 2; 776 | } 777 | 778 | /** 779 | * Validate diff change 780 | * @param original Original content 781 | * @param modified Modified content 782 | */ 783 | private async validateDiffChange(original: string, modified: string): Promise { 784 | // Validate diff-level changes and content integrity 785 | return this.isValidContent(modified) && 786 | this.hasValidStructure(modified) && 787 | this.calculateSimilarity(this.tokenize(original), this.tokenize(modified)) >= 0.3; 788 | } 789 | 790 | /** 791 | * Handle conflicts based on resolution strategy 792 | * @param context Atomic operation context 793 | * @param conflicts Detected conflicts 794 | * @param resolution Conflict resolution strategy 795 | */ 796 | private async handleConflicts( 797 | context: { backupPath?: string; tempPath?: string }, 798 | conflicts: string[], 799 | resolution?: ConflictResolution 800 | ): Promise { 801 | switch (resolution) { 802 | case 'force': 803 | // Proceed despite conflicts 804 | await this.commitAtomicOperation(context); 805 | break; 806 | case 'revert': 807 | // Revert changes 808 | await this.rollbackAtomicOperation(context); 809 | break; 810 | default: 811 | // Default to revert 812 | await this.rollbackAtomicOperation(context); 813 | throw new Error(`Unresolved conflicts: ${conflicts.join(', ')}`); 814 | } 815 | } 816 | 817 | /** 818 | * Commit atomic operation 819 | * @param context Atomic operation context 820 | */ 821 | private async commitAtomicOperation( 822 | context: { backupPath?: string; tempPath?: string } 823 | ): Promise { 824 | if (context.backupPath) { 825 | await this.fileService.deleteFile(context.backupPath); 826 | } 827 | } 828 | 829 | /** 830 | * Rollback atomic operation 831 | * @param context Atomic operation context 832 | */ 833 | private async 
rollbackAtomicOperation( 834 | context: { backupPath?: string; tempPath?: string } 835 | ): Promise { 836 | if (context.backupPath && context.tempPath) { 837 | await this.restoreFromBackup(context.tempPath, context.backupPath); 838 | } 839 | } 840 | 841 | /** 842 | * Restore a file from its backup 843 | * @param filePath Original file path 844 | * @param backupPath Backup file path 845 | */ 846 | protected async restoreFromBackup(filePath: string, backupPath: string): Promise { 847 | try { 848 | // Cast overwrite to allow true value 849 | await this.fileService.copyFile(backupPath, filePath, true as false); 850 | await this.fileService.deleteFile(backupPath); 851 | } catch (error) { 852 | throw new FileOperationError( 853 | 'OPERATION_FAILED' as FileErrorCode, 854 | `Failed to restore from backup: ${error instanceof Error ? error.message : 'Unknown error'}`, 855 | filePath 856 | ); 857 | } 858 | } 859 | } 860 | --------------------------------------------------------------------------------