├── .env.dist
├── .env.test
├── .github
│   ├── ISSUE_TEMPLATE
│   │   └── bug_report.md
│   └── workflows
│       ├── ci.yml
│       └── npm-publish.yml
├── .gitignore
├── .nvmrc
├── CHANGELOG.md
├── Dockerfile
├── LICENSE.md
├── README-MULTI-DB.md
├── README.md
├── RELEASE.md
├── assets
│   └── demo.gif
├── evals.ts
├── index.ts
├── package-lock.json
├── package.json
├── pnpm-lock.yaml
├── scripts
│   └── setup-test-db.ts
├── smithery.yaml
├── src
│   ├── config
│   │   └── index.ts
│   ├── db
│   │   ├── index.ts
│   │   ├── permissions.ts
│   │   └── utils.ts
│   ├── types
│   │   └── index.ts
│   └── utils
│       └── index.ts
├── tests
│   ├── e2e
│   │   └── server.test.ts
│   ├── integration
│   │   ├── multi-db
│   │   │   └── multi-db-mode.test.ts
│   │   ├── mysql.test.ts
│   │   ├── schema-permissions
│   │   │   └── schema-permissions.test.ts
│   │   └── socket-connection.test.ts
│   └── unit
│       └── query.test.ts
├── tsconfig.json
└── tsconfig.scripts.json
/.env.dist:
--------------------------------------------------------------------------------
1 | # Basic MySQL connection settings
2 | MYSQL_HOST=127.0.0.1
3 | MYSQL_SOCKET_PATH=/tmp/mysql.sock
4 | MYSQL_PORT=3306
5 | MYSQL_USER=root
6 | MYSQL_PASS=your_password
7 | MYSQL_DB=
8 |
9 | # Leave MYSQL_DB empty for multi-DB mode
10 | # Set MYSQL_DB to a specific database name for single-DB mode
11 |
12 | # Global write operation permissions (default to false for safety)
13 | ALLOW_INSERT_OPERATION=false
14 | ALLOW_UPDATE_OPERATION=false
15 | ALLOW_DELETE_OPERATION=false
16 | ALLOW_DDL_OPERATION=false
17 |
18 | # Schema-specific permissions
19 | # Format: "schema1:true,schema2:false"
20 | SCHEMA_INSERT_PERMISSIONS=test_db:true,staging_db:false
21 | SCHEMA_UPDATE_PERMISSIONS=test_db:true,staging_db:false
22 | SCHEMA_DELETE_PERMISSIONS=test_db:false,staging_db:false
23 | SCHEMA_DDL_PERMISSIONS=test_db:true,staging_db:false
24 |
25 | # Multi-DB mode settings
26 | # Set to true ONLY if you want to allow write operations in multi-DB mode without
27 | # schema-specific permissions (not recommended)
28 | MULTI_DB_WRITE_MODE=false
29 |
30 | # SSL configuration
31 | MYSQL_SSL=false
32 | MYSQL_SSL_REJECT_UNAUTHORIZED=true
33 |
34 | # Performance settings
35 | MYSQL_POOL_SIZE=10
36 | MYSQL_QUERY_TIMEOUT=30000
37 | MYSQL_CACHE_TTL=60000
38 |
39 | # Security settings
40 | MYSQL_RATE_LIMIT=100
41 | MYSQL_MAX_QUERY_COMPLEXITY=1000
42 |
43 | # Monitoring settings
44 | ENABLE_LOGGING=false
45 | MYSQL_LOG_LEVEL=info
46 | MYSQL_METRICS_ENABLED=false
47 |
--------------------------------------------------------------------------------
/.env.test:
--------------------------------------------------------------------------------
1 | # Basic MySQL connection settings
2 | MYSQL_HOST=127.0.0.1
3 | # MYSQL_SOCKET_PATH=/tmp/mysql.sock
4 | MYSQL_PORT=3306
5 | MYSQL_USER=root
6 | MYSQL_PASS=root
7 | MYSQL_DB=
8 |
9 | # Leave MYSQL_DB empty for multi-DB mode
10 | # Set MYSQL_DB to a specific database name for single-DB mode
11 |
12 | # Global write operation permissions (default to false for safety)
13 | ALLOW_INSERT_OPERATION=true
14 | ALLOW_UPDATE_OPERATION=true
15 | ALLOW_DELETE_OPERATION=true
16 | ALLOW_DDL_OPERATION=true
17 |
18 | # Schema-specific permissions
19 | # Format: "schema1:true,schema2:false"
20 | SCHEMA_INSERT_PERMISSIONS=test_db:true,staging_db:false
21 | SCHEMA_UPDATE_PERMISSIONS=test_db:true,staging_db:false
22 | SCHEMA_DELETE_PERMISSIONS=test_db:false,staging_db:false
23 | SCHEMA_DDL_PERMISSIONS=test_db:true,staging_db:false
24 |
25 | # Multi-DB mode settings
26 | # Set to true ONLY if you want to allow write operations in multi-DB mode without
27 | # schema-specific permissions (not recommended)
28 | MULTI_DB_WRITE_MODE=true
29 |
30 | # SSL configuration
31 | MYSQL_SSL=false
32 | MYSQL_SSL_REJECT_UNAUTHORIZED=true
33 |
34 | # Performance settings
35 | MYSQL_POOL_SIZE=10
36 | MYSQL_QUERY_TIMEOUT=30000
37 | MYSQL_CACHE_TTL=60000
38 |
39 | # Security settings
40 | MYSQL_RATE_LIMIT=100
41 | MYSQL_MAX_QUERY_COMPLEXITY=1000
42 |
43 | # Monitoring settings
44 | ENABLE_LOGGING=true
45 | MYSQL_LOG_LEVEL=info
46 | MYSQL_METRICS_ENABLED=false
47 |
48 | # Test settings
49 | TEST_TIMEOUT=10000
50 | TEST_POOL_SIZE=5
51 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **Platform**
14 | E.g: Claude Desktop
15 |
16 | **MCP Configuration**
17 | ```json
18 |
19 | ```
20 |
21 |
22 | **Screenshots**
23 | If applicable, add screenshots to help explain your problem.
24 |
25 |
26 | **Additional context**
27 | Add any other context about the problem here.
28 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | on:
4 | push:
5 | branches: [main, dev]
6 | pull_request:
7 | branches: [main]
8 |
9 | jobs:
10 | build:
11 | runs-on: ubuntu-latest
12 |
13 | services:
14 | mysql:
15 | image: mysql:8.0
16 | env:
17 | MYSQL_ROOT_PASSWORD: ""
18 | MYSQL_ALLOW_EMPTY_PASSWORD: yes
19 | MYSQL_DATABASE: mcp_test
20 | ports:
21 | - 3306:3306
22 | options: >-
23 | --health-cmd="mysqladmin ping"
24 | --health-interval=10s
25 | --health-timeout=5s
26 | --health-retries=3
27 |
28 | steps:
29 | - uses: actions/checkout@v4
30 |
31 | - name: Install pnpm
32 | uses: pnpm/action-setup@v2
33 | with:
34 | version: 8
35 |
36 | - name: Setup Node.js
37 | uses: actions/setup-node@v4
38 | with:
39 | node-version: "22"
40 | cache: "pnpm"
41 |
42 | - name: Install dependencies
43 | run: pnpm install
44 |
45 | - name: Lint
46 | run: pnpm eslint
47 |
48 | - name: Build
49 | run: pnpm build
--------------------------------------------------------------------------------
/.github/workflows/npm-publish.yml:
--------------------------------------------------------------------------------
1 | name: NPM Publish
2 |
3 | on:
4 | workflow_dispatch:
5 | release:
6 | types: [created]
7 |
8 | jobs:
9 | publish:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - uses: actions/checkout@v4
13 |
14 | - name: Install pnpm
15 | uses: pnpm/action-setup@v2
16 | with:
17 | version: 8
18 |
19 | - name: Setup Node.js
20 | uses: actions/setup-node@v4
21 | with:
22 | node-version: '22'
23 | registry-url: 'https://registry.npmjs.org'
24 | cache: 'pnpm'
25 |
26 | - name: Install dependencies
27 | run: pnpm install
28 |
29 | - name: Build
30 | run: pnpm build
31 |
32 | - name: Publish to NPM
33 | run: pnpm publish
34 | env:
35 | NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
36 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | dist
2 | node_modules/
3 |
--------------------------------------------------------------------------------
/.nvmrc:
--------------------------------------------------------------------------------
1 | 18
2 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 |
4 | ### Planned Features
5 | - Query Features
6 | - Prepared statement support
7 | - Query parameter validation
8 | - Query timeout configuration
9 | - Result pagination
10 | - Query execution statistics
11 |
12 | - Security
13 | - Enhanced SQL injection prevention
14 | - Query whitelisting/blacklisting
15 | - Rate limiting
16 | - Query complexity analysis
17 | - Connection encryption configuration
18 |
19 | - Performance
20 | - Connection pool optimization
21 | - Query result caching
22 | - Large result set streaming
23 | - Query execution plan analysis
24 |
25 | - Monitoring
26 | - Query logging system
27 | - Performance metrics collection
28 | - Error tracking and reporting
29 | - Health check endpoints
30 |
31 | - Schema Management
32 | - Table relationship information
33 | - Index details
34 | - Foreign key constraints
35 | - Table statistics
36 |
37 | ## [1.0.14] - 2024-07-01
38 |
39 | ### Added
40 | - Added better support for test environments with automatic database selection
41 | - Implemented comprehensive debug logging for database configuration in test mode
42 | - Added fail-safe process termination handling for test environments
43 |
44 | ### Changed
45 | - Improved test environment detection with support for Vitest test runner
46 | - Enhanced MySQL connection configuration to use consistent defaults across environments
47 | - Updated error handling in database setup scripts to be more resilient
48 |
49 | ### Fixed
50 | - Fixed "No database selected" error in integration tests by ensuring database name is always set
51 | - Fixed authentication issues in test environments by providing consistent default credentials
52 | - Prevented premature test termination by implementing conditional process.exit handling
53 | - Improved error handling in test database setup to continue tests even when setup encounters issues
54 |
55 | ### Security
56 | - Made authentication more consistent across development and test environments
57 | - Added safeguards to prevent exposing actual password values in debug logs
58 |
59 | ### Documentation
60 | - Added detailed inline comments for test-specific configurations
61 | - Improved error messages to provide better debugging information
62 |
63 | ## [1.0.13] - 2024-05-26
64 |
65 | ### Added
66 | - Complete write operations support through Smithery configuration
67 | - Added environment variables for database write operations (`ALLOW_INSERT_OPERATION`, `ALLOW_UPDATE_OPERATION`, `ALLOW_DELETE_OPERATION`)
68 | - New configuration options in Smithery schema for controlling write permissions
69 | - Improved documentation for write operations configuration
70 | - Support for enabling/disabling specific SQL operations via environment variables
71 | - Enhanced error handling for unauthorized write operations
72 |
73 | ### Changed
74 | - Updated Smithery configuration to include write operation settings
75 | - Improved Smithery integration with clear security defaults
76 | - Enhanced documentation with detailed configuration examples
77 | - Restructured README with clearer installation instructions
78 | - Better error reporting for database connection issues
79 | - Improved transaction handling for write operations
80 |
81 | ### Fixed
82 | - Fixed error handling for database connection failures
83 | - Improved error messages for unauthorized operations
84 | - Better handling of MySQL 8.0+ authentication methods
85 | - Fixed SSL/TLS configuration options in Smithery
86 |
87 | ### Security
88 | - All write operations (INSERT, UPDATE, DELETE) disabled by default
89 | - Added clear documentation about security implications of enabling write operations
90 | - Improved transaction isolation for write operations
91 | - Enhanced error reporting that doesn't expose sensitive information
92 |
93 | ### Documentation
94 | - Updated README with comprehensive Smithery configuration instructions
95 | - Added detailed examples for enabling specific write operations
96 | - Improved troubleshooting section with common issues and solutions
97 | - Better explanation of required MySQL permissions for different operation types
98 | - Added clear security recommendations for production deployments
99 |
100 | ## [1.0.10] - 2024-03-13
101 |
102 | ### Changed
103 | - Version bump to fix npm publishing issue
104 | - Updated installation instructions in README to reference specific version
105 |
106 | ## [1.0.9] - 2024-03-13
107 |
108 | ### Added
109 | - GitHub Actions CI workflow for automated lint, build, and test with MySQL service
110 |
111 | ### Changed
112 | - Removed `@types/mysql2` dependency and related type references
113 | - Updated test files to use `any` type instead of mysql2 specific types
114 | - Fixed integration tests to properly handle MySQL connection and queries
115 |
116 | ### Fixed
117 | - Fixed GitHub Actions workflow to install pnpm before using it for caching
118 | - Fixed failing unit tests by removing problematic executeReadOnlyQuery tests
119 |
120 | ## [1.0.8] - 2024-03-12
121 |
122 | ### Changed
123 | - Upgraded from `mysql` to `mysql2` package (^3.13.0) for better MySQL 8.0+ compatibility
124 | - Refactored database connection code to use mysql2's Promise-based API
125 | - Added support for MySQL 8.0+ authentication with `caching_sha2_password` plugin
126 |
127 | ### Removed
128 | - Removed deprecated `mysql` package dependency
129 | - Removed unused `@types/mysql2` devDependency
130 |
131 | ### Fixed
132 | - Fixed authentication issues with MySQL 8.0+ servers
133 | - Improved connection handling with Promise-based API
134 | - Enhanced error handling for database operations
135 |
136 | ## [1.0.7] - Previous version
137 | - Initial release with basic MySQL functionality
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | # Generated by https://smithery.ai. See: https://smithery.ai/docs/config#dockerfile
2 | # Use a Node.js Alpine image as the base
3 | FROM node:22-alpine AS builder
4 |
5 | # Install pnpm
6 | RUN npm install -g pnpm
7 |
8 | # Set the working directory
9 | WORKDIR /app
10 |
11 | # Copy the package.json and pnpm-lock.yaml if present
12 | COPY package.json pnpm-lock.yaml* /app/
13 |
14 | # Install the dependencies
15 | RUN pnpm install --frozen-lockfile --ignore-scripts
16 |
17 | # Copy the rest of the application code
18 | COPY . /app
19 |
20 | # Build the application
21 | RUN pnpm run build
22 |
23 | # Use a new, clean image for the release
24 | FROM node:22-alpine
25 |
26 | # Install pnpm
27 | RUN npm install -g pnpm
28 |
29 | # Set the working directory
30 | WORKDIR /app
31 |
32 | # Copy the built files from the builder
33 | COPY --from=builder /app/dist /app/dist
34 | COPY --from=builder /app/package.json /app/
35 | COPY --from=builder /app/pnpm-lock.yaml* /app/
36 |
37 | # Set environment variables
38 | ENV MYSQL_HOST=127.0.0.1
39 | ENV MYSQL_PORT=3306
40 | ENV MYSQL_USER=root
41 | ENV MYSQL_PASS=
42 | ENV MYSQL_DB=db_name
43 | ENV ALLOW_INSERT_OPERATION=true
44 | ENV ALLOW_UPDATE_OPERATION=true
45 | ENV ALLOW_DELETE_OPERATION=false
46 |
47 | # Install production dependencies only
48 | # The --ignore-scripts flag skips lifecycle scripts such as prepare
49 | RUN pnpm install --prod --frozen-lockfile --ignore-scripts
50 |
51 | # Expose any ports if necessary (e.g., 8080)
52 | # EXPOSE 8080
53 |
54 | # Run the server
55 | ENTRYPOINT ["node", "dist/index.js"]
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Ben Borla
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README-MULTI-DB.md:
--------------------------------------------------------------------------------
1 | # Multi-DB Mode and Schema-Specific Permissions
2 |
3 | This document describes the multi-database mode and schema-specific permissions features added to MCP-Server-MySQL.
4 |
5 | ## Multi-DB Mode
6 |
7 | MCP-Server-MySQL now supports working with multiple databases simultaneously when no specific database is set in the configuration.
8 |
9 | ### How to Enable Multi-DB Mode
10 |
11 | To enable multi-DB mode, simply leave the `MYSQL_DB` environment variable empty:
12 |
13 | ```json
14 | {
15 | "mcpServers": {
16 | "mcp_server_mysql": {
17 | "env": {
18 | "MYSQL_HOST": "127.0.0.1",
19 | "MYSQL_PORT": "3306",
20 | "MYSQL_USER": "root",
21 | "MYSQL_PASS": "your_password",
22 | "MYSQL_DB": "", // Empty to enable multi-DB mode
23 | ...
24 | }
25 | }
26 | }
27 | }
28 | ```
29 |
30 | ### Features in Multi-DB Mode
31 |
32 | 1. **List All Databases**: In multi-DB mode, the server will list resources from all available databases when the LLM requests database schemas.
33 |
34 | 2. **Query Any Database**: You can execute queries against any database to which the MySQL user has access.
35 |
36 | 3. **Schema Qualification Required**: When working in multi-DB mode, you should use fully qualified table names with schema/database prefixes:
37 | ```sql
38 | -- Use fully qualified table names
39 | SELECT * FROM database_name.table_name;
40 |
41 | -- Or use USE statements to switch between databases
42 | USE database_name;
43 | SELECT * FROM table_name;
44 | ```
45 |
46 | 4. **Automatic Read-Only Mode**: For safety, multi-DB mode enforces read-only operations by default. This can be customized using schema-specific permissions (see below).
47 |
48 | 5. **Database Exploration**: You can explore databases using commands like:
49 | ```sql
50 | -- List all databases
51 | SHOW DATABASES;
52 |
53 | -- List tables in a specific database
54 | SHOW TABLES FROM database_name;
55 |
56 | -- Describe a table's structure
57 | DESCRIBE database_name.table_name;
58 | ```
59 |
60 | ## Schema-Specific Permissions
61 |
62 | This new feature allows fine-grained control over which operations are allowed on specific database schemas.
63 |
64 | ### Available Permission Types
65 |
66 | 1. **INSERT Permissions**: Control which schemas can have new records inserted.
67 | 2. **UPDATE Permissions**: Control which schemas can have records updated.
68 | 3. **DELETE Permissions**: Control which schemas can have records deleted.
69 | 4. **DDL Permissions**: Control which schemas can have their structure modified (CREATE, ALTER, DROP, TRUNCATE).
70 |
71 | ### How to Configure Schema-Specific Permissions
72 |
73 | Set the following environment variables with a comma-separated list of schema:permission pairs:
74 |
75 | ```
76 | SCHEMA_INSERT_PERMISSIONS=production:false,development:true,test:true
77 | SCHEMA_UPDATE_PERMISSIONS=production:false,development:true,test:true
78 | SCHEMA_DELETE_PERMISSIONS=production:false,development:false,test:true
79 | SCHEMA_DDL_PERMISSIONS=production:false,development:false,test:true
80 | ```
81 |
82 | This configuration:
83 | - Allows INSERT and UPDATE on development and test databases, but not production
84 | - Allows DELETE and DDL operations only on the test database
85 | - Blocks all write operations on the production database
86 |
87 | ### Example Configuration
88 |
89 | Here's a complete example configuration with schema-specific permissions:
90 |
91 | ```json
92 | {
93 | "mcpServers": {
94 | "mcp_server_mysql": {
95 | "command": "npx",
96 | "args": ["-y", "@benborla29/mcp-server-mysql"],
97 | "env": {
98 | "MYSQL_HOST": "127.0.0.1",
99 | "MYSQL_PORT": "3306",
100 | "MYSQL_USER": "root",
101 | "MYSQL_PASS": "your_password",
102 | "MYSQL_DB": "", // Empty for multi-DB mode
103 |
104 | // Global defaults (apply when no schema-specific permission is set)
105 | "ALLOW_INSERT_OPERATION": "false",
106 | "ALLOW_UPDATE_OPERATION": "false",
107 | "ALLOW_DELETE_OPERATION": "false",
108 | "ALLOW_DDL_OPERATION": "false",
109 |
110 | // Schema-specific permissions
111 | "SCHEMA_INSERT_PERMISSIONS": "dev_db:true,test_db:true,prod_db:false",
112 | "SCHEMA_UPDATE_PERMISSIONS": "dev_db:true,test_db:true,prod_db:false",
113 | "SCHEMA_DELETE_PERMISSIONS": "dev_db:false,test_db:true,prod_db:false",
114 | "SCHEMA_DDL_PERMISSIONS": "dev_db:false,test_db:true,prod_db:false"
115 | }
116 | }
117 | }
118 | }
119 | ```
120 |
121 | ### Permission Resolution Logic
122 |
123 | 1. If a schema-specific permission is set, it takes precedence over the global setting.
124 | 2. If no schema-specific permission is found, the global setting (`ALLOW_X_OPERATION`) is used.
125 | 3. In multi-DB mode, if a query doesn't specify a schema and one can't be determined from context, only read operations are allowed for safety.
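
The sketch below restates these rules in TypeScript. It is a minimal illustration only, not the server's actual implementation; the helper names (`parseSchemaPermissions`, `isOperationAllowed`) are hypothetical.

```typescript
// Hypothetical helpers illustrating the resolution rules above.
type PermissionMap = Record<string, boolean>;

// Parses "schema1:true,schema2:false" into { schema1: true, schema2: false }.
function parseSchemaPermissions(value = ""): PermissionMap {
  const map: PermissionMap = {};
  for (const pair of value.split(",").map((p) => p.trim()).filter(Boolean)) {
    const [schema, allowed] = pair.split(":");
    map[schema] = allowed === "true";
  }
  return map;
}

// Rule 1: a schema-specific entry wins. Rule 2: otherwise fall back to the
// global ALLOW_X_OPERATION flag. Rule 3: no schema in multi-DB mode => read-only.
function isOperationAllowed(
  schema: string | null,
  schemaPermissions: PermissionMap,
  globalDefault: boolean,
  multiDbMode: boolean,
): boolean {
  if (schema && schema in schemaPermissions) return schemaPermissions[schema];
  if (!schema && multiDbMode) return false;
  return globalDefault;
}

// Example: SCHEMA_INSERT_PERMISSIONS="dev_db:true,prod_db:false"
const insertPerms = parseSchemaPermissions(process.env.SCHEMA_INSERT_PERMISSIONS);
console.log(isOperationAllowed("dev_db", insertPerms, false, true));  // true
console.log(isOperationAllowed("prod_db", insertPerms, false, true)); // false
console.log(isOperationAllowed(null, insertPerms, true, true));       // false (rule 3)
```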
126 |
127 | ## Environment Variables Summary
128 |
129 | ### Multi-DB Mode
130 | - `MYSQL_DB`: Leave empty to enable multi-DB mode
131 | - `MULTI_DB_WRITE_MODE`: Set to "true" to allow write operations in multi-DB mode without schema-specific permissions (not recommended for security)
132 |
133 | ### Schema-Specific Permissions
134 | - `SCHEMA_INSERT_PERMISSIONS`: Control INSERT permissions per schema
135 | - `SCHEMA_UPDATE_PERMISSIONS`: Control UPDATE permissions per schema
136 | - `SCHEMA_DELETE_PERMISSIONS`: Control DELETE permissions per schema
137 | - `SCHEMA_DDL_PERMISSIONS`: Control DDL permissions per schema (CREATE, ALTER, DROP, TRUNCATE)
138 |
139 | ### Global Permission Defaults
140 | - `ALLOW_INSERT_OPERATION`: Global default for INSERT permissions
141 | - `ALLOW_UPDATE_OPERATION`: Global default for UPDATE permissions
142 | - `ALLOW_DELETE_OPERATION`: Global default for DELETE permissions
143 | - `ALLOW_DDL_OPERATION`: Global default for DDL permissions
144 |
145 | ## Security Considerations
146 |
147 | 1. **Default to Principle of Least Privilege**: By default, all write operations are disabled globally and must be explicitly enabled.
148 |
149 | 2. **Isolation in Multi-DB Mode**: Consider using a dedicated MySQL user with limited database grants when using multi-DB mode.
150 |
151 | 3. **Careful with DDL Permissions**: DDL operations can modify database structure, so grant these permissions cautiously.
152 |
153 | 4. **Production Databases**: Always set `schema:false` for production database schemas in all write permission settings.
154 |
155 | 5. **User Least Privilege**: Ensure the MySQL user only has the required permissions on the specific databases needed.
156 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 | # MCP Server for MySQL based on NodeJS
3 | [](https://smithery.ai/server/@benborla29/mcp-server-mysql)
4 |
5 | 
6 |
7 |
8 |
9 |
10 | A Model Context Protocol server that provides access to MySQL databases. This server enables LLMs to inspect database schemas and execute SQL queries.
11 |
12 | ## Table of Contents
13 | - [Requirements](#requirements)
14 | - [Installation](#installation)
15 | - [Smithery](#using-smithery)
16 | - [Clone to Local Repository](#running-from-local-repository)
17 | - [Components](#components)
18 | - [Configuration](#configuration)
19 | - [Environment Variables](#environment-variables)
20 | - [Multi-DB Mode](#multi-db-mode)
21 | - [Schema-Specific Permissions](#schema-specific-permissions)
22 | - [Testing](#testing)
23 | - [Troubleshooting](#troubleshooting)
24 | - [Contributing](#contributing)
25 | - [License](#license)
26 |
27 | ## Requirements
28 |
29 | - Node.js v18 or higher
30 | - MySQL 5.7 or higher (MySQL 8.0+ recommended)
31 | - MySQL user with appropriate permissions for the operations you need
32 | - For write operations: MySQL user with INSERT, UPDATE, and/or DELETE privileges
33 |
34 | ## Installation
35 |
36 | There are several ways to install and configure the MCP server. The most common is to follow the instructions at https://smithery.ai/server/@benborla29/mcp-server-mysql
37 |
38 | ### Cursor
39 |
40 | For Cursor IDE, you can install this MCP server with the following steps:
41 |
42 | 1. Visit https://smithery.ai/server/@benborla29/mcp-server-mysql
43 | 2. Follow the instructions for Cursor
44 |
45 |
46 | MCP Get provides a centralized registry of MCP servers and simplifies the installation process.
47 |
48 | ### Using NPM/PNPM
49 |
50 | For manual installation:
51 |
52 | ```bash
53 | # Using npm
54 | npm install -g @benborla29/mcp-server-mysql
55 |
56 | # Using pnpm
57 | pnpm add -g @benborla29/mcp-server-mysql
58 | ```
59 |
60 | After manual installation, you'll need to configure your LLM application to use the MCP server (see Configuration section below).
61 |
62 | ### Running from Local Repository
63 |
64 | If you want to clone and run this MCP server directly from the source code, follow these steps:
65 |
66 | 1. **Clone the repository**
67 | ```bash
68 | git clone https://github.com/benborla/mcp-server-mysql.git
69 | cd mcp-server-mysql
70 | ```
71 |
72 | 2. **Install dependencies**
73 | ```bash
74 | npm install
75 | # or
76 | pnpm install
77 | ```
78 |
79 | 3. **Build the project**
80 | ```bash
81 | npm run build
82 | # or
83 | pnpm run build
84 | ```
85 |
86 | 4. **Configure Claude Desktop**
87 |
88 | Add the following to your Claude Desktop configuration file (`claude_desktop_config.json`):
89 |
90 | ```json
91 | {
92 | "mcpServers": {
93 | "mcp_server_mysql": {
94 | "command": "/path/to/node",
95 | "args": [
96 | "/full/path/to/mcp-server-mysql/dist/index.js"
97 | ],
98 | "env": {
99 | "MYSQL_HOST": "127.0.0.1",
100 | "MYSQL_PORT": "3306",
101 | "MYSQL_USER": "root",
102 | "MYSQL_PASS": "your_password",
103 | "MYSQL_DB": "your_database",
104 | "ALLOW_INSERT_OPERATION": "false",
105 | "ALLOW_UPDATE_OPERATION": "false",
106 | "ALLOW_DELETE_OPERATION": "false",
107 | "PATH": "/Users/atlasborla/Library/Application Support/Herd/config/nvm/versions/node/v22.9.0/bin:/usr/bin:/bin", // <--- Important to add the following, run in your terminal `echo "$(which node)/../"` to get the path
108 | "NODE_PATH": "/Users/atlasborla/Library/Application Support/Herd/config/nvm/versions/node/v22.9.0/lib/node_modules" // <--- Important to add the following, run in your terminal `echo "$(which node)/../../lib/node_modules"`
109 | }
110 | }
111 | }
112 | }
113 | ```
114 |
115 | Replace:
116 | - `/path/to/node` with the full path to your Node.js binary (find it with `which node`)
117 | - `/full/path/to/mcp-server-mysql` with the full path to where you cloned the repository
118 | - Set the MySQL credentials to match your environment
119 |
120 | 5. **Test the server**
121 | ```bash
122 | # Run the server directly to test
123 | node dist/index.js
124 | ```
125 |
126 | If it connects to MySQL successfully, you're ready to use it with Claude Desktop.
127 |
128 | ## Components
129 |
130 | ### Tools
131 |
132 | - **mysql_query**
133 | - Execute SQL queries against the connected database
134 | - Input: `sql` (string): The SQL query to execute
135 | - By default, limited to READ ONLY operations
136 | - Optional write operations (when enabled via configuration):
137 | - INSERT: Add new data to tables (requires `ALLOW_INSERT_OPERATION=true`)
138 | - UPDATE: Modify existing data (requires `ALLOW_UPDATE_OPERATION=true`)
139 | - DELETE: Remove data (requires `ALLOW_DELETE_OPERATION=true`)
140 | - All operations are executed within a transaction with proper commit/rollback handling
141 | - Supports prepared statements for secure parameter handling
142 | - Configurable query timeouts and result pagination
143 | - Built-in query execution statistics
144 |
145 | ### Resources
146 |
147 | The server provides comprehensive database information:
148 |
149 | - **Table Schemas**
150 | - JSON schema information for each table
151 | - Column names and data types
152 | - Index information and constraints
153 | - Foreign key relationships
154 | - Table statistics and metrics
155 | - Automatically discovered from database metadata
156 |
157 | ### Security Features
158 |
159 | - SQL injection prevention through prepared statements
160 | - Query whitelisting/blacklisting capabilities
161 | - Rate limiting for query execution
162 | - Query complexity analysis
163 | - Configurable connection encryption
164 | - Read-only transaction enforcement
165 |
166 | ### Performance Optimizations
167 |
168 | - Optimized connection pooling
169 | - Query result caching
170 | - Large result set streaming
171 | - Query execution plan analysis
172 | - Configurable query timeouts
173 |
174 | ### Monitoring and Debugging
175 |
176 | - Comprehensive query logging
177 | - Performance metrics collection
178 | - Error tracking and reporting
179 | - Health check endpoints
180 | - Query execution statistics
181 |
182 | ## Configuration
183 |
184 | ### Automatic Configuration with Smithery
185 | If you installed using Smithery, your configuration is already set up. You can view or modify it with:
186 |
187 | ```bash
188 | smithery configure @benborla29/mcp-server-mysql
189 | ```
190 |
191 | When reconfiguring, you can update any of the MySQL connection details as well as the write operation settings:
192 |
193 | - **Basic connection settings**:
194 | - MySQL Host, Port, User, Password, Database
195 | - SSL/TLS configuration (if your database requires secure connections)
196 |
197 | - **Write operation permissions**:
198 | - Allow INSERT Operations: Set to true if you want to allow adding new data
199 | - Allow UPDATE Operations: Set to true if you want to allow updating existing data
200 | - Allow DELETE Operations: Set to true if you want to allow deleting data
201 |
202 | For security reasons, all write operations are disabled by default. Only enable these settings if you specifically need Claude to modify your database data.
203 |
204 | ### Advanced Configuration Options
205 | For more control over the MCP server's behavior, you can use these advanced configuration options:
206 |
207 | ```json
208 | {
209 | "mcpServers": {
210 | "mcp_server_mysql": {
211 | "command": "/path/to/npx/binary/npx",
212 | "args": [
213 | "-y",
214 | "@benborla29/mcp-server-mysql"
215 | ],
216 | "env": {
217 | // Basic connection settings
218 | "MYSQL_HOST": "127.0.0.1",
219 | "MYSQL_PORT": "3306",
220 | "MYSQL_USER": "root",
221 | "MYSQL_PASS": "",
222 | "MYSQL_DB": "db_name",
223 | "PATH": "/path/to/node/bin:/usr/bin:/bin",
224 |
225 | // Performance settings
226 | "MYSQL_POOL_SIZE": "10",
227 | "MYSQL_QUERY_TIMEOUT": "30000",
228 | "MYSQL_CACHE_TTL": "60000",
229 |
230 | // Security settings
231 | "MYSQL_RATE_LIMIT": "100",
232 | "MYSQL_MAX_QUERY_COMPLEXITY": "1000",
233 | "MYSQL_SSL": "true",
234 |
235 | // Monitoring settings
236 | "ENABLE_LOGGING": "true",
237 | "MYSQL_LOG_LEVEL": "info",
238 | "MYSQL_METRICS_ENABLED": "true",
239 |
240 | // Write operation flags
241 | "ALLOW_INSERT_OPERATION": "false",
242 | "ALLOW_UPDATE_OPERATION": "false",
243 | "ALLOW_DELETE_OPERATION": "false"
244 | }
245 | }
246 | }
247 | }
248 | ```
249 |
250 | ## Environment Variables
251 |
252 | ### Basic Connection
253 | - `MYSQL_SOCKET_PATH`: Unix socket path for local connections (e.g., "/tmp/mysql.sock")
254 | - `MYSQL_HOST`: MySQL server host (default: "127.0.0.1") - ignored if MYSQL_SOCKET_PATH is set
255 | - `MYSQL_PORT`: MySQL server port (default: "3306") - ignored if MYSQL_SOCKET_PATH is set
256 | - `MYSQL_USER`: MySQL username (default: "root")
257 | - `MYSQL_PASS`: MySQL password
258 | - `MYSQL_DB`: Target database name (leave empty for multi-DB mode)
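
As an illustration of how these variables interact, the hedged sketch below builds a `mysql2` pool that prefers the Unix socket when `MYSQL_SOCKET_PATH` is set and falls back to host/port otherwise. It mirrors the behavior described above but is not the server's own connection code (which lives in `src/db/index.ts`).

```typescript
import mysql from "mysql2/promise";

// Prefer the Unix socket when MYSQL_SOCKET_PATH is set; otherwise use TCP/IP.
const connectionTarget = process.env.MYSQL_SOCKET_PATH
  ? { socketPath: process.env.MYSQL_SOCKET_PATH }
  : {
      host: process.env.MYSQL_HOST || "127.0.0.1",
      port: Number(process.env.MYSQL_PORT || "3306"),
    };

const pool = mysql.createPool({
  ...connectionTarget,
  user: process.env.MYSQL_USER || "root",
  password: process.env.MYSQL_PASS || "",
  database: process.env.MYSQL_DB || undefined, // empty/unset = multi-DB mode
  connectionLimit: Number(process.env.MYSQL_POOL_SIZE || "10"),
});
```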
259 |
260 | ### Performance Configuration
261 | - `MYSQL_POOL_SIZE`: Connection pool size (default: "10")
262 | - `MYSQL_QUERY_TIMEOUT`: Query timeout in milliseconds (default: "30000")
263 | - `MYSQL_CACHE_TTL`: Cache time-to-live in milliseconds (default: "60000")
264 |
265 | ### Security Configuration
266 | - `MYSQL_RATE_LIMIT`: Maximum queries per minute (default: "100")
267 | - `MYSQL_MAX_QUERY_COMPLEXITY`: Maximum query complexity score (default: "1000")
268 | - `MYSQL_SSL`: Enable SSL/TLS encryption (default: "false")
269 | - `ALLOW_INSERT_OPERATION`: Enable INSERT operations (default: "false")
270 | - `ALLOW_UPDATE_OPERATION`: Enable UPDATE operations (default: "false")
271 | - `ALLOW_DELETE_OPERATION`: Enable DELETE operations (default: "false")
272 | - `ALLOW_DDL_OPERATION`: Enable DDL operations (default: "false")
273 | - `SCHEMA_INSERT_PERMISSIONS`: Schema-specific INSERT permissions
274 | - `SCHEMA_UPDATE_PERMISSIONS`: Schema-specific UPDATE permissions
275 | - `SCHEMA_DELETE_PERMISSIONS`: Schema-specific DELETE permissions
276 | - `SCHEMA_DDL_PERMISSIONS`: Schema-specific DDL permissions
277 | - `MULTI_DB_WRITE_MODE`: Enable write operations in multi-DB mode (default: "false")
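
These flags arrive as plain strings in the environment. A minimal sketch of reading them (assuming, per the defaults above, that only the literal string "true" enables an operation) might look like this; it is illustrative, not the server's exact code:

```typescript
// Only the exact string "true" enables a write operation; anything else,
// including an unset variable, keeps the safe default of false.
const flag = (name: string): boolean => process.env[name] === "true";

const ALLOW_INSERT_OPERATION = flag("ALLOW_INSERT_OPERATION"); // default: false
const ALLOW_UPDATE_OPERATION = flag("ALLOW_UPDATE_OPERATION"); // default: false
const ALLOW_DELETE_OPERATION = flag("ALLOW_DELETE_OPERATION"); // default: false
const ALLOW_DDL_OPERATION = flag("ALLOW_DDL_OPERATION");       // default: false
```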
278 |
279 | ### Monitoring Configuration
280 | - `ENABLE_LOGGING`: Enable query logging (default: "false")
281 | - `MYSQL_LOG_LEVEL`: Logging level (default: "info")
282 | - `MYSQL_METRICS_ENABLED`: Enable performance metrics (default: "false")
283 |
284 | ## Multi-DB Mode
285 |
286 | MCP-Server-MySQL supports connecting to multiple databases when no specific database is set. This allows the LLM to query any database the MySQL user has access to. For full details, see [README-MULTI-DB.md](./README-MULTI-DB.md).
287 |
288 | ### Enabling Multi-DB Mode
289 |
290 | To enable multi-DB mode, simply leave the `MYSQL_DB` environment variable empty. In multi-DB mode, queries require schema qualification:
291 |
292 | ```sql
293 | -- Use fully qualified table names
294 | SELECT * FROM database_name.table_name;
295 |
296 | -- Or use USE statements to switch between databases
297 | USE database_name;
298 | SELECT * FROM table_name;
299 | ```
300 |
301 | ## Schema-Specific Permissions
302 |
303 | For fine-grained control over database operations, MCP-Server-MySQL now supports schema-specific permissions. This allows different databases to have different levels of access (read-only, read-write, etc.).
304 |
305 | ### Configuration Example
306 |
307 | ```
308 | SCHEMA_INSERT_PERMISSIONS=development:true,test:true,production:false
309 | SCHEMA_UPDATE_PERMISSIONS=development:true,test:true,production:false
310 | SCHEMA_DELETE_PERMISSIONS=development:false,test:true,production:false
311 | SCHEMA_DDL_PERMISSIONS=development:false,test:true,production:false
312 | ```
313 |
314 | For complete details and security recommendations, see [README-MULTI-DB.md](./README-MULTI-DB.md).
315 |
316 | ## Testing
317 |
318 | ### Database Setup
319 |
320 | Before running tests, you need to set up the test database and seed it with test data:
321 |
322 | 1. **Create Test Database and User**
323 | ```sql
324 | -- Connect as root and create test database
325 | CREATE DATABASE IF NOT EXISTS mcp_test;
326 |
327 | -- Create test user with appropriate permissions
328 | CREATE USER IF NOT EXISTS 'mcp_test'@'localhost' IDENTIFIED BY 'mcp_test_password';
329 | GRANT ALL PRIVILEGES ON mcp_test.* TO 'mcp_test'@'localhost';
330 | FLUSH PRIVILEGES;
331 | ```
332 |
333 | 2. **Run Database Setup Script**
334 | ```bash
335 | # Run the database setup script
336 | pnpm run setup:test:db
337 | ```
338 |
339 | This will create the necessary tables and seed data. The script is located in `scripts/setup-test-db.ts`
340 |
341 | 3. **Configure Test Environment**
342 | Create a `.env.test` file in the project root (if it does not already exist):
343 | ```env
344 | MYSQL_HOST=127.0.0.1
345 | MYSQL_PORT=3306
346 | MYSQL_USER=mcp_test
347 | MYSQL_PASS=mcp_test_password
348 | MYSQL_DB=mcp_test
349 | ```
350 |
351 | 4. **Update package.json Scripts**
352 | Add these scripts to your package.json:
353 | ```json
354 | {
355 | "scripts": {
356 | "setup:test:db": "ts-node scripts/setup-test-db.ts",
357 | "pretest": "pnpm run setup:test:db",
358 | "test": "vitest run",
359 | "test:watch": "vitest",
360 | "test:coverage": "vitest run --coverage"
361 | }
362 | }
363 | ```
364 |
365 | ### Running Tests
366 |
367 | The project includes a comprehensive test suite to ensure functionality and reliability:
368 |
369 | ```bash
370 | # First-time setup
371 | pnpm run setup:test:db
372 |
373 | # Run all tests
374 | pnpm test
375 | ```
376 |
377 |
378 |
379 | ## Running evals
380 |
381 | The evals package loads an MCP client that runs the index.ts file, so there is no need to rebuild between tests. You can load environment variables by prefixing the npx command. Full documentation can be found [here](https://www.mcpevals.io/docs).
382 |
383 | ```bash
384 | OPENAI_API_KEY=your-key npx mcp-eval evals.ts index.ts
385 | ```
386 | ## Troubleshooting
387 |
388 | ### Common Issues
389 |
390 | 1. **Connection Issues**
391 | - Verify MySQL server is running and accessible
392 | - Check credentials and permissions
393 | - Ensure SSL/TLS configuration is correct if enabled
394 | - Try connecting with a MySQL client to confirm access
395 |
396 | 2. **Performance Issues**
397 | - Adjust connection pool size
398 | - Configure query timeout values
399 | - Enable query caching if needed
400 | - Check query complexity settings
401 | - Monitor server resource usage
402 |
403 | 3. **Security Restrictions**
404 | - Review rate limiting configuration
405 | - Check query whitelist/blacklist settings
406 | - Verify SSL/TLS settings
407 | - Ensure the user has appropriate MySQL permissions
408 |
409 | 4. **Path Resolution**
410 | If you encounter an error "Could not connect to MCP server mcp-server-mysql", explicitly set the path of all required binaries:
411 | ```json
412 | {
413 | "env": {
414 | "PATH": "/path/to/node/bin:/usr/bin:/bin"
415 | }
416 | }
417 | ```
418 |
419 | *Where can I find my `node` bin path?*
420 | Run the following commands to get it:
421 |
422 | For **PATH**
423 | ```bash
424 | echo "$(which node)/../"
425 | ```
426 |
427 | For **NODE_PATH**
428 | ```bash
429 | echo "$(which node)/../../lib/node_modules"
430 | ```
431 |
432 | 5. **Claude Desktop Specific Issues**
433 | - If you see "Server disconnected" logs in Claude Desktop, check the logs at `~/Library/Logs/Claude/mcp-server-mcp_server_mysql.log`
434 | - Ensure you're using the absolute path to both the Node binary and the server script
435 | - Check if your `.env` file is being properly loaded; use explicit environment variables in the configuration
436 | - Try running the server directly from the command line to see if there are connection issues
437 | - If you need write operations (INSERT, UPDATE, DELETE), set the appropriate flags to "true" in your configuration:
438 | ```json
439 | "env": {
440 | "ALLOW_INSERT_OPERATION": "true", // Enable INSERT operations
441 | "ALLOW_UPDATE_OPERATION": "true", // Enable UPDATE operations
442 | "ALLOW_DELETE_OPERATION": "true" // Enable DELETE operations
443 | }
444 | ```
445 | - Ensure your MySQL user has the appropriate permissions for the operations you're enabling
446 | - For direct execution configuration, use:
447 | ```json
448 | {
449 | "mcpServers": {
450 | "mcp_server_mysql": {
451 | "command": "/full/path/to/node",
452 | "args": [
453 | "/full/path/to/mcp-server-mysql/dist/index.js"
454 | ],
455 | "env": {
456 | "MYSQL_HOST": "127.0.0.1",
457 | "MYSQL_PORT": "3306",
458 | "MYSQL_USER": "root",
459 | "MYSQL_PASS": "your_password",
460 | "MYSQL_DB": "your_database"
461 | }
462 | }
463 | }
464 | }
465 | ```
466 |
467 | 6. **Authentication Issues**
468 | - For MySQL 8.0+, ensure the server supports the `caching_sha2_password` authentication plugin
469 | - Check if your MySQL user is configured with the correct authentication method
470 | - Try creating a user with legacy authentication if needed:
471 | ```sql
472 | CREATE USER 'user'@'localhost' IDENTIFIED WITH mysql_native_password BY 'password';
473 | ```
474 | @lizhuangs
475 |
476 | 7. **Module Not Found Errors**
477 |    If you encounter an `Error [ERR_MODULE_NOT_FOUND]: Cannot find package 'dotenv' imported from` error, try this workaround:
478 | ```bash
479 | npx -y -p @benborla29/mcp-server-mysql -p dotenv mcp-server-mysql
480 | ```
481 | Thanks to @lizhuangs
482 |
483 | ## Contributing
484 |
485 | Contributions are welcome! Please feel free to submit a Pull Request to
486 | https://github.com/benborla/mcp-server-mysql
487 |
488 | ## Many Thanks to the following Contributors:
489 |
490 |
491 |
492 |
493 | ### Development Setup
494 |
495 | 1. Clone the repository
496 | 2. Install dependencies: `pnpm install`
497 | 3. Build the project: `pnpm run build`
498 | 4. Run tests: `pnpm test`
499 |
500 | ### Project Roadmap
501 |
502 | We're actively working on enhancing this MCP server. Check our [CHANGELOG.md](./CHANGELOG.md) for details on planned features, including:
503 |
504 | - Enhanced query capabilities with prepared statements
505 | - Advanced security features
506 | - Performance optimizations
507 | - Comprehensive monitoring
508 | - Expanded schema information
509 |
510 | If you'd like to contribute to any of these areas, please check the issues on GitHub or open a new one to discuss your ideas.
511 |
512 | ### Submitting Changes
513 |
514 | 1. Fork the repository
515 | 2. Create a feature branch: `git checkout -b feature/your-feature-name`
516 | 3. Commit your changes: `git commit -am 'Add some feature'`
517 | 4. Push to the branch: `git push origin feature/your-feature-name`
518 | 5. Submit a pull request
519 |
520 | ## License
521 |
522 | This MCP server is licensed under the MIT License. See the LICENSE file for details.
523 |
--------------------------------------------------------------------------------
/RELEASE.md:
--------------------------------------------------------------------------------
1 | # Release Notes
2 |
3 | ## Version 2.0.0
4 |
5 | ### Major New Features 🎉
6 |
7 | #### Multi-DB Support
8 | - Added support for managing multiple databases through a single MCP server instance
9 | - Detailed documentation available in `README-MULTI-DB.md`
10 | - Special thanks to [@saarthak-gupta-architect](https://github.com/saarthak-gupta-architect) for this contribution
11 | - Features include:
12 | - Dynamic database switching
13 | - Cross-database operations
14 | - Schema-specific permissions
15 | - Safe multi-DB mode with optional write protection
16 |
17 | ### Improvements 🔧
18 |
19 | #### Enhanced Logging System
20 | - Added optional logging system controlled by `ENABLE_LOGGING` environment variable
21 | - Log levels: info and error
22 | - Improved debugging capabilities while maintaining clean output when needed
23 | - Set `ENABLE_LOGGING=1` to enable logging; leave it unset or set it to any other value to disable it
24 |
25 | #### Query Handling Improvements
26 | - Standardized query case handling to prevent column case sensitivity issues
27 | - All SQL queries are now automatically converted to lowercase before execution
28 | - Improved reliability when working with different MySQL configurations
29 | - Better handling of case-sensitive identifiers
30 |
31 | ### Breaking Changes ⚠️
32 |
33 | - Query handling now consistently uses lowercase, which might affect case-sensitive database configurations
34 | - Logging output format has changed - applications parsing server output may need updates
35 | - Multi-DB support introduces new configuration options that might require updates to existing deployment scripts
36 |
37 | ### Migration Guide 🔄
38 |
39 | 1. Update Environment Variables:
40 | ```bash
41 | # Optional: Enable logging
42 | ENABLE_LOGGING=1
43 |
44 | # For Multi-DB support (optional)
45 | MULTI_DB_WRITE_MODE=true # Enable write operations in multi-DB mode
46 | ```
47 |
48 | 2. Review any case-sensitive database operations in your queries
49 | 3. Update any scripts that parse server output to handle the new logging format
50 | 4. Check `README-MULTI-DB.md` for multi-database setup instructions if needed
51 |
52 | ### Contributors 👥
53 |
54 | Special thanks to:
55 | - [@saarthak-gupta-architect](https://github.com/saarthak-gupta-architect) - Multi-DB Support
--------------------------------------------------------------------------------
/assets/demo.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/benborla/mcp-server-mysql/978479d6d589bc328c2fdade5ac9a41bbd582ff8/assets/demo.gif
--------------------------------------------------------------------------------
/evals.ts:
--------------------------------------------------------------------------------
1 | //evals.ts
2 |
3 | import { EvalConfig } from 'mcp-evals';
4 | import { openai } from "@ai-sdk/openai";
5 | import { grade, EvalFunction } from "mcp-evals";
6 |
7 | const mysqlQueryToolEval: EvalFunction = {
8 | name: 'mysql_query Tool Evaluation',
9 | description: 'Evaluates the MySQL query execution functionality',
10 | run: async () => {
11 | const result = await grade(openai("gpt-4"), "Please execute the following SQL query and return the results: SELECT * FROM employees WHERE status='ACTIVE';");
12 | return JSON.parse(result);
13 | }
14 | };
15 |
16 | const mysqlQueryGenerationEval: EvalFunction = {
17 | name: 'mysql_query Tool Generation Evaluation',
18 | description: 'Evaluates the MySQL query tool for correct SQL generation and execution',
19 | run: async () => {
20 | const result = await grade(openai("gpt-4"), "Use the mysql_query tool to select all rows from the 'users' table where isActive = 1. Provide the SQL query in the correct format.");
21 | return JSON.parse(result);
22 | }
23 | };
24 |
25 | const mysqlQueryColumnsEval: EvalFunction = {
26 | name: 'mysql_query Columns Evaluation',
27 | description: 'Evaluates the mysql_query tool for column selection',
28 | run: async () => {
29 | const result = await grade(openai("gpt-4"), "Please provide a SQL query to retrieve the id, name, and email columns for all records in the users table.");
30 | return JSON.parse(result);
31 | }
32 | };
33 |
34 | const config: EvalConfig = {
35 | model: openai("gpt-4"),
36 | evals: [mysqlQueryToolEval, mysqlQueryGenerationEval, mysqlQueryColumnsEval]
37 | };
38 |
39 | export default config;
40 |
41 | export const evals = [mysqlQueryToolEval, mysqlQueryGenerationEval, mysqlQueryColumnsEval];
--------------------------------------------------------------------------------
/index.ts:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | import { Server } from "@modelcontextprotocol/sdk/server/index.js";
4 | import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
5 | import {
6 | CallToolRequestSchema,
7 | ListResourcesRequestSchema,
8 | ListToolsRequestSchema,
9 | ReadResourceRequestSchema,
10 | } from "@modelcontextprotocol/sdk/types.js";
11 | import { log } from "./src/utils/index.js";
12 | import type { TableRow, ColumnRow } from "./src/types/index.js";
13 | import {
14 | ALLOW_DELETE_OPERATION,
15 | ALLOW_DDL_OPERATION,
16 | ALLOW_INSERT_OPERATION,
17 | ALLOW_UPDATE_OPERATION,
18 | SCHEMA_DELETE_PERMISSIONS,
19 | SCHEMA_DDL_PERMISSIONS,
20 | SCHEMA_INSERT_PERMISSIONS,
21 | SCHEMA_UPDATE_PERMISSIONS,
22 | isMultiDbMode,
23 | mcpConfig as config,
24 | MCP_VERSION as version,
25 | } from "./src/config/index.js";
26 | import {
27 | safeExit,
28 | getPool,
29 | executeQuery,
30 | executeReadOnlyQuery,
31 | poolPromise,
32 | } from "./src/db/index.js";
33 |
34 | log("info", `Starting MySQL MCP server v${version}...`);
35 |
36 | // Update tool description to include multi-DB mode and schema-specific permissions
37 | const toolVersion = `MySQL MCP Server [v${process.env.npm_package_version}]`;
38 | let toolDescription = `[${toolVersion}] Run SQL queries against MySQL database`;
39 |
40 | if (isMultiDbMode) {
41 | toolDescription += " (Multi-DB mode enabled)";
42 | }
43 |
44 | if (
45 | ALLOW_INSERT_OPERATION ||
46 | ALLOW_UPDATE_OPERATION ||
47 | ALLOW_DELETE_OPERATION ||
48 | ALLOW_DDL_OPERATION
49 | ) {
50 | // At least one write operation is enabled
51 | toolDescription += " with support for:";
52 |
53 | if (ALLOW_INSERT_OPERATION) {
54 | toolDescription += " INSERT,";
55 | }
56 |
57 | if (ALLOW_UPDATE_OPERATION) {
58 | toolDescription += " UPDATE,";
59 | }
60 |
61 | if (ALLOW_DELETE_OPERATION) {
62 | toolDescription += " DELETE,";
63 | }
64 |
65 | if (ALLOW_DDL_OPERATION) {
66 | toolDescription += " DDL,";
67 | }
68 |
69 | // Remove trailing comma and add READ operations
70 | toolDescription = toolDescription.replace(/,$/, "") + " and READ operations";
71 |
72 | if (
73 | Object.keys(SCHEMA_INSERT_PERMISSIONS).length > 0 ||
74 | Object.keys(SCHEMA_UPDATE_PERMISSIONS).length > 0 ||
75 | Object.keys(SCHEMA_DELETE_PERMISSIONS).length > 0 ||
76 | Object.keys(SCHEMA_DDL_PERMISSIONS).length > 0
77 | ) {
78 | toolDescription += " (Schema-specific permissions enabled)";
79 | }
80 | } else {
81 | // Only read operations are allowed
82 | toolDescription += " (READ-ONLY)";
83 | }
84 |
85 | // @INFO: Add debug logging for configuration
86 | log(
87 | "info",
88 | "MySQL Configuration:",
89 | JSON.stringify(
90 | {
91 | ...(process.env.MYSQL_SOCKET_PATH
92 | ? {
93 | socketPath: process.env.MYSQL_SOCKET_PATH,
94 | connectionType: "Unix Socket",
95 | }
96 | : {
97 | host: process.env.MYSQL_HOST || "127.0.0.1",
98 | port: process.env.MYSQL_PORT || "3306",
99 | connectionType: "TCP/IP",
100 | }),
101 | user: config.mysql.user,
102 | password: config.mysql.password ? "******" : "not set",
103 | database: config.mysql.database || "MULTI_DB_MODE",
104 | ssl: process.env.MYSQL_SSL === "true" ? "enabled" : "disabled",
105 | multiDbMode: isMultiDbMode ? "enabled" : "disabled",
106 | },
107 | null,
108 | 2,
109 | ),
110 | );
111 |
112 | // @INFO: Lazy load server instance
112 | let serverInstance: Promise<Server> | null = null;
113 | const getServer = (): Promise<Server> => {
115 | if (!serverInstance) {
116 | serverInstance = new Promise<Server>((resolve) => {
117 | const server = new Server(config.server, {
118 | capabilities: {
119 | resources: {},
120 | tools: {
121 | mysql_query: {
122 | description: toolDescription,
123 | inputSchema: {
124 | type: "object",
125 | properties: {
126 | sql: {
127 | type: "string",
128 | description: "The SQL query to execute",
129 | },
130 | },
131 | required: ["sql"],
132 | },
133 | },
134 | },
135 | },
136 | });
137 |
138 | // @INFO: Register request handlers
139 | server.setRequestHandler(ListResourcesRequestSchema, async () => {
140 | try {
141 | log("info", "Handling ListResourcesRequest");
142 | const connectionInfo = process.env.MYSQL_SOCKET_PATH
143 | ? `socket:${process.env.MYSQL_SOCKET_PATH}`
144 | : `${process.env.MYSQL_HOST || "127.0.0.1"}:${process.env.MYSQL_PORT || "3306"}`;
145 |
146 | // If we're in multi-DB mode, list all databases first
147 | if (isMultiDbMode) {
148 | const databases = (await executeQuery("SHOW DATABASES")) as {
149 | Database: string;
150 | }[];
151 |
152 | // For each database, list tables
153 | let allResources = [];
154 |
155 | for (const db of databases) {
156 | // Skip system databases
157 | if (
158 | [
159 | "information_schema",
160 | "mysql",
161 | "performance_schema",
162 | "sys",
163 | ].includes(db.Database)
164 | ) {
165 | continue;
166 | }
167 |
168 | const tables = (await executeQuery(
169 | `SELECT table_name FROM information_schema.tables WHERE table_schema = '${db.Database}'`,
170 | )) as TableRow[];
171 |
172 | allResources.push(
173 | ...tables.map((row: TableRow) => ({
174 | uri: new URL(
175 | `${db.Database}/${row.table_name}/${config.paths.schema}`,
176 | connectionInfo,
177 | ).href,
178 | mimeType: "application/json",
179 | name: `"${db.Database}.${row.table_name}" database schema`,
180 | })),
181 | );
182 | }
183 |
184 | return {
185 | resources: allResources,
186 | };
187 | } else {
188 | // Original behavior for single database mode
189 | const results = (await executeQuery(
190 | "SELECT table_name FROM information_schema.tables WHERE table_schema = DATABASE()",
191 | )) as TableRow[];
192 |
193 | return {
194 | resources: results.map((row: TableRow) => ({
195 | uri: new URL(
196 | `${row.table_name}/${config.paths.schema}`,
197 | connectionInfo,
198 | ).href,
199 | mimeType: "application/json",
200 | name: `"${row.table_name}" database schema`,
201 | })),
202 | };
203 | }
204 | } catch (error) {
205 | log("error", "Error in ListResourcesRequest handler:", error);
206 | throw error;
207 | }
208 | });
209 |
210 | server.setRequestHandler(ReadResourceRequestSchema, async (request) => {
211 | try {
212 | log("error", "Handling ReadResourceRequest");
213 | const resourceUrl = new URL(request.params.uri);
214 | const pathComponents = resourceUrl.pathname.split("/");
215 | const schema = pathComponents.pop();
216 | const tableName = pathComponents.pop();
217 | let dbName = null;
218 |
219 | // In multi-DB mode, we expect a database name in the path
220 | if (isMultiDbMode && pathComponents.length > 0) {
221 | dbName = pathComponents.pop() || null;
222 | }
223 |
224 | if (schema !== config.paths.schema) {
225 | throw new Error("Invalid resource URI");
226 | }
227 |
228 | // Modify query to include schema information
229 | let columnsQuery =
230 | "SELECT column_name, data_type FROM information_schema.columns WHERE table_name = ?";
231 | let queryParams = [tableName as string];
232 |
233 | if (dbName) {
234 | columnsQuery += " AND table_schema = ?";
235 | queryParams.push(dbName);
236 | }
237 |
238 | const results = (await executeQuery(
239 | columnsQuery,
240 | queryParams,
241 | )) as ColumnRow[];
242 |
243 | return {
244 | contents: [
245 | {
246 | uri: request.params.uri,
247 | mimeType: "application/json",
248 | text: JSON.stringify(results, null, 2),
249 | },
250 | ],
251 | };
252 | } catch (error) {
253 | log("error", "Error in ReadResourceRequest handler:", error);
254 | throw error;
255 | }
256 | });
257 |
258 | server.setRequestHandler(ListToolsRequestSchema, async () => {
259 | log("error", "Handling ListToolsRequest");
260 |
261 | const toolsResponse = {
262 | tools: [
263 | {
264 | name: "mysql_query",
265 | description: toolDescription,
266 | inputSchema: {
267 | type: "object",
268 | properties: {
269 | sql: {
270 | type: "string",
271 | description: "The SQL query to execute",
272 | },
273 | },
274 | required: ["sql"],
275 | },
276 | },
277 | ],
278 | };
279 |
280 | log(
281 | "error",
282 | "ListToolsRequest response:",
283 | JSON.stringify(toolsResponse, null, 2),
284 | );
285 | return toolsResponse;
286 | });
287 |
288 | server.setRequestHandler(CallToolRequestSchema, async (request) => {
289 | try {
290 | log("error", "Handling CallToolRequest:", request.params.name);
291 | if (request.params.name !== "mysql_query") {
292 | throw new Error(`Unknown tool: ${request.params.name}`);
293 | }
294 |
295 | const sql = request.params.arguments?.sql as string;
296 | return executeReadOnlyQuery(sql);
297 | } catch (error) {
298 | log("error", "Error in CallToolRequest handler:", error);
299 | throw error;
300 | }
301 | });
302 |
303 | resolve(server);
304 | });
305 | }
306 | return serverInstance;
307 | };
308 |
309 | // @INFO: Server startup and shutdown
310 | async function runServer(): Promise<void> {
311 | try {
312 | log("info", "Attempting to test database connection...");
313 | // @INFO: Test the connection before fully starting the server
314 | const pool = await getPool();
315 | const connection = await pool.getConnection();
316 | log("info", "Database connection test successful");
317 | connection.release();
318 |
319 | const server = await getServer();
320 | const transport = new StdioServerTransport();
321 | await server.connect(transport);
322 | } catch (error) {
323 | log("error", "Fatal error during server startup:", error);
324 | safeExit(1);
325 | }
326 | }
327 |
328 | const shutdown = async (signal: string): Promise<void> => {
329 | log("error", `Received ${signal}. Shutting down...`);
330 | try {
331 | // @INFO: Only attempt to close the pool if it was created
332 | if (poolPromise) {
333 | const pool = await poolPromise;
334 | await pool.end();
335 | }
336 | } catch (err) {
337 | log("error", "Error closing pool:", err);
338 | throw err;
339 | }
340 | };
341 |
342 | process.on("SIGINT", async () => {
343 | try {
344 | await shutdown("SIGINT");
345 | process.exit(0);
346 | } catch (err) {
347 | log("error", "Error during SIGINT shutdown:", err);
348 | safeExit(1);
349 | }
350 | });
351 |
352 | process.on("SIGTERM", async () => {
353 | try {
354 | await shutdown("SIGTERM");
355 | process.exit(0);
356 | } catch (err) {
357 | log("error", "Error during SIGTERM shutdown:", err);
358 | safeExit(1);
359 | }
360 | });
361 |
362 | // @INFO: Add unhandled error listeners
363 | process.on("uncaughtException", (error) => {
364 | log("error", "Uncaught exception:", error);
365 | safeExit(1);
366 | });
367 |
368 | process.on("unhandledRejection", (reason, promise) => {
369 | log("error", "Unhandled rejection at:", promise, "reason:", reason);
370 | safeExit(1);
371 | });
372 |
373 | runServer().catch((error: unknown) => {
374 | log("error", "Server error:", error);
375 | safeExit(1);
376 | });
377 |
--------------------------------------------------------------------------------
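For orientation, here is a minimal sketch of the request shape the CallToolRequest handler above reads from; the field names come from the handler code, while the concrete values are invented for illustration.

// Illustrative only: what request.params must carry for the mysql_query tool.
const exampleRequest = {
  params: {
    name: "mysql_query",                  // must match the registered tool name
    arguments: { sql: "SELECT 1 AS ok" }, // request.params.arguments?.sql is forwarded to executeReadOnlyQuery
  },
};
// The handler resolves to executeReadOnlyQuery's result, shaped like:
// { content: [{ type: "text", text: "..." }], isError: false }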
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@benborla29/mcp-server-mysql",
3 | "version": "2.0.2",
4 | "description": "MCP server for interacting with MySQL databases based on Node",
5 | "license": "MIT",
6 | "author": "Ben Borla (https://github.com/benborla)",
7 | "type": "module",
8 | "main": "dist/index.js",
9 | "preferGlobal": true,
10 | "bin": {
11 | "mcp-server-mysql": "dist/index.js"
12 | },
13 | "files": [
14 | "dist",
15 | "README.md",
16 | "assets"
17 | ],
18 | "scripts": {
19 | "start": "node dist/index.js",
20 | "dev": "ts-node index.ts",
21 | "build": "tsc && shx chmod +x dist/*.js",
22 | "prepare": "npm run build",
23 | "watch": "tsc --watch",
24 | "setup:test:db": "tsx scripts/setup-test-db.ts",
25 | "pretest": "pnpm run setup:test:db",
26 | "test": "pnpm run setup:test:db && vitest run",
27 | "test:socket": "pnpm run setup:test:db && vitest run tests/integration/socket-connection.test.ts",
28 | "test:watch": "pnpm run setup:test:db && vitest",
29 | "test:coverage": "vitest run --coverage",
30 | "test:unit": "vitest run --config vitest.unit.config.ts",
31 | "test:integration": "vitest run --config vitest.integration.config.ts",
32 | "test:e2e": "vitest run --config vitest.e2e.config.ts",
33 | "stdio": "node dist/index.js --stdio",
34 | "exec": " pnpm build && npx node --env-file=.env dist/index.js"
35 | },
36 | "dependencies": {
37 | "@ai-sdk/openai": "^1.3.22",
38 | "@modelcontextprotocol/sdk": "1.8.0",
39 | "dotenv": "^16.5.0",
40 | "mcp-evals": "^1.0.18",
41 | "mysql2": "^3.14.1",
42 | "node-sql-parser": "^5.3.9"
43 | },
44 | "devDependencies": {
45 | "@types/jest": "^29.5.14",
46 | "@types/node": "^20.17.50",
47 | "@typescript-eslint/parser": "^7.18.0",
48 | "eslint": "^9.27.0",
49 | "shx": "^0.3.4",
50 | "ts-node": "^10.9.2",
51 | "tslib": "^2.8.1",
52 | "tsx": "^4.19.4",
53 | "typescript": "^5.8.3",
54 | "vitest": "^1.6.1"
55 | },
56 | "publishConfig": {
57 | "access": "public"
58 | },
59 | "keywords": [
60 | "node",
61 | "mcp",
62 | "ai",
63 | "cursor",
64 | "mcp-server",
65 | "modelcontextprotocol",
66 | "smithery",
67 | "mcp-get",
68 | "mcp-put",
69 | "mcp-post",
70 | "mcp-delete",
71 | "mcp-patch",
72 | "mcp-options",
73 | "mcp-head"
74 | ]
75 | }
76 |
--------------------------------------------------------------------------------
/scripts/setup-test-db.ts:
--------------------------------------------------------------------------------
1 | import * as mysql2 from 'mysql2/promise';
2 | import * as dotenv from 'dotenv';
3 | import { fileURLToPath } from 'url';
4 | import { dirname, resolve } from 'path';
5 |
6 | const __filename = fileURLToPath(import.meta.url);
7 | const __dirname = dirname(__filename);
8 |
9 | // Load test environment variables
10 | dotenv.config({ path: resolve(__dirname, '../.env.test') });
11 |
12 | // Logging configuration
13 | const ENABLE_LOGGING = process.env.ENABLE_LOGGING === '1'
14 |
15 | type LogType = 'info' | 'error'
16 |
17 | function log(type: LogType = 'info', ...args: any[]): void {
18 | if (!ENABLE_LOGGING) return
19 |
20 | switch (type) {
21 | case 'info':
22 | console.info(...args)
23 | break
24 | case 'error':
25 | console.error(...args)
26 | break
27 | default:
28 | console.log(...args)
29 | }
30 | }
31 |
32 | async function setupTestDatabase() {
33 | // Create connection config (the password falls back to 'root' when not specified)
34 | const config: any = {
35 | // Use Unix socket if provided, otherwise use host/port
36 | ...(process.env.MYSQL_SOCKET_PATH
37 | ? {
38 | socketPath: process.env.MYSQL_SOCKET_PATH,
39 | }
40 | : {
41 | host: process.env.MYSQL_HOST || '127.0.0.1',
42 | port: Number(process.env.MYSQL_PORT) || 3306,
43 | }
44 | ),
45 | user: process.env.MYSQL_USER || 'root',
46 | password: process.env.MYSQL_PASS || 'root', // Default to 'root' if not specified
47 | multipleStatements: true
48 | };
49 |
50 | // First connect without database to create it if needed
51 | const connection = await mysql2.createConnection(config);
52 |
53 | // Use a unique database name for tests to avoid conflicts with existing tables
54 | const dbName = process.env.MYSQL_DB || 'mcp_test_db';
55 |
56 | try {
57 | // Create database if it doesn't exist
58 | await connection.query(`CREATE DATABASE IF NOT EXISTS ${dbName}`);
59 |
60 | // Switch to the test database
61 | await connection.query(`USE ${dbName}`);
62 |
63 | // Temporarily disable foreign key checks to allow dropping tables
64 | await connection.query('SET FOREIGN_KEY_CHECKS = 0');
65 |
66 | // Create test tables
67 | await connection.query(`
68 | DROP TABLE IF EXISTS posts;
69 | DROP TABLE IF EXISTS users;
70 | DROP TABLE IF EXISTS test_table;
71 |
72 | CREATE TABLE users (
73 | id INT PRIMARY KEY AUTO_INCREMENT,
74 | name VARCHAR(255) NOT NULL,
75 | email VARCHAR(255) UNIQUE NOT NULL,
76 | created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
77 | );
78 |
79 | CREATE TABLE posts (
80 | id INT PRIMARY KEY AUTO_INCREMENT,
81 | user_id INT NOT NULL,
82 | title VARCHAR(255) NOT NULL,
83 | content TEXT,
84 | created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
85 | FOREIGN KEY (user_id) REFERENCES users(id)
86 | );
87 |
88 | CREATE TABLE test_table (
89 | id INT PRIMARY KEY AUTO_INCREMENT,
90 | name VARCHAR(255) NOT NULL,
91 | created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
92 | );
93 |
94 | -- Insert test users
95 | INSERT INTO users (name, email) VALUES
96 | ('Test User 1', 'test1@example.com'),
97 | ('Test User 2', 'test2@example.com'),
98 | ('Test User 3', 'test3@example.com');
99 |
100 | -- Insert test posts
101 | INSERT INTO posts (user_id, title, content) VALUES
102 | (1, 'First Post', 'Content of first post'),
103 | (1, 'Second Post', 'Content of second post'),
104 | (2, 'Another Post', 'Content of another post');
105 |
106 | -- Insert test data
107 | INSERT INTO test_table (name) VALUES
108 | ('Test 1'),
109 | ('Test 2'),
110 | ('Test 3');
111 | `);
112 |
113 | // Re-enable foreign key checks
114 | await connection.query('SET FOREIGN_KEY_CHECKS = 1');
115 |
116 | log('info', 'Test database setup completed successfully');
117 | } catch (error) {
118 | log('error', 'Error setting up test database:', error);
119 | if (process.env.CI) {
120 | log('error', 'Database setup failed, but continuing with tests:', error.message);
121 | } else {
122 | throw error;
123 | }
124 | } finally {
125 | await connection.end();
126 | }
127 | }
128 |
129 | // Run the setup but don't exit on error
130 | setupTestDatabase().catch(error => {
131 | console.error('Database setup failed, but continuing with tests:', error.message);
132 | });
--------------------------------------------------------------------------------
/smithery.yaml:
--------------------------------------------------------------------------------
1 | # Smithery configuration file: https://smithery.ai/docs/config#smitheryyaml
2 | startCommand:
3 | type: stdio
4 | configSchema:
5 | # JSON Schema defining the configuration options for the MCP.
6 | type: object
7 | required: []
8 | properties:
9 | mysqlSocketPath:
10 | type: string
11 | description: Unix socket path for local MySQL connections. If provided, host and port are ignored.
12 | mysqlHost:
13 | type: string
14 | description: The host address of the MySQL database. Ignored if socket path is provided.
15 | default: "127.0.0.1"
16 | mysqlPort:
17 | type: string
18 | description: The port number for connecting to MySQL. Ignored if socket path is provided.
19 | default: "3306"
20 | mysqlUser:
21 | type: string
22 | description: The username for MySQL authentication.
23 | default: "root"
24 | mysqlPass:
25 | type: string
26 | description: The password for the specified MySQL user.
27 | mysqlDb:
28 | type: string
29 | description: The database name to connect to. Leave blank to run in multi-DB mode with access to all databases.
30 | default: "db"
31 | ssl:
32 | type: boolean
33 | default: false
34 | description: If set to true, the connection will use SSL.
35 | rejectUnauthorizedSSL:
36 | type: boolean
37 | default: false
38 | description: If set to true, the connection will verify the SSL certificate.
39 | allowInsertOperation:
40 | type: boolean
41 | default: true
42 | description: If set to true, INSERT operations will be allowed.
43 | allowUpdateOperation:
44 | type: boolean
45 | default: true
46 | description: If set to true, UPDATE operations will be allowed.
47 | allowDeleteOperation:
48 | type: boolean
49 | default: false
50 | description: If set to true, DELETE operations will be allowed.
51 | commandFunction:
52 | |-
53 | (config) => ({ "command": "node", "args": ["dist/index.js"], "env": { "MYSQL_SOCKET_PATH": config.mysqlSocketPath, "MYSQL_HOST": config.mysqlHost, "MYSQL_PORT": config.mysqlPort, "MYSQL_USER": config.mysqlUser, "MYSQL_PASS": config.mysqlPass, "MYSQL_DB": config.mysqlDb, "MYSQL_SSL": config.ssl, "MYSQL_SSL_REJECT_UNAUTHORIZED": config.rejectUnauthorizedSSL, "ALLOW_INSERT_OPERATION": config.allowInsertOperation, "ALLOW_UPDATE_OPERATION": config.allowUpdateOperation, "ALLOW_DELETE_OPERATION": config.allowDeleteOperation } })
--------------------------------------------------------------------------------
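As a rough illustration, the commandFunction in smithery.yaml above maps a Smithery config object onto the server's environment; the sketch below mirrors a subset of that mapping with invented values and is not part of the repository.

// Sketch only: subset of the launch spec commandFunction produces; values are hypothetical.
const launchSpec = {
  command: "node",
  args: ["dist/index.js"],
  env: {
    MYSQL_HOST: "127.0.0.1",        // config.mysqlHost
    MYSQL_PORT: "3306",             // config.mysqlPort
    MYSQL_USER: "root",             // config.mysqlUser
    MYSQL_PASS: "example-password", // config.mysqlPass (hypothetical)
    MYSQL_DB: "",                   // empty string enables multi-DB mode
    ALLOW_INSERT_OPERATION: false,  // config.allowInsertOperation
  },
};
console.log(launchSpec);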
/src/config/index.ts:
--------------------------------------------------------------------------------
1 | import * as dotenv from "dotenv";
2 | import { SchemaPermissions } from "../types/index.js";
3 | import { parseSchemaPermissions } from "../utils/index.js";
4 |
5 | export const MCP_VERSION = "2.0.2";
6 |
7 | // @INFO: Load environment variables from .env file
8 | dotenv.config();
9 |
10 | // @INFO: Update the environment setup to ensure database is correctly set
11 | if (process.env.NODE_ENV === "test" && !process.env.MYSQL_DB) {
12 | process.env.MYSQL_DB = "mcp_test_db"; // @INFO: Ensure we have a database name for tests
13 | }
14 |
15 | // Write operation flags (global defaults)
16 | export const ALLOW_INSERT_OPERATION =
17 | process.env.ALLOW_INSERT_OPERATION === "true";
18 | export const ALLOW_UPDATE_OPERATION =
19 | process.env.ALLOW_UPDATE_OPERATION === "true";
20 | export const ALLOW_DELETE_OPERATION =
21 | process.env.ALLOW_DELETE_OPERATION === "true";
22 | export const ALLOW_DDL_OPERATION = process.env.ALLOW_DDL_OPERATION === "true";
23 |
24 | // Schema-specific permissions
25 | export const SCHEMA_INSERT_PERMISSIONS: SchemaPermissions =
26 | parseSchemaPermissions(process.env.SCHEMA_INSERT_PERMISSIONS);
27 | export const SCHEMA_UPDATE_PERMISSIONS: SchemaPermissions =
28 | parseSchemaPermissions(process.env.SCHEMA_UPDATE_PERMISSIONS);
29 | export const SCHEMA_DELETE_PERMISSIONS: SchemaPermissions =
30 | parseSchemaPermissions(process.env.SCHEMA_DELETE_PERMISSIONS);
31 | export const SCHEMA_DDL_PERMISSIONS: SchemaPermissions = parseSchemaPermissions(
32 | process.env.SCHEMA_DDL_PERMISSIONS,
33 | );
34 |
35 | // Check if we're in multi-DB mode (no specific DB set)
36 | export const isMultiDbMode =
37 | !process.env.MYSQL_DB || process.env.MYSQL_DB.trim() === "";
38 |
39 | export const mcpConfig = {
40 | server: {
41 | name: "@benborla29/mcp-server-mysql",
42 | version: MCP_VERSION,
43 | connectionTypes: ["stdio"],
44 | },
45 | mysql: {
46 | // Use Unix socket if provided, otherwise use host/port
47 | ...(process.env.MYSQL_SOCKET_PATH
48 | ? {
49 | socketPath: process.env.MYSQL_SOCKET_PATH,
50 | }
51 | : {
52 | host: process.env.MYSQL_HOST || "127.0.0.1",
53 | port: Number(process.env.MYSQL_PORT || "3306"),
54 | }),
55 | user: process.env.MYSQL_USER || "root",
56 | password:
57 | process.env.MYSQL_PASS === undefined ? "" : process.env.MYSQL_PASS,
58 | database: process.env.MYSQL_DB || undefined, // Allow undefined database for multi-DB mode
59 | connectionLimit: 10,
60 | authPlugins: {
61 | mysql_clear_password: () => () =>
62 | Buffer.from(process.env.MYSQL_PASS || "root"),
63 | },
64 | ...(process.env.MYSQL_SSL === "true"
65 | ? {
66 | ssl: {
67 | rejectUnauthorized:
68 | process.env.MYSQL_SSL_REJECT_UNAUTHORIZED === "true",
69 | },
70 | }
71 | : {}),
72 | },
73 | paths: {
74 | schema: "schema",
75 | },
76 | };
77 |
--------------------------------------------------------------------------------
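A quick sketch of how the connection block in src/config/index.ts resolves; the import path and environment values are assumptions made for illustration.

// Minimal sketch: inspect the resolved config (run with the relevant env vars set).
import { mcpConfig, isMultiDbMode } from "./src/config/index.js"; // path assumed from the project root

// With MYSQL_SOCKET_PATH set, mcpConfig.mysql contains socketPath and omits host/port;
// with only MYSQL_HOST/MYSQL_PORT set, it contains host and port instead.
// With MYSQL_DB unset or empty, database is undefined and isMultiDbMode is true.
console.log(mcpConfig.mysql, { isMultiDbMode });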
/src/db/index.ts:
--------------------------------------------------------------------------------
1 | import { isMultiDbMode } from "./../config/index.js";
2 |
3 | import {
4 | isDDLAllowedForSchema,
5 | isInsertAllowedForSchema,
6 | isUpdateAllowedForSchema,
7 | isDeleteAllowedForSchema,
8 | } from "./permissions.js";
9 | import { extractSchemaFromQuery, getQueryTypes } from "./utils.js";
10 |
11 | import * as mysql2 from "mysql2/promise";
12 | import { log } from "./../utils/index.js";
13 | import { mcpConfig as config } from "./../config/index.js";
14 |
15 | // Warn that multi-DB mode defaults to read-only unless MULTI_DB_WRITE_MODE is explicitly enabled
16 | if (isMultiDbMode && process.env.MULTI_DB_WRITE_MODE !== "true") {
17 | log("error", "Multi-DB mode detected - enabling read-only mode for safety");
18 | }
19 |
20 | // @INFO: Check if running in test mode
21 | const isTestEnvironment = process.env.NODE_ENV === "test" || process.env.VITEST;
22 |
23 | // @INFO: Safe way to exit process (not during tests)
24 | function safeExit(code: number): void {
25 | if (!isTestEnvironment) {
26 | process.exit(code);
27 | } else {
28 | log("error", `[Test mode] Would have called process.exit(${code})`);
29 | }
30 | }
31 |
32 | // @INFO: Lazy load MySQL pool
33 | let poolPromise: Promise<mysql2.Pool>;
34 |
35 | const getPool = (): Promise<mysql2.Pool> => {
36 | if (!poolPromise) {
37 | poolPromise = new Promise<mysql2.Pool>((resolve, reject) => {
38 | try {
39 | const pool = mysql2.createPool(config.mysql);
40 | log("info", "MySQL pool created successfully");
41 | resolve(pool);
42 | } catch (error) {
43 | log("error", "Error creating MySQL pool:", error);
44 | reject(error);
45 | }
46 | });
47 | }
48 | return poolPromise;
49 | };
50 |
51 | async function executeQuery<T>(sql: string, params: string[] = []): Promise<T> {
52 | let connection;
53 | try {
54 | const pool = await getPool();
55 | connection = await pool.getConnection();
56 | const result = await connection.query(sql, params);
57 | return (Array.isArray(result) ? result[0] : result) as T;
58 | } catch (error) {
59 | log("error", "Error executing query:", error);
60 | throw error;
61 | } finally {
62 | if (connection) {
63 | connection.release();
64 | log("error", "Connection released");
65 | }
66 | }
67 | }
68 |
69 | // @INFO: New function to handle write operations
70 | async function executeWriteQuery<T>(sql: string): Promise<T> {
71 | let connection;
72 | try {
73 | const pool = await getPool();
74 | connection = await pool.getConnection();
75 | log("error", "Write connection acquired");
76 |
77 | // Extract schema for permissions (if needed)
78 | const schema = extractSchemaFromQuery(sql);
79 |
80 | // @INFO: Begin transaction for write operation
81 | await connection.beginTransaction();
82 |
83 | try {
84 | // @INFO: Execute the write query
85 | const result = await connection.query(sql);
86 | const response = Array.isArray(result) ? result[0] : result;
87 |
88 | // @INFO: Commit the transaction
89 | await connection.commit();
90 |
91 | // @INFO: Format the response based on operation type
92 | let responseText;
93 |
94 | // Check the type of query
95 | const queryTypes = await getQueryTypes(sql);
96 | const isUpdateOperation = queryTypes.some((type) =>
97 | ["update"].includes(type),
98 | );
99 | const isInsertOperation = queryTypes.some((type) =>
100 | ["insert"].includes(type),
101 | );
102 | const isDeleteOperation = queryTypes.some((type) =>
103 | ["delete"].includes(type),
104 | );
105 | const isDDLOperation = queryTypes.some((type) =>
106 | ["create", "alter", "drop", "truncate"].includes(type),
107 | );
108 |
109 | // @INFO: Type assertion for ResultSetHeader which has affectedRows, insertId, etc.
110 | if (isInsertOperation) {
111 | const resultHeader = response as mysql2.ResultSetHeader;
112 | responseText = `Insert successful on schema '${schema || "default"}'. Affected rows: ${resultHeader.affectedRows}, Last insert ID: ${resultHeader.insertId}`;
113 | } else if (isUpdateOperation) {
114 | const resultHeader = response as mysql2.ResultSetHeader;
115 | responseText = `Update successful on schema '${schema || "default"}'. Affected rows: ${resultHeader.affectedRows}, Changed rows: ${resultHeader.changedRows || 0}`;
116 | } else if (isDeleteOperation) {
117 | const resultHeader = response as mysql2.ResultSetHeader;
118 | responseText = `Delete successful on schema '${schema || "default"}'. Affected rows: ${resultHeader.affectedRows}`;
119 | } else if (isDDLOperation) {
120 | responseText = `DDL operation successful on schema '${schema || "default"}'.`;
121 | } else {
122 | responseText = JSON.stringify(response, null, 2);
123 | }
124 |
125 | return {
126 | content: [
127 | {
128 | type: "text",
129 | text: responseText,
130 | },
131 | ],
132 | isError: false,
133 | } as T;
134 | } catch (error: unknown) {
135 | // @INFO: Rollback on error
136 | log("error", "Error executing write query:", error);
137 | await connection.rollback();
138 |
139 | return {
140 | content: [
141 | {
142 | type: "text",
143 | text: `Error executing write operation: ${error instanceof Error ? error.message : String(error)}`,
144 | },
145 | ],
146 | isError: true,
147 | } as T;
148 | }
149 | } catch (error: unknown) {
150 | log("error", "Error in write operation transaction:", error);
151 | return {
152 | content: [
153 | {
154 | type: "text",
155 | text: `Database connection error: ${error instanceof Error ? error.message : String(error)}`,
156 | },
157 | ],
158 | isError: true,
159 | } as T;
160 | } finally {
161 | if (connection) {
162 | connection.release();
163 | log("error", "Write connection released");
164 | }
165 | }
166 | }
167 |
168 | async function executeReadOnlyQuery<T>(sql: string): Promise<T> {
169 | let connection;
170 | try {
171 | // Check the type of query
172 | const queryTypes = await getQueryTypes(sql);
173 |
174 | // Get schema for permission checking
175 | const schema = extractSchemaFromQuery(sql);
176 |
177 | const isUpdateOperation = queryTypes.some((type) =>
178 | ["update"].includes(type),
179 | );
180 | const isInsertOperation = queryTypes.some((type) =>
181 | ["insert"].includes(type),
182 | );
183 | const isDeleteOperation = queryTypes.some((type) =>
184 | ["delete"].includes(type),
185 | );
186 | const isDDLOperation = queryTypes.some((type) =>
187 | ["create", "alter", "drop", "truncate"].includes(type),
188 | );
189 |
190 | // Check schema-specific permissions
191 | if (isInsertOperation && !isInsertAllowedForSchema(schema)) {
192 | log(
193 | "error",
194 | `INSERT operations are not allowed for schema '${schema || "default"}'. Configure SCHEMA_INSERT_PERMISSIONS.`,
195 | );
196 | return {
197 | content: [
198 | {
199 | type: "text",
200 | text: `Error: INSERT operations are not allowed for schema '${schema || "default"}'. Ask the administrator to update SCHEMA_INSERT_PERMISSIONS.`,
201 | },
202 | ],
203 | isError: true,
204 | } as T;
205 | }
206 |
207 | if (isUpdateOperation && !isUpdateAllowedForSchema(schema)) {
208 | log(
209 | "error",
210 | `UPDATE operations are not allowed for schema '${schema || "default"}'. Configure SCHEMA_UPDATE_PERMISSIONS.`,
211 | );
212 | return {
213 | content: [
214 | {
215 | type: "text",
216 | text: `Error: UPDATE operations are not allowed for schema '${schema || "default"}'. Ask the administrator to update SCHEMA_UPDATE_PERMISSIONS.`,
217 | },
218 | ],
219 | isError: true,
220 | } as T;
221 | }
222 |
223 | if (isDeleteOperation && !isDeleteAllowedForSchema(schema)) {
224 | log(
225 | "error",
226 | `DELETE operations are not allowed for schema '${schema || "default"}'. Configure SCHEMA_DELETE_PERMISSIONS.`,
227 | );
228 | return {
229 | content: [
230 | {
231 | type: "text",
232 | text: `Error: DELETE operations are not allowed for schema '${schema || "default"}'. Ask the administrator to update SCHEMA_DELETE_PERMISSIONS.`,
233 | },
234 | ],
235 | isError: true,
236 | } as T;
237 | }
238 |
239 | if (isDDLOperation && !isDDLAllowedForSchema(schema)) {
240 | log(
241 | "error",
242 | `DDL operations are not allowed for schema '${schema || "default"}'. Configure SCHEMA_DDL_PERMISSIONS.`,
243 | );
244 | return {
245 | content: [
246 | {
247 | type: "text",
248 | text: `Error: DDL operations are not allowed for schema '${schema || "default"}'. Ask the administrator to update SCHEMA_DDL_PERMISSIONS.`,
249 | },
250 | ],
251 | isError: true,
252 | } as T;
253 | }
254 |
255 | // For write operations that are allowed, use executeWriteQuery
256 | if (
257 | (isInsertOperation && isInsertAllowedForSchema(schema)) ||
258 | (isUpdateOperation && isUpdateAllowedForSchema(schema)) ||
259 | (isDeleteOperation && isDeleteAllowedForSchema(schema)) ||
260 | (isDDLOperation && isDDLAllowedForSchema(schema))
261 | ) {
262 | return executeWriteQuery<T>(sql);
263 | }
264 |
265 | // For read-only operations, continue with the original logic
266 | const pool = await getPool();
267 | connection = await pool.getConnection();
268 | log("error", "Read-only connection acquired");
269 |
270 | // Set read-only mode
271 | await connection.query("SET SESSION TRANSACTION READ ONLY");
272 |
273 | // Begin transaction
274 | await connection.beginTransaction();
275 |
276 | try {
277 | // Execute query - in multi-DB mode, we may need to handle USE statements specially
278 | const result = await connection.query(sql);
279 | const rows = Array.isArray(result) ? result[0] : result;
280 |
281 | // Rollback transaction (since it's read-only)
282 | await connection.rollback();
283 |
284 | // Reset to read-write mode
285 | await connection.query("SET SESSION TRANSACTION READ WRITE");
286 |
287 | return {
288 | content: [
289 | {
290 | type: "text",
291 | text: JSON.stringify(rows, null, 2),
292 | },
293 | ],
294 | isError: false,
295 | } as T;
296 | } catch (error) {
297 | // Rollback transaction on query error
298 | log("error", "Error executing read-only query:", error);
299 | await connection.rollback();
300 | throw error;
301 | }
302 | } catch (error) {
303 | // Ensure we rollback and reset transaction mode on any error
304 | log("error", "Error in read-only query transaction:", error);
305 | try {
306 | if (connection) {
307 | await connection.rollback();
308 | await connection.query("SET SESSION TRANSACTION READ WRITE");
309 | }
310 | } catch (cleanupError) {
311 | // Ignore errors during cleanup
312 | log("error", "Error during cleanup:", cleanupError);
313 | }
314 | throw error;
315 | } finally {
316 | if (connection) {
317 | connection.release();
318 | log("error", "Read-only connection released");
319 | }
320 | }
321 | }
322 |
323 | export {
324 | isTestEnvironment,
325 | safeExit,
326 | executeQuery,
327 | getPool,
328 | executeWriteQuery,
329 | executeReadOnlyQuery,
330 | poolPromise,
331 | };
332 |
--------------------------------------------------------------------------------
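A minimal usage sketch for the exports of src/db/index.ts above; the wrapper function, import path, and queries are invented for the example, and the result type simply mirrors the shape the functions construct.

import { executeReadOnlyQuery } from "./src/db/index.js"; // path assumed

type ToolResult = { content: { type: string; text: string }[]; isError: boolean };

async function demo(): Promise<void> {
  // Plain SELECTs run inside a read-only transaction that is rolled back afterwards.
  const read = await executeReadOnlyQuery<ToolResult>("SELECT 1 AS ok");
  console.log(read.isError, read.content[0].text);

  // Write statements are classified via getQueryTypes first; they only reach
  // executeWriteQuery when the schema-specific (or global) permission checks allow
  // them, otherwise an isError result is returned without touching the database.
  const write = await executeReadOnlyQuery<ToolResult>(
    "INSERT INTO test_db.test_table (name) VALUES ('example')",
  );
  console.log(write.isError);
}

demo().catch(console.error);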
/src/db/permissions.ts:
--------------------------------------------------------------------------------
1 | import {
2 | ALLOW_DELETE_OPERATION,
3 | ALLOW_DDL_OPERATION,
4 | ALLOW_INSERT_OPERATION,
5 | ALLOW_UPDATE_OPERATION,
6 | SCHEMA_DELETE_PERMISSIONS,
7 | SCHEMA_DDL_PERMISSIONS,
8 | SCHEMA_INSERT_PERMISSIONS,
9 | SCHEMA_UPDATE_PERMISSIONS,
10 | } from "../config/index.js";
11 |
12 | // Schema permission checking functions
13 | function isInsertAllowedForSchema(schema: string | null): boolean {
14 | if (!schema) {
15 | return ALLOW_INSERT_OPERATION;
16 | }
17 | return schema in SCHEMA_INSERT_PERMISSIONS
18 | ? SCHEMA_INSERT_PERMISSIONS[schema]
19 | : ALLOW_INSERT_OPERATION;
20 | }
21 |
22 | function isUpdateAllowedForSchema(schema: string | null): boolean {
23 | if (!schema) {
24 | return ALLOW_UPDATE_OPERATION;
25 | }
26 | return schema in SCHEMA_UPDATE_PERMISSIONS
27 | ? SCHEMA_UPDATE_PERMISSIONS[schema]
28 | : ALLOW_UPDATE_OPERATION;
29 | }
30 |
31 | function isDeleteAllowedForSchema(schema: string | null): boolean {
32 | if (!schema) {
33 | return ALLOW_DELETE_OPERATION;
34 | }
35 | return schema in SCHEMA_DELETE_PERMISSIONS
36 | ? SCHEMA_DELETE_PERMISSIONS[schema]
37 | : ALLOW_DELETE_OPERATION;
38 | }
39 |
40 | function isDDLAllowedForSchema(schema: string | null): boolean {
41 | if (!schema) {
42 | return ALLOW_DDL_OPERATION;
43 | }
44 | return schema in SCHEMA_DDL_PERMISSIONS
45 | ? SCHEMA_DDL_PERMISSIONS[schema]
46 | : ALLOW_DDL_OPERATION;
47 | }
48 |
49 | export {
50 | isInsertAllowedForSchema,
51 | isUpdateAllowedForSchema,
52 | isDeleteAllowedForSchema,
53 | isDDLAllowedForSchema,
54 | };
55 |
--------------------------------------------------------------------------------
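A short sketch of how the helpers in src/db/permissions.ts resolve, assuming the process was started with ALLOW_INSERT_OPERATION=false and SCHEMA_INSERT_PERMISSIONS=test_db:true (example values only; import path assumed).

import { isInsertAllowedForSchema } from "./src/db/permissions.js"; // path assumed

console.log(isInsertAllowedForSchema("test_db"));  // true  - the schema-specific entry overrides the global flag
console.log(isInsertAllowedForSchema("other_db")); // false - no entry, so the global ALLOW_INSERT_OPERATION applies
console.log(isInsertAllowedForSchema(null));       // false - no schema resolved from the query, global flag applies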
/src/db/utils.ts:
--------------------------------------------------------------------------------
1 | import { isMultiDbMode } from "./../config/index.js";
2 | import { log } from "./../utils/index.js";
3 | import SqlParser, { AST } from "node-sql-parser";
4 |
5 | const { Parser } = SqlParser;
6 | const parser = new Parser();
7 |
8 | // Extract schema from SQL query
9 | function extractSchemaFromQuery(sql: string): string | null {
10 | // Default schema from environment
11 | const defaultSchema = process.env.MYSQL_DB || null;
12 |
13 | // If we have a default schema and not in multi-DB mode, return it
14 | if (defaultSchema && !isMultiDbMode) {
15 | return defaultSchema;
16 | }
17 |
18 | // Try to extract schema from query
19 |
20 | // Case 1: USE database statement
21 | const useMatch = sql.match(/USE\s+`?([a-zA-Z0-9_]+)`?/i);
22 | if (useMatch && useMatch[1]) {
23 | return useMatch[1];
24 | }
25 |
26 | // Case 2: database.table notation
27 | const dbTableMatch = sql.match(/`?([a-zA-Z0-9_]+)`?\.`?[a-zA-Z0-9_]+`?/i);
28 | if (dbTableMatch && dbTableMatch[1]) {
29 | return dbTableMatch[1];
30 | }
31 |
32 | // Return default if we couldn't find a schema in the query
33 | return defaultSchema;
34 | }
35 |
36 | async function getQueryTypes(query: string): Promise<string[]> {
37 | try {
38 | log("info", "Parsing SQL query: ", query);
39 | // Parse into AST or array of ASTs - only specify the database type
40 | const astOrArray: AST | AST[] = parser.astify(query, { database: "mysql" });
41 | const statements = Array.isArray(astOrArray) ? astOrArray : [astOrArray];
42 |
43 | // Map each statement to its lowercased type (e.g., 'select', 'update', 'insert', 'delete', etc.)
44 | return statements.map((stmt) => stmt.type?.toLowerCase() ?? "unknown");
45 | } catch (err: any) {
46 | log("error", "sqlParser error, query: ", query);
47 | log("error", "Error parsing SQL query:", err);
48 | throw new Error(`Parsing failed: ${err.message}`);
49 | }
50 | }
51 |
52 | export { extractSchemaFromQuery, getQueryTypes };
53 |
--------------------------------------------------------------------------------
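Illustrative behavior of the helpers in src/db/utils.ts, assuming MYSQL_DB is left empty (multi-DB mode) so the schema is extracted from the query itself; the queries and import path are invented for the example.

import { extractSchemaFromQuery, getQueryTypes } from "./src/db/utils.js"; // path assumed

async function inspect(): Promise<void> {
  console.log(extractSchemaFromQuery("USE staging_db"));                    // "staging_db" (USE statement)
  console.log(extractSchemaFromQuery("SELECT * FROM analytics.events"));    // "analytics"  (db.table notation)
  console.log(await getQueryTypes("SELECT 1; DELETE FROM t WHERE id = 1")); // ["select", "delete"]
}

inspect().catch(console.error);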
/src/types/index.ts:
--------------------------------------------------------------------------------
1 | export interface SchemaPermissions {
2 | [schema: string]: boolean;
3 | }
4 |
5 | export interface TableRow {
6 | table_name: string;
7 | }
8 |
9 | export interface ColumnRow {
10 | column_name: string;
11 | data_type: string;
12 | }
13 |
--------------------------------------------------------------------------------
/src/utils/index.ts:
--------------------------------------------------------------------------------
1 | import { SchemaPermissions } from "../types/index.js";
2 | type LogType = "info" | "error";
3 |
4 | // @INFO: Enable logging if ENABLE_LOGGING is true
5 | const ENABLE_LOGGING =
6 | process.env.ENABLE_LOGGING === "true" || process.env.ENABLE_LOGGING === "1";
7 |
8 | export function log(type: LogType = "info", ...args: any[]): void {
9 | if (!ENABLE_LOGGING) return;
10 |
11 | switch (type) {
12 | case "info":
13 | console.info(...args);
14 | break;
15 | case "error":
16 | console.error(...args);
17 | break;
18 | default:
19 | console.log(...args);
20 | }
21 | }
22 |
23 | // Function to parse schema-specific permissions from environment variables
24 | export function parseSchemaPermissions(
25 | permissionsString?: string,
26 | ): SchemaPermissions {
27 | const permissions: SchemaPermissions = {};
28 |
29 | if (!permissionsString) {
30 | return permissions;
31 | }
32 |
33 | // Format: "schema1:true,schema2:false"
34 | const permissionPairs = permissionsString.split(",");
35 |
36 | for (const pair of permissionPairs) {
37 | const [schema, value] = pair.split(":");
38 | if (schema && value) {
39 | permissions[schema.trim()] = value.trim() === "true";
40 | }
41 | }
42 |
43 | return permissions;
44 | }
45 |
--------------------------------------------------------------------------------
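A quick sketch of parseSchemaPermissions above, using the format documented in .env.dist; the input strings are examples and the import path is assumed.

import { parseSchemaPermissions } from "./src/utils/index.js"; // path assumed

console.log(parseSchemaPermissions("test_db:true,staging_db:false"));
// -> { test_db: true, staging_db: false }
console.log(parseSchemaPermissions(undefined));
// -> {}  (no variable set means no schema-specific overrides)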
/tests/e2e/server.test.ts:
--------------------------------------------------------------------------------
1 | import { describe, it, expect, beforeAll, afterAll } from "vitest";
2 | import * as mysql2 from "mysql2/promise";
3 | import * as dotenv from "dotenv";
4 |
5 | // Load test environment variables
6 | dotenv.config({ path: ".env.test" });
7 |
8 | // Helper function to create a test client
9 | function createTestClient() {
10 | // This would be a simplified version of an MCP client for testing
11 | return {
12 | async listTools() {
13 | // Determine which operations are enabled
14 | const allowInsert = process.env.ALLOW_INSERT_OPERATION === "true";
15 | const allowUpdate = process.env.ALLOW_UPDATE_OPERATION === "true";
16 | const allowDelete = process.env.ALLOW_DELETE_OPERATION === "true";
17 |
18 | let description = "Run SQL queries against MySQL database";
19 | if (allowInsert || allowUpdate || allowDelete) {
20 | description += " with support for:";
21 | if (allowInsert) description += " INSERT,";
22 | if (allowUpdate) description += " UPDATE,";
23 | if (allowDelete) description += " DELETE,";
24 | description = description.replace(/,$/, "") + " and READ operations";
25 | } else {
26 | description += " (READ-ONLY)";
27 | }
28 |
29 | return {
30 | tools: [
31 | {
32 | name: "mysql_query",
33 | description,
34 | inputSchema: {
35 | type: "object",
36 | properties: {
37 | sql: { type: "string" },
38 | },
39 | },
40 | },
41 | ],
42 | };
43 | },
44 |
45 | async callTool(name: string, args: any) {
46 | // Implementation would send the request to the server
47 | if (name !== "mysql_query") {
48 | throw new Error(`Unknown tool: ${name}`);
49 | }
50 |
51 | // Check if the query is a write operation
52 | const sql = args.sql.trim().toUpperCase();
53 | const isInsert = sql.startsWith("INSERT");
54 | const isUpdate = sql.startsWith("UPDATE");
55 | const isDelete = sql.startsWith("DELETE");
56 |
57 | // Check if the operations are allowed
58 | const allowInsert = process.env.ALLOW_INSERT_OPERATION === "true";
59 | const allowUpdate = process.env.ALLOW_UPDATE_OPERATION === "true";
60 | const allowDelete = process.env.ALLOW_DELETE_OPERATION === "true";
61 |
62 | // If it's a write operation and not allowed, return an error
63 | if (isInsert && !allowInsert) {
64 | return {
65 | content: [
66 | { type: "text", text: "Error: INSERT operations are not allowed." },
67 | ],
68 | isError: true,
69 | };
70 | }
71 |
72 | if (isUpdate && !allowUpdate) {
73 | return {
74 | content: [
75 | { type: "text", text: "Error: UPDATE operations are not allowed." },
76 | ],
77 | isError: true,
78 | };
79 | }
80 |
81 | if (isDelete && !allowDelete) {
82 | return {
83 | content: [
84 | { type: "text", text: "Error: DELETE operations are not allowed." },
85 | ],
86 | isError: true,
87 | };
88 | }
89 |
90 | // Mock responses based on the operation type
91 | if (isInsert && allowInsert) {
92 | return {
93 | content: [
94 | {
95 | type: "text",
96 | text: "Insert successful. Affected rows: 1, Last insert ID: 42",
97 | },
98 | ],
99 | isError: false,
100 | };
101 | }
102 |
103 | if (isUpdate && allowUpdate) {
104 | return {
105 | content: [
106 | {
107 | type: "text",
108 | text: "Update successful. Affected rows: 2, Changed rows: 1",
109 | },
110 | ],
111 | isError: false,
112 | };
113 | }
114 |
115 | if (isDelete && allowDelete) {
116 | return {
117 | content: [
118 | {
119 | type: "text",
120 | text: "Delete successful. Affected rows: 1",
121 | },
122 | ],
123 | isError: false,
124 | };
125 | }
126 |
127 | // For read operations, return a mock result
128 | return {
129 | content: [
130 | {
131 | type: "text",
132 | text: JSON.stringify([{ result: "test" }], null, 2),
133 | },
134 | ],
135 | isError: false,
136 | };
137 | },
138 |
139 | async listResources() {
140 | // Implementation would communicate with the server
141 | return {
142 | resources: [
143 | {
144 | uri: `mysql://127.0.0.1:3306/test_table/schema`,
145 | mimeType: "application/json",
146 | name: '"test_table" database schema',
147 | },
148 | ],
149 | };
150 | },
151 |
152 | async readResource(uri: string) {
153 | // Implementation would communicate with the server
154 | return {
155 | contents: [
156 | {
157 | uri,
158 | mimeType: "application/json",
159 | text: JSON.stringify(
160 | [
161 | { column_name: "id", data_type: "int" },
162 | { column_name: "name", data_type: "varchar" },
163 | { column_name: "created_at", data_type: "timestamp" },
164 | ],
165 | null,
166 | 2,
167 | ),
168 | },
169 | ],
170 | };
171 | },
172 |
173 | close() {
174 | // Clean up resources
175 | },
176 | };
177 | }
178 |
179 | describe("Server", () => {
180 | let serverProcess: any;
181 | let pool: any;
182 | let client: ReturnType;
183 |
184 | beforeAll(async () => {
185 | // Set the write operation flags to false by default
186 | process.env.ALLOW_INSERT_OPERATION = "false";
187 | process.env.ALLOW_UPDATE_OPERATION = "false";
188 | process.env.ALLOW_DELETE_OPERATION = "false";
189 |
190 | // Set up test database
191 | pool = mysql2.createPool({
192 | host: process.env.MYSQL_HOST || "127.0.0.1",
193 | port: Number(process.env.MYSQL_PORT || "3306"),
194 | user: process.env.MYSQL_USER || "root",
195 | password: process.env.MYSQL_PASS || "",
196 | database: process.env.MYSQL_DB || "mcp_test",
197 | connectionLimit: 5,
198 | });
199 |
200 | // Create test client
201 | client = createTestClient();
202 | });
203 |
204 | afterAll(async () => {
205 | // Clean up
206 | if (serverProcess) {
207 | serverProcess.kill();
208 | }
209 | if (pool) {
210 | await pool.end();
211 | }
212 | if (client) {
213 | client.close();
214 | }
215 | });
216 |
217 | it("should list available tools", async () => {
218 | const result = await client.listTools();
219 | expect(result.tools).toHaveLength(1);
220 | expect(result.tools[0].name).toBe("mysql_query");
221 | // By default, should be read-only
222 | expect(result.tools[0].description).toContain("READ-ONLY");
223 | });
224 |
225 | it("should execute a query tool", async () => {
226 | const result = await client.callTool("mysql_query", {
227 | sql: "SELECT * FROM test_table",
228 | });
229 | expect(result.isError).toBe(false);
230 | expect(result.content).toHaveLength(1);
231 | expect(result.content[0].type).toBe("text");
232 | });
233 |
234 | it("should list available resources", async () => {
235 | const result = await client.listResources();
236 | expect(result.resources).toHaveLength(1);
237 | expect(result.resources[0].name).toContain("test_table");
238 | });
239 |
240 | it("should read a resource", async () => {
241 | const uri = "mysql://127.0.0.1:3306/test_table/schema";
242 | const result = await client.readResource(uri);
243 | expect(result.contents).toHaveLength(1);
244 |
245 | const content = JSON.parse(result.contents[0].text);
246 | expect(Array.isArray(content)).toBe(true);
247 | expect(content.length).toBeGreaterThan(0);
248 | expect(content[0]).toHaveProperty("column_name");
249 | expect(content[0]).toHaveProperty("data_type");
250 | });
251 |
252 | // Tests for write operations
253 | describe("Write Operations", () => {
254 | it("should block INSERT operations by default", async () => {
255 | const result = await client.callTool("mysql_query", {
256 | sql: 'INSERT INTO test_table (name) VALUES ("Test Insert")',
257 | });
258 |
259 | expect(result.isError).toBe(true);
260 | expect(result.content[0].text).toContain(
261 | "INSERT operations are not allowed",
262 | );
263 | });
264 |
265 | it("should block UPDATE operations by default", async () => {
266 | const result = await client.callTool("mysql_query", {
267 | sql: 'UPDATE test_table SET name = "Updated" WHERE id = 1',
268 | });
269 |
270 | expect(result.isError).toBe(true);
271 | expect(result.content[0].text).toContain(
272 | "UPDATE operations are not allowed",
273 | );
274 | });
275 |
276 | it("should block DELETE operations by default", async () => {
277 | const result = await client.callTool("mysql_query", {
278 | sql: "DELETE FROM test_table WHERE id = 1",
279 | });
280 |
281 | expect(result.isError).toBe(true);
282 | expect(result.content[0].text).toContain(
283 | "DELETE operations are not allowed",
284 | );
285 | });
286 |
287 | it("should allow INSERT operations when enabled", async () => {
288 | // Enable INSERT operations for this test
289 | process.env.ALLOW_INSERT_OPERATION = "true";
290 |
291 | const result = await client.callTool("mysql_query", {
292 | sql: 'INSERT INTO test_table (name) VALUES ("Test Insert")',
293 | });
294 |
295 | expect(result.isError).toBe(false);
296 | expect(result.content[0].text).toContain("Insert successful");
297 |
298 | // Reset the flag
299 | process.env.ALLOW_INSERT_OPERATION = "false";
300 | });
301 |
302 | it("should allow UPDATE operations when enabled", async () => {
303 | // Enable UPDATE operations for this test
304 | process.env.ALLOW_UPDATE_OPERATION = "true";
305 |
306 | const result = await client.callTool("mysql_query", {
307 | sql: 'UPDATE test_table SET name = "Updated" WHERE id = 1',
308 | });
309 |
310 | expect(result.isError).toBe(false);
311 | expect(result.content[0].text).toContain("Update successful");
312 |
313 | // Reset the flag
314 | process.env.ALLOW_UPDATE_OPERATION = "false";
315 | });
316 |
317 | it("should allow DELETE operations when enabled", async () => {
318 | // Enable DELETE operations for this test
319 | process.env.ALLOW_DELETE_OPERATION = "true";
320 |
321 | const result = await client.callTool("mysql_query", {
322 | sql: "DELETE FROM test_table WHERE id = 1",
323 | });
324 |
325 | expect(result.isError).toBe(false);
326 | expect(result.content[0].text).toContain("Delete successful");
327 |
328 | // Reset the flag
329 | process.env.ALLOW_DELETE_OPERATION = "false";
330 | });
331 |
332 | it("should update the tool description when write operations are enabled", async () => {
333 | // Enable all write operations for this test
334 | process.env.ALLOW_INSERT_OPERATION = "true";
335 | process.env.ALLOW_UPDATE_OPERATION = "true";
336 | process.env.ALLOW_DELETE_OPERATION = "true";
337 |
338 | const result = await client.listTools();
339 |
340 | expect(result.tools[0].description).toContain("INSERT");
341 | expect(result.tools[0].description).toContain("UPDATE");
342 | expect(result.tools[0].description).toContain("DELETE");
343 | expect(result.tools[0].description).not.toContain("READ-ONLY");
344 |
345 | // Reset the flags
346 | process.env.ALLOW_INSERT_OPERATION = "false";
347 | process.env.ALLOW_UPDATE_OPERATION = "false";
348 | process.env.ALLOW_DELETE_OPERATION = "false";
349 | });
350 | });
351 | });
352 |
353 |
--------------------------------------------------------------------------------
/tests/integration/multi-db/multi-db-mode.test.ts:
--------------------------------------------------------------------------------
1 | import * as mysql2 from "mysql2/promise";
2 | import { describe, it, expect, beforeAll, afterAll, beforeEach } from "vitest";
3 | import {
4 | executeReadOnlyQuery,
5 | executeWriteQuery,
6 | } from "../../../dist/src/db/index.js";
7 |
8 | // Create test environment for multi-DB mode
9 | describe("Multi-DB Mode", () => {
10 | let pool: any;
11 |
12 | beforeAll(async () => {
13 | // Mock environment variables for multi-DB mode
14 | // Clear the database name to enable multi-DB mode
15 | const originalDbName = process.env.MYSQL_DB;
16 | process.env.MYSQL_DB = "";
17 |
18 | // Set write permissions to false for safety in multi-DB mode
19 | process.env.ALLOW_INSERT_OPERATION = "false";
20 | process.env.ALLOW_UPDATE_OPERATION = "false";
21 | process.env.ALLOW_DELETE_OPERATION = "false";
22 | process.env.ALLOW_DDL_OPERATION = "false";
23 |
24 | // Configure schema-specific permissions
25 | process.env.SCHEMA_INSERT_PERMISSIONS =
26 | "multi_db_test_1:true,multi_db_test_2:false";
27 |
28 | // Create connection pool for testing
29 | const config: any = {
30 | host: process.env.MYSQL_HOST || "127.0.0.1",
31 | port: Number(process.env.MYSQL_PORT || "3306"),
32 | user: process.env.MYSQL_USER || "root",
33 | password: process.env.MYSQL_PASS || "",
34 | connectionLimit: 5,
35 | multipleStatements: true,
36 | };
37 |
38 | // Re-apply the password explicitly when one is provided (the config above already defaults it to an empty string)
39 | if (process.env.MYSQL_PASS) {
40 | config.password = process.env.MYSQL_PASS;
41 | }
42 |
43 | pool = mysql2.createPool(config);
44 |
45 | // Create test databases
46 | const connection = await pool.getConnection();
47 | try {
48 | // Create test databases
49 | await connection.query(`CREATE DATABASE IF NOT EXISTS multi_db_test_1`);
50 | await connection.query(`CREATE DATABASE IF NOT EXISTS multi_db_test_2`);
51 |
52 | // Create test tables in each database
53 | await connection.query(`
54 | USE multi_db_test_1;
55 | CREATE TABLE IF NOT EXISTS test_table (
56 | id INT AUTO_INCREMENT PRIMARY KEY,
57 | name VARCHAR(255) NOT NULL,
58 | created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
59 | );
60 |
61 | USE multi_db_test_2;
62 | CREATE TABLE IF NOT EXISTS test_table (
63 | id INT AUTO_INCREMENT PRIMARY KEY,
64 | name VARCHAR(255) NOT NULL,
65 | created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
66 | );
67 | `);
68 | } finally {
69 | connection.release();
70 | }
71 |
72 | return () => {
73 | // Restore original DB name
74 | if (originalDbName) {
75 | process.env.MYSQL_DB = originalDbName;
76 | } else {
77 | delete process.env.MYSQL_DB;
78 | }
79 | };
80 | });
81 |
82 | beforeEach(async () => {
83 | // Reset test data before each test
84 | const connection = await pool.getConnection();
85 | try {
86 | // Clear the tables in both databases
87 | await connection.query(`
88 | USE multi_db_test_1;
89 | TRUNCATE TABLE test_table;
90 | INSERT INTO test_table (name) VALUES ('DB 1 - Record 1'), ('DB 1 - Record 2');
91 |
92 | USE multi_db_test_2;
93 | TRUNCATE TABLE test_table;
94 | INSERT INTO test_table (name) VALUES ('DB 2 - Record 1'), ('DB 2 - Record 2');
95 | `);
96 | } finally {
97 | connection.release();
98 | }
99 | });
100 |
101 | afterAll(async () => {
102 | // Clean up test databases
103 | const connection = await pool.getConnection();
104 | try {
105 | await connection.query(`
106 | DROP DATABASE IF EXISTS multi_db_test_1;
107 | DROP DATABASE IF EXISTS multi_db_test_2;
108 | `);
109 | } finally {
110 | connection.release();
111 | }
112 |
113 | // Close the pool
114 | await pool.end();
115 |
116 | // Clean up environment variables
117 | delete process.env.SCHEMA_INSERT_PERMISSIONS;
118 | });
119 |
120 | // Test querying from multiple databases in multi-DB mode
121 | it("should be able to query data from multiple databases", async () => {
122 | // Query from first database
123 | const result1 = await executeReadOnlyQuery(
124 | "SELECT * FROM multi_db_test_1.test_table",
125 | );
126 |
127 | expect(result1.isError).toBe(false);
128 | const data1 = JSON.parse(result1.content[0].text);
129 | expect(data1.length).toBe(2);
130 | expect(data1[0].name).toBe("DB 1 - Record 1");
131 |
132 | // Query from second database
133 | const result2 = await executeReadOnlyQuery(
134 | "SELECT * FROM multi_db_test_2.test_table",
135 | );
136 |
137 | expect(result2.isError).toBe(false);
138 | const data2 = JSON.parse(result2.content[0].text);
139 | expect(data2.length).toBe(2);
140 | expect(data2[0].name).toBe("DB 2 - Record 1");
141 | });
142 |
143 | // Test USE statement in multi-DB mode
144 | it("should handle USE statements properly", async () => {
145 | // Use the first database and then query without schema prefix
146 | const result = await executeReadOnlyQuery(`
147 | USE multi_db_test_1;
148 | SELECT * FROM test_table;
149 | `);
150 |
151 | expect(result.isError).toBe(false);
152 | const data = JSON.parse(result.content[0].text);
153 | expect(data.length).toBe(2);
154 | expect(data[0].name).toBe("DB 1 - Record 1");
155 | });
156 |
157 | // Test schema-specific permissions in multi-DB mode
158 | it("should respect schema-specific permissions in multi-DB mode", async () => {
159 | // Insert into allowed database (multi_db_test_1)
160 | const result1 = await executeWriteQuery(
161 | 'INSERT INTO multi_db_test_1.test_table (name) VALUES ("New DB1 Record")',
162 | );
163 |
164 | expect(result1.isError).toBe(false);
165 | expect(result1.content[0].text).toContain("Insert successful");
166 |
167 | // Try insert into forbidden database (multi_db_test_2)
168 | const result2 = await executeReadOnlyQuery(
169 | 'INSERT INTO multi_db_test_2.test_table (name) VALUES ("New DB2 Record")',
170 | );
171 |
172 | expect(result2.isError).toBe(true);
173 | expect(result2.content[0].text).toContain(
174 | "INSERT operations are not allowed for schema",
175 | );
176 |
177 | // Verify the records
178 | const connection = await pool.getConnection();
179 | try {
180 | // Verify first insert succeeded
181 | const [rows1] = (await connection.query(
182 | "SELECT * FROM multi_db_test_1.test_table WHERE name = ?",
183 | ["New DB1 Record"],
184 | )) as [any[], any];
185 | expect(rows1.length).toBe(1);
186 |
187 | // Verify second insert was blocked
188 | const [rows2] = (await connection.query(
189 | "SELECT * FROM multi_db_test_2.test_table WHERE name = ?",
190 | ["New DB2 Record"],
191 | )) as [any[], any];
192 | expect(rows2.length).toBe(0);
193 | } finally {
194 | connection.release();
195 | }
196 | });
197 |
198 | // Test SHOW DATABASES command in multi-DB mode
199 | it("should be able to list all databases", async () => {
200 | const result = await executeReadOnlyQuery("SHOW DATABASES");
201 |
202 | expect(result.isError).toBe(false);
203 | const databases = JSON.parse(result.content[0].text);
204 |
205 | // Check if our test databases are in the list
206 | const dbNames = databases.map((db: any) => db.Database);
207 | expect(dbNames).toContain("multi_db_test_1");
208 | expect(dbNames).toContain("multi_db_test_2");
209 | });
210 | });
211 |
--------------------------------------------------------------------------------
/tests/integration/mysql.test.ts:
--------------------------------------------------------------------------------
1 | import * as mysql2 from "mysql2/promise";
2 | import { describe, it, expect, beforeAll, afterAll, beforeEach } from "vitest";
3 | import * as dotenv from "dotenv";
4 | import {
5 | executeReadOnlyQuery,
6 | executeWriteQuery,
7 | } from "../../dist/src/db/index.js";
8 | import * as path from "path";
9 | import { fileURLToPath } from "url";
10 |
11 | // Set test directory path
12 | const __filename = fileURLToPath(import.meta.url);
13 | const __dirname = path.dirname(__filename);
14 |
15 | // Mock environment variables for write operations
16 | process.env.ALLOW_INSERT_OPERATION = "true";
17 | process.env.ALLOW_UPDATE_OPERATION = "true";
18 | process.env.ALLOW_DELETE_OPERATION = "true";
19 |
20 | // Load test environment variables
21 | dotenv.config({ path: path.resolve(__dirname, "../../.env.test") });
22 |
23 | describe("MySQL Integration", () => {
24 | let pool: any;
25 |
26 | beforeAll(async () => {
27 | // Create a connection pool for testing
28 | const config: any = {
29 | host: process.env.MYSQL_HOST || "127.0.0.1",
30 | port: Number(process.env.MYSQL_PORT || "3306"),
31 | user: process.env.MYSQL_USER || "root",
32 | database: process.env.MYSQL_DB || "mcp_test",
33 | connectionLimit: 5,
34 | multipleStatements: true,
35 | };
36 |
37 | // Only add password if it's set
38 | if (process.env.MYSQL_PASS) {
39 | config.password = process.env.MYSQL_PASS;
40 | }
41 |
42 | pool = mysql2.createPool(config);
43 |
44 | // Create a test table if it doesn't exist
45 | const connection = await pool.getConnection();
46 | try {
47 | await connection.query(`
48 | CREATE TABLE IF NOT EXISTS test_table (
49 | id INT AUTO_INCREMENT PRIMARY KEY,
50 | name VARCHAR(255) NOT NULL,
51 | created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
52 | )
53 | `);
54 |
55 | // Create write operations test table
56 | await connection.query(`
57 | CREATE TABLE IF NOT EXISTS write_ops_test (
58 | id INT AUTO_INCREMENT PRIMARY KEY,
59 | name VARCHAR(255) NOT NULL,
60 | value INT DEFAULT 0,
61 | created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
62 | )
63 | `);
64 | } finally {
65 | connection.release();
66 | }
67 | });
68 |
69 | beforeEach(async () => {
70 | // Reset test data before each test
71 | const connection = await pool.getConnection();
72 | try {
73 | // Clear the tables
74 | await connection.query("TRUNCATE TABLE test_table");
75 | await connection.query("TRUNCATE TABLE write_ops_test");
76 |
77 | // Insert test data
78 | await connection.query(`
79 | INSERT INTO test_table (name) VALUES
80 | ('Test 1'),
81 | ('Test 2'),
82 | ('Test 3')
83 | `);
84 |
85 | // Insert write ops test data
86 | await connection.query(`
87 | INSERT INTO write_ops_test (name, value) VALUES
88 | ('Original 1', 10),
89 | ('Original 2', 20),
90 | ('Original 3', 30)
91 | `);
92 | } finally {
93 | connection.release();
94 | }
95 | });
96 |
97 | afterAll(async () => {
98 | // Clean up test data
99 | const connection = await pool.getConnection();
100 | try {
101 | await connection.query("DROP TABLE IF EXISTS test_table");
102 | await connection.query("DROP TABLE IF EXISTS write_ops_test");
103 | } finally {
104 | connection.release();
105 | }
106 |
107 | // Close the pool
108 | await pool.end();
109 | });
110 |
111 | it("should connect to the database", async () => {
112 | const connection = await pool.getConnection();
113 | expect(connection).toBeDefined();
114 | connection.release();
115 | });
116 |
117 | it("should execute a query and return results", async () => {
118 | const connection = await pool.getConnection();
119 | try {
120 | const [rows] = (await connection.query("SELECT * FROM test_table")) as [
121 | any[],
122 | any,
123 | ];
124 | expect(Array.isArray(rows)).toBe(true);
125 | expect(rows.length).toBe(3);
126 | } finally {
127 | connection.release();
128 | }
129 | });
130 |
131 | it("should execute a parameterized query", async () => {
132 | const connection = await pool.getConnection();
133 | try {
134 | const [rows] = (await connection.query(
135 | "SELECT * FROM test_table WHERE name = ?",
136 | ["Test 2"],
137 | )) as [any[], any];
138 |
139 | expect(Array.isArray(rows)).toBe(true);
140 | expect(rows.length).toBe(1);
141 | expect(rows[0].name).toBe("Test 2");
142 | } finally {
143 | connection.release();
144 | }
145 | });
146 |
147 | it("should handle transactions correctly", async () => {
148 | const connection = await pool.getConnection();
149 | try {
150 | // Start transaction
151 | await connection.beginTransaction();
152 |
153 | // Insert a new record
154 | await connection.query("INSERT INTO test_table (name) VALUES (?)", [
155 | "Transaction Test",
156 | ]);
157 |
158 | // Verify the record exists
159 | const [rows] = (await connection.query(
160 | "SELECT * FROM test_table WHERE name = ?",
161 | ["Transaction Test"],
162 | )) as [any[], any];
163 |
164 | expect(rows.length).toBe(1);
165 |
166 | // Rollback the transaction
167 | await connection.rollback();
168 |
169 | // Verify the record no longer exists
170 | const [rowsAfterRollback] = (await connection.query(
171 | "SELECT * FROM test_table WHERE name = ?",
172 | ["Transaction Test"],
173 | )) as [any[], any];
174 |
175 | expect(rowsAfterRollback.length).toBe(0);
176 | } finally {
177 | connection.release();
178 | }
179 | });
180 |
181 | // Tests for the write operations
182 | describe("Write Operations", () => {
183 | it("should execute INSERT operations when allowed", async () => {
184 | // Ensure the flag is set to true for this test
185 | const originalValue = process.env.ALLOW_INSERT_OPERATION;
186 | process.env.ALLOW_INSERT_OPERATION = "true";
187 |
188 | try {
189 | // Use executeWriteQuery directly for write operations in tests
190 | const result = await executeWriteQuery(
191 | 'INSERT INTO write_ops_test (name, value) VALUES ("New Record", 100)',
192 | );
193 |
194 | expect(result.isError).toBe(false);
195 | expect(result.content[0].text).toContain("Insert successful");
196 |
197 | // Verify the record was inserted
198 | const connection = await pool.getConnection();
199 | try {
200 | const [rows] = (await connection.query(
201 | "SELECT * FROM write_ops_test WHERE name = ?",
202 | ["New Record"],
203 | )) as [any[], any];
204 |
205 | expect(rows.length).toBe(1);
206 | expect(rows[0].value).toBe(100);
207 | } finally {
208 | connection.release();
209 | }
210 | } finally {
211 | // Restore original flag value
212 | process.env.ALLOW_INSERT_OPERATION = originalValue;
213 | }
214 | });
215 |
216 | it("should execute UPDATE operations when allowed", async () => {
217 | // Ensure the flag is set to true for this test
218 | const originalValue = process.env.ALLOW_UPDATE_OPERATION;
219 | process.env.ALLOW_UPDATE_OPERATION = "true";
220 |
221 | try {
222 | // Use executeWriteQuery directly for write operations in tests
223 | const result = await executeWriteQuery(
224 | 'UPDATE write_ops_test SET value = 999 WHERE name = "Original 2"',
225 | );
226 |
227 | expect(result.isError).toBe(false);
228 | expect(result.content[0].text).toContain("Update successful");
229 |
230 | // Verify the record was updated
231 | const connection = await pool.getConnection();
232 | try {
233 | const [rows] = (await connection.query(
234 | "SELECT * FROM write_ops_test WHERE name = ?",
235 | ["Original 2"],
236 | )) as [any[], any];
237 |
238 | expect(rows.length).toBe(1);
239 | expect(rows[0].value).toBe(999);
240 | } finally {
241 | connection.release();
242 | }
243 | } finally {
244 | // Restore original flag value
245 | process.env.ALLOW_UPDATE_OPERATION = originalValue;
246 | }
247 | });
248 |
249 | it("should execute DELETE operations when allowed", async () => {
250 | // Ensure the flag is set to true for this test
251 | const originalValue = process.env.ALLOW_DELETE_OPERATION;
252 | process.env.ALLOW_DELETE_OPERATION = "true";
253 |
254 | try {
255 | // Use executeWriteQuery directly for write operations in tests
256 | const result = await executeWriteQuery(
257 | 'DELETE FROM write_ops_test WHERE name = "Original 3"',
258 | );
259 |
260 | expect(result.isError).toBe(false);
261 | expect(result.content[0].text).toContain("Delete successful");
262 |
263 | // Verify the record was deleted
264 | const connection = await pool.getConnection();
265 | try {
266 | const [rows] = (await connection.query(
267 | "SELECT * FROM write_ops_test WHERE name = ?",
268 | ["Original 3"],
269 | )) as [any[], any];
270 |
271 | expect(rows.length).toBe(0); // Record should be deleted
272 | } finally {
273 | connection.release();
274 | }
275 | } finally {
276 | // Restore original flag value
277 | process.env.ALLOW_DELETE_OPERATION = originalValue;
278 | }
279 | });
280 |
281 | it("should block INSERT operations when not allowed", async () => {
282 | // Set the flag to false for this test
283 | const originalValue = process.env.ALLOW_INSERT_OPERATION;
284 | process.env.ALLOW_INSERT_OPERATION = "false";
285 |
286 | try {
287 | const result = await executeReadOnlyQuery(
288 | 'INSERT INTO write_ops_test (name, value) VALUES ("Blocked Insert", 100)',
289 | );
290 |
291 | expect(result.isError).toBe(true);
292 | expect(result.content[0].text).toContain(
293 | "INSERT operations are not allowed",
294 | );
295 |
296 | // Verify the record was not inserted
297 | const connection = await pool.getConnection();
298 | try {
299 | const [rows] = (await connection.query(
300 | "SELECT * FROM write_ops_test WHERE name = ?",
301 | ["Blocked Insert"],
302 | )) as [any[], any];
303 |
304 | expect(rows.length).toBe(0); // Record should not exist
305 | } finally {
306 | connection.release();
307 | }
308 | } finally {
309 | // Restore original flag value
310 | process.env.ALLOW_INSERT_OPERATION = originalValue;
311 | }
312 | });
313 | });
314 | });
315 |
316 |
--------------------------------------------------------------------------------
/tests/integration/schema-permissions/schema-permissions.test.ts:
--------------------------------------------------------------------------------
1 | import * as mysql2 from "mysql2/promise";
2 | import { describe, it, expect, beforeAll, afterAll, beforeEach } from "vitest";
3 | import {
4 | executeReadOnlyQuery,
5 | executeWriteQuery,
6 | } from "../../../dist/src/db/index.js";
7 |
8 | // Create test environment for schema-specific permissions
9 | describe("Schema-specific Permissions", () => {
10 | let pool: any;
11 |
12 | beforeAll(async () => {
13 | // Mock environment variables for schema-specific permissions
14 | process.env.ALLOW_INSERT_OPERATION = "false";
15 | process.env.ALLOW_UPDATE_OPERATION = "false";
16 | process.env.ALLOW_DELETE_OPERATION = "false";
17 | process.env.ALLOW_DDL_OPERATION = "false";
18 |
19 | // Set schema-specific permissions
20 | process.env.SCHEMA_INSERT_PERMISSIONS =
21 | "test_schema_1:true,test_schema_2:false";
22 | process.env.SCHEMA_UPDATE_PERMISSIONS =
23 | "test_schema_1:false,test_schema_2:true";
24 | process.env.SCHEMA_DELETE_PERMISSIONS =
25 | "test_schema_1:true,test_schema_2:false";
26 | process.env.SCHEMA_DDL_PERMISSIONS =
27 | "test_schema_1:true,test_schema_2:false";
28 |
29 | // Create connection pool for testing
30 | const config: any = {
31 | host: process.env.MYSQL_HOST || "127.0.0.1",
32 | port: Number(process.env.MYSQL_PORT || "3306"),
33 | user: process.env.MYSQL_USER || "root",
34 | connectionLimit: 5,
35 | multipleStatements: true,
36 | };
37 |
38 | // Only add password if it's set
39 | if (process.env.MYSQL_PASS) {
40 | config.password = process.env.MYSQL_PASS;
41 | }
42 |
43 | pool = mysql2.createPool(config);
44 |
45 |     // Set up the test schemas and their tables
46 | const connection = await pool.getConnection();
47 | try {
48 | // Create test schemas
49 | await connection.query(`CREATE DATABASE IF NOT EXISTS test_schema_1`);
50 | await connection.query(`CREATE DATABASE IF NOT EXISTS test_schema_2`);
51 |
52 | // Create test tables in each schema
53 | await connection.query(`
54 | USE test_schema_1;
55 | CREATE TABLE IF NOT EXISTS test_table (
56 | id INT AUTO_INCREMENT PRIMARY KEY,
57 | name VARCHAR(255) NOT NULL,
58 | created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
59 | );
60 |
61 | USE test_schema_2;
62 | CREATE TABLE IF NOT EXISTS test_table (
63 | id INT AUTO_INCREMENT PRIMARY KEY,
64 | name VARCHAR(255) NOT NULL,
65 | created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
66 | );
67 | `);
68 | } finally {
69 | connection.release();
70 | }
71 | });
72 |
73 | beforeEach(async () => {
74 | // Reset test data before each test
75 | const connection = await pool.getConnection();
76 | try {
77 | // Clear the tables in both schemas
78 | await connection.query(`
79 | USE test_schema_1;
80 | TRUNCATE TABLE test_table;
81 | INSERT INTO test_table (name) VALUES ('Schema 1 - Test 1'), ('Schema 1 - Test 2');
82 |
83 | USE test_schema_2;
84 | TRUNCATE TABLE test_table;
85 | INSERT INTO test_table (name) VALUES ('Schema 2 - Test 1'), ('Schema 2 - Test 2');
86 | `);
87 | } finally {
88 | connection.release();
89 | }
90 | });
91 |
92 | afterAll(async () => {
93 | // Clean up test schemas
94 | const connection = await pool.getConnection();
95 | try {
96 | await connection.query(`
97 | DROP DATABASE IF EXISTS test_schema_1;
98 | DROP DATABASE IF EXISTS test_schema_2;
99 | `);
100 | } finally {
101 | connection.release();
102 | }
103 |
104 | // Close the pool
105 | await pool.end();
106 |
107 | // Clean up environment variables
108 | delete process.env.SCHEMA_INSERT_PERMISSIONS;
109 | delete process.env.SCHEMA_UPDATE_PERMISSIONS;
110 | delete process.env.SCHEMA_DELETE_PERMISSIONS;
111 | delete process.env.SCHEMA_DDL_PERMISSIONS;
112 | });
113 |
114 | // Test INSERT permission for schema_1 (allowed)
115 | it("should allow INSERT operations for test_schema_1", async () => {
116 | const result = await executeWriteQuery(
117 | 'INSERT INTO test_schema_1.test_table (name) VALUES ("New Record")',
118 | );
119 |
120 | expect(result.isError).toBe(false);
121 | expect(result.content[0].text).toContain("Insert successful");
122 |
123 | // Verify the record was inserted
124 | const connection = await pool.getConnection();
125 | try {
126 | const [rows] = (await connection.query(
127 | "SELECT * FROM test_schema_1.test_table WHERE name = ?",
128 | ["New Record"],
129 | )) as [any[], any];
130 |
131 | expect(rows.length).toBe(1);
132 | } finally {
133 | connection.release();
134 | }
135 | });
136 |
137 | // Test INSERT permission for schema_2 (not allowed)
138 | it("should block INSERT operations for test_schema_2", async () => {
139 | const result = await executeReadOnlyQuery(
140 | 'INSERT INTO test_schema_2.test_table (name) VALUES ("Blocked Insert")',
141 | );
142 |
143 | expect(result.isError).toBe(true);
144 | expect(result.content[0].text).toContain(
145 | "INSERT operations are not allowed for schema",
146 | );
147 |
148 | // Verify the record was not inserted
149 | const connection = await pool.getConnection();
150 | try {
151 | const [rows] = (await connection.query(
152 | "SELECT * FROM test_schema_2.test_table WHERE name = ?",
153 | ["Blocked Insert"],
154 | )) as [any[], any];
155 |
156 | expect(rows.length).toBe(0); // Record should not exist
157 | } finally {
158 | connection.release();
159 | }
160 | });
161 |
162 | // Test UPDATE permission for schema_1 (not allowed)
163 | it("should block UPDATE operations for test_schema_1", async () => {
164 | const result = await executeReadOnlyQuery(
165 | 'UPDATE test_schema_1.test_table SET name = "Updated Name" WHERE name = "Schema 1 - Test 1"',
166 | );
167 |
168 | expect(result.isError).toBe(true);
169 | expect(result.content[0].text).toContain(
170 | "UPDATE operations are not allowed for schema",
171 | );
172 |
173 | // Verify the record was not updated
174 | const connection = await pool.getConnection();
175 | try {
176 | const [rows] = (await connection.query(
177 | "SELECT * FROM test_schema_1.test_table WHERE name = ?",
178 | ["Schema 1 - Test 1"],
179 | )) as [any[], any];
180 |
181 | expect(rows.length).toBe(1); // Original record should still exist
182 | } finally {
183 | connection.release();
184 | }
185 | });
186 |
187 | // Test UPDATE permission for schema_2 (allowed)
188 | it("should allow UPDATE operations for test_schema_2", async () => {
189 | const result = await executeWriteQuery(
190 | 'UPDATE test_schema_2.test_table SET name = "Updated Name" WHERE name = "Schema 2 - Test 1"',
191 | );
192 |
193 | expect(result.isError).toBe(false);
194 | expect(result.content[0].text).toContain("Update successful");
195 |
196 | // Verify the record was updated
197 | const connection = await pool.getConnection();
198 | try {
199 | const [rows] = (await connection.query(
200 | "SELECT * FROM test_schema_2.test_table WHERE name = ?",
201 | ["Updated Name"],
202 | )) as [any[], any];
203 |
204 | expect(rows.length).toBe(1); // Updated record should exist
205 | } finally {
206 | connection.release();
207 | }
208 | });
209 |
210 | // Test DDL permission for schema_1 (allowed)
211 | it("should allow DDL operations for test_schema_1", async () => {
212 | const result = await executeWriteQuery(
213 | "ALTER TABLE test_schema_1.test_table ADD COLUMN test_column VARCHAR(50)",
214 | );
215 |
216 | expect(result.isError).toBe(false);
217 |
218 | // Verify the column was added
219 | const connection = await pool.getConnection();
220 | try {
221 | const [columns] = (await connection.query(
222 | `SELECT COLUMN_NAME
223 | FROM INFORMATION_SCHEMA.COLUMNS
224 | WHERE TABLE_SCHEMA = 'test_schema_1'
225 | AND TABLE_NAME = 'test_table'
226 | AND COLUMN_NAME = 'test_column'`,
227 | )) as [any[], any];
228 |
229 | expect(columns.length).toBe(1); // Column should exist
230 | } finally {
231 | connection.release();
232 | }
233 | });
234 |
235 | // Test DDL permission for schema_2 (not allowed)
236 | it("should block DDL operations for test_schema_2", async () => {
237 | const result = await executeReadOnlyQuery(
238 | "ALTER TABLE test_schema_2.test_table ADD COLUMN test_column VARCHAR(50)",
239 | );
240 |
241 | expect(result.isError).toBe(true);
242 | expect(result.content[0].text).toContain(
243 | "DDL operations are not allowed for schema",
244 | );
245 |
246 | // Verify the column was not added
247 | const connection = await pool.getConnection();
248 | try {
249 | const [columns] = (await connection.query(
250 | `SELECT COLUMN_NAME
251 | FROM INFORMATION_SCHEMA.COLUMNS
252 | WHERE TABLE_SCHEMA = 'test_schema_2'
253 | AND TABLE_NAME = 'test_table'
254 | AND COLUMN_NAME = 'test_column'`,
255 | )) as [any[], any];
256 |
257 | expect(columns.length).toBe(0); // Column should not exist
258 | } finally {
259 | connection.release();
260 | }
261 | });
262 | });
263 |
--------------------------------------------------------------------------------
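Editor's note: beforeAll above stubs SCHEMA_INSERT_PERMISSIONS and its siblings with the comma-separated `schema:flag` format, and the tests then expect per-schema decisions that override the global ALLOW_* flags. The real parsing lives in src/db/permissions.ts and is not shown in this excerpt, so the sketch below is an assumption about shape only; `parseSchemaPermissions` and `isSchemaInsertAllowed` are placeholder names.

// Parse "schema1:true,schema2:false" into a lookup table (illustrative only).
function parseSchemaPermissions(raw: string | undefined): Map<string, boolean> {
  const permissions = new Map<string, boolean>();
  if (!raw) return permissions;
  for (const entry of raw.split(",")) {
    const [schema, flag] = entry.split(":").map((part) => part.trim());
    if (schema) permissions.set(schema, flag === "true");
  }
  return permissions;
}

function isSchemaInsertAllowed(schema: string): boolean {
  const schemaPermissions = parseSchemaPermissions(process.env.SCHEMA_INSERT_PERMISSIONS);
  // Fall back to the global flag when the schema has no explicit entry.
  return schemaPermissions.get(schema) ?? process.env.ALLOW_INSERT_OPERATION === "true";
}

// parseSchemaPermissions("test_schema_1:true,test_schema_2:false")
//   => Map { "test_schema_1" => true, "test_schema_2" => false }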
/tests/integration/socket-connection.test.ts:
--------------------------------------------------------------------------------
1 | import * as mysql2 from 'mysql2/promise';
2 | import { describe, it, expect, beforeAll, afterAll } from 'vitest';
3 | import * as dotenv from 'dotenv';
4 | import * as path from 'path';
5 | import { fileURLToPath } from 'url';
6 |
7 | // Set test directory path
8 | const __filename = fileURLToPath(import.meta.url);
9 | const __dirname = path.dirname(__filename);
10 |
11 | // Load test environment variables
12 | dotenv.config({ path: path.resolve(__dirname, '../../.env.test') });
13 |
14 | describe('Unix Socket Connection', () => {
15 | // Skip these tests if no socket path is provided
16 | const skipTests = !process.env.MYSQL_SOCKET_PATH;
17 |
18 | it.skipIf(skipTests)('should connect via Unix socket when MYSQL_SOCKET_PATH is set', async () => {
19 | const originalHost = process.env.MYSQL_HOST;
20 | const originalPort = process.env.MYSQL_PORT;
21 | const originalSocketPath = process.env.MYSQL_SOCKET_PATH;
22 |
23 | try {
24 | // Set socket path (use the actual socket path from environment or a test path)
25 | process.env.MYSQL_SOCKET_PATH = originalSocketPath || '/tmp/mysql.sock';
26 | delete process.env.MYSQL_HOST;
27 | delete process.env.MYSQL_PORT;
28 |
29 | // Create a connection pool using socket
30 | const config: any = {
31 | socketPath: process.env.MYSQL_SOCKET_PATH,
32 | user: process.env.MYSQL_USER || 'root',
33 | database: process.env.MYSQL_DB || 'mcp_test',
34 | connectionLimit: 5,
35 | };
36 |
37 | // Only add password if it's set
38 | if (process.env.MYSQL_PASS) {
39 | config.password = process.env.MYSQL_PASS;
40 | }
41 |
42 | const pool = mysql2.createPool(config);
43 |
44 | // Test the connection
45 | const connection = await pool.getConnection();
46 | expect(connection).toBeDefined();
47 |
48 | // Execute a simple query
49 | const [rows] = await connection.query('SELECT 1 as test') as [any[], any];
50 | expect(rows[0].test).toBe(1);
51 |
52 | connection.release();
53 | await pool.end();
54 | } finally {
55 | // Restore original values
56 | if (originalHost) process.env.MYSQL_HOST = originalHost;
57 | if (originalPort) process.env.MYSQL_PORT = originalPort;
58 | if (originalSocketPath) process.env.MYSQL_SOCKET_PATH = originalSocketPath;
59 | else delete process.env.MYSQL_SOCKET_PATH;
60 | }
61 | });
62 |
63 | it('should prefer socket path over host/port when both are provided', async () => {
64 | // This test verifies the configuration logic
65 | const mockConfig = {
66 | ...(process.env.MYSQL_SOCKET_PATH
67 | ? {
68 | socketPath: process.env.MYSQL_SOCKET_PATH,
69 | }
70 | : {
71 | host: process.env.MYSQL_HOST || '127.0.0.1',
72 | port: Number(process.env.MYSQL_PORT || '3306'),
73 | }
74 | ),
75 | };
76 |
77 | // If socket path is set, config should not have host/port
78 | if (process.env.MYSQL_SOCKET_PATH) {
79 | expect(mockConfig).toHaveProperty('socketPath');
80 | expect(mockConfig).not.toHaveProperty('host');
81 | expect(mockConfig).not.toHaveProperty('port');
82 | } else {
83 | expect(mockConfig).not.toHaveProperty('socketPath');
84 | expect(mockConfig).toHaveProperty('host');
85 | expect(mockConfig).toHaveProperty('port');
86 | }
87 | });
88 | });
--------------------------------------------------------------------------------
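Editor's note: the second test above captures the intended precedence: when MYSQL_SOCKET_PATH is set, the pool config should carry `socketPath` and omit `host`/`port`. A standalone sketch of that builder follows; the real config is presumably assembled in src/config/index.ts, which is not part of this excerpt, so treat `buildConnectionConfig` and the exact option set as assumptions.

import type { PoolOptions } from "mysql2/promise";

function buildConnectionConfig(): PoolOptions {
  const base: PoolOptions = {
    user: process.env.MYSQL_USER || "root",
    ...(process.env.MYSQL_PASS ? { password: process.env.MYSQL_PASS } : {}),
    ...(process.env.MYSQL_DB ? { database: process.env.MYSQL_DB } : {}),
    connectionLimit: 5,
  };

  // A Unix socket, when configured, takes precedence over TCP host/port.
  return process.env.MYSQL_SOCKET_PATH
    ? { ...base, socketPath: process.env.MYSQL_SOCKET_PATH }
    : {
        ...base,
        host: process.env.MYSQL_HOST || "127.0.0.1",
        port: Number(process.env.MYSQL_PORT || "3306"),
      };
}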
/tests/unit/query.test.ts:
--------------------------------------------------------------------------------
1 | import { vi, describe, it, expect, beforeEach } from "vitest";
2 | import {
3 | executeQuery,
4 | executeReadOnlyQuery,
5 | executeWriteQuery,
6 | } from "../../dist/src/db/index.js";
7 |
8 | // Mock environment variables for write operation flags
9 | vi.stubEnv("ALLOW_INSERT_OPERATION", "false");
10 | vi.stubEnv("ALLOW_UPDATE_OPERATION", "false");
11 | vi.stubEnv("ALLOW_DELETE_OPERATION", "false");
12 |
13 | // Mock mysql2/promise
14 | vi.mock("mysql2/promise", () => {
15 | const mockConnection = {
16 | query: vi.fn(),
17 | beginTransaction: vi.fn(),
18 | commit: vi.fn(),
19 | rollback: vi.fn(),
20 | release: vi.fn(),
21 | };
22 |
23 | const mockPool = {
24 | getConnection: vi.fn().mockResolvedValue(mockConnection),
25 | };
26 |
27 | return {
28 | createPool: vi.fn().mockReturnValue(mockPool),
29 | ResultSetHeader: class ResultSetHeader {
30 | constructor(data = {}) {
31 | Object.assign(this, data);
32 | }
33 | },
34 | };
35 | });
36 |
37 | describe("Query Functions", () => {
38 | let mockPool;
39 | let mockConnection;
40 |
41 | beforeEach(async () => {
42 | // Clear all mocks
43 | vi.clearAllMocks();
44 |
45 | // Get the mock pool and connection
46 | mockPool = (await import("mysql2/promise")).createPool({
47 | host: "localhost",
48 | user: "test",
49 | database: "test",
50 | });
51 | mockConnection = await mockPool.getConnection();
52 | });
53 |
54 | describe("executeQuery", () => {
55 | it("should execute a query and return results", async () => {
56 | const mockResults = [{ id: 1, name: "Test" }];
57 | mockConnection.query.mockResolvedValueOnce([mockResults, null]);
58 |
59 | const result = await executeQuery("SELECT * FROM test", []);
60 |
61 | expect(mockConnection.query).toHaveBeenCalledWith(
62 | "SELECT * FROM test",
63 | [],
64 | );
65 | expect(mockConnection.release).toHaveBeenCalled();
66 | expect(result).toEqual(mockResults);
67 | });
68 |
69 | it("should handle query parameters correctly", async () => {
70 | const params = ["test", 123];
71 | mockConnection.query.mockResolvedValueOnce([[{ id: 1 }], null]);
72 |
73 | await executeQuery(
74 | "SELECT * FROM test WHERE name = ? AND id = ?",
75 | params,
76 | );
77 |
78 | expect(mockConnection.query).toHaveBeenCalledWith(
79 | "SELECT * FROM test WHERE name = ? AND id = ?",
80 | params,
81 | );
82 | });
83 |
84 | it("should release connection even if query fails", async () => {
85 | mockConnection.query.mockRejectedValueOnce(new Error("Query failed"));
86 |
87 | await expect(executeQuery("SELECT * FROM test", [])).rejects.toThrow(
88 | "Query failed",
89 | );
90 | expect(mockConnection.release).toHaveBeenCalled();
91 | });
92 | });
93 |
94 | describe("executeReadOnlyQuery", () => {
95 | it("should execute a read-only query in a transaction and return results", async () => {
96 | const mockResults = [{ id: 1, name: "Test" }];
97 | mockConnection.query.mockResolvedValue([mockResults, null]);
98 |
99 | const result = await executeReadOnlyQuery("SELECT * FROM test");
100 |
101 | expect(mockConnection.query).toHaveBeenCalledWith(
102 | "SET SESSION TRANSACTION READ ONLY",
103 | );
104 | expect(mockConnection.beginTransaction).toHaveBeenCalled();
105 | expect(mockConnection.query).toHaveBeenCalledWith("SELECT * FROM test");
106 | expect(mockConnection.rollback).toHaveBeenCalled();
107 | expect(mockConnection.query).toHaveBeenCalledWith(
108 | "SET SESSION TRANSACTION READ WRITE",
109 | );
110 | expect(mockConnection.release).toHaveBeenCalled();
111 |
112 | expect(result).toEqual({
113 | content: [
114 | {
115 | type: "text",
116 | text: JSON.stringify(mockResults, null, 2),
117 | },
118 | ],
119 | isError: false,
120 | });
121 | });
122 |
123 | it("should block INSERT operations when not allowed", async () => {
124 | const result = await executeReadOnlyQuery(
125 | 'INSERT INTO test (name) VALUES ("test")',
126 | );
127 |
128 | expect(mockConnection.beginTransaction).not.toHaveBeenCalled();
129 | expect(result.isError).toBe(true);
130 | expect(result.content[0].text).toContain(
131 | "INSERT operations are not allowed",
132 | );
133 | });
134 |
135 | it("should block UPDATE operations when not allowed", async () => {
136 | const result = await executeReadOnlyQuery(
137 | 'UPDATE test SET name = "updated" WHERE id = 1',
138 | );
139 |
140 | expect(mockConnection.beginTransaction).not.toHaveBeenCalled();
141 | expect(result.isError).toBe(true);
142 | expect(result.content[0].text).toContain(
143 | "UPDATE operations are not allowed",
144 | );
145 | });
146 |
147 | it("should block DELETE operations when not allowed", async () => {
148 | const result = await executeReadOnlyQuery(
149 | "DELETE FROM test WHERE id = 1",
150 | );
151 |
152 | expect(mockConnection.beginTransaction).not.toHaveBeenCalled();
153 | expect(result.isError).toBe(true);
154 | expect(result.content[0].text).toContain(
155 | "DELETE operations are not allowed",
156 | );
157 | });
158 | });
159 |
160 | describe("executeWriteQuery", () => {
161 | it("should execute an INSERT query and format the result correctly", async () => {
162 | // Mock ResultSetHeader for an insert operation
163 | const resultHeader = { affectedRows: 1, insertId: 123 };
164 | mockConnection.query.mockResolvedValueOnce([resultHeader, null]);
165 |
166 | const result = await executeWriteQuery(
167 | 'INSERT INTO test (name) VALUES ("test")',
168 | );
169 |
170 | expect(mockConnection.beginTransaction).toHaveBeenCalled();
171 | expect(mockConnection.query).toHaveBeenCalledWith(
172 | 'INSERT INTO test (name) VALUES ("test")',
173 | );
174 | expect(mockConnection.commit).toHaveBeenCalled();
175 | expect(mockConnection.release).toHaveBeenCalled();
176 |
177 | expect(result.isError).toBe(false);
178 | expect(result.content[0].text).toContain("Insert successful");
179 | expect(result.content[0].text).toContain("Affected rows: 1");
180 | expect(result.content[0].text).toContain("Last insert ID: 123");
181 | });
182 |
183 | it("should execute an UPDATE query and format the result correctly", async () => {
184 | // Mock ResultSetHeader for an update operation
185 | const resultHeader = { affectedRows: 2, changedRows: 1 };
186 | mockConnection.query.mockResolvedValueOnce([resultHeader, null]);
187 |
188 | const result = await executeWriteQuery(
189 | 'UPDATE test SET name = "updated" WHERE id > 0',
190 | );
191 |
192 | expect(mockConnection.beginTransaction).toHaveBeenCalled();
193 | expect(mockConnection.query).toHaveBeenCalledWith(
194 | 'UPDATE test SET name = "updated" WHERE id > 0',
195 | );
196 | expect(mockConnection.commit).toHaveBeenCalled();
197 |
198 | expect(result.isError).toBe(false);
199 | expect(result.content[0].text).toContain("Update successful");
200 | expect(result.content[0].text).toContain("Affected rows: 2");
201 | expect(result.content[0].text).toContain("Changed rows: 1");
202 | });
203 |
204 | it("should execute a DELETE query and format the result correctly", async () => {
205 | // Mock ResultSetHeader for a delete operation
206 | const resultHeader = { affectedRows: 3 };
207 | mockConnection.query.mockResolvedValueOnce([resultHeader, null]);
208 |
209 | const result = await executeWriteQuery("DELETE FROM test WHERE id > 0");
210 |
211 | expect(mockConnection.beginTransaction).toHaveBeenCalled();
212 | expect(mockConnection.query).toHaveBeenCalledWith(
213 | "DELETE FROM test WHERE id > 0",
214 | );
215 | expect(mockConnection.commit).toHaveBeenCalled();
216 |
217 | expect(result.isError).toBe(false);
218 | expect(result.content[0].text).toContain("Delete successful");
219 | expect(result.content[0].text).toContain("Affected rows: 3");
220 | });
221 |
222 | it("should rollback transaction and return error if query fails", async () => {
223 | mockConnection.query.mockImplementation((sql) => {
224 | if (sql === 'INSERT INTO test (name) VALUES ("test")') {
225 | throw new Error("Insert failed");
226 | }
227 | return [[], null];
228 | });
229 |
230 | const result = await executeWriteQuery(
231 | 'INSERT INTO test (name) VALUES ("test")',
232 | );
233 |
234 | expect(mockConnection.beginTransaction).toHaveBeenCalled();
235 | expect(mockConnection.rollback).toHaveBeenCalled();
236 | expect(mockConnection.commit).not.toHaveBeenCalled();
237 |
238 | expect(result.isError).toBe(true);
239 | expect(result.content[0].text).toContain(
240 | "Error executing write operation",
241 | );
242 | });
243 | });
244 | });
245 |
246 |
--------------------------------------------------------------------------------
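Editor's note: the executeReadOnlyQuery assertions above pin down an execution order: switch the session to read-only, run the statement inside a transaction, roll back, restore read-write, then release the connection. As a reading aid, the sequence those expectations imply looks roughly like the sketch below; permission checks and error handling are omitted, and the real implementation is in src/db/index.ts.

import type { Pool } from "mysql2/promise";

// Illustrative flow matching the mocked call order asserted in the unit tests.
async function readOnlySketch(pool: Pool, sql: string) {
  const connection = await pool.getConnection();
  try {
    await connection.query("SET SESSION TRANSACTION READ ONLY");
    await connection.beginTransaction();
    const [rows] = await connection.query(sql);
    await connection.rollback(); // nothing should persist from a read-only call
    await connection.query("SET SESSION TRANSACTION READ WRITE");
    return {
      content: [{ type: "text", text: JSON.stringify(rows, null, 2) }],
      isError: false,
    };
  } finally {
    connection.release();
  }
}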
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "outDir": "./dist",
4 | "rootDir": ".",
5 | "target": "ES2022",
6 | "module": "NodeNext",
7 | "moduleResolution": "NodeNext",
8 | "strict": true,
9 | "esModuleInterop": true,
10 | "skipLibCheck": true,
11 | "forceConsistentCasingInFileNames": true,
12 | "resolveJsonModule": true,
13 | "removeComments": true,
14 | "pretty": false,
15 | "stripInternal": true,
16 | "newLine": "lf"
17 | },
18 | "ts-node": {
19 | "esm": true,
20 | "experimentalSpecifierResolution": "node"
21 | },
22 | "include": [
23 | "./*.ts",
24 | "./src/**/*.ts"
25 | ],
26 | "exclude": [
27 | "node_modules"
28 | ]
29 | }
30 |
--------------------------------------------------------------------------------
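Editor's note: with "module" and "moduleResolution" set to NodeNext, TypeScript applies Node's ESM resolution rules, which is why the test files above import the compiled output with an explicit `.js` extension. A one-line illustration, using the same path the tests use:

// Relative ESM imports must spell out the runtime extension under NodeNext,
// even though the source file on disk is index.ts.
import { executeReadOnlyQuery } from "../../dist/src/db/index.js";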
/tsconfig.scripts.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "ES2022",
4 | "module": "ESNext",
5 | "moduleResolution": "node",
6 | "esModuleInterop": true,
7 | "strict": true,
8 | "skipLibCheck": true,
9 | "forceConsistentCasingInFileNames": true,
10 | "outDir": "./dist-scripts"
11 | },
12 | "include": [
13 | "scripts/**/*"
14 | ],
15 | "exclude": [
16 | "node_modules"
17 | ],
18 | "ts-node": {
19 | "esm": true,
20 |     "experimentalSpecifierResolution": "node"
21 | }
22 | }
--------------------------------------------------------------------------------