├── .claude └── settings.local.json ├── .gitignore ├── LICENSE ├── README.md ├── docs ├── briefs │ ├── entity-versioning-update-operations.md │ └── multiple-memory-contexts.md ├── development │ ├── placeholder.md │ ├── pr-instructions.md │ └── workflow.md └── plans │ └── planned-features.md ├── example.jsonl ├── img ├── read-function.png └── server-name.png ├── index.ts ├── package-lock.json ├── package.json ├── tsconfig.base.json └── tsconfig.json /.claude/settings.local.json: -------------------------------------------------------------------------------- 1 | { 2 | "permissions": { 3 | "allow": [ 4 | "Bash(npm whoami:*)", 5 | "Bash(npm run build:*)", 6 | "Bash(npm install)", 7 | "Bash(npm version:*)", 8 | "Bash(npm publish:*)" 9 | ] 10 | }, 11 | "enableAllProjectMcpServers": false 12 | } -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Build output 2 | dist/ 3 | build/ 4 | *.tsbuildinfo 5 | 6 | # Dependencies 7 | node_modules/ 8 | .npm 9 | .pnp.* 10 | .yarn/* 11 | !.yarn/patches 12 | !.yarn/plugins 13 | !.yarn/releases 14 | !.yarn/sdks 15 | !.yarn/versions 16 | 17 | # Logs 18 | logs 19 | *.log 20 | npm-debug.log* 21 | yarn-debug.log* 22 | yarn-error.log* 23 | 24 | # Runtime data 25 | pids 26 | *.pid 27 | *.seed 28 | *.pid.lock 29 | 30 | # Testing 31 | coverage/ 32 | .nyc_output/ 33 | 34 | # IDEs and editors 35 | .idea/ 36 | .vscode/* 37 | !.vscode/extensions.json 38 | !.vscode/settings.json 39 | !.vscode/tasks.json 40 | !.vscode/launch.json 41 | *.swp 42 | *.swo 43 | .DS_Store 44 | .env 45 | .env.local 46 | .env.*.local 47 | 48 | # TypeScript cache 49 | *.tsbuildinfo 50 | 51 | # Optional eslint cache 52 | .eslintcache 53 | 54 | # Memory files (except examples) 55 | *.jsonl 56 | !example*.jsonl 57 | 58 | # Local documentation 59 | PUBLISHING.md 60 | VERSION_UPDATE.md 61 | 62 | # History files 63 | .history/ 64 | 65 | # Package files 66 | *.tgz 67 | 68 | # OS generated files 69 | .DS_Store 70 | .DS_Store? 71 | ._* 72 | .Spotlight-V100 73 | .Trashes 74 | ehthumbs.db 75 | Thumbs.db 76 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | # MIT License 2 | 3 | Copyright (c) 2025 Shane Holloman 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # `mcp-knowledge-graph` 2 | 3 | > Knowledge Graph Memory Server 4 | 5 | An improved implementation of persistent memory using a local knowledge graph with a customizable `--memory-path`. 6 | 7 | This lets AI models remember information about the user across chats. It works with any AI model that supports the Model Context Protocol (MCP) or function calling capabilities. 8 | 9 | > [!NOTE] 10 | > This is a fork of the original [Memory Server](https://github.com/modelcontextprotocol/servers/tree/main/src/memory) and is intended to not use the ephemeral memory npx installation method. 11 | 12 | ## Server Name 13 | 14 | ```txt 15 | mcp-knowledge-graph 16 | ``` 17 | 18 | ![screen-of-server-name](https://raw.githubusercontent.com/shaneholloman/mcp-knowledge-graph/main/img/server-name.png) 19 | 20 | ![read-function](https://raw.githubusercontent.com/shaneholloman/mcp-knowledge-graph/main/img/read-function.png) 21 | 22 | ## Core Concepts 23 | 24 | ### Entities 25 | 26 | Entities are the primary nodes in the knowledge graph. Each entity has: 27 | 28 | - A unique name (identifier) 29 | - An entity type (e.g., "person", "organization", "event") 30 | - A list of observations 31 | 32 | Example: 33 | 34 | ```json 35 | { 36 | "name": "John_Smith", 37 | "entityType": "person", 38 | "observations": ["Speaks fluent Spanish"] 39 | } 40 | ``` 41 | 42 | ### Relations 43 | 44 | Relations define directed connections between entities. They are always stored in active voice and describe how entities interact or relate to each other. 45 | 46 | Example: 47 | 48 | ```json 49 | { 50 | "from": "John_Smith", 51 | "to": "ExampleCorp", 52 | "relationType": "works_at" 53 | } 54 | ``` 55 | 56 | ### Observations 57 | 58 | Observations are discrete pieces of information about an entity. 
They are: 59 | 60 | - Stored as strings 61 | - Attached to specific entities 62 | - Can be added or removed independently 63 | - Should be atomic (one fact per observation) 64 | 65 | Example: 66 | 67 | ```json 68 | { 69 | "entityName": "John_Smith", 70 | "observations": [ 71 | "Speaks fluent Spanish", 72 | "Graduated in 2019", 73 | "Prefers morning meetings" 74 | ] 75 | } 76 | ``` 77 | 78 | ## API 79 | 80 | ### Tools 81 | 82 | - **create_entities** 83 | - Create multiple new entities in the knowledge graph 84 | - Input: `entities` (array of objects) 85 | - Each object contains: 86 | - `name` (string): Entity identifier 87 | - `entityType` (string): Type classification 88 | - `observations` (string[]): Associated observations 89 | - Ignores entities with existing names 90 | 91 | - **create_relations** 92 | - Create multiple new relations between entities 93 | - Input: `relations` (array of objects) 94 | - Each object contains: 95 | - `from` (string): Source entity name 96 | - `to` (string): Target entity name 97 | - `relationType` (string): Relationship type in active voice 98 | - Skips duplicate relations 99 | 100 | - **add_observations** 101 | - Add new observations to existing entities 102 | - Input: `observations` (array of objects) 103 | - Each object contains: 104 | - `entityName` (string): Target entity 105 | - `contents` (string[]): New observations to add 106 | - Returns added observations per entity 107 | - Fails if entity doesn't exist 108 | 109 | - **delete_entities** 110 | - Remove entities and their relations 111 | - Input: `entityNames` (string[]) 112 | - Cascading deletion of associated relations 113 | - Silent operation if entity doesn't exist 114 | 115 | - **delete_observations** 116 | - Remove specific observations from entities 117 | - Input: `deletions` (array of objects) 118 | - Each object contains: 119 | - `entityName` (string): Target entity 120 | - `observations` (string[]): Observations to remove 121 | - Silent operation if observation doesn't exist 122 | 123 | - **delete_relations** 124 | - Remove specific relations from the graph 125 | - Input: `relations` (array of objects) 126 | - Each object contains: 127 | - `from` (string): Source entity name 128 | - `to` (string): Target entity name 129 | - `relationType` (string): Relationship type 130 | - Silent operation if relation doesn't exist 131 | 132 | - **read_graph** 133 | - Read the entire knowledge graph 134 | - No input required 135 | - Returns complete graph structure with all entities and relations 136 | 137 | - **search_nodes** 138 | - Search for nodes based on query 139 | - Input: `query` (string) 140 | - Searches across: 141 | - Entity names 142 | - Entity types 143 | - Observation content 144 | - Returns matching entities and their relations 145 | 146 | - **open_nodes** 147 | - Retrieve specific nodes by name 148 | - Input: `names` (string[]) 149 | - Returns: 150 | - Requested entities 151 | - Relations between requested entities 152 | - Silently skips non-existent nodes 153 | 154 | ## Usage with MCP-Compatible Platforms 155 | 156 | This server can be used with any AI platform that supports the Model Context Protocol (MCP) or function calling capabilities, including Claude, GPT, Llama, and others. 
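Regardless of the platform, tool calls carry JSON arguments that match the schemas described in the API section above. For illustration, here is a hypothetical `create_entities` payload a connected MCP client might send; the entity name and observation values are invented, only the argument shape follows the documented schema:

```json
{
  "entities": [
    {
      "name": "Jane_Doe",
      "entityType": "person",
      "observations": ["Prefers dark mode", "Works in data engineering"]
    }
  ]
}
```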
157 | 158 | ### Setup with Claude Desktop 159 | 160 | Add this to your claude_desktop_config.json: 161 | 162 | ```json 163 | { 164 | "mcpServers": { 165 | "memory": { 166 | "command": "npx", 167 | "args": [ 168 | "-y", 169 | "mcp-knowledge-graph", 170 | "--memory-path", 171 | "/Users/shaneholloman/Dropbox/shane/db/memory.jsonl" 172 | ], 173 | "autoapprove": [ 174 | "create_entities", 175 | "create_relations", 176 | "add_observations", 177 | "delete_entities", 178 | "delete_observations", 179 | "delete_relations", 180 | "read_graph", 181 | "search_nodes", 182 | "open_nodes" 183 | ] 184 | } 185 | } 186 | } 187 | ``` 188 | 189 | ### Setup with Other AI Platforms 190 | 191 | Any AI platform that supports function calling or the MCP standard can connect to this server. The specific configuration will depend on the platform, but the server exposes standard tools through the MCP interface. 192 | 193 | ### Custom Memory Path 194 | 195 | You can specify a custom path for the memory file: 196 | 197 | ```json 198 | { 199 | "mcpServers": { 200 | "memory": { 201 | "command": "npx", 202 | "args": [ 203 | "-y", 204 | "mcp-knowledge-graph", 205 | "--memory-path", 206 | "/Users/shaneholloman/Dropbox/shane/db/memory.jsonl" 207 | ], 208 | "autoapprove": [ 209 | "create_entities", 210 | "create_relations", 211 | "add_observations", 212 | "delete_entities", 213 | "delete_observations", 214 | "delete_relations", 215 | "read_graph", 216 | "search_nodes", 217 | "open_nodes" 218 | ] 219 | } 220 | } 221 | } 222 | ``` 223 | 224 | If no path is specified, it will default to memory.jsonl in the server's installation directory. 225 | 226 | ### System Prompt 227 | 228 | The prompt for utilizing memory depends on the use case and the AI model you're using. Changing the prompt will help the model determine the frequency and types of memories created. 229 | 230 | Here is an example prompt for chat personalization that can be adapted for any AI model. For Claude users, you could use this prompt in the "Custom Instructions" field of a [Claude.ai Project](https://www.anthropic.com/news/projects). For other models, adapt it to their respective instruction formats. 231 | 232 | ```txt 233 | Follow these steps for each interaction: 234 | 235 | 1. User Identification: 236 | - You should assume that you are interacting with default_user 237 | - If you have not identified default_user, proactively try to do so. 238 | 239 | 2. Memory Retrieval: 240 | - Always begin your chat by saying only "Remembering..." and retrieve all relevant information from your knowledge graph 241 | - Always refer to your knowledge graph as your "memory" 242 | 243 | 3. Memory Gathering: 244 | - While conversing with the user, be attentive to any new information that falls into these categories: 245 | a) Basic Identity (age, gender, location, job title, education level, etc.) 246 | b) Behaviors (interests, habits, etc.) 247 | c) Preferences (communication style, preferred language, etc.) 248 | d) Goals (goals, targets, aspirations, etc.) 249 | e) Relationships (personal and professional relationships up to 3 degrees of separation) 250 | 251 | 4.
Memory Update: 252 | - If any new information was gathered during the interaction, update your memory as follows: 253 | a) Create entities for recurring organizations, people, and significant events 254 | b) Connect them to the current entities using relations 255 | c) Store facts about them as observations 256 | ``` 257 | 258 | ## Integration with Other AI Models 259 | 260 | This server implements the Model Context Protocol (MCP) standard, making it compatible with any AI model that supports function calling. The knowledge graph structure and API are model-agnostic, allowing for flexible integration with various AI platforms. 261 | 262 | To integrate with other models: 263 | 264 | 1. Configure the model to access the MCP server 265 | 2. Ensure the model can make function calls to the exposed tools 266 | 3. Adapt the system prompt to the specific model's instruction format 267 | 4. Use the same knowledge graph operations regardless of the model 268 | 269 | ## License 270 | 271 | This MCP server is licensed under the MIT License. This means you are free to use, modify, and distribute the software, subject to the terms and conditions of the MIT License. For more details, please see the LICENSE file in the project repository. 272 | -------------------------------------------------------------------------------- /docs/briefs/entity-versioning-update-operations.md: -------------------------------------------------------------------------------- 1 | # Technical Brief: Entity Versioning and Update Operations 2 | 3 | ## Overview 4 | 5 | This document outlines the technical implementation plan for adding version tracking and update capabilities to the MCP Knowledge Graph server. These features will enable tracking the history and evolution of knowledge entities and relations, as well as provide tools to update existing graph elements. 6 | 7 | ## Background 8 | 9 | Currently, the MCP Knowledge Graph server allows creating entities and relations but lacks the ability to track their history or update them after creation. This makes it difficult to understand how knowledge evolves over time or to correct/update existing information. 10 | 11 | ## User Personas 12 | 13 | 1. **Knowledge Administrators**: Need to maintain and update knowledge bases with accurate information 14 | 2. **History Trackers**: Want to understand how knowledge has evolved over time 15 | 3. **Integration Developers**: Need to update existing entities and relations through API calls 16 | 17 | ## Implementation Details 18 | 19 | ### 1. Version Tracking System 20 | 21 | We will enhance entity and relation objects to include version information: 22 | 23 | ```typescript 24 | interface Entity { 25 | name: string; 26 | entityType: string; 27 | observations: string[]; 28 | createdAt: string; // ISO timestamp of creation/update 29 | version: number; // Incremented with each update 30 | } 31 | 32 | interface Relation { 33 | from: string; 34 | to: string; 35 | relationType: string; 36 | createdAt: string; // ISO timestamp of creation/update 37 | version: number; // Incremented with each update 38 | } 39 | ``` 40 | 41 | ### 2. 
Entity and Relation Update Operations 42 | 43 | We'll implement methods to update existing entities and relations: 44 | 45 | ```typescript 46 | async updateEntities(entities: Entity[]): Promise<Entity[]> { 47 | const graph = await this.loadGraph(); 48 | const updatedEntities = entities.map(updateEntity => { 49 | const existingEntity = graph.entities.find(e => e.name === updateEntity.name); 50 | if (!existingEntity) { 51 | throw new Error(`Entity with name ${updateEntity.name} not found`); 52 | } 53 | return { 54 | ...existingEntity, 55 | ...updateEntity, 56 | version: existingEntity.version + 1, 57 | createdAt: new Date().toISOString() 58 | }; 59 | }); 60 | 61 | // Update entities in the graph 62 | updatedEntities.forEach(updatedEntity => { 63 | const index = graph.entities.findIndex(e => e.name === updatedEntity.name); 64 | if (index !== -1) { 65 | graph.entities[index] = updatedEntity; 66 | } 67 | }); 68 | 69 | await this.saveGraph(graph); 70 | return updatedEntities; 71 | } 72 | 73 | async updateRelations(relations: Relation[]): Promise<Relation[]> { 74 | const graph = await this.loadGraph(); 75 | const updatedRelations = relations.map(updateRelation => { 76 | const existingRelation = graph.relations.find(r => 77 | r.from === updateRelation.from && 78 | r.to === updateRelation.to && 79 | r.relationType === updateRelation.relationType 80 | ); 81 | if (!existingRelation) { 82 | throw new Error(`Relation not found`); 83 | } 84 | return { 85 | ...existingRelation, 86 | ...updateRelation, 87 | version: existingRelation.version + 1, 88 | createdAt: new Date().toISOString() 89 | }; 90 | }); 91 | 92 | // Update relations in the graph 93 | updatedRelations.forEach(updatedRelation => { 94 | const index = graph.relations.findIndex(r => 95 | r.from === updatedRelation.from && 96 | r.to === updatedRelation.to && 97 | r.relationType === updatedRelation.relationType 98 | ); 99 | if (index !== -1) { 100 | graph.relations[index] = updatedRelation; 101 | } 102 | }); 103 | 104 | await this.saveGraph(graph); 105 | return updatedRelations; 106 | } 107 | ``` 108 | 109 | ### 3. New Tools for Entity and Relation Management 110 | 111 | We'll add these tools to manage knowledge graph elements: 112 | 113 | - `update_entities`: Update multiple existing entities in the knowledge graph 114 | - `update_relations`: Update multiple existing relations in the knowledge graph 115 | 116 | These tools will be exposed through the MCP server interface: 117 | 118 | ```typescript 119 | { 120 | name: "update_entities", 121 | description: "Update multiple existing entities in the knowledge graph", 122 | inputSchema: { 123 | type: "object", 124 | properties: { 125 | entities: { 126 | type: "array", 127 | items: { 128 | type: "object", 129 | properties: { 130 | name: { type: "string", description: "The name of the entity to update" }, 131 | entityType: { type: "string", description: "The updated type of the entity" }, 132 | observations: { 133 | type: "array", 134 | items: { type: "string" }, 135 | description: "The updated array of observation contents" 136 | }, 137 | }, 138 | required: ["name"], 139 | }, 140 | }, 141 | }, 142 | required: ["entities"], 143 | }, 144 | } 145 | ``` 146 | 147 | ### 4. Environment Variable Support 148 | 149 | To improve configuration flexibility, we'll add environment variable support: 150 | 151 | ```typescript 152 | // Check for memory path in command line args or environment variable 153 | let memoryPath = argv['memory-path'] || process.env.MEMORY_FILE_PATH; 154 | ``` 155 | 156 | ### 5.
Dockerization Support 157 | 158 | We'll add a Dockerfile to enable easy containerized deployment: 159 | 160 | ```dockerfile 161 | FROM node:lts-alpine 162 | 163 | # Create app directory 164 | WORKDIR /app 165 | 166 | # Copy package files 167 | COPY package*.json ./ 168 | 169 | # Install dependencies 170 | RUN npm install --ignore-scripts 171 | 172 | # Copy the rest of the code 173 | COPY . . 174 | 175 | # Build the project 176 | RUN npm run build 177 | 178 | # Run the MCP server 179 | CMD [ "node", "dist/index.js" ] 180 | ``` 181 | 182 | ## User Experience Improvements 183 | 184 | 1. **Version Information**: Include version and timestamp information in API responses 185 | 2. **Update Validation**: Verify entities exist before allowing updates 186 | 3. **Documentation**: Clear examples for updating entities and relations 187 | 188 | ## Safety Mechanisms 189 | 190 | 1. **Version Incrementing**: Automatically track version numbers to prevent overwriting 191 | 2. **Error Handling**: Clear error messages when entities or relations don't exist 192 | 3. **Field Preservation**: Preserve existing fields not explicitly updated 193 | 194 | ## Implementation Plan 195 | 196 | 1. **Phase 1: Core Version Tracking** 197 | - Update Entity and Relation interfaces 198 | - Modify creation operations to initialize version fields 199 | - Add updateEntities and updateRelations methods 200 | 201 | 2. **Phase 2: Tool Interface** 202 | - Implement update_entities and update_relations tools 203 | - Update server registration and tool handling 204 | 205 | 3. **Phase 3: Configuration Enhancements** 206 | - Add environment variable support 207 | - Improve documentation and configuration examples 208 | 209 | 4. **Phase 4: Deployment Improvements** 210 | - Add Dockerfile 211 | - Create deployment documentation 212 | 213 | 5. **Phase 5: Testing & Documentation** 214 | - Comprehensive testing across different scenarios 215 | - Update README with version tracking examples 216 | 217 | ## Benefits 218 | 219 | 1. **Knowledge Evolution**: Track how knowledge changes over time 220 | 2. **Error Correction**: Easily fix mistakes in the knowledge graph 221 | 3. **Audit Trail**: Understand when and how often knowledge elements change 222 | 4. **Deployment Flexibility**: Run as container or local process with configurable options 223 | 5. **Integration Simplicity**: Update APIs make it easier to maintain knowledge programmatically 224 | -------------------------------------------------------------------------------- /docs/briefs/multiple-memory-contexts.md: -------------------------------------------------------------------------------- 1 | # Technical Brief: Multiple Memory Contexts Implementation 2 | 3 | ## Overview 4 | 5 | This document outlines the technical implementation plan for adding multiple memory contexts to the MCP Knowledge Graph server. This feature will allow users to define, manage, and switch between different memory files, supporting both static named contexts and dynamic project-based memory paths. 6 | 7 | ## Background 8 | 9 | Currently, the MCP Knowledge Graph server uses a single memory file specified by the `--memory-path` parameter. Issue #6 requested the ability to define different memory files for different projects, allowing AI models to access project-based memory. 10 | 11 | ## User Personas 12 | 13 | 1. **Set-and-Forget Users**: Want a single memory file location configured once in their AI platform 14 | 2. 
**Multi-Context Users**: Need to switch between different memory contexts while using their AI platform 15 | 3. **Developers**: Comfortable with project-based approaches and explicit path management 16 | 17 | ## Implementation Details 18 | 19 | ### 1. Memory Context System 20 | 21 | We will implement a "memory context" system that manages multiple memory files: 22 | 23 | ```typescript 24 | interface MemoryContext { 25 | name: string; // Human-readable name 26 | path: string | PathTemplate; // File path (static or dynamic) 27 | description?: string; // Optional description 28 | isProjectBased: boolean; // Whether to use project detection 29 | lastAccessed?: Date; // When it was last used 30 | projectDetectionRules?: { // Rules for detecting project directories 31 | markers: string[]; // Files that indicate a project root 32 | maxDepth: number; // How far up to look for project markers 33 | }; 34 | } 35 | 36 | type PathTemplate = string; // e.g., "{projectDir}/.ai-memory.jsonl" 37 | 38 | interface ContextsConfig { 39 | activeContext: string; 40 | contexts: MemoryContext[]; 41 | } 42 | ``` 43 | 44 | ### 2. Configuration Parameters 45 | 46 | We'll add the following configuration parameters: 47 | 48 | - `--memory-path`: (Existing) Default memory file path 49 | - `--contexts-directory`: Directory where memory context configurations are stored 50 | - `--default-context`: Name of the default context to use if none is specified 51 | 52 | ### 3. New Tools for Context Management 53 | 54 | We'll add these tools to manage memory contexts: 55 | 56 | - `list_contexts`: List all available memory contexts 57 | - `get_active_context`: Show which context is currently active 58 | - `set_active_context`: Change the active context 59 | - `add_context`: Add a new memory context 60 | - `remove_context`: Remove a memory context (doesn't delete the file) 61 | 62 | ### 4. Path Resolution Logic 63 | 64 | When a tool is called, we'll resolve the memory path using this logic: 65 | 66 | ```typescript 67 | async function resolveMemoryPath(contextName?: string): Promise<string> { 68 | // Load contexts configuration 69 | const config = await loadContexts(CONTEXTS_FILE_PATH); 70 | 71 | // Get requested context or active context 72 | const contextToUse = contextName || config.activeContext || "default"; 73 | const context = config.contexts.find(c => c.name === contextToUse); 74 | 75 | if (!context) { 76 | // Fall back to default if context not found 77 | return MEMORY_FILE_PATH; 78 | } 79 | 80 | // If it's a static path, return it directly 81 | if (!context.isProjectBased) { 82 | return context.path; 83 | } 84 | 85 | // For project-based paths, resolve the template 86 | const projectInfo = await detectProjectInfo(); 87 | return resolvePathTemplate(context.path, projectInfo); 88 | } 89 | ``` 90 | 91 | ### 5.
Project Detection 92 | 93 | For project-based contexts, we'll implement project detection: 94 | 95 | ```typescript 96 | async function detectProjectInfo(): Promise<{ directory: string; name: string; marker: string | null }> { 97 | // Start at current directory 98 | let currentDir = process.cwd(); 99 | const maxDepth = 5; // Default max depth 100 | 101 | for (let i = 0; i < maxDepth; i++) { 102 | // Check for project markers 103 | for (const marker of ['package.json', '.git', 'pyproject.toml']) { 104 | if (await fileExists(path.join(currentDir, marker))) { 105 | // Found a project marker 106 | return { 107 | directory: currentDir, 108 | name: path.basename(currentDir), 109 | marker: marker 110 | }; 111 | } 112 | } 113 | 114 | // Move up one directory 115 | const parentDir = path.dirname(currentDir); 116 | if (parentDir === currentDir) { 117 | // Reached root directory 118 | break; 119 | } 120 | currentDir = parentDir; 121 | } 122 | 123 | // No project detected, use current directory 124 | return { 125 | directory: process.cwd(), 126 | name: path.basename(process.cwd()), 127 | marker: null 128 | }; 129 | } 130 | ``` 131 | 132 | ### 6. Context Configuration Management 133 | 134 | We'll implement functions to load and save context configurations: 135 | 136 | ```typescript 137 | async function loadContexts(contextsFilePath: string): Promise<ContextsConfig> { 138 | try { 139 | const data = await fs.readFile(contextsFilePath, "utf-8"); 140 | return JSON.parse(data); 141 | } catch (error) { 142 | // If file doesn't exist or is invalid, create default 143 | const defaultConfig: ContextsConfig = { 144 | activeContext: "default", 145 | contexts: [{ 146 | name: "default", 147 | path: MEMORY_FILE_PATH, 148 | isProjectBased: false, 149 | description: "Default memory context" 150 | }] 151 | }; 152 | 153 | // Ensure directory exists 154 | await fs.mkdir(path.dirname(contextsFilePath), { recursive: true }); 155 | 156 | // Write default config 157 | await fs.writeFile(contextsFilePath, JSON.stringify(defaultConfig, null, 2)); 158 | return defaultConfig; 159 | } 160 | } 161 | 162 | async function saveContexts(contextsFilePath: string, config: ContextsConfig): Promise<void> { 163 | await fs.writeFile(contextsFilePath, JSON.stringify(config, null, 2)); 164 | } 165 | ``` 166 | 167 | ### 7. Tool Interface Updates 168 | 169 | All existing tools will be updated to support context specification: 170 | 171 | ```typescript 172 | // Example tool schema update 173 | { 174 | name: "create_entities", 175 | description: "Create multiple new entities in the knowledge graph", 176 | inputSchema: { 177 | type: "object", 178 | properties: { 179 | entities: { 180 | // existing schema 181 | }, 182 | context: { 183 | type: "string", 184 | description: "Memory context to use (optional, defaults to active context)" 185 | } 186 | }, 187 | required: ["entities"] 188 | } 189 | } 190 | ``` 191 | 192 | ### 8. KnowledgeGraphManager Updates 193 | 194 | The `KnowledgeGraphManager` class will be updated to work with dynamic file paths: 195 | 196 | ```typescript 197 | class KnowledgeGraphManager { 198 | private async loadGraph(filePath: string): Promise<KnowledgeGraph> { 199 | try { 200 | const data = await fs.readFile(filePath, "utf-8"); 201 | // Rest of the method remains the same 202 | } catch (error) { 203 | // Error handling 204 | } 205 | } 206 | 207 | // Other methods would be updated similarly to accept a filePath parameter 208 | } 209 | ``` 210 | 211 | ## User Experience Improvements 212 | 213 | 1. **Visual Indicators**: When using tools, include the active context name in responses 214 | 2.
**Confirmation Prompts**: When changing contexts, provide clear confirmation 215 | 3. **Context Status**: Add a way to query the current active context 216 | 217 | ## Safety Mechanisms 218 | 219 | 1. **Read-Only Mode**: Option to make certain contexts read-only 220 | 2. **Context Validation**: Verify contexts exist before switching 221 | 3. **Default Fallback**: If a context becomes invalid, fall back to the default 222 | 4. **Explicit Context Operations**: Require explicit context switching rather than passing paths to every operation 223 | 224 | ## Migration Path 225 | 226 | For existing users: 227 | 228 | 1. The first time they run with the new version, their existing memory file becomes the "default" context 229 | 2. No changes to their workflow unless they want to use multiple contexts 230 | 3. Clear documentation on how to set up and use multiple contexts 231 | 232 | ## Configuration Examples 233 | 234 | ### Example Context Configuration 235 | 236 | ```json 237 | { 238 | "activeContext": "work", 239 | "contexts": [ 240 | { 241 | "name": "default", 242 | "path": "/Users/username/.ai-memory/default.jsonl", 243 | "isProjectBased": false, 244 | "description": "Default memory context" 245 | }, 246 | { 247 | "name": "work", 248 | "path": "/Users/username/.ai-memory/work.jsonl", 249 | "isProjectBased": false, 250 | "description": "Work-related memories" 251 | }, 252 | { 253 | "name": "project-specific", 254 | "path": "{projectDir}/.ai-memory.jsonl", 255 | "isProjectBased": true, 256 | "description": "Project-specific memories", 257 | "projectDetectionRules": { 258 | "markers": [".git", "package.json", "pyproject.toml"], 259 | "maxDepth": 3 260 | } 261 | } 262 | ] 263 | } 264 | ``` 265 | 266 | ### AI Platform Configuration Example 267 | 268 | ```json 269 | { 270 | "mcpServers": { 271 | "memory": { 272 | "command": "npx", 273 | "args": [ 274 | "-y", 275 | "@modelcontextprotocol/server-memory", 276 | "--contexts-directory", 277 | "/Users/username/.ai-memory/contexts", 278 | "--default-context", 279 | "personal" 280 | ] 281 | } 282 | } 283 | } 284 | ``` 285 | 286 | ## Implementation Plan 287 | 288 | 1. **Phase 1: Core Context System** 289 | - Implement context configuration loading/saving 290 | - Add context resolution logic 291 | - Update KnowledgeGraphManager to use dynamic paths 292 | 293 | 2. **Phase 2: Context Management Tools** 294 | - Implement list_contexts, get_active_context, set_active_context 295 | - Implement add_context, remove_context 296 | - Update existing tools to support context parameter 297 | 298 | 3. **Phase 3: Project-Based Contexts** 299 | - Implement project detection 300 | - Add path template resolution 301 | - Support project-specific context rules 302 | 303 | 4. **Phase 4: User Experience & Safety** 304 | - Add visual indicators for active context 305 | - Implement safety mechanisms 306 | - Add migration support for existing users 307 | 308 | 5. **Phase 5: Documentation & Testing** 309 | - Update README with context usage examples 310 | - Add configuration examples for different user personas 311 | - Comprehensive testing across different scenarios 312 | 313 | ## Benefits 314 | 315 | 1. **Flexibility**: Users can have separate memory files for different projects or contexts 316 | 2. **Backward Compatibility**: Existing usage with a single memory file still works 317 | 3. **Simplicity**: Clear context naming and management for non-developers 318 | 4. **Scalability**: Each project/context gets its own file, preventing one large memory file 319 | 5. 
**Safety**: Explicit context switching and validation prevents accidental data corruption 320 | -------------------------------------------------------------------------------- /docs/development/placeholder.md: -------------------------------------------------------------------------------- 1 | # NPM Package Name Reservation 2 | 3 | DONE 4 | 5 | > [!NOTE] 6 | > These instructions are for reserving the npm package name `mcp-knowledge-base` with a minimal placeholder package. 7 | 8 | ## Steps to Reserve Package Name 9 | 10 | ### 1. Create Minimal Project Structure 11 | 12 | ```bash 13 | mkdir mcp-knowledge-base 14 | cd mcp-knowledge-base 15 | ``` 16 | 17 | ### 2. Initialize Package 18 | 19 | ```bash 20 | npm init -y 21 | ``` 22 | 23 | ### 3. Update package.json 24 | 25 | ```json 26 | { 27 | "name": "mcp-knowledge-base", 28 | "version": "0.0.1", 29 | "description": "MCP server for knowledge base functionality - Coming Soon", 30 | "main": "index.js", 31 | "scripts": { 32 | "test": "echo \"Error: no test specified\" && exit 1" 33 | }, 34 | "keywords": [ 35 | "mcp", 36 | "knowledge-base", 37 | "ai", 38 | "memory" 39 | ], 40 | "author": "Your Name", 41 | "license": "MIT" 42 | } 43 | ``` 44 | 45 | ### 4. Create Minimal index.js 46 | 47 | ```javascript 48 | console.log('MCP Knowledge Base - Coming Soon'); 49 | ``` 50 | 51 | ### 5. Create README.md 52 | 53 | ```markdown 54 | # MCP Knowledge Base 55 | 56 | > [!NOTE] 57 | > This package is currently in development. Future versions will provide knowledge base functionality for AI models that support the Model Context Protocol (MCP). 58 | 59 | ## Coming Soon 60 | 61 | This package will build upon mcp-knowledge-graph to provide: 62 | - Enhanced knowledge base capabilities 63 | - Improved memory management 64 | - Advanced querying features 65 | 66 | ## Status 67 | 68 | This is a placeholder release. Production version coming soon. 69 | ``` 70 | 71 | ### 6. Publish Placeholder 72 | 73 | ```bash 74 | npm login # if not already logged in 75 | npm publish 76 | ``` 77 | 78 | ## Important Notes 79 | 80 | 1. Version Strategy 81 | - Start with 0.0.1 for placeholder 82 | - Use 0.x.x for development versions 83 | - Release 1.0.0 when ready for production 84 | 85 | 2. Package Maintenance 86 | - Update placeholder occasionally to maintain npm listing 87 | - Add "under development" notices in README 88 | - Consider adding GitHub repository with roadmap 89 | 90 | 3. Name Protection 91 | - Publishing placeholder prevents name squatting 92 | - Establishes your ownership of the name 93 | - Allows time for proper development 94 | 95 | 4. Future Updates 96 | - When ready to develop, use same package name 97 | - Increment version appropriately 98 | - Update with actual functionality 99 | 100 | ## Verification 101 | 102 | After publishing, verify reservation: 103 | 104 | ```bash 105 | npm view mcp-knowledge-base 106 | ``` 107 | 108 | ## Cleanup When Ready 109 | 110 | When ready to develop the actual package: 111 | 112 | 1. Archive placeholder code 113 | 2. Start development in new repository 114 | 3. Maintain same package name 115 | 4. 
Update version to reflect development status 116 | -------------------------------------------------------------------------------- /docs/development/pr-instructions.md: -------------------------------------------------------------------------------- 1 | # MCP Memory Server Pull Request Instructions 2 | 3 | ## Current Working Setup Preservation 4 | 5 | Keep your local working version intact at `C:\Users\shane\Desktop\memory` until PR is merged: 6 | 7 | ```json 8 | { 9 | "mcpServers": { 10 | "memory": { 11 | "command": "volta", 12 | "args": [ 13 | "run", 14 | "node", 15 | "C:\\Users\\shane\\Desktop\\memory\\dist\\index.js", 16 | "--memory-path", 17 | "C:\\Users\\shane\\Desktop\\memory\\memory.jsonl" 18 | ] 19 | } 20 | } 21 | } 22 | ``` 23 | 24 | ## Getting the Original Repository 25 | 26 | ```bash 27 | git clone https://github.com/modelcontextprotocol/servers.git 28 | cd servers 29 | ``` 30 | 31 | ## Modified Files to Track 32 | 33 | Current local modifications: 34 | 35 | - `tsconfig.json` - Changed from monorepo to local paths 36 | - `tsconfig.base.json` - Created locally for standalone build 37 | - `index.ts` - Added memory path functionality 38 | 39 | ## Preparing the Pull Request 40 | 41 | ### 1. Configuration Files 42 | 43 | Revert tsconfig.json back to monorepo structure: 44 | 45 | ```json 46 | { 47 | "extends": "../../tsconfig.json", 48 | "compilerOptions": { 49 | "outDir": "./dist", 50 | "rootDir": "." 51 | }, 52 | "include": [ 53 | "./**/*.ts" 54 | ] 55 | } 56 | ``` 57 | 58 | ### 2. Remove Local-Only Files 59 | 60 | - Delete local `tsconfig.base.json` (not needed in monorepo) 61 | 62 | ### 3. Code Changes to Submit 63 | 64 | - Keep all memory path functionality changes in `index.ts` 65 | - Ensure cross-platform path handling remains intact 66 | - Verify JSONL extension usage 67 | 68 | ### 4. Dependencies 69 | 70 | Ensure these are in the monorepo's package.json: 71 | 72 | ```json 73 | { 74 | "dependencies": { 75 | "minimist": "^1.2.8" 76 | }, 77 | "devDependencies": { 78 | "@types/minimist": "^1.2.5" 79 | } 80 | } 81 | ``` 82 | 83 | ### 5. Documentation Updates 84 | 85 | - Update README.md with new --memory-path option 86 | - Document JSONL format requirement 87 | - Add cross-platform path handling notes 88 | 89 | ### 6. Pull Request Process 90 | 91 | 1. Create new branch: 92 | 93 | ```bash 94 | git checkout -b feature/custom-memory-path 95 | ``` 96 | 97 | 2. Copy modified files: 98 | 99 | ```bash 100 | cp /path/to/your/index.ts packages/server-memory/ 101 | ``` 102 | 103 | 3. Test build in monorepo: 104 | 105 | ```bash 106 | npm install 107 | npm run build 108 | ``` 109 | 110 | 4. Commit changes: 111 | 112 | ```bash 113 | git add . 114 | git commit -m "Add custom memory path support with cross-platform handling" 115 | ``` 116 | 117 | 5. Create PR: 118 | - Push to GitHub 119 | - Create pull request 120 | - Reference any related issues 121 | - Describe testing performed 122 | 123 | ## Additional Considerations 124 | 125 | - Consider adding tests for the new functionality 126 | - Follow monorepo's contribution guidelines 127 | - Document any breaking changes 128 | - Test on multiple platforms if possible 129 | 130 | ## Backup Plan 131 | 132 | Until PR is merged, maintain your working local version: 133 | 134 | 1. Keep local build working 135 | 2. Note any improvements needed for PR 136 | 3. 
Continue using local version for development 137 | -------------------------------------------------------------------------------- /docs/development/workflow.md: -------------------------------------------------------------------------------- 1 | # Development Workflow 2 | 3 | ## Initial Environment 4 | 5 | - Windows 11 system 6 | - Node.js environment managed by Volta 7 | - PowerShell 7.4.6 as the terminal 8 | 9 | ## Project Structure 10 | 11 | This is part of a monorepo project: 12 | 13 | - Package: @modelcontextprotocol/server-memory 14 | - Version: 0.6.2 15 | - Type: ES Module (package.json "type": "module") 16 | 17 | ## Setup Steps 18 | 19 | 1. **TypeScript Configuration** 20 | - Created tsconfig.base.json with ES module support: 21 | 22 | ```json 23 | { 24 | "compilerOptions": { 25 | "target": "ES2020", 26 | "module": "NodeNext", 27 | "moduleResolution": "NodeNext", 28 | "esModuleInterop": true, 29 | "strict": true, 30 | "skipLibCheck": true, 31 | "forceConsistentCasingInFileNames": true, 32 | "declaration": true, 33 | "sourceMap": true, 34 | "allowJs": true, 35 | "checkJs": true 36 | } 37 | } 38 | ``` 39 | 40 | - Maintained monorepo compatibility in tsconfig.json: 41 | 42 | ```json 43 | { 44 | "extends": "./tsconfig.base.json", 45 | "compilerOptions": { 46 | "outDir": "./dist", 47 | "rootDir": "." 48 | }, 49 | "include": [ 50 | "./**/*.ts" 51 | ] 52 | } 53 | ``` 54 | 55 | 2. **Dependencies** 56 | - Installed TypeScript globally with Volta: 57 | 58 | ```bash 59 | volta install typescript 60 | ``` 61 | 62 | - Added type definitions for minimist: 63 | 64 | ```bash 65 | volta run npm install --save-dev @types/minimist 66 | ``` 67 | 68 | 3. **Code Fixes** 69 | - Fixed duplicate argv declarations in index.ts 70 | - Removed backup directory that was causing build conflicts 71 | - Ensured proper ES module imports 72 | 73 | 4. **Build Process** 74 | - Build script in package.json: 75 | 76 | ```json 77 | "scripts": { 78 | "build": "tsc && shx chmod +x dist/*.js", 79 | "prepare": "npm run build", 80 | "watch": "tsc --watch" 81 | } 82 | ``` 83 | 84 | - Successfully built with: 85 | 86 | ```bash 87 | volta run npm run build 88 | ``` 89 | 90 | ## Build Output 91 | 92 | The successful build generates: 93 | 94 | - dist/index.js (compiled JavaScript) 95 | - dist/index.d.ts (TypeScript declarations) 96 | - dist/index.js.map (source maps) 97 | 98 | ## Testing via Inspector 99 | 100 | - Run the inspector with a memory path argument: 101 | 102 | ```sh 103 | volta run npx @modelcontextprotocol/inspector dist/index.js --memory-path=C:/Users/shane/Desktop/memory/memory.jsonl 104 | ``` 105 | 106 | ## Development Notes 107 | 108 | - Keep monorepo compatibility in mind when making changes 109 | - Use Volta for all Node.js/npm operations 110 | - Maintain ES module format throughout the codebase 111 | - Run builds with `volta run npm run build` 112 | -------------------------------------------------------------------------------- /docs/plans/planned-features.md: -------------------------------------------------------------------------------- 1 | # Planned Features 2 | 3 | ## Multiple Memory Contexts 4 | 5 | We plan to implement a memory context system that allows users to define and switch between multiple memory files. This feature will support both static named contexts and dynamic project-based memory paths, enabling AI models to access different memory stores for different projects while maintaining a clear and safe user experience. 
6 | 7 | ## Entity Versioning and Update Operations 8 | 9 | We plan to implement version tracking for entities and relations in the knowledge graph, enabling history tracking and evolution of knowledge over time. This enhancement will be accompanied by new update operations that allow modifying existing entities and relations while preserving version history. Additionally, we'll add flexible configuration options through environment variables and Docker containerization support for easier deployment. 10 | -------------------------------------------------------------------------------- /example.jsonl: -------------------------------------------------------------------------------- 1 | {"type":"entity","data":{"name":"Alice_Smith","entityType":"person","observations":["Works as a software engineer","Lives in San Francisco","Speaks Mandarin fluently"]}} 2 | {"type":"entity","data":{"name":"ML_Project_X","entityType":"project","observations":["Started in 2023","Focus on natural language processing","Currently in development phase"]}} 3 | {"type":"entity","data":{"name":"TechCorp","entityType":"organization","observations":["Founded in 2010","Specializes in AI development","Headquartered in San Francisco"]}} 4 | {"type":"relation","data":{"from":"Alice_Smith","to":"ML_Project_X","relationType":"leads"}} 5 | {"type":"relation","data":{"from":"Alice_Smith","to":"TechCorp","relationType":"works_at"}} 6 | {"type":"relation","data":{"from":"TechCorp","to":"ML_Project_X","relationType":"owns"}} 7 | -------------------------------------------------------------------------------- /img/read-function.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shaneholloman/mcp-knowledge-graph/3e41a7033e0e953e014281c6ba844f516ffb1eb4/img/read-function.png -------------------------------------------------------------------------------- /img/server-name.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/shaneholloman/mcp-knowledge-graph/3e41a7033e0e953e014281c6ba844f516ffb1eb4/img/server-name.png -------------------------------------------------------------------------------- /index.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | import { Server } from "@modelcontextprotocol/sdk/server/index.js"; 4 | import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"; 5 | import { 6 | CallToolRequestSchema, 7 | ListToolsRequestSchema, 8 | } from "@modelcontextprotocol/sdk/types.js"; 9 | import { promises as fs } from 'fs'; 10 | import path from 'path'; 11 | import { fileURLToPath } from 'url'; 12 | import minimist from 'minimist'; 13 | import { isAbsolute } from 'path'; 14 | 15 | // Parse args and handle paths safely 16 | const argv = minimist(process.argv.slice(2)); 17 | let memoryPath = argv['memory-path']; 18 | 19 | // If a custom path is provided, ensure it's absolute 20 | if (memoryPath && !isAbsolute(memoryPath)) { 21 | memoryPath = path.resolve(process.cwd(), memoryPath); 22 | } 23 | 24 | // Define the path to the JSONL file 25 | const __dirname = path.dirname(fileURLToPath(import.meta.url)); 26 | // Use the custom path or default to the installation directory 27 | const MEMORY_FILE_PATH = memoryPath || path.join(__dirname, 'memory.jsonl'); 28 | 29 | // We are storing our memory using entities, relations, and observations in a graph structure 30 | interface Entity { 31 | name: string; 32 |
entityType: string; 33 | observations: string[]; 34 | } 35 | 36 | interface Relation { 37 | from: string; 38 | to: string; 39 | relationType: string; 40 | } 41 | 42 | interface KnowledgeGraph { 43 | entities: Entity[]; 44 | relations: Relation[]; 45 | } 46 | 47 | // The KnowledgeGraphManager class contains all operations to interact with the knowledge graph 48 | class KnowledgeGraphManager { 49 | private async loadGraph(): Promise<KnowledgeGraph> { 50 | try { 51 | const data = await fs.readFile(MEMORY_FILE_PATH, "utf-8"); 52 | const lines = data.split("\n").filter(line => line.trim() !== ""); 53 | return lines.reduce((graph: KnowledgeGraph, line) => { 54 | const item = JSON.parse(line); 55 | if (item.type === "entity") graph.entities.push(item as Entity); 56 | if (item.type === "relation") graph.relations.push(item as Relation); 57 | return graph; 58 | }, { entities: [], relations: [] }); 59 | } catch (error) { 60 | if (error instanceof Error && 'code' in error && (error as any).code === "ENOENT") { 61 | return { entities: [], relations: [] }; 62 | } 63 | throw error; 64 | } 65 | } 66 | 67 | private async saveGraph(graph: KnowledgeGraph): Promise<void> { 68 | const lines = [ 69 | ...graph.entities.map(e => JSON.stringify({ type: "entity", ...e })), 70 | ...graph.relations.map(r => JSON.stringify({ type: "relation", ...r })), 71 | ]; 72 | await fs.writeFile(MEMORY_FILE_PATH, lines.join("\n")); 73 | } 74 | 75 | async createEntities(entities: Entity[]): Promise<Entity[]> { 76 | const graph = await this.loadGraph(); 77 | const newEntities = entities.filter(e => !graph.entities.some(existingEntity => existingEntity.name === e.name)); 78 | graph.entities.push(...newEntities); 79 | await this.saveGraph(graph); 80 | return newEntities; 81 | } 82 | 83 | async createRelations(relations: Relation[]): Promise<Relation[]> { 84 | const graph = await this.loadGraph(); 85 | const newRelations = relations.filter(r => !graph.relations.some(existingRelation => 86 | existingRelation.from === r.from && 87 | existingRelation.to === r.to && 88 | existingRelation.relationType === r.relationType 89 | )); 90 | graph.relations.push(...newRelations); 91 | await this.saveGraph(graph); 92 | return newRelations; 93 | } 94 | 95 | async addObservations(observations: { entityName: string; contents: string[] }[]): Promise<{ entityName: string; addedObservations: string[] }[]> { 96 | const graph = await this.loadGraph(); 97 | const results = observations.map(o => { 98 | const entity = graph.entities.find(e => e.name === o.entityName); 99 | if (!entity) { 100 | throw new Error(`Entity with name ${o.entityName} not found`); 101 | } 102 | const newObservations = o.contents.filter(content => !entity.observations.includes(content)); 103 | entity.observations.push(...newObservations); 104 | return { entityName: o.entityName, addedObservations: newObservations }; 105 | }); 106 | await this.saveGraph(graph); 107 | return results; 108 | } 109 | 110 | async deleteEntities(entityNames: string[]): Promise<void> { 111 | const graph = await this.loadGraph(); 112 | graph.entities = graph.entities.filter(e => !entityNames.includes(e.name)); 113 | graph.relations = graph.relations.filter(r => !entityNames.includes(r.from) && !entityNames.includes(r.to)); 114 | await this.saveGraph(graph); 115 | } 116 | 117 | async deleteObservations(deletions: { entityName: string; observations: string[] }[]): Promise<void> { 118 | const graph = await this.loadGraph(); 119 | deletions.forEach(d => { 120 | const entity = graph.entities.find(e => e.name === d.entityName); 121 | if (entity) { 122 |
entity.observations = entity.observations.filter(o => !d.observations.includes(o)); 123 | } 124 | }); 125 | await this.saveGraph(graph); 126 | } 127 | 128 | async deleteRelations(relations: Relation[]): Promise<void> { 129 | const graph = await this.loadGraph(); 130 | graph.relations = graph.relations.filter(r => !relations.some(delRelation => 131 | r.from === delRelation.from && 132 | r.to === delRelation.to && 133 | r.relationType === delRelation.relationType 134 | )); 135 | await this.saveGraph(graph); 136 | } 137 | 138 | async readGraph(): Promise<KnowledgeGraph> { 139 | return this.loadGraph(); 140 | } 141 | 142 | // Very basic search function 143 | async searchNodes(query: string): Promise<KnowledgeGraph> { 144 | const graph = await this.loadGraph(); 145 | 146 | // Filter entities 147 | const filteredEntities = graph.entities.filter(e => 148 | e.name.toLowerCase().includes(query.toLowerCase()) || 149 | e.entityType.toLowerCase().includes(query.toLowerCase()) || 150 | e.observations.some(o => o.toLowerCase().includes(query.toLowerCase())) 151 | ); 152 | 153 | // Create a Set of filtered entity names for quick lookup 154 | const filteredEntityNames = new Set(filteredEntities.map(e => e.name)); 155 | 156 | // Filter relations to only include those between filtered entities 157 | const filteredRelations = graph.relations.filter(r => 158 | filteredEntityNames.has(r.from) && filteredEntityNames.has(r.to) 159 | ); 160 | 161 | const filteredGraph: KnowledgeGraph = { 162 | entities: filteredEntities, 163 | relations: filteredRelations, 164 | }; 165 | 166 | return filteredGraph; 167 | } 168 | 169 | async openNodes(names: string[]): Promise<KnowledgeGraph> { 170 | const graph = await this.loadGraph(); 171 | 172 | // Filter entities 173 | const filteredEntities = graph.entities.filter(e => names.includes(e.name)); 174 | 175 | // Create a Set of filtered entity names for quick lookup 176 | const filteredEntityNames = new Set(filteredEntities.map(e => e.name)); 177 | 178 | // Filter relations to only include those between filtered entities 179 | const filteredRelations = graph.relations.filter(r => 180 | filteredEntityNames.has(r.from) && filteredEntityNames.has(r.to) 181 | ); 182 | 183 | const filteredGraph: KnowledgeGraph = { 184 | entities: filteredEntities, 185 | relations: filteredRelations, 186 | }; 187 | 188 | return filteredGraph; 189 | } 190 | } 191 | 192 | const knowledgeGraphManager = new KnowledgeGraphManager(); 193 | 194 | 195 | // The server instance and tools exposed to AI models 196 | const server = new Server({ 197 | name: "mcp-knowledge-graph", 198 | version: "1.0.1", 199 | }, { 200 | capabilities: { 201 | tools: {}, 202 | }, 203 | },); 204 | 205 | server.setRequestHandler(ListToolsRequestSchema, async () => { 206 | return { 207 | tools: [ 208 | { 209 | name: "create_entities", 210 | description: "Create multiple new entities in the knowledge graph", 211 | inputSchema: { 212 | type: "object", 213 | properties: { 214 | entities: { 215 | type: "array", 216 | items: { 217 | type: "object", 218 | properties: { 219 | name: { type: "string", description: "The name of the entity" }, 220 | entityType: { type: "string", description: "The type of the entity" }, 221 | observations: { 222 | type: "array", 223 | items: { type: "string" }, 224 | description: "An array of observation contents associated with the entity" 225 | }, 226 | }, 227 | required: ["name", "entityType", "observations"], 228 | }, 229 | }, 230 | }, 231 | required: ["entities"], 232 | }, 233 | }, 234 | { 235 | name: "create_relations", 236 | description: "Create multiple new
relations between entities in the knowledge graph. Relations should be in active voice", 237 | inputSchema: { 238 | type: "object", 239 | properties: { 240 | relations: { 241 | type: "array", 242 | items: { 243 | type: "object", 244 | properties: { 245 | from: { type: "string", description: "The name of the entity where the relation starts" }, 246 | to: { type: "string", description: "The name of the entity where the relation ends" }, 247 | relationType: { type: "string", description: "The type of the relation" }, 248 | }, 249 | required: ["from", "to", "relationType"], 250 | }, 251 | }, 252 | }, 253 | required: ["relations"], 254 | }, 255 | }, 256 | { 257 | name: "add_observations", 258 | description: "Add new observations to existing entities in the knowledge graph", 259 | inputSchema: { 260 | type: "object", 261 | properties: { 262 | observations: { 263 | type: "array", 264 | items: { 265 | type: "object", 266 | properties: { 267 | entityName: { type: "string", description: "The name of the entity to add the observations to" }, 268 | contents: { 269 | type: "array", 270 | items: { type: "string" }, 271 | description: "An array of observation contents to add" 272 | }, 273 | }, 274 | required: ["entityName", "contents"], 275 | }, 276 | }, 277 | }, 278 | required: ["observations"], 279 | }, 280 | }, 281 | { 282 | name: "delete_entities", 283 | description: "Delete multiple entities and their associated relations from the knowledge graph", 284 | inputSchema: { 285 | type: "object", 286 | properties: { 287 | entityNames: { 288 | type: "array", 289 | items: { type: "string" }, 290 | description: "An array of entity names to delete" 291 | }, 292 | }, 293 | required: ["entityNames"], 294 | }, 295 | }, 296 | { 297 | name: "delete_observations", 298 | description: "Delete specific observations from entities in the knowledge graph", 299 | inputSchema: { 300 | type: "object", 301 | properties: { 302 | deletions: { 303 | type: "array", 304 | items: { 305 | type: "object", 306 | properties: { 307 | entityName: { type: "string", description: "The name of the entity containing the observations" }, 308 | observations: { 309 | type: "array", 310 | items: { type: "string" }, 311 | description: "An array of observations to delete" 312 | }, 313 | }, 314 | required: ["entityName", "observations"], 315 | }, 316 | }, 317 | }, 318 | required: ["deletions"], 319 | }, 320 | }, 321 | { 322 | name: "delete_relations", 323 | description: "Delete multiple relations from the knowledge graph", 324 | inputSchema: { 325 | type: "object", 326 | properties: { 327 | relations: { 328 | type: "array", 329 | items: { 330 | type: "object", 331 | properties: { 332 | from: { type: "string", description: "The name of the entity where the relation starts" }, 333 | to: { type: "string", description: "The name of the entity where the relation ends" }, 334 | relationType: { type: "string", description: "The type of the relation" }, 335 | }, 336 | required: ["from", "to", "relationType"], 337 | }, 338 | description: "An array of relations to delete" 339 | }, 340 | }, 341 | required: ["relations"], 342 | }, 343 | }, 344 | { 345 | name: "read_graph", 346 | description: "Read the entire knowledge graph", 347 | inputSchema: { 348 | type: "object", 349 | properties: {}, 350 | }, 351 | }, 352 | { 353 | name: "search_nodes", 354 | description: "Search for nodes in the knowledge graph based on a query", 355 | inputSchema: { 356 | type: "object", 357 | properties: { 358 | query: { type: "string", description: "The search query to match against 
entity names, types, and observation content" }, 359 | }, 360 | required: ["query"], 361 | }, 362 | }, 363 | { 364 | name: "open_nodes", 365 | description: "Open specific nodes in the knowledge graph by their names", 366 | inputSchema: { 367 | type: "object", 368 | properties: { 369 | names: { 370 | type: "array", 371 | items: { type: "string" }, 372 | description: "An array of entity names to retrieve", 373 | }, 374 | }, 375 | required: ["names"], 376 | }, 377 | }, 378 | ], 379 | }; 380 | }); 381 | 382 | server.setRequestHandler(CallToolRequestSchema, async (request) => { 383 | const { name, arguments: args } = request.params; 384 | 385 | if (!args) { 386 | throw new Error(`No arguments provided for tool: ${name}`); 387 | } 388 | 389 | switch (name) { 390 | case "create_entities": 391 | return { content: [{ type: "text", text: JSON.stringify(await knowledgeGraphManager.createEntities(args.entities as Entity[]), null, 2) }] }; 392 | case "create_relations": 393 | return { content: [{ type: "text", text: JSON.stringify(await knowledgeGraphManager.createRelations(args.relations as Relation[]), null, 2) }] }; 394 | case "add_observations": 395 | return { content: [{ type: "text", text: JSON.stringify(await knowledgeGraphManager.addObservations(args.observations as { entityName: string; contents: string[] }[]), null, 2) }] }; 396 | case "delete_entities": 397 | await knowledgeGraphManager.deleteEntities(args.entityNames as string[]); 398 | return { content: [{ type: "text", text: "Entities deleted successfully" }] }; 399 | case "delete_observations": 400 | await knowledgeGraphManager.deleteObservations(args.deletions as { entityName: string; observations: string[] }[]); 401 | return { content: [{ type: "text", text: "Observations deleted successfully" }] }; 402 | case "delete_relations": 403 | await knowledgeGraphManager.deleteRelations(args.relations as Relation[]); 404 | return { content: [{ type: "text", text: "Relations deleted successfully" }] }; 405 | case "read_graph": 406 | return { content: [{ type: "text", text: JSON.stringify(await knowledgeGraphManager.readGraph(), null, 2) }] }; 407 | case "search_nodes": 408 | return { content: [{ type: "text", text: JSON.stringify(await knowledgeGraphManager.searchNodes(args.query as string), null, 2) }] }; 409 | case "open_nodes": 410 | return { content: [{ type: "text", text: JSON.stringify(await knowledgeGraphManager.openNodes(args.names as string[]), null, 2) }] }; 411 | default: 412 | throw new Error(`Unknown tool: ${name}`); 413 | } 414 | }); 415 | 416 | async function main() { 417 | const transport = new StdioServerTransport(); 418 | await server.connect(transport); 419 | console.error("Knowledge Graph MCP Server running on stdio"); 420 | } 421 | 422 | main().catch((error) => { 423 | console.error("Fatal error in main():", error); 424 | process.exit(1); 425 | }); 426 | -------------------------------------------------------------------------------- /package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "mcp-knowledge-graph", 3 | "version": "1.0.2", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "mcp-knowledge-graph", 9 | "version": "1.0.2", 10 | "license": "MIT", 11 | "dependencies": { 12 | "@modelcontextprotocol/sdk": "1.0.1", 13 | "minimist": "^1.2.8" 14 | }, 15 | "bin": { 16 | "mcp-knowledge-graph": "dist/index.js" 17 | }, 18 | "devDependencies": { 19 | "@types/minimist": "^1.2.5", 20 | "@types/node": "^22.9.3", 21 | "shx": 
"^0.3.4", 22 | "typescript": "^5.6.2" 23 | } 24 | }, 25 | "node_modules/@modelcontextprotocol/sdk": { 26 | "version": "1.0.1", 27 | "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.0.1.tgz", 28 | "integrity": "sha512-slLdFaxQJ9AlRg+hw28iiTtGvShAOgOKXcD0F91nUcRYiOMuS9ZBYjcdNZRXW9G5JQ511GRTdUy1zQVZDpJ+4w==", 29 | "license": "MIT", 30 | "dependencies": { 31 | "content-type": "^1.0.5", 32 | "raw-body": "^3.0.0", 33 | "zod": "^3.23.8" 34 | } 35 | }, 36 | "node_modules/@types/minimist": { 37 | "version": "1.2.5", 38 | "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.5.tgz", 39 | "integrity": "sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag==", 40 | "dev": true, 41 | "license": "MIT" 42 | }, 43 | "node_modules/@types/node": { 44 | "version": "22.10.1", 45 | "resolved": "https://registry.npmjs.org/@types/node/-/node-22.10.1.tgz", 46 | "integrity": "sha512-qKgsUwfHZV2WCWLAnVP1JqnpE6Im6h3Y0+fYgMTasNQ7V++CBX5OT1as0g0f+OyubbFqhf6XVNIsmN4IIhEgGQ==", 47 | "dev": true, 48 | "license": "MIT", 49 | "dependencies": { 50 | "undici-types": "~6.20.0" 51 | } 52 | }, 53 | "node_modules/balanced-match": { 54 | "version": "1.0.2", 55 | "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", 56 | "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", 57 | "dev": true, 58 | "license": "MIT" 59 | }, 60 | "node_modules/brace-expansion": { 61 | "version": "1.1.11", 62 | "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", 63 | "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", 64 | "dev": true, 65 | "license": "MIT", 66 | "dependencies": { 67 | "balanced-match": "^1.0.0", 68 | "concat-map": "0.0.1" 69 | } 70 | }, 71 | "node_modules/bytes": { 72 | "version": "3.1.2", 73 | "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", 74 | "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", 75 | "license": "MIT", 76 | "engines": { 77 | "node": ">= 0.8" 78 | } 79 | }, 80 | "node_modules/concat-map": { 81 | "version": "0.0.1", 82 | "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", 83 | "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", 84 | "dev": true, 85 | "license": "MIT" 86 | }, 87 | "node_modules/content-type": { 88 | "version": "1.0.5", 89 | "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", 90 | "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", 91 | "license": "MIT", 92 | "engines": { 93 | "node": ">= 0.6" 94 | } 95 | }, 96 | "node_modules/depd": { 97 | "version": "2.0.0", 98 | "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", 99 | "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", 100 | "license": "MIT", 101 | "engines": { 102 | "node": ">= 0.8" 103 | } 104 | }, 105 | "node_modules/fs.realpath": { 106 | "version": "1.0.0", 107 | "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", 108 | "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", 109 | "dev": true, 110 | "license": "ISC" 111 | }, 112 | "node_modules/function-bind": { 113 | "version": "1.1.2", 114 
| "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", 115 | "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", 116 | "dev": true, 117 | "license": "MIT", 118 | "funding": { 119 | "url": "https://github.com/sponsors/ljharb" 120 | } 121 | }, 122 | "node_modules/glob": { 123 | "version": "7.2.3", 124 | "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", 125 | "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", 126 | "deprecated": "Glob versions prior to v9 are no longer supported", 127 | "dev": true, 128 | "license": "ISC", 129 | "dependencies": { 130 | "fs.realpath": "^1.0.0", 131 | "inflight": "^1.0.4", 132 | "inherits": "2", 133 | "minimatch": "^3.1.1", 134 | "once": "^1.3.0", 135 | "path-is-absolute": "^1.0.0" 136 | }, 137 | "engines": { 138 | "node": "*" 139 | }, 140 | "funding": { 141 | "url": "https://github.com/sponsors/isaacs" 142 | } 143 | }, 144 | "node_modules/hasown": { 145 | "version": "2.0.2", 146 | "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", 147 | "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", 148 | "dev": true, 149 | "license": "MIT", 150 | "dependencies": { 151 | "function-bind": "^1.1.2" 152 | }, 153 | "engines": { 154 | "node": ">= 0.4" 155 | } 156 | }, 157 | "node_modules/http-errors": { 158 | "version": "2.0.0", 159 | "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", 160 | "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", 161 | "license": "MIT", 162 | "dependencies": { 163 | "depd": "2.0.0", 164 | "inherits": "2.0.4", 165 | "setprototypeof": "1.2.0", 166 | "statuses": "2.0.1", 167 | "toidentifier": "1.0.1" 168 | }, 169 | "engines": { 170 | "node": ">= 0.8" 171 | } 172 | }, 173 | "node_modules/iconv-lite": { 174 | "version": "0.6.3", 175 | "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", 176 | "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", 177 | "license": "MIT", 178 | "dependencies": { 179 | "safer-buffer": ">= 2.1.2 < 3.0.0" 180 | }, 181 | "engines": { 182 | "node": ">=0.10.0" 183 | } 184 | }, 185 | "node_modules/inflight": { 186 | "version": "1.0.6", 187 | "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", 188 | "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", 189 | "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", 190 | "dev": true, 191 | "license": "ISC", 192 | "dependencies": { 193 | "once": "^1.3.0", 194 | "wrappy": "1" 195 | } 196 | }, 197 | "node_modules/inherits": { 198 | "version": "2.0.4", 199 | "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", 200 | "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", 201 | "license": "ISC" 202 | }, 203 | "node_modules/interpret": { 204 | "version": "1.4.0", 205 | "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz", 206 | "integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==", 207 | "dev": true, 208 | "license": "MIT", 209 | "engines": { 210 | "node": ">= 0.10" 211 | } 212 | }, 213 | "node_modules/is-core-module": { 214 | "version": "2.15.1", 215 | "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.15.1.tgz", 216 | "integrity": "sha512-z0vtXSwucUJtANQWldhbtbt7BnL0vxiFjIdDLAatwhDYty2bad6s+rijD6Ri4YuYJubLzIJLUidCh09e1djEVQ==", 217 | "dev": true, 218 | "license": "MIT", 219 | "dependencies": { 220 | "hasown": "^2.0.2" 221 | }, 222 | "engines": { 223 | "node": ">= 0.4" 224 | }, 225 | "funding": { 226 | "url": "https://github.com/sponsors/ljharb" 227 | } 228 | }, 229 | "node_modules/minimatch": { 230 | "version": "3.1.2", 231 | "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", 232 | "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", 233 | "dev": true, 234 | "license": "ISC", 235 | "dependencies": { 236 | "brace-expansion": "^1.1.7" 237 | }, 238 | "engines": { 239 | "node": "*" 240 | } 241 | }, 242 | "node_modules/minimist": { 243 | "version": "1.2.8", 244 | "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", 245 | "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", 246 | "license": "MIT", 247 | "funding": { 248 | "url": "https://github.com/sponsors/ljharb" 249 | } 250 | }, 251 | "node_modules/once": { 252 | "version": "1.4.0", 253 | "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", 254 | "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", 255 | "dev": true, 256 | "license": "ISC", 257 | "dependencies": { 258 | "wrappy": "1" 259 | } 260 | }, 261 | "node_modules/path-is-absolute": { 262 | "version": "1.0.1", 263 | "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", 264 | "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", 265 | "dev": true, 266 | "license": "MIT", 267 | "engines": { 268 | "node": ">=0.10.0" 269 | } 270 | }, 271 | "node_modules/path-parse": { 272 | "version": "1.0.7", 273 | "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", 274 | "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", 275 | "dev": true, 276 | "license": "MIT" 277 | }, 278 | "node_modules/raw-body": { 279 | "version": "3.0.0", 280 | "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-3.0.0.tgz", 281 | "integrity": "sha512-RmkhL8CAyCRPXCE28MMH0z2PNWQBNk2Q09ZdxM9IOOXwxwZbN+qbWaatPkdkWIKL2ZVDImrN/pK5HTRz2PcS4g==", 282 | "license": "MIT", 283 | 
"dependencies": { 284 | "bytes": "3.1.2", 285 | "http-errors": "2.0.0", 286 | "iconv-lite": "0.6.3", 287 | "unpipe": "1.0.0" 288 | }, 289 | "engines": { 290 | "node": ">= 0.8" 291 | } 292 | }, 293 | "node_modules/rechoir": { 294 | "version": "0.6.2", 295 | "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz", 296 | "integrity": "sha512-HFM8rkZ+i3zrV+4LQjwQ0W+ez98pApMGM3HUrN04j3CqzPOzl9nmP15Y8YXNm8QHGv/eacOVEjqhmWpkRV0NAw==", 297 | "dev": true, 298 | "dependencies": { 299 | "resolve": "^1.1.6" 300 | }, 301 | "engines": { 302 | "node": ">= 0.10" 303 | } 304 | }, 305 | "node_modules/resolve": { 306 | "version": "1.22.8", 307 | "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", 308 | "integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", 309 | "dev": true, 310 | "license": "MIT", 311 | "dependencies": { 312 | "is-core-module": "^2.13.0", 313 | "path-parse": "^1.0.7", 314 | "supports-preserve-symlinks-flag": "^1.0.0" 315 | }, 316 | "bin": { 317 | "resolve": "bin/resolve" 318 | }, 319 | "funding": { 320 | "url": "https://github.com/sponsors/ljharb" 321 | } 322 | }, 323 | "node_modules/safer-buffer": { 324 | "version": "2.1.2", 325 | "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", 326 | "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", 327 | "license": "MIT" 328 | }, 329 | "node_modules/setprototypeof": { 330 | "version": "1.2.0", 331 | "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", 332 | "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==", 333 | "license": "ISC" 334 | }, 335 | "node_modules/shelljs": { 336 | "version": "0.8.5", 337 | "resolved": "https://registry.npmjs.org/shelljs/-/shelljs-0.8.5.tgz", 338 | "integrity": "sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==", 339 | "dev": true, 340 | "license": "BSD-3-Clause", 341 | "dependencies": { 342 | "glob": "^7.0.0", 343 | "interpret": "^1.0.0", 344 | "rechoir": "^0.6.2" 345 | }, 346 | "bin": { 347 | "shjs": "bin/shjs" 348 | }, 349 | "engines": { 350 | "node": ">=4" 351 | } 352 | }, 353 | "node_modules/shx": { 354 | "version": "0.3.4", 355 | "resolved": "https://registry.npmjs.org/shx/-/shx-0.3.4.tgz", 356 | "integrity": "sha512-N6A9MLVqjxZYcVn8hLmtneQWIJtp8IKzMP4eMnx+nqkvXoqinUPCbUFLp2UcWTEIUONhlk0ewxr/jaVGlc+J+g==", 357 | "dev": true, 358 | "license": "MIT", 359 | "dependencies": { 360 | "minimist": "^1.2.3", 361 | "shelljs": "^0.8.5" 362 | }, 363 | "bin": { 364 | "shx": "lib/cli.js" 365 | }, 366 | "engines": { 367 | "node": ">=6" 368 | } 369 | }, 370 | "node_modules/statuses": { 371 | "version": "2.0.1", 372 | "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", 373 | "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", 374 | "license": "MIT", 375 | "engines": { 376 | "node": ">= 0.8" 377 | } 378 | }, 379 | "node_modules/supports-preserve-symlinks-flag": { 380 | "version": "1.0.0", 381 | "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", 382 | "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", 383 | "dev": true, 384 | "license": "MIT", 385 | "engines": { 386 | "node": ">= 0.4" 387 | }, 388 | "funding": { 389 | 
"url": "https://github.com/sponsors/ljharb" 390 | } 391 | }, 392 | "node_modules/toidentifier": { 393 | "version": "1.0.1", 394 | "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", 395 | "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", 396 | "license": "MIT", 397 | "engines": { 398 | "node": ">=0.6" 399 | } 400 | }, 401 | "node_modules/typescript": { 402 | "version": "5.7.2", 403 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.2.tgz", 404 | "integrity": "sha512-i5t66RHxDvVN40HfDd1PsEThGNnlMCMT3jMUuoh9/0TaqWevNontacunWyN02LA9/fIbEWlcHZcgTKb9QoaLfg==", 405 | "dev": true, 406 | "license": "Apache-2.0", 407 | "bin": { 408 | "tsc": "bin/tsc", 409 | "tsserver": "bin/tsserver" 410 | }, 411 | "engines": { 412 | "node": ">=14.17" 413 | } 414 | }, 415 | "node_modules/undici-types": { 416 | "version": "6.20.0", 417 | "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.20.0.tgz", 418 | "integrity": "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg==", 419 | "dev": true, 420 | "license": "MIT" 421 | }, 422 | "node_modules/unpipe": { 423 | "version": "1.0.0", 424 | "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", 425 | "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", 426 | "license": "MIT", 427 | "engines": { 428 | "node": ">= 0.8" 429 | } 430 | }, 431 | "node_modules/wrappy": { 432 | "version": "1.0.2", 433 | "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", 434 | "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", 435 | "dev": true, 436 | "license": "ISC" 437 | }, 438 | "node_modules/zod": { 439 | "version": "3.23.8", 440 | "resolved": "https://registry.npmjs.org/zod/-/zod-3.23.8.tgz", 441 | "integrity": "sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==", 442 | "license": "MIT", 443 | "funding": { 444 | "url": "https://github.com/sponsors/colinhacks" 445 | } 446 | } 447 | } 448 | } 449 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "mcp-knowledge-graph", 3 | "version": "1.0.3", 4 | "description": "MCP server enabling persistent memory for AI models through a local knowledge graph", 5 | "license": "MIT", 6 | "author": "Shane Holloman", 7 | "homepage": "https://github.com/shaneholloman/mcp-knowledge-graph", 8 | "bugs": "https://github.com/shaneholloman/mcp-knowledge-graph/issues", 9 | "type": "module", 10 | "bin": { 11 | "mcp-knowledge-graph": "dist/index.js" 12 | }, 13 | "files": [ 14 | "dist" 15 | ], 16 | "scripts": { 17 | "build": "tsc && shx chmod +x dist/*.js", 18 | "prepare": "npm run build", 19 | "watch": "tsc --watch" 20 | }, 21 | "dependencies": { 22 | "@modelcontextprotocol/sdk": "1.0.1", 23 | "minimist": "^1.2.8" 24 | }, 25 | "devDependencies": { 26 | "@types/minimist": "^1.2.5", 27 | "@types/node": "^22.9.3", 28 | "shx": "^0.3.4", 29 | "typescript": "^5.6.2" 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /tsconfig.base.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2020", 4 | "module": "NodeNext", 5 | "moduleResolution": 
"NodeNext", 6 | "esModuleInterop": true, 7 | "strict": true, 8 | "skipLibCheck": true, 9 | "forceConsistentCasingInFileNames": true, 10 | "declaration": true, 11 | "sourceMap": true, 12 | "allowJs": true, 13 | "checkJs": true 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.base.json", 3 | "compilerOptions": { 4 | "outDir": "./dist", 5 | "rootDir": "." 6 | }, 7 | "include": [ 8 | "./**/*.ts" 9 | ] 10 | } 11 | --------------------------------------------------------------------------------