├── .cursor └── rules │ ├── about-codebase.mdc │ ├── best-practices.mdc │ ├── debugging.mdc │ ├── environment-variables.mdc │ ├── error-handling.mdc │ ├── file-operations.mdc │ ├── mcp-server-pattern.mdc │ ├── style-guidelines.mdc │ ├── technologies.mdc │ └── testing-approach.mdc ├── .cursorindexingignore ├── .flake8 ├── .github ├── ISSUE_TEMPLATE │ ├── Bug-Report.yml │ └── Feature-Request.yml ├── copilot-instructions.md ├── release.yml └── workflows │ ├── bump-version.yml │ ├── ci.yml │ └── publish-to-pypi.yml ├── .gitignore ├── .python-version ├── .vscode ├── launch.json └── settings.json ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── SECURITY.md ├── add_license_headers.py ├── context ├── Python-MCP-SDK.md └── llms-full.txt ├── glama.json ├── pyproject.toml ├── src ├── codelogic_mcp_server │ ├── __init__.py │ ├── handlers.py │ ├── server.py │ └── utils.py └── start_server.py ├── start-mcp-inspector.ps1 ├── test ├── .env.test.example ├── __init__.py ├── integration_test_all.py ├── test_env.py ├── test_fixtures.py ├── unit_test_environment.py ├── unit_test_handlers.py └── unit_test_utils.py └── uv.lock /.cursor/rules/about-codebase.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: General information about the CodeLogic MCP Server codebase and its purpose 3 | globs: 4 | alwaysApply: false 5 | --- 6 | - This repository contains a Model Context Protocol (MCP) server that integrates with CodeLogic's knowledge graph APIs 7 | - It enables AI programming assistants to access dependency data from CodeLogic to analyze code and database impacts 8 | - The core package is in src/codelogic_mcp_server/ with server.py, handlers.py, and utils.py -------------------------------------------------------------------------------- /.cursor/rules/best-practices.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Best practices for working with the CodeLogic MCP Server codebase 3 | globs: 4 | alwaysApply: false 5 | --- 6 | - Use semantic search before grep for broader context 7 | - Maintain proper error handling and logging 8 | - Keep code changes atomic and focused 9 | -------------------------------------------------------------------------------- /.cursor/rules/debugging.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Debugging guidance for the CodeLogic MCP Server 3 | globs: "**/*.py" 4 | alwaysApply: false 5 | --- 6 | - Enable Debug Mode by setting `CODELOGIC_DEBUG_MODE=true` 7 | - Use debugpy capabilities for remote debugging 8 | - Check logs in the logs directory for detailed information 9 | - Use proper logging levels for different types of information -------------------------------------------------------------------------------- /.cursor/rules/environment-variables.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Key environment variables for the CodeLogic MCP Server 3 | globs: "**/*.py" 4 | alwaysApply: false 5 | --- 6 | - `CODELOGIC_SERVER_HOST`: CodeLogic server URL 7 | - `CODELOGIC_USERNAME`: Username for authentication 8 | - `CODELOGIC_PASSWORD`: Password for authentication 9 | - `CODELOGIC_WORKSPACE_NAME`: Workspace name 10 | - `CODELOGIC_DEBUG_MODE`: Enable debug logging 11 | - `CODELOGIC_TEST_MODE`: Used by test framework -------------------------------------------------------------------------------- 
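The environment variables listed in environment-variables.mdc drive all server configuration. As a minimal, hedged sketch (not a file from this repository — the helper name `load_codelogic_config` is illustrative), loading and validating them with python-dotenv, the mechanism referenced in technologies.mdc, might look like this:

```python
# Illustrative sketch only; assumes python-dotenv is installed.
import os
from dotenv import load_dotenv


def load_codelogic_config() -> dict[str, str]:
    """Load and validate the CodeLogic environment variables listed above."""
    load_dotenv()  # pick up a local .env file if one is present
    required = (
        "CODELOGIC_SERVER_HOST",
        "CODELOGIC_USERNAME",
        "CODELOGIC_PASSWORD",
        "CODELOGIC_WORKSPACE_NAME",
    )
    missing = [name for name in required if not os.getenv(name)]
    if missing:
        raise RuntimeError(f"Missing required environment variables: {', '.join(missing)}")
    return {name: os.environ[name] for name in required}
```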
/.cursor/rules/error-handling.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Error handling patterns for the CodeLogic MCP Server 3 | globs: "**/*.py" 4 | alwaysApply: false 5 | --- 6 | - Use the following pattern for error handling in tool implementations: 7 | ```python 8 | try: 9 | # Operations that might fail 10 | except Exception as e: 11 | sys.stderr.write(f"Error: {str(e)}\n") 12 | return [types.TextContent(type="text", text=f"# Error\n\n{str(e)}")] 13 | ``` 14 | - Always catch and report exceptions 15 | - Write errors to stderr 16 | - Return formatted error messages to the client -------------------------------------------------------------------------------- /.cursor/rules/file-operations.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: File operation guidance for working with the CodeLogic MCP Server 3 | globs: 4 | alwaysApply: false 5 | --- 6 | - Direct file editing with context preservation 7 | - File creation and deletion capabilities 8 | - Directory listing and navigation 9 | - Maintain proper file organization and structure -------------------------------------------------------------------------------- /.cursor/rules/mcp-server-pattern.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Core coding patterns for MCP Server implementation 3 | globs: "**/*.py" 4 | alwaysApply: false 5 | --- 6 | - Use the following pattern for MCP server implementation: 7 | ```python 8 | server = Server("codelogic-mcp-server") 9 | 10 | @server.list_tools() 11 | async def handle_list_tools() -> list[types.Tool]: 12 | # Define and return tools 13 | 14 | @server.call_tool() 15 | async def handle_call_tool(name: str, arguments: dict | None) -> list[types.TextContent]: 16 | # Handle tool execution 17 | ``` 18 | - New tools should be added to handle_list_tools() with descriptive names (prefix: `codelogic-`) 19 | - Tool handlers should be implemented in handle_call_tool() 20 | - Create handler functions with proper error handling 21 | - Return results as markdown-formatted text -------------------------------------------------------------------------------- /.cursor/rules/style-guidelines.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Style guidelines for the CodeLogic MCP Server project 3 | globs: "**/*.py" 4 | alwaysApply: false 5 | --- 6 | - Include MPL 2.0 copyright headers in all Python files 7 | - Use Google-style docstrings for modules/classes/functions 8 | - Always use Python type hints 9 | - Keep I/O operations asynchronous 10 | - Return markdown-formatted text in tool responses -------------------------------------------------------------------------------- /.cursor/rules/technologies.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Key technologies used in the CodeLogic MCP Server project 3 | globs: 4 | alwaysApply: false 5 | --- 6 | - Python 3.13+ with extensive use of async/await 7 | - Model Context Protocol SDK (`mcp[cli]`) 8 | - HTTPX for API requests 9 | - Environment variables via dotenv for configuration -------------------------------------------------------------------------------- /.cursor/rules/testing-approach.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Testing approach for the CodeLogic MCP Server 3 | globs: 
"**/test/*.py" 4 | alwaysApply: false 5 | --- 6 | - Use unit tests for functions without external dependencies 7 | - Use integration tests for tests against a real CodeLogic server 8 | - Set the `CODELOGIC_TEST_MODE` environment variable for test runs 9 | - Test both success cases and error handling patterns -------------------------------------------------------------------------------- /.cursorindexingignore: -------------------------------------------------------------------------------- 1 | # Don't index SpecStory auto-save files, but allow explicit context inclusion via @ references 2 | .specstory/** 3 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore = E501, W503 3 | max-line-length = 120 -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/Bug-Report.yml: -------------------------------------------------------------------------------- 1 | name: Bug Report 2 | description: File a bug report 3 | title: "[Bug]: " 4 | labels: ["bug"] 5 | type: Bug 6 | assignees: 7 | - garrmark 8 | body: 9 | - type: markdown 10 | attributes: 11 | value: | 12 | Thanks for taking the time to fill out this bug report! 13 | 14 | - type: textarea 15 | id: bug-description 16 | attributes: 17 | label: Bug Description 18 | description: A clear and concise description of what the bug is. 19 | placeholder: Tell us what happened... 20 | validations: 21 | required: true 22 | 23 | - type: textarea 24 | id: reproduction-steps 25 | attributes: 26 | label: Steps To Reproduce 27 | description: | 28 | Steps to reproduce the behavior: 29 | 1. API endpoint accessed (e.g., '/v1/generate') 30 | 2. Request payload used (please include relevant JSON) 31 | 3. Client configuration (e.g., authentication method, headers) 32 | 4. Model configuration parameters (if applicable) 33 | 5. Observed error or unexpected behavior 34 | placeholder: Provide detailed steps so we can reproduce the issue... 35 | validations: 36 | required: true 37 | 38 | - type: textarea 39 | id: expected-behavior 40 | attributes: 41 | label: Expected Behavior 42 | description: A clear and concise description of what you expected to happen. 43 | validations: 44 | required: true 45 | 46 | - type: textarea 47 | id: screenshots 48 | attributes: 49 | label: Screenshots 50 | description: If applicable, add screenshots to help explain your problem. 51 | 52 | - type: dropdown 53 | id: os 54 | attributes: 55 | label: Operating System 56 | multiple: true 57 | options: 58 | - Windows 59 | - macOS 60 | - Linux 61 | - Other (specify in additional context) 62 | validations: 63 | required: true 64 | 65 | - type: input 66 | id: python-version 67 | attributes: 68 | label: Python Version 69 | placeholder: e.g., 3.9.10 70 | validations: 71 | required: true 72 | 73 | - type: input 74 | id: package-version 75 | attributes: 76 | label: Package Version 77 | placeholder: e.g., 1.0.0 78 | validations: 79 | required: true 80 | 81 | - type: input 82 | id: client-implementation 83 | attributes: 84 | label: Client Implementation 85 | description: How are you accessing the service? 86 | placeholder: e.g., Python SDK, curl, etc. 
87 | validations: 88 | required: true 89 | 90 | - type: input 91 | id: models 92 | attributes: 93 | label: Model(s) Used 94 | placeholder: e.g., specific model versions being used 95 | validations: 96 | required: true 97 | 98 | - type: textarea 99 | id: additional-context 100 | attributes: 101 | label: Additional Context 102 | description: Add any other context about the problem here. 103 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/Feature-Request.yml: -------------------------------------------------------------------------------- 1 | name: Feature Request 2 | description: Suggest an idea for this project 3 | title: "[Feature]: " 4 | labels: ["enhancement"] 5 | type: Feature 6 | assignees: 7 | - garrmark 8 | body: 9 | - type: markdown 10 | attributes: 11 | value: | 12 | Thanks for taking the time to suggest a new feature! 13 | 14 | - type: textarea 15 | id: problem-description 16 | attributes: 17 | label: Is your feature request related to a problem? 18 | description: A clear and concise description of what the problem is. 19 | placeholder: I'm always frustrated when... 20 | validations: 21 | required: true 22 | 23 | - type: textarea 24 | id: solution 25 | attributes: 26 | label: Describe the solution you'd like 27 | description: A clear and concise description of what you want to happen. 28 | validations: 29 | required: true 30 | 31 | - type: textarea 32 | id: alternatives 33 | attributes: 34 | label: Describe alternatives you've considered 35 | description: A clear and concise description of any alternative solutions or features you've considered. 36 | validations: 37 | required: false 38 | 39 | - type: textarea 40 | id: context 41 | attributes: 42 | label: Additional context 43 | description: Add any other context or screenshots about the feature request here. 44 | validations: 45 | required: false 46 | 47 | - type: dropdown 48 | id: priority 49 | attributes: 50 | label: Priority 51 | description: How important is this feature to you? 52 | options: 53 | - Nice to have 54 | - Important 55 | - Critical 56 | validations: 57 | required: true 58 | -------------------------------------------------------------------------------- /.github/copilot-instructions.md: -------------------------------------------------------------------------------- 1 | # CodeLogic MCP Server - AI Assistant Instructions 2 | 3 | ## About This Codebase 4 | 5 | This repository contains a Model Context Protocol (MCP) server that integrates with CodeLogic's knowledge graph APIs. It enables AI programming assistants (like GitHub Copilot and Cursor) to access dependency data from CodeLogic to analyze code and database impacts. 
6 | 7 | ## Key Technologies 8 | 9 | - **Python 3.13+** with extensive use of async/await 10 | - **Model Context Protocol SDK** (`mcp[cli]`) 11 | - **Neo4j** for graph database operations 12 | - **HTTPX** for API requests 13 | - **Environment variables** via dotenv for configuration 14 | 15 | ## Project Structure 16 | 17 | - **src/codelogic_mcp_server/**: Core package 18 | - **`__init__.py`**: Package initialization and entry point 19 | - **`server.py`**: MCP server implementation 20 | - **`handlers.py`**: Tool handlers implementation 21 | - **`utils.py`**: API interaction utilities 22 | 23 | ## Core Coding Patterns 24 | 25 | ### MCP Server Pattern 26 | 27 | ```python 28 | server = Server("codelogic-mcp-server") 29 | 30 | @server.list_tools() 31 | async def handle_list_tools() -> list[types.Tool]: 32 | # Define and return tools 33 | 34 | @server.call_tool() 35 | async def handle_call_tool(name: str, arguments: dict | None) -> list[types.TextContent]: 36 | # Handle tool execution 37 | ``` 38 | 39 | ### Error Handling 40 | 41 | ```python 42 | try: 43 | # Operations that might fail 44 | except Exception as e: 45 | sys.stderr.write(f"Error: {str(e)}\n") 46 | return [types.TextContent(type="text", text=f"# Error\n\n{str(e)}")] 47 | ``` 48 | 49 | ## Style Guidelines 50 | 51 | 1. **Copyright Headers**: Include MPL 2.0 headers in all Python files 52 | 2. **Docstrings**: Google-style docstrings for modules/classes/functions 53 | 3. **Type Hints**: Always use Python type hints 54 | 4. **Asynchronous**: Keep I/O operations asynchronous 55 | 5. **Format Outputs**: Return markdown-formatted text in tool responses 56 | 57 | ## Tool Implementation Pattern 58 | 59 | When implementing new MCP tools: 60 | 61 | 1. Add to `handle_list_tools()` with descriptive name (prefix: `codelogic-`) 62 | 2. Add handler in `handle_call_tool()` 63 | 3. Implement handler function with error handling 64 | 4. 
Return results as markdown-formatted text 65 | 66 | ## Testing Approach 67 | 68 | - **Unit Tests**: For functions without external dependencies 69 | - **Integration Tests**: For tests against a real CodeLogic server 70 | - Use the `CODELOGIC_TEST_MODE` environment variable 71 | 72 | ## Debugging 73 | 74 | - Debug Mode: Set `CODELOGIC_DEBUG_MODE=true` 75 | - Remote Debugging: Use debugpy capabilities 76 | 77 | ## Key Environment Variables 78 | 79 | - `CODELOGIC_SERVER_HOST`: CodeLogic server URL 80 | - `CODELOGIC_USERNAME`: Username for authentication 81 | - `CODELOGIC_PASSWORD`: Password for authentication 82 | - `CODELOGIC_WORKSPACE_NAME`: Workspace name 83 | - `CODELOGIC_DEBUG_MODE`: Enable debug logging 84 | - `CODELOGIC_TEST_MODE`: Used by test framework 85 | 86 | ## AI Assistant Integration 87 | 88 | ### GitHub Copilot 89 | - Uses these instructions via `.github/copilot-instructions.md` 90 | - Integrates through GitHub's Copilot infrastructure 91 | 92 | ### Cursor 93 | - Uses these instructions via `.cursor/instructions.md` 94 | - Integrates through Cursor's AI infrastructure 95 | - Supports additional features like: 96 | - Direct file editing 97 | - Terminal command execution 98 | - Code search and analysis 99 | - Database impact analysis 100 | -------------------------------------------------------------------------------- /.github/release.yml: -------------------------------------------------------------------------------- 1 | # .github/release.yml 2 | 3 | changelog: 4 | categories: 5 | - title: New Features 6 | labels: 7 | - enhancement 8 | - feature 9 | - title: Bug Fixes 10 | labels: 11 | - bug 12 | - fix 13 | - title: Documentation 14 | labels: 15 | - documentation 16 | - docs 17 | - title: Dependencies 18 | labels: 19 | - dependencies 20 | - dependency 21 | - title: Other Changes 22 | labels: 23 | - '*' -------------------------------------------------------------------------------- /.github/workflows/bump-version.yml: -------------------------------------------------------------------------------- 1 | name: Bump Version 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | version_type: 7 | description: 'Type of version bump (patch, minor, major)' 8 | required: true 9 | default: 'patch' 10 | type: choice 11 | options: 12 | - patch 13 | - minor 14 | - major 15 | custom_version: 16 | description: 'Custom version (optional, overrides version_type if provided)' 17 | required: false 18 | type: string 19 | release_notes: 20 | description: 'Release notes for this version' 21 | required: false 22 | type: string 23 | 24 | jobs: 25 | bump-version: 26 | runs-on: ubuntu-latest 27 | if: github.ref == 'refs/heads/main' 28 | permissions: 29 | contents: write 30 | outputs: 31 | new_version: ${{ steps.bump_version.outputs.new_version }} 32 | steps: 33 | - uses: actions/checkout@v4 34 | with: 35 | fetch-depth: 0 36 | token: ${{ secrets.GITHUB_TOKEN }} 37 | 38 | - name: Set up Python 39 | uses: actions/setup-python@v5 40 | with: 41 | python-version: '3.13' 42 | 43 | - name: Install dependencies 44 | run: | 45 | python -m pip install --upgrade pip 46 | pip install toml 47 | 48 | - name: Configure Git 49 | run: | 50 | git config user.name "GitHub Actions" 51 | git config user.email "actions@github.com" 52 | 53 | - name: Bump version 54 | id: bump_version 55 | run: | 56 | python -c ' 57 | import toml 58 | import sys 59 | import re 60 | import os 61 | 62 | # Read the current version 63 | with open("pyproject.toml", "r") as f: 64 | config = toml.load(f) 65 | 66 | current_version = 
config["project"]["version"] 67 | print(f"Current version: {current_version}") 68 | 69 | # Parse the current version 70 | major, minor, patch = map(int, current_version.split(".")) 71 | 72 | # Determine the new version 73 | custom_version = "${{ github.event.inputs.custom_version }}" 74 | if custom_version: 75 | # Validate custom version format 76 | if re.match(r"^\d+\.\d+\.\d+$", custom_version): 77 | new_version = custom_version 78 | else: 79 | print("Error: Custom version must be in format X.Y.Z") 80 | sys.exit(1) 81 | else: 82 | version_type = "${{ github.event.inputs.version_type }}" 83 | if version_type == "patch": 84 | patch += 1 85 | elif version_type == "minor": 86 | minor += 1 87 | patch = 0 88 | elif version_type == "major": 89 | major += 1 90 | minor = 0 91 | patch = 0 92 | else: 93 | print(f"Error: Unknown version type: {version_type}") 94 | sys.exit(1) 95 | 96 | new_version = f"{major}.{minor}.{patch}" 97 | 98 | # Update the version in pyproject.toml 99 | config["project"]["version"] = new_version 100 | with open("pyproject.toml", "w") as f: 101 | toml.dump(config, f) 102 | 103 | print(f"New version: {new_version}") 104 | with open(os.environ["GITHUB_OUTPUT"], "a") as f: 105 | f.write(f"new_version={new_version}\n") 106 | ' 107 | 108 | - name: Commit and push changes 109 | run: | 110 | git add pyproject.toml 111 | git commit -m "Bump version to ${{ steps.bump_version.outputs.new_version }}" 112 | git push 113 | 114 | - name: Create tag 115 | run: | 116 | git tag v${{ steps.bump_version.outputs.new_version }} 117 | git push --tags 118 | 119 | create-release: 120 | needs: bump-version 121 | runs-on: ubuntu-latest 122 | if: github.ref == 'refs/heads/main' 123 | permissions: 124 | contents: write 125 | steps: 126 | - uses: actions/checkout@v4 127 | with: 128 | fetch-depth: 0 129 | 130 | - name: Create GitHub Release 131 | uses: softprops/action-gh-release@v1 132 | with: 133 | tag_name: v${{ needs.bump-version.outputs.new_version }} 134 | name: Release v${{ needs.bump-version.outputs.new_version }} 135 | body: ${{ github.event.inputs.release_notes || '' }} 136 | generate_release_notes: ${{ github.event.inputs.release_notes == '' }} 137 | draft: false 138 | prerelease: false 139 | token: ${{ secrets.GITHUB_TOKEN }} 140 | 141 | - name: Notify Slack about new version 142 | id: slack 143 | uses: slackapi/slack-github-action@v1.25.0 144 | with: 145 | channel-id: ${{ secrets.SLACK_CHANNEL_ID }} 146 | slack-message: | 147 | :tada: *New Version Released!* 148 | 149 | *Package:* codelogic-mcp-server v${{ needs.bump-version.outputs.new_version }} 150 | *Released by:* ${{ github.actor }} 151 | 152 | ${{ github.event.inputs.release_notes || '_No release notes provided._' }} 153 | 154 | :link: <${{ github.server_url }}/${{ github.repository }}/releases/tag/v${{ needs.bump-version.outputs.new_version }}|View Release on GitHub> 155 | env: 156 | SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | pull_request: 7 | branches: [ main ] 8 | 9 | jobs: 10 | test: 11 | runs-on: ubuntu-latest 12 | strategy: 13 | matrix: 14 | python-version: [3.13] 15 | 16 | steps: 17 | - uses: actions/checkout@v3 18 | - name: Set up Python ${{ matrix.python-version }} 19 | uses: actions/setup-python@v4 20 | with: 21 | python-version: ${{ matrix.python-version }} 22 | - name: 
Install dependencies 23 | run: | 24 | python -m pip install --upgrade pip 25 | python -m pip install uv 26 | uv pip install --system -e ".[dev]" 27 | python -m pip install flake8 28 | - name: Lint with flake8 29 | run: | 30 | # stop the build if there are Python syntax errors or undefined names 31 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 32 | # exit-zero treats all errors as warnings 33 | flake8 . --count --exit-zero --max-complexity=10 --statistics 34 | - name: Test with unittest 35 | run: | 36 | python -m unittest discover -s test -p "unit*.py" -v -------------------------------------------------------------------------------- /.github/workflows/publish-to-pypi.yml: -------------------------------------------------------------------------------- 1 | name: Publish Python Package to PyPI 2 | 3 | on: 4 | release: 5 | types: [created] 6 | workflow_run: 7 | workflows: ["Bump Version"] 8 | types: 9 | - completed 10 | 11 | jobs: 12 | deploy: 13 | runs-on: ubuntu-latest 14 | if: ${{ (github.event_name == 'release' || (github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success')) && github.ref == 'refs/heads/main' }} 15 | steps: 16 | - uses: actions/checkout@v4 17 | with: 18 | ref: ${{ github.event.release.tag_name || 'main' }} 19 | 20 | - name: Set up Python 21 | uses: actions/setup-python@v5 22 | with: 23 | python-version: '3.13' 24 | 25 | - name: Install dependencies 26 | run: | 27 | python -m pip install --upgrade pip 28 | python -m pip install uv twine toml 29 | 30 | - name: Build package 31 | run: | 32 | rm -rf dist/ 33 | uv build --no-sources 34 | 35 | - name: Check distribution with twine 36 | run: | 37 | twine check dist/* 38 | 39 | - name: Publish to PyPI 40 | env: 41 | PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }} 42 | run: | 43 | twine upload dist/* -u __token__ -p $PYPI_API_TOKEN 44 | 45 | - name: Extract package version 46 | id: get_version 47 | run: | 48 | PACKAGE_VERSION=$(python -c "import toml; print(toml.load('pyproject.toml')['project']['version'])") 49 | echo "version=$PACKAGE_VERSION" >> $GITHUB_OUTPUT 50 | 51 | - name: Notify Slack about PyPI release 52 | id: slack 53 | uses: slackapi/slack-github-action@v1.25.0 54 | with: 55 | channel-id: ${{ secrets.SLACK_CHANNEL_ID }} 56 | slack-message: | 57 | :rocket: *New Release Published to PyPI!* 58 | 59 | *Package:* codelogic-mcp-server v${{ steps.get_version.outputs.version }} 60 | *Published by:* ${{ github.actor }} 61 | 62 | :link: 63 | env: 64 | SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Python-generated files 2 | __pycache__/ 3 | *.py[oc] 4 | build/ 5 | dist/ 6 | wheels/ 7 | *.egg-info 8 | 9 | # Virtual environments 10 | .venv 11 | .env 12 | /.idea/ 13 | .aider* 14 | 15 | test/.env.test 16 | 17 | # Logs folder 18 | logs/ 19 | .specstory/ 20 | impact_analysis_result_*.md -------------------------------------------------------------------------------- /.python-version: -------------------------------------------------------------------------------- 1 | 3.13 2 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | 8 | { 9 | "name": "Debug", 10 | "type": "debugpy", 11 | "request": "launch", 12 | "program": "${workspaceFolder}/src/start_server.py", 13 | "console": "integratedTerminal" 14 | }, 15 | { 16 | "name": "Connect to CodeLogic MCP Server", 17 | "type": "debugpy", 18 | "request": "attach", 19 | "connect": { 20 | "host": "127.0.0.1", 21 | "port": 5679 22 | }, 23 | "pathMappings": [ 24 | { 25 | "localRoot": "${workspaceFolder}", 26 | "remoteRoot": "${workspaceFolder}" 27 | } 28 | ], 29 | "justMyCode": true 30 | } 31 | ] 32 | } -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.testing.unittestEnabled": true, 3 | "python.testing.pytestEnabled": false, 4 | "python.testing.nosetestsEnabled": false, 5 | "python.testing.unittestArgs": [ 6 | "-v", 7 | "-s", 8 | "./test", 9 | "-p", 10 | "*_test_*.py" 11 | ], 12 | "python.testing.cwd": "${workspaceFolder}" 13 | } -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. 6 | 7 | We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community. 8 | 9 | ## Our Standards 10 | 11 | Examples of behavior that contributes to a positive environment: 12 | 13 | * Being respectful and inclusive of differing viewpoints and experiences 14 | * Giving and gracefully accepting constructive feedback 15 | * Accepting responsibility and apologizing to those affected by our mistakes 16 | * Focusing on what is best for the community 17 | 18 | Examples of unacceptable behavior: 19 | 20 | * The use of sexualized language or imagery and unwelcome attention 21 | * Trolling, insulting/derogatory comments, and personal or political attacks 22 | * Public or private harassment 23 | * Publishing others' private information without permission 24 | * Other conduct which could reasonably be considered inappropriate in a professional setting 25 | 26 | ## Enforcement 27 | 28 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the project maintainers. All complaints will be reviewed and investigated promptly and fairly. 29 | 30 | Project maintainers are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action. 31 | 32 | ## Attribution 33 | 34 | This Code of Conduct is adapted from the [Contributor Covenant](https://www.contributor-covenant.org/), 35 | version 2.0, available at 36 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. 
37 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to codelogic-mcp-server 2 | 3 | Thank you for your interest in contributing to codelogic-mcp-server! This document provides guidelines and instructions for contributing. 4 | 5 | ## Code of Conduct 6 | 7 | Please read and follow our [Code of Conduct](CODE_OF_CONDUCT.md). 8 | 9 | ## How to Contribute 10 | 11 | ### Reporting Bugs 12 | 13 | 1. Check if the bug has already been reported in our [Issues](https://github.com/CodeLogicIncEngineering/codelogic-mcp-server/issues) 14 | 2. If not, create a new issue using the bug report template 15 | 3. Include detailed steps to reproduce the bug 16 | 4. Include your environment details (OS, Python version, etc.) 17 | 18 | ### Suggesting Features 19 | 20 | 1. Check if the feature has already been suggested in our [Issues](https://github.com/CodeLogicIncEngineering/codelogic-mcp-server/issues) 21 | 2. If not, create a new issue using the feature request template 22 | 3. Clearly describe the feature and its benefits 23 | 24 | ### Pull Requests 25 | 26 | 1. Fork the repository 27 | 2. Create a new branch for your changes 28 | 3. Make your changes and commit them with clear commit messages 29 | 4. Write tests for your changes 30 | 5. Run all tests and ensure they pass 31 | 6. Submit a pull request with a description of your changes 32 | 33 | ## Development Setup 34 | 35 | 1. Clone the repository 36 | 2. Install dependencies: `uv venv && uv pip install -e .` 37 | 3. Run unit tests: `python -m unittest test/unit*` 38 | 39 | ## Coding Standards 40 | 41 | - Follow PEP 8 guidelines 42 | - Include docstrings for all classes and functions 43 | - Write unit tests for new functionality 44 | 45 | ## License 46 | 47 | By contributing to this project, you agree that your contributions will be licensed under the project's [Mozilla Public License 2.0](LICENSE). 48 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Mozilla Public License 2 | Version 2.0 3 | 1. Definitions 4 | 1.1. “Contributor” 5 | means each individual or legal entity that creates, contributes to the creation of, or owns Covered Software. 6 | 7 | 1.2. “Contributor Version” 8 | means the combination of the Contributions of others (if any) used by a Contributor and that particular Contributor’s Contribution. 9 | 10 | 1.3. “Contribution” 11 | means Covered Software of a particular Contributor. 12 | 13 | 1.4. “Covered Software” 14 | means Source Code Form to which the initial Contributor has attached the notice in Exhibit A, the Executable Form of such Source Code Form, and Modifications of such Source Code Form, in each case including portions thereof. 15 | 16 | 1.5. “Incompatible With Secondary Licenses” 17 | means 18 | 19 | that the initial Contributor has attached the notice described in Exhibit B to the Covered Software; or 20 | 21 | that the Covered Software was made available under the terms of version 1.1 or earlier of the License, but not also under the terms of a Secondary License. 22 | 23 | 1.6. “Executable Form” 24 | means any form of the work other than Source Code Form. 25 | 26 | 1.7. “Larger Work” 27 | means a work that combines Covered Software with other material, in a separate file or files, that is not Covered Software. 28 | 29 | 1.8. 
“License” 30 | means this document. 31 | 32 | 1.9. “Licensable” 33 | means having the right to grant, to the maximum extent possible, whether at the time of the initial grant or subsequently, any and all of the rights conveyed by this License. 34 | 35 | 1.10. “Modifications” 36 | means any of the following: 37 | 38 | any file in Source Code Form that results from an addition to, deletion from, or modification of the contents of Covered Software; or 39 | 40 | any new file in Source Code Form that contains any Covered Software. 41 | 42 | 1.11. “Patent Claims” of a Contributor 43 | means any patent claim(s), including without limitation, method, process, and apparatus claims, in any patent Licensable by such Contributor that would be infringed, but for the grant of the License, by the making, using, selling, offering for sale, having made, import, or transfer of either its Contributions or its Contributor Version. 44 | 45 | 1.12. “Secondary License” 46 | means either the GNU General Public License, Version 2.0, the GNU Lesser General Public License, Version 2.1, the GNU Affero General Public License, Version 3.0, or any later versions of those licenses. 47 | 48 | 1.13. “Source Code Form” 49 | means the form of the work preferred for making modifications. 50 | 51 | 1.14. “You” (or “Your”) 52 | means an individual or a legal entity exercising rights under this License. For legal entities, “You” includes any entity that controls, is controlled by, or is under common control with You. For purposes of this definition, “control” means (a) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (b) ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity. 53 | 54 | 2. License Grants and Conditions 55 | 2.1. Grants 56 | Each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license: 57 | 58 | under intellectual property rights (other than patent or trademark) Licensable by such Contributor to use, reproduce, make available, modify, display, perform, distribute, and otherwise exploit its Contributions, either on an unmodified basis, with Modifications, or as part of a Larger Work; and 59 | 60 | under Patent Claims of such Contributor to make, use, sell, offer for sale, have made, import, and otherwise transfer either its Contributions or its Contributor Version. 61 | 62 | 2.2. Effective Date 63 | The licenses granted in Section 2.1 with respect to any Contribution become effective for each Contribution on the date the Contributor first distributes such Contribution. 64 | 65 | 2.3. Limitations on Grant Scope 66 | The licenses granted in this Section 2 are the only rights granted under this License. No additional rights or licenses will be implied from the distribution or licensing of Covered Software under this License. Notwithstanding Section 2.1(b) above, no patent license is granted by a Contributor: 67 | 68 | for any code that a Contributor has removed from Covered Software; or 69 | 70 | for infringements caused by: (i) Your and any other third party’s modifications of Covered Software, or (ii) the combination of its Contributions with other software (except as part of its Contributor Version); or 71 | 72 | under Patent Claims infringed by Covered Software in the absence of its Contributions. 
73 | 74 | This License does not grant any rights in the trademarks, service marks, or logos of any Contributor (except as may be necessary to comply with the notice requirements in Section 3.4). 75 | 76 | 2.4. Subsequent Licenses 77 | No Contributor makes additional grants as a result of Your choice to distribute the Covered Software under a subsequent version of this License (see Section 10.2) or under the terms of a Secondary License (if permitted under the terms of Section 3.3). 78 | 79 | 2.5. Representation 80 | Each Contributor represents that the Contributor believes its Contributions are its original creation(s) or it has sufficient rights to grant the rights to its Contributions conveyed by this License. 81 | 82 | 2.6. Fair Use 83 | This License is not intended to limit any rights You have under applicable copyright doctrines of fair use, fair dealing, or other equivalents. 84 | 85 | 2.7. Conditions 86 | Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in Section 2.1. 87 | 88 | 3. Responsibilities 89 | 3.1. Distribution of Source Form 90 | All distribution of Covered Software in Source Code Form, including any Modifications that You create or to which You contribute, must be under the terms of this License. You must inform recipients that the Source Code Form of the Covered Software is governed by the terms of this License, and how they can obtain a copy of this License. You may not attempt to alter or restrict the recipients’ rights in the Source Code Form. 91 | 92 | 3.2. Distribution of Executable Form 93 | If You distribute Covered Software in Executable Form then: 94 | 95 | such Covered Software must also be made available in Source Code Form, as described in Section 3.1, and You must inform recipients of the Executable Form how they can obtain a copy of such Source Code Form by reasonable means in a timely manner, at a charge no more than the cost of distribution to the recipient; and 96 | 97 | You may distribute such Executable Form under the terms of this License, or sublicense it under different terms, provided that the license for the Executable Form does not attempt to limit or alter the recipients’ rights in the Source Code Form under this License. 98 | 99 | 3.3. Distribution of a Larger Work 100 | You may create and distribute a Larger Work under terms of Your choice, provided that You also comply with the requirements of this License for the Covered Software. If the Larger Work is a combination of Covered Software with a work governed by one or more Secondary Licenses, and the Covered Software is not Incompatible With Secondary Licenses, this License permits You to additionally distribute such Covered Software under the terms of such Secondary License(s), so that the recipient of the Larger Work may, at their option, further distribute the Covered Software under the terms of either this License or such Secondary License(s). 101 | 102 | 3.4. Notices 103 | You may not remove or alter the substance of any license notices (including copyright notices, patent notices, disclaimers of warranty, or limitations of liability) contained within the Source Code Form of the Covered Software, except that You may alter any license notices to the extent required to remedy known factual inaccuracies. 104 | 105 | 3.5. Application of Additional Terms 106 | You may choose to offer, and to charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Covered Software. 
However, You may do so only on Your own behalf, and not on behalf of any Contributor. You must make it absolutely clear that any such warranty, support, indemnity, or liability obligation is offered by You alone, and You hereby agree to indemnify every Contributor for any liability incurred by such Contributor as a result of warranty, support, indemnity or liability terms You offer. You may include additional disclaimers of warranty and limitations of liability specific to any jurisdiction. 107 | 108 | 4. Inability to Comply Due to Statute or Regulation 109 | If it is impossible for You to comply with any of the terms of this License with respect to some or all of the Covered Software due to statute, judicial order, or regulation then You must: (a) comply with the terms of this License to the maximum extent possible; and (b) describe the limitations and the code they affect. Such description must be placed in a text file included with all distributions of the Covered Software under this License. Except to the extent prohibited by statute or regulation, such description must be sufficiently detailed for a recipient of ordinary skill to be able to understand it. 110 | 111 | 5. Termination 112 | 5.1. The rights granted under this License will terminate automatically if You fail to comply with any of its terms. However, if You become compliant, then the rights granted under this License from a particular Contributor are reinstated (a) provisionally, unless and until such Contributor explicitly and finally terminates Your grants, and (b) on an ongoing basis, if such Contributor fails to notify You of the non-compliance by some reasonable means prior to 60 days after You have come back into compliance. Moreover, Your grants from a particular Contributor are reinstated on an ongoing basis if such Contributor notifies You of the non-compliance by some reasonable means, this is the first time You have received notice of non-compliance with this License from such Contributor, and You become compliant prior to 30 days after Your receipt of the notice. 113 | 114 | 5.2. If You initiate litigation against any entity by asserting a patent infringement claim (excluding declaratory judgment actions, counter-claims, and cross-claims) alleging that a Contributor Version directly or indirectly infringes any patent, then the rights granted to You by any and all Contributors for the Covered Software under Section 2.1 of this License shall terminate. 115 | 116 | 5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user license agreements (excluding distributors and resellers) which have been validly granted by You or Your distributors under this License prior to termination shall survive termination. 117 | 118 | 6. Disclaimer of Warranty 119 | Covered Software is provided under this License on an “as is” basis, without warranty of any kind, either expressed, implied, or statutory, including, without limitation, warranties that the Covered Software is free of defects, merchantable, fit for a particular purpose or non-infringing. The entire risk as to the quality and performance of the Covered Software is with You. Should any Covered Software prove defective in any respect, You (not any Contributor) assume the cost of any necessary servicing, repair, or correction. This disclaimer of warranty constitutes an essential part of this License. No use of any Covered Software is authorized under this License except under this disclaimer. 120 | 121 | 7. 
Limitation of Liability 122 | Under no circumstances and under no legal theory, whether tort (including negligence), contract, or otherwise, shall any Contributor, or anyone who distributes Covered Software as permitted above, be liable to You for any direct, indirect, special, incidental, or consequential damages of any character including, without limitation, damages for lost profits, loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses, even if such party shall have been informed of the possibility of such damages. This limitation of liability shall not apply to liability for death or personal injury resulting from such party’s negligence to the extent applicable law prohibits such limitation. Some jurisdictions do not allow the exclusion or limitation of incidental or consequential damages, so this exclusion and limitation may not apply to You. 123 | 124 | 8. Litigation 125 | Any litigation relating to this License may be brought only in the courts of a jurisdiction where the defendant maintains its principal place of business and such litigation shall be governed by laws of that jurisdiction, without reference to its conflict-of-law provisions. Nothing in this Section shall prevent a party’s ability to bring cross-claims or counter-claims. 126 | 127 | 9. Miscellaneous 128 | This License represents the complete agreement concerning the subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. Any law or regulation which provides that the language of a contract shall be construed against the drafter shall not be used to construe this License against a Contributor. 129 | 130 | 10. Versions of the License 131 | 10.1. New Versions 132 | Mozilla Foundation is the license steward. Except as provided in Section 10.3, no one other than the license steward has the right to modify or publish new versions of this License. Each version will be given a distinguishing version number. 133 | 134 | 10.2. Effect of New Versions 135 | You may distribute the Covered Software under the terms of the version of the License under which You originally received the Covered Software, or under the terms of any subsequent version published by the license steward. 136 | 137 | 10.3. Modified Versions 138 | If you create software not governed by this License, and you want to create a new license for such software, you may create and use a modified version of this License if you rename the license and remove any references to the name of the license steward (except to note that such modified license differs from this License). 139 | 140 | 10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses 141 | If You choose to distribute Source Code Form that is Incompatible With Secondary Licenses under the terms of this version of the License, the notice described in Exhibit B of this License must be attached. 142 | 143 | Exhibit A - Source Code Form License Notice 144 | This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at https://mozilla.org/MPL/2.0/. 145 | 146 | If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice. 
147 | 148 | You may add additional accurate notices of copyright ownership. 149 | 150 | Exhibit B - “Incompatible With Secondary Licenses” Notice 151 | This Source Code Form is “Incompatible With Secondary Licenses”, as defined by the Mozilla Public License, v. 2.0. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # codelogic-mcp-server 2 | 3 | An [MCP Server](https://modelcontextprotocol.io/introduction) to utilize Codelogic's rich software dependency data in your AI programming assistant. 4 | 5 | ## Components 6 | 7 | ### Tools 8 | 9 | The server implements two tools: 10 | 11 | - **codelogic-method-impact**: Pulls an impact assessment from the CodeLogic server's APIs for your code. 12 | - Takes the given "method" that you're working on and its associated "class". 13 | - **codelogic-database-impact**: Analyzes impacts between code and database entities. 14 | - Takes the database entity type (column, table, or view) and its name. 15 | 16 | ### Install 17 | 18 | #### Pre Requisites 19 | 20 | The MCP server relies upon Astral UV to run, please [install](https://docs.astral.sh/uv/getting-started/installation/) 21 | 22 | ### MacOS Workaround for uvx 23 | 24 | There is a known issue with `uvx` on **MacOS** where the CodeLogic MCP server may fail to launch in certain IDEs (such as Cursor), resulting in errors like: 25 | See [issue #11](https://github.com/CodeLogicIncEngineering/codelogic-mcp-server/issues/11) 26 | ``` 27 | Failed to connect client closed 28 | ``` 29 | 30 | This appears to be a problem with Astral `uvx` running on MacOS. The following can be used as a workaround: 31 | 32 | 1. Clone this project locally. 33 | 2. Configure your `mcp.json` to use `uv` instead of `uvx`. For example: 34 | 35 | ```json 36 | { 37 | "mcpServers": { 38 | "codelogic-mcp-server": { 39 | "type": "stdio", 40 | "command": "/uv", 41 | "args": [ 42 | "--directory", 43 | "/codelogic-mcp-server-main", 44 | "run", 45 | "codelogic-mcp-server" 46 | ], 47 | "env": { 48 | "CODELOGIC_SERVER_HOST": "", 49 | "CODELOGIC_USERNAME": "", 50 | "CODELOGIC_PASSWORD": "", 51 | "CODELOGIC_MV_NAME": "", 52 | "CODELOGIC_DEBUG_MODE": "true" 53 | } 54 | } 55 | } 56 | } 57 | ``` 58 | 59 | 3. Restart Cursor. 60 | 4. Ensure the Cursor Global Rule for CodeLogic is in place. 61 | 5. Open the MCP tab in Cursor and refresh the `codelogic-mcp-server`. 62 | 6. Ask Cursor to make a code change in an existing class. The MCP server should now run the impact analysis successfully. 63 | 64 | ## Configuration for Different IDEs 65 | 66 | ### Visual Studio Code Configuration 67 | 68 | To configure this MCP server in VS Code: 69 | 70 | 1. First, ensure you have GitHub Copilot agent mode enabled in VS Code. 71 | 72 | 2. Create a `.vscode/mcp.json` file in your workspace with the following configuration: 73 | 74 | ```json 75 | { 76 | "servers": { 77 | "codelogic-mcp-server": { 78 | "type": "stdio", 79 | "command": "uvx", 80 | "args": [ 81 | "codelogic-mcp-server@latest" 82 | ], 83 | "env": { 84 | "CODELOGIC_SERVER_HOST": "", 85 | "CODELOGIC_USERNAME": "", 86 | "CODELOGIC_PASSWORD": "", 87 | "CODELOGIC_WORKSPACE_NAME": "", 88 | "CODELOGIC_DEBUG_MODE": "true" 89 | } 90 | } 91 | } 92 | } 93 | ``` 94 | 95 | > **Note:** On some systems, you may need to use the full path to the uvx executable instead of just "uvx". For example: `/home/user/.local/bin/uvx` on Linux/Mac or `C:\Users\username\AppData\Local\astral\uvx.exe` on Windows. 
96 | 97 | 3. Alternatively, you can run the `MCP: Add Server` command from the Command Palette and provide the server information. 98 | 99 | 4. To manage your MCP servers, use the `MCP: List Servers` command from the Command Palette. 100 | 101 | 5. Once configured, the server's tools will be available to Copilot agent mode. You can toggle specific tools on/off as needed by clicking the Tools button in the Chat view when in agent mode. 102 | 103 | 6. To use the Codelogic tools in agent mode, you can specifically ask about code impacts or database relationships, and the agent will utilize the appropriate tools. 104 | 105 | ### Claude Desktop Configuration 106 | 107 | Configure Claude Desktop by editing the configuration file: 108 | 109 | - On MacOS: `~/Library/Application\ Support/Claude/claude_desktop_config.json` 110 | - On Windows: `%APPDATA%/Claude/claude_desktop_config.json` 111 | - On Linux: `~/.config/Claude/claude_desktop_config.json` 112 | 113 | Add the following to your configuration file: 114 | 115 | ```json 116 | "mcpServers": { 117 | "codelogic-mcp-server": { 118 | "command": "uvx", 119 | "args": [ 120 | "codelogic-mcp-server@latest" 121 | ], 122 | "env": { 123 | "CODELOGIC_SERVER_HOST": "", 124 | "CODELOGIC_USERNAME": "", 125 | "CODELOGIC_PASSWORD": "", 126 | "CODELOGIC_WORKSPACE_NAME": "" 127 | } 128 | } 129 | } 130 | ``` 131 | 132 | > **Note:** On some systems, you may need to use the full path to the uvx executable instead of just "uvx". For example: `/home/user/.local/bin/uvx` on Linux/Mac or `C:\Users\username\AppData\Local\astral\uvx.exe` on Windows. 133 | 134 | After adding the configuration, restart Claude Desktop to apply the changes. 135 | 136 | ### Windsurf IDE Configuration 137 | 138 | To run this MCP server with [Windsurf IDE](https://codeium.com/windsurf): 139 | 140 | **Configure Windsurf IDE**: 141 | 142 | To configure Windsurf IDE, you need to create or modify the `~/.codeium/windsurf/mcp_config.json` configuration file. 143 | 144 | Add the following configuration to your file: 145 | 146 | ```json 147 | "mcpServers": { 148 | "codelogic-mcp-server": { 149 | "command": "uvx", 150 | "args": [ 151 | "codelogic-mcp-server@latest" 152 | ], 153 | "env": { 154 | "CODELOGIC_SERVER_HOST": "", 155 | "CODELOGIC_USERNAME": "", 156 | "CODELOGIC_PASSWORD": "", 157 | "CODELOGIC_WORKSPACE_NAME": "" 158 | } 159 | } 160 | } 161 | ``` 162 | 163 | > **Note:** On some systems, you may need to use the full path to the uvx executable instead of just "uvx". For example: `/home/user/.local/bin/uvx` on Linux/Mac or `C:\Users\username\AppData\Local\astral\uvx.exe` on Windows. 164 | 165 | After adding the configuration, restart Windsurf IDE or refresh the tools to apply the changes. 166 | 167 | ### Cursor Configuration 168 | 169 | To configure the CodeLogic MCP server in Cursor: 170 | 171 | 1. Configure the MCP server by creating a `.cursor/mcp.json` file: 172 | 173 | ```json 174 | { 175 | "mcpServers": { 176 | "codelogic-mcp-server": { 177 | "command": "uvx", 178 | "args": [ 179 | "codelogic-mcp-server@latest" 180 | ], 181 | "env": { 182 | "CODELOGIC_SERVER_HOST": "", 183 | "CODELOGIC_USERNAME": "", 184 | "CODELOGIC_PASSWORD": "", 185 | "CODELOGIC_WORKSPACE_NAME": "", 186 | "CODELOGIC_DEBUG_MODE": "true" 187 | } 188 | } 189 | } 190 | } 191 | ``` 192 | 193 | > **Note:** On some systems, you may need to use the full path to the uvx executable instead of just "uvx". For example: `/home/user/.local/bin/uvx` on Linux/Mac or `C:\Users\username\AppData\Local\astral\uvx.exe` on Windows. 
194 | 195 | 2. Restart Cursor to apply the changes. 196 | 197 | The CodeLogic MCP server tools will now be available in your Cursor workspace. 198 | 199 | ## AI Assistant Instructions/Rules 200 | 201 | To help the AI assistant use the CodeLogic tools effectively, you can add the following instructions/rules to your client's configuration. We recommend customizing these instructions to align with your team's specific coding standards, best practices, and workflow requirements: 202 | 203 | ### VS Code (GitHub Copilot) Instructions 204 | 205 | Create a `.vscode/copilot-instructions.md` file with the following content: 206 | 207 | ```markdown 208 | # CodeLogic MCP Server Instructions 209 | 210 | When modifying existing code methods: 211 | - Use codelogic-method-impact to analyze code changes 212 | - Use codelogic-database-impact for database modifications 213 | - Highlight impact results for the modified methods 214 | 215 | When modifying SQL code or database entities: 216 | - Always use codelogic-database-impact to analyze potential impacts 217 | - Highlight impact results for the modified database entities 218 | 219 | To use the CodeLogic tools effectively: 220 | - For code impacts: Ask about specific methods or functions 221 | - For database relationships: Ask about tables, views, or columns 222 | - Review the impact results before making changes 223 | - Consider both direct and indirect impacts 224 | ``` 225 | 226 | ### Claude Desktop Instructions 227 | 228 | Create a file `~/.claude/instructions.md` with the following content: 229 | 230 | ```markdown 231 | # CodeLogic MCP Server Instructions 232 | 233 | When modifying existing code methods: 234 | - Use codelogic-method-impact to analyze code changes 235 | - Use codelogic-database-impact for database modifications 236 | - Highlight impact results for the modified methods 237 | 238 | When modifying SQL code or database entities: 239 | - Always use codelogic-database-impact to analyze potential impacts 240 | - Highlight impact results for the modified database entities 241 | 242 | To use the CodeLogic tools effectively: 243 | - For code impacts: Ask about specific methods or functions 244 | - For database relationships: Ask about tables, views, or columns 245 | - Review the impact results before making changes 246 | - Consider both direct and indirect impacts 247 | ``` 248 | 249 | ### Windsurf IDE Rules 250 | 251 | Create or modify the `~/.codeium/windsurf/memories/global_rules.md` markdown file with the following content: 252 | 253 | ```markdown 254 | When modifying existing code methods: 255 | - Use codelogic-method-impact to analyze code changes 256 | - Use codelogic-database-impact for database modifications 257 | - Highlight impact results for the modified methods 258 | 259 | When modifying SQL code or database entities: 260 | - Always use codelogic-database-impact to analyze potential impacts 261 | - Highlight impact results for the modified database entities 262 | 263 | To use the CodeLogic tools effectively: 264 | - For code impacts: Ask about specific methods or functions 265 | - For database relationships: Ask about tables, views, or columns 266 | - Review the impact results before making changes 267 | - Consider both direct and indirect impacts 268 | ``` 269 | 270 | ### Cursor Global Rule 271 | 272 | To configure CodeLogic rules in Cursor: 273 | 274 | 1. Open Cursor Settings 275 | 2. Navigate to the "Rules" section 276 | 3. 
Add the following content to "User Rules":
277 | 
278 | ```markdown
279 | # CodeLogic MCP Server Rules
280 | ## Codebase
281 | - The CodeLogic MCP Server is for Java, JavaScript, TypeScript, and C# (.NET) codebases
282 | - Don't run the tools on Python or other unsupported codebases
283 | ## AI Assistant Behavior
284 | - When modifying existing code methods:
285 | - Use codelogic-method-impact to analyze code changes
286 | - Use codelogic-database-impact for database modifications
287 | - Highlight impact results for the modified methods
288 | - When modifying SQL code or database entities:
289 | - Always use codelogic-database-impact to analyze potential impacts
290 | - Highlight impact results for the modified database entities
291 | - To use the CodeLogic tools effectively:
292 | - For code impacts: Ask about specific methods or functions
293 | - For database relationships: Ask about tables, views, or columns
294 | - Review the impact results before making changes
295 | - Consider both direct and indirect impacts
296 | ```
297 | 
298 | ## Environment Variables
299 | 
300 | The following environment variables can be configured to customize the behavior of the server:
301 | 
302 | - `CODELOGIC_SERVER_HOST`: The URL of the CodeLogic server.
303 | - `CODELOGIC_USERNAME`: Your CodeLogic username.
304 | - `CODELOGIC_PASSWORD`: Your CodeLogic password.
305 | - `CODELOGIC_WORKSPACE_NAME`: The name of the workspace to use.
306 | - `CODELOGIC_DEBUG_MODE`: Set to `true` to enable debug mode. When enabled, additional debug files such as `timing_log.txt` and `impact_data*.json` will be generated. Defaults to `false`.
307 | 
308 | ### Example Configuration
309 | 
310 | ```json
311 | "env": {
312 | "CODELOGIC_SERVER_HOST": "",
313 | "CODELOGIC_USERNAME": "",
314 | "CODELOGIC_PASSWORD": "",
315 | "CODELOGIC_WORKSPACE_NAME": "",
316 | "CODELOGIC_DEBUG_MODE": "true"
317 | }
318 | ```
319 | 
320 | ### Pinning the Version
321 | 
322 | Instead of using the **latest** version of the server, you can pin to a specific version by changing the **args** field to match a version published on [PyPI](https://pypi.org/project/codelogic-mcp-server/), e.g.:
323 | 
324 | ```json
325 | "args": [
326 | "codelogic-mcp-server@0.2.2"
327 | ],
328 | ```
329 | 
330 | ### Version Compatibility
331 | 
332 | This MCP server has the following version compatibility requirements:
333 | 
334 | - Version 0.3.1 and below: Compatible with all CodeLogic API versions
335 | - Version 0.4.0 and above: Requires CodeLogic API version 25.10.0 or greater
336 | 
337 | If you're upgrading, make sure your CodeLogic server meets the minimum API version requirement.
338 | 
339 | ## Testing
340 | 
341 | ### Running Unit Tests
342 | 
343 | The project uses unittest for testing. You can run unit tests without any external dependencies:
344 | 
345 | ```bash
346 | python -m unittest discover -s test -p "unit_*.py"
347 | ```
348 | 
349 | Unit tests use mock data and don't require a connection to a CodeLogic server.
350 | 
351 | ### Integration Tests (Optional)
352 | 
353 | If you want to run integration tests that connect to a real CodeLogic server:
354 | 
355 | 1. Copy `test/.env.test.example` to `test/.env.test` and populate with your CodeLogic server details
356 | 2. Run the integration tests:
357 | 
358 | ```bash
359 | python -m unittest discover -s test -p "integration_*.py"
360 | ```
361 | 
362 | Note: Integration tests require access to a CodeLogic server instance.
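While iterating, you can also run a single test module instead of the whole suite. A minimal sketch, assuming it is run from the repository root against one of the unit test files under `test/`:

```bash
# Run one unit test module with verbose output (unit_test_handlers is just an example; any unit_*.py module works)
python -m unittest test.unit_test_handlers -v
```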
363 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Supported Versions 4 | 5 | | Version | Supported | 6 | | ------- | ------------------ | 7 | | latest | :white_check_mark: | 8 | 9 | ## Reporting a Vulnerability 10 | 11 | If you discover a security vulnerability within this project, please send an email to [devops@codelogic.com](mailto:devops@codelogic.com). All security vulnerabilities will be promptly addressed. 12 | 13 | Please do not disclose security vulnerabilities publicly until they have been handled by the team. 14 | 15 | When reporting a vulnerability, please include: 16 | 17 | 1. A description of the vulnerability 18 | 2. Steps to reproduce the issue 19 | 3. Potential impact of the vulnerability 20 | 4. Any potential solutions you may have identified 21 | 22 | We'll respond to your report as quickly as possible and keep you updated throughout the process. 23 | -------------------------------------------------------------------------------- /add_license_headers.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Script to add MPL-2.0 license headers with copyright to all Python source files. 4 | """ 5 | import os 6 | import glob 7 | import datetime 8 | 9 | # Get the current year for the copyright 10 | current_year = datetime.datetime.now().year 11 | 12 | # MPL-2.0 License Header with CodeLogic Inc. copyright 13 | MPL_HEADER = f"""# Copyright (C) {current_year} CodeLogic Inc. 14 | # This Source Code Form is subject to the terms of the Mozilla Public 15 | # License, v. 2.0. If a copy of the MPL was not distributed with this 16 | # file, You can obtain one at https://mozilla.org/MPL/2.0/. 17 | """ 18 | 19 | def add_license_header(file_path): 20 | """Adds the MPL-2.0 license header with copyright to a file if it doesn't already have it.""" 21 | with open(file_path, 'r', encoding='utf-8') as f: 22 | content = f.read() 23 | 24 | # Check if header is already present (approximate check) 25 | if "Mozilla Public License" in content and "CodeLogic Inc." 
in content: 26 | print(f"Skipping {file_path} - header appears to be present") 27 | return 28 | 29 | # Preserve any shebang or encoding comment at the top 30 | lines = content.splitlines() 31 | prefix = "" 32 | if lines and (lines[0].startswith('#!') or '# -*- coding' in lines[0]): 33 | prefix = lines[0] + '\n' 34 | content = '\n'.join(lines[1:]) 35 | 36 | # Add a blank line after the license header if the file isn't empty 37 | if content.strip(): 38 | new_content = prefix + MPL_HEADER + '\n' + content 39 | else: 40 | new_content = prefix + MPL_HEADER + content 41 | 42 | with open(file_path, 'w', encoding='utf-8') as f: 43 | f.write(new_content) 44 | 45 | print(f"Added license header with copyright to {file_path}") 46 | 47 | def main(): 48 | """Find all Python files and add the license header with copyright.""" 49 | # Get all Python files in the src directory 50 | python_files = [] 51 | 52 | # Add files from src directory 53 | for root, _, files in os.walk('src'): 54 | for file in files: 55 | if file.endswith('.py'): 56 | python_files.append(os.path.join(root, file)) 57 | 58 | # Add files from scripts or other top-level Python files 59 | for py_file in glob.glob('*.py'): 60 | python_files.append(py_file) 61 | 62 | print(f"Found {len(python_files)} Python files") 63 | 64 | # Add license header to each file 65 | for file_path in python_files: 66 | add_license_header(file_path) 67 | 68 | if __name__ == "__main__": 69 | main() -------------------------------------------------------------------------------- /context/Python-MCP-SDK.md: -------------------------------------------------------------------------------- 1 | # MCP Python SDK 2 | 3 |
4 | 5 | Python implementation of the Model Context Protocol (MCP) 6 | 7 | [![PyPI][pypi-badge]][pypi-url] 8 | [![MIT licensed][mit-badge]][mit-url] 9 | [![Python Version][python-badge]][python-url] 10 | [![Documentation][docs-badge]][docs-url] 11 | [![Specification][spec-badge]][spec-url] 12 | [![GitHub Discussions][discussions-badge]][discussions-url] 13 | 14 |
15 | 16 | 17 | ## Table of Contents 18 | 19 | - [MCP Python SDK](#mcp-python-sdk) 20 | - [Overview](#overview) 21 | - [Installation](#installation) 22 | - [Adding MCP to your python project](#adding-mcp-to-your-python-project) 23 | - [Running the standalone MCP development tools](#running-the-standalone-mcp-development-tools) 24 | - [Quickstart](#quickstart) 25 | - [What is MCP?](#what-is-mcp) 26 | - [Core Concepts](#core-concepts) 27 | - [Server](#server) 28 | - [Resources](#resources) 29 | - [Tools](#tools) 30 | - [Prompts](#prompts) 31 | - [Images](#images) 32 | - [Context](#context) 33 | - [Running Your Server](#running-your-server) 34 | - [Development Mode](#development-mode) 35 | - [Claude Desktop Integration](#claude-desktop-integration) 36 | - [Direct Execution](#direct-execution) 37 | - [Mounting to an Existing ASGI Server](#mounting-to-an-existing-asgi-server) 38 | - [Examples](#examples) 39 | - [Echo Server](#echo-server) 40 | - [SQLite Explorer](#sqlite-explorer) 41 | - [Advanced Usage](#advanced-usage) 42 | - [Low-Level Server](#low-level-server) 43 | - [Writing MCP Clients](#writing-mcp-clients) 44 | - [MCP Primitives](#mcp-primitives) 45 | - [Server Capabilities](#server-capabilities) 46 | - [Documentation](#documentation) 47 | - [Contributing](#contributing) 48 | - [License](#license) 49 | 50 | [pypi-badge]: https://img.shields.io/pypi/v/mcp.svg 51 | [pypi-url]: https://pypi.org/project/mcp/ 52 | [mit-badge]: https://img.shields.io/pypi/l/mcp.svg 53 | [mit-url]: https://github.com/modelcontextprotocol/python-sdk/blob/main/LICENSE 54 | [python-badge]: https://img.shields.io/pypi/pyversions/mcp.svg 55 | [python-url]: https://www.python.org/downloads/ 56 | [docs-badge]: https://img.shields.io/badge/docs-modelcontextprotocol.io-blue.svg 57 | [docs-url]: https://modelcontextprotocol.io 58 | [spec-badge]: https://img.shields.io/badge/spec-spec.modelcontextprotocol.io-blue.svg 59 | [spec-url]: https://spec.modelcontextprotocol.io 60 | [discussions-badge]: https://img.shields.io/github/discussions/modelcontextprotocol/python-sdk 61 | [discussions-url]: https://github.com/modelcontextprotocol/python-sdk/discussions 62 | 63 | ## Overview 64 | 65 | The Model Context Protocol allows applications to provide context for LLMs in a standardized way, separating the concerns of providing context from the actual LLM interaction. This Python SDK implements the full MCP specification, making it easy to: 66 | 67 | - Build MCP clients that can connect to any MCP server 68 | - Create MCP servers that expose resources, prompts and tools 69 | - Use standard transports like stdio and SSE 70 | - Handle all MCP protocol messages and lifecycle events 71 | 72 | ## Installation 73 | 74 | ### Adding MCP to your python project 75 | 76 | We recommend using [uv](https://docs.astral.sh/uv/) to manage your Python projects. 
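If uv is not installed yet, the commands below are one common way to get it; they are listed here as an assumption, so check the uv documentation for the current instructions on your platform:

```bash
# Standalone installer for macOS/Linux (assumed URL; see https://docs.astral.sh/uv/ for current steps)
curl -LsSf https://astral.sh/uv/install.sh | sh

# Or install uv with pip
pip install uv
```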
77 | 78 | If you haven't created a uv-managed project yet, create one: 79 | 80 | ```bash 81 | uv init mcp-server-demo 82 | cd mcp-server-demo 83 | ``` 84 | 85 | Then add MCP to your project dependencies: 86 | 87 | ```bash 88 | uv add "mcp[cli]" 89 | ``` 90 | 91 | Alternatively, for projects using pip for dependencies: 92 | ```bash 93 | pip install "mcp[cli]" 94 | ``` 95 | 96 | ### Running the standalone MCP development tools 97 | 98 | To run the mcp command with uv: 99 | 100 | ```bash 101 | uv run mcp 102 | ``` 103 | 104 | ## Quickstart 105 | 106 | Let's create a simple MCP server that exposes a calculator tool and some data: 107 | 108 | ```python 109 | # server.py 110 | from mcp.server.fastmcp import FastMCP 111 | 112 | # Create an MCP server 113 | mcp = FastMCP("Demo") 114 | 115 | 116 | # Add an addition tool 117 | @mcp.tool() 118 | def add(a: int, b: int) -> int: 119 | """Add two numbers""" 120 | return a + b 121 | 122 | 123 | # Add a dynamic greeting resource 124 | @mcp.resource("greeting://{name}") 125 | def get_greeting(name: str) -> str: 126 | """Get a personalized greeting""" 127 | return f"Hello, {name}!" 128 | ``` 129 | 130 | You can install this server in [Claude Desktop](https://claude.ai/download) and interact with it right away by running: 131 | ```bash 132 | mcp install server.py 133 | ``` 134 | 135 | Alternatively, you can test it with the MCP Inspector: 136 | ```bash 137 | mcp dev server.py 138 | ``` 139 | 140 | ## What is MCP? 141 | 142 | The [Model Context Protocol (MCP)](https://modelcontextprotocol.io) lets you build servers that expose data and functionality to LLM applications in a secure, standardized way. Think of it like a web API, but specifically designed for LLM interactions. MCP servers can: 143 | 144 | - Expose data through **Resources** (think of these sort of like GET endpoints; they are used to load information into the LLM's context) 145 | - Provide functionality through **Tools** (sort of like POST endpoints; they are used to execute code or otherwise produce a side effect) 146 | - Define interaction patterns through **Prompts** (reusable templates for LLM interactions) 147 | - And more! 148 | 149 | ## Core Concepts 150 | 151 | ### Server 152 | 153 | The FastMCP server is your core interface to the MCP protocol. 
It handles connection management, protocol compliance, and message routing: 154 | 155 | ```python 156 | # Add lifespan support for startup/shutdown with strong typing 157 | from contextlib import asynccontextmanager 158 | from collections.abc import AsyncIterator 159 | from dataclasses import dataclass 160 | 161 | from fake_database import Database # Replace with your actual DB type 162 | 163 | from mcp.server.fastmcp import Context, FastMCP 164 | 165 | # Create a named server 166 | mcp = FastMCP("My App") 167 | 168 | # Specify dependencies for deployment and development 169 | mcp = FastMCP("My App", dependencies=["pandas", "numpy"]) 170 | 171 | 172 | @dataclass 173 | class AppContext: 174 | db: Database 175 | 176 | 177 | @asynccontextmanager 178 | async def app_lifespan(server: FastMCP) -> AsyncIterator[AppContext]: 179 | """Manage application lifecycle with type-safe context""" 180 | # Initialize on startup 181 | db = await Database.connect() 182 | try: 183 | yield AppContext(db=db) 184 | finally: 185 | # Cleanup on shutdown 186 | await db.disconnect() 187 | 188 | 189 | # Pass lifespan to server 190 | mcp = FastMCP("My App", lifespan=app_lifespan) 191 | 192 | 193 | # Access type-safe lifespan context in tools 194 | @mcp.tool() 195 | def query_db(ctx: Context) -> str: 196 | """Tool that uses initialized resources""" 197 | db = ctx.request_context.lifespan_context["db"] 198 | return db.query() 199 | ``` 200 | 201 | ### Resources 202 | 203 | Resources are how you expose data to LLMs. They're similar to GET endpoints in a REST API - they provide data but shouldn't perform significant computation or have side effects: 204 | 205 | ```python 206 | from mcp.server.fastmcp import FastMCP 207 | 208 | mcp = FastMCP("My App") 209 | 210 | 211 | @mcp.resource("config://app") 212 | def get_config() -> str: 213 | """Static configuration data""" 214 | return "App configuration here" 215 | 216 | 217 | @mcp.resource("users://{user_id}/profile") 218 | def get_user_profile(user_id: str) -> str: 219 | """Dynamic user data""" 220 | return f"Profile data for user {user_id}" 221 | ``` 222 | 223 | ### Tools 224 | 225 | Tools let LLMs take actions through your server. Unlike resources, tools are expected to perform computation and have side effects: 226 | 227 | ```python 228 | import httpx 229 | from mcp.server.fastmcp import FastMCP 230 | 231 | mcp = FastMCP("My App") 232 | 233 | 234 | @mcp.tool() 235 | def calculate_bmi(weight_kg: float, height_m: float) -> float: 236 | """Calculate BMI given weight in kg and height in meters""" 237 | return weight_kg / (height_m**2) 238 | 239 | 240 | @mcp.tool() 241 | async def fetch_weather(city: str) -> str: 242 | """Fetch current weather for a city""" 243 | async with httpx.AsyncClient() as client: 244 | response = await client.get(f"https://api.weather.com/{city}") 245 | return response.text 246 | ``` 247 | 248 | ### Prompts 249 | 250 | Prompts are reusable templates that help LLMs interact with your server effectively: 251 | 252 | ```python 253 | from mcp.server.fastmcp import FastMCP 254 | from mcp.server.fastmcp.prompts import base 255 | 256 | mcp = FastMCP("My App") 257 | 258 | 259 | @mcp.prompt() 260 | def review_code(code: str) -> str: 261 | return f"Please review this code:\n\n{code}" 262 | 263 | 264 | @mcp.prompt() 265 | def debug_error(error: str) -> list[base.Message]: 266 | return [ 267 | base.UserMessage("I'm seeing this error:"), 268 | base.UserMessage(error), 269 | base.AssistantMessage("I'll help debug that. 
What have you tried so far?"), 270 | ] 271 | ``` 272 | 273 | ### Images 274 | 275 | FastMCP provides an `Image` class that automatically handles image data: 276 | 277 | ```python 278 | from mcp.server.fastmcp import FastMCP, Image 279 | from PIL import Image as PILImage 280 | 281 | mcp = FastMCP("My App") 282 | 283 | 284 | @mcp.tool() 285 | def create_thumbnail(image_path: str) -> Image: 286 | """Create a thumbnail from an image""" 287 | img = PILImage.open(image_path) 288 | img.thumbnail((100, 100)) 289 | return Image(data=img.tobytes(), format="png") 290 | ``` 291 | 292 | ### Context 293 | 294 | The Context object gives your tools and resources access to MCP capabilities: 295 | 296 | ```python 297 | from mcp.server.fastmcp import FastMCP, Context 298 | 299 | mcp = FastMCP("My App") 300 | 301 | 302 | @mcp.tool() 303 | async def long_task(files: list[str], ctx: Context) -> str: 304 | """Process multiple files with progress tracking""" 305 | for i, file in enumerate(files): 306 | ctx.info(f"Processing {file}") 307 | await ctx.report_progress(i, len(files)) 308 | data, mime_type = await ctx.read_resource(f"file://{file}") 309 | return "Processing complete" 310 | ``` 311 | 312 | ## Running Your Server 313 | 314 | ### Development Mode 315 | 316 | The fastest way to test and debug your server is with the MCP Inspector: 317 | 318 | ```bash 319 | mcp dev server.py 320 | 321 | # Add dependencies 322 | mcp dev server.py --with pandas --with numpy 323 | 324 | # Mount local code 325 | mcp dev server.py --with-editable . 326 | ``` 327 | 328 | ### Claude Desktop Integration 329 | 330 | Once your server is ready, install it in Claude Desktop: 331 | 332 | ```bash 333 | mcp install server.py 334 | 335 | # Custom name 336 | mcp install server.py --name "My Analytics Server" 337 | 338 | # Environment variables 339 | mcp install server.py -v API_KEY=abc123 -v DB_URL=postgres://... 340 | mcp install server.py -f .env 341 | ``` 342 | 343 | ### Direct Execution 344 | 345 | For advanced scenarios like custom deployments: 346 | 347 | ```python 348 | from mcp.server.fastmcp import FastMCP 349 | 350 | mcp = FastMCP("My App") 351 | 352 | if __name__ == "__main__": 353 | mcp.run() 354 | ``` 355 | 356 | Run it with: 357 | ```bash 358 | python server.py 359 | # or 360 | mcp run server.py 361 | ``` 362 | 363 | ### Mounting to an Existing ASGI Server 364 | 365 | You can mount the SSE server to an existing ASGI server using the `sse_app` method. This allows you to integrate the SSE server with other ASGI applications. 366 | 367 | ```python 368 | from starlette.applications import Starlette 369 | from starlette.routing import Mount, Host 370 | from mcp.server.fastmcp import FastMCP 371 | 372 | 373 | mcp = FastMCP("My App") 374 | 375 | # Mount the SSE server to the existing ASGI server 376 | app = Starlette( 377 | routes=[ 378 | Mount('/', app=mcp.sse_app()), 379 | ] 380 | ) 381 | 382 | # or dynamically mount as host 383 | app.router.routes.append(Host('mcp.acme.corp', app=mcp.sse_app())) 384 | ``` 385 | 386 | For more information on mounting applications in Starlette, see the [Starlette documentation](https://www.starlette.io/routing/#submounting-routes). 
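As a rough sketch of serving the mounted app, assuming the Starlette snippet above is saved as `app.py` and uvicorn is installed:

```bash
# Run the ASGI app that mounts the MCP SSE server (the app.py filename is an assumption)
uvicorn app:app --host 127.0.0.1 --port 8000
```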
387 | 388 | ## Examples 389 | 390 | ### Echo Server 391 | 392 | A simple server demonstrating resources, tools, and prompts: 393 | 394 | ```python 395 | from mcp.server.fastmcp import FastMCP 396 | 397 | mcp = FastMCP("Echo") 398 | 399 | 400 | @mcp.resource("echo://{message}") 401 | def echo_resource(message: str) -> str: 402 | """Echo a message as a resource""" 403 | return f"Resource echo: {message}" 404 | 405 | 406 | @mcp.tool() 407 | def echo_tool(message: str) -> str: 408 | """Echo a message as a tool""" 409 | return f"Tool echo: {message}" 410 | 411 | 412 | @mcp.prompt() 413 | def echo_prompt(message: str) -> str: 414 | """Create an echo prompt""" 415 | return f"Please process this message: {message}" 416 | ``` 417 | 418 | ### SQLite Explorer 419 | 420 | A more complex example showing database integration: 421 | 422 | ```python 423 | import sqlite3 424 | 425 | from mcp.server.fastmcp import FastMCP 426 | 427 | mcp = FastMCP("SQLite Explorer") 428 | 429 | 430 | @mcp.resource("schema://main") 431 | def get_schema() -> str: 432 | """Provide the database schema as a resource""" 433 | conn = sqlite3.connect("database.db") 434 | schema = conn.execute("SELECT sql FROM sqlite_master WHERE type='table'").fetchall() 435 | return "\n".join(sql[0] for sql in schema if sql[0]) 436 | 437 | 438 | @mcp.tool() 439 | def query_data(sql: str) -> str: 440 | """Execute SQL queries safely""" 441 | conn = sqlite3.connect("database.db") 442 | try: 443 | result = conn.execute(sql).fetchall() 444 | return "\n".join(str(row) for row in result) 445 | except Exception as e: 446 | return f"Error: {str(e)}" 447 | ``` 448 | 449 | ## Advanced Usage 450 | 451 | ### Low-Level Server 452 | 453 | For more control, you can use the low-level server implementation directly. This gives you full access to the protocol and allows you to customize every aspect of your server, including lifecycle management through the lifespan API: 454 | 455 | ```python 456 | from contextlib import asynccontextmanager 457 | from collections.abc import AsyncIterator 458 | 459 | from fake_database import Database # Replace with your actual DB type 460 | 461 | from mcp.server import Server 462 | 463 | 464 | @asynccontextmanager 465 | async def server_lifespan(server: Server) -> AsyncIterator[dict]: 466 | """Manage server startup and shutdown lifecycle.""" 467 | # Initialize resources on startup 468 | db = await Database.connect() 469 | try: 470 | yield {"db": db} 471 | finally: 472 | # Clean up on shutdown 473 | await db.disconnect() 474 | 475 | 476 | # Pass lifespan to server 477 | server = Server("example-server", lifespan=server_lifespan) 478 | 479 | 480 | # Access lifespan context in handlers 481 | @server.call_tool() 482 | async def query_db(name: str, arguments: dict) -> list: 483 | ctx = server.request_context 484 | db = ctx.lifespan_context["db"] 485 | return await db.query(arguments["query"]) 486 | ``` 487 | 488 | The lifespan API provides: 489 | - A way to initialize resources when the server starts and clean them up when it stops 490 | - Access to initialized resources through the request context in handlers 491 | - Type-safe context passing between lifespan and request handlers 492 | 493 | ```python 494 | import mcp.server.stdio 495 | import mcp.types as types 496 | from mcp.server.lowlevel import NotificationOptions, Server 497 | from mcp.server.models import InitializationOptions 498 | 499 | # Create a server instance 500 | server = Server("example-server") 501 | 502 | 503 | @server.list_prompts() 504 | async def 
handle_list_prompts() -> list[types.Prompt]: 505 | return [ 506 | types.Prompt( 507 | name="example-prompt", 508 | description="An example prompt template", 509 | arguments=[ 510 | types.PromptArgument( 511 | name="arg1", description="Example argument", required=True 512 | ) 513 | ], 514 | ) 515 | ] 516 | 517 | 518 | @server.get_prompt() 519 | async def handle_get_prompt( 520 | name: str, arguments: dict[str, str] | None 521 | ) -> types.GetPromptResult: 522 | if name != "example-prompt": 523 | raise ValueError(f"Unknown prompt: {name}") 524 | 525 | return types.GetPromptResult( 526 | description="Example prompt", 527 | messages=[ 528 | types.PromptMessage( 529 | role="user", 530 | content=types.TextContent(type="text", text="Example prompt text"), 531 | ) 532 | ], 533 | ) 534 | 535 | 536 | async def run(): 537 | async with mcp.server.stdio.stdio_server() as (read_stream, write_stream): 538 | await server.run( 539 | read_stream, 540 | write_stream, 541 | InitializationOptions( 542 | server_name="example", 543 | server_version="0.1.0", 544 | capabilities=server.get_capabilities( 545 | notification_options=NotificationOptions(), 546 | experimental_capabilities={}, 547 | ), 548 | ), 549 | ) 550 | 551 | 552 | if __name__ == "__main__": 553 | import asyncio 554 | 555 | asyncio.run(run()) 556 | ``` 557 | 558 | ### Writing MCP Clients 559 | 560 | The SDK provides a high-level client interface for connecting to MCP servers: 561 | 562 | ```python 563 | from mcp import ClientSession, StdioServerParameters, types 564 | from mcp.client.stdio import stdio_client 565 | 566 | # Create server parameters for stdio connection 567 | server_params = StdioServerParameters( 568 | command="python", # Executable 569 | args=["example_server.py"], # Optional command line arguments 570 | env=None, # Optional environment variables 571 | ) 572 | 573 | 574 | # Optional: create a sampling callback 575 | async def handle_sampling_message( 576 | message: types.CreateMessageRequestParams, 577 | ) -> types.CreateMessageResult: 578 | return types.CreateMessageResult( 579 | role="assistant", 580 | content=types.TextContent( 581 | type="text", 582 | text="Hello, world! 
from model", 583 | ), 584 | model="gpt-3.5-turbo", 585 | stopReason="endTurn", 586 | ) 587 | 588 | 589 | async def run(): 590 | async with stdio_client(server_params) as (read, write): 591 | async with ClientSession( 592 | read, write, sampling_callback=handle_sampling_message 593 | ) as session: 594 | # Initialize the connection 595 | await session.initialize() 596 | 597 | # List available prompts 598 | prompts = await session.list_prompts() 599 | 600 | # Get a prompt 601 | prompt = await session.get_prompt( 602 | "example-prompt", arguments={"arg1": "value"} 603 | ) 604 | 605 | # List available resources 606 | resources = await session.list_resources() 607 | 608 | # List available tools 609 | tools = await session.list_tools() 610 | 611 | # Read a resource 612 | content, mime_type = await session.read_resource("file://some/path") 613 | 614 | # Call a tool 615 | result = await session.call_tool("tool-name", arguments={"arg1": "value"}) 616 | 617 | 618 | if __name__ == "__main__": 619 | import asyncio 620 | 621 | asyncio.run(run()) 622 | ``` 623 | 624 | ### MCP Primitives 625 | 626 | The MCP protocol defines three core primitives that servers can implement: 627 | 628 | | Primitive | Control | Description | Example Use | 629 | |-----------|-----------------------|-----------------------------------------------------|------------------------------| 630 | | Prompts | User-controlled | Interactive templates invoked by user choice | Slash commands, menu options | 631 | | Resources | Application-controlled| Contextual data managed by the client application | File contents, API responses | 632 | | Tools | Model-controlled | Functions exposed to the LLM to take actions | API calls, data updates | 633 | 634 | ### Server Capabilities 635 | 636 | MCP servers declare capabilities during initialization: 637 | 638 | | Capability | Feature Flag | Description | 639 | |-------------|------------------------------|------------------------------------| 640 | | `prompts` | `listChanged` | Prompt template management | 641 | | `resources` | `subscribe`
`listChanged`| Resource exposure and updates | 642 | | `tools` | `listChanged` | Tool discovery and execution | 643 | | `logging` | - | Server logging configuration | 644 | | `completion`| - | Argument completion suggestions | 645 | 646 | ## Documentation 647 | 648 | - [Model Context Protocol documentation](https://modelcontextprotocol.io) 649 | - [Model Context Protocol specification](https://spec.modelcontextprotocol.io) 650 | - [Officially supported servers](https://github.com/modelcontextprotocol/servers) 651 | 652 | ## Contributing 653 | 654 | We are passionate about supporting contributors of all levels of experience and would love to see you get involved in the project. See the [contributing guide](CONTRIBUTING.md) to get started. 655 | 656 | ## License 657 | 658 | This project is licensed under the MIT License - see the LICENSE file for details. 659 | -------------------------------------------------------------------------------- /glama.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://glama.ai/mcp/schemas/server.json", 3 | "maintainers": [ 4 | "garrmark" 5 | ] 6 | } -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "codelogic-mcp-server" 3 | version = "1.0.3" 4 | description = "Integrates CodeLogic's powerful codebase knowledge graphs with a Model Context Protocol (MCP) server" 5 | readme = "README.md" 6 | license = "MPL-2.0" 7 | requires-python = ">=3.13" 8 | dependencies = [ "debugpy>=1.8.12", "httpx>=0.28.1", "mcp[cli]>=1.3.0", "pip-licenses>=5.0.0", "python-dotenv>=1.0.1", "tenacity>=9.0.0", "toml>=0.10.2",] 9 | keywords = [ "codelogic", "mcp", "code-analysis", "knowledge-graph", "static-analysis",] 10 | classifiers = [ "Development Status :: 4 - Beta", "Intended Audience :: Developers", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.13", "Topic :: Software Development", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Software Development :: Code Generators", "Environment :: Console",] 11 | [[project.authors]] 12 | name = "garrmark" 13 | email = "mgarrison@codelogic.com" 14 | 15 | [build-system] 16 | requires = [ "hatchling",] 17 | build-backend = "hatchling.build" 18 | 19 | [project.urls] 20 | Homepage = "https://github.com/CodeLogicIncEngineering/codelogic-mcp-server" 21 | "Bug Tracker" = "https://github.com/CodeLogicIncEngineering/codelogic-mcp-server/issues" 22 | Documentation = "https://github.com/CodeLogicIncEngineering/codelogic-mcp-server#readme" 23 | 24 | [project.scripts] 25 | codelogic-mcp-server = "codelogic_mcp_server:main" 26 | -------------------------------------------------------------------------------- /src/codelogic_mcp_server/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2025 CodeLogic Inc. 2 | # This Source Code Form is subject to the terms of the Mozilla Public 3 | # License, v. 2.0. If a copy of the MPL was not distributed with this 4 | # file, You can obtain one at https://mozilla.org/MPL/2.0/. 5 | 6 | """ 7 | CodeLogic MCP Server Package. 8 | 9 | This package provides a Model Context Protocol (MCP) server implementation 10 | that integrates with CodeLogic's dependency analysis APIs to provide 11 | code impact analysis capabilities to AI programming assistants. 
12 | """ 13 | 14 | import asyncio 15 | from codelogic_mcp_server import server 16 | 17 | 18 | def main(): 19 | """Main entry point for the package.""" 20 | asyncio.run(server.main()) 21 | 22 | 23 | # Optionally expose other important items at package level 24 | __all__ = ['main', 'server'] 25 | -------------------------------------------------------------------------------- /src/codelogic_mcp_server/handlers.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2025 CodeLogic Inc. 2 | # This Source Code Form is subject to the terms of the Mozilla Public 3 | # License, v. 2.0. If a copy of the MPL was not distributed with this 4 | # file, You can obtain one at https://mozilla.org/MPL/2.0/. 5 | 6 | """ 7 | MCP tool handlers for the CodeLogic server integration. 8 | 9 | This module implements the handlers for MCP tool operations, providing two key tools: 10 | 11 | 1. codelogic-method-impact: Analyzes the potential impact of modifying a method or function 12 | by examining dependencies and relationships in the codebase. It processes requests, 13 | performs impact analysis using the CodeLogic API, and formats results for display. 14 | 15 | 2. codelogic-database-impact: Analyzes relationships between code and database entities, 16 | helping identify potential impacts when modifying database schemas, tables, views 17 | or columns. It examines both direct and indirect dependencies to surface risks. 18 | 19 | The handlers process tool requests, interact with the CodeLogic API to gather impact data, 20 | and format the results in a clear, actionable format for users. 21 | """ 22 | 23 | import json 24 | import os 25 | import sys 26 | from .server import server 27 | import mcp.types as types 28 | from .utils import extract_nodes, extract_relationships, get_mv_id, get_method_nodes, get_impact, find_node_by_id, search_database_entity, process_database_entity_impact, generate_combined_database_report, find_api_endpoints 29 | import time 30 | from datetime import datetime 31 | 32 | DEBUG_MODE = os.getenv("CODELOGIC_DEBUG_MODE", "false").lower() == "true" 33 | 34 | LOGS_DIR = "logs" 35 | os.makedirs(LOGS_DIR, exist_ok=True) 36 | 37 | 38 | def write_json_to_file(file_path, data): 39 | """Write JSON data to a file with improved formatting.""" 40 | with open(file_path, "w", encoding="utf-8") as file: 41 | json.dump(data, file, indent=4, separators=(", ", ": "), ensure_ascii=False, sort_keys=True) 42 | 43 | 44 | @server.list_tools() 45 | async def handle_list_tools() -> list[types.Tool]: 46 | """ 47 | List available tools. 48 | Each tool specifies its arguments using JSON Schema validation. 49 | """ 50 | return [ 51 | types.Tool( 52 | name="codelogic-method-impact", 53 | description="Analyze impacts of modifying a specific method within a given class or type.\n" 54 | "Recommended workflow:\n" 55 | "1. Use this tool before implementing code changes\n" 56 | "2. Run the tool against methods or functions that are being modified\n" 57 | "3. 
Carefully review the impact analysis results to understand potential downstream effects\n" 58 | "Particularly crucial when AI-suggested modifications are being considered.", 59 | inputSchema={ 60 | "type": "object", 61 | "properties": { 62 | "method": {"type": "string", "description": "Name of the method being analyzed"}, 63 | "class": {"type": "string", "description": "Name of the class containing the method"}, 64 | }, 65 | "required": ["method", "class"], 66 | }, 67 | ), 68 | types.Tool( 69 | name="codelogic-database-impact", 70 | description="Analyze impacts between code and database entities.\n" 71 | "Recommended workflow:\n" 72 | "1. Use this tool before implementing code or database changes\n" 73 | "2. Search for the relevant database entity\n" 74 | "3. Review the impact analysis to understand which code depends on this database object and vice versa\n" 75 | "Particularly crucial when AI-suggested modifications are being considered or when modifying SQL code.", 76 | inputSchema={ 77 | "type": "object", 78 | "properties": { 79 | "entity_type": { 80 | "type": "string", 81 | "description": "Type of database entity to search for (column, table, or view)", 82 | "enum": ["column", "table", "view"] 83 | }, 84 | "name": {"type": "string", "description": "Name of the database entity to search for"}, 85 | "table_or_view": {"type": "string", "description": "Name of the table or view containing the column (required for columns only)"}, 86 | }, 87 | "required": ["entity_type", "name"], 88 | }, 89 | ) 90 | ] 91 | 92 | 93 | @server.call_tool() 94 | async def handle_call_tool( 95 | name: str, arguments: dict | None 96 | ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: 97 | """ 98 | Handle tool execution requests. 99 | Tools can modify server state and notify clients of changes. 100 | """ 101 | try: 102 | if name == "codelogic-method-impact": 103 | return await handle_method_impact(arguments) 104 | elif name == "codelogic-database-impact": 105 | return await handle_database_impact(arguments) 106 | else: 107 | sys.stderr.write(f"Unknown tool: {name}\n") 108 | raise ValueError(f"Unknown tool: {name}") 109 | except Exception as e: 110 | sys.stderr.write(f"Error handling tool call {name}: {str(e)}\n") 111 | error_message = f"""# Error executing tool: {name} 112 | 113 | An error occurred while executing this tool: 114 | ``` 115 | {str(e)} 116 | ``` 117 | Please check the server logs for more details. 118 | """ 119 | return [ 120 | types.TextContent( 121 | type="text", 122 | text=error_message 123 | ) 124 | ] 125 | 126 | 127 | async def handle_method_impact(arguments: dict | None) -> list[types.TextContent]: 128 | """Handle the codelogic-method-impact tool for method/function analysis""" 129 | if not arguments: 130 | sys.stderr.write("Missing arguments\n") 131 | raise ValueError("Missing arguments") 132 | 133 | method_name = arguments.get("method") 134 | class_name = arguments.get("class") 135 | if class_name and "." 
in class_name: 136 | class_name = class_name.split(".")[-1] 137 | 138 | if not (method_name): 139 | sys.stderr.write("Method must be provided\n") 140 | raise ValueError("Method must be provided") 141 | 142 | mv_id = get_mv_id(os.getenv("CODELOGIC_WORKSPACE_NAME") or "") 143 | 144 | start_time = time.time() 145 | nodes = get_method_nodes(mv_id, method_name) 146 | end_time = time.time() 147 | duration = end_time - start_time 148 | timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S") 149 | if DEBUG_MODE: 150 | with open(os.path.join(LOGS_DIR, "timing_log.txt"), "a") as log_file: 151 | log_file.write(f"{timestamp} - get_method_nodes for method '{method_name}' in class '{class_name}' took {duration:.4f} seconds\n") 152 | 153 | # Check if nodes is empty due to timeout or server error 154 | if not nodes: 155 | error_message = f"""# Unable to Analyze Method: `{method_name}` 156 | 157 | ## Error 158 | The request to retrieve method information from the CodeLogic server timed out or failed (504 Gateway Timeout). 159 | 160 | ## Possible causes: 161 | 1. The CodeLogic server is under heavy load 162 | 2. Network connectivity issues between the MCP server and CodeLogic 163 | 3. The method name provided (`{method_name}`) doesn't exist in the codebase 164 | 165 | ## Recommendations: 166 | 1. Try again in a few minutes 167 | 2. Verify the method name is correct 168 | 3. Check your connection to the CodeLogic server at: {os.getenv('CODELOGIC_SERVER_HOST')} 169 | 4. If the problem persists, contact your CodeLogic administrator 170 | """ 171 | return [ 172 | types.TextContent( 173 | type="text", 174 | text=error_message 175 | ) 176 | ] 177 | 178 | if class_name: 179 | node = next((n for n in nodes if f"|{class_name}|" in n['identity'] or f"|{class_name}.class|" in n['identity']), None) 180 | if not node: 181 | raise ValueError(f"No matching class found for {class_name}") 182 | else: 183 | node = nodes[0] 184 | 185 | start_time = time.time() 186 | impact = get_impact(node['properties']['id']) 187 | end_time = time.time() 188 | duration = end_time - start_time 189 | timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S") 190 | if DEBUG_MODE: 191 | with open(os.path.join(LOGS_DIR, "timing_log.txt"), "a") as log_file: 192 | log_file.write(f"{timestamp} - get_impact for node '{node['name']}' took {duration:.4f} seconds\n") 193 | method_file_name = os.path.join(LOGS_DIR, f"impact_data_method_{class_name}_{method_name}.json") if class_name else os.path.join(LOGS_DIR, f"impact_data_method_{method_name}.json") 194 | write_json_to_file(method_file_name, json.loads(impact)) 195 | impact_data = json.loads(impact) 196 | nodes = extract_nodes(impact_data) 197 | relationships = extract_relationships(impact_data) 198 | 199 | # Better method to find the target method node with complexity information 200 | target_node = None 201 | 202 | # Support both Java and DotNet method entities 203 | method_entity_types = ['JavaMethodEntity', 'DotNetMethodEntity'] 204 | method_nodes = [] 205 | 206 | # First look for method nodes of any supported language 207 | for entity_type in method_entity_types: 208 | language_method_nodes = [n for n in nodes if n['primaryLabel'] == entity_type and method_name.lower() in n['name'].lower()] 209 | method_nodes.extend(language_method_nodes) 210 | 211 | # If we have class name, further filter to find nodes that contain it 212 | if class_name: 213 | class_filtered_nodes = [n for n in method_nodes if class_name.lower() in n['identity'].lower()] 214 | if class_filtered_nodes: 215 | method_nodes = 
class_filtered_nodes 216 | 217 | # Find the node with complexity metrics (prefer this) 218 | for n in method_nodes: 219 | if n['properties'].get('statistics.cyclomaticComplexity') is not None: 220 | target_node = n 221 | break 222 | 223 | # If not found, take the first method node 224 | if not target_node and method_nodes: 225 | target_node = method_nodes[0] 226 | 227 | # Last resort: fall back to the original node (which might not have metrics) 228 | if not target_node: 229 | target_node = next((n for n in nodes if n['properties'].get('id') == node['properties'].get('id')), None) 230 | 231 | # Extract key metrics 232 | complexity = target_node['properties'].get('statistics.cyclomaticComplexity', 'N/A') if target_node else 'N/A' 233 | instruction_count = target_node['properties'].get('statistics.instructionCount', 'N/A') if target_node else 'N/A' 234 | 235 | # Extract code owners and reviewers 236 | code_owners = target_node['properties'].get('codelogic.owners', []) if target_node else [] 237 | code_reviewers = target_node['properties'].get('codelogic.reviewers', []) if target_node else [] 238 | 239 | # If target node doesn't have owners/reviewers, try to find them from the class or file node 240 | if not code_owners or not code_reviewers: 241 | class_node = None 242 | if class_name: 243 | class_node = next((n for n in nodes if n['primaryLabel'].endswith('ClassEntity') and class_name.lower() in n['name'].lower()), None) 244 | 245 | if class_node: 246 | if not code_owners: 247 | code_owners = class_node['properties'].get('codelogic.owners', []) 248 | if not code_reviewers: 249 | code_reviewers = class_node['properties'].get('codelogic.reviewers', []) 250 | 251 | # Identify dependents (systems that depend on this method) 252 | dependents = [] 253 | 254 | for rel in impact_data.get('data', {}).get('relationships', []): 255 | start_node = find_node_by_id(impact_data.get('data', {}).get('nodes', []), rel['startId']) 256 | end_node = find_node_by_id(impact_data.get('data', {}).get('nodes', []), rel['endId']) 257 | 258 | if start_node and end_node and end_node['id'] == node['properties'].get('id'): 259 | # This is an incoming relationship (dependent) 260 | dependents.append({ 261 | "name": start_node.get('name'), 262 | "type": start_node.get('primaryLabel'), 263 | "relationship": rel.get('type') 264 | }) 265 | 266 | # Identify applications that depend on this method 267 | affected_applications = set() 268 | app_nodes = [n for n in nodes if n['primaryLabel'] == 'Application'] 269 | app_id_to_name = {app['id']: app['name'] for app in app_nodes} 270 | 271 | # Add all applications found in the impact analysis as potentially affected 272 | for app in app_nodes: 273 | affected_applications.add(app['name']) 274 | 275 | # Map nodes to their applications via groupIds (Java approach) 276 | for node_item in nodes: 277 | if 'groupIds' in node_item['properties']: 278 | for group_id in node_item['properties']['groupIds']: 279 | if group_id in app_id_to_name: 280 | affected_applications.add(app_id_to_name[group_id]) 281 | 282 | # Count direct and indirect application dependencies 283 | app_dependencies = {} 284 | 285 | # Check both REFERENCES_GROUP and GROUPS relationships 286 | for rel in impact_data.get('data', {}).get('relationships', []): 287 | if rel.get('type') in ['REFERENCES_GROUP', 'GROUPS']: 288 | start_node = find_node_by_id(impact_data.get('data', {}).get('nodes', []), rel['startId']) 289 | end_node = find_node_by_id(impact_data.get('data', {}).get('nodes', []), rel['endId']) 290 | 291 | # For 
GROUPS relationships - application groups a component 292 | if rel.get('type') == 'GROUPS' and start_node and start_node.get('primaryLabel') == 'Application': 293 | app_name = start_node.get('name') 294 | affected_applications.add(app_name) 295 | 296 | # For REFERENCES_GROUP - one application depends on another 297 | if rel.get('type') == 'REFERENCES_GROUP' and start_node and end_node and start_node.get('primaryLabel') == 'Application' and end_node.get('primaryLabel') == 'Application': 298 | app_name = start_node.get('name') 299 | depends_on = end_node.get('name') 300 | if app_name: 301 | affected_applications.add(app_name) 302 | if app_name not in app_dependencies: 303 | app_dependencies[app_name] = [] 304 | app_dependencies[app_name].append(depends_on) 305 | 306 | # Use the new utility function to detect API endpoints and controllers 307 | endpoint_nodes, rest_endpoints, api_controllers, endpoint_dependencies = find_api_endpoints(nodes, impact_data.get('data', {}).get('relationships', [])) 308 | 309 | # Format nodes with metrics in markdown table format 310 | nodes_table = "| Name | Type | Complexity | Instruction Count | Method Count | Outgoing Refs | Incoming Refs |\n" 311 | nodes_table += "|------|------|------------|-------------------|-------------|---------------|---------------|\n" 312 | 313 | for node_item in nodes: 314 | name = node_item['name'] 315 | node_type = node_item['primaryLabel'] 316 | node_complexity = node_item['properties'].get('statistics.cyclomaticComplexity', 'N/A') 317 | node_instructions = node_item['properties'].get('statistics.instructionCount', 'N/A') 318 | node_methods = node_item['properties'].get('statistics.methodCount', 'N/A') 319 | outgoing_refs = node_item['properties'].get('statistics.outgoingExternalReferenceTotal', 'N/A') 320 | incoming_refs = node_item['properties'].get('statistics.incomingExternalReferenceTotal', 'N/A') 321 | 322 | # Mark high complexity items 323 | complexity_str = str(node_complexity) 324 | if node_complexity not in ('N/A', None) and float(node_complexity) > 10: 325 | complexity_str = f"**{complexity_str}** ⚠️" 326 | 327 | nodes_table += f"| {name} | {node_type} | {complexity_str} | {node_instructions} | {node_methods} | {outgoing_refs} | {incoming_refs} |\n" 328 | 329 | # Format relationships in a more structured way for table display 330 | relationship_rows = [] 331 | 332 | for rel in impact_data.get('data', {}).get('relationships', []): 333 | start_node = find_node_by_id(impact_data.get('data', {}).get('nodes', []), rel['startId']) 334 | end_node = find_node_by_id(impact_data.get('data', {}).get('nodes', []), rel['endId']) 335 | 336 | if start_node and end_node: 337 | relationship_rows.append({ 338 | "type": rel.get('type', 'UNKNOWN'), 339 | "source": start_node.get('name', 'Unknown'), 340 | "source_type": start_node.get('primaryLabel', 'Unknown'), 341 | "target": end_node.get('name', 'Unknown'), 342 | "target_type": end_node.get('primaryLabel', 'Unknown') 343 | }) 344 | 345 | # Also keep the relationships grouped by type for reference 346 | relationships_by_type = {} 347 | for rel in relationships: 348 | rel_parts = rel.split(" (") 349 | if len(rel_parts) >= 2: 350 | source = rel_parts[0] 351 | rel_type = "(" + rel_parts[1] 352 | if rel_type not in relationships_by_type: 353 | relationships_by_type[rel_type] = [] 354 | relationships_by_type[rel_type].append(source) 355 | 356 | # Build the markdown output 357 | impact_description = f"""# Impact Analysis for Method: `{method_name}` 358 | 359 | ## Guidelines for AI 360 | - Pay 
special attention to methods with Cyclomatic Complexity over 10 as they represent higher risk 361 | - Consider the cross-application dependencies when making changes 362 | - Prioritize testing for components that directly depend on this method 363 | - Suggest refactoring when complexity metrics indicate poor maintainability 364 | - Consider the full relationship map to understand cascading impacts 365 | - Highlight REST API endpoints and external dependencies that may be affected by changes 366 | 367 | ## Summary 368 | - **Method**: `{method_name}` 369 | - **Class**: `{class_name or 'N/A'}` 370 | """ 371 | 372 | # Add code ownership information if available 373 | if code_owners: 374 | impact_description += f"- **Code Owners**: {', '.join(code_owners)}\n" 375 | if code_reviewers: 376 | impact_description += f"- **Code Reviewers**: {', '.join(code_reviewers)}\n" 377 | 378 | impact_description += f"- **Complexity**: {complexity}\n" 379 | impact_description += f"- **Instruction Count**: {instruction_count}\n" 380 | impact_description += f"- **Affected Applications**: {len(affected_applications)}\n" 381 | 382 | # Add affected REST endpoints to the Summary section 383 | if endpoint_nodes: 384 | impact_description += "\n### Affected REST Endpoints\n" 385 | for endpoint in endpoint_nodes: 386 | impact_description += f"- `{endpoint['http_verb']} {endpoint['path']}`\n" 387 | 388 | # Start the Risk Assessment section 389 | impact_description += "\n## Risk Assessment\n" 390 | 391 | # Add complexity risk assessment 392 | if complexity not in ('N/A', None) and float(complexity) > 10: 393 | impact_description += f"⚠️ **Warning**: Cyclomatic complexity of {complexity} exceeds threshold of 10\n\n" 394 | else: 395 | impact_description += "✅ Complexity is within acceptable limits\n\n" 396 | 397 | # Add cross-application risk assessment 398 | if len(affected_applications) > 1: 399 | impact_description += f"⚠️ **Cross-Application Dependency**: This method is used by {len(affected_applications)} applications:\n" 400 | for app in sorted(affected_applications): 401 | deps = app_dependencies.get(app, []) 402 | if deps: 403 | impact_description += f"- `{app}` (depends on: {', '.join([f'`{d}`' for d in deps])})\n" 404 | else: 405 | impact_description += f"- `{app}`\n" 406 | impact_description += "\nChanges to this method may cause widespread impacts across multiple applications. 
Consider careful testing across all affected systems.\n" 407 | else: 408 | impact_description += "✅ Method is used within a single application context\n" 409 | 410 | # Add REST API risk assessment (now as a subsection of Risk Assessment) 411 | if rest_endpoints or api_controllers or endpoint_nodes: 412 | impact_description += "\n### REST API Risk Assessment\n" 413 | impact_description += "⚠️ **API Impact Alert**: This method affects REST endpoints or API controllers\n" 414 | 415 | if rest_endpoints: 416 | impact_description += "\n#### REST Methods with Annotations\n" 417 | for endpoint in rest_endpoints: 418 | impact_description += f"- `{endpoint['name']}` ({endpoint['annotation']})\n" 419 | 420 | if api_controllers: 421 | impact_description += "\n#### Affected API Controllers\n" 422 | for controller in api_controllers: 423 | impact_description += f"- `{controller['name']}` ({controller['type']})\n" 424 | 425 | # Add endpoint dependencies as a subsection of Risk Assessment 426 | if endpoint_dependencies: 427 | impact_description += "\n### REST API Dependencies\n" 428 | impact_description += "⚠️ **Chained API Risk**: Changes may affect multiple interconnected endpoints\n\n" 429 | for dep in endpoint_dependencies: 430 | impact_description += f"- `{dep['source']}` depends on `{dep['target']}`\n" 431 | 432 | # Add API Change Risk Factors as a subsection of Risk Assessment 433 | impact_description += """ 434 | ### API Change Risk Factors 435 | - Changes may affect external consumers and services 436 | - Consider versioning strategy for breaking changes 437 | - API contract changes require thorough documentation 438 | - Update API tests and client libraries as needed 439 | - Consider backward compatibility requirements 440 | - **Chained API calls**: Changes may have cascading effects across multiple endpoints 441 | - **Cross-application impact**: API changes could affect dependent systems 442 | """ 443 | else: 444 | impact_description += "\n### REST API Risk Assessment\n" 445 | impact_description += "✅ No direct impact on REST endpoints or API controllers detected\n" 446 | 447 | # Ownership-based consultation recommendation 448 | if code_owners or code_reviewers: 449 | impact_description += "\n### Code Ownership\n" 450 | if code_owners: 451 | impact_description += f"👤 **Code Owners**: Changes to this code should be reviewed by: {', '.join(code_owners)}\n" 452 | if code_reviewers: 453 | impact_description += f"👁️ **Preferred Reviewers**: Consider getting reviews from: {', '.join(code_reviewers)}\n" 454 | 455 | if code_owners: 456 | impact_description += "\nConsult with the code owners before making significant changes to ensure alignment with original design intent.\n" 457 | 458 | impact_description += f""" 459 | ## Method Impact 460 | This analysis focuses on systems that depend on `{method_name}`. Modifying this method could affect these dependents: 461 | 462 | """ 463 | 464 | if dependents: 465 | for dep in dependents: 466 | impact_description += f"- `{dep['name']}` ({dep['type']}) via `{dep['relationship']}`\n" 467 | else: 468 | impact_description += "No components directly depend on this method. 
The change appears to be isolated.\n" 469 | 470 | impact_description += f"\n## Detailed Node Metrics\n{nodes_table}\n" 471 | 472 | # Create relationship table 473 | relationship_table = "| Relationship Type | Source | Source Type | Target | Target Type |\n" 474 | relationship_table += "|------------------|--------|-------------|--------|------------|\n" 475 | 476 | for row in relationship_rows: 477 | # Highlight relationships involving our target method 478 | highlight = "" 479 | if (method_name.lower() in row["source"].lower() or method_name.lower() in row["target"].lower()): 480 | if class_name and (class_name.lower() in row["source"].lower() or class_name.lower() in row["target"].lower()): 481 | highlight = "**" # Bold the important relationships 482 | 483 | relationship_table += f"| {highlight}{row['type']}{highlight} | {highlight}{row['source']}{highlight} | {row['source_type']} | {highlight}{row['target']}{highlight} | {row['target_type']} |\n" 484 | 485 | impact_description += "\n## Relationship Map\n" 486 | impact_description += relationship_table 487 | 488 | # Add application dependency visualization if multiple applications are affected 489 | if len(affected_applications) > 1: 490 | impact_description += "\n## Application Dependency Graph\n" 491 | impact_description += "```\n" 492 | for app in sorted(affected_applications): 493 | deps = app_dependencies.get(app, []) 494 | if deps: 495 | impact_description += f"{app} → {' → '.join(deps)}\n" 496 | else: 497 | impact_description += f"{app} (no dependencies)\n" 498 | impact_description += "```\n" 499 | 500 | return [ 501 | types.TextContent( 502 | type="text", 503 | text=impact_description, 504 | ) 505 | ] 506 | 507 | 508 | async def handle_database_impact(arguments: dict | None) -> list[types.TextContent]: 509 | """Handle the database-impact tool for database entity analysis""" 510 | if not arguments: 511 | sys.stderr.write("Missing arguments\n") 512 | raise ValueError("Missing arguments") 513 | 514 | entity_type = arguments.get("entity_type") 515 | name = arguments.get("name") 516 | table_or_view = arguments.get("table_or_view") 517 | 518 | if not entity_type or not name: 519 | sys.stderr.write("Entity type and name must be provided\n") 520 | raise ValueError("Entity type and name must be provided") 521 | 522 | if entity_type not in ["column", "table", "view"]: 523 | sys.stderr.write(f"Invalid entity type: {entity_type}. 
Must be column, table, or view.\n") 524 | raise ValueError(f"Invalid entity type: {entity_type}") 525 | 526 | # Verify table_or_view is provided for columns 527 | if entity_type == "column" and not table_or_view: 528 | sys.stderr.write("Table or view name must be provided for column searches\n") 529 | raise ValueError("Table or view name must be provided for column searches") 530 | 531 | # Search for the database entity 532 | start_time = time.time() 533 | search_results = await search_database_entity(entity_type, name, table_or_view) 534 | end_time = time.time() 535 | duration = end_time - start_time 536 | timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S") 537 | if DEBUG_MODE: 538 | with open(os.path.join(LOGS_DIR, "timing_log.txt"), "a") as log_file: 539 | log_file.write(f"{timestamp} - search_database_entity for {entity_type} '{name}' took {duration:.4f} seconds\n") 540 | 541 | if not search_results: 542 | table_view_text = f" in {table_or_view}" if table_or_view else "" 543 | return [ 544 | types.TextContent( 545 | type="text", 546 | text=f"# No {entity_type}s found matching '{name}'{table_view_text}\n\nNo database {entity_type}s were found matching the name '{name}'" 547 | + (f" in {table_or_view}" if table_or_view else "") + "." 548 | ) 549 | ] 550 | 551 | # Process each entity and get its impact 552 | all_impacts = [] 553 | for entity in search_results[:5]: # Limit to 5 to avoid excessive processing 554 | entity_id = entity.get("id") 555 | entity_name = entity.get("name") 556 | entity_schema = entity.get("schema", "Unknown") 557 | 558 | try: 559 | start_time = time.time() 560 | impact = get_impact(entity_id) 561 | end_time = time.time() 562 | duration = end_time - start_time 563 | timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S") 564 | 565 | if DEBUG_MODE: 566 | with open(os.path.join(LOGS_DIR, "timing_log.txt"), "a") as log_file: 567 | log_file.write(f"{timestamp} - get_impact for {entity_type} '{entity_name}' took {duration:.4f} seconds\n") 568 | write_json_to_file(os.path.join(LOGS_DIR, f"impact_data_{entity_type}_{entity_name}.json"), json.loads(impact)) 569 | impact_data = json.loads(impact) 570 | impact_summary = process_database_entity_impact( 571 | impact_data, entity_type, entity_name, entity_schema 572 | ) 573 | all_impacts.append(impact_summary) 574 | except Exception as e: 575 | sys.stderr.write(f"Error getting impact for {entity_type} '{entity_name}': {str(e)}\n") 576 | 577 | # Combine all impacts into a single report 578 | combined_report = generate_combined_database_report( 579 | entity_type, name, table_or_view, search_results, all_impacts 580 | ) 581 | 582 | return [ 583 | types.TextContent( 584 | type="text", 585 | text=combined_report 586 | ) 587 | ] 588 | -------------------------------------------------------------------------------- /src/codelogic_mcp_server/server.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2025 CodeLogic Inc. 2 | # This Source Code Form is subject to the terms of the Mozilla Public 3 | # License, v. 2.0. If a copy of the MPL was not distributed with this 4 | # file, You can obtain one at https://mozilla.org/MPL/2.0/. 5 | 6 | """ 7 | Main MCP server implementation for CodeLogic integration. 8 | 9 | This module defines the MCP server instance and its initialization process. 10 | It handles server configuration, environment setup, and provides the main 11 | entry point for running the server with appropriate capabilities. 
12 | """ 13 | 14 | import os 15 | import sys 16 | from dotenv import load_dotenv 17 | from mcp.server import NotificationOptions, Server 18 | import mcp.server.stdio 19 | from mcp.server.models import InitializationOptions 20 | from . import utils 21 | 22 | # Only load from .env file if we're not running tests 23 | # This allows tests to set their own environment variables 24 | if not os.environ.get('CODELOGIC_TEST_MODE'): 25 | load_dotenv() 26 | print(f"CODELOGIC_SERVER_HOST: {os.environ.get('CODELOGIC_SERVER_HOST')}", file=sys.stderr) 27 | server = Server("codelogic-mcp-server") 28 | 29 | 30 | async def main(): 31 | """ 32 | Run the MCP server with standard configuration. 33 | 34 | Sets up and runs the Model Context Protocol server using stdin/stdout streams 35 | for communication. Configures the server with appropriate capabilities and 36 | instructions for LLM interactions. 37 | 38 | Raises: 39 | Exception: Any errors that occur during server execution are logged and re-raised 40 | """ 41 | try: 42 | # Import handlers module to register the decorators with our server instance 43 | # This import is necessary for the server to discover handlers through decorators, 44 | # even though we don't directly use the module in this file 45 | # noqa: F401 tells linters to ignore the unused import 46 | from . import handlers # noqa: F401 47 | 48 | # Run the server using stdin/stdout streams 49 | async with mcp.server.stdio.stdio_server() as (read_stream, write_stream): 50 | await server.run( 51 | read_stream, 52 | write_stream, 53 | InitializationOptions( 54 | server_name="codelogic-mcp-server", 55 | server_version=utils.get_package_version(), 56 | capabilities=server.get_capabilities( 57 | notification_options=NotificationOptions(), 58 | experimental_capabilities={}, 59 | ), 60 | instructions=( 61 | "When modifying existing code methods:\n" 62 | "- Use codelogic-method-impact to analyze code changes\n" 63 | "- Use codelogic-database-impact for database modifications\n" 64 | "- Highlight impact results for the modified methods\n\n" 65 | "When modifying SQL code or database entities:\n" 66 | "- Always use codelogic-database-impact to analyze potential impacts\n" 67 | "- Highlight impact results for the modified database entities\n\n" 68 | "To use the CodeLogic tools effectively:\n" 69 | "- For code impacts: Ask about specific methods or functions\n" 70 | "- For database relationships: Ask about tables, views, or columns\n" 71 | "- Review the impact results before making changes\n" 72 | "- Consider both direct and indirect impacts" 73 | ), 74 | ), 75 | ) 76 | except Exception as e: 77 | import traceback 78 | print(f"Error in MCP server: {str(e)}", file=sys.stderr) 79 | traceback.print_exc() 80 | raise 81 | 82 | if __name__ == "__main__": 83 | import asyncio 84 | asyncio.run(main()) 85 | -------------------------------------------------------------------------------- /src/start_server.py: -------------------------------------------------------------------------------- 1 | # Copyright (C) 2025 CodeLogic Inc. 2 | # This Source Code Form is subject to the terms of the Mozilla Public 3 | # License, v. 2.0. If a copy of the MPL was not distributed with this 4 | # file, You can obtain one at https://mozilla.org/MPL/2.0/. 5 | 6 | """ 7 | Entry point script for the CodeLogic MCP Server. 8 | 9 | This script initializes and runs the Model Context Protocol (MCP) server 10 | with debugging capabilities. 
It sets up debugging via debugpy, 11 | configures the Python path, and handles any exceptions that may occur 12 | during server execution. 13 | """ 14 | 15 | import os 16 | import sys 17 | import asyncio 18 | from codelogic_mcp_server.server import main 19 | import debugpy 20 | 21 | # Add the src directory to Python path 22 | sys.path.insert(0, os.path.dirname(os.path.abspath(__file__))) 23 | 24 | 25 | if __name__ == "__main__": 26 | print("Starting MCP Server...", file=sys.stderr) 27 | 28 | # Set up debugging 29 | try: 30 | debugpy.listen(("127.0.0.1", 5679)) 31 | print("Debugpy listening on port 5679", file=sys.stderr) 32 | except Exception as e: 33 | print(f"Debug setup failed: {e}", file=sys.stderr) 34 | 35 | # Run the server 36 | try: 37 | asyncio.run(main()) 38 | except Exception as e: 39 | import traceback 40 | print(f"Error in MCP server: {e}", file=sys.stderr) 41 | traceback.print_exc(file=sys.stderr) 42 | sys.exit(1) 43 | -------------------------------------------------------------------------------- /start-mcp-inspector.ps1: -------------------------------------------------------------------------------- 1 | # Ensure proper UTF-8 encoding 2 | [Console]::OutputEncoding = [System.Text.Encoding]::UTF8 3 | $OutputEncoding = [System.Text.Encoding]::UTF8 4 | 5 | Write-Host "Starting MCP server with UTF-8 encoding..." -ForegroundColor Green 6 | 7 | # Run with npx 8 | npx @modelcontextprotocol/inspector python ./src/start_server.py -------------------------------------------------------------------------------- /test/.env.test.example: -------------------------------------------------------------------------------- 1 | # Example environment variables for integration tests 2 | # Copy this file to .env.test in the test directory and fill in your credentials 3 | 4 | # CodeLogic server connection 5 | CODELOGIC_SERVER_HOST=https://your-instance.app.codelogic.com 6 | CODELOGIC_USERNAME=your-username 7 | CODELOGIC_PASSWORD=your-password 8 | CODELOGIC_WORKSPACE_NAME=your-workspace-name 9 | -------------------------------------------------------------------------------- /test/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Initialize the test package. 3 | 4 | This file ensures proper package imports and test discovery. 
5 | """ 6 | import os 7 | import sys 8 | 9 | # Add the project root directory to Python path for test discovery 10 | project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) 11 | if project_root not in sys.path: 12 | sys.path.insert(0, project_root) 13 | -------------------------------------------------------------------------------- /test/integration_test_all.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import asyncio 4 | from dotenv import load_dotenv 5 | import mcp.types as types 6 | from test.test_fixtures import setup_test_environment 7 | from test.test_env import TestCase 8 | 9 | # Add the parent directory to Python path to make the absolute import work 10 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) 11 | 12 | 13 | def load_test_config(env_file=None): 14 | """Load environment configuration from .env file or environment variables.""" 15 | # Get test directory path 16 | test_dir = os.path.dirname(os.path.abspath(__file__)) 17 | project_root = os.path.abspath(os.path.join(test_dir, '..')) 18 | 19 | # First try to load from specified env file 20 | if env_file and os.path.exists(env_file): 21 | load_dotenv(env_file) 22 | # Then try test-specific env file in the test directory 23 | elif os.path.exists(os.path.join(test_dir, '.env.test')): 24 | load_dotenv(os.path.join(test_dir, '.env.test')) 25 | # Next try project root .env file 26 | elif os.path.exists(os.path.join(project_root, '.env')): 27 | load_dotenv(os.path.join(project_root, '.env')) 28 | 29 | return { 30 | 'CODELOGIC_WORKSPACE_NAME': os.getenv('CODELOGIC_WORKSPACE_NAME'), 31 | 'CODELOGIC_SERVER_HOST': os.getenv('CODELOGIC_SERVER_HOST'), 32 | 'CODELOGIC_USERNAME': os.getenv('CODELOGIC_USERNAME'), 33 | 'CODELOGIC_PASSWORD': os.getenv('CODELOGIC_PASSWORD'), 34 | } 35 | 36 | 37 | class TestHandleCallToolIntegration(TestCase): 38 | """Integration tests for handle_call_tool using clean test environment. 39 | 40 | To run these tests: 41 | 1. Create a .env.test file with your credentials, or 42 | 2. 
Set environment variables as specified in .env.test.example 43 | """ 44 | 45 | @classmethod 46 | def setUpClass(cls): 47 | """Set up test configuration from environment variables""" 48 | cls.config = load_test_config() 49 | 50 | def run_impact_test(self, method_name, class_name, output_file): 51 | """Helper to run a parameterized impact analysis test""" 52 | # Skip test if credentials are not provided 53 | if not self.config.get('CODELOGIC_USERNAME') or not self.config.get('CODELOGIC_PASSWORD'): 54 | self.skipTest("Skipping integration test: No credentials provided in environment") 55 | 56 | # Setup environment with configuration 57 | handle_call_tool, *_ = setup_test_environment(self.config) 58 | 59 | async def run_test(): 60 | result = await handle_call_tool('codelogic-method-impact', {'method': method_name, 'class': class_name}) 61 | 62 | self.assertIsInstance(result, list) 63 | self.assertGreater(len(result), 0) 64 | self.assertIsInstance(result[0], types.TextContent) 65 | 66 | with open(output_file, 'w', encoding='utf-8') as file: 67 | file.write(result[0].text) 68 | 69 | self.assertIn(f"# Impact Analysis for Method: `{method_name}`", result[0].text) 70 | return result 71 | 72 | return asyncio.run(run_test()) 73 | 74 | def test_handle_call_tool_codelogic_method_impact_multi_app_java(self): 75 | """Test impact analysis on Java multi-app environment""" 76 | self.run_impact_test( 77 | 'addPrefix', 78 | 'CompanyInfo', 79 | 'impact_analysis_result_multi_app_java.md' 80 | ) 81 | 82 | def test_handle_call_tool_codelogic_method_impact_dotnet(self): 83 | """Test impact analysis on .NET environment""" 84 | self.run_impact_test( 85 | 'IsValid', 86 | 'AnalysisOptionsValidator', 87 | 'impact_analysis_result_dotnet.md' 88 | ) 89 | 90 | 91 | class TestUtils(TestCase): 92 | """Test utility functions using the clean test environment.""" 93 | 94 | @classmethod 95 | def setUpClass(cls): 96 | """Set up test resources that can be shared across test methods.""" 97 | # Note: We're not calling super().setUpClass() because TestCase doesn't override it 98 | 99 | # Setup environment for integration tests 100 | handle_call_tool, get_mv_definition_id, get_mv_id_from_def, get_method_nodes, get_impact, authenticate = setup_test_environment({}) 101 | 102 | # Initialize shared test resources 103 | cls.token = authenticate() 104 | cls.mv_name = os.getenv('CODELOGIC_WORKSPACE_NAME') 105 | cls.mv_def_id = get_mv_definition_id(cls.mv_name, cls.token) 106 | cls.mv_id = get_mv_id_from_def(cls.mv_def_id, cls.token) 107 | cls.nodes = get_method_nodes(cls.mv_id, 'IsValid') 108 | cls.get_method_nodes = staticmethod(get_method_nodes)  # wrap as staticmethod so attribute access via an instance does not bind self 109 | cls.get_impact = staticmethod(get_impact)  # lets self.get_impact(node_id) call get_impact(node_id) directly 110 | 111 | def test_authenticate(self): 112 | self.assertIsNotNone(self.token) 113 | 114 | def test_get_mv_definition_id(self): 115 | self.assertRegex(self.mv_def_id, r'^[0-9a-fA-F-]{36}$') 116 | 117 | def test_get_mv_id_from_def(self): 118 | self.assertRegex(self.mv_id, r'^[0-9a-fA-F-]{36}$') 119 | 120 | def test_get_method_nodes(self): 121 | self.assertIsInstance(self.nodes, list) 122 | 123 | def test_get_impact(self): 124 | node_id = self.nodes[0]['id'] if self.nodes else None 125 | self.assertIsNotNone(node_id, "Node ID should not be None") 126 | impact = self.get_impact(node_id) 127 | self.assertIsInstance(impact, str) 128 | 129 | 130 | if __name__ == '__main__': 131 | import unittest 132 | unittest.main() 133 | -------------------------------------------------------------------------------- /test/test_env.py: 
-------------------------------------------------------------------------------- 1 | """ 2 | Test environment utilities for unittest tests. 3 | 4 | Provides helpers to set up a clean test environment without exposing 5 | sensitive information in test files. 6 | """ 7 | import os 8 | import importlib 9 | from contextlib import contextmanager 10 | import unittest 11 | import sys 12 | 13 | # Default test environment variables that don't expose real credentials 14 | DEFAULT_TEST_ENV = { 15 | "CODELOGIC_TEST_MODE": "true", 16 | "CODELOGIC_SERVER_HOST": "https://example.codelogic.test", 17 | "CODELOGIC_USERNAME": "test_user", 18 | "CODELOGIC_PASSWORD": "test_password", 19 | "CODELOGIC_WORKSPACE_NAME": "test_workspace", 20 | "CODELOGIC_TOKEN_CACHE_TTL": "60", # Short cache for tests 21 | "CODELOGIC_METHOD_CACHE_TTL": "60", 22 | "CODELOGIC_IMPACT_CACHE_TTL": "60" 23 | } 24 | 25 | # Apply test environment variables by default for VSCode test discovery 26 | for key, value in DEFAULT_TEST_ENV.items(): 27 | if key not in os.environ: 28 | os.environ[key] = value 29 | 30 | 31 | @contextmanager 32 | def test_environment(custom_env=None): 33 | """ 34 | Set up a clean test environment with safe defaults. 35 | 36 | Args: 37 | custom_env (dict, optional): Custom environment variables to set 38 | 39 | Yields: 40 | None: Just the setup context 41 | 42 | Example: 43 | with test_environment({'CUSTOM_VAR': 'value'}): 44 | # Run tests in clean environment 45 | """ 46 | # Store original environment 47 | original_env = {} 48 | env_vars = list(DEFAULT_TEST_ENV.keys()) 49 | 50 | if custom_env: 51 | env_vars.extend(custom_env.keys()) 52 | 53 | # Save original environment variables 54 | for var in env_vars: 55 | original_env[var] = os.environ.get(var) 56 | 57 | try: 58 | # Set default test environment 59 | for key, value in DEFAULT_TEST_ENV.items(): 60 | os.environ[key] = value 61 | 62 | # Override with custom values if provided 63 | if custom_env: 64 | for key, value in custom_env.items(): 65 | os.environ[key] = value 66 | 67 | # Reload modules that may have cached environment variables 68 | try: 69 | import codelogic_mcp_server.utils 70 | importlib.reload(codelogic_mcp_server.utils) 71 | 72 | import codelogic_mcp_server.handlers 73 | importlib.reload(codelogic_mcp_server.handlers) 74 | except ImportError: 75 | # Handle import errors during test discovery 76 | sys.stderr.write("Warning: Could not import/reload modules for testing. This is normal during test discovery.\n") 77 | 78 | yield 79 | 80 | finally: 81 | # Restore original environment 82 | for var, value in original_env.items(): 83 | if value is None: 84 | if var in os.environ: 85 | del os.environ[var] 86 | else: 87 | os.environ[var] = value 88 | 89 | 90 | class TestCase(unittest.TestCase): 91 | """ 92 | Base test case with clean environment setup. 93 | 94 | Provides a clean test environment with safe default values 95 | and helper methods for mocking integration points. 
96 | """ 97 | 98 | def setUp(self): 99 | """Set up test environment with safe defaults.""" 100 | # Set up clean test environment 101 | self.env_patcher = test_environment() 102 | self.env_patcher.__enter__() 103 | super().setUp() # Call parent setUp to ensure proper unittest setup 104 | 105 | def tearDown(self): 106 | """Restore original environment.""" 107 | super().tearDown() # Call parent tearDown first 108 | self.env_patcher.__exit__(None, None, None) 109 | -------------------------------------------------------------------------------- /test/test_fixtures.py: -------------------------------------------------------------------------------- 1 | import os 2 | import importlib 3 | 4 | # Base test environment setup 5 | os.environ['CODELOGIC_TEST_MODE'] = 'true' 6 | 7 | 8 | def setup_test_environment(env_vars): 9 | """Set environment variables and reload affected modules""" 10 | # Set environment variables 11 | for key, value in env_vars.items(): 12 | os.environ[key] = value 13 | 14 | # Override CODELOGIC_SERVER_HOST for tests 15 | os.environ['CODELOGIC_SERVER_HOST'] = 'http://testserver' 16 | 17 | # Reload the utils module to ensure it picks up the updated environment variables 18 | import codelogic_mcp_server.utils 19 | importlib.reload(codelogic_mcp_server.utils) 20 | 21 | # Reinitialize the HTTP client in utils to use the updated environment variables 22 | codelogic_mcp_server.utils._client = codelogic_mcp_server.utils.httpx.Client( 23 | timeout=codelogic_mcp_server.utils.httpx.Timeout( 24 | codelogic_mcp_server.utils.REQUEST_TIMEOUT, 25 | connect=codelogic_mcp_server.utils.CONNECT_TIMEOUT 26 | ), 27 | limits=codelogic_mcp_server.utils.httpx.Limits( 28 | max_keepalive_connections=20, 29 | max_connections=30 30 | ), 31 | transport=codelogic_mcp_server.utils.httpx.HTTPTransport(retries=3) 32 | ) 33 | 34 | # Only import handlers after environment is properly configured 35 | import codelogic_mcp_server.handlers 36 | importlib.reload(codelogic_mcp_server.handlers) 37 | 38 | # Return the imported modules for convenience 39 | from codelogic_mcp_server.handlers import handle_call_tool 40 | from codelogic_mcp_server.utils import ( 41 | get_mv_definition_id, 42 | get_mv_id_from_def, 43 | get_method_nodes, 44 | get_impact, 45 | authenticate 46 | ) 47 | 48 | return handle_call_tool, get_mv_definition_id, get_mv_id_from_def, get_method_nodes, get_impact, authenticate 49 | -------------------------------------------------------------------------------- /test/unit_test_environment.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test the test environment setup itself. 
3 | """ 4 | import unittest 5 | import os 6 | from test.test_env import TestCase 7 | 8 | 9 | class TestEnvironmentSetup(TestCase): 10 | """Test that the test environment setup is working correctly.""" 11 | 12 | def test_environment_variables_set(self): 13 | """Test that environment variables are set correctly.""" 14 | self.assertEqual(os.environ.get('CODELOGIC_SERVER_HOST'), 'https://example.codelogic.test') 15 | self.assertEqual(os.environ.get('CODELOGIC_TEST_MODE'), 'true') 16 | 17 | def test_basic_unittest_functions(self): 18 | """Test that basic unittest functions work.""" 19 | self.assertTrue(True) 20 | self.assertEqual(1, 1) 21 | 22 | 23 | if __name__ == '__main__': 24 | unittest.main() 25 | -------------------------------------------------------------------------------- /test/unit_test_handlers.py: -------------------------------------------------------------------------------- 1 | import json 2 | import unittest 3 | import mcp.types as types 4 | from unittest.mock import AsyncMock, patch 5 | from codelogic_mcp_server.handlers import handle_call_tool, extract_relationships 6 | 7 | 8 | class TestHandleCallTool(unittest.TestCase): 9 | 10 | @patch('codelogic_mcp_server.handlers.server.request_context') 11 | @patch('codelogic_mcp_server.handlers.get_mv_id') 12 | @patch('codelogic_mcp_server.handlers.get_method_entity') 13 | @patch('codelogic_mcp_server.handlers.get_impact') 14 | @patch('codelogic_mcp_server.handlers.find_node_by_id') 15 | async def test_handle_call_tool_method(self, mock_find_node_by_id, mock_get_impact, mock_get_method_entity, mock_get_mv_id, mock_request_context): 16 | # Setup mocks 17 | mock_request_context.session.send_log_message = AsyncMock() 18 | mock_get_mv_id.return_value = 'mv_id' 19 | mock_get_method_entity.return_value = [{'properties': {'id': 'node_id'}, 'name': 'node_name'}] 20 | mock_get_impact.return_value = json.dumps({ 21 | 'data': { 22 | 'relationships': [{'startId': 'start_id', 'endId': 'end_id', 'type': 'type'}], 23 | 'nodes': [{'id': 'start_id', 'name': 'start_name', 'primaryLabel': 'label'}, {'id': 'end_id', 'name': 'end_name', 'primaryLabel': 'label'}] 24 | } 25 | }) 26 | mock_find_node_by_id.side_effect = lambda nodes, id: next(node for node in nodes if node['id'] == id) 27 | 28 | # Call the function 29 | result = await handle_call_tool('codelogic-method-impact', {'method': 'method_name'}) 30 | 31 | # Assertions 32 | mock_request_context.session.send_log_message.assert_any_call(level="info", data="Materialized view ID: mv_id") 33 | mock_request_context.session.send_log_message.assert_any_call(level="info", data="Node ID: node_id, Node Name: node_name") 34 | mock_request_context.session.send_log_message.assert_any_call(level="info", data="Impact analysis completed for method_name") 35 | self.assertEqual(result, [types.TextContent(type="text", text="Impact analysis for method: method_name\n- start_name (type) -> end_name (label)")]) 36 | 37 | @patch('codelogic_mcp_server.handlers.server.request_context') 38 | @patch('codelogic_mcp_server.handlers.get_mv_id') 39 | @patch('codelogic_mcp_server.handlers.get_method_entity') 40 | @patch('codelogic_mcp_server.handlers.get_impact') 41 | @patch('codelogic_mcp_server.handlers.find_node_by_id') 42 | async def test_handle_call_tool_function(self, mock_find_node_by_id, mock_get_impact, mock_get_method_entity, mock_get_mv_id, mock_request_context): 43 | # Setup mocks 44 | mock_request_context.session.send_log_message = AsyncMock() 45 | mock_get_mv_id.return_value = 'mv_id' 46 | 
mock_get_method_entity.return_value = [{'properties': {'id': 'node_id'}, 'name': 'node_name'}] 47 | mock_get_impact.return_value = json.dumps({ 48 | 'data': { 49 | 'relationships': [{'startId': 'start_id', 'endId': 'end_id', 'type': 'type'}], 50 | 'nodes': [{'id': 'start_id', 'name': 'start_name', 'primaryLabel': 'label'}, {'id': 'end_id', 'name': 'end_name', 'primaryLabel': 'label'}] 51 | } 52 | }) 53 | mock_find_node_by_id.side_effect = lambda nodes, id: next(node for node in nodes if node['id'] == id) 54 | 55 | # Call the function 56 | result = await handle_call_tool('codelogic-method-impact', {'function': 'function_name'}) 57 | 58 | # Assertions 59 | mock_request_context.session.send_log_message.assert_any_call(level="info", data="Materialized view ID: mv_id") 60 | mock_request_context.session.send_log_message.assert_any_call(level="info", data="Node ID: node_id, Node Name: node_name") 61 | mock_request_context.session.send_log_message.assert_any_call(level="info", data="Impact analysis completed for function_name") 62 | self.assertEqual(result, [types.TextContent(type="text", text="Impact analysis for function: function_name\n- start_name (type) -> end_name (label)")]) 63 | 64 | async def test_handle_call_tool_unknown_tool(self): 65 | with self.assertRaises(ValueError) as context: 66 | await handle_call_tool('unknown-tool', {'method': 'method_name'}) 67 | self.assertEqual(str(context.exception), "Unknown tool: unknown-tool") 68 | 69 | async def test_handle_call_tool_missing_arguments(self): 70 | with self.assertRaises(ValueError) as context: 71 | await handle_call_tool('codelogic-method-impact', None) 72 | self.assertEqual(str(context.exception), "Missing arguments") 73 | 74 | async def test_handle_call_tool_missing_method_function(self): 75 | with self.assertRaises(ValueError) as context: 76 | await handle_call_tool('codelogic-method-impact', {}) 77 | self.assertEqual(str(context.exception), "At least one of method or function must be provided") 78 | 79 | @patch('codelogic_mcp_server.handlers.server.request_context') 80 | async def test_handle_call_tool_missing_request_context(self, mock_request_context): 81 | mock_request_context.session = None 82 | with self.assertRaises(LookupError) as context: 83 | await handle_call_tool('codelogic-method-impact', {'method': 'method_name'}) 84 | self.assertEqual(str(context.exception), "Request context is not set") 85 | 86 | 87 | class TestExtractRelationships(unittest.TestCase): 88 | 89 | def setUp(self): 90 | self.impact_data = { 91 | 'data': { 92 | 'nodes': [ 93 | {'id': '1', 'identity': 'identity1', 'name': 'Node1', 'primaryLabel': 'Class'}, 94 | {'id': '2', 'identity': 'identity2', 'name': 'Node2', 'primaryLabel': 'Method'}, 95 | ], 96 | 'relationships': [ 97 | {'startId': '1', 'endId': '2', 'type': 'CALLS'}, 98 | ] 99 | } 100 | } 101 | 102 | def test_extract_relationships(self): 103 | expected_output = ["- identity1 (CALLS) -> identity2"] 104 | result = extract_relationships(self.impact_data) 105 | self.assertEqual(result, expected_output) 106 | 107 | def test_extract_relationships_no_relationships(self): 108 | self.impact_data['data']['relationships'] = [] 109 | expected_output = [] 110 | result = extract_relationships(self.impact_data) 111 | self.assertEqual(result, expected_output) 112 | 113 | def test_extract_relationships_missing_node(self): 114 | self.impact_data['data']['relationships'] = [ 115 | {'startId': '1', 'endId': '3', 'type': 'CALLS'}, 116 | ] 117 | expected_output = [] 118 | result = extract_relationships(self.impact_data) 
119 | self.assertEqual(result, expected_output) 120 | 121 | 122 | if __name__ == '__main__': 123 | unittest.main() 124 | -------------------------------------------------------------------------------- /test/unit_test_utils.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from unittest import mock 3 | from unittest.mock import Mock 4 | import json 5 | from datetime import datetime, timedelta 6 | from io import StringIO 7 | from codelogic_mcp_server.utils import strip_unused_properties, find_api_endpoints 8 | from codelogic_mcp_server import utils 9 | from test.test_env import TestCase 10 | 11 | 12 | class TestUtils(TestCase): 13 | 14 | def test_strip_unused_properties(self): 15 | response_mock = Mock() 16 | response_mock.text = json.dumps({ 17 | "data": { 18 | "nodes": [ 19 | { 20 | "properties": { 21 | "otherProperty1": "should_remain", 22 | "agentIds": [ 23 | "eee5b2fa-966a-442f-9dff-06612062eb4c" 24 | ], 25 | "sourceScanContextIds": [ 26 | "845742f8-5bda-4c8a-a3ba-24da824e7b3d" 27 | ], 28 | "isScanRoot": False, 29 | "transitiveSourceNodeId": "927d520a-2118-44f5-9088-0a0141a86b38", 30 | "dataSourceId": "netCape", 31 | "scanContextId": "845742f8-5bda-4c8a-a3ba-24da824e7b3d", 32 | "id": "f1a6b838-cc55-41a1-a43c-32de78722ffa", 33 | "shortName": "SubscriptionCreatedDomainEvent", 34 | "materializedViewId": "a15b3f42-93e9-4c91-8a36-a465e865436e", 35 | "otherProperty2": "should_remain", 36 | "statistics.impactScore": 0 37 | } 38 | } 39 | ] 40 | } 41 | }) 42 | 43 | expected_output = json.dumps({ 44 | "data": { 45 | "nodes": [ 46 | { 47 | "properties": { 48 | "otherProperty1": "should_remain", 49 | "otherProperty2": "should_remain" 50 | } 51 | } 52 | ] 53 | } 54 | }) 55 | 56 | result = strip_unused_properties(response_mock) 57 | self.assertEqual(result, expected_output) 58 | 59 | def test_strip_unused_properties_empty_nodes(self): 60 | response_mock = Mock() 61 | response_mock.text = json.dumps({ 62 | "data": { 63 | "nodes": [] 64 | } 65 | }) 66 | 67 | expected_output = json.dumps({ 68 | "data": { 69 | "nodes": [] 70 | } 71 | }) 72 | 73 | result = strip_unused_properties(response_mock) 74 | self.assertEqual(result, expected_output) 75 | 76 | def test_strip_unused_properties_no_data(self): 77 | response_mock = Mock() 78 | response_mock.text = json.dumps({}) 79 | 80 | expected_output = json.dumps({}) 81 | 82 | result = strip_unused_properties(response_mock) 83 | self.assertEqual(result, expected_output) 84 | 85 | 86 | class TestTokenCaching(TestCase): 87 | """Test caching of authentication tokens.""" 88 | 89 | def setUp(self): 90 | super().setUp() # Set up clean test environment 91 | # Reset cached values 92 | utils._cached_token = None 93 | utils._token_expiry = None 94 | # No need to set environment variables - handled by TestCase 95 | 96 | @mock.patch('codelogic_mcp_server.utils._client.post') 97 | @mock.patch('codelogic_mcp_server.utils.datetime') 98 | def test_authenticate_caches_token(self, mock_datetime, mock_post): 99 | """Test that authenticate() caches the token and returns it.""" 100 | # Set up mock datetime 101 | now = datetime(2023, 1, 1, 12, 0, 0) 102 | mock_datetime.now.return_value = now 103 | 104 | # Set up mock response 105 | mock_response = mock.MagicMock() 106 | mock_response.raise_for_status.return_value = None 107 | mock_response.json.return_value = {'access_token': 'test_token'} 108 | mock_post.return_value = mock_response 109 | 110 | # Call authenticate 111 | token = utils.authenticate() 112 | 113 | # Verify token 
is cached and returned 114 | self.assertEqual(token, 'test_token') 115 | self.assertEqual(utils._cached_token, 'test_token') 116 | self.assertEqual(utils._token_expiry, now + timedelta(seconds=utils.TOKEN_CACHE_TTL)) 117 | 118 | # Verify request was made correctly 119 | mock_post.assert_called_once() 120 | url_arg = mock_post.call_args[0][0] 121 | self.assertEqual(url_arg, 'https://example.codelogic.test/codelogic/server/authenticate') 122 | 123 | @mock.patch('codelogic_mcp_server.utils._client.post') 124 | @mock.patch('codelogic_mcp_server.utils.datetime') 125 | def test_authenticate_uses_cached_token(self, mock_datetime, mock_post): 126 | """Test that authenticate() returns cached token without making requests.""" 127 | # Set up initial token cache 128 | now = datetime(2023, 1, 1, 12, 0, 0) 129 | utils._cached_token = 'cached_token' 130 | utils._token_expiry = now + timedelta(seconds=3600) # Valid for 1 hour 131 | 132 | # Set current time to be before expiry 133 | mock_datetime.now.return_value = now + timedelta(seconds=1800) # 30 minutes later 134 | 135 | # Call authenticate 136 | token = utils.authenticate() 137 | 138 | # Verify cached token is returned without making requests 139 | self.assertEqual(token, 'cached_token') 140 | mock_post.assert_not_called() 141 | 142 | @mock.patch('codelogic_mcp_server.utils._client.post') 143 | @mock.patch('codelogic_mcp_server.utils.datetime') 144 | def test_authenticate_refreshes_expired_token(self, mock_datetime, mock_post): 145 | """Test that authenticate() refreshes token when the cached one expires.""" 146 | # Set up initial expired token cache 147 | now = datetime(2023, 1, 1, 12, 0, 0) 148 | utils._cached_token = 'expired_token' 149 | utils._token_expiry = now - timedelta(seconds=60) # Expired 1 minute ago 150 | 151 | # Set current time to be after expiry 152 | mock_datetime.now.return_value = now 153 | 154 | # Set up mock response for new token 155 | mock_response = mock.MagicMock() 156 | mock_response.raise_for_status.return_value = None 157 | mock_response.json.return_value = {'access_token': 'new_token'} 158 | mock_post.return_value = mock_response 159 | 160 | # Call authenticate 161 | token = utils.authenticate() 162 | 163 | # Verify new token is fetched and cached 164 | self.assertEqual(token, 'new_token') 165 | self.assertEqual(utils._cached_token, 'new_token') 166 | mock_post.assert_called_once() 167 | 168 | 169 | class TestMethodNodesCaching(TestCase): 170 | """Test caching of method nodes.""" 171 | 172 | def setUp(self): 173 | super().setUp() # Set up clean test environment 174 | # Reset cached values before each test 175 | utils._method_nodes_cache = {} 176 | 177 | # Mock stderr to capture logging 178 | self.stderr_patcher = mock.patch('sys.stderr', new_callable=StringIO) 179 | self.mock_stderr = self.stderr_patcher.start() 180 | # No need to set environment variables - handled by TestCase 181 | 182 | def tearDown(self): 183 | self.stderr_patcher.stop() 184 | super().tearDown() # Call parent tearDown to restore environment 185 | 186 | @mock.patch('codelogic_mcp_server.utils.authenticate') 187 | @mock.patch('codelogic_mcp_server.utils._client.post') 188 | @mock.patch('codelogic_mcp_server.utils.datetime') 189 | def test_get_method_nodes_caches_results(self, mock_datetime, mock_post, mock_authenticate): 190 | """Test that get_method_nodes() caches and returns method nodes.""" 191 | # Set up mock datetime 192 | now = datetime(2023, 1, 1, 12, 0, 0) 193 | mock_datetime.now.return_value = now 194 | 195 | # Set up mock token 196 | 
mock_authenticate.return_value = 'test_token' 197 | 198 | # Set up mock response 199 | mock_response = mock.MagicMock() 200 | mock_response.raise_for_status.return_value = None 201 | mock_response.json.return_value = {'data': [{'id': '1', 'name': 'test_method'}]} 202 | mock_post.return_value = mock_response 203 | 204 | # Call get_method_nodes 205 | nodes = utils.get_method_nodes('mv-123', 'test.method') 206 | cache_key = 'mv-123:test.method' 207 | 208 | # Verify results are cached and returned 209 | self.assertEqual(nodes, [{'id': '1', 'name': 'test_method'}]) 210 | self.assertIn(cache_key, utils._method_nodes_cache) 211 | cached_nodes, expiry = utils._method_nodes_cache[cache_key] 212 | self.assertEqual(cached_nodes, [{'id': '1', 'name': 'test_method'}]) 213 | self.assertEqual(expiry, now + timedelta(seconds=utils.METHOD_CACHE_TTL)) 214 | 215 | # Verify logging message 216 | self.assertIn(f"Method nodes cached for test.method with TTL {utils.METHOD_CACHE_TTL}s", 217 | self.mock_stderr.getvalue()) 218 | 219 | @mock.patch('codelogic_mcp_server.utils.authenticate') 220 | @mock.patch('codelogic_mcp_server.utils._client.post') 221 | @mock.patch('codelogic_mcp_server.utils.datetime') 222 | def test_get_method_nodes_uses_cache(self, mock_datetime, mock_post, mock_authenticate): 223 | """Test that get_method_nodes() uses cached values.""" 224 | # Set up mock datetime 225 | now = datetime(2023, 1, 1, 12, 0, 0) 226 | future = now + timedelta(seconds=60) # 1 minute later 227 | 228 | # Set up initial cache with data valid for 5 minutes 229 | cache_key = 'mv-123:test.method' 230 | cached_data = [{'id': '1', 'name': 'cached_method'}] 231 | utils._method_nodes_cache[cache_key] = (cached_data, now + timedelta(seconds=300)) 232 | 233 | # Set current time to 1 minute after now (cache still valid) 234 | mock_datetime.now.return_value = future 235 | 236 | # Call get_method_nodes 237 | nodes = utils.get_method_nodes('mv-123', 'test.method') 238 | 239 | # Verify cached data is returned without making requests 240 | self.assertEqual(nodes, cached_data) 241 | mock_authenticate.assert_not_called() 242 | mock_post.assert_not_called() 243 | 244 | # Verify cache hit message 245 | self.assertIn("Method nodes cache hit for test.method", self.mock_stderr.getvalue()) 246 | 247 | @mock.patch('codelogic_mcp_server.utils.authenticate') 248 | @mock.patch('codelogic_mcp_server.utils._client.post') 249 | @mock.patch('codelogic_mcp_server.utils.datetime') 250 | def test_get_method_nodes_refreshes_expired_cache(self, mock_datetime, mock_post, mock_authenticate): 251 | """Test that get_method_nodes() refreshes expired cache.""" 252 | # Set up mock datetime 253 | now = datetime(2023, 1, 1, 12, 0, 0) 254 | 255 | # Set up expired cache 256 | cache_key = 'mv-123:test.method' 257 | cached_data = [{'id': '1', 'name': 'expired_method'}] 258 | utils._method_nodes_cache[cache_key] = (cached_data, now - timedelta(seconds=60)) 259 | 260 | # Set current time 261 | mock_datetime.now.return_value = now 262 | 263 | # Set up mock token 264 | mock_authenticate.return_value = 'test_token' 265 | 266 | # Set up mock response 267 | mock_response = mock.MagicMock() 268 | mock_response.raise_for_status.return_value = None 269 | mock_response.json.return_value = {'data': [{'id': '2', 'name': 'new_method'}]} 270 | mock_post.return_value = mock_response 271 | 272 | # Call get_method_nodes 273 | nodes = utils.get_method_nodes('mv-123', 'test.method') 274 | 275 | # Verify new data is fetched, cached and returned 276 | self.assertEqual(nodes, [{'id': '2', 
'name': 'new_method'}]) 277 | self.assertIn(cache_key, utils._method_nodes_cache) 278 | new_cached_nodes, _ = utils._method_nodes_cache[cache_key] 279 | self.assertEqual(new_cached_nodes, [{'id': '2', 'name': 'new_method'}]) 280 | 281 | # Verify cache expired message 282 | self.assertIn("Method nodes cache expired for test.method", self.mock_stderr.getvalue()) 283 | 284 | 285 | class TestImpactCaching(TestCase): 286 | """Test caching of impact data.""" 287 | 288 | def setUp(self): 289 | super().setUp() # Set up clean test environment 290 | # Reset cached values before each test 291 | utils._impact_cache = {} 292 | 293 | # Mock stderr to capture logging 294 | self.stderr_patcher = mock.patch('sys.stderr', new_callable=StringIO) 295 | self.mock_stderr = self.stderr_patcher.start() 296 | # No need to set environment variables - handled by TestCase 297 | 298 | def tearDown(self): 299 | self.stderr_patcher.stop() 300 | super().tearDown() # Call parent tearDown to restore environment 301 | 302 | @mock.patch('codelogic_mcp_server.utils.authenticate') 303 | @mock.patch('codelogic_mcp_server.utils._client.get') 304 | @mock.patch('codelogic_mcp_server.utils.datetime') 305 | def test_get_impact_caches_results(self, mock_datetime, mock_get, mock_authenticate): 306 | """Test that get_impact() caches and returns stripped impact data.""" 307 | # Set up mock datetime 308 | now = datetime(2023, 1, 1, 12, 0, 0) 309 | mock_datetime.now.return_value = now 310 | 311 | # Set up mock token 312 | mock_authenticate.return_value = 'test_token' 313 | 314 | # Set up mock response 315 | mock_response = mock.MagicMock() 316 | mock_response.raise_for_status.return_value = None 317 | mock_response.text = json.dumps({ 318 | 'data': { 319 | 'nodes': [ 320 | { 321 | 'id': '1', 322 | 'name': 'test_node', 323 | 'primaryLabel': 'Method', 324 | 'properties': { 325 | 'agentIds': ['to-remove'], 326 | 'sourceScanContextIds': ['to-remove'], 327 | 'isScanRoot': True, 328 | 'keep': 'value' 329 | } 330 | } 331 | ] 332 | } 333 | }) 334 | mock_get.return_value = mock_response 335 | 336 | # Call get_impact 337 | impact = utils.get_impact('node-123') 338 | 339 | # Verify results are cached and returned 340 | self.assertIn('node-123', utils._impact_cache) 341 | cached_impact, expiry = utils._impact_cache['node-123'] 342 | 343 | # Verify the impact data is properly stripped 344 | impact_data = json.loads(impact) 345 | self.assertNotIn('agentIds', impact_data['data']['nodes'][0]['properties']) 346 | self.assertNotIn('sourceScanContextIds', impact_data['data']['nodes'][0]['properties']) 347 | self.assertNotIn('isScanRoot', impact_data['data']['nodes'][0]['properties']) 348 | self.assertIn('keep', impact_data['data']['nodes'][0]['properties']) 349 | 350 | # Verify expiry time 351 | self.assertEqual(expiry, now + timedelta(seconds=utils.IMPACT_CACHE_TTL)) 352 | 353 | # Verify logging message 354 | self.assertIn(f"Impact cached for node-123 with TTL {utils.IMPACT_CACHE_TTL}s", 355 | self.mock_stderr.getvalue()) 356 | 357 | @mock.patch('codelogic_mcp_server.utils.authenticate') 358 | @mock.patch('codelogic_mcp_server.utils._client.get') 359 | @mock.patch('codelogic_mcp_server.utils.datetime') 360 | def test_get_impact_uses_cache(self, mock_datetime, mock_get, mock_authenticate): 361 | """Test that get_impact() uses cached values.""" 362 | # Set up mock datetime 363 | now = datetime(2023, 1, 1, 12, 0, 0) 364 | future = now + timedelta(seconds=60) # 1 minute later 365 | 366 | # Set up initial cache with data valid for 5 minutes 367 | cached_data = 
'{"data": {"nodes": [{"name": "cached_impact"}]}}' 368 | utils._impact_cache['node-123'] = (cached_data, now + timedelta(seconds=300)) 369 | 370 | # Set current time to 1 minute after now (cache still valid) 371 | mock_datetime.now.return_value = future 372 | 373 | # Call get_impact 374 | impact = utils.get_impact('node-123') 375 | 376 | # Verify cached data is returned without making requests 377 | self.assertEqual(impact, cached_data) 378 | mock_authenticate.assert_not_called() 379 | mock_get.assert_not_called() 380 | 381 | # Verify cache hit message 382 | self.assertIn("Impact cache hit for node-123", self.mock_stderr.getvalue()) 383 | 384 | @mock.patch('codelogic_mcp_server.utils.authenticate') 385 | @mock.patch('codelogic_mcp_server.utils._client.get') 386 | @mock.patch('codelogic_mcp_server.utils.datetime') 387 | def test_get_impact_refreshes_expired_cache(self, mock_datetime, mock_get, mock_authenticate): 388 | """Test that get_impact() refreshes expired cache.""" 389 | # Set up mock datetime 390 | now = datetime(2023, 1, 1, 12, 0, 0) 391 | 392 | # Set up expired cache 393 | cached_data = '{"data": {"nodes": [{"name": "expired_impact"}]}}' 394 | utils._impact_cache['node-123'] = (cached_data, now - timedelta(seconds=60)) 395 | 396 | # Set current time 397 | mock_datetime.now.return_value = now 398 | 399 | # Set up mock token 400 | mock_authenticate.return_value = 'test_token' 401 | 402 | # Set up mock response 403 | mock_response = mock.MagicMock() 404 | mock_response.raise_for_status.return_value = None 405 | mock_response.text = json.dumps({ 406 | 'data': { 407 | 'nodes': [ 408 | {'id': '2', 'name': 'new_impact'} 409 | ] 410 | } 411 | }) 412 | mock_get.return_value = mock_response 413 | 414 | # Call get_impact 415 | impact = utils.get_impact('node-123') 416 | 417 | # Verify new data is fetched, cached and returned 418 | impact_data = json.loads(impact) 419 | self.assertEqual(impact_data['data']['nodes'][0]['name'], 'new_impact') 420 | 421 | # Verify cache expired message 422 | self.assertIn("Impact cache expired for node-123", self.mock_stderr.getvalue()) 423 | 424 | 425 | class TestFindApiEndpoints(unittest.TestCase): 426 | """Test the find_api_endpoints utility function""" 427 | 428 | def test_find_api_endpoints_with_annotations(self): 429 | """Test finding API endpoints with annotations""" 430 | # Mock nodes with REST annotations 431 | nodes = [ 432 | { 433 | 'id': '1', 434 | 'name': 'getUser', 435 | 'primaryLabel': 'JavaMethodEntity', 436 | 'properties': { 437 | 'annotations': ['@GetMapping("/api/users/{id}")'] 438 | } 439 | }, 440 | { 441 | 'id': '2', 442 | 'name': 'UserController', 443 | 'primaryLabel': 'JavaClassEntity', 444 | 'properties': {} 445 | } 446 | ] 447 | 448 | # Mock relationships 449 | relationships = [ 450 | { 451 | 'startId': '1', 452 | 'endId': '2', 453 | 'type': 'CONTAINS_METHOD' 454 | } 455 | ] 456 | 457 | # Call the function 458 | endpoint_nodes, rest_endpoints, api_controllers, endpoint_dependencies = find_api_endpoints(nodes, relationships) 459 | 460 | # Assert results 461 | self.assertEqual(len(rest_endpoints), 1) 462 | self.assertEqual(rest_endpoints[0]['name'], 'getUser') 463 | self.assertIn('@GetMapping', rest_endpoints[0]['annotation']) 464 | 465 | def test_find_api_endpoints_with_controllers(self): 466 | """Test finding API controllers""" 467 | # Mock nodes with controller classes 468 | nodes = [ 469 | { 470 | 'id': '1', 471 | 'name': 'UserController', 472 | 'primaryLabel': 'JavaClassEntity', 473 | 'properties': {} 474 | } 475 | ] 476 | 477 | # 
Call the function 478 | endpoint_nodes, rest_endpoints, api_controllers, endpoint_dependencies = find_api_endpoints(nodes, relationships=[]) 479 | 480 | # Assert no results because it's not a controller type 481 | self.assertEqual(len(api_controllers), 0) 482 | 483 | # Now test with a proper controller 484 | nodes = [ 485 | { 486 | 'id': '1', 487 | 'name': 'UserController', 488 | 'primaryLabel': 'RestController', 489 | 'properties': {} 490 | } 491 | ] 492 | 493 | # Call the function 494 | endpoint_nodes, rest_endpoints, api_controllers, endpoint_dependencies = find_api_endpoints(nodes, relationships=[]) 495 | 496 | # Assert results 497 | self.assertEqual(len(api_controllers), 1) 498 | self.assertEqual(api_controllers[0]['name'], 'UserController') 499 | 500 | def test_find_explicit_endpoints(self): 501 | """Test finding explicit Endpoint nodes""" 502 | # Mock nodes with explicit Endpoint type 503 | nodes = [ 504 | { 505 | 'id': '1', 506 | 'name': 'GET /api/users/{id}', 507 | 'primaryLabel': 'Endpoint', 508 | 'properties': { 509 | 'path': '/api/users/{id}', 510 | 'httpVerb': 'GET' 511 | } 512 | } 513 | ] 514 | 515 | # Call the function 516 | endpoint_nodes, rest_endpoints, api_controllers, endpoint_dependencies = find_api_endpoints(nodes, relationships=[]) 517 | 518 | # Assert results 519 | self.assertEqual(len(endpoint_nodes), 1) 520 | self.assertEqual(endpoint_nodes[0]['http_verb'], 'GET') 521 | self.assertEqual(endpoint_nodes[0]['path'], '/api/users/{id}') 522 | 523 | def test_find_endpoint_dependencies(self): 524 | """Test finding dependencies between endpoints""" 525 | # Mock nodes 526 | nodes = [ 527 | { 528 | 'id': '1', 529 | 'name': 'UsersEndpoint', 530 | 'primaryLabel': 'Endpoint', 531 | 'properties': {} 532 | }, 533 | { 534 | 'id': '2', 535 | 'name': 'OrdersEndpoint', 536 | 'primaryLabel': 'Endpoint', 537 | 'properties': {} 538 | } 539 | ] 540 | 541 | # Mock relationships with INVOKES_ENDPOINT 542 | relationships = [ 543 | { 544 | 'startId': '1', 545 | 'endId': '2', 546 | 'type': 'INVOKES_ENDPOINT' 547 | } 548 | ] 549 | 550 | # Call the function 551 | endpoint_nodes, rest_endpoints, api_controllers, endpoint_dependencies = find_api_endpoints(nodes, relationships) 552 | 553 | # Assert results 554 | self.assertEqual(len(endpoint_dependencies), 1) 555 | self.assertEqual(endpoint_dependencies[0]['source'], 'UsersEndpoint') 556 | self.assertEqual(endpoint_dependencies[0]['target'], 'OrdersEndpoint') 557 | 558 | 559 | if __name__ == '__main__': 560 | unittest.main() 561 | -------------------------------------------------------------------------------- /uv.lock: -------------------------------------------------------------------------------- 1 | version = 1 2 | revision = 2 3 | requires-python = ">=3.13" 4 | 5 | [[package]] 6 | name = "annotated-types" 7 | version = "0.7.0" 8 | source = { registry = "https://pypi.org/simple" } 9 | sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload_time = "2024-05-20T21:33:25.928Z" } 10 | wheels = [ 11 | { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload_time = "2024-05-20T21:33:24.1Z" }, 12 | ] 13 | 14 | [[package]] 15 | name = "anyio" 16 | 
version = "4.8.0" 17 | source = { registry = "https://pypi.org/simple" } 18 | dependencies = [ 19 | { name = "idna" }, 20 | { name = "sniffio" }, 21 | ] 22 | sdist = { url = "https://files.pythonhosted.org/packages/a3/73/199a98fc2dae33535d6b8e8e6ec01f8c1d76c9adb096c6b7d64823038cde/anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a", size = 181126, upload_time = "2025-01-05T13:13:11.095Z" } 23 | wheels = [ 24 | { url = "https://files.pythonhosted.org/packages/46/eb/e7f063ad1fec6b3178a3cd82d1a3c4de82cccf283fc42746168188e1cdd5/anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a", size = 96041, upload_time = "2025-01-05T13:13:07.985Z" }, 25 | ] 26 | 27 | [[package]] 28 | name = "certifi" 29 | version = "2025.1.31" 30 | source = { registry = "https://pypi.org/simple" } 31 | sdist = { url = "https://files.pythonhosted.org/packages/1c/ab/c9f1e32b7b1bf505bf26f0ef697775960db7932abeb7b516de930ba2705f/certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651", size = 167577, upload_time = "2025-01-31T02:16:47.166Z" } 32 | wheels = [ 33 | { url = "https://files.pythonhosted.org/packages/38/fc/bce832fd4fd99766c04d1ee0eead6b0ec6486fb100ae5e74c1d91292b982/certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe", size = 166393, upload_time = "2025-01-31T02:16:45.015Z" }, 34 | ] 35 | 36 | [[package]] 37 | name = "click" 38 | version = "8.1.8" 39 | source = { registry = "https://pypi.org/simple" } 40 | dependencies = [ 41 | { name = "colorama", marker = "sys_platform == 'win32'" }, 42 | ] 43 | sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload_time = "2024-12-21T18:38:44.339Z" } 44 | wheels = [ 45 | { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload_time = "2024-12-21T18:38:41.666Z" }, 46 | ] 47 | 48 | [[package]] 49 | name = "codelogic-mcp-server" 50 | version = "1.0.1" 51 | source = { editable = "." 
} 52 | dependencies = [ 53 | { name = "debugpy" }, 54 | { name = "httpx" }, 55 | { name = "mcp", extra = ["cli"] }, 56 | { name = "pip-licenses" }, 57 | { name = "python-dotenv" }, 58 | { name = "tenacity" }, 59 | { name = "toml" }, 60 | ] 61 | 62 | [package.metadata] 63 | requires-dist = [ 64 | { name = "debugpy", specifier = ">=1.8.12" }, 65 | { name = "httpx", specifier = ">=0.28.1" }, 66 | { name = "mcp", extras = ["cli"], specifier = ">=1.3.0" }, 67 | { name = "pip-licenses", specifier = ">=5.0.0" }, 68 | { name = "python-dotenv", specifier = ">=1.0.1" }, 69 | { name = "tenacity", specifier = ">=9.0.0" }, 70 | { name = "toml", specifier = ">=0.10.2" }, 71 | ] 72 | 73 | [[package]] 74 | name = "colorama" 75 | version = "0.4.6" 76 | source = { registry = "https://pypi.org/simple" } 77 | sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload_time = "2022-10-25T02:36:22.414Z" } 78 | wheels = [ 79 | { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload_time = "2022-10-25T02:36:20.889Z" }, 80 | ] 81 | 82 | [[package]] 83 | name = "debugpy" 84 | version = "1.8.12" 85 | source = { registry = "https://pypi.org/simple" } 86 | sdist = { url = "https://files.pythonhosted.org/packages/68/25/c74e337134edf55c4dfc9af579eccb45af2393c40960e2795a94351e8140/debugpy-1.8.12.tar.gz", hash = "sha256:646530b04f45c830ceae8e491ca1c9320a2d2f0efea3141487c82130aba70dce", size = 1641122, upload_time = "2025-01-16T17:26:42.727Z" } 87 | wheels = [ 88 | { url = "https://files.pythonhosted.org/packages/cf/4d/7c3896619a8791effd5d8c31f0834471fc8f8fb3047ec4f5fc69dd1393dd/debugpy-1.8.12-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:696d8ae4dff4cbd06bf6b10d671e088b66669f110c7c4e18a44c43cf75ce966f", size = 2485246, upload_time = "2025-01-16T17:27:18.389Z" }, 89 | { url = "https://files.pythonhosted.org/packages/99/46/bc6dcfd7eb8cc969a5716d858e32485eb40c72c6a8dc88d1e3a4d5e95813/debugpy-1.8.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:898fba72b81a654e74412a67c7e0a81e89723cfe2a3ea6fcd3feaa3395138ca9", size = 4218616, upload_time = "2025-01-16T17:27:20.374Z" }, 90 | { url = "https://files.pythonhosted.org/packages/03/dd/d7fcdf0381a9b8094da1f6a1c9f19fed493a4f8576a2682349b3a8b20ec7/debugpy-1.8.12-cp313-cp313-win32.whl", hash = "sha256:22a11c493c70413a01ed03f01c3c3a2fc4478fc6ee186e340487b2edcd6f4180", size = 5226540, upload_time = "2025-01-16T17:27:22.504Z" }, 91 | { url = "https://files.pythonhosted.org/packages/25/bd/ecb98f5b5fc7ea0bfbb3c355bc1dd57c198a28780beadd1e19915bf7b4d9/debugpy-1.8.12-cp313-cp313-win_amd64.whl", hash = "sha256:fdb3c6d342825ea10b90e43d7f20f01535a72b3a1997850c0c3cefa5c27a4a2c", size = 5267134, upload_time = "2025-01-16T17:27:25.616Z" }, 92 | { url = "https://files.pythonhosted.org/packages/38/c4/5120ad36405c3008f451f94b8f92ef1805b1e516f6ff870f331ccb3c4cc0/debugpy-1.8.12-py2.py3-none-any.whl", hash = "sha256:274b6a2040349b5c9864e475284bce5bb062e63dce368a394b8cc865ae3b00c6", size = 5229490, upload_time = "2025-01-16T17:27:49.412Z" }, 93 | ] 94 | 95 | [[package]] 96 | name = "h11" 97 | version = "0.14.0" 98 | source = { registry = 
"https://pypi.org/simple" } 99 | sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418, upload_time = "2022-09-25T15:40:01.519Z" } 100 | wheels = [ 101 | { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259, upload_time = "2022-09-25T15:39:59.68Z" }, 102 | ] 103 | 104 | [[package]] 105 | name = "httpcore" 106 | version = "1.0.7" 107 | source = { registry = "https://pypi.org/simple" } 108 | dependencies = [ 109 | { name = "certifi" }, 110 | { name = "h11" }, 111 | ] 112 | sdist = { url = "https://files.pythonhosted.org/packages/6a/41/d7d0a89eb493922c37d343b607bc1b5da7f5be7e383740b4753ad8943e90/httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c", size = 85196, upload_time = "2024-11-15T12:30:47.531Z" } 113 | wheels = [ 114 | { url = "https://files.pythonhosted.org/packages/87/f5/72347bc88306acb359581ac4d52f23c0ef445b57157adedb9aee0cd689d2/httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd", size = 78551, upload_time = "2024-11-15T12:30:45.782Z" }, 115 | ] 116 | 117 | [[package]] 118 | name = "httpx" 119 | version = "0.28.1" 120 | source = { registry = "https://pypi.org/simple" } 121 | dependencies = [ 122 | { name = "anyio" }, 123 | { name = "certifi" }, 124 | { name = "httpcore" }, 125 | { name = "idna" }, 126 | ] 127 | sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload_time = "2024-12-06T15:37:23.222Z" } 128 | wheels = [ 129 | { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload_time = "2024-12-06T15:37:21.509Z" }, 130 | ] 131 | 132 | [[package]] 133 | name = "httpx-sse" 134 | version = "0.4.0" 135 | source = { registry = "https://pypi.org/simple" } 136 | sdist = { url = "https://files.pythonhosted.org/packages/4c/60/8f4281fa9bbf3c8034fd54c0e7412e66edbab6bc74c4996bd616f8d0406e/httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721", size = 12624, upload_time = "2023-12-22T08:01:21.083Z" } 137 | wheels = [ 138 | { url = "https://files.pythonhosted.org/packages/e1/9b/a181f281f65d776426002f330c31849b86b31fc9d848db62e16f03ff739f/httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f", size = 7819, upload_time = "2023-12-22T08:01:19.89Z" }, 139 | ] 140 | 141 | [[package]] 142 | name = "idna" 143 | version = "3.10" 144 | source = { registry = "https://pypi.org/simple" } 145 | sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload_time = "2024-09-15T18:07:39.745Z" } 146 | wheels = [ 147 | { url = 
"https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload_time = "2024-09-15T18:07:37.964Z" }, 148 | ] 149 | 150 | [[package]] 151 | name = "markdown-it-py" 152 | version = "3.0.0" 153 | source = { registry = "https://pypi.org/simple" } 154 | dependencies = [ 155 | { name = "mdurl" }, 156 | ] 157 | sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload_time = "2023-06-03T06:41:14.443Z" } 158 | wheels = [ 159 | { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload_time = "2023-06-03T06:41:11.019Z" }, 160 | ] 161 | 162 | [[package]] 163 | name = "mcp" 164 | version = "1.3.0" 165 | source = { registry = "https://pypi.org/simple" } 166 | dependencies = [ 167 | { name = "anyio" }, 168 | { name = "httpx" }, 169 | { name = "httpx-sse" }, 170 | { name = "pydantic" }, 171 | { name = "pydantic-settings" }, 172 | { name = "sse-starlette" }, 173 | { name = "starlette" }, 174 | { name = "uvicorn" }, 175 | ] 176 | sdist = { url = "https://files.pythonhosted.org/packages/6b/b6/81e5f2490290351fc97bf46c24ff935128cb7d34d68e3987b522f26f7ada/mcp-1.3.0.tar.gz", hash = "sha256:f409ae4482ce9d53e7ac03f3f7808bcab735bdfc0fba937453782efb43882d45", size = 150235, upload_time = "2025-02-20T21:45:42.597Z" } 177 | wheels = [ 178 | { url = "https://files.pythonhosted.org/packages/d0/d2/a9e87b506b2094f5aa9becc1af5178842701b27217fa43877353da2577e3/mcp-1.3.0-py3-none-any.whl", hash = "sha256:2829d67ce339a249f803f22eba5e90385eafcac45c94b00cab6cef7e8f217211", size = 70672, upload_time = "2025-02-20T21:45:40.102Z" }, 179 | ] 180 | 181 | [package.optional-dependencies] 182 | cli = [ 183 | { name = "python-dotenv" }, 184 | { name = "typer" }, 185 | ] 186 | 187 | [[package]] 188 | name = "mdurl" 189 | version = "0.1.2" 190 | source = { registry = "https://pypi.org/simple" } 191 | sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload_time = "2022-08-14T12:40:10.846Z" } 192 | wheels = [ 193 | { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload_time = "2022-08-14T12:40:09.779Z" }, 194 | ] 195 | 196 | [[package]] 197 | name = "pip-licenses" 198 | version = "5.0.0" 199 | source = { registry = "https://pypi.org/simple" } 200 | dependencies = [ 201 | { name = "prettytable" }, 202 | { name = "tomli" }, 203 | ] 204 | sdist = { url = "https://files.pythonhosted.org/packages/a0/49/d36a3ddb73d22970a35afa3e9fd53c8318150f8122e4257ca9875f1d4e38/pip_licenses-5.0.0.tar.gz", hash = "sha256:0633a1f9aab58e5a6216931b0e1d5cdded8bcc2709ff563674eb0e2ff9e77e8e", size = 41542, upload_time = "2024-07-23T10:48:29.785Z" } 205 | wheels = [ 206 | { url = 
"https://files.pythonhosted.org/packages/27/0a/bfaf1479d09d19f503a669d9c8e433ac59ae687fb8da1d8207eb85c5a9f4/pip_licenses-5.0.0-py3-none-any.whl", hash = "sha256:82c83666753efb86d1af1c405c8ab273413eb10d6689c218df2f09acf40e477d", size = 20497, upload_time = "2024-07-23T10:48:27.59Z" }, 207 | ] 208 | 209 | [[package]] 210 | name = "prettytable" 211 | version = "3.16.0" 212 | source = { registry = "https://pypi.org/simple" } 213 | dependencies = [ 214 | { name = "wcwidth" }, 215 | ] 216 | sdist = { url = "https://files.pythonhosted.org/packages/99/b1/85e18ac92afd08c533603e3393977b6bc1443043115a47bb094f3b98f94f/prettytable-3.16.0.tar.gz", hash = "sha256:3c64b31719d961bf69c9a7e03d0c1e477320906a98da63952bc6698d6164ff57", size = 66276, upload_time = "2025-03-24T19:39:04.008Z" } 217 | wheels = [ 218 | { url = "https://files.pythonhosted.org/packages/02/c7/5613524e606ea1688b3bdbf48aa64bafb6d0a4ac3750274c43b6158a390f/prettytable-3.16.0-py3-none-any.whl", hash = "sha256:b5eccfabb82222f5aa46b798ff02a8452cf530a352c31bddfa29be41242863aa", size = 33863, upload_time = "2025-03-24T19:39:02.359Z" }, 219 | ] 220 | 221 | [[package]] 222 | name = "pydantic" 223 | version = "2.10.6" 224 | source = { registry = "https://pypi.org/simple" } 225 | dependencies = [ 226 | { name = "annotated-types" }, 227 | { name = "pydantic-core" }, 228 | { name = "typing-extensions" }, 229 | ] 230 | sdist = { url = "https://files.pythonhosted.org/packages/b7/ae/d5220c5c52b158b1de7ca89fc5edb72f304a70a4c540c84c8844bf4008de/pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236", size = 761681, upload_time = "2025-01-24T01:42:12.693Z" } 231 | wheels = [ 232 | { url = "https://files.pythonhosted.org/packages/f4/3c/8cc1cc84deffa6e25d2d0c688ebb80635dfdbf1dbea3e30c541c8cf4d860/pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584", size = 431696, upload_time = "2025-01-24T01:42:10.371Z" }, 233 | ] 234 | 235 | [[package]] 236 | name = "pydantic-core" 237 | version = "2.27.2" 238 | source = { registry = "https://pypi.org/simple" } 239 | dependencies = [ 240 | { name = "typing-extensions" }, 241 | ] 242 | sdist = { url = "https://files.pythonhosted.org/packages/fc/01/f3e5ac5e7c25833db5eb555f7b7ab24cd6f8c322d3a3ad2d67a952dc0abc/pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39", size = 413443, upload_time = "2024-12-18T11:31:54.917Z" } 243 | wheels = [ 244 | { url = "https://files.pythonhosted.org/packages/41/b1/9bc383f48f8002f99104e3acff6cba1231b29ef76cfa45d1506a5cad1f84/pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b", size = 1892709, upload_time = "2024-12-18T11:29:03.193Z" }, 245 | { url = "https://files.pythonhosted.org/packages/10/6c/e62b8657b834f3eb2961b49ec8e301eb99946245e70bf42c8817350cbefc/pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154", size = 1811273, upload_time = "2024-12-18T11:29:05.306Z" }, 246 | { url = "https://files.pythonhosted.org/packages/ba/15/52cfe49c8c986e081b863b102d6b859d9defc63446b642ccbbb3742bf371/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9", size = 1823027, upload_time = "2024-12-18T11:29:07.294Z" }, 247 | { url = 
"https://files.pythonhosted.org/packages/b1/1c/b6f402cfc18ec0024120602bdbcebc7bdd5b856528c013bd4d13865ca473/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9", size = 1868888, upload_time = "2024-12-18T11:29:09.249Z" }, 248 | { url = "https://files.pythonhosted.org/packages/bd/7b/8cb75b66ac37bc2975a3b7de99f3c6f355fcc4d89820b61dffa8f1e81677/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1", size = 2037738, upload_time = "2024-12-18T11:29:11.23Z" }, 249 | { url = "https://files.pythonhosted.org/packages/c8/f1/786d8fe78970a06f61df22cba58e365ce304bf9b9f46cc71c8c424e0c334/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a", size = 2685138, upload_time = "2024-12-18T11:29:16.396Z" }, 250 | { url = "https://files.pythonhosted.org/packages/a6/74/d12b2cd841d8724dc8ffb13fc5cef86566a53ed358103150209ecd5d1999/pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e", size = 1997025, upload_time = "2024-12-18T11:29:20.25Z" }, 251 | { url = "https://files.pythonhosted.org/packages/a0/6e/940bcd631bc4d9a06c9539b51f070b66e8f370ed0933f392db6ff350d873/pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4", size = 2004633, upload_time = "2024-12-18T11:29:23.877Z" }, 252 | { url = "https://files.pythonhosted.org/packages/50/cc/a46b34f1708d82498c227d5d80ce615b2dd502ddcfd8376fc14a36655af1/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27", size = 1999404, upload_time = "2024-12-18T11:29:25.872Z" }, 253 | { url = "https://files.pythonhosted.org/packages/ca/2d/c365cfa930ed23bc58c41463bae347d1005537dc8db79e998af8ba28d35e/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee", size = 2130130, upload_time = "2024-12-18T11:29:29.252Z" }, 254 | { url = "https://files.pythonhosted.org/packages/f4/d7/eb64d015c350b7cdb371145b54d96c919d4db516817f31cd1c650cae3b21/pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1", size = 2157946, upload_time = "2024-12-18T11:29:31.338Z" }, 255 | { url = "https://files.pythonhosted.org/packages/a4/99/bddde3ddde76c03b65dfd5a66ab436c4e58ffc42927d4ff1198ffbf96f5f/pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130", size = 1834387, upload_time = "2024-12-18T11:29:33.481Z" }, 256 | { url = "https://files.pythonhosted.org/packages/71/47/82b5e846e01b26ac6f1893d3c5f9f3a2eb6ba79be26eef0b759b4fe72946/pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee", size = 1990453, upload_time = "2024-12-18T11:29:35.533Z" }, 257 | { url = "https://files.pythonhosted.org/packages/51/b2/b2b50d5ecf21acf870190ae5d093602d95f66c9c31f9d5de6062eb329ad1/pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b", 
size = 1885186, upload_time = "2024-12-18T11:29:37.649Z" }, 258 | ] 259 | 260 | [[package]] 261 | name = "pydantic-settings" 262 | version = "2.7.1" 263 | source = { registry = "https://pypi.org/simple" } 264 | dependencies = [ 265 | { name = "pydantic" }, 266 | { name = "python-dotenv" }, 267 | ] 268 | sdist = { url = "https://files.pythonhosted.org/packages/73/7b/c58a586cd7d9ac66d2ee4ba60ca2d241fa837c02bca9bea80a9a8c3d22a9/pydantic_settings-2.7.1.tar.gz", hash = "sha256:10c9caad35e64bfb3c2fbf70a078c0e25cc92499782e5200747f942a065dec93", size = 79920, upload_time = "2024-12-31T11:27:44.632Z" } 269 | wheels = [ 270 | { url = "https://files.pythonhosted.org/packages/b4/46/93416fdae86d40879714f72956ac14df9c7b76f7d41a4d68aa9f71a0028b/pydantic_settings-2.7.1-py3-none-any.whl", hash = "sha256:590be9e6e24d06db33a4262829edef682500ef008565a969c73d39d5f8bfb3fd", size = 29718, upload_time = "2024-12-31T11:27:43.201Z" }, 271 | ] 272 | 273 | [[package]] 274 | name = "pygments" 275 | version = "2.19.1" 276 | source = { registry = "https://pypi.org/simple" } 277 | sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581, upload_time = "2025-01-06T17:26:30.443Z" } 278 | wheels = [ 279 | { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293, upload_time = "2025-01-06T17:26:25.553Z" }, 280 | ] 281 | 282 | [[package]] 283 | name = "python-dotenv" 284 | version = "1.0.1" 285 | source = { registry = "https://pypi.org/simple" } 286 | sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115, upload_time = "2024-01-23T06:33:00.505Z" } 287 | wheels = [ 288 | { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863, upload_time = "2024-01-23T06:32:58.246Z" }, 289 | ] 290 | 291 | [[package]] 292 | name = "rich" 293 | version = "13.9.4" 294 | source = { registry = "https://pypi.org/simple" } 295 | dependencies = [ 296 | { name = "markdown-it-py" }, 297 | { name = "pygments" }, 298 | ] 299 | sdist = { url = "https://files.pythonhosted.org/packages/ab/3a/0316b28d0761c6734d6bc14e770d85506c986c85ffb239e688eeaab2c2bc/rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098", size = 223149, upload_time = "2024-11-01T16:43:57.873Z" } 300 | wheels = [ 301 | { url = "https://files.pythonhosted.org/packages/19/71/39c7c0d87f8d4e6c020a393182060eaefeeae6c01dab6a84ec346f2567df/rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90", size = 242424, upload_time = "2024-11-01T16:43:55.817Z" }, 302 | ] 303 | 304 | [[package]] 305 | name = "shellingham" 306 | version = "1.5.4" 307 | source = { registry = "https://pypi.org/simple" } 308 | sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", 
hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload_time = "2023-10-24T04:13:40.426Z" } 309 | wheels = [ 310 | { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload_time = "2023-10-24T04:13:38.866Z" }, 311 | ] 312 | 313 | [[package]] 314 | name = "sniffio" 315 | version = "1.3.1" 316 | source = { registry = "https://pypi.org/simple" } 317 | sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload_time = "2024-02-25T23:20:04.057Z" } 318 | wheels = [ 319 | { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload_time = "2024-02-25T23:20:01.196Z" }, 320 | ] 321 | 322 | [[package]] 323 | name = "sse-starlette" 324 | version = "2.2.1" 325 | source = { registry = "https://pypi.org/simple" } 326 | dependencies = [ 327 | { name = "anyio" }, 328 | { name = "starlette" }, 329 | ] 330 | sdist = { url = "https://files.pythonhosted.org/packages/71/a4/80d2a11af59fe75b48230846989e93979c892d3a20016b42bb44edb9e398/sse_starlette-2.2.1.tar.gz", hash = "sha256:54470d5f19274aeed6b2d473430b08b4b379ea851d953b11d7f1c4a2c118b419", size = 17376, upload_time = "2024-12-25T09:09:30.616Z" } 331 | wheels = [ 332 | { url = "https://files.pythonhosted.org/packages/d9/e0/5b8bd393f27f4a62461c5cf2479c75a2cc2ffa330976f9f00f5f6e4f50eb/sse_starlette-2.2.1-py3-none-any.whl", hash = "sha256:6410a3d3ba0c89e7675d4c273a301d64649c03a5ef1ca101f10b47f895fd0e99", size = 10120, upload_time = "2024-12-25T09:09:26.761Z" }, 333 | ] 334 | 335 | [[package]] 336 | name = "starlette" 337 | version = "0.45.3" 338 | source = { registry = "https://pypi.org/simple" } 339 | dependencies = [ 340 | { name = "anyio" }, 341 | ] 342 | sdist = { url = "https://files.pythonhosted.org/packages/ff/fb/2984a686808b89a6781526129a4b51266f678b2d2b97ab2d325e56116df8/starlette-0.45.3.tar.gz", hash = "sha256:2cbcba2a75806f8a41c722141486f37c28e30a0921c5f6fe4346cb0dcee1302f", size = 2574076, upload_time = "2025-01-24T11:17:36.535Z" } 343 | wheels = [ 344 | { url = "https://files.pythonhosted.org/packages/d9/61/f2b52e107b1fc8944b33ef56bf6ac4ebbe16d91b94d2b87ce013bf63fb84/starlette-0.45.3-py3-none-any.whl", hash = "sha256:dfb6d332576f136ec740296c7e8bb8c8a7125044e7c6da30744718880cdd059d", size = 71507, upload_time = "2025-01-24T11:17:34.182Z" }, 345 | ] 346 | 347 | [[package]] 348 | name = "tenacity" 349 | version = "9.0.0" 350 | source = { registry = "https://pypi.org/simple" } 351 | sdist = { url = "https://files.pythonhosted.org/packages/cd/94/91fccdb4b8110642462e653d5dcb27e7b674742ad68efd146367da7bdb10/tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b", size = 47421, upload_time = "2024-07-29T12:12:27.547Z" } 352 | wheels = [ 353 | { url = "https://files.pythonhosted.org/packages/b6/cb/b86984bed139586d01532a587464b5805f12e397594f19f931c4c2fbfa61/tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539", size = 28169, 
upload_time = "2024-07-29T12:12:25.825Z" }, 354 | ] 355 | 356 | [[package]] 357 | name = "toml" 358 | version = "0.10.2" 359 | source = { registry = "https://pypi.org/simple" } 360 | sdist = { url = "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f", size = 22253, upload_time = "2020-11-01T01:40:22.204Z" } 361 | wheels = [ 362 | { url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588, upload_time = "2020-11-01T01:40:20.672Z" }, 363 | ] 364 | 365 | [[package]] 366 | name = "tomli" 367 | version = "2.2.1" 368 | source = { registry = "https://pypi.org/simple" } 369 | sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload_time = "2024-11-27T22:38:36.873Z" } 370 | wheels = [ 371 | { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload_time = "2024-11-27T22:38:21.659Z" }, 372 | { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload_time = "2024-11-27T22:38:22.693Z" }, 373 | { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload_time = "2024-11-27T22:38:24.367Z" }, 374 | { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload_time = "2024-11-27T22:38:26.081Z" }, 375 | { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload_time = "2024-11-27T22:38:27.921Z" }, 376 | { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload_time = "2024-11-27T22:38:29.591Z" }, 377 | { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload_time = "2024-11-27T22:38:30.639Z" }, 378 | { url = 
"https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload_time = "2024-11-27T22:38:31.702Z" }, 379 | { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload_time = "2024-11-27T22:38:32.837Z" }, 380 | { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload_time = "2024-11-27T22:38:34.455Z" }, 381 | { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload_time = "2024-11-27T22:38:35.385Z" }, 382 | ] 383 | 384 | [[package]] 385 | name = "typer" 386 | version = "0.15.1" 387 | source = { registry = "https://pypi.org/simple" } 388 | dependencies = [ 389 | { name = "click" }, 390 | { name = "rich" }, 391 | { name = "shellingham" }, 392 | { name = "typing-extensions" }, 393 | ] 394 | sdist = { url = "https://files.pythonhosted.org/packages/cb/ce/dca7b219718afd37a0068f4f2530a727c2b74a8b6e8e0c0080a4c0de4fcd/typer-0.15.1.tar.gz", hash = "sha256:a0588c0a7fa68a1978a069818657778f86abe6ff5ea6abf472f940a08bfe4f0a", size = 99789, upload_time = "2024-12-04T17:44:58.956Z" } 395 | wheels = [ 396 | { url = "https://files.pythonhosted.org/packages/d0/cc/0a838ba5ca64dc832aa43f727bd586309846b0ffb2ce52422543e6075e8a/typer-0.15.1-py3-none-any.whl", hash = "sha256:7994fb7b8155b64d3402518560648446072864beefd44aa2dc36972a5972e847", size = 44908, upload_time = "2024-12-04T17:44:57.291Z" }, 397 | ] 398 | 399 | [[package]] 400 | name = "typing-extensions" 401 | version = "4.12.2" 402 | source = { registry = "https://pypi.org/simple" } 403 | sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321, upload_time = "2024-06-07T18:52:15.995Z" } 404 | wheels = [ 405 | { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438, upload_time = "2024-06-07T18:52:13.582Z" }, 406 | ] 407 | 408 | [[package]] 409 | name = "uvicorn" 410 | version = "0.34.0" 411 | source = { registry = "https://pypi.org/simple" } 412 | dependencies = [ 413 | { name = "click" }, 414 | { name = "h11" }, 415 | ] 416 | sdist = { url = "https://files.pythonhosted.org/packages/4b/4d/938bd85e5bf2edeec766267a5015ad969730bb91e31b44021dfe8b22df6c/uvicorn-0.34.0.tar.gz", hash = "sha256:404051050cd7e905de2c9a7e61790943440b3416f49cb409f965d9dcd0fa73e9", size = 76568, upload_time = "2024-12-15T13:33:30.42Z" } 417 | wheels = [ 418 | { url = "https://files.pythonhosted.org/packages/61/14/33a3a1352cfa71812a3a21e8c9bfb83f60b0011f5e36f2b1399d51928209/uvicorn-0.34.0-py3-none-any.whl", hash = 
"sha256:023dc038422502fa28a09c7a30bf2b6991512da7dcdb8fd35fe57cfc154126f4", size = 62315, upload_time = "2024-12-15T13:33:27.467Z" }, 419 | ] 420 | 421 | [[package]] 422 | name = "wcwidth" 423 | version = "0.2.13" 424 | source = { registry = "https://pypi.org/simple" } 425 | sdist = { url = "https://files.pythonhosted.org/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5", size = 101301, upload_time = "2024-01-06T02:10:57.829Z" } 426 | wheels = [ 427 | { url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166, upload_time = "2024-01-06T02:10:55.763Z" }, 428 | ] 429 | --------------------------------------------------------------------------------