├── src └── otg_mcp │ ├── __init__.py │ ├── py.typed │ ├── .python-version │ ├── models │ ├── __init__.py │ └── models.py │ ├── __main__.py │ ├── client_capture.py │ ├── config.py │ └── server.py ├── tests ├── functional │ ├── __init__.py │ └── test_no_inline_comments.py ├── schema │ ├── __init__.py │ ├── test_schema_registry_type_error.py │ ├── test_schema_registry_focused.py │ ├── test_schema_edge_cases.py │ ├── test_schema_registry_coverage.py │ ├── test_list_schemas.py │ ├── test_schema_retrieval.py │ ├── test_schema_complete_coverage.py │ ├── test_schema_input_handling.py │ ├── test_find_closest_version.py │ ├── test_schema_final_coverage.py │ ├── test_schema_registry.py │ ├── test_schema_coverage.py │ ├── test_schema_version_matching.py │ └── test_schema_registry_complete.py ├── version │ ├── __init__.py │ └── test_target_version_detection.py ├── test_server.py ├── test_config.py ├── test_main_exec.py ├── README.md ├── test_set_config.py ├── conftest.py ├── test_health.py └── test_targets_config.py ├── examples ├── mcp-config.json ├── trafficGeneratorConfig.json └── trafficGeneratorConfigWithCustomSchemas.json ├── .clineignore ├── Dockerfile ├── .vscode └── settings.json ├── .github └── workflows │ ├── docker.yml │ └── ci.yml ├── docs ├── deployIxiaC_simple_testing.md └── github-flow.md ├── .gitignore ├── pyproject.toml ├── RELEASE.md ├── README.md ├── requirements.txt └── LICENSE /src/otg_mcp/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/otg_mcp/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/functional/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/schema/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/version/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/otg_mcp/.python-version: -------------------------------------------------------------------------------- 1 | 3.11 2 | -------------------------------------------------------------------------------- /examples/mcp-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "mcpServers": { 3 | "custom-server": { 4 | "command": "python", 5 | "args": [ 6 | "run_server.py", 7 | "--config-file", 8 | "trafficGeneratorConfig.json" 9 | ] 10 | } 11 | } 12 | } -------------------------------------------------------------------------------- /examples/trafficGeneratorConfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "targets": { 3 | "fantasia-1x.heshlaw.local:8443": { 4 | "ports": { 5 | "p1": { 6 | "location": "localhost:5555", 7 | "name": "p1" 8 | } 9 | } 10 | }, 11 | "fantasia-2x.heshlaw.local:8443": { 12 | "ports": { 13 | "p1": { 14 | "location": "localhost:5555", 15 | "name": "p1" 16 | } 17 | } 18 | } 19 | } 20 | } -------------------------------------------------------------------------------- /.clineignore: 
-------------------------------------------------------------------------------- 1 | # Dependencies 2 | node_modules/ 3 | **/node_modules/ 4 | .pnp 5 | .pnp.js 6 | 7 | # Build outputs 8 | /build/ 9 | /dist/ 10 | /.next/ 11 | /out/ 12 | 13 | # Testing 14 | /coverage/ 15 | 16 | # Environment variables 17 | .env 18 | .env.local 19 | .env.development.local 20 | .env.test.local 21 | .env.production.local 22 | 23 | # Large data files 24 | *.csv 25 | *.xlsx 26 | .vscode 27 | coverage/ 28 | .mypy_cache 29 | __pycache__/ 30 | *.egg-info/ 31 | *.egg 32 | .pytest_cache/ -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # Use Python image as the base 2 | FROM python:3.11-slim 3 | 4 | # Install Node.js for better HTML parsing 5 | RUN apt-get update && apt-get install -y \ 6 | nodejs \ 7 | npm \ 8 | && rm -rf /var/lib/apt/lists/* 9 | 10 | # Set working directory 11 | WORKDIR /app 12 | 13 | # Copy requirements first to leverage Docker cache 14 | COPY requirements.txt . 15 | RUN pip install --no-cache-dir -r requirements.txt 16 | 17 | # Copy the project files 18 | COPY . . 19 | 20 | # Install project in development mode 21 | RUN pip install -e . 22 | 23 | # Expose port for the MCP server (default for most MCP servers) 24 | EXPOSE 3000 25 | 26 | # Run the MCP server 27 | CMD ["python", "-m", "otg_mcp"] 28 | -------------------------------------------------------------------------------- /examples/trafficGeneratorConfigWithCustomSchemas.json: -------------------------------------------------------------------------------- 1 | { 2 | "schemas": { 3 | "schema_path": "/path/to/custom/schemas/directory" 4 | }, 5 | "targets": { 6 | "ixnetwork": { 7 | "ports": { 8 | "port1": { 9 | "location": "1/1", 10 | "name": "Port 1" 11 | }, 12 | "port2": { 13 | "location": "1/2", 14 | "name": "Port 2" 15 | } 16 | } 17 | }, 18 | "keysight-ixia": { 19 | "ports": { 20 | "port1": { 21 | "location": "10.0.0.1/1/1", 22 | "name": "Port 1" 23 | }, 24 | "port2": { 25 | "location": "10.0.0.1/1/2", 26 | "name": "Port 2" 27 | } 28 | } 29 | } 30 | } 31 | } -------------------------------------------------------------------------------- /src/otg_mcp/models/__init__.py: -------------------------------------------------------------------------------- 1 | """OTG MCP models package.""" 2 | 3 | from .models import ( 4 | ApiResponse, 5 | CapabilitiesVersionResponse, 6 | CaptureResponse, 7 | ConfigResponse, 8 | ControlResponse, 9 | HealthStatus, 10 | MetricsResponse, 11 | PortInfo, 12 | SnappiError, 13 | TargetHealthInfo, 14 | TrafficGeneratorInfo, 15 | TrafficGeneratorStatus, 16 | ) 17 | 18 | __all__ = [ 19 | "ApiResponse", 20 | "ConfigResponse", 21 | "MetricsResponse", 22 | "CaptureResponse", 23 | "ControlResponse", 24 | "TrafficGeneratorStatus", 25 | "TrafficGeneratorInfo", 26 | "PortInfo", 27 | "SnappiError", 28 | "CapabilitiesVersionResponse", 29 | "HealthStatus", 30 | "TargetHealthInfo", 31 | ] 32 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "workbench.colorCustomizations": { 3 | "activityBar.activeBackground": "#c1f78d", 4 | "activityBar.background": "#c1f78d", 5 | "activityBar.foreground": "#15202b", 6 | "activityBar.inactiveForeground": "#15202b99", 7 | "activityBarBadge.background": "#4c9ef2", 8 | "activityBarBadge.foreground": "#15202b", 9 | 
"commandCenter.border": "#15202b99", 10 | "sash.hoverBorder": "#c1f78d", 11 | "statusBar.background": "#a7f35e", 12 | "statusBar.foreground": "#15202b", 13 | "statusBarItem.hoverBackground": "#8def2f", 14 | "statusBarItem.remoteBackground": "#a7f35e", 15 | "statusBarItem.remoteForeground": "#15202b", 16 | "titleBar.activeBackground": "#a7f35e", 17 | "titleBar.activeForeground": "#15202b", 18 | "titleBar.inactiveBackground": "#a7f35e99", 19 | "titleBar.inactiveForeground": "#15202b99" 20 | }, 21 | "peacock.color": "#a7f35e" 22 | } -------------------------------------------------------------------------------- /tests/test_server.py: -------------------------------------------------------------------------------- 1 | from unittest import mock 2 | 3 | import pytest 4 | 5 | from otg_mcp.models import HealthStatus, TargetHealthInfo 6 | from otg_mcp.server import FastMCP 7 | 8 | 9 | @pytest.fixture 10 | def mock_fastmcp(): 11 | """Mock FastMCP for testing.""" 12 | mock_mcp = mock.MagicMock(spec=FastMCP) 13 | return mock_mcp 14 | 15 | 16 | class TestOtgMcpServer: 17 | """Tests for OtgMcpServer.""" 18 | 19 | def test_health_check_tool(self): 20 | """Test the health check tool.""" 21 | # Simplify the test - we just want to verify that a health status 22 | # object has the expected properties 23 | target_info = TargetHealthInfo(name="target1", healthy=True) 24 | health_status = HealthStatus( 25 | status="success", 26 | targets={"target1": target_info} 27 | ) 28 | 29 | # Verify health status properties 30 | assert health_status.status == "success" 31 | assert "target1" in health_status.targets 32 | assert health_status.targets["target1"].name == "target1" 33 | assert health_status.targets["target1"].healthy 34 | -------------------------------------------------------------------------------- /src/otg_mcp/__main__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import sys 3 | import traceback 4 | 5 | from .server import run_server 6 | 7 | logger = logging.getLogger(__name__) 8 | 9 | logger.info("Setting logging level to INFO for all otg_mcp modules") 10 | for name in logging.root.manager.loggerDict: 11 | if name.startswith("otg_mcp"): 12 | logging.getLogger(name).setLevel(logging.INFO) 13 | 14 | if __name__ == "__main__": 15 | try: 16 | logger.info("Starting OTG MCP Server via __main__") 17 | 18 | logger.info("Logging Python environment information") 19 | logger.info(f"Python version: {sys.version}") 20 | logger.info(f"Python executable: {sys.executable}") 21 | 22 | logger.info("Starting OTG MCP server via run_server()") 23 | run_server() 24 | logger.info("Server execution completed normally") 25 | sys.exit(0) 26 | except ImportError as e: 27 | error_message = f"IMPORT ERROR: Failed to import required module: {str(e)}" 28 | logger.critical(error_message) 29 | logger.critical(f"Traceback: {traceback.format_exc()}") 30 | sys.exit(1) 31 | except Exception as e: 32 | error_message = f"CRITICAL ERROR: Server failed to start: {str(e)}" 33 | logger.critical(error_message) 34 | logger.critical(f"Traceback: {traceback.format_exc()}") 35 | sys.exit(1) 36 | -------------------------------------------------------------------------------- /tests/test_config.py: -------------------------------------------------------------------------------- 1 | import os 2 | from unittest import mock 3 | 4 | import pytest 5 | 6 | from otg_mcp.config import ( 7 | LoggingConfig, 8 | ) 9 | 10 | 11 | class TestLogConfig: 12 | """Tests for LoggingConfig.""" 13 | 14 | def 
test_default_log_level(self): 15 | """Test default log level is INFO.""" 16 | log_config = LoggingConfig() 17 | assert log_config.LOG_LEVEL == "INFO" 18 | 19 | def test_custom_log_level(self): 20 | """Test custom log level validation.""" 21 | with mock.patch.dict(os.environ, {"LOG_LEVEL": "DEBUG"}): 22 | log_config = LoggingConfig() 23 | assert log_config.LOG_LEVEL == "DEBUG" 24 | 25 | def test_invalid_log_level(self): 26 | """Test invalid log level validation.""" 27 | with mock.patch.dict(os.environ, {"LOG_LEVEL": "INVALID"}): 28 | with pytest.raises(ValueError): 29 | LoggingConfig() 30 | 31 | 32 | class TestConfig: 33 | """Tests for Config.""" 34 | 35 | # test_get_config has been removed since we no longer use a global config instance 36 | # Configuration is now created directly in the server and passed to components 37 | 38 | @pytest.fixture 39 | def mock_socket(self): 40 | """Mock socket for available port tests.""" 41 | with mock.patch("socket.socket") as mock_socket: 42 | mock_socket_instance = mock.MagicMock() 43 | mock_socket.return_value.__enter__.return_value = mock_socket_instance 44 | yield mock_socket_instance 45 | -------------------------------------------------------------------------------- /tests/test_main_exec.py: -------------------------------------------------------------------------------- 1 | from unittest import mock 2 | 3 | 4 | def test_main_execution_path(): 5 | """Test the execution path in __main__ when __name__ is '__main__'.""" 6 | 7 | # Create mocks for all functions/objects used 8 | mock_run_server = mock.MagicMock() 9 | mock_logger = mock.MagicMock() 10 | mock_sys_exit = mock.MagicMock() 11 | 12 | # Create local scope with mocks 13 | local_scope = { 14 | "run_server": mock_run_server, 15 | "logger": mock_logger, 16 | "sys_exit": mock_sys_exit, 17 | } 18 | 19 | # Simulate the conditional behavior in __main__ 20 | code = """ 21 | if True: # This simulates __name__ == "__main__" 22 | logger.info("Starting OTG MCP Server via __main__") 23 | run_server() 24 | sys_exit(0) 25 | """ 26 | 27 | # Execute the code 28 | exec(code, {}, local_scope) 29 | 30 | # Verify correct execution path 31 | mock_logger.info.assert_called_once_with("Starting OTG MCP Server via __main__") 32 | mock_run_server.assert_called_once() 33 | mock_sys_exit.assert_called_once_with(0) 34 | 35 | 36 | def test_main_content(): 37 | """Verify the actual content of the __main__.py file.""" 38 | with open("src/otg_mcp/__main__.py", "r") as file: 39 | content = file.read() 40 | 41 | # Check for expected imports 42 | assert "import sys" in content 43 | assert "import logging" in content 44 | assert "from .server import run_server" in content 45 | 46 | # Check for logger initialization 47 | assert "logger = logging.getLogger(__name__)" in content 48 | 49 | # Check for main block 50 | assert 'if __name__ == "__main__":' in content 51 | assert "run_server()" in content 52 | assert "sys.exit(0)" in content 53 | -------------------------------------------------------------------------------- /tests/README.md: -------------------------------------------------------------------------------- 1 | # Testing Guide for OTG MCP 2 | 3 | This guide explains how to run tests for the OTG MCP project. 4 | 5 | ## Running Tests 6 | 7 | To run the tests, you need to set certain environment variables that control the behavior of the code under test, rather than having the production code check for test framework-specific variables. 
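In practice, that pattern looks roughly like this in production code (a sketch; the module layout and defaults here are illustrative, not the project's exact wiring):

```python
import os

# Production code reads plain environment variables; it never imports or
# checks for pytest/unittest specifics.
USE_TEST_MODE = os.environ.get("USE_TEST_MODE", "false").lower() == "true"
TEST_CASE = os.environ.get("TEST_CASE", "default")


def integration_tests_enabled() -> bool:
    """Gate integration-only behavior on an explicit opt-in variable."""
    return os.environ.get("RUN_INTEGRATION_TESTS", "0") == "1"
```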
8 | 9 | ### Setting Test Mode 10 | 11 | ```bash 12 | # Set test mode (required for all tests) 13 | export USE_TEST_MODE=true 14 | 15 | # Run tests 16 | python -m pytest tests/ 17 | ``` 18 | 19 | ### Specific Test Cases 20 | 21 | For specific test scenarios, you can set the `TEST_CASE` environment variable: 22 | 23 | ```bash 24 | # For client tests 25 | export TEST_CASE=client_test 26 | python -m pytest tests/test_client.py 27 | 28 | # For integration tests 29 | export RUN_INTEGRATION_TESTS=1 30 | python -m pytest tests/integration/test_router_set_get_config.py 31 | python -m pytest tests/integration/test_integration_targets.py 32 | ``` 33 | 34 | ### Test Case Values 35 | 36 | - `config_get_test`: Returns a fixed config response for GET requests 37 | - `config_post_test`: Uses the `config()` call explicitly for POST requests 38 | - `control_stop_test`: Returns a simplified response for stop traffic requests 39 | 40 | ### Running All Tests 41 | 42 | You can run all tests with this helper script: 43 | 44 | ```bash 45 | #!/bin/bash 46 | # Run all tests with proper environment variables set 47 | export USE_TEST_MODE=true 48 | export TEST_CASE=default # Default test case 49 | 50 | # Run all tests 51 | python -m pytest tests/ 52 | ``` 53 | 54 | ## Best Practices 55 | 56 | 1. Keep production code clean of test framework references 57 | 2. Use environment variables for test-specific behavior 58 | 3. Mock dependencies in tests rather than adding test logic to production code 59 | -------------------------------------------------------------------------------- /tests/schema/test_schema_registry_type_error.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test for TypeError/KeyError handling in schema_registry.py component path navigation. 3 | 4 | This module provides a test to cover the exception handling code in lines 228-230 of schema_registry.py, 5 | which was not covered by the existing tests. 
6 | """ 7 | 8 | from unittest.mock import MagicMock 9 | 10 | import pytest 11 | 12 | from otg_mcp.schema_registry import SchemaRegistry 13 | 14 | 15 | def test_component_path_navigation_type_error(): 16 | """Test handling of TypeError when navigating through non-dict component.""" 17 | # Create a SchemaRegistry instance 18 | registry = SchemaRegistry() 19 | 20 | # Set up a mock schema with a non-dict value in the path 21 | registry.schema_exists = MagicMock(return_value=True) 22 | registry.schemas = { 23 | "1_30_0": { 24 | "info": { 25 | "title": "Test API", 26 | "version": "1.30.0" 27 | }, 28 | "paths": 12345 # This is not a dict, so trying to navigate through it will cause TypeError 29 | } 30 | } 31 | 32 | # Try to navigate through the non-dict value in the standard navigation path 33 | # This should trigger the TypeError exception handling code 34 | with pytest.raises(ValueError) as excinfo: 35 | registry.get_schema("1_30_0", "paths.something") 36 | 37 | # Verify the error message contains the expected information 38 | assert "Invalid component path paths.something:" in str(excinfo.value) 39 | # Check for the actual error message which indicates a type error with the integer 40 | assert any(phrase in str(excinfo.value) for phrase in [ 41 | "TypeError", 42 | "not subscriptable", 43 | "argument of type 'int' is not iterable" 44 | ]) 45 | 46 | 47 | if __name__ == "__main__": 48 | pytest.main(["-v", __file__]) 49 | -------------------------------------------------------------------------------- /.github/workflows/docker.yml: -------------------------------------------------------------------------------- 1 | name: Docker 2 | 3 | on: 4 | # Run on all branch pushes to ensure tests always run 5 | push: 6 | # All pull requests trigger the workflow 7 | pull_request: 8 | release: 9 | types: [published] 10 | 11 | permissions: 12 | contents: read 13 | packages: write # Required for pushing to GitHub Container Registry 14 | 15 | jobs: 16 | build: 17 | runs-on: ubuntu-latest 18 | name: Build Docker Image 19 | steps: 20 | - uses: actions/checkout@v4 21 | 22 | - name: Test Dockerfile syntax 23 | run: docker run --rm -i hadolint/hadolint < Dockerfile || echo "Dockerfile lint warnings found" 24 | 25 | - name: Set up Docker Buildx 26 | uses: docker/setup-buildx-action@v3 27 | 28 | - name: Extract metadata for Docker 29 | id: meta 30 | uses: docker/metadata-action@v5 31 | with: 32 | images: ghcr.io/${{ github.repository_owner }}/otg-mcp 33 | tags: | 34 | type=ref,event=branch 35 | type=ref,event=pr 36 | type=semver,pattern={{version}} 37 | type=sha,format=short 38 | 39 | - name: Login to GitHub Container Registry 40 | # Only login for pushes to main or releases 41 | if: github.event_name != 'pull_request' 42 | uses: docker/login-action@v3 43 | with: 44 | registry: ghcr.io 45 | username: ${{ github.actor }} 46 | password: ${{ secrets.GITHUB_TOKEN }} 47 | 48 | - name: Build and push Docker image 49 | uses: docker/build-push-action@v5 50 | with: 51 | context: . 
52 | # We only push on main branch or for tagged releases 53 | push: ${{ github.event_name != 'pull_request' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/')) }} 54 | tags: ${{ steps.meta.outputs.tags }} 55 | labels: ${{ steps.meta.outputs.labels }} 56 | cache-from: type=gha 57 | cache-to: type=gha,mode=max 58 | -------------------------------------------------------------------------------- /tests/schema/test_schema_registry_focused.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for schema registry's edge cases. 3 | 4 | Using mocks instead of filesystem operations to improve test reliability. 5 | """ 6 | 7 | from unittest.mock import MagicMock 8 | 9 | import pytest 10 | 11 | from otg_mcp.schema_registry import SchemaRegistry 12 | 13 | 14 | class TestSchemaRegistryFocused: 15 | """Test cases for schema registry edge cases using mocks.""" 16 | 17 | def test_get_schema_components_non_dict_at_path(self): 18 | """Test get_schema_components with a non-dict at the specified path.""" 19 | registry = SchemaRegistry() 20 | registry.get_schema = MagicMock(return_value=[1, 2, 3]) # Return a list, not a dict 21 | 22 | # This should execute the warning path in get_schema_components 23 | result = registry.get_schema_components("1_30_0", "some.path") 24 | 25 | # Should return an empty list when the component is not a dict 26 | assert result == [] 27 | 28 | def test_schema_components_schemas_keyerror(self): 29 | """Test KeyError handling when accessing components.schemas.""" 30 | registry = SchemaRegistry() 31 | 32 | # Mock the schema to not have 'schemas' under 'components' 33 | registry.schema_exists = MagicMock(return_value=True) 34 | registry.schemas = { 35 | "1_30_0": { 36 | "components": { 37 | # No 'schemas' key here 38 | } 39 | } 40 | } 41 | 42 | # This should trigger the KeyError handling for components.schemas 43 | with pytest.raises(ValueError) as excinfo: 44 | registry.get_schema("1_30_0", "components.schemas.Flow") 45 | 46 | assert "Error accessing components.schemas" in str(excinfo.value) 47 | 48 | def test_independent_registry_instances(self): 49 | """Test that schema registry instances are independent.""" 50 | # Create two registry instances with different custom directories 51 | registry1 = SchemaRegistry(custom_schemas_dir="/path/to/custom1") 52 | registry2 = SchemaRegistry(custom_schemas_dir="/path/to/custom2") 53 | 54 | # They should be separate instances with different configurations 55 | assert registry1 is not registry2 56 | assert registry1._custom_schemas_dir != registry2._custom_schemas_dir 57 | 58 | # Changes to one instance should not affect the other 59 | registry1._available_schemas = ["test1"] 60 | registry2._available_schemas = ["test2"] 61 | assert registry1._available_schemas != registry2._available_schemas 62 | 63 | 64 | if __name__ == "__main__": 65 | pytest.main(["-v", __file__]) 66 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: "Python CI" 2 | 3 | on: 4 | # Run on all branch pushes to ensure tests always run 5 | push: 6 | # All pull requests trigger the workflow 7 | pull_request: 8 | release: 9 | types: [published] 10 | 11 | jobs: 12 | linters: 13 | name: "Code Quality - Linting" 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@v4 17 | 18 | - name: Install uv 19 | uses: astral-sh/setup-uv@v6 20 | 21 | - name: 
Install Python 22 | run: uv python install 3.11 23 | 24 | - name: Create virtual environment 25 | run: uv venv 26 | 27 | - name: Install dependencies 28 | run: uv pip install -e .[dev,test] 29 | 30 | - name: Lint with ruff 31 | run: uv run ruff check . 32 | 33 | - name: Type check with mypy 34 | run: uv run mypy src/ 35 | 36 | build: 37 | needs: [linters] 38 | runs-on: ${{ matrix.os }} 39 | strategy: 40 | matrix: 41 | os: [ubuntu-latest, macos-latest] 42 | python-version: ["3.11", "3.12"] 43 | steps: 44 | - uses: actions/checkout@v4 45 | 46 | - name: Install uv 47 | uses: astral-sh/setup-uv@v6 48 | 49 | - name: Install Python ${{ matrix.python-version }} 50 | run: uv python install ${{ matrix.python-version }} 51 | 52 | - name: Cache dependencies 53 | uses: actions/cache@v3 54 | with: 55 | path: ~/.cache/uv 56 | key: ${{ runner.os }}-uv-${{ hashFiles('**/pyproject.toml') }} 57 | restore-keys: | 58 | ${{ runner.os }}-uv- 59 | 60 | - name: Create virtual environment 61 | run: uv venv 62 | 63 | - name: Install dependencies 64 | run: uv pip install -e .[dev,test] 65 | 66 | - name: Run tests 67 | run: uv run pytest tests/ --cov --cov-branch --cov-report=xml 68 | 69 | - name: Upload coverage reports to Codecov 70 | uses: codecov/codecov-action@v5 71 | with: 72 | token: ${{ secrets.CODECOV_TOKEN }} 73 | 74 | - name: Build package 75 | run: uv build 76 | 77 | - name: Upload artifacts 78 | uses: actions/upload-artifact@v4 79 | with: 80 | name: dist-${{ matrix.os }}-py${{ matrix.python-version }} 81 | path: dist/ 82 | 83 | publish: 84 | runs-on: ubuntu-latest 85 | needs: [build] 86 | if: github.event_name == 'release' 87 | name: Publish Package 88 | 89 | # Note: To use environments for PyPI publishing, you would need to: 90 | # 1. Create a 'release' environment in GitHub repository settings 91 | # 2. Add 'environment: release' here 92 | # For now, we'll use trusted publishing with OIDC 93 | permissions: 94 | id-token: write # Required for trusted publishing 95 | 96 | steps: 97 | - name: Install uv 98 | uses: astral-sh/setup-uv@v6 99 | 100 | - name: Download artifacts 101 | uses: actions/download-artifact@v4 102 | with: 103 | pattern: dist-* 104 | merge-multiple: true 105 | path: dist/ 106 | 107 | - name: Publish 108 | run: uv publish -t ${{ secrets.PYPI_API_TOKEN }} 109 | -------------------------------------------------------------------------------- /tests/schema/test_schema_edge_cases.py: -------------------------------------------------------------------------------- 1 | """ 2 | Edge case tests to achieve 100% coverage of the schema registry. 
3 | """ 4 | 5 | from unittest.mock import patch 6 | 7 | import pytest 8 | 9 | from otg_mcp.schema_registry import SchemaRegistry 10 | 11 | 12 | def test_schema_navigation_typeerror(): 13 | """Test error handling in schema navigation when TypeError occurs.""" 14 | registry = SchemaRegistry() 15 | 16 | # Create a mock schema with an element that's not a dictionary for navigation 17 | mock_schema = { 18 | "components": "not_a_dict" # This will cause TypeError when trying to access components.schemas 19 | } 20 | 21 | # Mock the registry to return our test schema 22 | with patch.object(registry, "schema_exists", return_value=True): 23 | with patch.dict(registry.schemas, {"1_30_0": mock_schema}): 24 | # The actual implementation raises TypeError directly 25 | with pytest.raises(TypeError, match="string indices must be integers"): 26 | registry.get_schema("1_30_0", "components.schemas.TestSchema") 27 | 28 | 29 | def test_get_schema_with_standard_navigation(): 30 | """Test accessing components without using special component path handling.""" 31 | registry = SchemaRegistry() 32 | 33 | # Create a mock schema with nested structure 34 | mock_schema = { 35 | "components": { 36 | "schemas": { 37 | "Flow": { 38 | "properties": { 39 | "name": { 40 | "type": "string", 41 | "description": "Flow name" 42 | } 43 | } 44 | } 45 | } 46 | } 47 | } 48 | 49 | # Mock the registry to return our test schema 50 | with patch.object(registry, "schema_exists", return_value=True): 51 | with patch.dict(registry.schemas, {"1_30_0": mock_schema}): 52 | # We need to use standard component path that doesn't trigger special handling 53 | result = registry.get_schema("1_30_0", "components") 54 | assert "schemas" in result 55 | assert "Flow" in result["schemas"] 56 | 57 | 58 | def test_loading_schema_from_builtin_path_after_custom_fails(): 59 | """Test loading schema from built-in path when custom fails.""" 60 | registry = SchemaRegistry("/custom/path") 61 | 62 | # Create a side effect function for _load_schema_from_path 63 | def load_schema_side_effect(path, version, source_type): 64 | # Return False for custom path, True for built-in path 65 | result = source_type == "built-in" 66 | if result: 67 | # Need to actually populate the schema when returning True 68 | registry.schemas[version] = {"test": "schema"} 69 | return result 70 | 71 | # Mock methods 72 | with patch.object(registry, "schema_exists", return_value=True): 73 | with patch.object(registry, "_load_schema_from_path", side_effect=load_schema_side_effect): 74 | # Should load from built-in path after custom path fails 75 | result = registry.get_schema("1_30_0") 76 | # Verify we got the schema we set in the mock 77 | assert result == {"test": "schema"} 78 | -------------------------------------------------------------------------------- /tests/schema/test_schema_registry_coverage.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for schema registry coverage using mocks instead of file system operations. 3 | 4 | These tests help achieve high code coverage without relying on filesystem operations. 
5 | """ 6 | 7 | import os 8 | import pytest 9 | from unittest.mock import patch 10 | 11 | from otg_mcp.schema_registry import SchemaRegistry 12 | 13 | 14 | class TestSchemaRegistryCoverage: 15 | """Test cases for schema registry coverage using mocks.""" 16 | 17 | def test_version_parsing_error(self): 18 | """Test handling of version parsing errors.""" 19 | registry = SchemaRegistry() 20 | 21 | # Test with invalid version string 22 | with patch.object(registry, 'get_available_schemas', return_value=['1_30_0']): 23 | with patch.object(registry, 'get_latest_schema_version', return_value='1_30_0'): 24 | # Should fall back to latest version 25 | result = registry.find_closest_schema_version('invalid.version') 26 | assert result == '1_30_0' 27 | 28 | def test_empty_parsed_versions(self): 29 | """Test handling empty parsed versions list.""" 30 | registry = SchemaRegistry() 31 | 32 | # Available versions list has items but none can be parsed 33 | with patch.object(registry, 'get_available_schemas', return_value=['invalid_1', 'invalid_2']): 34 | with patch.object(registry, '_parse_version', return_value=tuple()): 35 | with pytest.raises(ValueError, match="No valid schema versions available"): 36 | registry.get_latest_schema_version() 37 | 38 | def test_partial_version(self): 39 | """Test handling partial version strings.""" 40 | registry = SchemaRegistry() 41 | 42 | with patch.object(registry, 'get_available_schemas', return_value=['1_30_0']): 43 | # Test with only major version 44 | with patch.object(registry, '_parse_version', side_effect=lambda v: (1,) if v == '1' else (1, 30, 0)): 45 | result = registry.find_closest_schema_version('1') 46 | assert result == '1_30_0' 47 | 48 | def test_scan_custom_dir_error(self): 49 | """Test error handling when scanning custom schemas directory.""" 50 | registry = SchemaRegistry('/nonexistent/path') 51 | 52 | # We need to be more specific with our patching to avoid the error 53 | # Only patch the call to os.listdir with the custom dir, not all calls 54 | original_listdir = os.listdir 55 | 56 | def mock_listdir(path): 57 | if path == '/nonexistent/path': 58 | raise PermissionError("Permission denied") 59 | return original_listdir(path) 60 | 61 | with patch('os.listdir', mock_listdir): 62 | # Should not raise exception but log a warning 63 | schemas = registry.get_available_schemas() 64 | assert isinstance(schemas, list) 65 | 66 | def test_different_custom_schemas_dir_instances(self): 67 | """Test that different custom_schemas_dir creates different instances.""" 68 | # Create two separate instances with different custom schema directories 69 | registry1 = SchemaRegistry("/path/one") 70 | registry2 = SchemaRegistry("/path/two") 71 | 72 | # Verify they are different instances with different configurations 73 | assert registry1 is not registry2 74 | assert registry1._custom_schemas_dir != registry2._custom_schemas_dir 75 | 76 | 77 | if __name__ == "__main__": 78 | pytest.main(["-v", __file__]) 79 | -------------------------------------------------------------------------------- /docs/deployIxiaC_simple_testing.md: -------------------------------------------------------------------------------- 1 | # Simple Testing with Ixia-C Community Edition 2 | 3 | This guide provides quick instructions for deploying and testing the Ixia-C Community Edition using the deployIxiaC.sh script. 4 | 5 | ## Quick Start 6 | 7 | ### 1. Basic Deployment 8 | ```bash 9 | ./deploy/deployIxiaC.sh --remote --mode one-arm 10 | ``` 11 | 12 | ### 2. 
Verifying the Deployment
13 | After deployment, the controller API will be available at:
14 | ```
15 | https://<server-address>:8443
16 | ```
17 | 
18 | ### 3. Simple Traffic Testing
19 | Run a quick traffic test using the otgen tool (automatically installed by the script):
20 | ```bash
21 | ssh <server-address> "otgen create flow --rate 100 --count 100 | otgen run -k --metrics flow"
22 | ```
23 | 
24 | ## Common Testing Scenarios
25 | 
26 | ### Gateway Testing (one-arm mode)
27 | Ideal for testing single-interface scenarios where traffic is sent and received on the same interface.
28 | ```bash
29 | ./deploy/deployIxiaC.sh --remote --mode one-arm
30 | ```
31 | 
32 | Use cases:
33 | - Testing gateway device performance
34 | - Basic throughput measurements
35 | - Simple protocol conformance testing
36 | 
37 | ### Path Testing (two-arm mode)
38 | For testing traffic flow between two separate interfaces, useful for firewall testing.
39 | ```bash
40 | ./deploy/deployIxiaC.sh --remote --mode two-arm
41 | ```
42 | 
43 | Use cases:
44 | - Firewall throughput and latency testing
45 | - Routing performance measurement
46 | - Bidirectional traffic testing
47 | 
48 | ### Advanced Testing (three-arm mode)
49 | For complex traffic patterns requiring three interfaces.
50 | ```bash
51 | ./deploy/deployIxiaC.sh --remote --mode three-arm
52 | ```
53 | 
54 | Use cases:
55 | - Load balancer testing
56 | - Complex routing scenarios
57 | - Multi-path performance testing
58 | 
59 | ## Integration with OTG MCP Server
60 | 
61 | Use the deployed Ixia-C instance with the OTG MCP Server:
62 | 
63 | 1. Update your configuration file to point to the deployed instance:
64 | ```json
65 | {
66 |   "targets": {
67 |     "ixiac": {
68 |       "apiVersion": "1.30.0",
69 |       "ports": {
70 |         "port1": {
71 |           "name": "port1",
72 |           "location": "localhost:5555"
73 |         },
74 |         "port2": {
75 |           "name": "port2",
76 |           "location": "localhost:5556"
77 |         }
78 |       }
79 |     }
80 |   }
81 | }
82 | ```
83 | 
84 | 2. Start the OTG MCP Server with this configuration:
85 | ```bash
86 | python -m otg_mcp.server --config-file your_config.json
87 | ```
88 | 
89 | ## Troubleshooting
90 | 
91 | ### No Traffic Generated
92 | If no traffic is being generated, verify:
93 | 1. Interface connectivity
94 | 2. MTU settings
95 | 3. Container status with `docker ps`
96 | 
97 | ### Container Issues
98 | If containers fail to start:
99 | ```bash
100 | ssh <server-address> "cd ixia-c/deployments && docker compose logs"
101 | ```
102 | 
103 | ### Network Interface Problems
104 | Check which interfaces are available on the remote host:
105 | ```bash
106 | ssh <server-address> "ip addr show | grep -E ': |inet'"
107 | ```
108 | 
109 | ## Quick Reference
110 | - **Force redeployment**: Add `--force` flag
111 | - **Custom MTU**: Use `--mtu <value>` (default: auto-detected)
112 | - **Update to latest version**: Add `--update` flag
113 | - **View container logs**: `ssh <server-address> "docker logs keng-controller"`
114 | - **Stop deployment**: `ssh <server-address> "cd ixia-c/deployments && docker compose down"`
115 | 
116 | ## Performance Tips
117 | 
118 | 1. **MTU Optimization**
119 |    For best performance, use jumbo frames if your network supports them:
120 |    ```bash
121 |    ./deploy/deployIxiaC.sh --remote --mode two-arm --mtu 9000
122 |    ```
123 | 
124 | 2. **Traffic Engine Monitoring**
125 |    Monitor traffic engine performance:
126 |    ```bash
127 |    ssh <server-address> "docker stats"
128 |    ```
129 | 
130 | 3. **Interface Selection**
131 |    For production testing, dedicate specific interfaces for traffic:
132 |    ```bash
133 |    # Check available interfaces
134 |    ssh <server-address> "ip link show"
135 |    ```
136 |    The script will auto-detect and use available interfaces, but for consistent testing, consider using the same interfaces each time.
137 | 
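138 | 
139 | ## Verifying with the snappi Client
140 | 
141 | You can also exercise the deployed controller directly from Python with the `snappi` client (already a dependency of this project). The snippet below is a minimal sketch, assuming the controller address placeholder and the example port location used above:
142 | 
143 | ```python
144 | import snappi
145 | 
146 | # Point snappi at the deployed controller; verify=False is typically
147 | # needed because the community edition uses a self-signed certificate.
148 | api = snappi.api(location="https://<server-address>:8443", verify=False)
149 | 
150 | # Push a minimal configuration containing a single test port.
151 | config = api.config()
152 | config.ports.port(name="port1", location="localhost:5555")
153 | api.set_config(config)
154 | 
155 | # Read the configuration back to confirm the controller accepted it.
156 | print(api.get_config().serialize())
157 | ```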
--------------------------------------------------------------------------------
/tests/schema/test_list_schemas.py:
--------------------------------------------------------------------------------
1 | """
2 | Test for list_schemas_for_target functionality.
3 | 
4 | This file contains tests for the updated list_schemas_for_target functionality
5 | that focuses solely on returning components.schemas entries.
6 | """
7 | 
8 | from unittest.mock import AsyncMock, MagicMock
9 | 
10 | import pytest
11 | 
12 | from otg_mcp.client import OtgClient
13 | 
14 | 
15 | @pytest.fixture
16 | def mock_schema():
17 |     """Create a mock schema for testing."""
18 |     return {
19 |         "openapi": "3.0.3",
20 |         "info": {"title": "Open Traffic Generator API"},
21 |         "components": {
22 |             "schemas": {
23 |                 "Flow": {"description": "A flow object"},
24 |                 "Flow.Router": {"description": "A flow router object"},
25 |                 "Bgp.V4Peer": {"description": "A BGP v4 peer object"},
26 |                 "Device.BgpRouter": {"description": "A BGP router device"},
27 |             },
28 |             "responses": {
29 |                 "Success": {"description": "Success response"},
30 |                 "Failure": {"description": "Failure response"},
31 |             },
32 |         },
33 |         "paths": {"/config": {"get": {}, "post": {}}},
34 |     }
35 | 
36 | 
37 | @pytest.fixture
38 | def mock_schema_registry():
39 |     """Create a mock schema registry for testing."""
40 |     mock_registry = MagicMock()
41 |     return mock_registry
42 | 
43 | 
44 | @pytest.fixture
45 | def client(mock_schema_registry):
46 |     """Create a client instance for testing with a mock schema registry."""
47 |     from otg_mcp.config import Config
48 |     mock_config = Config()
49 |     return OtgClient(config=mock_config, schema_registry=mock_schema_registry)
50 | 
51 | 
52 | @pytest.mark.asyncio
53 | async def test_list_schemas_for_target_returns_only_schemas(client, mock_schema_registry, mock_schema):
54 |     """Test that list_schemas_for_target returns only the schemas from components.schemas."""
55 |     # Setup mocks with AsyncMock
56 |     client._get_target_config = AsyncMock(return_value={"apiVersion": "1.30.0"})
57 | 
58 |     # Configure the mock schema registry
59 |     mock_schema_registry.get_schema.return_value = mock_schema
60 | 
61 |     # Call the method
62 |     result = await client.list_schemas_for_target("test-target")
63 | 
64 |     # Verify the result is a list containing only the schemas
65 |     assert isinstance(result, list)
66 |     assert sorted(result) == sorted(
67 |         ["Flow", "Flow.Router", "Bgp.V4Peer", "Device.BgpRouter"]
68 |     )
69 | 
70 |     # Verify that we're not returning any other information
71 |     assert not any(key in result for key in ["openapi", "info", "paths"])
72 |     assert not any(key in result for key in ["top_level", "components", "servers"])
73 | 
74 |     # Make sure the schema registry was called with the correct version
75 |     mock_schema_registry.get_schema.assert_called_once_with("1.30.0")
76 | 
77 | 
78 | @pytest.mark.asyncio
79 | async def test_list_schemas_for_target_empty_components(client, mock_schema_registry):
80 |     """Test handling when the schema has no components section."""
81 |     # Setup mocks with AsyncMock
82 |     client._get_target_config = AsyncMock(return_value={"apiVersion": "1.30.0"})
83 | 
84 |     schema_without_components = {
85 |         "openapi": "3.0.3",
86 |         "info": 
{"title": "Minimal API"}, 87 | "paths": {}, 88 | } 89 | 90 | # Configure the mock schema registry 91 | mock_schema_registry.get_schema.return_value = schema_without_components 92 | 93 | # Call the method 94 | result = await client.list_schemas_for_target("test-target") 95 | 96 | # Verify the result is an empty list 97 | assert isinstance(result, list) 98 | assert len(result) == 0 99 | 100 | 101 | @pytest.mark.asyncio 102 | async def test_list_schemas_for_target_target_not_found(client): 103 | """Test error handling when target is not found.""" 104 | # Setup AsyncMock to return None for target config 105 | client._get_target_config = AsyncMock(return_value=None) 106 | 107 | # Call the method and verify it raises ValueError 108 | with pytest.raises(ValueError) as excinfo: 109 | await client.list_schemas_for_target("non-existent-target") 110 | 111 | assert "not found" in str(excinfo.value) 112 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # UV 98 | # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | #uv.lock 102 | 103 | # poetry 104 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 
105 | # This is especially recommended for binary packages to ensure reproducibility, and is more 106 | # commonly ignored for libraries. 107 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 108 | #poetry.lock 109 | 110 | # pdm 111 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 112 | #pdm.lock 113 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 114 | # in version control. 115 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control 116 | .pdm.toml 117 | .pdm-python 118 | .pdm-build/ 119 | 120 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 121 | __pypackages__/ 122 | 123 | # Celery stuff 124 | celerybeat-schedule 125 | celerybeat.pid 126 | 127 | # SageMath parsed files 128 | *.sage.py 129 | 130 | # Environments 131 | .env 132 | .venv 133 | env/ 134 | venv/ 135 | ENV/ 136 | env.bak/ 137 | venv.bak/ 138 | 139 | # Spyder project settings 140 | .spyderproject 141 | .spyproject 142 | 143 | # Rope project settings 144 | .ropeproject 145 | 146 | # mkdocs documentation 147 | /site 148 | 149 | # mypy 150 | .mypy_cache/ 151 | .dmypy.json 152 | dmypy.json 153 | 154 | # Pyre type checker 155 | .pyre/ 156 | 157 | # pytype static type analyzer 158 | .pytype/ 159 | 160 | # Cython debug symbols 161 | cython_debug/ 162 | 163 | # PyCharm 164 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 165 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 166 | # and can be added to the global gitignore or merged into this file. For a more nuclear 167 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
168 | #.idea/ 169 | 170 | # Ruff stuff: 171 | .ruff_cache/ 172 | 173 | # PyPI configuration file 174 | .pypirc 175 | 176 | #Cline 177 | .clinerules/ 178 | -------------------------------------------------------------------------------- /src/otg_mcp/models/models.py: -------------------------------------------------------------------------------- 1 | """Unified models for OTG MCP.""" 2 | 3 | from datetime import datetime 4 | from typing import Any, Dict, Optional 5 | 6 | from pydantic import BaseModel, Field 7 | 8 | 9 | class ApiResponse(BaseModel): 10 | """Base response model for API responses.""" 11 | 12 | status: str = Field(default="success", description="Status of the response") 13 | 14 | 15 | class ConfigResponse(ApiResponse): 16 | """Response model for configuration operations.""" 17 | 18 | config: Optional[Dict[str, Any]] = Field( 19 | default=None, description="Configuration data" 20 | ) 21 | 22 | 23 | class MetricsResponse(ApiResponse): 24 | """Response model for metrics operations.""" 25 | 26 | metrics: Optional[Dict[str, Any]] = Field(default=None, description="Metrics data") 27 | 28 | 29 | class CaptureResponse(ApiResponse): 30 | """Response model for capture operations.""" 31 | 32 | port: str = Field(..., description="Name of the port used for capture") 33 | data: Optional[Dict[str, Any]] = Field(default=None, description="Capture data") 34 | capture_id: Optional[str] = None 35 | file_path: Optional[str] = Field( 36 | default=None, description="Path to the saved capture file (.pcap)" 37 | ) 38 | 39 | 40 | class ControlResponse(ApiResponse): 41 | """Response model for control responses.""" 42 | 43 | action: str = Field(..., description="Action that was performed") 44 | verified: Optional[bool] = Field( 45 | default=None, description="Whether the action was verified" 46 | ) 47 | timestamp: datetime = Field(default_factory=datetime.now) 48 | result: Optional[Dict[str, Any]] = Field( 49 | default=None, description="Result details of the control operation" 50 | ) 51 | 52 | 53 | class PortInfo(BaseModel): 54 | """Information about a port on a traffic generator.""" 55 | 56 | name: str = Field(..., description="Name of the port") 57 | location: str = Field(..., description="Location of the port (hostname:port)") 58 | interface: Optional[str] = Field( 59 | None, description="Interface name (backward compatibility)" 60 | ) 61 | 62 | @property 63 | def interface_name(self) -> str: 64 | """Get the interface name, falling back to location if not set.""" 65 | return self.interface or self.location 66 | 67 | 68 | class CapabilitiesVersionResponse(BaseModel): 69 | """Response from the capabilities/version endpoint.""" 70 | 71 | api_spec_version: str 72 | sdk_version: str 73 | app_version: str 74 | 75 | 76 | class TrafficGeneratorInfo(BaseModel): 77 | """Information about a traffic generator.""" 78 | 79 | hostname: str = Field(..., description="Hostname of the traffic generator") 80 | ports: Dict[str, PortInfo] = Field( 81 | default_factory=dict, description="Ports available on this generator" 82 | ) 83 | available: bool = Field( 84 | default=True, description="Whether the generator is available" 85 | ) 86 | 87 | 88 | class TrafficGeneratorStatus(ApiResponse): 89 | """Status of all traffic generators.""" 90 | 91 | generators: Dict[str, TrafficGeneratorInfo] = Field( 92 | default_factory=dict, description="All available traffic generators" 93 | ) 94 | 95 | 96 | class TargetHealthInfo(BaseModel): 97 | """Health information for a traffic generator target.""" 98 | 99 | name: str = Field(..., 
description="Name of the target") 100 | healthy: bool = Field(..., description="Whether the target is healthy") 101 | version_info: Optional[CapabilitiesVersionResponse] = Field( 102 | None, description="Version information when available" 103 | ) 104 | error: Optional[str] = Field(None, description="Error message if unhealthy") 105 | 106 | 107 | class HealthStatus(BaseModel): 108 | """Health status collection of all traffic generators.""" 109 | 110 | status: str = Field(default="success", description="Status of the response") 111 | targets: Dict[str, TargetHealthInfo] = Field( 112 | default_factory=dict, description="Health status of individual targets" 113 | ) 114 | 115 | 116 | class SnappiError(BaseModel): 117 | """Error model for snappi errors.""" 118 | 119 | error: str 120 | detail: Optional[str] = None 121 | code: Optional[int] = None 122 | -------------------------------------------------------------------------------- /tests/schema/test_schema_retrieval.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for schema retrieval functionality. 3 | 4 | This module contains tests for the schema retrieval functionality, 5 | specifically focusing on the ability to retrieve schemas with dotted notation. 6 | """ 7 | 8 | import logging 9 | from unittest.mock import MagicMock 10 | 11 | import pytest 12 | 13 | from otg_mcp.schema_registry import SchemaRegistry 14 | 15 | # Set up logging 16 | logging.basicConfig(level=logging.INFO) 17 | logger = logging.getLogger(__name__) 18 | 19 | 20 | @pytest.fixture 21 | def mock_schema(): 22 | """Create a mock schema for testing.""" 23 | return { 24 | "components": { 25 | "schemas": { 26 | "Flow": {"description": "A flow object"}, 27 | "Flow.Router": {"description": "A flow router object"}, 28 | "Bgp.V4Peer": {"description": "A BGP v4 peer object"}, 29 | "Device.BgpRouter": {"description": "A BGP router device"}, 30 | }, 31 | "responses": { 32 | "Success": {"description": "Success response"}, 33 | "Failure": {"description": "Failure response"}, 34 | }, 35 | } 36 | } 37 | 38 | 39 | class TestSchemaRegistry: 40 | """Test class for SchemaRegistry.""" 41 | 42 | def test_get_schema_with_simple_path(self, mock_schema): 43 | """Test retrieving a schema with a simple path.""" 44 | # Create schema registry with a mocked schema 45 | registry = SchemaRegistry() 46 | registry.schema_exists = MagicMock(return_value=True) 47 | registry.schemas = {"1_30_0": mock_schema} 48 | 49 | # Test getting a basic component path 50 | result = registry.get_schema("1.30.0", "components") 51 | assert result == mock_schema["components"] 52 | 53 | # Test getting a nested component path 54 | result = registry.get_schema("1.30.0", "components.schemas") 55 | assert result == mock_schema["components"]["schemas"] 56 | 57 | def test_get_schema_with_dotted_notation(self, mock_schema): 58 | """Test retrieving a schema with dotted notation in the name.""" 59 | # Create schema registry with a mocked schema 60 | registry = SchemaRegistry() 61 | registry.schema_exists = MagicMock(return_value=True) 62 | registry.schemas = {"1_30_0": mock_schema} 63 | 64 | # Test getting a schema with dots in the name 65 | result = registry.get_schema("1.30.0", "components.schemas.Flow.Router") 66 | assert result == mock_schema["components"]["schemas"]["Flow.Router"] 67 | 68 | # Test another schema with dots 69 | result = registry.get_schema("1.30.0", "components.schemas.Bgp.V4Peer") 70 | assert result == mock_schema["components"]["schemas"]["Bgp.V4Peer"] 71 | 72 | # 
And another one 73 | result = registry.get_schema("1.30.0", "components.schemas.Device.BgpRouter") 74 | assert result == mock_schema["components"]["schemas"]["Device.BgpRouter"] 75 | 76 | def test_get_schema_component_not_found(self, mock_schema): 77 | """Test error handling when a schema component is not found.""" 78 | # Create schema registry with a mocked schema 79 | registry = SchemaRegistry() 80 | registry.schema_exists = MagicMock(return_value=True) 81 | registry.schemas = {"1_30_0": mock_schema} 82 | 83 | # Test getting a non-existent schema 84 | with pytest.raises(ValueError) as excinfo: 85 | registry.get_schema("1.30.0", "components.schemas.NonExistent") 86 | assert "not found" in str(excinfo.value) 87 | 88 | def test_get_schema_components(self, mock_schema): 89 | """Test getting all schema components.""" 90 | # Create schema registry with a mocked schema 91 | registry = SchemaRegistry() 92 | registry.schema_exists = MagicMock(return_value=True) 93 | registry.schemas = {"1_30_0": mock_schema} 94 | 95 | # Test getting all schema components 96 | result = registry.get_schema_components("1.30.0", "components.schemas") 97 | expected = ["Flow", "Flow.Router", "Bgp.V4Peer", "Device.BgpRouter"] 98 | assert sorted(result) == sorted(expected) 99 | 100 | 101 | if __name__ == "__main__": 102 | pytest.main(["-v", __file__]) 103 | -------------------------------------------------------------------------------- /tests/test_set_config.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test for set_config functionality that returns applied configuration. 3 | """ 4 | 5 | import logging 6 | from unittest.mock import MagicMock 7 | 8 | import pytest 9 | 10 | from otg_mcp.client import OtgClient 11 | from otg_mcp.models import ConfigResponse 12 | 13 | # Configure logging 14 | logging.basicConfig(level=logging.INFO) 15 | logger = logging.getLogger(__name__) 16 | 17 | 18 | @pytest.fixture 19 | def mock_api(): 20 | """Create a mock API client for testing.""" 21 | mock_api = MagicMock() 22 | 23 | # Mock config object with serialize method 24 | mock_config = MagicMock() 25 | mock_config.serialize.return_value = { 26 | "ports": [{"name": "port1", "location": "localhost:5555"}], 27 | "flows": [{"name": "flow1", "tx_rx": {"port": ["port1", "port1"]}}], 28 | } 29 | mock_config.DICT = "dict" 30 | 31 | # Configure get_config to return our mock config 32 | mock_api.get_config.return_value = mock_config 33 | 34 | return mock_api 35 | 36 | 37 | @pytest.fixture 38 | def client(): 39 | """Create a client instance for testing.""" 40 | from otg_mcp.config import Config 41 | mock_config = Config() 42 | return OtgClient(config=mock_config) 43 | 44 | 45 | @pytest.mark.asyncio 46 | async def test_set_config_returns_applied_config(client, mock_api): 47 | """Test that set_config returns the applied configuration.""" 48 | # Mock the _get_api_client method to return our mock API 49 | client._get_api_client = MagicMock(return_value=mock_api) 50 | 51 | # Call set_config 52 | test_config = { 53 | "ports": [{"name": "port1", "location": "localhost:5555"}], 54 | "flows": [{"name": "flow1", "tx_rx": {"port": ["port1", "port1"]}}], 55 | } 56 | response = await client.set_config(config=test_config, target="localhost") 57 | 58 | # Verify the API interactions 59 | mock_api.set_config.assert_called_once() # Was set_config called? 60 | mock_api.get_config.assert_called_once() # Was get_config called after set_config? 
61 | 62 | # Verify the response structure 63 | assert isinstance(response, ConfigResponse) 64 | assert response.status == "success" 65 | assert response.config is not None 66 | 67 | # Verify the returned config matches what was serialized 68 | expected_config = { 69 | "ports": [{"name": "port1", "location": "localhost:5555"}], 70 | "flows": [{"name": "flow1", "tx_rx": {"port": ["port1", "port1"]}}], 71 | } 72 | assert response.config == expected_config 73 | 74 | 75 | @pytest.mark.asyncio 76 | async def test_set_config_error_handling(client, mock_api): 77 | """Test that set_config properly handles errors.""" 78 | # Mock the _get_api_client method to return our mock API 79 | client._get_api_client = MagicMock(return_value=mock_api) 80 | 81 | # Configure mock to raise an exception on set_config 82 | mock_api.set_config.side_effect = Exception("Test error") 83 | 84 | # Call set_config 85 | test_config = {"ports": [{"name": "port1", "location": "localhost:5555"}]} 86 | response = await client.set_config(config=test_config, target="localhost") 87 | 88 | # Verify the response includes the error 89 | assert response.status == "error" 90 | assert "error" in response.config 91 | assert "Test error" in response.config["error"] 92 | 93 | # Verify that get_config was not called after the error 94 | mock_api.get_config.assert_not_called() 95 | 96 | 97 | @pytest.mark.asyncio 98 | async def test_set_config_serialization_error(client, mock_api): 99 | """Test that set_config handles serialization errors.""" 100 | # Mock the _get_api_client method to return our mock API 101 | client._get_api_client = MagicMock(return_value=mock_api) 102 | 103 | # Configure mock config to raise an exception on serialize 104 | mock_config = MagicMock() 105 | mock_config.serialize.side_effect = Exception("Serialization error") 106 | mock_config.DICT = "dict" 107 | mock_api.get_config.return_value = mock_config 108 | 109 | # Call set_config 110 | test_config = {"ports": [{"name": "port1", "location": "localhost:5555"}]} 111 | response = await client.set_config(config=test_config, target="localhost") 112 | 113 | # Verify the API interactions 114 | mock_api.set_config.assert_called_once() # Was set_config called? 115 | mock_api.get_config.assert_called_once() # Was get_config called after set_config? 
116 | 
117 |     # Verify the response status and error message
118 |     assert response.status == "error"
119 |     assert "error" in response.config
120 |     assert "Serialization error" in response.config["error"]
121 | 
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["hatchling"]
3 | build-backend = "hatchling.build"
4 | 
5 | [project]
6 | name = "otg-mcp"
7 | version = "0.1.3a0"
8 | readme = "README.md"
9 | description = "Open Traffic Generator - Model Context Protocol"
10 | authors = [
11 |     {name = "OTG MCP Contributors"}
12 | ]
13 | dependencies = [
14 |     "fastmcp>=2.2.5",
15 |     "modelcontextprotocol>=0.1.0",
16 |     "pydantic>=2.0.0",
17 |     "pydantic-settings>=2.0.0",
18 |     "httpx>=0.24.1",
19 |     "tenacity>=8.2.2",
20 |     "snappi>=1.28.2",
21 |     "mcp[cli]>=1.6.0",
22 | ]
23 | requires-python = ">=3.11,<3.13"
24 | 
25 | [project.optional-dependencies]
26 | dev = [
27 |     "openapi-python-client>=0.14.0",
28 |     "black>=25.1.0",
29 |     "ruff>=0.11.7",
30 |     "mypy>=1.0.0",
31 |     "pip>=25.1.1",
32 |     "types-pyyaml>=6.0.12.20250516",
33 | ]
34 | 
35 | test = [
36 |     "pytest>=7.0.0",
37 |     "pytest-cov>=4.0.0",
38 |     "pytest-asyncio>=0.21.0",
39 | ]
40 | 
41 | all = [
42 |     "otg-mcp[dev,test]"
43 | ]
44 | 
45 | [tool.hatch.envs.default]
46 | # This controls what version of Python you want to be the default
47 | # when running any scripts or tools to do things like debug test failures
48 | # or do general development. Its lockfile is ./requirements.txt
49 | python = "3.12"
50 | features = ["dev"]
51 | dependencies = []
52 | 
53 | [tool.hatch.envs.test]
54 | dependencies = [
55 |     "mypy>=1.0.0",
56 |     "snappi>=1.28.2",
57 | ]
58 | features = ["test"]
59 | template = "default"
60 | 
61 | [tool.hatch.envs.hatch-test]
62 | dependencies = []
63 | features = ["test"]
64 | template = "default"
65 | 
66 | [tool.pytest.ini_options]
67 | addopts = "--durations=5 --color=yes --cov --cov-report=term --cov-report=html --cov-report=xml"
68 | testpaths = [ "tests" ]
69 | asyncio_mode = "strict"
70 | 
71 | [tool.coverage.run]
72 | source_pkgs = ["otg_mcp"]
73 | branch = true
74 | parallel = true
75 | 
76 | [tool.coverage.paths]
77 | "otg_mcp" = ["src/otg_mcp", "**/site-packages/otg_mcp"]
78 | 
79 | [tool.coverage.report]
80 | exclude_lines = [
81 |     "no cov",
82 |     "if __name__ == .__main__.:",
83 |     "if TYPE_CHECKING:",
84 | ]
85 | show_missing = true
86 | # fail_under = 99
87 | 
88 | [tool.coverage.xml]
89 | output = "coverage/coverage.xml"
90 | 
91 | [tool.coverage.html]
92 | directory = "coverage/"
93 | 
94 | [tool.ruff.lint]
95 | isort.known-first-party = ["otg_mcp"]
96 | exclude = [ "./build", ".hatch", "private" ]
97 | 
98 | [tool.ruff.lint.per-file-ignores]
99 | # Tests can use magic values, assertions, and relative imports
100 | "tests/**/*" = ["PLR2004", "S101", "TID252"]
101 | 
102 | [tool.hatch.build]
103 | directory = "./build"
104 | 
105 | [tool.hatch.env]
106 | requires = [ "hatch-pip-compile" ]
107 | 
108 | [tool.hatch.envs.default.scripts]
109 | # These are scripts you can run using `hatch run <script-name>`
110 | typing = [
111 |     "mkdir -p .mypy_cache",
112 |     # "mypy --install-types --non-interactive src/otg_mcp tests"
113 | ]
114 | 
115 | # This command is for updating all your lock files across all environments
116 | update = [ "hatch-pip-compile --upgrade --all" ]
117 | 
118 | release = [
119 |     "typing",
120 |     "hatch run test:pytest --cov=otg_mcp --cov-report=term --cov-report=html --cov-report=xml 
tests/", 121 | ] 122 | 123 | [[tool.hatch.envs.hatch-test.matrix]] 124 | # This defines multiple variables you can generate combinations 125 | # to test underneath different environments. A separate environment and 126 | # lock file will be created for every combination located in `./requirements/` 127 | python = ["3.11", "3.12"] 128 | 129 | [tool.hatch.metadata] 130 | allow-direct-references = true 131 | 132 | [tool.hatch.build.targets.wheel] 133 | packages = ["src/otg_mcp"] 134 | 135 | [tool.hatch.envs.hatch-test.scripts] 136 | test = "pytest {args}" 137 | cov-report = "coverage report" 138 | 139 | ## This environment is used solely to generate a lock file on hatch, 140 | # and hatch-pip-compile that can be automatically updated 141 | [tool.hatch.envs.build-tools] 142 | # This version states what version your build tools build with. To change it, 143 | # you will need to: 144 | # * Remove the `requirements/requirements-build-tools.txt` file 145 | # * Run `hatch-pip-compile --upgrade --all` to generate a new lock file for the environment 146 | python = "3.12" 147 | detached = true 148 | skip-install = true 149 | features = ["dev"] 150 | dependencies = [ 151 | "hatch-pip-compile", 152 | ] 153 | 154 | # Custom repository and package locking plugin 155 | [tool.hatch.env.collectors.custom] 156 | path = ".hatch/hatch_plugin.py" 157 | 158 | 159 | [[tool.uv.index]] 160 | name = "pypi" 161 | url = "https://pypi.org/simple/" 162 | publish-url = "https://pypi.org/legacy/" 163 | explicit = true 164 | -------------------------------------------------------------------------------- /tests/schema/test_schema_complete_coverage.py: -------------------------------------------------------------------------------- 1 | """ 2 | Additional tests to achieve 100% coverage of the schema registry. 
3 | """ 4 | 5 | from unittest.mock import patch 6 | 7 | import pytest 8 | 9 | from otg_mcp.schema_registry import SchemaRegistry 10 | 11 | 12 | def test_schema_registry_with_invalid_builtin_dir(): 13 | """Test SchemaRegistry initialization with invalid built-in directory.""" 14 | registry = SchemaRegistry() 15 | 16 | # Set the built-in schemas directory to a non-existent path 17 | registry._builtin_schemas_dir = "/non/existent/builtin/schemas" 18 | registry._available_schemas = None # Force refresh 19 | 20 | # Mock os.path.exists to simulate non-existent built-in directory 21 | with patch('os.path.exists', side_effect=lambda path: path != registry._builtin_schemas_dir): 22 | # Should gracefully handle non-existent built-in directory 23 | schemas = registry.get_available_schemas() 24 | assert isinstance(schemas, list) 25 | assert len(schemas) == 0 26 | 27 | 28 | def test_get_parsed_versions_with_mixed_formats(): 29 | """Test _get_parsed_versions with a mix of valid and invalid version formats.""" 30 | registry = SchemaRegistry() 31 | 32 | # Mix of valid and invalid versions 33 | versions = ["1_30_0", "invalid", "2_0_0", "not_a_version"] 34 | 35 | # Mock _parse_version to return appropriate tuples 36 | def mock_parse_version(version): 37 | if version == "1_30_0": 38 | return (1, 30, 0) 39 | elif version == "2_0_0": 40 | return (2, 0, 0) 41 | else: 42 | return tuple() 43 | 44 | with patch.object(registry, "_parse_version", side_effect=mock_parse_version): 45 | result = registry._get_parsed_versions(versions) 46 | 47 | # Should only include the valid versions 48 | assert len(result) == 2 49 | assert ("1_30_0", (1, 30, 0)) in result 50 | assert ("2_0_0", (2, 0, 0)) in result 51 | 52 | 53 | def test_get_schema_with_keyerror(): 54 | """Test error handling in get_schema when KeyError occurs.""" 55 | registry = SchemaRegistry() 56 | 57 | # Create a mock schema without the necessary structure 58 | mock_schema = {"components": {}} # Missing the "schemas" key 59 | 60 | # Mock the registry to return our test schema 61 | with patch.object(registry, "schema_exists", return_value=True): 62 | with patch.dict(registry.schemas, {"1_30_0": mock_schema}): 63 | # Should raise ValueError with appropriate message 64 | with pytest.raises(ValueError, match="Error accessing components.schemas"): 65 | registry.get_schema("1_30_0", "components.schemas.TestSchema") 66 | 67 | 68 | def test_find_closest_schema_version_with_unparseable_version(): 69 | """Test find_closest_schema_version with unparseable requested version.""" 70 | registry = SchemaRegistry() 71 | 72 | # Mock available_schemas 73 | available_versions = ["1_30_0", "1_28_0"] 74 | 75 | with patch.object(registry, "get_available_schemas", return_value=available_versions): 76 | # Mock _parse_version to return empty tuple for the requested version 77 | with patch.object(registry, "_parse_version", return_value=tuple()): 78 | # Should fall back to latest version 79 | with patch.object(registry, "get_latest_schema_version", return_value="1_30_0"): 80 | result = registry.find_closest_schema_version("unparseable") 81 | assert result == "1_30_0" 82 | 83 | 84 | def test_find_closest_version_with_short_version_tuple(): 85 | """Test find_closest_schema_version with short version tuple (major only).""" 86 | registry = SchemaRegistry() 87 | 88 | # Available versions 89 | available_versions = ["1_30_0", "1_28_0", "2_0_0"] 90 | 91 | # Mock getting available schemas 92 | with patch.object(registry, "get_available_schemas", return_value=available_versions): 93 | # Mock parsing 
to return short tuples 94 | def mock_parse(version): 95 | if version == "1.0": 96 | return (1,) # Major only 97 | elif version == "1_30_0": 98 | return (1, 30, 0) 99 | elif version == "1_28_0": 100 | return (1, 28, 0) 101 | elif version == "2_0_0": 102 | return (2, 0, 0) 103 | return tuple() 104 | 105 | with patch.object(registry, "_parse_version", side_effect=mock_parse): 106 | # Should pad the requested version with zeros 107 | result = registry.find_closest_schema_version("1.0") 108 | # Should find 1_30_0 as it's the latest with major version 1 109 | assert result == "1_30_0" 110 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | """ 2 | Shared fixtures for tests. 3 | """ 4 | 5 | import os 6 | import sys 7 | from pathlib import Path 8 | from unittest import mock 9 | 10 | import pytest 11 | import yaml 12 | 13 | # Add src to path so tests can import modules properly 14 | sys.path.insert(0, str(Path(__file__).parent.parent / "src")) 15 | 16 | from otg_mcp.client import OtgClient 17 | from otg_mcp.config import Config, PortConfig, TargetConfig 18 | 19 | 20 | @pytest.fixture 21 | def api_schema(): 22 | """ 23 | Load the test API schema from fixtures directory. 24 | 25 | Returns: 26 | dict: The parsed OpenAPI schema 27 | """ 28 | schema_path = os.path.join(os.path.dirname(__file__), "fixtures", "apiSchema.yml") 29 | with open(schema_path, "r") as f: 30 | return yaml.safe_load(f) 31 | 32 | 33 | @pytest.fixture 34 | def mock_schema_registry(api_schema, monkeypatch): 35 | """ 36 | Create a mock schema registry that uses the test apiSchema.yml. 37 | 38 | Args: 39 | api_schema: The API schema fixture 40 | monkeypatch: PyTest monkeypatch fixture 41 | 42 | Returns: 43 | Mock schema registry that returns the test schema 44 | """ 45 | from otg_mcp.schema_registry import SchemaRegistry 46 | 47 | # Create a class that inherits from the original but overrides key methods 48 | class TestSchemaRegistry(SchemaRegistry): 49 | def __init__(self): 50 | super().__init__() 51 | self.schemas = {"1_30_0": api_schema} 52 | 53 | def get_available_schemas(self): 54 | return ["1_30_0"] 55 | 56 | def schema_exists(self, version): 57 | return self._normalize_version(version) == "1_30_0" 58 | 59 | # Create an instance of our test registry 60 | test_registry = TestSchemaRegistry() 61 | 62 | # Patch the get_schema_registry function to return our test registry 63 | from otg_mcp import schema_registry 64 | monkeypatch.setattr(schema_registry, "get_schema_registry", lambda: test_registry) 65 | 66 | return test_registry 67 | 68 | 69 | @pytest.fixture 70 | def mock_api_wrapper(): 71 | """Mock OtgApiWrapper for testing.""" 72 | mock_wrapper = mock.MagicMock() 73 | # Set up necessary mock methods 74 | mock_config = mock.MagicMock() 75 | mock_wrapper.get_config.return_value = mock_config 76 | mock_config.serialize.return_value = {"ports": [], "flows": []} 77 | 78 | # Add metrics_request method 79 | mock_metrics_request = mock.MagicMock() 80 | mock_wrapper.metrics_request = mock.MagicMock(return_value=mock_metrics_request) 81 | 82 | mock_metrics = mock.MagicMock() 83 | mock_metrics.serialize.return_value = {"port_metrics": [], "flow_metrics": []} 84 | mock_wrapper.get_metrics = mock.MagicMock(return_value=mock_metrics) 85 | 86 | # Set up start_traffic and stop_traffic methods 87 | mock_wrapper.start_traffic = mock.MagicMock() 88 | mock_wrapper.stop_traffic = mock.MagicMock(return_value=True) 89 | 
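    # Annotation (not in the original fixture): this MagicMock mirrors the
    # snappi-style surface the client code drives (get_config,
    # metrics_request/get_metrics, start_traffic/stop_traffic,
    # start_capture/stop_capture) so tests can assert on call patterns without
    # a live traffic generator; the serialize() return values are minimal
    # stand-ins rather than real OTG payloads.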
90 | # Set up capture methods 91 | mock_wrapper.start_capture = mock.MagicMock() 92 | mock_capture_response = mock.MagicMock() 93 | mock_capture_response.serialize.return_value = {"status": "stopped", "data": {}} 94 | mock_wrapper.stop_capture = mock.MagicMock(return_value=mock_capture_response) 95 | 96 | return mock_wrapper 97 | 98 | 99 | @pytest.fixture 100 | def test_config(): 101 | """Create a test configuration instance.""" 102 | return Config() 103 | 104 | 105 | @pytest.fixture 106 | def router(test_config): 107 | """Create a test router.""" 108 | return OtgClient(config=test_config) 109 | 110 | 111 | @pytest.fixture 112 | def example_target_config(test_config): 113 | """Create example target configuration.""" 114 | # Add example target 115 | test_config.targets.targets["test-target.example.com:8443"] = TargetConfig( 116 | ports={ 117 | "p1": PortConfig(interface="enp0s31f6", location="enp0s31f6", name="p1"), 118 | "p2": PortConfig( 119 | interface="enp0s31f6.1", location="enp0s31f6.1", name="p2" 120 | ), 121 | } 122 | ) 123 | 124 | # Return the config for use in tests 125 | return test_config.targets 126 | 127 | 128 | # Define custom marker for integration tests 129 | def pytest_configure(config): 130 | """Configure pytest with custom markers.""" 131 | config.addinivalue_line( 132 | "markers", 133 | "integration: mark test as requiring integration with real traffic generators", 134 | ) 135 | 136 | 137 | # Skip integration tests unless environment variable is set 138 | def pytest_runtest_setup(item): 139 | """Skip integration tests unless enabled via environment variable.""" 140 | if "integration" in item.keywords and not os.environ.get("RUN_INTEGRATION_TESTS"): 141 | pytest.skip("Integration test skipped. Set RUN_INTEGRATION_TESTS=1 to run") 142 | -------------------------------------------------------------------------------- /tests/schema/test_schema_input_handling.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for schema retrieval with different input formats. 3 | 4 | This module contains tests that verify the schema retrieval logic can handle 5 | various formats of schema names (simple vs fully qualified paths). 
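For illustration (an annotation mirroring the cases below): a bare name such
as "Flow" and a fully qualified path such as "components.schemas.Flow" are
both expected to resolve to the same schema component.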
6 | """ 7 | 8 | import pytest 9 | from unittest.mock import AsyncMock, MagicMock 10 | 11 | from otg_mcp.client import OtgClient 12 | 13 | 14 | @pytest.fixture 15 | def mock_schema_registry(): 16 | """Create a mock schema registry for testing.""" 17 | mock_registry = MagicMock() 18 | # Setup schema content for different schema names 19 | mock_registry.get_schema.side_effect = lambda version, component=None: { 20 | "description": f"Mock schema for {component or 'all'}" 21 | } 22 | return mock_registry 23 | 24 | 25 | @pytest.fixture 26 | def client(mock_schema_registry): 27 | """Create a client instance for testing with a mock schema registry.""" 28 | from otg_mcp.config import Config 29 | mock_config = Config() 30 | return OtgClient(config=mock_config, schema_registry=mock_schema_registry) 31 | 32 | 33 | @pytest.mark.asyncio 34 | async def test_get_schemas_simple_names(client, mock_schema_registry): 35 | """Test retrieving schemas using simple names like 'Flow', 'Port'.""" 36 | # Setup mocks with AsyncMock 37 | client._get_target_config = AsyncMock(return_value={"apiVersion": "1.30.0"}) 38 | 39 | # Call the method with simple names 40 | result = await client.get_schemas_for_target("test-target", ["Flow", "Port", "Config"]) 41 | 42 | # Verify the result contains the requested schemas 43 | assert "Flow" in result 44 | assert "Port" in result 45 | assert "Config" in result 46 | 47 | # Verify the schemas are valid objects 48 | assert isinstance(result["Flow"], dict) 49 | assert isinstance(result["Port"], dict) 50 | assert isinstance(result["Config"], dict) 51 | 52 | # Verify description exists in schemas 53 | assert "description" in result["Flow"] 54 | assert "description" in result["Port"] 55 | assert "description" in result["Config"] 56 | 57 | 58 | @pytest.mark.asyncio 59 | async def test_get_schemas_qualified_names(client, mock_schema_registry): 60 | """Test retrieving schemas using fully qualified paths.""" 61 | # Setup mocks with AsyncMock 62 | client._get_target_config = AsyncMock(return_value={"apiVersion": "1.30.0"}) 63 | 64 | # Call the method with qualified names 65 | result = await client.get_schemas_for_target( 66 | "test-target", 67 | ["components.schemas.Flow", "components.schemas.Port", "components.schemas.Config"] 68 | ) 69 | 70 | # Verify the result contains the requested schemas 71 | assert "components.schemas.Flow" in result 72 | assert "components.schemas.Port" in result 73 | assert "components.schemas.Config" in result 74 | 75 | # Verify the schemas are valid objects 76 | assert isinstance(result["components.schemas.Flow"], dict) 77 | assert isinstance(result["components.schemas.Port"], dict) 78 | assert isinstance(result["components.schemas.Config"], dict) 79 | 80 | 81 | @pytest.mark.asyncio 82 | async def test_get_schemas_mixed_format(client, mock_schema_registry): 83 | """Test retrieving schemas using both simple and fully qualified names.""" 84 | # Setup mocks with AsyncMock 85 | client._get_target_config = AsyncMock(return_value={"apiVersion": "1.30.0"}) 86 | 87 | # Call the method with mixed format names 88 | result = await client.get_schemas_for_target( 89 | "test-target", 90 | ["Flow", "components.schemas.Port", "Config"] 91 | ) 92 | 93 | # Verify the result contains all requested schemas 94 | assert "Flow" in result 95 | assert "components.schemas.Port" in result 96 | assert "Config" in result 97 | 98 | # Verify the schemas are valid objects 99 | assert isinstance(result["Flow"], dict) 100 | assert isinstance(result["components.schemas.Port"], dict) 101 | assert 
isinstance(result["Config"], dict) 102 | 103 | 104 | @pytest.mark.asyncio 105 | async def test_schema_not_found_handling(client, mock_schema_registry): 106 | """Test handling of non-existent schemas.""" 107 | # Setup mocks with AsyncMock 108 | client._get_target_config = AsyncMock(return_value={"apiVersion": "1.30.0"}) 109 | 110 | # Configure mock to raise an exception for a non-existent schema 111 | def mock_get_schema(version, component=None): 112 | if component == "components.schemas.NonExistentSchema" or component == "NonExistentSchema": 113 | raise ValueError("Schema not found") 114 | return {"description": f"Mock schema for {component or 'all'}"} 115 | 116 | mock_schema_registry.get_schema.side_effect = mock_get_schema 117 | 118 | # Call the method with a non-existent schema 119 | result = await client.get_schemas_for_target("test-target", ["NonExistentSchema"]) 120 | 121 | # Verify the result contains an error for the non-existent schema 122 | assert "NonExistentSchema" in result 123 | assert "error" in result["NonExistentSchema"] 124 | assert "not found" in result["NonExistentSchema"]["error"] 125 | 126 | 127 | if __name__ == "__main__": 128 | pytest.main(["-v", __file__]) 129 | -------------------------------------------------------------------------------- /tests/schema/test_find_closest_version.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for the find_closest_schema_version functionality. 3 | 4 | This file contains tests specifically for the version matching algorithm 5 | that finds the closest matching schema version when an exact match isn't available. 6 | """ 7 | 8 | import pytest 9 | from unittest.mock import patch 10 | 11 | from otg_mcp.schema_registry import SchemaRegistry 12 | 13 | 14 | class TestFindClosestVersion: 15 | """Tests for the find_closest_schema_version method.""" 16 | 17 | def setup_method(self): 18 | """Set up test environment.""" 19 | self.registry = SchemaRegistry() 20 | 21 | def test_exact_match(self): 22 | """Test that an exact version match is returned when available.""" 23 | with patch.object(self.registry, "get_available_schemas", 24 | return_value=["1_28_0", "1_30_0"]): 25 | with patch.object(self.registry, "schema_exists", return_value=True): 26 | result = self.registry.find_closest_schema_version("1_28_0") 27 | assert result == "1_28_0" 28 | 29 | def test_same_major_minor_lower_patch(self): 30 | """Test finding a schema with same major.minor but lower patch version.""" 31 | with patch.object(self.registry, "get_available_schemas", 32 | return_value=["1_28_0", "1_30_0"]): 33 | with patch.object(self.registry, "schema_exists", return_value=False): 34 | result = self.registry.find_closest_schema_version("1_28_2") 35 | assert result == "1_28_0" 36 | 37 | def test_same_major_different_minor(self): 38 | """Test finding a schema with same major but different minor version.""" 39 | with patch.object(self.registry, "get_available_schemas", 40 | return_value=["1_28_0", "1_30_0"]): 41 | with patch.object(self.registry, "schema_exists", return_value=False): 42 | result = self.registry.find_closest_schema_version("1_31_0") 43 | assert result == "1_30_0" 44 | 45 | def test_different_major_version(self): 46 | """Test finding a schema when major version doesn't match.""" 47 | with patch.object(self.registry, "get_available_schemas", 48 | return_value=["1_28_0", "1_30_0"]): 49 | with patch.object(self.registry, "schema_exists", return_value=False): 50 | result = 
self.registry.find_closest_schema_version("2_0_0") 51 | assert result == "1_30_0" # Should use latest version 52 | 53 | def test_invalid_version_format(self): 54 | """Test handling of invalid version format.""" 55 | with patch.object(self.registry, "get_available_schemas", 56 | return_value=["1_28_0", "1_30_0"]): 57 | with patch.object(self.registry, "schema_exists", return_value=False): 58 | # With invalid format, should fall back to latest version 59 | result = self.registry.find_closest_schema_version("invalid_version") 60 | assert result == "1_30_0" 61 | 62 | def test_no_schemas_available(self): 63 | """Test error when no schemas are available.""" 64 | with patch.object(self.registry, "get_available_schemas", return_value=[]): 65 | with pytest.raises(ValueError, match="No schema versions available"): 66 | self.registry.find_closest_schema_version("1_28_0") 67 | 68 | def test_empty_requested_version(self): 69 | """Test handling of empty requested version.""" 70 | with patch.object(self.registry, "get_available_schemas", 71 | return_value=["1_28_0", "1_30_0"]): 72 | with patch.object(self.registry, "get_latest_schema_version", 73 | return_value="1_30_0"): 74 | result = self.registry.find_closest_schema_version("") 75 | assert result == "1_30_0" # Should use latest version 76 | 77 | def test_partial_version(self): 78 | """Test handling of partial version (only major or major.minor).""" 79 | with patch.object(self.registry, "get_available_schemas", 80 | return_value=["1_28_0", "1_30_0"]): 81 | # Test with just major version 82 | result = self.registry.find_closest_schema_version("1") 83 | assert result == "1_30_0" # Should use latest matching major 84 | 85 | # Test with major.minor 86 | result = self.registry.find_closest_schema_version("1.28") 87 | assert result == "1_28_0" # Should match major.minor 88 | 89 | def test_multiple_match_options(self): 90 | """Test with multiple possible matches.""" 91 | with patch.object(self.registry, "get_available_schemas", 92 | return_value=["1_28_0", "1_28_1", "1_30_0"]): 93 | # Should match 1_28_1 as it's the highest with same major.minor 94 | result = self.registry.find_closest_schema_version("1_28_2") 95 | assert result == "1_28_1" 96 | 97 | # Should match 1_28_1 as it's the highest with same major.minor 98 | result = self.registry.find_closest_schema_version("1.28.5") 99 | assert result == "1_28_1" 100 | 101 | 102 | if __name__ == "__main__": 103 | pytest.main(["-v", __file__]) 104 | -------------------------------------------------------------------------------- /tests/test_health.py: -------------------------------------------------------------------------------- 1 | """Tests for the health check functionality in the OTG client.""" 2 | 3 | import logging 4 | import pytest 5 | from unittest.mock import MagicMock, patch 6 | 7 | from otg_mcp.client import OtgClient 8 | from otg_mcp.models import CapabilitiesVersionResponse 9 | 10 | logger = logging.getLogger(__name__) 11 | 12 | 13 | @pytest.fixture 14 | def mock_config(): 15 | """Create a mocked config.""" 16 | # Create a mock TargetsConfig to hold the targets dict 17 | targets_config = MagicMock() 18 | targets_config.targets = { 19 | "target1": MagicMock(), 20 | "target2": MagicMock(), 21 | } 22 | 23 | # Create the main Config mock with the targets attribute 24 | config = MagicMock() 25 | config.targets = targets_config 26 | 27 | return config 28 | 29 | 30 | @pytest.fixture 31 | def client(mock_config): 32 | """Create client with mocked config.""" 33 | return OtgClient(mock_config) 34 | 35 | 36 | 
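# Usage sketch (an illustration, not part of the original suite). The
# behavior pinned down by the tests below is, roughly:
#
#     client = OtgClient(config)
#     report = await client.health()            # probe every configured target
#     report = await client.health("target1")   # or a single target
#
# report.status is "success" only when every probed target answers
# get_target_version; report.targets maps each target name to a record
# exposing .healthy and, on failure, .error.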
@pytest.mark.asyncio 37 | async def test_health_all_healthy(client): 38 | """Test health check when all targets are healthy.""" 39 | # Arrange 40 | mock_version_info = CapabilitiesVersionResponse( 41 | api_spec_version="1.*", sdk_version="1.28.2", app_version="1.28.0" 42 | ) 43 | 44 | # Mock the get_available_targets method to avoid it making internal get_target_version calls 45 | mock_targets = {"target1": {}, "target2": {}} 46 | 47 | with patch.object( 48 | client, "get_available_targets", return_value=mock_targets 49 | ), patch.object( 50 | client, "get_target_version", return_value=mock_version_info 51 | ) as mock_get_version: 52 | # Act 53 | result = await client.health() 54 | 55 | # Assert 56 | assert result.status == "success" 57 | assert len(result.targets) == 2 58 | assert all(target_info.healthy for target_info in result.targets.values()) 59 | assert mock_get_version.call_count == 2 60 | 61 | 62 | @pytest.mark.asyncio 63 | async def test_health_one_unhealthy(client): 64 | """Test health check when one target is unhealthy.""" 65 | # Arrange 66 | mock_version_info = CapabilitiesVersionResponse( 67 | api_spec_version="1.*", sdk_version="1.28.2", app_version="1.28.0" 68 | ) 69 | 70 | async def mock_get_target_version(target): 71 | if target == "target1": 72 | return mock_version_info 73 | else: 74 | raise Exception("Connection timeout") 75 | 76 | with patch.object( 77 | client, "get_target_version", side_effect=mock_get_target_version 78 | ): 79 | # Act 80 | result = await client.health() 81 | 82 | # Assert 83 | assert result.status == "error" 84 | assert len(result.targets) == 2 85 | assert result.targets["target1"].healthy is True 86 | assert result.targets["target2"].healthy is False 87 | assert "Connection timeout" in result.targets["target2"].error 88 | 89 | 90 | @pytest.mark.asyncio 91 | async def test_health_all_unhealthy(client): 92 | """Test health check when all targets are unhealthy.""" 93 | # Arrange 94 | with patch.object( 95 | client, "get_target_version", side_effect=Exception("Connection timeout") 96 | ): 97 | # Act 98 | result = await client.health() 99 | 100 | # Assert 101 | assert result.status == "error" 102 | assert len(result.targets) == 2 103 | assert all(not target_info.healthy for target_info in result.targets.values()) 104 | 105 | 106 | @pytest.mark.asyncio 107 | async def test_health_single_target_healthy(client): 108 | """Test health check for a single target that is healthy.""" 109 | # Arrange 110 | mock_version_info = CapabilitiesVersionResponse( 111 | api_spec_version="1.*", sdk_version="1.28.2", app_version="1.28.0" 112 | ) 113 | 114 | with patch.object( 115 | client, "get_target_version", return_value=mock_version_info 116 | ): 117 | # Act 118 | result = await client.health("target1") 119 | 120 | # Assert 121 | assert result.status == "success" 122 | assert len(result.targets) == 1 123 | assert "target1" in result.targets 124 | assert result.targets["target1"].healthy is True 125 | 126 | 127 | @pytest.mark.asyncio 128 | async def test_health_single_target_unhealthy(client): 129 | """Test health check for a single target that is unhealthy.""" 130 | # Arrange 131 | with patch.object( 132 | client, "get_target_version", side_effect=Exception("Connection timeout") 133 | ): 134 | # Act 135 | result = await client.health("target1") 136 | 137 | # Assert 138 | assert result.status == "error" 139 | assert len(result.targets) == 1 140 | assert "target1" in result.targets 141 | assert result.targets["target1"].healthy is False 142 | 143 | 144 | 
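# A minimal sketch of aggregation logic consistent with these tests (an
# assumption about shape only; the real implementation lives in
# otg_mcp.client and may differ):
#
#     names = [target] if target else list(await self.get_available_targets())
#     targets = {}
#     for name in names:
#         try:
#             await self.get_target_version(name)
#             targets[name] = TargetHealthInfo(healthy=True)
#         except Exception as exc:
#             targets[name] = TargetHealthInfo(healthy=False, error=str(exc))
#     status = "success" if targets and all(t.healthy for t in targets.values()) else "error"
#
# plus an outer try/except that yields status="error" and an empty targets
# mapping when target discovery itself fails, as the test below requires.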
@pytest.mark.asyncio 145 | async def test_health_exception(client): 146 | """Test health check when an unexpected exception occurs.""" 147 | # Arrange 148 | with patch.object( 149 | client, "get_available_targets", side_effect=Exception("Unexpected error") 150 | ): 151 | # Act 152 | result = await client.health() 153 | 154 | # Assert 155 | assert result.status == "error" 156 | assert len(result.targets) == 0 157 | -------------------------------------------------------------------------------- /docs/github-flow.md: -------------------------------------------------------------------------------- 1 | # GitHub Flow Branching Strategy 2 | 3 | ## Overview 4 | 5 | OTG-MCP follows the GitHub Flow branching strategy - a lightweight, branch-based workflow that supports teams and projects where deployments are made regularly. 6 | 7 | ## Our Branching Strategy 8 | 9 | We maintain a simple branching model: 10 | 11 | 1. **Main branch (`main`)**: 12 | - Always deployable 13 | - Protected branch that requires pull request reviews 14 | - CI/CD automatically runs tests on all changes 15 | - Source of all deployments 16 | 17 | 2. **Feature branches**: 18 | - Created from `main` 19 | - Named descriptively (e.g., `feature/add-traffic-metrics`, `fix/connection-timeout`) 20 | - Used for new features, fixes, or any changes 21 | - Merged back to `main` via pull requests 22 | - Deleted after merge 23 | 24 | ## Workflow 25 | 26 | ```mermaid 27 | graph TD 28 | A[main branch] -->|Create feature branch| B[feature/xyz] 29 | B -->|Make changes| C[Commit changes] 30 | C -->|Push and create PR| D[Pull Request] 31 | D -->|Review, CI/Tests| E{Approved?} 32 | E -->|Yes| F[Merge to main] 33 | F -->|Delete branch| A 34 | E -->|No| C 35 | ``` 36 | 37 | ### Development Process 38 | 39 | 1. **Create a feature branch from `main`** 40 | ```bash 41 | git checkout main 42 | git pull origin main 43 | git checkout -b feature/your-feature-name 44 | ``` 45 | 46 | 2. **Make changes and commit** 47 | ```bash 48 | # Make changes 49 | git add . 50 | git commit -m "Descriptive commit message" 51 | ``` 52 | 53 | 3. **Push changes and create a Pull Request** 54 | ```bash 55 | git push -u origin feature/your-feature-name 56 | # Create PR via GitHub interface 57 | ``` 58 | 59 | 4. **Review Process** 60 | - CI automatically runs tests 61 | - Code review by teammates 62 | - Address feedback and make changes if needed 63 | 64 | 5. **Merge and Clean Up** 65 | - Merge the approved PR to `main` 66 | - Delete the feature branch 67 | - CI/CD deploys the changes automatically 68 | 69 | ## Release Process 70 | 71 | We use GitHub Releases to create official releases, which trigger our CI workflow to publish packages to PyPI. 72 | 73 | ### Creating a Release 74 | 75 | 1. **Ensure `main` branch is ready for release** 76 | - All tests should be passing 77 | - Documentation should be up-to-date 78 | - Version number should be updated in relevant files (e.g., `pyproject.toml`) 79 | 80 | 2. **Create and push a tag** 81 | ```bash 82 | # Create a tag using semantic versioning (vX.Y.Z) 83 | git checkout main 84 | git pull origin main 85 | git tag -a v1.2.3 -m "Release v1.2.3" 86 | git push origin v1.2.3 87 | ``` 88 | 89 | 3. 
**Create a GitHub Release** 90 | - Go to the GitHub repository 91 | - Navigate to "Releases" > "Create a new release" 92 | - Select the tag you just created 93 | - Add a title (e.g., "Release v1.2.3") 94 | - Write detailed release notes: 95 | - Major features and improvements 96 | - Bug fixes 97 | - Breaking changes (if any) 98 | - Migration instructions (if needed) 99 | - Click "Publish release" 100 | 101 | 4. **Automated Publication** 102 | - Our CI workflow automatically detects the new release 103 | - It builds the package for different platforms 104 | - It publishes the package to PyPI using trusted publishing 105 | - The workflow runs the `publish` job specifically when a release is published 106 | 107 | ### Release versioning 108 | 109 | We follow semantic versioning (SemVer) for release tags: 110 | 111 | - **Major version** (X.y.z): Incremented for incompatible API changes 112 | - **Minor version** (x.Y.z): Incremented for added functionality in a backward-compatible manner 113 | - **Patch version** (x.y.Z): Incremented for backward-compatible bug fixes 114 | 115 | ```mermaid 116 | graph TD 117 | A[main branch] -->|Tag v1.0.0| B[Release v1.0.0] 118 | A -->|Continue development| C[Feature updates] 119 | C -->|Tag v1.1.0| D[Release v1.1.0] 120 | C -->|Bug fix| E[Patch] 121 | E -->|Tag v1.1.1| F[Release v1.1.1] 122 | C -->|Breaking change| G[Major update] 123 | G -->|Tag v2.0.0| H[Release v2.0.0] 124 | ``` 125 | 126 | ### Pre-releases (optional) 127 | 128 | For significant changes, consider using pre-release tags: 129 | 130 | - Alpha releases: `v1.2.3-alpha.1` 131 | - Beta releases: `v1.2.3-beta.1` 132 | - Release candidates: `v1.2.3-rc.1` 133 | 134 | ```bash 135 | git tag -a v1.2.3-rc.1 -m "Release candidate 1 for v1.2.3" 136 | git push origin v1.2.3-rc.1 137 | ``` 138 | 139 | ## Benefits of GitHub Flow 140 | 141 | - **Simplicity**: Easy to understand and follow 142 | - **Continuous Delivery**: Main branch is always deployable 143 | - **Fast Feedback**: Quick review cycles and automated testing 144 | - **Reduced Overhead**: No need to manage multiple long-lived branches 145 | - **Focus on Features**: Each branch represents a discrete piece of work 146 | 147 | ## Additional Best Practices 148 | 149 | - Keep feature branches short-lived (days, not weeks) 150 | - Commit frequently with clear messages 151 | - Pull from `main` regularly to reduce merge conflicts 152 | - Write tests for new features before merging 153 | - Document significant changes 154 | 155 | GitHub Flow is particularly well-suited for our project as we focus on continuous integration, regular deployments, and maintaining a single production version. 156 | -------------------------------------------------------------------------------- /tests/schema/test_schema_final_coverage.py: -------------------------------------------------------------------------------- 1 | """ 2 | Final tests to achieve 100% coverage of the schema registry. 
3 | """ 4 | 5 | from unittest.mock import patch 6 | 7 | import pytest 8 | 9 | from otg_mcp.schema_registry import SchemaRegistry 10 | 11 | 12 | def test_get_schema_error_loading(): 13 | """Test error handling in get_schema when schema fails to load.""" 14 | registry = SchemaRegistry() 15 | 16 | # Mock schema_exists to return True but load_schema to fail 17 | with patch.object(registry, "schema_exists", return_value=True): 18 | # Force _load_schema_from_path to always fail for both custom and built-in 19 | with patch.object(registry, "_load_schema_from_path", return_value=False): 20 | # Should raise ValueError 21 | with pytest.raises(ValueError, match="Error loading schema"): 22 | registry.get_schema("1_30_0") 23 | 24 | 25 | def test_schema_component_not_found_in_navigation(): 26 | """Test error handling when a component in the navigation path is not found.""" 27 | registry = SchemaRegistry() 28 | 29 | # Create a mock schema with nested structure but missing a component 30 | mock_schema = { 31 | "components": { 32 | "schemas": { 33 | "Flow": {} # Missing the 'properties' component 34 | } 35 | } 36 | } 37 | 38 | # Mock the registry to return our test schema 39 | with patch.object(registry, "schema_exists", return_value=True): 40 | with patch.dict(registry.schemas, {"1_30_0": mock_schema}): 41 | # The actual implementation treats the whole path after components.schemas as the schema name 42 | with pytest.raises(ValueError, match="Schema Flow.properties.name not found"): 43 | registry.get_schema("1_30_0", "components.schemas.Flow.properties.name") 44 | 45 | 46 | def test_parse_version_with_shorter_requested_version(): 47 | """Test find_closest_schema_version with a shorter requested version.""" 48 | registry = SchemaRegistry() 49 | 50 | # Available versions 51 | available_versions = ["1_30_0", "1_28_0", "2_0_0"] 52 | 53 | # Mock getting available schemas 54 | with patch.object(registry, "get_available_schemas", return_value=available_versions): 55 | # Return valid parsed versions but with different lengths 56 | def mock_parse_version(version): 57 | if version == "1.30": 58 | return (1, 30) # Missing patch version 59 | elif version == "1_30_0": 60 | return (1, 30, 0) 61 | elif version == "1_28_0": 62 | return (1, 28, 0) 63 | return tuple() 64 | 65 | # Mock _get_parsed_versions to use our mock_parse_version 66 | def mock_get_parsed_versions(versions): 67 | parsed = [] 68 | for v in versions: 69 | parsed_v = mock_parse_version(v) 70 | if parsed_v: # Only include successfully parsed versions 71 | parsed.append((v, parsed_v)) 72 | return parsed 73 | 74 | with patch.object(registry, "_parse_version", side_effect=mock_parse_version): 75 | with patch.object(registry, "_get_parsed_versions", side_effect=mock_get_parsed_versions): 76 | # Should handle different version tuple lengths 77 | result = registry.find_closest_schema_version("1.30") 78 | assert result == "1_30_0" 79 | 80 | 81 | def test_custom_schema_path_loading_precedence(): 82 | """Test that custom schema paths take precedence over built-in paths.""" 83 | # Create a registry with a custom path 84 | registry = SchemaRegistry("/custom/path") 85 | 86 | # Set up paths for the registry 87 | 88 | # Set up the registry with our test paths 89 | registry._custom_schemas_dir = "/custom/path" 90 | registry._builtin_schemas_dir = "/builtin/path" 91 | 92 | # Mock functions to simulate both paths existing 93 | with patch('os.path.exists', return_value=True): 94 | # Mock schema loading to track which path is tried first 95 | called_paths = [] 96 | 97 | def 
track_schema_loading(path, version, source_type): 98 | called_paths.append((path, source_type)) 99 | # Always succeed 100 | registry.schemas[version] = {"test": "schema"} 101 | return True 102 | 103 | with patch.object(registry, "schema_exists", return_value=True): 104 | with patch.object(registry, "_load_schema_from_path", side_effect=track_schema_loading): 105 | # Get the schema - should try custom path first 106 | registry.get_schema("1_30_0") 107 | 108 | # Verify custom was tried first 109 | assert len(called_paths) >= 1 110 | assert called_paths[0][1] == "custom" # First call should be to custom path 111 | 112 | 113 | def test_find_schema_with_no_valid_versions(): 114 | """Test find_closest_schema_version when no versions can be parsed.""" 115 | registry = SchemaRegistry() 116 | 117 | # Mock available schemas with invalid versions 118 | with patch.object(registry, "get_available_schemas", return_value=["invalid", "not_a_version"]): 119 | # Mock _get_parsed_versions to return empty list (no valid parsed versions) 120 | with patch.object(registry, "_get_parsed_versions", return_value=[]): 121 | # Should raise ValueError because no valid versions are available 122 | with pytest.raises(ValueError, match="No valid schema versions available"): 123 | registry.find_closest_schema_version("1.0.0") 124 | -------------------------------------------------------------------------------- /tests/version/test_target_version_detection.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for automatic target API version detection and schema selection. 3 | 4 | These tests verify that the client correctly uses the actual API version 5 | reported by a target device when available, falling back to the latest 6 | available schema version when needed. 
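Selection order pinned down below (an annotation, not original text): probe
the target via get_target_version and take its sdk_version; if the schema
registry has a schema for that exact version, use it; otherwise fall back to
find_closest_schema_version; and if probing fails altogether, use
get_latest_schema_version.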
7 | """ 8 | 9 | import pytest 10 | from unittest.mock import AsyncMock, MagicMock 11 | 12 | from otg_mcp.client import OtgClient 13 | from otg_mcp.models import CapabilitiesVersionResponse 14 | 15 | 16 | @pytest.fixture 17 | def client(): 18 | """Create a client instance for testing.""" 19 | from otg_mcp.config import Config 20 | mock_config = Config() 21 | return OtgClient(config=mock_config) 22 | 23 | 24 | @pytest.mark.asyncio 25 | async def test_target_version_detection_uses_actual_version(client): 26 | """Test that client uses the actual target version when it has a matching schema.""" 27 | # Setup mocks 28 | # Mock get_available_targets to return a target with a detected version 29 | client.get_available_targets = AsyncMock(return_value={ 30 | "test-target": { 31 | "apiVersion": "1.30.0", # Previously detected version (from device) 32 | "available": True, 33 | "ports": {} 34 | } 35 | }) 36 | 37 | # Mock get_target_version to return a different version 38 | client.get_target_version = AsyncMock(return_value=CapabilitiesVersionResponse( 39 | api_spec_version="1.*", # API spec version 40 | sdk_version="1.28.2", # SDK version (now used for schema matching) 41 | app_version="1.0.0" 42 | )) 43 | 44 | # Mock schema_registry to indicate it has a schema for the actual version 45 | mock_registry = MagicMock() 46 | mock_registry.schema_exists.return_value = True # Schema exists for 1_28_2 47 | 48 | # Replace the client's schema_registry with our mock 49 | client.schema_registry = mock_registry 50 | 51 | # Call _get_target_config with our test setup 52 | target_config = await client._get_target_config("test-target") 53 | 54 | # Verify that the API version was updated to the actual version 55 | assert target_config is not None 56 | assert target_config["apiVersion"] == "1.28.2" # Should use actual version 57 | 58 | # Verify the schema registry was called correctly to check schema existence 59 | mock_registry.schema_exists.assert_called_with("1_28_2") 60 | 61 | 62 | @pytest.mark.asyncio 63 | async def test_target_version_detection_fallback_to_latest_version(client): 64 | """Test that client falls back to latest schema version when actual version has no schema.""" 65 | # Setup mocks 66 | # Mock get_available_targets to return a target with a basic config 67 | client.get_available_targets = AsyncMock(return_value={ 68 | "test-target": { 69 | "apiVersion": "unknown", # Initial placeholder 70 | "available": True, 71 | "ports": {} 72 | } 73 | }) 74 | 75 | # Mock get_target_version to return a version we don't have a schema for 76 | client.get_target_version = AsyncMock(return_value=CapabilitiesVersionResponse( 77 | api_spec_version="1.*", # API spec version 78 | sdk_version="1.28.2", # SDK version (now used for schema matching) 79 | app_version="1.0.0" 80 | )) 81 | 82 | # Mock schema_registry to indicate it does NOT have a schema for the actual version 83 | # but does have a latest version available 84 | mock_registry = MagicMock() 85 | mock_registry.schema_exists.return_value = False # No schema for 1_28_2 86 | mock_registry.find_closest_schema_version.return_value = "1_30_0" # Find closest schema version 87 | 88 | # Replace the client's schema_registry with our mock 89 | client.schema_registry = mock_registry 90 | 91 | # Call _get_target_config with our test setup 92 | target_config = await client._get_target_config("test-target") 93 | 94 | # Verify that the API version was updated to the closest matching schema version 95 | assert target_config is not None 96 | assert target_config["apiVersion"] == 
"1.30.0" # Should use closest matching version (1_30_0 → 1.30.0) 97 | 98 | # Verify the schema registry was called correctly to check schema existence 99 | mock_registry.schema_exists.assert_called_with("1_28_2") 100 | mock_registry.find_closest_schema_version.assert_called_with("1_28_2") 101 | 102 | 103 | @pytest.mark.asyncio 104 | async def test_target_version_detection_handles_exceptions(client): 105 | """Test that client handles exceptions when getting target version.""" 106 | # Setup mocks 107 | # Mock get_available_targets to return a target with minimal config 108 | client.get_available_targets = AsyncMock(return_value={ 109 | "test-target": { 110 | "available": True, 111 | "ports": {} 112 | } 113 | }) 114 | 115 | # Mock get_target_version to raise an exception 116 | client.get_target_version = AsyncMock(side_effect=Exception("Connection failed")) 117 | 118 | # Mock schema_registry to provide a latest version 119 | mock_registry = MagicMock() 120 | mock_registry.get_latest_schema_version.return_value = "1_30_0" 121 | 122 | # Replace the client's schema_registry with our mock 123 | client.schema_registry = mock_registry 124 | 125 | # Call _get_target_config - should not raise the exception and use the latest version 126 | target_config = await client._get_target_config("test-target") 127 | 128 | # Verify that the API version was set to the latest available version 129 | assert target_config is not None 130 | assert target_config["apiVersion"] == "1.30.0" # Should use latest version (1_30_0 → 1.30.0) 131 | mock_registry.get_latest_schema_version.assert_called_once() 132 | -------------------------------------------------------------------------------- /tests/schema/test_schema_registry.py: -------------------------------------------------------------------------------- 1 | """ 2 | Unit tests for the schema registry functionality. 
3 | """ 4 | 5 | import os 6 | import shutil 7 | import tempfile 8 | 9 | import pytest 10 | import yaml 11 | 12 | from otg_mcp.schema_registry import SchemaRegistry 13 | 14 | 15 | class TestSchemaRegistry: 16 | """Test case for the SchemaRegistry class.""" 17 | 18 | @pytest.fixture 19 | def mock_schemas_dir(self): 20 | """Create a temporary directory with mock schema files.""" 21 | # Create a temporary directory 22 | temp_dir = tempfile.mkdtemp() 23 | 24 | # Create schema version directories 25 | v1_dir = os.path.join(temp_dir, "1_30_0") 26 | v2_dir = os.path.join(temp_dir, "1_31_0") 27 | os.makedirs(v1_dir) 28 | os.makedirs(v2_dir) 29 | 30 | # Create mock schema files 31 | v1_schema = { 32 | "openapi": "3.0.0", 33 | "info": {"title": "Test Schema 1.30.0", "version": "1.30.0"}, 34 | "components": { 35 | "schemas": { 36 | "Flow": { 37 | "type": "object", 38 | "properties": { 39 | "name": {"type": "string"}, 40 | "tx_rx": {"type": "object"}, 41 | }, 42 | } 43 | } 44 | }, 45 | } 46 | 47 | v2_schema = { 48 | "openapi": "3.0.0", 49 | "info": {"title": "Test Schema 1.31.0", "version": "1.31.0"}, 50 | "components": { 51 | "schemas": { 52 | "Flow": { 53 | "type": "object", 54 | "properties": { 55 | "name": {"type": "string"}, 56 | "tx_rx": {"type": "object"}, 57 | "new_property": {"type": "string"}, 58 | }, 59 | } 60 | } 61 | }, 62 | } 63 | 64 | # Write schemas to files 65 | with open(os.path.join(v1_dir, "openapi.yaml"), "w") as f: 66 | yaml.dump(v1_schema, f) 67 | 68 | with open(os.path.join(v2_dir, "openapi.yaml"), "w") as f: 69 | yaml.dump(v2_schema, f) 70 | 71 | yield temp_dir 72 | 73 | # Cleanup the temporary directory 74 | shutil.rmtree(temp_dir) 75 | 76 | def test_available_schemas(self, mock_schemas_dir): 77 | """Test getting available schemas.""" 78 | # Create registry with mocked schemas directory 79 | registry = SchemaRegistry() 80 | registry._builtin_schemas_dir = mock_schemas_dir 81 | registry._available_schemas = None # Force refresh 82 | 83 | # Test available schemas 84 | available_schemas = registry.get_available_schemas() 85 | assert len(available_schemas) == 2 86 | assert "1_30_0" in available_schemas 87 | assert "1_31_0" in available_schemas 88 | 89 | def test_schema_exists(self, mock_schemas_dir): 90 | """Test checking if a schema exists.""" 91 | registry = SchemaRegistry() 92 | registry._builtin_schemas_dir = mock_schemas_dir 93 | registry._available_schemas = None # Force refresh 94 | 95 | assert registry.schema_exists("1_30_0") is True 96 | assert registry.schema_exists("1_31_0") is True 97 | assert registry.schema_exists("2_0_0") is False 98 | 99 | def test_get_schema(self, mock_schemas_dir): 100 | """Test getting a schema.""" 101 | registry = SchemaRegistry() 102 | registry._builtin_schemas_dir = mock_schemas_dir 103 | registry._available_schemas = None # Force refresh 104 | 105 | # Get complete schema 106 | schema = registry.get_schema("1_30_0") 107 | assert schema["info"]["title"] == "Test Schema 1.30.0" 108 | 109 | # Get component 110 | flow_schema = registry.get_schema("1_31_0", "components.schemas.Flow") 111 | assert flow_schema["type"] == "object" 112 | assert "new_property" in flow_schema["properties"] 113 | 114 | def test_get_invalid_schema(self, mock_schemas_dir): 115 | """Test getting an invalid schema.""" 116 | registry = SchemaRegistry() 117 | registry._builtin_schemas_dir = mock_schemas_dir 118 | registry._available_schemas = None # Force refresh 119 | 120 | with pytest.raises(ValueError): 121 | registry.get_schema("non_existent") 122 | 123 | def 
test_get_invalid_component(self, mock_schemas_dir): 124 | """Test getting an invalid component.""" 125 | registry = SchemaRegistry() 126 | registry._builtin_schemas_dir = mock_schemas_dir 127 | registry._available_schemas = None # Force refresh 128 | 129 | with pytest.raises(ValueError): 130 | registry.get_schema("1_30_0", "components.schemas.NonExistentComponent") 131 | 132 | def test_shared_registry_instances(self): 133 | """Test that different SchemaRegistry instances are independent.""" 134 | registry1 = SchemaRegistry(custom_schemas_dir="/tmp/custom1") 135 | registry2 = SchemaRegistry(custom_schemas_dir="/tmp/custom2") 136 | 137 | # Should be different instances with different settings 138 | assert registry1 is not registry2 139 | assert registry1._custom_schemas_dir != registry2._custom_schemas_dir 140 | 141 | 142 | # TestTargetConfigApiVersion class has been removed since apiVersion is no longer a field 143 | # in the TargetConfig model. The version is now determined dynamically based on the target's 144 | # actual version or the latest available schema version. 145 | 146 | 147 | # Test class for schema tools integration with server removed 148 | # as the get_schema and get_available_schemas tools have been eliminated 149 | 150 | 151 | if __name__ == "__main__": 152 | pytest.main(["-v", __file__]) 153 | -------------------------------------------------------------------------------- /tests/functional/test_no_inline_comments.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test to ensure there are no inline comments in the codebase. 3 | All comments should be converted to logging statements. 4 | """ 5 | 6 | import os 7 | import sys 8 | 9 | import pytest 10 | 11 | import logging 12 | 13 | 14 | logger = logging.getLogger(__name__) 15 | # Configure logging to ensure output is visible 16 | logging.basicConfig(level=logging.INFO, 17 | format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', 18 | stream=sys.stdout) 19 | 20 | 21 | def test_no_inline_comments(): 22 | """Test that there are no inline comments in Python files.""" 23 | # Test all Python files in source directory 24 | source_dirs = ["src"] 25 | # Check all .py files, not just schema_registry.py 26 | target_extension = ".py" 27 | 28 | # Force the logger to print to stdout 29 | logging.basicConfig(level=logging.DEBUG, force=True, 30 | format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', 31 | stream=sys.stdout) 32 | logger.setLevel(logging.DEBUG) 33 | print("TEST STARTING - CHECKING FOR COMMENTS") 34 | problematic_modules = {} 35 | 36 | # Allowlist for specific patterns or file beginnings (first few lines) 37 | allowlist_patterns = ["# noqa", "# type:", "# pragma:", "#!/usr/bin/env"] 38 | 39 | for source_dir in source_dirs: 40 | # Fix the path to use the absolute path to the project root 41 | project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")) 42 | base_path = os.path.join(project_root, source_dir) 43 | print(f"Project root: {project_root}") 44 | print(f"Searching base path: {base_path}") 45 | print(f"Does path exist? 
{os.path.exists(base_path)}") 46 | 47 | # Print directory structure first 48 | for root, dirs, files in os.walk(base_path): 49 | print(f"Directory: {root}") 50 | print(f"Files: {files}") 51 | 52 | print("Now searching for Python files...") 53 | for root, _, files in os.walk(base_path): 54 | print(f"Checking dir: {root}, Files: {files}") 55 | for file in files: 56 | print(f"Found file: {file} in {root}") 57 | # Process all Python files 58 | if file.endswith(target_extension): 59 | print(f"PROCESSING TARGET FILE: {file}") 60 | filepath = os.path.join(root, file) 61 | logger.info("Examining file: %s", filepath) 62 | 63 | with open(filepath, "r", encoding="utf-8") as f: 64 | content = f.read() 65 | 66 | # Extract module name from file path 67 | rel_path = os.path.relpath( 68 | filepath, os.path.dirname(os.path.dirname(__file__)) 69 | ) 70 | if rel_path.startswith("src/"): 71 | rel_path = rel_path[4:] # Remove 'src/' prefix 72 | module_path = rel_path.replace("/", ".").replace(".py", "") 73 | logger.info("Module path: %s", module_path) 74 | comment_lines = [] 75 | for i, line in enumerate(content.splitlines()): 76 | stripped_line = line.strip() 77 | 78 | # Skip empty lines 79 | if not stripped_line: 80 | continue 81 | 82 | # Skip lines in allowlist 83 | if any( 84 | pattern in stripped_line for pattern in allowlist_patterns 85 | ): 86 | continue 87 | 88 | # Skip comments at file beginning (first 5 lines) 89 | if i < 5 and stripped_line.startswith("#"): 90 | continue 91 | 92 | # Print every line with a # character 93 | if "#" in stripped_line: 94 | print(f"Line {i+1}: {stripped_line}") 95 | # Skip allowlisted patterns 96 | if any(pattern in stripped_line for pattern in allowlist_patterns): 97 | continue 98 | 99 | # Skip comments at file beginning (first 5 lines) 100 | if i < 5 and stripped_line.startswith("#"): 101 | continue 102 | 103 | # Add to problematic lines 104 | logger.info("Found comment at line %d: %s", i + 1, stripped_line) 105 | comment_lines.append((i + 1, stripped_line)) 106 | 107 | if comment_lines: 108 | # Group by module name for better organization 109 | if module_path not in problematic_modules: 110 | problematic_modules[module_path] = {} 111 | problematic_modules[module_path][rel_path] = comment_lines 112 | 113 | # Always print a summary 114 | print(f"FOUND {len(problematic_modules)} modules with issues") 115 | if problematic_modules: 116 | error_message = ( 117 | "Found inline comments that should be converted to logging statements:\n\n" 118 | ) 119 | logger.error("FOUND COMMENT ISSUES") 120 | for module_name, files in problematic_modules.items(): 121 | error_message += f"Module: {module_name}\n" 122 | error_message += "=" * (len(module_name) + 8) + "\n" 123 | 124 | for filepath, lines in files.items(): 125 | error_message += f" File: {filepath}\n" 126 | for line_num, line_content in lines: 127 | error_message += f" Line {line_num}: {line_content}\n" 128 | error_message += "\n" 129 | 130 | pytest.fail(error_message) 131 | -------------------------------------------------------------------------------- /tests/schema/test_schema_coverage.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for achieving full coverage of the schema registry. 
3 | """ 4 | 5 | from unittest.mock import patch, mock_open 6 | 7 | import pytest 8 | import yaml 9 | 10 | from otg_mcp.schema_registry import SchemaRegistry 11 | 12 | 13 | def test_get_available_schemas_exception_handling(): 14 | """Test exception handling in get_available_schemas for custom directory.""" 15 | custom_dir = "/non/existent/path" 16 | registry = SchemaRegistry(custom_dir) 17 | 18 | # Create a selective mock that only raises an exception for the custom directory 19 | def selective_listdir(path): 20 | if path == custom_dir: 21 | raise PermissionError("Permission denied") 22 | # Return some schema directories for the built-in path 23 | return ["1_30_0", "1_28_0"] 24 | 25 | # Mock os.path.exists to return True for all paths 26 | with patch('os.path.exists', return_value=True): 27 | # Use the selective mock for listdir 28 | with patch('os.listdir', side_effect=selective_listdir): 29 | with patch('os.path.isdir', return_value=True): 30 | # This should not raise an exception but handle it gracefully 31 | available_schemas = registry.get_available_schemas() 32 | assert isinstance(available_schemas, list) 33 | # Should still have the built-in schemas 34 | assert "1_30_0" in available_schemas 35 | assert "1_28_0" in available_schemas 36 | 37 | 38 | def test_load_schema_from_path_error(): 39 | """Test error handling in _load_schema_from_path.""" 40 | registry = SchemaRegistry() 41 | 42 | # Mock open to raise an exception when trying to open the schema file 43 | mock_file = mock_open() 44 | mock_file.side_effect = IOError("File not found") 45 | 46 | with patch("builtins.open", mock_file): 47 | # Try to load from a schema path 48 | result = registry._load_schema_from_path("/fake/path/schema.yaml", "1_30_0", "test") 49 | # Should return False indicating failure 50 | assert result is False 51 | 52 | def test_load_schema_yaml_error(): 53 | """Test handling of YAML parsing errors.""" 54 | registry = SchemaRegistry() 55 | 56 | # Create a mock that returns invalid YAML content 57 | mock_file = mock_open(read_data="invalid: yaml: content: - [") 58 | 59 | with patch("builtins.open", mock_file): 60 | # Mock yaml.safe_load to raise a YAML parsing error 61 | with patch("yaml.safe_load", side_effect=yaml.YAMLError("YAML parsing error")): 62 | # Try to load from a schema path with invalid YAML 63 | result = registry._load_schema_from_path("/fake/path/schema.yaml", "1_30_0", "test") 64 | # Should return False indicating failure 65 | assert result is False 66 | 67 | 68 | def test_parse_version_malformed(): 69 | """Test _parse_version with malformed version strings.""" 70 | registry = SchemaRegistry() 71 | 72 | # Test with completely non-numeric version 73 | result = registry._parse_version("abc_def_xyz") 74 | assert result == tuple() 75 | 76 | # Test with partially numeric version 77 | result = registry._parse_version("1_abc_2") 78 | assert result == (1, 2) # Should extract the numeric parts 79 | 80 | 81 | def test_get_parsed_versions_empty_valid(): 82 | """Test _get_parsed_versions with empty or invalid versions.""" 83 | registry = SchemaRegistry() 84 | 85 | # Test with empty list 86 | result = registry._get_parsed_versions([]) 87 | assert result == [] 88 | 89 | # Test with invalid versions that can't be parsed 90 | result = registry._get_parsed_versions(["invalid", "not_a_version"]) 91 | assert result == [] 92 | 93 | def test_get_schema_with_component_not_found(): 94 | """Test error handling when requesting a non-existent component.""" 95 | registry = SchemaRegistry() 96 | 97 | # Create a mock schema 
with a valid structure but missing the requested component 98 | mock_schema = { 99 | "components": { 100 | "schemas": { 101 | "ExistingSchema": {"type": "object"} 102 | } 103 | } 104 | } 105 | 106 | # Mock the registry to return our test schema 107 | with patch.object(registry, "schema_exists", return_value=True): 108 | with patch.dict(registry.schemas, {"1_30_0": mock_schema}): 109 | # Try to access a non-existent schema 110 | with pytest.raises(ValueError, match="Schema NonExistentSchema not found"): 111 | registry.get_schema("1_30_0", "components.schemas.NonExistentSchema") 112 | 113 | def test_get_schema_with_invalid_path(): 114 | """Test error handling when requesting an invalid component path.""" 115 | registry = SchemaRegistry() 116 | 117 | # Create a mock schema 118 | mock_schema = {"components": {"schemas": {}}} 119 | 120 | # Mock the registry to return our test schema 121 | with patch.object(registry, "schema_exists", return_value=True): 122 | with patch.dict(registry.schemas, {"1_30_0": mock_schema}): 123 | # Try to access an invalid path (components.invalid) 124 | with pytest.raises(ValueError, match="Component invalid not found"): 125 | registry.get_schema("1_30_0", "components.invalid") 126 | 127 | def test_get_schema_components_non_dict(): 128 | """Test get_schema_components with a component that is not a dictionary.""" 129 | registry = SchemaRegistry() 130 | 131 | # Create a mock schema where a component is not a dictionary 132 | mock_schema = {"components": {"schemas": "not_a_dict"}} 133 | 134 | # Mock the registry to return our test schema 135 | with patch.object(registry, "schema_exists", return_value=True): 136 | with patch.dict(registry.schemas, {"1_30_0": mock_schema}): 137 | # Should return an empty list for non-dictionary components 138 | result = registry.get_schema_components("1_30_0") 139 | assert result == [] 140 | -------------------------------------------------------------------------------- /RELEASE.md: -------------------------------------------------------------------------------- 1 | # OTG-MCP Release Process 2 | 3 | This document outlines the process for releasing new versions of the OTG-MCP package. 4 | 5 | ## Version Management 6 | 7 | ### Current Versioning Strategy 8 | 9 | The project uses [Semantic Versioning](https://semver.org/) with pre-release tags: 10 | - **Format**: `MAJOR.MINOR.PATCH[pre-release]` 11 | - **Pre-release tags**: `a0` (alpha), `b0` (beta), `rc0` (release candidate) 12 | 13 | ### Version Locations 14 | 15 | **Single Source of Truth**: All version information is managed in `pyproject.toml` 16 | ```toml 17 | [project] 18 | version = "0.1.3a0" # Current version 19 | ``` 20 | 21 | > **Note**: The legacy `setup.py` file has been removed to eliminate version conflicts and follow modern Python packaging standards. 22 | 23 | ## Release Types 24 | 25 | ### 1. Alpha Release (Pre-release) 26 | Used for early testing and development features. 27 | 28 | **Example**: `0.1.2a0` → `0.1.3a0` (patch alpha) or `0.2.0a0` (minor alpha) 29 | 30 | ### 2. Beta Release (Pre-release) 31 | Used for feature-complete versions that need broader testing. 32 | 33 | **Example**: `0.1.3a0` → `0.1.3b0` 34 | 35 | ### 3. Release Candidate (Pre-release) 36 | Used for versions ready for release, pending final validation. 37 | 38 | **Example**: `0.1.3b0` → `0.1.3rc0` 39 | 40 | ### 4. Stable Release 41 | Production-ready version. 
42 | 43 | **Example**: `0.1.3rc0` → `0.1.3` 44 | 45 | ## Version Bumping Process 46 | 47 | ### Step 1: Update Version in pyproject.toml 48 | 49 | Edit the version field in `pyproject.toml`: 50 | 51 | ```toml 52 | [project] 53 | version = "NEW_VERSION_HERE" 54 | ``` 55 | 56 | ### Step 2: Commit and Tag 57 | 58 | ```bash 59 | # Commit the version change 60 | git add pyproject.toml 61 | git commit -m "Bump version to NEW_VERSION" 62 | 63 | # Create and push tag 64 | git tag vNEW_VERSION 65 | git push origin main --tags 66 | ``` 67 | 68 | ### Step 3: Automated Publishing 69 | 70 | The CI/CD pipeline automatically handles publishing when a release is created: 71 | 72 | 1. **Tests Run**: All tests must pass on Python 3.11 and 3.12 73 | 2. **Package Build**: Automatically builds wheel and source distributions 74 | 3. **PyPI Publishing**: Publishes to PyPI using trusted publishing (OIDC) 75 | 76 | ## Creating a GitHub Release 77 | 78 | ### Manual Release Creation 79 | 80 | 1. Go to [GitHub Releases](https://github.com/h4ndzdatm0ld/otg-mcp/releases) 81 | 2. Click "Draft a new release" 82 | 3. Choose the tag you created (`vNEW_VERSION`) 83 | 4. Set release title (e.g., "OTG-MCP v0.1.3a0") 84 | 5. Add release notes (see template below) 85 | 6. Check "Set as a pre-release" for alpha/beta/rc versions 86 | 7. Click "Publish release" 87 | 88 | ### Release Notes Template 89 | 90 | ```markdown 91 | ## What's Changed 92 | 93 | ### New Features 94 | - Feature 1 description 95 | - Feature 2 description 96 | 97 | ### Bug Fixes 98 | - Bug fix 1 description 99 | - Bug fix 2 description 100 | 101 | ### Improvements 102 | - Improvement 1 description 103 | - Improvement 2 description 104 | 105 | ### Breaking Changes 106 | - Breaking change description (if any) 107 | 108 | ### Dependencies 109 | - Updated dependency X to version Y 110 | - Added new dependency Z 111 | 112 | **Full Changelog**: https://github.com/h4ndzdatm0ld/otg-mcp/compare/vPREVIOUS_VERSION...vNEW_VERSION 113 | ``` 114 | 115 | ## Pre-Release Checklist 116 | 117 | Before bumping any version: 118 | 119 | - [ ] All tests pass locally (`pytest`) 120 | - [ ] Code quality checks pass (`ruff check`, `mypy`) 121 | - [ ] Documentation is updated 122 | - [ ] CHANGELOG.md is updated (if exists) 123 | - [ ] Version number follows semantic versioning rules 124 | - [ ] Breaking changes are documented 125 | 126 | ## Post-Release Checklist 127 | 128 | After publishing a release: 129 | 130 | - [ ] Verify package is available on [PyPI](https://pypi.org/project/otg-mcp/) 131 | - [ ] Test installation from PyPI: `pip install otg-mcp==NEW_VERSION` 132 | - [ ] Update any dependent projects or documentation 133 | - [ ] Announce release in relevant channels 134 | 135 | ## Common Version Bump Examples 136 | 137 | ### Patch Alpha Release (Bug fixes, small changes) 138 | ```bash 139 | # Current: 0.1.2a0 → New: 0.1.3a0 140 | # Edit pyproject.toml version field 141 | git add pyproject.toml 142 | git commit -m "Bump version to 0.1.3a0" 143 | git tag v0.1.3a0 144 | git push origin main --tags 145 | ``` 146 | 147 | ### Minor Alpha Release (New features) 148 | ```bash 149 | # Current: 0.1.3a0 → New: 0.2.0a0 150 | # Edit pyproject.toml version field 151 | git add pyproject.toml 152 | git commit -m "Bump version to 0.2.0a0" 153 | git tag v0.2.0a0 154 | git push origin main --tags 155 | ``` 156 | 157 | ### Alpha to Beta 158 | ```bash 159 | # Current: 0.2.0a0 → New: 0.2.0b0 160 | # Edit pyproject.toml version field 161 | git add pyproject.toml 162 | git commit -m "Bump version to 
0.2.0b0" 163 | git tag v0.2.0b0 164 | git push origin main --tags 165 | ``` 166 | 167 | ### Beta to Stable 168 | ```bash 169 | # Current: 0.2.0b0 → New: 0.2.0 170 | # Edit pyproject.toml version field 171 | git add pyproject.toml 172 | git commit -m "Release version 0.2.0" 173 | git tag v0.2.0 174 | git push origin main --tags 175 | ``` 176 | 177 | ## Automated CI/CD Pipeline 178 | 179 | The project uses GitHub Actions for automated testing and publishing: 180 | 181 | ### CI Pipeline (`.github/workflows/ci.yml`) 182 | - **Triggers**: Push, Pull Request, Release 183 | - **Tests**: Python 3.11 and 3.12 on Ubuntu and macOS 184 | - **Quality**: Linting (ruff), Type checking (mypy) 185 | - **Coverage**: Test coverage reporting 186 | - **Build**: Package building and artifact upload 187 | 188 | ### Publishing Pipeline 189 | - **Trigger**: GitHub Release creation 190 | - **Authentication**: Trusted publishing with OIDC (no API tokens needed) 191 | - **Target**: PyPI (https://pypi.org/project/otg-mcp/) 192 | 193 | ## Troubleshooting 194 | 195 | ### Version Conflicts 196 | If you encounter version conflicts, ensure: 197 | - Only `pyproject.toml` contains version information 198 | - No leftover `setup.py` or `__version__.py` files exist 199 | - Version follows semantic versioning format 200 | 201 | ### Publishing Failures 202 | If PyPI publishing fails: 203 | - Check that the version doesn't already exist on PyPI 204 | - Verify GitHub Actions has proper OIDC configuration 205 | - Ensure all tests pass before release 206 | 207 | ### Build Failures 208 | If package building fails: 209 | - Verify `pyproject.toml` syntax is correct 210 | - Check that all dependencies are properly specified 211 | - Ensure `src/otg_mcp/` structure is correct 212 | 213 | ## Additional Resources 214 | 215 | - [Semantic Versioning](https://semver.org/) 216 | - [Python Packaging Guide](https://packaging.python.org/) 217 | - [PyPA Build](https://build.pypa.io/) 218 | - [GitHub Releases](https://docs.github.com/en/repositories/releasing-projects-on-github) 219 | -------------------------------------------------------------------------------- /src/otg_mcp/client_capture.py: -------------------------------------------------------------------------------- 1 | """ 2 | OTG Client Capture module providing specialized capture functionality. 3 | 4 | This module contains improved implementations for packet capture operations 5 | with proper control state handling. 6 | """ 7 | 8 | import logging 9 | import os 10 | import uuid 11 | from typing import Dict, List, Optional, Union, Any 12 | 13 | logger = logging.getLogger(__name__) 14 | 15 | 16 | def start_capture(api: Any, port_names: Union[str, List[str]]) -> Dict[str, Any]: 17 | """ 18 | Start packet capture on one or more ports with proper control state handling. 
19 | 20 | Args: 21 | api: Snappi API client 22 | port_names: List or single name of port(s) to capture on 23 | 24 | Returns: 25 | Dictionary with status and result information 26 | """ 27 | logger.info(f"Starting capture for ports: {port_names}") 28 | 29 | logger.debug("Converting port names to list for consistent handling") 30 | port_list = [port_names] if isinstance(port_names, str) else list(port_names) 31 | 32 | try: 33 | logger.debug("Creating control state with all required choices properly set") 34 | cs = api.control_state() 35 | 36 | logger.debug("Setting first-level choice to PORT") 37 | cs.choice = cs.PORT 38 | 39 | logger.debug("Setting second-level choice to CAPTURE for port control state") 40 | cs.port.choice = cs.port.CAPTURE 41 | 42 | logger.debug("Setting third-level choice: capture state to START") 43 | cs.port.capture.state = cs.port.capture.START 44 | 45 | logger.debug(f"Setting port names to capture on: {port_list}") 46 | cs.port.capture.port_names = port_list 47 | 48 | logger.debug("Applying control state") 49 | logger.info(f"Setting control state to start capture on ports: {port_list}") 50 | result = api.set_control_state(cs) 51 | 52 | logger.debug("Checking for warnings in the result") 53 | warnings = [] 54 | if hasattr(result, "warnings") and result.warnings: 55 | warnings = result.warnings 56 | logger.info(f"Start capture warnings: {warnings}") 57 | 58 | return {"status": "success", "warnings": warnings} 59 | 60 | except Exception as e: 61 | logger.error(f"Error starting capture: {e}") 62 | return {"status": "error", "error": str(e)} 63 | 64 | 65 | def stop_capture(api: Any, port_names: Union[str, List[str]]) -> Dict[str, Any]: 66 | """ 67 | Stop packet capture on one or more ports with proper control state handling. 68 | 69 | Args: 70 | api: Snappi API client 71 | port_names: List or single name of port(s) to stop capture on 72 | 73 | Returns: 74 | Dictionary with status and result information 75 | """ 76 | logger.info(f"Stopping capture for ports: {port_names}") 77 | 78 | logger.debug("Converting port names to list for consistent handling") 79 | port_list = [port_names] if isinstance(port_names, str) else list(port_names) 80 | 81 | try: 82 | logger.debug("Creating control state with all required choices properly set") 83 | cs = api.control_state() 84 | 85 | logger.debug("Setting first-level choice to PORT") 86 | cs.choice = cs.PORT 87 | 88 | logger.debug("Setting second-level choice to CAPTURE for port control state") 89 | cs.port.choice = cs.port.CAPTURE 90 | 91 | logger.debug("Setting third-level choice: capture state to STOP") 92 | cs.port.capture.state = cs.port.capture.STOP 93 | 94 | logger.debug(f"Setting port names to capture on: {port_list}") 95 | cs.port.capture.port_names = port_list 96 | 97 | logger.debug("Applying control state") 98 | logger.info(f"Setting control state to stop capture on ports: {port_list}") 99 | result = api.set_control_state(cs) 100 | 101 | logger.debug("Checking for warnings in the result") 102 | warnings = [] 103 | if hasattr(result, "warnings") and result.warnings: 104 | warnings = result.warnings 105 | logger.info(f"Stop capture warnings: {warnings}") 106 | 107 | return {"status": "success", "warnings": warnings} 108 | 109 | except Exception as e: 110 | logger.error(f"Error stopping capture: {e}") 111 | return {"status": "error", "error": str(e)} 112 | 113 | 114 | def get_capture( 115 | api: Any, 116 | port_name: str, 117 | output_dir: Optional[str] = None, 118 | filename: Optional[str] = None, 119 | ) -> Dict[str, Any]: 120 | 
""" 121 | Get capture data from a port and save it to a file. 122 | 123 | Args: 124 | api: Snappi API client 125 | port_name: Name of port to get capture from 126 | output_dir: Directory to save the capture file (default: /tmp) 127 | filename: Optional custom filename (default: auto-generated) 128 | 129 | Returns: 130 | Dictionary with status, file path, and capture data info 131 | """ 132 | logger.info(f"Getting capture data for port {port_name}") 133 | 134 | try: 135 | logger.debug("Setting default output directory if not provided") 136 | if output_dir is None: 137 | output_dir = "/tmp" 138 | 139 | logger.debug(f"Creating output directory if it doesn't exist: {output_dir}") 140 | os.makedirs(output_dir, exist_ok=True) 141 | 142 | logger.debug("Handling filename generation") 143 | if filename is None: 144 | filename = f"capture_{port_name}_{uuid.uuid4().hex[:8]}.pcap" 145 | logger.debug(f"Generated unique filename: {filename}") 146 | elif not filename.endswith(".pcap"): 147 | logger.debug(f"Adding .pcap extension to filename: {filename}") 148 | filename = f"{filename}.pcap" 149 | 150 | file_path = os.path.join(output_dir, filename) 151 | logger.info(f"Will save capture data to {file_path}") 152 | 153 | logger.debug("Creating capture request with port name") 154 | req = api.capture_request() 155 | req.port_name = port_name 156 | 157 | logger.debug("Requesting capture data from the device") 158 | logger.info("Retrieving capture data") 159 | pcap_bytes = api.get_capture(req) 160 | 161 | logger.debug("Writing capture data to output file") 162 | with open(file_path, "wb") as pcap_file: 163 | pcap_file.write(pcap_bytes.read()) 164 | 165 | logger.info(f"Capture successfully saved to {file_path}") 166 | 167 | return { 168 | "status": "success", 169 | "file_path": file_path, 170 | "capture_id": filename, 171 | "port": port_name, 172 | "size_bytes": os.path.getsize(file_path), 173 | } 174 | 175 | except Exception as e: 176 | logger.error(f"Error getting capture data: {e}") 177 | return { 178 | "status": "error", 179 | "error": str(e), 180 | "port": port_name, 181 | "file_path": None, 182 | "capture_id": None, 183 | } 184 | -------------------------------------------------------------------------------- /tests/test_targets_config.py: -------------------------------------------------------------------------------- 1 | """ 2 | Unit tests for the target configuration functionality. 
3 | """ 4 | 5 | from unittest import mock 6 | 7 | import pytest 8 | 9 | from otg_mcp.config import PortConfig, TargetConfig 10 | 11 | 12 | class TestTargetConfig: 13 | """Tests for target configuration models.""" 14 | 15 | def test_port_config_model(self): 16 | """Test PortConfig Pydantic model.""" 17 | # Create a port config 18 | port = PortConfig(interface="eth0", location="eth0", name="eth0") 19 | 20 | # Test properties 21 | assert port.interface == "eth0" 22 | assert port.location == "eth0" 23 | assert port.name == "eth0" 24 | 25 | # Test serialization 26 | data = port.model_dump() 27 | assert data["interface"] == "eth0" 28 | assert data["location"] == "eth0" 29 | assert data["name"] == "eth0" 30 | 31 | # Test deserialization 32 | port2 = PortConfig.model_validate( 33 | {"interface": "eth1", "location": "eth1", "name": "eth1"} 34 | ) 35 | assert port2.interface == "eth1" 36 | assert port2.location == "eth1" 37 | assert port2.name == "eth1" 38 | 39 | def test_target_config_model(self): 40 | """Test TargetConfig Pydantic model.""" 41 | # Create a target config with ports 42 | target = TargetConfig( 43 | ports={ 44 | "p1": PortConfig(interface="eth0", location="eth0", name="p1"), 45 | "p2": PortConfig(interface="eth1", location="eth1", name="p2"), 46 | } 47 | ) 48 | 49 | # Test properties 50 | assert len(target.ports) == 2 51 | assert target.ports["p1"].interface == "eth0" 52 | assert target.ports["p1"].location == "eth0" 53 | assert target.ports["p1"].name == "p1" 54 | assert target.ports["p2"].interface == "eth1" 55 | assert target.ports["p2"].location == "eth1" 56 | assert target.ports["p2"].name == "p2" 57 | 58 | # Test serialization 59 | data = target.model_dump() 60 | assert data["ports"]["p1"]["interface"] == "eth0" 61 | assert data["ports"]["p1"]["location"] == "eth0" 62 | assert data["ports"]["p1"]["name"] == "p1" 63 | assert data["ports"]["p2"]["interface"] == "eth1" 64 | assert data["ports"]["p2"]["location"] == "eth1" 65 | assert data["ports"]["p2"]["name"] == "p2" 66 | 67 | # Test deserialization from dict 68 | target_dict = { 69 | "ports": { 70 | "p1": {"interface": "enp0s31f6", "location": "enp0s31f6", "name": "p1"}, 71 | "p2": { 72 | "interface": "enp0s31f6.1", 73 | "location": "enp0s31f6.1", 74 | "name": "p2", 75 | }, 76 | } 77 | } 78 | target2 = TargetConfig.model_validate(target_dict) 79 | assert target2.ports["p1"].interface == "enp0s31f6" 80 | assert target2.ports["p1"].location == "enp0s31f6" 81 | assert target2.ports["p1"].name == "p1" 82 | assert target2.ports["p2"].interface == "enp0s31f6.1" 83 | assert target2.ports["p2"].location == "enp0s31f6.1" 84 | assert target2.ports["p2"].name == "p2" 85 | 86 | def test_example_target_config(self, example_target_config): 87 | """Test that example_target_config fixture works correctly.""" 88 | # Verify the fixture created a valid target 89 | assert "test-target.example.com:8443" in example_target_config.targets 90 | target = example_target_config.targets["test-target.example.com:8443"] 91 | 92 | # Verify the target has the expected ports 93 | assert "p1" in target.ports 94 | assert "p2" in target.ports 95 | assert target.ports["p1"].interface == "enp0s31f6" 96 | assert target.ports["p2"].interface == "enp0s31f6.1" 97 | 98 | 99 | @pytest.mark.asyncio 100 | class TestAvailableTargets: 101 | """Tests for get_available_targets functionality.""" 102 | 103 | async def test_get_available_targets_empty(self, router, test_config): 104 | """Test getting available targets with empty config.""" 105 | # Clear any existing targets 106 | 
test_config.targets.targets = {} 107 | 108 | # Mock the _get_api_client method to avoid actual network calls 109 | router._get_api_client = mock.MagicMock() 110 | 111 | # Call the method 112 | result = await router.get_available_targets() 113 | 114 | # Verify the result 115 | assert result is not None 116 | assert len(result) == 0 117 | 118 | async def test_get_available_targets_with_targets( 119 | self, router, example_target_config 120 | ): 121 | """Test getting available targets with configured targets.""" 122 | # Mock the _get_api_client method to avoid actual network calls 123 | router._get_api_client = mock.MagicMock() 124 | 125 | # Call the method 126 | result = await router.get_available_targets() 127 | 128 | # Verify the result has either 1 or 2 targets depending on setup 129 | assert result is not None 130 | assert len(result) >= 1 131 | assert "test-target.example.com:8443" in result 132 | 133 | target = result["test-target.example.com:8443"] 134 | # API version may not be present if the connection to capabilities/version fails 135 | # but we should either have apiVersion or apiVersionError 136 | assert "apiVersionError" in target or "apiVersion" in target 137 | assert "ports" in target 138 | assert ( 139 | target["available"] is True 140 | ) # Because we mocked _get_api_client to succeed 141 | assert len(target["ports"]) == 2 142 | 143 | # Verify ports 144 | assert "p1" in target["ports"] 145 | assert "p2" in target["ports"] 146 | assert target["ports"]["p1"]["name"] == "p1" 147 | assert target["ports"]["p1"]["location"] == "enp0s31f6" 148 | assert target["ports"]["p2"]["name"] == "p2" 149 | assert target["ports"]["p2"]["location"] == "enp0s31f6.1" 150 | 151 | async def test_get_available_targets_connection_failure( 152 | self, router, example_target_config 153 | ): 154 | """Test getting available targets when connection fails.""" 155 | # Mock the _get_api_client method to simulate connection failure 156 | router._get_api_client = mock.MagicMock( 157 | side_effect=Exception("Connection failed") 158 | ) 159 | 160 | # Call the method 161 | result = await router.get_available_targets() 162 | 163 | # Verify the result - we should still get targets but with available=False 164 | assert result is not None 165 | assert "test-target.example.com:8443" in result 166 | 167 | target = result["test-target.example.com:8443"] 168 | assert target["available"] is False 169 | assert "error" in target 170 | 171 | # Removed test_refresh_targets as the refresh parameter has been removed from get_available_targets 172 | -------------------------------------------------------------------------------- /tests/schema/test_schema_version_matching.py: -------------------------------------------------------------------------------- 1 | """ 2 | Tests for schema version matching functionality. 3 | 4 | These tests verify that the schema registry can find the closest matching schema 5 | version when an exact match isn't available. 
6 | """ 7 | 8 | import os 9 | import shutil 10 | import tempfile 11 | from unittest.mock import patch 12 | 13 | import pytest 14 | 15 | from otg_mcp.schema_registry import SchemaRegistry 16 | 17 | 18 | def test_schema_exists(): 19 | """Test the schema_exists method.""" 20 | registry = SchemaRegistry() 21 | assert registry.schema_exists("1_30_0") 22 | assert registry.schema_exists("1.30.0") 23 | assert not registry.schema_exists("1_99_0") 24 | 25 | 26 | def test_normalize_version(): 27 | """Test version normalization.""" 28 | registry = SchemaRegistry() 29 | assert registry._normalize_version("1.30.0") == "1_30_0" 30 | assert registry._normalize_version("1_30_0") == "1_30_0" 31 | 32 | 33 | class TestVersionMatching: 34 | """Tests for schema version matching functionality.""" 35 | 36 | def setup_method(self): 37 | """Set up the test environment with mock schema registry.""" 38 | self.registry = SchemaRegistry() 39 | # Mock available schemas to simulate different versions 40 | self.available_schemas = ["1_28_0", "1_29_0", "1_30_0"] 41 | 42 | def test_find_closest_exact_match(self): 43 | """Test finding exact schema version match.""" 44 | with patch.object(self.registry, "get_available_schemas", return_value=self.available_schemas): 45 | with patch.object(self.registry, "schema_exists", return_value=True): 46 | result = self.registry.find_closest_schema_version("1_30_0") 47 | assert result == "1_30_0" 48 | 49 | def test_find_closest_same_major_minor_lower_patch(self): 50 | """Test finding schema with same major.minor and lower patch.""" 51 | with patch.object(self.registry, "get_available_schemas", return_value=self.available_schemas): 52 | # Test finding 1.28.0 when requesting 1.28.2 53 | result = self.registry.find_closest_schema_version("1.28.2") 54 | assert result == "1_28_0" 55 | 56 | def test_find_closest_same_major(self): 57 | """Test finding schema with same major version when no matching minor.""" 58 | with patch.object(self.registry, "get_available_schemas", return_value=self.available_schemas): 59 | # Test finding 1.30.0 (latest with same major) when requesting 1.31.2 60 | result = self.registry.find_closest_schema_version("1.31.2") 61 | assert result == "1_30_0" 62 | 63 | def test_fallback_to_latest(self): 64 | """Test fallback to latest version when no matching major version.""" 65 | with patch.object(self.registry, "get_available_schemas", return_value=self.available_schemas): 66 | # Test finding 1.30.0 (latest overall) when requesting 2.0.0 67 | result = self.registry.find_closest_schema_version("2.0.0") 68 | assert result == "1_30_0" 69 | 70 | def test_empty_schema_list(self): 71 | """Test error when no schemas are available.""" 72 | with patch.object(self.registry, "get_available_schemas", return_value=[]): 73 | with pytest.raises(ValueError, match="No schema versions available"): 74 | self.registry.find_closest_schema_version("1.28.0") 75 | 76 | 77 | class TestCustomSchemaPaths: 78 | """Tests for custom schema path functionality.""" 79 | 80 | def setup_method(self): 81 | """Set up the test environment with temporary directories.""" 82 | # Create temporary directories for custom and built-in schemas 83 | self.temp_dir = tempfile.mkdtemp() 84 | self.custom_dir = os.path.join(self.temp_dir, "custom_schemas") 85 | os.makedirs(self.custom_dir) 86 | 87 | # Create schema directories 88 | self.custom_schema_dir = os.path.join(self.custom_dir, "1_31_0") 89 | os.makedirs(self.custom_schema_dir) 90 | 91 | # Create schema files 92 | with open(os.path.join(self.custom_schema_dir, 
"openapi.yaml"), "w") as f: 93 | f.write("# Custom schema 1.31.0") 94 | 95 | def teardown_method(self): 96 | """Clean up temporary directories.""" 97 | shutil.rmtree(self.temp_dir) 98 | 99 | def test_custom_schemas_directory(self): 100 | """Test that custom schemas directory is used.""" 101 | registry = SchemaRegistry(self.custom_dir) 102 | 103 | # Create a non-existent directory for built-in schemas to ensure we only get custom schemas 104 | non_existent_dir = os.path.join(self.temp_dir, "non_existent") 105 | registry._builtin_schemas_dir = non_existent_dir 106 | registry._available_schemas = None # Force refresh 107 | 108 | available = registry.get_available_schemas() 109 | assert "1_31_0" in available 110 | 111 | def test_prioritize_custom_schemas(self): 112 | """Test that custom schemas take priority over built-in schemas.""" 113 | # Create a temporary built-in directory with the same version 114 | built_in_dir = os.path.join(self.temp_dir, "built_in") 115 | os.makedirs(built_in_dir) 116 | common_version = "1_30_0" 117 | 118 | # Create same version in custom dir with valid YAML content 119 | os.makedirs(os.path.join(self.custom_dir, common_version)) 120 | with open(os.path.join(self.custom_dir, common_version, "openapi.yaml"), "w") as f: 121 | f.write(""" 122 | # Custom schema 1.30.0 123 | components: 124 | schemas: 125 | Test: 126 | type: object 127 | properties: 128 | name: 129 | type: string 130 | """) 131 | 132 | # Create same version in built-in dir (with different content) 133 | built_in_version_dir = os.path.join(built_in_dir, common_version) 134 | os.makedirs(built_in_version_dir) 135 | with open(os.path.join(built_in_version_dir, "openapi.yaml"), "w") as f: 136 | f.write(""" 137 | # Built-in schema 1.30.0 138 | components: 139 | schemas: 140 | Test: 141 | type: object 142 | properties: 143 | id: 144 | type: integer 145 | """) 146 | 147 | # Create registry with our test directories 148 | registry = SchemaRegistry(self.custom_dir) 149 | registry._builtin_schemas_dir = built_in_dir 150 | registry._available_schemas = None # Force refresh 151 | 152 | # Get available schemas (should include both custom and built-in) 153 | available = registry.get_available_schemas() 154 | assert common_version in available 155 | 156 | # Verify the schema exists at the expected path 157 | schema_path = os.path.join(self.custom_dir, common_version, "openapi.yaml") 158 | assert os.path.exists(schema_path) 159 | 160 | # Check that custom takes priority by looking at the first schema in the list 161 | # The implementation guarantees custom schemas are added first 162 | assert available.count(common_version) == 1 # Should only appear once 163 | 164 | # Ensure custom schema is loaded 165 | schema = registry.get_schema(common_version) 166 | 167 | # Now check that the schema was loaded and has the expected content 168 | assert isinstance(schema, dict) 169 | assert "components" in schema 170 | assert "schemas" in schema["components"] 171 | assert "Test" in schema["components"]["schemas"] 172 | 173 | # The schema from custom dir should have a 'name' property, not 'id' 174 | # This verifies we loaded from custom dir, not built-in 175 | assert "name" in schema["components"]["schemas"]["Test"]["properties"] 176 | assert "id" not in schema["components"]["schemas"]["Test"]["properties"] 177 | 178 | 179 | def test_get_latest_schema_version(): 180 | """Test getting the latest schema version.""" 181 | registry = SchemaRegistry() 182 | 183 | with patch.object(registry, "get_available_schemas", 184 | 
return_value=["1_20_0", "1_28_0", "1_30_0", "1_5_0"]): 185 | latest = registry.get_latest_schema_version() 186 | assert latest == "1_30_0" 187 | 188 | 189 | def test_get_latest_schema_version_empty(): 190 | """Test error when no schemas are available for latest version.""" 191 | registry = SchemaRegistry() 192 | 193 | with patch.object(registry, "get_available_schemas", return_value=[]): 194 | with pytest.raises(ValueError, match="No schema versions available"): 195 | registry.get_latest_schema_version() 196 | -------------------------------------------------------------------------------- /src/otg_mcp/config.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | import os 4 | from typing import Dict, Optional 5 | 6 | from pydantic import BaseModel, ConfigDict, Field, validator, ValidationError 7 | from pydantic_settings import BaseSettings 8 | 9 | logging.basicConfig( 10 | level=logging.INFO, 11 | format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", 12 | ) 13 | logger = logging.getLogger(__name__) 14 | 15 | 16 | class LoggingConfig(BaseSettings): 17 | """Configuration for logging.""" 18 | 19 | LOG_LEVEL: str = Field( 20 | default="INFO", 21 | description="Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL)", 22 | ) 23 | 24 | @validator("LOG_LEVEL") 25 | def validate_log_level(cls, v: str) -> str: 26 | valid_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"] 27 | upper_v = v.upper() 28 | if upper_v not in valid_levels: 29 | logger.error(f"LOG_LEVEL must be one of {valid_levels}") 30 | raise ValueError(f"LOG_LEVEL must be one of {valid_levels}") 31 | logger.info(f"Validated log level: {upper_v}") 32 | return upper_v 33 | 34 | 35 | class PortConfig(BaseModel): 36 | """Configuration for a port on a traffic generator.""" 37 | 38 | location: Optional[str] = Field( 39 | None, description="Location of the port (hostname:port)" 40 | ) 41 | name: Optional[str] = Field(None, description="Name of the port") 42 | interface: Optional[str] = Field( 43 | None, description="Interface name (backward compatibility)" 44 | ) 45 | 46 | @validator("location", pre=True, always=True) 47 | def validate_location(cls, v, values): 48 | """Validate location, using interface if location is not provided.""" 49 | if v is None and "interface" in values and values["interface"] is not None: 50 | return values["interface"] 51 | return v 52 | 53 | @validator("name", pre=True, always=True) 54 | def validate_name(cls, v, values): 55 | """Validate name, using interface or location if name is not provided.""" 56 | if v is None: 57 | if "interface" in values and values["interface"] is not None: 58 | return values["interface"] 59 | if "location" in values and values["location"] is not None: 60 | return values["location"] 61 | return v 62 | 63 | 64 | class TargetConfig(BaseModel): 65 | """Configuration for a traffic generator target.""" 66 | 67 | ports: Dict[str, PortConfig] = Field( 68 | default_factory=dict, description="Port configurations mapped by port name" 69 | ) 70 | 71 | model_config = ConfigDict(extra="forbid") 72 | 73 | 74 | class TargetsConfig(BaseSettings): 75 | """Configuration for all available traffic generator targets.""" 76 | 77 | targets: Dict[str, TargetConfig] = Field( 78 | default_factory=dict, 79 | description="Target configurations mapped by hostname:port", 80 | ) 81 | 82 | 83 | class SchemaConfig(BaseSettings): 84 | """Configuration for schema handling.""" 85 | 86 | schema_path: Optional[str] = Field( 87 | default=None, 
description="Path to directory containing custom schema files" 88 | ) 89 | 90 | 91 | class Config: 92 | """Main configuration for the MCP server.""" 93 | 94 | def __init__(self, config_file: Optional[str] = None): 95 | self.logging = LoggingConfig() 96 | self.targets = TargetsConfig() 97 | self.schemas = SchemaConfig() 98 | 99 | logger.info("Initializing configuration") 100 | if config_file: 101 | logger.info(f"Loading configuration from file: {config_file}") 102 | self.load_config_file(config_file) 103 | elif not self.targets.targets: 104 | logger.info("No targets defined - adding default development target") 105 | example_target = TargetConfig( 106 | ports={ 107 | "p1": PortConfig( 108 | location="localhost:5555", name="p1", interface=None 109 | ), 110 | "p2": PortConfig( 111 | location="localhost:5555", name="p2", interface=None 112 | ), 113 | } 114 | ) 115 | self.targets.targets["localhost:8443"] = example_target 116 | 117 | def load_config_file(self, config_file_path: str) -> None: 118 | """ 119 | Load the traffic generator configuration from a JSON file. 120 | 121 | Args: 122 | config_file_path: Path to the JSON configuration file 123 | 124 | Raises: 125 | FileNotFoundError: If the config file doesn't exist 126 | json.JSONDecodeError: If the config file isn't valid JSON 127 | ValueError: If the config file doesn't have the expected structure 128 | """ 129 | logger.info(f"Loading traffic generator configuration from: {config_file_path}") 130 | 131 | if not os.path.exists(config_file_path): 132 | error_msg = f"Configuration file not found: {config_file_path}" 133 | logger.critical(error_msg) 134 | raise FileNotFoundError(error_msg) 135 | 136 | try: 137 | with open(config_file_path, "r") as file: 138 | config_data = json.load(file) 139 | 140 | logger.info("Validating configuration structure") 141 | if "targets" not in config_data: 142 | error_msg = "Configuration file must contain a 'targets' property" 143 | logger.critical(error_msg) 144 | raise ValueError(error_msg) 145 | 146 | logger.info("Clearing existing targets and initializing new configuration") 147 | self.targets = TargetsConfig() 148 | 149 | logger.info("Processing each target in configuration") 150 | for hostname, target_data in config_data["targets"].items(): 151 | if not isinstance(target_data, dict) or "ports" not in target_data: 152 | error_msg = f"Target '{hostname}' must contain a 'ports' dictionary" 153 | logger.error(error_msg) 154 | continue 155 | 156 | logger.info(f"Creating target config for {hostname}") 157 | 158 | logger.info("Validating target configuration using Pydantic model") 159 | try: 160 | target_config = TargetConfig(**target_data) 161 | except ValidationError as e: 162 | error_msg = ( 163 | f"Invalid target configuration for '{hostname}': {str(e)}" 164 | ) 165 | logger.error(error_msg) 166 | if "extra fields not permitted" in str(e): 167 | logger.error( 168 | "The configuration contains fields that are not allowed. " 169 | "apiVersion should not be included in target configuration." 
170 | ) 171 | continue 172 | 173 | logger.info(f"Adding target {hostname} to configuration") 174 | self.targets.targets[hostname] = target_config 175 | 176 | logger.info("Checking for schema path in configuration") 177 | if "schema_path" in config_data: 178 | schema_path = config_data["schema_path"] 179 | logger.info(f"Found schema_path in config: {schema_path}") 180 | if os.path.exists(schema_path): 181 | self.schemas.schema_path = schema_path 182 | logger.info(f"Using custom schema path: {schema_path}") 183 | else: 184 | logger.warning( 185 | f"Specified schema path does not exist: {schema_path}" 186 | ) 187 | 188 | logger.info( 189 | f"Successfully loaded configuration with {len(self.targets.targets)} targets" 190 | ) 191 | 192 | except json.JSONDecodeError as e: 193 | error_msg = f"Invalid JSON in configuration file: {str(e)}" 194 | logger.critical(error_msg) 195 | raise 196 | except Exception as e: 197 | error_msg = f"Error loading configuration: {str(e)}" 198 | logger.critical(error_msg) 199 | raise 200 | 201 | def setup_logging(self): 202 | """Configure logging based on the provided settings.""" 203 | try: 204 | log_level = getattr(logging, self.logging.LOG_LEVEL) 205 | print(f"Setting up logging at level {self.logging.LOG_LEVEL}") 206 | 207 | logger.info( 208 | "Setting up both basic config and console handler for comprehensive logging" 209 | ) 210 | logging.basicConfig( 211 | level=log_level, 212 | format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", 213 | ) 214 | 215 | logger.info("Configuring root logger") 216 | root_logger = logging.getLogger() 217 | root_logger.setLevel(log_level) 218 | 219 | logger.info(f"Setting module logger to level {log_level}") 220 | module_logger = logging.getLogger("otg_mcp") 221 | module_logger.setLevel(log_level) 222 | 223 | logger.info("Checking if root logger has handlers, adding if needed") 224 | if not root_logger.handlers: 225 | console_handler = logging.StreamHandler() 226 | console_handler.setLevel(log_level) 227 | formatter = logging.Formatter( 228 | "%(asctime)s - %(name)s - %(levelname)s - %(message)s" 229 | ) 230 | console_handler.setFormatter(formatter) 231 | root_logger.addHandler(console_handler) 232 | print("Added console handler to root logger") 233 | 234 | logger.info("Logging system initialized with handlers and formatters") 235 | logger.info(f"Logging configured at level {self.logging.LOG_LEVEL}") 236 | except Exception as e: 237 | print(f"CRITICAL ERROR setting up logging: {str(e)}") 238 | import traceback 239 | 240 | print(f"Stack trace: {traceback.format_exc()}") 241 | logger.critical(f"Failed to set up logging: {str(e)}") 242 | logger.critical(f"Stack trace: {traceback.format_exc()}") 243 | -------------------------------------------------------------------------------- /tests/schema/test_schema_registry_complete.py: -------------------------------------------------------------------------------- 1 | """ 2 | Complete test coverage for schema registry module. 3 | 4 | This module provides tests targeting all code paths in the schema_registry.py module, 5 | with special attention to error cases and edge conditions. 
6 | """ 7 | 8 | import os 9 | import shutil 10 | import tempfile 11 | 12 | import pytest 13 | import yaml 14 | 15 | from otg_mcp.schema_registry import SchemaRegistry 16 | 17 | 18 | class TestSchemaRegistryComplete: 19 | """Test cases for 100% coverage of SchemaRegistry.""" 20 | 21 | @pytest.fixture 22 | def mock_schemas_dir(self): 23 | """Create a temporary directory with mock schema files.""" 24 | # Create a temporary directory 25 | temp_dir = tempfile.mkdtemp() 26 | 27 | # Create schema version directories 28 | v1_dir = os.path.join(temp_dir, "1_30_0") 29 | v2_dir = os.path.join(temp_dir, "1_31_0") 30 | os.makedirs(v1_dir) 31 | os.makedirs(v2_dir) 32 | 33 | # Create an empty directory (should be ignored) 34 | os.makedirs(os.path.join(temp_dir, "empty_dir")) 35 | 36 | # Create a directory with no openapi.yaml file (should be ignored) 37 | no_yaml_dir = os.path.join(temp_dir, "no_yaml") 38 | os.makedirs(no_yaml_dir) 39 | with open(os.path.join(no_yaml_dir, "some_other_file.txt"), "w") as f: 40 | f.write("Not a YAML file") 41 | 42 | # Create mock schema files 43 | v1_schema = { 44 | "openapi": "3.0.0", 45 | "info": {"title": "Test Schema 1.30.0", "version": "1.30.0"}, 46 | "components": { 47 | "schemas": { 48 | "Flow": {"type": "object"}, 49 | "Device": 12345, # Not a dict to test error handling 50 | } 51 | }, 52 | } 53 | 54 | v2_schema = { 55 | "openapi": "3.0.0", 56 | "info": {"title": "Test Schema 1.31.0", "version": "1.31.0"}, 57 | "components": { 58 | # No schemas key to test KeyError handling 59 | }, 60 | } 61 | 62 | # Write schemas to files 63 | with open(os.path.join(v1_dir, "openapi.yaml"), "w") as f: 64 | yaml.dump(v1_schema, f) 65 | 66 | with open(os.path.join(v2_dir, "openapi.yaml"), "w") as f: 67 | yaml.dump(v2_schema, f) 68 | 69 | yield temp_dir 70 | 71 | # Cleanup the temporary directory 72 | shutil.rmtree(temp_dir) 73 | 74 | def test_normalize_version(self): 75 | """Test version string normalization.""" 76 | registry = SchemaRegistry() 77 | assert registry._normalize_version("1.30.0") == "1_30_0" 78 | assert registry._normalize_version("1_30_0") == "1_30_0" 79 | 80 | def test_get_available_schemas(self, mock_schemas_dir): 81 | """Test getting available schemas.""" 82 | registry = SchemaRegistry() 83 | registry._builtin_schemas_dir = mock_schemas_dir 84 | 85 | # Reset cached value to ensure it's computed fresh 86 | registry._available_schemas = None 87 | 88 | schemas = registry.get_available_schemas() 89 | assert "1_30_0" in schemas 90 | assert "1_31_0" in schemas 91 | assert "empty_dir" not in schemas 92 | assert "no_yaml" not in schemas 93 | 94 | # Call again to use cached value 95 | cached_schemas = registry.get_available_schemas() 96 | assert cached_schemas == schemas 97 | 98 | def test_non_existent_schemas_dir(self): 99 | """Test behavior when schemas directory doesn't exist.""" 100 | registry = SchemaRegistry() 101 | temp_dir = tempfile.mkdtemp() 102 | try: 103 | # Point to a non-existent directory 104 | non_existent_dir = os.path.join(temp_dir, "non_existent") 105 | registry._builtin_schemas_dir = non_existent_dir 106 | registry._available_schemas = None 107 | 108 | # Should handle non-existent directory gracefully 109 | schemas = registry.get_available_schemas() 110 | # We expect empty list from built-in, but might have default schemas still 111 | assert isinstance(schemas, list) 112 | finally: 113 | shutil.rmtree(temp_dir) 114 | 115 | def test_schema_exists(self, mock_schemas_dir): 116 | """Test checking if schemas exist.""" 117 | registry = SchemaRegistry() 118 | 
registry._builtin_schemas_dir = mock_schemas_dir 119 | registry._available_schemas = None # Force refresh 120 | 121 | assert registry.schema_exists("1.30.0") is True 122 | assert registry.schema_exists("1_30_0") is True 123 | assert registry.schema_exists("1.31.0") is True 124 | assert registry.schema_exists("2.0.0") is False 125 | 126 | def test_list_schemas(self, mock_schemas_dir): 127 | """Test listing schema keys.""" 128 | registry = SchemaRegistry() 129 | registry._builtin_schemas_dir = mock_schemas_dir 130 | registry._available_schemas = None # Force refresh 131 | 132 | keys = registry.list_schemas("1.30.0") 133 | assert "openapi" in keys 134 | assert "info" in keys 135 | assert "components" in keys 136 | 137 | def test_get_schema_components(self, mock_schemas_dir): 138 | """Test getting schema components.""" 139 | registry = SchemaRegistry() 140 | registry._builtin_schemas_dir = mock_schemas_dir 141 | registry._available_schemas = None # Force refresh 142 | 143 | # Test with a valid path that returns a dict 144 | components = registry.get_schema_components("1.30.0", "components.schemas") 145 | assert "Flow" in components 146 | assert "Device" in components 147 | 148 | # Test with a path that returns a non-dict 149 | components = registry.get_schema_components("1.30.0", "components.schemas.Device") 150 | assert components == [] 151 | 152 | def test_get_schema_basic(self, mock_schemas_dir): 153 | """Test getting a basic schema.""" 154 | registry = SchemaRegistry() 155 | registry._builtin_schemas_dir = mock_schemas_dir 156 | registry._available_schemas = None # Force refresh 157 | 158 | # Get full schema 159 | schema = registry.get_schema("1.30.0") 160 | assert schema["info"]["title"] == "Test Schema 1.30.0" 161 | 162 | # Get component 163 | flow = registry.get_schema("1.30.0", "components.schemas.Flow") 164 | assert flow["type"] == "object" 165 | 166 | def test_get_schema_invalid_version(self, mock_schemas_dir): 167 | """Test getting a schema with an invalid version.""" 168 | registry = SchemaRegistry() 169 | registry._builtin_schemas_dir = mock_schemas_dir 170 | registry._available_schemas = None # Force refresh 171 | 172 | with pytest.raises(ValueError) as excinfo: 173 | registry.get_schema("non_existent") 174 | assert "not found" in str(excinfo.value) 175 | 176 | def test_get_schema_loading_exception(self): 177 | """Test exception during schema loading.""" 178 | registry = SchemaRegistry() 179 | 180 | # Create a temporary directory with an invalid YAML file 181 | temp_dir = tempfile.mkdtemp() 182 | try: 183 | v1_dir = os.path.join(temp_dir, "1_30_0") 184 | os.makedirs(v1_dir) 185 | 186 | with open(os.path.join(v1_dir, "openapi.yaml"), "w") as f: 187 | f.write("invalid YAML content:\n\tindentation error") 188 | 189 | registry._builtin_schemas_dir = temp_dir 190 | registry._available_schemas = None # Force refresh 191 | 192 | with pytest.raises(ValueError) as excinfo: 193 | registry.get_schema("1.30.0") 194 | assert "Error loading schema" in str(excinfo.value) 195 | finally: 196 | shutil.rmtree(temp_dir) 197 | 198 | def test_get_schema_component_special_handling(self, mock_schemas_dir): 199 | """Test special handling for components.schemas.X paths.""" 200 | registry = SchemaRegistry() 201 | registry._builtin_schemas_dir = mock_schemas_dir 202 | registry._available_schemas = None # Force refresh 203 | 204 | # Test with a valid schema component 205 | flow = registry.get_schema("1.30.0", "components.schemas.Flow") 206 | assert flow["type"] == "object" 207 | 208 | # Test with an invalid 
schema component 209 | with pytest.raises(ValueError) as excinfo: 210 | registry.get_schema("1.30.0", "components.schemas.NonExistent") 211 | assert "not found in components.schemas" in str(excinfo.value) 212 | 213 | # Test with a missing components.schemas section 214 | with pytest.raises(ValueError) as excinfo: 215 | registry.get_schema("1.31.0", "components.schemas.Flow") 216 | assert "Error accessing components.schemas" in str(excinfo.value) 217 | 218 | def test_get_schema_component_navigation(self, mock_schemas_dir): 219 | """Test component path navigation.""" 220 | registry = SchemaRegistry() 221 | registry._builtin_schemas_dir = mock_schemas_dir 222 | registry._available_schemas = None # Force refresh 223 | 224 | # Test navigation to a component 225 | component = registry.get_schema("1.30.0", "components") 226 | assert "schemas" in component 227 | 228 | # Test navigation to a non-existent component 229 | with pytest.raises(ValueError) as excinfo: 230 | registry.get_schema("1.30.0", "non_existent") 231 | assert "not found in path" in str(excinfo.value) 232 | 233 | # Test navigation through a non-dict component 234 | with pytest.raises(ValueError) as excinfo: 235 | registry.get_schema("1.30.0", "components.schemas.Device.property") 236 | # The path is actually failing in the special handling for components.schemas.X 237 | # rather than in the TypeError handling section 238 | assert "Schema Device.property not found in components.schemas" in str(excinfo.value) 239 | 240 | def test_multiple_registry_instances(self): 241 | """Test that multiple registry instances can be created with different configs.""" 242 | registry1 = SchemaRegistry(custom_schemas_dir="/path/to/custom1") 243 | registry2 = SchemaRegistry(custom_schemas_dir="/path/to/custom2") 244 | 245 | # Instances should be different 246 | assert registry1 is not registry2 247 | assert registry1._custom_schemas_dir != registry2._custom_schemas_dir 248 | 249 | 250 | if __name__ == "__main__": 251 | pytest.main(["-v", __file__]) 252 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Open Traffic Generator MCP Server 2 | 3 | [![codecov](https://codecov.io/gh/h4ndzdatm0ld/otg-mcp/graph/badge.svg?token=FCrRSKjGZz)](https://codecov.io/gh/h4ndzdatm0ld/otg-mcp) [![CI](https://github.com/h4ndzdatm0ld/otg-mcp/actions/workflows/ci.yml/badge.svg)](https://github.com/h4ndzdatm0ld/otg-mcp/actions/workflows/ci.yml) 4 | 5 | MCP (Model Context Protocol) server implementation for Open Traffic Generator (OTG) API. 6 | 7 | ## Overview 8 | 9 | The OTG MCP Server is a Python-based Model Context Protocol (MCP) server that provides access to Open Traffic Generators (OTG) through a unified API. The server connects to traffic generators using a standardized configuration interface, providing a consistent way to interact with any traffic generator that implements the Open Traffic Generator models.
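For example, a typical local invocation looks like this (a minimal sketch, assuming the package is installed and that `__main__` accepts the same `--config-file` flag shown in `examples/mcp-config.json`; see Configuration below):

```bash
# Start the MCP server against a traffic generator configuration file
python -m otg_mcp --config-file examples/trafficGeneratorConfig.json
```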
10 | 11 | ## Features 12 | 13 | - **Configuration-Based Connection**: Connect to traffic generators via standardized configuration 14 | - **OTG API Implementation**: Complete implementation of the Open Traffic Generator API 15 | - **Multi-Target Support**: Connect to multiple traffic generators simultaneously 16 | - **Type-Safe Models**: Pydantic models for configuration, metrics, and response data 17 | 18 | ## Documentation 19 | 20 | Comprehensive documentation is available in the `docs/` directory: 21 | 22 | - [Ixia-C Deployment Guide](./docs/deployIxiaC_simple_testing.md): Simple testing with Ixia-C Community Edition 23 | - [GitHub Flow](./docs/github-flow.md): Guidelines for GitHub workflow 24 | 25 | ## Configuration 26 | 27 | The OTG MCP Server uses a JSON configuration file to define traffic generator targets and their ports. 28 | 29 | Example configuration (following the format of `examples/trafficGeneratorConfigWithCustomSchemas.json`): 30 | 31 | ```json 32 | { 33 | "schemas": { 34 | "schema_path": "/path/to/custom/schemas/directory" 35 | }, 36 | "targets": { 37 | "traffic-gen-1.example.com:8443": { 38 | "ports": { 39 | "p1": { 40 | "location": "localhost:5555", 41 | "name": "p1" 42 | }, 43 | "p2": { 44 | "location": "localhost:5556", 45 | "name": "p2" 46 | } 47 | } 48 | }, 49 | "traffic-gen-2.example.com:8443": { 50 | "ports": { 51 | "p1": { 52 | "location": "localhost:5555", 53 | "name": "p1" 54 | } 55 | } 56 | } 57 | } 58 | } 59 | ``` 60 | 61 | Key elements in the configuration: 62 | 63 | - `schemas`: Settings for schema management 64 | - `schema_path`: Optional path to directory containing custom schema files 65 | - `targets`: Map of traffic generator targets 66 | - `ports`: Configuration for each port on the target, with location and name 67 | 68 | ### Custom Schema Support 69 | 70 | The OTG MCP Server supports loading schema files from user-defined directories, which is useful when: 71 | 72 | - You have custom schemas for specific traffic generator versions 73 | - You need to test with unreleased API versions 74 | - You have special extensions to the standard OTG schemas 75 | 76 | To use custom schemas: 77 | 78 | 1. Add a `schemas` section to your configuration file with the `schema_path` field pointing to your schema directory 79 | 2. Organize your custom schema files in the same version-based structure as the built-in schemas 80 | 3. Custom schemas will take priority over built-in schemas when both exist 81 | 82 | Example directory structure for custom schemas: 83 | ``` 84 | /path/to/custom/schemas/ 85 | ├── 1_28_0/ 86 | │ └── openapi.yaml 87 | ├── 1_29_0/ 88 | │ └── openapi.yaml 89 | └── 1_31_0/ # Custom schema version not available in built-in schemas 90 | └── openapi.yaml 91 | ``` 92 | 93 | ### API Version Handling 94 | 95 | The OTG MCP Server automatically detects API versions from traffic generator targets: 96 | 97 | 1. When connecting to a target, the server queries its API version 98 | 2. If an exact matching schema version is available (versions 1.28.0 and newer are supported), it uses that schema 99 | 3. If no exact match exists, it follows this priority order to find the closest match: 100 | - Schema with same major.minor version and equal or lower patch version 101 | - Schema with same major version and highest available minor version 102 | - Latest available schema version as fallback 103 | 4. This process checks both custom schemas (if configured) and built-in schemas, with custom schemas taking priority (a sketch of the matching order follows below)
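The following minimal sketch illustrates that matching order; the function name and structure are illustrative assumptions, not the actual `SchemaRegistry` implementation:

```python
# Illustrative sketch of the version-matching priority order only;
# this is NOT the real SchemaRegistry code.
from typing import List, Tuple


def pick_schema(requested: str, available: List[str]) -> str:
    """Pick the closest available schema for a requested version string."""
    req = tuple(int(p) for p in requested.replace(".", "_").split("_"))
    parsed: List[Tuple[Tuple[int, ...], str]] = sorted(
        (tuple(int(p) for p in v.split("_")), v) for v in available
    )
    if not parsed:
        raise ValueError("No schema versions available")
    for ver, name in parsed:  # 1. exact match
        if ver == req:
            return name
    # 2. same major.minor with an equal-or-lower patch (highest such patch)
    same_minor = [n for v, n in parsed if v[:2] == req[:2] and v[2] <= req[2]]
    if same_minor:
        return same_minor[-1]
    # 3. same major version (highest available minor)
    same_major = [n for v, n in parsed if v[0] == req[0]]
    if same_major:
        return same_major[-1]
    return parsed[-1][1]  # 4. latest available version as the fallback


# With built-in schemas 1_28_0, 1_29_0, and 1_30_0 on disk:
assert pick_schema("1.28.2", ["1_28_0", "1_29_0", "1_30_0"]) == "1_28_0"
assert pick_schema("2.0.0", ["1_28_0", "1_29_0", "1_30_0"]) == "1_30_0"
```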
104 | 105 | This intelligent version matching ensures optimal compatibility while allowing for custom schema extensions when needed. 106 | 107 | ## Testing with deployIxiaC 108 | 109 | The project includes a utility script `deploy/deployIxiaC.sh` that helps set up and deploy Ixia-C for testing purposes. This script: 110 | 111 | - Pulls necessary Docker images for Ixia-C 112 | - Sets up the environment with the correct networking 113 | - Configures the test environment for OTG API usage 114 | 115 | To use this utility: 116 | 117 | ```bash 118 | # Navigate to the deploy directory 119 | cd deploy 120 | 121 | # Run the deployment script (requires Docker) 122 | ./deployIxiaC.sh 123 | ``` 124 | 125 | Refer to the [Ixia-C Deployment Guide](./docs/deployIxiaC_simple_testing.md) for more detailed information about using Ixia-C with this project. 126 | 127 | ## Examples 128 | 129 | The project includes examples showing how to: 130 | 131 | - Connect to traffic generators 132 | - Configure traffic flows 133 | - Start and stop traffic 134 | - Collect and analyze metrics 135 | 136 | See the examples in the `examples/` directory: 137 | 138 | - `trafficGeneratorConfig.json`: Example configuration for traffic generators 139 | - `simple_gateway_test.py`: Example script for basic testing of API executions 140 | 141 | ## Getting Started 142 | 143 | ### Prerequisites 144 | 145 | - Python 3.11 or higher 146 | - Access to traffic generator hardware or virtual devices 147 | - Configuration file for target traffic generators 148 | 149 | ### Installation 150 | 151 | ```bash 152 | # Clone the repository 153 | git clone https://github.com/h4ndzdatm0ld/otg-mcp.git 154 | cd otg-mcp 155 | 156 | # Create a virtual environment 157 | python -m venv .venv 158 | source .venv/bin/activate # On Windows: .venv\Scripts\activate 159 | 160 | # Install dependencies 161 | pip install -e ".[dev]" 162 | ``` 163 | 164 | ### Docker Container 165 | 166 | The OTG MCP Server can also be run as a Docker container, available from the GitHub Container Registry: 167 | 168 | ```bash 169 | # Pull the container image 170 | docker pull ghcr.io/h4ndzdatm0ld/otg-mcp:latest 171 | 172 | # Run the container with your configuration 173 | docker run -v $(pwd)/examples:/app/examples -p 8443:8443 ghcr.io/h4ndzdatm0ld/otg-mcp:latest --config-file examples/trafficGeneratorConfig.json 174 | ``` 175 | 176 | This approach eliminates the need for local Python environment setup and ensures consistent execution across different platforms. 177 | 178 | ### MCP Server Configuration Example 179 | 180 | When integrating with an MCP client application, you can use the following configuration example to register the OTG MCP Server as a tool provider: 181 | 182 | > NOTE: Alternatively, the server can be launched with `uvx`. 183 | 184 | ```json 185 | { 186 | "OpenTrafficGenerator - MCP": { 187 | "autoApprove": [ 188 | "get_available_targets", 189 | "get_config", 190 | "get_metrics", 191 | "get_schemas_for_target", 192 | "health", 193 | "list_schemas_for_target", 194 | "set_config", 195 | "start_capture", 196 | "start_traffic", 197 | "stop_capture", 198 | "stop_traffic" 199 | ], 200 | "command": "python", 201 | "args": [ 202 | "/path/to/otg-mcp/src/otg_mcp/server.py", 203 | "--config-file", 204 | "/path/to/otg-mcp/examples/trafficGeneratorConfigWithCustomSchemas.json" 205 | ] 206 | } 207 | } 208 | ``` 209 | 210 | 211 | ## Development 212 | 213 | ### Project Structure 214 | 215 | ``` 216 | . 
217 | ├── docs/ # Documentation 218 | │ ├── deployIxiaC_simple_testing.md # Ixia-C testing guide 219 | │ └── github-flow.md # GitHub workflow documentation 220 | ├── deploy/ # Deployment scripts 221 | │ └── deployIxiaC.sh # Script for deploying Ixia-C testing environment 222 | ├── src/ # Source code 223 | │ └── otg_mcp/ # Main package 224 | │ ├── models/ # Data models 225 | │ │ ├── __init__.py # Model exports 226 | │ │ └── models.py # Model definitions 227 | │ ├── schemas/ # Built-in API schemas 228 | │ │ ├── 1_28_0/ # Schema version 1.28.0 229 | │ │ ├── 1_29_0/ # Schema version 1.29.0 230 | │ │ └── 1_30_0/ # Schema version 1.30.0 231 | │ ├── __init__.py # Package initialization 232 | │ ├── __main__.py # Entry point 233 | │ ├── client.py # Traffic generator client 234 | │ ├── config.py # Configuration management 235 | │ ├── schema_registry.py # Schema management 236 | │ └── server.py # MCP server implementation 237 | ├── examples/ # Example scripts and configurations 238 | │ ├── trafficGeneratorConfig.json # Example configuration 239 | │ └── simple_gateway_test.py # Example test script 240 | ├── tests/ # Test suite 241 | │ ├── fixtures/ # Test fixtures 242 | │ └── ... # Various test files 243 | ├── .gitignore # Git ignore file 244 | ├── Dockerfile # Docker build file 245 | ├── LICENSE # License file 246 | ├── README.md # This file 247 | ├── pyproject.toml # Project metadata 248 | └── requirements.txt # Dependencies 249 | 250 | ``` 251 | 252 | ### Key Components 253 | 254 | 1. **MCP Server**: Implements the Model Context Protocol interface 255 | 2. **Configuration Manager**: Handles traffic generator configuration and connections 256 | 3. **OTG Client**: Client for interacting with traffic generators 257 | 4. **Schema Registry**: Manages API schemas for different traffic generator versions 258 | 5. **Models**: Pydantic models for representing data structures 259 | 260 | ### Code Quality 261 | 262 | The project maintains high code quality standards: 263 | 264 | - **Type Safety**: Full mypy type hinting 265 | - **Testing**: Comprehensive pytest coverage 266 | - **Documentation**: Google docstring format for all code 267 | - **Logging**: Used throughout the codebase instead of comments 268 | - **Data Models**: Pydantic models for validation and serialization 269 | 270 | ## Contributing 271 | 272 | 1. Ensure all code includes proper type hints 273 | 2. Follow Google docstring format 274 | 3. Add comprehensive tests for new features 275 | 4. Use logging rather than comments for important operations 276 | 5. Update documentation for any API or behavior changes 277 | 278 | ## Release Process 279 | 280 | For information about version management and releasing new versions of this package, see [RELEASE.md](./RELEASE.md). 281 | 282 | Key points: 283 | - Version management is handled through `pyproject.toml` only 284 | - Follows semantic versioning with pre-release tags (`a0`, `b0`, `rc0`) 285 | - Automated CI/CD pipeline handles testing and PyPI publishing 286 | 287 | ## License 288 | 289 | This project is licensed under the terms of the license included in the repository. 
290 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by hatch-pip-compile with Python 3.12 3 | # 4 | # - pytest 5 | # - pytest-cov 6 | # - pytest-asyncio 7 | # - mypy 8 | # - snappi>=1.28.2 9 | # - black>=25.1.0 10 | # - boto3>=1.28.0 11 | # - fastmcp>=2.2.5 12 | # - httpx>=0.24.1 13 | # - modelcontextprotocol>=0.1.0 14 | # - openapi-python-client>=0.14.0 15 | # - pip>=25.1.1 16 | # - pydantic-settings>=2.0.0 17 | # - pydantic>=2.0.0 18 | # - ruff>=0.11.7 19 | # - snappi>=1.28.2 20 | # - tenacity>=8.2.2 21 | # 22 | 23 | aiohappyeyeballs==2.6.1 24 | # via aiohttp 25 | aiohttp==3.11.18 26 | # via together 27 | aiosignal==1.3.2 28 | # via aiohttp 29 | airtrain==0.1.68 30 | # via modelcontextprotocol 31 | annotated-types==0.7.0 32 | # via pydantic 33 | anthropic==0.50.0 34 | # via airtrain 35 | anyio==4.9.0 36 | # via 37 | # anthropic 38 | # cerebras-cloud-sdk 39 | # google-genai 40 | # groq 41 | # httpx 42 | # httpx-ws 43 | # mcp 44 | # openai 45 | # sse-starlette 46 | # starlette 47 | attrs==25.3.0 48 | # via 49 | # aiohttp 50 | # openapi-python-client 51 | backoff==2.2.1 52 | # via posthog 53 | black==25.1.0 54 | # via hatch.envs.default 55 | boto3==1.38.3 56 | # via 57 | # hatch.envs.default 58 | # airtrain 59 | botocore==1.38.3 60 | # via 61 | # boto3 62 | # s3transfer 63 | cachecontrol==0.14.2 64 | # via firebase-admin 65 | cachetools==5.5.2 66 | # via google-auth 67 | cerebras-cloud-sdk==1.29.0 68 | # via airtrain 69 | certifi==2025.4.26 70 | # via 71 | # httpcore 72 | # httpx 73 | # requests 74 | cffi==1.17.1 75 | # via cryptography 76 | charset-normalizer==3.4.1 77 | # via requests 78 | click==8.1.8 79 | # via 80 | # airtrain 81 | # black 82 | # together 83 | # typer 84 | # uvicorn 85 | colorama==0.4.6 86 | # via airtrain 87 | coverage==7.8.0 88 | # via pytest-cov 89 | cryptography==44.0.2 90 | # via pyjwt 91 | distro==1.9.0 92 | # via 93 | # anthropic 94 | # cerebras-cloud-sdk 95 | # groq 96 | # openai 97 | # posthog 98 | eval-type-backport==0.2.2 99 | # via together 100 | exceptiongroup==1.2.2 101 | # via fastmcp 102 | fastmcp==2.2.5 103 | # via hatch.envs.default 104 | filelock==3.18.0 105 | # via together 106 | firebase-admin==6.8.0 107 | # via airtrain 108 | fireworks-ai==0.15.12 109 | # via airtrain 110 | frozenlist==1.6.0 111 | # via 112 | # aiohttp 113 | # aiosignal 114 | google-ai-generativelanguage==0.6.15 115 | # via google-generativeai 116 | google-api-core==2.24.2 117 | # via 118 | # firebase-admin 119 | # google-ai-generativelanguage 120 | # google-api-python-client 121 | # google-cloud-core 122 | # google-cloud-firestore 123 | # google-cloud-storage 124 | # google-generativeai 125 | google-api-python-client==2.168.0 126 | # via 127 | # firebase-admin 128 | # google-generativeai 129 | google-auth==2.39.0 130 | # via 131 | # google-ai-generativelanguage 132 | # google-api-core 133 | # google-api-python-client 134 | # google-auth-httplib2 135 | # google-cloud-core 136 | # google-cloud-firestore 137 | # google-cloud-storage 138 | # google-genai 139 | # google-generativeai 140 | google-auth-httplib2==0.2.0 141 | # via google-api-python-client 142 | google-cloud-core==2.4.3 143 | # via 144 | # google-cloud-firestore 145 | # google-cloud-storage 146 | google-cloud-firestore==2.20.2 147 | # via firebase-admin 148 | google-cloud-storage==3.1.0 149 | # via firebase-admin 150 | google-crc32c==1.7.1 151 | # via 
152 | # google-cloud-storage 153 | # google-resumable-media 154 | google-genai==1.12.1 155 | # via airtrain 156 | google-generativeai==0.8.5 157 | # via airtrain 158 | google-resumable-media==2.7.2 159 | # via google-cloud-storage 160 | googleapis-common-protos==1.70.0 161 | # via 162 | # google-api-core 163 | # grpcio-status 164 | groq==0.23.1 165 | # via airtrain 166 | grpcio==1.59.5 167 | # via 168 | # google-api-core 169 | # grpcio-status 170 | # grpcio-tools 171 | # snappi 172 | grpcio-status==1.59.5 173 | # via google-api-core 174 | grpcio-tools==1.59.5 175 | # via snappi 176 | h11==0.16.0 177 | # via 178 | # httpcore 179 | # uvicorn 180 | # wsproto 181 | httpcore==1.0.9 182 | # via 183 | # httpx 184 | # httpx-ws 185 | httplib2==0.22.0 186 | # via 187 | # google-api-python-client 188 | # google-auth-httplib2 189 | httpx==0.28.1 190 | # via 191 | # hatch.envs.default 192 | # anthropic 193 | # cerebras-cloud-sdk 194 | # fastmcp 195 | # fireworks-ai 196 | # google-genai 197 | # groq 198 | # httpx-ws 199 | # mcp 200 | # openai 201 | # openapi-python-client 202 | httpx-sse==0.4.0 203 | # via 204 | # fireworks-ai 205 | # mcp 206 | httpx-ws==0.7.2 207 | # via fireworks-ai 208 | idna==3.10 209 | # via 210 | # anyio 211 | # httpx 212 | # requests 213 | # yarl 214 | iniconfig==2.1.0 215 | # via pytest 216 | jinja2==3.1.6 217 | # via openapi-python-client 218 | jiter==0.9.0 219 | # via 220 | # anthropic 221 | # openai 222 | jmespath==1.0.1 223 | # via 224 | # boto3 225 | # botocore 226 | loguru==0.7.3 227 | # via airtrain 228 | markdown-it-py==3.0.0 229 | # via rich 230 | markupsafe==3.0.2 231 | # via jinja2 232 | mcp==1.6.0 233 | # via fastmcp 234 | mdurl==0.1.2 235 | # via markdown-it-py 236 | modelcontextprotocol==0.1.0 237 | # via hatch.envs.default 238 | monotonic==1.6 239 | # via posthog 240 | msgpack==1.1.0 241 | # via cachecontrol 242 | multidict==6.4.3 243 | # via 244 | # aiohttp 245 | # yarl 246 | mypy==1.11.0 247 | # via hatch.envs.default 248 | mypy-extensions==1.0.0 249 | # via 250 | # black 251 | # mypy 252 | numpy==2.2.5 253 | # via together 254 | openai==1.76.0 255 | # via airtrain 256 | openapi-pydantic==0.5.1 257 | # via fastmcp 258 | openapi-python-client==0.24.3 259 | # via hatch.envs.default 260 | packaging==25.0 261 | # via 262 | # black 263 | # modelcontextprotocol 264 | # pytest 265 | pathspec==0.12.1 266 | # via black 267 | pillow==11.2.1 268 | # via 269 | # fireworks-ai 270 | # together 271 | pip==25.1.1 272 | # via hatch.envs.default 273 | platformdirs==4.3.7 274 | # via black 275 | pluggy==1.5.0 276 | # via pytest 277 | posthog==4.0.0 278 | # via airtrain 279 | prompt-toolkit==3.0.51 280 | # via airtrain 281 | propcache==0.3.1 282 | # via 283 | # aiohttp 284 | # yarl 285 | proto-plus==1.26.1 286 | # via 287 | # google-ai-generativelanguage 288 | # google-api-core 289 | # google-cloud-firestore 290 | protobuf==4.24.4 291 | # via 292 | # google-ai-generativelanguage 293 | # google-api-core 294 | # google-cloud-firestore 295 | # google-generativeai 296 | # googleapis-common-protos 297 | # grpcio-status 298 | # grpcio-tools 299 | # proto-plus 300 | # snappi 301 | pyarrow==19.0.1 302 | # via together 303 | pyasn1==0.6.1 304 | # via 305 | # pyasn1-modules 306 | # rsa 307 | pyasn1-modules==0.4.2 308 | # via google-auth 309 | pycparser==2.22 310 | # via cffi 311 | pydantic==2.11.3 312 | # via 313 | # hatch.envs.default 314 | # airtrain 315 | # anthropic 316 | # cerebras-cloud-sdk 317 | # fireworks-ai 318 | # google-genai 319 | # google-generativeai 320 | # groq 321 | # mcp 
322 | # openai 323 | # openapi-pydantic 324 | # openapi-python-client 325 | # pydantic-settings 326 | # together 327 | pydantic-core==2.33.1 328 | # via pydantic 329 | pydantic-settings==2.9.1 330 | # via 331 | # hatch.envs.default 332 | # mcp 333 | pygments==2.19.1 334 | # via rich 335 | pyjwt==2.10.1 336 | # via firebase-admin 337 | pyparsing==3.2.3 338 | # via httplib2 339 | pytest==8.3.5 340 | # via 341 | # hatch.envs.default 342 | # pytest-asyncio 343 | # pytest-cov 344 | pytest-asyncio==0.26.0 345 | # via hatch.envs.default 346 | pytest-cov==6.1.1 347 | # via hatch.envs.default 348 | python-dateutil==2.9.0.post0 349 | # via 350 | # botocore 351 | # openapi-python-client 352 | # posthog 353 | python-dotenv==1.1.0 354 | # via 355 | # airtrain 356 | # fastmcp 357 | # modelcontextprotocol 358 | # pydantic-settings 359 | pyyaml==6.0.2 360 | # via 361 | # airtrain 362 | # snappi 363 | requests==2.32.3 364 | # via 365 | # airtrain 366 | # cachecontrol 367 | # google-api-core 368 | # google-cloud-storage 369 | # google-genai 370 | # modelcontextprotocol 371 | # posthog 372 | # snappi 373 | # together 374 | rich==13.9.4 375 | # via 376 | # airtrain 377 | # fastmcp 378 | # modelcontextprotocol 379 | # together 380 | # typer 381 | rsa==4.9.1 382 | # via google-auth 383 | ruamel-yaml==0.18.10 384 | # via openapi-python-client 385 | ruamel-yaml-clib==0.2.12 386 | # via ruamel-yaml 387 | ruff==0.11.7 388 | # via 389 | # hatch.envs.default 390 | # openapi-python-client 391 | s3transfer==0.12.0 392 | # via boto3 393 | semantic-version==2.10.0 394 | # via snappi 395 | setuptools==80.0.0 396 | # via grpcio-tools 397 | shellingham==1.5.4 398 | # via 399 | # openapi-python-client 400 | # typer 401 | six==1.17.0 402 | # via 403 | # posthog 404 | # python-dateutil 405 | snappi==1.29.0 406 | # via hatch.envs.default 407 | sniffio==1.3.1 408 | # via 409 | # anthropic 410 | # anyio 411 | # cerebras-cloud-sdk 412 | # groq 413 | # openai 414 | sse-starlette==2.3.3 415 | # via mcp 416 | starlette==0.46.2 417 | # via 418 | # mcp 419 | # sse-starlette 420 | tabulate==0.9.0 421 | # via together 422 | tenacity==9.1.2 423 | # via hatch.envs.default 424 | together==1.5.5 425 | # via airtrain 426 | tqdm==4.67.1 427 | # via 428 | # google-generativeai 429 | # openai 430 | # together 431 | typer==0.15.2 432 | # via 433 | # airtrain 434 | # fastmcp 435 | # modelcontextprotocol 436 | # openapi-python-client 437 | # together 438 | typing-extensions==4.12.2 439 | # via 440 | # anthropic 441 | # anyio 442 | # cerebras-cloud-sdk 443 | # google-genai 444 | # google-generativeai 445 | # groq 446 | # mypy 447 | # openai 448 | # openapi-python-client 449 | # pydantic 450 | # pydantic-core 451 | # typer 452 | # typing-inspection 453 | typing-inspection==0.4.0 454 | # via 455 | # pydantic 456 | # pydantic-settings 457 | uritemplate==4.1.1 458 | # via google-api-python-client 459 | urllib3==2.4.0 460 | # via 461 | # botocore 462 | # requests 463 | # snappi 464 | uvicorn==0.34.2 465 | # via mcp 466 | wcwidth==0.2.13 467 | # via prompt-toolkit 468 | websockets==15.0.1 469 | # via 470 | # fastmcp 471 | # google-genai 472 | wsproto==1.2.0 473 | # via httpx-ws 474 | yarl==1.20.0 475 | # via aiohttp 476 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. 
Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /src/otg_mcp/server.py: -------------------------------------------------------------------------------- 1 | """OTG MCP Server implementation. 2 | 3 | This module implements a Model Context Protocol (MCP) server that provides access to 4 | Open Traffic Generator (OTG) APIs via direct connections to traffic generators. 5 | """ 6 | 7 | import argparse 8 | import logging 9 | import sys 10 | import traceback 11 | from typing import Annotated, Any, Dict, List, Literal, Optional, Union 12 | 13 | from fastmcp import FastMCP 14 | from pydantic import Field 15 | 16 | from otg_mcp.client import OtgClient 17 | from otg_mcp.config import Config 18 | from otg_mcp.models import ( 19 | CaptureResponse, 20 | ConfigResponse, 21 | ControlResponse, 22 | HealthStatus, 23 | MetricsResponse, 24 | ) 25 | 26 | logger = logging.getLogger(__name__) 27 | logger.setLevel(logging.INFO) 28 | 29 | 30 | class OtgMcpServer: 31 | """OTG MCP Server that provides access to traffic generators. 32 | 33 | This server provides a unified API that adheres to the 34 | Open Traffic Generator specification. 35 | 36 | Attributes: 37 | mcp: FastMCP instance that handles tool registration and execution 38 | """ 39 | 40 | def __init__(self, config_file: str): 41 | """Initialize the server and register all tools and endpoints. 
42 | 43 | Args: 44 | config_file: Path to the configuration file 45 | """ 46 | try: 47 | logger.info("Initializing config with the provided file") 48 | config = Config(config_file) 49 | 50 | logger.info("Setting up logging configuration") 51 | config.setup_logging() 52 | 53 | logger.info("Creating the FastMCP instance") 54 | self.mcp: FastMCP = FastMCP("otg-mcp-server", log_level="INFO") 55 | 56 | logger.info("Initializing schema registry") 57 | custom_schema_path = None 58 | if hasattr(config, "schemas") and config.schemas.schema_path: 59 | custom_schema_path = config.schemas.schema_path 60 | logger.info( 61 | f"Using custom schema path from config: {custom_schema_path}" 62 | ) 63 | 64 | from otg_mcp.schema_registry import SchemaRegistry 65 | 66 | self.schema_registry = SchemaRegistry(custom_schema_path) 67 | 68 | logger.info("Initializing OTG client with schema registry") 69 | self.client = OtgClient(config=config, schema_registry=self.schema_registry) 70 | 71 | logger.info("Registering all endpoints") 72 | self._register_tools() 73 | 74 | except Exception as e: 75 | logger.critical(f"Failed to initialize server: {str(e)}") 76 | logger.critical(f"Stack trace: {traceback.format_exc()}") 77 | raise 78 | 79 | def _register_tools(self): 80 | """Automatically register all methods starting with 'tool_' as MCP tools.""" 81 | logger.info("Discovering and registering tools") 82 | 83 | count = 0 84 | for attr_name in dir(self): 85 | logger.debug(f"Checking attribute: {attr_name}") 86 | if attr_name.startswith("_") or not callable(getattr(self, attr_name)): 87 | logger.debug(f"Skipping non-tool attribute: {attr_name}") 88 | continue 89 | 90 | if attr_name.startswith("tool_"): 91 | method = getattr(self, attr_name) 92 | tool_name = attr_name[5:] 93 | logger.debug( 94 | f"Found tool method: {attr_name}, registering as: {tool_name}" 95 | ) 96 | logger.info(f"Registering tool: {tool_name}") 97 | self.mcp.add_tool(method, name=tool_name) 98 | count += 1 99 | 100 | logger.info(f"Registered {count} tools successfully") 101 | 102 | async def tool_set_config( 103 | self, 104 | config: Annotated[ 105 | Dict[str, Any], Field(description="The configuration to set") 106 | ], 107 | target: Annotated[ 108 | str, Field(description="Target traffic generator hostname or IP address") 109 | ], 110 | ) -> ConfigResponse: 111 | """Set the configuration of the traffic generator and retrieve the applied configuration.""" 112 | logger.info(f"Tool: set_config for target {target}") 113 | return await self.client.set_config(target=target, config=config) 114 | 115 | async def tool_get_config( 116 | self, target: Annotated[str, Field(description="Target traffic generator")] 117 | ) -> ConfigResponse: 118 | """Get the current configuration of the traffic generator.""" 119 | logger.info(f"Tool: get_config for target {target}") 120 | return await self.client.get_config(target=target) 121 | 122 | async def tool_get_metrics( 123 | self, 124 | flow_names: Annotated[ 125 | Optional[Union[str, List[str]]], 126 | Field(description="Optional flow name(s) to get metrics for"), 127 | ] = None, 128 | port_names: Annotated[ 129 | Optional[Union[str, List[str]]], 130 | Field(description="Optional port name(s) to get metrics for"), 131 | ] = None, 132 | target: Annotated[ 133 | Optional[str], Field(description="Optional target traffic generator") 134 | ] = None, 135 | ) -> MetricsResponse: 136 | """Get metrics from the traffic generator.""" 137 | logger.info( 138 | f"Tool: get_metrics for target {target}, flow_names={flow_names}, 
port_names={port_names}" 139 | ) 140 | return await self.client.get_metrics( 141 | flow_names=flow_names, port_names=port_names, target=target 142 | ) 143 | 144 | async def tool_start_traffic( 145 | self, target: Annotated[str, Field(description="Target traffic generator")] 146 | ) -> ControlResponse: 147 | """Start traffic generation.""" 148 | logger.info(f"Tool: start_traffic for target {target}") 149 | return await self.client.start_traffic(target=target) 150 | 151 | async def tool_stop_traffic( 152 | self, target: Annotated[str, Field(description="Target traffic generator")] 153 | ) -> ControlResponse: 154 | """Stop traffic generation.""" 155 | logger.info(f"Tool: stop_traffic for target {target}") 156 | return await self.client.stop_traffic(target=target) 157 | 158 | async def tool_start_capture( 159 | self, 160 | port_name: Annotated[ 161 | str, Field(description="Name of the port to capture packets on") 162 | ], 163 | target: Annotated[str, Field(description="Target traffic generator")], 164 | ) -> CaptureResponse: 165 | """Start packet capture on a port.""" 166 | logger.info(f"Tool: start_capture for port {port_name} on target {target}") 167 | return await self.client.start_capture(target=target, port_name=port_name) 168 | 169 | async def tool_stop_capture( 170 | self, 171 | port_name: Annotated[ 172 | str, Field(description="Name of the port to stop capturing packets on") 173 | ], 174 | target: Annotated[str, Field(description="Target traffic generator")], 175 | ) -> CaptureResponse: 176 | """Stop packet capture on a port.""" 177 | logger.info(f"Tool: stop_capture for port {port_name} on target {target}") 178 | return await self.client.stop_capture(target=target, port_name=port_name) 179 | 180 | async def tool_get_capture( 181 | self, 182 | port_name: Annotated[ 183 | str, Field(description="Name of the port to get capture from") 184 | ], 185 | target: Annotated[str, Field(description="Target traffic generator")], 186 | output_dir: Annotated[ 187 | Optional[str], 188 | Field(description="Directory to save the capture file (default: /tmp)"), 189 | ] = None, 190 | ) -> CaptureResponse: 191 | """ 192 | Get packet capture from a port and save it to a file. 193 | 194 | The capture data is saved as a .pcap file that can be opened with tools like Wireshark. 195 | """ 196 | logger.info(f"Tool: get_capture for port {port_name} on target {target}") 197 | return await self.client.get_capture( 198 | target=target, port_name=port_name, output_dir=output_dir 199 | ) 200 | 201 | async def tool_get_available_targets(self) -> Dict[str, Dict[str, Any]]: 202 | """Get all available traffic generator targets with comprehensive information.""" 203 | logger.info("Tool: get_available_targets") 204 | return await self.client.get_available_targets() 205 | 206 | async def tool_health( 207 | self, 208 | target: Annotated[ 209 | Optional[str], 210 | Field( 211 | description="Optional target to check. 
If None, checks all available targets" 212 | ), 213 | ] = None, 214 | ) -> HealthStatus: 215 | """Health check tool.""" 216 | logger.info(f"Tool: health for {target or 'all targets'}") 217 | return await self.client.health(target) 218 | 219 | async def tool_get_schemas_for_target( 220 | self, 221 | target_name: Annotated[str, Field(description="Name of the target")], 222 | schema_names: Annotated[ 223 | List[str], 224 | Field( 225 | description='List of schema names to retrieve (e.g., ["Flow", "Port"] or ["components.schemas.Flow"])' 226 | ), 227 | ], 228 | ) -> Dict[str, Any]: 229 | """Get schemas for a specific target's API version.""" 230 | logger.info( 231 | f"Tool: get_schemas_for_target for {target_name}, schemas {schema_names}" 232 | ) 233 | return await self.client.get_schemas_for_target(target_name, schema_names) 234 | 235 | async def tool_list_schemas_for_target( 236 | self, target_name: Annotated[str, Field(description="Name of the target")] 237 | ) -> List[str]: 238 | """List available schemas for a specific target's API version.""" 239 | logger.info(f"Tool: list_schemas_for_target for {target_name}") 240 | return await self.client.list_schemas_for_target(target_name) 241 | 242 | def run(self, transport: Literal["stdio", "sse"] = "stdio"): 243 | """Run the server with the specified transport mechanism. 244 | 245 | Args: 246 | transport: Transport to use (stdio or sse) 247 | """ 248 | try: 249 | self.mcp.run(transport=transport) 250 | except Exception as e: 251 | logger.critical(f"Error running server: {str(e)}") 252 | logger.critical(f"Stack trace: {traceback.format_exc()}") 253 | raise 254 | 255 | 256 | def run_server() -> None: 257 | """Run the OTG MCP Server.""" 258 | try: 259 | logger.info("Parsing command-line arguments") 260 | parser = argparse.ArgumentParser(description="OTG MCP Server") 261 | parser.add_argument( 262 | "--config-file", 263 | type=str, 264 | required=True, 265 | help="Path to the traffic generator configuration file", 266 | ) 267 | parser.add_argument( 268 | "--transport", 269 | type=str, 270 | choices=["stdio", "sse"], 271 | default="stdio", 272 | help="Transport mechanism to use (stdio or sse)", 273 | ) 274 | 275 | args = parser.parse_args() 276 | 277 | logger.info("Initializing and running the server with the config file") 278 | server = OtgMcpServer(config_file=args.config_file) 279 | server.run(transport=args.transport) # type: ignore 280 | except Exception as e: 281 | logger.critical(f"Server failed with error: {str(e)}") 282 | logger.critical(f"Stack trace: {traceback.format_exc()}") 283 | sys.exit(1) 284 | 285 | 286 | def main() -> None: 287 | """Legacy entry point for backward compatibility.""" 288 | run_server() 289 | 290 | 291 | if __name__ == "__main__": 292 | run_server() 293 | --------------------------------------------------------------------------------