├── LICENSE ├── README.md ├── git_hooks ├── pre-commit └── pre-push ├── rules ├── api.mdc ├── architecture.mdc ├── ci_cd.mdc ├── code_modification.mdc ├── code_style.mdc ├── database.mdc ├── development_environment.mdc ├── development_tools.mdc ├── docker_path_standards.mdc ├── documentation.mdc ├── environment.mdc ├── error_handling.mdc ├── file_management.mdc ├── git.mdc ├── hostinstalldenied.mdc ├── monitoring.mdc ├── no_interactive_commands.mdc ├── performance.mdc ├── poetry_enforcement.mdc ├── security.mdc ├── testing.mdc └── troubleshooting.mdc └── scripts ├── check_docker_compose.py ├── check_dockerfile.py ├── check_env_secrets.py ├── check_service_structure.py ├── check_standards.py ├── container_manager.py ├── update_docs.py ├── validate.py ├── validate_service.py └── validators ├── api_validator.py ├── compose_validator.py ├── container_validator.py ├── dockerfile_validator.py └── poetry_validator.py /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 [Your Name] 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 🚫 Project Archived: `cursor-guardrails` 2 | 3 | > ⚠️ This project is archived and no longer maintained. 4 | > 5 | > It has been superseded by [OmniBase](https://github.com/OmniNode-ai/omnibase), 6 | > the official foundation for validation, protocol enforcement, and node scaffolding in the 7 | > [OmniNode](https://github.com/OmniNode-ai/omnibase) ecosystem. 8 | > 9 | > ✅ OmniBase includes: 10 | > - Modern Git hooks and CI pipelines 11 | > - Full ONEX protocol compliance 12 | > - Structured metadata validation 13 | > - Declarative node scaffolding and builder support 14 | > 15 | > For all new development, please use [OmniBase](https://github.com/OmniNode-ai/omnibase). 
16 | 
--------------------------------------------------------------------------------
/git_hooks/pre-commit:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | # Get the absolute path to the project root
4 | PROJECT_ROOT="$(git rev-parse --show-toplevel)"
5 | export PROJECT_ROOT
6 | 
7 | # Flag to control overall validation (set to "0" to disable all checks)
8 | VALIDATE_ENABLED=${VALIDATE_ENABLED:-0}
9 | 
10 | # List of containers to validate (space-separated)
11 | # By default, validate the core approved containers listed below
12 | VALIDATE_CONTAINERS=${VALIDATE_CONTAINERS:-"dev-environment foundation litellm haystack-mcp"}
13 | 
14 | # Get list of staged Python files
15 | STAGED_PY_FILES=$(git diff --cached --name-only --diff-filter=d | grep "\.py$" || true)
16 | if [ -n "$STAGED_PY_FILES" ] && [ "$VALIDATE_ENABLED" -eq 1 ]; then
17 |     echo "Processing staged Python files in approved containers..."
18 | 
19 |     # Create a temporary file to store approved files
20 |     APPROVED_FILES_TEMP=$(mktemp)
21 | 
22 |     for file in $STAGED_PY_FILES; do
23 |         # Extract container name from file path
24 |         container_path=$(echo "$file" | grep -o "containers/[^/]*" || echo "")
25 |         if [ -n "$container_path" ]; then
26 |             container_name=$(echo "$container_path" | cut -d'/' -f2)
27 | 
28 |             # Check if this container is in the approved list
29 |             if [[ " $VALIDATE_CONTAINERS " == *" $container_name "* ]]; then
30 |                 echo "Processing $file (in approved container: $container_name)"
31 | 
32 |                 # Run a quick syntax check before attempting to format
33 |                 python3 -m py_compile "$PROJECT_ROOT/$file" 2>/dev/null
34 |                 if [ $? -ne 0 ]; then
35 |                     echo "⚠️ Syntax error in $file. Skipping formatting."
36 |                     echo "Please fix the syntax errors before committing."
37 |                     exit 1
38 |                 fi
39 | 
40 |                 # Add to list of approved files for bulk formatting later
41 |                 echo "$PROJECT_ROOT/$file" >> "$APPROVED_FILES_TEMP"
42 | 
43 |                 # Run import standardization
44 |                 python3 "$PROJECT_ROOT/scripts/standardize_imports.py" "$PROJECT_ROOT/$(dirname "$file")"
45 |                 git add "$PROJECT_ROOT/$file"
46 |             else
47 |                 echo "Skipping $file (not in approved container list)"
48 |             fi
49 |         else
50 |             echo "Skipping $file (not in a container)"
51 |         fi
52 |     done
53 | 
54 |     # Run black on all approved files at once for better performance
55 |     if [ -s "$APPROVED_FILES_TEMP" ]; then
56 |         echo "Running Black formatter on modified files in approved containers..."
57 |         cat "$APPROVED_FILES_TEMP" | xargs black --quiet
58 | 
59 |         # Run isort on all approved files
60 |         echo "Running isort on modified files in approved containers..."
61 |         cat "$APPROVED_FILES_TEMP" | xargs isort --profile black
62 | 
63 |         # Add all formatted files back to staging
64 |         cat "$APPROVED_FILES_TEMP" | xargs git add
65 |     fi
66 | 
67 |     # Clean up temp file
68 |     rm "$APPROVED_FILES_TEMP"
69 | else
70 |     if [ -n "$STAGED_PY_FILES" ]; then
71 |         echo "Skipping Python file processing (VALIDATE_ENABLED=0)"
72 |     fi
73 | fi
74 | 
75 | # Container validation
76 | if [ "$VALIDATE_ENABLED" -eq 1 ]; then
77 |     echo "Running container validation on approved containers: $VALIDATE_CONTAINERS"
78 |     for container_name in $VALIDATE_CONTAINERS; do
79 |         container_path="$PROJECT_ROOT/containers/$container_name"
80 | 
81 |         if [ -d "$container_path" ]; then
82 |             echo "Validating approved container: $container_name"
83 |             python3 "$PROJECT_ROOT/scripts/validators/container_validator.py" "$container_path"
84 |             if [ $? -ne 0 ]; then
85 |                 echo "Container validation failed for $container_name."
86 | echo "You can bypass validation with: VALIDATE_ENABLED=0 git commit ..." 87 | exit 1 88 | fi 89 | else 90 | echo "Warning: Container $container_name not found at $container_path" 91 | fi 92 | done 93 | else 94 | echo "Skipping container validation (VALIDATE_ENABLED=0)" 95 | fi 96 | 97 | # Skip the pre-commit hooks from pre-commit framework as we've handled them directly 98 | SKIP=black,isort 99 | export SKIP 100 | 101 | # Print help message for enabling validation 102 | echo "" 103 | echo "=== Pre-Commit Hook Info ===" 104 | echo "• Validation is currently disabled by default (VALIDATE_ENABLED=0)" 105 | echo "• To enable validation: VALIDATE_ENABLED=1 git commit ..." 106 | echo "• To specify containers: VALIDATE_ENABLED=1 VALIDATE_CONTAINERS=\"dev-environment foundation\" git commit ..." 107 | echo "• Currently approved containers: $VALIDATE_CONTAINERS" 108 | echo "=============================" 109 | 110 | exit 0 111 | -------------------------------------------------------------------------------- /git_hooks/pre-push: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "Running pre-push checks..." 4 | 5 | # Get current branch 6 | CURRENT_BRANCH=$(git symbolic-ref --short HEAD) 7 | 8 | # Skip tests for specific branches or if user passes NO_TEST flag 9 | if [[ "$CURRENT_BRANCH" == "chore/standardize-dev-environment" ]] || [[ "$SKIP_TESTS" == "1" ]]; then 10 | echo "Skipping tests for branch $CURRENT_BRANCH or due to SKIP_TESTS flag" 11 | else 12 | # Run tests using the containerized test script 13 | echo "Running tests..." 14 | if [ -f "scripts/run_pre_push_tests.sh" ]; then 15 | scripts/run_pre_push_tests.sh 16 | TEST_RESULT=$? 17 | 18 | if [ $TEST_RESULT -ne 0 ]; then 19 | echo "Tests failed. Please fix the failing tests before pushing." 20 | echo "To bypass test failures, use: SKIP_TESTS=1 git push" 21 | exit 1 22 | fi 23 | else 24 | echo "Pre-push test script not found, skipping tests..." 25 | fi 26 | fi 27 | 28 | # Skip standards check for specific branches 29 | if [[ "$CURRENT_BRANCH" == "chore/standardize-dev-environment" ]] || [[ "$SKIP_STANDARDS" == "1" ]]; then 30 | echo "Skipping standards check for branch $CURRENT_BRANCH or due to SKIP_STANDARDS flag" 31 | else 32 | # Run standards checker 33 | echo "Running standards checks..." 34 | if [ -f "scripts/check_standards.py" ]; then 35 | python3 scripts/check_standards.py 36 | CHECK_RESULT=$? 37 | 38 | if [ $CHECK_RESULT -ne 0 ]; then 39 | echo "Standards check failed. Please fix the issues before pushing." 40 | echo "To bypass standards check, use: SKIP_STANDARDS=1 git push" 41 | exit 1 42 | fi 43 | else 44 | echo "Standards check script not found, skipping..." 
45 |     fi
46 | fi
47 | 
48 | exit 0
--------------------------------------------------------------------------------
/rules/api.mdc:
--------------------------------------------------------------------------------
1 | ---
2 | description: Rules for API design, versioning, and response formats
3 | globs: ["**/api/**/*.py", "**/*controller.py", "**/openapi.yml"]
4 | alwaysApply: true
5 | ---
6 | 
7 | # API Rules
8 | 
9 | ## External API Compatibility
10 | 
11 | ### General Rules
12 | - NEVER modify external API request/response formats
13 | - Preserve ALL fields exactly as received
14 | - Match response format precisely to the external API spec
15 | - Forward all headers without modification
16 | - Preserve exact ID formats and values
17 | 
18 | ### MCP API Compatibility
19 | - Preserve request ID exactly as received (from query params, headers, or body)
20 | - Match OpenAI-style streaming format precisely
21 | - Return exact model names as provided by upstream
22 | - Preserve all metadata fields
23 | 
24 | ## Versioning
25 | 
26 | - Required: true
27 | - Format: url (e.g., /v1/resource)
28 | 
29 | ## Response Format
30 | 
31 | ### Internal API Success
32 | ```json
33 | {
34 |   "status": true,
35 |   "data": {}
36 | }
37 | ```
38 | 
39 | ### Internal API Error
40 | ```json
41 | {
42 |   "status": false,
43 |   "error": "error_code",
44 |   "message": "Human readable message"
45 | }
46 | ```
47 | 
48 | ### External API Responses
49 | - Must match external API spec exactly
50 | - No modification of response structure
51 | - Preserve all fields and formats
52 | - Forward status codes unchanged
53 | 
54 | ## Rate Limiting
55 | 
56 | - Required: true
57 | - Headers:
58 |   - X-RateLimit-Limit
59 |   - X-RateLimit-Remaining
60 |   - X-RateLimit-Reset
61 | 
62 | ## Required Endpoints
63 | 
64 | - /health
65 | - /version
66 | 
67 | ## Documentation
68 | 
69 | - Format: openapi
70 | - Version: 3.0.0
--------------------------------------------------------------------------------
/rules/architecture.mdc:
--------------------------------------------------------------------------------
1 | ---
2 | description: Rules for system architecture and service communication
3 | globs: ["**/*.py", "**/docker-compose.yml", "**/config/*.json"]
4 | alwaysApply: true
5 | ---
6 | 
7 | # Architecture Rules
8 | 
9 | ## Service Communication
10 | 
11 | ### Required Communication Patterns
12 | - redis_streams (current implementation)
13 | - kafka_streams (planned for high-volume channels)
14 | 
15 | ### Banned Practices
16 | - direct_imports between services
17 | - shared_code_mounting
18 | - direct database access between services
19 | 
20 | ### Message Channel Structure
21 | - `agent.{agent_id}.commands` - For direct commands to specific agents
22 | - `agent.{agent_id}.events` - For events emitted by specific agents
23 | - `system.events` - For system-wide broadcasts
24 | - `system.deadletter` - For messages that failed to process
25 | 
26 | ### Communication Patterns
27 | - Request-response pattern
28 | - Pub-sub events
29 | - Broadcast messages
30 | - Direct commands
31 | 
32 | ### Rules
33 | - All inter-service communication must use message streams
34 | - Each service must be independent with its own codebase
35 | - Use foundation MessageBus client for all messaging
36 | - Implement proper schema validation for messages
37 | - Handle dead letter scenarios and failed message processing
38 | 
39 | ## Node Framework Requirements
40 | 
41 | - All services must implement the Node interface (a Python sketch follows this list):
42 |   ```
43 |   initialize(): Promise<void>
44 |   start(): Promise<void>
45 |   stop(): Promise<void>
46 |   getHealth(): NodeHealth
47 |   getMetrics(): NodeMetrics
48 |   handleMessage(message: Message): Promise<void>
49 |   ```
50 | - Services must report health status
51 | - Services must consume configuration from central config system
52 | - Services must implement proper resource management
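53 | 
54 | As a rough Python rendering of the same contract (illustrative only; the `NodeHealth`, `NodeMetrics`, and `Message` types are assumed to come from the foundation package):
55 | 
56 | ```python
57 | from abc import ABC, abstractmethod
58 | 
59 | 
60 | class Node(ABC):
61 |     """Sketch of the Node contract listed above."""
62 | 
63 |     @abstractmethod
64 |     async def initialize(self) -> None: ...
65 | 
66 |     @abstractmethod
67 |     async def start(self) -> None: ...
68 | 
69 |     @abstractmethod
70 |     async def stop(self) -> None: ...
71 | 
72 |     @abstractmethod
73 |     def get_health(self) -> "NodeHealth": ...  # assumed foundation type
74 | 
75 |     @abstractmethod
76 |     def get_metrics(self) -> "NodeMetrics": ...  # assumed foundation type
77 | 
78 |     @abstractmethod
79 |     async def handle_message(self, message: "Message") -> None: ...  # assumed foundation type
80 | ```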
--------------------------------------------------------------------------------
/rules/ci_cd.mdc:
--------------------------------------------------------------------------------
1 | ---
2 | description: Rules for continuous integration and deployment processes
3 | globs: [".github/workflows/**", "**/Jenkinsfile", ".gitlab-ci.yml"]
4 | alwaysApply: true
5 | ---
6 | 
7 | # CI/CD Rules
8 | 
9 | ## Required Checks
10 | 
11 | All CI pipelines must include:
12 | - lint
13 | - test
14 | - security_scan
15 | - build
16 | 
17 | ## Deployment
18 | 
19 | ### Environments
20 | - dev
21 | - staging
22 | - prod
23 | 
24 | ### Approval Requirements
25 | - staging: approval required
26 | - prod: approval required
27 | 
28 | ## Integration
29 | 
30 | ### CI Integration
31 | Required steps:
32 | - run all test categories
33 | - generate coverage reports
34 | - fail on coverage decrease
35 | - publish test results
36 | 
37 | ### Artifacts
38 | - coverage reports
39 | - test logs
40 | - performance metrics
--------------------------------------------------------------------------------
/rules/code_modification.mdc:
--------------------------------------------------------------------------------
1 | ---
2 | description: Rules for modifying code and indicating changes
3 | globs: ["**/*.py", "**/*.ts", "**/*.js", "**/*.md"]
4 | alwaysApply: true
5 | ---
6 | 
7 | # Code Modification Rules
8 | 
9 | ## Markers
10 | 
11 | - Use '// ... existing code ...' to indicate unchanged sections
12 | - Never omit markers when skipping code sections
13 | 
14 | ## Line References
15 | 
16 | - Format: startLine:endLine:filepath
17 | - Severity: error
18 | 
19 | ## Context
20 | 
21 | - Include only necessary surrounding lines for context
22 | - Verify all necessary imports are included
23 | 
24 | ## Directory Structure
25 | 
26 | - Check directory tree template before creating new files or directories
27 | - Verify file location matches component type
28 | - Maintain consistent directory hierarchy
29 | 
30 | ## File Creation
31 | 
32 | - Check for duplicate files before creation
33 | - Use file_search tool before creating new files
34 | - Follow naming conventions
--------------------------------------------------------------------------------
/rules/code_style.mdc:
--------------------------------------------------------------------------------
1 | ---
2 | description: Code style and formatting rules for consistent development
3 | globs: ["**/*.py", "**/*.ts", "**/*.js"]
4 | alwaysApply: true
5 | ---
6 | 
7 | # Code Style Rules
8 | 
9 | ## Python
10 | 
11 | - Line length: 88 (Match black formatter)
12 | - Quotes: Double quotes for strings
13 | - Docstring: Google style
14 | - Imports:
15 |   - Order: stdlib, third_party, first_party, local
16 |   - Group by type: true
17 |   - Banned:
18 |     - "from ..
import *" (No relative imports) 19 | - "import *" (No wildcard imports) 20 | - Indent: 21 | - Style: space 22 | - Size: 4 23 | 24 | ## TypeScript 25 | 26 | - Quotes: single 27 | - Semicolons: required 28 | - Indent: 2 spaces 29 | -------------------------------------------------------------------------------- /rules/database.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Rules for database access, migrations, and transactions 3 | globs: ["**/models.py", "**/*repository.py", "**/migrations/**"] 4 | alwaysApply: true 5 | --- 6 | 7 | # Database Rules 8 | 9 | ## Migrations 10 | 11 | - Tool: alembic 12 | - Auto generate: true 13 | 14 | ## Connections 15 | 16 | - Pooling: required 17 | - Retry: required 18 | 19 | ## Transactions 20 | 21 | - Isolation level: read_committed 22 | 23 | ## Connection Pooling Optimization 24 | 25 | - Use appropriate pool sizes 26 | - Implement connection recycling 27 | - Configure timeouts 28 | - Use connection verification 29 | 30 | ## Query Optimization 31 | 32 | - Implement statement caching 33 | - Use batched operations for bulk processing 34 | - Implement efficient pagination 35 | - Avoid N+1 query problems 36 | -------------------------------------------------------------------------------- /rules/development_environment.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Rules for development environment, containerization, and command usage 3 | globs: ["**/devenv.sh", "**/dev", "docker-compose*.yml", "Dockerfile"] 4 | alwaysApply: true 5 | --- 6 | 7 | # Development Environment Rules 8 | 9 | ## Core Principles 10 | 11 | - All development work MUST be done inside containers 12 | - Never install dependencies or run commands directly on host system 13 | - Use `./dev` script for ALL container and development operations 14 | - Maintain consistent environment across all developers 15 | 16 | ## Container Usage 17 | 18 | - Always use ./dev script in project root for all development commands 19 | - Exception: Test execution should use run_tests.py directly as documented in testing.mdc 20 | - All development work must be done inside the dev container 21 | - Use ./dev shell for interactive development sessions 22 | 23 | ## Standard Commands 24 | 25 | ### Environment Management 26 | ```bash 27 | ./dev build # Build all development containers 28 | ./dev start # Start the development environment 29 | ./dev stop # Stop the development environment 30 | ./dev shell # Access development shell 31 | ./dev clean # Clean up environment 32 | ``` 33 | 34 | ### Development Operations 35 | ```bash 36 | ./dev format # Format code 37 | ./dev lint # Run linting 38 | ./dev docs # Generate documentation 39 | ``` 40 | 41 | ### Testing Commands 42 | Development Testing: 43 | ```bash 44 | ./dev test:unit # Run unit tests during development 45 | ./dev test:int # Run integration tests 46 | ./dev test:e2e # Run end-to-end tests 47 | ./dev test:cov # Run tests with coverage 48 | ``` 49 | 50 | CI/CD Testing: 51 | ```bash 52 | python run_tests.py [container] --type unit 53 | python run_tests.py [container] --type integration 54 | python run_tests.py [container] --type e2e 55 | ``` 56 | 57 | ## Environment Structure 58 | ``` 59 | containers/ 60 | ├── dev-environment/ # Development container configuration 61 | │ ├── Dockerfile # Dev container definition 62 | │ ├── pyproject.toml # Dev dependencies only 63 | │ └── docker-compose.dev.yml 64 | └── [service]/ # Service containers 65 | 
├── Dockerfile # Service-specific container 66 | └── pyproject.toml # Service-specific dependencies 67 | ``` 68 | 69 | ## Environment Setup 70 | 71 | - Use devenv.sh script for environment setup 72 | - All development commands (except testing) must be run through ./dev script 73 | - Environment variables are set automatically by dev script: 74 | - PROJECT_ROOT 75 | - PYTHONPATH: "/app" 76 | - ENV: "development" 77 | 78 | ## Dependency Management 79 | 80 | - Use Poetry EXCLUSIVELY for Python dependency management 81 | - Each container must have its own pyproject.toml 82 | - No requirements.txt or setup.py files allowed 83 | - Dev container includes all development tools 84 | 85 | ## Service Integration 86 | 87 | - Development container must have access to all services 88 | - Use consistent network configuration (ai-platform-net) 89 | - Implement health checks for all services 90 | - Use proper container naming conventions 91 | 92 | ## Protected Operations 93 | 94 | - Changes to docker-compose files require review 95 | - Environment variable changes must be documented 96 | - Core configuration changes need explicit approval 97 | - Container modifications must follow docker_path_standards.mdc 98 | 99 | ## Development Container Features 100 | 101 | - Pre-configured development tools 102 | - Consistent Python version (3.11) 103 | - Mounted source code volumes 104 | - Shared development network 105 | - Automated code formatting 106 | - Integrated testing tools 107 | - Documentation generation 108 | -------------------------------------------------------------------------------- /rules/development_tools.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Rules for development tools, IDE settings, and utility scripts 3 | globs: [".vscode/**", "**/.editorconfig", "**/scripts/**"] 4 | alwaysApply: true 5 | --- 6 | 7 | # Development Tools Rules 8 | 9 | ## VSCode 10 | 11 | ### Settings 12 | 13 | { 14 | "python.linting.enabled": true, 15 | "python.formatting.provider": "black", 16 | "editor.formatOnSave": true 17 | } 18 | 19 | 20 | ## Auto Fixes 21 | 22 | ### On Save 23 | - remove_unused_imports 24 | - sort_imports 25 | - format_code 26 | 27 | ## Custom Commands 28 | 29 | - **check-standards**: `python scripts/check_standards.py` 30 | - Description: Check compliance with coding standards 31 | 32 | - **create-service**: `python scripts/create_service.py` 33 | - Description: Create a new service from template 34 | 35 | - **update-docs**: `python scripts/update_docs.py` 36 | - Description: Update service documentation 37 | -------------------------------------------------------------------------------- /rules/docker_path_standards.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Rules for Docker path standards and build context usage 3 | globs: ["**/Dockerfile*", "**/docker-compose*.yml"] 4 | alwaysApply: true 5 | --- 6 | 7 | # Docker Path Standards 8 | 9 | ## Relative Paths in Dockerfiles 10 | 11 | Always use relative paths in Dockerfile `COPY` and `ADD` commands. This ensures that the Dockerfile works correctly regardless of the build context. 
12 | 13 | ✅ **Correct**: 14 | ```dockerfile 15 | COPY pyproject.toml ./ 16 | COPY src/ ./src/ 17 | ``` 18 | 19 | ❌ **Incorrect**: 20 | ```dockerfile 21 | COPY containers/service-name/pyproject.toml ./ 22 | COPY containers/service-name/src/ ./src/ 23 | ``` 24 | 25 | ## Build Context 26 | 27 | When building Docker images, set the build context to the directory containing the files to be copied into the image: 28 | 29 | ```bash 30 | # Correct (build from the service directory) 31 | docker build -t service-name:latest containers/service-name 32 | 33 | # Incorrect (requires absolute paths in Dockerfile) 34 | docker build -t service-name:latest -f containers/service-name/Dockerfile . 35 | ``` 36 | 37 | ## Working Directory 38 | 39 | Set the working directory to `/app` for consistency across all services: 40 | 41 | ```dockerfile 42 | WORKDIR /app 43 | ``` 44 | 45 | ## Entrypoint and Command 46 | 47 | For clarity and flexibility, separate the `ENTRYPOINT` and `CMD` instructions: 48 | 49 | ```dockerfile 50 | ENTRYPOINT ["poetry", "run"] 51 | CMD ["uvicorn", "service_name.main:app", "--host", "0.0.0.0", "--port", "8000"] 52 | ``` 53 | 54 | ## System Dependencies 55 | 56 | When installing system dependencies, always use `--no-install-recommends` and clean up the apt cache: 57 | 58 | ```dockerfile 59 | RUN apt-get update && apt-get install -y --no-install-recommends \ 60 | curl \ 61 | gcc \ 62 | && rm -rf /var/lib/apt/lists/* 63 | ``` 64 | 65 | ## Validation 66 | 67 | The Dockerfile validator script (`scripts/validators/dockerfile_validator.py`) checks for compliance with these standards. It will flag any absolute paths in `COPY` commands and suggest corrections. 68 | 69 | ## Documentation 70 | 71 | For more detailed information, refer to the Docker path standards documentation: 72 | `docs/standards/docker/DOCKER_PATH_STANDARDS.md` -------------------------------------------------------------------------------- /rules/documentation.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Rules for documentation, comments, and API references 3 | globs: ["**/*.md", "**/*.py"] 4 | alwaysApply: true 5 | --- 6 | 7 | # Documentation Rules 8 | 9 | ## Required Documentation Files 10 | 11 | - README.md in project root and each container 12 | - CONTRIBUTING.md for contribution guidelines 13 | - CHANGELOG.md for version history 14 | 15 | ## Strategic Documentation Types 16 | 17 | - Block documentation (e.g., BLOCK1_FOUNDATION.md) 18 | - Must include Overview, Components, Implementation Status, and Integration Points 19 | - Use checkboxes [x] for completed items, [ ] for pending items 20 | - Document specific Next Steps and Roadmap 21 | 22 | - Strategy documents (e.g., LLM_TRAINING_STRATEGY.md, OPTIMIZATION_STRATEGY.md) 23 | - Must include Overview, Objectives, and Implementation Details 24 | - Should include concrete code examples where relevant 25 | - Must align with technical implementation 26 | 27 | - Planning documents (e.g., GTM_PLAN.md) 28 | - Must include phased approach with clear timelines 29 | - Include measurable goals and KPIs 30 | - Document resource requirements 31 | 32 | ## Code Documentation 33 | 34 | - Python docstrings: 35 | - Use Google style docstrings 36 | - Required sections: Args, Returns, Raises, Examples 37 | - Use type hints in all function signatures 38 | 39 | - Special docstring annotations: 40 | ```python 41 | /** 42 | * @docCategory Foundation 43 | * @implementationStatus complete 44 | * @integrates Block2, Block5 45 | * 46 | * 
Service responsible for message routing between nodes. 47 | */ 48 | ``` 49 | 50 | ## Documentation Organization 51 | 52 | - docs/ - Root documentation directory 53 | - Block documentation (BLOCK*_*.md) 54 | - Strategy documents (*_STRATEGY.md, *_PLAN.md) 55 | - development/ - Development guidelines 56 | - api/ - API documentation 57 | 58 | ## Formatting Standards 59 | 60 | - Line length: 120 characters maximum 61 | - Code blocks must specify language 62 | - Use relative links for cross-references 63 | - Tables must have headers and proper alignment 64 | - Document all acronyms on first use 65 | -------------------------------------------------------------------------------- /rules/environment.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Rules for environment settings, variables, and requirements 3 | globs: ["**/.env", "**/requirements.txt", "**/setup.py"] 4 | alwaysApply: true 5 | --- 6 | 7 | # Environment Rules 8 | 9 | ## Python 10 | 11 | - Version: ">=3.11" 12 | - Virtual environment: required 13 | 14 | ## Node 15 | 16 | - Version: ">=18" 17 | 18 | ## Docker 19 | 20 | - Required: true 21 | - Compose version: ">=2.0" 22 | 23 | ## Required Variables 24 | 25 | - ENV 26 | - DEBUG 27 | - LOG_LEVEL 28 | 29 | ## Performance Settings 30 | 31 | - Memory: high 32 | - Optimize for large files: true 33 | -------------------------------------------------------------------------------- /rules/error_handling.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Rules for error handling, logging, and exception management 3 | globs: ["**/*.py"] 4 | alwaysApply: true 5 | --- 6 | 7 | # Error Handling Rules 8 | 9 | ## Messaging Error Handling 10 | 11 | - Implement dead letter handling for failed message processing 12 | - Log complete message context on errors 13 | - Include retry logic with exponential backoff for transient failures 14 | - Implement circuit breakers for external service dependencies 15 | 16 | ## Logging 17 | 18 | - Required fields in error logs: 19 | - timestamp 20 | - error_type 21 | - message 22 | - stack_trace 23 | - container_name 24 | - service_name 25 | - correlation_id (for tracing requests) 26 | 27 | ## Custom Exceptions 28 | 29 | - Base class: AppError 30 | - Implement proper exception hierarchy: 31 | ``` 32 | AppError 33 | ├── ValidationError 34 | ├── ServiceError 35 | │ ├── DatabaseError 36 | │ ├── MessagingError 37 | │ └── ExternalServiceError 38 | ├── ConfigurationError 39 | └── ResourceError 40 | ``` 41 | 42 | ## Error Response Format 43 | 44 | API error responses must follow standard format: 45 | ```json 46 | { 47 | "status": false, 48 | "error": "error_code", 49 | "message": "Human readable message", 50 | "details": { 51 | "field": "description of field-specific error" 52 | } 53 | } 54 | ``` 55 | 56 | ## Best Practices 57 | 58 | - Use specific exception types instead of generic exceptions 59 | - Include contextual information in error messages 60 | - Log exceptions at appropriate severity levels 61 | - Implement graceful degradation for non-critical services 62 | - Centralize error handling logic 63 | - Ensure errors are observable via monitoring system 64 | -------------------------------------------------------------------------------- /rules/file_management.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Rules for file creation, modification, and organization 3 | globs: ["**/*"] 4 | 
alwaysApply: true 5 | --- 6 | 7 | # File Management Rules 8 | 9 | ## Creation 10 | 11 | - Check standardization guidelines before creating files 12 | - Verify no duplicate functionality exists 13 | - Match existing file formats 14 | 15 | ## Imports 16 | 17 | - Verify package structure before adding imports 18 | - Follow import ordering standards: 19 | 1. Standard library imports 20 | 2. Third-party imports 21 | 3. First-party imports (foundation.*) 22 | 4. Local imports (relative) 23 | 24 | ## Modification 25 | 26 | - Preserve existing file format 27 | - Update related files when changing functionality: 28 | - Update tests 29 | - Update documentation 30 | - Update dependent modules 31 | - Update configuration files 32 | 33 | ## Directory Management 34 | 35 | - Follow standardization guidelines for directory structure 36 | - Verify directory purpose 37 | - Maintain consistent directory hierarchy 38 | - Include required base files (README.md, __init__.py) 39 | 40 | ## Validation 41 | 42 | - Run pre-creation checks before creating new files 43 | - Maintain consistency during updates 44 | -------------------------------------------------------------------------------- /rules/git.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Rules for git usage, branch naming, and commit messages 3 | globs: [".git/**", ".github/**", ".gitignore"] 4 | alwaysApply: true 5 | --- 6 | 7 | # Git Rules 8 | 9 | ## Branch Naming 10 | 11 | - Pattern: (feature|bugfix|hotfix|chore)/{ticket}-{description} 12 | - Example: feature/AI-123-add-new-endpoint 13 | 14 | ## Commit Messages 15 | 16 | - Format: type(scope): description 17 | - Types: feat, fix, docs, style, refactor, test, chore 18 | - Max length: 72 characters 19 | 20 | ## Hooks 21 | 22 | - Required hooks: 23 | - pre-commit 24 | - pre-push 25 | -------------------------------------------------------------------------------- /rules/hostinstalldenied.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: You cannot install anything or make changes directly to the host environment or project root without prompting me first. 3 | globs: ["/*", ".*", "package.json", "requirements.txt", "Dockerfile", "docker-compose.yml"] 4 | alwaysApply: true 5 | --- 6 | 7 | # Host and Project Root Protection Rules 8 | 9 | 1. **No Direct Host Installations**: 10 | - Do not execute or suggest installation commands that modify the host system 11 | - This includes `apt-get`, `brew`, `npm install -g`, `pip install` without virtual environments 12 | - Always ask before suggesting any system-wide package installation 13 | 14 | 2. **Project Root Protection**: 15 | - Do not modify core configuration files in the project root without explicit permission 16 | - Protected files include but are not limited to: 17 | - package.json, requirements.txt 18 | - Dockerfile, docker-compose.yml 19 | - .env, .gitignore 20 | - Any hidden configuration files (.*) 21 | 22 | 3. **Required Approach**: 23 | - When suggesting changes to protected files or system, explicitly ask for permission 24 | - Present the changes you want to make and wait for explicit confirmation 25 | - Suggest using isolated environments (venvs, containers) when possible 26 | 27 | These rules ensure system stability and prevent unexpected changes to critical project infrastructure. 
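28 | 
29 | To make the isolated-environments suggestion concrete, here is a minimal sketch using only the Python standard library (an illustration, not part of the original rule text):
30 | 
31 | ```python
32 | # Create a project-local virtual environment instead of touching the host Python.
33 | import venv
34 | 
35 | venv.EnvBuilder(with_pip=True).create(".venv")
36 | # Dependencies then install into .venv, never system-wide.
37 | ```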
-------------------------------------------------------------------------------- /rules/monitoring.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Rules for monitoring, metrics, logging, and observability 3 | globs: ["**/*.py", "**/prometheus.yml", "**/logging.conf"] 4 | alwaysApply: true 5 | --- 6 | 7 | # Monitoring Rules 8 | 9 | ## Metrics 10 | 11 | - Required: true 12 | - Format: prometheus 13 | - Required metrics: 14 | - requests_total 15 | - request_duration 16 | - errors_total 17 | 18 | ## Logging 19 | 20 | - Level: INFO 21 | - Format: json 22 | - Required imports: 23 | - structlog 24 | - logging 25 | 26 | ## Tracing 27 | 28 | - Required: true 29 | - Format: opentelemetry 30 | 31 | ## Foundation Monitoring 32 | 33 | - Message flow tracking 34 | - Channel performance metrics 35 | - Queue depth monitoring 36 | - Delivery latency tracking 37 | 38 | ## Observability 39 | 40 | - Standardized metrics across services 41 | - Centralized logging 42 | - Health check endpoints 43 | - Performance dashboards 44 | -------------------------------------------------------------------------------- /rules/no_interactive_commands.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Do not use interactive commands with Docker containers, prefer non-blocking alternatives 3 | globs: ["**/Dockerfile*", "**/*.sh", "**/docker-compose*.yml", "**/README.md"] 4 | alwaysApply: true 5 | --- 6 | 7 | # No Interactive Container Commands 8 | 9 | ## Prohibited Commands 10 | 11 | - **Interactive Container Logs**: Do not use `docker logs -f` or `docker-compose logs -f` 12 | - **Interactive Shells**: Do not use `docker exec -it` or `docker-compose exec` without specific exit conditions 13 | - **Watching Process Output**: Avoid `tail -f`, `watch`, or other commands that don't terminate automatically 14 | 15 | ## Recommended Alternatives 16 | 17 | - **Container Logs**: Use `docker logs --tail=100` or redirect to a file with `docker logs > container.log` 18 | - **Command Execution**: Use non-interactive mode: `docker exec container_name command` (without -it flags) 19 | - **Automated Output Capture**: 20 | 21 | # Instead of: docker logs -f container 22 | # Use: 23 | docker logs --tail=100 container 24 | # Or with timestamps: 25 | docker logs --tail=100 --timestamps container 26 | 27 | 28 | ## Rationale 29 | 30 | Interactive commands: 31 | 1. Block automation scripts indefinitely 32 | 2. Prevent clean CI/CD pipeline execution 33 | 3. Make logs harder to capture and analyze 34 | 4. 
Create inconsistent behavior between development and production 35 | 36 | ## Exceptions 37 | 38 | The only permitted exceptions are: 39 | - Debugging instructions that explicitly instruct users to enter/exit interactive modes 40 | - Development environment setup with clear instructions for termination 41 | -------------------------------------------------------------------------------- /rules/performance.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Rules for performance optimization, resource management, and scaling 3 | globs: ["**/*.py", "**/benchmarks/**"] 4 | alwaysApply: true 5 | --- 6 | 7 | # Performance Optimization Rules 8 | 9 | ## Database Optimization 10 | 11 | - Use connection pooling with appropriate settings: 12 | ```python 13 | engine = create_engine( 14 | DATABASE_URL, 15 | pool_size=20, 16 | max_overflow=30, 17 | pool_timeout=30, 18 | pool_recycle=1800, 19 | pool_pre_ping=True 20 | ) 21 | ``` 22 | - Implement statement caching 23 | - Use batched operations for bulk processing 24 | - Implement efficient pagination through keyset pagination 25 | 26 | ## Resource Management 27 | 28 | - Configure appropriate resource limits for containers: 29 | ```yaml 30 | deploy: 31 | resources: 32 | limits: 33 | cpus: '2' 34 | memory: 2G 35 | reservations: 36 | cpus: '1' 37 | memory: 1G 38 | ``` 39 | - Implement resource monitoring and alerts 40 | - Use async/await for I/O-bound operations 41 | - Optimize memory usage in high-throughput components 42 | 43 | ## Caching Strategy 44 | 45 | - Implement appropriate caching layers: 46 | - In-memory caching for frequent computations 47 | - Redis caching for shared application state 48 | - Content caching for static assets 49 | - Document cache invalidation strategies 50 | - Implement cache warming for critical paths 51 | 52 | ## Asynchronous Processing 53 | 54 | - Use task queues for long-running operations 55 | - Implement circuit breakers for external dependencies 56 | - Batch related operations where possible 57 | - Use streaming for large data transfers 58 | 59 | ## Benchmarking 60 | 61 | - Establish performance baselines for critical operations 62 | - Implement automated performance testing 63 | - Document expected performance characteristics 64 | - Set alerts for performance degradation -------------------------------------------------------------------------------- /rules/poetry_enforcement.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Rules for standardized dependency management using Poetry across all containers 3 | globs: ["**/pyproject.toml", "**/requirements.txt", "**/setup.py", "**/Dockerfile", "**/poetry.lock"] 4 | alwaysApply: true 5 | --- 6 | 7 | # Poetry Enforcement Rules 8 | 9 | ## Required Structure 10 | 11 | - All Python packages must use Poetry for dependency management 12 | - Each container must have a `pyproject.toml` file in its root directory 13 | - No `requirements.txt` or `setup.py` files are allowed 14 | 15 | ## pyproject.toml Requirements 16 | 17 | - Must include complete project metadata: 18 | ```toml 19 | [tool.poetry] 20 | name = "container-name" 21 | version = "0.1.0" 22 | description = "Container purpose description" 23 | authors = ["Project Maintainer "] 24 | readme = "README.md" 25 | ``` -------------------------------------------------------------------------------- /rules/security.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | 
description: Rules for security, access control, and data protection
3 | globs: ["**/*.py", "**/security/**", "**/auth/**"]
4 | alwaysApply: true
5 | ---
6 | 
7 | # Security Rules
8 | 
9 | ## AI Model Security
10 | 
11 | - Validate all inputs to AI models with strict sanitization
12 | - Implement rate limiting for model inference endpoints
13 | - Support offline mode operation for sensitive environments
14 | - Establish guardrails for model outputs and reasoning
15 | - Document model limitations and potential vulnerabilities
16 | 
17 | ## Secrets Management
18 | 
19 | - Storage: Use environment variables for secrets in containers
20 | - Never store secrets in:
21 |   - Source code
22 |   - Logs
23 |   - Comments
24 |   - Configuration files
25 | - Use secure vaults for credential storage in production
26 | 
27 | ## API Security
28 | 
29 | - Implement proper authentication for all endpoints
30 | - Use role-based access control for API operations
31 | - Generate audit logs for sensitive operations
32 | - Set appropriate CORS policies
33 | - Implement API rate limiting and throttling
34 | 
35 | ## Data Protection
36 | 
37 | - Apply encryption for data at rest and in transit
38 | - Implement data minimization principles
39 | - Support anonymization for sensitive information
40 | - Provide clear data retention policies
41 | - Ensure proper access controls for user data
42 | 
43 | ## Banned Functions
44 | 
45 | The following functions and practices are prohibited:
46 | - `eval`, `exec` for dynamic code execution
47 | - `os.system` and shell injection vectors
48 | - Hardcoded credentials
49 | - Disabling SSL verification
50 | - Overly permissive file permissions
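51 | 
52 | This ban lends itself to automated checking. A minimal sketch (illustrative; not an existing validator in this repo) that flags direct `eval`/`exec` calls with the standard-library `ast` module:
53 | 
54 | ```python
55 | import ast
56 | import sys
57 | 
58 | BANNED_CALLS = {"eval", "exec"}
59 | 
60 | 
61 | def find_banned_calls(source: str) -> list[int]:
62 |     """Return line numbers of direct calls to banned functions."""
63 |     hits = []
64 |     for node in ast.walk(ast.parse(source)):
65 |         # Only catches calls by bare name; os.system would need ast.Attribute handling.
66 |         if isinstance(node, ast.Call) and isinstance(node.func, ast.Name):
67 |             if node.func.id in BANNED_CALLS:
68 |                 hits.append(node.lineno)
69 |     return hits
70 | 
71 | 
72 | if __name__ == "__main__":
73 |     with open(sys.argv[1]) as f:
74 |         for lineno in find_banned_calls(f.read()):
75 |             print(f"banned call at line {lineno}")
76 | ```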
--------------------------------------------------------------------------------
/rules/testing.mdc:
--------------------------------------------------------------------------------
1 | ---
2 | description: Rules for test organization, execution, and coverage
3 | globs: ["**/tests/**/*.py", "**/conftest.py", "**/pytest.ini"]
4 | alwaysApply: true
5 | ---
6 | 
7 | # Testing Rules
8 | 
9 | ## Framework
10 | 
11 | - Use pytest (>=8.3.4)
12 | - Required packages:
13 |   - pytest_asyncio (>=0.25.3)
14 |   - pytest_cov (>=6.0.0)
15 |   - pytest_timeout (>=2.2.0)
16 |   - pytest_docker (>=3.1.1)
17 |   - pytest_mock (>=3.12.0)
18 |   - pytest_xdist (>=3.5.0)
19 | 
20 | ## Organization
21 | 
22 | ### Directory Structure
23 | 
24 | - tests/unit/ - Unit tests
25 | - tests/integration/ - Integration tests
26 | - tests/e2e/ - End-to-end tests
27 | - tests/fixtures/ - Test fixtures
28 | - tests/mocks/ - Mock objects
29 | - tests/utils/ - Test utilities
30 | - conftest.py - Shared fixtures
31 | 
32 | ### Naming
33 | 
34 | - Test files: "test_*.py"
35 | - Test functions: "test_*"
36 | - Fixtures: "*_fixture.py"
37 | - Mocks: "mock_*.py"
38 | 
39 | ## Execution
40 | 
41 | - ALWAYS use run_tests.py for test execution
42 | - Run tests from the project root
43 | - Tests will automatically be run in the dev container
44 | - Always use --asyncio-mode=auto for async tests
45 | - Never run tests directly on host system without run_tests.py
46 | 
47 | ### Commands
48 | 
49 | - Run all tests for a container:
50 |   ```
51 |   python run_tests.py [container_name]
52 |   ```
53 | 
54 | - Run specific test type:
55
| ``` 56 | python run_tests.py [container_name] --type unit 57 | python run_tests.py [container_name] --type integration 58 | python run_tests.py [container_name] --type e2e 59 | ``` 60 | 61 | - Run specific test path: 62 | ``` 63 | python run_tests.py [container_name] --path tests/unit/test_specific.py 64 | ``` 65 | 66 | - Run with extra arguments (like coverage): 67 | ``` 68 | python run_tests.py [container_name] --extra "--cov=src --cov-report=html" 69 | ``` 70 | 71 | ## Standard Test Commands 72 | 73 | - Unit tests: `python run_tests.py foundation --type unit --extra "--asyncio-mode=auto --log-cli-level=INFO"` 74 | - Integration tests: `python run_tests.py foundation --type integration --extra "--asyncio-mode=auto --log-cli-level=INFO"` 75 | - E2E tests: `python run_tests.py foundation --type e2e --extra "--asyncio-mode=auto --log-cli-level=INFO"` 76 | - Specific test: `python run_tests.py foundation --path [test_path] --extra "--asyncio-mode=auto --log-cli-level=INFO"` 77 | - With coverage: `python run_tests.py foundation --path tests/ --extra "--cov --cov-report=term-missing"` 78 | 79 | ## Haystack-MCP Test Examples 80 | 81 | - Run all Haystack-MCP unit tests: 82 | ``` 83 | python run_tests.py haystack-mcp --type unit 84 | ``` 85 | 86 | - Run specific modules in Haystack-MCP: 87 | ``` 88 | python run_tests.py haystack-mcp --path "tests/unit/tools/ tests/unit/protocol/" 89 | ``` 90 | 91 | - Run tests for Cursor integration: 92 | ``` 93 | python run_tests.py haystack-mcp --path "tests/unit/integrations/test_cursor_integration.py" 94 | ``` 95 | 96 | - Run tests with coverage for Haystack-MCP: 97 | ``` 98 | python run_tests.py haystack-mcp --type unit --extra "--cov=src/haystack_mcp --cov-report=html" 99 | ``` 100 | 101 | ## Alternative Execution from Container Directory 102 | 103 | If working directly in the container directory, you can also use: 104 | 105 | ```bash 106 | # From the haystack-mcp container directory 107 | ./run_tests.sh --type unit 108 | ./run_tests.sh --path "tests/unit/tools/" 109 | ./run_tests.sh --path "tests/unit/integrations/test_cursor_integration.py" 110 | ``` 111 | 112 | ## Best Practices 113 | 114 | - One assertion per test when possible 115 | - Use descriptive test names 116 | - Isolate test dependencies 117 | - Clean up test data 118 | - Mock external services 119 | - Include both positive and negative test cases 120 | -------------------------------------------------------------------------------- /rules/troubleshooting.mdc: -------------------------------------------------------------------------------- 1 | --- 2 | description: Rules for troubleshooting, debugging, and issue resolution 3 | globs: ["docs/troubleshooting/*.md"] 4 | alwaysApply: true 5 | --- 6 | 7 | # Troubleshooting Rules 8 | 9 | ## Required Sections 10 | 11 | All troubleshooting documents must include: 12 | - **Issue Description**: Clear explanation of the problem 13 | - **Current State**: What's happening now 14 | - **Expected Behavior**: What should be happening 15 | - **Environment**: System information, versions, etc. 
16 | - **Attempted Solutions**: What has been tried
17 | - **Next Steps**: Planned actions to resolve
18 | - **Resolution**: How the issue was fixed (when resolved)
19 | 
20 | ## File Patterns
21 | 
22 | - All troubleshooting files must be in docs/troubleshooting/*.md
23 | - Naming convention: `ISSUE_TYPE-BRIEF_DESCRIPTION.md`
24 | - Example: `DATABASE-CONNECTION_TIMEOUT.md`
25 | 
26 | ## Documentation Standards
27 | 
28 | - Include specific error messages with exact text
29 | - Document all steps to reproduce issues
30 | - Include system information when relevant (OS, versions, etc.)
31 | - Document both symptoms and root causes
32 | - Provide verification steps for solutions
33 | - Cross-reference related issues
34 | - Include code snippets when applicable
35 | - Add timestamps for ongoing issues
36 | 
37 | ## Common Troubleshooting Patterns
38 | 
39 | ### Database Issues
40 | - Check connection strings
41 | - Verify credentials
42 | - Test database connectivity
43 | - Check for schema changes
44 | 
45 | ### API Issues
46 | - Verify endpoint URLs
47 | - Check request/response formats
48 | - Validate authentication
49 | - Test with curl or Postman
50 | 
51 | ### Container Issues
52 | - Check container logs
53 | - Verify resource limits
54 | - Ensure ports are mapped correctly
55 | - Check for networking issues
56 | 
57 | ## Debugging Tools
58 | 
59 | - Use `python run_tests.py [container_name] --debug` for detailed logs
60 | - Enable debug logging with `LOG_LEVEL=DEBUG`
61 | - Use container inspection: `docker inspect [container_name]`
62 | - Check application logs: `docker logs [container_name]`
--------------------------------------------------------------------------------
/scripts/check_docker_compose.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """Script to check Docker Compose file against standardization templates."""
3 | 
4 | import sys
5 | from pathlib import Path
6 | from typing import Dict, List
7 | 
8 | import yaml  # PyYAML
9 | 
10 | 
11 | def load_template() -> Dict:
12 |     """Load Docker Compose service template."""
13 |     standards_dir = Path(__file__).parent.parent / "docs" / "standards"
14 |     template_path = standards_dir / "docker" / "DOCKER_COMPOSE_SERVICE_TEMPLATE.yml"
15 | 
16 |     with open(template_path) as f:
17 |         return yaml.safe_load(f)
18 | 
19 | 
20 | def check_service_config(service_name: str, config: Dict, template: Dict) -> List[str]:
21 |     """Check if a service configuration follows the template standards."""
22 |     errors = []
23 | 
24 |     # Required top-level keys
25 |     required_keys = {
26 |         "build",
27 |         "environment",
28 |         "volumes",
29 |         "healthcheck"
30 |     }
31 | 
32 |     # Check required keys
33 |     for key in required_keys:
34 |         if key not in config:
35 |             errors.append(f"Service '{service_name}' is missing required key: {key}")
36 | 
37 |     # Check environment variables
38 |     if "environment" in config:
39 |         env_vars = set()
40 |         for env in config["environment"]:
41 |             if isinstance(env, str):
42 |                 env_vars.add(env.split("=")[0])
43 |             elif isinstance(env, dict):
44 |                 env_vars.update(env.keys())
45 | 
46 |         required_env_vars = {
47 |             "PYTHONPATH",
48 |             "ENV"
49 |         }
50 | 
51 |         for var in required_env_vars:
52 |             if var not in env_vars:
53 |                 errors.append(f"Service '{service_name}' is missing required environment variable: {var}")
54 | 
55 |     # Check build configuration
56 |     if "build" in config:
57 |         if "context" not in config["build"]:
58 |             errors.append(f"Service '{service_name}' build is missing context")
59 | if "dockerfile" not in config["build"]: 60 | errors.append(f"Service '{service_name}' build is missing dockerfile path") 61 | 62 | # Check volumes 63 | if "volumes" in config: 64 | source_mounted = False 65 | for volume in config["volumes"]: 66 | if "/app" in volume: 67 | source_mounted = True 68 | break 69 | if not source_mounted: 70 | errors.append(f"Service '{service_name}' must mount source code to /app") 71 | 72 | # Check healthcheck 73 | if "healthcheck" in config: 74 | required_health_keys = {"test", "interval", "timeout", "retries"} 75 | health_config = config["healthcheck"] 76 | 77 | for key in required_health_keys: 78 | if key not in health_config: 79 | errors.append(f"Service '{service_name}' healthcheck is missing: {key}") 80 | 81 | return errors 82 | 83 | 84 | def check_networks(compose_config: Dict) -> List[str]: 85 | """Check if required networks are defined.""" 86 | errors = [] 87 | 88 | if "networks" not in compose_config: 89 | errors.append("Missing top-level networks configuration") 90 | return errors 91 | 92 | required_networks = {"platform-net"} 93 | defined_networks = set(compose_config["networks"].keys()) 94 | 95 | for network in required_networks: 96 | if network not in defined_networks: 97 | errors.append(f"Missing required network: {network}") 98 | 99 | return errors 100 | 101 | 102 | def check_dependencies(compose_config: Dict) -> List[str]: 103 | """Check if service dependencies are properly configured.""" 104 | errors = [] 105 | 106 | for service_name, config in compose_config.get("services", {}).items(): 107 | if "depends_on" in config: 108 | deps = config["depends_on"] 109 | if isinstance(deps, dict): 110 | for dep, dep_config in deps.items(): 111 | if "condition" not in dep_config: 112 | errors.append(f"Service '{service_name}' dependency on '{dep}' should specify a condition") 113 | elif isinstance(deps, list): 114 | errors.append(f"Service '{service_name}' should use healthcheck conditions in depends_on") 115 | 116 | return errors 117 | 118 | 119 | def main() -> int: 120 | """Main function to check Docker Compose configuration.""" 121 | try: 122 | template = load_template() 123 | compose_path = Path("docker-compose.yml") 124 | 125 | if not compose_path.is_file(): 126 | print("Error: docker-compose.yml not found") 127 | return 1 128 | 129 | with open(compose_path) as f: 130 | compose_config = yaml.safe_load(f) 131 | 132 | if not compose_config or "services" not in compose_config: 133 | print("Error: Invalid docker-compose.yml format") 134 | return 1 135 | 136 | exit_code = 0 137 | 138 | # Check networks 139 | network_errors = check_networks(compose_config) 140 | if network_errors: 141 | print("\nNetwork configuration errors:") 142 | for error in network_errors: 143 | print(f" - {error}") 144 | exit_code = 1 145 | 146 | # Check dependencies 147 | dependency_errors = check_dependencies(compose_config) 148 | if dependency_errors: 149 | print("\nDependency configuration errors:") 150 | for error in dependency_errors: 151 | print(f" - {error}") 152 | exit_code = 1 153 | 154 | # Check each service 155 | for service_name, config in compose_config["services"].items(): 156 | service_errors = check_service_config(service_name, config, template) 157 | if service_errors: 158 | print(f"\nErrors in service '{service_name}':") 159 | for error in service_errors: 160 | print(f" - {error}") 161 | exit_code = 1 162 | 163 | return exit_code 164 | 165 | except Exception as e: 166 | print(f"Error: {str(e)}") 167 | return 1 168 | 169 | 170 | if __name__ == "__main__": 171 | 
sys.exit(main()) -------------------------------------------------------------------------------- /scripts/check_dockerfile.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """Script to check Dockerfile against standardization templates.""" 3 | 4 | import re 5 | import sys 6 | from pathlib import Path 7 | from typing import Dict, List, Set, Tuple 8 | 9 | 10 | def load_template() -> str: 11 | """Load Dockerfile template.""" 12 | standards_dir = Path(__file__).parent.parent / "docs" / "standards" 13 | template_path = standards_dir / "docker" / "DOCKERFILE_TEMPLATE" 14 | 15 | with open(template_path) as f: 16 | return f.read() 17 | 18 | 19 | def parse_dockerfile(content: str) -> List[Tuple[str, str]]: 20 | """Parse Dockerfile content into a list of (instruction, argument) tuples.""" 21 | instructions = [] 22 | current_instruction = None 23 | current_args = [] 24 | 25 | for line in content.split("\n"): 26 | line = line.strip() 27 | if not line or line.startswith("#"): 28 | continue 29 | 30 | if line.endswith("\\"): 31 | if current_instruction is None: 32 | instruction, arg = line.split(None, 1) 33 | current_instruction = instruction 34 | current_args = [arg.rstrip("\\").strip()] 35 | else: 36 | current_args.append(line.rstrip("\\").strip()) 37 | else: 38 | if current_instruction is not None: 39 | current_args.append(line) 40 | instructions.append((current_instruction, " ".join(current_args))) 41 | current_instruction = None 42 | current_args = [] 43 | else: 44 | instruction, arg = line.split(None, 1) 45 | instructions.append((instruction, arg)) 46 | 47 | return instructions 48 | 49 | 50 | def check_base_image(instructions: List[Tuple[str, str]]) -> List[str]: 51 | """Check if the base image is correct.""" 52 | errors = [] 53 | 54 | if not instructions or instructions[0][0] != "FROM": 55 | errors.append("Dockerfile must start with FROM instruction") 56 | return errors 57 | 58 | base_image = instructions[0][1] 59 | if not base_image.startswith("python:3.11"): 60 | errors.append("Base image must be python:3.11") 61 | 62 | return errors 63 | 64 | 65 | def check_environment_variables(instructions: List[Tuple[str, str]]) -> List[str]: 66 | """Check if required environment variables are set.""" 67 | errors = [] 68 | required_env_vars = { 69 | "PYTHONUNBUFFERED", 70 | "PYTHONDONTWRITEBYTECODE", 71 | "POETRY_VERSION", 72 | "POETRY_HOME", 73 | "POETRY_VIRTUALENVS_IN_PROJECT", 74 | "POETRY_NO_INTERACTION", 75 | "PYSETUP_PATH", 76 | "VENV_PATH" 77 | } 78 | 79 | found_env_vars = set() 80 | for instruction, args in instructions: 81 | if instruction == "ENV": 82 | # Handle both formats: ENV KEY=VALUE and ENV KEY VALUE 83 | if "=" in args: 84 | key = args.split("=")[0].strip() 85 | else: 86 | key = args.split()[0].strip() 87 | found_env_vars.add(key) 88 | 89 | for var in required_env_vars: 90 | if var not in found_env_vars: 91 | errors.append(f"Missing required environment variable: {var}") 92 | 93 | return errors 94 | 95 | 96 | def check_poetry_installation(instructions: List[Tuple[str, str]]) -> List[str]: 97 | """Check if Poetry is installed correctly.""" 98 | errors = [] 99 | poetry_install_found = False 100 | 101 | for instruction, args in instructions: 102 | if instruction == "RUN" and "curl -sSL https://install.python-poetry.org" in args: 103 | poetry_install_found = True 104 | break 105 | 106 | if not poetry_install_found: 107 | errors.append("Poetry installation command not found") 108 | 109 | return errors 110 | 111 | 112 | def 
check_dependencies_installation(instructions: List[Tuple[str, str]]) -> List[str]: 113 | """Check if dependencies are installed correctly.""" 114 | errors = [] 115 | poetry_install_found = False 116 | copy_requirements_found = False 117 | 118 | for instruction, args in instructions: 119 | if instruction == "COPY" and "pyproject.toml" in args and "poetry.lock" in args: 120 | copy_requirements_found = True 121 | elif instruction == "RUN" and "poetry install" in args: 122 | poetry_install_found = True 123 | 124 | if not copy_requirements_found: 125 | errors.append("Must copy pyproject.toml and poetry.lock files") 126 | if not poetry_install_found: 127 | errors.append("Must install dependencies using poetry install") 128 | 129 | return errors 130 | 131 | 132 | def check_healthcheck(instructions: List[Tuple[str, str]]) -> List[str]: 133 | """Check if healthcheck is configured correctly.""" 134 | errors = [] 135 | healthcheck_found = False 136 | 137 | for instruction, args in instructions: 138 | if instruction == "HEALTHCHECK": 139 | healthcheck_found = True 140 | if "--interval" not in args or "--timeout" not in args or "--retries" not in args: 141 | errors.append("Healthcheck must specify interval, timeout, and retries") 142 | break 143 | 144 | if not healthcheck_found: 145 | errors.append("Healthcheck configuration not found") 146 | 147 | return errors 148 | 149 | 150 | def check_labels(instructions: List[Tuple[str, str]]) -> List[str]: 151 | """Check if required labels are present.""" 152 | errors = [] 153 | required_labels = {"maintainer", "version", "description"} 154 | found_labels = set() 155 | 156 | for instruction, args in instructions: 157 | if instruction == "LABEL": 158 | for label in args.split(): 159 | if "=" in label: 160 | found_labels.add(label.split("=")[0].strip()) 161 | 162 | for label in required_labels: 163 | if label not in found_labels: 164 | errors.append(f"Missing required label: {label}") 165 | 166 | return errors 167 | 168 | 169 | def main() -> int: 170 | """Main function to check Dockerfile configuration.""" 171 | try: 172 | template = load_template() 173 | dockerfile_path = Path("Dockerfile") 174 | 175 | if not dockerfile_path.is_file(): 176 | print("Error: Dockerfile not found") 177 | return 1 178 | 179 | with open(dockerfile_path) as f: 180 | content = f.read() 181 | 182 | instructions = parse_dockerfile(content) 183 | exit_code = 0 184 | 185 | # Run all checks 186 | checks = [ 187 | ("Base image", check_base_image), 188 | ("Environment variables", check_environment_variables), 189 | ("Poetry installation", check_poetry_installation), 190 | ("Dependencies installation", check_dependencies_installation), 191 | ("Healthcheck", check_healthcheck), 192 | ("Labels", check_labels) 193 | ] 194 | 195 | for check_name, check_func in checks: 196 | errors = check_func(instructions) 197 | if errors: 198 | print(f"\n{check_name} errors:") 199 | for error in errors: 200 | print(f" - {error}") 201 | exit_code = 1 202 | 203 | return exit_code 204 | 205 | except Exception as e: 206 | print(f"Error: {str(e)}") 207 | return 1 208 | 209 | 210 | if __name__ == "__main__": 211 | sys.exit(main()) -------------------------------------------------------------------------------- /scripts/check_env_secrets.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import argparse 4 | import os 5 | import re 6 | from pathlib import Path 7 | from typing import Dict, List, Set, Tuple 8 | 9 | class EnvSecretChecker: 10 | def 
__init__(self):
11 |         # Patterns to identify potential secrets/configuration
12 |         self.patterns = {
13 |             'port': r'(?:^|\s|=)(\d{2,5})(?:\s|$|:)',  # Matches potential port numbers
14 |             'password': r'(?i)(?:password|passwd|pwd)[\s]*[=:]\s*["\']?([^"\'\s]+)["\']?',
15 |             'username': r'(?i)(?:username|user|uname)[\s]*[=:]\s*["\']?([^"\'\s]+)["\']?',
16 |             'api_key': r'(?i)(?:api[_-]?key|token|secret)[\s]*[=:]\s*["\']?([^"\'\s]+)["\']?',
17 |             'url': r'(?i)(?:url|host|endpoint)[\s]*[=:]\s*["\']?(http[s]?://[^"\'\s]+)["\']?',
18 |             'email': r'[\w\.-]+@[\w\.-]+\.\w+',
19 |             'ip_address': r'\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b'
20 |         }
21 | 
22 |         # Known safe values that should be ignored
23 |         self.safe_values = {
24 |             'port': {'80', '443', '3000', '8080'},  # Common development ports
25 |             'username': {'postgres', 'root', 'admin'},  # Common default usernames
26 |             'password': {'postgres'},  # Common default passwords
27 |             'host': {'localhost', '127.0.0.1', '0.0.0.0'}  # Common development hosts
28 |         }
29 | 
30 |         # Files and directories to ignore
31 |         self.ignore_paths = {
32 |             '.git',
33 |             'node_modules',
34 |             'venv',
35 |             '.venv',
36 |             '__pycache__',
37 |             '.pytest_cache',
38 |             'dist',
39 |             'build',
40 |             '.env',
41 |             '.env.example',
42 |             '.env.template',
43 |             'example.env'
44 |         }
45 | 
46 |         # File extensions to check
47 |         self.check_extensions = {
48 |             '.py',
49 |             '.js',
50 |             '.ts',
51 |             '.jsx',
52 |             '.tsx',
53 |             '.yml',
54 |             '.yaml',
55 |             '.json',
56 |             '.toml',
57 |             '.ini',
58 |             '.conf',
59 |             '.sh',
60 |             '.bash',
61 |             '.zsh',
62 |             '.env',
63 |             'Dockerfile',
64 |             'docker-compose.yml'
65 |         }
66 | 
67 |     def should_check_file(self, file_path: Path) -> bool:
68 |         """Determine if a file should be checked."""
69 |         # Check if any parent directory is in ignore_paths
70 |         for parent in file_path.parents:
71 |             if parent.name in self.ignore_paths:
72 |                 return False
73 | 
74 |         # Check if file name is in ignore_paths
75 |         if file_path.name in self.ignore_paths:
76 |             return False
77 | 
78 |         # Check file extension
79 |         return any(
80 |             str(file_path).endswith(ext) for ext in self.check_extensions
81 |         )
82 | 
83 |     def is_safe_value(self, pattern_type: str, value: str) -> bool:
84 |         """Check if a value is in the safe list."""
85 |         return value in self.safe_values.get(pattern_type, set())
86 | 
87 |     def check_file(self, file_path: Path) -> List[Tuple[str, str, int, str]]:
88 |         """Check a single file for potential secrets/configuration."""
89 |         findings = []
90 | 
91 |         try:
92 |             with open(file_path, 'r', encoding='utf-8') as f:
93 |                 lines = f.readlines()
94 | 
95 |             for line_num, line in enumerate(lines, 1):
96 |                 # Skip comments
97 |                 if line.strip().startswith(('#', '//', '/*', '*', '--')):
98 |                     continue
99 | 
100 |                 # Check each pattern
101 |                 for pattern_type, pattern in self.patterns.items():
102 |                     matches = re.finditer(pattern, line)
103 |                     for match in matches:
104 |                         value = match.group(1) if match.groups() else match.group(0)  # 'email'/'ip_address' define no capture group
105 |                         if not self.is_safe_value(pattern_type, value):
106 |                             findings.append((
107 |                                 str(file_path),
108 |                                 pattern_type,
109 |                                 line_num,
110 |                                 line.strip()
111 |                             ))
112 | 
113 |         except Exception as e:
114 |             print(f"Error processing {file_path}: {str(e)}")
115 | 
116 |         return findings
117 | 
118 |     def check_directory(self, directory: Path) -> List[Tuple[str, str, int, str]]:
119 |         """Recursively check all files in a directory."""
120 |         findings = []
121 | 
122 |         for file_path in directory.rglob('*'):
123 |             if file_path.is_file() and self.should_check_file(file_path):
124 |                 findings.extend(self.check_file(file_path))
125 | 
126 |         return findings
127 | 
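# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the original class): running
# check_file() against a throwaway file. Assumes this file is importable as
# `check_env_secrets` (i.e. scripts/ is on sys.path).
# ---------------------------------------------------------------------------
import tempfile
from pathlib import Path
from check_env_secrets import EnvSecretChecker

with tempfile.NamedTemporaryFile("w", suffix=".py", delete=False) as tmp:
    tmp.write('API_KEY = "sk-not-a-real-key"\n')  # hypothetical value; triggers the api_key pattern
    tmp_path = Path(tmp.name)

for path, kind, line_num, line in EnvSecretChecker().check_file(tmp_path):
    print(f"{path}:{line_num} [{kind}] {line}")
tmp_path.unlink()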
128 |     def format_findings(self, findings: List[Tuple[str, str, int, str]]) -> str:
129 |         """Format findings into a readable report."""
130 |         if not findings:
131 |             return "No potential secrets or configuration values found."
132 | 
133 |         report = []
134 |         report.append("\nPotential secrets or configuration values found:")
135 |         report.append("=" * 80)
136 | 
137 |         # Group findings by file
138 |         findings_by_file: Dict[str, List[Tuple[str, int, str]]] = {}
139 |         for file_path, pattern_type, line_num, line in findings:
140 |             if file_path not in findings_by_file:
141 |                 findings_by_file[file_path] = []
142 |             findings_by_file[file_path].append((pattern_type, line_num, line))
143 | 
144 |         # Generate report
145 |         for file_path, file_findings in findings_by_file.items():
146 |             report.append(f"\nFile: {file_path}")
147 |             report.append("-" * 80)
148 |             for pattern_type, line_num, line in file_findings:
149 |                 report.append(f"  Line {line_num} ({pattern_type}):")
150 |                 report.append(f"    {line}")
151 |             report.append("-" * 80)
152 | 
153 |         report.append("\nRecommendations:")
154 |         report.append("1. Move these values to environment variables")
155 |         report.append("2. Use a .env file to store sensitive information")
156 |         report.append("3. Reference environment variables in your code instead of hardcoded values")
157 |         report.append("4. Add sensitive files to .gitignore")
158 | 
159 |         return "\n".join(report)
160 | 
161 | def main():
162 |     parser = argparse.ArgumentParser(
163 |         description='Check for hardcoded secrets and configuration that should be in .env files'
164 |     )
165 |     parser.add_argument(
166 |         'path',
167 |         type=str,
168 |         help='Path to directory or file to check'
169 |     )
170 |     parser.add_argument(
171 |         '--json',
172 |         action='store_true',
173 |         help='Output results in JSON format'
174 |     )
175 |     args = parser.parse_args()
176 | 
177 |     checker = EnvSecretChecker()
178 |     path = Path(args.path)
179 | 
180 |     if not path.exists():
181 |         print(f"Error: Path {path} does not exist")
182 |         return 1
183 | 
184 |     findings = []
185 |     if path.is_file():
186 |         if checker.should_check_file(path):
187 |             findings = checker.check_file(path)
188 |     else:
189 |         findings = checker.check_directory(path)
190 | 
191 |     if args.json:
192 |         import json
193 |         json_findings = [
194 |             {
195 |                 'file': f[0],
196 |                 'type': f[1],
197 |                 'line': f[2],
198 |                 'content': f[3]
199 |             }
200 |             for f in findings
201 |         ]
202 |         print(json.dumps(json_findings, indent=2))
203 |     else:
204 |         print(checker.format_findings(findings))
205 | 
206 |     # Return non-zero exit code if findings were found
207 |     return 1 if findings else 0
208 | 
209 | if __name__ == "__main__":
210 |     exit(main())
--------------------------------------------------------------------------------
/scripts/check_service_structure.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """Script to check service structure against standardization templates."""
3 | 
4 | import os
5 | import sys
6 | from pathlib import Path
7 | from typing import Dict, List, Set
8 | 
9 | import toml
10 | import yaml
11 | 
12 | 
13 | def load_standards() -> Dict:
14 |     """Load standardization templates and requirements."""
15 |     standards_dir = Path(__file__).parent.parent / "docs" / "standards"
16 | 
17 |     with open(standards_dir / "python" / "PYPROJECT_TEMPLATE.toml") as f:
18 |         pyproject_template = toml.load(f)
19 | 
20 |     with open(standards_dir / "docker" / "DOCKERFILE_TEMPLATE") as f:
21 |         dockerfile_template = f.read()
22
| 23 | with open(standards_dir / "docker" / "DOCKER_COMPOSE_SERVICE_TEMPLATE.yml") as f: 24 | docker_compose_template = yaml.safe_load(f) 25 | 26 | return { 27 | "pyproject": pyproject_template, 28 | "dockerfile": dockerfile_template, 29 | "docker_compose": docker_compose_template 30 | } 31 | 32 | 33 | def check_service_structure(service_dir: Path) -> List[str]: 34 | """Check if a service directory follows the standard structure.""" 35 | errors = [] 36 | 37 | # Required directories 38 | required_dirs = { 39 | "src", 40 | "tests", 41 | "tests/unit", 42 | "tests/integration" 43 | } 44 | 45 | # Required files 46 | required_files = { 47 | "Dockerfile", 48 | "pyproject.toml", 49 | "README.md", 50 | "tests/conftest.py" 51 | } 52 | 53 | # Check directories 54 | for dir_path in required_dirs: 55 | full_path = service_dir / dir_path 56 | if not full_path.is_dir(): 57 | errors.append(f"Missing required directory: {dir_path}") 58 | 59 | # Check files 60 | for file_path in required_files: 61 | full_path = service_dir / file_path 62 | if not full_path.is_file(): 63 | errors.append(f"Missing required file: {file_path}") 64 | 65 | return errors 66 | 67 | 68 | def check_pyproject_toml(service_dir: Path, template: Dict) -> List[str]: 69 | """Check if pyproject.toml follows the template structure.""" 70 | errors = [] 71 | pyproject_path = service_dir / "pyproject.toml" 72 | 73 | if not pyproject_path.is_file(): 74 | return ["pyproject.toml not found"] 75 | 76 | try: 77 | with open(pyproject_path) as f: 78 | pyproject = toml.load(f) 79 | 80 | # Check required sections 81 | required_sections = ["tool.poetry", "tool.poetry.dependencies", "build-system"] 82 | for section in required_sections: 83 | if not _get_nested_dict(pyproject, section.split(".")): 84 | errors.append(f"Missing required section: {section}") 85 | 86 | # Check Python version 87 | py_version = _get_nested_dict(pyproject, ["tool", "poetry", "dependencies", "python"]) 88 | if not py_version or not py_version.startswith("^3.11"): 89 | errors.append("Python version must be ^3.11.x") 90 | 91 | except Exception as e: 92 | errors.append(f"Error parsing pyproject.toml: {str(e)}") 93 | 94 | return errors 95 | 96 | 97 | def check_dockerfile(service_dir: Path, template: str) -> List[str]: 98 | """Check if Dockerfile follows the template structure.""" 99 | errors = [] 100 | dockerfile_path = service_dir / "Dockerfile" 101 | 102 | if not dockerfile_path.is_file(): 103 | return ["Dockerfile not found"] 104 | 105 | try: 106 | with open(dockerfile_path) as f: 107 | dockerfile = f.read() 108 | 109 | # Check required elements 110 | required_elements = [ 111 | "FROM python:3.11", 112 | "POETRY_VERSION", 113 | "WORKDIR /app", 114 | "COPY", 115 | "RUN poetry install", 116 | "HEALTHCHECK", 117 | "LABEL maintainer" 118 | ] 119 | 120 | for element in required_elements: 121 | if element not in dockerfile: 122 | errors.append(f"Missing required Dockerfile element: {element}") 123 | 124 | except Exception as e: 125 | errors.append(f"Error reading Dockerfile: {str(e)}") 126 | 127 | return errors 128 | 129 | 130 | def _get_nested_dict(d: Dict, keys: List[str]) -> any: 131 | """Get a value from nested dictionary using a list of keys.""" 132 | for key in keys: 133 | if not isinstance(d, dict) or key not in d: 134 | return None 135 | d = d[key] 136 | return d 137 | 138 | 139 | def main() -> int: 140 | """Main function to check service structure.""" 141 | try: 142 | standards = load_standards() 143 | containers_dir = Path(__file__).parent.parent / "containers" 144 | 145 | if 
not containers_dir.is_dir():
146 |             print("Error: containers directory not found")
147 |             return 1
148 | 
149 |         exit_code = 0
150 |         for service_dir in containers_dir.iterdir():
151 |             if not service_dir.is_dir():
152 |                 continue
153 | 
154 |             print(f"\nChecking service: {service_dir.name}")
155 | 
156 |             # Check service structure
157 |             structure_errors = check_service_structure(service_dir)
158 |             if structure_errors:
159 |                 print("\nStructure errors:")
160 |                 for error in structure_errors:
161 |                     print(f"  - {error}")
162 |                 exit_code = 1
163 | 
164 |             # Check pyproject.toml
165 |             pyproject_errors = check_pyproject_toml(service_dir, standards["pyproject"])
166 |             if pyproject_errors:
167 |                 print("\npyproject.toml errors:")
168 |                 for error in pyproject_errors:
169 |                     print(f"  - {error}")
170 |                 exit_code = 1
171 | 
172 |             # Check Dockerfile
173 |             dockerfile_errors = check_dockerfile(service_dir, standards["dockerfile"])
174 |             if dockerfile_errors:
175 |                 print("\nDockerfile errors:")
176 |                 for error in dockerfile_errors:
177 |                     print(f"  - {error}")
178 |                 exit_code = 1
179 | 
180 |         return exit_code
181 | 
182 |     except Exception as e:
183 |         print(f"Error: {str(e)}")
184 |         return 1
185 | 
186 | 
187 | if __name__ == "__main__":
188 |     sys.exit(main())
--------------------------------------------------------------------------------
/scripts/check_standards.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """Standards checker with modular validator integration"""
3 | 
4 | import sys
5 | from pathlib import Path
6 | 
7 | # Import validators
8 | from scripts.validators.dockerfile_validator import validate_dockerfile
9 | from scripts.validators.container_validator import validate_container_structure
10 | from scripts.validators.compose_validator import validate_compose_file
11 | from scripts.validators.poetry_validator import validate_poetry_config
12 | 
13 | def check_containers():
14 |     """Validate all containers using the modular validators"""
15 |     print("Validating containers...")
16 |     all_valid = True
17 |     container_dirs = [p for p in Path('./containers').glob('*') if p.is_dir()]
18 | 
19 |     for container_dir in container_dirs:
20 |         # Structural validation
21 |         structure_errors = validate_container_structure(container_dir)
22 |         if structure_errors:
23 |             all_valid = False
24 |             print(f"Container structure validation errors in {container_dir}:")
25 |             for error in structure_errors:
26 |                 print(f"  - {error}")
27 | 
28 |         # Poetry validation
29 |         poetry_errors = validate_poetry_config(container_dir)
30 |         if poetry_errors:
31 |             all_valid = False
32 |             print(f"Poetry configuration errors in {container_dir}:")
33 |             for error in poetry_errors:
34 |                 print(f"  - {error}")
35 | 
36 |         # Dockerfile validation if file exists
37 |         dockerfile_path = container_dir / "Dockerfile"
38 |         if dockerfile_path.exists():
39 |             dockerfile_errors, dockerfile_warnings = validate_dockerfile(dockerfile_path)
40 |             if dockerfile_errors:
41 |                 all_valid = False
42 |                 print(f"Dockerfile validation errors in {dockerfile_path}:")
43 |                 for error in dockerfile_errors:
44 |                     print(f"  - {error}")
45 |             if dockerfile_warnings:
46 |                 print(f"Dockerfile validation warnings in {dockerfile_path}:")
47 |                 for warning in dockerfile_warnings:
48 |                     print(f"  - Warning: {warning}")
49 | 
50 |     # Validate compose file
51 |     compose_path = Path('./containers/dev-environment/docker-compose.dev.yml')
52 |     if compose_path.exists():
53 |         compose_errors =
validate_compose_file(str(compose_path)) 54 | if compose_errors: 55 | all_valid = False 56 | print(f"Docker Compose validation errors:") 57 | for error in compose_errors: 58 | print(f" - {error}") 59 | 60 | return all_valid 61 | 62 | # Then in the main execution flow: 63 | if __name__ == "__main__": 64 | containers_valid = check_containers() 65 | # ... other validations ... 66 | 67 | if not containers_valid: 68 | sys.exit(1) 69 | sys.exit(0) 70 | -------------------------------------------------------------------------------- /scripts/container_manager.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Container Manager 4 | 5 | A centralized tool for managing container versioning and building across all services. 6 | This script provides functionality for: 7 | - Managing semantic versioning for containers 8 | - Building container images with proper versioning 9 | - Standardizing container metadata 10 | 11 | Usage: 12 | python container_manager.py version get 13 | python container_manager.py version bump [major|minor|patch] 14 | python container_manager.py version set 15 | python container_manager.py build [--push] [--registry REGISTRY] [--no-latest] 16 | python container_manager.py validate 17 | """ 18 | 19 | import argparse 20 | import os 21 | import re 22 | import subprocess 23 | import sys 24 | from pathlib import Path 25 | from typing import Dict, List, Optional, Tuple, Union 26 | 27 | # Constants 28 | PROJECT_ROOT = Path(__file__).parent.parent 29 | CONTAINERS_DIR = PROJECT_ROOT / "containers" 30 | 31 | 32 | def get_container_dir(container_name: str) -> Path: 33 | """Get the directory for a specific container.""" 34 | container_dir = CONTAINERS_DIR / container_name 35 | if not container_dir.exists(): 36 | raise ValueError(f"Container '{container_name}' not found in {CONTAINERS_DIR}") 37 | return container_dir 38 | 39 | 40 | def get_pyproject_path(container_name: str) -> Path: 41 | """Get the path to the pyproject.toml file for a container.""" 42 | container_dir = get_container_dir(container_name) 43 | pyproject_path = container_dir / "pyproject.toml" 44 | if not pyproject_path.exists(): 45 | raise ValueError(f"pyproject.toml not found for container '{container_name}'") 46 | return pyproject_path 47 | 48 | 49 | def get_dockerfile_path(container_name: str) -> Path: 50 | """Get the path to the Dockerfile for a container.""" 51 | container_dir = get_container_dir(container_name) 52 | dockerfile_path = container_dir / "Dockerfile" 53 | if not dockerfile_path.exists(): 54 | raise ValueError(f"Dockerfile not found for container '{container_name}'") 55 | return dockerfile_path 56 | 57 | 58 | def get_current_version(container_name: str) -> str: 59 | """Get the current version from a container's pyproject.toml.""" 60 | pyproject_path = get_pyproject_path(container_name) 61 | 62 | with open(pyproject_path, "r") as f: 63 | content = f.read() 64 | 65 | match = re.search(r'version\s*=\s*"([^"]+)"', content) 66 | if not match: 67 | raise ValueError(f"Could not find version in {pyproject_path}") 68 | 69 | return match.group(1) 70 | 71 | 72 | def parse_version(version: str) -> Tuple[int, int, int]: 73 | """Parse a version string into major, minor, patch components.""" 74 | try: 75 | major, minor, patch = map(int, version.split(".")) 76 | return major, minor, patch 77 | except ValueError: 78 | raise ValueError(f"Invalid version format: {version}. 
Expected format: X.Y.Z") 79 | 80 | 81 | def bump_version(current_version: str, bump_type: str) -> str: 82 | """Bump the version according to the specified type.""" 83 | major, minor, patch = parse_version(current_version) 84 | 85 | if bump_type == "major": 86 | return f"{major + 1}.0.0" 87 | elif bump_type == "minor": 88 | return f"{major}.{minor + 1}.0" 89 | elif bump_type == "patch": 90 | return f"{major}.{minor}.{patch + 1}" 91 | else: 92 | raise ValueError(f"Invalid bump type: {bump_type}. Expected: major, minor, or patch") 93 | 94 | 95 | def update_pyproject(container_name: str, new_version: str) -> None: 96 | """Update the version in a container's pyproject.toml.""" 97 | pyproject_path = get_pyproject_path(container_name) 98 | 99 | with open(pyproject_path, "r") as f: 100 | content = f.read() 101 | 102 | updated_content = re.sub( 103 | r'version\s*=\s*"[^"]+"', 104 | f'version = "{new_version}"', 105 | content 106 | ) 107 | 108 | with open(pyproject_path, "w") as f: 109 | f.write(updated_content) 110 | 111 | print(f"Updated {pyproject_path} with version {new_version}") 112 | 113 | 114 | def update_dockerfile(container_name: str, new_version: str) -> None: 115 | """Update the version in a container's Dockerfile.""" 116 | dockerfile_path = get_dockerfile_path(container_name) 117 | 118 | with open(dockerfile_path, "r") as f: 119 | content = f.read() 120 | 121 | # Check if version label exists 122 | if "org.opencontainers.image.version" in content: 123 | updated_content = re.sub( 124 | r'org\.opencontainers\.image\.version="[^"]+"', 125 | f'org.opencontainers.image.version="{new_version}"', 126 | content 127 | ) 128 | else: 129 | # Add version labels if they don't exist 130 | label_section = f""" 131 | # Add version labels 132 | LABEL org.opencontainers.image.title="{container_name.capitalize()} Service" \ 133 | org.opencontainers.image.description="{container_name.capitalize()} service" \ 134 | org.opencontainers.image.version="{new_version}" \ 135 | org.opencontainers.image.vendor="AI Development Team" \ 136 | org.opencontainers.image.created="$(date -u +'%Y-%m-%dT%H:%M:%SZ')" \ 137 | org.opencontainers.image.source="https://github.com/organization/project" 138 | """ 139 | # Insert after the ENV sections 140 | env_pattern = r'(ENV\s+[^\n]+\n\s*(?:ENV\s+[^\n]+\n\s*)*)' 141 | if re.search(env_pattern, content): 142 | updated_content = re.sub( 143 | env_pattern, 144 | r'\1' + label_section, 145 | content, 146 | count=1 147 | ) 148 | else: 149 | # If no ENV section, insert after FROM 150 | updated_content = re.sub( 151 | r'(FROM\s+[^\n]+\n)', 152 | r'\1' + label_section, 153 | content, 154 | count=1 155 | ) 156 | 157 | with open(dockerfile_path, "w") as f: 158 | f.write(updated_content) 159 | 160 | print(f"Updated {dockerfile_path} with version {new_version}") 161 | 162 | 163 | def build_container(container_name: str, version: Optional[str] = None, 164 | push: bool = False, registry: Optional[str] = None, 165 | tag_latest: bool = True) -> None: 166 | """Build a container with the specified version.""" 167 | if not version: 168 | version = get_current_version(container_name) 169 | 170 | container_dir = get_container_dir(container_name) 171 | 172 | # Set image name 173 | image_name = container_name 174 | if registry: 175 | image_name = f"{registry}/{image_name}" 176 | 177 | # Build the image 178 | print(f"Building {container_name} container version {version}...") 179 | 180 | build_cmd = [ 181 | "docker", "build", 182 | "-t", f"{image_name}:{version}", 183 | "-f", str(container_dir / 
"Dockerfile"), 184 | "--build-arg", f"VERSION={version}", 185 | "." 186 | ] 187 | 188 | # Run the build command 189 | try: 190 | subprocess.run(build_cmd, check=True, cwd=PROJECT_ROOT) 191 | except subprocess.CalledProcessError as e: 192 | print(f"Error building container: {e}") 193 | sys.exit(1) 194 | 195 | # Tag as latest if requested 196 | if tag_latest: 197 | print(f"Tagging {image_name}:{version} as latest...") 198 | tag_cmd = ["docker", "tag", f"{image_name}:{version}", f"{image_name}:latest"] 199 | try: 200 | subprocess.run(tag_cmd, check=True) 201 | except subprocess.CalledProcessError as e: 202 | print(f"Error tagging container: {e}") 203 | sys.exit(1) 204 | 205 | # Push if requested 206 | if push: 207 | print(f"Pushing {image_name}:{version}...") 208 | push_cmd = ["docker", "push", f"{image_name}:{version}"] 209 | try: 210 | subprocess.run(push_cmd, check=True) 211 | except subprocess.CalledProcessError as e: 212 | print(f"Error pushing container: {e}") 213 | sys.exit(1) 214 | 215 | if tag_latest: 216 | print(f"Pushing {image_name}:latest...") 217 | push_latest_cmd = ["docker", "push", f"{image_name}:latest"] 218 | try: 219 | subprocess.run(push_latest_cmd, check=True) 220 | except subprocess.CalledProcessError as e: 221 | print(f"Error pushing latest tag: {e}") 222 | sys.exit(1) 223 | 224 | print(f"Build completed successfully!") 225 | print(f"Image: {image_name}:{version}") 226 | 227 | 228 | def validate_container(container_name: str) -> bool: 229 | """Validate a container's structure and configuration.""" 230 | container_dir = get_container_dir(container_name) 231 | 232 | # Check for required files 233 | required_files = ["Dockerfile", "pyproject.toml", "README.md"] 234 | missing_files = [f for f in required_files if not (container_dir / f).exists()] 235 | 236 | if missing_files: 237 | print(f"Container '{container_name}' is missing required files: {', '.join(missing_files)}") 238 | return False 239 | 240 | # Check for required directories 241 | required_dirs = ["src", "tests"] 242 | missing_dirs = [d for d in required_dirs if not (container_dir / d).exists()] 243 | 244 | if missing_dirs: 245 | print(f"Container '{container_name}' is missing required directories: {', '.join(missing_dirs)}") 246 | return False 247 | 248 | # Validate pyproject.toml 249 | try: 250 | version = get_current_version(container_name) 251 | parse_version(version) 252 | except ValueError as e: 253 | print(f"Invalid version in pyproject.toml: {e}") 254 | return False 255 | 256 | # Validate Dockerfile 257 | dockerfile_path = get_dockerfile_path(container_name) 258 | with open(dockerfile_path, "r") as f: 259 | dockerfile_content = f.read() 260 | 261 | # Check for required Dockerfile elements 262 | required_elements = [ 263 | ("FROM", r'FROM\s+python'), 264 | ("WORKDIR", r'WORKDIR\s+/app'), 265 | ("COPY", r'COPY\s+.*pyproject\.toml'), 266 | ("RUN", r'RUN\s+.*poetry\s+install'), 267 | ("EXPOSE", r'EXPOSE\s+\d+'), 268 | ("CMD", r'CMD\s+\[') 269 | ] 270 | 271 | for name, pattern in required_elements: 272 | if not re.search(pattern, dockerfile_content): 273 | print(f"Dockerfile missing required element: {name}") 274 | return False 275 | 276 | print(f"Container '{container_name}' validation successful!") 277 | return True 278 | 279 | 280 | def list_containers() -> List[str]: 281 | """List all available containers.""" 282 | return [d.name for d in CONTAINERS_DIR.iterdir() 283 | if d.is_dir() and (d / "Dockerfile").exists()] 284 | 285 | 286 | def handle_version_command(args: argparse.Namespace) -> None: 287 | 
"""Handle the 'version' subcommand.""" 288 | container_name = args.container_name 289 | 290 | if args.version_action == "get": 291 | try: 292 | version = get_current_version(container_name) 293 | print(f"Current version of {container_name}: {version}") 294 | except ValueError as e: 295 | print(f"Error: {e}") 296 | sys.exit(1) 297 | 298 | elif args.version_action == "bump": 299 | try: 300 | current_version = get_current_version(container_name) 301 | new_version = bump_version(current_version, args.bump_type) 302 | update_pyproject(container_name, new_version) 303 | update_dockerfile(container_name, new_version) 304 | print(f"Bumped {container_name} version from {current_version} to {new_version}") 305 | except ValueError as e: 306 | print(f"Error: {e}") 307 | sys.exit(1) 308 | 309 | elif args.version_action == "set": 310 | try: 311 | current_version = get_current_version(container_name) 312 | # Validate the version format 313 | parse_version(args.version) 314 | update_pyproject(container_name, args.version) 315 | update_dockerfile(container_name, args.version) 316 | print(f"Set {container_name} version from {current_version} to {args.version}") 317 | except ValueError as e: 318 | print(f"Error: {e}") 319 | sys.exit(1) 320 | 321 | 322 | def handle_build_command(args: argparse.Namespace) -> None: 323 | """Handle the 'build' subcommand.""" 324 | try: 325 | build_container( 326 | args.container_name, 327 | version=args.version, 328 | push=args.push, 329 | registry=args.registry, 330 | tag_latest=not args.no_latest 331 | ) 332 | except ValueError as e: 333 | print(f"Error: {e}") 334 | sys.exit(1) 335 | 336 | 337 | def handle_validate_command(args: argparse.Namespace) -> None: 338 | """Handle the 'validate' subcommand.""" 339 | if not validate_container(args.container_name): 340 | sys.exit(1) 341 | 342 | 343 | def main() -> None: 344 | """Main entry point for the script.""" 345 | parser = argparse.ArgumentParser( 346 | description="Container Manager - A tool for managing container versioning and building" 347 | ) 348 | subparsers = parser.add_subparsers(dest="command", help="Command to execute") 349 | 350 | # Version command 351 | version_parser = subparsers.add_parser("version", help="Manage container versions") 352 | version_subparsers = version_parser.add_subparsers(dest="version_action", help="Version action") 353 | 354 | # Version get 355 | get_parser = version_subparsers.add_parser("get", help="Get current version") 356 | get_parser.add_argument("container_name", help="Name of the container") 357 | 358 | # Version bump 359 | bump_parser = version_subparsers.add_parser("bump", help="Bump version number") 360 | bump_parser.add_argument("container_name", help="Name of the container") 361 | bump_parser.add_argument("bump_type", choices=["major", "minor", "patch"], 362 | help="Type of version bump") 363 | 364 | # Version set 365 | set_parser = version_subparsers.add_parser("set", help="Set specific version") 366 | set_parser.add_argument("container_name", help="Name of the container") 367 | set_parser.add_argument("version", help="Version to set (format: X.Y.Z)") 368 | 369 | # Build command 370 | build_parser = subparsers.add_parser("build", help="Build container image") 371 | build_parser.add_argument("container_name", help="Name of the container to build") 372 | build_parser.add_argument("--version", "-v", help="Specify the version to build") 373 | build_parser.add_argument("--push", action="store_true", help="Push the image after building") 374 | build_parser.add_argument("--registry", 
help="Specify the registry to push to") 375 | build_parser.add_argument("--no-latest", action="store_true", help="Don't tag as latest") 376 | 377 | # Validate command 378 | validate_parser = subparsers.add_parser("validate", help="Validate container structure") 379 | validate_parser.add_argument("container_name", help="Name of the container to validate") 380 | 381 | # List command 382 | list_parser = subparsers.add_parser("list", help="List available containers") 383 | 384 | args = parser.parse_args() 385 | 386 | if args.command == "version": 387 | if not args.version_action: 388 | version_parser.print_help() 389 | sys.exit(1) 390 | handle_version_command(args) 391 | 392 | elif args.command == "build": 393 | handle_build_command(args) 394 | 395 | elif args.command == "validate": 396 | handle_validate_command(args) 397 | 398 | elif args.command == "list": 399 | containers = list_containers() 400 | if containers: 401 | print("Available containers:") 402 | for container in containers: 403 | try: 404 | version = get_current_version(container) 405 | print(f" - {container} (v{version})") 406 | except ValueError: 407 | print(f" - {container} (version unknown)") 408 | else: 409 | print("No containers found") 410 | 411 | else: 412 | parser.print_help() 413 | sys.exit(1) 414 | 415 | 416 | if __name__ == "__main__": 417 | main() -------------------------------------------------------------------------------- /scripts/update_docs.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Documentation Generator 4 | 5 | Automatically generates and updates documentation for services based on standardization rules. 6 | Extracts API documentation, configuration details, and maintains consistent structure. 
7 | """ 8 | 9 | import os 10 | import sys 11 | import containers.scripts.yaml 12 | import logging 13 | import containers.scripts.inspect 14 | import containers.scripts.importlib 15 | import containers.scripts.importlib.util 16 | from pathlib import Path 17 | from typing import Dict, Any, List, Optional 18 | import re 19 | from datetime import datetime 20 | 21 | # Configure logging 22 | logging.basicConfig( 23 | level=logging.INFO, 24 | format='%(levelname)s: %(message)s' 25 | ) 26 | logger = logging.getLogger(__name__) 27 | 28 | class DocGenerator: 29 | """Generates and updates documentation following project standards.""" 30 | 31 | def __init__(self, rules_file: str = ".cursor/standardization.cursorrules"): 32 | """Initialize the generator with rules from the specified file.""" 33 | self.rules_file = Path(rules_file) 34 | self.rules = self._load_rules() 35 | 36 | def _load_rules(self) -> Dict[str, Any]: 37 | """Load rules from the YAML file.""" 38 | try: 39 | with open(self.rules_file, 'r') as f: 40 | return yaml.safe_load(f) 41 | except Exception as e: 42 | logger.error(f"Failed to load rules file: {e}") 43 | sys.exit(1) 44 | 45 | def _import_module(self, module_path: str) -> Optional[Any]: 46 | """Dynamically import a module from file path.""" 47 | try: 48 | spec = importlib.util.spec_from_file_location( 49 | module_path.replace('/', '.').replace('.py', ''), 50 | module_path 51 | ) 52 | if spec and spec.loader: 53 | module = importlib.util.module_from_spec(spec) 54 | spec.loader.exec_module(module) 55 | return module 56 | except Exception as e: 57 | logger.warning(f"Failed to import module {module_path}: {e}") 58 | return None 59 | 60 | def _extract_docstring(self, obj: Any) -> str: 61 | """Extract and format docstring from an object.""" 62 | doc = inspect.getdoc(obj) or "" 63 | return doc.strip() 64 | 65 | def _generate_api_docs(self, service_dir: Path) -> str: 66 | """Generate API documentation from FastAPI endpoints.""" 67 | api_docs = ["## API Reference\n"] 68 | main_path = service_dir / "api" / "main.py" 69 | routes_path = service_dir / "api" / "routes.py" 70 | 71 | # Document main endpoints 72 | if main_path.exists(): 73 | main_module = self._import_module(str(main_path)) 74 | if main_module and hasattr(main_module, 'app'): 75 | for route in main_module.app.routes: 76 | if route.path in ["/health", "/version"]: 77 | api_docs.append(f"### {route.path}") 78 | api_docs.append(f"**Method:** {route.methods}") 79 | if route.endpoint.__doc__: 80 | api_docs.append(f"\n{route.endpoint.__doc__.strip()}\n") 81 | api_docs.append("") 82 | 83 | # Document custom routes 84 | if routes_path.exists(): 85 | routes_module = self._import_module(str(routes_path)) 86 | if routes_module and hasattr(routes_module, 'router'): 87 | for route in routes_module.router.routes: 88 | api_docs.append(f"### {route.path}") 89 | api_docs.append(f"**Method:** {route.methods}") 90 | if route.endpoint.__doc__: 91 | api_docs.append(f"\n{route.endpoint.__doc__.strip()}\n") 92 | api_docs.append("") 93 | 94 | return "\n".join(api_docs) 95 | 96 | def _generate_config_docs(self, service_dir: Path) -> str: 97 | """Generate configuration documentation.""" 98 | config_docs = ["## Configuration\n"] 99 | settings_path = service_dir / "config" / "settings.py" 100 | 101 | if settings_path.exists(): 102 | settings_module = self._import_module(str(settings_path)) 103 | if settings_module and hasattr(settings_module, 'Settings'): 104 | settings_class = settings_module.Settings 105 | config_docs.append("### Environment Variables\n") 
106 | 107 | # Get annotations (type hints) for settings 108 | annotations = settings_class.__annotations__ 109 | 110 | for var_name, var_type in annotations.items(): 111 | default_value = getattr(settings_class, var_name, None) 112 | config_docs.append(f"- `{var_name}` ({var_type.__name__})") 113 | if default_value is not None: 114 | config_docs.append(f" - Default: `{default_value}`") 115 | config_docs.append("") 116 | 117 | return "\n".join(config_docs) 118 | 119 | def _generate_development_docs(self, service_dir: Path) -> str: 120 | """Generate development documentation.""" 121 | return """## Development 122 | 123 | ### Setup 124 | 125 | 1. Install dependencies: 126 | ```bash 127 | pip install -r requirements.txt 128 | ``` 129 | 130 | 2. Set up environment variables: 131 | ```bash 132 | cp .env.example .env 133 | # Edit .env with your configuration 134 | ``` 135 | 136 | 3. Run tests: 137 | ```bash 138 | pytest 139 | ``` 140 | 141 | 4. Start the service: 142 | ```bash 143 | python -m api.main 144 | ``` 145 | 146 | ### Code Style 147 | 148 | - Follow PEP 8 guidelines 149 | - Use type hints 150 | - Document all public functions and classes 151 | - Write unit tests for new features 152 | 153 | ### Testing 154 | 155 | - Write unit tests in `tests/unit/` 156 | - Write integration tests in `tests/integration/` 157 | - Use pytest fixtures for common test setup 158 | - Aim for high test coverage 159 | 160 | """ 161 | 162 | def _generate_deployment_docs(self, service_dir: Path) -> str: 163 | """Generate deployment documentation.""" 164 | return """## Deployment 165 | 166 | ### Docker 167 | 168 | Build the service: 169 | ```bash 170 | docker build -t service-name . 171 | ``` 172 | 173 | Run the service: 174 | ```bash 175 | docker run -p 8000:8000 service-name 176 | ``` 177 | 178 | ### Environment Variables 179 | 180 | Ensure all required environment variables are set in your deployment environment. 181 | See the Configuration section for details. 182 | 183 | ### Health Checks 184 | 185 | The service provides a health check endpoint at `/health` that should be used 186 | for monitoring and container orchestration. 
187 | 188 | ### Monitoring 189 | 190 | The service exposes the following metrics: 191 | - `requests_total`: Total number of requests 192 | - `request_duration`: Request duration histogram 193 | - `errors_total`: Total number of errors 194 | 195 | """ 196 | 197 | def _update_readme(self, service_dir: Path): 198 | """Update the main README.md with generated documentation.""" 199 | readme_path = service_dir / "README.md" 200 | if not readme_path.exists(): 201 | logger.warning(f"README.md not found in {service_dir}") 202 | return 203 | 204 | # Read existing content 205 | with open(readme_path, 'r') as f: 206 | content = f.read() 207 | 208 | # Extract service name and overview 209 | service_name = "" 210 | overview = "" 211 | name_match = re.search(r'# (.*?)\n', content) 212 | if name_match: 213 | service_name = name_match.group(1) 214 | overview_match = re.search(r'## Overview\n\n(.*?)\n\n', content, re.DOTALL) 215 | if overview_match: 216 | overview = overview_match.group(1) 217 | 218 | # Generate new documentation 219 | new_content = [ 220 | f"# {service_name}\n", 221 | "## Overview\n", 222 | f"{overview}\n", 223 | self._generate_config_docs(service_dir), 224 | self._generate_api_docs(service_dir), 225 | self._generate_development_docs(service_dir), 226 | self._generate_deployment_docs(service_dir) 227 | ] 228 | 229 | # Write updated content 230 | with open(readme_path, 'w') as f: 231 | f.write("\n".join(new_content)) 232 | 233 | def _generate_api_reference(self, service_dir: Path): 234 | """Generate detailed API reference documentation.""" 235 | api_doc_path = service_dir / "docs" / "API.md" 236 | api_content = [ 237 | "# API Reference\n", 238 | "## Overview\n", 239 | "This document provides detailed information about the service's API endpoints.\n", 240 | self._generate_api_docs(service_dir) 241 | ] 242 | 243 | os.makedirs(api_doc_path.parent, exist_ok=True) 244 | with open(api_doc_path, 'w') as f: 245 | f.write("\n".join(api_content)) 246 | 247 | def update_docs(self, service_dir: str = "."): 248 | """Update all documentation for the service.""" 249 | service_path = Path(service_dir) 250 | if not service_path.exists(): 251 | logger.error(f"Service directory not found: {service_dir}") 252 | sys.exit(1) 253 | 254 | logger.info(f"Updating documentation for service in {service_dir}") 255 | 256 | # Update main README 257 | self._update_readme(service_path) 258 | 259 | # Generate API reference 260 | self._generate_api_reference(service_path) 261 | 262 | logger.info("Documentation updated successfully!") 263 | logger.info("\nGenerated/updated files:") 264 | logger.info("- README.md") 265 | logger.info("- docs/API.md") 266 | 267 | def main(): 268 | """Main entry point for documentation generation.""" 269 | import argparse 270 | parser = argparse.ArgumentParser(description="Update service documentation.") 271 | parser.add_argument("--service-dir", default=".", help="Service directory to update docs for") 272 | args = parser.parse_args() 273 | 274 | generator = DocGenerator() 275 | generator.update_docs(args.service_dir) 276 | 277 | if __name__ == "__main__": 278 | main() -------------------------------------------------------------------------------- /scripts/validate.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Validation Runner 4 | 5 | Runs all validators for a specified container. 
6 | """ 7 | 8 | import sys 9 | import subprocess 10 | import argparse 11 | from pathlib import Path 12 | 13 | def validate_container(container_name): 14 | """Run all validators for a specific container.""" 15 | validators = [ 16 | ("Container Structure", f"python3 scripts/validators/container_validator.py containers/{container_name}"), 17 | ("Dockerfile", f"python3 scripts/validators/dockerfile_validator.py containers/{container_name}/Dockerfile"), 18 | ("Poetry Configuration", f"python3 scripts/validators/poetry_validator.py containers/{container_name}"), 19 | ] 20 | 21 | # Docker Compose validation is project-wide 22 | validators.append(("Docker Compose", "python3 scripts/validators/compose_validator.py containers/dev-environment/docker-compose.dev.yml")) 23 | 24 | all_passed = True 25 | for name, command in validators: 26 | print(f"\nRunning {name} validation...") 27 | result = subprocess.run(command, shell=True) 28 | if result.returncode != 0: 29 | all_passed = False 30 | 31 | return all_passed 32 | 33 | def main(): 34 | parser = argparse.ArgumentParser(description="Validate container configuration") 35 | parser.add_argument("container", help="Container name to validate") 36 | args = parser.parse_args() 37 | 38 | success = validate_container(args.container) 39 | return 0 if success else 1 40 | 41 | if __name__ == "__main__": 42 | sys.exit(main()) 43 | -------------------------------------------------------------------------------- /scripts/validate_service.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """Script to validate services against standardization templates.""" 3 | 4 | import argparse 5 | import os 6 | import subprocess 7 | import sys 8 | import time 9 | from pathlib import Path 10 | from typing import Dict, List, Optional, Tuple 11 | 12 | import toml 13 | import yaml 14 | from rich.console import Console 15 | from rich.panel import Panel 16 | from rich.progress import Progress, SpinnerColumn, TextColumn 17 | from rich.table import Table 18 | 19 | console = Console() 20 | 21 | class ServiceValidator: 22 | """Validates a service against standardization templates.""" 23 | 24 | def __init__(self, service_path: str): 25 | """Initialize validator with service path.""" 26 | self.service_path = Path(service_path) 27 | self.service_name = self.service_path.name 28 | self.errors: List[str] = [] 29 | self.warnings: List[str] = [] 30 | 31 | def validate_structure(self) -> bool: 32 | """Validate service directory structure.""" 33 | with console.status("[bold blue]Validating service structure..."): 34 | required_files = [ 35 | "Dockerfile", 36 | "pyproject.toml", 37 | "src", 38 | "tests", 39 | "README.md" 40 | ] 41 | 42 | missing_files = [ 43 | f for f in required_files 44 | if not (self.service_path / f).exists() 45 | ] 46 | 47 | if missing_files: 48 | self.errors.append(f"Missing required files/directories: {', '.join(missing_files)}") 49 | return False 50 | 51 | # Check src directory structure 52 | src_dir = self.service_path / "src" / self.service_name 53 | if not src_dir.exists(): 54 | self.errors.append(f"Missing source directory: {src_dir}") 55 | return False 56 | 57 | # Check tests structure 58 | tests_dir = self.service_path / "tests" 59 | required_test_dirs = ["unit", "integration"] 60 | missing_test_dirs = [ 61 | d for d in required_test_dirs 62 | if not (tests_dir / d).exists() 63 | ] 64 | if missing_test_dirs: 65 | self.warnings.append(f"Missing test directories: {', '.join(missing_test_dirs)}") 66 | 67 | 
return True 68 | 69 | def validate_pyproject_toml(self) -> bool: 70 | """Validate pyproject.toml against template.""" 71 | with console.status("[bold blue]Validating pyproject.toml..."): 72 | try: 73 | # Read service pyproject.toml 74 | pyproject_path = self.service_path / "pyproject.toml" 75 | with open(pyproject_path) as f: 76 | service_pyproject = toml.load(f) 77 | 78 | # Read template 79 | template_path = Path("docs/standards/python/PYPROJECT_TEMPLATE.toml") 80 | with open(template_path) as f: 81 | template = toml.load(f) 82 | 83 | # Check required sections 84 | required_sections = [ 85 | "tool.poetry", 86 | "tool.poetry.dependencies", 87 | "tool.poetry.group.dev.dependencies", 88 | "build-system", 89 | "tool.pytest.ini_options", 90 | "tool.black", 91 | "tool.isort", 92 | "tool.mypy", 93 | "tool.coverage.run", 94 | "tool.coverage.report" 95 | ] 96 | 97 | for section in required_sections: 98 | parts = section.split(".") 99 | current = service_pyproject 100 | for part in parts: 101 | if part not in current: 102 | self.errors.append(f"Missing required section in pyproject.toml: {section}") 103 | return False 104 | current = current[part] 105 | 106 | return True 107 | 108 | except Exception as e: 109 | self.errors.append(f"Error validating pyproject.toml: {str(e)}") 110 | return False 111 | 112 | def validate_dockerfile(self) -> bool: 113 | """Validate Dockerfile against template.""" 114 | with console.status("[bold blue]Validating Dockerfile..."): 115 | try: 116 | # Read service Dockerfile 117 | dockerfile_path = self.service_path / "Dockerfile" 118 | with open(dockerfile_path) as f: 119 | service_dockerfile = f.read() 120 | 121 | # Read template 122 | template_path = Path("docs/standards/docker/DOCKERFILE_TEMPLATE") 123 | with open(template_path) as f: 124 | template = f.read() 125 | 126 | # Check for required components 127 | required_components = [ 128 | "FROM python:3.11-slim", 129 | "PYTHONUNBUFFERED=1", 130 | "POETRY_VERSION", 131 | "HEALTHCHECK", 132 | "EXPOSE", 133 | "CMD" 134 | ] 135 | 136 | for component in required_components: 137 | if component not in service_dockerfile: 138 | self.errors.append(f"Missing required component in Dockerfile: {component}") 139 | return False 140 | 141 | return True 142 | 143 | except Exception as e: 144 | self.errors.append(f"Error validating Dockerfile: {str(e)}") 145 | return False 146 | 147 | def check_environment_variables(self) -> bool: 148 | """Check if all required environment variables are set.""" 149 | with console.status("[bold blue]Checking environment variables..."): 150 | try: 151 | # Read .env.example 152 | env_example_path = Path(".env.example") 153 | required_vars = [] 154 | 155 | with open(env_example_path) as f: 156 | for line in f: 157 | line = line.strip() 158 | if line and not line.startswith("#"): 159 | var_name = line.split("=")[0] 160 | required_vars.append(var_name) 161 | 162 | # Check if variables are set in environment 163 | missing_vars = [ 164 | var for var in required_vars 165 | if not os.getenv(var) 166 | ] 167 | 168 | if missing_vars: 169 | self.warnings.append(f"Missing environment variables: {', '.join(missing_vars)}") 170 | return False 171 | 172 | return True 173 | 174 | except Exception as e: 175 | self.errors.append(f"Error checking environment variables: {str(e)}") 176 | return False 177 | 178 | def check_container_health(self, timeout: int = 60) -> bool: 179 | """Check if service containers are healthy using docker-compose.""" 180 | with Progress( 181 | SpinnerColumn(), 182 | 
TextColumn("[progress.description]{task.description}"), 183 | transient=True, 184 | ) as progress: 185 | try: 186 | task = progress.add_task( 187 | description="[bold blue]Checking container health...", 188 | total=None 189 | ) 190 | 191 | start_time = time.time() 192 | while time.time() - start_time < timeout: 193 | # Use docker-compose ps to check container status 194 | result = subprocess.run( 195 | ["./dev", "ps", self.service_name], 196 | capture_output=True, 197 | text=True, 198 | check=False 199 | ) 200 | 201 | if result.returncode != 0: 202 | time.sleep(1) 203 | continue 204 | 205 | # Check if container is running and healthy 206 | if "Up" in result.stdout and "(healthy)" in result.stdout: 207 | return True 208 | elif "Up" in result.stdout and "(unhealthy)" in result.stdout: 209 | self.errors.append("Container health check failed") 210 | return False 211 | 212 | time.sleep(1) 213 | 214 | self.errors.append("Container health check timed out") 215 | return False 216 | 217 | except Exception as e: 218 | self.errors.append(f"Error checking container health: {str(e)}") 219 | return False 220 | 221 | def run_tests(self) -> bool: 222 | """Run service tests.""" 223 | with console.status("[bold blue]Running tests..."): 224 | try: 225 | # Run tests using run_tests.py as per testing rules 226 | result = subprocess.run( 227 | [ 228 | "python", "run_tests.py", 229 | self.service_name, 230 | "--type", "unit", 231 | "--extra", "--asyncio-mode=auto --log-cli-level=INFO" 232 | ], 233 | capture_output=True, 234 | text=True 235 | ) 236 | 237 | if result.returncode != 0: 238 | self.errors.append("Tests failed:") 239 | self.errors.append(result.stderr) 240 | return False 241 | 242 | return True 243 | 244 | except Exception as e: 245 | self.errors.append(f"Error running tests: {str(e)}") 246 | return False 247 | 248 | def check_python_path(self) -> Tuple[bool, Optional[str]]: 249 | """Check PYTHONPATH configuration.""" 250 | try: 251 | result = subprocess.run( 252 | [ 253 | "docker", "compose", "exec", 254 | self.service_name, 255 | "python", "-c", 256 | "import sys; print('\\n'.join(sys.path))" 257 | ], 258 | capture_output=True, 259 | text=True 260 | ) 261 | 262 | if result.returncode != 0: 263 | return False, None 264 | 265 | return True, result.stdout 266 | 267 | except Exception as e: 268 | self.errors.append(f"Error checking PYTHONPATH: {str(e)}") 269 | return False, None 270 | 271 | def validate(self) -> bool: 272 | """Run all validations.""" 273 | console.print(Panel(f"[bold blue]Validating {self.service_name} service")) 274 | 275 | validations = [ 276 | ("Structure", self.validate_structure), 277 | ("pyproject.toml", self.validate_pyproject_toml), 278 | ("Dockerfile", self.validate_dockerfile), 279 | ("Environment", self.check_environment_variables), 280 | ("Container Health", self.check_container_health), 281 | ("Tests", self.run_tests) 282 | ] 283 | 284 | table = Table(show_header=True) 285 | table.add_column("Check", style="cyan") 286 | table.add_column("Status", style="bold") 287 | 288 | all_passed = True 289 | for name, func in validations: 290 | try: 291 | passed = func() 292 | status = "[green]✓ Passed" if passed else "[red]✗ Failed" 293 | table.add_row(name, status) 294 | all_passed = all_passed and passed 295 | except Exception as e: 296 | table.add_row(name, f"[red]✗ Error: {str(e)}") 297 | all_passed = False 298 | 299 | console.print(table) 300 | 301 | if self.warnings: 302 | console.print("\n[yellow]Warnings:") 303 | for warning in self.warnings: 304 | console.print(f"[yellow]• 
{warning}") 305 | 306 | if self.errors: 307 | console.print("\n[red]Errors:") 308 | for error in self.errors: 309 | console.print(f"[red]• {error}") 310 | 311 | if all_passed: 312 | console.print("\n[green]All validations passed!") 313 | else: 314 | console.print("\n[red]Some validations failed.") 315 | 316 | # Print diagnostic information 317 | console.print("\n[bold blue]Diagnostic Information:") 318 | 319 | # Check Python path 320 | success, python_path = self.check_python_path() 321 | if success: 322 | console.print("\n[bold]Python Path:") 323 | console.print(python_path) 324 | 325 | # Check container logs using docker-compose 326 | try: 327 | result = subprocess.run( 328 | ["./dev", "logs", self.service_name], 329 | capture_output=True, 330 | text=True, 331 | check=False 332 | ) 333 | if result.returncode == 0: 334 | console.print("\n[bold]Container Logs:") 335 | console.print(result.stdout) 336 | except Exception as e: 337 | console.print(f"[red]Error getting container logs: {str(e)}") 338 | 339 | return all_passed 340 | 341 | def main(): 342 | """Main entry point.""" 343 | parser = argparse.ArgumentParser( 344 | description="Validate a service against project standards" 345 | ) 346 | parser.add_argument( 347 | "service_path", 348 | help="Path to the service directory" 349 | ) 350 | args = parser.parse_args() 351 | 352 | validator = ServiceValidator(args.service_path) 353 | success = validator.validate() 354 | 355 | sys.exit(0 if success else 1) 356 | 357 | if __name__ == "__main__": 358 | main() -------------------------------------------------------------------------------- /scripts/validators/api_validator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | API Validator 4 | 5 | Validates API endpoints against project standards for response format and headers. 6 | """ 7 | 8 | import sys 9 | import json 10 | import logging 11 | import argparse 12 | import requests 13 | from typing import Dict, List, Tuple, Any, Optional 14 | 15 | # Configure logging 16 | logging.basicConfig( 17 | level=logging.INFO, 18 | format='%(levelname)s: %(message)s' 19 | ) 20 | logger = logging.getLogger(__name__) 21 | 22 | # Define validation rules 23 | REQUIRED_ENDPOINTS = [ 24 | "/api/v1/health", 25 | "/api/v1/version" 26 | ] 27 | 28 | REQUIRED_HEADERS = [ 29 | "X-RateLimit-Limit", 30 | "X-RateLimit-Remaining", 31 | "X-RateLimit-Reset" 32 | ] 33 | 34 | def validate_response_format(response_json: Dict[str, Any]) -> List[str]: 35 | """ 36 | Validate API response format against standards. 
37 | 38 | Standard format: 39 | { 40 | "status": true/false, 41 | "data": {...} or "error": "error_code", "message": "Human readable message" 42 | } 43 | 44 | Also accepts alternative formats with warnings: 45 | {"status": "healthy"} - Health endpoint 46 | {"version": "1.0.0", ...} - Version endpoint 47 | """ 48 | errors = [] 49 | 50 | # Special case for health endpoint 51 | if "status" in response_json and response_json["status"] == "healthy": 52 | errors.append("WARNING: 'status' field should be a boolean (true/false), found: str") 53 | return errors 54 | 55 | # Special case for version endpoint 56 | if "version" in response_json and "status" not in response_json: 57 | errors.append("WARNING: Response missing 'status' field, should wrap data in {status: true, data: {...}}") 58 | return errors 59 | 60 | # Standard validation 61 | if "status" not in response_json: 62 | errors.append("WARNING: Response missing required 'status' field") 63 | else: 64 | # Check if status is a boolean 65 | if not isinstance(response_json["status"], bool): 66 | errors.append("WARNING: 'status' field should be a boolean (true/false), found: " + 67 | f"{type(response_json['status']).__name__}") 68 | 69 | # Check for data or error fields 70 | if response_json.get("status") is True and "data" not in response_json: 71 | errors.append("WARNING: Successful response missing 'data' field") 72 | 73 | if response_json.get("status") is False: 74 | if "error" not in response_json: 75 | errors.append("WARNING: Error response missing 'error' field") 76 | if "message" not in response_json: 77 | errors.append("WARNING: Error response missing 'message' field") 78 | 79 | return errors 80 | 81 | def validate_headers(headers: Dict[str, str]) -> List[str]: 82 | """Validate API response headers against standards.""" 83 | errors = [] 84 | 85 | # Check for rate limiting headers 86 | for header in REQUIRED_HEADERS: 87 | if header not in headers: 88 | # Make this a warning since rate limiting might be optional in some environments 89 | errors.append(f"WARNING: Missing required header: {header}") 90 | 91 | return errors 92 | 93 | def validate_api_endpoint(base_url: str, endpoint: str) -> List[str]: 94 | """Validate a single API endpoint.""" 95 | errors = [] 96 | url = f"{base_url}{endpoint}" 97 | 98 | try: 99 | response = requests.get(url, timeout=5) 100 | 101 | # Check status code 102 | if response.status_code != 200: 103 | errors.append(f"Endpoint {endpoint} returned status code {response.status_code}") 104 | return errors 105 | 106 | # Validate headers 107 | header_errors = validate_headers(response.headers) 108 | errors.extend(header_errors) 109 | 110 | # Validate response format 111 | try: 112 | response_json = response.json() 113 | format_errors = validate_response_format(response_json) 114 | errors.extend(format_errors) 115 | except json.JSONDecodeError: 116 | errors.append(f"Endpoint {endpoint} did not return valid JSON") 117 | 118 | except requests.RequestException as e: 119 | errors.append(f"Error connecting to {endpoint}: {str(e)}") 120 | 121 | return errors 122 | 123 | def validate_api(base_url: str) -> Tuple[bool, List[str]]: 124 | """ 125 | Validate API endpoints against standards. 
126 | 127 | Args: 128 | base_url: Base URL of the API (e.g., http://localhost:8000) 129 | 130 | Returns: 131 | Tuple of (success, errors) 132 | """ 133 | all_errors = [] 134 | 135 | # Validate required endpoints 136 | for endpoint in REQUIRED_ENDPOINTS: 137 | logger.info(f"Validating endpoint: {endpoint}") 138 | errors = validate_api_endpoint(base_url, endpoint) 139 | 140 | if errors: 141 | all_errors.append(f"Endpoint {endpoint} has issues:") 142 | for error in errors: 143 | all_errors.append(f" - {error}") 144 | 145 | return len(all_errors) == 0, all_errors 146 | 147 | def main(): 148 | """Run standalone validation if script is executed directly.""" 149 | parser = argparse.ArgumentParser(description="Validate API endpoints against standards") 150 | parser.add_argument("base_url", help="Base URL of the API (e.g., http://localhost:8000)") 151 | args = parser.parse_args() 152 | 153 | success, errors = validate_api(args.base_url) 154 | 155 | # Count actual errors (not warnings); "Endpoint ... has issues:" lines are group headers, not errors 156 | actual_errors = 0 157 | for error in errors: 158 | if "WARNING:" in error: 159 | logger.warning(error.replace("WARNING:", "").strip()) 160 | else: 161 | logger.error(error) 162 | if not error.startswith("Endpoint"): 163 | actual_errors += 1 164 | 165 | if actual_errors > 0: 166 | logger.error(f"API validation failed with {actual_errors} errors") 167 | return 1 168 | else: 169 | if errors: 170 | logger.info("API validation passed with warnings") 171 | else: 172 | logger.info("API validation passed successfully!") 173 | return 0 174 | 175 | if __name__ == "__main__": 176 | sys.exit(main()) -------------------------------------------------------------------------------- /scripts/validators/compose_validator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Docker Compose Validator 4 | 5 | Validates docker-compose.yml files against architectural standards, 6 | ensuring consistent service configuration and integration. 7 | """ 8 | 9 | import sys 10 | import yaml 11 | import logging 12 | import argparse 13 | from pathlib import Path 14 | from typing import List, Dict, Any 15 | 16 | # Configure logging 17 | logging.basicConfig( 18 | level=logging.INFO, 19 | format='%(levelname)s: %(message)s' 20 | ) 21 | logger = logging.getLogger(__name__) 22 | 23 | def validate_compose_service(service_name: str, service_config: Dict[str, Any]) -> List[str]: 24 | """ 25 | Validate a single service entry in docker-compose.yml. 26 | 27 | Args: 28 | service_name: Name of the service 29 | service_config: Service configuration dictionary 30 | 31 | Returns: 32 | List of error messages, empty list if valid 33 | """ 34 | errors = [] 35 | 36 | # Check build context is component-based 37 | if 'build' in service_config: 38 | build = service_config['build'] 39 | if isinstance(build, dict): 40 | # Validate context follows pattern 41 | if 'context' in build: 42 | context = build['context'] 43 | # Special case for dev container 44 | if service_name == "dev": 45 | if not (context == "./containers/dev-environment" or context == "." or context == "../.."): 46 | errors.append("Dev container build context should be './containers/dev-environment', '.' 
or '../..'") 47 | 48 | # Special case for dev container dockerfile path 49 | if 'dockerfile' in build: 50 | if not (build['dockerfile'] == "Dockerfile" or build['dockerfile'] == "containers/dev-environment/Dockerfile"): 51 | errors.append(f"Dev container dockerfile should be 'Dockerfile' or 'containers/dev-environment/Dockerfile'") 52 | else: 53 | if not (context == f"./containers/{service_name}" or context == "."): 54 | errors.append(f"Build context should be './containers/{service_name}' or '.'") 55 | 56 | # If using component-specific context, validate dockerfile is at root 57 | if context == f"./containers/{service_name}" and 'dockerfile' in build: 58 | if build['dockerfile'] != "Dockerfile": 59 | errors.append(f"When using component context, dockerfile should be 'Dockerfile'") 60 | 61 | # If using root context, validate dockerfile includes component path 62 | if context == "." and 'dockerfile' in build: 63 | if not build['dockerfile'].startswith(f"containers/{service_name}/"): 64 | errors.append(f"When using root context, dockerfile should include component path") 65 | 66 | # Check for Poetry-related args in build configuration 67 | if 'args' in build and isinstance(build['args'], dict): 68 | poetry_args = [arg for arg in build['args'] if 'poetry' in arg.lower()] 69 | if not poetry_args and is_python_service(service_name): 70 | errors.append(f"Python service {service_name} should include Poetry-related build args") 71 | 72 | # Check for volumes mounting Poetry configuration 73 | if 'volumes' in service_config and isinstance(service_config['volumes'], list): 74 | poetry_volume_found = False 75 | for volume in service_config['volumes']: 76 | if isinstance(volume, str) and ('pyproject.toml' in volume or 'poetry.lock' in volume): 77 | poetry_volume_found = True 78 | break 79 | 80 | if not poetry_volume_found and is_python_service(service_name): 81 | errors.append(f"Python service {service_name} should mount pyproject.toml/poetry.lock for development") 82 | errors.append(f" Consider adding: './containers/{service_name}/pyproject.toml:/app/pyproject.toml'") 83 | 84 | # Check for environment variables related to Python/Poetry 85 | if 'environment' in service_config: 86 | env_vars = service_config['environment'] 87 | if isinstance(env_vars, list): 88 | python_path_found = any('PYTHONPATH' in env for env in env_vars if isinstance(env, str)) 89 | elif isinstance(env_vars, dict): 90 | python_path_found = 'PYTHONPATH' in env_vars 91 | else: 92 | python_path_found = False 93 | 94 | if not python_path_found and is_python_service(service_name): 95 | errors.append(f"Python service {service_name} should define PYTHONPATH environment variable") 96 | 97 | return errors 98 | 99 | def is_python_service(service_name: str) -> bool: 100 | """ 101 | Determine if a service is likely a Python service based on name. 102 | This is a heuristic and may need improvement for specific projects. 103 | """ 104 | python_service_indicators = [ 105 | 'python', 'django', 'flask', 'fastapi', 'celery', 106 | 'worker', 'api', 'service', 'app', 'backend', 107 | 'foundation', 'agent', 'model', 'processor', 'analyzer' 108 | ] 109 | return any(indicator in service_name.lower() for indicator in python_service_indicators) 110 | 111 | def validate_compose_file(compose_file: str) -> List[str]: 112 | """ 113 | Validate the entire docker-compose.yml file. 
114 | 115 | Args: 116 | compose_file: Path to the docker-compose.yml file 117 | 118 | Returns: 119 | List of error messages, empty list if valid 120 | """ 121 | try: 122 | with open(compose_file, 'r') as f: 123 | compose_data = yaml.safe_load(f) 124 | 125 | if not compose_data or 'services' not in compose_data: 126 | return ["Invalid docker-compose.yml: missing services section"] 127 | 128 | errors = [] 129 | for service_name, service_config in compose_data['services'].items(): 130 | service_errors = validate_compose_service(service_name, service_config) 131 | if service_errors: 132 | errors.append(f"Service '{service_name}' has configuration issues:") 133 | for error in service_errors: 134 | errors.append(f" - {error}") 135 | 136 | # Check for Poetry-related global configurations 137 | if 'x-poetry' not in compose_data and any(is_python_service(name) for name in compose_data['services']): 138 | errors.append("Missing recommended Poetry YAML extension for shared configuration") 139 | errors.append(" Consider adding: 'x-poetry: &poetry-settings' with common Poetry configurations") 140 | 141 | return errors 142 | 143 | except yaml.YAMLError as e: 144 | return [f"YAML parsing error: {str(e)}"] 145 | except Exception as e: 146 | return [f"Error validating compose file: {str(e)}"] 147 | 148 | def main(): 149 | """Run standalone validation if script is executed directly.""" 150 | parser = argparse.ArgumentParser(description="Validate docker-compose.yml file") 151 | parser.add_argument("compose_file", help="Path to docker-compose.yml") 152 | args = parser.parse_args() 153 | 154 | errors = validate_compose_file(args.compose_file) 155 | if errors: 156 | for error in errors: 157 | logger.error(error) 158 | return 1 159 | else: 160 | logger.info("Docker Compose validation successful") 161 | return 0 162 | 163 | if __name__ == "__main__": 164 | sys.exit(main()) 165 | -------------------------------------------------------------------------------- /scripts/validators/container_validator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Container Structure Validator 4 | 5 | Validates container directory structure against architectural standards, 6 | ensuring consistent organization and file presence. 
7 | """ 8 | 9 | import sys 10 | import logging 11 | import ast 12 | from pathlib import Path 13 | import os 14 | from typing import List, Optional, Tuple 15 | import black 16 | 17 | # Configure logging 18 | logging.basicConfig( 19 | level=logging.INFO, 20 | format='%(levelname)s: %(message)s' 21 | ) 22 | logger = logging.getLogger(__name__) 23 | 24 | # Directories to exclude from validation 25 | EXCLUDED_DIRS = { 26 | '__pycache__', 27 | 'common', 28 | 'tools', 29 | 'monitoring', 30 | '.pytest_cache', 31 | '.git', 32 | '.venv', 33 | 'node_modules', 34 | 'build', 35 | 'dist', 36 | 'coverage', 37 | '.coverage', 38 | '.mypy_cache', 39 | '.tox', 40 | '.eggs', 41 | } 42 | 43 | def should_skip_directory(dir_path: Path) -> bool: 44 | """Check if a directory should be skipped during validation.""" 45 | return ( 46 | any(part.startswith('.') for part in dir_path.parts) or 47 | any(part in EXCLUDED_DIRS for part in dir_path.parts) 48 | ) 49 | 50 | def format_python_files(container_path: Path) -> List[str]: 51 | """Format all Python files in the container using black.""" 52 | errors = [] 53 | mode = black.Mode( 54 | target_versions={black.TargetVersion.PY311}, 55 | line_length=88, 56 | string_normalization=True, 57 | is_pyi=False, 58 | ) 59 | 60 | for py_file in container_path.rglob("*.py"): 61 | if not should_skip_directory(py_file.parent): 62 | try: 63 | with open(py_file, 'r', encoding='utf-8') as f: 64 | src = f.read() 65 | try: 66 | formatted_src = black.format_str(src, mode=mode) 67 | with open(py_file, 'w', encoding='utf-8') as f: 68 | f.write(formatted_src) 69 | except black.InvalidInput as e: 70 | errors.append(f"Syntax error in {py_file}: {str(e)}") 71 | except Exception as e: 72 | errors.append(f"Error reading/writing {py_file}: {str(e)}") 73 | 74 | return errors 75 | 76 | def validate_container_structure(container_path: Path) -> List[str]: 77 | """Validate an individual container's structure and organization.""" 78 | errors = [] 79 | container_name = container_path.name 80 | 81 | # Skip validation for certain directories 82 | if container_name in EXCLUDED_DIRS: 83 | return [] 84 | 85 | # First format all Python files 86 | if has_python_code(container_path): 87 | logger.info("Formatting Python files with black...") 88 | format_errors = format_python_files(container_path) 89 | errors.extend(format_errors) 90 | 91 | # Continue with regular validation 92 | if container_path.is_dir(): 93 | # Service containers need a Dockerfile 94 | if not is_utility_container(container_name): 95 | if not (container_path / "Dockerfile").exists(): 96 | errors.append(f"Missing required file: Dockerfile") 97 | 98 | # Check Python package structure 99 | if has_python_code(container_path): 100 | # Check src directory structure 101 | src_path = container_path / "src" 102 | if src_path.exists(): 103 | errors.extend(validate_python_package(src_path, container_name)) 104 | 105 | # Check tests directory structure 106 | tests_path = container_path / "tests" 107 | if tests_path.exists(): 108 | if not (tests_path / "__init__.py").exists(): 109 | errors.append(f"Missing __init__.py in tests directory") 110 | else: 111 | errors.extend(validate_python_package(tests_path, "tests")) 112 | 113 | # Poetry validation - Check for pyproject.toml and absence of requirements.txt/setup.py 114 | poetry_errors = validate_poetry_configuration(container_path) 115 | errors.extend(poetry_errors) 116 | 117 | return errors 118 | 119 | def validate_poetry_configuration(container_path: Path) -> List[str]: 120 | """Validate Poetry configuration 
for a container.""" 121 | errors = [] 122 | 123 | # Check for pyproject.toml 124 | pyproject_path = container_path / "pyproject.toml" 125 | if not pyproject_path.exists() and has_python_code(container_path): 126 | errors.append(f"Missing pyproject.toml (required for Poetry dependency management)") 127 | 128 | # Check for absence of requirements.txt and setup.py 129 | requirements_path = container_path / "requirements.txt" 130 | if requirements_path.exists(): 131 | errors.append(f"requirements.txt found - should use pyproject.toml with Poetry instead") 132 | 133 | setup_path = container_path / "setup.py" 134 | if setup_path.exists(): 135 | errors.append(f"setup.py found - should use pyproject.toml with Poetry instead") 136 | 137 | return errors 138 | 139 | def validate_python_package(package_path: Path, package_name: str) -> List[str]: 140 | """Validate a Python package structure and its imports.""" 141 | errors = [] 142 | 143 | # Check for root __init__.py 144 | root_init = package_path / package_name / "__init__.py" 145 | if not root_init.exists() and package_name != "tests": # Skip this check for tests directory 146 | errors.append(f"Missing package __init__.py file at {root_init.relative_to(package_path)}") 147 | elif root_init.exists(): 148 | # Validate __init__.py contents 149 | init_errors = validate_init_file(root_init) 150 | errors.extend(init_errors) 151 | 152 | # Check all Python subdirectories for __init__.py files 153 | for py_dir in package_path.rglob("*"): 154 | if py_dir.is_dir() and not should_skip_directory(py_dir): 155 | init_file = py_dir / "__init__.py" 156 | if not init_file.exists(): 157 | errors.append(f"Missing __init__.py in Python package directory: {py_dir.relative_to(package_path)}") 158 | else: 159 | # Validate __init__.py contents 160 | init_errors = validate_init_file(init_file) 161 | errors.extend(init_errors) 162 | 163 | # Check for relative imports in Python files 164 | for py_file in package_path.rglob("*.py"): 165 | if py_file.name != "__init__.py" and not should_skip_directory(py_file.parent): 166 | import_errors = validate_imports(py_file, package_path) 167 | errors.extend(import_errors) 168 | 169 | return errors 170 | 171 | def validate_init_file(init_file: Path) -> List[str]: 172 | """Validate the contents of an __init__.py file.""" 173 | errors = [] 174 | try: 175 | with open(init_file, 'r') as f: 176 | content = f.read() 177 | 178 | # Parse the file 179 | try: 180 | tree = ast.parse(content) 181 | 182 | # Check for __all__ definition 183 | has_all = any( 184 | isinstance(node, ast.Assign) and 185 | any(t.id == "__all__" for t in node.targets if isinstance(t, ast.Name)) 186 | for node in ast.walk(tree) 187 | ) 188 | 189 | if not has_all and any( 190 | isinstance(node, ast.ImportFrom) for node in ast.walk(tree) 191 | ): 192 | errors.append(f"Missing __all__ definition in {init_file} when it contains imports") 193 | 194 | except SyntaxError as e: 195 | errors.append(f"Syntax error in {init_file}: {str(e)}") 196 | 197 | except Exception as e: 198 | errors.append(f"Error reading {init_file}: {str(e)}") 199 | 200 | return errors 201 | 202 | def validate_imports(py_file: Path, package_path: Path) -> List[str]: 203 | """Validate imports in a Python file.""" 204 | errors = [] 205 | try: 206 | with open(py_file, 'r') as f: 207 | content = f.read() 208 | 209 | # Parse the file 210 | try: 211 | tree = ast.parse(content) 212 | 213 | # Check for relative imports and non-standardized internal imports 214 | for node in ast.walk(tree): 215 | if isinstance(node, 
ast.ImportFrom): 216 | if node.level > 0: 217 | errors.append(f"Relative import found in {py_file}: from {'.' * node.level}{node.module or ''} import ...") 218 | # Check for imports that start with 'containers.' which is now deprecated 219 | elif node.module and node.module.startswith('containers.'): 220 | errors.append(f"Deprecated import format in {py_file}: from {node.module} import ... (should start with the container name directly, e.g., 'foundation.' instead of 'containers.foundation.')") 221 | 222 | except SyntaxError as e: 223 | errors.append(f"Syntax error in {py_file}: {str(e)}") 224 | 225 | except Exception as e: 226 | errors.append(f"Error reading {py_file}: {str(e)}") 227 | 228 | return errors 229 | 230 | def is_utility_container(container_name: str) -> bool: 231 | """Determine if a container is a utility container that may not need a Dockerfile.""" 232 | utility_containers = ['common', 'tools', 'monitoring', 'resource_monitor'] 233 | return container_name in utility_containers 234 | 235 | def has_python_code(container_path: Path) -> bool: 236 | """Check if the container has Python code organized under a src/ directory.""" 237 | python_files = list(container_path.glob("**/*.py")) 238 | return len(python_files) > 0 and (container_path / "src").exists() 239 | 240 | def main(): 241 | """Run standalone validation if script is executed directly.""" 242 | import argparse 243 | 244 | parser = argparse.ArgumentParser(description="Validate container directory structure") 245 | parser.add_argument("container", help="Path to the container directory to validate") 246 | args = parser.parse_args() 247 | 248 | container_path = Path(args.container) 249 | if not container_path.exists() or not container_path.is_dir(): 250 | logger.error(f"Container directory not found: {container_path}") 251 | return 1 252 | 253 | errors = validate_container_structure(container_path) 254 | 255 | if errors: 256 | logger.error("Container validation failed:") 257 | for error in errors: 258 | logger.error(f" - {error}") 259 | return 1 260 | else: 261 | logger.info("Container validation passed successfully!") 262 | return 0 263 | 264 | if __name__ == "__main__": 265 | sys.exit(main()) 266 | -------------------------------------------------------------------------------- /scripts/validators/dockerfile_validator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Dockerfile Validator 4 | 5 | Validates a Dockerfile against best practices and project standards, 6 | ensuring consistent container builds and runtime behavior. 
7 | """ 8 | 9 | import sys 10 | import os 11 | import re 12 | import logging 13 | import argparse 14 | from pathlib import Path 15 | from typing import List, Dict, Any, Tuple 16 | 17 | # Configure logging 18 | logging.basicConfig( 19 | level=logging.INFO, 20 | format='%(levelname)s: %(message)s' 21 | ) 22 | logger = logging.getLogger(__name__) 23 | 24 | # Define validation rules 25 | REQUIRED_INSTRUCTIONS = ['FROM', 'WORKDIR', 'COPY', 'RUN', 'CMD'] 26 | RECOMMENDED_INSTRUCTIONS = ['LABEL', 'EXPOSE', 'ENTRYPOINT'] 27 | PROHIBITED_PATTERNS = [ 28 | r'npm install -g', # Global npm installs 29 | r'apt-get (install|update)(?!.*--no-install-recommends)', # apt without --no-install-recommends 30 | r'pip install(?!.*--no-cache-dir)', # pip without --no-cache-dir 31 | r'COPY containers/', # Absolute paths in COPY commands 32 | ] 33 | SECURITY_CHECKS = [ 34 | (r'FROM\s+\w+(?::\S+)?\s+[Aa][Ss]\s+\w+', "Multi-stage builds recommended for smaller images"), 35 | (r'USER\s+(?!root)', "Using non-root user recommended for security"), 36 | (r'rm -rf /var/lib/apt/lists/\*', "Clean up apt cache to reduce image size"), 37 | ] 38 | 39 | # Poetry-specific checks 40 | POETRY_CHECKS = [ 41 | (r'(curl -sSL https://install\.python-poetry\.org|pip install poetry)', "Poetry installation"), 42 | (r'COPY pyproject\.toml poetry\.lock\* \./|COPY \["pyproject\.toml", "poetry\.lock\*", "\./"\]', "Copy Poetry files"), 43 | (r'poetry (install|config)', "Poetry dependency installation"), 44 | (r'poetry config virtualenvs.create false', "Poetry virtualenv configuration") 45 | ] 46 | 47 | def validate_dockerfile(dockerfile_path: str) -> Tuple[List[str], List[str]]: 48 | """Validate a Dockerfile against project standards.""" 49 | errors = [] 50 | warnings = [] 51 | dockerfile_path = Path(dockerfile_path) 52 | 53 | # Check if file exists 54 | if not dockerfile_path.exists(): 55 | errors.append(f"Dockerfile not found at {dockerfile_path}") 56 | return errors, warnings 57 | 58 | # Read Dockerfile 59 | try: 60 | with open(dockerfile_path, 'r') as f: 61 | content = f.read() 62 | lines = content.splitlines() 63 | except Exception as e: 64 | errors.append(f"Error reading Dockerfile: {e}") 65 | return errors, warnings 66 | 67 | # Check for required instructions 68 | for instruction in REQUIRED_INSTRUCTIONS: 69 | if not re.search(rf'^\s*{instruction}\s+', content, re.MULTILINE | re.IGNORECASE): 70 | errors.append(f"Missing required instruction: {instruction}") 71 | 72 | # Check for recommended instructions 73 | for instruction in RECOMMENDED_INSTRUCTIONS: 74 | if not re.search(rf'^\s*{instruction}\s+', content, re.MULTILINE | re.IGNORECASE): 75 | warnings.append(f"Missing recommended instruction: {instruction}") 76 | 77 | # Check for prohibited patterns 78 | for pattern in PROHIBITED_PATTERNS: 79 | matches = re.findall(pattern, content, re.MULTILINE | re.IGNORECASE) 80 | if matches: 81 | if "containers/" in pattern: 82 | errors.append(f"Found absolute path in COPY command. 
Use relative paths instead: {matches}") 83 | else: 84 | errors.append(f"Found prohibited pattern: {pattern}") 85 | 86 | # Check for multi-stage build 87 | is_multistage = bool(re.search(r'FROM\s+\w+(?::\S+)?\s+[Aa][Ss]\s+\w+', content, re.MULTILINE | re.IGNORECASE)) 88 | 89 | # Security checks 90 | for pattern, message in SECURITY_CHECKS: 91 | if not re.search(pattern, content, re.MULTILINE | re.IGNORECASE): 92 | # Skip multi-stage build warning if we already have a multi-stage build 93 | if "Multi-stage builds" in message and is_multistage: 94 | continue 95 | # Skip apt cache cleanup warning if we have rm -rf /var/lib/apt/lists/* 96 | if "apt cache" in message and "rm -rf /var/lib/apt/lists/*" in content: 97 | continue 98 | warnings.append(f"Security recommendation: {message}") 99 | 100 | # Poetry-specific validation 101 | poetry_installation_found = False 102 | poetry_config_found = False 103 | poetry_usage_found = False 104 | 105 | for pattern, description in POETRY_CHECKS: 106 | if re.search(pattern, content, re.MULTILINE | re.IGNORECASE): 107 | if "installation" in description.lower(): 108 | poetry_installation_found = True 109 | elif "configuration" in description.lower(): 110 | poetry_config_found = True 111 | elif "dependency" in description.lower(): 112 | poetry_usage_found = True 113 | 114 | # Check for Python-based container that should use Poetry 115 | is_python_container = re.search(r'FROM\s+python', content, re.MULTILINE | re.IGNORECASE) 116 | if is_python_container: 117 | if not poetry_installation_found: 118 | errors.append("Missing Poetry installation in Dockerfile") 119 | errors.append("Recommendation: Add 'RUN curl -sSL https://install.python-poetry.org | python3 -'") 120 | 121 | if not poetry_usage_found and not re.search(r'poetry install', content, re.MULTILINE | re.IGNORECASE): 122 | errors.append("Missing Poetry dependency installation in Dockerfile") 123 | errors.append("Recommendation: Add 'RUN poetry install --no-interaction'") 124 | 125 | if "COPY pyproject.toml" not in content and "COPY [\"pyproject.toml" not in content: 126 | errors.append("Missing Poetry configuration file copy in Dockerfile") 127 | errors.append("Recommendation: Add 'COPY pyproject.toml poetry.lock* ./'") 128 | 129 | # Check for relative paths in COPY commands 130 | copy_commands = re.findall(r'^\s*COPY\s+(.+?)\s+(.+?)$', content, re.MULTILINE) 131 | for source, dest in copy_commands: 132 | if 'containers/' in source: 133 | errors.append(f"Use relative paths in COPY commands: {source} -> {dest}") 134 | errors.append(f"Recommendation: Change to 'COPY {source.split('/')[-1]} {dest}'") 135 | 136 | return errors, warnings 137 | 138 | def main(): 139 | """Run standalone validation if script is executed directly.""" 140 | parser = argparse.ArgumentParser(description="Validate Dockerfile against project standards") 141 | parser.add_argument("dockerfile_path", help="Path to the Dockerfile to validate") 142 | args = parser.parse_args() 143 | 144 | dockerfile_path = args.dockerfile_path 145 | errors, warnings = validate_dockerfile(dockerfile_path) 146 | 147 | # Display results 148 | if warnings: 149 | logger.warning("Warnings:") 150 | for warning in warnings: 151 | logger.warning(f" - {warning}") 152 | 153 | if errors: 154 | logger.error("Dockerfile validation failed:") 155 | for error in errors: 156 | logger.error(f" - {error}") 157 | return 1 158 | else: 159 | if warnings: 160 | logger.info("Dockerfile validation passed with warnings!") 161 | else: 162 | logger.info("Dockerfile validation passed 
successfully!") 163 | return 0 164 | 165 | if __name__ == "__main__": 166 | sys.exit(main()) 167 | -------------------------------------------------------------------------------- /scripts/validators/poetry_validator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Poetry Validator for AI Development Platform 4 | 5 | This script validates that containers properly use Poetry for dependency management 6 | by checking for required pyproject.toml files and validating their structure. 7 | 8 | Usage: 9 | python poetry_validator.py [container_path] 10 | """ 11 | 12 | import sys 13 | import argparse 14 | from pathlib import Path 15 | 16 | try: 17 | import tomllib as tomli # tomllib is stdlib on Python 3.11+ 18 | except ModuleNotFoundError: 19 | import tomli # fall back to the tomli backport on older interpreters 20 | 21 | REQUIRED_SECTIONS = ["tool.poetry", "tool.poetry.dependencies"] 22 | # Reserved for a future dev-dependency check; not enforced yet. 23 | REQUIRED_DEV_SECTIONS = ["tool.poetry.group.dev.dependencies"] 24 | REQUIRED_FIELDS = ["name", "version", "description"] 25 | 26 | 27 | def validate_poetry_config(container_path): 28 | """Validate Poetry configuration for a container.""" 29 | pyproject_path = Path(container_path) / "pyproject.toml" 30 | 31 | # Check if pyproject.toml exists 32 | if not pyproject_path.exists(): 33 | print(f"❌ Error: {pyproject_path} does not exist") 34 | return False 35 | 36 | # Check for requirements.txt or setup.py (should not exist) 37 | requirements_path = Path(container_path) / "requirements.txt" 38 | setup_path = Path(container_path) / "setup.py" 39 | 40 | if requirements_path.exists(): 41 | print(f"❌ Error: {requirements_path} exists (should use pyproject.toml instead)") 42 | return False 43 | 44 | if setup_path.exists(): 45 | print(f"❌ Error: {setup_path} exists (should use pyproject.toml instead)") 46 | return False 47 | 48 | # Parse and validate pyproject.toml 49 | try: 50 | with open(pyproject_path, "rb") as f: 51 | pyproject_data = tomli.load(f) 52 | 53 | # Check required sections 54 | for section in REQUIRED_SECTIONS: 55 | parts = section.split(".") 56 | current = pyproject_data 57 | for part in parts: 58 | if part not in current: 59 | print(f"❌ Error: Missing section '{section}' in pyproject.toml") 60 | return False 61 | current = current[part] 62 | 63 | # Check required fields in [tool.poetry] 64 | poetry_section = pyproject_data.get("tool", {}).get("poetry", {}) 65 | for field in REQUIRED_FIELDS: 66 | if field not in poetry_section: 67 | print(f"❌ Error: Missing required field '{field}' in [tool.poetry]") 68 | return False 69 | 70 | # Check Python version constraint 71 | if "python" not in poetry_section.get("dependencies", {}): 72 | print("❌ Error: Missing Python version constraint in dependencies") 73 | return False 74 | 75 | print(f"✅ Poetry configuration valid: {pyproject_path}") 76 | return True 77 | 78 | except Exception as e: 79 | print(f"❌ Error parsing pyproject.toml: {e}") 80 | return False 81 | 82 | 83 | def main(): 84 | """Run standalone validation if script is executed directly.""" 85 | parser = argparse.ArgumentParser(description="Validate Poetry configuration") 86 | parser.add_argument("container_path", help="Path to container directory") 87 | args = parser.parse_args() 88 | 89 | success = validate_poetry_config(args.container_path) 90 | sys.exit(0 if success else 1) 91 | 92 | 93 | if __name__ == "__main__": 94 | main() 95 | --------------------------------------------------------------------------------
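
Taken together, the validators above form a composable gate: each file-level check returns its findings rather than exiting, so a driver script (the role scripts/validate.py plays in the tree above) can aggregate them into a single pass/fail result. The sketch below shows one minimal way such a driver might chain three of the validators for a container directory. It is a sketch under stated assumptions, not code shipped with this repo: the run_all helper name is invented for illustration, the script is assumed to run from the repository root with the validators' dependencies (black, PyYAML, tomli/tomllib, requests) installed, and note that validate_container_structure reformats the container's Python files in place with black as a side effect.

    #!/usr/bin/env python3
    # minimal_validate.py: illustrative driver, not part of this repository.
    import sys
    from pathlib import Path

    # Assumes execution from the repo root so scripts/validators is importable.
    sys.path.insert(0, str(Path.cwd() / "scripts" / "validators"))

    from container_validator import validate_container_structure
    from dockerfile_validator import validate_dockerfile
    from poetry_validator import validate_poetry_config


    def run_all(container: Path) -> int:
        """Chain three validators for one container (hypothetical helper)."""
        failed = False

        # Structure check: returns a list of error strings (and reformats the
        # container's Python files with black as a side effect).
        structure_errors = validate_container_structure(container)
        for err in structure_errors:
            print(f"structure: {err}")
        failed = failed or bool(structure_errors)

        # Dockerfile check: returns (errors, warnings); only errors fail the run.
        dockerfile = container / "Dockerfile"
        if dockerfile.exists():
            errors, warnings = validate_dockerfile(str(dockerfile))
            for warning in warnings:
                print(f"dockerfile warning: {warning}")
            for err in errors:
                print(f"dockerfile error: {err}")
            failed = failed or bool(errors)

        # Poetry check: prints its own diagnostics and returns True on success.
        failed = failed or not validate_poetry_config(container)

        return 1 if failed else 0


    if __name__ == "__main__":
        sys.exit(run_all(Path(sys.argv[1])))

Invoked as, say, python minimal_validate.py containers/foundation, it exits 0 only when every chained check passes, matching the exit-code convention each validator's own main() already follows.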