├── .envrc ├── .python-version ├── tests ├── e2e │ ├── test_traceroute_issues.py │ ├── conftest.py │ ├── test_nodes_primary_channel_filter.py │ ├── test_line_of_sight.py │ └── test_debug_node_picker.py ├── fixtures │ ├── __init__.py │ └── traceroute_graph_data.py ├── integration │ ├── __init__.py │ ├── test_packet_routes.py │ ├── test_relay_node_analysis.py │ ├── test_sorting_endpoints.py │ ├── test_direct_receptions_api.py │ ├── test_packets_exclude_self.py │ ├── test_relay_node.py │ ├── test_traceroute_route_node_grouped.py │ ├── test_api_routes.py │ ├── test_exclude_self_api.py │ ├── test_traceroute_filters.py │ ├── test_api_text_decoding.py │ └── test_node_routes.py ├── unit │ ├── __init__.py │ ├── test_generate_screenshots.py │ ├── test_config.py │ ├── test_sanitize_floats.py │ ├── test_exclude_fields_ui.py │ ├── test_wsgi.py │ ├── test_node_direct_receptions.py │ ├── test_traceroute_graph_utils.py │ ├── test_node_picker_broadcast.py │ └── test_traceroute_service.py ├── __init__.py └── conftest │ └── test_server.py ├── .screenshots ├── map.jpg ├── nodes.jpg ├── packets.jpg ├── dashboard.jpg ├── hop_analysis.jpg ├── traceroutes.jpg ├── line_of_sight.jpg ├── longest_links.jpg ├── gateway_compare.jpg └── traceroute_graph.jpg ├── src └── malla │ ├── models │ └── __init__.py │ ├── services │ └── __init__.py │ ├── database │ ├── __init__.py │ └── connection.py │ ├── __init__.py │ ├── routes │ ├── __init__.py │ ├── node_routes.py │ ├── main_routes.py │ └── traceroute_routes.py │ ├── utils │ ├── __init__.py │ ├── geo_utils.py │ ├── serialization_utils.py │ └── traceroute_utils.py │ ├── templates │ └── components │ │ ├── relay_node_analysis.html │ │ ├── direct_receptions.html │ │ └── shared_sidebar.html │ ├── static │ ├── js │ │ ├── filter-store.js │ │ ├── location-cache.js │ │ └── relay_node_analysis.js │ └── css │ │ └── node-picker.css │ ├── telemetry.py │ ├── tracing_utils.py │ ├── wsgi.py │ └── config.py ├── malla-web ├── malla-capture ├── malla-web-gunicorn ├── 
docker-compose.prod.yml ├── .vscode ├── extensions.json └── settings.json ├── .cursorrules ├── MANIFEST.in ├── .dockerignore ├── LICENSE ├── .pre-commit-config.yaml ├── .github └── workflows │ ├── ci.yml │ ├── copilot-setup-steps.yml │ └── docker.yml ├── AI.md ├── flake.nix ├── flake.lock ├── Dockerfile ├── Makefile ├── .cursor └── settings.json ├── env.example ├── docker-compose.yml ├── config.sample.yaml ├── scripts ├── benchmark_map_render.py └── benchmark_longest_links.py ├── .gitignore └── pyproject.toml /.envrc: -------------------------------------------------------------------------------- 1 | use flake 2 | -------------------------------------------------------------------------------- /.python-version: -------------------------------------------------------------------------------- 1 | 3.13 2 | -------------------------------------------------------------------------------- /tests/e2e/test_traceroute_issues.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.screenshots/map.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zenitraM/malla/HEAD/.screenshots/map.jpg -------------------------------------------------------------------------------- /.screenshots/nodes.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zenitraM/malla/HEAD/.screenshots/nodes.jpg -------------------------------------------------------------------------------- /.screenshots/packets.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zenitraM/malla/HEAD/.screenshots/packets.jpg -------------------------------------------------------------------------------- /.screenshots/dashboard.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/zenitraM/malla/HEAD/.screenshots/dashboard.jpg -------------------------------------------------------------------------------- /.screenshots/hop_analysis.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zenitraM/malla/HEAD/.screenshots/hop_analysis.jpg -------------------------------------------------------------------------------- /.screenshots/traceroutes.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zenitraM/malla/HEAD/.screenshots/traceroutes.jpg -------------------------------------------------------------------------------- /.screenshots/line_of_sight.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zenitraM/malla/HEAD/.screenshots/line_of_sight.jpg -------------------------------------------------------------------------------- /.screenshots/longest_links.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zenitraM/malla/HEAD/.screenshots/longest_links.jpg -------------------------------------------------------------------------------- /.screenshots/gateway_compare.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zenitraM/malla/HEAD/.screenshots/gateway_compare.jpg -------------------------------------------------------------------------------- /.screenshots/traceroute_graph.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/zenitraM/malla/HEAD/.screenshots/traceroute_graph.jpg -------------------------------------------------------------------------------- /tests/fixtures/__init__.py: 
-------------------------------------------------------------------------------- 1 | """ 2 | Test fixtures for the Meshtastic Mesh Health Web UI test suite. 3 | 4 | This package contains fixture data and database setup utilities for testing. 5 | """ 6 | -------------------------------------------------------------------------------- /tests/integration/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Integration tests for the Meshtastic Mesh Health Web UI. 3 | 4 | These tests use a test database with fixture data to test the complete 5 | API endpoints and their interactions with the database. 6 | """ 7 | -------------------------------------------------------------------------------- /tests/unit/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Unit tests for the Meshtastic Mesh Health Web UI. 3 | 4 | These tests focus on testing individual functions, classes, and methods 5 | in isolation, without requiring a database or full application context. 6 | """ 7 | -------------------------------------------------------------------------------- /src/malla/models/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Data models for Meshtastic Mesh Health Web UI. 3 | 4 | This package contains data structures and parsing logic for different entities. 5 | """ 6 | 7 | from .traceroute import TracerouteHop, TraceroutePacket, TraceroutePath 8 | 9 | __all__ = ["TraceroutePacket", "TracerouteHop", "TraceroutePath"] 10 | -------------------------------------------------------------------------------- /malla-web: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Entry point script for Malla Web UI. 
4 | """ 5 | 6 | import sys 7 | from pathlib import Path 8 | 9 | # Add src to path for development 10 | src_path = Path(__file__).parent / "src" 11 | sys.path.insert(0, str(src_path)) 12 | 13 | from malla.web_ui import main 14 | 15 | if __name__ == "__main__": 16 | main() -------------------------------------------------------------------------------- /malla-capture: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Entry point script for Malla MQTT Capture. 4 | """ 5 | 6 | import sys 7 | from pathlib import Path 8 | 9 | # Add src to path for development 10 | src_path = Path(__file__).parent / "src" 11 | sys.path.insert(0, str(src_path)) 12 | 13 | from malla.mqtt_capture import main 14 | 15 | if __name__ == "__main__": 16 | main() -------------------------------------------------------------------------------- /malla-web-gunicorn: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | Entry point script for Malla Web UI with Gunicorn. 
4 | """ 5 | 6 | import sys 7 | from pathlib import Path 8 | 9 | # Add src to path for development 10 | src_path = Path(__file__).parent / "src" 11 | sys.path.insert(0, str(src_path)) 12 | 13 | from malla.wsgi import main 14 | 15 | if __name__ == "__main__": 16 | main() 17 | -------------------------------------------------------------------------------- /docker-compose.prod.yml: -------------------------------------------------------------------------------- 1 | # Production override for docker-compose.yml 2 | # Use with: docker-compose -f docker-compose.yml -f docker-compose.prod.yml up 3 | version: '3.8' 4 | 5 | services: 6 | malla-web: 7 | # Override the command to use Gunicorn for production 8 | command: ["/app/.venv/bin/malla-web-gunicorn"] 9 | environment: 10 | # Ensure debug is disabled in production 11 | - MALLA_DEBUG=false 12 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": [ 3 | "ms-python.python", 4 | "charliermarsh.ruff", 5 | "detachhead.basedpyright", 6 | "ms-python.pytest", 7 | "ms-vscode.test-adapter-converter", 8 | "ms-python.vscode-pylance" 9 | ], 10 | "unwantedRecommendations": [ 11 | "ms-python.black-formatter", 12 | "ms-python.isort", 13 | "ms-python.flake8", 14 | "ms-python.pylint", 15 | "ms-python.mypy-type-checker" 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /src/malla/services/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Service modules for business logic 3 | """ 4 | 5 | from .analytics_service import AnalyticsService 6 | from .gateway_service import GatewayService 7 | from .location_service import LocationService 8 | from .node_service import NodeNotFoundError, NodeService 9 | from .traceroute_service import TracerouteService 10 | 11 | __all__ = [ 12 | 
"TracerouteService", 13 | "LocationService", 14 | "AnalyticsService", 15 | "NodeService", 16 | "NodeNotFoundError", 17 | "GatewayService", 18 | ] 19 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Test package for Meshtastic Mesh Health Web UI 3 | 4 | This package contains comprehensive tests for the application including: 5 | - Unit tests for individual functions and classes 6 | - Integration tests for API endpoints and services 7 | - End-to-end tests with fixture data 8 | 9 | Test structure: 10 | - integration/: Integration tests with test database 11 | - unit/: Unit tests for isolated components 12 | - fixtures/: Test data and fixtures 13 | - conftest/: Shared pytest configuration and fixtures 14 | """ 15 | -------------------------------------------------------------------------------- /src/malla/database/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Database layer for Meshtastic Mesh Health Web UI. 3 | 4 | This package provides database connection management and data access operations. 5 | """ 6 | 7 | from .connection import get_db_connection 8 | from .repositories import ( 9 | DashboardRepository, 10 | LocationRepository, 11 | NodeRepository, 12 | PacketRepository, 13 | TracerouteRepository, 14 | ) 15 | 16 | __all__ = [ 17 | "get_db_connection", 18 | "DashboardRepository", 19 | "PacketRepository", 20 | "NodeRepository", 21 | "TracerouteRepository", 22 | "LocationRepository", 23 | ] 24 | -------------------------------------------------------------------------------- /.cursorrules: -------------------------------------------------------------------------------- 1 | # Deployment notes 2 | - To test with, assume there's an application runs at :5008 and that gets autoreloaded on your changes. No need to restart it. 
3 | - You can tail the application's log messages in the app.log file. 4 | - You don't have access to the sqlite command, but you can run inline python scripts by executing python that you can use to inspect the database or parse things as needed to understand. 5 | - Implement tests when you do changes that test that your fixes are correct - see tests/ and tests/README.md. 6 | - Run the test runner with all tests often and make sure you fix any tests you break in the process. 7 | -------------------------------------------------------------------------------- /src/malla/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Malla - Meshtastic MQTT to SQLite capture and web monitoring tools. 3 | 4 | A comprehensive web UI for browsing and analyzing Meshtastic mesh network health data. 5 | """ 6 | 7 | __version__ = "0.1.0" 8 | __title__ = "Malla" 9 | __description__ = "A comprehensive web UI for browsing and analyzing Meshtastic mesh network health data" 10 | __author__ = "Malla Contributors" 11 | __license__ = "MIT" 12 | 13 | # Package-level imports for convenience 14 | from .web_ui import create_app 15 | 16 | __all__ = [ 17 | "create_app", 18 | "__version__", 19 | "__title__", 20 | "__description__", 21 | "__author__", 22 | "__license__", 23 | ] 24 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.md 2 | include LICENSE 3 | include pyproject.toml 4 | include malla-web 5 | include malla-capture 6 | include uv.lock 7 | include .python-version 8 | include .envrc 9 | 10 | # Include source code 11 | recursive-include src *.py 12 | recursive-include src/malla/templates *.html 13 | recursive-include src/malla/static *.css *.js *.png *.jpg *.gif *.ico 14 | 15 | # Include tests 16 | recursive-include tests *.py 17 | 18 | # Include documentation 19 | recursive-include docs *.md 
*.rst *.txt 20 | 21 | # Exclude compiled files 22 | global-exclude *.pyc 23 | global-exclude *.pyo 24 | global-exclude *.pyd 25 | global-exclude __pycache__ 26 | global-exclude .git* 27 | global-exclude .DS_Store -------------------------------------------------------------------------------- /src/malla/routes/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Routes package for Meshtastic Mesh Health Web UI 3 | """ 4 | 5 | from .api_routes import api_bp 6 | from .gateway_routes import gateway_bp 7 | 8 | # Import all route blueprints 9 | from .main_routes import main_bp 10 | from .node_routes import node_bp 11 | from .packet_routes import packet_bp 12 | from .traceroute_routes import traceroute_bp 13 | 14 | 15 | def register_routes(app): 16 | """Register all route blueprints with the Flask app.""" 17 | app.register_blueprint(main_bp) 18 | app.register_blueprint(packet_bp) 19 | app.register_blueprint(node_bp) 20 | app.register_blueprint(traceroute_bp) 21 | app.register_blueprint(api_bp) 22 | app.register_blueprint(gateway_bp) 23 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | # Git 2 | .git 3 | .gitignore 4 | 5 | # Python 6 | __pycache__ 7 | *.pyc 8 | *.pyo 9 | *.pyd 10 | .Python 11 | build 12 | develop-eggs 13 | dist 14 | downloads 15 | eggs 16 | .eggs 17 | lib 18 | lib64 19 | parts 20 | sdist 21 | var 22 | wheels 23 | *.egg-info 24 | .installed.cfg 25 | *.egg 26 | 27 | # Virtual environments 28 | .venv 29 | venv 30 | .env 31 | 32 | # IDEs 33 | .vscode 34 | .cursor 35 | .idea 36 | 37 | # Testing 38 | .pytest_cache 39 | .coverage 40 | htmlcov 41 | .tox 42 | 43 | # Development 44 | .ruff_cache 45 | .direnv 46 | .envrc 47 | 48 | # Database files (will be mounted as volumes) 49 | *.db 50 | *.sqlite 51 | *.sqlite3 52 | 53 | # Logs 54 | *.log 55 | app.log 56 | 57 | # Screenshots and 
documentation 58 | .screenshots 59 | AI.md 60 | 61 | # Nix 62 | flake.nix 63 | flake.lock 64 | 65 | # Config files (should be mounted or provided via env vars) 66 | config.yaml 67 | 68 | # CI/CD 69 | .github 70 | 71 | # Other 72 | Makefile 73 | run_tests.py 74 | scripts/benchmark_*.py 75 | scripts/generate_screenshots.py 76 | -------------------------------------------------------------------------------- /src/malla/utils/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Utility functions for Meshtastic Mesh Health Web UI 3 | """ 4 | 5 | from .formatting import ( 6 | create_highlighted_route_display, 7 | format_complete_traceroute_path, 8 | format_node_display_name, 9 | format_node_id, 10 | format_node_short_name, 11 | format_route_display, 12 | format_time_ago, 13 | ) 14 | from .geo_utils import calculate_bearing, calculate_distance 15 | from .node_utils import convert_node_id, get_bulk_node_names, get_node_display_name 16 | from .serialization_utils import convert_bytes_to_base64 17 | from .traceroute_utils import parse_traceroute_payload 18 | 19 | __all__ = [ 20 | "format_time_ago", 21 | "format_node_id", 22 | "format_node_short_name", 23 | "format_node_display_name", 24 | "format_route_display", 25 | "format_complete_traceroute_path", 26 | "create_highlighted_route_display", 27 | "get_node_display_name", 28 | "get_bulk_node_names", 29 | "convert_node_id", 30 | "parse_traceroute_payload", 31 | "convert_bytes_to_base64", 32 | "calculate_distance", 33 | "calculate_bearing", 34 | ] 35 | -------------------------------------------------------------------------------- /tests/e2e/conftest.py: -------------------------------------------------------------------------------- 1 | """ 2 | Pytest configuration for Playwright end-to-end tests. 
3 | """ 4 | 5 | import pytest 6 | from playwright.sync_api import Browser 7 | 8 | 9 | @pytest.fixture(scope="session") 10 | def browser_context_args(): 11 | """Configure browser context arguments.""" 12 | return { 13 | "viewport": {"width": 1280, "height": 720}, 14 | "ignore_https_errors": True, 15 | } 16 | 17 | 18 | @pytest.fixture(scope="session") 19 | def browser_type_launch_args(): 20 | """Configure browser launch arguments.""" 21 | return { 22 | "headless": True, # Run in headless mode for CI/testing 23 | "args": [ 24 | "--no-sandbox", 25 | "--disable-dev-shm-usage", 26 | "--disable-gpu", 27 | "--disable-web-security", 28 | "--disable-features=VizDisplayCompositor", 29 | ], 30 | } 31 | 32 | 33 | @pytest.fixture(scope="function") 34 | def page(browser: Browser): 35 | """Create a new page for each test.""" 36 | context = browser.new_context() 37 | page = context.new_page() 38 | yield page 39 | context.close() 40 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Malla Contributors 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v4.5.0 4 | hooks: 5 | - id: trailing-whitespace 6 | - id: end-of-file-fixer 7 | - id: check-yaml 8 | - id: check-added-large-files 9 | - id: check-merge-conflict 10 | - id: check-toml 11 | - id: debug-statements 12 | - id: mixed-line-ending 13 | 14 | - repo: https://github.com/astral-sh/ruff-pre-commit 15 | rev: v0.11.13 16 | hooks: 17 | - id: ruff 18 | args: [--fix, --exit-non-zero-on-fix] 19 | - id: ruff-format 20 | 21 | - repo: https://github.com/RobertCraigie/pyright-python 22 | rev: v1.1.402 23 | hooks: 24 | - id: pyright 25 | name: basedpyright 26 | entry: basedpyright 27 | language: python 28 | types: [python] 29 | 30 | 31 | 32 | - repo: https://github.com/pycqa/bandit 33 | rev: 1.7.5 34 | hooks: 35 | - id: bandit 36 | args: ["-c", "pyproject.toml"] 37 | additional_dependencies: ["bandit[toml]"] 38 | 39 | - repo: https://github.com/python-poetry/poetry 40 | rev: 1.7.1 41 | hooks: 42 | - id: poetry-check 43 | -------------------------------------------------------------------------------- /tests/unit/test_generate_screenshots.py: -------------------------------------------------------------------------------- 1 | import socket 2 | import sqlite3 3 | from pathlib import Path 4 | 5 | from scripts import generate_screenshots as gs 6 | 7 | 8 | def test_find_free_port(): 9 | """_find_free_port should return a port that is immediately available.""" 10 | 11 | port = gs._find_free_port() 12 | assert isinstance(port, int) and 0 < 
port < 65536 13 | 14 | # The port should be free to bind to – try binding and releasing 15 | with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: 16 | sock.bind(("127.0.0.1", port)) 17 | 18 | 19 | def test_build_demo_database(tmp_path): 20 | """Verify that a demo database is created with expected tables.""" 21 | 22 | db_file = Path(tmp_path) / "demo.db" 23 | gs._build_demo_database(db_file) 24 | 25 | assert db_file.exists() and db_file.stat().st_size > 0, "Demo DB was not created" 26 | 27 | with sqlite3.connect(db_file) as conn: 28 | cursor = conn.execute( 29 | "SELECT name FROM sqlite_master WHERE type='table' AND name='packet_history'" 30 | ) 31 | assert cursor.fetchone(), "packet_history table missing" 32 | cursor = conn.execute( 33 | "SELECT name FROM sqlite_master WHERE type='table' AND name='node_info'" 34 | ) 35 | assert cursor.fetchone(), "node_info table missing" 36 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Continuous Integration 2 | 3 | on: 4 | push: 5 | branches: ["main", "master"] 6 | pull_request: 7 | workflow_dispatch: 8 | 9 | permissions: 10 | contents: read 11 | actions: write 12 | 13 | jobs: 14 | build: 15 | runs-on: ubuntu-latest 16 | 17 | steps: 18 | - uses: actions/checkout@v4 19 | - uses: nixbuild/nix-quick-install-action@v30 20 | with: 21 | nix_conf: | 22 | keep-env-derivations = true 23 | keep-outputs = true 24 | 25 | - name: Restore and save Nix store 26 | uses: nix-community/cache-nix-action@v6 27 | with: 28 | primary-key: nix-${{ runner.os }}-${{ hashFiles('**/*.nix', '**/flake.lock') }} 29 | restore-prefixes-first-match: nix-${{ runner.os }}- 30 | gc-max-store-size-linux: 1G 31 | purge: true 32 | purge-prefixes: nix-${{ runner.os }}- 33 | purge-created: 0 34 | purge-last-accessed: 0 35 | purge-primary-key: never 36 | 37 | - name: Install project dependencies (including 
dev) 38 | run: nix develop --command make install-dev 39 | 40 | - name: Run linting 41 | run: nix develop --command make lint 42 | 43 | - name: Run tests 44 | run: nix develop --command uv run pytest -n 4 --tb=line --no-header --quiet 45 | -------------------------------------------------------------------------------- /AI.md: -------------------------------------------------------------------------------- 1 | # About AI in this project 2 | 3 | Malla was almost entirely kickstarted, or rather _vibe coded_ using AI, with Cursor and Claude 4 Sonnet. 4 | 5 | This is a "between jobs hobby project" where I mostly cared about the idea and the UX working and the data making sense, and not so much about the code quality, scalability, security or maintainability. 6 | 7 | This means -- this code may likely not what you would call "production ready". I have _not_ gone doing a full review of the entirety of the code the model has churned out, although I at least tried for it to be kept relatively structured. 8 | 9 | I also tried the code to make tests that it continously ran (see [cursorrules](./cursorrules)), it was useful to keep a self-running feedback loop but it also likely has led to some of the tests actually being wrong or the AI cheating to make them pass. 10 | 11 | I don't think there's a lot of room for it to be security issues in a project like this, the app has no auth, the public facing server only interacts with its own SQLite and most of the data is already public anyhow (if the attacker comes with a LoRA receiver to the right place they can see most of the data). But.. famous last words. 12 | 13 | In any case, and just in case - don't run this in production or anywhere close to any critical data if you expose it to the internet, and if you do, take good isolation measures. I run it on a isolated cheap VPS. 
-------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "Meshcosas development environment"; 3 | 4 | inputs = { 5 | nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; 6 | flake-utils.url = "github:numtide/flake-utils"; 7 | }; 8 | 9 | outputs = 10 | { 11 | self, 12 | nixpkgs, 13 | flake-utils, 14 | }: 15 | flake-utils.lib.eachDefaultSystem ( 16 | system: 17 | let 18 | pkgs = nixpkgs.legacyPackages.${system}; 19 | python = pkgs.python313; 20 | in 21 | { 22 | devShells.default = pkgs.mkShell { 23 | buildInputs = with pkgs; [ 24 | # Python and uv 25 | python 26 | uv 27 | 28 | # Playwright browsers and dependencies 29 | playwright-driver 30 | playwright-test 31 | 32 | #build deps 33 | git 34 | gnumake 35 | ]; 36 | 37 | LD_LIBRARY_PATH = "${pkgs.stdenv.cc.cc.lib}/lib"; 38 | PLAYWRIGHT_BROWSERS_PATH = "${pkgs.playwright-driver.browsers}"; 39 | PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD = 1; 40 | UV_PYTHON_PREFERENCE = "only-system"; 41 | UV_PYTHON = "${python}"; 42 | shellHook = '' 43 | ${pkgs.uv}/bin/uv sync 44 | # Set up the Python virtual environment with uv 45 | test -d .venv || ${pkgs.uv}/bin/uv venv .venv 46 | source .venv/bin/activate 47 | ''; 48 | }; 49 | } 50 | ); 51 | } 52 | -------------------------------------------------------------------------------- /tests/unit/test_config.py: -------------------------------------------------------------------------------- 1 | # New unit tests for configuration loader 2 | 3 | from pathlib import Path 4 | 5 | from malla.config import AppConfig, _clear_config_cache, load_config 6 | 7 | 8 | def test_yaml_loading(tmp_path: Path, monkeypatch): 9 | """Ensure that values from a YAML file are loaded into AppConfig.""" 10 | 11 | # Clear any cached config from other imports 12 | _clear_config_cache() 13 | 14 | # Clear any environment variables that might override the YAML 15 | 
monkeypatch.delenv("MALLA_NAME", raising=False) 16 | monkeypatch.delenv("MALLA_PORT", raising=False) 17 | monkeypatch.delenv("MALLA_HOME_MARKDOWN", raising=False) 18 | 19 | yaml_file = tmp_path / "config.yaml" 20 | yaml_file.write_text(""" 21 | name: CustomName 22 | home_markdown: "# Welcome\nThis is **markdown** content." 23 | port: 9999 24 | """) 25 | 26 | cfg = load_config(config_path=yaml_file) 27 | 28 | assert isinstance(cfg, AppConfig) 29 | assert cfg.name == "CustomName" 30 | assert "markdown" in cfg.home_markdown 31 | assert cfg.port == 9999 32 | 33 | 34 | def test_env_override(monkeypatch): 35 | """Environment variables with the `MALLA_` prefix override YAML/defaults.""" 36 | 37 | # Clear any cached config from other imports 38 | _clear_config_cache() 39 | 40 | monkeypatch.setenv("MALLA_NAME", "EnvName") 41 | monkeypatch.setenv("MALLA_DEBUG", "true") 42 | cfg = load_config(config_path=None) 43 | 44 | assert cfg.name == "EnvName" 45 | assert cfg.debug is True 46 | -------------------------------------------------------------------------------- /tests/integration/test_packet_routes.py: -------------------------------------------------------------------------------- 1 | """ 2 | Integration tests for packet routes 3 | """ 4 | 5 | import pytest 6 | 7 | 8 | class TestPacketRoutes: 9 | """Test packet-related routes.""" 10 | 11 | def test_packets_page_renders(self, client): 12 | """Test that the packets page renders successfully.""" 13 | response = client.get("/packets") 14 | assert response.status_code == 200 15 | assert b"Packets" in response.data 16 | assert b"packetsTable" in response.data # Table container ID 17 | 18 | def test_packet_detail_page_renders(self, client): 19 | """Test that a packet detail page renders successfully.""" 20 | # First, get a packet ID from the database 21 | from src.malla.database.repositories import PacketRepository 22 | 23 | # Get the first packet 24 | result = PacketRepository.get_packets(limit=1, offset=0) 25 | if 
result["packets"]: 26 | packet_id = result["packets"][0]["id"] 27 | 28 | # Test the packet detail page 29 | response = client.get(f"/packet/{packet_id}") 30 | assert response.status_code == 200 31 | assert b"Packet #" in response.data 32 | assert str(packet_id).encode() in response.data 33 | else: 34 | pytest.skip("No packets available for testing") 35 | 36 | def test_packet_detail_not_found(self, client): 37 | """Test that non-existent packet returns 404.""" 38 | response = client.get("/packet/999999") 39 | assert response.status_code == 404 40 | assert b"Packet not found" in response.data 41 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.testing.pytestArgs": [ 3 | "tests" 4 | ], 5 | "python.testing.unittestEnabled": false, 6 | "python.testing.pytestEnabled": true, 7 | 8 | // Ruff configuration 9 | "python.defaultInterpreterPath": "./.venv/bin/python", 10 | "[python]": { 11 | "editor.defaultFormatter": "charliermarsh.ruff", 12 | "editor.formatOnSave": true, 13 | "editor.codeActionsOnSave": { 14 | "source.fixAll.ruff": "explicit", 15 | "source.organizeImports.ruff": "explicit" 16 | } 17 | }, 18 | 19 | // Ruff extension settings 20 | "ruff.enable": true, 21 | "ruff.lint.enable": true, 22 | "ruff.format.enable": true, 23 | "ruff.organizeImports": true, 24 | "ruff.fixAll": true, 25 | 26 | // BasedPyright configuration 27 | "python.analysis.typeCheckingMode": "off", 28 | "basedpyright.enable": true, 29 | "pylsp.plugins.pycodestyle.enabled": false, 30 | "pylsp.plugins.pyflakes.enabled": false, 31 | "pylsp.plugins.mccabe.enabled": false, 32 | "pylsp.plugins.pyls_isort.enabled": false, 33 | "pylsp.plugins.yapf.enabled": false, 34 | "pylsp.plugins.autopep8.enabled": false, 35 | 36 | // Disable other formatters/linters to avoid conflicts 37 | "python.linting.enabled": false, 38 | "python.linting.pylintEnabled": false, 
39 | "python.linting.flake8Enabled": false, 40 | "python.linting.mypyEnabled": false, 41 | "black-formatter.enable": false, 42 | "isort.enable": false, 43 | 44 | // Editor settings 45 | "editor.rulers": [88], 46 | "files.trimTrailingWhitespace": true, 47 | "files.insertFinalNewline": true, 48 | "files.trimFinalNewlines": true 49 | } 50 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "flake-utils": { 4 | "inputs": { 5 | "systems": "systems" 6 | }, 7 | "locked": { 8 | "lastModified": 1731533236, 9 | "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", 10 | "owner": "numtide", 11 | "repo": "flake-utils", 12 | "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", 13 | "type": "github" 14 | }, 15 | "original": { 16 | "owner": "numtide", 17 | "repo": "flake-utils", 18 | "type": "github" 19 | } 20 | }, 21 | "nixpkgs": { 22 | "locked": { 23 | "lastModified": 1758690382, 24 | "narHash": "sha256-NY3kSorgqE5LMm1LqNwGne3ZLMF2/ILgLpFr1fS4X3o=", 25 | "owner": "NixOS", 26 | "repo": "nixpkgs", 27 | "rev": "e643668fd71b949c53f8626614b21ff71a07379d", 28 | "type": "github" 29 | }, 30 | "original": { 31 | "owner": "NixOS", 32 | "ref": "nixos-unstable", 33 | "repo": "nixpkgs", 34 | "type": "github" 35 | } 36 | }, 37 | "root": { 38 | "inputs": { 39 | "flake-utils": "flake-utils", 40 | "nixpkgs": "nixpkgs" 41 | } 42 | }, 43 | "systems": { 44 | "locked": { 45 | "lastModified": 1681028828, 46 | "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", 47 | "owner": "nix-systems", 48 | "repo": "default", 49 | "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", 50 | "type": "github" 51 | }, 52 | "original": { 53 | "owner": "nix-systems", 54 | "repo": "default", 55 | "type": "github" 56 | } 57 | } 58 | }, 59 | "root": "root", 60 | "version": 7 61 | } 62 | 
-------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # Use a Python image with uv pre-installed 2 | FROM ghcr.io/astral-sh/uv:python3.13-bookworm-slim 3 | 4 | # Set environment variables for uv optimization 5 | ENV UV_COMPILE_BYTECODE=1 6 | ENV UV_LINK_MODE=copy 7 | ENV PYTHONUNBUFFERED=1 8 | ENV PYTHONDONTWRITEBYTECODE=1 9 | ENV MALLA_HOST=0.0.0.0 10 | ENV MALLA_PORT=5008 11 | 12 | # Install the project into `/app` 13 | WORKDIR /app 14 | 15 | RUN apt update && apt install -y git 16 | 17 | # Copy dependency files and metadata files (required for package build) 18 | # Install the project's dependencies using the lockfile and settings 19 | RUN --mount=type=cache,target=/root/.cache/uv \ 20 | --mount=type=bind,source=uv.lock,target=uv.lock \ 21 | --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ 22 | --mount=type=bind,source=LICENSE,target=LICENSE \ 23 | --mount=type=bind,source=README.md,target=README.md \ 24 | uv sync --locked --no-install-project --no-dev 25 | 26 | # Then, add the rest of the project source code and install it 27 | # Installing separately from its dependencies allows optimal layer caching 28 | COPY . 
/app 29 | RUN --mount=type=cache,target=/root/.cache/uv \ 30 | uv sync --locked --no-dev 31 | 32 | # Create app user 33 | RUN groupadd --gid 1000 app && \ 34 | useradd --uid 1000 --gid 1000 --create-home --shell /bin/bash app 35 | 36 | # Create data directory for database and set permissions 37 | RUN mkdir -p /app/data && chown -R app:app /app 38 | 39 | # Switch to non-root user 40 | USER app 41 | 42 | # Place executables in the environment at the front of the path 43 | ENV PATH="/app/.venv/bin:$PATH" 44 | 45 | # Reset the entrypoint, don't invoke `uv` 46 | ENTRYPOINT [] 47 | 48 | # Expose port 49 | EXPOSE 5008 50 | 51 | # Default command runs the web UI 52 | CMD ["/app/.venv/bin/malla-web"] 53 | -------------------------------------------------------------------------------- /.github/workflows/copilot-setup-steps.yml: -------------------------------------------------------------------------------- 1 | name: Copilot Setup Steps 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | paths: 8 | - .github/workflows/copilot-setup-steps.yml 9 | pull_request: 10 | paths: 11 | - .github/workflows/copilot-setup-steps.yml 12 | 13 | jobs: 14 | # The job MUST be called `copilot-setup-steps` or it will not be picked up by Copilot. 15 | copilot-setup-steps: 16 | runs-on: ubuntu-latest 17 | 18 | # Set the permissions to the lowest permissions possible needed for your steps. 19 | # Copilot will be given its own token for its operations. 20 | permissions: 21 | # If you want to clone the repository as part of your setup steps, for example to install dependencies, you'll need the `contents: read` permission. If you don't clone the repository in your setup steps, Copilot will do this for you automatically after the steps complete. 22 | contents: read 23 | 24 | # You can define any steps you want, and they will run before the agent starts. 25 | # If you do not check out your code, Copilot will do this for you. 
26 | steps: 27 | - name: Checkout code 28 | uses: actions/checkout@v4 29 | 30 | - name: Install uv package manager 31 | run: curl -LsSf https://astral.sh/uv/install.sh | sh 32 | 33 | - name: Add uv to PATH 34 | run: echo "$HOME/.local/bin" >> $GITHUB_PATH 35 | 36 | - name: Install Python dependencies 37 | run: | 38 | export PATH="$HOME/.local/bin:$PATH" 39 | uv sync --dev 40 | 41 | - name: Install Playwright browsers (for e2e tests) 42 | run: | 43 | export PATH="$HOME/.local/bin:$PATH" 44 | uv run playwright install 45 | 46 | - name: Copy sample config 47 | run: cp config.sample.yaml config.yaml 48 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: help install install-dev test test-cov lint format clean build upload docs serve-docs 2 | .DEFAULT_GOAL := help 3 | 4 | help: ## Show this help message 5 | @echo "Available commands:" 6 | @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' 7 | 8 | install: ## Install the package in development mode 9 | uv sync 10 | 11 | install-dev: ## Install with development dependencies 12 | uv sync --dev 13 | 14 | test: ## Run tests 15 | uv run pytest 16 | 17 | test-cov: ## Run tests with coverage 18 | uv run pytest --cov=src/malla --cov-report=html --cov-report=term 19 | 20 | lint: ## Run linting tools 21 | uv run ruff check src tests 22 | uv run basedpyright src 23 | 24 | format: ## Format code 25 | uv run ruff format src tests 26 | uv run ruff check --fix src tests 27 | 28 | clean: ## Clean build artifacts 29 | rm -rf build/ 30 | rm -rf dist/ 31 | rm -rf *.egg-info/ 32 | rm -rf .pytest_cache/ 33 | rm -rf .coverage 34 | rm -rf htmlcov/ 35 | find . -type d -name __pycache__ -exec rm -rf {} + 36 | find . 
-type f -name "*.pyc" -delete 37 | 38 | build: clean ## Build the package 39 | uv build 40 | 41 | upload: build ## Upload to PyPI (requires authentication) 42 | uv publish 43 | 44 | docs: ## Build documentation 45 | @echo "Documentation build not yet configured" 46 | 47 | serve-docs: ## Serve documentation locally 48 | @echo "Documentation serving not yet configured" 49 | 50 | run-web: ## Run the web UI 51 | ./malla-web 52 | 53 | run-capture: ## Run the MQTT capture tool 54 | ./malla-capture 55 | 56 | dev-setup: install-dev ## Set up development environment 57 | uv run pre-commit install 58 | 59 | check: lint test ## Run all checks (lint + test) 60 | 61 | ci: install-dev check ## Run CI pipeline locally -------------------------------------------------------------------------------- /.cursor/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.testing.pytestArgs": [ 3 | "tests" 4 | ], 5 | "python.testing.unittestEnabled": false, 6 | "python.testing.pytestEnabled": true, 7 | 8 | // Ruff configuration 9 | "python.defaultInterpreterPath": "./.venv/bin/python", 10 | "[python]": { 11 | "editor.defaultFormatter": "charliermarsh.ruff", 12 | "editor.formatOnSave": true, 13 | "editor.codeActionsOnSave": { 14 | "source.fixAll.ruff": "explicit", 15 | "source.organizeImports.ruff": "explicit" 16 | } 17 | }, 18 | 19 | // Ruff extension settings 20 | "ruff.enable": true, 21 | "ruff.lint.enable": true, 22 | "ruff.format.enable": true, 23 | "ruff.organizeImports": true, 24 | "ruff.fixAll": true, 25 | 26 | // BasedPyright configuration 27 | "python.analysis.typeCheckingMode": "standard", 28 | "basedpyright.enable": true, 29 | "pylsp.plugins.pycodestyle.enabled": false, 30 | "pylsp.plugins.pyflakes.enabled": false, 31 | "pylsp.plugins.mccabe.enabled": false, 32 | "pylsp.plugins.pyls_isort.enabled": false, 33 | "pylsp.plugins.yapf.enabled": false, 34 | "pylsp.plugins.autopep8.enabled": false, 35 | 36 | // Disable other 
"""Tests for the float-sanitisation helpers used before JSON serialisation."""

import math

from malla.utils.serialization_utils import sanitize_floats


def test_sanitize_floats_basic():
    """Non-finite floats become None at any nesting depth; finite values survive."""
    payload = {
        "valid": 123.45,
        "nan_value": float("nan"),
        "inf_value": float("inf"),
        "nested": {
            "neg_inf": float("-inf"),
            "list": [1, 2, math.nan, math.inf, -math.inf],
        },
    }

    result = sanitize_floats(payload)

    # Finite numbers pass through untouched.
    assert result["valid"] == 123.45
    # NaN / +inf / -inf are replaced with None, including inside nested
    # dicts and lists.
    assert result["nan_value"] is None
    assert result["inf_value"] is None
    assert result["nested"]["neg_inf"] is None
    assert result["nested"]["list"] == [1, 2, None, None, None]


def test_safe_jsonify(app):
    """Test that safe_jsonify properly sanitizes data and returns valid Flask response."""
    from malla.routes.api_routes import safe_jsonify

    payload = {
        "normal": 42.0,
        "nan": float("nan"),
        "inf": float("inf"),
        "nested": {"neg_inf": float("-inf")},
    }

    with app.app_context():
        # Must not raise despite the non-JSON-compliant float values.
        response = safe_jsonify(payload)

        # A real Flask response object comes back with a 200 status.
        assert hasattr(response, "get_json")
        assert response.status_code == 200

        # Every non-finite float in the payload is nulled out.
        body = response.get_json()
        assert body["normal"] == 42.0
        assert body["nan"] is None
        assert body["inf"] is None
        assert body["nested"]["neg_inf"] is None
"""
Node-related routes for the Meshtastic Mesh Health Web UI
"""

import logging

from flask import Blueprint, render_template

# Import from the new modular architecture
from ..database.repositories import NodeRepository

logger = logging.getLogger(__name__)
node_bp = Blueprint("node", __name__)


def _parse_node_id(node_id) -> int:
    """Normalize a node identifier to its integer form.

    Accepted formats:
      * ``!deadbeef`` -- Meshtastic-style hex ID with a ``!`` prefix
      * ``deadbeef``  -- bare hex string (any non-decimal string)
      * ``12345`` / ``int`` -- decimal string or plain integer

    Raises:
        ValueError: if the value cannot be interpreted in any format.
    """
    if isinstance(node_id, str) and node_id.startswith("!"):
        return int(node_id[1:], 16)
    if isinstance(node_id, str) and not node_id.isdigit():
        return int(node_id, 16)
    return int(node_id)


@node_bp.route("/nodes")
def nodes():
    """Node browser page using modern table interface."""
    logger.info("Nodes route accessed")
    try:
        logger.info("Nodes page rendered")
        return render_template("nodes.html")
    except Exception as e:
        logger.error(f"Error in nodes route: {e}")
        return f"Nodes error: {e}", 500


# BUG FIX: the route was registered as "/node/" without a URL variable,
# but the view function requires a ``node_id`` argument, so Flask could
# never supply it.  The rule must capture the trailing path segment.
@node_bp.route("/node/<node_id>")
def node_detail(node_id):
    """Node detail page showing comprehensive information about a specific node.

    Args:
        node_id: Node identifier from the URL; hex (with or without ``!``)
            and decimal forms are accepted.

    Returns:
        Rendered detail page, or an error string with a 400/404/500 status.
    """
    logger.info(f"Node detail route accessed for node {node_id}")
    try:
        # Handle both hex ID and integer node ID.
        try:
            node_id_int = _parse_node_id(node_id)
        except ValueError:
            return "Invalid node ID format", 400

        # Get node details using the repository
        node_details = NodeRepository.get_node_details(node_id_int)
        if not node_details:
            return "Node not found", 404

        logger.info("Node detail page rendered successfully")
        return render_template("node_detail.html", **node_details)
    except Exception as e:
        logger.error(f"Error in node detail route: {e}")
        return f"Node detail error: {e}", 500
| # Each visible row should contain 'LongFast' badge in Channel column 43 | rows_html = page.locator("#nodesTable tbody").inner_html() 44 | assert "LongFast" in rows_html, "Filtered rows should display selected channel" 45 | -------------------------------------------------------------------------------- /env.example: -------------------------------------------------------------------------------- 1 | # Malla Configuration 2 | # Copy this file to .env and adjust the values as needed 3 | 4 | # ============================================================================= 5 | # Docker Configuration 6 | # ============================================================================= 7 | 8 | # Docker image to use (leave as default to use pre-built images from GHCR) 9 | # MALLA_IMAGE=ghcr.io/zenitram/malla:latest 10 | 11 | # Port to expose the web UI on (default: 5008) 12 | # MALLA_WEB_PORT=5008 13 | 14 | # Web server command (choose between Flask dev server and Gunicorn) 15 | # Default: /app/.venv/bin/malla-web (Flask development server) 16 | # Production: /app/.venv/bin/malla-web-gunicorn (Gunicorn WSGI server) 17 | # MALLA_WEB_COMMAND=/app/.venv/bin/malla-web-gunicorn 18 | 19 | # ============================================================================= 20 | # Application Configuration 21 | # ============================================================================= 22 | 23 | # Display name shown in the navigation bar 24 | MALLA_NAME=Malla 25 | 26 | # Flask secret key (change this for production!) 
27 | MALLA_SECRET_KEY=your-secret-key-here 28 | 29 | # Enable debug mode (DO NOT use in production) 30 | MALLA_DEBUG=false 31 | 32 | # ============================================================================= 33 | # MQTT Configuration (Required) 34 | # ============================================================================= 35 | 36 | # MQTT broker address (REQUIRED - set this to your broker's IP/hostname) 37 | MALLA_MQTT_BROKER_ADDRESS=127.0.0.1 38 | 39 | # MQTT broker port (default: 1883) 40 | MALLA_MQTT_PORT=1883 41 | 42 | # MQTT authentication (optional) 43 | MALLA_MQTT_USERNAME= 44 | MALLA_MQTT_PASSWORD= 45 | 46 | # MQTT topic configuration (defaults should work for most setups) 47 | MALLA_MQTT_TOPIC_PREFIX=msh 48 | MALLA_MQTT_TOPIC_SUFFIX=/+/+/+/# 49 | 50 | # Default channel key for decrypting secondary channels (base64, optional) 51 | # MALLA_DEFAULT_CHANNEL_KEY=1PG7OiApB1nwvP+rz05pAQ== 52 | -------------------------------------------------------------------------------- /.github/workflows/docker.yml: -------------------------------------------------------------------------------- 1 | name: Build and Push Docker Images 2 | 3 | on: 4 | push: 5 | branches: ["main"] 6 | pull_request: 7 | branches: ["main"] 8 | workflow_dispatch: 9 | 10 | env: 11 | REGISTRY: ghcr.io 12 | IMAGE_NAME: ${{ github.repository }} 13 | 14 | jobs: 15 | build-and-push: 16 | runs-on: ubuntu-latest 17 | permissions: 18 | contents: read 19 | packages: write 20 | id-token: write 21 | attestations: write 22 | 23 | steps: 24 | - name: Checkout repository 25 | uses: actions/checkout@v4 26 | 27 | - name: Set up Docker Buildx 28 | uses: docker/setup-buildx-action@v3 29 | 30 | - name: Log in to Container Registry 31 | if: github.event_name != 'pull_request' 32 | uses: docker/login-action@v3 33 | with: 34 | registry: ${{ env.REGISTRY }} 35 | username: ${{ github.actor }} 36 | password: ${{ secrets.GITHUB_TOKEN }} 37 | 38 | - name: Extract metadata 39 | id: meta 40 | uses: 
docker/metadata-action@v5 41 | with: 42 | images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} 43 | tags: | 44 | type=ref,event=branch 45 | type=ref,event=pr 46 | type=raw,value=latest,enable={{is_default_branch}} 47 | type=sha,prefix=sha- 48 | 49 | - name: Build and push Docker image 50 | id: build 51 | uses: docker/build-push-action@v5 52 | with: 53 | context: . 54 | platforms: linux/amd64,linux/arm64 55 | push: ${{ github.event_name != 'pull_request' }} 56 | tags: ${{ steps.meta.outputs.tags }} 57 | labels: ${{ steps.meta.outputs.labels }} 58 | cache-from: type=gha 59 | cache-to: type=gha,mode=max 60 | 61 | - name: Generate artifact attestation 62 | if: github.event_name != 'pull_request' 63 | uses: actions/attest-build-provenance@v1 64 | with: 65 | subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME}} 66 | subject-digest: ${{ steps.build.outputs.digest }} 67 | push-to-registry: true 68 | -------------------------------------------------------------------------------- /src/malla/templates/components/relay_node_analysis.html: -------------------------------------------------------------------------------- 1 | 2 | 44 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.8' 2 | 3 | services: 4 | malla-web: 5 | # Use pre-built image by default, override with 'build: .' for development 6 | image: ${MALLA_IMAGE:-ghcr.io/zenitram/malla:latest} 7 | # Uncomment the following line for local development: 8 | # build: . 
9 | 10 | # Choose between Flask dev server and Gunicorn 11 | # Default: Flask development server (malla-web) 12 | # Production: Gunicorn WSGI server (malla-web-gunicorn) 13 | command: ${MALLA_WEB_COMMAND:-/app/.venv/bin/malla-web} 14 | 15 | ports: 16 | - "${MALLA_WEB_PORT:-5008}:5008" 17 | environment: 18 | - MALLA_HOST=0.0.0.0 19 | - MALLA_PORT=5008 20 | - MALLA_DATABASE_FILE=/app/data/meshtastic_history.db 21 | - MALLA_NAME=${MALLA_NAME:-Malla} 22 | - MALLA_SECRET_KEY=${MALLA_SECRET_KEY:-dev-secret-key-change-in-production} 23 | - MALLA_DEBUG=${MALLA_DEBUG:-false} 24 | volumes: 25 | - malla_data:/app/data 26 | # Optional: mount your own config file (uncomment if you have one) 27 | # - ./config.yaml:/app/config.yaml:ro 28 | restart: unless-stopped 29 | depends_on: 30 | - malla-capture 31 | 32 | malla-capture: 33 | # Use pre-built image by default, override with 'build: .' for development 34 | image: ${MALLA_IMAGE:-ghcr.io/zenitram/malla:latest} 35 | # Uncomment the following line for local development: 36 | # build: . 
37 | command: ["/app/.venv/bin/malla-capture"] 38 | environment: 39 | - MALLA_DATABASE_FILE=/app/data/meshtastic_history.db 40 | # MQTT Configuration - set these in your .env file 41 | - MALLA_MQTT_BROKER_ADDRESS=${MALLA_MQTT_BROKER_ADDRESS} 42 | - MALLA_MQTT_PORT=${MALLA_MQTT_PORT:-1883} 43 | - MALLA_MQTT_USERNAME=${MALLA_MQTT_USERNAME:-} 44 | - MALLA_MQTT_PASSWORD=${MALLA_MQTT_PASSWORD:-} 45 | - MALLA_MQTT_TOPIC_PREFIX=${MALLA_MQTT_TOPIC_PREFIX:-msh} 46 | - MALLA_MQTT_TOPIC_SUFFIX=${MALLA_MQTT_TOPIC_SUFFIX:-/+/+/+/#} 47 | - MALLA_DEFAULT_CHANNEL_KEY=${MALLA_DEFAULT_CHANNEL_KEY:-} 48 | volumes: 49 | - malla_data:/app/data 50 | # Optional: mount your own config file (uncomment if you have one) 51 | # - ./config.yaml:/app/config.yaml:ro 52 | restart: unless-stopped 53 | 54 | volumes: 55 | malla_data: 56 | -------------------------------------------------------------------------------- /config.sample.yaml: -------------------------------------------------------------------------------- 1 | # Sample configuration for Malla (rename to 'config.yaml' and adjust as needed) 2 | # All fields are optional – defaults are shown below. Each setting can also be 3 | # overridden via environment variables using the `MALLA_` prefix. For example, 4 | # to override `host`, set `MALLA_HOST=127.0.0.1`. 5 | 6 | # Display name shown in the navigation bar and browser title 7 | name: "Malla" 8 | 9 | # Markdown content rendered on the dashboard homepage 10 | home_markdown: | 11 | # Welcome to Malla 12 | This content is written in **Markdown** and will appear at the top of the 13 | dashboard. You can use standard Markdown formatting. 14 | 15 | # Flask / server options ---------------------------------------------------- 16 | 17 | # Secret key used by Flask for session signing (change for production!) 
18 | secret_key: "dev-secret-key-change-in-production" 19 | 20 | # SQLite database file path (relative or absolute) 21 | database_file: "meshtastic_history.db" 22 | 23 | # Host interface and port for the web server 24 | host: "0.0.0.0" 25 | port: 5008 26 | 27 | # Enable Flask debug mode (do **not** use in production) 28 | debug: false 29 | 30 | # Logging level (DEBUG, INFO, WARNING, ERROR, CRITICAL) 31 | # log_level: "INFO" 32 | 33 | # --------------------------------------------------------------------------- 34 | # MQTT capture settings (used by malla-capture) 35 | # --------------------------------------------------------------------------- 36 | 37 | # mqtt_broker_address: "127.0.0.1" 38 | # mqtt_port: 1883 39 | # mqtt_username: "" # optional 40 | # mqtt_password: "" # optional 41 | # mqtt_topic_prefix: "msh" 42 | # mqtt_topic_suffix: "/+/+/+/#" 43 | 44 | # Default channel key(s) used for decrypting secondary channels (base64) 45 | # Supports multiple comma-separated keys - each key will be tried in order 46 | # until one successfully decrypts the packet 47 | # Examples: 48 | # Single key: "1PG7OiApB1nwvP+rz05pAQ==" 49 | # Multiple keys: "1PG7OiApB1nwvP+rz05pAQ==,AQ4GCAwQFBgcICQoLDA0ODw=,another+base64+key==" 50 | # default_channel_key: "1PG7OiApB1nwvP+rz05pAQ==" 51 | 52 | # Number of hours after which to delete old data (0 = never delete) 53 | # data_retention_hours: 0 54 | 55 | # OpenTelemetry settings 56 | # Endpoint for sending traces (e.g. 
"""
Geographic utility functions for Meshtastic Mesh Health Web UI
"""

import math

# Mean Earth radius in kilometres (spherical model).
_EARTH_RADIUS_KM = 6371.0


def calculate_distance(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
    """Return the great-circle distance between two points, in kilometres.

    Uses the Haversine formula on a spherical Earth model.

    Args:
        lat1: Latitude of first point in decimal degrees
        lon1: Longitude of first point in decimal degrees
        lat2: Latitude of second point in decimal degrees
        lon2: Longitude of second point in decimal degrees

    Returns:
        Distance in kilometers
    """
    phi1, lam1, phi2, lam2 = map(math.radians, (lat1, lon1, lat2, lon2))

    half_dphi = (phi2 - phi1) / 2
    half_dlam = (lam2 - lam1) / 2

    # Haversine term: sin^2(dφ/2) + cosφ1·cosφ2·sin^2(dλ/2)
    h = (
        math.sin(half_dphi) ** 2
        + math.cos(phi1) * math.cos(phi2) * math.sin(half_dlam) ** 2
    )
    central_angle = 2 * math.atan2(math.sqrt(h), math.sqrt(1 - h))

    return _EARTH_RADIUS_KM * central_angle


def calculate_bearing(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
    """Return the initial bearing (forward azimuth) from point 1 to point 2.

    Args:
        lat1: Latitude of first point in decimal degrees
        lon1: Longitude of first point in decimal degrees
        lat2: Latitude of second point in decimal degrees
        lon2: Longitude of second point in decimal degrees

    Returns:
        Bearing in degrees (0-360)
    """
    phi1 = math.radians(lat1)
    phi2 = math.radians(lat2)
    dlam = math.radians(lon2 - lon1)

    east = math.sin(dlam) * math.cos(phi2)
    north = (
        math.cos(phi1) * math.sin(phi2)
        - math.sin(phi1) * math.cos(phi2) * math.cos(dlam)
    )

    # atan2 yields (-180, 180]; shift into the compass range [0, 360).
    return (math.degrees(math.atan2(east, north)) + 360) % 360
22 | * @param {Object} currentState 23 | */ 24 | function notify(currentState) { 25 | // Create a shallow copy to protect internals 26 | const snapshot = { ...currentState }; 27 | listeners.forEach((fn) => { 28 | try { fn(snapshot); } catch (err) { console.error("FilterStore listener error", err); } 29 | }); 30 | } 31 | 32 | const state = new Proxy({ ...initial }, { 33 | set(target, prop, value) { 34 | if (target[prop] !== value) { 35 | target[prop] = value; 36 | notify(target); 37 | } 38 | return true; 39 | } 40 | }); 41 | 42 | return { 43 | state, 44 | /** 45 | * Subscribe to state changes. 46 | * @param {(state: Object) => void} fn - callback executed on every mutation. 47 | * @returns {() => void} Unsubscribe function. 48 | */ 49 | subscribe(fn) { 50 | listeners.add(fn); 51 | // Immediately emit current state to new subscriber 52 | fn({ ...state }); 53 | return () => listeners.delete(fn); 54 | } 55 | }; 56 | } 57 | 58 | // Expose globally 59 | global.createFilterStore = createFilterStore; 60 | })(typeof window !== "undefined" ? 
window : this); 61 | -------------------------------------------------------------------------------- /tests/unit/test_exclude_fields_ui.py: -------------------------------------------------------------------------------- 1 | """Unit tests for exclude fields UI functionality.""" 2 | 3 | import pytest 4 | 5 | 6 | @pytest.mark.unit 7 | class TestExcludeFieldsUI: 8 | """Test that exclude fields are properly included in the UI.""" 9 | 10 | def test_exclude_fields_present_in_packets_template(self, client): 11 | """Test that exclude_from and exclude_to fields are present in packets template.""" 12 | response = client.get("/packets") 13 | assert response.status_code == 200 14 | 15 | content = response.get_data(as_text=True) 16 | 17 | # Check for exclude_from field 18 | assert 'name="exclude_from"' in content 19 | assert 'id="exclude_from"' in content 20 | assert "Exclude From Node" in content 21 | 22 | # Check for exclude_to field 23 | assert 'name="exclude_to"' in content 24 | assert 'id="exclude_to"' in content 25 | assert "Exclude To Node" in content 26 | 27 | # Check for proper placeholders 28 | assert 'placeholder="No exclusions"' in content 29 | 30 | def test_exclude_parameters_accepted_by_api(self, client): 31 | """Test that the API accepts exclude_from and exclude_to parameters.""" 32 | # Test that the API endpoint accepts these parameters without error 33 | response = client.get( 34 | "/api/packets/data?exclude_from=123&exclude_to=456&limit=5" 35 | ) 36 | 37 | # Should return 200 (API accepts the parameters) 38 | assert response.status_code == 200 39 | 40 | data = response.get_json() 41 | 42 | # Should return proper response structure 43 | assert "data" in data 44 | assert isinstance(data["data"], list) 45 | assert "total_count" in data 46 | assert isinstance(data["total_count"], int) 47 | 48 | def test_exclude_parameters_with_broadcast_node(self, client): 49 | """Test that exclude parameters work with broadcast node ID.""" 50 | # Test excluding broadcast packets 
import pytest


def _is_sorted(values, ascending=True):
    """Return True when *values* is ordered; None is treated as an extreme value.

    None normalizes to +infinity for ascending checks and -infinity for
    descending checks, matching SQL NULLs-last ordering.
    """
    sentinel = float("inf") if ascending else float("-inf")
    normalized = [sentinel if v is None else v for v in values]
    pairs = zip(normalized, normalized[1:])
    if ascending:
        return all(a <= b for a, b in pairs)
    return all(a >= b for a, b in pairs)


@pytest.mark.integration
@pytest.mark.api
@pytest.mark.parametrize(
    "sort_by,api_field",
    [
        ("from_node", "from_node_id"),
        ("to_node", "to_node_id"),
        ("hops", "hops"),
    ],
)
def test_packets_sorting(client, sort_by, api_field):
    """Verify that /api/packets/data supports sorting by the given field when ungrouped."""
    for order in ("asc", "desc"):
        resp = client.get(
            f"/api/packets/data?limit=25&sort_by={sort_by}&sort_order={order}&group_packets=false"
        )
        assert resp.status_code == 200
        rows = resp.get_json()["data"]
        column = [row.get(api_field) for row in rows]
        # A single row (or none) is trivially sorted; only check real sequences.
        if len(column) > 1:
            assert _is_sorted(column, ascending=(order == "asc")), (
                f"Packet sorting failed for {sort_by} {order}"
            )
"to_node_id"), 48 | ("hops", "hops"), 49 | ], 50 | ) 51 | def test_traceroute_sorting(client, sort_by, api_field): 52 | """Verify that /api/traceroute/data supports sorting by the given field when ungrouped.""" 53 | for order in ("asc", "desc"): 54 | resp = client.get( 55 | f"/api/traceroute/data?limit=25&sort_by={sort_by}&sort_order={order}&group_packets=false" 56 | ) 57 | assert resp.status_code == 200 58 | data = resp.get_json() 59 | rows = data["data"] 60 | values = [row.get(api_field) for row in rows] 61 | if len(values) > 1: 62 | assert _is_sorted(values, ascending=(order == "asc")), ( 63 | f"Traceroute sorting failed for {sort_by} {order}" 64 | ) 65 | -------------------------------------------------------------------------------- /src/malla/utils/serialization_utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | Serialization utility functions for Meshtastic Mesh Health Web UI 3 | """ 4 | 5 | import base64 6 | from typing import Any 7 | 8 | 9 | def convert_bytes_to_base64(obj: Any) -> Any: 10 | """ 11 | Recursively convert bytes objects to base64 strings for JSON serialization. 12 | 13 | Args: 14 | obj: The object to process, which may contain bytes objects 15 | 16 | Returns: 17 | The object with all bytes converted to base64 strings 18 | """ 19 | if isinstance(obj, bytes): 20 | return base64.b64encode(obj).decode("utf-8") 21 | elif isinstance(obj, dict): 22 | return {k: convert_bytes_to_base64(v) for k, v in obj.items()} 23 | elif isinstance(obj, list): 24 | return [convert_bytes_to_base64(item) for item in obj] 25 | else: 26 | return obj 27 | 28 | 29 | def sanitize_floats(obj: Any) -> Any: 30 | """Recursively replace ``NaN`` and (−)``Infinity`` float values with ``None``. 31 | 32 | Standard JSON does *not* support special floating-point values such as ``NaN``, 33 | ``Infinity`` or ``-Infinity``. 
If they end up in the response payload 34 | many browsers will fail to parse the JSON produced by :pyfunc:`flask.json.jsonify`. 35 | 36 | This helper walks the supplied structure (dict / list / scalar) and 37 | converts any offending float to ``None`` so that the resulting payload is 38 | fully standards-compliant. 39 | 40 | Args: 41 | obj: Arbitrary, potentially nested, data structure. 42 | 43 | Returns: 44 | The sanitised structure with only valid JSON scalar values. 45 | """ 46 | import math 47 | 48 | # Fast-path common scalar types -------------------------------------------------- 49 | if isinstance(obj, float): 50 | if math.isnan(obj) or math.isinf(obj): 51 | return None 52 | return obj 53 | 54 | if isinstance(obj, str | int | type(None) | bool): 55 | return obj 56 | 57 | # Recurse for containers --------------------------------------------------------- 58 | if isinstance(obj, dict): 59 | return {k: sanitize_floats(v) for k, v in obj.items()} 60 | 61 | if isinstance(obj, list | tuple | set): 62 | return [sanitize_floats(v) for v in obj] 63 | 64 | # Anything else (e.g. 
bytes) leave untouched – other helpers may convert it later 65 | return obj 66 | 67 | 68 | __all__ = [ 69 | "convert_bytes_to_base64", 70 | "sanitize_floats", 71 | ] 72 | -------------------------------------------------------------------------------- /tests/integration/test_direct_receptions_api.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | class TestDirectReceptionsAPI: 5 | @pytest.mark.integration 6 | def test_direct_receptions_endpoint(self, client): 7 | """Ensure the endpoint returns expected JSON keys and HTTP 200.""" 8 | # The fixture database contains example node 1128074276 9 | response = client.get("/api/node/1128074276/direct-receptions?limit=10") 10 | assert response.status_code == 200 11 | 12 | data = response.get_json() 13 | assert "direct_receptions" in data 14 | assert "total_count" in data 15 | 16 | # direct_receptions should be a list 17 | assert isinstance(data["direct_receptions"], list) 18 | 19 | @pytest.mark.integration 20 | def test_direct_receptions_received_direction(self, client): 21 | """Test the received direction parameter.""" 22 | response = client.get( 23 | "/api/node/1128074276/direct-receptions?direction=received&limit=10" 24 | ) 25 | assert response.status_code == 200 26 | 27 | data = response.get_json() 28 | assert "direct_receptions" in data 29 | assert "total_count" in data 30 | assert "direction" in data 31 | assert data["direction"] == "received" 32 | 33 | # direct_receptions should be a list 34 | assert isinstance(data["direct_receptions"], list) 35 | 36 | @pytest.mark.integration 37 | def test_direct_receptions_transmitted_direction(self, client): 38 | """Test the transmitted direction parameter.""" 39 | response = client.get( 40 | "/api/node/1128074276/direct-receptions?direction=transmitted&limit=10" 41 | ) 42 | assert response.status_code == 200 43 | 44 | data = response.get_json() 45 | assert "direct_receptions" in data 46 | assert "total_count" in data 
"""
E2E tests for the line-of-sight analysis feature on the map.
"""

import pytest
from playwright.sync_api import Page, expect

# Generous timeout (ms) so slow CI map loads don't flake the suite.
DEFAULT_TIMEOUT = 20000  # ms


class TestLineOfSight:
    """Test line-of-sight analysis functionality."""

    @pytest.mark.e2e
    def test_line_of_sight_link_in_popup_template(self, page: Page, test_server_url):
        """Test that the line-of-sight link structure exists in link popups."""
        page.goto(f"{test_server_url}/map")

        # Wait for the loading overlay to disappear before interacting.
        page.wait_for_selector("#mapLoading", state="hidden", timeout=DEFAULT_TIMEOUT)

        # Turn on traceroute links if the checkbox is currently off.
        checkbox = page.locator("#tracerouteLinksCheckbox")
        if not checkbox.is_checked():
            checkbox.click()
            page.wait_for_timeout(1000)

        # Give the map time to fetch its data.
        page.wait_for_timeout(3000)

        # Inspect the page source instead of clicking on fragile map elements;
        # the showLineOfSight / Line of Sight link should be referenced there.
        html = page.content()

        has_los_link = "line-of-sight" in html.lower() or "Line of Sight" in html
        assert has_los_link, (
            "Line of Sight functionality should be available in the map page"
        )

        # The dedicated icon class must also be present.
        assert "bi-bezier" in html, "Line of Sight icon should be defined"

    @pytest.mark.e2e
    def test_line_of_sight_route_exists(self, page: Page, test_server_url):
        """Test that the line-of-sight route is accessible."""
        # Navigate to the tool page directly.
        page.goto(f"{test_server_url}/line-of-sight")
        page.wait_for_load_state("networkidle")

        expect(page.locator("h1")).to_contain_text("Line of Sight Analysis")

        # The route must also tolerate pre-load query parameters.
        page.goto(f"{test_server_url}/line-of-sight?from=123&to=456")
        page.wait_for_load_state("networkidle")

        expect(page.locator("h1")).to_contain_text("Line of Sight Analysis")
3 | """ 4 | 5 | from unittest.mock import MagicMock, patch 6 | 7 | from src.malla.wsgi import create_wsgi_app, get_application 8 | 9 | 10 | class TestWSGIApplication: 11 | """Test the WSGI application factory.""" 12 | 13 | def test_create_wsgi_app_returns_flask_app(self): 14 | """Test that create_wsgi_app returns a Flask application.""" 15 | app = create_wsgi_app() 16 | 17 | # Check that it's a Flask app 18 | assert hasattr(app, "run") 19 | assert hasattr(app, "config") 20 | assert hasattr(app, "route") 21 | 22 | # Check that it has our expected configuration 23 | assert "APP_CONFIG" in app.config 24 | assert "DATABASE_FILE" in app.config 25 | 26 | def test_get_application_returns_flask_app(self): 27 | """Test that get_application returns a Flask application.""" 28 | # The application instance should be created when get_application is called 29 | app = get_application() 30 | assert app is not None 31 | assert hasattr(app, "run") 32 | assert hasattr(app, "config") 33 | 34 | def test_wsgi_app_has_health_endpoint(self): 35 | """Test that the WSGI app has the health check endpoint.""" 36 | app = get_application() 37 | with app.test_client() as client: 38 | response = client.get("/health") 39 | assert response.status_code == 200 40 | data = response.get_json() 41 | assert data["status"] == "healthy" 42 | assert data["service"] == "meshtastic-mesh-health-ui" 43 | 44 | @patch("src.malla.web_ui.get_config") 45 | def test_create_wsgi_app_uses_config(self, mock_get_config): 46 | """Test that create_wsgi_app uses the configuration properly.""" 47 | # Mock the config 48 | mock_config = MagicMock() 49 | mock_config.database_file = "/test/path/test.db" 50 | mock_config.secret_key = "test-secret" 51 | mock_config.host = "127.0.0.1" 52 | mock_config.port = 5008 53 | mock_config.debug = False 54 | mock_config.port = 5008 55 | mock_config.debug = False 56 | mock_config.name = "Test Malla" 57 | mock_config.otlp_endpoint = None 58 | mock_get_config.return_value = mock_config 59 | 60 | 
app = create_wsgi_app() 61 | 62 | # Verify config was called 63 | mock_get_config.assert_called_once() 64 | 65 | # Verify the app was configured 66 | assert app.config["DATABASE_FILE"] == "/test/path/test.db" 67 | -------------------------------------------------------------------------------- /tests/integration/test_packets_exclude_self.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | @pytest.mark.integration 5 | @pytest.mark.api 6 | class TestPacketsExcludeSelf: 7 | """Integration tests for exclude_self filter behavior.""" 8 | 9 | GATEWAY_ID = "!11110000" 10 | GATEWAY_NODE_INT = int(GATEWAY_ID[1:], 16) 11 | 12 | def _assert_no_self_messages(self, packets): 13 | for pkt in packets: 14 | # Legacy endpoint returns list of dicts under "packets" 15 | from_id = pkt.get("from_node_id") or pkt.get("from_node") 16 | assert from_id != self.GATEWAY_NODE_INT, ( 17 | "Self-reported gateway messages were not excluded as expected" 18 | ) 19 | 20 | def test_legacy_endpoint_exclude_self(self, client): 21 | """/api/packets should respect exclude_self parameter.""" 22 | # Without exclude_self (baseline) 23 | resp = client.get(f"/api/packets?gateway_id={self.GATEWAY_ID}&limit=50&page=1") 24 | assert resp.status_code == 200 25 | packets_all = resp.get_json()["packets"] 26 | assert len(packets_all) > 0 27 | 28 | # With exclude_self=true 29 | resp = client.get( 30 | f"/api/packets?gateway_id={self.GATEWAY_ID}&exclude_self=true&limit=50&page=1" 31 | ) 32 | assert resp.status_code == 200 33 | packets_filtered = resp.get_json()["packets"] 34 | assert len(packets_filtered) > 0 35 | 36 | # Verify filtered packets have no self messages 37 | self._assert_no_self_messages(packets_filtered) 38 | 39 | # Filtered list should be <= original 40 | assert len(packets_filtered) <= len(packets_all) 41 | 42 | def test_modern_endpoint_exclude_self(self, client): 43 | """/api/packets/data should respect exclude_self parameter.""" 44 | # 
import pytest


@pytest.mark.integration
@pytest.mark.api
class TestPacketsExcludeSelf:
    """Integration tests for exclude_self filter behavior."""

    GATEWAY_ID = "!11110000"
    GATEWAY_NODE_INT = int(GATEWAY_ID[1:], 16)

    def _assert_no_self_messages(self, packets):
        """Fail when any packet in *packets* was sent by the gateway node itself."""
        for pkt in packets:
            # Legacy endpoint returns list of dicts under "packets"
            sender = pkt.get("from_node_id") or pkt.get("from_node")
            assert sender != self.GATEWAY_NODE_INT, (
                "Self-reported gateway messages were not excluded as expected"
            )

    def test_legacy_endpoint_exclude_self(self, client):
        """/api/packets should respect exclude_self parameter."""
        # Baseline request without the filter.
        resp = client.get(f"/api/packets?gateway_id={self.GATEWAY_ID}&limit=50&page=1")
        assert resp.status_code == 200
        baseline = resp.get_json()["packets"]
        assert len(baseline) > 0

        # Same query with exclude_self=true.
        resp = client.get(
            f"/api/packets?gateway_id={self.GATEWAY_ID}&exclude_self=true&limit=50&page=1"
        )
        assert resp.status_code == 200
        filtered = resp.get_json()["packets"]
        assert len(filtered) > 0

        # No self-reported gateway messages may remain.
        self._assert_no_self_messages(filtered)

        # Filtering can only remove rows, never add them.
        assert len(filtered) <= len(baseline)

    def test_modern_endpoint_exclude_self(self, client):
        """/api/packets/data should respect exclude_self parameter."""
        # Baseline request without the filter.
        resp = client.get(
            f"/api/packets/data?gateway_id={self.GATEWAY_ID}&limit=50&page=1&group_packets=false"
        )
        assert resp.status_code == 200
        baseline = resp.get_json()["data"]
        assert len(baseline) > 0

        # Same query with exclude_self=true.
        resp = client.get(
            f"/api/packets/data?gateway_id={self.GATEWAY_ID}&exclude_self=true&limit=50&page=1&group_packets=false"
        )
        assert resp.status_code == 200
        filtered = resp.get_json()["data"]
        assert len(filtered) > 0

        # No self-reported gateway messages may remain.
        self._assert_no_self_messages(filtered)

        assert len(filtered) <= len(baseline)
test_client.get("/api/packets/data?limit=10") 32 | assert response.status_code == 200 33 | 34 | data = response.get_json() 35 | assert "data" in data 36 | 37 | # Check that packets include relay_node field 38 | if data["data"]: 39 | packet = data["data"][0] 40 | # relay_node should be present (may be None or a number) 41 | assert "relay_node" in packet 42 | 43 | def test_relay_node_hex_format(self, test_client, temp_database): 44 | """Test that relay_node field is present and correctly formatted when available.""" 45 | # Get a packet 46 | result = PacketRepository.get_packets(limit=10, offset=0) 47 | if not result["packets"]: 48 | pytest.skip("No packets in database") 49 | 50 | packet = result["packets"][0] 51 | packet_id = packet["id"] 52 | 53 | response = test_client.get(f"/packet/{packet_id}") 54 | assert response.status_code == 200 55 | 56 | html = response.data.decode("utf-8") 57 | 58 | # Verify "Relay Node:" label is present 59 | assert "Relay Node:" in html 60 | 61 | # If packet has relay_node set, verify hex format 62 | if packet.get("relay_node") and packet["relay_node"] != 0: 63 | relay_value = packet["relay_node"] 64 | expected_hex = f"0x{relay_value & 0xFF:02x}" 65 | assert expected_hex in html.lower() 66 | -------------------------------------------------------------------------------- /src/malla/routes/main_routes.py: -------------------------------------------------------------------------------- 1 | """ 2 | Main routes for the Meshtastic Mesh Health Web UI 3 | """ 4 | 5 | import logging 6 | 7 | from flask import Blueprint, render_template, request 8 | 9 | # Import from the new modular architecture 10 | from ..database.repositories import ( 11 | DashboardRepository, 12 | ) 13 | 14 | logger = logging.getLogger(__name__) 15 | main_bp = Blueprint("main", __name__) 16 | 17 | 18 | @main_bp.route("/") 19 | def dashboard(): 20 | """Dashboard route with network statistics.""" 21 | try: 22 | # Get basic dashboard stats 23 | stats = 
@main_bp.route("/")
def dashboard():
    """Dashboard route with network statistics."""
    try:
        # Basic dashboard stats are always required.
        stats = DashboardRepository.get_stats()

        # Gateway statistics come from the cached service; imported lazily
        # inside the handler (matching the original module layout).
        from ..services.gateway_service import GatewayService

        gateway_stats = GatewayService.get_gateway_statistics(hours=24)
        gateway_count = gateway_stats.get("total_gateways", 0)

        return render_template(
            "dashboard.html",
            stats=stats,
            gateway_count=gateway_count,
        )
    except Exception as e:
        logger.error(f"Error loading dashboard: {e}")
        # Fallback to basic stats without gateway info
        stats = DashboardRepository.get_stats()
        return render_template(
            "dashboard.html",
            stats=stats,
            gateway_count=0,
            error_message="Some dashboard features may be unavailable",
        )


@main_bp.route("/map")
def map_view():
    """Node location map view."""
    try:
        return render_template("map.html")
    except Exception as e:
        logger.error(f"Error in map route: {e}")
        return f"Map error: {e}", 500


@main_bp.route("/longest-links")
def longest_links():
    """Longest links analysis page."""
    logger.info("Longest links route accessed")
    try:
        return render_template("longest_links.html")
    except Exception as e:
        logger.error(f"Error in longest links route: {e}")
        return f"Longest links error: {e}", 500


@main_bp.route("/line-of-sight")
def line_of_sight():
    """Line of sight analysis tool page."""
    logger.info("Line of sight tool route accessed")
    try:
        # Optional query parameters allow deep-linking a pre-loaded analysis.
        from_node_id = request.args.get("from")
        to_node_id = request.args.get("to")

        return render_template(
            "line_of_sight.html", from_node_id=from_node_id, to_node_id=to_node_id
        )
    except Exception as e:
        logger.error(f"Error in line of sight route: {e}")
        return f"Line of sight error: {e}", 500
from unittest.mock import Mock, patch

import pytest

from src.malla.database.repositories import NodeRepository


class TestNodeRepositoryDirectReceptions:
    """Unit tests for NodeRepository.get_direct_receptions"""

    @pytest.mark.unit
    @patch("src.malla.database.repositories.get_db_connection")
    def test_get_direct_receptions_returns_expected_structure(self, mock_get_db):
        """Verify the method returns properly structured dictionaries."""
        # Arrange: fake connection/cursor pair returned by the patched factory.
        db_conn = Mock()
        db_cursor = Mock()
        mock_get_db.return_value = db_conn
        db_conn.cursor.return_value = db_cursor

        stats_rows = [
            {
                "from_node_id": 123,
                "long_name": "Alpha",
                "short_name": "A",
                "packet_count": 2,
                "rssi_avg": -75.0,
                "rssi_min": -70.0,
                "rssi_max": -80.0,
                "snr_avg": 7.5,
                "snr_min": 5.0,
                "snr_max": 10.0,
                "first_seen": 1700000000.0,
                "last_seen": 1700000100.0,
            }
        ]
        packet_rows = [
            {
                "packet_id": 1,
                "timestamp": 1700000000.0,
                "from_node_id": 123,
                "rssi": -80,
                "snr": 5,
            },
            {
                "packet_id": 2,
                "timestamp": 1700000100.0,
                "from_node_id": 123,
                "rssi": -70,
                "snr": 10,
            },
        ]
        # fetchall is invoked twice: stats query first, packets query second.
        db_cursor.fetchall.side_effect = [stats_rows, packet_rows]

        # Act
        result = NodeRepository.get_direct_receptions(11223344, limit=100)

        # Assert
        assert isinstance(result, list)
        assert len(result) == 1  # One node with aggregated stats
        entry = result[0]
        expected_keys = {
            "from_node_id",
            "from_node_name",
            "packet_count",
            "rssi_avg",
            "rssi_min",
            "rssi_max",
            "snr_avg",
            "snr_min",
            "snr_max",
            "first_seen",
            "last_seen",
            "packets",
        }
        assert set(entry.keys()) == expected_keys
        assert entry["from_node_name"] == "Alpha"
32 | """ 33 | if not endpoint: 34 | logger.info("OTLP endpoint not configured, skipping telemetry setup") 35 | return 36 | 37 | logger.info(f"Setting up OpenTelemetry with OTLP endpoint: {endpoint}") 38 | 39 | # Create resource with service name 40 | resource = Resource(attributes={SERVICE_NAME: "malla-web"}) 41 | 42 | # Setup trace provider 43 | provider = TracerProvider(resource=resource) 44 | processor = BatchSpanProcessor(OTLPSpanExporter(endpoint=endpoint, insecure=True)) 45 | provider.add_span_processor(processor) 46 | trace.set_tracer_provider(provider) 47 | 48 | # Instrument Flask application 49 | FlaskInstrumentor().instrument_app(app) 50 | logger.info("Flask instrumentation enabled") 51 | 52 | # Instrument SQLite3 for database tracing 53 | SQLite3Instrumentor().instrument() 54 | logger.info("SQLite3 instrumentation enabled") 55 | 56 | # Instrument logging to inject trace context into logs 57 | LoggingInstrumentor().instrument(set_logging_format=True) 58 | logger.info("Logging instrumentation enabled (trace context injection)") 59 | 60 | # Instrument requests library for HTTP client tracing 61 | RequestsInstrumentor().instrument() 62 | logger.info("Requests instrumentation enabled") 63 | 64 | # Instrument system metrics collection 65 | SystemMetricsInstrumentor().instrument() 66 | logger.info("System metrics instrumentation enabled") 67 | 68 | logger.info("OpenTelemetry instrumentation setup complete") 69 | -------------------------------------------------------------------------------- /src/malla/tracing_utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | OpenTelemetry tracing utilities for adding custom spans to functions. 3 | 4 | This module provides decorators and utilities for manual instrumentation 5 | of specific functions and code blocks. 
6 | """ 7 | 8 | from collections.abc import Callable 9 | from functools import wraps 10 | from typing import Any 11 | 12 | from opentelemetry import trace 13 | 14 | # Get a tracer for this application 15 | tracer = trace.get_tracer(__name__) 16 | 17 | 18 | def traced(span_name: str | None = None): 19 | """ 20 | Decorator to create a span for a function. 21 | 22 | This decorator automatically creates a span that tracks the execution 23 | of the decorated function, including timing and any exceptions. 24 | 25 | Args: 26 | span_name: Optional custom name for the span. If not provided, 27 | uses the function's qualified name. 28 | 29 | Example: 30 | @traced("process_packet") 31 | def process_packet(packet_data): 32 | # Function logic here 33 | pass 34 | 35 | # Or with automatic naming: 36 | @traced() 37 | def calculate_metrics(): 38 | # Function logic here 39 | pass 40 | """ 41 | 42 | def decorator(func: Callable) -> Callable: 43 | @wraps(func) 44 | def wrapper(*args: Any, **kwargs: Any) -> Any: 45 | # Use custom span name or fall back to function name 46 | name = span_name or f"{func.__module__}.{func.__qualname__}" 47 | 48 | with tracer.start_as_current_span(name) as span: 49 | # Add function metadata as span attributes 50 | span.set_attribute("function.name", func.__name__) 51 | span.set_attribute("function.module", func.__module__) 52 | 53 | try: 54 | result = func(*args, **kwargs) 55 | return result 56 | except Exception as e: 57 | # Record exception in span 58 | span.record_exception(e) 59 | span.set_status(trace.Status(trace.StatusCode.ERROR, str(e))) 60 | raise 61 | 62 | return wrapper 63 | 64 | return decorator 65 | 66 | 67 | def add_span_attributes(**attributes: Any) -> None: 68 | """ 69 | Add custom attributes to the current active span. 70 | 71 | This is useful for adding contextual information to spans created 72 | by auto-instrumentation or parent spans. 73 | 74 | Args: 75 | **attributes: Key-value pairs to add as span attributes. 
def add_span_attributes(**attributes: Any) -> None:
    """
    Add custom attributes to the current active span.

    Useful for attaching contextual information to spans created by
    auto-instrumentation or by an enclosing parent span.

    Args:
        **attributes: Key-value pairs to add as span attributes.

    Example:
        def process_node(node_id):
            add_span_attributes(node_id=node_id, node_type="sensor")
            # ... rest of function
    """
    span = trace.get_current_span()
    # Silently no-op when there is no active, recording span
    # (e.g. telemetry disabled or sampling dropped the trace).
    if not span or not span.is_recording():
        return
    for key, value in attributes.items():
        span.set_attribute(key, value)
"""Integration tests for route_node filtering when group_packets=true.

These tests ensure that traceroute queries with the `route_node` filter applied
return only packets that include the specified node anywhere in the route, even
when aggregated (`group_packets=true`).
"""

import pytest


@pytest.mark.integration
class TestTracerouteRouteNodeGrouped:
    """Tests for route_node filter with grouped traceroute queries."""

    # Node IDs known to appear somewhere in the various test fixtures.
    ROUTE_NODE_CANDIDATES = [
        0x11111111,  # 286331153 – used in fixtures
        0x22222222,  # 572662306
        0x33333333,  # 858993459
        0x55555555,  # 1431655765
        555666777,  # From traceroute_graph_data fixture
    ]

    def _find_route_node_with_results(self, client):
        """Return first route_node candidate that yields results when grouped."""
        for candidate in self.ROUTE_NODE_CANDIDATES:
            resp = client.get(
                f"/api/traceroute/data?page=1&limit=5&group_packets=true&route_node={candidate}"
            )
            assert resp.status_code == 200
            body = resp.get_json()
            if body["total_count"] > 0:
                return candidate, body["total_count"]
        return None, 0

    def test_grouped_route_node_filter_returns_correct_packets(self, client):
        """Verify that grouped queries honour the route_node filter."""
        route_node, total_cnt = self._find_route_node_with_results(client)
        if route_node is None:
            pytest.skip("No suitable route_node found in fixture data for grouped test")

        # Fetch a page of grouped results for the chosen node.
        resp = client.get(
            f"/api/traceroute/data?page=1&limit=25&group_packets=true&route_node={route_node}"
        )
        assert resp.status_code == 200
        payload = resp.get_json()

        # Basic response-envelope checks.
        assert payload["total_count"] == total_cnt
        assert payload["page"] == 1
        assert payload["limit"] == 25
        assert isinstance(payload["data"], list)
        assert payload["data"], "Expected at least one packet in response"

        # Every returned packet must be grouped and must contain the filtered
        # node either inside its route or as one of its endpoints.
        for pkt in payload["data"]:
            assert pkt["is_grouped"] is True
            route_nodes = pkt.get("route_nodes", [])
            from_node = pkt.get("from_node_id")
            to_node = pkt.get("to_node_id")
            assert (
                route_node in route_nodes
                or route_node == from_node
                or route_node == to_node
            ), (
                f"Packet {pkt['id']} does not contain route_node {route_node}. "
                f"route_nodes={route_nodes}, from_node={from_node}, to_node={to_node}"
            )
"""
Traceroute-related routes for the Meshtastic Mesh Health Web UI
"""

import logging

from flask import Blueprint, render_template, request

logger = logging.getLogger(__name__)
traceroute_bp = Blueprint("traceroute", __name__)

# Query-string values treated as "true" for boolean parameters.
_TRUTHY_VALUES = {"1", "true", "yes", "on"}


def _bool_arg(name: str, default: bool = False) -> bool:
    """Parse a boolean query-string argument explicitly.

    Flask's ``request.args.get(name, type=bool)`` applies ``bool()`` to the raw
    string, so ``?flag=false`` and ``?flag=0`` would both evaluate to True.
    This helper matches against an explicit set of truthy strings instead.
    """
    raw = request.args.get(name)
    if raw is None:
        return default
    return raw.strip().lower() in _TRUTHY_VALUES


@traceroute_bp.route("/traceroute")
def traceroute():
    """Traceroute analysis page using modern table interface."""
    logger.info("Traceroute route accessed")
    try:
        # Determine if the request URL already contains filter parameters. If it
        # does, we want the front-end ModernTable to *defer* the initial data
        # load until those filters have been applied client-side – otherwise we
        # would wastefully fire an unfiltered request first.
        filter_params = {
            "from_node",
            "to_node",
            "route_node",
            "gateway_id",
            "return_path_only",
            "start_time",
            "end_time",
        }

        has_filters = any(param in request.args for param in filter_params)

        logger.info(
            "Traceroute page rendered (has_filters=%s, args=%s)",
            has_filters,
            dict(request.args),
        )

        return render_template("traceroute.html", defer_initial_load=has_filters)
    except Exception as e:
        logger.error(f"Error in traceroute route: {e}")
        return f"Traceroute error: {e}", 500


@traceroute_bp.route("/traceroute-hops")
def traceroute_hops():
    """Traceroute hops visualization page."""
    logger.info("Traceroute hops route accessed")
    try:
        return render_template("traceroute_hops.html")
    except Exception as e:
        logger.error(f"Error in traceroute hops route: {e}")
        return f"Traceroute hops error: {e}", 500


@traceroute_bp.route("/traceroute-graph")
def traceroute_graph():
    """Traceroute network graph visualization page."""
    logger.info("Traceroute graph route accessed")
    try:
        # Get filter parameters
        hours = request.args.get("hours", 24, type=int)
        min_snr = request.args.get("min_snr", -200.0, type=float)
        # BUGFIX: previously used request.args.get(..., type=bool), which
        # returns True for ANY non-empty string — including "false" and "0".
        include_indirect = _bool_arg("include_indirect", False)

        # Validate parameters
        if hours < 1 or hours > 168:  # Max 7 days
            hours = 24
        # Allow -200 as special "no limit" value, otherwise validate normal range
        if min_snr < -200 or min_snr > 20:
            min_snr = -200.0

        return render_template(
            "traceroute_graph.html",
            hours=hours,
            min_snr=min_snr,
            include_indirect=include_indirect,
        )
    except Exception as e:
        logger.error(f"Error in traceroute graph route: {e}")
        return f"Traceroute graph error: {e}", 500
"from_node_id": 0x01, 65 | "to_node_id": 0x03, 66 | "gateway_id": "!00000002", 67 | "hop_start": 4, 68 | "hop_limit": 0, 69 | "raw_payload": payload2, 70 | "portnum_name": "TRACEROUTE_APP", 71 | "payload_length": len(payload2), 72 | } 73 | 74 | graph = build_combined_traceroute_graph([packet1, packet2]) 75 | 76 | # Expect three unique nodes 77 | assert len(graph["nodes"]) == 3 78 | 79 | # Expect edges NodeA-NodeB and NodeB-NodeC (undirected) 80 | edge_ids = {edge["id"] for edge in graph["edges"]} 81 | # Edge IDs use node names, but in test they fall back to hex format 82 | # Check for the actual connections: 0x01-0x02 and 0x02-0x03 83 | assert any("00000001" in edge_id and "00000002" in edge_id for edge_id in edge_ids) 84 | assert any("00000002" in edge_id and "00000003" in edge_id for edge_id in edge_ids) 85 | 86 | # Edge observation counts should reflect combined occurrences 87 | # 0x01-0x02 edge appears in both packets (once each) => count 2 88 | nodeab_edge = next( 89 | ( 90 | edge 91 | for edge in graph["edges"] 92 | if "00000001" in edge["id"] and "00000002" in edge["id"] 93 | ), 94 | None, 95 | ) 96 | assert nodeab_edge is not None 97 | assert nodeab_edge["value"] == 2 98 | -------------------------------------------------------------------------------- /tests/unit/test_node_picker_broadcast.py: -------------------------------------------------------------------------------- 1 | """ 2 | Unit tests for node picker broadcast functionality. 3 | 4 | Tests that the node picker correctly includes broadcast options when configured. 
5 | """ 6 | 7 | import re 8 | 9 | 10 | class TestNodePickerBroadcast: 11 | """Test node picker broadcast functionality.""" 12 | 13 | def test_from_node_dropdown_includes_broadcast(self, app, client): 14 | """Test that from_node dropdown includes broadcast option when include_broadcast=true.""" 15 | # Get the packets page which has from_node picker with include_broadcast=true 16 | response = client.get("/packets") 17 | assert response.status_code == 200 18 | 19 | content = response.data.decode("utf-8") 20 | 21 | # Look for from_node field with data-include-broadcast="true" 22 | # Check that from_node exists and has include_broadcast enabled 23 | assert 'data-include-broadcast="true"' in content, ( 24 | "Should have broadcast-enabled node pickers" 25 | ) 26 | assert 'name="from_node"' in content, "Should have from_node hidden input field" 27 | 28 | def test_exclude_from_dropdown_includes_broadcast(self, app, client): 29 | """Test that exclude_from dropdown includes broadcast option when include_broadcast=true.""" 30 | response = client.get("/packets") 31 | assert response.status_code == 200 32 | 33 | content = response.data.decode("utf-8") 34 | 35 | # Check for exclude_from node picker with broadcast enabled 36 | assert 'data-include-broadcast="true"' in content, ( 37 | "Should have broadcast-enabled node pickers" 38 | ) 39 | assert 'name="exclude_from"' in content, ( 40 | "Should have exclude_from hidden input field" 41 | ) 42 | 43 | def test_exclude_to_dropdown_includes_broadcast(self, app, client): 44 | """Test that exclude_to dropdown includes broadcast option when include_broadcast=true.""" 45 | response = client.get("/packets") 46 | assert response.status_code == 200 47 | 48 | content = response.data.decode("utf-8") 49 | 50 | # Check for exclude_to node picker with broadcast enabled 51 | assert 'data-include-broadcast="true"' in content, ( 52 | "Should have broadcast-enabled node pickers" 53 | ) 54 | assert 'name="exclude_to"' in content, ( 55 | "Should have 
exclude_to hidden input field" 56 | ) 57 | 58 | def test_node_picker_structure_is_correct(self, app, client): 59 | """Test that node pickers have the correct HTML structure.""" 60 | response = client.get("/packets") 61 | assert response.status_code == 200 62 | 63 | content = response.data.decode("utf-8") 64 | 65 | # Check that we have at least 4 node picker containers 66 | picker_containers = re.findall( 67 | r']*class="[^"]*node-picker-container[^"]*"', content 68 | ) 69 | assert len(picker_containers) >= 4, ( 70 | f"Should have at least 4 node picker containers, found {len(picker_containers)}" 71 | ) 72 | 73 | # Check that each exclude field has the correct structure 74 | for field_name in ["exclude_from", "exclude_to"]: 75 | # Check for hidden input with correct name 76 | assert f'name="{field_name}"' in content, ( 77 | f"Should have hidden input for {field_name}" 78 | ) 79 | -------------------------------------------------------------------------------- /scripts/benchmark_map_render.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """Simple benchmark utility for measuring the time it takes to build all the data 3 | needed by the /api/locations endpoint (node locations + traceroute links). 4 | 5 | You can point this script at any SQLite DB file by passing the absolute path as 6 | an argument. It will set the DATABASE_FILE environment variable so the regular 7 | service layer uses the specified production DB. 8 | 9 | Example: 10 | python scripts/benchmark_map_render.py /data/meshtastic_history_prod.db 11 | 12 | Performance Improvements Achieved: 13 | - Original baseline: ~4.2s total render time 14 | - After SQL optimization: ~2.1s total render time (2x speedup) 15 | - Key optimizations: 16 | 1. Replaced window function with CTE + MAX() approach (2.4s → 0.3s SQL query) 17 | 2. Reduced gateway analysis time window (24h → 3h for faster queries) 18 | 3. 
Added bulk multi-source BFS for hop calculation (vs per-node BFS) 19 | 4. Eliminated duplicate location queries between map render and statistics 20 | """ 21 | 22 | from __future__ import annotations 23 | 24 | import os 25 | import sys 26 | import time 27 | from pathlib import Path 28 | 29 | from malla.services.location_service import LocationService 30 | 31 | 32 | def main() -> None: 33 | if len(sys.argv) < 2 or not Path(sys.argv[1]).is_file(): 34 | print( 35 | "Usage: python scripts/benchmark_map_render.py " 36 | ) 37 | sys.exit(1) 38 | 39 | db_path = Path(sys.argv[1]).expanduser().resolve() 40 | os.environ["MALLA_DATABASE_FILE"] = str(db_path) 41 | 42 | print(f"Using database: {db_path}") 43 | 44 | # Warm-up – the first call will create the SQLite connection and parse some rows. 45 | print("\nWarming up …") 46 | _ = LocationService.get_node_locations() 47 | _ = LocationService.get_traceroute_links() 48 | 49 | # Benchmark node locations 50 | print("\nBenchmarking node locations …") 51 | start = time.perf_counter() 52 | locations = LocationService.get_node_locations() 53 | elapsed_locations = time.perf_counter() - start 54 | print(f"Fetched {len(locations)} node locations in {elapsed_locations:.3f}s") 55 | 56 | # Benchmark traceroute links 57 | print("\nBenchmarking traceroute links …") 58 | start = time.perf_counter() 59 | links = LocationService.get_traceroute_links() 60 | elapsed_links = time.perf_counter() - start 61 | print(f"Fetched {len(links)} traceroute links in {elapsed_links:.3f}s") 62 | 63 | # Benchmark combined statistics (re-uses locations list to avoid duplicate work) 64 | print("\nBenchmarking location statistics …") 65 | start = time.perf_counter() 66 | stats = LocationService.get_location_statistics(locations) 67 | elapsed_stats = time.perf_counter() - start 68 | print(f"Calculated statistics in {elapsed_stats:.3f}s") 69 | 70 | total_time = elapsed_locations + elapsed_links 71 | print("\n=== SUMMARY ===") 72 | print(f"Total data build time 
class RouteData(TypedDict):
    """Type definition for parsed traceroute route data."""

    route_nodes: list[int]
    snr_towards: list[float]
    route_back: list[int]
    snr_back: list[float]


def parse_traceroute_payload(raw_payload: bytes) -> RouteData:
    """
    Decode a raw traceroute packet payload via protobuf.

    Args:
        raw_payload: Raw payload bytes from the packet

    Returns:
        A ``RouteData`` dictionary with ``route_nodes``, ``snr_towards``,
        ``route_back`` and ``snr_back``. SNR values are converted from the
        wire format (scaled integers) to dB by dividing by 4. All four lists
        are empty when the payload is missing or cannot be parsed as a valid
        ``RouteDiscovery`` protobuf message.
    """
    logger.debug(f"Parsing traceroute payload of {len(raw_payload)} bytes")

    empty = RouteData(route_nodes=[], snr_towards=[], route_back=[], snr_back=[])
    if not raw_payload:
        return empty

    try:
        discovery = mesh_pb2.RouteDiscovery()
        discovery.ParseFromString(raw_payload)
    except Exception as e:
        # Malformed / non-protobuf payloads occur in the wild; degrade
        # gracefully to an empty result instead of raising.
        logger.debug(f"Protobuf parsing failed: {e}, returning empty result")
        return empty

    parsed = RouteData(
        route_nodes=[int(node) for node in discovery.route],
        # SNR travels as a scaled integer; divide by 4 to obtain actual dB.
        snr_towards=[float(value) / 4.0 for value in discovery.snr_towards],
        route_back=[int(node) for node in discovery.route_back],
        snr_back=[float(value) / 4.0 for value in discovery.snr_back],
    )

    logger.debug(
        f"Protobuf parsing successful: {len(parsed['route_nodes'])} nodes, "
        f"{len(parsed['snr_towards'])} SNR values"
    )
    return parsed
class TestTracerouteServiceLongestLinks:
    """Test TracerouteService longest links analysis functionality."""

    # @patch decorators are applied bottom-up, so the mock for the LAST
    # decorator (TraceroutePacket) is injected as the FIRST mock argument.
    @patch("src.malla.services.traceroute_service.TracerouteRepository")
    @patch("src.malla.services.traceroute_service.TraceroutePacket")
    def test_longest_links_analysis_basic(self, mock_traceroute_packet, mock_repo):
        """Test basic longest links analysis functionality."""
        # Mock repository response
        mock_packet_data = {
            "id": 1,
            "from_node_id": 100,
            "to_node_id": 200,
            "timestamp": datetime.now().timestamp(),
            "gateway_id": "!12345678",
            "raw_payload": b"mock_payload",
            "processed_successfully": True,
        }

        mock_repo.get_traceroute_packets.return_value = {"packets": [mock_packet_data]}

        # Mock TraceroutePacket
        mock_packet = Mock()
        mock_packet.from_node_id = 100
        mock_packet.to_node_id = 200

        # Mock RF hop
        mock_hop = Mock()
        mock_hop.from_node_id = 100
        mock_hop.to_node_id = 200
        mock_hop.from_node_name = "Node100"
        mock_hop.to_node_name = "Node200"
        mock_hop.distance_km = 5.0  # 5km
        mock_hop.snr = -5.0

        mock_packet.get_rf_hops.return_value = [mock_hop]
        mock_packet.get_display_hops.return_value = [mock_hop]
        mock_packet.calculate_hop_distances = Mock()

        mock_traceroute_packet.return_value = mock_packet

        # Call the method
        result = TracerouteService.get_longest_links_analysis(
            min_distance_km=1.0, min_snr=-10.0, max_results=10
        )

        # Verify TraceroutePacket was called with correct arguments
        mock_traceroute_packet.assert_called_with(
            packet_data=mock_packet_data, resolve_names=True
        )

        # Verify structure
        assert "summary" in result
        assert "direct_links" in result
        assert "indirect_links" in result

        # Verify summary
        summary = result["summary"]
        assert "total_links" in summary
        assert "direct_links" in summary
        assert "longest_direct" in summary
        assert "longest_path" in summary

        # Verify direct links: the single mocked hop (5 km, -5 dB) passes the
        # min_distance_km=1.0 / min_snr=-10.0 thresholds, so exactly one
        # direct link is expected.
        assert len(result["direct_links"]) == 1
        direct_link = result["direct_links"][0]
        assert direct_link["from_node_id"] == 100
        assert direct_link["to_node_id"] == 200
        assert direct_link["distance_km"] == 5.0
        assert direct_link["avg_snr"] == -5.0
        assert direct_link["traceroute_count"] == 1

    @patch("src.malla.services.traceroute_service.TracerouteRepository")
    def test_longest_links_analysis_empty_data(self, mock_repo):
        """Test analysis with no traceroute data."""
        # Mock empty repository response
        mock_repo.get_traceroute_packets.return_value = {"packets": []}

        # Call the method
        result = TracerouteService.get_longest_links_analysis()

        # Should return empty results with proper structure
        assert result["summary"]["total_links"] == 0
        assert result["summary"]["direct_links"] == 0
        assert result["summary"]["longest_direct"] is None
        assert result["summary"]["longest_path"] is None
        assert len(result["direct_links"]) == 0
        assert len(result["indirect_links"]) == 0
13 | """ 14 | 15 | from __future__ import annotations 16 | 17 | import argparse 18 | import os 19 | import statistics as stats 20 | import time 21 | from pathlib import Path 22 | 23 | # Delay heavy imports until after we have pointed the code at the correct DB 24 | 25 | 26 | def _parse_args() -> argparse.Namespace: # pragma: no cover (cli helper) 27 | parser = argparse.ArgumentParser(description="Benchmark get_longest_links_analysis") 28 | parser.add_argument( 29 | "--db", 30 | dest="db_path", 31 | type=str, 32 | help="SQLite DB file to use (overrides $DATABASE_FILE)", 33 | ) 34 | parser.add_argument( 35 | "--iterations", 36 | type=int, 37 | default=3, 38 | help="Number of benchmark iterations to run", 39 | ) 40 | parser.add_argument( 41 | "--min-distance", type=float, default=1.0, help="min_distance_km parameter" 42 | ) 43 | parser.add_argument( 44 | "--min-snr", type=float, default=-20.0, help="min_snr parameter" 45 | ) 46 | parser.add_argument( 47 | "--max-results", type=int, default=100, help="max_results parameter" 48 | ) 49 | parser.add_argument( 50 | "--quiet", 51 | action="store_true", 52 | help="Suppress detailed result output (duration only)", 53 | ) 54 | return parser.parse_args() 55 | 56 | 57 | def _print_summary(durations: list[float]): 58 | if not durations: 59 | return 60 | print("\nSummary (seconds):") 61 | print(f" runs : {len(durations)}") 62 | print(f" min : {min(durations):.3f}") 63 | print(f" max : {max(durations):.3f}") 64 | print(f" mean : {stats.mean(durations):.3f}") 65 | if len(durations) >= 2: 66 | print(f" median: {stats.median(durations):.3f}") 67 | print() 68 | 69 | 70 | def main() -> None: # pragma: no cover (benchmark script) 71 | args = _parse_args() 72 | 73 | if args.db_path: 74 | db_path = Path(args.db_path).expanduser().resolve() 75 | if not db_path.exists(): 76 | raise FileNotFoundError(db_path) 77 | os.environ["MALLA_DATABASE_FILE"] = str(db_path) 78 | print(f"Using database: {db_path}") 79 | else: 80 | print("Using database 
from $MALLA_DATABASE_FILE or default path") 81 | 82 | # Ensure 'src' directory is on sys.path so local imports work when run 83 | import sys 84 | 85 | ROOT_DIR = Path(__file__).resolve().parents[1] 86 | SRC_DIR = ROOT_DIR / "src" 87 | sys.path.insert(0, str(ROOT_DIR)) # allow 'import src.*' 88 | sys.path.insert(0, str(SRC_DIR)) # allow 'import malla.*' 89 | 90 | # Now that DATABASE_FILE is set, we can import the heavy modules 91 | from src.malla.services.traceroute_service import ( 92 | TracerouteService, # noqa: WPS433 (runtime import intended) 93 | ) 94 | 95 | durations: list[float] = [] 96 | 97 | for i in range(1, args.iterations + 1): 98 | print(f"\nRun {i}/{args.iterations} …", end=" ", flush=True) 99 | start = time.perf_counter() 100 | result = TracerouteService.get_longest_links_analysis( 101 | min_distance_km=args.min_distance, 102 | min_snr=args.min_snr, 103 | max_results=args.max_results, 104 | ) 105 | elapsed = time.perf_counter() - start 106 | durations.append(elapsed) 107 | print(f"{elapsed:.3f}s, links: {result['summary']['total_links']}") 108 | if not args.quiet: 109 | print(" longest direct link:", result["summary"]["longest_direct"]) 110 | 111 | _print_summary(durations) 112 | 113 | 114 | if __name__ == "__main__": # pragma: no cover 115 | main() 116 | -------------------------------------------------------------------------------- /src/malla/templates/components/shared_sidebar.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 99 | -------------------------------------------------------------------------------- /src/malla/wsgi.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | WSGI entry point for Malla Web UI with Gunicorn support. 4 | 5 | This module provides the WSGI application factory and a main function 6 | that starts Gunicorn with appropriate configuration for production deployment. 
7 | """ 8 | 9 | import logging 10 | import sys 11 | 12 | from .config import get_config 13 | from .web_ui import create_app 14 | 15 | # Configure logging 16 | logging.basicConfig( 17 | level=logging.INFO, 18 | format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", 19 | handlers=[logging.FileHandler("app.log"), logging.StreamHandler(sys.stdout)], 20 | ) 21 | 22 | logger = logging.getLogger(__name__) 23 | 24 | 25 | def create_wsgi_app(): 26 | """Create and return the WSGI application.""" 27 | logger.info("Creating WSGI application for Gunicorn") 28 | return create_app() 29 | 30 | 31 | # Lazy application creation to avoid config caching issues during testing 32 | _application = None 33 | 34 | 35 | def get_application(): 36 | """Get the WSGI application instance, creating it if necessary.""" 37 | global _application 38 | if _application is None: 39 | _application = create_wsgi_app() 40 | return _application 41 | 42 | 43 | # WSGI application callable for servers - use a callable that defers execution 44 | def application(*args, **kwargs): 45 | """WSGI application entry point.""" 46 | return get_application()(*args, **kwargs) 47 | 48 | 49 | def main(): 50 | """Main entry point for running with Gunicorn.""" 51 | logger.info("Starting Malla Web UI with Gunicorn") 52 | 53 | try: 54 | # Import gunicorn here to avoid hard dependency when not using WSGI 55 | from gunicorn.app.wsgiapp import WSGIApplication 56 | 57 | # Get configuration 58 | cfg = get_config() 59 | 60 | # Print startup information 61 | print("=" * 60) 62 | print("🌐 Malla Web UI (Gunicorn)") 63 | print("=" * 60) 64 | print(f"Database: {cfg.database_file}") 65 | print(f"Web UI: http://{cfg.host}:{cfg.port}") 66 | print("Workers: auto-detected") 67 | print(f"Debug mode: {cfg.debug}") 68 | print("=" * 60) 69 | print() 70 | 71 | # Configure Gunicorn 72 | gunicorn_config = { 73 | "bind": f"{cfg.host}:{cfg.port}", 74 | "workers": None, # Let Gunicorn auto-detect based on CPU cores 75 | "worker_class": "sync", 
76 | "worker_connections": 1000, 77 | "max_requests": 1000, 78 | "max_requests_jitter": 50, 79 | "timeout": 30, 80 | "keepalive": 2, 81 | "preload_app": True, 82 | "access_log_format": '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s" %(D)s', 83 | "accesslog": "-", # Log to stdout 84 | "errorlog": "-", # Log to stderr 85 | "loglevel": "info", 86 | "capture_output": True, 87 | "enable_stdio_inheritance": True, 88 | } 89 | 90 | # Create Gunicorn application 91 | class MallaWSGIApplication(WSGIApplication): 92 | def __init__(self, app, options=None): 93 | self.options = options or {} 94 | self.application = app 95 | super().__init__() 96 | 97 | def load_config(self): 98 | if hasattr(self, "cfg") and self.cfg: 99 | config = { 100 | key: value 101 | for key, value in self.options.items() 102 | if key in self.cfg.settings and value is not None 103 | } 104 | for key, value in config.items(): 105 | self.cfg.set(key.lower(), value) 106 | 107 | def load(self): 108 | return self.application 109 | 110 | # Start Gunicorn 111 | logger.info(f"Starting Gunicorn server on {cfg.host}:{cfg.port}") 112 | MallaWSGIApplication(get_application(), gunicorn_config).run() 113 | 114 | except ImportError: 115 | logger.error( 116 | "Gunicorn is not installed. 
Please install it with: pip install gunicorn" 117 | ) 118 | sys.exit(1) 119 | except Exception as e: 120 | logger.error(f"Failed to start Gunicorn application: {e}") 121 | sys.exit(1) 122 | 123 | 124 | if __name__ == "__main__": 125 | main() 126 | -------------------------------------------------------------------------------- /tests/integration/test_api_routes.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | 4 | class TestAPIRoutes: 5 | @pytest.mark.integration 6 | def test_api_traceroute_endpoint(self, client): 7 | """Test the API traceroute endpoint returns expected structure.""" 8 | response = client.get("/api/traceroute") 9 | assert response.status_code == 200 10 | 11 | data = response.get_json() 12 | assert "traceroutes" in data 13 | assert "total_count" in data 14 | assert "page" in data 15 | assert "per_page" in data 16 | 17 | @pytest.mark.integration 18 | def test_api_traceroute_graph_endpoint(self, client): 19 | """Test the API traceroute graph endpoint returns expected structure.""" 20 | response = client.get("/api/traceroute/graph") 21 | assert response.status_code == 200 22 | 23 | data = response.get_json() 24 | assert "nodes" in data 25 | assert "links" in data 26 | assert "stats" in data 27 | assert "filters" in data 28 | 29 | # Check that nodes and links are lists 30 | assert isinstance(data["nodes"], list) 31 | assert isinstance(data["links"], list) 32 | 33 | # Check stats structure 34 | stats = data["stats"] 35 | assert "packets_analyzed" in stats 36 | assert "links_found" in stats 37 | 38 | # Check filters structure 39 | filters = data["filters"] 40 | assert "hours" in filters 41 | assert "min_snr" in filters 42 | assert "include_indirect" in filters 43 | 44 | @pytest.mark.integration 45 | def test_api_traceroute_graph_location_data(self, client): 46 | """Test that the traceroute graph endpoint includes location data for nodes.""" 47 | response = client.get("/api/traceroute/graph") 48 
| assert response.status_code == 200 49 | 50 | data = response.get_json() 51 | nodes = data["nodes"] 52 | 53 | if nodes: # Only test if we have nodes 54 | # Check node structure includes expected fields 55 | node = nodes[0] 56 | required_fields = [ 57 | "id", 58 | "name", 59 | "packet_count", 60 | "connections", 61 | "last_seen", 62 | "size", 63 | ] 64 | for field in required_fields: 65 | assert field in node, f"Missing required field: {field}" 66 | 67 | # Check if any nodes have location data 68 | nodes_with_location = [n for n in nodes if "location" in n] 69 | 70 | # If we have nodes with location data, verify the structure 71 | if nodes_with_location: 72 | location_node = nodes_with_location[0] 73 | location = location_node["location"] 74 | 75 | # Verify location structure 76 | assert "latitude" in location 77 | assert "longitude" in location 78 | assert isinstance(location["latitude"], int | float) 79 | assert isinstance(location["longitude"], int | float) 80 | 81 | # Altitude is optional 82 | if "altitude" in location: 83 | assert isinstance(location["altitude"], int | float | type(None)) 84 | 85 | print( 86 | f"Found {len(nodes_with_location)} nodes with location data out of {len(nodes)} total nodes" 87 | ) 88 | else: 89 | print("No nodes with location data found in graph response") 90 | 91 | @pytest.mark.integration 92 | def test_api_traceroute_graph_with_filters(self, client): 93 | """Test the traceroute graph endpoint with various filters.""" 94 | # Test with different time periods 95 | response = client.get("/api/traceroute/graph?hours=6") 96 | assert response.status_code == 200 97 | data = response.get_json() 98 | assert data["filters"]["hours"] == 6 99 | 100 | # Test with SNR filter 101 | response = client.get("/api/traceroute/graph?min_snr=-20") 102 | assert response.status_code == 200 103 | data = response.get_json() 104 | assert data["filters"]["min_snr"] == -20.0 105 | 106 | # Test with indirect connections 107 | response = 
client.get("/api/traceroute/graph?include_indirect=true") 108 | assert response.status_code == 200 109 | data = response.get_json() 110 | assert data["filters"]["include_indirect"] is True 111 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | .python-version 89 | 90 | # pipenv 91 | # 
According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm.fming.dev/#use-with-ide 110 | .pdm.toml 111 | 112 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 113 | __pypackages__/ 114 | 115 | # Celery stuff 116 | celerybeat-schedule 117 | celerybeat.pid 118 | 119 | # SageMath parsed files 120 | *.sage.py 121 | 122 | # Environments 123 | .env 124 | .venv 125 | env/ 126 | venv/ 127 | ENV/ 128 | env.bak/ 129 | venv.bak/ 130 | 131 | # Docker environment files 132 | .env.local 133 | .env.production 134 | 135 | # Spyder project settings 136 | .spyderproject 137 | .spyproject 138 | 139 | # Rope project settings 140 | .ropeproject 141 | 142 | # mkdocs documentation 143 | /site 144 | 145 | # mypy 146 | .mypy_cache/ 147 | .dmypy.json 148 | dmypy.json 149 | 150 | # basedpyright 151 | .basedpyright/ 152 | pyrightconfig.json 153 | 154 | # Pyre type checker 155 | .pyre/ 156 | 157 | # pytype static type analyzer 158 | .pytype/ 159 | 160 | # Cython debug symbols 161 | cython_debug/ 162 | 163 | # PyCharm 164 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 165 | # be added to the global gitignore or merged into this project gitignore. For a PyCharm 166 | # project, it is recommended to include the .idea directory in version control. 167 | .idea/ 168 | 169 | # Local development files 170 | .envrc 171 | .direnv/ 172 | 173 | # Database files (project-specific) 174 | *.db 175 | *.db-shm 176 | *.db-wal 177 | *.sqlite 178 | *.sqlite3 179 | meshtastic_history.db 180 | 181 | # Log files (project-specific) 182 | *.log 183 | app.log 184 | backfill_mesh_packet_ids.log 185 | 186 | # Temporary files 187 | *.tmp 188 | *.temp 189 | .DS_Store 190 | Thumbs.db 191 | 192 | # Editor backup files 193 | *~ 194 | *.swp 195 | *.swo 196 | *# 197 | 198 | # OS generated files 199 | .DS_Store 200 | .DS_Store? 
201 | ._* 202 | .Spotlight-V100 203 | .Trashes 204 | ehthumbs.db 205 | Thumbs.db 206 | 207 | # Local configuration (do not commit real config) 208 | config.yaml 209 | config_*.yaml 210 | -------------------------------------------------------------------------------- /src/malla/static/css/node-picker.css: -------------------------------------------------------------------------------- 1 | .node-picker-container { 2 | position: relative; 3 | } 4 | 5 | .node-picker-input { 6 | padding-right: 35px; /* Make room for clear button */ 7 | } 8 | 9 | .node-picker-clear { 10 | position: absolute; 11 | right: 5px; 12 | top: 50%; 13 | transform: translateY(-50%); 14 | z-index: 10; 15 | border: none; 16 | background: transparent; 17 | padding: 2px 6px; 18 | font-size: 12px; 19 | line-height: 1; 20 | } 21 | 22 | .node-picker-clear:hover { 23 | background-color: var(--bs-secondary-bg); 24 | border-radius: 3px; 25 | } 26 | 27 | .node-picker-dropdown { 28 | position: absolute; 29 | top: 100%; 30 | left: 0; 31 | right: 0; 32 | z-index: 1050; 33 | background: var(--bs-body-bg); 34 | border: 1px solid var(--bs-border-color); 35 | border-radius: 0.375rem; 36 | box-shadow: var(--bs-box-shadow); 37 | max-height: 300px; 38 | overflow-y: auto; 39 | } 40 | 41 | .node-picker-dropdown.show { 42 | display: block !important; 43 | } 44 | 45 | .node-picker-item { 46 | padding: 8px 12px; 47 | cursor: pointer; 48 | border-bottom: 1px solid var(--bs-border-color-translucent); 49 | transition: background-color 0.15s ease-in-out; 50 | } 51 | 52 | .node-picker-item:last-child { 53 | border-bottom: none; 54 | } 55 | 56 | .node-picker-item:hover, 57 | .node-picker-item.active { 58 | background-color: var(--bs-tertiary-bg); 59 | } 60 | 61 | .node-picker-item-name { 62 | font-weight: 500; 63 | color: var(--bs-body-color); 64 | } 65 | 66 | .node-picker-item-id { 67 | font-size: 0.875rem; 68 | color: var(--bs-secondary-color); 69 | font-family: monospace; 70 | } 71 | 72 | .node-picker-item-details { 73 | 
font-size: 0.75rem; 74 | color: var(--bs-secondary-color); 75 | margin-top: 2px; 76 | } 77 | 78 | .node-picker-loading, 79 | .node-picker-no-results { 80 | padding: 12px; 81 | text-align: center; 82 | color: var(--bs-secondary-color); 83 | } 84 | 85 | .node-picker-loading .spinner-border { 86 | width: 1rem; 87 | height: 1rem; 88 | } 89 | 90 | /* Focus styles */ 91 | .node-picker-input:focus { 92 | border-color: var(--bs-primary); 93 | outline: 0; 94 | box-shadow: 0 0 0 0.25rem rgba(var(--bs-primary-rgb), 0.25); 95 | } 96 | 97 | /* Keyboard navigation */ 98 | .node-picker-item.keyboard-active { 99 | background-color: var(--bs-secondary-bg); 100 | } 101 | 102 | /* Gateway Picker Styles (similar to node picker) */ 103 | .gateway-picker-container { 104 | position: relative; 105 | } 106 | 107 | .gateway-picker-input { 108 | padding-right: 35px; /* Make room for clear button */ 109 | } 110 | 111 | .gateway-picker-clear { 112 | position: absolute; 113 | right: 5px; 114 | top: 50%; 115 | transform: translateY(-50%); 116 | z-index: 10; 117 | border: none; 118 | background: transparent; 119 | padding: 2px 6px; 120 | font-size: 12px; 121 | line-height: 1; 122 | } 123 | 124 | .gateway-picker-clear:hover { 125 | background-color: var(--bs-secondary-bg); 126 | border-radius: 3px; 127 | } 128 | 129 | .gateway-picker-dropdown { 130 | position: absolute; 131 | top: 100%; 132 | left: 0; 133 | right: 0; 134 | z-index: 1050; 135 | background: var(--bs-body-bg); 136 | border: 1px solid var(--bs-border-color); 137 | border-radius: 0.375rem; 138 | box-shadow: var(--bs-box-shadow); 139 | max-height: 300px; 140 | overflow-y: auto; 141 | } 142 | 143 | .gateway-picker-dropdown.show { 144 | display: block !important; 145 | } 146 | 147 | .gateway-picker-item { 148 | padding: 8px 12px; 149 | cursor: pointer; 150 | border-bottom: 1px solid var(--bs-border-color-translucent); 151 | transition: background-color 0.15s ease-in-out; 152 | } 153 | 154 | .gateway-picker-item:last-child { 155 | 
border-bottom: none; 156 | } 157 | 158 | .gateway-picker-item:hover, 159 | .gateway-picker-item.active { 160 | background-color: var(--bs-tertiary-bg); 161 | } 162 | 163 | .gateway-picker-item-name { 164 | font-weight: 500; 165 | color: var(--bs-body-color); 166 | } 167 | 168 | .gateway-picker-item-id { 169 | font-size: 0.875rem; 170 | color: var(--bs-secondary-color); 171 | font-family: monospace; 172 | } 173 | 174 | .gateway-picker-item-details { 175 | font-size: 0.75rem; 176 | color: var(--bs-secondary-color); 177 | margin-top: 2px; 178 | } 179 | 180 | .gateway-picker-loading, 181 | .gateway-picker-no-results { 182 | padding: 12px; 183 | text-align: center; 184 | color: var(--bs-secondary-color); 185 | } 186 | 187 | .gateway-picker-loading .spinner-border { 188 | width: 1rem; 189 | height: 1rem; 190 | } 191 | 192 | /* Focus styles */ 193 | .gateway-picker-input:focus { 194 | border-color: var(--bs-primary); 195 | outline: 0; 196 | box-shadow: 0 0 0 0.25rem rgba(var(--bs-primary-rgb), 0.25); 197 | } 198 | 199 | /* Keyboard navigation */ 200 | .gateway-picker-item.keyboard-active { 201 | background-color: var(--bs-secondary-bg); 202 | } 203 | -------------------------------------------------------------------------------- /src/malla/static/js/location-cache.js: -------------------------------------------------------------------------------- 1 | (function () { 2 | const CACHE_KEY = 'malla_locations_cache_v1'; 3 | const CACHE_TTL_MS = 15 * 60 * 1000; // 15 minutes 4 | 5 | // Internal state 6 | let _locations = null; // Map of node_id -> location object 7 | let _loaded = false; 8 | let _loadPromise = null; 9 | 10 | /** 11 | * Restore cached locations from localStorage 12 | */ 13 | function _restoreFromLocalStorage() { 14 | try { 15 | const raw = localStorage.getItem(CACHE_KEY); 16 | if (!raw) return null; 17 | 18 | const parsed = JSON.parse(raw); 19 | if (!parsed || typeof parsed.locations !== 'object') return null; 20 | if (Date.now() - (parsed.timestamp || 0) > 
CACHE_TTL_MS) return null; 21 | 22 | return parsed.locations; 23 | } catch (err) { 24 | console.warn('LocationCache: Failed to restore from localStorage:', err); 25 | return null; 26 | } 27 | } 28 | 29 | /** 30 | * Persist locations to localStorage 31 | */ 32 | function _persistToLocalStorage(locations) { 33 | try { 34 | const payload = { timestamp: Date.now(), locations }; 35 | localStorage.setItem(CACHE_KEY, JSON.stringify(payload)); 36 | } catch (err) { 37 | console.warn('LocationCache: Failed to persist to localStorage:', err); 38 | } 39 | } 40 | 41 | const LocationCache = { 42 | /** 43 | * Load all locations from API and cache them 44 | */ 45 | load() { 46 | if (_loaded) return Promise.resolve(_locations || {}); 47 | if (_loadPromise) return _loadPromise; 48 | 49 | _loadPromise = (async () => { 50 | // Try localStorage first 51 | const cached = _restoreFromLocalStorage(); 52 | if (cached) { 53 | _locations = cached; 54 | _loaded = true; 55 | return _locations; 56 | } 57 | 58 | // Fetch from API 59 | try { 60 | const resp = await fetch('/api/locations'); 61 | if (!resp.ok) throw new Error(`HTTP ${resp.status}`); 62 | const data = await resp.json(); 63 | 64 | // Convert array to map for fast lookup 65 | _locations = {}; 66 | if (Array.isArray(data.locations)) { 67 | data.locations.forEach(loc => { 68 | if (loc && loc.node_id) { 69 | _locations[loc.node_id] = loc; 70 | } 71 | }); 72 | } 73 | 74 | _persistToLocalStorage(_locations); 75 | } catch (err) { 76 | console.error('LocationCache: Failed to fetch locations:', err); 77 | _locations = {}; 78 | } 79 | 80 | _loaded = true; 81 | return _locations; 82 | })(); 83 | 84 | return _loadPromise; 85 | }, 86 | 87 | /** 88 | * Get location for a specific node ID 89 | */ 90 | async getLocation(nodeId) { 91 | await this.load(); 92 | return _locations[nodeId] || null; 93 | }, 94 | 95 | /** 96 | * Get locations for multiple node IDs 97 | */ 98 | async getLocations(nodeIds) { 99 | await this.load(); 100 | const result = []; 
101 | nodeIds.forEach(id => { 102 | const loc = _locations[id]; 103 | if (loc) result.push(loc); 104 | }); 105 | return result; 106 | }, 107 | 108 | /** 109 | * Get all locations with coordinates 110 | */ 111 | async getLocationsWithCoordinates() { 112 | await this.load(); 113 | return Object.values(_locations).filter(loc => 114 | loc && loc.latitude != null && loc.longitude != null 115 | ); 116 | }, 117 | 118 | /** 119 | * Add or update a location in the cache 120 | */ 121 | addLocation(location) { 122 | if (!location || !location.node_id) return; 123 | if (!_locations) _locations = {}; 124 | _locations[location.node_id] = location; 125 | _persistToLocalStorage(_locations); 126 | }, 127 | 128 | /** 129 | * Clear the cache 130 | */ 131 | clear() { 132 | _locations = {}; 133 | _loaded = false; 134 | _loadPromise = null; 135 | try { 136 | localStorage.removeItem(CACHE_KEY); 137 | } catch (err) { 138 | console.warn('LocationCache: Failed to clear localStorage:', err); 139 | } 140 | } 141 | }; 142 | 143 | // Expose globally 144 | window.LocationCache = LocationCache; 145 | 146 | // Start loading immediately 147 | LocationCache.load(); 148 | })(); 149 | -------------------------------------------------------------------------------- /tests/integration/test_exclude_self_api.py: -------------------------------------------------------------------------------- 1 | """Integration tests for exclude_self API functionality.""" 2 | 3 | import pytest 4 | 5 | 6 | @pytest.mark.integration 7 | @pytest.mark.api 8 | class TestExcludeSelfAPI: 9 | """Integration tests for exclude_self filter behavior.""" 10 | 11 | def test_exclude_self_filter_works(self, client): 12 | """Test that exclude_self filter actually excludes self-sent packets.""" 13 | # Find a gateway that has both self-sent and other packets 14 | response = client.get("/api/packets/data?limit=100") 15 | assert response.status_code == 200 16 | data = response.get_json() 17 | 18 | # Find gateway with both self-sent and other 
packets 19 | gateway_stats = {} 20 | for packet in data.get("data", []): 21 | gw_node_id = packet.get("gateway_node_id") 22 | from_node_id = packet.get("from_node_id") 23 | if gw_node_id: 24 | if gw_node_id not in gateway_stats: 25 | gateway_stats[gw_node_id] = {"self": 0, "others": 0} 26 | if from_node_id == gw_node_id: 27 | gateway_stats[gw_node_id]["self"] += 1 28 | else: 29 | gateway_stats[gw_node_id]["others"] += 1 30 | 31 | # Find a suitable gateway for testing 32 | test_gateway_id = None 33 | for gw_id, stats in gateway_stats.items(): 34 | if stats["self"] > 0 and stats["others"] > 0: 35 | test_gateway_id = gw_id 36 | break 37 | 38 | if not test_gateway_id: 39 | pytest.skip("No gateway found with both self-sent and other packets") 40 | 41 | # Test without exclude_self 42 | response = client.get( 43 | f"/api/packets/data?gateway_id={test_gateway_id}&limit=20" 44 | ) 45 | assert response.status_code == 200 46 | data_without_filter = response.get_json() 47 | packets_without_filter = data_without_filter.get("data", []) 48 | 49 | # Count self-sent packets 50 | self_sent_count = sum( 51 | 1 52 | for p in packets_without_filter 53 | if p.get("from_node_id") == test_gateway_id 54 | ) 55 | assert self_sent_count > 0, "Should have self-sent packets without filter" 56 | 57 | # Test with exclude_self=true 58 | response = client.get( 59 | f"/api/packets/data?gateway_id={test_gateway_id}&exclude_self=true&limit=20" 60 | ) 61 | assert response.status_code == 200 62 | data_with_filter = response.get_json() 63 | packets_with_filter = data_with_filter.get("data", []) 64 | 65 | # Verify no self-sent packets 66 | self_sent_count_filtered = sum( 67 | 1 for p in packets_with_filter if p.get("from_node_id") == test_gateway_id 68 | ) 69 | assert self_sent_count_filtered == 0, ( 70 | "Should have no self-sent packets with exclude_self=true" 71 | ) 72 | 73 | # Verify we still have other packets 74 | assert len(packets_with_filter) > 0, "Should still have non-self packets" 75 | 76 | # 
Verify all remaining packets are from other nodes 77 | for packet in packets_with_filter: 78 | from_node_id = packet.get("from_node_id") 79 | assert from_node_id != test_gateway_id, ( 80 | f"Found self-sent packet {packet['id']} when exclude_self=true" 81 | ) 82 | 83 | def test_exclude_self_with_no_gateway_filter(self, client): 84 | """Test that exclude_self without gateway_id has no effect.""" 85 | # Without gateway filter, exclude_self should have no effect 86 | response1 = client.get("/api/packets/data?limit=10") 87 | response2 = client.get("/api/packets/data?exclude_self=true&limit=10") 88 | 89 | assert response1.status_code == 200 90 | assert response2.status_code == 200 91 | 92 | data1 = response1.get_json() 93 | data2 = response2.get_json() 94 | 95 | # Should return same results when no gateway filter is applied 96 | assert len(data1.get("data", [])) == len(data2.get("data", [])) 97 | 98 | def test_exclude_self_false_includes_self_packets(self, client): 99 | """Test that exclude_self=false includes self-sent packets.""" 100 | # Find a gateway with self-sent packets 101 | response = client.get("/api/packets/data?limit=100") 102 | assert response.status_code == 200 103 | data = response.get_json() 104 | 105 | test_gateway_id = None 106 | for packet in data.get("data", []): 107 | gw_node_id = packet.get("gateway_node_id") 108 | from_node_id = packet.get("from_node_id") 109 | if gw_node_id and from_node_id == gw_node_id: 110 | test_gateway_id = gw_node_id 111 | break 112 | 113 | if not test_gateway_id: 114 | pytest.skip("No gateway found with self-sent packets") 115 | 116 | # Test with exclude_self=false (explicit) 117 | response = client.get( 118 | f"/api/packets/data?gateway_id={test_gateway_id}&exclude_self=false&limit=20" 119 | ) 120 | assert response.status_code == 200 121 | data = response.get_json() 122 | packets = data.get("data", []) 123 | 124 | # Should include self-sent packets 125 | self_sent_count = sum( 126 | 1 for p in packets if 
p.get("from_node_id") == test_gateway_id 127 | ) 128 | assert self_sent_count > 0, ( 129 | "Should include self-sent packets when exclude_self=false" 130 | ) 131 | -------------------------------------------------------------------------------- /tests/e2e/test_debug_node_picker.py: -------------------------------------------------------------------------------- 1 | """ 2 | Simplified E2E test to debug node picker interaction issues. 3 | """ 4 | 5 | from playwright.sync_api import Page, expect 6 | 7 | 8 | def test_debug_node_picker_interaction(page: Page, test_server_url: str): 9 | """Debug test to understand node picker interaction issues.""" 10 | # Navigate to packets page 11 | page.goto(f"{test_server_url}/packets") 12 | page.wait_for_selector("#packetsTable", timeout=10000) 13 | page.wait_for_timeout(2000) # Wait for everything to load 14 | 15 | print("=== PAGE LOADED ===") 16 | 17 | # Debug: Check initial state 18 | initial_state = page.evaluate("""() => { 19 | const excludeFromHidden = document.querySelector('input[name="exclude_from"]'); 20 | const excludeFromVisible = document.querySelector('#exclude_from'); 21 | return { 22 | hiddenInputExists: !!excludeFromHidden, 23 | hiddenInputValue: excludeFromHidden?.value || 'NONE', 24 | visibleInputExists: !!excludeFromVisible, 25 | visibleInputValue: excludeFromVisible?.value || 'NONE', 26 | containerExists: !!document.querySelector('.node-picker-container[data-include-broadcast="true"]'), 27 | nodePickerCount: document.querySelectorAll('.node-picker-container').length 28 | }; 29 | }""") 30 | print(f"Initial state: {initial_state}") 31 | 32 | # Try to interact with the exclude_from field 33 | exclude_from_field = page.locator("#exclude_from") 34 | expect(exclude_from_field).to_be_visible() 35 | 36 | print("=== CLICKING EXCLUDE_FROM FIELD ===") 37 | exclude_from_field.click() 38 | page.wait_for_timeout(1000) 39 | 40 | # Check if the node picker activated 41 | picker_state = page.evaluate("""() => { 42 | const 
container = document.querySelector('#exclude_from').closest('.node-picker-container'); 43 | const dropdown = container?.querySelector('.node-picker-dropdown'); 44 | const textInput = container?.querySelector('input[type="text"]'); 45 | return { 46 | containerFound: !!container, 47 | dropdownExists: !!dropdown, 48 | dropdownVisible: dropdown?.style.display !== 'none' && dropdown?.classList.contains('show'), 49 | textInputExists: !!textInput, 50 | textInputValue: textInput?.value || 'NONE' 51 | }; 52 | }""") 53 | print(f"Picker state after click: {picker_state}") 54 | 55 | # Type search text 56 | search_input = ( 57 | page.locator("#exclude_from").locator("..").locator("input[type='text']") 58 | ) 59 | print("=== TYPING SEARCH TEXT ===") 60 | search_input.fill("Test Gateway Alpha") 61 | page.wait_for_timeout(2000) # Give extra time for search 62 | 63 | # Check search results 64 | search_results = page.evaluate("""() => { 65 | const container = document.querySelector('#exclude_from').closest('.node-picker-container'); 66 | const dropdown = container?.querySelector('.node-picker-dropdown'); 67 | const results = dropdown?.querySelector('.node-picker-results'); 68 | const items = results?.querySelectorAll('.node-picker-item'); 69 | return { 70 | dropdownVisible: dropdown?.style.display !== 'none' && dropdown?.classList.contains('show'), 71 | resultsHTML: results?.innerHTML || 'NO RESULTS', 72 | itemCount: items?.length || 0, 73 | firstItemText: items?.[0]?.textContent?.trim() || 'NO FIRST ITEM', 74 | firstItemNodeId: items?.[0]?.dataset?.nodeId || 'NO NODE ID' 75 | }; 76 | }""") 77 | print(f"Search results: {search_results}") 78 | 79 | if search_results["itemCount"] > 0: 80 | print("=== CLICKING FIRST SEARCH RESULT ===") 81 | # Try to click the first result using a more robust selector 82 | exclude_from_container = page.locator("#exclude_from").locator("..") 83 | first_item = exclude_from_container.locator(".node-picker-item").first 84 | 85 | # Wait for the item to be 
visible and clickable 86 | expect(first_item).to_be_visible() 87 | first_item.click() 88 | page.wait_for_timeout(1000) 89 | 90 | # Check if hidden input was set 91 | final_state = page.evaluate("""() => { 92 | const excludeFromHidden = document.querySelector('input[name="exclude_from"]'); 93 | const excludeFromVisible = document.querySelector('#exclude_from'); 94 | return { 95 | hiddenInputValue: excludeFromHidden?.value || 'NONE', 96 | visibleInputValue: excludeFromVisible?.value || 'NONE', 97 | }; 98 | }""") 99 | print(f"Final state after selection: {final_state}") 100 | 101 | if final_state["hiddenInputValue"] != "NONE": 102 | print("=== SUCCESS: Hidden input was set correctly ===") 103 | 104 | # Now try applying the filter 105 | print("=== APPLYING FILTERS ===") 106 | apply_button = page.locator("#applyFilters") 107 | apply_button.click() 108 | page.wait_for_timeout(3000) 109 | 110 | # Check URL 111 | current_url = page.url 112 | print(f"URL after applying: {current_url}") 113 | 114 | # Check if packets were filtered 115 | rows_after = page.locator("#packetsTable tbody tr") 116 | count_after = rows_after.count() 117 | print(f"Packet count after filtering: {count_after}") 118 | 119 | else: 120 | print("=== FAILURE: Hidden input was not set ===") 121 | else: 122 | print("=== FAILURE: No search results found ===") 123 | 124 | 125 | if __name__ == "__main__": 126 | import pytest 127 | 128 | pytest.main([__file__, "-v", "-s"]) 129 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling"] 3 | build-backend = "hatchling.build" 4 | 5 | [project] 6 | name = "malla" 7 | version = "0.1.0" 8 | description = "A comprehensive web UI for browsing and analyzing Meshtastic mesh network health data" 9 | readme = "README.md" 10 | license = {file = "LICENSE"} 11 | authors = [ 12 | {name = "Malla Contributors"}, 
13 | ] 14 | maintainers = [ 15 | {name = "Malla Contributors"}, 16 | ] 17 | classifiers = [ 18 | "Development Status :: 4 - Beta", 19 | "Environment :: Web Environment", 20 | "Framework :: Flask", 21 | "Intended Audience :: End Users/Desktop", 22 | "Intended Audience :: System Administrators", 23 | "License :: OSI Approved :: MIT License", 24 | "Operating System :: OS Independent", 25 | "Programming Language :: Python :: 3", 26 | "Programming Language :: Python :: 3.13", 27 | "Topic :: Communications :: Ham Radio", 28 | "Topic :: Internet :: WWW/HTTP :: Dynamic Content", 29 | "Topic :: System :: Monitoring", 30 | "Topic :: System :: Networking :: Monitoring", 31 | ] 32 | requires-python = ">=3.13" 33 | dependencies = [ 34 | "flask>=3.0.0", 35 | "gunicorn>=22.0.0", 36 | "paho-mqtt>=2.1.0", 37 | "meshtastic>=2.6.0", 38 | "plotly>=5.17.0", 39 | "pyyaml>=6.0", 40 | "tabulate>=0.9.0", 41 | "tenacity>=9.1.0", 42 | "cryptography>=45.0.3", 43 | "types-protobuf>=6.30.2.20250516", 44 | "types-flask-cors>=6.0.0.20250520", 45 | "markdown>=3.6.0", 46 | "opentelemetry-api>=1.24.0", 47 | "opentelemetry-sdk>=1.24.0", 48 | "opentelemetry-exporter-otlp>=1.24.0", 49 | "opentelemetry-instrumentation-flask>=0.45b0", 50 | "opentelemetry-instrumentation-sqlite3>=0.45b0", 51 | "opentelemetry-instrumentation-logging>=0.45b0", 52 | "opentelemetry-instrumentation-requests>=0.45b0", 53 | "opentelemetry-instrumentation-system-metrics>=0.45b0", 54 | ] 55 | 56 | [project.urls] 57 | Homepage = "https://github.com/zenitraM/malla" 58 | Documentation = "https://github.com/zenitraM/malla#readme" 59 | Repository = "https://github.com/zenitraM/malla.git" 60 | Issues = "https://github.com/zenitraM/malla/issues" 61 | 62 | [project.scripts] 63 | malla-web = "malla.web_ui:main" 64 | malla-web-gunicorn = "malla.wsgi:main" 65 | malla-capture = "malla.mqtt_capture:main" 66 | 67 | [project.optional-dependencies] 68 | dev = [ 69 | "pytest>=8.3.0", 70 | "coverage>=7.6.0", 71 | "line-profiler>=4.2.0", 72 | 
"py-spy>=0.4.0", 73 | ] 74 | 75 | # Hatch configuration 76 | [tool.hatch.version] 77 | path = "src/malla/__init__.py" 78 | 79 | [tool.hatch.build] 80 | include = [ 81 | "src/malla/**/*.py", 82 | "src/malla/templates/**/*.html", 83 | "src/malla/static/**/*", 84 | "README.md", 85 | "LICENSE", 86 | ] 87 | 88 | [tool.hatch.build.targets.wheel] 89 | packages = ["src/malla"] 90 | 91 | [tool.hatch.build.targets.sdist] 92 | include = [ 93 | "src/", 94 | "tests/", 95 | "README.md", 96 | "LICENSE", 97 | "pyproject.toml", 98 | "malla-web", 99 | "malla-web-gunicorn", 100 | "malla-capture", 101 | ] 102 | 103 | # Testing configuration 104 | [tool.pytest.ini_options] 105 | testpaths = ["tests"] 106 | python_files = ["test_*.py", "*_test.py"] 107 | python_classes = ["Test*"] 108 | python_functions = ["test_*"] 109 | addopts = [ 110 | "-v", 111 | "--tb=short", 112 | "--strict-markers", 113 | "-n=auto", 114 | "--color=yes", 115 | "--durations=10", 116 | "--durations-min=1.0", 117 | "--show-capture=no", 118 | "--maxfail=5", 119 | ] 120 | asyncio_default_fixture_loop_scope = "function" 121 | markers = [ 122 | "slow: marks tests as slow (deselect with '-m \"not slow\"')", 123 | "integration: marks tests as integration tests", 124 | "unit: marks tests as unit tests", 125 | "api: marks tests as API tests", 126 | "e2e: marks tests as end-to-end tests", 127 | ] 128 | 129 | # Coverage configuration 130 | [tool.coverage.run] 131 | source = ["src/malla"] 132 | omit = [ 133 | "*/tests/*", 134 | "*/test_*", 135 | "*/__pycache__/*", 136 | ] 137 | 138 | [tool.coverage.report] 139 | exclude_lines = [ 140 | "pragma: no cover", 141 | "def __repr__", 142 | "if self.debug:", 143 | "if settings.DEBUG", 144 | "raise AssertionError", 145 | "raise NotImplementedError", 146 | "if 0:", 147 | "if __name__ == .__main__.:", 148 | "class .*\\bProtocol\\):", 149 | "@(abc\\.)?abstractmethod", 150 | ] 151 | 152 | 153 | 154 | [tool.ruff] 155 | target-version = "py313" 156 | line-length = 88 157 | 158 | 
[tool.ruff.lint] 159 | select = [ 160 | "E", # pycodestyle errors 161 | "W", # pycodestyle warnings 162 | "F", # pyflakes 163 | "I", # isort 164 | "B", # flake8-bugbear 165 | "C4", # flake8-comprehensions 166 | "UP", # pyupgrade 167 | ] 168 | ignore = [ 169 | "E501", # line too long, handled by ruff format 170 | "B008", # do not perform function calls in argument defaults 171 | "C901", # too complex 172 | ] 173 | 174 | [tool.ruff.lint.per-file-ignores] 175 | "__init__.py" = ["F401"] 176 | "tests/*" = ["B011"] 177 | 178 | [tool.ruff.format] 179 | # Enable the formatter 180 | quote-style = "double" 181 | indent-style = "space" 182 | skip-magic-trailing-comma = false 183 | line-ending = "auto" 184 | docstring-code-format = true 185 | docstring-code-line-length = "dynamic" 186 | 187 | [tool.basedpyright] 188 | pythonVersion = "3.13" 189 | typeCheckingMode = "standard" 190 | 191 | # Include and exclude patterns 192 | include = ["src", "tests"] 193 | exclude = [ 194 | "**/__pycache__", 195 | "**/.pytest_cache", 196 | "**/node_modules", 197 | "**/.venv", 198 | "**/venv", 199 | "build", 200 | "dist" 201 | ] 202 | 203 | # Ignore specific modules that have complex typing issues 204 | ignore = [ 205 | ] 206 | 207 | [tool.uv.sources] 208 | meshtastic = { git = "https://github.com/meshtastic/python" } 209 | 210 | [dependency-groups] 211 | dev = [ 212 | "basedpyright>=1.29.4", 213 | "playwright==1.54.0", 214 | "pytest-asyncio>=1.0.0", 215 | "pytest-playwright>=0.7.0", 216 | "ruff>=0.11.13", 217 | "pytest-xdist>=3.6.0", 218 | "pytest-flask>=1.3.0" 219 | ] 220 | -------------------------------------------------------------------------------- /src/malla/database/connection.py: -------------------------------------------------------------------------------- 1 | """ 2 | Database connection management for Meshtastic Mesh Health Web UI. 
3 | """ 4 | 5 | import logging 6 | import os 7 | import sqlite3 8 | 9 | # Prefer configuration loader over environment variables 10 | from malla.config import get_config 11 | 12 | logger = logging.getLogger(__name__) 13 | 14 | 15 | def get_db_connection() -> sqlite3.Connection: 16 | """ 17 | Get a connection to the SQLite database with proper concurrency configuration. 18 | 19 | Returns: 20 | sqlite3.Connection: Database connection with row factory set and WAL mode enabled 21 | """ 22 | # Resolve DB path: 23 | # 1. Explicit override via `MALLA_DATABASE_FILE` env-var (handy for scripts) 24 | # 2. Value from YAML configuration 25 | # 3. Fallback to hard-coded default 26 | 27 | db_path: str = ( 28 | os.getenv("MALLA_DATABASE_FILE") 29 | or get_config().database_file 30 | or "meshtastic_history.db" 31 | ) 32 | 33 | try: 34 | conn = sqlite3.connect( 35 | db_path, timeout=30.0 36 | ) # 30 second timeout for busy database 37 | conn.row_factory = sqlite3.Row # Enable column access by name 38 | 39 | # Configure SQLite for better concurrency 40 | cursor = conn.cursor() 41 | 42 | # Enable WAL mode for better concurrent read/write performance 43 | cursor.execute("PRAGMA journal_mode=WAL") 44 | 45 | # Set synchronous to NORMAL for better performance while maintaining safety 46 | cursor.execute("PRAGMA synchronous=NORMAL") 47 | 48 | # Set busy timeout to handle concurrent access 49 | cursor.execute("PRAGMA busy_timeout=30000") # 30 seconds 50 | 51 | # Enable foreign key constraints 52 | cursor.execute("PRAGMA foreign_keys=ON") 53 | 54 | # Optimize for read performance 55 | cursor.execute("PRAGMA cache_size=10000") # 10MB cache 56 | cursor.execute("PRAGMA temp_store=MEMORY") 57 | 58 | # ------------------------------------------------------------------ 59 | # Lightweight schema migrations – run once per connection. 
def init_database() -> None:
    """
    Initialize the database connection and verify it's accessible.

    Called during application startup.  Resolution order for the DB path:

    1. Explicit override via the ``MALLA_DATABASE_FILE`` env-var (handy for scripts)
    2. Value from the YAML configuration
    3. Fallback to the hard-coded default ``meshtastic_history.db``

    Failures are logged but deliberately not raised: the database might not
    exist yet or may be created by another process.
    """
    db_path: str = (
        os.getenv("MALLA_DATABASE_FILE")
        or get_config().database_file
        or "meshtastic_history.db"
    )

    logger.info(f"Initializing database connection to: {db_path}")

    try:
        # Test the connection
        conn = get_db_connection()

        # Test a simple query to verify the database is accessible
        cursor = conn.cursor()
        cursor.execute("SELECT COUNT(*) FROM sqlite_master WHERE type='table'")
        table_count = cursor.fetchone()[0]

        # Check and log the journal mode (WAL vs. rollback journal affects
        # concurrent reader/writer behaviour)
        cursor.execute("PRAGMA journal_mode")
        journal_mode = cursor.fetchone()[0]

        conn.close()

        logger.info(
            f"Database connection successful - found {table_count} tables, journal_mode: {journal_mode}"
        )

    except Exception as e:
        logger.error(f"Database initialization failed: {e}")
        # Don't raise the exception - let the app start anyway
        # The database might not exist yet or be created by another process


# ----------------------------------------------------------------------
# Internal helpers
# ----------------------------------------------------------------------


# Migration names already applied in this Python process; consulted first so
# that repeated connections don't re-run PRAGMA table_info on every call.
_SCHEMA_MIGRATIONS_DONE: set[str] = set()


def _ensure_schema_migrations(cursor: sqlite3.Cursor) -> None:
    """Run any idempotent schema updates that the application depends on.

    Currently this checks that ``node_info`` has a ``primary_channel`` column
    (added in April 2024) so queries that reference it do not fail when the
    database was created with an older version of the schema.

    The function is **safe** to run repeatedly - each migration is attempted
    only once per Python process, a missing ``node_info`` table is treated as
    "nothing to migrate yet", and the *duplicate column* error that can occur
    when several processes race is swallowed.

    Args:
        cursor: An open cursor on the application database.
    """
    # Quickly short-circuit if we've already handled migrations in this
    # process.  No ``global`` statement is needed: the set is mutated in
    # place via .add(), never rebound.
    if "primary_channel" in _SCHEMA_MIGRATIONS_DONE:
        return

    try:
        # Check whether the column already exists
        cursor.execute("PRAGMA table_info(node_info)")
        columns = [row[1] for row in cursor.fetchall()]

        if not columns:
            # The table doesn't exist yet (fresh database) - nothing to
            # migrate.  Don't mark the migration done so it is retried once
            # the writer process has created the table.
            return

        if "primary_channel" not in columns:
            cursor.execute("ALTER TABLE node_info ADD COLUMN primary_channel TEXT")
            cursor.execute(
                "CREATE INDEX IF NOT EXISTS idx_node_primary_channel ON node_info(primary_channel)"
            )
            logging.info(
                "Added primary_channel column to node_info table via auto-migration"
            )

        _SCHEMA_MIGRATIONS_DONE.add("primary_channel")
    except sqlite3.OperationalError as exc:
        # Ignore errors about duplicate columns in race situations - another
        # process may have altered the table first.
        if "duplicate column name" in str(exc).lower():
            _SCHEMA_MIGRATIONS_DONE.add("primary_channel")
        else:
            raise
Test that invalid node IDs are handled gracefully 50 | response = client.get("/api/traceroute/data?page=1&limit=10&from_node=invalid") 51 | assert response.status_code == 200 52 | data = response.get_json() 53 | # Should return all data since invalid filter is ignored 54 | assert data["total_count"] == total_without_filters 55 | 56 | @pytest.mark.integration 57 | def test_traceroute_data_api_time_filters(self, client): 58 | """Test that time filters work correctly in the traceroute data API.""" 59 | # Test with future time range - should return no results 60 | future_start = datetime.now() + timedelta(days=1) 61 | future_end = datetime.now() + timedelta(days=2) 62 | 63 | response = client.get( 64 | f"/api/traceroute/data?page=1&limit=10&start_time={future_start.isoformat()}&end_time={future_end.isoformat()}" 65 | ) 66 | assert response.status_code == 200 67 | data = response.get_json() 68 | assert data["total_count"] == 0 69 | assert len(data["data"]) == 0 70 | 71 | # Test with past time range 72 | past_start = datetime.now() - timedelta(days=30) 73 | past_end = datetime.now() - timedelta(days=29) 74 | 75 | response = client.get( 76 | f"/api/traceroute/data?page=1&limit=10&start_time={past_start.isoformat()}&end_time={past_end.isoformat()}" 77 | ) 78 | assert response.status_code == 200 79 | data = response.get_json() 80 | # Should return valid response (may or may not have data depending on test database) 81 | assert "data" in data 82 | assert "total_count" in data 83 | 84 | @pytest.mark.integration 85 | def test_traceroute_data_api_gateway_filter(self, client): 86 | """Test that gateway filter works correctly in the traceroute data API.""" 87 | # Test with non-existent gateway 88 | response = client.get( 89 | "/api/traceroute/data?page=1&limit=10&gateway_id=!nonexist" 90 | ) 91 | assert response.status_code == 200 92 | data = response.get_json() 93 | assert data["total_count"] == 0 94 | assert len(data["data"]) == 0 95 | 96 | @pytest.mark.integration 97 | def 
test_traceroute_data_api_return_path_filter(self, client): 98 | """Test that return_path_only filter works correctly.""" 99 | # Test return_path_only filter 100 | response = client.get( 101 | "/api/traceroute/data?page=1&limit=10&return_path_only=true" 102 | ) 103 | assert response.status_code == 200 104 | data = response.get_json() 105 | # Should return valid response structure 106 | assert "data" in data 107 | assert "total_count" in data 108 | 109 | @pytest.mark.integration 110 | def test_traceroute_data_api_combined_filters(self, client): 111 | """Test that multiple filters work together.""" 112 | # Test multiple filters combined 113 | response = client.get( 114 | "/api/traceroute/data?page=1&limit=10&from_node=999999999&gateway_id=!nonexist&return_path_only=true" 115 | ) 116 | assert response.status_code == 200 117 | data = response.get_json() 118 | assert data["total_count"] == 0 119 | assert len(data["data"]) == 0 120 | 121 | @pytest.mark.integration 122 | def test_traceroute_data_api_response_structure(self, client): 123 | """Test that the API response has the correct structure.""" 124 | response = client.get("/api/traceroute/data?page=1&limit=5") 125 | assert response.status_code == 200 126 | data = response.get_json() 127 | 128 | # Check response structure 129 | assert "data" in data 130 | assert "total_count" in data 131 | assert "page" in data 132 | assert "limit" in data 133 | assert "total_pages" in data 134 | 135 | # Check that data is a list 136 | assert isinstance(data["data"], list) 137 | 138 | # If there's data, check the structure of individual items 139 | if data["data"]: 140 | item = data["data"][0] 141 | expected_fields = [ 142 | "id", 143 | "timestamp", 144 | "from_node", 145 | "from_node_id", 146 | "to_node", 147 | "to_node_id", 148 | "route_nodes", 149 | "route_names", 150 | "gateway", 151 | "rssi", 152 | "snr", 153 | "hops", 154 | "is_grouped", 155 | ] 156 | for field in expected_fields: 157 | assert field in item, f"Missing field: 
{field}" 158 | -------------------------------------------------------------------------------- /tests/integration/test_api_text_decoding.py: -------------------------------------------------------------------------------- 1 | """Test text message decoding functionality in API endpoints.""" 2 | 3 | 4 | class TestAPITextDecoding: 5 | """Test text message decoding in API responses.""" 6 | 7 | def test_text_message_decoding_with_real_data(self, client): 8 | """Test that text messages are properly decoded from raw_payload.""" 9 | # This test uses the test database with fixture data 10 | response = client.get("/api/packets/data?portnum=TEXT_MESSAGE_APP&limit=5") 11 | assert response.status_code == 200 12 | 13 | data = response.get_json() 14 | assert "data" in data 15 | 16 | # If we have text messages, verify they have proper structure 17 | if data["data"]: # Only test if we have data 18 | for packet in data["data"]: 19 | assert packet.get("portnum_name") == "TEXT_MESSAGE_APP" 20 | assert "text_content" in packet 21 | assert "channel" in packet 22 | 23 | # text_content should be either a string or None 24 | if packet["text_content"] is not None: 25 | assert isinstance(packet["text_content"], str) 26 | # If truncated, should end with "..." 
27 | if len(packet["text_content"]) == 100: 28 | assert packet["text_content"].endswith("...") 29 | else: 30 | # If no text messages in database, that's still a valid test result 31 | # The important thing is that the API responds correctly 32 | pass 33 | 34 | def test_non_text_messages_have_null_text_content(self, client): 35 | """Test that non-text messages have null text_content.""" 36 | # Get non-text messages 37 | response = client.get("/api/packets/data?portnum=POSITION_APP&limit=5") 38 | assert response.status_code == 200 39 | 40 | data = response.get_json() 41 | assert "data" in data 42 | 43 | # Position packets should not have text content 44 | if data["data"]: # Only test if we have data 45 | for packet in data["data"]: 46 | if packet.get("portnum_name") == "POSITION_APP": 47 | assert "text_content" in packet 48 | assert packet["text_content"] is None 49 | 50 | def test_channel_information_always_present(self, client): 51 | """Test that channel information is always present in API response.""" 52 | response = client.get("/api/packets/data?limit=10") 53 | assert response.status_code == 200 54 | 55 | data = response.get_json() 56 | assert "data" in data 57 | 58 | # All packets should have channel information 59 | if data["data"]: # Only test if we have data 60 | for packet in data["data"]: 61 | assert "channel" in packet 62 | assert isinstance(packet["channel"], str) 63 | assert len(packet["channel"]) > 0 64 | 65 | def test_sqlite_row_to_dict_conversion(self, client): 66 | """Test that sqlite3.Row objects are properly converted to dicts.""" 67 | # This tests the specific bug that was causing the 500 error 68 | # The bug was calling packet.get() on a sqlite3.Row object 69 | 70 | # Make a request that would trigger the grouped packets code path 71 | response = client.get("/api/packets/data?group_packets=true&limit=5") 72 | assert response.status_code == 200 73 | 74 | data = response.get_json() 75 | assert "data" in data 76 | 77 | # If we have grouped packets, 
they should have proper structure 78 | for packet in data["data"]: 79 | if packet.get("is_grouped"): 80 | assert "text_content" in packet 81 | assert "channel" in packet 82 | 83 | def test_api_backwards_compatibility(self, client): 84 | """Test that the old /api/packets endpoint still works.""" 85 | response = client.get("/api/packets?limit=5") 86 | assert response.status_code == 200 87 | 88 | data = response.get_json() 89 | assert "packets" in data 90 | 91 | # Old endpoint should not have raw_payload in response (to avoid JSON serialization issues) 92 | for packet in data["packets"]: 93 | assert "raw_payload" not in packet 94 | 95 | def test_text_message_content_truncation(self): 96 | """Test that long text messages are properly truncated.""" 97 | # Create a test scenario with a long message 98 | long_text = "A" * 150 # 150 characters 99 | 100 | # Test the truncation logic directly 101 | from malla.database.repositories import PacketRepository 102 | 103 | test_packet = { 104 | "portnum_name": "TEXT_MESSAGE_APP", 105 | "raw_payload": long_text.encode("utf-8"), 106 | } 107 | 108 | result = PacketRepository._decode_text_content(test_packet) 109 | assert result is not None 110 | assert len(result) == 100 # Should be truncated to 100 chars 111 | assert result.endswith("...") # Should end with ellipsis 112 | assert result.startswith("AAA") # Should start with original content 113 | 114 | def test_text_message_encoding_edge_cases(self): 115 | """Test text message decoding with various encoding scenarios.""" 116 | from malla.database.repositories import PacketRepository 117 | 118 | # Test with bytes 119 | test_packet_bytes = { 120 | "portnum_name": "TEXT_MESSAGE_APP", 121 | "raw_payload": "Hello, world! 🌍".encode(), 122 | } 123 | result = PacketRepository._decode_text_content(test_packet_bytes) 124 | assert result == "Hello, world! 
🌍" 125 | 126 | # Test with string (should pass through) 127 | test_packet_string = { 128 | "portnum_name": "TEXT_MESSAGE_APP", 129 | "raw_payload": "Hello, string!", 130 | } 131 | result = PacketRepository._decode_text_content(test_packet_string) 132 | assert result == "Hello, string!" 133 | 134 | # Test with non-text packet type 135 | test_packet_position = { 136 | "portnum_name": "POSITION_APP", 137 | "raw_payload": b"some binary data", 138 | } 139 | result = PacketRepository._decode_text_content(test_packet_position) 140 | assert result is None 141 | 142 | # Test with missing raw_payload 143 | test_packet_no_payload = { 144 | "portnum_name": "TEXT_MESSAGE_APP", 145 | "raw_payload": None, 146 | } 147 | result = PacketRepository._decode_text_content(test_packet_no_payload) 148 | assert result is None 149 | -------------------------------------------------------------------------------- /src/malla/static/js/relay_node_analysis.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Relay Node Analysis Component 3 | * Displays relay_node statistics for gateway nodes with candidate source nodes 4 | */ 5 | 6 | class RelayNodeAnalysis { 7 | constructor(nodeId) { 8 | this.nodeId = nodeId; 9 | this.data = null; 10 | } 11 | 12 | /** 13 | * Initialize and load the relay node analysis 14 | */ 15 | async initialize() { 16 | const cardContainer = document.getElementById('relay-node-analysis-card'); 17 | 18 | if (!cardContainer) { 19 | return; 20 | } 21 | 22 | try { 23 | await this.loadData(); 24 | } catch (error) { 25 | console.error('Error initializing relay node analysis:', error); 26 | } 27 | } 28 | 29 | /** 30 | * Load relay node analysis data from API 31 | */ 32 | async loadData() { 33 | const cardContainer = document.getElementById('relay-node-analysis-card'); 34 | const tableContainer = document.getElementById('relay-node-analysis-table'); 35 | const loadingIndicator = document.getElementById('relay-node-analysis-loading'); 36 
        // Remaining DOM handles used by loadData() (method opens in the
        // preceding section of the file).
        const contentDiv = document.getElementById('relay-node-analysis-content');

        if (!cardContainer || !tableContainer) {
            return;
        }

        try {
            // Show card and loading indicator
            cardContainer.style.display = 'block';
            loadingIndicator.style.display = 'block';
            contentDiv.style.display = 'none';

            const response = await fetch(`/api/node/${this.nodeId}/relay-node-analysis?limit=50`);
            if (!response.ok) {
                throw new Error(`HTTP error! status: ${response.status}`);
            }
            const data = await response.json();

            // The API reports errors in-band as {error: "..."}.
            if (data.error) {
                throw new Error(data.error);
            }

            this.data = data;

            // Hide loading, show content
            loadingIndicator.style.display = 'none';
            contentDiv.style.display = 'block';

            if (!data.relay_node_stats || data.relay_node_stats.length === 0) {
                this.showNoDataMessage(tableContainer);
                return;
            }

            // Render the table
            this.renderTable(data.relay_node_stats, tableContainer);

        } catch (error) {
            console.error('Error loading relay node analysis:', error);
            this.showErrorMessage(error, cardContainer, loadingIndicator, contentDiv);
        }
    }

    /**
     * Render the relay node analysis table.
     * NOTE(review): the HTML markup inside the template literals below
     * (links, badges, table tags) appears stripped by the text extraction of
     * this copy; confirm against version control before editing.
     *
     * @param {Array} stats rows from the relay-node-analysis API
     * @param {HTMLElement} container table element whose tbody is populated
     */
    renderTable(stats, container) {
        const tbody = container.querySelector('tbody');
        if (!tbody) return;

        tbody.innerHTML = '';

        stats.forEach(stat => {
            const row = document.createElement('tr');

            // Relay node column
            const relayCell = document.createElement('td');
            relayCell.innerHTML = `${stat.relay_hex}`;
            row.appendChild(relayCell);

            // Count column
            const countCell = document.createElement('td');
            countCell.innerHTML = `${stat.count}`;
            row.appendChild(countCell);

            // Avg RSSI column ("-" when the API reported no value)
            const rssiCell = document.createElement('td');
            if (stat.avg_rssi !== null && stat.avg_rssi !== undefined) {
                const rssiValue = stat.avg_rssi.toFixed(1);
                rssiCell.innerHTML = `${rssiValue} dBm`;
            } else {
                rssiCell.innerHTML = '-';
            }
            row.appendChild(rssiCell);

            // Avg SNR column ("-" when the API reported no value)
            const snrCell = document.createElement('td');
            if (stat.avg_snr !== null && stat.avg_snr !== undefined) {
                const snrValue = stat.avg_snr.toFixed(1);
                snrCell.innerHTML = `${snrValue} dB`;
            } else {
                snrCell.innerHTML = '-';
            }
            row.appendChild(snrCell);

            // Candidates column: 0-hop nodes whose id matches the relay byte
            const candidatesCell = document.createElement('td');
            if (stat.candidates && stat.candidates.length > 0) {
                const candidateLinks = stat.candidates.map(candidate => {
                    return `${candidate.node_name} (${candidate.last_byte})`;
                }).join(', ');
                candidatesCell.innerHTML = candidateLinks;
            } else {
                candidatesCell.innerHTML = 'No matching 0-hop nodes';
            }
            row.appendChild(candidatesCell);

            tbody.appendChild(row);
        });
    }

    /**
     * Show no data message in the table body.
     */
    showNoDataMessage(container) {
        const tbody = container.querySelector('tbody');
        if (!tbody) return;

        tbody.innerHTML = `


                    No relay node data available for this gateway.
                    This node may not have reported any packets with relay_node information.


        `;
    }

    /**
     * Show error message in the table body and reveal the content area.
     */
    showErrorMessage(error, cardContainer, loadingIndicator, contentDiv) {
        loadingIndicator.style.display = 'none';
        contentDiv.style.display = 'block';

        const tableContainer = document.getElementById('relay-node-analysis-table');
        const tbody = tableContainer.querySelector('tbody');
        if (!tbody) return;

        tbody.innerHTML = `


                    Error loading relay node analysis: ${error.message}


        `;
    }
}

// Initialize when DOM is loaded; the node id is passed via a data attribute
// on the including <script> tag.
document.addEventListener('DOMContentLoaded', function() {
    const scriptTag = document.querySelector('script[data-node-id]');
    if (scriptTag) {
        const nodeId = scriptTag.getAttribute('data-node-id');
        const relayNodeAnalysis = new RelayNodeAnalysis(nodeId);
        relayNodeAnalysis.initialize();
    }
});
// -------------------------------------------------------------------------
// tests/integration/test_node_routes.py (next file in this bundle)
// -------------------------------------------------------------------------
// """
// Integration tests for node routes.
//
// Tests the node detail endpoint with fixture data to ensure it renders properly.
class TestNodeRoutes:
    """Test node-related routes."""

    @pytest.mark.integration
    def test_nodes_page_loads(self, client):
        """Test that the nodes page loads successfully."""
        resp = client.get("/nodes")
        assert resp.status_code == 200
        assert b"Nodes" in resp.data

    @pytest.mark.integration
    def test_node_detail_with_valid_node_id(self, client):
        """Test node detail page with a valid node ID from fixture data."""
        node_id = 1128074276  # Test Gateway Alpha, 0x433d0c24

        resp = client.get(f"/node/{node_id}")
        assert resp.status_code == 200

        # Expected fixture content must be rendered on the page.
        assert b"Test Gateway Alpha" in resp.data
        assert b"Node Details" in resp.data
        assert b"!433d0c24" in resp.data  # Hex ID
        assert b"TBEAM" in resp.data  # Hardware model
        assert b"ROUTER" in resp.data  # Role

    @pytest.mark.integration
    def test_node_detail_with_hex_node_id(self, client):
        """Test node detail page with a hex node ID."""
        # Bang-prefixed hex form of Test Gateway Alpha.
        resp = client.get("/node/!433d0c24")
        assert resp.status_code == 200

        assert b"Test Gateway Alpha" in resp.data
        assert b"Node Details" in resp.data

    @pytest.mark.integration
    def test_node_detail_with_decimal_hex_node_id(self, client):
        """Test node detail page with a decimal hex node ID."""
        # Bare hex form (no "!" prefix) of the same node.
        resp = client.get("/node/433d0c24")
        assert resp.status_code == 200

        assert b"Test Gateway Alpha" in resp.data
        assert b"Node Details" in resp.data

    @pytest.mark.integration
    def test_node_detail_with_mobile_node(self, client):
        """Test node detail page with Test Mobile Beta from fixture data."""
        node_id = 1128074277  # Test Mobile Beta, 0x433d0c25

        resp = client.get(f"/node/{node_id}")
        assert resp.status_code == 200

        assert b"Test Mobile Beta" in resp.data
        assert b"!433d0c25" in resp.data
        assert b"HELTEC_V3" in resp.data
        assert b"CLIENT" in resp.data

    @pytest.mark.integration
    def test_node_detail_with_repeater_node(self, client):
        """Test node detail page with Test Repeater Gamma from fixture data."""
        node_id = 1128074278  # Test Repeater Gamma, 0x433d0c26

        resp = client.get(f"/node/{node_id}")
        assert resp.status_code == 200

        assert b"Test Repeater Gamma" in resp.data
        assert b"!433d0c26" in resp.data
        assert b"TBEAM" in resp.data
        assert b"REPEATER" in resp.data

    @pytest.mark.integration
    def test_node_detail_with_nonexistent_node(self, client):
        """Test node detail page with a non-existent node ID."""
        # This id doesn't appear anywhere in the fixture data.
        resp = client.get("/node/999999999")
        assert resp.status_code == 404
        assert b"Node not found" in resp.data

    @pytest.mark.integration
    def test_node_detail_with_invalid_node_id(self, client):
        """Test node detail page with an invalid node ID format."""
        resp = client.get("/node/invalid_id")
        assert resp.status_code == 400
        assert b"Invalid node ID format" in resp.data

    @pytest.mark.integration
    def test_node_detail_contains_required_sections(self, client):
        """Test that node detail page contains all required sections."""
        # Test Gateway Alpha has packet data in the fixtures.
        node_id = 1128074276

        resp = client.get(f"/node/{node_id}")
        assert resp.status_code == 200

        page = resp.data.decode("utf-8")

        # Basic information section
        assert "Node Information" in page
        assert "Total Packets" in page
        assert "Destinations" in page
        assert "Avg RSSI" in page

        # Node metrics widgets
        assert "metric-value" in page

        # Breadcrumb navigation
        assert "breadcrumb" in page
        assert "Home" in page
        assert "Nodes" in page

    @pytest.mark.integration
    def test_node_detail_packet_data_display(self, client):
        """Test that node detail page displays packet data correctly."""
        node_id = 1128074276  # Test Gateway Alpha

        resp = client.get(f"/node/{node_id}")
        assert resp.status_code == 200

        page = resp.data.decode("utf-8")

        # Packet counters should be rendered.
        assert "Total Packets" in page

        # The protocol breakdown table only appears when packets exist.
        if "Protocol Usage" in page:
            assert "Protocol" in page
            assert "Count" in page

    @pytest.mark.integration
    def test_node_detail_navigation_links(self, client):
        """Test that node detail page contains proper navigation links."""
        node_id = 1128074276  # Test Gateway Alpha

        resp = client.get(f"/node/{node_id}")
        assert resp.status_code == 200

        page = resp.data.decode("utf-8")

        # Quick action link into the packets view, pre-filtered to this node.
        assert "View All Packets" in page
        assert f"/packets?from_node={node_id}" in page

        # Same for the traceroute view.
        assert "View Traceroutes" in page
        assert f"/traceroute?from_node={node_id}" in page
    def _serve_template(self, template_name, context=None):
        """Serve a template with minimal base HTML.

        Reads the real template from ``src/malla/templates`` and strips the
        Jinja ``extends``/``block`` scaffolding so the page renders without
        the full application base template.

        Args:
            template_name: file name relative to the templates directory.
            context: optional template variables; defaults to empty.

        Returns:
            Rendered HTML, or a ``(message, 404)`` tuple when the template
            file does not exist.
        """
        if context is None:
            context = {}

        # Read the actual template file
        template_path = os.path.join(
            os.path.dirname(__file__), f"../../src/malla/templates/{template_name}"
        )

        if not os.path.exists(template_path):
            return f"Template {template_name} not found", 404

        with open(template_path) as f:
            template_content = f.read()

        # Replace the extends directive with a minimal base.
        # NOTE(review): the HTML markup of this literal (doctype, head,
        # script/style includes) was stripped by the text extraction of this
        # copy - only the title text survived. Restore from version control
        # before editing this string.
        base_html = """




    Test Application






"""

        template_content = template_content.replace(
            '{% extends "base.html" %}', base_html
        )
        template_content = template_content.replace("{% block content %}", "")
        # NOTE(review): the closing-markup replacement string below was also
        # stripped to near-empty in this copy - confirm the original.
        template_content = template_content.replace(
            "{% endblock %}", "
"
        )

        return render_template_string(template_content, **context)

    def _register_custom_routes(self):
        """Register custom routes added via add_route/add_api_route."""
        # Plain page routes registered with their handler as-is.
        for path, (handler, methods) in self.custom_routes.items():
            self.app.add_url_rule(
                path,
                endpoint=f"custom_{path.replace('/', '_')}",
                view_func=handler,
                methods=methods,
            )

        for path, (data_provider, methods) in self.custom_api_routes.items():

            # Factory binds `provider` per-iteration so each handler keeps
            # its own provider (avoids the late-binding closure pitfall).
            def make_api_handler(provider):
                def api_handler():
                    if callable(provider):
                        data = provider()
                    else:
                        data = provider
                    return jsonify(data)

                return api_handler

            self.app.add_url_rule(
                path,
                endpoint=f"api_{path.replace('/', '_')}",
                view_func=make_api_handler(data_provider),
                methods=methods,
            )

    def start(self):
        """Start the test server in a background daemon thread and wait
        until it answers a probe request; raises if it does not."""
        # Register custom routes before starting
        self._register_custom_routes()

        def run_server():
            self.app.run(
                host="127.0.0.1", port=self.port, debug=False, use_reloader=False
            )

        self.server_thread = threading.Thread(target=run_server, daemon=True)
        self.server_thread.start()

        # Wait for server to start
        time.sleep(2)

        # Verify server is running
        import requests

        try:
            response = requests.get(
                f"http://127.0.0.1:{self.port}/api/traceroute/graph", timeout=5
            )
            if response.status_code != 200:
                raise Exception(
                    f"Test server not responding correctly: {response.status_code}"
                )
        except Exception as e:
            raise Exception(f"Failed to start test server: {e}") from e

    def stop(self):
        """Stop the test server (no-op)."""
        # Flask development server doesn't have a clean shutdown method
        # The daemon thread will be cleaned up when the main process exits
        pass
@pytest.fixture(scope="session")
def test_server():
    """Session-scoped generic test server shared by the E2E suite."""
    srv = GenericTestServer()
    srv.start()
    yield srv
    srv.stop()


@pytest.fixture(scope="session")
def test_server_url(test_server):
    """Base URL of the running generic test server."""
    return "http://127.0.0.1:" + str(test_server.port)


# Convenience fixtures for specific use cases
@pytest.fixture(scope="session")
def traceroute_graph_server():
    """Session-scoped server preconfigured for traceroute-graph testing."""
    srv = GenericTestServer()

    # Extra routes specific to traceroute graph testing can be added here,
    # e.g.: srv.add_api_route('/api/custom/endpoint', lambda: {'custom': 'data'})

    srv.start()
    yield srv
    srv.stop()


@pytest.fixture(scope="session")
def traceroute_graph_url(traceroute_graph_server):
    """URL of the traceroute-graph page on the dedicated test server."""
    return (
        "http://127.0.0.1:"
        + str(traceroute_graph_server.port)
        + "/traceroute-graph"
    )
"""
Fixture data for traceroute graph testing.
"""
import time

# Hand-crafted topology consumed by the traceroute-graph UI tests:
# eight nodes (one deliberately without a location, to exercise mixed
# located/unlocated rendering), ten direct RF links, and two indirect
# multi-hop connections.  All timestamps are offsets from import time.
SAMPLE_GRAPH_DATA = {
    "nodes": [
        {
            "id": 1819569748,
            "name": "Tomate Base 🍅🌞📡 (🍅)",
            "avg_snr": -58.9,
            "connections": 6,
            "packet_count": 1250,
            "size": 15,
            "last_seen": int(time.time()) - 3600,
            "location": {"latitude": 40.7128, "longitude": -74.0060, "altitude": 10},
        },
        {
            "id": 2147483647,
            "name": "Central Hub Node",
            "avg_snr": -12.5,
            "connections": 12,
            "packet_count": 2500,
            "size": 20,
            "last_seen": int(time.time()) - 1800,
            "location": {"latitude": 40.7589, "longitude": -73.9851, "altitude": 25},
        },
        {
            "id": 3735928559,
            "name": "Edge Node Alpha",
            "avg_snr": -25.3,
            "connections": 4,
            "packet_count": 800,
            "size": 12,
            "last_seen": int(time.time()) - 900,
            "location": {"latitude": 40.6782, "longitude": -73.9442, "altitude": 5},
        },
        {
            "id": 0xDDDDDDDD,
            "name": "Edge Node Beta (TEND)",
            "avg_snr": -18.7,
            "connections": 3,
            "packet_count": 650,
            "size": 10,
            "last_seen": int(time.time()) - 600,
            "location": {"latitude": 40.7505, "longitude": -73.9934, "altitude": 15},
        },
        {
            "id": 1234567890,
            "name": "Relay Station Gamma",
            "avg_snr": -35.2,
            "connections": 8,
            "packet_count": 1800,
            "size": 16,
            "last_seen": int(time.time()) - 300,
            "location": {"latitude": 40.7282, "longitude": -74.0776, "altitude": 30},
        },
        {
            "id": 987654321,
            "name": "Remote Node Delta",
            "avg_snr": -42.1,
            "connections": 2,
            "packet_count": 450,
            "size": 8,
            "last_seen": int(time.time()) - 7200,
            "location": {"latitude": 40.6892, "longitude": -74.0445, "altitude": 8},
        },
        {
            "id": 555666777,
            "name": "Mesh Node Epsilon",
            "avg_snr": -28.9,
            "connections": 7,
            "packet_count": 1100,
            "size": 14,
            "last_seen": int(time.time()) - 1200,
            "location": {"latitude": 40.7831, "longitude": -73.9712, "altitude": 20},
        },
        {
            "id": 111222333,
            "name": "Gateway Node Zeta",
            "avg_snr": -15.4,
            "connections": 9,
            "packet_count": 2200,
            "size": 18,
            "last_seen": int(time.time()) - 450,
            # This node intentionally has no location to test mixed scenarios
        },
    ],
    "links": [
        {
            "source": 1819569748,
            "target": 2147483647,
            "avg_snr": -15.2,
            "strength": 8,
            "packet_count": 450,
            "type": "direct",
            "last_seen": int(time.time()) - 300,
        },
        {
            "source": 2147483647,
            "target": 3735928559,
            "avg_snr": -22.1,
            "strength": 6,
            "packet_count": 320,
            "type": "direct",
            "last_seen": int(time.time()) - 600,
        },
        {
            "source": 2147483647,
            "target": 0xDDDDDDDD,
            "avg_snr": -18.5,
            "strength": 7,
            "packet_count": 380,
            "type": "direct",
            "last_seen": int(time.time()) - 400,
        },
        {
            "source": 1234567890,
            "target": 2147483647,
            "avg_snr": -25.8,
            "strength": 5,
            "packet_count": 280,
            "type": "direct",
            "last_seen": int(time.time()) - 800,
        },
        {
            "source": 987654321,
            "target": 1234567890,
            "avg_snr": -38.2,
            "strength": 3,
            "packet_count": 150,
            "type": "direct",
            "last_seen": int(time.time()) - 1200,
        },
        {
            "source": 555666777,
            "target": 2147483647,
            "avg_snr": -20.3,
            "strength": 6,
            "packet_count": 340,
            "type": "direct",
            "last_seen": int(time.time()) - 500,
        },
        {
            "source": 111222333,
            "target": 2147483647,
            "avg_snr": -12.7,
            "strength": 9,
            "packet_count": 520,
            "type": "direct",
            "last_seen": int(time.time()) - 200,
        },
        {
            "source": 3735928559,
            "target": 555666777,
            "avg_snr": -30.1,
            "strength": 4,
            "packet_count": 180,
            "type": "direct",
            "last_seen": int(time.time()) - 900,
        },
        {
            "source": 0xDDDDDDDD,
            "target": 111222333,
            "avg_snr": -16.9,
            "strength": 7,
            "packet_count": 410,
            "type": "direct",
            "last_seen": int(time.time()) - 350,
        },
        {
            "source": 1819569748,
            "target": 1234567890,
            "avg_snr": -32.4,
            "strength": 4,
            "packet_count": 220,
            "type": "direct",
            "last_seen": int(time.time()) - 700,
        },
    ],
    "indirect_connections": [
        {
            "source": 987654321,
            "target": 3735928559,
            "avg_snr": -45.2,
            "strength": 2,
            "path_count": 85,
            "hop_count": 3,
            "type": "indirect",
            "last_seen": int(time.time()) - 1500,
        },
        {
            "source": 987654321,
            "target": 111222333,
            "avg_snr": -42.8,
            "strength": 2,
            "path_count": 95,
            "hop_count": 3,
            "type": "indirect",
            "last_seen": int(time.time()) - 1800,
        },
    ],
    "stats": {"links_found": 10, "packets_with_rf_hops": 3250, "total_rf_hops": 28},
    "filters": {"hours": 24, "min_snr": -200, "include_indirect": True},
}


def get_sample_graph_data():
    """Return a fresh, independently mutable deep copy of SAMPLE_GRAPH_DATA.

    Each caller gets its own copy, so tests can mutate the result without
    affecting the shared module-level fixture.
    """
    from copy import deepcopy

    return deepcopy(SAMPLE_GRAPH_DATA)


# This module implements application-wide configuration handling.
2 | from __future__ import annotations 3 | 4 | import logging 5 | import os 6 | from dataclasses import dataclass, field 7 | from pathlib import Path 8 | from typing import Any 9 | 10 | import yaml 11 | 12 | logger = logging.getLogger(__name__) 13 | 14 | # --------------------------------------------------------------------------- 15 | # Configuration data model 16 | # --------------------------------------------------------------------------- 17 | 18 | 19 | @dataclass(slots=True) 20 | class AppConfig: 21 | """Application configuration loaded from YAML and environment variables.""" 22 | 23 | # Core UI settings 24 | name: str = "Malla" 25 | home_markdown: str = "" 26 | 27 | # Flask/server settings 28 | secret_key: str = "dev-secret-key-change-in-production" 29 | database_file: str = "meshtastic_history.db" 30 | host: str = "0.0.0.0" 31 | port: int = 5008 32 | debug: bool = False 33 | 34 | # MQTT capture settings 35 | mqtt_broker_address: str = "127.0.0.1" 36 | mqtt_port: int = 1883 37 | mqtt_username: str | None = None 38 | mqtt_password: str | None = None 39 | mqtt_topic_prefix: str = "msh" 40 | mqtt_topic_suffix: str = "/+/+/+/#" 41 | 42 | # Meshtastic channel default key (for optional packet decryption) 43 | # Supports comma-separated list of base64-encoded keys 44 | default_channel_key: str = "1PG7OiApB1nwvP+rz05pAQ==" 45 | 46 | # Logging 47 | log_level: str = "INFO" 48 | 49 | # Data cleanup settings 50 | # Number of hours after which to delete old data (0 = never delete) 51 | data_retention_hours: int = 0 52 | 53 | # OpenTelemetry settings 54 | otlp_endpoint: str | None = None 55 | 56 | # Internal attribute to remember the source file used 57 | _config_path: Path | None = field(default=None, repr=False, compare=False) 58 | 59 | def get_decryption_keys(self) -> list[str]: 60 | """Parse and return list of decryption keys from the configuration. 61 | 62 | Supports both single keys and comma-separated lists of keys. 63 | Empty keys are filtered out. 
64 | 65 | Returns: 66 | List of base64-encoded decryption keys 67 | """ 68 | if not self.default_channel_key: 69 | return [] 70 | 71 | # Split by comma and strip whitespace 72 | keys = [key.strip() for key in self.default_channel_key.split(",")] 73 | 74 | # Filter out empty keys 75 | return [key for key in keys if key] 76 | 77 | 78 | # --------------------------------------------------------------------------- 79 | # Loader helpers 80 | # --------------------------------------------------------------------------- 81 | 82 | 83 | _YAML_DEFAULT_PATH = "config.yaml" 84 | _ENV_PREFIX = "MALLA_" # Prefix for environment variable overrides 85 | 86 | 87 | def _resolve_type(t: Any) -> Any: # noqa: ANN001 88 | """Resolve **t** which may be a string forward-reference into a real type.""" 89 | 90 | if isinstance(t, str): 91 | # Basic builtin types are fine to eval() in this restricted context. 92 | builtins_map = {"bool": bool, "int": int, "float": float, "str": str} 93 | return builtins_map.get(t, str) 94 | return t 95 | 96 | 97 | def _coerce_value(value: str, target_type): # noqa: ANN001 98 | """Coerce *value* (a string from env) to *target_type* (which may be a string).""" 99 | 100 | target_type = _resolve_type(target_type) 101 | 102 | try: 103 | if target_type is bool: 104 | return value.lower() in {"1", "true", "yes", "on"} 105 | if target_type is int: 106 | return int(value) 107 | if target_type is float: 108 | return float(value) 109 | except ValueError: 110 | logger.warning( 111 | "Could not coerce environment variable '%s' to %s – using raw string", 112 | value, 113 | target_type, 114 | ) 115 | return value 116 | 117 | 118 | def load_config(config_path: str | os.PathLike | None = None) -> AppConfig: # noqa: C901 119 | """Load configuration in the following precedence order: 120 | 121 | 1. Defaults defined in :class:`AppConfig`. 122 | 2. YAML file (``config.yaml`` or path provided via *config_path* or the 123 | ``MALLA_CONFIG_FILE`` environment variable). 124 | 3. 
Environment variables prefixed with ``MALLA_`` (e.g. ``MALLA_NAME``) 125 | – case-insensitive. **This is the only supported override mechanism.** 126 | """ 127 | 128 | # Step 1 – start with the defaults from the dataclass converted to dict 129 | data: dict[str, object] = {} 130 | 131 | # Determine the YAML path to use (step 2) 132 | yaml_path = ( 133 | Path(config_path) # explicit argument wins 134 | if config_path is not None 135 | else Path(os.getenv("MALLA_CONFIG_FILE", _YAML_DEFAULT_PATH)) 136 | ) 137 | 138 | if yaml_path.is_file(): 139 | try: 140 | with yaml_path.open("r", encoding="utf-8") as fp: 141 | file_data = yaml.safe_load(fp) or {} 142 | if not isinstance(file_data, dict): 143 | logger.warning( 144 | "YAML config file %s must contain a mapping at top-level – ignoring", 145 | yaml_path, 146 | ) 147 | file_data = {} 148 | data.update(file_data) 149 | except Exception as exc: # noqa: BLE001 150 | logger.warning("Failed to read YAML config from %s: %s", yaml_path, exc) 151 | 152 | # Step 3 – look for env vars prefixed with MALLA_ 153 | for field_name, field_obj in AppConfig.__dataclass_fields__.items(): # type: ignore[attr-defined] 154 | env_key = f"{_ENV_PREFIX}{field_name}".upper() 155 | if env_key in os.environ: 156 | data[field_name] = _coerce_value(os.environ[env_key], field_obj.type) 157 | 158 | # Construct the config instance 159 | config = AppConfig(**data) # type: ignore[arg-type] 160 | config._config_path = yaml_path if yaml_path.is_file() else None 161 | 162 | logger.debug("Loaded application configuration: %s", config) 163 | return config 164 | 165 | 166 | # Convenience singleton to avoid re-loading throughout the process 167 | _config_singleton: AppConfig | None = None 168 | 169 | 170 | def get_config() -> AppConfig: 171 | """Return a singleton :class:`AppConfig` instance loaded with *load_config()*. 172 | Subsequent calls return the cached object. 
173 | """ 174 | 175 | global _config_singleton # noqa: PLW0603 176 | if _config_singleton is None: 177 | _config_singleton = load_config() 178 | return _config_singleton 179 | 180 | 181 | # --------------------------------------------------------------------------- 182 | # Helper for unit tests to override the cached singleton 183 | # --------------------------------------------------------------------------- 184 | 185 | 186 | def _override_config(new_cfg: AppConfig) -> None: # noqa: D401, ANN001 187 | """Force the global singleton to *new_cfg* (used internally by tests).""" 188 | 189 | global _config_singleton # noqa: PLW0603 190 | _config_singleton = new_cfg 191 | 192 | 193 | def _clear_config_cache() -> None: 194 | """Clear the global config singleton cache (used internally by tests).""" 195 | 196 | global _config_singleton # noqa: PLW0603 197 | _config_singleton = None 198 | --------------------------------------------------------------------------------