├── .github ├── dependabot.yml └── workflows │ ├── build.yml │ ├── codeql-analysis.yml │ └── test.yml ├── .gitignore ├── Dockerfile ├── LICENSE ├── README.md ├── exampleconf ├── conf.yaml ├── histogram.yaml ├── metric_example.yaml ├── mosquitto_sys_metrics.yaml └── switchstate.yaml ├── mqtt_exporter.py ├── requirements-dev.txt ├── requirements-frozen.txt ├── requirements.txt ├── tests ├── __init__.py ├── readme.md ├── test_data │ ├── test1 │ │ ├── conf.yaml │ │ └── mqtt_msg.csv │ ├── test2 │ │ ├── conf.yaml │ │ └── mqtt_msg.csv │ ├── test_counter_absolute │ │ ├── conf.yaml │ │ └── mqtt_msg.csv │ └── test_enum │ │ ├── conf.yaml │ │ └── mqtt_msg.csv ├── test_mqtt_exporter.py ├── test_prometheus_additions.py └── tmp_data │ └── .gitkeep ├── utils ├── __init__.py └── prometheus_additions.py └── version.py /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | 4 | - package-ecosystem: "github-actions" 5 | directory: "/" 6 | schedule: 7 | interval: "monthly" 8 | 9 | - package-ecosystem: "docker" 10 | directory: "/" 11 | schedule: 12 | interval: "monthly" 13 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Build and push Docker image 3 | 4 | on: 5 | push: 6 | tags: 7 | - '*' 8 | 9 | concurrency: 10 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} 11 | cancel-in-progress: true 12 | 13 | jobs: 14 | build: 15 | runs-on: ubuntu-latest 16 | steps: 17 | - name: Check out repo 18 | uses: actions/checkout@v4 19 | 20 | - name: Set up QEMU 21 | uses: docker/setup-qemu-action@v3 22 | 23 | - name: Set up Docker Buildx 24 | uses: docker/setup-buildx-action@v3 25 | 26 | - name: Get the tag name 27 | run: echo "TAG=${GITHUB_REF/refs\/tags\//}" >> $GITHUB_ENV 28 | 29 | - name: Login to DockerHub 30 | uses: docker/login-action@v3 31 | with: 32 | username: ${{ secrets.DOCKERHUB_USERNAME }} 33 | password: ${{ secrets.DOCKERHUB_TOKEN }} 34 | 35 | - name: Login to GitHub Container Registry 36 | uses: docker/login-action@v3 37 | with: 38 | registry: ghcr.io 39 | username: ${{ github.actor }} 40 | password: ${{ secrets.GITHUB_TOKEN }} 41 | 42 | - name: Build and push 43 | uses: docker/build-push-action@v6.18.0 44 | with: 45 | push: true 46 | platforms: linux/amd64,linux/arm64 47 | tags: | 48 | ${{ github.repository }}:latest 49 | ${{ github.repository }}:${{ env.TAG }} 50 | ghcr.io/${{ github.repository }}:latest 51 | ghcr.io/${{ github.repository }}:${{ env.TAG }} 52 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | name: "CodeQL" 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | # The branches below must be a subset of the branches above 8 | branches: [ master ] 9 | schedule: 10 | - cron: '30 14 * * 3' 11 | 12 | concurrency: 13 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} 14 | cancel-in-progress: true 15 | 16 | jobs: 17 | analyze: 18 | name: Analyze 19 | runs-on: ubuntu-latest 20 | permissions: 21 | actions: read 22 | contents: read 23 | security-events: write 24 | 25 | strategy: 26 | fail-fast: false 27 | matrix: 28 | language: [ 'python' ] 29 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ] 30 | # Learn more: 31 | # 
https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed 32 | 33 | steps: 34 | - name: Checkout repository 35 | uses: actions/checkout@v4 36 | 37 | # Initializes the CodeQL tools for scanning. 38 | - name: Initialize CodeQL 39 | uses: github/codeql-action/init@v3 40 | with: 41 | languages: ${{ matrix.language }} 42 | # If you wish to specify custom queries, you can do so here or in a config file. 43 | # By default, queries listed here will override any specified in a config file. 44 | # Prefix the list here with "+" to use these queries and those in the config file. 45 | # queries: ./path/to/local/query, your-org/your-repo/queries@main 46 | 47 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 48 | # If this step fails, then you should remove it and run the build manually (see below) 49 | - name: Autobuild 50 | uses: github/codeql-action/autobuild@v3 51 | 52 | # ℹ️ Command-line programs to run using the OS shell. 53 | # 📚 https://git.io/JvXDl 54 | 55 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines 56 | # and modify them (or add more) to build your code if your project 57 | # uses a compiled language 58 | 59 | #- run: | 60 | # make bootstrap 61 | # make release 62 | 63 | - name: Perform CodeQL Analysis 64 | uses: github/codeql-action/analyze@v3 65 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Test with pytest 2 | 3 | on: 4 | push: 5 | branches: 6 | - '*' 7 | pull_request: 8 | branches: [ master ] 9 | 10 | # Allows you to run this workflow manually from the Actions tab 11 | workflow_dispatch: 12 | 13 | concurrency: 14 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} 15 | cancel-in-progress: true 16 | 17 | jobs: 18 | build: 19 | runs-on: ubuntu-latest 20 | steps: 21 | - name: Check out repo 22 | uses: actions/checkout@v4 23 | 24 | - name: Set up Python 25 | uses: actions/setup-python@v5 26 | with: 27 | python-version: '3.11.5' 28 | cache: 'pip' 29 | 30 | - name: Install dependencies 31 | run: | 32 | python -m pip install --upgrade pip 33 | pip install -r requirements-frozen.txt 34 | 35 | - name: Test with pytest 36 | run: | 37 | pip install pytest 38 | pytest --verbose --capture=no --log-level=DEBUG -o log_cli=true 39 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # IntelliJ project files 2 | .idea 3 | out 4 | gen 5 | venv 6 | conf/* 7 | 8 | # Byte-compiled / optimized / DLL files 9 | __pycache__/ 10 | *.py[cod] 11 | *$py.class 12 | 13 | # pytest cache files 14 | .pytest_cache 15 | 16 | # VScode project files 17 | .vscode/ 18 | 19 | # tmp files 20 | **/.~* 21 | **/tmp_data/* 22 | !**/tmp_data/.gitkeep 23 | .DS_Store -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.13.3-alpine 2 | 3 | LABEL org.opencontainers.image.title=mqtt_exporter 4 | LABEL org.opencontainers.image.description="Prometheus exporter for MQTT." 
5 | LABEL org.opencontainers.image.vendor="Frederic Hemberger" 6 | LABEL org.opencontainers.image.licenses=MIT 7 | LABEL org.opencontainers.image.source=https://github.com/fhemberger/mqtt_exporter 8 | 9 | WORKDIR /usr/src/app 10 | 11 | RUN adduser --system --no-create-home --shell /usr/sbin/nologin mqtt_exporter 12 | COPY *.py requirements-frozen.txt ./ 13 | COPY utils ./utils 14 | RUN pip install --no-cache-dir -r requirements-frozen.txt 15 | 16 | USER mqtt_exporter 17 | 18 | 19 | EXPOSE 9344 20 | ENTRYPOINT [ "/usr/local/bin/python3", "-u", "/usr/src/app/mqtt_exporter.py" ] 21 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 Bendik Wang Andreassen 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Prometheus exporter for MQTT 2 | 3 | Configurable general purpose Prometheus exporter for MQTT. 4 | 5 | Subscribes to one or more MQTT topics, and lets you configure prometheus metrics based on pattern matching. 6 | 7 | [![Test with pytest](https://github.com/fhemberger/mqtt_exporter/actions/workflows/test.yml/badge.svg)](https://github.com/fhemberger/mqtt_exporter/actions/workflows/test.yml)[![CodeQL](https://github.com/fhemberger/mqtt_exporter/actions/workflows/codeql-analysis.yml/badge.svg)](https://github.com/fhemberger/mqtt_exporter/actions/workflows/codeql-analysis.yml)![Docker Pulls](https://img.shields.io/docker/pulls/fhemberger/mqtt_exporter) 8 | 9 | 10 | ## Features 11 | 12 | - Supported Metrics: 13 | - standard metrics 14 | - Gauge, Counter, Histogram, Summary 15 | - additional 16 | - **Counter (Absolute):** 17 | - Same as Counter, but working with absolute numbers received from MQTT. Which is far more common, than sending the diff in each publish. 18 | - e.g. a network counter or a rain sensor 19 | - **Enum:** 20 | - is a metric type not so common, details can be found in the [OpenMetrics docs](https://github.com/OpenObservability/OpenMetrics/blob/main/specification/OpenMetrics.md#stateset) and [Python client code](https://github.com/prometheus/client_python/blob/9a24236695c9ad47f9dc537a922a6d1333d8d093/prometheus_client/metrics.py#L640-L698). 
21 |       - Allows tracking a state via a known set of strings describing the state, e.g. `on/off` or `high/medium/low`
22 |       - Common sources would be a light switch or a door lock.
23 | - Comprehensive rewriting for topic, value/payload and labels
24 |   - similar to prometheus label rewrites
25 |   - regex allows almost every conversion
26 |   - e.g. to
27 |     - remove units or other strings from payload
28 |     - convert topic hierarchy into labels
29 |     - normalize labels
30 | - Check the example configs in `./exampleconf` and the configs in `./tests/test_data/`
31 | 
32 | 
33 | ## Usage
34 | 
35 | - Create a folder to hold the config (default: `conf/`)
36 | - Add metric config(s) in YAML format to the folder. Files are combined and read as a single config. (See `exampleconf/metric_example.yaml` for details)
37 | - Install dependencies with `pip3 install -r requirements-frozen.txt`
38 | - Run `./mqtt_exporter.py`
39 | 
40 | 
41 | ## Docker
42 | 
43 | For your convenience, there is also a Docker image available:
44 | 
45 | ```bash
46 | docker run -d \
47 |   -v "$(pwd)/myconfig:/usr/src/app/conf:ro" \
48 |   -p "9344:9344" \
49 |   ghcr.io/fhemberger/mqtt_exporter
50 | ```
51 | 
52 | If you want to mount your configuration to a different directory, add the `-c` flag:
53 | 
54 | ```bash
55 | docker run -d \
56 |   -v "$(pwd)/myconfig:/myconfig:ro" \
57 |   -p "9344:9344" \
58 |   ghcr.io/fhemberger/mqtt_exporter -c /myconfig
59 | ```
60 | 
61 | 
62 | ## Python dependencies
63 | 
64 | - paho-mqtt
65 | - prometheus-client
66 | - PyYAML
67 | - yamlreader
68 | 
69 | 
70 | ## Contribution
71 | 
72 | * Contribution is welcome. Fork and then open a PR.
73 | * Discussions happen in Issues.
74 | * Functional tests are written in `pytest` (see [tests/readme.md](tests/readme.md))
75 | * Code formatting uses [`autopep8`](https://pypi.org/project/autopep8/) with default settings. If you submit a PR to this repo, please make sure it follows these formatting guidelines.
76 | 
77 | 
78 | ## TODO
79 | 
80 | - Add persistence of metrics on restart
81 | - Forget/age out metrics that no longer receive updates
82 | 
--------------------------------------------------------------------------------
/exampleconf/conf.yaml:
--------------------------------------------------------------------------------
1 | # Config file for MQTT prometheus exporter
2 | 
3 | # Logging
4 | #logging:
5 | #  logfile: ''                # Optional default '' (stdout)
6 | #  level: 'info'              # Optional default 'info'
7 | 
8 | # MQTT All values default to paho.mqtt.client defaults
9 | #mqtt:
10 | #  host: 'mqtt.example.com'  # Optional default 'localhost'
11 | #  port: 1883                # Optional default '1883'
12 | #  keepalive: 60             # Optional
13 | #  auth:                     # Optional If included, username_pw_set() is called with user/password
14 | #    username: 'user'        # Required (when auth is present)
15 | #    password: 'pass'        # Optional
16 | #  tls:                      # Optional If included, tls_set() is called with the following:
17 | #    ca_certs:               # Optional
18 | #    certfile:               # Optional
19 | #    keyfile:                # Optional
20 | #    cert_reqs:              # Optional
21 | #    tls_version:            # Optional
22 | #    ciphers:                # Optional
23 | 
24 | # Prometheus
25 | #prometheus:
26 | #  exporter_port:            # Optional default 9344
27 | 
--------------------------------------------------------------------------------
/exampleconf/histogram.yaml:
--------------------------------------------------------------------------------
1 | # Histogram metric with buckets <= 0.5, 5, 10, +Inf
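# Note: bucket values are cumulative upper bounds ('le' labels) and the Prometheus
# client adds an implicit '+Inf' bucket, so e.g. a ping of 7 ms is counted in the
# 'le="10.0"' and 'le="+Inf"' buckets, but not in 'le="0.5"' or 'le="5.0"'.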
2 | - name: 'network_ping_ms'
3 |   help: 'ping response in ms'
4 |   type: 'histogram'
5 |   topic: 'network/+/+/ping'
6 |   parameters:
7 |     buckets:
8 |       - 0.5
9 |       - 5
10 |       - 10
11 |   label_configs:
12 |     - source_labels: ['__msg_topic__']
13 |       target_label: '__topic__'
14 |     - source_labels: ["__msg_topic__"]
15 |       regex: "network/([^/]+).*"
16 |       target_label: "network"
17 |       replacement: '\1'
18 |       action: "replace"
19 |     - source_labels: ["__msg_topic__"]
20 |       regex: "network/[^/]+/([^/]+).*"
21 |       target_label: "server"
22 |       replacement: '\1'
23 |       action: "replace"
--------------------------------------------------------------------------------
/exampleconf/metric_example.yaml:
--------------------------------------------------------------------------------
1 | # Example metric definition
2 | metrics:
3 |   - name: 'mqtt_example'    # Required (unique; if defined multiple times, only the last entry is kept)
4 |     help: 'MQTT example gauge'    # Required
5 |     type: 'gauge'    # Required ('gauge', 'counter', 'counter_absolute', 'summary', 'histogram' or 'enum')
6 |     expires: 60    # Optional time in seconds after the last update to remove the metric
7 |     #parameters:    # Optional parameters for certain metrics
8 |     #  buckets:     # Optional (Passed as 'buckets' argument to Histogram)
9 |     #    - .1
10 |     #    - 1.0
11 |     #    - 10.0
12 |     #  states:      # Optional (Passed as 'states' argument to Enum)
13 |     #    - on
14 |     #    - off
15 |     topic: 'example/topic/+'    # Required
16 | 
17 |     # Inspired by 'https://prometheus.io/docs/operating/configuration/#'
18 |     # "__msg_topic__" and "__value__" are populated with the msg topic and value, and "__topic__" is 'topic' from the config.
19 |     # Supported actions are: 'replace', 'keep' and 'drop'
20 |     # All labels starting with "__" will be removed, and "__topic__" and "__value__" are copied into "topic" and "value"
21 |     # after all label configs have been applied.
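    # Worked example (illustrative message, not part of the original config): a message on
    # 'example/topic/livingroom' with payload '21.5' matches 'example/topic/+' above and,
    # with the default label_config below, would be exported roughly as:
    #   mqtt_example{topic="example/topic/livingroom"} 21.5
    #   mqtt_example_last_received{topic="example/topic/+"} <timestamp in ms>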
22 | label_configs: # Optional 23 | - source_labels: ['__msg_topic__'] # Required (when label_configs is present) 24 | separator: '/' # Optional default ';' 25 | regex: '(.*)' # Optional default '(.*)' 26 | target_label: '__topic__' # Required (when label_configs is present and 'action' = 'replace') 27 | replacement: '\1' # Optional default '\1' 28 | action: 'replace' # Optional default 'replace' 29 | -------------------------------------------------------------------------------- /exampleconf/mosquitto_sys_metrics.yaml: -------------------------------------------------------------------------------- 1 | # Config file for Mosquitto broker system metrics 2 | 3 | # Metric definitions 4 | metrics: 5 | - name: 'mqtt_broker' 6 | help: 'System events from broker' 7 | type: 'gauge' 8 | topic: '$SYS/broker/#' 9 | label_configs: 10 | - source_labels: ['__msg_topic__'] 11 | target_label: '__topic__' 12 | - source_labels: ['__value__'] 13 | regex: '^(\d+([,.]\d*)?)$|^([,.]\d+)$' 14 | action: 'keep' 15 | 16 | - name: 'mqtt_broker_version' 17 | help: 'Mosquitto version (static)' 18 | type: 'gauge' 19 | topic: '$SYS/broker/version' 20 | label_configs: 21 | - source_labels: ['__msg_topic__'] 22 | target_label: '__topic__' 23 | - source_labels: ['__value__'] 24 | regex: '^\D+((?:\d+[\.]?)+)$' 25 | target_label: 'version' 26 | replacement: '\1' 27 | action: 'replace' 28 | - source_labels: ['__value__'] 29 | replacement: '1' 30 | target_label: '__value__' 31 | action: 'replace' 32 | 33 | - name: 'mqtt_broker_changeset' 34 | help: 'Mosquitto build changeset (static)' 35 | type: 'gauge' 36 | topic: '$SYS/broker/changeset' 37 | label_configs: 38 | - source_labels: ['__msg_topic__'] 39 | target_label: '__topic__' 40 | - source_labels: ['__value__'] 41 | target_label: 'changeset' 42 | action: 'replace' 43 | - source_labels: ['__value__'] 44 | replacement: '1' 45 | target_label: '__value__' 46 | action: 'replace' 47 | 48 | - name: 'mqtt_broker_timestamp' 49 | help: 'Mosquitto build timestamp (static)' 50 | type: 'gauge' 51 | topic: '$SYS/broker/timestamp' 52 | label_configs: 53 | - source_labels: ['__msg_topic__'] 54 | target_label: '__topic__' 55 | - source_labels: ['__value__'] 56 | target_label: 'timestamp' 57 | action: 'replace' 58 | - source_labels: ['__value__'] 59 | replacement: '1' 60 | target_label: '__value__' 61 | action: 'replace' 62 | -------------------------------------------------------------------------------- /exampleconf/switchstate.yaml: -------------------------------------------------------------------------------- 1 | # metric for a switch with the state on and off. 2 | # states are case sensitive and must match exactly 3 | # use label_config to rewrite other values, see below. 
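# For reference: the Prometheus Python client exposes an enum as one sample per state.
# Since this config does not rewrite '__topic__', the 'topic' label stays the configured
# pattern, so the exporter output looks roughly like:
#   fhem_light_state{topic="fhem/+/+/light",fhem_light_state="on"} 1.0
#   fhem_light_state{topic="fhem/+/+/light",fhem_light_state="off"} 0.0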
4 | metrics: 5 | - name: "fhem_light_state" 6 | help: "Light state on/off" 7 | type: "enum" 8 | topic: "fhem/+/+/light" 9 | parameters: 10 | states: 11 | - 'on' 12 | - 'off' 13 | label_configs: 14 | - source_labels: ['__value__'] # replace uppercase ON and 0 with on 15 | regex: "(ON|0)" 16 | target_label: '__value__' 17 | replacement: 'on' 18 | action: "replace" 19 | - source_labels: ['__value__'] # replace uppercase OFF und 1 with off 20 | regex: "(OFF|1)" 21 | target_label: '__value__' 22 | replacement: 'off' 23 | action: "replace" -------------------------------------------------------------------------------- /mqtt_exporter.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from copy import deepcopy 4 | import json 5 | from collections import defaultdict 6 | import logging 7 | import argparse 8 | import os 9 | import re 10 | import operator 11 | import time 12 | import signal 13 | import sys 14 | import paho.mqtt.client as mqtt 15 | import yaml 16 | import prometheus_client as prometheus 17 | from yamlreader import yaml_load 18 | import utils.prometheus_additions 19 | import version 20 | import threading 21 | 22 | VERSION = version.__version__ 23 | SUFFIXES_PER_TYPE = { 24 | "gauge": [], 25 | "counter": ['total'], 26 | "counter_absolute": ['total'], 27 | "summary": ['sum', 'count'], 28 | "histogram": ['sum', 'count', 'bucket'], 29 | "enum": [], 30 | } 31 | METRICS_LOCK = threading.Semaphore() 32 | 33 | 34 | def _read_config(config_path): 35 | """Read config file from given location, and parse properties""" 36 | 37 | if config_path is not None: 38 | if os.path.isfile(config_path): 39 | logging.info(f'Config file found at: {config_path}') 40 | try: 41 | with open(config_path, 'r') as f: 42 | return yaml.safe_load(f.read()) 43 | except yaml.YAMLError: 44 | logging.exception('Failed to parse configuration file:') 45 | 46 | elif os.path.isdir(config_path): 47 | logging.info( 48 | f'Config directory found at: {config_path}') 49 | try: 50 | return yaml_load(config_path) 51 | except yaml.YAMLError: 52 | logging.exception('Failed to parse configuration directory:') 53 | 54 | return {} 55 | 56 | 57 | def _parse_config_and_add_defaults(config_from_file): 58 | """Parse content of configfile and add default values where needed""" 59 | 60 | config = deepcopy(config_from_file) 61 | logging.debug(f'_parse_config Config from file: {str(config_from_file)}') 62 | # Logging values ('logging' is optional in config 63 | if 'logging' in config_from_file: 64 | config['logging'] = _add_config_and_defaults( 65 | config_from_file['logging'], {'logfile': '', 'level': 'info'}) 66 | else: 67 | config['logging'] = _add_config_and_defaults( 68 | None, {'logfile': '', 'level': 'info'}) 69 | 70 | # MQTT values 71 | if 'mqtt' in config_from_file: 72 | config['mqtt'] = _add_config_and_defaults( 73 | config_from_file['mqtt'], {'host': 'localhost', 'port': 1883}) 74 | else: 75 | config['mqtt'] = _add_config_and_defaults( 76 | None, {'host': 'localhost', 'port': 1883}) 77 | 78 | if 'auth' in config['mqtt']: 79 | config['mqtt']['auth'] = _add_config_and_defaults( 80 | config['mqtt']['auth'], {}) 81 | _validate_required_fields(config['mqtt']['auth'], 'auth', ['username']) 82 | 83 | if 'tls' in config['mqtt']: 84 | config['mqtt']['tls'] = _add_config_and_defaults( 85 | config['mqtt']['tls'], {}) 86 | 87 | # Prometheus values 88 | if 'prometheus' in config: 89 | config['prometheus'] = _add_config_and_defaults( 90 | config_from_file['prometheus'], 
{'exporter_port': 9344}) 91 | else: 92 | config['prometheus'] = _add_config_and_defaults( 93 | None, {'exporter_port': 9344}) 94 | 95 | metrics = {} 96 | if not 'metrics' in config_from_file: 97 | logging.critical('No metrics defined in config. Aborting.') 98 | sys.exit(1) 99 | for metric in config_from_file['metrics']: 100 | parse_and_validate_metric_config(metric, metrics) 101 | 102 | config['metrics'] = _group_by_topic(list(metrics.values())) 103 | return config 104 | 105 | 106 | def parse_and_validate_metric_config(metric, metrics): 107 | m = _add_config_and_defaults(metric, {}) 108 | _validate_required_fields(m, None, ['name', 'help', 'type', 'topic']) 109 | if 'label_configs' in m and m['label_configs']: 110 | label_configs = [] 111 | for lc in m['label_configs']: 112 | if lc: 113 | lc = _add_config_and_defaults(lc, {'separator': ';', 'regex': '^(.*)$', 'replacement': '\\1', 114 | 'action': 'replace'}) 115 | if lc['action'] == 'replace': 116 | _validate_required_fields(lc, None, 117 | ['source_labels', 'target_label', 'separator', 'regex', 'replacement', 118 | 'action']) 119 | else: 120 | _validate_required_fields(lc, None, 121 | ['source_labels', 'separator', 'regex', 'replacement', 122 | 'action']) 123 | label_configs.append(lc) 124 | m['label_configs'] = label_configs 125 | # legacy config handling move 'buckets' to params directory 126 | if m.get('buckets'): 127 | m.setdefault('parameters', {})['buckets'] = (m['buckets']) 128 | metrics[m['name']] = m 129 | 130 | 131 | def _validate_required_fields(config, parent, required_fields): 132 | """Fail if required_fields is not present in config""" 133 | for field in required_fields: 134 | if field not in config or config[field] is None: 135 | if parent is None: 136 | error = f'\'{field}\' is a required field in configfile' 137 | else: 138 | error = f'\'{field}\' is a required parameter for field {parent} in configfile' 139 | raise TypeError(error) 140 | 141 | 142 | def _add_config_and_defaults(config, defaults): 143 | """Return dict with values from config, if present, or values from defaults""" 144 | if config is not None: 145 | defaults.update(config) 146 | return defaults.copy() 147 | 148 | 149 | def _strip_config(config, allowed_keys): 150 | return {k: v for k, v in config.items() if k in allowed_keys and v} 151 | 152 | 153 | def _group_by_topic(metrics): 154 | """Group metrics by topic""" 155 | t = defaultdict(list) 156 | for metric in metrics: 157 | t[metric['topic']].append(metric) 158 | return t 159 | 160 | 161 | def _topic_matches(topic1, topic2): 162 | """Check if wildcard-topics match""" 163 | if topic1 == topic2: 164 | return True 165 | 166 | # If topic1 != topic2 and no wildcard is present in topic1, no need for regex 167 | if '+' not in topic1 and '#' not in topic1: 168 | return False 169 | 170 | logging.debug( 171 | f'_topic_matches: Topic1: {topic1}, Topic2: {topic2}') 172 | topic1 = re.escape(topic1) 173 | regex = topic1.replace('/\\#', '.*$').replace('\\+', '[^/]+') 174 | match = re.match(regex, topic2) 175 | 176 | logging.debug(f'_topic_matches: Match: {match is not None}') 177 | return match is not None 178 | 179 | 180 | # noinspection SpellCheckingInspection 181 | def _log_setup(logging_config): 182 | """Setup application logging""" 183 | 184 | logfile = logging_config['logfile'] 185 | 186 | log_level = logging_config['level'] 187 | 188 | numeric_level = logging.getLevelName(log_level.upper()) 189 | if not isinstance(numeric_level, int): 190 | raise TypeError(f'Invalid log level: {log_level}') 191 | 192 | if logfile 
!= '': 193 | logging.info(f"Logging redirected to: {logfile}") 194 | # Need to replace the current handler on the root logger: 195 | file_handler = logging.FileHandler(logfile, 'a') 196 | formatter = logging.Formatter('%(asctime)s %(levelname)s: %(message)s') 197 | file_handler.setFormatter(formatter) 198 | 199 | log = logging.getLogger() # root logger 200 | for handler in log.handlers: # remove all old handlers 201 | log.removeHandler(handler) 202 | log.addHandler(file_handler) 203 | 204 | else: 205 | logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s') 206 | 207 | logging.getLogger().setLevel(numeric_level) 208 | logging.info(f'log_level set to: {log_level}') 209 | 210 | 211 | # noinspection PyUnusedLocal 212 | def _on_connect(client, userdata, flags, rc): # pylint: disable=unused-argument,invalid-name 213 | """The callback for when the client receives a CONNACK response from the server.""" 214 | logging.info(f'Connected to broker, result code {str(rc)}') 215 | 216 | for topic in userdata.keys(): 217 | client.subscribe(topic) 218 | logging.info(f'Subscribing to topic: {topic}') 219 | 220 | 221 | def _label_config_match(label_config, labels): 222 | """Action 'keep' and 'drop' in label_config: Matches joined 'source_labels' to 'regex'""" 223 | source = label_config['separator'].join( 224 | [labels[x] for x in label_config['source_labels']]) 225 | logging.debug(f'_label_config_match source: {source}') 226 | match = re.match(label_config['regex'], source) 227 | 228 | if label_config['action'] == 'keep': 229 | logging.debug( 230 | f"_label_config_match Action: {label_config['action']}, Keep msg: {match is not None}") 231 | return match is not None 232 | if label_config['action'] == 'drop': 233 | logging.debug( 234 | f"_label_config_match Action: {label_config['action']}, Drop msg: {match is not None}") 235 | return match is None 236 | else: 237 | logging.debug( 238 | f"_label_config_match Action: {label_config['action']} is not supported, metric is dropped") 239 | return False 240 | 241 | 242 | def _apply_label_config(labels, label_configs): 243 | """Create/change labels based on label_config in config file.""" 244 | 245 | for label_config in label_configs: 246 | if label_config['action'] == 'replace': 247 | _label_config_rename(label_config, labels) 248 | else: 249 | if not _label_config_match(label_config, labels): 250 | return False 251 | return True 252 | 253 | 254 | def _label_config_rename(label_config, labels): 255 | """Action 'rename' in label_config: Add/change value for label 'target_label'""" 256 | source = label_config['separator'].join( 257 | [labels[x] for x in label_config['source_labels']]) 258 | 259 | if re.match(re.compile(label_config['regex']), source): 260 | logging.debug(f'_label_config_rename source: {source}') 261 | result = re.sub(label_config['regex'], 262 | label_config['replacement'], source) 263 | logging.debug(f'_label_config_rename result: {result}') 264 | labels[label_config['target_label']] = result 265 | 266 | 267 | def finalize_labels(labels): 268 | """Keep '__value__', and '__topic__' but remove all other labels starting with '__'""" 269 | labels['value'] = labels['__value__'] 270 | labels['topic'] = labels['__topic__'] 271 | 272 | return {k: v for k, v in labels.items() if not k.startswith('__')} 273 | 274 | 275 | def _update_metrics(metrics, msg): 276 | """For each metric on this topic, apply label renaming if present, and export to prometheus""" 277 | for metric in metrics: 278 | labels = {'__topic__': metric['topic'], 279 | 
'__msg_topic__': msg.topic, '__value__': str(msg.payload, 'utf-8')} 280 | 281 | if 'label_configs' in metric: 282 | # If action 'keep' in label_configs fails, or 'drop' succeeds, the metric is not updated 283 | if not _apply_label_config(labels, metric['label_configs']): 284 | continue 285 | 286 | # try to convert to float, but leave as is if conversion not possible 287 | try: 288 | labels['__value__'] = float(labels['__value__'].replace(',', '.')) 289 | except ValueError: 290 | logging.debug(f"Conversion of {labels['__value__']} to float not possible, continue with value as is.") 291 | 292 | logging.debug('_update_metrics all labels:') 293 | logging.debug(labels) 294 | 295 | labels = finalize_labels(labels) 296 | 297 | derived_metric = metric.setdefault('derived_metric', 298 | # Add derived metric for when the message was last received (timestamp in milliseconds) 299 | { 300 | 'name': f"{metric['name']}_last_received", 301 | 'help': f"Last received message for '{metric['name']}'", 302 | 'type': 'gauge' 303 | } 304 | ) 305 | derived_labels = {'topic': metric['topic'], 306 | 'value': int(round(time.time() * 1000))} 307 | 308 | _export_to_prometheus(metric['name'], metric, labels) 309 | 310 | _export_to_prometheus( 311 | derived_metric['name'], derived_metric, derived_labels) 312 | 313 | if metric.get('expires'): 314 | if metric.get('expiration_timer'): 315 | metric.get('expiration_timer').cancel() 316 | logging.debug(f"_update_metric Canceled existing timer for {metric.get('name')}") 317 | 318 | metric['expiration_timer'] = threading.Timer(metric.get('expires'), _clear_metric, args=(metric, derived_metric)) 319 | metric['expiration_timer'].start() 320 | logging.debug(f"_update_metric Set a {metric.get('expires')} second expiration timer for {metric.get('name')}") 321 | 322 | 323 | def _clear_metric(metric, derived_metric): 324 | with METRICS_LOCK: 325 | metric['prometheus_metric']['parent'].clear() 326 | derived_metric['prometheus_metric']['parent'].clear() 327 | logging.debug(f"_clear_metric cleared metric {metric.get('name')}") 328 | 329 | 330 | # noinspection PyUnusedLocal 331 | def _on_message(client, userdata, msg): 332 | with METRICS_LOCK: 333 | """The callback for when a PUBLISH message is received from the server.""" 334 | logging.debug( 335 | f'_on_message Msg received on topic: {msg.topic}, Value: {str(msg.payload)}') 336 | 337 | for topic in userdata.keys(): 338 | if _topic_matches(topic, msg.topic): 339 | _update_metrics(userdata[topic], msg) 340 | 341 | 342 | def _mqtt_init(mqtt_config, metrics): 343 | """Setup mqtt connection""" 344 | mqtt_client = mqtt.Client(userdata=metrics) 345 | mqtt_client.on_connect = _on_connect 346 | mqtt_client.on_message = _on_message 347 | 348 | if 'auth' in mqtt_config: 349 | auth = _strip_config(mqtt_config['auth'], ['username', 'password']) 350 | mqtt_client.username_pw_set(**auth) 351 | 352 | if 'tls' in mqtt_config: 353 | tls_config = _strip_config(mqtt_config['tls'], [ 354 | 'ca_certs', 'certfile', 'keyfile', 'cert_reqs', 'tls_version']) 355 | mqtt_client.tls_set(**tls_config) 356 | 357 | try: 358 | mqtt_client.connect(**_strip_config(mqtt_config, 359 | ['host', 'port', 'keepalive'])) 360 | except ConnectionRefusedError as err: 361 | logging.critical( 362 | f"Error connecting to {mqtt_config['host']}:{mqtt_config['port']}: {err.strerror}") 363 | sys.exit(1) 364 | 365 | return mqtt_client 366 | 367 | 368 | def _export_to_prometheus(name, metric, labels): 369 | """Export metric and labels to prometheus.""" 370 | metric_wrappers = {'gauge': 
GaugeWrapper, 371 | 'counter': CounterWrapper, 372 | 'counter_absolute': CounterAbsoluteWrapper, 373 | 'summary': SummaryWrapper, 374 | 'histogram': HistogramWrapper, 375 | 'enum': EnumWrapper, 376 | } 377 | valid_types = metric_wrappers.keys() 378 | if metric['type'] not in valid_types: 379 | logging.error( 380 | f"Metric type: {metric['type']}, is not a valid metric type. Must be one of: {valid_types} - ignoring" 381 | ) 382 | return 383 | 384 | value = labels['value'] 385 | del labels['value'] 386 | 387 | sorted_labels = _get_sorted_tuple_list(labels) 388 | label_names, label_values = list(zip(*sorted_labels)) 389 | 390 | prometheus_metric = None 391 | if not metric.get('prometheus_metric') or not metric['prometheus_metric'].get('parent'): 392 | # parent metric not seen before, create metric 393 | additional_parameters = metric.get('parameters', {}) 394 | 395 | metric_wrapper = metric_wrappers[metric['type']] 396 | prometheus_metric = metric_wrapper( 397 | metric['name'], metric['help'], label_names, **additional_parameters) 398 | metric['prometheus_metric'] = {} 399 | metric['prometheus_metric']['parent'] = prometheus_metric 400 | else: 401 | prometheus_metric = metric['prometheus_metric']['parent'] 402 | try: 403 | prometheus_metric.update(label_values, value) 404 | except ValueError as ve: 405 | logging.error(f"Value {value} is not compatible with metric {metric['name']} of type {metric['type']}") 406 | logging.exception('ve:') 407 | 408 | logging.debug( 409 | f"_export_to_prometheus metric ({metric['type']}): {name}{labels} updated with value: {value}") 410 | if logging.DEBUG >= logging.root.level: # log test data only in debugging mode 411 | _log_test_data(metric, labels['topic'], value) 412 | 413 | 414 | def _log_test_data(metric, topic, value): 415 | try: 416 | base_metric = metric['prometheus_metric']['parent'].metric.collect() 417 | samples = {} 418 | for child_metric in base_metric: 419 | if child_metric.name.endswith('_last_received'): 420 | # ignore derived metrics '*._last_received' 421 | continue 422 | first_sample = child_metric.samples[0] 423 | for first_sample in child_metric.samples: 424 | if first_sample.labels.get('topic', '') == topic: 425 | samples[first_sample.name] = first_sample 426 | 427 | if len(samples) == 1: 428 | logging.debug( 429 | f"TEST_DATA: {topic}; {value}; {child_metric.name}; {json.dumps(first_sample.labels)}; {first_sample.value}; 0; True") 430 | else: 431 | out_value = {} 432 | labels = first_sample.labels 433 | for sample_name, first_sample in samples.items(): 434 | suffix = sample_name[len(child_metric.name):] 435 | out_value[suffix] = first_sample.value 436 | if suffix == "_bucket": # buckets have extra "le" label 437 | labels = first_sample.labels 438 | logging.debug( 439 | f"TEST_DATA: {topic}; {value}; {child_metric.name}; {json.dumps(labels)}; {json.dumps(out_value)}; 0; True") 440 | except: # pylint: disable=bare-except 441 | logging.exception("Failed to log TEST_DATA. 
ignoring.") 442 | 443 | 444 | class GaugeWrapper(): 445 | """ 446 | Wrapper to provide generic interface to Gauge metric 447 | """ 448 | def __init__(self, name, help_text, label_names, *args, **kwargs) -> None: 449 | self.metric = prometheus.Gauge( 450 | name, help_text, list(label_names) 451 | ) 452 | 453 | def clear(self): 454 | self.metric.clear() 455 | 456 | def update(self, label_values, value): 457 | child = self.metric.labels(*label_values) 458 | child.set(value) 459 | return child 460 | 461 | 462 | class CounterWrapper(): 463 | """ 464 | Wrapper to provide generic interface to Counter metric 465 | """ 466 | 467 | def __init__(self, name, help_text, label_names, *args, **kwargs) -> None: 468 | self.metric = prometheus.Counter( 469 | name, help_text, list(label_names) 470 | ) 471 | 472 | def clear(self): 473 | self.metric.clear() 474 | 475 | def update(self, label_values, value): 476 | child = self.metric.labels(*label_values) 477 | child.inc(value) 478 | return child 479 | 480 | 481 | class CounterAbsoluteWrapper(): 482 | """ 483 | Wrapper to provide generic interface to CounterAbsolute metric 484 | """ 485 | 486 | def __init__(self, name, help_text, label_names, *args, **kwargs) -> None: 487 | self.metric = utils.prometheus_additions.CounterAbsolute( 488 | name, help_text, list(label_names) 489 | ) 490 | 491 | def clear(self): 492 | self.metric.clear() 493 | 494 | def update(self, label_values, value): 495 | child = self.metric.labels(*label_values) 496 | child.set(value) 497 | return child 498 | 499 | 500 | class SummaryWrapper(): 501 | """ 502 | Wrapper to provide generic interface to Summary metric 503 | """ 504 | 505 | def __init__(self, name, help_text, label_names, *args, **kwargs) -> None: 506 | self.metric = prometheus.Summary( 507 | name, help_text, list(label_names) 508 | ) 509 | 510 | def clear(self): 511 | self.metric.clear() 512 | 513 | def update(self, label_values, value): 514 | child = self.metric.labels(*label_values) 515 | child.observe(value) 516 | return child 517 | 518 | 519 | class HistogramWrapper(): 520 | """ 521 | Wrapper to provide generic interface to Summary metric 522 | """ 523 | 524 | def __init__(self, name, help_text, label_names, *args, **kwargs) -> None: 525 | params = {} 526 | if kwargs.get('buckets'): 527 | if isinstance(kwargs['buckets'], str): 528 | params['buckets'] = kwargs['buckets'].split(',') 529 | else: 530 | params['buckets'] = kwargs['buckets'] 531 | 532 | self.metric = prometheus.Histogram( 533 | name, help_text, list(label_names), **params 534 | ) 535 | 536 | def clear(self): 537 | self.metric.clear() 538 | 539 | def update(self, label_values, value): 540 | child = self.metric.labels(*label_values) 541 | child.observe(value) 542 | return child 543 | 544 | 545 | class EnumWrapper(): 546 | def __init__(self, name, help_text, label_names, *args, **kwargs) -> None: 547 | params = {} 548 | if kwargs.get('states'): 549 | params['states'] = kwargs['states'] 550 | 551 | self.metric = prometheus.Enum( 552 | name, help_text, list(label_names), **params 553 | ) 554 | 555 | def clear(self): 556 | self.metric.clear() 557 | 558 | def update(self, label_values, value): 559 | child = self.metric.labels(*label_values) 560 | child.state(value) 561 | return child 562 | 563 | 564 | def add_static_metric(timestamp): 565 | g = prometheus.Gauge('mqtt_exporter_timestamp', 'Startup time of exporter in millis since EPOC (static)', 566 | ['exporter_version']) 567 | g.labels(VERSION).set(timestamp) 568 | 569 | 570 | def _get_sorted_tuple_list(source): 571 | 
"""Return a sorted list of tuples""" 572 | filtered_source = source.copy() 573 | sorted_tuple_list = sorted( 574 | list(filtered_source.items()), key=operator.itemgetter(0)) 575 | return sorted_tuple_list 576 | 577 | 578 | def _signal_handler(sig, frame): 579 | # pylint: disable=E1101 580 | logging.info('Received {0}'.format(signal.Signals(sig).name)) 581 | sys.exit(0) 582 | 583 | 584 | def main(): 585 | add_static_metric(int(time.time() * 1000)) 586 | # Setup argument parsing 587 | parser = argparse.ArgumentParser( 588 | description='Simple program to export formatted MQTT messages to Prometheus') 589 | parser.add_argument('-c', '--config', action='store', dest='config', default='conf', 590 | help='Set config location (file or directory), default: \'conf\'') 591 | options = parser.parse_args() 592 | 593 | # Initial logging to console 594 | _log_setup({'logfile': '', 'level': 'info'}) 595 | signal.signal(signal.SIGTERM, _signal_handler) 596 | signal.signal(signal.SIGINT, _signal_handler) 597 | 598 | # Read config file from disk 599 | from_file = _read_config(options.config) 600 | config = _parse_config_and_add_defaults(from_file) 601 | 602 | # Set up logging 603 | _log_setup(config['logging']) 604 | 605 | # Start prometheus exporter 606 | logging.info( 607 | f"Starting Prometheus exporter on port: {str(config['prometheus']['exporter_port'])}") 608 | try: 609 | prometheus.start_http_server(config['prometheus']['exporter_port']) 610 | except OSError as err: 611 | logging.critical( 612 | f"Error starting Prometheus exporter: {err.strerror}") 613 | sys.exit(1) 614 | 615 | # Set up mqtt client and loop forever 616 | mqtt_client = _mqtt_init(config['mqtt'], config['metrics']) 617 | mqtt_client.loop_forever() 618 | 619 | 620 | if __name__ == '__main__': 621 | main() 622 | -------------------------------------------------------------------------------- /requirements-dev.txt: -------------------------------------------------------------------------------- 1 | paho-mqtt 2 | prometheus-client 3 | PyYAML 4 | yamlreader 5 | pytest -------------------------------------------------------------------------------- /requirements-frozen.txt: -------------------------------------------------------------------------------- 1 | paho-mqtt==1.6.1 2 | prometheus-client==0.17.1 3 | PyYAML==6.0.1 4 | six==1.16.0 5 | yamlreader==3.0.4 6 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | paho-mqtt 2 | prometheus-client 3 | PyYAML 4 | yamlreader -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fhemberger/mqtt_exporter/b0d363e4f263d97917c406d27b54c6173d7f81dc/tests/__init__.py -------------------------------------------------------------------------------- /tests/readme.md: -------------------------------------------------------------------------------- 1 | # Tests with pytest 2 | 3 | make sure pytest is installed `pip install -r requirements-dev.txt` 4 | run `pytest -s -o log_cli=true -o log_cli_level="DEBUG"` from the repository root 5 | 6 | ## test_mqtt_explorer.py:test_update_metrics 7 | 8 | This test loads test mqtt data from a file and feeds it into mqtt_exporter and check if expected results are recorded in the prometheus client. 
9 | 
10 | ### Directory structure
11 | Test data is loaded from the following directory structure:
12 | 
13 | ```
14 | ./tests/
15 |     ./test_data/
16 |         ./test1/
17 |             conf.yaml
18 |             mqtt_msg.csv
19 |         ./test2/
20 |             conf.yaml
21 |             mqtt_msg.csv
22 |         ./test_xyz/
23 |             conf.yaml
24 |             mqtt_msg.csv
25 |     ./tmp_data
26 |         [metric_test...##,txt]
27 | ```
28 | 
29 | In `test_data`, each subfolder (e.g. `test1`, `test_bla`) contains a separate set of test data. There is no naming convention for the folders. They can be descriptive, like `test_for_issue1234`. Avoid special characters and whitespace.
30 | 
31 | Files:
32 | - `conf.yaml`: a config in the same format as for mqtt_exporter itself, but only the `metrics` part and an additional optional attribute `timescale` are read from it.
33 | - `mqtt_msg.csv`: fake MQTT data; see the format description below.
34 | 
35 | `tmp_data` contains the Prometheus scrape output captured after each processed MQTT message. This folder is cleaned before each test run.
36 | 
37 | ### mqtt_msg.csv file format
38 | 
39 | `mqtt_msg.csv` is a CSV file with `;` as delimiter and `'` as quotation character.
40 | The following columns are expected:
41 | 
42 | ```
43 | in_topic;in_payload;out_name;out_labels;out_value;delay;assert
44 | ```
45 | - `in_topic`: topic as received from the MQTT server
46 | - `in_payload`: payload from the MQTT server as a string (it will be converted to a byte array)
47 | - `out_name`: metric name without any suffix like `total`, `sum`, `bucket`, ...
48 | - `out_labels`: labels noted as a JSON string, including the topic.
49 | - `out_value`: expected value. For simple metrics like gauge it is a number; for other metrics it is a JSON string with the expected values per suffix, e.g. `{"_count": 10, "_sum": 85.55, "_bucket": 10}`
50 | - `delay`: delay in seconds until the next MQTT msg is processed. The `timescale` config attribute speeds up/slows down the delay. A timescale of 0 means no delay, a timescale of 1 means realtime. Default timescale = 0
51 | - `assert`: `True/False`. Specifies if the test should pass or not. In most cases this should be `True`
52 | 
53 | Metric type `Histogram` needs special handling here, as it logs a `$(metric_name)_bucket` metric for each bucket with a reserved label `le` in the meaning of _less or equal_. Specify `le` for one bucket and set the expected count for that bucket in the `_bucket` attribute of the `out_value` JSON. See the examples in `test1`.
54 | 
55 | For sample data, see the existing tests above.
56 | 
57 | ### Gather test data from live environment
58 | 
59 | If the logging level is set to `debug`, the log will contain lines that are already correctly formatted to be placed in a `mqtt_msg.csv`.
60 | 
61 | They look like this:
62 | ```
63 | 2021-08-08 22:24:36,996 DEBUG: TEST_DATA: fhem/Terrasse/TermPearl02/humidity; 21.0; fhem_humidity_percent; {"location": "paz", "topic": "fhem/paz/TermPearl01/humidity"}; 17.0; 0; True
64 | 2021-08-08 22:24:30,601 DEBUG: TEST_DATA: fhem/Terrasse/TerrasseWeiss/humidity; 20.0; fhem_humidity_percent; {"location": "paz", "topic": "fhem/paz/TermPearl01/humidity"}; 17.0; 0; True
65 | 2021-08-08 22:24:27,097 DEBUG: TEST_DATA: fhem/Garten/TermFetanten01/temperature; 16.7; fhem_temperature_celsius; {"location": "Terrasse", "topic": "fhem/Terrasse/TerrasseWeiss/temperature"}; 16.0; 0; True
66 | 2021-08-08 22:23:58,831 DEBUG: TEST_DATA: fhem/paz/TermPearl01/humidity; 17.0; fhem_humidity_percent; {"location": "paz", "topic": "fhem/paz/TermPearl01/humidity"}; 17.0; 0; True
67 | ```
68 | Tips:
69 | - remove `.* DEBUG: TEST_DATA: `.
70 | - Make sure the `mqtt_msg.csv` contains as first line the headers given above. 71 | - The captured data won't fit if the `payload/__value__` has been replaced by a label_config. Please set `in_payload` to the correct value manually. An example for this exception is the `- name: 'mqtt_broker_version'` metric from the example configurations. 72 | - put the data in a new subfolder in the `test_data` dir. Copy also the config file from the live environment to this folder (you should remove the `mqtt` part from it. Make also sure the recorded data don't contain sensitive data). 73 | - Create a PR and share the test data, as this will allow all developers to verify code changes. 74 | 75 | -------------------------------------------------------------------------------- /tests/test_data/test1/conf.yaml: -------------------------------------------------------------------------------- 1 | 2 | # Logging 3 | logging: 4 | # logfile: 'conf/mqttexperter.log' # Optional default '' (stdout) 5 | level: 'debug' # Optional default 'info' 6 | 7 | timescale: 0 8 | 9 | # Metric definitions 10 | metrics: 11 | - name: 'ftp_transferred_bytes' 12 | help: 'data transferred in bytes pe file' 13 | type: 'summary' 14 | topic: 'ftp/+/transferred' 15 | label_configs: 16 | - source_labels: ['__msg_topic__'] 17 | target_label: '__topic__' 18 | - source_labels: ["__msg_topic__"] 19 | regex: "ftp/([^/]+).*" 20 | target_label: "file" 21 | replacement: '\1' 22 | action: "replace" 23 | - name: 'network_ping_ms' 24 | help: 'ping response in ms' 25 | type: 'histogram' 26 | topic: 'network/+/+/ping' 27 | buckets: '0.5,5,10' 28 | label_configs: 29 | - source_labels: ['__msg_topic__'] 30 | target_label: '__topic__' 31 | - source_labels: ["__msg_topic__"] 32 | regex: "network/([^/]+).*" 33 | target_label: "network" 34 | replacement: '\1' 35 | action: "replace" 36 | - source_labels: ["__msg_topic__"] 37 | regex: "network/[^/]+/([^/]+).*" 38 | target_label: "server" 39 | replacement: '\1' 40 | action: "replace" 41 | - name: "fhem_temperature_celsius" 42 | help: "443 Mhz Sensors, Temperature in C" 43 | type: "gauge" 44 | topic: "fhem/+/+/temperature" 45 | label_configs: 46 | - source_labels: ['__msg_topic__'] 47 | target_label: '__topic__' 48 | - source_labels: ["__msg_topic__"] 49 | regex: "fhem/([^/]+).*" 50 | target_label: "location" 51 | replacement: '\1' 52 | action: "replace" 53 | - name: "fhem_humidity_percent" 54 | help: "443 Mhz Sensors, Humidity in %" 55 | type: "gauge" 56 | topic: "fhem/+/+/humidity" 57 | label_configs: 58 | - source_labels: ['__msg_topic__'] 59 | target_label: '__topic__' 60 | - source_labels: ["__msg_topic__"] 61 | regex: "fhem/([^/]+).*" 62 | target_label: "location" 63 | replacement: '\1' 64 | action: "replace" 65 | - name: "fhem_rain_mm" 66 | help: "443 Mhz Sensors, rain in mm/m2" 67 | type: "counter" 68 | topic: "fhem/+/+/rain_total" 69 | label_configs: 70 | - source_labels: ['__msg_topic__'] 71 | target_label: '__topic__' 72 | - source_labels: ["__msg_topic__"] 73 | regex: "fhem/([^/]+).*" 74 | target_label: "location" 75 | replacement: '\1' 76 | action: "replace" 77 | -------------------------------------------------------------------------------- /tests/test_data/test1/mqtt_msg.csv: -------------------------------------------------------------------------------- 1 | in_topic;in_payload;out_name;out_labels;out_value;delay;assert 2 | fhem/Terrasse/TermPearl02/temperature;18;fhem_temperature_celsius;{"location": "Terrasse","topic": "fhem/Terrasse/TermPearl02/temperature"};18;1;True 3 | 
fhem/Terrasse/TermPearl02/humidity;21;fhem_humidity_percent;{"location": "Terrasse","topic": "fhem/Terrasse/TermPearl02/humidity"};21;2;True 4 | fhem/Garten/TermFetanten01/humidity;79;fhem_humidity_percent;{"location": "Garten","topic": "fhem/Garten/TermFetanten01/humidity"};79;2;True 5 | fhem/Garten/rainmeter01/rain_total;134.8;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};134.8;4;True 6 | fhem/Terrasse/TermPearl02/temperature;22;fhem_temperature_celsius;{"location": "Terrasse","topic": "fhem/Terrasse/TermPearl02/temperature"};22;5;True 7 | fhem/Terrasse/TermPearl02/humidity;24.3;fhem_humidity_percent;{"location": "Terrasse","topic": "fhem/Terrasse/TermPearl02/humidity"};24.3;1;True 8 | fhem/Garten/TermFetanten01/humidity;79;fhem_humidity_percent;{"location": "Garten","topic": "fhem/Garten/TermFetanten01/humidity"};79;2;True 9 | fhem/Garten/rainmeter01/rain_total;11.1;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};145.9;3;True 10 | fhem/Garten/rainmeter01/rain_total;10;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};155.9;5;True 11 | network/vlan11/srv01.local/ping;2;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "5.0"};{"_count": 1, "_sum": 2, "_bucket": 1};2;True 12 | network/vlan11/srv01.local/ping;4;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "5.0"};{"_count": 2, "_sum": 6, "_bucket": 2};6;True 13 | network/vlan11/srv01.local/ping;7;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "10.0"};{"_count": 3, "_sum": 13, "_bucket": 3};1;True 14 | network/vlan11/srv01.local/ping;0.4;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "0.5"};{"_count": 4, "_sum": 13.4, "_bucket": 1};4;True 15 | network/vlan11/srv01.local/ping;20;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "+Inf"};{"_count": 5, "_sum": 33.4, "_bucket": 5};5;True 16 | network/vlan11/srv01.local/ping;11.1;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "+Inf"};{"_count": 6, "_sum": 44.5, "_bucket": 6};2;True 17 | network/vlan11/srv01.local/ping;5;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "5.0"};{"_count": 7, "_sum": 49.5, "_bucket": 4};4;True 18 | network/vlan11/srv01.local/ping;6;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "10.0"};{"_count": 8, "_sum": 55.5, "_bucket": 6};1;True 19 | network/vlan11/srv01.local/ping;0.05;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "0.5"};{"_count": 9, "_sum": 55.55, "_bucket": 2};4;True 20 | network/vlan11/srv01.local/ping;30;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "+Inf"};{"_count": 10, "_sum": 85.55, "_bucket": 10};5;True 21 | ftp/update.bin/transferred;123;ftp_transferred_bytes;{"file": "update.bin","topic": "ftp/update.bin/transferred"};{"_count": 1, "_sum": 123};1;True 22 | ftp/update.bin/transferred;234;ftp_transferred_bytes;{"file": "update.bin","topic": "ftp/update.bin/transferred"};{"_count": 2, "_sum": 357};1;True 23 | 
ftp/update.bin/transferred;34;ftp_transferred_bytes;{"file": "update.bin","topic": "ftp/update.bin/transferred"};{"_count": 3, "_sum": 391};1;True 24 | ftp/update.bin/transferred;45;ftp_transferred_bytes;{"file": "update.bin","topic": "ftp/update.bin/transferred"};{"_count": 4, "_sum": 436};1;True 25 | ftp/update.bin/transferred;89;ftp_transferred_bytes;{"file": "update.bin","topic": "ftp/update.bin/transferred"};{"_count": 5, "_sum": 525};1;True 26 | ftp/update.bin/transferred;11111;ftp_transferred_bytes;{"file": "update.bin","topic": "ftp/update.bin/transferred"};{"_count": 6, "_sum": 11636};1;True 27 | -------------------------------------------------------------------------------- /tests/test_data/test2/conf.yaml: -------------------------------------------------------------------------------- 1 | # Config file for MQTT prometheus exporter 2 | 3 | 4 | # Metric definitions 5 | metrics: 6 | # - name: 'mqtt_broker' 7 | # help: 'System events from broker' 8 | # type: 'gauge' 9 | # topic: '$SYS/broker/#' 10 | # label_configs: 11 | # - source_labels: ['__msg_topic__'] 12 | # target_label: '__topic__' 13 | # - source_labels: ['__value__'] 14 | # regex: '^(\d+([,.]\d*)?)$|^([,.]\d+)$' 15 | # action: 'keep' 16 | 17 | - name: 'mqtt_broker_version' 18 | help: 'Mosquitto version (static)' 19 | type: 'gauge' 20 | topic: '$SYS/broker/version' 21 | label_configs: 22 | - source_labels: ['__msg_topic__'] 23 | target_label: '__topic__' 24 | - source_labels: ['__value__'] 25 | regex: '^\D+((?:\d+[\.]?)+)$' 26 | target_label: 'version' 27 | replacement: '\1' 28 | action: 'replace' 29 | - source_labels: ['__value__'] 30 | replacement: '1' 31 | target_label: '__value__' 32 | action: 'replace' 33 | 34 | - name: 'mqtt_broker_changeset' 35 | help: 'Mosquitto build changeset (static)' 36 | type: 'gauge' 37 | topic: '$SYS/broker/changeset' 38 | label_configs: 39 | - source_labels: ['__msg_topic__'] 40 | target_label: '__topic__' 41 | - source_labels: ['__value__'] 42 | target_label: 'changeset' 43 | action: 'replace' 44 | - source_labels: ['__value__'] 45 | replacement: '1' 46 | target_label: '__value__' 47 | action: 'replace' 48 | 49 | - name: 'mqtt_broker_timestamp' 50 | help: 'Mosquitto build timestamp (static)' 51 | type: 'gauge' 52 | topic: '$SYS/broker/timestamp' 53 | label_configs: 54 | - source_labels: ['__msg_topic__'] 55 | target_label: '__topic__' 56 | - source_labels: ['__value__'] 57 | target_label: 'timestamp' 58 | action: 'replace' 59 | - source_labels: ['__value__'] 60 | replacement: '1' 61 | target_label: '__value__' 62 | action: 'replace' 63 | 64 | - name: "fhem_temperature_celsius" 65 | help: "443 Mhz Sensors, Temperature in C" 66 | type: "gauge" 67 | topic: "fhem/+/+/temperature" 68 | label_configs: 69 | - source_labels: ['__msg_topic__'] 70 | target_label: '__topic__' 71 | - source_labels: ["__msg_topic__"] 72 | regex: "fhem/([^/]+).*" 73 | target_label: "location" 74 | replacement: '\1' 75 | action: "replace" 76 | - name: "fhem_humidity_percent" 77 | help: "443 Mhz Sensors, Humidity in %" 78 | type: "gauge" 79 | topic: "fhem/+/+/humidity" 80 | label_configs: 81 | - source_labels: ['__msg_topic__'] 82 | target_label: '__topic__' 83 | - source_labels: ["__msg_topic__"] 84 | regex: "fhem/([^/]+).*" 85 | target_label: "location" 86 | replacement: '\1' 87 | action: "replace" 88 | - name: "fhem_rain_mm" 89 | help: "443 Mhz Sensors, rain in mm/m2" 90 | type: "counter" 91 | topic: "fhem/+/+/rain_total" 92 | label_configs: 93 | - source_labels: ['__msg_topic__'] 94 | target_label: '__topic__' 95 | - 
source_labels: ["__msg_topic__"] 96 | regex: "fhem/([^/]+).*" 97 | target_label: "location" 98 | replacement: '\1' 99 | action: "replace" 100 | -------------------------------------------------------------------------------- /tests/test_data/test2/mqtt_msg.csv: -------------------------------------------------------------------------------- 1 | in_topic;in_payload;out_name;out_labels;out_value;delay;assert 2 | $SYS/broker/version;' 2.0.11'; mqtt_broker_version; {"topic": "$SYS/broker/version", "version": "2.0.11"}; 1.0; 0; True 3 | fhem/Terrasse/TerrasseWeiss/humidity; 20.0; fhem_humidity_percent; {"location": "Terrasse", "topic": "fhem/Terrasse/TerrasseWeiss/humidity"}; 20.0; 0; True 4 | fhem/Terrasse/TermPearl02/temperature; 17.5; fhem_temperature_celsius; {"location": "Terrasse", "topic": "fhem/Terrasse/TermPearl02/temperature"}; 17.5; 0; True 5 | fhem/Terrasse/TermPearl02/humidity; 20.0; fhem_humidity_percent; {"location": "Terrasse", "topic": "fhem/Terrasse/TermPearl02/humidity"}; 20.0; 0; True 6 | fhem/Terrasse/TermPearl02/humidity; 21.0; fhem_humidity_percent; {"location": "Terrasse", "topic": "fhem/Terrasse/TermPearl02/humidity"}; 21.0; 0; True 7 | fhem/Terrasse/TermPearl02/temperature; 17.6; fhem_temperature_celsius; {"location": "Terrasse", "topic": "fhem/Terrasse/TermPearl02/temperature"}; 17.6; 0; True 8 | fhem/Garten/TermFetanten01/humidity; 66.0; fhem_humidity_percent; {"location": "Garten", "topic": "fhem/Garten/TermFetanten01/humidity"}; 66.0; 0; True 9 | fhem/Terrasse/TermPearl02/temperature; 17.5; fhem_temperature_celsius; {"location": "Terrasse", "topic": "fhem/Terrasse/TermPearl02/temperature"}; 17.5; 0; True 10 | $SYS/broker/version;' 2.0.11'; mqtt_broker_version; {"topic": "$SYS/broker/version", "version": "2.0.11"}; 1.0; 0; True 11 | fhem/Garten/rainmeter01/rain_total; 106.426; fhem_rain_mm; {"location": "Garten", "topic": "fhem/Garten/rainmeter01/rain_total"}; {"_total": 106.426, "_created": 1628459492.695393}; 0; True 12 | fhem/Garten/TermFetanten01/humidity; 65.0; fhem_humidity_percent; {"location": "Garten", "topic": "fhem/Garten/TermFetanten01/humidity"}; 65.0; 0; True 13 | fhem/paz/TermPearl01/temperature; 24.3; fhem_temperature_celsius; {"location": "paz", "topic": "fhem/paz/TermPearl01/temperature"}; 24.3; 0; True 14 | fhem/paz/TermPearl01/humidity; 17.0; fhem_humidity_percent; {"location": "paz", "topic": "fhem/paz/TermPearl01/humidity"}; 17.0; 0; True 15 | fhem/Terrasse/TerrasseWeiss/humidity; 20.0; fhem_humidity_percent; {"location": "paz", "topic": "fhem/paz/TermPearl01/humidity"}; 17.0; 0; True 16 | -------------------------------------------------------------------------------- /tests/test_data/test_counter_absolute/conf.yaml: -------------------------------------------------------------------------------- 1 | 2 | # Logging 3 | logging: 4 | # logfile: 'conf/mqttexperter.log' # Optional default '' (stdout) 5 | level: 'debug' # Optional default 'info' 6 | 7 | timescale: 0 8 | 9 | # Metric definitions 10 | metrics: 11 | - name: "fhem_rain_mm" 12 | help: "443 Mhz Sensors, rain in mm/m2" 13 | type: "counter_absolute" 14 | topic: "fhem/+/+/rain_total" 15 | label_configs: 16 | - source_labels: ['__msg_topic__'] 17 | target_label: '__topic__' 18 | - source_labels: ["__msg_topic__"] 19 | regex: "fhem/([^/]+).*" 20 | target_label: "location" 21 | replacement: '\1' 22 | action: "replace" 23 | -------------------------------------------------------------------------------- /tests/test_data/test_counter_absolute/mqtt_msg.csv: 
-------------------------------------------------------------------------------- 1 | in_topic;in_payload;out_name;out_labels;out_value;delay;assert 2 | fhem/Garten/rainmeter01/rain_total;4.8;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};4.8;4;True 3 | fhem/Garten/rainmeter01/rain_total;11.1;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};11.1;3;True 4 | fhem/Garten/rainmeter01/rain_total;110;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};110;5;True 5 | fhem/Garten/rainmeter01/rain_total;134.8;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};134.8;4;True 6 | fhem/Garten/rainmeter01/rain_total;211.1;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};211.1;3;True 7 | fhem/Garten/rainmeter01/rain_total;155.9;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};155.9;5;True 8 | fhem/Garten/rainmeter01/rain_total;2134.8;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};2134.8;4;True 9 | fhem/Garten/rainmeter01/rain_total;11.1;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};11.1;3;True 10 | fhem/Garten/rainmeter01/rain_total;155.9;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};155.9;5;True 11 | fhem/Garten/rainmeter01/rain_total;23134.8;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};23134.8;4;True 12 | fhem/Garten/rainmeter01/rain_total;1233123.123123123123;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};1233123.123123123123;3;True 13 | fhem/Garten/rainmeter01/rain_total;1233123.123123123124;fhem_rain_mm;{"location":"Garten","topic": "fhem/Garten/rainmeter01/rain_total"};1233123.123123123124;5;True 14 | -------------------------------------------------------------------------------- /tests/test_data/test_enum/conf.yaml: -------------------------------------------------------------------------------- 1 | 2 | # Logging 3 | logging: 4 | # logfile: 'conf/mqttexperter.log' # Optional default '' (stdout) 5 | level: 'debug' # Optional default 'info' 6 | 7 | timescale: 0 8 | 9 | # Metric definitions 10 | metrics: 11 | - name: "fhem_light_state" 12 | help: "Light state on/off" 13 | type: "enum" 14 | topic: "fhem/+/+/light" 15 | parameters: 16 | states: 17 | - 'on' 18 | - 'off' 19 | label_configs: 20 | - source_labels: ['__value__'] 21 | regex: "(ON|0)" 22 | target_label: '__value__' 23 | replacement: 'on' 24 | action: "replace" 25 | - source_labels: ['__value__'] 26 | regex: "(OFF|1)" 27 | target_label: '__value__' 28 | replacement: 'off' 29 | action: "replace" 30 | - source_labels: ['__msg_topic__'] 31 | target_label: '__topic__' 32 | - source_labels: ["__msg_topic__"] 33 | regex: "fhem/([^/]+).*" 34 | target_label: "location" 35 | replacement: '\1' 36 | action: "replace" 37 | - name: 'network_ping_ms' 38 | help: 'ping response in ms' 39 | type: 'histogram' 40 | topic: 'network/+/+/ping' 41 | parameters: 42 | buckets: 43 | - 0.5 44 | - 5 45 | - 10 46 | label_configs: 47 | - source_labels: ['__msg_topic__'] 48 | target_label: '__topic__' 49 | - source_labels: ["__msg_topic__"] 50 | regex: "network/([^/]+).*" 51 | target_label: "network" 52 | replacement: '\1' 53 | action: "replace" 54 | - source_labels: ["__msg_topic__"] 55 | regex: "network/[^/]+/([^/]+).*" 56 | target_label: "server" 57 | replacement: '\1' 58 | action: "replace" 
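Editorial note on the label_configs used above: each 'replace' step joins the configured source labels, matches the result against the regex, and writes the (possibly group-expanded) replacement into the target label. Below is a minimal, hypothetical Python sketch of that idea for two of the steps in this config; the ';' join separator and the re.match anchoring are assumptions made only for illustration, not mqtt_exporter's actual implementation.

import re

# Minimal sketch (NOT the exporter's real relabelling code) of a 'replace' label_config step.
def apply_replace(labels, source_labels, regex, target_label, replacement):
    source = ';'.join(labels.get(name, '') for name in source_labels)  # assumed join separator
    match = re.match(regex, source)
    if match:  # only rewrite the target label when the regex matches
        labels[target_label] = match.expand(replacement)
    return labels

labels = {'__msg_topic__': 'fhem/room01/desk/light', '__value__': 'ON'}
apply_replace(labels, ['__value__'], '(ON|0)', '__value__', 'on')               # normalize the enum state
apply_replace(labels, ['__msg_topic__'], r'fhem/([^/]+).*', 'location', r'\1')  # extract the location label
print(labels)  # {'__msg_topic__': 'fhem/room01/desk/light', '__value__': 'on', 'location': 'room01'}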
-------------------------------------------------------------------------------- /tests/test_data/test_enum/mqtt_msg.csv: -------------------------------------------------------------------------------- 1 | in_topic;in_payload;out_name;out_labels;out_value;delay;assert 2 | fhem/room01/desk/light01;on;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};0;4;True 3 | fhem/room01/desk/light01;on;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};0;3;True 4 | fhem/room01/desk/light01;off;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};1;5;True 5 | fhem/room01/desk/light01;on;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};0;4;True 6 | fhem/room01/desk/light01;off;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};1;3;True 7 | fhem/room01/desk/light01;ON;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};0;5;True 8 | fhem/room01/desk/light01;OFF;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};1;4;True 9 | fhem/room01/desk/light01;off;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};1;3;True 10 | fhem/room01/desk/light01;1;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};1;5;True 11 | fhem/room01/desk/light01;0;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};0;4;True 12 | fhem/room01/desk/light01;on;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};0;3;True 13 | fhem/room01/desk/light01;off;fhem_light_state;{"location":"Garten","topic": "fhem/room01/desk/light01"};1;5;True 14 | network/vlan11/srv01.local/ping;2;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "5.0"};{"_count": 1, "_sum": 2, "_bucket": 1};2;True 15 | network/vlan11/srv01.local/ping;4;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "5.0"};{"_count": 2, "_sum": 6, "_bucket": 2};6;True 16 | network/vlan11/srv01.local/ping;7;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "10.0"};{"_count": 3, "_sum": 13, "_bucket": 3};1;True 17 | network/vlan11/srv01.local/ping;0.4;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "0.5"};{"_count": 4, "_sum": 13.4, "_bucket": 1};4;True 18 | network/vlan11/srv01.local/ping;20;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "+Inf"};{"_count": 5, "_sum": 33.4, "_bucket": 5};5;True 19 | network/vlan11/srv01.local/ping;11.1;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "+Inf"};{"_count": 6, "_sum": 44.5, "_bucket": 6};2;True 20 | network/vlan11/srv01.local/ping;5;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "5.0"};{"_count": 7, "_sum": 49.5, "_bucket": 4};4;True 21 | network/vlan11/srv01.local/ping;6;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "10.0"};{"_count": 8, "_sum": 55.5, "_bucket": 6};1;True 22 | network/vlan11/srv01.local/ping;0.05;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "0.5"};{"_count": 9, "_sum": 55.55, "_bucket": 2};4;True 23 | 
network/vlan11/srv01.local/ping;30;network_ping_ms;{"network": "vlan11","topic": "network/vlan11/srv01.local/ping", "server": "srv01.local", "le": "+Inf"};{"_count": 10, "_sum": 85.55, "_bucket": 10};5;True -------------------------------------------------------------------------------- /tests/test_mqtt_exporter.py: -------------------------------------------------------------------------------- 1 | """ 2 | pytest tests for mqtt_exporter 3 | 4 | """ 5 | 6 | import csv 7 | import distutils.util 8 | import json 9 | from json.decoder import JSONDecodeError 10 | import os 11 | import time 12 | import logging 13 | 14 | import prometheus_client as prometheus 15 | import prometheus_client.registry 16 | import mqtt_exporter 17 | import pytest 18 | 19 | logging.basicConfig(level=logging.DEBUG) 20 | 21 | TMP_DIR=os.path.join( 22 | os.path.dirname(__file__), 23 | 'tmp_data' 24 | ) 25 | DATA_DIR=os.path.join( 26 | os.path.dirname(__file__), 27 | 'test_data' 28 | ) 29 | 30 | def setup_module(module): #pylint: disable=unused-argument 31 | """ setup any state specific to the execution of the given module.""" 32 | delete_temp_test_files() 33 | 34 | 35 | def delete_temp_test_files(): 36 | # delete TEMP files 37 | for file in os.listdir(TMP_DIR): 38 | if file == '.gitkeep': 39 | continue 40 | os.remove(os.path.join(TMP_DIR, file)) 41 | 42 | 43 | class MqttCVS: 44 | in_topic = "in_topic" 45 | in_payload = "in_payload" 46 | out_name = "out_name" 47 | out_labels = "out_labels" 48 | out_value = "out_value" 49 | delay = "delay" 50 | expected_assert = "assert" 51 | 52 | def _get_mqtt_data(file_name): 53 | """ 54 | Reads fake MQTT data and expected results from file 55 | """ 56 | mqtt_data = [] 57 | with open(file_name, newline='') as mqtt_data_csv: 58 | csv_reader = csv.DictReader(mqtt_data_csv, quotechar="'", delimiter=';') 59 | for row in csv_reader: 60 | row[MqttCVS.in_topic] = row[MqttCVS.in_topic].strip() 61 | row[MqttCVS.out_name] = row[MqttCVS.out_name].strip() 62 | # convert payload to bytes, as in an MQTT message 63 | row[MqttCVS.in_payload] = row[MqttCVS.in_payload].encode('UTF-8') 64 | # parse labels into a Python object. 65 | try: 66 | row[MqttCVS.out_labels] = json.loads(row.get(MqttCVS.out_labels, '{}')) 67 | except json.decoder.JSONDecodeError as jde: 68 | logging.error(f"json.decoder.JSONDecodeError while decoding {row.get(MqttCVS.out_labels, '{}')}") 69 | raise jde 70 | # Value could be JSON, a float or anything else. 71 | try: 72 | row[MqttCVS.out_value] = float(row.get(MqttCVS.out_value)) 73 | except ValueError: 74 | try: 75 | row[MqttCVS.out_value] = json.loads(row.get(MqttCVS.out_value)) 76 | except (JSONDecodeError, TypeError): 77 | pass # leave as it is 78 | # set delay to 0 if not a number 79 | try: 80 | row[MqttCVS.delay] = float(row.get(MqttCVS.delay, 0)) 81 | except ValueError: 82 | row[MqttCVS.delay] = 0 83 | # convert string to bool for expected assertion. 84 | row[MqttCVS.expected_assert] = bool( 85 | distutils.util.strtobool(row.get(MqttCVS.expected_assert, "True").strip())) 86 | mqtt_data.append(row) 87 | return mqtt_data 88 | 89 | 90 | def _get_test_data(): 91 | """ 92 | Reads test data from DATA_DIR subdirectories.
93 | Each subdirectory is expected to contain a `conf.yaml` file with a metrics config (same format as in the main config file) 94 | and a CSV file `mqtt_msg.csv` with fake mqtt data ";" delimited: 95 | `in_topic;in_payload;out_name;out_labels;out_value;delay;assert` 96 | where 97 | out_labels: JSON string with all expected labels 98 | delay: delay until the next line is processed 99 | assert: expected assert result, True if out_value matches the Prometheus metric 100 | """ 101 | test_data_sets = [] 102 | test_data_dirs = [f.path for f in os.scandir(DATA_DIR) if f.is_dir()] 103 | test_names = [ os.path.basename(os.path.normpath(name)) for name in test_data_dirs] 104 | for test_data_dir in test_data_dirs: 105 | conf_file = os.path.join(test_data_dir, 'conf.yaml') 106 | mqtt_data_file = os.path.join(test_data_dir, 'mqtt_msg.csv') 107 | if not os.path.isfile(conf_file) or not os.path.isfile(mqtt_data_file): 108 | logging.error(f"Test data dir {test_data_dir} doesn't contain required files, skipping") 109 | continue 110 | config_yaml = mqtt_exporter._read_config(conf_file) 111 | config_yaml = mqtt_exporter._parse_config_and_add_defaults(config_yaml) 112 | test_data_sets.append(( 113 | config_yaml['metrics'], 114 | _get_mqtt_data(mqtt_data_file), 115 | config_yaml.get('timescale', 0), 116 | )) 117 | return test_names, test_data_sets 118 | 119 | def _get_suffixes_by_metric_name(metrics, metric_name): 120 | metric_type = None 121 | for _, outer_metric in metrics.items(): 122 | for metric in outer_metric: 123 | if metric['name'] == metric_name: 124 | metric_type = metric['type'] 125 | break 126 | 127 | for suffix in mqtt_exporter.SUFFIXES_PER_TYPE[metric_type]: 128 | if len(suffix) == 0: 129 | yield suffix 130 | else: 131 | yield f"_{suffix}" 132 | 133 | 134 | class FakeMSG(): 135 | """Simulate an MQTT message""" 136 | def __init__(self, topic, payload) -> None: 137 | self.topic = topic 138 | self.payload = payload 139 | 140 | 141 | param_test_data_dirs, param_test_data_sets = _get_test_data() 142 | 143 | @pytest.mark.parametrize("metrics,mqtt_data_set,timescale", param_test_data_sets, ids=param_test_data_dirs) 144 | def test_update_metrics(caplog, request, metrics, mqtt_data_set, timescale): 145 | """ 146 | Reads a label_config and some MQTT data and asserts that the expected values end up in the metrics 147 | """ 148 | logging.info(f"Start test_update_metrics with ID {request.node.callspec.id}") 149 | 150 | # reset prometheus registry between tests 151 | collectors = list(prometheus.REGISTRY._collector_to_names.keys()) 152 | for collector in collectors: 153 | prometheus.REGISTRY.unregister(collector) 154 | 155 | i = 1 156 | for mqtt_data in mqtt_data_set: 157 | msg = FakeMSG(mqtt_data[MqttCVS.in_topic], mqtt_data[MqttCVS.in_payload]) 158 | mqtt_exporter._on_message(None, metrics, msg) 159 | prometheus.REGISTRY.collect() 160 | prometheus.write_to_textfile(os.path.join(TMP_DIR, f"metric_{request.node.callspec.id}_{i:02}.txt"), prometheus.REGISTRY) 161 | # depending on the metric type, one or more samples with different suffixes are added.
162 | for suffix in _get_suffixes_by_metric_name(metrics, mqtt_data[MqttCVS.out_name]): 163 | # histograms with buckets need special handling: remove the bucket label 'le' for every suffix except '_bucket' 164 | labels = mqtt_data[MqttCVS.out_labels].copy() 165 | if not suffix == "_bucket" and labels.get('le'): 166 | labels.pop('le') 167 | 168 | expected_result = mqtt_data[MqttCVS.out_value] 169 | expected_result = expected_result if not isinstance(expected_result, dict) else expected_result[suffix] 170 | logging.info(f"Assert {mqtt_data[MqttCVS.out_name]}{suffix} from testdata record {i}") 171 | assert ( prometheus.REGISTRY.get_sample_value( 172 | f"{mqtt_data[MqttCVS.out_name]}{suffix}", 173 | labels 174 | ) == expected_result ) == mqtt_data[MqttCVS.expected_assert] 175 | time.sleep(mqtt_data[MqttCVS.delay] * timescale) 176 | i += 1 177 | for record in caplog.records: 178 | assert record.levelno < logging.ERROR -------------------------------------------------------------------------------- /tests/test_prometheus_additions.py: -------------------------------------------------------------------------------- 1 | """ 2 | Pytest tests for prometheus_client enhancements 3 | """ 4 | import os 5 | import logging 6 | import time 7 | import pytest 8 | from utils.prometheus_additions import CounterAbsolute 9 | import prometheus_client as prometheus 10 | 11 | logging.basicConfig(level=logging.DEBUG) 12 | 13 | TMP_DIR=os.path.join( 14 | os.path.dirname(__file__), 15 | 'tmp_data' 16 | ) 17 | DATA_DIR=os.path.join( 18 | os.path.dirname(__file__), 19 | 'test_data' 20 | ) 21 | 22 | @pytest.fixture(scope="class") 23 | def get_registry(): 24 | yield prometheus.REGISTRY 25 | # reset prometheus registry between tests 26 | collectors = list(prometheus.REGISTRY._collector_to_names.keys()) 27 | for collector in collectors: 28 | prometheus.REGISTRY.unregister(collector) 29 | 30 | old_creation_time = 0.0 31 | 32 | class TestCounterWithReset: 33 | a_counter_absolute = CounterAbsolute('Absolute_Counter', 'Test metric' ) 34 | old_creation_time = 0.0 35 | param_test_data_sets = [ 36 | (10, False), 37 | (10, True), 38 | (11, True), 39 | (110, True), 40 | (110, True), 41 | (210, True), 42 | (310.7, True), 43 | (110, False), 44 | (210, True), 45 | (310.7, True), 46 | ] 47 | 48 | @pytest.mark.parametrize("value, same_creation_time", param_test_data_sets) 49 | def test_counter_absolute(self, request, get_registry, value, same_creation_time): 50 | global old_creation_time 51 | self.a_counter_absolute.set(value) 52 | creation_time = self.a_counter_absolute._created 53 | logging.info(f"Creation time: {creation_time:e}") 54 | registry = get_registry 55 | registry.collect() 56 | prometheus.write_to_textfile(os.path.join(TMP_DIR, f"absolute_counter_{request.node.callspec.id}_{value:05}.txt"), prometheus.REGISTRY) 57 | 58 | assert self.a_counter_absolute._value.get() == value 59 | assert (creation_time == old_creation_time ) == same_creation_time 60 | old_creation_time = creation_time 61 | time.sleep(0.005) 62 | 63 | 64 | class TestCounterResetForbidden: 65 | a_counter_absolute = CounterAbsolute('Strict_Absolute_Counter', "This counter doesn't allow resets") 66 | 67 | def test_counter_reset(self): 68 | val_first = 0.3324234 69 | val_second = 0.3324233 70 | self.a_counter_absolute.set(val_first, fail_on_decrease=True) 71 | with pytest.raises(ValueError, match=rf"Counter must increase {val_second} lower {val_first}"): 72 | self.a_counter_absolute.set(val_second, fail_on_decrease=True) 73 | --------------------------------------------------------------------------------
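Editorial illustration (not code from this repository): the dict-valued out_value entries in the mqtt_msg.csv files above correspond to the per-suffix samples that test_update_metrics looks up via get_sample_value. Assuming standard prometheus_client sample naming, the histogram rows for pings of 2, 4 and 7 ms reduce to roughly this:

import prometheus_client as prometheus

# Stand-alone sketch of the suffix convention exercised by the tests above.
registry = prometheus.CollectorRegistry()
h = prometheus.Histogram('network_ping_ms', 'ping response in ms',
                         buckets=(0.5, 5, 10), registry=registry)
for observed_ms in (2, 4, 7):
    h.observe(observed_ms)

assert registry.get_sample_value('network_ping_ms_count') == 3
assert registry.get_sample_value('network_ping_ms_sum') == 13
# Only the '_bucket' sample carries the 'le' label, which is why test_update_metrics
# strips 'le' from the expected labels for every other suffix.
assert registry.get_sample_value('network_ping_ms_bucket', {'le': '10.0'}) == 3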
/tests/tmp_data/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fhemberger/mqtt_exporter/b0d363e4f263d97917c406d27b54c6173d7f81dc/tests/tmp_data/.gitkeep -------------------------------------------------------------------------------- /utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fhemberger/mqtt_exporter/b0d363e4f263d97917c406d27b54c6173d7f81dc/utils/__init__.py -------------------------------------------------------------------------------- /utils/prometheus_additions.py: -------------------------------------------------------------------------------- 1 | """ 2 | Additions and enhancements to the prometheus_client package 3 | """ 4 | 5 | import prometheus_client as prometheus 6 | 7 | class CounterAbsolute(prometheus.Counter): 8 | """ 9 | CounterAbsolute allows setting the counter to an absolute value, like a Gauge, while the data is 10 | still handled properly if the counter resets or overflows. 11 | CounterAbsolute is typically used if values need to be proxied from another source, e.g. 12 | a network counter, SNMP or MQTT data, which returns increasing but absolute numbers 13 | instead of an increment. 14 | 15 | As a counter must not decrease, setting CounterAbsolute to a lower value is handled as follows: 16 | a counter overflow or reset is assumed, the creation timestamp is reset and internally 17 | a new Value object is created. 18 | 19 | An example for a CounterAbsolute: 20 | from utils.prometheus_additions import CounterAbsolute 21 | c = CounterAbsolute('my_failures_total', 'Description of counter') 22 | c.set(1123.63213) # Set to an absolute value. If lower than the last value, the counter gets reset. 23 | 24 | """ 25 | _type = 'counter' 26 | 27 | 28 | def set(self, value, fail_on_decrease=False): 29 | """Set the counter to the given absolute value.""" 30 | self._raise_if_not_observable() 31 | if value < 0: 32 | raise ValueError('Counters can only be set to a non-negative value.') 33 | if value >= self._value.get(): 34 | self._value.set(float(value)) 35 | else: 36 | if fail_on_decrease: 37 | raise ValueError(f"Counter must increase {value} lower {self._value.get()}") 38 | else: 39 | self._metric_init() 40 | self._value.set(float(value)) 41 | -------------------------------------------------------------------------------- /version.py: -------------------------------------------------------------------------------- 1 | """ 2 | Version of the whole project. 3 | """ 4 | __version__ = "2.0.0" 5 | --------------------------------------------------------------------------------
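A short usage sketch of CounterAbsolute's reset semantics (an editorial illustration, not a file in the repository; the metric name and values are made up, loosely echoing the rain counter test data above):

import time
from utils.prometheus_additions import CounterAbsolute

rain = CounterAbsolute('rain_total_mm', 'Absolute rain counter proxied from MQTT')
rain.set(4.8)      # first absolute reading
rain.set(110)      # monotonic increase: the stored value is simply updated
created_before = rain._created
time.sleep(0.01)   # make the two creation timestamps distinguishable
rain.set(11.1)     # lower reading: assumed reset/overflow, the metric is re-initialised
assert rain._value.get() == 11.1
assert rain._created != created_before  # a new creation timestamp was taken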