├── .gitattributes
├── .devcontainer
│   ├── peek
│   │   └── peekrc
│   ├── configuration.yaml
│   ├── Dockerfile
│   ├── install-docker-cli.sh
│   ├── post-install.sh
│   ├── devcontainer.json
│   └── README.md
├── custom_components
│   ├── __init__.py
│   └── elasticsearch
│       ├── datastreams
│       │   ├── __init__.py
│       │   └── index_template.py
│       ├── manifest.json
│       ├── diagnostics.py
│       ├── errors.py
│       ├── encoder.py
│       ├── system_info.py
│       ├── utils.py
│       ├── const.py
│       ├── loop.py
│       ├── es_datastream_manager.py
│       ├── logger.py
│       ├── translations
│       │   └── en.json
│       ├── es_integration.py
│       ├── es_gateway.py
│       ├── entity_details.py
│       └── __init__.py
├── tests
│   ├── __init__.py
│   ├── test_util
│   │   ├── __init__.py
│   │   └── es_mocker.py
│   ├── .ruff.toml
│   ├── snapshots
│   │   ├── test_diagnostics.ambr
│   │   └── test_entity_details.ambr
│   ├── test_diagnostics.py
│   ├── test_utils.py
│   ├── certs
│   │   └── http_ca.crt
│   ├── test_system_info.py
│   ├── test_es_datastream_manager.py
│   └── test_loop.py
├── .DS_Store
├── scripts
│   ├── test
│   ├── coverage
│   ├── run_ek_8_0_0
│   ├── run_ek_8_11_0
│   ├── run_ek_8_13_0
│   ├── run_ek_8_7_0
│   ├── lint
│   ├── run_ek_7_11_0
│   ├── run_ek_7_17_0
│   ├── run_ek_8_14_0
│   ├── helpers
│   │   ├── es8
│   │   │   ├── get_ca_cert
│   │   │   ├── reset_es_enrollment
│   │   │   ├── run_es
│   │   │   ├── run_kb
│   │   │   ├── wait_for_kb_available
│   │   │   ├── wait_for_kb_setup
│   │   │   ├── get_kb_code
│   │   │   ├── wait_for_es
│   │   │   ├── reset_es_pwd
│   │   │   ├── new_kb_dataview
│   │   │   ├── wait_for_es_green
│   │   │   ├── enroll_kb
│   │   │   └── bootstrap_ek
│   │   ├── es7
│   │   │   ├── run_kb
│   │   │   ├── run_es
│   │   │   ├── wait_for_es
│   │   │   ├── wait_for_kb
│   │   │   └── bootstrap_ek
│   │   └── shared
│   │       ├── pull_kb
│   │       ├── pull_es
│   │       ├── create_hass_writer_user_es
│   │       ├── create_hass_writer_role_es
│   │       ├── create_role_es
│   │       ├── create_hass_writer_apikey_es
│   │       ├── create_user_es
│   │       └── create_apikey_es
│   ├── update_snapshots
│   └── clean_ek
├── hacs.json
├── docs
│   ├── assets
│   │   ├── hvac-history.png
│   │   ├── add-integration.png
│   │   ├── kibana-discover.png
│   │   ├── publish-options.png
│   │   ├── weather-station.png
│   │   ├── kibana-lens-visualization.png
│   │   └── weather-station-wind-pressure.png
│   ├── install.md
│   ├── index.md
│   ├── ingest_advanced.md
│   ├── configure.md
│   └── using_kibana.md
├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── help-support-request.md
│   │   ├── feature_request.md
│   │   └── bug_report.md
│   ├── dependabot.yml
│   └── workflows
│       ├── cron.yml
│       ├── docs.yml
│       ├── combined.yml
│       ├── pull.yml
│       └── coverage-comment.yml
├── config
│   └── configuration.yaml
├── .gitignore
├── LICENSE
├── .vscode
│   ├── launch.json
│   ├── tasks.json
│   └── settings.json
├── mkdocs.yml
├── TROUBLESHOOTING.md
├── mypy.ini
├── CONTRIBUTING.md
├── README.md
├── .ruff.toml
└── pyproject.toml

/.gitattributes:
--------------------------------------------------------------------------------
1 | * text=auto eol=lf
--------------------------------------------------------------------------------
/.devcontainer/peek/peekrc:
--------------------------------------------------------------------------------
1 | use_keyring = False
--------------------------------------------------------------------------------
/custom_components/__init__.py:
--------------------------------------------------------------------------------
1 | """Custom components module."""
2 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | """Tests for the Elasticsearch integration."""
2 |
--------------------------------------------------------------------------------
/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/legrego/homeassistant-elasticsearch/HEAD/.DS_Store
--------------------------------------------------------------------------------
/tests/test_util/__init__.py:
--------------------------------------------------------------------------------
1 | """Test utils for Elasticsearch custom component tests."""
2 |
--------------------------------------------------------------------------------
/scripts/test:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -e
4 |
5 | cd "$(dirname "$0")/.."
6 |
7 | pytest .
--------------------------------------------------------------------------------
/hacs.json:
--------------------------------------------------------------------------------
1 | {
2 |   "name": "Elasticsearch",
3 |   "render_readme": true,
4 |   "homeassistant": "2025.6.0"
5 | }
6 |
--------------------------------------------------------------------------------
/docs/assets/hvac-history.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/legrego/homeassistant-elasticsearch/HEAD/docs/assets/hvac-history.png
--------------------------------------------------------------------------------
/docs/assets/add-integration.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/legrego/homeassistant-elasticsearch/HEAD/docs/assets/add-integration.png
--------------------------------------------------------------------------------
/docs/assets/kibana-discover.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/legrego/homeassistant-elasticsearch/HEAD/docs/assets/kibana-discover.png
--------------------------------------------------------------------------------
/docs/assets/publish-options.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/legrego/homeassistant-elasticsearch/HEAD/docs/assets/publish-options.png
--------------------------------------------------------------------------------
/docs/assets/weather-station.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/legrego/homeassistant-elasticsearch/HEAD/docs/assets/weather-station.png
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/help-support-request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Help/Support request
3 | about: Get help with this project
4 |
5 | ---
6 |
7 |
8 |
--------------------------------------------------------------------------------
/config/configuration.yaml:
--------------------------------------------------------------------------------
1 | default_config:
2 |
3 | logger:
4 |   default: warning
5 |   logs:
6 |     custom_components.elasticsearch: debug
7 |
--------------------------------------------------------------------------------
/.devcontainer/configuration.yaml:
--------------------------------------------------------------------------------
1 | default_config:
2 |
3 | logger:
4 |   default: warning
5 |   logs:
6 |     custom_components.elasticsearch: debug
7 |
--------------------------------------------------------------------------------
/docs/assets/kibana-lens-visualization.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/legrego/homeassistant-elasticsearch/HEAD/docs/assets/kibana-lens-visualization.png
--------------------------------------------------------------------------------
/docs/assets/weather-station-wind-pressure.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/legrego/homeassistant-elasticsearch/HEAD/docs/assets/weather-station-wind-pressure.png
--------------------------------------------------------------------------------
/scripts/coverage:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -e
4 |
5 | cd "$(dirname "$0")/.."
6 |
7 | pytest . -vv --cov=./custom_components --cov-report=xml --cov-report=html
--------------------------------------------------------------------------------
/custom_components/elasticsearch/datastreams/__init__.py:
--------------------------------------------------------------------------------
1 | """datastreams module for Elasticsearch integration."""
2 |
3 | from .index_template import index_template_definition
4 |
5 | __all__ = ["index_template_definition"]
6 |
--------------------------------------------------------------------------------
/scripts/run_ek_8_0_0:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # get the current directory
4 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
5 |
6 | sudo /bin/bash "$DIR/clean_ek"
7 | sudo /bin/bash "$DIR/helpers/es8/bootstrap_ek" "8.0.0"
--------------------------------------------------------------------------------
/scripts/run_ek_8_11_0:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # get the current directory
4 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
5 |
6 | sudo /bin/bash "$DIR/clean_ek"
7 | sudo /bin/bash "$DIR/helpers/es8/bootstrap_ek" "8.11.0"
--------------------------------------------------------------------------------
/scripts/run_ek_8_13_0:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # get the current directory
4 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
5 |
6 | sudo /bin/bash "$DIR/clean_ek"
7 | sudo /bin/bash "$DIR/helpers/es8/bootstrap_ek" "8.13.0"
--------------------------------------------------------------------------------
/scripts/run_ek_8_7_0:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # get the current directory
4 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
5 |
6 | sudo /bin/bash "$DIR/clean_ek"
7 | sudo /bin/bash "$DIR/helpers/es8/bootstrap_ek" "8.7.0"
--------------------------------------------------------------------------------
/scripts/lint:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -e
4 |
5 | cd "$(dirname "$0")/.."
6 |
7 | if [[ "$1" == "--no-fix" ]]; then
8 |     ruff format . --check
9 |     ruff check .
10 | else
11 |     ruff format .
12 |     ruff check . --fix
13 | fi
14 |
--------------------------------------------------------------------------------
/scripts/run_ek_7_11_0:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # get the current directory
4 | DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
5 |
6 | sudo /bin/bash "$DIR/clean_ek"
7 | sudo /bin/bash "$DIR/helpers/es7/bootstrap_ek" "7.11.0"
8 |
--------------------------------------------------------------------------------
/scripts/run_ek_7_17_0:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # get the current directory
4 | DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
5 |
6 | sudo /bin/bash "$DIR/clean_ek"
7 | sudo /bin/bash "$DIR/helpers/es7/bootstrap_ek" "7.17.0"
8 |
--------------------------------------------------------------------------------
/scripts/run_ek_8_14_0:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # get the current directory
4 | DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
5 |
6 | sudo /bin/bash "$DIR/clean_ek"
7 | sudo /bin/bash "$DIR/helpers/es8/bootstrap_ek" "8.14.0"
8 |
--------------------------------------------------------------------------------
/scripts/helpers/es8/get_ca_cert:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | dir=$(dirname "$0")
4 | escontainer=$1
5 |
6 | # Extract the CA cert from the es01 container
7 | docker cp $escontainer:/usr/share/elasticsearch/config/certs/http_ca.crt "$dir/../../../es_env/http_ca.crt"
8 |
--------------------------------------------------------------------------------
/scripts/update_snapshots:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | set -e
4 |
5 | cd "$(dirname "$0")/.."
6 |
7 | # To run a single test, simply call this script with tests/test_....py as the argument
8 |
9 | # Todo: We didn't need to remove this before, but now we do. We should investigate why
10 | rm -rf tests/snapshots
11 |
12 | pytest --snapshot-update "$@"
13 |
--------------------------------------------------------------------------------
/scripts/helpers/es7/run_kb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # This script runs a Kibana Docker container with the specified version.
4 |
5 | # Usage: run_kb <kbversion>
6 | # <kbversion> - The version of Kibana to run (e.g., 7.10.2)
7 |
8 | kbversion=$1
9 |
10 | set -e
11 |
12 | docker run -p 5601:5601 --net elastic -q -d -m 1GB --name kb01 --env ELASTICSEARCH_HOSTS=http://es01:9200 docker.elastic.co/kibana/kibana:"$kbversion"
13 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # artifacts
2 | __pycache__
3 | .pytest*
4 | pytestdebug.log
5 | *.egg-info
6 | */build/*
7 | */dist/*
8 | site/*
9 | es_env
10 |
11 | # misc
12 | .cache
13 | .coverage
14 | .ruff_cache
15 | .venv
16 | .vscode/*
17 | !.vscode/launch.json
18 | !.vscode/settings.json
19 | !.vscode/tasks.json
20 | coverage.xml
21 | test_results/*
22 | *.creds
23 |
24 | # Home Assistant configuration
25 | config/*
26 | !config/configuration.yaml
27 |
--------------------------------------------------------------------------------
/tests/.ruff.toml:
--------------------------------------------------------------------------------
1 | # This extends our general Ruff rules specifically for tests
2 | extend = "../.ruff.toml"
3 |
4 | [lint]
5 |
6 | extend-ignore = [
7 |     "B904", # Use raise from to specify exception cause
8 |     "N815", # Variable {name} in class scope should not be mixedCase
9 |     "RUF018", # Avoid assignment expressions in assert statements
10 |     "SLF001", # Private member accessed: tests often need to exercise internals
11 | ]
12 |
--------------------------------------------------------------------------------
/scripts/helpers/es7/run_es:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # This script runs an Elasticsearch Docker container with the specified version.
4 |
5 | # Usage: run_es <esversion>
6 | # <esversion> - The version of Elasticsearch to run (e.g., 7.10.2)
7 |
8 | esversion=$1
9 |
10 | set -e
11 |
12 | docker run -p 9200:9200 --env discovery.type=single-node --name es01 --net elastic -q -d -m 1GB docker.elastic.co/elasticsearch/elasticsearch:"$esversion"
13 |
--------------------------------------------------------------------------------
/scripts/helpers/es8/reset_es_enrollment:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # This script resets the Elasticsearch enrollment token for Kibana.
4 |
5 | # Usage: /bin/bash ./reset_es_enrollment <escontainer>
6 | # <escontainer> - The container id of the Elasticsearch instance
7 |
8 | escontainer=$1
9 |
10 | token=$(docker exec -it "$escontainer" /usr/share/elasticsearch/bin/elasticsearch-create-enrollment-token -s kibana)
11 |
12 | echo -n "$token"
13 |
--------------------------------------------------------------------------------
/scripts/helpers/es8/run_es:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # This script runs an Elasticsearch Docker container with the specified version.
4 |
5 | # Usage: run_es <esversion>
6 | # <esversion> - The version of Elasticsearch to run (e.g., 7.10.2)
7 |
8 | esversion=$1
9 |
10 | set -e
11 |
12 | docker run -p 9200:9200 --name es01 --add-host="host.docker.internal:host-gateway" --net elastic -q -d -m 1GB docker.elastic.co/elasticsearch/elasticsearch:"$esversion"
13 |
--------------------------------------------------------------------------------
/scripts/helpers/es8/run_kb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # This script runs a Kibana Docker container with the specified version.
4 |
5 | # Usage: run_kb <kbversion>
6 | # <kbversion> - The version of Kibana to run (e.g., 7.10.2)
7 |
8 | kbversion=$1
9 |
10 | set -e
11 |
12 | docker run -p 5601:5601 --name kb01 --add-host="host.docker.internal:host-gateway" --env "SERVER_HOST=0.0.0.0" --net elastic -q -d -m 1GB docker.elastic.co/kibana/kibana:"$kbversion"
13 |
--------------------------------------------------------------------------------
/scripts/helpers/shared/pull_kb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # This script pulls the specified version of the Kibana Docker image from the elastic Docker registry.
4 |
5 | # Usage: /bin/bash ./pull_kb <kbversion>
6 | # <kbversion> - The version of Kibana to pull (e.g., 7.10.2)
7 | kbversion="$1"
8 |
9 | set -e
10 |
11 | start=$(date +%s)
12 |
13 | docker pull docker.elastic.co/kibana/kibana:"$kbversion" >/dev/null
14 |
15 | end=$(date +%s)
16 |
17 | echo -n " took $((end - start)) seconds"
18 | echo
19 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 |   - package-ecosystem: "github-actions"
4 |     directory: "/"
5 |     schedule:
6 |       interval: "weekly"
7 |
8 |   - package-ecosystem: "pip"
9 |     directory: "/"
10 |     schedule:
11 |       interval: "weekly"
12 |     ignore:
13 |       - dependency-name: "homeassistant"
14 |       - dependency-name: "elasticsearch8"
15 |
16 |   - package-ecosystem: "devcontainers"
17 |     directory: "/.devcontainer"
18 |     schedule:
19 |       interval: "weekly"
20 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 |
5 | ---
6 |
7 | **Is your feature request related to a problem? Please describe.**
8 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
9 |
10 | **Describe the solution you'd like**
11 | A clear and concise description of what you want to happen.
12 |
13 | **Additional context**
14 | Add any other context or screenshots about the feature request here.
15 |
--------------------------------------------------------------------------------
/scripts/helpers/shared/pull_es:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # This script pulls the specified version of the Elasticsearch Docker image from the elastic Docker registry.
4 |
5 | # Usage: /bin/bash ./pull_es <esversion>
6 | # <esversion> - The version of Elasticsearch to pull (e.g., 7.10.2)
7 | esversion=$1
8 |
9 | set -e
10 |
11 | start=$(date +%s)
12 |
13 | docker pull docker.elastic.co/elasticsearch/elasticsearch:"$esversion" >/dev/null
14 |
15 | end=$(date +%s)
16 |
17 | echo -n " took $((end - start)) seconds"
18 |
19 | echo
20 |
--------------------------------------------------------------------------------
/.github/workflows/cron.yml:
--------------------------------------------------------------------------------
1 | name: Build
2 | permissions:
3 |   contents: read
4 |
5 | on:
6 |   workflow_dispatch:
7 |   schedule:
8 |     - cron: '0 0 * * *'
9 |
10 | jobs:
11 |   validate:
12 |     runs-on: "ubuntu-latest"
13 |     name: Validate
14 |     steps:
15 |       - uses: "actions/checkout@v5"
16 |
17 |       - name: HACS validation
18 |         uses: "hacs/action@main"
19 |         with:
20 |           category: "integration"
21 |
22 |       - name: Hassfest validation
23 |         uses: "home-assistant/actions/hassfest@master"
24 |
--------------------------------------------------------------------------------
/scripts/helpers/es7/wait_for_es:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Wait for Elasticsearch to be ready
4 |
5 | # Usage: ./wait_for_es
6 |
7 | start=$(date +%s)
8 | while true; do
9 |     statuscode=$(curl -s -k -o /dev/null -w "%{http_code}" http://host.docker.internal:9200)
10 |     #echo "Elasticsearch status code: $statuscode"
11 |     echo -n "." # print a dot for each iteration
12 |     if [ "$statuscode" -eq "200" ]; then
13 |         break
14 |     fi
15 |     sleep 1
16 | done
17 | end=$(date +%s)
18 |
19 | echo -n " (took $((end - start)) seconds)"
20 |
21 | echo
22 |
--------------------------------------------------------------------------------
/scripts/helpers/es7/wait_for_kb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Wait for Kibana to be ready
4 |
5 | # Usage: ./wait_for_kb
6 |
7 | start=$(date +%s)
8 |
9 | while true; do
10 |     statuscode=$(curl -s -k -o /dev/null -w "%{http_code}" http://host.docker.internal:5601/app/home)
11 |     #echo "Kibana status code: $statuscode"
12 |     echo -n "." # print a dot for each iteration
13 |     if [ "$statuscode" -eq "200" ]; then
14 |         break
15 |     fi
16 |     sleep 1
17 | done
18 |
19 | end=$(date +%s)
20 |
21 | echo -n " (took $((end - start)) seconds)"
22 |
23 | echo
24 |
--------------------------------------------------------------------------------
/scripts/helpers/es8/wait_for_kb_available:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Wait for Kibana to be ready
4 |
5 | # Usage: ./wait_for_kb_available
6 |
7 | start=$(date +%s)
8 |
9 | while true; do
10 |     statuscode=$(curl -s -k -o /dev/null -w "%{http_code}" http://host.docker.internal:5601/api/status)
11 |     #echo "Kibana status code: $statuscode"
12 |     echo -n "." # print a dot for each iteration
13 |     if [ "$statuscode" -eq "200" ]; then
14 |         break
15 |     fi
16 |     sleep 1
17 | done
18 |
19 | end=$(date +%s)
20 |
21 | echo -n " (took $((end - start)) seconds)"
22 |
23 | echo
24 |
--------------------------------------------------------------------------------
/scripts/helpers/es8/wait_for_kb_setup:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Wait for Kibana to be ready for setup
4 |
5 | # Usage: ./wait_for_kb_setup
6 |
7 | start=$(date +%s)
8 |
9 | while true; do
10 |     statuscode=$(curl -s -k -o /dev/null -w "%{http_code}" http://host.docker.internal:5601/internal/interactive_setup/status)
11 |     #echo "Kibana status code: $statuscode"
12 |     echo -n "." # print a dot for each iteration
13 |     if [ "$statuscode" -eq "200" ]; then
14 |         break
15 |     fi
16 |     sleep 1
17 | done
18 |
19 | end=$(date +%s)
20 |
21 | echo -n " (took $((end - start)) seconds)"
22 |
23 | echo
24 |
--------------------------------------------------------------------------------
/.devcontainer/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM mcr.microsoft.com/vscode/devcontainers/python:3.13-bookworm
2 |
3 | RUN apt-get update && apt-get install -y apt-transport-https gnupg2 procps less curl
4 |
5 | COPY ".devcontainer/install-docker-cli.sh" install-docker-cli.sh
6 |
7 | RUN /bin/bash install-docker-cli.sh
8 |
9 | # Install Poetry for dependency management
10 | RUN pip install poetry~=2.2.1
11 |
12 | # Install peek utility
13 | RUN pip install es-peek
14 | RUN mkdir /home/vscode/.config/peek
15 |
16 | COPY ".devcontainer/peek" /home/vscode/.config/peek/
17 |
18 | RUN chown -R vscode:vscode /home/vscode/.config/peek
19 |
20 |
--------------------------------------------------------------------------------
/scripts/helpers/es8/get_kb_code:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # This script extracts the code from the logs of a Docker container running Kibana.
4 | # The code is in the format 'Go to http://0.0.0.0:5601/?code=317916 to get started.'
5 |
6 | # Usage: /bin/bash ./get_kb_code <kbcontainer>
7 | # <kbcontainer> - The name or ID of the Kibana container
8 |
9 | kbcontainer=$1
10 |
11 | # Pull out the code from the logs
12 | logs=$(docker logs "$kbcontainer")
13 |
14 | # Extract the code using a regular expression
15 | code=$(echo "$logs" | grep -oP 'code=\K[0-9]+')
16 |
17 | # Print the extracted code
18 | echo -n "$code"
19 |
--------------------------------------------------------------------------------
/scripts/helpers/es8/wait_for_es:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Wait for Elasticsearch to be ready
4 |
5 | # Usage: ./wait_for_es
6 |
7 | start=$(date +%s)
8 | while true; do
9 |     statuscode=$(curl -s -k -o /dev/null -w "%{http_code}" https://host.docker.internal:9200)
10 |     #echo "Elasticsearch status code: $statuscode"
11 |     echo -n "." # print a dot for each iteration
12 |
13 |     # When Security-Enabled ES is ready, it will return 401 Unauthorized
14 |     if [ "$statuscode" -eq "401" ]; then
15 |         break
16 |     fi
17 |     sleep 1
18 | done
19 | end=$(date +%s)
20 |
21 | echo -n " (took $((end - start)) seconds)"
22 |
23 | echo
24 |
--------------------------------------------------------------------------------
/custom_components/elasticsearch/manifest.json:
--------------------------------------------------------------------------------
1 | {
2 |   "domain": "elasticsearch",
3 |   "name": "Elasticsearch",
4 |   "after_dependencies": [
5 |     "hassio"
6 |   ],
7 |   "codeowners": [
8 |     "@legrego"
9 |   ],
10 |   "config_flow": true,
11 |   "dependencies": [],
12 |   "documentation": "https://github.com/legrego/homeassistant-elasticsearch",
13 |   "iot_class": "local_polling",
14 |   "issue_tracker": "https://github.com/legrego/homeassistant-elasticsearch/issues",
15 |   "loggers": [
16 |     "custom_components.elasticsearch"
17 |   ],
18 |   "quality_scale": "platinum",
19 |   "requirements": [
20 |     "elasticsearch8==8.14.0"
21 |   ],
22 |   "version": "2.0.0"
23 | }
24 |
--------------------------------------------------------------------------------
/scripts/helpers/es8/reset_es_pwd:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # This script resets the elastic user password for Elasticsearch.
4 |
5 | # Usage: /bin/bash ./reset_es_pwd <escontainer>
6 | # <escontainer> - The container id of the Elasticsearch instance
7 |
8 | escontainer=$1
9 |
10 | return=$(docker exec -it "$escontainer" /usr/share/elasticsearch/bin/elasticsearch-reset-password -u elastic --silent --batch)
11 |
12 | # password is now a multiline return, filter out any lines that begin with WARNING:
13 | password=$(echo -n "$return" | grep -v 'WARNING: ')
14 |
15 | # return the password and trim any leading/trailing whitespace
16 | echo -n "$password" | tr -d '[:space:]'
17 |
--------------------------------------------------------------------------------
/scripts/helpers/shared/create_hass_writer_user_es:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Create the hass_writer user
4 |
5 | # Usage: /bin/bash ./create_hass_writer_user_es <esurl> <esusername> <espassword>
6 | # <esurl> - The URL of the Elasticsearch instance (e.g., https://localhost:9200)
7 | # <esusername> - The existing admin user used to create the new user (e.g., hass)
8 | # <espassword> - The password for the admin user used to create the new user (e.g., hass)
9 |
10 | esurl=$1
11 | esusername=$2
12 | espassword=$3
13 |
14 | set -e
15 |
16 | dir=$(dirname "$0")
17 | /bin/bash "$dir/create_user_es" "$esurl" "$esusername" "$espassword" "hass_writer" "Home Assistant Writer" "changeme" "hass_writer"
18 |
--------------------------------------------------------------------------------
/.devcontainer/install-docker-cli.sh:
--------------------------------------------------------------------------------
1 | # Add Docker's official GPG key:
2 | apt-get update
3 | apt-get install ca-certificates curl
4 | install -m 0755 -d /etc/apt/keyrings
5 | curl -fsSL https://download.docker.com/linux/debian/gpg -o /etc/apt/keyrings/docker.asc
6 | chmod a+r /etc/apt/keyrings/docker.asc
7 |
8 | # Add the repository to Apt sources:
9 | echo \
10 |   "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/debian \
11 |   $(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \
12 |   tee /etc/apt/sources.list.d/docker.list > /dev/null
13 | apt-get update
14 |
15 | apt-get install docker-ce-cli -y
16 |
17 | groupadd docker
18 |
19 | usermod -aG docker vscode
20 |
21 | newgrp docker
--------------------------------------------------------------------------------
/.devcontainer/post-install.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -ex
3 |
4 | # Convenience workspace directory for later use
5 | WORKSPACE_DIR=$(pwd)
6 |
7 | # Change some Poetry settings to better deal with working in a container
8 | poetry config cache-dir ${WORKSPACE_DIR}/.cache
9 | poetry config virtualenvs.in-project true
10 |
11 | # Now install all dependencies
12 | poetry install
13 |
14 | if [[ ! -d "${WORKSPACE_DIR}/config" ]]; then
15 |     mkdir -p "${WORKSPACE_DIR}/config"
16 |     cp "${WORKSPACE_DIR}/.devcontainer/configuration.yaml" "${WORKSPACE_DIR}/config/configuration.yaml"
17 |     ${WORKSPACE_DIR}/.venv/bin/hass --config "${WORKSPACE_DIR}/config" --script ensure_config
18 | fi
19 |
20 | export PYTHONPATH="${PYTHONPATH}:${WORKSPACE_DIR}/custom_components"
21 |
--------------------------------------------------------------------------------
/scripts/clean_ek:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | echo "Performing Environment Clean-up"
4 | dir=$(dirname "$0")
5 |
6 | containers=("kb01" "es01")
7 |
8 | for container in "${containers[@]}"; do
9 |     # Check if the container exists
10 |     if ! docker ps -a | grep -q "$container"; then
11 |         continue
12 |     fi
13 |
14 |     echo "- Stopping and removing container $container"
15 |     docker kill "$container" >/dev/null
16 |     docker rm "$container" >/dev/null
17 | done
18 |
19 | # If the network exists, remove it
20 | networks=$(docker network ls | grep -o "elastic")
21 |
22 | if [ -n "$networks" ]; then
23 |     echo "- Removing network $(echo "$networks" | tr -d '\n')"
24 |     docker network rm elastic >/dev/null
25 | fi
26 |
27 | echo "- Clearing cached credentials in es_env"
28 |
29 | rm -rf "$dir/../es_env"
30 |
--------------------------------------------------------------------------------
/custom_components/elasticsearch/diagnostics.py:
--------------------------------------------------------------------------------
1 | """Diagnostics for the Elasticsearch integration."""
2 |
3 | from typing import Any
4 |
5 | from homeassistant.components.diagnostics import async_redact_data
6 | from homeassistant.config_entries import ConfigEntry
7 | from homeassistant.const import (
8 |     CONF_API_KEY,
9 |     CONF_PASSWORD,
10 |     CONF_USERNAME,
11 | )
12 | from homeassistant.core import HomeAssistant
13 |
14 | CONFIG_TO_REDACT = {CONF_API_KEY, CONF_PASSWORD, CONF_USERNAME}
15 |
16 |
17 | async def async_get_config_entry_diagnostics(hass: HomeAssistant, entry: ConfigEntry) -> dict[str, Any]:  # noqa: ARG001
18 |     """Return diagnostics for the config entry."""
19 |
20 |     return {
21 |         "data": async_redact_data(entry.data, CONFIG_TO_REDACT),
22 |         "options": async_redact_data(entry.options, CONFIG_TO_REDACT),
23 |     }
24 |
--------------------------------------------------------------------------------
/scripts/helpers/es8/new_kb_dataview:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # This script is used to create a "Homeassistant Metrics" dataview in Kibana
4 |
5 |
6 | kburl=$1
7 | kbusername=$2
8 | kbpassword=$3
9 |
10 | # set payload equal to:
11 | # curl -X POST api/data_views/data_view
12 | # {
13 | #     "data_view": {
14 | #         "title": "metrics-homeassistant.*",
15 | #         "name": "Homeassistant Metrics",
16 | #         "timeFieldName": "@timestamp",
17 | #     }
18 | # }
19 |
20 | # Create the JSON payload for Dataview creation
21 | payload=$(echo -n "{\"data_view\":{\"title\":\"metrics-homeassistant.*\",\"allowNoIndex\":true,\"timeFieldName\":\"@timestamp\",\"name\":\"Homeassistant Metrics\"}}")
22 |
23 | # Send the payload to Kibana using curl
24 | curl -k "$kburl/api/data_views/data_view" -X POST -u "$kbusername:$kbpassword" -H "kbn-xsrf: true" -H "Content-Type: application/json" -d "$payload"
25 |
--------------------------------------------------------------------------------
/.github/workflows/docs.yml:
--------------------------------------------------------------------------------
1 | name: docs
2 | on:
3 |   push:
4 |     branches:
5 |       - main
6 | permissions:
7 |   contents: write
8 | jobs:
9 |   deploy:
10 |     runs-on: ubuntu-latest
11 |     steps:
12 |       - uses: actions/checkout@v5
13 |       - name: Configure Git Credentials
14 |         run: |
15 |           git config user.name github-actions[bot]
16 |           git config user.email 41898282+github-actions[bot]@users.noreply.github.com
17 |       - uses: actions/setup-python@v6
18 |         with:
19 |           python-version: 3.13
20 |       - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV
21 |       - uses: actions/cache@v4
22 |         with:
23 |           key: mkdocs-material-${{ env.cache_id }}
24 |           path: .cache
25 |           restore-keys: |
26 |             mkdocs-material-
27 |       - run: pip install mkdocs-material
28 |       - run: mkdocs gh-deploy --force
29 |
--------------------------------------------------------------------------------
/tests/snapshots/test_diagnostics.ambr:
--------------------------------------------------------------------------------
1 | # serializer version: 1
2 | # name: test_async_get_config_entry_diagnostics[options0-Only URL and username]
3 |   dict({
4 |     'data': dict({
5 |       'url': 'https://example.com',
6 |       'username': '**REDACTED**',
7 |     }),
8 |     'options': dict({
9 |     }),
10 |   })
11 | # ---
12 | # name: test_async_get_config_entry_diagnostics[options0-Only URL]
13 |   dict({
14 |     'data': dict({
15 |       'url': 'https://example.com',
16 |     }),
17 |     'options': dict({
18 |     }),
19 |   })
20 | # ---
21 | # name: test_async_get_config_entry_diagnostics[options0-URL and all auth params]
22 |   dict({
23 |     'data': dict({
24 |       'api_key': '**REDACTED**',
25 |       'password': '**REDACTED**',
26 |       'url': 'https://example.com',
27 |       'username': '**REDACTED**',
28 |     }),
29 |     'options': dict({
30 |     }),
31 |   })
32 | # ---
33 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 |
5 | ---
6 |
7 | **Environment**
8 | Home-Assistant version: x.yy
9 | Elasticsearch version: x.y.z
10 |
11 | Relevant `configuration.yml` settings:
12 | ```yml
13 | # Do not include your Elasticsearch URL, credentials, or any other sensitive information
14 | elastic:
15 |   ...
16 | ```
17 |
18 | **Describe the bug**
19 | A clear and concise description of what the bug is.
20 |
21 | **To Reproduce**
22 | Steps to reproduce the behavior:
23 | 1. Go to '...'
24 | 2. Click on '....'
25 | 3. Scroll down to '....'
26 | 4. See error
27 |
28 | **Expected behavior**
29 | A clear and concise description of what you expected to happen.
30 |
31 | **Screenshots**
32 | If applicable, add screenshots to help explain your problem.
33 |
34 | **Additional context**
35 | Add any other context about the problem here.
36 |
--------------------------------------------------------------------------------
/docs/install.md:
--------------------------------------------------------------------------------
1 | # Installation
2 |
3 | ## HACS (recommended)
4 | This component is available in [HACS](https://hacs.xyz/){:target="_blank"} (Home Assistant Community Store).
5 |
6 | 1. Install HACS if you don't have it already
7 | 2. Open HACS in Home Assistant
8 | 3. Go to the "Integrations" section
9 | 4. Click the button with the "+" icon
10 | 5. Search for "Elasticsearch"
11 |
12 | ## Manual
13 | 1. Download the latest release from the [releases page](https://github.com/legrego/homeassistant-elasticsearch/releases)
14 | 2. Extract the contents of the zip file
15 | 3. Copy the `custom_components` directory to your `$HASS_CONFIG/custom_components` directory, where `$HASS_CONFIG` is the location on your machine where Home-Assistant lives. Example: `/home/pi/.homeassistant` and `/home/pi/.homeassistant/custom_components`. You may have to create the `custom_components` directory yourself.
16 |
17 | You must restart Home Assistant after installation.
18 |
19 | Next: [Configuration](./configure.md)
--------------------------------------------------------------------------------
/scripts/helpers/shared/create_hass_writer_role_es:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Create the hass_writer role
4 |
5 | # Usage: /bin/bash ./create_hass_writer_role_es <esurl> <esusername> <espassword>
6 | # <esurl> - The URL of the Elasticsearch instance (e.g., https://localhost:9200)
7 | # <esusername> - The existing admin user used to create the new role (e.g., hass)
8 | # <espassword> - The password for the admin user used to create the new role (e.g., hass)
9 |
10 | esurl=$1
11 | esusername=$2
12 | espassword=$3
13 |
14 | set -e
15 |
16 | dir=$(dirname "$0")
17 | /bin/bash "$dir/create_role_es" "$esurl" "$esusername" "$espassword" "hass_writer" '{
18 |     "cluster": [
19 |         "manage_index_templates",
20 |         "monitor"
21 |     ],
22 |     "indices": [
23 |         {
24 |             "names": [
25 |                 "metrics-homeassistant.*"
26 |             ],
27 |             "privileges": [
28 |                 "manage",
29 |                 "index",
30 |                 "create_index",
31 |                 "create"
32 |             ]
33 |         }
34 |     ]
35 | }'
36 |
--------------------------------------------------------------------------------
/scripts/helpers/shared/create_role_es:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Create an elasticsearch role
4 |
5 | # Usage: /bin/bash ./create_role_es <esurl> <esusername> <espassword> <newrolename> <newroledefinition>
6 | # <esurl> - The URL of the Elasticsearch instance (e.g., https://localhost:9200)
7 | # <esusername> - The existing admin user used to create the new role (e.g., hass)
8 | # <espassword> - The password for the admin user used to create the new role (e.g., hass)
9 | # <newrolename> - The name of the new role to create (e.g., hass)
10 | # <newroledefinition> - The definition of the new role to create (e.g., '{"cluster":["all"],"indices":[{"names":[".kibana*"],"privileges":["all"]}]}')
11 |
12 | esurl=$1
13 | esusername=$2
14 | espassword=$3
15 | newrolename=$4
16 | newroledefinition=$5
17 |
18 | set -e
19 |
20 | AUTH=$(echo -ne "$esusername:$espassword" | base64 --wrap 0)
21 |
22 | curl -s -X POST -k -H 'Content-Type: application/json' --header "Authorization: Basic $AUTH" -d "$newroledefinition" "$esurl/_security/role/$newrolename" > /dev/null
23 |
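24 | # Illustrative invocation (hypothetical values, modeled on the role definition used by create_hass_writer_role_es):
25 | # /bin/bash ./create_role_es https://localhost:9200 elastic changeme hass_writer '{"cluster":["monitor"],"indices":[{"names":["metrics-homeassistant.*"],"privileges":["create"]}]}'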
--------------------------------------------------------------------------------
/scripts/helpers/es8/wait_for_es_green:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Use GET /_cluster/health?wait_for_status=green&timeout=60s to wait for the cluster to be in a green state.
4 |
5 | # Usage: ./wait_for_es_green <esurl> <esusername> <espassword>
6 | # <esurl> - The URL of the Elasticsearch instance (e.g., https://localhost:9200)
7 | # <esusername> - The existing admin user used to test cluster status (e.g., elastic)
8 | # <espassword> - The password for the admin user used to test cluster status
9 |
10 | esurl=$1
11 | esusername=$2
12 | espassword=$3
13 |
14 | start=$(date +%s)
15 |
16 | # body will contain timed_out = true if there was a timeout, and if there is we should fail
17 | # https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-health.html
18 |
19 | timed_out=$(curl -s -k -u "$esusername:$espassword" -X GET "$esurl/_cluster/health?wait_for_status=green&timeout=60s" | jq -r '.timed_out')
20 |
21 | if [ "$timed_out" == "true" ]; then
22 |     echo "Timed out waiting for Elasticsearch to be ready"
23 |     exit 1
24 | fi
25 |
26 | end=$(date +%s)
27 |
28 | echo -n " (took $((end - start)) seconds)"
29 |
30 | echo
31 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018 Larry Gregory
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/scripts/helpers/shared/create_hass_writer_apikey_es:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Create the hass_writer api key
4 |
5 | # Usage: /bin/bash ./create_hass_writer_apikey_es <esurl> <esusername> <espassword>
6 | # <esurl> - The URL of the Elasticsearch instance (e.g., https://localhost:9200)
7 | # <esusername> - The existing admin user used to create the new API key (e.g., hass)
8 | # <espassword> - The password for the admin user used to create the new API key (e.g., hass)
9 |
10 | esurl=$1
11 | esusername=$2
12 | espassword=$3
13 |
14 | set -e
15 |
16 | dir=$(dirname "$0")
17 | /bin/bash "$dir/create_apikey_es" "$esurl" "$esusername" "$espassword" "home_assistant_component" '{
18 |     "hass_writer": {
19 |         "cluster": [
20 |             "manage_index_templates",
21 |             "monitor"
22 |         ],
23 |         "indices": [
24 |             {
25 |                 "names": [
26 |                     "metrics-homeassistant.*"
27 |                 ],
28 |                 "privileges": [
29 |                     "manage",
30 |                     "index",
31 |                     "create_index",
32 |                     "create"
33 |                 ]
34 |             }
35 |         ]
36 |     }
37 | }'
38 |
--------------------------------------------------------------------------------
/tests/test_diagnostics.py:
--------------------------------------------------------------------------------
1 | """Tests for the Elasticsearch integration diagnostics."""
2 |
3 | import pytest
4 | from custom_components.elasticsearch.diagnostics import async_get_config_entry_diagnostics
5 | from homeassistant.const import (
6 |     CONF_API_KEY,
7 |     CONF_PASSWORD,
8 |     CONF_URL,
9 |     CONF_USERNAME,
10 | )
11 |
12 |
13 | @pytest.mark.parametrize(
14 |     "data",
15 |     [
16 |         {
17 |             CONF_URL: "https://example.com",
18 |             CONF_USERNAME: "test_user",
19 |             CONF_PASSWORD: "test_password",
20 |             CONF_API_KEY: "test_api_key",
21 |         },
22 |         {
23 |             CONF_URL: "https://example.com",
24 |             CONF_USERNAME: "test_user",
25 |         },
26 |         {
27 |             CONF_URL: "https://example.com",
28 |         },
29 |     ],
30 |     ids=["URL and all auth params", "Only URL and username", "Only URL"],
31 | )
32 | @pytest.mark.parametrize("options", [{}])
33 | async def test_async_get_config_entry_diagnostics(hass, config_entry, data, options, snapshot):
34 |     """Test async_get_config_entry_diagnostics function."""
35 |
36 |     result = await async_get_config_entry_diagnostics(hass, config_entry)
37 |
38 |     assert result == snapshot
39 |
--------------------------------------------------------------------------------
/custom_components/elasticsearch/errors.py:
--------------------------------------------------------------------------------
1 | """Errors for the Elastic component."""
2 |
3 | from homeassistant.exceptions import HomeAssistantError
4 |
5 |
6 | class ESIntegrationException(HomeAssistantError):  # noqa: N818
7 |     """Base class for Elastic exceptions."""
8 |
9 |
10 | class ESIntegrationConnectionException(ESIntegrationException):
11 |     """Base class for Elasticsearch exceptions."""
12 |
13 |
14 | class AuthenticationRequired(ESIntegrationConnectionException):
15 |     """Cluster requires authentication."""
16 |
17 |
18 | class InsufficientPrivileges(AuthenticationRequired):
19 |     """Credentials are lacking the required privileges."""
20 |
21 |
22 | class CannotConnect(ESIntegrationConnectionException):
23 |     """Unable to connect to the cluster."""
24 |
25 |
26 | class ServerError(CannotConnect):
27 |     """Server Error."""
28 |
29 |
30 | class ClientError(CannotConnect):
31 |     """Client Error."""
32 |
33 |
34 | class SSLError(CannotConnect):
35 |     """Error related to SSL."""
36 |
37 |
38 | class UntrustedCertificate(SSLError):
39 |     """Received an untrusted certificate error."""
40 |
41 |
42 | class UnsupportedVersion(ESIntegrationConnectionException):
43 |     """Connected to an unsupported version of Elasticsearch."""
44 |
--------------------------------------------------------------------------------
/tests/test_utils.py:
--------------------------------------------------------------------------------
1 | """Tests for the utility functions of the Elasticsearch Integration."""
2 |
3 | from custom_components.elasticsearch.utils import flatten_dict
4 |
5 |
6 | def test_flatten_dict():
7 |     """Test the flatten_dict function."""
8 |     # Test case 1: Flattening a nested dictionary with default separator
9 |     nested_dict = {
10 |         "a": 1,
11 |         "b": {
12 |             "c": 2,
13 |             "d": {
14 |                 "e": 3,
15 |             },
16 |         },
17 |         "f": 4,
18 |     }
19 |     expected_result = {
20 |         "a": 1,
21 |         "b.c": 2,
22 |         "b.d.e": 3,
23 |         "f": 4,
24 |     }
25 |     assert flatten_dict(nested_dict) == expected_result
26 |
27 |     # Test case 2: Flattening a nested dictionary with lists, sets, and tuples in various locations
28 |     nested_dict = {
29 |         "a": 1,
30 |         "b": {
31 |             "c": [2, 3, 4],
32 |             "d": {
33 |                 "e": (5, 6, 7),
34 |             },
35 |         },
36 |         "f": {8, 9, 10},
37 |     }
38 |
39 |     expected_result = {
40 |         "a": 1,
41 |         "b.c": [2, 3, 4],
42 |         "b.d.e": (5, 6, 7),
43 |         "f": {8, 9, 10},
44 |     }
45 |
46 |     assert flatten_dict(nested_dict) == expected_result
47 |
--------------------------------------------------------------------------------
/.github/workflows/combined.yml:
--------------------------------------------------------------------------------
1 | name: "Validate and Test"
2 | permissions:
3 |   contents: read
4 | on:
5 |   workflow_dispatch:
6 | jobs:
7 |   ci:
8 |     runs-on: ubuntu-latest
9 |     steps:
10 |       - uses: actions/checkout@v5
11 |         name: Download repo
12 |         with:
13 |           fetch-depth: 0
14 |       - uses: actions/setup-python@v6
15 |         name: Setup Python
16 |       - uses: actions/cache@v4
17 |         name: Cache
18 |         with:
19 |           path: |
20 |             ~/.cache/pip
21 |           key: custom-component-ci
22 |
23 |       - name: Install dependencies
24 |         run: |
25 |           python3 -m pip install poetry~=2.2.1
26 |           poetry install
27 |       - uses: "actions/checkout@v5"
28 |       - name: HACS validation
29 |         uses: "hacs/action@main"
30 |         with:
31 |           category: "integration"
32 |
33 |       - name: Hassfest validation
34 |         uses: "home-assistant/actions/hassfest@master"
35 |
36 |       - name: Test with pytest
37 |         run: |
38 |           poetry run pytest
39 |       - name: Test with flake8
40 |         run: |
41 |           poetry run flake8 .
42 |       - name: Test with isort
43 |         run: |
44 |           poetry run isort .
45 |       - name: Test with black
46 |         run: |
47 |           poetry run black .
48 |
--------------------------------------------------------------------------------
/scripts/helpers/es8/enroll_kb:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # This script is used to enroll a Kibana instance with Elasticsearch using an enrollment token.
4 | # It takes two arguments: the enrollment token and the Kibana code.
5 |
6 | # Usage: /bin/bash ./enroll_kb <enrollment_token> <kibana_code>
7 | # - enrollment_token: The enrollment token to use for enrolling Kibana
8 | # - kibana_code: The Kibana code to use for enrolling Kibana
9 |
10 | enrollment_token=$1
11 | kibana_code=$2
12 |
13 | # base64 decode the enrollment token
14 | decoded_token=$(echo "$enrollment_token" | base64 --decode -i)
15 |
16 | # Extract the Elasticsearch address from the decoded token
17 | adr=$(echo "$decoded_token" | jq -r '.adr[0]')
18 |
19 | # Extract the fingerprint from the decoded token
20 | fgr=$(echo "$decoded_token" | jq -r '.fgr')
21 |
22 | # Extract the key from the decoded token and base64 encode it
23 | key=$(echo "$decoded_token" | jq -r '.key')
24 | key=$(echo -n "$key" | base64)
25 |
26 | # Create the JSON payload for Kibana enrollment
27 | payload=$(echo -n "{\"hosts\":[\"https://$adr\"],\"code\":\"$kibana_code\",\"apiKey\":\"$key\",\"caFingerprint\":\"$fgr\"}")
28 |
29 | # Send the payload to Kibana using curl
30 | curl -k "http://host.docker.internal:5601/internal/interactive_setup/enroll" -H "kbn-xsrf: true" -H "Content-Type: application/json" -d "$payload"
31 |
--------------------------------------------------------------------------------
/scripts/helpers/shared/create_user_es:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Create an elasticsearch user account (e.g., a user named hass with a password of changeme).
4 |
5 | # Usage: /bin/bash ./create_user_es <esurl> <esusername> <espassword> <newusername> <newuserfullname> <newuserpassword> <newuserroles>
6 | # <esurl> - The URL of the Elasticsearch instance (e.g., https://localhost:9200)
7 | # <esusername> - The existing admin user used to create the new user (e.g., hass)
8 | # <espassword> - The password for the admin user used to create the new user (e.g., hass)
9 | # <newusername> - The new user to create (e.g., hass)
10 | # <newuserfullname> - The full name of the new user (e.g., Home Assistant)
11 | # <newuserpassword> - The password for the new user (e.g., changeme)
12 | # <newuserroles> - The roles to assign to the new user (e.g., superuser)
13 |
14 | esurl=$1
15 | esusername=$2
16 | espassword=$3
17 | newusername=$4
18 | newuserfullname=$5
19 | newuserpassword=$6
20 | newuserroles=$7
21 |
22 | set -e
23 |
24 | AUTH=$(echo -ne "$esusername:$espassword" | base64 --wrap 0)
25 |
26 | # Create a new user account
27 | curl -s -X POST -k -H "Content-Type: application/json" --header "Authorization: Basic $AUTH" -k "$esurl/_security/user/$newusername" -d '{
28 |     "password" : "'"$newuserpassword"'",
29 |     "full_name" : "'"$newuserfullname"'",
30 |     "roles" : [ "'"$newuserroles"'" ]
31 | }' >/dev/null
32 |
--------------------------------------------------------------------------------
/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
1 | {
2 |     "context": "..",
3 |     "customizations": {
4 |         "vscode": {
5 |             "extensions": [
6 |                 "charliermarsh.ruff",
7 |                 "esbenp.prettier-vscode",
8 |                 "GitHub.copilot",
9 |                 "GitHub.vscode-pull-request-github",
10 |                 "ms-python.pylint",
11 |                 "ms-python.vscode-pylance",
12 |                 "redhat.vscode-yaml",
13 |                 "visualstudioexptteam.vscodeintellicode"
14 |             ]
15 |         }
16 |     },
17 |     "dockerFile": "Dockerfile",
18 |     "features": {
19 |         "ghcr.io/wxw-matt/devcontainer-features/command_runner:0": {},
20 |         "github-cli": "latest"
21 |     },
22 |     "forwardPorts": [
23 |         5601,
24 |         8123,
25 |         9200
26 |     ],
27 |     "initializeCommand": "ls",
28 |     "mounts": [
29 |         "source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind",
30 |         "source=${localEnv:HOME}/.ssh,target=/root/.ssh,readonly,type=bind"
31 |     ],
32 |     "name": "Elasticsearch integration development",
33 |     "portsAttributes": {
34 |         "5601": {
35 |             "label": "Kibana",
36 |             "onAutoForward": "notify"
37 |         },
38 |         "8123": {
39 |             "label": "Home Assistant",
40 |             "onAutoForward": "notify"
41 |         },
42 |         "9200": {
43 |             "label": "Elasticsearch",
44 |             "onAutoForward": "notify"
45 |         }
46 |     },
47 |     "postCreateCommand": "bash ./.devcontainer/post-install.sh",
48 |     "remoteUser": "vscode",
49 |     "runArgs": [
50 |         "--add-host=host.docker.internal:host-gateway"
51 |     ]
52 | }
53 |
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Introduction
3 | ---
4 |
5 | # Elasticsearch Component for Home-Assistant
6 |
7 | Publish Home Assistant events to your [Elasticsearch](https://elastic.co) cluster!
8 |
9 | ## Features
10 |
11 | - Efficiently publishes Home-Assistant events to Elasticsearch using the Bulk API
12 | - Automatically sets up Datastreams using Time Series Data Streams ("TSDS") and Datastream Lifecycle Management ("DLM")
13 | - Supports Elastic's [stack security features](https://www.elastic.co/elastic-stack/security) via optional username, password, and API keys
14 | - Selectively publish events based on labels, entities, devices, or areas
15 |
16 | ## Compatibility
17 |
18 | - Elasticsearch 8.14+ (Self, [Cloud](https://www.elastic.co/cloud), or [Serverless](https://www.elastic.co/docs/current/serverless)).
19 | - [Elastic Common Schema version 1.0.0](https://github.com/elastic/ecs/releases/tag/v1.0.0)
20 | - [Home Assistant Community Store](https://github.com/custom-components/hacs)
21 | - Home Assistant >= 2025.6
22 |
23 | ## Older versions
24 |
25 | [Version `1.0.0`](https://github.com/legrego/homeassistant-elasticsearch/releases/tag/v1.0.0) includes support for 7.11 to 8.13. No features or bugfixes will be backported to this version.
26 | [Version `0.4.0`](https://github.com/legrego/homeassistant-elasticsearch/releases/tag/v0.4.0) includes support for versions of Elasticsearch older than 7.11. No features or bugfixes will be backported to this version.
27 |
--------------------------------------------------------------------------------
/scripts/helpers/shared/create_apikey_es:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | # Create an elasticsearch api key
4 |
5 | # Usage: /bin/bash ./create_apikey_es <esurl> <esusername> <espassword> <newapikeyname> <newapiroledescriptor>
6 | # <esurl> - The URL of the Elasticsearch instance (e.g., https://localhost:9200)
7 | # <esusername> - The existing admin user used to create the new API key (e.g., hass)
8 | # <espassword> - The password for the admin user used to create the new API key (e.g., hass)
9 | # <newapikeyname> - The name of the new api key to create (e.g., hass)
10 | # <newapiroledescriptor> - The definition of the new api key to create
11 |
12 | esurl=$1
13 | esusername=$2
14 | espassword=$3
15 | newapikeyname=$4
16 | newapiroledescriptor=$5
17 |
18 | set -e
19 |
20 | AUTH=$(echo -ne "$esusername:$espassword" | base64 --wrap 0)
21 |
22 | BODY="{
23 |     \"name\": \"$newapikeyname\",
24 |     \"role_descriptors\": $newapiroledescriptor
25 | }"
26 |
27 | Response=$(curl -s -X POST -k -H 'Content-Type: application/json' --header "Authorization: Basic $AUTH" -d "$BODY" "$esurl/_security/api_key")
28 |
29 | # Response Format is
30 | # {
31 | #     "id": "VuaCfGcBCdbkQm-e5aOx",
32 | #     "name": "my-api-key",
33 | #     "expiration": 1544068612110,
34 | #     "api_key": "ui2lp2axTNmsyakw9tvNnw",
35 | #     "encoded": "VnVhQ2ZHY0JDZGJrUW0tZTVhT3g6dWkybHAyYXhUTm1zeWFrdzl0dk5udw=="
36 | # }
37 |
38 | # Return just the encoded API key and strip any new lines
39 | echo $Response | jq -r '.encoded' | tr -d '\n'
--------------------------------------------------------------------------------
/.vscode/launch.json:
--------------------------------------------------------------------------------
1 | {
2 |     // Use IntelliSense to learn about possible attributes.
3 |     // Hover to view descriptions of existing attributes.
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "name": "Debug HA with Elasticsearch Component", 9 | "type": "debugpy", 10 | "request": "launch", 11 | "module": "homeassistant", 12 | "justMyCode": true, 13 | "args": [ 14 | "--debug", 15 | "-c", 16 | "config" 17 | ] 18 | }, 19 | { 20 | "name": "Python: Debug Tests", 21 | "type": "debugpy", 22 | "request": "launch", 23 | "purpose": [ 24 | "debug-test" 25 | ], 26 | "console": "integratedTerminal", 27 | "justMyCode": true, 28 | "args": [ 29 | "tests", 30 | "--continue-on-collection-errors" 31 | ] 32 | }, 33 | { 34 | "name": "Python: Run Tests", 35 | "type": "debugpy", 36 | "request": "launch", 37 | "module": "pytest", 38 | "console": "integratedTerminal", 39 | "justMyCode": true, 40 | "args": [ 41 | "tests", 42 | "--continue-on-collection-errors" 43 | ] 44 | }, 45 | ] 46 | } 47 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: Elasticsearch Component for Home Assistant 2 | nav: 3 | - 'index.md' 4 | - 'install.md' 5 | - 'configure.md' 6 | - 'using_kibana.md' 7 | - 'ingest_advanced.md' 8 | theme: 9 | name: material 10 | palette: 11 | # Palette toggle for automatic mode 12 | - media: "(prefers-color-scheme)" 13 | toggle: 14 | icon: material/brightness-auto 15 | name: Switch to light mode 16 | 17 | # Palette toggle for light mode 18 | - media: "(prefers-color-scheme: light)" 19 | scheme: default 20 | toggle: 21 | icon: material/brightness-7 22 | name: Switch to dark mode 23 | 24 | # Palette toggle for dark mode 25 | - media: "(prefers-color-scheme: dark)" 26 | scheme: slate 27 | toggle: 28 | icon: material/brightness-4 29 | name: Switch to system preference 30 | features: 31 | - content.code.annotate 32 | - content.code.copy 33 | - content.tabs.link 34 | - navigation.path 35 | - toc.follow 36 | markdown_extensions: 37 | - admonition 38 | - pymdownx.details 39 | - toc: 40 | permalink: '#' 41 | - attr_list 42 | - pymdownx.superfences: 43 | custom_fences: 44 | - name: mermaid 45 | class: mermaid 46 | format: !!python/name:pymdownx.superfences.fence_code_format 47 | - pymdownx.tabbed: 48 | alternate_style: true 49 | - pymdownx.emoji: 50 | emoji_index: !!python/name:material.extensions.emoji.twemoji 51 | emoji_generator: !!python/name:material.extensions.emoji.to_svg -------------------------------------------------------------------------------- /custom_components/elasticsearch/encoder.py: -------------------------------------------------------------------------------- 1 | """Custom JSON encoder for Elasticsearch.""" 2 | 3 | import json 4 | from typing import Any 5 | 6 | from elasticsearch8.serializer import JSONSerializer 7 | 8 | 9 | def convert_set_to_list(data: Any) -> Any: 10 | """Convert set to list.""" 11 | 12 | if isinstance(data, set): 13 | output = [convert_set_to_list(item) for item in data] 14 | output.sort() 15 | return output 16 | 17 | if isinstance(data, dict): 18 | return json.dumps({key: convert_set_to_list(value) for key, value in data.items()}) 19 | 20 | if isinstance(data, list): 21 | return [convert_set_to_list(item) for item in data] 22 | 23 | if isinstance(data, tuple): 24 | return tuple(convert_set_to_list(item) for item in data) 25 | 26 | return data 27 | 28 | 29 | class Serializer(JSONSerializer): 30 | """JSONSerializer which serializes sets to lists.""" 31 | 32 | def json_dumps(self, data: Any) 
-> bytes: 33 | """Serialize data to JSON.""" 34 | 35 | return json.dumps( 36 | data, default=self.default, ensure_ascii=False, separators=(",", ":"), cls=Encoder 37 | ).encode("utf-8", "surrogatepass") 38 | 39 | def default(self, data: Any) -> Any: 40 | """Entry point.""" 41 | 42 | return super().default(convert_set_to_list(data)) 43 | 44 | 45 | class Encoder(json.JSONEncoder): 46 | """JSONSerializer which serializes sets to lists.""" 47 | 48 | def default(self, o: Any) -> Any: 49 | """Entry point.""" 50 | 51 | return super().default(convert_set_to_list(o)) 52 | -------------------------------------------------------------------------------- /tests/snapshots/test_entity_details.ambr: -------------------------------------------------------------------------------- 1 | # serializer version: 1 2 | # name: Test_ExtendedDeviceEntry.test_to_dict 3 | dict({ 4 | 'area.floor.id': 'device_floor', 5 | 'area.floor.name': 'device floor', 6 | 'area.id': 'device_area', 7 | 'area.name': 'device area', 8 | 'id': 'very_unique_device_id', 9 | 'labels': list([ 10 | 'device label 1', 11 | 'device label 2', 12 | 'device label 3', 13 | ]), 14 | 'name': 'device name', 15 | }) 16 | # --- 17 | # name: Test_ExtendedRegistryEntry.test_to_dict 18 | dict({ 19 | 'area': dict({ 20 | 'floor': dict({ 21 | 'id': 'entity_floor', 22 | 'name': 'entity floor', 23 | }), 24 | 'id': 'entity_area', 25 | 'name': 'entity area', 26 | }), 27 | 'device': dict({ 28 | 'area': dict({ 29 | 'floor': dict({ 30 | 'id': 'device_floor', 31 | 'name': 'device floor', 32 | }), 33 | 'id': 'device_area', 34 | 'name': 'device area', 35 | }), 36 | 'id': 'very_unique_device_id', 37 | 'labels': list([ 38 | 'device label 1', 39 | 'device label 2', 40 | 'device label 3', 41 | ]), 42 | 'name': 'device name', 43 | }), 44 | 'device_class': 'user-modified entity device class', 45 | 'domain': 'counter', 46 | 'id': 'counter.entity_object_id', 47 | 'labels': list([ 48 | 'entity label 1', 49 | 'entity label 2', 50 | 'entity label 3', 51 | ]), 52 | 'name': 'user-modified entity name', 53 | 'platform': 'entity platform', 54 | 'unit_of_measurement': 'Mbit/s', 55 | }) 56 | # --- 57 | -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0.0", 3 | "tasks": [ 4 | { 5 | "label": "Lint", 6 | "type": "shell", 7 | "command": "/bin/bash ./scripts/lint" 8 | }, 9 | { 10 | "label": "Run all Tests", 11 | "type": "shell", 12 | "command": "/bin/bash ./scripts/test" 13 | }, 14 | { 15 | "label": "Update Test Snapshots for current file", 16 | "type": "shell", 17 | "command": "/bin/bash ${workspaceFolder}/scripts/update_snapshots ${file}", 18 | "problemMatcher": [] 19 | }, 20 | { 21 | "label": "Update Test Snapshots", 22 | "type": "shell", 23 | "command": "/bin/bash ./scripts/update_snapshots" 24 | }, 25 | { 26 | "label": "Generate coverage report", 27 | "type": "shell", 28 | "command": "/bin/bash ./scripts/coverage" 29 | }, 30 | { 31 | "label": "Run Elasticsearch 7.17.0 (HTTP Port 9200) and Kibana 7.17.0 (HTTP Port 5601)", 32 | "type": "shell", 33 | "command": "/bin/bash ./scripts/run_ek_7_17_0" 34 | }, 35 | { 36 | "label": "Run Elasticsearch 8.0.0 (HTTPS Port 9200) and Kibana 8.0.0 (HTTP Port 5601)", 37 | "type": "shell", 38 | "command": "/bin/bash ./scripts/run_ek_8_0_0" 39 | }, 40 | { 41 | "label": "Run Elasticsearch 8.14.0 (HTTPS Port 9200) and Kibana 8.14.0 (HTTP Port 5601)", 42 | "type": "shell", 43 | "command": "/bin/bash 
./scripts/run_ek_8_14_0" 44 | }, 45 | ] 46 | } 47 | -------------------------------------------------------------------------------- /TROUBLESHOOTING.md: -------------------------------------------------------------------------------- 1 | ## Troubleshooting 2 | 3 | Solutions to common questions and errors: 4 | 5 | ### Error loading elasticsearch 6 | 7 | After installing this component, you may see an error similar to this on startup: 8 | 9 | > No module named 'elasticsearch' 10 | 11 | ``` 12 | ERROR (MainThread) [homeassistant.setup] Error during setup of component elastic Traceback (most recent call last): File "/usr/src/app/homeassistant/setup.py", line 145, in _async_setup_component hass, processed_config) File "/usr/local/lib/python3.6/asyncio/coroutines.py", line 212, in coro res = func(*args, **kw) File "/config/custom_components/elastic/__init__.py", line 62, in async_setup gateway = ElasticsearchGateway(hass, conf) File "/config/custom_components/elastic/__init__.py", line 126, in __init__ self.client = self._create_es_client() File "/config/custom_components/elastic.py", line 134, in _create_es_client import elasticsearch ModuleNotFoundError: No module named 'elasticsearch' 13 | ``` 14 | 15 | This means that home-assistant was not able to download the required `elasticsearch` module for this component to function. 16 | 17 | **Solution**: Restart home assistant 18 | 19 | More info: https://github.com/legrego/homeassistant-elasticsearch/issues/23 20 | 21 | ### Certificate verify failed 22 | 23 | When connecting to a TLS protected cluster, you might receive the following error: 24 | 25 | ``` 26 | ssl.SSLError: [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed (_ssl.c:720) 27 | ``` 28 | 29 | This generally means that the certificate is not trusted by the home-assistant runtime. Please ensure your certificates are setup correctly. 
To skip certificate verification, see setup instructions [here](https://github.com/legrego/homeassistant-elasticsearch/pull/36) 30 | 31 | More info: https://github.com/legrego/homeassistant-elasticsearch/issues/33 32 | -------------------------------------------------------------------------------- /.github/workflows/pull.yml: -------------------------------------------------------------------------------- 1 | name: Pull actions 2 | permissions: 3 | contents: read 4 | 5 | on: 6 | pull_request: 7 | 8 | jobs: 9 | validate: 10 | runs-on: "ubuntu-latest" 11 | name: Validate 12 | steps: 13 | - uses: "actions/checkout@v5" 14 | 15 | - name: HACS validation 16 | uses: "hacs/action@main" 17 | with: 18 | category: "integration" 19 | 20 | - name: Hassfest validation 21 | uses: "home-assistant/actions/hassfest@master" 22 | 23 | style: 24 | runs-on: "ubuntu-latest" 25 | name: Check style formatting 26 | steps: 27 | - uses: "actions/checkout@v5" 28 | - uses: "actions/setup-python@v6" 29 | with: 30 | python-version: "3.13" 31 | - run: python3 -m pip install poetry~=2.2.1 32 | - run: poetry install 33 | - run: poetry run ./scripts/lint --no-fix 34 | 35 | tests: 36 | runs-on: "ubuntu-latest" 37 | name: Run tests 38 | steps: 39 | - name: Check out code from GitHub 40 | uses: "actions/checkout@v5" 41 | - name: Setup Python 42 | uses: "actions/setup-python@v6" 43 | with: 44 | python-version: "3.13" 45 | - name: Install requirements 46 | run: python3 -m pip install poetry~=2.2.1 && poetry install 47 | - name: Run tests 48 | run: | 49 | poetry run pytest --cov=./custom_components --cov-report=xml --cov-report=html 50 | - name: Save PR number and coverage results 51 | run: | 52 | mkdir -p ./pr 53 | echo ${{ github.event.number }} > ./pr/PR-number.txt 54 | cp ./test_results/cov_xml/coverage.xml ./pr/coverage.xml 55 | cp ./test_results/pytest.xml ./pr/pytest.xml 56 | - uses: actions/upload-artifact@v5 57 | with: 58 | name: pr 59 | path: pr/ 60 | -------------------------------------------------------------------------------- /tests/certs/http_ca.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIFWTCCA0GgAwIBAgIUJYE/pPcl9VZvzwFMC4sJc+cQ35AwDQYJKoZIhvcNAQEL 3 | BQAwPDE6MDgGA1UEAxMxRWxhc3RpY3NlYXJjaCBzZWN1cml0eSBhdXRvLWNvbmZp 4 | Z3VyYXRpb24gSFRUUCBDQTAeFw0yNDEyMTgxNzU1NTRaFw0yNzEyMTgxNzU1NTRa 5 | MDwxOjA4BgNVBAMTMUVsYXN0aWNzZWFyY2ggc2VjdXJpdHkgYXV0by1jb25maWd1 6 | cmF0aW9uIEhUVFAgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCh 7 | Mcu8VmxMaEJG0XJGL9kO7wIGKE0LHBEY6HSP55Vmp17JtD0P0yYDPvKzIPvr+r9m 8 | lvU7MdKxB4h7rkK/rC1AYagW4jZJZCrTMeCkf5Zcb1o60Lp23MSAFyOSR8AHjegO 9 | BVFvMOO9WwwRtFm2i3YOVAFEyc3UyVD4A4gCKhxWS1/R6D49kiUNE2HsACPMdHPk 10 | uuIkwOrQ4B5LAfmRqjOK5nlYff/4f3+Cm3e8kIuUmOQViwfGxsAi3/44Fn5hGk3Z 11 | 1qF4+JVrNNukBks1T9zFZ0nDHD7DylossOBvK1VG/Jl8hFOp8nIWDHC9vbFMcSR0 12 | f1/UelHmDwwvH9nvpTwrGBpZdJFKkGD070CFhSSwcKrG4sNbUcIz2CVxQGWFs5aO 13 | Xlc5aRK1jYSTaRwuN2msXl5nn5rz/3iGdlq6TwInc+uXLp3hRp7V7hjUHiZmrTTg 14 | BRakafnm91H9Xo89G5KzeJkoKF8mBUTwIOgmwKuH3OtVzocrn998lo2MllRKDkWS 15 | Fbqyw5BE8y3SAIRXnEKqAtjgJ/qsX2zPI/k18Yij1R9G2gQqXmElCh4gpbbXMaNE 16 | JWGooG3AO/JPtGOEb+Gi6e+jj2/eLPCPjNhfGyjFszDa8A04O7jvvSXOOzcYBM93 17 | +wpXCsoANQq3Fyy1efOsYEiVHJx2dazXtyv16MR/4wIDAQABo1MwUTAdBgNVHQ4E 18 | FgQUxWgytYdNqrMN0y2krN7bLG9mbU8wHwYDVR0jBBgwFoAUxWgytYdNqrMN0y2k 19 | rN7bLG9mbU8wDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAheBq 20 | ZzDYwXu2Goy5yzAMGKm3emxL9m1ruZQnkuZmMwLuLbBPlTlFmS8rhST3aOejaZ3B 21 | 
zFVEPUZmkJmA0qDzohRzh09CsxZtstu2pw0l4BZP8br4YdPCZ+k7OX/ZM/6M9Wnq 22 | dlTp1C5MACNBUTph6Jg+WBaTAqnlbh0K7YseV6B75zkymqpDMcxK4Sd9hfJUcGHU 23 | XbO8Ts2pxRrar+z6egUOJEhgtkgKPdUn8/0yvGH7jN4gIvYGEmUVyOae/nQmWHj8 24 | PWwBk7HJ+E50QZqwibxTK4fV7PBUt9Id2F7tgcCMUBGtLD06Z/4uGku4MQL/zm5F 25 | 19+yU7SqgY/IyMsN6q79+CtFpMv53buwFJudEGskwVjboiqLv9KBBgArmuoqlNNQ 26 | 8jnazqC+9RWInFwQkSlmzNBXaGkjbxqjh4QUrQJtCxKy4n4vkhdAPuFPYp+ESziH 27 | HtZ+vXJcV7LIHdH84cmyMNdyepyd04LeOyATVK2GXRVj3kjZpz/McbiZSQwHuOyq 28 | 1ZcG4/KyxGP06KyJhXRBIpl7GB3Tyy+bPHfTEyuU+slwR3TCEcnl6dOtCeiDf3td 29 | yc4tpjPwcVF51bmFpRmtP0SMigbh+Hus9bzmx80PQHMEYc0DaSWl/sqBw3Gbxlf9 30 | cUXYaAGk/HGq59JorzpK6cpR7Pi8oD7gk3K3vA0= 31 | -----END CERTIFICATE----- 32 | -------------------------------------------------------------------------------- /custom_components/elasticsearch/system_info.py: -------------------------------------------------------------------------------- 1 | """Retrieve system information.""" 2 | 3 | import socket 4 | from dataclasses import dataclass 5 | 6 | from homeassistant.components.hassio.coordinator import get_host_info 7 | from homeassistant.core import HomeAssistant 8 | from homeassistant.helpers.system_info import async_get_system_info 9 | 10 | from .logger import LOGGER 11 | 12 | 13 | @dataclass 14 | class SystemInfoResult: 15 | """System info for use in documents published to Elasticsearch.""" 16 | 17 | version: str 18 | arch: str 19 | os_name: str 20 | os_version: str 21 | hostname: str | None 22 | 23 | 24 | class SystemInfo: 25 | """Retrieve system information.""" 26 | 27 | def __init__(self, hass: HomeAssistant) -> None: 28 | """System Info init.""" 29 | self._hass: HomeAssistant = hass 30 | 31 | async def _get_system_info(self) -> dict: 32 | try: 33 | return await async_get_system_info(self._hass) 34 | except Exception as err: # noqa: BLE001 35 | msg = "Unknown error retrieving system info" 36 | LOGGER.exception(msg) 37 | raise ValueError(msg) from err 38 | 39 | def _get_host_info(self) -> dict | None: 40 | """Retrieve host information from HASS.""" 41 | return get_host_info(self._hass) 42 | 43 | async def async_get_system_info(self) -> SystemInfoResult | None: 44 | """Retrieve system information from HASS.""" 45 | system_info = await self._get_system_info() 46 | 47 | host_info = self._get_host_info() 48 | 49 | hostname = None 50 | 51 | if host_info is not None and system_info["hassio"] is True: 52 | hostname = host_info.get("hostname", None) 53 | else: 54 | hostname = socket.gethostname() 55 | 56 | return SystemInfoResult( 57 | version=system_info["version"], 58 | arch=system_info["arch"], 59 | os_name=system_info["os_name"], 60 | os_version=system_info["os_version"], 61 | hostname=hostname, 62 | ) 63 | -------------------------------------------------------------------------------- /custom_components/elasticsearch/utils.py: -------------------------------------------------------------------------------- 1 | """Utilities.""" 2 | 3 | from __future__ import annotations 4 | 5 | from typing import Any 6 | 7 | from custom_components.elasticsearch import const as compconst 8 | 9 | 10 | def skip_dict_values(d: dict, skip_values: list[Any]) -> dict: 11 | """Trim keys with values that match skip_values. Works best on a flattened dict.""" 12 | if skip_values == (): 13 | return d 14 | 15 | return {k: v for k, v in d.items() if v not in skip_values} 16 | 17 | 18 | def keep_dict_keys(d: dict, keys: list[str] | None = None, prefixes: list[str] | None = None) -> dict: 19 | """Trim keys that match keep_keys. 
Works best on a flattened dict.""" 20 | 21 | new_dict = {} 22 | 23 | if keys: 24 | new_dict.update({k: v for k, v in d.items() if k in keys}) 25 | 26 | if prefixes: 27 | new_dict.update({k: v for k, v in d.items() if any(k.startswith(prefix) for prefix in prefixes)}) 28 | 29 | return new_dict 30 | 31 | 32 | def prepare_dict( 33 | d: dict, 34 | flatten: bool = True, 35 | keep_keys: list[str] | None = None, 36 | skip_values: list[Any] | None = compconst.SKIP_VALUES, 37 | ) -> dict: 38 | """Clean a dictionary by flattening it, removing keys with empty values and optionally keeping only specified keys.""" 39 | 40 | d = flatten_dict(d=d) if flatten else d 41 | 42 | d = keep_dict_keys(d=d, keys=keep_keys) if keep_keys else d 43 | 44 | d = skip_dict_values(d=d, skip_values=skip_values) if skip_values else d 45 | 46 | return d # noqa: RET504 47 | 48 | 49 | def flatten_dict(d: dict, parent_key: str = "", sep: str = ".") -> dict: 50 | """Flatten an n-level nested dictionary using periods.""" 51 | 52 | flattened_dict = {} 53 | 54 | for k, v in d.items(): 55 | new_key = f"{parent_key}{sep}{k}" if parent_key != "" else k 56 | 57 | if isinstance(v, dict): 58 | flattened_dict.update( 59 | flatten_dict(d=v, parent_key=new_key, sep=sep), 60 | ) 61 | else: 62 | flattened_dict[new_key] = v 63 | 64 | return flattened_dict 65 | -------------------------------------------------------------------------------- /.github/workflows/coverage-comment.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Comment coverage report on the pull request 3 | permissions: 4 | contents: read 5 | pull-requests: write 6 | on: 7 | workflow_run: 8 | workflows: ["Pull actions"] 9 | types: 10 | - completed 11 | 12 | jobs: 13 | post-coverage-report: 14 | runs-on: ubuntu-latest 15 | if: > 16 | github.event.workflow_run.event == 'pull_request' && 17 | github.event.workflow_run.conclusion == 'success' 18 | steps: 19 | - name: 'Download artifact' 20 | uses: actions/github-script@v8 21 | with: 22 | script: | 23 | var artifacts = await github.rest.actions.listWorkflowRunArtifacts({ 24 | owner: context.repo.owner, 25 | repo: context.repo.repo, 26 | run_id: ${{github.event.workflow_run.id }}, 27 | }); 28 | var matchArtifact = artifacts.data.artifacts.filter((artifact) => { 29 | return artifact.name == "pr" 30 | })[0]; 31 | var download = await github.rest.actions.downloadArtifact({ 32 | owner: context.repo.owner, 33 | repo: context.repo.repo, 34 | artifact_id: matchArtifact.id, 35 | archive_format: 'zip', 36 | }); 37 | var fs = require('fs'); 38 | fs.writeFileSync('${{github.workspace}}/pr.zip', Buffer.from(download.data)); 39 | - name: Unzip artifact 40 | run: unzip pr.zip 41 | - name: Read the PR number from file 42 | id: pr_number 43 | uses: juliangruber/read-file-action@v1 44 | with: 45 | path: ./PR-number.txt 46 | - name: Pytest coverage comment 47 | uses: MishaKav/pytest-coverage-comment@v1.1.57 48 | with: 49 | issue-number: ${{ steps.pr_number.outputs.content }} 50 | pytest-xml-coverage-path: ./coverage.xml 51 | junitxml-path: ./pytest.xml -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "[python]": { 3 | "editor.codeActionsOnSave": { 4 | "source.fixAll": "explicit", 5 | "source.organizeImports": "explicit" 6 | }, 7 | "editor.formatOnSave": true, 8 | "editor.defaultFormatter": "charliermarsh.ruff", 9 | "editor.rulers": [ 10 | 110 11 | ] 12 | 
}, 13 | "debug.saveBeforeStart": "none", 14 | "editor.formatOnPaste": false, 15 | "editor.formatOnSave": true, 16 | "editor.formatOnType": true, 17 | "editor.inlineSuggest.enabled": true, 18 | "editor.tabSize": 4, 19 | "files.eol": "\n", 20 | "files.insertFinalNewline": true, 21 | "files.trimTrailingWhitespace": true, 22 | "files.watcherExclude": { 23 | "**/.git/objects/**": true, 24 | "**/.git/subtree-cache/**": true, 25 | "**/.venv/*/**": true, 26 | "**/.cache/*/**": true 27 | }, 28 | "github.copilot.editor.enableAutoCompletions": true, 29 | "python.analysis.autoSearchPaths": false, 30 | "python.analysis.inlayHints.pytestParameters": true, 31 | "python.analysis.typeCheckingMode": "standard", 32 | "python.defaultInterpreterPath": "${workspaceFolder}/.venv/bin/python", 33 | "python.experiments.optInto": [ 34 | "pythonTestAdapter" 35 | ], 36 | "python.analysis.ignore": [ 37 | "**/.venv/**", 38 | "**/.cache/**", 39 | "**/.git/**", 40 | "**/.cache/**", 41 | "**/.**" 42 | ], 43 | "python.formatting.blackPath": "/usr/local/py-utils/bin/black", 44 | "python.terminal.activateEnvInCurrentTerminal": true, 45 | "python.testing.pytestArgs": [ 46 | "tests" 47 | ], 48 | "python.testing.pytestEnabled": true, 49 | "python.testing.unittestEnabled": false, 50 | "python.analysis.autoImportCompletions": true, 51 | "python.analysis.autoFormatStrings": true, 52 | "python.analysis.inlayHints.functionReturnTypes": true, 53 | "python.analysis.inlayHints.variableTypes": true, 54 | "python-envs.defaultEnvManager": "ms-python.python:poetry", 55 | "python-envs.defaultPackageManager": "ms-python.python:poetry", 56 | "python-envs.pythonProjects": [], 57 | } 58 | -------------------------------------------------------------------------------- /scripts/helpers/es7/bootstrap_ek: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # This script is used to bootstrap Elasticsearch and Kibana in a Docker environment. 4 | # It pulls the Docker images for Elasticsearch and Kibana, starts the containers, 5 | # configures Elasticsearch and Kibana, and provides the necessary setup information. 6 | 7 | # Usage: /bin/bash ./bootstrap_ek 8 | # - version: The version of the Elasticsearch and Kibana stack to use (e.g. 7.0.0) 9 | 10 | dir=$(dirname "$0") 11 | version=$1 12 | 13 | set -e 14 | 15 | start=$(date +%s) 16 | 17 | # if version is blank, prompt for a value 18 | if [ -z "$version" ]; then 19 | echo "Please provide a valid stack version (e.g. 7.0.0)" 20 | read -r version 21 | fi 22 | 23 | # If the user provided a version that starts with 8 suggest they use the es8 script 24 | if [[ "$version" =~ ^8\.[0-9]+\.[0-9]+$ ]]; then 25 | echo "This script is for Elasticsearch 7. Please use the es7 scripts for Elasticsearch 7." 26 | exit 1 27 | fi 28 | 29 | # Ensure that the version provided has two decimals and starts with 7 30 | if [[ ! "$version" =~ ^7\.[0-9]+\.[0-9]+$ ]]; then 31 | echo "Invalid version provided. Please provide a valid stack version (e.g. 
7.0.0)" 32 | exit 1 33 | fi 34 | 35 | # Create a Docker network for Elasticsearch and Kibana 36 | docker network create elastic >/dev/null 37 | 38 | echo "Pulling Docker Images for Elasticsearch and Kibana" 39 | 40 | echo -n "- Pulling Elasticsearch image for $version" 41 | /bin/bash "$dir/../shared/pull_es" "$version" 42 | 43 | echo -n "- Pulling Kibana image for $version" 44 | /bin/bash "$dir/../shared/pull_kb" "$version" 45 | 46 | echo "Starting Elasticsearch and Kibana" 47 | escontainer=$(/bin/bash "$dir/run_es" "$version") 48 | kbcontainer=$(/bin/bash "$dir/run_kb" "$version") 49 | 50 | echo "Configuring Elasticsearch:" 51 | echo -n "- Wait for Elasticsearch ($escontainer) to be ready" 52 | /bin/bash "$dir/wait_for_es" 53 | 54 | echo "Configuring Kibana:" 55 | echo -n "- Wait for Kibana ($kbcontainer) to be ready" 56 | /bin/bash "$dir/wait_for_kb_setup" 57 | 58 | end=$(date +%s) 59 | 60 | echo 61 | echo "---Ready to use!---" 62 | echo "Elasticsearch" 63 | echo " Url: http://host.docker.internal:9200" 64 | echo " Unauthenticated Access" 65 | echo 66 | echo "Kibana" 67 | echo " Url: http://host.docker.internal:5601" 68 | echo " Unauthenticated Access" 69 | echo 70 | echo "Setup Information" 71 | echo " Version: $version" 72 | echo " Containers: $escontainer $kbcontainer" 73 | echo " Duration: $((end - start)) seconds" 74 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | 2 | [mypy] 3 | python_version = 3.13 4 | platform = linux 5 | plugins = pydantic.mypy 6 | show_error_codes = true 7 | follow_imports = normal 8 | enable_incomplete_feature = NewGenericSyntax 9 | local_partial_types = true 10 | strict_equality = true 11 | no_implicit_optional = true 12 | warn_incomplete_stub = true 13 | warn_redundant_casts = true 14 | warn_unused_configs = true 15 | warn_unused_ignores = true 16 | enable_error_code = ignore-without-code, redundant-self, truthy-iterable 17 | disable_error_code = annotation-unchecked, import-not-found, import-untyped 18 | extra_checks = false 19 | check_untyped_defs = true 20 | disallow_incomplete_defs = true 21 | disallow_subclassing_any = true 22 | disallow_untyped_calls = true 23 | disallow_untyped_decorators = true 24 | disallow_untyped_defs = true 25 | warn_return_any = true 26 | warn_unreachable = true 27 | 28 | [pydantic-mypy] 29 | init_forbid_extra = true 30 | init_typed = true 31 | warn_required_dynamic_aliases = true 32 | warn_untyped_fields = true 33 | 34 | [mypy-custom_components.*] 35 | check_untyped_defs = false 36 | disallow_incomplete_defs = false 37 | disallow_subclassing_any = false 38 | disallow_untyped_calls = false 39 | disallow_untyped_decorators = false 40 | disallow_untyped_defs = false 41 | warn_return_any = false 42 | warn_unreachable = false 43 | no_implicit_reexport = false 44 | 45 | [mypy-custom_components.elasticsearch.*] 46 | check_untyped_defs = true 47 | disallow_incomplete_defs = true 48 | disallow_subclassing_any = true 49 | disallow_untyped_calls = true 50 | disallow_untyped_decorators = true 51 | disallow_untyped_defs = true 52 | warn_return_any = true 53 | warn_unreachable = true 54 | no_implicit_reexport = true 55 | 56 | [mypy-tests.*] 57 | check_untyped_defs = false 58 | disallow_incomplete_defs = false 59 | disallow_subclassing_any = false 60 | disallow_untyped_calls = false 61 | disallow_untyped_decorators = false 62 | disallow_untyped_defs = false 63 | warn_return_any = false 64 | warn_unreachable = 
false
65 | no_implicit_reexport = false
66 | 
67 | [mypy-tests*]
68 | check_untyped_defs = false
69 | disallow_incomplete_defs = false
70 | disallow_subclassing_any = false
71 | disallow_untyped_calls = false
72 | disallow_untyped_decorators = false
73 | disallow_untyped_defs = false
74 | warn_return_any = false
75 | warn_unreachable = false
76 | no_implicit_reexport = false
77 | 
78 | [mypy-tests]
79 | check_untyped_defs = false
80 | disallow_incomplete_defs = false
81 | disallow_subclassing_any = false
82 | disallow_untyped_calls = false
83 | disallow_untyped_decorators = false
84 | disallow_untyped_defs = false
85 | warn_return_any = false
86 | warn_unreachable = false
87 | no_implicit_reexport = false
88 | 
--------------------------------------------------------------------------------
/tests/test_system_info.py:
--------------------------------------------------------------------------------
1 | # type: ignore # noqa: PGH003
2 | """Test System Info."""
3 | 
4 | from unittest import mock
5 | from unittest.mock import AsyncMock, Mock
6 | 
7 | import pytest
8 | from custom_components.elasticsearch.system_info import SystemInfo, SystemInfoResult
9 | from homeassistant.const import __version__ as current_version
10 | from homeassistant.core import HomeAssistant
11 | 
12 | 
13 | class Test_Initialization:
14 |     """Integration tests for system_info.py."""
15 | 
16 |     async def test_init(self, hass: HomeAssistant):
17 |         """Verify the SystemInfo class is initialized correctly."""
18 |         sys_info = SystemInfo(hass)
19 | 
20 |         assert sys_info is not None
21 |         assert sys_info._hass == hass
22 | 
23 | 
24 | class Test_SystemInfo:
25 |     """Test the SystemInfo class methods."""
26 | 
27 |     @pytest.fixture(name="sys_info")
28 |     def sys_info_fixture(self, hass: HomeAssistant):
29 |         """Return a SystemInfo instance."""
30 |         return SystemInfo(hass)
31 | 
32 |     async def test_async_get_system_info(self, sys_info):
33 |         """Verify system information is retrieved correctly."""
34 |         result = await sys_info.async_get_system_info()
35 | 
36 |         assert isinstance(result, SystemInfoResult)
37 |         assert result.version == current_version
38 |         assert result.arch is not None
39 |         assert result.os_name is not None
40 |         assert result.os_version is not None
41 |         assert result.hostname is not None
42 | 
43 |     async def test_async_get_system_info_non_hassio(self, sys_info):
44 |         """Verify we rely on the hostname from the socket module on non-HASSio systems."""
45 |         sys_info._get_system_info = AsyncMock(
46 |             return_value={
47 |                 "hassio": False,
48 |                 "version": "1.0",
49 |                 "arch": "x86_64",
50 |                 "os_name": "Linux",
51 |                 "os_version": "4.4.0-109-generic",
52 |                 "hostname": None,
53 |             }
54 |         )
55 |         sys_info._get_host_info = Mock(return_value={})
56 | 
57 |         with mock.patch("socket.gethostname", return_value="test-hostname"):
58 |             result = await sys_info.async_get_system_info()
59 | 
60 |         assert isinstance(result, SystemInfoResult)
61 |         assert result.version == "1.0"
62 |         assert result.arch == "x86_64"
63 |         assert result.hostname == "test-hostname"
64 | 
65 |     async def test_get_host_info(self, sys_info):
66 |         """Verify host information returns None on non-HASSio systems."""
67 |         host_info = sys_info._get_host_info()
68 | 
69 |         assert host_info is None
70 | 
--------------------------------------------------------------------------------
/.devcontainer/README.md:
--------------------------------------------------------------------------------
1 | ## Developing with Visual Studio Code + devcontainer
2 | 
3 | The easiest way to get started with custom integration development is to use Visual Studio Code with devcontainers. This approach will create a preconfigured development environment with all the tools you need.
4 | 
5 | In the container you will have a dedicated Home Assistant core instance running with your custom component code. You can configure this instance by updating the `./.devcontainer/configuration.yaml` file.
6 | 
7 | **Prerequisites**
8 | 
9 | - [git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)
10 | - Docker
11 |   - For Linux, macOS, or Windows 10 Pro/Enterprise/Education use the [current release version of Docker](https://docs.docker.com/install/)
12 |   - Windows 10 Home requires [WSL 2](https://docs.microsoft.com/windows/wsl/wsl2-install) and the current Edge version of Docker Desktop (see instructions [here](https://docs.docker.com/docker-for-windows/wsl-tech-preview/)). This can also be used for Windows Pro/Enterprise/Education.
13 | - [Visual Studio Code](https://code.visualstudio.com/)
14 | - [Remote - Containers (VSC Extension)][extension-link]
15 | 
16 | [More info about requirements and devcontainer in general](https://code.visualstudio.com/docs/remote/containers#_getting-started)
17 | 
18 | [extension-link]: https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers
19 | 
20 | **Getting started:**
21 | 
22 | 1. Fork the repository.
23 | 2. Clone the repository to your computer.
24 | 3. Open the repository using Visual Studio Code.
25 | 
26 | When you open this repository with Visual Studio Code you will be asked to "Reopen in Container"; this will start the build of the container.
27 | 
28 | _If you don't see this notification, open the command palette and select `Remote-Containers: Reopen Folder in Container`._
29 | 
30 | ### Tasks
31 | 
32 | The devcontainer comes with some useful tasks to help you with development. You can start these tasks by opening the command palette, selecting `Tasks: Run Task`, and then selecting the task you want to run.
33 | 
34 | When a task is currently running (like `Run Home Assistant on port 8123`), it can be restarted by opening the command palette and selecting `Tasks: Restart Running Task`, then selecting the task you want to restart.
35 | 
36 | The available tasks are listed below; an example of invoking the underlying scripts directly follows the table.
37 | 
38 | Task | Description
39 | -- | --
40 | Run Home Assistant on port 8123 | Launch Home Assistant with your custom component code and the configuration defined in `.devcontainer/configuration.yaml`.
41 | Run Unsupported Elasticsearch 7.0.0 (HTTP Port 9200) and Kibana 7.0.0 (HTTP Port 5601) | Launch Unsupported Elasticsearch 7.0.0 and Kibana 7.0.0.
42 | Run Unsupported Elasticsearch 7.10.0 (HTTP Port 9200) and Kibana 7.10.0 (HTTP Port 5601) | Launch Elasticsearch 7.10.0 and Kibana 7.10.0.
43 | Run Unsupported Elasticsearch 7.17.0 (HTTP Port 9200) and Kibana 7.17.0 (HTTP Port 5601) | Launch Elasticsearch 7.17.0 and Kibana 7.17.0.
44 | Run Unsupported Elasticsearch 8.0.0 (HTTPS Port 9200) and Kibana 8.0.0 (HTTP Port 5601) | Launch Elasticsearch 8.0.0 and Kibana 8.0.0.
45 | Run Unsupported Elasticsearch 8.7.0 (HTTPS Port 9200) and Kibana 8.7.0 (HTTP Port 5601) | Launch Elasticsearch 8.7.0 and Kibana 8.7.0.
46 | Run Elasticsearch 8.11.0 (HTTPS Port 9200) and Kibana 8.11.0 (HTTP Port 5601) | Launch Elasticsearch 8.11.0 and Kibana 8.11.0.
47 | Run Elasticsearch 8.13.0 (HTTPS Port 9200) and Kibana 8.13.0 (HTTP Port 5601) | Launch Elasticsearch 8.13.0 and Kibana 8.13.0.
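These tasks are thin wrappers around the scripts in the `scripts/` directory, so you can also invoke them directly from a terminal inside the devcontainer. A sketch; check `scripts/` for the exact set of stack versions available:

```bash
# Launch an Elasticsearch 8.14.0 + Kibana 8.14.0 test stack directly
/bin/bash ./scripts/run_ek_8_14_0

# Tear the stack containers back down when you are done
/bin/bash ./scripts/clean_ek
```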
48 | 49 | 50 | -------------------------------------------------------------------------------- /custom_components/elasticsearch/const.py: -------------------------------------------------------------------------------- 1 | """constants.""" 2 | 3 | from enum import Enum 4 | from types import MappingProxyType 5 | from typing import Any 6 | 7 | DOMAIN: str = "elasticsearch" 8 | ELASTIC_DOMAIN: str = "elasticsearch" 9 | 10 | ELASTIC_MINIMUM_VERSION: tuple[int, int] = (8, 14) 11 | 12 | CONF_PUBLISH_FREQUENCY: str = "publish_frequency" 13 | CONF_POLLING_FREQUENCY: str = "polling_frequency" 14 | CONF_AUTHENTICATION_TYPE: str = "authentication_type" 15 | 16 | CONF_CHANGE_DETECTION_TYPE: str = "change_detection_type" 17 | 18 | CONF_DEBUG_ATTRIBUTE_FILTERING: str = "debug_attribute_filtering" 19 | 20 | CONF_INCLUDE_TARGETS: str = "include_targets" 21 | CONF_EXCLUDE_TARGETS: str = "exclude_targets" 22 | 23 | CONF_TARGETS_TO_INCLUDE: str = "targets_to_include" 24 | CONF_TARGETS_TO_EXCLUDE: str = "targets_to_exclude" 25 | 26 | CONF_SSL_VERIFY_HOSTNAME: str = "ssl_verify_hostname" 27 | CONF_SSL_CA_PATH: str = "ssl_ca_path" 28 | 29 | CONF_TAGS: str = "tags" 30 | 31 | # For trimming keys with values that are None, empty lists, or empty objects 32 | SKIP_VALUES = [None, [], {}] 33 | 34 | ONE_MINUTE: int = 60 35 | ONE_HOUR: int = 60 * 60 36 | 37 | DATASTREAM_TYPE: str = "metrics" 38 | DATASTREAM_DATASET_PREFIX: str = "homeassistant" 39 | DATASTREAM_NAMESPACE: str = "default" 40 | 41 | # Set to match the datastream prefix name 42 | DATASTREAM_METRICS_INDEX_TEMPLATE_NAME: str = DATASTREAM_TYPE + "-" + DATASTREAM_DATASET_PREFIX 43 | 44 | PUBLISH_REASON_POLLING: str = "Polling" 45 | PUBLISH_REASON_STATE_CHANGE: str = "State change" 46 | PUBLISH_REASON_ATTR_CHANGE: str = "Attribute change" 47 | 48 | STATE_CHANGE_TYPE_VALUE: str = PUBLISH_REASON_STATE_CHANGE 49 | STATE_CHANGE_TYPE_ATTR: str = PUBLISH_REASON_ATTR_CHANGE 50 | 51 | ES_CHECK_PERMISSIONS_DATASTREAM: MappingProxyType[str, Any] = MappingProxyType( 52 | { 53 | "cluster": ["manage_index_templates", "monitor"], 54 | "index": [ 55 | { 56 | "names": [ 57 | "metrics-homeassistant.*", 58 | ], 59 | "privileges": [ 60 | "manage", 61 | "index", 62 | "create_index", 63 | "create", 64 | ], 65 | }, 66 | ], 67 | } 68 | ) 69 | 70 | 71 | class StateChangeType(Enum): 72 | """Elasticsearch State Change Types constants.""" 73 | 74 | STATE = "state" 75 | ATTRIBUTE = "attribute" 76 | NO_CHANGE = "polling" 77 | 78 | def to_publish_reason(self) -> str: 79 | """Return the publish reason for the state change type.""" 80 | if self == StateChangeType.STATE: 81 | return PUBLISH_REASON_STATE_CHANGE 82 | if self == StateChangeType.ATTRIBUTE: 83 | return PUBLISH_REASON_ATTR_CHANGE 84 | return PUBLISH_REASON_POLLING 85 | 86 | 87 | class CAPABILITIES: 88 | """Elasticsearch CAPABILITIES constants.""" 89 | 90 | MAJOR: str = "MAJOR" 91 | MINOR: str = "MINOR" 92 | BUILD_FLAVOR: str = "BUILD_FLAVOR" 93 | SERVERLESS: str = "SERVERLESS" 94 | OSS: str = "OSS" 95 | SUPPORTED: str = "SUPPORTED" 96 | TIMESERIES_DATASTREAM: str = "TIMESERIES_DATASTREAM" 97 | IGNORE_MISSING_COMPONENT_TEMPLATES: str = "IGNORE_MISSING_COMPONENT_TEMPLATES" 98 | DATASTREAM_LIFECYCLE_MANAGEMENT: str = "DATASTREAM_LIFECYCLE_MANAGEMENT" 99 | MAX_PRIMARY_SHARD_SIZE: str = "MAX_PRIMARY_SHARD_SIZE" 100 | -------------------------------------------------------------------------------- /custom_components/elasticsearch/loop.py: -------------------------------------------------------------------------------- 1 | """Implements a 
loop handler.""" 2 | 3 | import asyncio 4 | import time 5 | import typing 6 | from datetime import UTC, datetime, timedelta 7 | from logging import Logger 8 | 9 | from .logger import LOGGER as BASE_LOGGER 10 | 11 | 12 | class LoopHandler: 13 | """Handle a loop for a given function.""" 14 | 15 | def __init__(self, func: typing.Callable, name: str, frequency: int, log: Logger = BASE_LOGGER) -> None: 16 | """Initialize the loop handler.""" 17 | self._func: typing.Callable = func 18 | 19 | self._name = name 20 | 21 | self._frequency: int = frequency 22 | self._running: bool = False 23 | self._should_stop: bool = False 24 | self._run_count: int = 0 25 | 26 | self._log: Logger = log 27 | self._next_run_time: float = time.monotonic() 28 | 29 | def get_run_count(self) -> int: 30 | """Return the number of times the loop has run.""" 31 | return self._run_count 32 | 33 | async def wait_for_first_run(self) -> None: 34 | """Wait for the first run of the loop.""" 35 | while self._run_count == 0: 36 | await asyncio.sleep(0.1) 37 | 38 | def _time_to_run(self) -> bool: 39 | """Determine if now is a good time to poll for state changes.""" 40 | return self._next_run_time <= time.monotonic() 41 | 42 | def _time_until_next_run(self) -> int: 43 | """Return the time until the next run.""" 44 | 45 | # If the next run time is in the past, return 0 46 | # Otherwise, return the time until the next run, round up to the nearest second 47 | return max(0, int(self._next_run_time - time.monotonic())) 48 | 49 | async def _wait_for_next_run(self) -> None: 50 | """Wait for the next poll time.""" 51 | while not self._time_to_run(): 52 | if self._should_stop_running(): 53 | msg = "Stopping the loop handler." 54 | raise RuntimeError(msg) 55 | await self._spin() 56 | continue 57 | 58 | def _schedule_next_run(self) -> None: 59 | self._next_run_time = time.monotonic() + self._frequency 60 | self._log.debug( 61 | "Next run of loop: %s scheduled for roughly %s (UTC) -- %ss from now", 62 | self._name, 63 | datetime.now(tz=UTC) + timedelta(0, self._frequency), 64 | self._frequency, 65 | ) 66 | 67 | def _should_keep_running(self) -> bool: 68 | """Determine if the runner should keep running.""" 69 | return self._running and not self._should_stop 70 | 71 | def _should_stop_running(self) -> bool: 72 | """Determine if the runner should stop.""" 73 | return self._should_stop 74 | 75 | async def _spin(self, duration: int = 1) -> None: 76 | """Spin the event loop.""" 77 | await asyncio.sleep(duration) 78 | 79 | def stop(self) -> None: 80 | """Stop the loop.""" 81 | self._should_stop = True 82 | self._running = False 83 | 84 | async def start(self) -> None: 85 | """Start the loop.""" 86 | self._running = True 87 | 88 | while self._should_keep_running(): 89 | await self._wait_for_next_run() 90 | self._schedule_next_run() 91 | 92 | self._run_count += 1 93 | try: 94 | await self._func() 95 | except Exception: 96 | self._log.debug("Unexpected error in loop handler: %s", self._name, exc_info=True) 97 | self._log.error("Unexpected error in loop handler: %s", self._name) 98 | self.stop() 99 | raise 100 | -------------------------------------------------------------------------------- /custom_components/elasticsearch/es_datastream_manager.py: -------------------------------------------------------------------------------- 1 | """Manage Elasticsearch datastreams and index templates. 2 | 3 | This class provides methods to initialize, install, and update 4 | Elasticsearch index templates for Home Assistant datastreams. 
5 | """ 6 | 7 | from logging import Logger 8 | 9 | from custom_components.elasticsearch.datastreams.index_template import index_template_definition 10 | from custom_components.elasticsearch.es_gateway import ElasticsearchGateway 11 | 12 | from .const import ( 13 | DATASTREAM_METRICS_INDEX_TEMPLATE_NAME, 14 | ) 15 | from .logger import LOGGER as BASE_LOGGER 16 | from .logger import async_log_enter_exit_debug 17 | 18 | 19 | class DatastreamManager: 20 | """Datastream manager.""" 21 | 22 | _logger: Logger 23 | 24 | def __init__( 25 | self, 26 | gateway: ElasticsearchGateway, 27 | log: Logger = BASE_LOGGER, 28 | ) -> None: 29 | """Initialize index management.""" 30 | 31 | self._logger = log 32 | 33 | self._gateway: ElasticsearchGateway = gateway 34 | 35 | @async_log_enter_exit_debug 36 | async def async_init(self) -> None: 37 | """Perform initializiation of required datastream primitives.""" 38 | if await self._needs_index_template(): 39 | await self._install_index_template() 40 | elif await self._needs_index_template_update(): 41 | await self._update_index_template() 42 | 43 | @async_log_enter_exit_debug 44 | async def _needs_index_template(self) -> bool: 45 | """Check if the ES cluster needs the index template installed.""" 46 | matching_templates = await self._gateway.get_index_template( 47 | name=DATASTREAM_METRICS_INDEX_TEMPLATE_NAME, 48 | ignore=[404], 49 | ) 50 | 51 | return len(matching_templates.get("index_templates", [])) == 0 52 | 53 | @async_log_enter_exit_debug 54 | async def _needs_index_template_update(self) -> bool: 55 | """Check if the ES cluster needs the index template updated.""" 56 | matching_templates = await self._gateway.get_index_template( 57 | name=DATASTREAM_METRICS_INDEX_TEMPLATE_NAME, 58 | ignore=[404], 59 | ) 60 | 61 | matching_template = matching_templates.get("index_templates", [{}])[0] 62 | 63 | imported_version = matching_template["index_template"].get("version", 0) 64 | new_version = index_template_definition.get("version", 0) 65 | 66 | if imported_version != new_version: 67 | self._logger.info( 68 | "Update required from [%s] to [%s] for Home Assistant datastream index template", 69 | imported_version, 70 | new_version, 71 | ) 72 | return True 73 | 74 | return False 75 | 76 | @async_log_enter_exit_debug 77 | async def _install_index_template(self) -> None: 78 | """Initialize any required datastream templates.""" 79 | self._logger.info("Installing index template for Home Assistant datastreams") 80 | 81 | await self._gateway.put_index_template( 82 | name=DATASTREAM_METRICS_INDEX_TEMPLATE_NAME, 83 | body=index_template_definition, 84 | ) 85 | 86 | @async_log_enter_exit_debug 87 | async def _update_index_template(self) -> None: 88 | """Update the specified index template and rollover the indices.""" 89 | self._logger.info("Updating Index template and rolling over Home Assistant datastreams") 90 | 91 | await self._install_index_template() 92 | 93 | datastream_wildcard = index_template_definition["index_patterns"][0] 94 | 95 | # Rollover all Home Assistant datastreams to ensure we don't get mapping conflicts 96 | datastreams = await self._gateway.get_datastream(datastream=datastream_wildcard) 97 | 98 | for datastream in datastreams.get("data_streams", []): 99 | self._logger.info("Rolling over datastream [%s]", datastream["name"]) 100 | await self._gateway.rollover_datastream(datastream=datastream["name"]) 101 | -------------------------------------------------------------------------------- /tests/test_es_datastream_manager.py: 
-------------------------------------------------------------------------------- 1 | """Tests for the index manager class.""" 2 | # noqa: F401 # pylint: disable=redefined-outer-name 3 | 4 | from unittest.mock import AsyncMock 5 | 6 | import pytest 7 | from custom_components.elasticsearch.datastreams import index_template 8 | from custom_components.elasticsearch.es_datastream_manager import DatastreamManager 9 | from custom_components.elasticsearch.es_gateway import ElasticsearchGateway 10 | 11 | 12 | @pytest.fixture 13 | async def mock_gateway() -> AsyncMock: 14 | """Return an ElasticsearchGateway instance.""" 15 | gateway = AsyncMock(ElasticsearchGateway) 16 | 17 | gateway.get_index_template = AsyncMock() 18 | gateway.put_index_template = AsyncMock() 19 | gateway.get_datastream = AsyncMock() 20 | gateway.rollover_datastream = AsyncMock() 21 | 22 | return gateway 23 | 24 | 25 | @pytest.fixture 26 | async def datastream_manager(mock_gateway) -> DatastreamManager: 27 | """Return an DatastreamManager instance.""" 28 | return DatastreamManager(mock_gateway) 29 | 30 | 31 | class Test_Initialization: 32 | """Test the DatastreamManager class sync methods.""" 33 | 34 | def test_init(self, mock_gateway): 35 | """Test the __init__ method.""" 36 | 37 | datastream_manager = DatastreamManager(mock_gateway) 38 | 39 | assert datastream_manager is not None 40 | assert datastream_manager._gateway == mock_gateway 41 | 42 | async def test_async_init_first_run(self, datastream_manager): 43 | """Test initialization of the DatastreamManager with a fresh ES cluster.""" 44 | 45 | datastream_manager._gateway.get_index_template = AsyncMock( 46 | return_value={"index_templates": []}, 47 | ) 48 | 49 | await datastream_manager.async_init() 50 | 51 | datastream_manager._gateway.get_index_template.assert_called_once() 52 | datastream_manager._gateway.put_index_template.assert_called_once() 53 | datastream_manager._gateway.rollover_datastream.assert_not_called() 54 | 55 | async def test_async_init_second_run(self, datastream_manager): 56 | """Test initialization of the DatastreamManager on a cluster that already contains the required index template.""" 57 | datastream_manager._gateway.get_index_template = AsyncMock( 58 | return_value={ 59 | "index_templates": [ 60 | { 61 | "name": "datastream_metrics", 62 | "index_template": {"version": index_template.index_template_definition["version"]}, 63 | } 64 | ] 65 | }, 66 | ) 67 | 68 | await datastream_manager.async_init() 69 | 70 | assert datastream_manager._gateway.get_index_template.call_count == 2 71 | datastream_manager._gateway.put_index_template.assert_not_called() 72 | datastream_manager._gateway.rollover_datastream.assert_not_called() 73 | 74 | async def test_async_init_update_required(self, datastream_manager): 75 | """Test initialization of the DatastreamManager with an existing ES cluster that requires an index template update and rollover.""" 76 | datastream_manager._gateway.get_index_template = AsyncMock( 77 | return_value={ 78 | "index_templates": [{"name": "datastream_metrics", "index_template": {"version": 1}}] 79 | }, 80 | ) 81 | 82 | datastream_manager._gateway.get_datastream = AsyncMock( 83 | return_value={ 84 | "data_streams": [ 85 | { 86 | "name": "metrics-homeassistant.sensor-default", 87 | }, 88 | { 89 | "name": "metrics-homeassistant.counter-default", 90 | }, 91 | ] 92 | } 93 | ) 94 | 95 | await datastream_manager.async_init() 96 | 97 | assert datastream_manager._gateway.get_index_template.call_count == 2 98 | 
datastream_manager._gateway.put_index_template.assert_called_once()
99 |         assert datastream_manager._gateway.rollover_datastream.call_count == 2
100 | 
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contribution guidelines
2 | 
3 | Contributing to this project should be as easy and transparent as possible, whether it's:
4 | 
5 | - Reporting a bug
6 | - Discussing the current state of the code
7 | - Submitting a fix
8 | - Proposing new features
9 | 
10 | ## GitHub is used for everything
11 | 
12 | GitHub is used to host code, to track issues and feature requests, as well as accept pull requests.
13 | 
14 | Pull requests are the best way to propose changes to the codebase.
15 | 
16 | 1. Fork the repo and create your branch from `main`.
17 | 2. If you've changed something, update the documentation.
18 | 3. Make sure your code lints (using `scripts/lint`).
19 | 4. Make sure tests pass (using `scripts/test`).
20 | 5. Test your contribution.
21 | 6. Issue that pull request!
22 | 
23 | ## Any contributions you make will be under the MIT Software License
24 | 
25 | In short, when you submit code changes, your submissions are understood to be under the same [MIT License](http://choosealicense.com/licenses/mit/) that covers the project. Feel free to contact the maintainers if that's a concern.
26 | 
27 | ## Report bugs using GitHub's [issues](../../issues)
28 | 
29 | GitHub issues are used to track public bugs.
30 | Report a bug by [opening a new issue](../../issues/new/choose); it's that easy!
31 | 
32 | ## Write bug reports with detail, background, and sample code
33 | 
34 | **Great Bug Reports** tend to have:
35 | 
36 | - A quick summary and/or background
37 | - Steps to reproduce
38 |   - Be specific!
39 |   - Give sample code if you can.
40 | - What you expected would happen
41 | - What actually happens
42 | - Notes (possibly including why you think this might be happening, or stuff you tried that didn't work)
43 | 
44 | People _love_ thorough bug reports. I'm not even kidding.
45 | 
46 | ## Use a Consistent Coding Style
47 | 
48 | Use the configured linter to check your code, and make sure it follows the project conventions.
49 | 
50 | ## Local development environment
51 | 
52 | Visual Studio Code is the recommended code editor for this project.
53 | This project includes a [devcontainer](./.devcontainer) configuration for an easy-to-use and consistent development environment. With this container you will have a standalone Home Assistant instance running and already configured with the included [`configuration.yaml`](./config/configuration.yaml) file.
54 | 
55 | ### Dependency management
56 | 
57 | Dependencies are managed via [Poetry](https://python-poetry.org). This will be managed for you automatically if using the dev container. If you wish to run outside of a dev container, you will need to install your dependencies manually:
58 | 
59 | ```sh
60 | pip install poetry~=2.2.1
61 | poetry install
62 | ```
63 | 
64 | ### Running tests
65 | 
66 | Use `./scripts/test` to invoke the test runner.
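For quicker iteration, you can also point pytest at a single file or keyword instead of the whole suite. A sketch; `tests/test_loop.py` is just an example target, and this assumes the Poetry-managed environment described below:

```bash
# Run a single test file
poetry run pytest tests/test_loop.py

# Run only tests whose names match a keyword expression
poetry run pytest tests -k "datastream"
```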
67 | 
68 | You must be within the virtual environment where project dependencies are installed:
69 | 
70 | ```sh
71 | poetry run ./scripts/test
72 | ```
73 | 
74 | Alternatively:
75 | 
76 | ```sh
77 | poetry shell
78 | # you now have a shell within the virtual env
79 | ./scripts/test
80 | ```
81 | 
82 | ### Updating snapshots
83 | 
84 | If you've made a change to code that impacts a snapshot, your test will fail and the snapshot needs to be updated. To update snapshots, run `/bin/bash ./scripts/update_snapshots`.
85 | 
86 | Or in VS Code you can run one of the two update-snapshot tasks by opening the command palette with cmd + p (or ctrl + p) and typing `task Update` to see the tasks related to updating snapshots.
87 | 
88 | ### Linting
89 | 
90 | In the devcontainer, linting and formatting run on save, and linting errors are shown as PROBLEMS in VS Code.
91 | 
92 | To invoke a full lint/format, in VS Code open the command palette with cmd + p (or ctrl + p) and type `task Lint`.
93 | 
94 | Alternatively use `./scripts/lint` to invoke the project linter. You must be within the virtual environment where project dependencies are installed:
95 | 
96 | ```sh
97 | poetry run ./scripts/lint
98 | ```
99 | 
100 | Alternatively:
101 | 
102 | ```sh
103 | poetry shell
104 | # you now have a shell within the virtual env
105 | ./scripts/lint
106 | ```
107 | 
108 | ## License
109 | 
110 | By contributing, you agree that your contributions will be licensed under the project's MIT License.
111 | 
--------------------------------------------------------------------------------
/custom_components/elasticsearch/logger.py:
--------------------------------------------------------------------------------
1 | """Component Logger."""
2 | 
3 | import logging
4 | from collections.abc import Callable, Coroutine
5 | from typing import Any
6 | 
7 | LOGGER = logging.getLogger("custom_components.elasticsearch")
8 | es_library_logger = logging.getLogger("elasticsearch")
9 | es_library_logger.name = "custom_components.elasticsearch.library"
10 | es_transport_logger = logging.getLogger("elastic_transport")
11 | es_transport_logger.name = "custom_components.elasticsearch.transport"
12 | 
13 | # if the logger is already set up, don't change the level
14 | if LOGGER.level == logging.NOTSET:
15 |     LOGGER.setLevel(logging.INFO)
16 | 
17 | if es_library_logger.level == logging.NOTSET:
18 |     es_library_logger.setLevel(logging.ERROR)
19 | 
20 | if es_transport_logger.level == logging.NOTSET:
21 |     es_transport_logger.setLevel(logging.ERROR)
22 | 
23 | 
24 | def have_child(name: str) -> logging.Logger:
25 |     """Create a child logger."""
26 | 
27 |     # Sanitize the name, keeping only lowercase alphanumeric characters
28 |     sanitized_name = "".join([c if c.isalnum() else "" for c in name.replace(" ", "_").lower()])
29 | 
30 |     parent = logging.getLogger("custom_components.elasticsearch")
31 |     new_logger = parent.getChild(f"{sanitized_name}")
32 |     new_logger.name = f"{parent.name}-{sanitized_name}"
33 | 
34 |     return new_logger
35 | 
36 | 
37 | # Returns a function
38 | def log_enter_exit_info(func: Callable) -> Callable:
39 |     """Log function start and end."""
40 | 
41 |     def decorated_func(*args, **kwargs):  # noqa: ANN202
42 |         logger = getattr(args[0], "_logger", LOGGER) if args and len(args) > 0 else LOGGER
43 |         return call_and_log_enter_exit(func, logger, logging.INFO, *args, **kwargs)
44 | 
45 |     return decorated_func
46 | 
47 | 
48 | def async_log_enter_exit_info(func: Callable[..., Coroutine]):  # noqa: ANN201
49 |     """Log function start and end."""
50 | 
51 | 
async def decorated_func(*args, **kwargs): # noqa: ANN202 52 | logger = getattr(args[0], "_logger", LOGGER) if args and len(args) > 0 else LOGGER 53 | return await call_and_log_enter_exit(func, logger, logging.INFO, *args, **kwargs) 54 | 55 | return decorated_func 56 | 57 | 58 | def log_enter_exit_debug(func: Callable) -> Callable: 59 | """Log function start and end.""" 60 | 61 | def decorated_func(*args, **kwargs): # noqa: ANN202 62 | logger = getattr(args[0], "_logger", LOGGER) if args and len(args) > 0 else LOGGER 63 | return call_and_log_enter_exit(func, logger, logging.DEBUG, *args, **kwargs) 64 | 65 | return decorated_func 66 | 67 | 68 | def async_log_enter_exit_debug(func: Callable[..., Coroutine]): # noqa: ANN201 69 | """Log function start and end.""" 70 | 71 | async def decorated_func(*args, **kwargs): # noqa: ANN202 72 | logger = getattr(args[0], "_logger", LOGGER) if args and len(args) > 0 else LOGGER 73 | return await async_call_and_log_enter_exit(func, logger, logging.DEBUG, *args, **kwargs) 74 | 75 | return decorated_func 76 | 77 | 78 | def call_and_log_enter_exit( 79 | func: Callable, 80 | logger: logging.Logger, 81 | level: int = logging.DEBUG, 82 | *args, 83 | **kwargs, 84 | ) -> Any: # noqa: ANN401 85 | """Log function start and end.""" 86 | 87 | module = func.__module__ 88 | 89 | name = func.__qualname__ 90 | logger.log(level, "Entering %s : %s", module, name) 91 | try: 92 | result = func(*args, **kwargs) 93 | logger.log(level, "Returning from %s : %s", module, name) 94 | except: 95 | logger.log(level, "Error in %s : %s", module, name) 96 | raise 97 | return result 98 | 99 | 100 | async def async_call_and_log_enter_exit( 101 | func: Callable, 102 | logger: logging.Logger, 103 | level: int = logging.DEBUG, 104 | *args, 105 | **kwargs, 106 | ) -> Any: # noqa: ANN401 107 | """Log function start and end.""" 108 | 109 | module = func.__module__ 110 | 111 | name = func.__qualname__ 112 | logger.log(level, "Entering %s : %s", module, name) 113 | try: 114 | result = await func(*args, **kwargs) 115 | logger.log(level, "Returning from %s : %s", module, name) 116 | except: 117 | logger.log(level, "Error in %s : %s", module, name) 118 | raise 119 | return result 120 | -------------------------------------------------------------------------------- /scripts/helpers/es8/bootstrap_ek: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # This script is used to bootstrap Elasticsearch and Kibana in a Docker environment. 4 | # It pulls the Docker images for Elasticsearch and Kibana, starts the containers, 5 | # configures Elasticsearch and Kibana, and provides the necessary setup information. 6 | 7 | # Usage: /bin/bash "$dir/bootstrap_ek 8 | # - version: The version of the Elasticsearch and Kibana stack to use (e.g. 8.0.0) 9 | 10 | dir=$(dirname "$0") 11 | version=$1 12 | 13 | set -e 14 | 15 | start=$(date +%s) 16 | 17 | # if version is blank, prompt for a value 18 | if [ -z "$version" ]; then 19 | echo "Please provide a valid stack version (e.g. 8.0.0)" 20 | read -r version 21 | fi 22 | 23 | # If the user provided a version that starts with 7 suggest they use the es7 script 24 | if [[ "$version" =~ ^7\.[0-9]+\.[0-9]+$ ]]; then 25 | echo "This script is for Elasticsearch 8. Please use the es7 scripts for Elasticsearch 7." 26 | exit 1 27 | fi 28 | 29 | # Ensure that the version provided has two decimals and starts with 8 30 | if [[ ! "$version" =~ ^8\.[0-9]+\.[0-9]+$ ]]; then 31 | echo "Invalid version provided. 
Please provide a valid stack version (e.g. 8.0.0)"
32 |     exit 1
33 | fi
34 | 
35 | # Create a Docker network for Elasticsearch and Kibana
36 | docker network create elastic >/dev/null
37 | 
38 | mkdir "$dir/../../../es_env"
39 | 
40 | echo "Pulling Docker Images for Elasticsearch and Kibana"
41 | 
42 | echo -n "- Pulling Elasticsearch image for $version"
43 | /bin/bash "$dir/../shared/pull_es" "$version"
44 | 
45 | echo -n "- Pulling Kibana image for $version"
46 | /bin/bash "$dir/../shared/pull_kb" "$version"
47 | 
48 | echo "Starting Elasticsearch and Kibana"
49 | escontainer=$(/bin/bash "$dir/run_es" "$version")
50 | kbcontainer=$(/bin/bash "$dir/run_kb" "$version")
51 | echo -n "- Wait for Elasticsearch ($escontainer) to be ready"
52 | /bin/bash "$dir/wait_for_es"
53 | 
54 | echo "Configuring Elasticsearch:"
55 | 
56 | echo "- Resetting elastic user account password"
57 | espwd=$(/bin/bash "$dir/reset_es_pwd" "$escontainer")
58 | 
59 | echo "- Grabbing certificate from Elasticsearch container"
60 | /bin/bash "$dir/get_ca_cert" "$escontainer"
61 | 
62 | echo "- Generating an Enrollment Token for Kibana"
63 | enrolltoken=$(/bin/bash "$dir/reset_es_enrollment" "$escontainer")
64 | 
65 | echo "Configuring Kibana:"
66 | echo -n "- Wait for Kibana ($kbcontainer) to be ready for setup"
67 | /bin/bash "$dir/wait_for_kb_setup"
68 | 
69 | echo "- Enrolling Kibana"
70 | kbcode=$(/bin/bash "$dir/get_kb_code" "$kbcontainer")
71 | /bin/bash "$dir/enroll_kb" "$enrolltoken" "$kbcode"
72 | 
73 | echo "Configuring HASS Access:"
74 | echo -n "- Wait for Elasticsearch ($escontainer) cluster to be green"
75 | /bin/bash "$dir/wait_for_es_green" "https://host.docker.internal:9200" "elastic" "$espwd"
76 | echo "- Creating a role for Home Assistant"
77 | /bin/bash "$dir/../shared/create_hass_writer_role_es" "https://host.docker.internal:9200" "elastic" "$espwd"
78 | echo "- Creating a user for Home Assistant"
79 | /bin/bash "$dir/../shared/create_hass_writer_user_es" "https://host.docker.internal:9200" "elastic" "$espwd"
80 | echo "- Creating an API Key for Home Assistant"
81 | apikey=$(/bin/bash "$dir/../shared/create_hass_writer_apikey_es" "https://host.docker.internal:9200" "elastic" "$espwd")
82 | 
83 | echo -n "- Wait for Kibana ($kbcontainer) to be ready"
84 | /bin/bash "$dir/wait_for_kb_available"
85 | echo "- Creating a dataview in Kibana"
86 | /bin/bash "$dir/new_kb_dataview" "http://host.docker.internal:5601" "elastic" "$espwd"
87 | 
88 | end=$(date +%s)
89 | 
90 | echo ""
91 | echo "---Ready to use!---"
92 | echo ""
93 | echo "Elasticsearch"
94 | echo " Url: https://host.docker.internal:9200"
95 | echo " User: elastic"
96 | echo " Password: $espwd"
97 | echo
98 | echo " User: hass_writer"
99 | echo " Password: changeme"
100 | echo " API Key: $apikey"
101 | echo
102 | echo "Kibana"
103 | echo " Host Url: http://host.docker.internal:5601"
104 | echo " User: elastic"
105 | echo " Password: $espwd"
106 | echo
107 | echo "Setup Information"
108 | echo " Version: $version"
109 | echo " Setup Code:" "$kbcode"
110 | echo " Enrollment Token:" "$enrolltoken"
111 | echo " Containers: $escontainer $kbcontainer"
112 | echo " Duration: $((end - start)) seconds"
113 | echo ""
114 | echo "Credentials and certificate storage"
115 | echo " Credential File: ../../../es_env/es_cluster.creds"
116 | echo " Certificate File: ../../../es_env/http_ca.crt"
117 | 
118 | # Save a file with the credentials in the scripts directory which is 2 directories up
119 | echo "elastic: $espwd" > "$dir/../../../es_env/es_cluster.creds"
120 | echo "apikey: $apikey" >> "$dir/../../../es_env/es_cluster.creds"
$apikey" >> "$dir/../../../es_env/es_cluster.creds" 121 | echo "hass_writer: changeme" >> "$dir/../../../es_env/es_cluster.creds" 122 | 123 | chown -R 1000:1000 "$dir/../../../es_env" # make sure we dont save our certs as root 124 | -------------------------------------------------------------------------------- /custom_components/elasticsearch/translations/en.json: -------------------------------------------------------------------------------- 1 | { 2 | "config": { 3 | "abort": { 4 | "reauth_successful": "Re-authentication was successful" 5 | }, 6 | "error": { 7 | "cannot_connect": "Failed to connect to host on port. Is it reachable?", 8 | "invalid_basic_auth": "Invalid username or password.", 9 | "invalid_api_key": "Invalid API Key.", 10 | "insufficient_privileges": "Insufficient privileges for specified user.", 11 | "missing_credentials": "This cluster requires authentication. Please provide a username and password.", 12 | "untrusted_certificate": "Elasticsearch's server certificate could not be verified. Either disable TLS verification, or specify a custom CA path below.", 13 | "unsupported_version": "Unsupported version of Elasticsearch detected. The minimum supported version is 7.11.0." 14 | }, 15 | "step": { 16 | "user": { 17 | "title": "Elasticsearch Cluster Information", 18 | "description": "Please provide the URL to access your elasticsearch cluster in the format http(s)://my_elasticsearch_server:port (https://my_elasticsearch:9200).", 19 | "data": { 20 | "url": "URL to access the Elasticsearch Cluster" 21 | } 22 | }, 23 | "certificate_issues": { 24 | "title": "Untrusted Certificate with SSL/TLS", 25 | "description": "Elasticsearch's server certificate could not be verified. Either disable TLS verification, or specify a custom CA path below.", 26 | "data": { 27 | "verify_ssl": "Verify TLS certificate (recommended)", 28 | "ssl_verify_hostname": "Verify hostname matches server certificate", 29 | "ssl_ca_path": "Fully qualified path to custom certificate authority" 30 | } 31 | }, 32 | "authentication_issues": { 33 | "title": "Select an Authentication Method", 34 | "description": "Choose an authentication method to connect to your Elasticsearch cluster.", 35 | "data": { 36 | "basic_auth": "Authenticate via username/password", 37 | "api_key": "Authenticate via API Key" 38 | } 39 | }, 40 | "basic_auth": { 41 | "title": "Authenticate via username/password", 42 | "data": { 43 | "username": "Username", 44 | "password": "Password" 45 | } 46 | }, 47 | "api_key": { 48 | "title": "Authenticate via API Key", 49 | "data": { 50 | "api_key": "API Key" 51 | } 52 | }, 53 | "reauth_confirm": { 54 | "title": "Reauthenticate", 55 | "data": { 56 | "api_key": "API Key", 57 | "username": "Username", 58 | "password": "Password" 59 | } 60 | } 61 | } 62 | }, 63 | "options": { 64 | "step": { 65 | "options": { 66 | "title": "Elastic Integration Settings", 67 | "description": "Configure the polling and publishing settings for the Elastic integration.", 68 | "data": { 69 | "publish_frequency": "Send events to Elasticsearch at this interval", 70 | "polling_frequency": "Gather all entity states at this interval", 71 | "change_detection_type": "Choose what types of entity changes to listen for and publish", 72 | "tags": "Tags to apply to all published events", 73 | "include_targets": "Toggle to only publish the set of targets below", 74 | "exclude_targets": "Toggle to exclude publishing the set of targets below", 75 | "targets_to_include": "Select the targets to include", 76 | "targets_to_exclude": "Select the 
targets to exclude" 77 | }, 78 | "data_description": { 79 | "publish_frequency": "Set to zero to disable publishing.", 80 | "polling_frequency": "Set to zero to only publish entity changes." 81 | } 82 | } 83 | } 84 | }, 85 | "selector": { 86 | "authentication_issues": { 87 | "options": { 88 | "api_key": "Authenticate using an API Key", 89 | "basic_auth": "Authenticate using Username/Password" 90 | } 91 | }, 92 | "change_detection_type": { 93 | "options": { 94 | "state": "Track entities with state changes", 95 | "attribute": "Track entities with attribute changes" 96 | } 97 | } 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /docs/ingest_advanced.md: -------------------------------------------------------------------------------- 1 | # Advanced ingest configuration 2 | 3 | !!! note 4 | 5 | This section describes advanced use cases. Most users will not need to customize their ingest configuration. 6 | 7 | ## Defining your own Index Mappings, Settings, and Ingest Pipeline 8 | 9 | You can customize the mappings, settings and define an [ingest pipeline](https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest.html) by creating a [component template](https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-component-template.html) called `metrics-homeassistant@custom` 10 | 11 | ### Custom Ingest Pipeline 12 | 13 | The following is an example on how to push your Home Assistant metrics into an ingest pipeline called `metrics-homeassistant-pipeline`: 14 | 15 | === "Dev Tools" 16 | Run these commands using Kibana's [Dev Tools console](https://www.elastic.co/guide/en/kibana/current/console-kibana.html): 17 | 18 | ``` 19 | PUT _ingest/pipeline/metrics-homeassistant-pipeline 20 | { 21 | "description": "Pipeline for HomeAssistant dataset", 22 | "processors": [ ] 23 | } 24 | ``` 25 | 26 | ``` 27 | PUT _component_template/metrics-homeassistant@custom 28 | { 29 | "template": { 30 | "mappings": {} 31 | "settings": { 32 | "index.default_pipeline": "metrics-homeassistant-pipeline", 33 | } 34 | } 35 | } 36 | ``` 37 | 38 | === "curl" 39 | 40 | ```bash 41 | ES_URL=https://localhost:9200 # (1) 42 | ES_USER=elastic # (2) 43 | ES_PASSWORD=changeme # (3) 44 | curl -X PUT "$ES_URL/_ingest/pipeline/metrics-homeassistant-pipeline" \ 45 | -u "$ES_USER":"ES_PASSWORD" \ 46 | -H "Content-Type: application/json" \ 47 | -d' 48 | { 49 | "description": "Pipeline for HomeAssistant dataset", 50 | "processors": [ ] 51 | } 52 | ' # (4) 53 | 54 | curl -X PUT "$ES_URL/_component_template/metrics-homeassistant@custom" \ 55 | -u "$ES_USER":"ES_PASSWORD" \ 56 | -H "Content-Type: application/json" \ 57 | -d' 58 | { 59 | "template": { 60 | "mappings": {} 61 | "settings": { 62 | "index.default_pipeline": "metrics-homeassistant-pipeline", 63 | } 64 | } 65 | } 66 | ' 67 | ``` 68 | 69 | 1. Replace `https://localhost:9200` with the URL of your Elasticsearch instance 70 | 2. Replace `elastic` with your Elasticsearch username 71 | 3. Replace `changeme` with your Elasticsearch password 72 | 4. Add your ingest pipeline processors to the `processors` array 73 | 74 | Component template changes apply when the datastream performs a rollover so the first time you modify the template you may need to manually initiate index/datastream rollover to start applying the pipeline. 
75 | 76 | ### Custom Attribute mappings 77 | 78 | The following example shows how to provide custom mappings for any attributes you're interested in making available as other data types: 79 | 80 | === "Dev Tools" 81 | Run these commands using Kibana's [Dev Tools console](https://www.elastic.co/guide/en/kibana/current/console-kibana.html): 82 | 83 | ``` 84 | PUT /_component_template/metrics-homeassistant@custom 85 | { 86 | "template": { 87 | "mappings": { 88 | "properties": { 89 | "hass.entity.attributes": { 90 | "type": "object", 91 | "properties": { 92 | "temperature": { 93 | "type": "float", 94 | "ignore_malformed": true 95 | } 96 | } 97 | } 98 | } 99 | } 100 | } 101 | } 102 | ``` 103 | 104 | === "curl" 105 | 106 | ```bash 107 | ES_URL=https://localhost:9200 # (1) 108 | ES_USER=elastic # (2) 109 | ES_PASSWORD=changeme # (3) 110 | 111 | curl -X PUT "$ES_URL/_component_template/metrics-homeassistant@custom" \ 112 | -u "$ES_USER:$ES_PASSWORD" \ 113 | -H "Content-Type: application/json" \ 114 | -d' 115 | { 116 | "template": { 117 | "mappings": { 118 | "properties": { 119 | "hass.entity.attributes": { 120 | "type": "object", 121 | "properties": { 122 | "temperature": { 123 | "type": "float", 124 | "ignore_malformed": true 125 | } 126 | } 127 | } 128 | } 129 | } 130 | } 131 | } 132 | ' # (4) 133 | ``` 134 | 135 | 1. Replace `https://localhost:9200` with the URL of your Elasticsearch instance 136 | 2. Replace `elastic` with your Elasticsearch username 137 | 3. Replace `changeme` with your Elasticsearch password 138 | 4. Modify the body of the component template to include desired mappings 139 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Elasticsearch Component for Home-Assistant 2 | ![build](https://github.com/legrego/homeassistant-elasticsearch/actions/workflows/cron.yml/badge.svg) 3 | [![hacs_badge](https://img.shields.io/badge/HACS-Default-orange.svg)](https://github.com/hacs/integration) 4 | ===== 5 | 6 | Publish Home Assistant events to your [Elasticsearch](https://elastic.co) cluster! 7 | 8 | Documentation: https://legrego.github.io/homeassistant-elasticsearch/ 9 | 10 | ## Table of Contents 11 | 12 | - [Getting started](#getting-started) 13 | - [Features](#features) 14 | - [Inspiration](#inspiration) 15 | - [Create your own cluster health sensor](#create-your-own-cluster-health-sensor) 16 | - [Support](#support) 17 | - [Contributing](#contributing) 18 | 19 | ## Getting started 20 | 21 | Visit our documentation site for instructions on [installing](https://legrego.github.io/homeassistant-elasticsearch/install/), [configuring](https://legrego.github.io/homeassistant-elasticsearch/configure/), and [using](https://legrego.github.io/homeassistant-elasticsearch/using_kibana/) this component. 
22 | 23 | ## Features 24 | 25 | - Efficiently publishes Home-Assistant events to Elasticsearch using the Bulk API 26 | - Automatically sets up Datastreams using Time Series Data Streams ("TSDS"), Datastream Lifecycle Management ("DLM"), or Index Lifecycle Management ("ILM") depending on your cluster's capabilities 27 | - Supports Elastic's [stack security features](https://www.elastic.co/elastic-stack/security) via optional username, password, and API keys 28 | - Selectively publish events based on domains or entities 29 | 30 | ## Inspiration 31 | 32 | ### HVAC Usage 33 | Graph your home's climate and HVAC Usage: 34 | 35 | ![img](docs/assets/hvac-history.png) 36 | 37 | ### Weather Station 38 | Visualize and alert on data from your weather station: 39 | 40 | ![img](docs/assets/weather-station.png) 41 | 42 | ![img](docs/assets/weather-station-wind-pressure.png) 43 | 44 | ### Additional examples 45 | 46 | Some usage examples inspired by [real users](https://github.com/legrego/homeassistant-elasticsearch/issues/203): 47 | 48 | - Utilizing a Raspberry Pi in [kiosk mode](https://www.raspberrypi.com/tutorials/how-to-use-a-raspberry-pi-in-kiosk-mode/) with a 15" display, the homeassistant-elasticsearch integration enables the creation of rotating fullscreen [Elasticsearch Canvas](https://www.elastic.co/kibana/canvas) workpads. These workpads display metrics collected from various Home Assistant integrations, offering visually dynamic and informative dashboards for monitoring smart home data. 49 | - To address temperature maintenance issues in refrigerators and freezers, temperature sensors in each appliance report data to Home Assistant, which is then published to Elasticsearch. Kibana's [alerting framework](https://www.elastic.co/kibana/alerting) is employed to set up rules that notify the user if temperatures deviate unfavorably for an extended period. The Elastic rule engine and aggregations simplify the monitoring process for this specific use case. 50 | - Monitoring the humidity and temperature in a snake enclosure/habitat for a user's daughter, the integration facilitates the use of Elastic's Alerting framework. This choice is motivated by the framework's suitability for the monitoring requirements, providing a more intuitive solution compared to Home Assistant automations. 51 | - The integration allows users to maintain a smaller subset of data, focusing on individual stats of interest, for an extended period. This capability contrasts with the limited retention achievable with Home Assistant and databases like MariaDB/MySQL. This extended data retention facilitates very long-term trend analysis, such as for weather data, enabling users to glean insights over an extended timeframe. 52 | 53 | 54 | ## Create your own cluster health sensor 55 | Versions prior to `0.6.0` included a cluster health sensor. This has been removed in favor of a more generic approach. You can create your own cluster health sensor by using Home Assistant's built-in [REST sensor](https://www.home-assistant.io/integrations/sensor.rest). 56 | 57 | ```yaml 58 | # Example configuration 59 | sensor: 60 | - platform: rest 61 | name: "Cluster Health" 62 | unique_id: "cluster_health" # Replace with your own unique id. 
See https://www.home-assistant.io/integrations/sensor.rest#unique_id 63 | resource: "https://example.com/_cluster/health" # Replace with your Elasticsearch URL 64 | username: hass # Replace with your username 65 | password: changeme # Replace with your password 66 | value_template: "{{ value_json.status }}" 67 | json_attributes: # Optional attributes you may want to include from the /_cluster/health API response 68 | - "cluster_name" 69 | - "status" 70 | - "timed_out" 71 | - "number_of_nodes" 72 | - "number_of_data_nodes" 73 | - "active_primary_shards" 74 | - "active_shards" 75 | - "relocating_shards" 76 | - "initializing_shards" 77 | - "unassigned_shards" 78 | - "delayed_unassigned_shards" 79 | - "number_of_pending_tasks" 80 | - "number_of_in_flight_fetch" 81 | - "task_max_waiting_in_queue_millis" 82 | - "active_shards_percent_as_number" 83 | ``` 84 | 85 | ## Support 86 | 87 | This project is not endorsed or supported by either Elastic or Home-Assistant - please open a GitHub issue for any questions, bugs, or feature requests. 88 | 89 | ## Contributing 90 | 91 | Contributions are welcome! Please see the [Contributing Guide](CONTRIBUTING.md) for more information. 92 | -------------------------------------------------------------------------------- /custom_components/elasticsearch/es_integration.py: -------------------------------------------------------------------------------- 1 | """Support for sending event data to an Elasticsearch cluster.""" 2 | 3 | from __future__ import annotations 4 | 5 | from types import MappingProxyType 6 | from typing import TYPE_CHECKING 7 | 8 | from homeassistant.const import ( 9 | CONF_API_KEY, 10 | CONF_PASSWORD, 11 | CONF_TIMEOUT, 12 | CONF_URL, 13 | CONF_USERNAME, 14 | CONF_VERIFY_SSL, 15 | ) 16 | 17 | from custom_components.elasticsearch.const import ( 18 | CONF_CHANGE_DETECTION_TYPE, 19 | CONF_DEBUG_ATTRIBUTE_FILTERING, 20 | CONF_EXCLUDE_TARGETS, 21 | CONF_INCLUDE_TARGETS, 22 | CONF_POLLING_FREQUENCY, 23 | CONF_PUBLISH_FREQUENCY, 24 | CONF_SSL_CA_PATH, 25 | CONF_SSL_VERIFY_HOSTNAME, 26 | CONF_TAGS, 27 | CONF_TARGETS_TO_EXCLUDE, 28 | CONF_TARGETS_TO_INCLUDE, 29 | ES_CHECK_PERMISSIONS_DATASTREAM, 30 | ) 31 | from custom_components.elasticsearch.errors import ESIntegrationException 32 | from custom_components.elasticsearch.es_datastream_manager import DatastreamManager 33 | from custom_components.elasticsearch.es_gateway_8 import Elasticsearch8Gateway, Gateway8Settings 34 | from custom_components.elasticsearch.es_publish_pipeline import Pipeline, PipelineSettings 35 | from custom_components.elasticsearch.logger import LOGGER as BASE_LOGGER 36 | from custom_components.elasticsearch.logger import async_log_enter_exit_debug, log_enter_exit_debug 37 | 38 | if TYPE_CHECKING: # pragma: no cover 39 | from logging import Logger 40 | from typing import Any 41 | 42 | from homeassistant.config_entries import ConfigEntry 43 | from homeassistant.core import HomeAssistant 44 | 45 | 46 | class ElasticIntegration: 47 | """Integration for publishing entity state change events to Elasticsearch.""" 48 | 49 | @log_enter_exit_debug 50 | def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry, log: Logger = BASE_LOGGER) -> None: 51 | """Integration initialization.""" 52 | 53 | self._hass = hass 54 | 55 | self._logger = log 56 | self._config_entry = config_entry 57 | 58 | self._logger.info("Initializing integration components.") 59 | 60 | # Initialize our Elasticsearch Gateway 61 | gateway_settings: Gateway8Settings = self.build_gateway_parameters( 62 | 
config_entry=self._config_entry, 63 | ) 64 | self._gateway = Elasticsearch8Gateway(log=self._logger, gateway_settings=gateway_settings) 65 | 66 | # Initialize our publishing pipeline 67 | manager_parameters = self.build_pipeline_manager_parameters( 68 | hass=self._hass, gateway=self._gateway, config_entry=self._config_entry 69 | ) 70 | self._pipeline_manager = Pipeline.Manager(log=self._logger, **manager_parameters) 71 | 72 | # Initialize our Datastream manager 73 | self._datastream_manager = DatastreamManager(log=self._logger, gateway=self._gateway) 74 | 75 | @async_log_enter_exit_debug 76 | async def async_init(self) -> None: 77 | """Async init procedure.""" 78 | 79 | try: 80 | await self._gateway.async_init() 81 | await self._datastream_manager.async_init() 82 | await self._pipeline_manager.async_init(config_entry=self._config_entry) 83 | 84 | except ESIntegrationException as err: 85 | self._logger.error("Error initializing integration: %s", err) 86 | self._logger.debug("Error initializing integration", exc_info=True) 87 | await self.async_shutdown() 88 | 89 | raise 90 | 91 | async def async_shutdown(self) -> None: 92 | """Async shutdown procedure.""" 93 | self._pipeline_manager.stop() 94 | await self._gateway.stop() 95 | 96 | @classmethod 97 | def build_gateway_parameters( 98 | cls, 99 | config_entry: ConfigEntry, 100 | minimum_privileges: MappingProxyType[str, Any] = ES_CHECK_PERMISSIONS_DATASTREAM, 101 | ) -> Gateway8Settings: 102 | """Build the parameters for the Elasticsearch gateway.""" 103 | return Gateway8Settings( 104 | url=config_entry.data[CONF_URL], 105 | username=config_entry.data.get(CONF_USERNAME), 106 | password=config_entry.data.get(CONF_PASSWORD), 107 | api_key=config_entry.data.get(CONF_API_KEY), 108 | verify_certs=config_entry.data.get(CONF_VERIFY_SSL, False), 109 | verify_hostname=config_entry.data.get(CONF_SSL_VERIFY_HOSTNAME, False), 110 | ca_certs=config_entry.data.get(CONF_SSL_CA_PATH), 111 | request_timeout=config_entry.data.get(CONF_TIMEOUT, 30), 112 | minimum_privileges=minimum_privileges, 113 | ) 114 | 115 | @classmethod 116 | def build_pipeline_manager_parameters(cls, hass, gateway, config_entry: ConfigEntry) -> dict: 117 | """Build the parameters for the Elasticsearch pipeline manager.""" 118 | 119 | # Options are never none, but mypy doesn't know that 120 | assert config_entry.options is not None 121 | 122 | settings = PipelineSettings( 123 | polling_frequency=config_entry.options[CONF_POLLING_FREQUENCY], 124 | publish_frequency=config_entry.options[CONF_PUBLISH_FREQUENCY], 125 | change_detection_type=config_entry.options[CONF_CHANGE_DETECTION_TYPE], 126 | tags=config_entry.options[CONF_TAGS], 127 | debug_attribute_filtering=config_entry.options.get(CONF_DEBUG_ATTRIBUTE_FILTERING, False), 128 | include_targets=config_entry.options[CONF_INCLUDE_TARGETS], 129 | exclude_targets=config_entry.options[CONF_EXCLUDE_TARGETS], 130 | included_areas=config_entry.options[CONF_TARGETS_TO_INCLUDE].get("area_id", []), 131 | excluded_areas=config_entry.options[CONF_TARGETS_TO_EXCLUDE].get("area_id", []), 132 | included_labels=config_entry.options[CONF_TARGETS_TO_INCLUDE].get("label_id", []), 133 | excluded_labels=config_entry.options[CONF_TARGETS_TO_EXCLUDE].get("label_id", []), 134 | included_devices=config_entry.options[CONF_TARGETS_TO_INCLUDE].get("device_id", []), 135 | excluded_devices=config_entry.options[CONF_TARGETS_TO_EXCLUDE].get("device_id", []), 136 | included_entities=config_entry.options[CONF_TARGETS_TO_INCLUDE].get("entity_id", []), 137 | 
excluded_entities=config_entry.options[CONF_TARGETS_TO_EXCLUDE].get("entity_id", []), 138 | ) 139 | 140 | return {"hass": hass, "gateway": gateway, "settings": settings} 141 | -------------------------------------------------------------------------------- /.ruff.toml: -------------------------------------------------------------------------------- 1 | # The contents of this file are based on https://github.com/home-assistant/core/blob/dev/pyproject.toml 2 | 3 | target-version = "py312" 4 | line-length = 110 5 | 6 | lint.fixable = ["ALL"] 7 | 8 | lint.select = [ 9 | "A001", # Variable {name} is shadowing a Python builtin 10 | "B002", # Python does not support the unary prefix increment 11 | "B005", # Using .strip() with multi-character strings is misleading 12 | "B007", # Loop control variable {name} not used within loop body 13 | "B014", # Exception handler with duplicate exception 14 | "B015", # Pointless comparison. Did you mean to assign a value? Otherwise, prepend assert or remove it. 15 | "B017", # pytest.raises(BaseException) should be considered evil 16 | "B018", # Found useless attribute access. Either assign it to a variable or remove it. 17 | "B023", # Function definition does not bind loop variable {name} 18 | "B026", # Star-arg unpacking after a keyword argument is strongly discouraged 19 | "B032", # Possible unintentional type annotation (using :). Did you mean to assign (using =)? 20 | "B904", # Use raise from to specify exception cause 21 | "B905", # zip() without an explicit strict= parameter 22 | "BLE", 23 | "C", # complexity 24 | "COM818", # Trailing comma on bare tuple prohibited 25 | "D", # docstrings 26 | "DTZ003", # Use datetime.now(tz=) instead of datetime.utcnow() 27 | "DTZ004", # Use datetime.fromtimestamp(ts, tz=) instead of datetime.utcfromtimestamp(ts) 28 | "E", # pycodestyle 29 | "F", # pyflakes/autoflake 30 | "FLY", # flynt 31 | "G", # flake8-logging-format 32 | "I", # isort 33 | "INP", # flake8-no-pep420 34 | "ISC", # flake8-implicit-str-concat 35 | "ICN001", # import conventions; {name} should be imported as {asname} 36 | "LOG", # flake8-logging 37 | "N804", # First argument of a class method should be named cls 38 | "N805", # First argument of a method should be named self 39 | "N815", # Variable {name} in class scope should not be mixedCase 40 | "PERF", # Perflint 41 | "PGH", # pygrep-hooks 42 | "PIE", # flake8-pie 43 | "PL", # pylint 44 | "PT", # flake8-pytest-style 45 | "PYI", # flake8-pyi 46 | "RET", # flake8-return 47 | "RSE", # flake8-raise 48 | "RUF005", # Consider iterable unpacking instead of concatenation 49 | "RUF006", # Store a reference to the return value of asyncio.create_task 50 | "RUF010", # Use explicit conversion flag 51 | "RUF013", # PEP 484 prohibits implicit Optional 52 | "RUF018", # Avoid assignment expressions in assert statements 53 | "RUF019", # Unnecessary key check before dictionary access 54 | # "RUF100", # Unused `noqa` directive; temporarily every now and then to clean them up 55 | "S102", # Use of exec detected 56 | "S103", # bad-file-permissions 57 | "S108", # hardcoded-temp-file 58 | "S306", # suspicious-mktemp-usage 59 | "S307", # suspicious-eval-usage 60 | "S313", # suspicious-xmlc-element-tree-usage 61 | "S314", # suspicious-xml-element-tree-usage 62 | "S315", # suspicious-xml-expat-reader-usage 63 | "S316", # suspicious-xml-expat-builder-usage 64 | "S317", # suspicious-xml-sax-usage 65 | "S318", # suspicious-xml-mini-dom-usage 66 | "S319", # suspicious-xml-pull-dom-usage 67 | "S601", # paramiko-call 68 | "S602", # 
subprocess-popen-with-shell-equals-true 69 | "S604", # call-with-shell-equals-true 70 | "S608", # hardcoded-sql-expression 71 | "S609", # unix-command-wildcard-injection 72 | "SIM", # flake8-simplify 73 | "SLF", # flake8-self 74 | "SLOT", # flake8-slots 75 | "T100", # Trace found: {name} used 76 | "T20", # flake8-print 77 | "TID251", # Banned imports 78 | "TRY", # tryceratops 79 | "UP", # pyupgrade 80 | "W", # pycodestyle 81 | 82 | # this component's additions 83 | "F401" 84 | ] 85 | 86 | lint.ignore = [ 87 | "D202", # No blank lines allowed after function docstring 88 | "D203", # 1 blank line required before class docstring 89 | "D213", # Multi-line docstring summary should start at the second line 90 | "D406", # Section name should end with a newline 91 | "D407", # Section name underlining 92 | "E501", # line too long 93 | 94 | "PLC1901", # {existing} can be simplified to {replacement} as an empty string is falsey; too many false positives 95 | "PLR0911", # Too many return statements ({returns} > {max_returns}) 96 | "PLR0912", # Too many branches ({branches} > {max_branches}) 97 | "PLR0913", # Too many arguments to function call ({c_args} > {max_args}) 98 | "PLR0915", # Too many statements ({statements} > {max_statements}) 99 | "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable 100 | "PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target 101 | "PT011", # pytest.raises({exception}) is too broad, set the `match` parameter or use a more specific exception 102 | "PT018", # Assertion should be broken down into multiple parts 103 | "RUF001", # String contains ambiguous unicode character. 104 | "RUF002", # Docstring contains ambiguous unicode character. 105 | "RUF003", # Comment contains ambiguous unicode character. 106 | "RUF015", # Prefer next(...) over single element slice 107 | "SIM102", # Use a single if statement instead of nested if statements 108 | "SIM103", # Return the condition {condition} directly 109 | "SIM108", # Use ternary operator {contents} instead of if-else-block 110 | "SIM115", # Use context handler for opening files 111 | "TRY003", # Avoid specifying long messages outside the exception class 112 | "TRY400", # Use `logging.exception` instead of `logging.error` 113 | 114 | # May conflict with the formatter, https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules 115 | "W191", 116 | "E111", 117 | "E114", 118 | "E117", 119 | "D206", 120 | "D300", 121 | "Q", 122 | "COM812", 123 | "COM819", 124 | "ISC001", 125 | 126 | # Disabled because ruff does not understand type of __all__ generated by a function 127 | "PLE0605", 128 | 129 | # temporarily disabled 130 | "PT019", 131 | "PYI024", # Use typing.NamedTuple instead of collections.namedtuple 132 | "RET503", 133 | "RET501", 134 | "TRY002", 135 | "TRY301", 136 | ] 137 | 138 | [lint.flake8-pytest-style] 139 | fixture-parentheses = false 140 | mark-parentheses = false 141 | 142 | [lint.pyupgrade] 143 | keep-runtime-typing = true 144 | 145 | [lint.mccabe] 146 | max-complexity = 25 147 | -------------------------------------------------------------------------------- /docs/configure.md: -------------------------------------------------------------------------------- 1 | # Configuration 2 | 3 | ## Gather Elasticsearch details 4 | 5 | You will need the following details to configure the Elasticsearch integration: 6 | 7 | 1. The URL of your Elasticsearch instance 8 | 2. Credentials to access the Elasticsearch instance (if required) 9 | 3. 
The SSL certificate authority (CA) file, if you are using a custom CA not trusted by the host system 10 | 11 | ### Credentials 12 | 13 | You must provide credentials if your Elasticsearch instance is secured. While we support authenticating via username/password, we recommend using API Keys for simplicity and compatibility with all versions of Elasticsearch. 14 | 15 | Use the following command to create an API Key for the Home Assistant component: 16 | 17 | === "curl" 18 | ```bash 19 | curl https://localhost:9200/_security/api_key \ # (1) 20 | -X POST \ 21 | -H "Content-Type: application/json" \ 22 | -u elastic:changeme \ # (2) 23 | -d' 24 | { 25 | "name": "home_assistant_component", 26 | "role_descriptors": { 27 | "hass_writer": { 28 | "cluster": [ 29 | "manage_index_templates", 30 | "monitor" 31 | ], 32 | "indices": [ 33 | { 34 | "names": [ 35 | "metrics-homeassistant.*" 36 | ], 37 | "privileges": [ 38 | "manage", 39 | "index", 40 | "create_index", 41 | "create" 42 | ] 43 | } 44 | ] 45 | } 46 | } 47 | } 48 | ' 49 | ``` 50 | 51 | 1. Replace `https://localhost:9200` with the URL of your Elasticsearch instance. 52 | 2. Replace `elastic:changeme` with your Elasticsearch credentials. 53 | 54 | === "Dev Tools" 55 | ``` 56 | POST /_security/api_key 57 | { 58 | "name": "home_assistant_component", 59 | "role_descriptors": { 60 | "hass_writer": { 61 | "cluster": [ 62 | "manage_index_templates", 63 | "monitor" 64 | ], 65 | "indices": [ 66 | { 67 | "names": [ 68 | "metrics-homeassistant.*" 69 | ], 70 | "privileges": [ 71 | "manage", 72 | "index", 73 | "create_index", 74 | "create" 75 | ] 76 | } 77 | ] 78 | } 79 | } 80 | } 81 | ``` 82 | 83 | The API Key will be returned in the response. Save the `encoded` field for use in the configuration. 84 | 85 | Read the [Elasticsearch documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-api-key.html) for more information on creating API Keys. 86 | 87 | ## Add the integration 88 | 89 | This component is configured interactively via Home Assistant's integration configuration page. 90 | 91 | 1. Verify you have restarted Home Assistant after installing the component. 92 | 2. From the [`Integrations` configuration menu](https://my.home-assistant.io/redirect/integrations/), add a new `Elasticsearch` integration. 93 | 3. Provide the URL of your Elasticsearch server in the format `https://<host>:<port>`. For example, `https://localhost:9200`. 94 | 4. If your Elasticsearch server's certificate is untrusted, you will be prompted to provide the path to the CA file or disable certificate verification. 95 | 5. If your Elasticsearch instance is secured, you will be prompted to provide either a username and password or an API Key. 96 | 6. Once the integration is set up, you may tweak all configuration options via the `Configure` button on the [integrations page](https://my.home-assistant.io/redirect/integration/?domain=elasticsearch){:target="_blank"}. 97 | 98 | ## Configuration options 99 | 100 | Select `Configure` from the integration's homepage to configure the following settings. 101 | 102 | [![Open your Home Assistant instance and show the Elasticsearch integration.](https://my.home-assistant.io/badges/integration.svg)](https://my.home-assistant.io/redirect/integration/?domain=elasticsearch){:target="_blank"} 103 | 104 | ### Send events to Elasticsearch at this interval 105 | The frequency at which events are published to Elasticsearch, in seconds. The default is `60`. 
106 | 107 | ### Gather all entity states at this interval 108 | The frequency at which all entity states are gathered, in seconds. The default is `60`. 109 | 110 | ### Choose what types of entity changes to listen for and publish 111 | There are two types of entity changes that can be published to Elasticsearch: 112 | - `Track entities with state changes` - Publish entities when their state changes 113 | - `Track entities with attribute changes` - Publish entities when their attributes change 114 | 115 | Enabling both options will publish entities when either their state or attributes change. 116 | 117 | ### Tags to apply to all published events 118 | Tags are values that can be used to filter events in Elasticsearch. You can use this to add tags to all published events. 119 | 120 | ### Toggle to only publish the set of targets below 121 | 122 | Pick area, device, entity, or labels and only publish events from one of these targets. If you select multiple targets, events that match any of the targets will be published. If you select no targets, all events will be published. 123 | 124 | ### Toggle to exclude publishing the set of targets below 125 | 126 | Pick area, device, entity, or labels and exclude events from one of these targets. If you select multiple targets, events that match any of the targets will be excluded. If you also configure `Toggle to only publish the set of targets below`, the exclusion will be applied after the inclusion. 127 | 128 | ## Advanced configuration 129 | 130 | ### Custom certificate authority (CA) 131 | 132 | This component will use the system's default certificate authority (CA) bundle to verify the Elasticsearch server's certificate. If you need to use a custom CA, you can provide the path to the CA file in the integration configuration. 133 | 134 | 1. Place the CA file somewhere within Home Assistant's `configuration` directory. 135 | 2. Follow the steps above to [add the integration](#add-the-integration). 136 | 3. After providing connection details, the component will attempt to establish a connection to the Elasticsearch server. If the server's certificate is not signed by a known CA, you will be prompted for the CA file's path. 137 | 4. Provide the path to the CA file and continue with the setup. 138 | 139 | !!! note 140 | You can choose to bypass certificate verification during setup, if you do not have the CA file available. 141 | -------------------------------------------------------------------------------- /docs/using_kibana.md: -------------------------------------------------------------------------------- 1 | # Using Kibana 2 | 3 | The integration will put data into Elasticsearch under the `metrics-homeassistant.*` [data stream](https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams.html). To explore your data, create visualizations, or dashboards in Kibana you first need to create a [Data View](https://www.elastic.co/guide/en/kibana/current/data-views.html). 4 | 5 | ## Create a data view 6 | 7 | === "Kibana UI" 8 | Create a Data View using Kibana's UI: 9 | 10 | 1. Open Kibana 11 | 2. 
Using the instructions in the [Kibana documentation](https://www.elastic.co/guide/en/kibana/current/data-views.html#create-data-view), navigate to the `Data views` page, and create a data view with the following values: 12 | - **Name**: `Home Assistant Metrics` 13 | - **Index pattern**: `metrics-homeassistant.*` 14 | - **Timestamp field**: `@timestamp` 15 | 16 | === "curl" 17 | Create a Data View using `curl` and Kibana's [Data views API](https://www.elastic.co/guide/en/kibana/current/data-views-api.html): 18 | 19 | ```bash 20 | KIBANA_URL=http://localhost:5601 # (1) 21 | KIBANA_USER=elastic # (2) 22 | KIBANA_PASSWORD=changeme # (3) 23 | curl -X POST "$KIBANA_URL/api/data_views/data_view" \ 24 | -u "$KIBANA_USER:$KIBANA_PASSWORD" \ 25 | -H "Content-Type: application/json" \ 26 | -H "kbn-xsrf: true" \ 27 | -d' 28 | { 29 | "data_view": { 30 | "title": "metrics-homeassistant.*", 31 | "name": "Home Assistant Metrics", 32 | "timeFieldName": "@timestamp" 33 | } 34 | } 35 | ' 36 | ``` 37 | 38 | 1. Replace `http://localhost:5601` with the URL of your Kibana instance 39 | 2. Replace `elastic` with your Kibana username 40 | 3. Replace `changeme` with your Kibana password 41 | 42 | === "Dev Tools" 43 | Create a Data View using Kibana's [Dev Tools console](https://www.elastic.co/guide/en/kibana/current/console-kibana.html): 44 | 45 | ``` 46 | POST kbn:/api/data_views/data_view 47 | { 48 | "data_view": { 49 | "title": "metrics-homeassistant.*", 50 | "name": "Home Assistant Metrics", 51 | "timeFieldName": "@timestamp" 52 | } 53 | } 54 | ``` 55 | 56 | ## Exploring Home Assistant data in Kibana 57 | 58 | Once you have created a Data View, you can start exploring your Home Assistant data in Kibana using `Discover`: 59 | 60 | 1. In Kibana select `Discover` 61 | 2. Select the `Home Assistant Metrics` Data View at the top left 62 | 3. You can now see all the Home Assistant data that has been published to Elasticsearch 63 | 4. You can filter the data using the filter bar at the top 64 | 5. You can pull specific fields into the document table at the bottom by clicking on the `+` icon next to a field 65 | 6. You can change the time range of the data you are viewing using the time picker in the top right 66 | 67 | ![img](assets/kibana-discover.png) 68 | 69 | ## Viewing Home Assistant data in Kibana 70 | 71 | When creating new visualizations you may find the following fields useful: 72 | 73 | | Field | Description | 74 | | --- | --- | 75 | | `@timestamp` | The timestamp of the event (ex. `Apr 10, 2024 @ 16:23:25.878`) | 76 | | `hass.entity.attributes.friendly_name` | The name of the entity in Home Assistant (ex. `Living Room EcoBee Temperature`) | 77 | | `hass.entity.device.area.name` | The area of the device in Home Assistant (ex. `Living Room`) | 78 | | `hass.entity.id` | The entity id of the entity in Home Assistant (ex. `sensor.living_room_ecobee_temperature`) | 79 | | `hass.entity.value` | The state of the entity in Home Assistant (ex. `72.5`), as a string-typed value | 80 | | `hass.entity.valueas.integer` | The state of the entity in Home Assistant (ex. `72`), as an integer-typed value | 81 | | `hass.entity.valueas.float` | The state of the entity in Home Assistant (ex. `72.5`), as a float-typed value | 82 | | `hass.entity.valueas.boolean` | The state of the entity in Home Assistant (ex. `true`), as a boolean-typed value | 83 | | `hass.entity.valueas.date` | The state of the entity in Home Assistant (ex. 
`2024-04-10`), as a date-typed value | 84 | | `hass.entity.valueas.datetime` | The state of the entity in Home Assistant (ex. `2024-04-10T16:23:25.878`), as a datetime-typed value | 85 | | `hass.entity.valueas.time` | The state of the entity in Home Assistant (ex. `16:23:25.878`), as a time-typed value | 86 | 87 | 88 | To build a visualization that shows the temperature of a specific entity over time, you can use the following steps: 89 | 90 | 1. In Kibana select `Visualizations` and create a new Lens visualization 91 | 2. Select `Home Assistant Metrics` 92 | 3. For the `Horizontal axis` select `@timestamp` 93 | 4. For the `Vertical axis` select `hass.entity.valueas.float` 94 | 5. In the filter bar at the top, add a filter on `hass.entity.id` set to the entity id you want to visualize (ex. `sensor.living_room_ecobee_temperature`), or on `hass.entity.attributes.friendly_name` set to the entity's friendly name (ex. `Living Room EcoBee Temperature`) 95 | 96 | ![img](assets/kibana-lens-visualization.png) 97 | 98 | ## Inspiration 99 | 100 | ### HVAC Usage 101 | Graph your home's climate and HVAC Usage: 102 | 103 | ![img](assets/hvac-history.png) 104 | 105 | ### Weather Station 106 | Visualize and alert on data from your weather station: 107 | 108 | ![img](assets/weather-station.png) 109 | 110 | ![img](assets/weather-station-wind-pressure.png) 111 | 112 | ### Additional examples 113 | 114 | Some usage examples inspired by [real users](https://github.com/legrego/homeassistant-elasticsearch/issues/203): 115 | 116 | - Utilizing a Raspberry Pi in [kiosk mode](https://www.raspberrypi.com/tutorials/how-to-use-a-raspberry-pi-in-kiosk-mode/) with a 15" display, the homeassistant-elasticsearch integration enables the creation of rotating fullscreen [Elasticsearch Canvas](https://www.elastic.co/kibana/canvas) workpads. These workpads display metrics collected from various Home Assistant integrations, offering visually dynamic and informative dashboards for monitoring smart home data. 117 | - To address temperature maintenance issues in refrigerators and freezers, temperature sensors in each appliance report data to Home Assistant, which is then published to Elasticsearch. Kibana's [alerting framework](https://www.elastic.co/kibana/alerting) is employed to set up rules that notify the user if temperatures deviate unfavorably for an extended period. The Elastic rule engine and aggregations simplify the monitoring process for this specific use case. 118 | - Monitoring the humidity and temperature in a snake enclosure/habitat for a user's daughter, the integration facilitates the use of Elastic's Alerting framework. This choice is motivated by the framework's suitability for the monitoring requirements, providing a more intuitive solution compared to Home Assistant automations. 119 | - The integration allows users to maintain a smaller subset of data, focusing on individual stats of interest, for an extended period. This capability contrasts with the limited retention achievable with Home Assistant and databases like MariaDB/MySQL. This extended data retention facilitates very long-term trend analysis, such as for weather data, enabling users to glean insights over an extended timeframe. 
-------------------------------------------------------------------------------- /tests/test_loop.py: -------------------------------------------------------------------------------- 1 | """Tests for the loop module.""" 2 | 3 | import asyncio 4 | import time 5 | from unittest.mock import AsyncMock, MagicMock 6 | 7 | import pytest 8 | from custom_components.elasticsearch.logger import LOGGER as BASE_LOGGER 9 | from custom_components.elasticsearch.loop import LoopHandler 10 | 11 | 12 | class Test_Initialization: 13 | """Test initialization of the LoopHandler class.""" 14 | 15 | async def test_init(self): 16 | """Test initializing the loop handler.""" 17 | 18 | # Create a mock function 19 | mock_func = MagicMock() 20 | 21 | # Create a LoopHandler instance with a frequency of 1 second 22 | loop_handler = LoopHandler(mock_func, "test_loop", 1) 23 | 24 | # Assert that the function, name, frequency, and log are set correctly 25 | assert loop_handler._func == mock_func 26 | assert loop_handler._name == "test_loop" 27 | assert loop_handler._frequency == 1 28 | assert loop_handler._running is False 29 | assert loop_handler._should_stop is False 30 | assert loop_handler._run_count == 0 31 | assert loop_handler._log == BASE_LOGGER 32 | assert loop_handler._next_run_time <= time.monotonic() 33 | assert loop_handler._next_run_time >= time.monotonic() - 2 34 | 35 | 36 | class Test_Loop_Handler: 37 | """Test the LoopHandler class with synchronous functions.""" 38 | 39 | def test_loop_handler_start(self): 40 | """Test starting the loop handler.""" 41 | 42 | # Create a mock function 43 | mock_func = AsyncMock() 44 | 45 | # Create a LoopHandler instance with a frequency of 1 second 46 | loop_handler = LoopHandler(mock_func, "test_loop", 1) 47 | loop_handler._should_keep_running = MagicMock(side_effect=[True, False]) 48 | 49 | # Start the loop handler 50 | asyncio.run(loop_handler.start()) 51 | 52 | # Assert that the mock function was called at least once 53 | assert mock_func.call_count >= 1 54 | assert loop_handler._should_keep_running.call_count >= 2 55 | 56 | def test_loop_handler_start_exception(self): 57 | """Test the loop handler when the wrapped function raises an exception.""" 58 | 59 | # Create a mock function that throws an exception 60 | mock_func = MagicMock(side_effect=Exception("Test exception")) 61 | 62 | # Create a LoopHandler instance with a frequency of 1 second 63 | loop_handler = LoopHandler(mock_func, "test_loop", 1) 64 | loop_handler._should_keep_running = MagicMock(side_effect=[True, False]) 65 | 66 | # Start the loop handler 67 | with pytest.raises(Exception): # noqa: B017 68 | asyncio.run(loop_handler.start()) 69 | 70 | # Assert that the mock function was called at least once 71 | assert mock_func.call_count >= 1 72 | assert loop_handler._should_keep_running.call_count == 1 73 | 74 | async def test_loop_handler_stop(self): 75 | """Test stopping the loop handler.""" 76 | 77 | # Create a mock function 78 | mock_func = AsyncMock() 79 | mock_func.return_value = None 80 | # Create a LoopHandler instance with a frequency of 1 second 81 | loop_handler = LoopHandler(mock_func, "test_loop", 1) 82 | 83 | # Start the loop handler in the background, make sure it runs for a short duration 84 | # then stop it, wait for a short duration, and assert that the loop handler has stopped 85 | loop_task = asyncio.ensure_future(loop_handler.start()) 86 | await asyncio.sleep(2) 87 | 88 | assert loop_handler._running is True 89 | 90 | # Stop the loop handler 91 | loop_handler.stop() 92 | 93 | assert 
loop_handler._running is False 94 | assert loop_handler._should_keep_running() is False 95 | assert loop_handler._should_stop_running() is True 96 | assert loop_handler._should_stop is True 97 | 98 | # Wait for the loop handler to stop 99 | await asyncio.sleep(1) 100 | 101 | assert loop_task.done() 102 | 103 | async def test_loop_handler_time_to_run(self): 104 | """Test the _time_to_run method of LoopHandler.""" 105 | mock_func = MagicMock() 106 | loop_handler = LoopHandler(mock_func, "test_loop", 1) 107 | 108 | # Set the next run time to be in the future 109 | loop_handler._next_run_time = time.monotonic() + 100 110 | 111 | assert loop_handler._time_to_run() is False 112 | 113 | # Set the next run time to be in the past 114 | loop_handler._next_run_time = 0 115 | 116 | assert loop_handler._time_to_run() is True 117 | 118 | async def test_loop_handler_time_until_next_run(self): 119 | """Test the _time_until_next_run method of LoopHandler.""" 120 | mock_func = MagicMock() 121 | loop_handler = LoopHandler(mock_func, "test_loop", 1) 122 | 123 | # Set the next run time to be in the future 124 | loop_handler._next_run_time = time.monotonic() + 100 125 | 126 | assert loop_handler._time_until_next_run() <= 100 127 | assert loop_handler._time_until_next_run() > 90 128 | 129 | # Set the next run time to be in the past 130 | loop_handler._next_run_time = 0 131 | 132 | assert loop_handler._time_until_next_run() == 0 133 | 134 | async def test_loop_handler_schedule_next_run(self): 135 | """Test the _schedule_next_run method of LoopHandler.""" 136 | mock_func = MagicMock() 137 | loop_handler = LoopHandler(mock_func, "test_loop", 1) 138 | 139 | # Set the next run time to be in the past 140 | loop_handler._next_run_time = 0 141 | 142 | loop_handler._schedule_next_run() 143 | 144 | assert loop_handler._next_run_time > 0 145 | 146 | async def test_loop_handler_wait_for_next_run(self): 147 | """Test the _wait_for_next_run method of LoopHandler.""" 148 | 149 | mock_func = MagicMock() 150 | loop_handler = LoopHandler(mock_func, "test_loop", 1) 151 | 152 | # Set the next run time to be slightly in the future 153 | loop_handler._next_run_time = time.monotonic() + 1 154 | 155 | assert loop_handler._time_to_run() is False 156 | 157 | await loop_handler._wait_for_next_run() 158 | 159 | assert loop_handler._time_to_run() is True 160 | 161 | async def test_loop_handler_wait_for_next_run_should_stop(self): 162 | """Test that _wait_for_next_run raises when the loop should stop.""" 163 | 164 | mock_func = MagicMock() 165 | loop_handler = LoopHandler(mock_func, "test_loop", 1) 166 | 167 | # Set the next run time to be in the future 168 | loop_handler._next_run_time = time.monotonic() + 30 169 | loop_handler._should_stop_running = MagicMock(return_value=True) 170 | 171 | # Waiting for the next run should raise a RuntimeError 172 | with pytest.raises(RuntimeError): 173 | await loop_handler._wait_for_next_run() 174 | 175 | async def test_loop_handler_wait_for_next_run_should_spin(self): 176 | """Test that _wait_for_next_run spins while waiting for the next run.""" 177 | 178 | mock_func = AsyncMock() 179 | loop_handler = LoopHandler(mock_func, "test_loop", 1) 180 | 181 | # Set the next run time to be in the future 182 | loop_handler._next_run_time = time.monotonic() + 1 183 | loop_handler._spin = AsyncMock() 184 | loop_handler._should_stop_running = MagicMock(side_effect=[False, True]) 185 | 186 | # _wait_for_next_run should spin at least once, then raise a RuntimeError 187 | with pytest.raises(RuntimeError): 188 | await loop_handler._wait_for_next_run() 189 | 190 | 
assert loop_handler._spin.call_count >= 1 191 | -------------------------------------------------------------------------------- /custom_components/elasticsearch/datastreams/index_template.py: -------------------------------------------------------------------------------- 1 | """Defines the index template for Elasticsearch data streams.""" 2 | 3 | from typing import Any 4 | 5 | index_template_definition: dict[str, Any] = { 6 | "index_patterns": ["metrics-homeassistant.*-default"], 7 | "template": { 8 | "mappings": { 9 | "dynamic": "false", 10 | "dynamic_templates": [ 11 | { 12 | "hass_entity_attributes": { 13 | "path_match": "hass.entity.attributes.*", 14 | "mapping": { 15 | "type": "text", 16 | "fields": { 17 | "keyword": {"ignore_above": 1024, "type": "keyword"}, 18 | }, 19 | }, 20 | } 21 | } 22 | ], 23 | "properties": { 24 | "data_stream": { 25 | "properties": { 26 | "type": {"type": "constant_keyword", "value": "metrics"}, 27 | "dataset": {"type": "constant_keyword"}, 28 | "namespace": {"type": "constant_keyword"}, 29 | } 30 | }, 31 | "hass": { 32 | "type": "object", 33 | "properties": { 34 | "entity": { 35 | "type": "object", 36 | "properties": { 37 | "id": {"type": "keyword"}, 38 | "domain": {"type": "keyword"}, 39 | "friendly_name": {"type": "keyword"}, 40 | "name": {"type": "keyword"}, 41 | "attributes": {"type": "object", "dynamic": True}, 42 | "object": { 43 | "type": "object", 44 | "properties": {"id": {"type": "keyword", "time_series_dimension": True}}, 45 | }, 46 | "location": {"type": "geo_point"}, 47 | "value": { 48 | "type": "text", 49 | "fields": {"keyword": {"ignore_above": 1024, "type": "keyword"}}, 50 | }, 51 | "valueas": { 52 | "properties": { 53 | "string": { 54 | "type": "text", 55 | "fields": {"keyword": {"ignore_above": 1024, "type": "keyword"}}, 56 | }, 57 | "float": {"ignore_malformed": True, "type": "float"}, 58 | "boolean": {"type": "boolean"}, 59 | "datetime": {"type": "date"}, 60 | "date": {"type": "date", "format": "strict_date"}, 61 | "time": { 62 | "type": "date", 63 | "format": "HH:mm:ss.SSSSSS||time||strict_hour_minute_second||time_no_millis", 64 | }, 65 | "integer": {"ignore_malformed": True, "type": "integer"}, 66 | } 67 | }, 68 | "platform": {"type": "keyword"}, 69 | "unit_of_measurement": {"type": "keyword"}, 70 | "state": {"properties": {"class": {"type": "keyword"}}}, 71 | "labels": {"type": "keyword"}, 72 | "area": { 73 | "type": "object", 74 | "properties": { 75 | "floor": { 76 | "type": "object", 77 | "properties": { 78 | "id": {"type": "keyword"}, 79 | "name": {"type": "keyword"}, 80 | }, 81 | }, 82 | "id": {"type": "keyword"}, 83 | "name": {"type": "keyword"}, 84 | }, 85 | }, 86 | "device": { 87 | "type": "object", 88 | "properties": { 89 | "id": {"type": "keyword"}, 90 | "name": {"type": "keyword"}, 91 | "labels": {"type": "keyword"}, 92 | "area": { 93 | "type": "object", 94 | "properties": { 95 | "floor": { 96 | "type": "object", 97 | "properties": { 98 | "id": {"type": "keyword"}, 99 | "name": {"type": "keyword"}, 100 | }, 101 | }, 102 | "id": {"type": "keyword"}, 103 | "name": {"type": "keyword"}, 104 | }, 105 | }, 106 | }, 107 | }, 108 | "device_class": {"type": "keyword"}, 109 | }, 110 | } 111 | }, 112 | }, 113 | "@timestamp": {"type": "date_nanos", "format": "strict_date_optional_time_nanos"}, 114 | "tags": {"ignore_above": 1024, "type": "keyword"}, 115 | "event": { 116 | "properties": { 117 | "action": {"type": "keyword", "ignore_above": 1024}, 118 | "type": {"ignore_above": 1024, "type": "keyword"}, 119 | "kind": {"ignore_above": 
1024, "type": "keyword"}, 120 | } 121 | }, 122 | "agent": { 123 | "properties": { 124 | "version": {"ignore_above": 1024, "type": "keyword"}, 125 | } 126 | }, 127 | "host": { 128 | "properties": { 129 | "architecture": {"ignore_above": 1024, "type": "keyword"}, 130 | "location": {"type": "geo_point"}, 131 | "hostname": {"ignore_above": 1024, "type": "keyword"}, 132 | "name": {"ignore_above": 1024, "type": "keyword"}, 133 | "os": {"properties": {"name": {"ignore_above": 1024, "type": "keyword"}}}, 134 | } 135 | }, 136 | "ecs": {"properties": {"version": {"ignore_above": 1024, "type": "keyword"}}}, 137 | }, 138 | }, 139 | "settings": { 140 | "codec": "best_compression", 141 | "index.mode": "time_series", 142 | "mapping": {"total_fields": {"limit": "10000"}}, 143 | }, 144 | "lifecycle": {"data_retention": "365d"}, 145 | }, 146 | "composed_of": "metrics-homeassistant@custom", 147 | "ignore_missing_component_templates": "metrics-homeassistant@custom", 148 | "priority": 500, 149 | "data_stream": {}, 150 | "version": 6, 151 | } 152 | -------------------------------------------------------------------------------- /custom_components/elasticsearch/es_gateway.py: -------------------------------------------------------------------------------- 1 | """Encapsulates Elasticsearch operations.""" 2 | 3 | from __future__ import annotations # noqa: I001 4 | 5 | from abc import ABC, abstractmethod 6 | from dataclasses import dataclass 7 | from types import MappingProxyType 8 | from typing import TYPE_CHECKING 9 | from custom_components.elasticsearch.errors import InsufficientPrivileges, UnsupportedVersion 10 | from custom_components.elasticsearch.const import ES_CHECK_PERMISSIONS_DATASTREAM, ELASTIC_MINIMUM_VERSION 11 | 12 | from .logger import LOGGER as BASE_LOGGER 13 | from .logger import log_enter_exit_debug 14 | from typing import Any 15 | 16 | if TYPE_CHECKING: # pragma: no cover 17 | from collections.abc import AsyncGenerator 18 | from logging import Logger 19 | 20 | from elasticsearch8._async.client import AsyncElasticsearch as AsyncElasticsearch8 21 | 22 | 23 | @dataclass 24 | class GatewaySettings(ABC): 25 | """Elasticsearch Gateway settings object.""" 26 | 27 | url: str 28 | username: str | None = None 29 | password: str | None = None 30 | api_key: str | None = None 31 | verify_certs: bool = True 32 | ca_certs: str | None = None 33 | request_timeout: int = 30 34 | verify_hostname: bool = True 35 | minimum_version: tuple[int, int] | None = None 36 | minimum_privileges: MappingProxyType[str, Any] = MappingProxyType[str, Any]({}) 37 | 38 | @abstractmethod 39 | def to_client(self) -> AsyncElasticsearch8: 40 | """Return an Elasticsearch client.""" 41 | 42 | def to_dict(self) -> dict: 43 | """Return a dictionary representation of the settings.""" 44 | return { 45 | "url": self.url, 46 | "username": self.username, 47 | "password": self.password, 48 | "api_key": self.api_key, 49 | "verify_certs": self.verify_certs, 50 | "ca_certs": self.ca_certs, 51 | "request_timeout": self.request_timeout, 52 | "verify_hostname": self.verify_hostname, 53 | "minimum_version": self.minimum_version, 54 | # Perform a shallow copy of the mapping proxy to allow serialization 55 | "minimum_privileges": self.minimum_privileges.copy(), 56 | } 57 | 58 | 59 | class ElasticsearchGateway(ABC): 60 | """Encapsulates Elasticsearch operations.""" 61 | 62 | _logger = BASE_LOGGER 63 | 64 | def __init__( 65 | self, 66 | gateway_settings: GatewaySettings, 67 | log: Logger = BASE_LOGGER, 68 | ) -> None: 69 | """Non-I/O bound init.""" 70 | 71 
| self._logger: Logger = log 72 | 73 | self._previous_ping: bool | None = None 74 | 75 | @log_enter_exit_debug 76 | async def async_init(self) -> None: 77 | """I/O bound init.""" 78 | 79 | # Test the connection 80 | await self.info() 81 | self._previous_ping = True 82 | 83 | # Minimum version check 84 | if not await self._is_supported_version(): 85 | msg = f"Elasticsearch version is not supported. Minimum version: {ELASTIC_MINIMUM_VERSION}" 86 | raise UnsupportedVersion(msg) 87 | 88 | # Check minimum privileges 89 | if await self.has_security() and not await self.has_privileges(self.settings.minimum_privileges): 90 | raise InsufficientPrivileges 91 | 92 | @property 93 | @abstractmethod 94 | def client(self) -> AsyncElasticsearch8: 95 | """Return the underlying ES Client.""" 96 | 97 | @property 98 | @abstractmethod 99 | def settings(self) -> GatewaySettings: 100 | """Return the settings.""" 101 | 102 | @classmethod 103 | @abstractmethod 104 | async def async_init_then_stop( 105 | cls, 106 | url: str, 107 | username: str | None = None, 108 | password: str | None = None, 109 | api_key: str | None = None, 110 | verify_certs: bool = True, 111 | verify_hostname: bool = True, 112 | ca_certs: str | None = None, 113 | request_timeout: int = 30, 114 | minimum_privileges: MappingProxyType[str, Any] = ES_CHECK_PERMISSIONS_DATASTREAM, 115 | log: Logger = BASE_LOGGER, 116 | ) -> None: 117 | """Initialize the gateway and then stop it.""" 118 | 119 | @abstractmethod 120 | async def info(self) -> dict: 121 | """Retrieve info about the connected elasticsearch cluster.""" 122 | 123 | async def check_connection(self) -> bool: 124 | """Check if the connection to the Elasticsearch cluster is working.""" 125 | 126 | previous_ping = self._previous_ping 127 | new_ping = await self.ping() 128 | 129 | # Our first connection check 130 | if previous_ping is None: 131 | established = new_ping 132 | if established: 133 | self._logger.info("Connection to Elasticsearch is established.") 134 | else: 135 | self._logger.error("Failed to establish connection to Elasticsearch.") 136 | 137 | return new_ping 138 | 139 | reestablished: bool = not previous_ping and new_ping 140 | maintained = previous_ping and new_ping 141 | lost: bool = previous_ping and not new_ping 142 | down: bool = not previous_ping and not new_ping 143 | 144 | if maintained: 145 | self._logger.debug("Connection to Elasticsearch is still available.") 146 | 147 | if lost: 148 | self._logger.error("Connection to Elasticsearch has been lost.") 149 | 150 | if down: 151 | self._logger.debug("Connection to Elasticsearch is still down.") 152 | 153 | if reestablished: 154 | self._logger.info("Connection to Elasticsearch has been reestablished.") 155 | 156 | return new_ping 157 | 158 | @abstractmethod 159 | async def ping(self) -> bool: 160 | """Pings the connected elasticsearch cluster.""" 161 | 162 | @abstractmethod 163 | async def has_security(self) -> bool: 164 | """Check if the cluster has security enabled.""" 165 | 166 | @abstractmethod 167 | async def has_privileges(self, privileges) -> bool: 168 | """Check if the user has the specified privileges.""" 169 | 170 | @abstractmethod 171 | async def get_index_template(self, name, ignore=None) -> dict: 172 | """Retrieve an index template.""" 173 | 174 | @abstractmethod 175 | async def put_index_template(self, name, body) -> dict: 176 | """Update an index template.""" 177 | 178 | @abstractmethod 179 | async def get_datastream(self, datastream: str) -> dict: 180 | """Retrieve datastreams.""" 181 | 182 | @abstractmethod 
183 | async def rollover_datastream(self, datastream: str) -> dict: 184 | """Rollover a datastream.""" 185 | 186 | @abstractmethod 187 | async def bulk(self, actions: AsyncGenerator[dict[str, Any], Any]) -> None: 188 | """Perform a bulk operation.""" 189 | 190 | @abstractmethod 191 | async def stop(self) -> None: 192 | """Stop the gateway.""" 193 | 194 | # Helper methods 195 | 196 | async def _is_supported_version(self) -> bool: 197 | """Check if the Elasticsearch version is supported.""" 198 | info: dict = await self.info() 199 | 200 | return self._is_serverless(info) or self._meets_minimum_version(info, ELASTIC_MINIMUM_VERSION) 201 | 202 | def _is_serverless(self, cluster_info: dict) -> bool: 203 | """Check if the Elasticsearch instance is serverless.""" 204 | 205 | return cluster_info["version"]["build_flavor"] == "serverless" 206 | 207 | def _meets_minimum_version(self, cluster_info: dict, minimum_version: tuple[int, int]) -> bool: 208 | """Check if the Elasticsearch version meets the minimum supported version.""" 209 | 210 | version_number_parts = cluster_info["version"]["number"].split(".") 211 | 212 | current_major = int(version_number_parts[0]) 213 | current_minor = int(version_number_parts[1]) 214 | 215 | minimum_major = minimum_version[0] 216 | minimum_minor = minimum_version[1] 217 | 218 | return current_major > minimum_major or ( 219 | current_major == minimum_major and current_minor >= minimum_minor 220 | ) 221 | -------------------------------------------------------------------------------- /custom_components/elasticsearch/entity_details.py: -------------------------------------------------------------------------------- 1 | """Retrieve extended details for an entity.""" 2 | 3 | from __future__ import annotations 4 | 5 | from typing import TYPE_CHECKING 6 | 7 | from homeassistant.helpers import ( 8 | area_registry, 9 | device_registry, 10 | entity_registry, 11 | floor_registry, 12 | ) 13 | from homeassistant.helpers.device_registry import DeviceEntry 14 | from homeassistant.helpers.entity_registry import RegistryEntry 15 | 16 | from .logger import LOGGER as BASE_LOGGER 17 | 18 | if TYPE_CHECKING: # pragma: no cover 19 | from logging import Logger 20 | from typing import Any 21 | 22 | from homeassistant.core import HomeAssistant 23 | 24 | 25 | class ExtendedDeviceEntry: 26 | """Extended device class to include area, floor, and labels.""" 27 | 28 | def __init__( 29 | self, 30 | details: ExtendedEntityDetails, 31 | device: DeviceEntry, 32 | logger: Logger = BASE_LOGGER, 33 | ) -> None: 34 | """Init ExtendedDeviceEntry.""" 35 | self._logger: Logger = logger 36 | self._details: ExtendedEntityDetails = details 37 | 38 | self._device: DeviceEntry = device 39 | 40 | @property 41 | def id(self) -> str: 42 | """Return the id of the device.""" 43 | return self._device.id 44 | 45 | # @property 46 | # def device(self) -> DeviceEntry: 47 | # """Return the Hass DeviceEntry object.""" 48 | # return self._device 49 | 50 | @property 51 | def name(self) -> str | None: 52 | """Return the Hass friendly name of the device.""" 53 | return self._device.name_by_user or self._device.name 54 | 55 | @property 56 | def area(self) -> area_registry.AreaEntry | None: 57 | """Return the Hass AreaEntry of the area of the device.""" 58 | if self._device.area_id is None: 59 | return None 60 | 61 | return self._details.area_registry.async_get_area(self._device.area_id) 62 | 63 | @property 64 | def floor(self) -> floor_registry.FloorEntry | None: 65 | """Return the Hass FloorEntry of the floor of the device.""" 66 | if self.area is None or
self.area.floor_id is None: 67 | return None 68 | 69 | return self._details.floor_registry.async_get_floor(self.area.floor_id) 70 | 71 | @property 72 | def labels(self) -> list[str]: 73 | """Return the labels of the device.""" 74 | return sorted(self._device.labels) 75 | 76 | def to_dict(self) -> dict: 77 | """Convert to dict.""" 78 | 79 | device: dict[str, Any] = { 80 | "id": self.id, 81 | "name": self.name, 82 | "area": None, 83 | "labels": self.labels, 84 | } 85 | 86 | if self.area is not None: 87 | device["area"] = { 88 | "id": self.area.id, 89 | "name": self.area.name, 90 | "floor": None, 91 | } 92 | 93 | if self.floor is not None: 94 | device["area"]["floor"] = { 95 | "id": self.floor.floor_id, 96 | "name": self.floor.name, 97 | } 98 | 99 | return device 100 | 101 | 102 | class ExtendedRegistryEntry: 103 | """Extended entity class to include device, area, floor, and labels.""" 104 | 105 | def __init__( 106 | self, 107 | details: ExtendedEntityDetails, 108 | entity: RegistryEntry, 109 | device: DeviceEntry | None = None, 110 | logger: Logger = BASE_LOGGER, 111 | ) -> None: 112 | """Initialize an ExtendedRegistryEntry.""" 113 | self._logger: Logger = logger 114 | 115 | self._details: ExtendedEntityDetails = details 116 | 117 | self._entity: RegistryEntry = entity 118 | self._device: DeviceEntry | None = device 119 | 120 | # @property 121 | # def entity(self) -> RegistryEntry: 122 | # """Return the Hass RegistryEntry object.""" 123 | # return self._entity 124 | 125 | @property 126 | def device(self) -> ExtendedDeviceEntry | None: 127 | """Return the ExtendedDeviceEntry object for the entity.""" 128 | if self._device is None: 129 | return None 130 | 131 | return ExtendedDeviceEntry(details=self._details, device=self._device) 132 | 133 | @property 134 | def area(self) -> area_registry.AreaEntry | None: 135 | """Return the Hass AreaEntry of the area of the entity.""" 136 | 137 | if self._entity.area_id is not None: 138 | return self._details.area_registry.async_get_area(self._entity.area_id) 139 | 140 | return None 141 | 142 | @property 143 | def floor(self) -> floor_registry.FloorEntry | None: 144 | """Return the Hass FloorEntry of the floor of the entity.""" 145 | 146 | if self.area is not None and self.area.floor_id is not None: 147 | return self._details.floor_registry.async_get_floor(self.area.floor_id) 148 | 149 | return None 150 | 151 | @property 152 | def device_class(self) -> str | None: 153 | """Return the device class of the entity.""" 154 | return self._entity.device_class or self._entity.original_device_class 155 | 156 | @property 157 | def id(self) -> str | None: 158 | """Return the id of the entity.""" 159 | return self._entity.entity_id 160 | 161 | @property 162 | def name(self) -> str | None: 163 | """Return the name of the entity.""" 164 | return self._entity.name or self._entity.original_name 165 | 166 | @property 167 | def unit_of_measurement(self) -> str | None: 168 | """Return the unit of measurement of the entity.""" 169 | return self._entity.unit_of_measurement 170 | 171 | @property 172 | def platform(self) -> str | None: 173 | """Return the platform of the entity.""" 174 | return self._entity.platform 175 | 176 | @property 177 | def domain(self) -> str: 178 | """Return the domain of the entity.""" 179 | return self._entity.domain 180 | 181 | @property 182 | def labels(self) -> list[str]: 183 | """Return the labels of the entity.""" 184 | return sorted(self._entity.labels) 185 | 186 | def to_dict(self) -> dict[str, Any]: 187 | """Convert to dict.""" 188 | 189 | 
entity: dict[str, Any] = { 190 | "id": self.id, 191 | "name": self.name, 192 | "domain": self.domain, 193 | "area": None, 194 | "device_class": self.device_class, 195 | "device": None, 196 | "labels": self.labels, 197 | "platform": self.platform, 198 | "unit_of_measurement": self.unit_of_measurement, 199 | } 200 | 201 | if self.area is not None: 202 | entity["area"] = {"id": self.area.id, "name": self.area.name, "floor": None} 203 | 204 | if self.floor is not None: 205 | entity["area"]["floor"] = { 206 | "id": self.floor.floor_id, 207 | "name": self.floor.name, 208 | } 209 | 210 | if self.device is not None: 211 | entity["device"] = self.device.to_dict() 212 | 213 | return entity 214 | 215 | 216 | class ExtendedEntityDetails: 217 | """Creates extended entity and device entries.""" 218 | 219 | def __init__( 220 | self, 221 | hass: HomeAssistant, 222 | logger: Logger = BASE_LOGGER, 223 | ) -> None: 224 | """Init the ExtendedEntityDetails class.""" 225 | self._hass: HomeAssistant = hass 226 | self._logger: Logger = logger 227 | 228 | self.entity_registry: entity_registry.EntityRegistry = entity_registry.async_get(hass) 229 | self.device_registry: device_registry.DeviceRegistry = device_registry.async_get(hass) 230 | self.area_registry: area_registry.AreaRegistry = area_registry.async_get(hass) 231 | self.floor_registry: floor_registry.FloorRegistry = floor_registry.async_get(hass) 232 | 233 | def async_get(self, entity_id: str) -> ExtendedRegistryEntry: 234 | """Retrieve extended entity details.""" 235 | device: DeviceEntry | None = None 236 | entity: RegistryEntry | None = None 237 | 238 | entity = self.entity_registry.async_get(entity_id) 239 | 240 | if entity is None: 241 | msg = f"Entity not found: {entity_id}" 242 | self._logger.debug(msg) 243 | raise ValueError(msg) 244 | 245 | if entity.device_id is not None: 246 | device = self.device_registry.async_get(entity.device_id) 247 | 248 | if device is None: 249 | self._logger.debug( 250 | "Device id [%s] present for entity [%s] but device not found.", 251 | entity.device_id, 252 | entity_id, 253 | ) 254 | 255 | return ExtendedRegistryEntry(details=self, entity=entity, device=device) 256 | -------------------------------------------------------------------------------- /custom_components/elasticsearch/__init__.py: -------------------------------------------------------------------------------- 1 | """Support for sending event data to an Elasticsearch cluster.""" 2 | 3 | from __future__ import annotations 4 | 5 | from logging import Logger 6 | from typing import TYPE_CHECKING 7 | 8 | from homeassistant.config_entries import ConfigEntry 9 | from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady, IntegrationError 10 | from homeassistant.loader import ( 11 | async_get_integration, 12 | ) 13 | 14 | from custom_components.elasticsearch.config_flow import ElasticFlowHandler 15 | from custom_components.elasticsearch.const import ELASTIC_DOMAIN 16 | from custom_components.elasticsearch.errors import ( 17 | AuthenticationRequired, 18 | CannotConnect, 19 | ESIntegrationException, 20 | UnsupportedVersion, 21 | ) 22 | from custom_components.elasticsearch.logger import ( 23 | LOGGER, 24 | async_log_enter_exit_debug, 25 | async_log_enter_exit_info, 26 | have_child, 27 | log_enter_exit_debug, 28 | ) 29 | 30 | from .es_integration import ElasticIntegration 31 | 32 | if TYPE_CHECKING: # pragma: no cover 33 | from homeassistant.core import HomeAssistant 34 | 35 | type ElasticIntegrationConfigEntry = ConfigEntry[ElasticIntegration] 36 | 37 | 38 |
@async_log_enter_exit_info 39 | async def async_setup_entry(hass: HomeAssistant, config_entry: ElasticIntegrationConfigEntry) -> bool: 40 | """Set up integration via config flow.""" 41 | 42 | # Create a specific logger for this config entry 43 | _logger: Logger = have_child(name=config_entry.title) 44 | 45 | version = await get_integration_version(hass) 46 | 47 | _logger.info("Initializing integration v%s for %s", version, config_entry.title) 48 | 49 | try: 50 | integration = ElasticIntegration(hass=hass, config_entry=config_entry, log=_logger) 51 | await integration.async_init() 52 | except (UnsupportedVersion, CannotConnect) as err: 53 | raise ConfigEntryNotReady(err) from err 54 | except AuthenticationRequired as err: 55 | raise ConfigEntryAuthFailed(err) from err 56 | except ESIntegrationException as err: 57 | raise ConfigEntryNotReady(err) from err 58 | except Exception as err: 59 | msg = "Unknown error occurred" 60 | _logger.exception(msg) 61 | raise IntegrationError(err) from err 62 | 63 | config_entry.runtime_data = integration 64 | return True 65 | 66 | 67 | @async_log_enter_exit_info 68 | async def async_unload_entry(hass: HomeAssistant, config_entry: ElasticIntegrationConfigEntry) -> bool: 69 | """Tear down the integration.""" 70 | 71 | if ( 72 | hasattr(config_entry, "runtime_data") 73 | and config_entry.runtime_data is not None 74 | and isinstance(config_entry.runtime_data, ElasticIntegration) 75 | ): 76 | integration = config_entry.runtime_data 77 | 78 | await integration.async_shutdown() 79 | else: 80 | LOGGER.warning( 81 | "Called to unload config entry %s, but it doesn't appear to be loaded", config_entry.title 82 | ) 83 | 84 | return True 85 | 86 | 87 | @async_log_enter_exit_debug 88 | async def async_migrate_entry(hass: HomeAssistant, config_entry: ElasticIntegrationConfigEntry) -> bool: 89 | """Handle migration of config entry.""" 90 | if config_entry.version == ElasticFlowHandler.VERSION: 91 | return True 92 | 93 | try: 94 | migrated_data, migrated_options, migrated_version = migrate_data_and_options_to_version( 95 | config_entry, 96 | ElasticFlowHandler.VERSION, 97 | ) 98 | except Exception: # noqa: BLE001 99 | LOGGER.exception( 100 | "Migration failed attempting to migrate from version %s to version %s.", 101 | config_entry.version, 102 | ElasticFlowHandler.VERSION, 103 | ) 104 | return False 105 | 106 | hass.config_entries.async_update_entry( 107 | config_entry, 108 | data=migrated_data, 109 | options=migrated_options, 110 | version=migrated_version, 111 | ) 112 | 113 | return True 114 | 115 | 116 | async def get_integration_version(hass) -> str: 117 | """Return the version of the integration.""" 118 | integration = await async_get_integration(hass, ELASTIC_DOMAIN) 119 | 120 | if integration is None or integration.version is None: 121 | return "Unknown" 122 | 123 | return integration.version.string 124 | 125 | 126 | @log_enter_exit_debug 127 | def migrate_data_and_options_to_version( 128 | config_entry: ElasticIntegrationConfigEntry, 129 | desired_version: int, 130 | ) -> tuple[dict, dict, int]: 131 | """Migrate a config entry from its current version to a desired version.""" 132 | LOGGER.debug( 133 | "Migrating config entry from version %s to %s", 134 | config_entry.version, 135 | desired_version, 136 | ) 137 | 138 | data = {**config_entry.data} 139 | options = {**config_entry.options} 140 | begin_version = config_entry.version 141 | current_version = begin_version 142 | 143 | if current_version < desired_version: 144 | for version in range(current_version + 1,
desired_version + 1): 145 | migration_func = globals().get(f"migrate_to_version_{version}") 146 | if migration_func: 147 | data, options = migration_func(data, options) 148 | current_version = version 149 | 150 | end_version = current_version 151 | 152 | LOGGER.info("Migration from version %s to version %s successful", begin_version, end_version) 153 | 154 | return data, options, end_version 155 | 156 | 157 | def migrate_to_version_2(data: dict, options: dict) -> tuple[dict, dict]: 158 | """Migrate config to version 2.""" 159 | only_publish_changed = data.get("only_publish_changed", False) 160 | data["publish_mode"] = "All" if not only_publish_changed else "Any changes" 161 | 162 | if "only_publish_changed" in data: 163 | del data["only_publish_changed"] 164 | 165 | return data, options 166 | 167 | 168 | def migrate_to_version_3(data: dict, options: dict) -> tuple[dict, dict]: 169 | """Migrate config to version 3.""" 170 | if "health_sensor_enabled" in data: 171 | del data["health_sensor_enabled"] 172 | 173 | return data, options 174 | 175 | 176 | def migrate_to_version_4(data: dict, options: dict) -> tuple[dict, dict]: 177 | """Migrate config to version 4.""" 178 | if "index_mode" not in data: 179 | data["index_mode"] = "index" 180 | 181 | conf_ilm_max_size = "ilm_max_size" 182 | if conf_ilm_max_size in data: 183 | del data[conf_ilm_max_size] 184 | 185 | conf_ilm_delete_after = "ilm_delete_after" 186 | if conf_ilm_delete_after in data: 187 | del data[conf_ilm_delete_after] 188 | 189 | return data, options 190 | 191 | 192 | def migrate_to_version_5(data: dict, options: dict) -> tuple[dict, dict]: 193 | """Migrate config to version 5.""" 194 | keys_to_remove = [ 195 | "datastream_type", 196 | "datastream_name_prefix", 197 | "datastream_namespace", 198 | ] 199 | 200 | for key in keys_to_remove: 201 | if key in data: 202 | del data[key] 203 | 204 | keys_to_migrate = [ 205 | "publish_enabled", 206 | "publish_frequency", 207 | "publish_mode", 208 | "excluded_domains", 209 | "excluded_entities", 210 | "included_domains", 211 | "included_entities", 212 | ] 213 | 214 | for key in keys_to_migrate: 215 | if key not in options and key in data: 216 | options[key] = data[key] 217 | if key in data: 218 | del data[key] 219 | 220 | remove_keys_if_empty = [ 221 | "username", 222 | "password", 223 | "api_key", 224 | ] 225 | 226 | for key in remove_keys_if_empty: 227 | if key in data and data[key] == "": 228 | del data[key] 229 | 230 | return data, options 231 | 232 | 233 | def migrate_to_version_6(data: dict, options: dict) -> tuple[dict, dict]: 234 | """Migrate config to version 6.""" 235 | 236 | if data.get("index_mode") is not None: 237 | del data["index_mode"] 238 | 239 | # Change publish mode to change_detection_type 240 | if options.get("publish_mode") is not None: 241 | if options["publish_mode"] == "All": 242 | options["polling_frequency"] = options["publish_frequency"] 243 | options["change_detection_type"] = ["STATE", "ATTRIBUTE"] 244 | 245 | if options["publish_mode"] == "Any changes": 246 | options["polling_frequency"] = 0 247 | options["change_detection_type"] = ["STATE", "ATTRIBUTE"] 248 | 249 | if options["publish_mode"] == "State changes": 250 | options["polling_frequency"] = 0 251 | options["change_detection_type"] = ["STATE"] 252 | 253 | del options["publish_mode"] 254 | 255 | else: 256 | options["polling_frequency"] = 0 257 | options["change_detection_type"] = ["STATE", "ATTRIBUTE"] 258 | 259 | # Remove legacy publish/ILM/index settings that no longer exist in version 6 260 | options_to_remove = [ 261 | "ilm_enabled", 262 |
"ilm_policy_name", 263 | "publish_mode", 264 | "publish_enabled", 265 | "index_format", 266 | "index_mode", 267 | "alias", 268 | ] 269 | 270 | for key in options_to_remove: 271 | if key in options: 272 | del options[key] 273 | 274 | return data, options 275 | 276 | 277 | def migrate_to_version_7(data: dict, options: dict) -> tuple[dict, dict]: 278 | """Migrate config to version 7.""" 279 | 280 | # if tags does not exist, set it to an empty array 281 | if "tags" not in options: 282 | options["tags"] = [] 283 | 284 | options["targets_to_include"] = {} 285 | options["targets_to_exclude"] = {} 286 | options["exclude_targets"] = False 287 | options["include_targets"] = False 288 | 289 | if "included_entities" in options and len(options["included_entities"]) != 0: 290 | options["include_targets"] = True 291 | options["targets_to_include"]["entity_id"] = options["included_entities"] 292 | 293 | if "excluded_entities" in options and len(options["excluded_entities"]) != 0: 294 | options["exclude_targets"] = True 295 | options["targets_to_exclude"]["entity_id"] = options["excluded_entities"] 296 | 297 | keys_to_remove = [ 298 | "excluded_domains", 299 | "excluded_entities", 300 | "included_domains", 301 | "included_entities", 302 | ] 303 | 304 | # Lowercase values in array options["change_detection_type"] 305 | if "change_detection_type" in options: 306 | options["change_detection_type"] = [x.lower() for x in options["change_detection_type"]] 307 | 308 | for key in keys_to_remove: 309 | if key in options: 310 | del options[key] 311 | 312 | return data, options 313 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "homeassistant-elasticsearch" 3 | version = "2.0.0" 4 | description = "" 5 | authors = ["Larry Gregory "] 6 | license = "MIT" 7 | readme = "README.md" 8 | packages = [{ include = "elasticsearch", from = "custom_components" }] 9 | 10 | [tool.poetry.dependencies] 11 | python = ">=3.13.2,<4.0" 12 | elasticsearch8 = "~=8.14.0" 13 | 14 | [tool.poetry.group.dev.dependencies] 15 | colorlog = "==6.9.0" 16 | homeassistant = "==2025.6.0" 17 | ruff = ">=0.9.1" 18 | pip = ">=21.0,<25.3" 19 | pydantic = ">=1.10.4" 20 | mypy-dev = ">=1.15.0a2" 21 | 22 | [tool.poetry.group.docs.dependencies] 23 | mkdocs-material = ">=9.5.17" 24 | 25 | [tool.poetry.group.test.dependencies] 26 | syrupy = ">=4.6.0" 27 | pytest = ">=8.3.5" 28 | pytest-asyncio = ">=0.26.0" 29 | pytest-cov = ">=5.0.0" 30 | pytest-homeassistant-custom-component = "==0.13.251" 31 | mock = ">4.0.0" 32 | jsondiff = "*" 33 | 34 | [build-system] 35 | requires = ["poetry-core"] 36 | build-backend = "poetry.core.masonry.api" 37 | 38 | [tool.isort] 39 | combine-as-imports = true 40 | ensure_newline_before_comments = true 41 | force_grid_wrap = 0 42 | force-sort-within-sections = true 43 | include_trailing_comma = true 44 | line_length = 110 45 | multi_line_output = 3 46 | split-on-trailing-comma = false 47 | use_parentheses = true 48 | 49 | [tool.coverage.html] 50 | directory = "test_results/cov_html" 51 | 52 | [tool.coverage.xml] 53 | output = "test_results/cov_xml/coverage.xml" 54 | 55 | [tool.pytest.ini_options] 56 | addopts = [ 57 | "-rxf", 58 | "-vv", 59 | "-l", 60 | "--asyncio-mode=auto", 61 | "--junitxml=test_results/pytest.xml", 62 | "--disable-socket", 63 | "--allow-unix-socket" 64 | ] 65 | filterwarnings = ["ignore:ssl.PROTOCOL_TLS is deprecated:DeprecationWarning"] 66 | testpaths = 
["tests"] 67 | python_files = ["test_*.py"] 68 | norecursedirs = [ 69 | ".git", 70 | ".venv", 71 | ".cache", 72 | ".pytest_cache", 73 | ".ruff_cache" 74 | ] 75 | 76 | 77 | [tool.pylint.FORMAT] 78 | expected-line-ending-format = "LF" 79 | 80 | [tool.pylint.MAIN] 81 | py-version = "3.13" 82 | ignore = [ 83 | "tests", 84 | ] 85 | # Use a conservative default here; 2 should speed up most setups and not hurt 86 | # any too bad. Override on command line as appropriate. 87 | jobs = 2 88 | init-hook = """\ 89 | from pathlib import Path; \ 90 | import sys; \ 91 | 92 | from pylint.config import find_default_config_files; \ 93 | 94 | sys.path.append( \ 95 | str(Path(next(find_default_config_files())).parent.joinpath('pylint/plugins')) 96 | ) \ 97 | """ 98 | load-plugins = [ 99 | ] 100 | persistent = false 101 | extension-pkg-allow-list = [ 102 | "av.audio.stream", 103 | "av.logging", 104 | "av.stream", 105 | "ciso8601", 106 | "orjson", 107 | "cv2", 108 | ] 109 | fail-on = [ 110 | "I", 111 | ] 112 | 113 | [tool.pylint.BASIC] 114 | class-const-naming-style = "any" 115 | 116 | 117 | [tool.pylint."MESSAGES CONTROL"] 118 | # Reasons disabled: 119 | # format - handled by ruff 120 | # locally-disabled - it spams too much 121 | # duplicate-code - unavoidable 122 | # cyclic-import - doesn't test if both import on load 123 | # abstract-class-little-used - prevents from setting right foundation 124 | # unused-argument - generic callbacks and setup methods create a lot of warnings 125 | # too-many-* - are not enforced for the sake of readability 126 | # too-few-* - same as too-many-* 127 | # abstract-method - with intro of async there are always methods missing 128 | # inconsistent-return-statements - doesn't handle raise 129 | # too-many-ancestors - it's too strict. 130 | # wrong-import-order - isort guards this 131 | # consider-using-f-string - str.format sometimes more readable 132 | # possibly-used-before-assignment - too many errors / not necessarily issues 133 | # --- 134 | # Pylint CodeStyle plugin 135 | # consider-using-namedtuple-or-dataclass - too opinionated 136 | # consider-using-assignment-expr - decision to use := better left to devs 137 | disable = [ 138 | "format", 139 | "abstract-method", 140 | "cyclic-import", 141 | "duplicate-code", 142 | "inconsistent-return-statements", 143 | "locally-disabled", 144 | "not-context-manager", 145 | "too-few-public-methods", 146 | "too-many-ancestors", 147 | "too-many-arguments", 148 | "too-many-instance-attributes", 149 | "too-many-lines", 150 | "too-many-locals", 151 | "too-many-public-methods", 152 | "too-many-boolean-expressions", 153 | "wrong-import-order", 154 | "consider-using-f-string", 155 | 156 | # Handled by ruff 157 | # Ref: 158 | "await-outside-async", # PLE1142 159 | "bad-str-strip-call", # PLE1310 160 | "bad-string-format-type", # PLE1307 161 | "bidirectional-unicode", # PLE2502 162 | "continue-in-finally", # PLE0116 163 | "duplicate-bases", # PLE0241 164 | "format-needs-mapping", # F502 165 | "function-redefined", # F811 166 | # Needed because ruff does not understand type of __all__ generated by a function 167 | # "invalid-all-format", # PLE0605 168 | "invalid-all-object", # PLE0604 169 | "invalid-character-backspace", # PLE2510 170 | "invalid-character-esc", # PLE2513 171 | "invalid-character-nul", # PLE2514 172 | "invalid-character-sub", # PLE2512 173 | "invalid-character-zero-width-space", # PLE2515 174 | "logging-too-few-args", # PLE1206 175 | "logging-too-many-args", # PLE1205 176 | "missing-format-string-key", # F524 177 | 
"mixed-format-string", # F506 178 | "no-method-argument", # N805 179 | "no-self-argument", # N805 180 | "nonexistent-operator", # B002 181 | "nonlocal-without-binding", # PLE0117 182 | "not-in-loop", # F701, F702 183 | "notimplemented-raised", # F901 184 | "return-in-init", # PLE0101 185 | "return-outside-function", # F706 186 | "syntax-error", # E999 187 | "too-few-format-args", # F524 188 | "too-many-format-args", # F522 189 | "too-many-star-expressions", # F622 190 | "truncated-format-string", # F501 191 | "undefined-all-variable", # F822 192 | "undefined-variable", # F821 193 | "used-prior-global-declaration", # PLE0118 194 | "yield-inside-async-function", # PLE1700 195 | "yield-outside-function", # F704 196 | "anomalous-backslash-in-string", # W605 197 | "assert-on-string-literal", # PLW0129 198 | "assert-on-tuple", # F631 199 | "bad-format-string", # W1302, F 200 | "bad-format-string-key", # W1300, F 201 | "bare-except", # E722 202 | "binary-op-exception", # PLW0711 203 | "cell-var-from-loop", # B023 204 | # "dangerous-default-value", # B006, ruff catches new occurrences, needs more work 205 | "duplicate-except", # B014 206 | "duplicate-key", # F601 207 | "duplicate-string-formatting-argument", # F 208 | "duplicate-value", # F 209 | "eval-used", # S307 210 | "exec-used", # S102 211 | "expression-not-assigned", # B018 212 | "f-string-without-interpolation", # F541 213 | "forgotten-debug-statement", # T100 214 | "format-string-without-interpolation", # F 215 | # "global-statement", # PLW0603, ruff catches new occurrences, needs more work 216 | "global-variable-not-assigned", # PLW0602 217 | "implicit-str-concat", # ISC001 218 | "import-self", # PLW0406 219 | "inconsistent-quotes", # Q000 220 | "invalid-envvar-default", # PLW1508 221 | "keyword-arg-before-vararg", # B026 222 | "logging-format-interpolation", # G 223 | "logging-fstring-interpolation", # G 224 | "logging-not-lazy", # G 225 | "misplaced-future", # F404 226 | "named-expr-without-context", # PLW0131 227 | "nested-min-max", # PLW3301 228 | "pointless-statement", # B018 229 | "raise-missing-from", # B904 230 | "redefined-builtin", # A001 231 | "try-except-raise", # TRY302 232 | "unused-argument", # ARG001, we don't use it 233 | "unused-format-string-argument", #F507 234 | "unused-format-string-key", # F504 235 | "unused-import", # F401 236 | "unused-variable", # F841 237 | "useless-else-on-loop", # PLW0120 238 | "wildcard-import", # F403 239 | "bad-classmethod-argument", # N804 240 | "consider-iterating-dictionary", # SIM118 241 | "empty-docstring", # D419 242 | "invalid-name", # N815 243 | "line-too-long", # E501, disabled globally 244 | "missing-class-docstring", # D101 245 | "missing-final-newline", # W292 246 | "missing-function-docstring", # D103 247 | "missing-module-docstring", # D100 248 | "multiple-imports", #E401 249 | "singleton-comparison", # E711, E712 250 | "subprocess-run-check", # PLW1510 251 | "superfluous-parens", # UP034 252 | "ungrouped-imports", # I001 253 | "unidiomatic-typecheck", # E721 254 | "unnecessary-direct-lambda-call", # PLC3002 255 | "unnecessary-lambda-assignment", # PLC3001 256 | "unnecessary-pass", # PIE790 257 | "unneeded-not", # SIM208 258 | "useless-import-alias", # PLC0414 259 | "wrong-import-order", # I001 260 | "wrong-import-position", # E402 261 | "comparison-of-constants", # PLR0133 262 | "comparison-with-itself", # PLR0124 263 | "consider-merging-isinstance", # PLR1701 264 | "consider-using-dict-comprehension", # C402 265 | "consider-using-generator", # C417 266 | 
"consider-using-get", # SIM401 267 | "consider-using-set-comprehension", # C401 268 | "consider-using-sys-exit", # PLR1722 269 | "consider-using-ternary", # SIM108 270 | "literal-comparison", # F632 271 | "property-with-parameters", # PLR0206 272 | "super-with-arguments", # UP008 273 | "too-many-branches", # PLR0912 274 | "too-many-return-statements", # PLR0911 275 | "too-many-statements", # PLR0915 276 | "trailing-comma-tuple", # COM818 277 | "unnecessary-comprehension", # C416 278 | "use-a-generator", # C417 279 | "use-dict-literal", # C406 280 | "use-list-literal", # C405 281 | "useless-object-inheritance", # UP004 282 | "useless-return", # PLR1711 283 | "no-else-break", # RET508 284 | "no-else-continue", # RET507 285 | "no-else-raise", # RET506 286 | "no-else-return", # RET505 287 | "broad-except", # BLE001 288 | "protected-access", # SLF001 289 | # "no-self-use", # PLR6301 # Optional plugin, not enabled 290 | 291 | # Handled by mypy 292 | # Ref: 293 | "abstract-class-instantiated", 294 | "arguments-differ", 295 | "assigning-non-slot", 296 | "assignment-from-no-return", 297 | "assignment-from-none", 298 | "bad-exception-cause", 299 | "bad-format-character", 300 | "bad-reversed-sequence", 301 | "bad-super-call", 302 | "bad-thread-instantiation", 303 | "catching-non-exception", 304 | "comparison-with-callable", 305 | "deprecated-class", 306 | "dict-iter-missing-items", 307 | "format-combined-specification", 308 | "global-variable-undefined", 309 | "import-error", 310 | "inconsistent-mro", 311 | "inherit-non-class", 312 | "init-is-generator", 313 | "invalid-class-object", 314 | "invalid-enum-extension", 315 | "invalid-envvar-value", 316 | "invalid-format-returned", 317 | "invalid-hash-returned", 318 | "invalid-metaclass", 319 | "invalid-overridden-method", 320 | "invalid-repr-returned", 321 | "invalid-sequence-index", 322 | "invalid-slice-index", 323 | "invalid-slots-object", 324 | "invalid-slots", 325 | "invalid-star-assignment-target", 326 | "invalid-str-returned", 327 | "invalid-unary-operand-type", 328 | "invalid-unicode-codec", 329 | "isinstance-second-argument-not-valid-type", 330 | "method-hidden", 331 | "misplaced-format-function", 332 | "missing-format-argument-key", 333 | "missing-format-attribute", 334 | "missing-kwoa", 335 | "no-member", 336 | "no-value-for-parameter", 337 | "non-iterator-returned", 338 | "non-str-assignment-to-dunder-name", 339 | "nonlocal-and-global", 340 | "not-a-mapping", 341 | "not-an-iterable", 342 | "not-async-context-manager", 343 | "not-callable", 344 | "not-context-manager", 345 | "overridden-final-method", 346 | "raising-bad-type", 347 | "raising-non-exception", 348 | "redundant-keyword-arg", 349 | "relative-beyond-top-level", 350 | "self-cls-assignment", 351 | "signature-differs", 352 | "star-needs-assignment-target", 353 | "subclassed-final-class", 354 | "super-without-brackets", 355 | "too-many-function-args", 356 | "typevar-double-variance", 357 | "typevar-name-mismatch", 358 | "unbalanced-dict-unpacking", 359 | "unbalanced-tuple-unpacking", 360 | "unexpected-keyword-arg", 361 | "unhashable-member", 362 | "unpacking-non-sequence", 363 | "unsubscriptable-object", 364 | "unsupported-assignment-operation", 365 | "unsupported-binary-operation", 366 | "unsupported-delete-operation", 367 | "unsupported-membership-test", 368 | "used-before-assignment", 369 | "using-final-decorator-in-unsupported-version", 370 | "wrong-exception-operation", 371 | ] 372 | enable = [ 373 | #"useless-suppression", # temporarily every now and then to clean them up 374 | 
"use-symbolic-message-instead", 375 | ] 376 | -------------------------------------------------------------------------------- /tests/test_util/es_mocker.py: -------------------------------------------------------------------------------- 1 | """Elasticsearch API Call Mocker.""" 2 | 3 | from __future__ import annotations 4 | 5 | from http import HTTPStatus 6 | from ssl import SSLCertVerificationError 7 | from typing import TYPE_CHECKING 8 | from unittest.mock import MagicMock 9 | 10 | from aiohttp import client_exceptions 11 | from custom_components.elasticsearch.const import DATASTREAM_METRICS_INDEX_TEMPLATE_NAME 12 | 13 | # import custom_components.elasticsearch # noqa: F401 14 | # import custom_components.elasticsearch # noqa: F401 15 | from pytest_homeassistant_custom_component.common import ( 16 | MockConfigEntry, # noqa: F401 # noqa: F401 17 | ) 18 | from pytest_homeassistant_custom_component.plugins import ( # noqa: F401 # noqa: F401 19 | aioclient_mock, 20 | skip_stop_scripts, 21 | snapshot, 22 | verify_cleanup, 23 | ) 24 | from pytest_homeassistant_custom_component.test_util.aiohttp import ( 25 | AiohttpClientMocker, 26 | AiohttpClientMockResponse, 27 | ) 28 | 29 | from tests import const as testconst 30 | 31 | if TYPE_CHECKING: 32 | from typing import Any 33 | 34 | 35 | def self_signed_tls_error(): 36 | """Return a self-signed certificate error.""" 37 | connection_key = MagicMock() 38 | connection_key.host = "mock_es_integration" 39 | connection_key.port = 9200 40 | connection_key.is_ssl = True 41 | 42 | certificate_error = SSLCertVerificationError() 43 | certificate_error.verify_code = 19 44 | certificate_error.verify_message = "'self-signed certificate in certificate chain'" 45 | certificate_error.library = "SSL" 46 | certificate_error.reason = "CERTIFICATE_VERIFY_FAILED" 47 | certificate_error.strerror = "[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self-signed certificate in certificate chain (_ssl.c:1000)" 48 | certificate_error.errno = 1 49 | 50 | return client_exceptions.ClientConnectorCertificateError( 51 | connection_key=connection_key, certificate_error=certificate_error 52 | ) 53 | 54 | 55 | class es_mocker: 56 | """Mock builder for Elasticsearch integration tests.""" 57 | 58 | mocker: AiohttpClientMocker 59 | base_url: str = testconst.CONFIG_ENTRY_DATA_URL 60 | 61 | def __init__(self, mocker): 62 | """Initialize the mock builder.""" 63 | self.mocker = mocker 64 | 65 | def reset(self): 66 | """Reset the mock builder.""" 67 | self.mocker.clear_requests() 68 | 69 | return self 70 | 71 | def get_calls(self, include_headers=False): 72 | """Return the calls.""" 73 | # each mock_call is a tuple of method, url, body, and headers 74 | 75 | if not include_headers: 76 | return [(method, url, body) for method, url, body, headers in self.mocker.mock_calls] 77 | 78 | return self.mocker.mock_calls 79 | 80 | def clear(self): 81 | """Clear the requests.""" 82 | self.mocker.mock_calls.clear() 83 | 84 | return self 85 | 86 | def with_server_error(self, status=None, exc=None): 87 | """Mock Elasticsearch being unreachable.""" 88 | if status is None and exc is None: 89 | self.mocker.get(f"{self.base_url}", status=HTTPStatus.INTERNAL_SERVER_ERROR) 90 | 91 | if exc is None: 92 | self.mocker.get(f"{self.base_url}", status=status) 93 | else: 94 | self.mocker.get(f"{self.base_url}", exc=exc) 95 | 96 | return self 97 | 98 | def without_authentication(self): 99 | """Mock the user not being authenticated.""" 100 | self.mocker.get( 101 | f"{self.base_url}", 102 | status=401, 103 | 
json=testconst.CLUSTER_INFO_MISSING_CREDENTIALS_RESPONSE_BODY, 104 | ) 105 | return self 106 | 107 | def with_server_timeout(self): 108 | """Mock Elasticsearch being unreachable.""" 109 | self.mocker.get(f"{self.base_url}", exc=client_exceptions.ServerTimeoutError()) 110 | return self 111 | 112 | def _add_fail_after( 113 | self, success: AiohttpClientMockResponse, failure: AiohttpClientMockResponse, fail_after 114 | ): 115 | if fail_after is None: 116 | self.mocker.request( 117 | url=success.url, 118 | method=success.method, 119 | status=success.status, 120 | content=success.response, 121 | headers=success.headers, 122 | exc=success.exc, 123 | ) 124 | return self 125 | 126 | call_count = 0 127 | 128 | async def side_effect(*args, **kwargs): 129 | nonlocal call_count 130 | call_count += 1 131 | if call_count >= fail_after: 132 | return failure 133 | 134 | return success 135 | 136 | self.mocker.request(success.method, f"{success.url}", side_effect=side_effect) 137 | 138 | return self 139 | 140 | def _as_elasticsearch_stateful( 141 | self, version_response: dict[str, Any], with_security: bool = True, fail_after=None 142 | ) -> es_mocker: 143 | """Mock Elasticsearch version.""" 144 | 145 | self.base_url = ( 146 | testconst.CONFIG_ENTRY_DATA_URL if with_security else testconst.CONFIG_ENTRY_DATA_URL_INSECURE 147 | ) 148 | 149 | self._add_fail_after( 150 | success=AiohttpClientMockResponse( 151 | method="GET", 152 | url=self.base_url, 153 | headers={"x-elastic-product": "Elasticsearch"}, 154 | json=version_response, 155 | ), 156 | failure=AiohttpClientMockResponse( 157 | method="GET", 158 | url=self.base_url, 159 | status=HTTPStatus.INTERNAL_SERVER_ERROR, 160 | ), 161 | fail_after=fail_after, 162 | ) 163 | 164 | self.mocker.get( 165 | url=f"{self.base_url}/_xpack/usage", 166 | json={ 167 | "security": {"available": True, "enabled": with_security}, 168 | }, 169 | ) 170 | 171 | return self 172 | 173 | def as_elasticsearch_8_0(self, with_security: bool = True) -> es_mocker: 174 | """Mock Elasticsearch 8.0.""" 175 | return self._as_elasticsearch_stateful(testconst.CLUSTER_INFO_8DOT0_RESPONSE_BODY, with_security) 176 | 177 | def as_elasticsearch_8_17(self, with_security: bool = True, fail_after=None) -> es_mocker: 178 | """Mock Elasticsearch 8.17.""" 179 | return self._as_elasticsearch_stateful( 180 | testconst.CLUSTER_INFO_8DOT17_RESPONSE_BODY, with_security, fail_after=fail_after 181 | ) 182 | 183 | def as_elasticsearch_8_14(self, with_security: bool = True): 184 | """Mock Elasticsearch 8.14.""" 185 | 186 | return self._as_elasticsearch_stateful(testconst.CLUSTER_INFO_8DOT14_RESPONSE_BODY, with_security) 187 | 188 | def as_fake_elasticsearch(self) -> es_mocker: 189 | """Mock a fake elasticsearch node response.""" 190 | 191 | self.mocker.get( 192 | f"{self.base_url}", 193 | status=200, 194 | # No x-elastic-product header 195 | json=testconst.CLUSTER_INFO_8DOT14_RESPONSE_BODY, 196 | ) 197 | 198 | return self 199 | 200 | def as_elasticsearch_serverless(self) -> es_mocker: 201 | """Mock Elasticsearch version.""" 202 | 203 | self.base_url = testconst.CONFIG_ENTRY_DATA_URL 204 | 205 | self.mocker.get( 206 | f"{self.base_url}", 207 | status=200, 208 | json=testconst.CLUSTER_INFO_SERVERLESS_RESPONSE_BODY, 209 | headers={"x-elastic-product": "Elasticsearch"}, 210 | ) 211 | 212 | self.mocker.get( 213 | url=f"{self.base_url}/_xpack/usage", 214 | status=410, 215 | json=testconst.XPACK_USAGE_SERVERLESS_RESPONSE_BODY, 216 | ) 217 | 218 | return self 219 | 220 | def with_incorrect_permissions(self): 221 | """Mock 
the user lacking the required privileges.""" 222 | self.mocker.post( 223 | f"{self.base_url}/_security/user/_has_privileges", 224 | status=200, 225 | json={ 226 | "has_all_requested": False, 227 | }, 228 | ) 229 | 230 | return self 231 | 232 | def with_correct_permissions(self): 233 | """Mock the user having the required privileges.""" 234 | 235 | self.mocker.post( 236 | f"{self.base_url}/_security/user/_has_privileges", 237 | status=200, 238 | json={ 239 | "has_all_requested": True, 240 | }, 241 | ) 242 | 243 | return self 244 | 245 | def with_selfsigned_certificate(self): 246 | """Mock a self-signed certificate error.""" 247 | 248 | self.mocker.get(f"{self.base_url}", exc=self_signed_tls_error()) 249 | 250 | return self 251 | 252 | def with_index_template(self, version=2): 253 | """Mock the index template already being installed.""" 254 | 255 | # Mock index template setup 256 | self.mocker.get( 257 | f"{self.base_url}/_index_template/{DATASTREAM_METRICS_INDEX_TEMPLATE_NAME}", 258 | status=200, 259 | headers={"x-elastic-product": "Elasticsearch"}, 260 | json={ 261 | "index_templates": [{"name": "datastream_metrics", "index_template": {"version": version}}] 262 | }, 263 | ) 264 | 265 | return self 266 | 267 | def without_index_template(self): 268 | """Mock the index template being absent and accept its creation.""" 269 | 270 | # Mock index template setup 271 | self.mocker.get( 272 | f"{self.base_url}/_index_template/{DATASTREAM_METRICS_INDEX_TEMPLATE_NAME}", 273 | status=200, 274 | headers={"x-elastic-product": "Elasticsearch"}, 275 | json={}, 276 | ) 277 | 278 | self.mocker.put( 279 | f"{self.base_url}/_index_template/{DATASTREAM_METRICS_INDEX_TEMPLATE_NAME}", 280 | status=200, 281 | headers={"x-elastic-product": "Elasticsearch"}, 282 | json={}, 283 | ) 284 | return self 285 | 286 | def with_datastreams(self): 287 | """Mock existing Home Assistant datastreams and their rollover endpoints.""" 288 | 289 | self.mocker.get( 290 | f"{self.base_url}/_data_stream/metrics-homeassistant.*", 291 | status=200, 292 | headers={"x-elastic-product": "Elasticsearch"}, 293 | json={ 294 | "data_streams": [ 295 | { 296 | "name": "metrics-homeassistant.sensor-default", 297 | }, 298 | { 299 | "name": "metrics-homeassistant.counter-default", 300 | }, 301 | ] 302 | }, 303 | ) 304 | 305 | self.mocker.put( 306 | f"{self.base_url}/_data_stream/metrics-homeassistant.counter-default/_rollover", 307 | status=200, 308 | headers={"x-elastic-product": "Elasticsearch"}, 309 | json={ 310 | "acknowledged": True, 311 | "shards_acknowledged": True, 312 | "old_index": ".ds-metrics-homeassistant.counter-default-2024.12.19-000001", 313 | "new_index": ".ds-metrics-homeassistant.counter-default-2025.01.10-000002", 314 | "rolled_over": True, 315 | "dry_run": False, 316 | "lazy": False, 317 | "conditions": {}, 318 | }, 319 | ) 320 | self.mocker.put( 321 | f"{self.base_url}/_data_stream/metrics-homeassistant.sensor-default/_rollover", 322 | status=200, 323 | headers={"x-elastic-product": "Elasticsearch"}, 324 | json={ 325 | "acknowledged": True, 326 | "shards_acknowledged": True, 327 | "old_index": ".ds-metrics-homeassistant.sensor-default-2024.12.19-000001", 328 | "new_index": ".ds-metrics-homeassistant.sensor-default-2025.01.10-000002", 329 | "rolled_over": True, 330 | "dry_run": False, 331 | "lazy": False, 332 | "conditions": {}, 333 | }, 334 | ) 335 | 336 | return self 337 | 338 | def respond_to_bulk_with_item_level_error(self): 339 | """Mock a bulk response with an item-level error.""" 340 | 341 | self.mocker.put( 342 | f"{self.base_url}/_bulk", 343 | status=200, 344 |
headers={"x-elastic-product": "Elasticsearch"}, 345 | json=testconst.BULK_ERROR_RESPONSE_BODY, 346 | ) 347 | 348 | return self 349 | 350 | def respond_to_bulk(self, status=200, fail_after=None): 351 | """Mock the user being properly authenticated.""" 352 | 353 | self._add_fail_after( 354 | success=AiohttpClientMockResponse( 355 | method="PUT", 356 | url=f"{self.base_url}/_bulk", 357 | headers={"x-elastic-product": "Elasticsearch"}, 358 | json=testconst.BULK_SUCCESS_RESPONSE_BODY, 359 | ), 360 | failure=AiohttpClientMockResponse( 361 | method="PUT", 362 | url=f"{self.base_url}/_bulk", 363 | status=HTTPStatus.INTERNAL_SERVER_ERROR, 364 | ), 365 | fail_after=fail_after, 366 | ) 367 | 368 | return self 369 | --------------------------------------------------------------------------------