├── .gitattributes ├── requirements_dev.txt ├── custom_components ├── __init__.py └── npm_switches │ ├── manifest.json │ ├── entity.py │ ├── translations │ ├── en.json │ ├── nb.json │ └── fr.json │ ├── const.py │ ├── button.py │ ├── __init__.py │ ├── config_flow.py │ ├── sensor.py │ ├── switch.py │ └── api.py ├── tests ├── __init__.py ├── pytest.ini ├── README.md ├── test_init.py ├── test_sensor.py ├── test_api.py ├── const.py ├── test_config_flow.py ├── conftest.py └── test_switch.py ├── .gitignore ├── requirements.txt ├── requirements_test.txt ├── hacs.json ├── scripts ├── lint ├── setup └── develop ├── .vscode ├── settings.json ├── tasks.json └── launch.json ├── config └── configuration.yaml ├── .github ├── workflows │ ├── cron.yaml │ ├── pull.yml │ └── push.yml └── ISSUE_TEMPLATE │ ├── feature_request.md │ └── issue.md ├── LICENSE ├── setup.cfg ├── .devcontainer.json ├── info.md ├── CONTRIBUTING.md └── README.md /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto eol=lf -------------------------------------------------------------------------------- /requirements_dev.txt: -------------------------------------------------------------------------------- 1 | homeassistant 2 | -------------------------------------------------------------------------------- /custom_components/__init__.py: -------------------------------------------------------------------------------- 1 | """Custom components module.""" 2 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for integration_blueprint integration.""" 2 | -------------------------------------------------------------------------------- /tests/pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | markers = 3 | asyncio: asyncio mark 4 | 5 | 
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | pythonenv* 3 | venv 4 | .venv 5 | .coverage 6 | .idea 7 | config -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | colorlog==6.9.0 2 | homeassistant==2025.2.4 3 | pip>=21.3.1 4 | ruff==0.14.0 5 | -------------------------------------------------------------------------------- /requirements_test.txt: -------------------------------------------------------------------------------- 1 | pytest-homeassistant-custom-component 2 | homeassistant 3 | pytest 4 | 5 | -------------------------------------------------------------------------------- /hacs.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "NPM Switches", 3 | "hacs": "1.6.0", 4 | "homeassistant": "2024.1.1" 5 | } -------------------------------------------------------------------------------- /scripts/lint: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | cd "$(dirname "$0")/.." 6 | 7 | ruff format . 8 | ruff check . --fix 9 | -------------------------------------------------------------------------------- /scripts/setup: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | cd "$(dirname "$0")/.." 
6 | 7 | python3 -m pip install --requirement requirements.txt 8 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.linting.pylintEnabled": true, 3 | "python.linting.enabled": true, 4 | "python.pythonPath": "/usr/local/bin/python", 5 | "files.associations": { 6 | "*.yaml": "home-assistant" 7 | } 8 | } -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0.0", 3 | "tasks": [ 4 | { 5 | "label": "Run Home Assistant on port 8123", 6 | "type": "shell", 7 | "command": "scripts/develop", 8 | "problemMatcher": [] 9 | } 10 | ] 11 | } -------------------------------------------------------------------------------- /config/configuration.yaml: -------------------------------------------------------------------------------- 1 | # https://www.home-assistant.io/integrations/default_config/ 2 | default_config: 3 | 4 | # https://www.home-assistant.io/integrations/homeassistant/ 5 | homeassistant: 6 | debug: true 7 | 8 | # https://www.home-assistant.io/integrations/logger/ 9 | logger: 10 | default: info 11 | logs: 12 | custom_components.npm_switches: debug 13 | -------------------------------------------------------------------------------- /custom_components/npm_switches/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "domain": "npm_switches", 3 | "name": "NPM Switches", 4 | "codeowners": [ 5 | "@InTheDaylight14" 6 | ], 7 | "config_flow": true, 8 | "documentation": "https://github.com/InTheDaylight14/nginx-proxy-manager-switches", 9 | "iot_class": "local_polling", 10 | "issue_tracker": "https://github.com/InTheDaylight14/nginx-proxy-manager-switches/issues", 11 | "version": "2.1.0", 12 | "homeassistant": "2025.2.4" 13 | } 
-------------------------------------------------------------------------------- /.github/workflows/cron.yaml: -------------------------------------------------------------------------------- 1 | name: Cron actions 2 | 3 | on: 4 | schedule: 5 | - cron: '0 0 * * *' 6 | 7 | jobs: 8 | validate: 9 | runs-on: "ubuntu-latest" 10 | name: Validate 11 | steps: 12 | - uses: "actions/checkout@v2" 13 | 14 | - name: HACS validation 15 | uses: "hacs/action@main" 16 | with: 17 | category: "integration" 18 | ignore: brands 19 | 20 | - name: Hassfest validation 21 | uses: "home-assistant/actions/hassfest@master" -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | 5 | --- 6 | 7 | **Is your feature request related to a problem? Please describe.** 8 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 9 | 10 | **Describe the solution you'd like** 11 | A clear and concise description of what you want to happen. 12 | 13 | **Describe alternatives you've considered** 14 | A clear and concise description of any alternative solutions or features you've considered. 15 | 16 | **Additional context** 17 | Add any other context or screenshots about the feature request here. -------------------------------------------------------------------------------- /scripts/develop: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | cd "$(dirname "$0")/.." 6 | 7 | # Create config dir if not present 8 | if [[ ! 
-d "${PWD}/config" ]]; then 9 | mkdir -p "${PWD}/config" 10 | hass --config "${PWD}/config" --script ensure_config 11 | fi 12 | 13 | # Set the path to custom_components 14 | ## This let's us have the structure we want /custom_components/integration_blueprint 15 | ## while at the same time have Home Assistant configuration inside /config 16 | ## without resulting to symlinks. 17 | export PYTHONPATH="${PYTHONPATH}:${PWD}/custom_components" 18 | 19 | # Start Home Assistant 20 | hass --config "${PWD}/config" --debug 21 | -------------------------------------------------------------------------------- /custom_components/npm_switches/entity.py: -------------------------------------------------------------------------------- 1 | """NPM Switches Entity class""" 2 | from homeassistant.helpers.update_coordinator import CoordinatorEntity 3 | from homeassistant.helpers.device_registry import DeviceInfo 4 | from homeassistant.util import slugify 5 | 6 | from .const import DOMAIN, NAME, VERSION, ATTRIBUTION 7 | 8 | 9 | class NpmSwitchesEntity(CoordinatorEntity): 10 | """Init NPM user device.""" 11 | 12 | _attr_has_entity_name = True 13 | 14 | def __init__(self, coordinator, config_entry): 15 | super().__init__(coordinator) 16 | self.host = None 17 | self.name = None 18 | self.entity_id = None 19 | self.config_entry = config_entry 20 | self.host_id = None 21 | self.coordinator = coordinator 22 | self._attr_unique_id = None 23 | self._attr_device_info = DeviceInfo( 24 | identifiers={(DOMAIN, self.config_entry.entry_id)}, 25 | name=self.config_entry.title, 26 | ) 27 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 3 | "version": "0.2.0", 4 | "configurations": [ 5 | { 6 | // Example of attaching to local debug server 7 | "name": "Python: Attach Local", 8 | 
"type": "python", 9 | "request": "attach", 10 | "port": 5678, 11 | "host": "localhost", 12 | "pathMappings": [ 13 | { 14 | "localRoot": "${workspaceFolder}", 15 | "remoteRoot": "." 16 | } 17 | ] 18 | }, 19 | { 20 | // Example of attaching to my production server 21 | "name": "Python: Attach Remote", 22 | "type": "python", 23 | "request": "attach", 24 | "port": 5678, 25 | "host": "homeassistant.local", 26 | "pathMappings": [ 27 | { 28 | "localRoot": "${workspaceFolder}", 29 | "remoteRoot": "/usr/src/homeassistant" 30 | } 31 | ] 32 | } 33 | ] 34 | } 35 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/issue.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Issue 3 | about: Create a report to help us improve 4 | 5 | --- 6 | 7 | 16 | 17 | ## Version of the custom_component 18 | 21 | 22 | ## Configuration 23 | 24 | ```yaml 25 | 26 | Add your logs here. 27 | 28 | ``` 29 | 30 | ## Describe the bug 31 | A clear and concise description of what the bug is. 32 | 33 | 34 | ## Debug log 35 | 36 | 37 | 38 | ```text 39 | 40 | Add your logs here. 
41 | 42 | ``` -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Joakim Sørensen @ludeeus 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
-------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build 3 | doctests = True 4 | # To work with Black 5 | max-line-length = 88 6 | # E501: line too long 7 | # W503: Line break occurred before a binary operator 8 | # E203: Whitespace before ':' 9 | # D202 No blank lines allowed after function docstring 10 | # W504 line break after binary operator 11 | ignore = 12 | E501, 13 | W503, 14 | E203, 15 | D202, 16 | W504 17 | 18 | [isort] 19 | # https://github.com/timothycrosley/isort 20 | # https://github.com/timothycrosley/isort/wiki/isort-Settings 21 | # splits long import on multiple lines indented by 4 spaces 22 | multi_line_output = 3 23 | include_trailing_comma=True 24 | force_grid_wrap=0 25 | use_parentheses=True 26 | line_length=88 27 | indent = " " 28 | # by default isort don't check module indexes 29 | not_skip = __init__.py 30 | # will group `import x` and `from x import` of the same module. 31 | force_sort_within_sections = true 32 | sections = FUTURE,STDLIB,INBETWEENS,THIRDPARTY,FIRSTPARTY,LOCALFOLDER 33 | default_section = THIRDPARTY 34 | known_first_party = custom_components.npm_switches, tests 35 | combine_as_imports = true 36 | -------------------------------------------------------------------------------- /custom_components/npm_switches/translations/en.json: -------------------------------------------------------------------------------- 1 | { 2 | "config": { 3 | "step": { 4 | "user": { 5 | "title": "NPM Instance", 6 | "description": "If you need help with the configuration have a look here: https://github.com/InTheDaylight14/nginx-proxy-manager-switches", 7 | "data": { 8 | "username": "Username", 9 | "password": "Password", 10 | "npm_url": "NPM URL including UI port number ex. 
http://ip:port", 11 | "include_proxy_hosts": "Include Proxy Hosts", 12 | "include_redirection_hosts": "Include Redirection Hosts", 13 | "include_stream_hosts": "Include Stream Hosts", 14 | "include_dead_hosts": "Include Dead Hosts", 15 | "include_enable_disable_count_sensors": "Include Enabled/Disabled Count Sensors", 16 | "include_certificate_sensors": "Include Certificate Sensors and Buttons" 17 | } 18 | } 19 | }, 20 | "error": { 21 | "auth": "Username/Password is wrong." 22 | }, 23 | "abort": { 24 | "single_instance_allowed": "Only a single instance is allowed." 25 | } 26 | }, 27 | "options": { 28 | "step": { 29 | "user": { 30 | "data": { 31 | "sensor": "Sensor enabled", 32 | "switch": "Switch enabled" 33 | } 34 | } 35 | } 36 | } 37 | } -------------------------------------------------------------------------------- /.github/workflows/pull.yml: -------------------------------------------------------------------------------- 1 | name: Pull actions 2 | 3 | on: 4 | pull_request: 5 | 6 | jobs: 7 | validate: 8 | runs-on: "ubuntu-latest" 9 | name: Validate 10 | steps: 11 | - uses: "actions/checkout@v2" 12 | 13 | - name: HACS validation 14 | uses: "hacs/action@main" 15 | with: 16 | category: "integration" 17 | ignore: brands 18 | 19 | - name: Hassfest validation 20 | uses: "home-assistant/actions/hassfest@master" 21 | 22 | style: 23 | runs-on: "ubuntu-latest" 24 | name: Check style formatting 25 | steps: 26 | - uses: "actions/checkout@v2" 27 | - uses: "actions/setup-python@v1" 28 | with: 29 | python-version: "3.x" 30 | - run: python3 -m pip install black 31 | - run: black . 
32 | 33 | tests: 34 | runs-on: "ubuntu-latest" 35 | name: Run tests 36 | steps: 37 | - name: Check out code from GitHub 38 | uses: "actions/checkout@v2" 39 | - name: Setup Python 40 | uses: "actions/setup-python@v1" 41 | with: 42 | python-version: "3.12" 43 | - name: Install requirements 44 | run: python3 -m pip install -r requirements_test.txt 45 | - name: Run tests 46 | run: | 47 | pytest \ 48 | -qq \ 49 | --timeout=9 \ 50 | --durations=10 \ 51 | -n auto \ 52 | --cov custom_components.npm_switches \ 53 | -o console_output_style=count \ 54 | -p no:sugar \ 55 | tests 56 | -------------------------------------------------------------------------------- /custom_components/npm_switches/translations/nb.json: -------------------------------------------------------------------------------- 1 | { 2 | "config": { 3 | "step": { 4 | "user": { 5 | "title": "NPM", 6 | "description": "Hvis du trenger hjep til konfigurasjon ta en titt her: https://github.com/InTheDaylight14/nginx-proxy-manager-switches", 7 | "data": { 8 | "username": "Brukernavn", 9 | "password": "Passord", 10 | "npm_url": "http://ip:port", 11 | "include_proxy_hosts": "Inkluder proxy-verter", 12 | "include_redirection_hosts": "Inkluder omdirigeringsverter", 13 | "include_stream_hosts": "Inkluder strømverter", 14 | "include_dead_hosts": "Inkluder døde verter", 15 | "include_enable_disable_count_sensors": "Inkluder aktiverte/deaktiverte tellesensorer", 16 | "include_certificate_sensors": "Inkluder sertifikatsensorer og knapper" 17 | } 18 | } 19 | }, 20 | "error": { 21 | "auth": "Brukernavn/Passord er feil." 22 | }, 23 | "abort": { 24 | "single_instance_allowed": "Denne integrasjonen kan kun konfigureres en gang." 
25 | } 26 | }, 27 | "options": { 28 | "step": { 29 | "user": { 30 | "data": { 31 | "binary_sensor": "Binær sensor aktivert", 32 | "sensor": "Sensor aktivert", 33 | "switch": "Bryter aktivert" 34 | } 35 | } 36 | } 37 | } 38 | } -------------------------------------------------------------------------------- /.github/workflows/push.yml: -------------------------------------------------------------------------------- 1 | name: Push actions 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | - dev 8 | 9 | jobs: 10 | validate: 11 | runs-on: "ubuntu-latest" 12 | name: Validate 13 | steps: 14 | - uses: "actions/checkout@v2" 15 | 16 | - name: HACS validation 17 | uses: "hacs/action@main" 18 | with: 19 | category: "integration" 20 | ignore: brands 21 | 22 | - name: Hassfest validation 23 | uses: "home-assistant/actions/hassfest@master" 24 | 25 | style: 26 | runs-on: "ubuntu-latest" 27 | name: Check style formatting 28 | steps: 29 | - uses: "actions/checkout@v2" 30 | - uses: "actions/setup-python@v1" 31 | with: 32 | python-version: "3.x" 33 | - run: python3 -m pip install black 34 | - run: black . 
35 | 36 | tests: 37 | runs-on: "ubuntu-latest" 38 | name: Run tests 39 | steps: 40 | - name: Check out code from GitHub 41 | uses: "actions/checkout@v2" 42 | - name: Setup Python 43 | uses: "actions/setup-python@v1" 44 | with: 45 | python-version: "3.12" 46 | - name: Install requirements 47 | run: python3 -m pip install -r requirements_test.txt 48 | - name: Run tests 49 | run: | 50 | pytest \ 51 | -qq \ 52 | --timeout=9 \ 53 | --durations=10 \ 54 | -n auto \ 55 | --cov custom_components.npm_switches \ 56 | -o console_output_style=count \ 57 | -p no:sugar \ 58 | tests 59 | -------------------------------------------------------------------------------- /.devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ludeeus/integration_blueprint", 3 | "image": "mcr.microsoft.com/devcontainers/python:3.13", 4 | "postCreateCommand": "scripts/setup", 5 | "forwardPorts": [ 6 | 8123 7 | ], 8 | "portsAttributes": { 9 | "8123": { 10 | "label": "Home Assistant", 11 | "onAutoForward": "notify" 12 | } 13 | }, 14 | "customizations": { 15 | "vscode": { 16 | "extensions": [ 17 | "charliermarsh.ruff", 18 | "github.vscode-pull-request-github", 19 | "ms-python.python", 20 | "ms-python.vscode-pylance", 21 | "ryanluker.vscode-coverage-gutters" 22 | ], 23 | "settings": { 24 | "files.eol": "\n", 25 | "editor.tabSize": 4, 26 | "editor.formatOnPaste": true, 27 | "editor.formatOnSave": true, 28 | "editor.formatOnType": false, 29 | "files.trimTrailingWhitespace": true, 30 | "python.analysis.typeCheckingMode": "basic", 31 | "python.analysis.autoImportCompletions": true, 32 | "python.defaultInterpreterPath": "/usr/local/bin/python", 33 | "[python]": { 34 | "editor.defaultFormatter": "charliermarsh.ruff" 35 | } 36 | } 37 | } 38 | }, 39 | "remoteUser": "vscode", 40 | "features": { 41 | "ghcr.io/devcontainers-extra/features/apt-packages:1": { 42 | "packages": "ffmpeg,libturbojpeg0,libpcap-dev" 43 | } 44 | } 45 | } 
-------------------------------------------------------------------------------- /custom_components/npm_switches/translations/fr.json: -------------------------------------------------------------------------------- 1 | { 2 | "config": { 3 | "step": { 4 | "user": { 5 | "title": "NPM Exemple", 6 | "description": "Si vous avez besoin d'aide pour la configuration, regardez ici: https://github.com/InTheDaylight14/nginx-proxy-manager-switches", 7 | "data": { 8 | "username": "Identifiant", 9 | "password": "Mot de Passe", 10 | "npm_url": "Nginx Proxy Manager URL ex. http://ip:port", 11 | "include_proxy_hosts": "Inclure les hôtes proxy", 12 | "include_redirection_hosts": "Inclure les hôtes de redirection", 13 | "include_stream_hosts": "Inclure les hôtes de flux", 14 | "include_dead_hosts": "Inclure les hôtes morts", 15 | "include_enable_disable_count_sensors": "Inclure les capteurs de comptage activés/désactivés", 16 | "include_certificate_sensors": "Inclure des capteurs et des boutons de certificat" 17 | } 18 | } 19 | }, 20 | "error": { 21 | "auth": "Identifiant ou mot de passe erroné." 22 | }, 23 | "abort": { 24 | "single_instance_allowed": "Une seule instance est autorisée." 
25 | } 26 | }, 27 | "options": { 28 | "step": { 29 | "user": { 30 | "data": { 31 | "binary_sensor": "Capteur binaire activé", 32 | "sensor": "Capteur activé", 33 | "switch": "Interrupteur activé" 34 | } 35 | } 36 | } 37 | } 38 | } -------------------------------------------------------------------------------- /custom_components/npm_switches/const.py: -------------------------------------------------------------------------------- 1 | """Constants for NPM Switches.""" 2 | # Base component constants 3 | NAME = "NPM Switches" 4 | DOMAIN = "npm_switches" 5 | DOMAIN_DATA = f"{DOMAIN}_data" 6 | VERSION = "2.1.0" 7 | ATTRIBUTION = "Data provided by your NPM Instance" 8 | ISSUE_URL = "https://github.com/InTheDaylight14/nginx-proxy-manager-switches/issues" 9 | 10 | # Icons 11 | ICON = "mdi:format-quote-close" 12 | 13 | # Device classes 14 | BINARY_SENSOR_DEVICE_CLASS = "connectivity" 15 | 16 | # Platforms 17 | # BINARY_SENSOR = "binary_sensor" 18 | SENSOR = "sensor" 19 | SWITCH = "switch" 20 | BUTTON = 'button' 21 | PLATFORMS = [SENSOR, SWITCH, BUTTON] 22 | 23 | 24 | # Configuration and options 25 | CONF_ENABLED = "enabled" 26 | CONF_USERNAME = "username" 27 | CONF_PASSWORD = "password" 28 | CONF_NPM_URL = "npm_url" 29 | CONF_INDLUDE_PROXY = "include_proxy_hosts" 30 | CONF_INCLUDE_REDIR = "include_redirection_hosts" 31 | CONF_INCLUDE_STREAMS = "include_stream_hosts" 32 | CONF_INCLUDE_DEAD = "include_dead_hosts" 33 | CONF_INCLUDE_SENSORS = "include_enable_disable_count_sensors" 34 | CONF_INCLUDE_CERTS = "include_certificate_sensors" 35 | DEFAULT_ENABLED = "" 36 | DEFAULT_USERNAME = "" 37 | DEFAULT_PASSWORD = "" 38 | DEFAULT_NPM_URL = "http://" 39 | DEFAULT_INDLUDE_PROXY = True 40 | DEFAULT_INCLUDE_REDIR = False 41 | DEFAULT_INCLUDE_STREAMS = False 42 | DEFAULT_INCLUDE_DEAD = False 43 | DEFAULT_INCLUDE_SENSORS = True 44 | DEFAULT_INCLUDE_CERTS = False 45 | 46 | # Defaults 47 | DEFAULT_NAME = DOMAIN 48 | 49 | 50 | STARTUP_MESSAGE = f""" 51 | 
------------------------------------------------------------------- 52 | {NAME} 53 | Version: {VERSION} 54 | This is a custom integration! 55 | If you have any issues with this you need to open an issue here: 56 | {ISSUE_URL} 57 | ------------------------------------------------------------------- 58 | """ 59 | -------------------------------------------------------------------------------- /tests/README.md: -------------------------------------------------------------------------------- 1 | # Why? 2 | 3 | While tests aren't required to publish a custom component for Home Assistant, they will generally make development easier because good tests will expose when changes you want to make to the component logic will break expected functionality. Home Assistant uses [`pytest`](https://docs.pytest.org/en/latest/) for its tests, and the tests that have been included are modeled after tests that are written for core Home Assistant integrations. These tests pass with 100% coverage (unless something has changed ;) ) and have comments to help you understand the purpose of different parts of the test. 4 | 5 | # Getting Started 6 | 7 | To begin, it is recommended to create a virtual environment to install dependencies: 8 | ```bash 9 | python3 -m venv venv 10 | source venv/bin/activate 11 | ``` 12 | 13 | You can then install the dependencies that will allow you to run tests: 14 | `pip3 install -r requirements_test.txt.` 15 | 16 | This will install `homeassistant`, `pytest`, and `pytest-homeassistant-custom-component`, a plugin which allows you to leverage helpers that are available in Home Assistant for core integration tests. 
17 | 18 | # Useful commands 19 | 20 | Command | Description 21 | ------- | ----------- 22 | `pytest tests/` | This will run all tests in `tests/` and tell you how many passed/failed 23 | `pytest --durations=10 --cov-report term-missing --cov=custom_components.integration_blueprint tests` | This tells `pytest` that your target module to test is `custom_components.integration_blueprint` so that it can give you a [code coverage](https://en.wikipedia.org/wiki/Code_coverage) summary, including % of code that was executed and the line numbers of missed executions. 24 | `pytest tests/test_init.py -k test_setup_unload_and_reload_entry` | Runs the `test_setup_unload_and_reload_entry` test function located in `tests/test_init.py` 25 | -------------------------------------------------------------------------------- /custom_components/npm_switches/button.py: -------------------------------------------------------------------------------- 1 | """Button platform for NPM Switches.""" 2 | from datetime import datetime 3 | 4 | from homeassistant.components.button import ButtonEntity 5 | from homeassistant.config_entries import ConfigEntry 6 | from homeassistant.util import slugify, dt 7 | 8 | from .const import DOMAIN 9 | from .entity import NpmSwitchesEntity 10 | from . 
import NpmSwitchesUpdateCoordinator 11 | 12 | 13 | async def async_setup_entry(hass, entry, async_add_entities): 14 | """Setup sensor platform.""" 15 | coordinator = hass.data[DOMAIN][entry.entry_id] 16 | api = hass.data[DOMAIN][entry.entry_id].api 17 | certificates = await api.get_certificates() 18 | entities = [] 19 | 20 | if "include_certificate_sensors" in entry.data: 21 | if entry.data["include_certificate_sensors"]: 22 | for cert in certificates.values(): 23 | entities.append(NpmSwitchesCertificateRenewButton(coordinator, entry, cert)) 24 | 25 | async_add_entities(entities, True) 26 | 27 | class NpmSwitchesCertificateRenewButton(NpmSwitchesEntity, ButtonEntity): 28 | """NPM Switches Certificate Renew Button Class.""" 29 | 30 | def __init__( 31 | self, 32 | coordinator: NpmSwitchesUpdateCoordinator, 33 | entry: ConfigEntry, 34 | certificate: dict, 35 | ) -> None: 36 | """Initialize cert renew button.""" 37 | super().__init__(coordinator, entry) 38 | self.cert_id = str(certificate["id"]) 39 | self.name = "Renew Certificate " + certificate["domain_names"][0] 40 | self.entity_id = "button."+slugify(f"{entry.title}")+" Renew Cert "+str(self.cert_id) 41 | self._attr_unique_id = f"{entry.entry_id} {" Renew Cert "} {self.cert_id}" 42 | self._attr_icon = "mdi:refresh" 43 | 44 | async def async_press(self) -> None: 45 | """Handle the button press.""" 46 | await self.coordinator.api.renew_certificate(self.cert_id) -------------------------------------------------------------------------------- /info.md: -------------------------------------------------------------------------------- 1 | [![GitHub Release][releases-shield]][releases] 2 | [![GitHub Activity][commits-shield]][commits] 3 | [![License][license-shield]][license] 4 | 5 | [![hacs][hacsbadge]][hacs] 6 | [![Project Maintenance][maintenance-shield]][user_profile] 7 | 8 | [![Community Forum][forum-shield]][forum] 9 | 10 | 11 | **This component will set up the following platforms.** 12 | 13 | Platform | Description 
14 | -- | -- 15 | `sensor` | Show info from API. 16 | `switch` | Switch proxy hosts to `Enabled` or `Disabled`. 17 | 18 | 19 | 20 | {% if not installed %} 21 | ## Installation 22 | 23 | 1. Click install. 24 | 1. In the HA UI go to "Configuration" -> "Integrations" click "+" and search for "NPM Switches". 25 | 26 | {% endif %} 27 | 28 | _Component built with [integration_blueprint][integration_blueprint]._ 29 | 30 | 31 | 32 | *** 33 | 34 | [integration_blueprint]: https://github.com/custom-components/integration_blueprint 35 | [buymecoffee]: https://www.buymeacoffee.com/ludeeus 36 | [buymecoffeebadge]: https://img.shields.io/badge/buy%20me%20a%20coffee-donate-yellow.svg?style=for-the-badge 37 | [commits-shield]: https://img.shields.io/github/commit-activity/w/InTheDaylight14/nginx-proxy-manager-switches?style=for-the-badge 38 | [commits]: https://github.com/InTheDaylight14/nginx-proxy-manager-switches/commits/master 39 | [hacs]: https://hacs.xyz 40 | [hacsbadge]: https://img.shields.io/badge/HACS-Custom-orange.svg?style=for-the-badge 41 | 42 | [forum-shield]: https://img.shields.io/badge/community-forum-brightgreen.svg?style=for-the-badge 43 | [forum]: https://community.home-assistant.io/ 44 | [license-shield]: https://img.shields.io/github/license/InTheDaylight14/nginx-proxy-manager-switches?style=for-the-badge 45 | [maintenance-shield]: https://img.shields.io/badge/maintainer-@InTheDaylight14-blue.svg?style=for-the-badge 46 | [releases-shield]: https://img.shields.io/github/release/InTheDaylight14/nginx-proxy-manager-switches?style=for-the-badge 47 | [releases]: https://github.com/InTheDaylight14/nginx-proxy-manager-switches/releases 48 | [user_profile]: https://github.com/InTheDaylight14 49 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contribution guidelines 2 | 3 | Contributing to this project should be as easy and 
transparent as possible, whether it's: 4 | 5 | - Reporting a bug 6 | - Discussing the current state of the code 7 | - Submitting a fix 8 | - Proposing new features 9 | 10 | ## GitHub is used for everything 11 | 12 | GitHub is used to host code, to track issues and feature requests, as well as accept pull requests. 13 | 14 | Pull requests are the best way to propose changes to the codebase. 15 | 16 | 1. Fork the repo and create your branch from `master`. 17 | 2. If you've changed something, update the documentation. 18 | 3. Make sure your code lints (using black). 19 | 4. Test your contribution. 20 | 5. Issue that pull request! 21 | 22 | ## Any contributions you make will be under the MIT Software License 23 | 24 | In short, when you submit code changes, your submissions are understood to be under the same [MIT License](http://choosealicense.com/licenses/mit/) that covers the project. Feel free to contact the maintainers if that's a concern. 25 | 26 | ## Report bugs using GitHub's [issues](../../issues) 27 | 28 | GitHub issues are used to track public bugs. 29 | Report a bug by [opening a new issue](../../issues/new/choose); it's that easy! 30 | 31 | ## Write bug reports with detail, background, and sample code 32 | 33 | **Great Bug Reports** tend to have: 34 | 35 | - A quick summary and/or background 36 | - Steps to reproduce 37 | - Be specific! 38 | - Give sample code if you can. 39 | - What you expected would happen 40 | - What actually happens 41 | - Notes (possibly including why you think this might be happening, or stuff you tried that didn't work) 42 | 43 | People *love* thorough bug reports. I'm not even kidding. 44 | 45 | ## Use a Consistent Coding Style 46 | 47 | Use [black](https://github.com/ambv/black) to make sure the code follows the style. 48 | 49 | ## Test your code modification 50 | 51 | This custom component is based on [integration_blueprint template](https://github.com/custom-components/integration_blueprint).
52 | 53 | It comes with development environment in a container, easy to launch 54 | if you use Visual Studio Code. With this container you will have a stand alone 55 | Home Assistant instance running and already configured with the included 56 | [`.devcontainer/configuration.yaml`](./.devcontainer/configuration.yaml) 57 | file. 58 | 59 | ## License 60 | 61 | By contributing, you agree that your contributions will be licensed under its MIT License. 62 | -------------------------------------------------------------------------------- /tests/test_init.py: -------------------------------------------------------------------------------- 1 | """Test integration_blueprint setup process.""" 2 | from homeassistant.exceptions import ConfigEntryNotReady 3 | import pytest 4 | from pytest_homeassistant_custom_component.common import MockConfigEntry 5 | 6 | from custom_components.npm_switches import ( 7 | NpmSwitchesUpdateCoordinator, 8 | async_reload_entry, 9 | async_setup_entry, 10 | async_unload_entry, 11 | ) 12 | from custom_components.npm_switches.const import DOMAIN 13 | 14 | from .const import MOCK_CONFIG 15 | 16 | pytestmark = pytest.mark.asyncio 17 | 18 | # We can pass fixtures as defined in conftest.py to tell pytest to use the fixture 19 | # for a given test. We can also leverage fixtures and mocks that are available in 20 | # Home Assistant using the pytest_homeassistant_custom_component plugin. 21 | # Assertions allow you to verify that the return value of whatever is on the left 22 | # side of the assertion matches with the right side. 23 | async def test_setup_unload_and_reload_entry(hass, bypass_get_data, bypass_new_token): 24 | """Test entry setup and unload.""" 25 | # Create a mock entry so we don't have to go through config flow 26 | config_entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG, entry_id="test") 27 | 28 | # Set up the entry and assert that the values set during setup are where we expect 29 | # them to be. 
Because we have patched the NpmSwitchesUpdateCoordinator.async_get_data 30 | # call, no code from custom_components/integration_blueprint/api.py actually runs. 31 | assert await async_setup_entry(hass, config_entry) 32 | assert DOMAIN in hass.data and config_entry.entry_id in hass.data[DOMAIN] 33 | assert ( 34 | type(hass.data[DOMAIN][config_entry.entry_id]) == NpmSwitchesUpdateCoordinator 35 | ) 36 | 37 | # Reload the entry and assert that the data from above is still there 38 | assert await async_reload_entry(hass, config_entry) is None 39 | assert DOMAIN in hass.data and config_entry.entry_id in hass.data[DOMAIN] 40 | assert ( 41 | type(hass.data[DOMAIN][config_entry.entry_id]) == NpmSwitchesUpdateCoordinator 42 | ) 43 | 44 | # Unload the entry and verify that the data has been removed 45 | assert await async_unload_entry(hass, config_entry) 46 | assert config_entry.entry_id not in hass.data[DOMAIN] 47 | 48 | 49 | async def test_setup_entry_exception(hass, error_on_get_new_token): 50 | """Test ConfigEntryNotReady when API raises an exception during entry setup.""" 51 | config_entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG, entry_id="test") 52 | 53 | # In this case we are testing the condition where async_setup_entry raises 54 | # ConfigEntryNotReady using the `error_on_get_data` fixture which simulates 55 | # an error. 
56 | with pytest.raises(ConfigEntryNotReady): 57 | assert await async_setup_entry(hass, config_entry) 58 | -------------------------------------------------------------------------------- /tests/test_sensor.py: -------------------------------------------------------------------------------- 1 | """Test npm switches sensor.""" 2 | from unittest.mock import call, patch 3 | 4 | from homeassistant.components.switch import SERVICE_TURN_OFF, SERVICE_TURN_ON 5 | from homeassistant.const import ATTR_ENTITY_ID 6 | from homeassistant.core import HomeAssistant 7 | from pytest_homeassistant_custom_component.common import MockConfigEntry 8 | from homeassistant.helpers import entity_registry as er 9 | 10 | from custom_components.npm_switches import async_setup_entry 11 | from custom_components.npm_switches.const import ( 12 | DEFAULT_NAME, 13 | DOMAIN, 14 | SENSOR, 15 | ) 16 | 17 | from .const import ( 18 | MOCK_CONFIG, 19 | MOCK_PROXY_HOSTS_DICT, 20 | MOCK_PROXY_HOSTS_LIST, 21 | MOCK_NPM_URL, 22 | ) 23 | 24 | import pytest 25 | 26 | pytestmark = pytest.mark.asyncio 27 | 28 | 29 | async def test_registry_entries(hass, aioclient_mock, bypass_new_token): 30 | """Tests sensors are registered in the entity registry.""" 31 | entry_id = "test" 32 | config_entry = MockConfigEntry( 33 | domain=DOMAIN, data=MOCK_CONFIG, entry_id=entry_id, options=None 34 | ) 35 | 36 | # Mock the api call to get proxy data, this allows setup to complete successfully. 
37 | aioclient_mock.get( 38 | MOCK_NPM_URL + "/api/nginx/proxy-hosts", 39 | json=MOCK_PROXY_HOSTS_LIST, 40 | ) 41 | 42 | assert await async_setup_entry(hass, config_entry) 43 | await hass.async_block_till_done() 44 | 45 | entity_registry = er.async_get(hass) 46 | 47 | entry = entity_registry.async_get("sensor.npm_enabled_proxy_hosts") 48 | assert entry.unique_id == entry_id + "_npm_enabled_proxy_hosts" 49 | 50 | entry = entity_registry.async_get("sensor.npm_disabled_proxy_hosts") 51 | assert entry.unique_id == entry_id + "_npm_disabled_proxy_hosts" 52 | 53 | 54 | async def test_sensor_states(hass, aioclient_mock, bypass_new_token): 55 | """Test switch services.""" 56 | # Create a mock entry so we don't have to go through config flow 57 | config_entry = MockConfigEntry( 58 | domain=DOMAIN, data=MOCK_CONFIG, entry_id="test", options=None 59 | ) 60 | 61 | # Mock the api call to get proxy data, this allows setup to complete successfully. 62 | aioclient_mock.get( 63 | MOCK_NPM_URL + "/api/nginx/proxy-hosts", 64 | json=MOCK_PROXY_HOSTS_LIST, 65 | ) 66 | 67 | assert await async_setup_entry(hass, config_entry) 68 | await hass.async_block_till_done() 69 | 70 | # Retrieve state of the enabled sesnor 71 | state = hass.states.get("sensor.npm_enabled_proxy_hosts") 72 | proxy_id = str(state.attributes["id"]) 73 | 74 | assert state.state == "1" 75 | 76 | # Retrieve state of the disabled sesnor 77 | state = hass.states.get("sensor.npm_disabled_proxy_hosts") 78 | proxy_id = str(state.attributes["id"]) 79 | 80 | assert state.state == "1" 81 | -------------------------------------------------------------------------------- /tests/test_api.py: -------------------------------------------------------------------------------- 1 | """Tests for integration_blueprint api.""" 2 | import asyncio 3 | import pytest 4 | 5 | import aiohttp 6 | from homeassistant.helpers.aiohttp_client import async_get_clientsession 7 | from homeassistant.util import dt 8 | from custom_components.npm_switches.api 
import NpmSwitchesApiClient 9 | 10 | from .const import ( 11 | MOCK_NPM_URL, 12 | MOCK_PROXY_HOSTS_LIST, 13 | MOCK_PROXY_HOSTS_DICT, 14 | MOCK_TOKEN, 15 | ) 16 | 17 | pytestmark = pytest.mark.asyncio 18 | 19 | 20 | async def test_api(hass, aioclient_mock, caplog): 21 | """Test API calls.""" 22 | 23 | # To test the api submodule, we first create an instance of our API client 24 | api = NpmSwitchesApiClient( 25 | "test", "test", "http://test:81", async_get_clientsession(hass) 26 | ) 27 | 28 | aioclient_mock.post( 29 | MOCK_NPM_URL + "/api/tokens", 30 | json=MOCK_TOKEN, 31 | ) 32 | 33 | await api.async_get_new_token() 34 | assert api._token == MOCK_TOKEN["token"] 35 | assert api._token_expires == dt.parse_datetime(MOCK_TOKEN["expires"]) 36 | 37 | aioclient_mock.get( 38 | MOCK_NPM_URL + "/api/nginx/proxy-hosts", 39 | json=MOCK_PROXY_HOSTS_LIST, 40 | ) 41 | 42 | # print(await api.get_proxy_hosts()) 43 | assert await api.get_proxy_hosts() == MOCK_PROXY_HOSTS_DICT 44 | 45 | assert api.get_npm_url == MOCK_NPM_URL 46 | 47 | # In order to get 100% coverage, we need to test `api_wrapper` to test the code 48 | # that isn't already called by `async_get_data` and `async_set_title`. Because the 49 | # only logic that lives inside `api_wrapper` that is not being handled by a third 50 | # party library (aiohttp) is the exception handling, we also want to simulate 51 | # raising the exceptions to ensure that the function handles them as expected. 52 | # The caplog fixture allows access to log messages in tests. 
This is particularly 53 | # useful during exception handling testing since often the only action as part of 54 | # exception handling is a logging statement 55 | caplog.clear() 56 | aioclient_mock.put( 57 | "https://jsonplaceholder.typicode.com/posts/1", exc=asyncio.TimeoutError 58 | ) 59 | assert ( 60 | await api.api_wrapper("put", "https://jsonplaceholder.typicode.com/posts/1") 61 | is None 62 | ) 63 | assert ( 64 | len(caplog.record_tuples) == 1 65 | and "Timeout error fetching information from" in caplog.record_tuples[0][2] 66 | ) 67 | 68 | caplog.clear() 69 | aioclient_mock.post( 70 | "https://jsonplaceholder.typicode.com/posts/1", exc=aiohttp.ClientError 71 | ) 72 | assert ( 73 | await api.api_wrapper("post", "https://jsonplaceholder.typicode.com/posts/1") 74 | is None 75 | ) 76 | assert ( 77 | len(caplog.record_tuples) == 1 78 | and "Error fetching information from" in caplog.record_tuples[0][2] 79 | ) 80 | 81 | caplog.clear() 82 | aioclient_mock.post("https://jsonplaceholder.typicode.com/posts/2", exc=Exception) 83 | assert ( 84 | await api.api_wrapper("post", "https://jsonplaceholder.typicode.com/posts/2") 85 | is None 86 | ) 87 | assert ( 88 | len(caplog.record_tuples) == 1 89 | and "Something really wrong happened!" 
in caplog.record_tuples[0][2] 90 | ) 91 | 92 | caplog.clear() 93 | aioclient_mock.post("https://jsonplaceholder.typicode.com/posts/3", exc=TypeError) 94 | assert ( 95 | await api.api_wrapper("post", "https://jsonplaceholder.typicode.com/posts/3") 96 | is None 97 | ) 98 | assert ( 99 | len(caplog.record_tuples) == 1 100 | and "Error parsing information from" in caplog.record_tuples[0][2] 101 | ) 102 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # NPM Switches Custom Integration 2 | 3 | [![GitHub Release][releases-shield]][releases] 4 | 5 | 6 | [![GitHub Activity][commits-shield]][commits] 7 | 8 | [![License][license-shield]][license] 9 | [![hacs][hacsbadge]][hacs] 10 | ![Project Maintenance][maintenance-shield] 11 | [![Community Forum][forum-shield]][forum] 12 | 13 | 14 | 15 | ## Installation 16 | 17 | ### Recommended via HACs 18 | 19 | 1. Use [HACS](https://hacs.xyz/docs/setup/download), in `HACS > Integrations > Explore & Add Repositories` search for "NPM Switches". 20 | 2. Restart Home Assistant. 21 | 3. [![Add Integration][add-integration-badge]][add-integration] or in the HA UI go to "Configuration" -> "Integrations" click "+" and search for "NPM Switches". 22 | 23 | ### Manual Download 24 | 1. Use the tool of choice to open the directory (folder) for your HA configuration (where you find `configuration.yaml`). 25 | 3. If you do not have a `custom_components` directory (folder) there, you need to create it. 26 | 4. Download this repository's files. 27 | 5. Move or Copy the entire `npm_switches/` directory (folder) in your HA `custom_components` directory (folder). End result should look like `custom_components/npm_switches/`. 28 | 6. Restart Home Assistant. 29 | 7. [![Add Integration][add-integration-badge]][add-integration] or in the HA UI go to "Configuration" -> "Integrations" click "+" and search for "NPM Switches". 
30 | 31 | ## Usage 32 | 33 | 'NPM Switches' offers integration with a local Nginx Proxy Manager server/instance. It will login into and retrieve a token from the NPM server every 24 hours. This token is used to query the state of each proxy host every 60 seconds. It is also used to enable or disable a proxy host via local api call to the NPM server. 34 | 35 | This integration provides the following entities: 36 | - Switches - One switch for every proxy host that is configured 37 | - Optional Switches: Redirection Hosts, Streams, 404 Hosts 38 | - Sensors: 39 | - Number of enabled and disabled proxy hosts (and, if enabled during configuration, redirection, stream, and 404 hosts) 40 | - Buttons: 41 | - Renew certificate buttons 42 | 43 | Features to be developed: 44 | - Truly unique Unique IDs 45 | - Reconfiguration of an entry should the NPM url or login info change 46 | - Better error messages on failed configuration 47 | 48 | If you want to contribute to this please read the [Contribution guidelines](CONTRIBUTING.md) 49 | 50 | _Component built with [integration_blueprint][integration_blueprint]._ 51 | 52 | *** 53 | 54 | [integration_blueprint]: https://github.com/custom-components/integration_blueprint 55 | [commits-shield]: https://img.shields.io/github/commit-activity/w/InTheDaylight14/nginx-proxy-manager-switches?style=for-the-badge 56 | [commits]: https://github.com/InTheDaylight14/nginx-proxy-manager-switches/commits/master 57 | [hacs]: https://github.com/custom-components/hacs 58 | [hacsbadge]: https://img.shields.io/badge/HACS-Custom-orange.svg?style=for-the-badge 59 | 60 | [forum-shield]: https://img.shields.io/badge/community-forum-brightgreen.svg?style=for-the-badge 61 | [forum]: https://community.home-assistant.io/ 62 | [license]: LICENSE 63 | [license-shield]: https://img.shields.io/github/license/InTheDaylight14/nginx-proxy-manager-switches?style=for-the-badge 64 | [maintenance-shield]: 
https://img.shields.io/badge/maintainer-@InTheDaylight14-blue.svg?style=for-the-badge 65 | [releases-shield]: https://img.shields.io/github/release/InTheDaylight14/nginx-proxy-manager-switches?style=for-the-badge 66 | [releases]: https://github.com/InTheDaylight14/nginx-proxy-manager-switches/releases 67 | [add-integration]: https://my.home-assistant.io/redirect/config_flow_start?domain=npm_switches 68 | [add-integration-badge]: https://my.home-assistant.io/badges/config_flow_start.svg 69 | [download-all]: https://img.shields.io/github/downloads/InTheDaylight14/nginx-proxy-manager-switches/total?style=for-the-badge 70 | [download-latest]: https://img.shields.io/github/downloads/InTheDaylight14/nginx-proxy-manager-switches/latest/total?style=for-the-badge 71 | -------------------------------------------------------------------------------- /tests/const.py: -------------------------------------------------------------------------------- 1 | """Constants for integration_blueprint tests.""" 2 | from custom_components.npm_switches.const import ( 3 | CONF_PASSWORD, 4 | CONF_USERNAME, 5 | CONF_NPM_URL, 6 | ) 7 | 8 | # Mock config data to be used across multiple tests 9 | MOCK_CONFIG = { 10 | CONF_USERNAME: "test_username", 11 | CONF_PASSWORD: "test_password", 12 | CONF_NPM_URL: "http://test:81", 13 | } 14 | 15 | MOCK_NPM_URL = "http://test:81" 16 | 17 | MOCK_TOKEN = { 18 | "token": "abcd12345", 19 | "expires": "2023-01-25T01:37:00.107Z", 20 | } 21 | 22 | MOCK_PROXY_HOSTS_LIST = [ 23 | { 24 | "id": 33, 25 | "created_on": "2022-11-27T22:46:21.000Z", 26 | "modified_on": "2022-12-11T22:48:53.000Z", 27 | "owner_user_id": 1, 28 | "domain_names": ["my.domain.com"], 29 | "forward_host": "192.168.1.1", 30 | "forward_port": 8123, 31 | "access_list_id": 0, 32 | "certificate_id": 35, 33 | "ssl_forced": 0, 34 | "caching_enabled": 0, 35 | "block_exploits": 0, 36 | "advanced_config": "", 37 | "meta": { 38 | "letsencrypt_agree": False, 39 | "dns_challenge": False, 40 | "nginx_online": 
True, 41 | "nginx_err": None, 42 | }, 43 | "allow_websocket_upgrade": 0, 44 | "http2_support": 0, 45 | "forward_scheme": "https", 46 | "enabled": 1, 47 | "locations": [], 48 | "hsts_enabled": 0, 49 | "hsts_subdomains": 0, 50 | }, 51 | { 52 | "id": 32, 53 | "created_on": "2022-11-17T00:49:25.000Z", 54 | "modified_on": "2023-01-24T00:36:53.000Z", 55 | "owner_user_id": 1, 56 | "domain_names": ["other.domain.com"], 57 | "forward_host": "192.168.1.2", 58 | "forward_port": 8080, 59 | "access_list_id": 0, 60 | "certificate_id": 35, 61 | "ssl_forced": 0, 62 | "caching_enabled": 0, 63 | "block_exploits": 1, 64 | "advanced_config": "", 65 | "meta": { 66 | "letsencrypt_agree": False, 67 | "dns_challenge": False, 68 | "nginx_online": True, 69 | "nginx_err": None, 70 | }, 71 | "allow_websocket_upgrade": 1, 72 | "http2_support": 0, 73 | "forward_scheme": "http", 74 | "enabled": 0, 75 | "locations": [], 76 | "hsts_enabled": 0, 77 | "hsts_subdomains": 0, 78 | }, 79 | ] 80 | 81 | MOCK_PROXY_HOSTS_DICT = { 82 | "33": { 83 | "id": 33, 84 | "created_on": "2022-11-27T22:46:21.000Z", 85 | "modified_on": "2022-12-11T22:48:53.000Z", 86 | "owner_user_id": 1, 87 | "domain_names": ["my.domain.com"], 88 | "forward_host": "192.168.1.1", 89 | "forward_port": 8123, 90 | "access_list_id": 0, 91 | "certificate_id": 35, 92 | "ssl_forced": 0, 93 | "caching_enabled": 0, 94 | "block_exploits": 0, 95 | "advanced_config": "", 96 | "meta": { 97 | "letsencrypt_agree": False, 98 | "dns_challenge": False, 99 | "nginx_online": True, 100 | "nginx_err": None, 101 | }, 102 | "allow_websocket_upgrade": 0, 103 | "http2_support": 0, 104 | "forward_scheme": "https", 105 | "enabled": 1, 106 | "locations": [], 107 | "hsts_enabled": 0, 108 | "hsts_subdomains": 0, 109 | }, 110 | "32": { 111 | "id": 32, 112 | "created_on": "2022-11-17T00:49:25.000Z", 113 | "modified_on": "2023-01-24T00:36:53.000Z", 114 | "owner_user_id": 1, 115 | "domain_names": ["other.domain.com"], 116 | "forward_host": "192.168.1.2", 117 | 
"forward_port": 8080, 118 | "access_list_id": 0, 119 | "certificate_id": 35, 120 | "ssl_forced": 0, 121 | "caching_enabled": 0, 122 | "block_exploits": 1, 123 | "advanced_config": "", 124 | "meta": { 125 | "letsencrypt_agree": False, 126 | "dns_challenge": False, 127 | "nginx_online": True, 128 | "nginx_err": None, 129 | }, 130 | "allow_websocket_upgrade": 1, 131 | "http2_support": 0, 132 | "forward_scheme": "http", 133 | "enabled": 0, 134 | "locations": [], 135 | "hsts_enabled": 0, 136 | "hsts_subdomains": 0, 137 | }, 138 | } 139 | -------------------------------------------------------------------------------- /tests/test_config_flow.py: -------------------------------------------------------------------------------- 1 | """Test integration_blueprint config flow.""" 2 | from unittest.mock import patch 3 | 4 | from homeassistant import config_entries, data_entry_flow 5 | import pytest 6 | from pytest_homeassistant_custom_component.common import MockConfigEntry 7 | 8 | from custom_components.npm_switches.const import ( 9 | DOMAIN, 10 | PLATFORMS, 11 | SENSOR, 12 | SWITCH, 13 | ) 14 | 15 | from .const import MOCK_CONFIG 16 | 17 | pytestmark = pytest.mark.asyncio 18 | 19 | 20 | # This fixture bypasses the actual setup of the integration 21 | # since we only want to test the config flow. We test the 22 | # actual functionality of the integration in other test modules. 23 | @pytest.fixture(autouse=True) 24 | def bypass_setup_fixture(): 25 | """Prevent setup.""" 26 | with patch("custom_components.npm_switches.async_setup", return_value=True,), patch( 27 | "custom_components.npm_switches.async_setup_entry", 28 | return_value=True, 29 | ): 30 | yield 31 | 32 | 33 | # Here we simiulate a successful config flow from the backend. 34 | # Note that we use the `bypass_get_data` fixture here because 35 | # we want the config flow validation to succeed during the test. 
36 | async def test_successful_config_flow(hass, bypass_new_token): 37 | """Test a successful config flow.""" 38 | # Initialize a config flow 39 | result = await hass.config_entries.flow.async_init( 40 | DOMAIN, context={"source": config_entries.SOURCE_USER} 41 | ) 42 | 43 | # # Check that the config flow shows the user form as the first step 44 | assert result["type"] == data_entry_flow.RESULT_TYPE_FORM 45 | assert result["step_id"] == "user" 46 | 47 | # If a user were to enter `test_username` for username and `test_password` 48 | # for password, it would result in this function call 49 | result = await hass.config_entries.flow.async_configure( 50 | result["flow_id"], user_input=MOCK_CONFIG 51 | ) 52 | 53 | # Check that the config flow is complete and a new entry is created with 54 | # the input data 55 | assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY 56 | assert result["title"] == "test_username" 57 | assert result["data"] == MOCK_CONFIG 58 | assert result["result"] 59 | 60 | 61 | # In this case, we want to simulate a failure during the config flow. 62 | # We use the `error_on_get_data` mock instead of `bypass_get_data` 63 | # (note the function parameters) to raise an Exception during 64 | # validation of the input config. 
65 | async def test_failed_config_flow(hass, error_on_get_new_token): 66 | """Test a failed config flow due to credential validation failure.""" 67 | print("Can I see this?") 68 | assert True 69 | result = await hass.config_entries.flow.async_init( 70 | DOMAIN, context={"source": config_entries.SOURCE_USER} 71 | ) 72 | 73 | assert result["type"] == data_entry_flow.RESULT_TYPE_FORM 74 | assert result["step_id"] == "user" 75 | 76 | result = await hass.config_entries.flow.async_configure( 77 | result["flow_id"], user_input=MOCK_CONFIG 78 | ) 79 | 80 | assert result["type"] == data_entry_flow.RESULT_TYPE_FORM 81 | assert result["errors"] == {"base": "auth"} 82 | 83 | 84 | # Our config flow also has an options flow, so we must test it as well. 85 | async def test_options_flow(hass): 86 | """Test an options flow.""" 87 | # Create a new MockConfigEntry and add to HASS (we're bypassing config 88 | # flow entirely) 89 | entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG, entry_id="test") 90 | entry.add_to_hass(hass) 91 | 92 | # Initialize an options flow 93 | result = await hass.config_entries.options.async_init(entry.entry_id) 94 | 95 | # Verify that the first options step is a user form 96 | assert result["type"] == data_entry_flow.RESULT_TYPE_FORM 97 | assert result["step_id"] == "user" 98 | 99 | # Enter some fake data into the form 100 | result = await hass.config_entries.options.async_configure( 101 | result["flow_id"], 102 | user_input={platform: platform != SENSOR for platform in PLATFORMS}, 103 | ) 104 | 105 | # Verify that the flow finishes 106 | assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY 107 | assert result["title"] == "test_username" 108 | 109 | # Verify that the options were updated 110 | assert entry.options == {SENSOR: False, SWITCH: True} 111 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | 
"""Global fixtures for integration_blueprint integration.""" 2 | # Fixtures allow you to replace functions with a Mock object. You can perform 3 | # many options via the Mock to reflect a particular behavior from the original 4 | # function that you want to see without going through the function's actual logic. 5 | # Fixtures can either be passed into tests as parameters, or if autouse=True, they 6 | # will automatically be used across all tests. 7 | # 8 | # Fixtures that are defined in conftest.py are available across all tests. You can also 9 | # define fixtures within a particular test file to scope them locally. 10 | # 11 | # pytest_homeassistant_custom_component provides some fixtures that are provided by 12 | # Home Assistant core. You can find those fixture definitions here: 13 | # https://github.com/MatthewFlamm/pytest-homeassistant-custom-component/blob/master/pytest_homeassistant_custom_component/common.py 14 | # 15 | # See here for more info: https://docs.pytest.org/en/latest/fixture.html (note that 16 | # pytest includes fixtures OOB which you can use as defined on this page) 17 | from unittest.mock import patch 18 | 19 | from .const import MOCK_PROXY_HOSTS_DICT, MOCK_TOKEN 20 | 21 | import pytest 22 | 23 | pytest_plugins = "pytest_homeassistant_custom_component" 24 | 25 | 26 | # This fixture enables loading custom integrations in all tests. 27 | # Remove to enable selective use of this fixture 28 | @pytest.fixture(autouse=True) 29 | def auto_enable_custom_integrations(enable_custom_integrations): 30 | yield 31 | 32 | 33 | # This fixture is used to prevent HomeAssistant from attempting to create and dismiss persistent 34 | # notifications. These calls would fail without this fixture since the persistent_notification 35 | # integration is never loaded during a test. 
36 | @pytest.fixture(name="skip_notifications", autouse=True) 37 | def skip_notifications_fixture(): 38 | """Skip notification calls.""" 39 | with patch("homeassistant.components.persistent_notification.async_create"), patch( 40 | "homeassistant.components.persistent_notification.async_dismiss" 41 | ): 42 | yield 43 | 44 | 45 | # This fixture, when used, will result in calls to get_proxy_hosts to return None. To have the call 46 | # return a value, we would add the `return_value=` parameter to the patch call. 47 | @pytest.fixture(name="bypass_get_data") 48 | def bypass_get_data_fixture(): 49 | """Skip calls to get data from API.""" 50 | with patch( 51 | "custom_components.npm_switches.NpmSwitchesApiClient.get_proxy_hosts", 52 | return_value=MOCK_PROXY_HOSTS_DICT, 53 | ): 54 | yield 55 | 56 | 57 | @pytest.fixture(name="bypass_get_data_api") 58 | def bypass_get_data_api_fixture(): 59 | """Skip calls to get data from API.""" 60 | with patch( 61 | "custom_components.npm_switches.NpmSwitchesApiClient.get_proxy_hosts", 62 | return_value=MOCK_PROXY_HOSTS_DICT, 63 | ): 64 | yield 65 | 66 | 67 | @pytest.fixture(name="bypass_new_token") 68 | def bypass_get_new_token_fixture(): 69 | """Skip calls to get data from API.""" 70 | with patch( 71 | "custom_components.npm_switches.NpmSwitchesApiClient.async_get_new_token", 72 | return_value=MOCK_TOKEN, 73 | ): 74 | yield 75 | 76 | 77 | # @pytest.fixture(name="bypass_get_data") 78 | # def bypass_get_data_fixture(): 79 | # """Skip calls to get data from API.""" 80 | # with patch("custom_components.npm_switches.NpmSwitchesApiClient.get_proxy_hosts"): 81 | # yield 82 | 83 | ##I don't know if we need this long-term??? 
84 | # @pytest.fixture(name="bypass_check_token_expiration") 85 | # def bypass_check_token_expiration(): 86 | # """Skip calls to check token expiration.""" 87 | # with patch( 88 | # "custom_components.npm_switches.NpmSwitchesApiClient.async_check_token_expiration" 89 | # ): 90 | # yield 91 | 92 | 93 | # In this fixture, we are forcing calls to async_get_data to raise an Exception. This is useful 94 | # for exception handling. 95 | @pytest.fixture(name="error_on_get_data") 96 | def error_get_data_fixture(): 97 | """Simulate error when retrieving data from API.""" 98 | with patch( 99 | "custom_components.npm_switches.NpmSwitchesApiClient.async_get_data", 100 | side_effect=Exception, 101 | ): 102 | yield 103 | 104 | 105 | @pytest.fixture(name="error_on_get_new_token") 106 | def error_get_new_token_fixture(): 107 | """Simulate error when retrieving data from API.""" 108 | with patch( 109 | "custom_components.npm_switches.NpmSwitchesApiClient.async_get_data", 110 | side_effect=Exception, 111 | ): 112 | yield 113 | -------------------------------------------------------------------------------- /custom_components/npm_switches/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Custom integration to integrate npm_switches with Home Assistant. 
3 | 4 | For more details about this integration, please refer to 5 | https://github.com/InTheDaylight14/nginx-proxy-manager-switches 6 | """ 7 | import asyncio 8 | from datetime import timedelta 9 | import logging 10 | 11 | from homeassistant.config_entries import ConfigEntry 12 | from homeassistant.core import HomeAssistant 13 | from homeassistant.helpers.typing import ConfigType 14 | from homeassistant.exceptions import ConfigEntryNotReady 15 | from homeassistant.helpers.aiohttp_client import async_get_clientsession 16 | from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed 17 | 18 | from .api import NpmSwitchesApiClient 19 | 20 | from .const import ( 21 | CONF_NPM_URL, 22 | CONF_PASSWORD, 23 | CONF_USERNAME, 24 | DOMAIN, 25 | PLATFORMS, 26 | STARTUP_MESSAGE, 27 | CONF_INDLUDE_PROXY, 28 | CONF_INCLUDE_REDIR, 29 | CONF_INCLUDE_STREAMS, 30 | CONF_INCLUDE_DEAD, 31 | CONF_INCLUDE_SENSORS, 32 | CONF_INCLUDE_CERTS 33 | ) 34 | 35 | SCAN_INTERVAL = timedelta(seconds=60) 36 | 37 | _LOGGER: logging.Logger = logging.getLogger(__package__) 38 | 39 | 40 | async def async_setup(hass: HomeAssistant, config: ConfigType): 41 | """Set up this integration using YAML is not supported.""" 42 | return True 43 | 44 | 45 | async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): 46 | """Set up this integration using UI.""" 47 | if hass.data.get(DOMAIN) is None: 48 | hass.data.setdefault(DOMAIN, {}) 49 | _LOGGER.info(STARTUP_MESSAGE) 50 | 51 | username = entry.data.get(CONF_USERNAME) 52 | password = entry.data.get(CONF_PASSWORD) 53 | npm_url = entry.data.get(CONF_NPM_URL) 54 | 55 | session = async_get_clientsession(hass) 56 | client = NpmSwitchesApiClient(username, password, npm_url, session) 57 | 58 | coordinator = NpmSwitchesUpdateCoordinator(hass, client=client, entry=entry) 59 | await coordinator.async_refresh() 60 | 61 | if not coordinator.last_update_success: 62 | raise ConfigEntryNotReady 63 | 64 | hass.data[DOMAIN][entry.entry_id] = 
coordinator 65 | 66 | for platform in PLATFORMS: 67 | if entry.options.get(platform, True): 68 | coordinator.platforms.append(platform) 69 | 70 | await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) 71 | 72 | entry.async_on_unload(entry.add_update_listener(async_reload_entry)) 73 | return True 74 | 75 | 76 | class NpmSwitchesUpdateCoordinator(DataUpdateCoordinator): 77 | """Class to manage fetching data from the API.""" 78 | 79 | def __init__(self, hass: HomeAssistant, client: NpmSwitchesApiClient, entry: ConfigEntry) -> None: 80 | """Initialize.""" 81 | self.api = client 82 | self.platforms = [] 83 | self.entry = entry 84 | 85 | super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL) 86 | 87 | async def _async_update_data(self): 88 | """Update data via library.""" 89 | 90 | if self.entry.data.get(CONF_INDLUDE_PROXY): 91 | try: 92 | await self.api.get_proxy_hosts() 93 | except Exception as exception: 94 | raise UpdateFailed() from exception 95 | 96 | if self.entry.data.get(CONF_INCLUDE_REDIR): 97 | try: 98 | await self.api.get_redirection_hosts() 99 | except Exception as exception: 100 | raise UpdateFailed() from exception 101 | 102 | if self.entry.data.get(CONF_INCLUDE_STREAMS): 103 | try: 104 | await self.api.get_stream_hosts() 105 | except Exception as exception: 106 | raise UpdateFailed() from exception 107 | 108 | if self.entry.data.get(CONF_INCLUDE_DEAD): 109 | try: 110 | await self.api.get_dead_hosts() 111 | except Exception as exception: 112 | raise UpdateFailed() from exception 113 | 114 | if self.entry.data.get(CONF_INCLUDE_CERTS): 115 | await self.api.get_certificates() 116 | 117 | 118 | async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: 119 | """Handle removal of an entry.""" 120 | coordinator = hass.data[DOMAIN][entry.entry_id] 121 | unloaded = all( 122 | await asyncio.gather( 123 | *[ 124 | hass.config_entries.async_forward_entry_unload(entry, platform) 125 | for platform in PLATFORMS 
126 | if platform in coordinator.platforms 127 | ] 128 | ) 129 | ) 130 | if unloaded: 131 | hass.data[DOMAIN].pop(entry.entry_id) 132 | 133 | return unloaded 134 | 135 | 136 | async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: 137 | """Reload config entry.""" 138 | await async_unload_entry(hass, entry) 139 | await async_setup_entry(hass, entry) 140 | -------------------------------------------------------------------------------- /tests/test_switch.py: -------------------------------------------------------------------------------- 1 | """Test integration_blueprint switch.""" 2 | from unittest.mock import call, patch 3 | 4 | from homeassistant.components.switch import SERVICE_TURN_OFF, SERVICE_TURN_ON 5 | from homeassistant.const import ATTR_ENTITY_ID 6 | from homeassistant.core import HomeAssistant 7 | from pytest_homeassistant_custom_component.common import MockConfigEntry 8 | from homeassistant.helpers import entity_registry as er 9 | 10 | from custom_components.npm_switches import async_setup_entry 11 | from custom_components.npm_switches.const import ( 12 | DEFAULT_NAME, 13 | DOMAIN, 14 | SWITCH, 15 | ) 16 | 17 | from .const import ( 18 | MOCK_CONFIG, 19 | MOCK_PROXY_HOSTS_DICT, 20 | MOCK_PROXY_HOSTS_LIST, 21 | MOCK_NPM_URL, 22 | ) 23 | 24 | import pytest 25 | 26 | pytestmark = pytest.mark.asyncio 27 | 28 | 29 | async def test_registry_entries(hass, aioclient_mock, bypass_new_token): 30 | """Tests devices are registered in the entity registry.""" 31 | entry_id = "test" 32 | config_entry = MockConfigEntry( 33 | domain=DOMAIN, data=MOCK_CONFIG, entry_id=entry_id, options=None 34 | ) 35 | 36 | # Mock the api call to get proxy data, this allows setup to complete successfully. 
37 | aioclient_mock.get( 38 | MOCK_NPM_URL + "/api/nginx/proxy-hosts", 39 | json=MOCK_PROXY_HOSTS_LIST, 40 | ) 41 | 42 | assert await async_setup_entry(hass, config_entry) 43 | await hass.async_block_till_done() 44 | 45 | entity_registry = er.async_get(hass) 46 | 47 | entry = entity_registry.async_get("switch.npm_my_domain_com") 48 | assert entry.unique_id == entry_id + "_npm_my_domain_com" 49 | 50 | entry = entity_registry.async_get("switch.npm_other_domain_com") 51 | assert entry.unique_id == entry_id + "_npm_other_domain_com" 52 | 53 | 54 | async def test_switch_services(hass, aioclient_mock, bypass_new_token): 55 | """Test switch services.""" 56 | # Create a mock entry so we don't have to go through config flow 57 | config_entry = MockConfigEntry( 58 | domain=DOMAIN, data=MOCK_CONFIG, entry_id="test", options=None 59 | ) 60 | 61 | # Mock the api call to get proxy data, this allows setup to complete successfully. 62 | aioclient_mock.get( 63 | MOCK_NPM_URL + "/api/nginx/proxy-hosts", 64 | json=MOCK_PROXY_HOSTS_LIST, 65 | ) 66 | 67 | assert await async_setup_entry(hass, config_entry) 68 | await hass.async_block_till_done() 69 | 70 | # Retrieve state of switch entity to test 71 | state = hass.states.get("switch.npm_my_domain_com") 72 | proxy_id = str(state.attributes["id"]) 73 | 74 | # Mock enable and diable api calls for this entity, make them return True for a successful api call. 
75 | aioclient_mock.post( 76 | MOCK_NPM_URL + "/api/nginx/proxy-hosts/" + proxy_id + "/disable", 77 | json=True, 78 | ) 79 | 80 | aioclient_mock.post( 81 | MOCK_NPM_URL + "/api/nginx/proxy-hosts/" + proxy_id + "/enable", 82 | json=True, 83 | ) 84 | 85 | # Ensure the enable/disable functions are called when turning the switch on/off 86 | with patch( 87 | "custom_components.npm_switches.NpmSwitchesApiClient.enable_proxy" 88 | ) as enable_proxy: 89 | await hass.services.async_call( 90 | SWITCH, 91 | SERVICE_TURN_ON, 92 | service_data={ATTR_ENTITY_ID: "switch.npm_my_domain_com"}, 93 | blocking=True, 94 | ) 95 | assert enable_proxy.called 96 | assert enable_proxy.call_args == call( 97 | str(MOCK_PROXY_HOSTS_DICT[proxy_id]["id"]) 98 | ) 99 | 100 | with patch( 101 | "custom_components.npm_switches.NpmSwitchesApiClient.disable_proxy" 102 | ) as disable_proxy: 103 | await hass.services.async_call( 104 | SWITCH, 105 | SERVICE_TURN_OFF, 106 | service_data={ATTR_ENTITY_ID: "switch.npm_my_domain_com"}, 107 | blocking=True, 108 | ) 109 | assert disable_proxy.called 110 | assert disable_proxy.call_args == call( 111 | str(MOCK_PROXY_HOSTS_DICT[proxy_id]["id"]) 112 | ) 113 | 114 | 115 | async def test_switch_states(hass, aioclient_mock, bypass_new_token): 116 | """Test switch states.""" 117 | # Create a mock entry so we don't have to go through config flow 118 | config_entry = MockConfigEntry( 119 | domain=DOMAIN, data=MOCK_CONFIG, entry_id="test", options=None 120 | ) 121 | 122 | # Mock the api call to get proxy data, this allows setup to complete successfully. 
123 | aioclient_mock.get( 124 | MOCK_NPM_URL + "/api/nginx/proxy-hosts", 125 | json=MOCK_PROXY_HOSTS_LIST, 126 | ) 127 | 128 | assert await async_setup_entry(hass, config_entry) 129 | await hass.async_block_till_done() 130 | 131 | for proxy_host in MOCK_PROXY_HOSTS_LIST: 132 | entity_id = "switch.npm_" + proxy_host["domain_names"][0].replace(".", "_") 133 | state = hass.states.get(entity_id) 134 | 135 | if proxy_host["enabled"] == 1: 136 | expected_state = "on" 137 | else: 138 | expected_state = "off" 139 | 140 | assert state.state == expected_state 141 | assert state.attributes["id"] == proxy_host["id"] 142 | assert state.attributes["domain_names"] == proxy_host["domain_names"] 143 | -------------------------------------------------------------------------------- /custom_components/npm_switches/config_flow.py: -------------------------------------------------------------------------------- 1 | """Adds config flow for Blueprint.""" 2 | from homeassistant import config_entries 3 | from homeassistant.core import callback 4 | from homeassistant.helpers.aiohttp_client import async_create_clientsession 5 | from homeassistant.util import slugify 6 | import voluptuous as vol 7 | 8 | from .api import NpmSwitchesApiClient 9 | from .const import ( 10 | CONF_NPM_URL, 11 | CONF_PASSWORD, 12 | CONF_USERNAME, 13 | DOMAIN, 14 | PLATFORMS, 15 | CONF_INDLUDE_PROXY, 16 | CONF_INCLUDE_REDIR, 17 | CONF_INCLUDE_STREAMS, 18 | CONF_INCLUDE_DEAD, 19 | CONF_INCLUDE_SENSORS, 20 | CONF_INCLUDE_CERTS, 21 | DEFAULT_USERNAME, 22 | DEFAULT_PASSWORD, 23 | DEFAULT_NPM_URL, 24 | DEFAULT_INDLUDE_PROXY, 25 | DEFAULT_INCLUDE_REDIR, 26 | DEFAULT_INCLUDE_STREAMS, 27 | DEFAULT_INCLUDE_DEAD, 28 | DEFAULT_INCLUDE_SENSORS, 29 | DEFAULT_INCLUDE_CERTS, 30 | ) 31 | 32 | 33 | class NPMSwitchesFloHandler(config_entries.ConfigFlow, domain=DOMAIN): 34 | """Config flow for NPM Switches.""" 35 | 36 | VERSION = 1 37 | CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL 38 | 39 | def __init__(self): 40 | 
"""Initialize.""" 41 | self._errors = {} 42 | self.clean_npm_url = None 43 | 44 | async def async_step_user(self, user_input=None): 45 | """Handle a flow initialized by the user.""" 46 | self._errors = {} 47 | 48 | # Uncomment the next 2 lines if only a single instance of the integration is allowed: 49 | # if self._async_current_entries(): 50 | # return self.async_abort(reason="single_instance_allowed") 51 | 52 | if user_input is not None: 53 | scheme_end = user_input[CONF_NPM_URL].find("://")+3 54 | self.clean_npm_url = user_input[CONF_NPM_URL][scheme_end:] 55 | user_input["clean_npm_url"] = slugify(f"{self.clean_npm_url}") 56 | 57 | # existing_entry = self._async_entry_for_username(user_input[CONF_NPM_URL]) 58 | existing_entry = self._async_entry_for_username(self.clean_npm_url) 59 | # if existing_entry and not self.reauth: 60 | if existing_entry: 61 | return self.async_abort(reason="already_configured") 62 | 63 | valid = await self._test_credentials( 64 | user_input[CONF_USERNAME], 65 | user_input[CONF_PASSWORD], 66 | user_input[CONF_NPM_URL], 67 | ) 68 | if valid: 69 | return self.async_create_entry( 70 | title=self.clean_npm_url, data=user_input 71 | ) 72 | else: 73 | self._errors["base"] = "auth" 74 | 75 | return await self._show_config_form(user_input) 76 | 77 | # user_input = {} 78 | # # Provide defaults for form 79 | # user_input[CONF_USERNAME] = "" 80 | # user_input[CONF_PASSWORD] = "" 81 | # user_input[CONF_NPM_URL] = "http://" 82 | # user_input[CONF_INDLUDE_PROXY] = True 83 | 84 | return await self._show_config_form() 85 | 86 | # @staticmethod 87 | # @callback 88 | # def async_get_options_flow(config_entry): 89 | # return BlueprintOptionsFlowHandler(config_entry) 90 | 91 | async def _show_config_form(self): # pylint: disable=unused-argument 92 | """Show the configuration form to edit location data.""" 93 | return self.async_show_form( 94 | step_id="user", 95 | data_schema=vol.Schema( 96 | { 97 | vol.Required(CONF_USERNAME, default=DEFAULT_USERNAME): 
str, 98 | vol.Required(CONF_PASSWORD, default=DEFAULT_PASSWORD): str, 99 | vol.Required(CONF_NPM_URL, default=DEFAULT_NPM_URL): str, 100 | vol.Optional(CONF_INCLUDE_SENSORS,default=DEFAULT_INCLUDE_SENSORS): bool, 101 | vol.Optional(CONF_INDLUDE_PROXY,default=DEFAULT_INDLUDE_PROXY): bool, 102 | vol.Optional(CONF_INCLUDE_REDIR,default=DEFAULT_INCLUDE_REDIR): bool, 103 | vol.Optional(CONF_INCLUDE_STREAMS,default=DEFAULT_INCLUDE_STREAMS): bool, 104 | vol.Optional(CONF_INCLUDE_DEAD,default=DEFAULT_INCLUDE_DEAD): bool, 105 | vol.Optional(CONF_INCLUDE_CERTS,default=DEFAULT_INCLUDE_CERTS): bool 106 | } 107 | ), 108 | errors=self._errors, 109 | ) 110 | 111 | async def _test_credentials(self, username, password, npm_url): 112 | """Return true if credentials is valid.""" 113 | try: 114 | session = async_create_clientsession(self.hass) 115 | client = NpmSwitchesApiClient(username, password, npm_url, session) 116 | await client.async_get_new_token() 117 | return True 118 | except Exception: # pylint: disable=broad-except 119 | pass 120 | return False 121 | 122 | @callback 123 | def _async_entry_for_username(self, username): 124 | """Find an existing entry for a username.""" 125 | for entry in self._async_current_entries(): 126 | # if entry.data.get(CONF_NPM_URL) == username: 127 | if entry.title == username: 128 | return entry 129 | return None 130 | 131 | 132 | # class BlueprintOptionsFlowHandler(config_entries.OptionsFlow): 133 | # """Blueprint config flow options handler.""" 134 | 135 | # def __init__(self, config_entry): 136 | # """Initialize HACS options flow.""" 137 | # self.config_entry = config_entry 138 | # self.options = dict(config_entry.options) 139 | 140 | # async def async_step_init(self, user_input=None): # pylint: disable=unused-argument 141 | # """Manage the options.""" 142 | # return await self.async_step_user() 143 | 144 | # async def async_step_user(self, user_input=None): 145 | # """Handle a flow initialized by the user.""" 146 | # if user_input is not 
None: 147 | # self.options.update(user_input) 148 | # return await self._update_options() 149 | 150 | # return self.async_show_form( 151 | # step_id="user", 152 | # data_schema = vol.Schema( 153 | # { 154 | # vol.Optional(CONF_INDLUDE_PROXY,default=self.config_entry.options.get(CONF_INDLUDE_PROXY, DEFAULT_INDLUDE_PROXY),): bool, 155 | # vol.Optional( 156 | # CONF_INCLUDE_REDIR, 157 | # default=self.config_entry.options.get( 158 | # CONF_INCLUDE_REDIR, DEFAULT_INCLUDE_REDIR 159 | # ), 160 | # ): bool, 161 | # vol.Optional( 162 | # CONF_INCLUDE_STREAMS, 163 | # default=self.config_entry.options.get( 164 | # CONF_INCLUDE_STREAMS, DEFAULT_INCLUDE_STREAMS 165 | # ), 166 | # ): bool, 167 | # vol.Optional( 168 | # CONF_INCLUDE_DEAD, 169 | # default=self.config_entry.options.get( 170 | # CONF_INCLUDE_DEAD, DEFAULT_INCLUDE_DEAD 171 | # ), 172 | # ): bool, 173 | # vol.Optional( 174 | # CONF_INCLUDE_SENSORS, 175 | # default=self.config_entry.options.get( 176 | # CONF_INCLUDE_SENSORS, DEFAULT_INCLUDE_SENSORS 177 | # ), 178 | # ): bool, 179 | # } 180 | # ) 181 | # ) 182 | 183 | # async def _update_options(self): 184 | # """Update config entry options.""" 185 | # return self.async_create_entry( 186 | # title=self.config_entry.data.get(CONF_USERNAME), data=self.options 187 | # ) 188 | -------------------------------------------------------------------------------- /custom_components/npm_switches/sensor.py: -------------------------------------------------------------------------------- 1 | """Sensor platform for NPM Switches.""" 2 | from datetime import datetime 3 | 4 | from homeassistant.components.sensor import SensorEntity, SensorDeviceClass 5 | from homeassistant.config_entries import ConfigEntry 6 | from homeassistant.util import slugify, dt 7 | 8 | from .const import DOMAIN 9 | from .entity import NpmSwitchesEntity 10 | from . 
import NpmSwitchesUpdateCoordinator 11 | 12 | 13 | async def async_setup_entry(hass, entry, async_add_entities): 14 | """Setup sensor platform.""" 15 | coordinator = hass.data[DOMAIN][entry.entry_id] 16 | api = hass.data[DOMAIN][entry.entry_id].api 17 | certificates = await api.get_certificates() 18 | entities = [] 19 | if entry.data["include_enable_disable_count_sensors"]: 20 | if entry.data["include_proxy_hosts"]: 21 | entities.append(NpmSwitchesProxySensor(coordinator, entry, "enabled")) 22 | entities.append(NpmSwitchesProxySensor(coordinator, entry, "disabled")) 23 | if entry.data["include_redirection_hosts"]: 24 | entities.append(NpmSwitchesRedirSensor(coordinator, entry, "enabled")) 25 | entities.append(NpmSwitchesRedirSensor(coordinator, entry, "disabled")) 26 | if entry.data["include_stream_hosts"]: 27 | entities.append(NpmSwitchesStreamSensor(coordinator, entry, "enabled")) 28 | entities.append(NpmSwitchesStreamSensor(coordinator, entry, "disabled")) 29 | if entry.data["include_dead_hosts"]: 30 | entities.append(NpmSwitchesDeadSensor(coordinator, entry, "enabled")) 31 | entities.append(NpmSwitchesDeadSensor(coordinator, entry, "disabled")) 32 | 33 | if "include_certificate_sensors" in entry.data: 34 | if entry.data["include_certificate_sensors"]: 35 | for cert in certificates.values(): 36 | entities.append(NpmSwitchesCertSensor(coordinator, entry, cert)) 37 | 38 | async_add_entities(entities, True) 39 | 40 | 41 | class NpmSwitchesProxySensor(NpmSwitchesEntity, SensorEntity): 42 | """NPM Switches Proxy Sensor class.""" 43 | 44 | def __init__( 45 | self, 46 | coordinator: NpmSwitchesUpdateCoordinator, 47 | entry: ConfigEntry, 48 | name: str, 49 | ) -> None: 50 | """Initialize proxy switch entity.""" 51 | super().__init__(coordinator, entry) 52 | self.host_id = name 53 | self.sensor_name = self.host_id 54 | self.name = "Proxy Hosts " + self.sensor_name.capitalize() 55 | self.entity_id = "sensor."+slugify(f"{entry.title} {self.name}") 56 | self._attr_unique_id 
= f"{entry.entry_id} {self.name}" 57 | 58 | @property 59 | def native_value(self): 60 | """Return the native value of the sensor.""" 61 | if self.sensor_name == "enabled": 62 | return self.coordinator.api.num_proxy_enabled 63 | return self.coordinator.api.num_proxy_disabled 64 | 65 | @property 66 | def icon(self): 67 | """Return the icon of the sensor.""" 68 | return "mdi:counter" 69 | 70 | class NpmSwitchesRedirSensor(NpmSwitchesEntity, SensorEntity): 71 | """NPM Switches Redir Sensor class.""" 72 | 73 | def __init__( 74 | self, 75 | coordinator: NpmSwitchesUpdateCoordinator, 76 | entry: ConfigEntry, 77 | name: str, 78 | ) -> None: 79 | """Initialize proxy switch entity.""" 80 | super().__init__(coordinator, entry) 81 | self.host_id = name 82 | self.sensor_name = self.host_id 83 | self.name = "Redirection Hosts " + self.sensor_name.capitalize() 84 | self.entity_id = "sensor." + slugify(f"{entry.title} {self.name}") 85 | self._attr_unique_id = f"{entry.entry_id} {self.name}" 86 | 87 | @property 88 | def native_value(self): 89 | """Return the native value of the sensor.""" 90 | if self.sensor_name == "enabled": 91 | return self.coordinator.api.num_redir_enabled 92 | return self.coordinator.api.num_redir_disabled 93 | 94 | @property 95 | def icon(self): 96 | """Return the icon of the sensor.""" 97 | return "mdi:counter" 98 | 99 | class NpmSwitchesStreamSensor(NpmSwitchesEntity, SensorEntity): 100 | """NPM Switches Stream Sensor class.""" 101 | 102 | def __init__( 103 | self, 104 | coordinator: NpmSwitchesUpdateCoordinator, 105 | entry: ConfigEntry, 106 | name: str, 107 | ) -> None: 108 | """Initialize proxy switch entity.""" 109 | super().__init__(coordinator, entry) 110 | self.host_id = name 111 | self.sensor_name = self.host_id 112 | self.name = "Stream Hosts " + self.sensor_name.capitalize() 113 | self.entity_id = "sensor."+slugify(f"{entry.title} {self.name}") 114 | self._attr_unique_id = f"{entry.entry_id} {self.name}" 115 | 116 | @property 117 | def 
native_value(self): 118 | """Return the native value of the sensor.""" 119 | if self.sensor_name == "enabled": 120 | return self.coordinator.api.num_stream_enabled 121 | return self.coordinator.api.num_stream_disabled 122 | 123 | @property 124 | def icon(self): 125 | """Return the icon of the sensor.""" 126 | return "mdi:counter" 127 | 128 | class NpmSwitchesDeadSensor(NpmSwitchesEntity, SensorEntity): 129 | """NPM Switches Dead Sensor class.""" 130 | 131 | def __init__( 132 | self, 133 | coordinator: NpmSwitchesUpdateCoordinator, 134 | entry: ConfigEntry, 135 | name: str, 136 | ) -> None: 137 | """Initialize proxy switch entity.""" 138 | super().__init__(coordinator, entry) 139 | self.host_id = name 140 | self.sensor_name = self.host_id 141 | self.name = "404 Hosts " + self.sensor_name.capitalize() 142 | self.entity_id = "sensor."+slugify(f"{entry.title} {self.name}") 143 | self._attr_unique_id = f"{entry.entry_id} {self.name}" 144 | 145 | @property 146 | def native_value(self): 147 | """Return the native value of the sensor.""" 148 | if self.sensor_name == "enabled": 149 | return self.coordinator.api.num_dead_enabled 150 | return self.coordinator.api.num_dead_disabled 151 | 152 | @property 153 | def icon(self): 154 | """Return the icon of the sensor.""" 155 | return "mdi:counter" 156 | 157 | class NpmSwitchesCertSensor(NpmSwitchesEntity, SensorEntity): 158 | """NPM Switches Cert Sensor class.""" 159 | 160 | def __init__( 161 | self, 162 | coordinator: NpmSwitchesUpdateCoordinator, 163 | entry: ConfigEntry, 164 | certificate: dict, 165 | ) -> None: 166 | """Initialize Cert expire sensor entity.""" 167 | super().__init__(coordinator, entry) 168 | self.cert_id = str(certificate["id"]) 169 | self.name = "Certificate " + certificate["domain_names"][0] 170 | self.entity_id = "sensor."+slugify(f"{entry.title}")+" Cert "+str(self.cert_id) 171 | self._attr_unique_id = f"{entry.entry_id} {" Cert "} {self.cert_id}" 172 | self._attr_device_class = SensorDeviceClass.TIMESTAMP 
173 | self._expires_on: Optional[datetime] = None 174 | 175 | @property 176 | def native_value(self): 177 | """Return the native value of the sensor.""" 178 | 179 | certificate = self.coordinator.api.get_certificate(self.cert_id) 180 | local_expiration = dt.parse_datetime(certificate["expires_on"]) 181 | self._expires_on = dt.as_utc(local_expiration) 182 | 183 | return self._expires_on 184 | 185 | @property 186 | def icon(self): 187 | """Return the icon of the sensor.""" 188 | return "mdi:lock-clock" 189 | 190 | @property 191 | def extra_state_attributes(self): 192 | """Return device state attributes.""" 193 | certificate = self.coordinator.api.get_certificate(self.cert_id) 194 | 195 | return { 196 | "id": certificate["id"], 197 | "provider": certificate["provider"], 198 | "domain_names": certificate["domain_names"], 199 | "created_on": certificate["created_on"], 200 | "modified_on": certificate["modified_on"], 201 | } -------------------------------------------------------------------------------- /custom_components/npm_switches/switch.py: -------------------------------------------------------------------------------- 1 | """Switch platform for npm_switches.""" 2 | import logging 3 | from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription 4 | from homeassistant.util import slugify 5 | 6 | # from homeassistant.core import HomeAssistant 7 | from homeassistant.config_entries import ConfigEntry 8 | 9 | from .const import DOMAIN 10 | from .entity import NpmSwitchesEntity 11 | from . 
import NpmSwitchesUpdateCoordinator 12 | 13 | _LOGGER = logging.getLogger(__name__) 14 | 15 | 16 | async def async_setup_entry(hass, entry, async_add_entities): 17 | """Setup sensor platform.""" 18 | coordinator = hass.data[DOMAIN][entry.entry_id] 19 | api = hass.data[DOMAIN][entry.entry_id].api 20 | proxy_hosts = await api.get_proxy_hosts() 21 | redir_hosts = await api.get_redirection_hosts() 22 | stream_hosts = await api.get_stream_hosts() 23 | dead_hosts = await api.get_dead_hosts() 24 | entities = [] 25 | 26 | if entry.data["include_proxy_hosts"]: 27 | for proxy_host in proxy_hosts.values(): 28 | entities.append(NpmProxyBinarySwitch(coordinator, entry, proxy_host)) 29 | if entry.data["include_redirection_hosts"]: 30 | for redir_host in redir_hosts.values(): 31 | entities.append(NpmRedirBinarySwitch(coordinator, entry, redir_host)) 32 | if entry.data["include_stream_hosts"]: 33 | for stream_host in stream_hosts.values(): 34 | entities.append(NpmStreamBinarySwitch(coordinator, entry, stream_host)) 35 | if entry.data["include_dead_hosts"]: 36 | for dead_host in dead_hosts.values(): 37 | entities.append(NpmDeadBinarySwitch(coordinator, entry, dead_host)) 38 | 39 | async_add_entities(entities, True) 40 | # async_add_devices([NpmProxyBinarySwitch(coordinator, entry, "20")]) 41 | 42 | 43 | class NpmProxyBinarySwitch(NpmSwitchesEntity, SwitchEntity): 44 | """Switches to enable/disable the Proxy Host Type in NPM""" 45 | 46 | def __init__( 47 | self, 48 | coordinator: NpmSwitchesUpdateCoordinator, 49 | entry: ConfigEntry, 50 | host: dict, 51 | ) -> None: 52 | """Initialize proxy switch entity.""" 53 | super().__init__(coordinator, entry) 54 | self.host = host 55 | self.name = "Proxy " + self.host["domain_names"][0].replace(".", " ").capitalize() 56 | self.entity_id = "switch."+slugify(f"{entry.title} {self.name}") 57 | self._attr_unique_id = f"{entry.entry_id} {self.name}" 58 | self.host_id = str(host["id"]) 59 | self.host_type = "proxy-hosts" 60 | 61 | async def 
async_turn_on(self, **kwargs): # pylint: disable=unused-argument 62 | """Turn on the switch.""" 63 | await self.coordinator.api.enable_host(self.host_id, self.host_type) 64 | self.async_write_ha_state() 65 | self.host = await self.coordinator.api.get_host(self.host_id, self.host_type) 66 | 67 | async def async_turn_off(self, **kwargs): # pylint: disable=unused-argument 68 | """Turn off the switch.""" 69 | await self.coordinator.api.disable_host(self.host_id, self.host_type) 70 | self.async_write_ha_state() 71 | self.host = await self.coordinator.api.get_host(self.host_id, self.host_type) 72 | 73 | # @property 74 | # def name(self): 75 | # """Return the name of the switch.""" 76 | # return "NPM " + self.host["domain_names"][0].replace(".", " ").capitalize() 77 | 78 | @property 79 | def icon(self): 80 | """Return the icon of this switch.""" 81 | if self.coordinator.api.is_host_enabled(self.host_id, self.host_type): 82 | return "mdi:check-network" 83 | return "mdi:close-network" 84 | 85 | @property 86 | def is_on(self): 87 | """Return true if the switch is on.""" 88 | return self.coordinator.api.is_host_enabled(self.host_id, self.host_type) 89 | 90 | @property 91 | def extra_state_attributes(self): 92 | """Return device state attributes.""" 93 | return { 94 | "id": self.host["id"], 95 | "domain_names": self.host["domain_names"], 96 | } 97 | 98 | class NpmRedirBinarySwitch(NpmSwitchesEntity, SwitchEntity): 99 | """Switches to enable/disable the Redir Host Type in NPM""" 100 | 101 | def __init__( 102 | self, 103 | coordinator: NpmSwitchesUpdateCoordinator, 104 | entry: ConfigEntry, 105 | host: dict, 106 | ) -> None: 107 | """Initialize redir switch entity.""" 108 | super().__init__(coordinator, entry) 109 | self.host = host 110 | self.name = "Redirect " + self.host["domain_names"][0].replace(".", " ").capitalize() 111 | self.entity_id = "switch."+slugify(f"{entry.title} {self.name}") 112 | self._attr_unique_id = f"{entry.entry_id} {self.name}" 113 | self.host_type = 
"redirection-hosts" 114 | self.host_id = str(host["id"]) 115 | 116 | async def async_turn_on(self, **kwargs): # pylint: disable=unused-argument 117 | """Turn on the switch.""" 118 | await self.coordinator.api.enable_host(self.host_id, self.host_type) 119 | self.async_write_ha_state() 120 | self.host = await self.coordinator.api.get_host(self.host_id, self.host_type) 121 | 122 | async def async_turn_off(self, **kwargs): # pylint: disable=unused-argument 123 | """Turn off the switch.""" 124 | await self.coordinator.api.disable_host(self.host_id, self.host_type) 125 | self.async_write_ha_state() 126 | self.host = await self.coordinator.api.get_host(self.host_id, self.host_type) 127 | 128 | @property 129 | def icon(self): 130 | """Return the icon of this switch.""" 131 | if self.coordinator.api.is_host_enabled(self.host_id, self.host_type): 132 | return "mdi:check-network" 133 | return "mdi:close-network" 134 | 135 | @property 136 | def is_on(self): 137 | """Return true if the switch is on.""" 138 | return self.coordinator.api.is_host_enabled(self.host_id, self.host_type) 139 | 140 | @property 141 | def extra_state_attributes(self): 142 | """Return device state attributes.""" 143 | return { 144 | "id": self.host["id"], 145 | "domain_names": self.host["domain_names"], 146 | # "forward_domain_name": self.host["forward_domain_names"], 147 | } 148 | 149 | class NpmStreamBinarySwitch(NpmSwitchesEntity, SwitchEntity): 150 | """Switches to enable/disable the Redir Host Type in NPM""" 151 | 152 | def __init__( 153 | self, 154 | coordinator: NpmSwitchesUpdateCoordinator, 155 | entry: ConfigEntry, 156 | host: dict, 157 | ) -> None: 158 | """Initialize steam switch entity.""" 159 | super().__init__(coordinator, entry) 160 | self.host = host 161 | self.name = "Stream " + str(self.host["incoming_port"]) 162 | self.entity_id = "switch."+slugify(f"{entry.title} {self.name}") 163 | self._attr_unique_id = f"{entry.entry_id} {self.name}" 164 | self.host_type = "streams" 165 | 
self.host_id = str(host["id"]) 166 | 167 | async def async_turn_on(self, **kwargs): # pylint: disable=unused-argument 168 | """Turn on the switch.""" 169 | await self.coordinator.api.enable_host(self.host_id, self.host_type) 170 | self.async_write_ha_state() 171 | self.host = await self.coordinator.api.get_host(self.host_id, self.host_type) 172 | 173 | async def async_turn_off(self, **kwargs): # pylint: disable=unused-argument 174 | """Turn off the switch.""" 175 | await self.coordinator.api.disable_host(self.host_id, self.host_type) 176 | self.async_write_ha_state() 177 | self.host = await self.coordinator.api.get_host(self.host_id, self.host_type) 178 | 179 | @property 180 | def icon(self): 181 | """Return the icon of this switch.""" 182 | if self.coordinator.api.is_host_enabled(self.host_id, self.host_type): 183 | return "mdi:check-network" 184 | return "mdi:close-network" 185 | 186 | @property 187 | def is_on(self): 188 | """Return true if the switch is on.""" 189 | return self.coordinator.api.is_host_enabled(self.host_id, self.host_type) 190 | 191 | @property 192 | def extra_state_attributes(self): 193 | """Return device state attributes.""" 194 | return { 195 | "id": self.host["id"], 196 | "forwarding_host": self.host["forwarding_host"], 197 | "forwarding_port": self.host["forwarding_port"], 198 | # "forward_domain_name": self.host["forward_domain_names"], 199 | } 200 | 201 | class NpmDeadBinarySwitch(NpmSwitchesEntity, SwitchEntity): 202 | """Switches to enable/disable the Dead Host Type in NPM""" 203 | 204 | def __init__( 205 | self, 206 | coordinator: NpmSwitchesUpdateCoordinator, 207 | entry: ConfigEntry, 208 | host: dict, 209 | ) -> None: 210 | """Initialize redir switch entity.""" 211 | super().__init__(coordinator, entry) 212 | self.host = host 213 | self.name = "404 " + self.host["domain_names"][0].replace(".", " ").capitalize() 214 | self.entity_id = "switch."+slugify(f"{entry.title} {self.name}") 215 | self._attr_unique_id = f"{entry.entry_id} 
{self.name}" 216 | self.host_type = "dead-hosts" 217 | self.host_id = str(host["id"]) 218 | 219 | async def async_turn_on(self, **kwargs): # pylint: disable=unused-argument 220 | """Turn on the switch.""" 221 | await self.coordinator.api.enable_host(self.host_id, self.host_type) 222 | self.async_write_ha_state() 223 | self.host = await self.coordinator.api.get_host(self.host_id, self.host_type) 224 | 225 | async def async_turn_off(self, **kwargs): # pylint: disable=unused-argument 226 | """Turn off the switch.""" 227 | await self.coordinator.api.disable_host(self.host_id, self.host_type) 228 | self.async_write_ha_state() 229 | self.host = await self.coordinator.api.get_host(self.host_id, self.host_type) 230 | 231 | @property 232 | def icon(self): 233 | """Return the icon of this switch.""" 234 | if self.coordinator.api.is_host_enabled(self.host_id, self.host_type): 235 | return "mdi:check-network" 236 | return "mdi:close-network" 237 | 238 | @property 239 | def is_on(self): 240 | """Return true if the switch is on.""" 241 | return self.coordinator.api.is_host_enabled(self.host_id, self.host_type) 242 | 243 | @property 244 | def extra_state_attributes(self): 245 | """Return device state attributes.""" 246 | return { 247 | "id": self.host["id"], 248 | "domain_names": self.host["domain_names"], 249 | # "forward_domain_name": self.host["forward_domain_names"], 250 | } -------------------------------------------------------------------------------- /custom_components/npm_switches/api.py: -------------------------------------------------------------------------------- 1 | """Sample API Client.""" 2 | import logging 3 | import asyncio 4 | import socket 5 | 6 | # from typing import Optional 7 | # from datetime import datetime 8 | import aiohttp 9 | import async_timeout 10 | 11 | from homeassistant.util import dt 12 | 13 | TIMEOUT = 10 14 | 15 | 16 | _LOGGER: logging.Logger = logging.getLogger(__package__) 17 | 18 | HEADERS = {"Content-type": "application/json; 
charset=UTF-8"} 19 | 20 | 21 | class NpmSwitchesApiClient: 22 | """Handle api calls to NPM instance.""" 23 | 24 | def __init__( 25 | self, username: str, password: str, npm_url: str, session: aiohttp.ClientSession 26 | ) -> None: 27 | """NPM API Client.""" 28 | self._username = username 29 | self._password = password 30 | self._session = session 31 | self._npm_url = npm_url 32 | self._token = None 33 | self._token_expires = dt.utcnow() 34 | self._headers = None 35 | self.proxy_hosts_data = None 36 | self.redir_hosts_data = None 37 | self.stream_hosts_data = None 38 | self.dead_hosts_data = None 39 | self.certificates_data = None 40 | self.num_proxy_enabled = 0 41 | self.num_proxy_disabled = 0 42 | self.num_redir_enabled = 0 43 | self.num_redir_disabled = 0 44 | self.num_stream_enabled = 0 45 | self.num_stream_disabled = 0 46 | self.num_dead_enabled = 0 47 | self.num_dead_disabled = 0 48 | 49 | async def async_get_data(self) -> dict: 50 | """Get data from the API.""" 51 | url = "http://test:81" 52 | return await self.api_wrapper("get", url) 53 | 54 | # async def async_set_title(self, value: str) -> None: 55 | # """Get data from the API.""" 56 | # url = "https://jsonplaceholder.typicode.com/posts/1" 57 | # await self.api_wrapper("patch", url, data={"title": value}, headers=HEADERS) 58 | 59 | async def get_proxy_hosts(self) -> list(): 60 | """Get a list of proxy-hosts.""" 61 | self.num_proxy_enabled = 0 62 | self.num_proxy_disabled = 0 63 | 64 | if self._token is None: 65 | await self.async_get_new_token() 66 | url = self._npm_url + "/api/nginx/proxy-hosts" 67 | proxy_hosts_list = await self.api_wrapper("get", url, headers=self._headers) 68 | self.proxy_hosts_data = {} 69 | for proxy in proxy_hosts_list: 70 | self.proxy_hosts_data[str(proxy["id"])] = proxy 71 | if proxy["enabled"] == 1: 72 | self.num_proxy_enabled += 1 73 | else: 74 | self.num_proxy_disabled += 1 75 | 76 | return self.proxy_hosts_data 77 | 78 | async def get_redirection_hosts(self) -> list(): 79 | 
"""Get a list of redirection hosts.""" 80 | self.num_redir_enabled = 0 81 | self.num_redir_disabled = 0 82 | 83 | if self._token is None: 84 | await self.async_get_new_token() 85 | url = self._npm_url + "/api/nginx/redirection-hosts" 86 | redirection_hosts_list = await self.api_wrapper("get", url, headers=self._headers) 87 | 88 | self.redir_hosts_data = {} 89 | for redirection in redirection_hosts_list: 90 | self.redir_hosts_data[str(redirection["id"])] = redirection 91 | if redirection["enabled"] == 1: 92 | self.num_redir_enabled += 1 93 | else: 94 | self.num_redir_disabled += 1 95 | return self.redir_hosts_data 96 | 97 | async def get_stream_hosts(self) -> list(): 98 | """Get a list of stream hosts.""" 99 | self.num_stream_enabled = 0 100 | self.num_stream_disabled = 0 101 | 102 | if self._token is None: 103 | await self.async_get_new_token() 104 | url = self._npm_url + "/api/nginx/streams" 105 | stream_hosts_list = await self.api_wrapper("get", url, headers=self._headers) 106 | 107 | self.stream_hosts_data = {} 108 | for stream in stream_hosts_list: 109 | self.stream_hosts_data[str(stream["id"])] = stream 110 | if stream["enabled"] == 1: 111 | self.num_stream_enabled += 1 112 | else: 113 | self.num_stream_disabled += 1 114 | return self.stream_hosts_data 115 | 116 | async def get_dead_hosts(self) -> list(): 117 | """Get a list of stream hosts.""" 118 | self.num_dead_enabled = 0 119 | self.num_dead_disabled = 0 120 | 121 | if self._token is None: 122 | await self.async_get_new_token() 123 | url = self._npm_url + "/api/nginx/dead-hosts" 124 | dead_hosts_list = await self.api_wrapper("get", url, headers=self._headers) 125 | 126 | self.dead_hosts_data = {} 127 | for dead in dead_hosts_list: 128 | self.dead_hosts_data[str(dead["id"])] = dead 129 | if dead["enabled"] == 1: 130 | self.num_dead_enabled += 1 131 | else: 132 | self.num_dead_disabled += 1 133 | return self.dead_hosts_data 134 | 135 | async def get_host(self, host_id: int, host_type: str) -> dict: 136 | 
"""Get a host by id and type. 137 | Host Type: proxy-hosts, redirection-hosts, streams, dead-hosts""" 138 | if host_type == "proxy-hosts": 139 | return self.proxy_hosts_data[host_id] 140 | elif host_type == "redirection-hosts": 141 | return self.redir_hosts_data[host_id] 142 | elif host_type == "streams": 143 | return self.stream_hosts_data[host_id] 144 | elif host_type == "dead-hosts": 145 | return self.dead_hosts_data[host_id] 146 | else: 147 | return None 148 | 149 | async def async_get_new_token(self) -> None: 150 | """Get a new token.""" 151 | url = self._npm_url + "/api/tokens" 152 | response = await self.api_wrapper( 153 | "token", 154 | url, 155 | data={ 156 | "identity": self._username, 157 | "secret": self._password, 158 | }, 159 | ) 160 | 161 | self._token = response["token"] 162 | self._token_expires = dt.parse_datetime(response["expires"]) 163 | self._headers = { 164 | "Authorization": "Bearer " + self._token, 165 | } 166 | 167 | async def async_check_token_expiration(self) -> None: 168 | """Check if token expired.""" 169 | utcnow = dt.utcnow() 170 | 171 | if utcnow > self._token_expires: 172 | await self.async_get_new_token() 173 | 174 | async def enable_host(self, host_id: str, host_type: str) -> None: 175 | """Enable the passed host 176 | Host Type: proxy-hosts, redirection-hosts, streams, dead-hosts""" 177 | url = self._npm_url + "/api/nginx/" + host_type + "/" + host_id + "/enable" 178 | response = await self.api_wrapper("post", url, headers=self._headers) 179 | 180 | if response is True: 181 | if host_type == "proxy-hosts": 182 | self.proxy_hosts_data[host_id]["enabled"] = 1 183 | if host_type == "redirection-hosts": 184 | self.redir_hosts_data[host_id]["enabled"] = 1 185 | if host_type == "streams": 186 | self.stream_hosts_data[host_id]["enabled"] = 1 187 | if host_type == "dead-hosts": 188 | self.dead_hosts_data[host_id]["enabled"] = 1 189 | elif "error" in response.keys(): 190 | _LOGGER.error( 191 | "Error enabling host type %s host id %s. 
Error message: '%s'", 192 | host_type, 193 | host_id, 194 | response["error"]["message"], 195 | ) 196 | 197 | async def disable_host(self, host_id: str, host_type: str) -> None: 198 | """Disable the passed host. 199 | Host Type: proxy-hosts, redirection-hosts, streams, dead-hosts""" 200 | url = self._npm_url + "/api/nginx/" +host_type+ "/" + host_id + "/disable" 201 | 202 | response = await self.api_wrapper("post", url, headers=self._headers) 203 | if response is True: 204 | if host_type == "proxy-hosts": 205 | self.proxy_hosts_data[host_id]["enabled"] = 0 206 | if host_type == "redirection-hosts": 207 | self.redir_hosts_data[host_id]["enabled"] = 0 208 | if host_type == "streams": 209 | self.stream_hosts_data[host_id]["enabled"] = 0 210 | if host_type == "dead-hosts": 211 | self.dead_hosts_data[host_id]["enabled"] = 0 212 | elif "error" in response.keys(): 213 | _LOGGER.error( 214 | "Error enabling host type %s host id %s. Error message: '%s'", 215 | host_type, 216 | host_id, 217 | response["error"]["message"], 218 | ) 219 | 220 | def is_host_enabled(self, host_id: str, host_type: str) -> bool: 221 | """Return True if the proxy is enabled. 
222 | Host Type: proxy-hosts, redirection-hosts, streams, dead-hosts""" 223 | if host_type == "proxy-hosts": 224 | if self.proxy_hosts_data[host_id]["enabled"] == 1: 225 | return True 226 | else: 227 | return False 228 | elif host_type == "redirection-hosts": 229 | if self.redir_hosts_data[host_id]["enabled"] == 1: 230 | return True 231 | else: 232 | return False 233 | elif host_type == "streams": 234 | if self.stream_hosts_data[host_id]["enabled"] == 1: 235 | return True 236 | else: 237 | return False 238 | elif host_type == "dead-hosts": 239 | if self.dead_hosts_data[host_id]["enabled"] == 1: 240 | return True 241 | else: 242 | return False 243 | else: 244 | return None 245 | 246 | async def get_certificates(self) -> list(): 247 | """Get a list of cirtificates.""" 248 | 249 | if self._token is None: 250 | await self.async_get_new_token() 251 | url = self._npm_url + "/api/nginx/certificates" 252 | certificate_list = await self.api_wrapper("get", url, headers=self._headers) 253 | 254 | self.certificates_data = {} 255 | for cert in certificate_list: 256 | self.certificates_data[str(cert["id"])] = cert 257 | 258 | return self.certificates_data 259 | 260 | def get_certificate(self, certificate_id: int) -> dict: 261 | """Get a single certificate""" 262 | return self.certificates_data[certificate_id] 263 | 264 | async def renew_certificate(self, certificate_id: int) -> None: 265 | """Renew the passed certificate""" 266 | url = self._npm_url + "/api/nginx/certificates/" + str(certificate_id) + "/renew" 267 | response = await self.api_wrapper("post", url, headers=self._headers) 268 | 269 | if response is True: 270 | await self.get_certificates() 271 | elif "error" in response.keys(): 272 | _LOGGER.error( 273 | "Error renewing certificate. 
Error message: '%s'", 274 | response["error"]["message"], 275 | ) 276 | 277 | @property 278 | def get_num_proxy_enabled(self) -> int: 279 | """Return the num enabled proxy hosts.""" 280 | return self.num_proxy_enabled 281 | 282 | @property 283 | def get_num_proxy_disabled(self) -> int: 284 | """Return the num disabled proxy hosts.""" 285 | return self.num_proxy_disabled 286 | 287 | @property 288 | def get_npm_url(self) -> str: 289 | """Return the npm url.""" 290 | return self._npm_url 291 | 292 | async def api_wrapper( 293 | self, method: str, url: str, data: dict = None, headers: dict = None 294 | ) -> dict: 295 | """Get information from the API.""" 296 | if method != "token": 297 | await self.async_check_token_expiration() 298 | 299 | try: 300 | async with async_timeout.timeout(TIMEOUT): 301 | if method == "get": 302 | response = await self._session.get(url, headers=headers) 303 | return await response.json() 304 | 305 | elif method == "put": 306 | await self._session.put(url, headers=headers, json=data) 307 | 308 | elif method == "patch": 309 | await self._session.patch(url, headers=headers, json=data) 310 | 311 | elif method == "post" or method == "token": 312 | response = await self._session.post(url, headers=headers, json=data) 313 | return await response.json() 314 | 315 | except asyncio.TimeoutError as exception: 316 | _LOGGER.error( 317 | "Timeout error fetching information from %s - %s", 318 | url, 319 | exception, 320 | ) 321 | 322 | except (KeyError, TypeError) as exception: 323 | _LOGGER.error( 324 | "Error parsing information from %s - %s", 325 | url, 326 | exception, 327 | ) 328 | except (aiohttp.ClientError, socket.gaierror) as exception: 329 | _LOGGER.error( 330 | "Error fetching information from %s - %s", 331 | url, 332 | exception, 333 | ) 334 | except Exception as exception: # pylint: disable=broad-except 335 | _LOGGER.error("Something really wrong happened! 
- %s", exception) 336 | --------------------------------------------------------------------------------