├── .devcontainer.json ├── .gitattributes ├── .github ├── ISSUE_TEMPLATE │ ├── feature_request.md │ └── issue.md └── workflows │ ├── cron.yaml │ ├── pull.yml │ └── push.yml ├── .gitignore ├── .vscode ├── launch.json ├── settings.json └── tasks.json ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── config └── configuration.yaml ├── custom_components ├── __init__.py └── npm_switches │ ├── __init__.py │ ├── api.py │ ├── config_flow.py │ ├── const.py │ ├── entity.py │ ├── manifest.json │ ├── sensor.py │ ├── switch.py │ └── translations │ ├── en.json │ ├── fr.json │ └── nb.json ├── hacs.json ├── info.md ├── requirements.txt ├── requirements_dev.txt ├── requirements_test.txt ├── scripts ├── develop ├── lint └── setup ├── setup.cfg └── tests ├── README.md ├── __init__.py ├── conftest.py ├── const.py ├── pytest.ini ├── test_api.py ├── test_config_flow.py ├── test_init.py ├── test_sensor.py └── test_switch.py /.devcontainer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ludeeus/integration_blueprint", 3 | "image": "mcr.microsoft.com/devcontainers/python:3.12", 4 | "postCreateCommand": "scripts/setup", 5 | "forwardPorts": [ 6 | 8123 7 | ], 8 | "portsAttributes": { 9 | "8123": { 10 | "label": "Home Assistant", 11 | "onAutoForward": "notify" 12 | } 13 | }, 14 | "customizations": { 15 | "vscode": { 16 | "extensions": [ 17 | "charliermarsh.ruff", 18 | "github.vscode-pull-request-github", 19 | "ms-python.python", 20 | "ms-python.vscode-pylance", 21 | "ryanluker.vscode-coverage-gutters" 22 | ], 23 | "settings": { 24 | "files.eol": "\n", 25 | "editor.tabSize": 4, 26 | "editor.formatOnPaste": true, 27 | "editor.formatOnSave": true, 28 | "editor.formatOnType": false, 29 | "files.trimTrailingWhitespace": true, 30 | "python.analysis.typeCheckingMode": "basic", 31 | "python.analysis.autoImportCompletions": true, 32 | "python.defaultInterpreterPath": "/usr/local/bin/python", 33 | "[python]": { 34 | "editor.defaultFormatter": 
"charliermarsh.ruff" 35 | } 36 | } 37 | } 38 | }, 39 | "remoteUser": "vscode", 40 | "features": {} 41 | } -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto eol=lf -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | 5 | --- 6 | 7 | **Is your feature request related to a problem? Please describe.** 8 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 9 | 10 | **Describe the solution you'd like** 11 | A clear and concise description of what you want to happen. 12 | 13 | **Describe alternatives you've considered** 14 | A clear and concise description of any alternative solutions or features you've considered. 15 | 16 | **Additional context** 17 | Add any other context or screenshots about the feature request here. -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/issue.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Issue 3 | about: Create a report to help us improve 4 | 5 | --- 6 | 7 | 16 | 17 | ## Version of the custom_component 18 | 21 | 22 | ## Configuration 23 | 24 | ```yaml 25 | 26 | Add your logs here. 27 | 28 | ``` 29 | 30 | ## Describe the bug 31 | A clear and concise description of what the bug is. 32 | 33 | 34 | ## Debug log 35 | 36 | 37 | 38 | ```text 39 | 40 | Add your logs here. 
41 | 42 | ``` -------------------------------------------------------------------------------- /.github/workflows/cron.yaml: -------------------------------------------------------------------------------- 1 | name: Cron actions 2 | 3 | on: 4 | schedule: 5 | - cron: '0 0 * * *' 6 | 7 | jobs: 8 | validate: 9 | runs-on: "ubuntu-latest" 10 | name: Validate 11 | steps: 12 | - uses: "actions/checkout@v2" 13 | 14 | - name: HACS validation 15 | uses: "hacs/action@main" 16 | with: 17 | category: "integration" 18 | ignore: brands 19 | 20 | - name: Hassfest validation 21 | uses: "home-assistant/actions/hassfest@master" -------------------------------------------------------------------------------- /.github/workflows/pull.yml: -------------------------------------------------------------------------------- 1 | name: Pull actions 2 | 3 | on: 4 | pull_request: 5 | 6 | jobs: 7 | validate: 8 | runs-on: "ubuntu-latest" 9 | name: Validate 10 | steps: 11 | - uses: "actions/checkout@v2" 12 | 13 | - name: HACS validation 14 | uses: "hacs/action@main" 15 | with: 16 | category: "integration" 17 | ignore: brands 18 | 19 | - name: Hassfest validation 20 | uses: "home-assistant/actions/hassfest@master" 21 | 22 | style: 23 | runs-on: "ubuntu-latest" 24 | name: Check style formatting 25 | steps: 26 | - uses: "actions/checkout@v2" 27 | - uses: "actions/setup-python@v1" 28 | with: 29 | python-version: "3.x" 30 | - run: python3 -m pip install black 31 | - run: black . 
32 | 33 | tests: 34 | runs-on: "ubuntu-latest" 35 | name: Run tests 36 | steps: 37 | - name: Check out code from GitHub 38 | uses: "actions/checkout@v2" 39 | - name: Setup Python 40 | uses: "actions/setup-python@v1" 41 | with: 42 | python-version: "3.12" 43 | - name: Install requirements 44 | run: python3 -m pip install -r requirements_test.txt 45 | - name: Run tests 46 | run: | 47 | pytest \ 48 | -qq \ 49 | --timeout=9 \ 50 | --durations=10 \ 51 | -n auto \ 52 | --cov custom_components.npm_switches \ 53 | -o console_output_style=count \ 54 | -p no:sugar \ 55 | tests 56 | -------------------------------------------------------------------------------- /.github/workflows/push.yml: -------------------------------------------------------------------------------- 1 | name: Push actions 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | - dev 8 | 9 | jobs: 10 | validate: 11 | runs-on: "ubuntu-latest" 12 | name: Validate 13 | steps: 14 | - uses: "actions/checkout@v2" 15 | 16 | - name: HACS validation 17 | uses: "hacs/action@main" 18 | with: 19 | category: "integration" 20 | ignore: brands 21 | 22 | - name: Hassfest validation 23 | uses: "home-assistant/actions/hassfest@master" 24 | 25 | style: 26 | runs-on: "ubuntu-latest" 27 | name: Check style formatting 28 | steps: 29 | - uses: "actions/checkout@v2" 30 | - uses: "actions/setup-python@v1" 31 | with: 32 | python-version: "3.x" 33 | - run: python3 -m pip install black 34 | - run: black . 
35 | 36 | tests: 37 | runs-on: "ubuntu-latest" 38 | name: Run tests 39 | steps: 40 | - name: Check out code from GitHub 41 | uses: "actions/checkout@v2" 42 | - name: Setup Python 43 | uses: "actions/setup-python@v1" 44 | with: 45 | python-version: "3.12" 46 | - name: Install requirements 47 | run: python3 -m pip install -r requirements_test.txt 48 | - name: Run tests 49 | run: | 50 | pytest \ 51 | -qq \ 52 | --timeout=9 \ 53 | --durations=10 \ 54 | -n auto \ 55 | --cov custom_components.npm_switches \ 56 | -o console_output_style=count \ 57 | -p no:sugar \ 58 | tests 59 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | pythonenv* 3 | venv 4 | .venv 5 | .coverage 6 | .idea 7 | config -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 3 | "version": "0.2.0", 4 | "configurations": [ 5 | { 6 | // Example of attaching to local debug server 7 | "name": "Python: Attach Local", 8 | "type": "python", 9 | "request": "attach", 10 | "port": 5678, 11 | "host": "localhost", 12 | "pathMappings": [ 13 | { 14 | "localRoot": "${workspaceFolder}", 15 | "remoteRoot": "." 
16 | } 17 | ] 18 | }, 19 | { 20 | // Example of attaching to my production server 21 | "name": "Python: Attach Remote", 22 | "type": "python", 23 | "request": "attach", 24 | "port": 5678, 25 | "host": "homeassistant.local", 26 | "pathMappings": [ 27 | { 28 | "localRoot": "${workspaceFolder}", 29 | "remoteRoot": "/usr/src/homeassistant" 30 | } 31 | ] 32 | } 33 | ] 34 | } 35 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.linting.pylintEnabled": true, 3 | "python.linting.enabled": true, 4 | "python.pythonPath": "/usr/local/bin/python", 5 | "files.associations": { 6 | "*.yaml": "home-assistant" 7 | } 8 | } -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "2.0.0", 3 | "tasks": [ 4 | { 5 | "label": "Run Home Assistant on port 8123", 6 | "type": "shell", 7 | "command": "scripts/develop", 8 | "problemMatcher": [] 9 | } 10 | ] 11 | } -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contribution guidelines 2 | 3 | Contributing to this project should be as easy and transparent as possible, whether it's: 4 | 5 | - Reporting a bug 6 | - Discussing the current state of the code 7 | - Submitting a fix 8 | - Proposing new features 9 | 10 | ## Github is used for everything 11 | 12 | Github is used to host code, to track issues and feature requests, as well as accept pull requests. 13 | 14 | Pull requests are the best way to propose changes to the codebase. 15 | 16 | 1. Fork the repo and create your branch from `master`. 17 | 2. If you've changed something, update the documentation. 18 | 3. Make sure your code lints (using black). 19 | 4. 
Test your contribution.
62 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Joakim Sørensen @ludeeus 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # NPM Switches Custom Integration 2 | 3 | [![GitHub Release][releases-shield]][releases] 4 | 5 | 6 | [![GitHub Activity][commits-shield]][commits] 7 | 8 | [![License][license-shield]][license] 9 | [![hacs][hacsbadge]][hacs] 10 | ![Project Maintenance][maintenance-shield] 11 | [![Community Forum][forum-shield]][forum] 12 | 13 | 14 | 15 | ## Installation 16 | 17 | ### Recomended via HACs 18 | 19 | 1. 
'NPM Switches' offers integration with a local Nginx Proxy Manager server/instance. It will log in to the NPM server and retrieve a token every 24 hours. This token is used to query the state of each proxy host every 60 seconds. It is also used to enable or disable a proxy host via a local API call to the NPM server.
40 | 41 | If you want to contribute to this please read the [Contribution guidelines](CONTRIBUTING.md) 42 | 43 | _Component built with [integration_blueprint][integration_blueprint]._ 44 | 45 | *** 46 | 47 | [integration_blueprint]: https://github.com/custom-components/integration_blueprint 48 | [commits-shield]: https://img.shields.io/github/commit-activity/w/InTheDaylight14/nginx-proxy-manager-switches?style=for-the-badge 49 | [commits]: https://github.com/InTheDaylight14/nginx-proxy-manager-switches/commits/master 50 | [hacs]: https://github.com/custom-components/hacs 51 | [hacsbadge]: https://img.shields.io/badge/HACS-Custom-orange.svg?style=for-the-badge 52 | 53 | [forum-shield]: https://img.shields.io/badge/community-forum-brightgreen.svg?style=for-the-badge 54 | [forum]: https://community.home-assistant.io/ 55 | [license]: LICENSE 56 | [license-shield]: https://img.shields.io/github/license/InTheDaylight14/nginx-proxy-manager-switches?style=for-the-badge 57 | [maintenance-shield]: https://img.shields.io/badge/maintainer-@InTheDaylight14-blue.svg?style=for-the-badge 58 | [releases-shield]: https://img.shields.io/github/release/InTheDaylight14/nginx-proxy-manager-switches?style=for-the-badge 59 | [releases]: https://github.com/InTheDaylight14/nginx-proxy-manager-switches/releases 60 | [add-integration]: https://my.home-assistant.io/redirect/config_flow_start?domain=npm_switches 61 | [add-integration-badge]: https://my.home-assistant.io/badges/config_flow_start.svg 62 | [download-all]: https://img.shields.io/github/downloads/InTheDaylight14/nginx-proxy-manager-switches/total?style=for-the-badge 63 | [download-latest]: https://img.shields.io/github/downloads/InTheDaylight14/nginx-proxy-manager-switches/latest/total?style=for-the-badge 64 | -------------------------------------------------------------------------------- /config/configuration.yaml: -------------------------------------------------------------------------------- 1 | # 
https://www.home-assistant.io/integrations/default_config/ 2 | default_config: 3 | 4 | # https://www.home-assistant.io/integrations/homeassistant/ 5 | homeassistant: 6 | debug: true 7 | 8 | # https://www.home-assistant.io/integrations/logger/ 9 | logger: 10 | default: info 11 | logs: 12 | custom_components.npm_switches: debug 13 | -------------------------------------------------------------------------------- /custom_components/__init__.py: -------------------------------------------------------------------------------- 1 | """Custom components module.""" 2 | -------------------------------------------------------------------------------- /custom_components/npm_switches/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Custom integration to integrate npm_switches with Home Assistant. 3 | 4 | For more details about this integration, please refer to 5 | https://github.com/InTheDaylight14/nginx-proxy-manager-switches 6 | """ 7 | import asyncio 8 | from datetime import timedelta 9 | import logging 10 | 11 | from homeassistant.config_entries import ConfigEntry 12 | from homeassistant.core import HomeAssistant 13 | from homeassistant.helpers.typing import ConfigType 14 | from homeassistant.exceptions import ConfigEntryNotReady 15 | from homeassistant.helpers.aiohttp_client import async_get_clientsession 16 | from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed 17 | 18 | from .api import NpmSwitchesApiClient 19 | 20 | from .const import ( 21 | CONF_NPM_URL, 22 | CONF_PASSWORD, 23 | CONF_USERNAME, 24 | DOMAIN, 25 | PLATFORMS, 26 | STARTUP_MESSAGE, 27 | ) 28 | 29 | SCAN_INTERVAL = timedelta(seconds=60) 30 | 31 | _LOGGER: logging.Logger = logging.getLogger(__package__) 32 | 33 | 34 | async def async_setup(hass: HomeAssistant, config: ConfigType): 35 | """Set up this integration using YAML is not supported.""" 36 | return True 37 | 38 | 39 | async def async_setup_entry(hass: 
class NpmSwitchesUpdateCoordinator(DataUpdateCoordinator):
    """Coordinator that polls the NPM API for proxy/redirection host state."""

    def __init__(self, hass: HomeAssistant, client: NpmSwitchesApiClient) -> None:
        """Initialize the coordinator with the shared API client."""
        self.api = client
        # Platforms actually set up for this entry; async_unload_entry uses
        # this to unload only what was loaded.
        self.platforms = []

        super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL)

    async def _async_update_data(self):
        """Fetch fresh host data from the NPM API.

        Raises UpdateFailed (with the underlying error attached) so Home
        Assistant marks dependent entities unavailable.  Returns the fetched
        data so it is exposed via ``coordinator.data`` — the original
        implementation returned None, which made ``coordinator.data`` useless.
        """
        try:
            proxy_hosts = await self.api.get_proxy_hosts()
            redirection_hosts = await self.api.get_redirection_hosts()
        except Exception as exception:
            raise UpdateFailed(str(exception)) from exception
        return {
            "proxy_hosts": proxy_hosts,
            "redirection_hosts": redirection_hosts,
        }
hass.config_entries.async_forward_entry_unload(entry, platform) 99 | for platform in PLATFORMS 100 | if platform in coordinator.platforms 101 | ] 102 | ) 103 | ) 104 | if unloaded: 105 | hass.data[DOMAIN].pop(entry.entry_id) 106 | 107 | return unloaded 108 | 109 | 110 | async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: 111 | """Reload config entry.""" 112 | await async_unload_entry(hass, entry) 113 | await async_setup_entry(hass, entry) 114 | -------------------------------------------------------------------------------- /custom_components/npm_switches/api.py: -------------------------------------------------------------------------------- 1 | """Sample API Client.""" 2 | import logging 3 | import asyncio 4 | import socket 5 | 6 | # from typing import Optional 7 | # from datetime import datetime 8 | import aiohttp 9 | import async_timeout 10 | 11 | from homeassistant.util import dt 12 | 13 | TIMEOUT = 10 14 | 15 | 16 | _LOGGER: logging.Logger = logging.getLogger(__package__) 17 | 18 | HEADERS = {"Content-type": "application/json; charset=UTF-8"} 19 | 20 | 21 | class NpmSwitchesApiClient: 22 | """Handle api calls to NPM instance.""" 23 | 24 | def __init__( 25 | self, username: str, password: str, npm_url: str, session: aiohttp.ClientSession 26 | ) -> None: 27 | """NPM API Client.""" 28 | self._username = username 29 | self._password = password 30 | self._session = session 31 | self._npm_url = npm_url 32 | self._token = None 33 | self._token_expires = dt.utcnow() 34 | self._headers = None 35 | self.proxy_hosts_data = None 36 | self.redir_hosts_data = None 37 | self.stream_hosts_data = None 38 | self.dead_hosts_data = None 39 | self.num_proxy_enabled = 0 40 | self.num_proxy_disabled = 0 41 | self.num_redir_enabled = 0 42 | self.num_redir_disabled = 0 43 | self.num_stream_enabled = 0 44 | self.num_stream_disabled = 0 45 | self.num_dead_enabled = 0 46 | self.num_dead_disabled = 0 47 | 48 | async def async_get_data(self) -> dict: 49 | """Get 
data from the API.""" 50 | url = "http://test:81" 51 | return await self.api_wrapper("get", url) 52 | 53 | # async def async_set_title(self, value: str) -> None: 54 | # """Get data from the API.""" 55 | # url = "https://jsonplaceholder.typicode.com/posts/1" 56 | # await self.api_wrapper("patch", url, data={"title": value}, headers=HEADERS) 57 | 58 | async def get_proxy_hosts(self) -> list(): 59 | """Get a list of proxy-hosts.""" 60 | self.num_proxy_enabled = 0 61 | self.num_proxy_disabled = 0 62 | 63 | if self._token is None: 64 | await self.async_get_new_token() 65 | url = self._npm_url + "/api/nginx/proxy-hosts" 66 | proxy_hosts_list = await self.api_wrapper("get", url, headers=self._headers) 67 | self.proxy_hosts_data = {} 68 | for proxy in proxy_hosts_list: 69 | self.proxy_hosts_data[str(proxy["id"])] = proxy 70 | if proxy["enabled"] == 1: 71 | self.num_proxy_enabled += 1 72 | else: 73 | self.num_proxy_disabled += 1 74 | 75 | return self.proxy_hosts_data 76 | 77 | async def get_redirection_hosts(self) -> list(): 78 | """Get a list of redirection hosts.""" 79 | self.num_redir_enabled = 0 80 | self.num_redir_disabled = 0 81 | 82 | if self._token is None: 83 | await self.async_get_new_token() 84 | url = self._npm_url + "/api/nginx/redirection-hosts" 85 | redirection_hosts_list = await self.api_wrapper("get", url, headers=self._headers) 86 | 87 | self.redir_hosts_data = {} 88 | for redirection in redirection_hosts_list: 89 | self.redir_hosts_data[str(redirection["id"])] = redirection 90 | if redirection["enabled"] == 1: 91 | self.num_redir_enabled += 1 92 | else: 93 | self.num_redir_disabled += 1 94 | return self.redir_hosts_data 95 | 96 | async def get_stream_hosts(self) -> list(): 97 | """Get a list of stream hosts.""" 98 | self.num_stream_enabled = 0 99 | self.num_stream_disabled = 0 100 | 101 | if self._token is None: 102 | await self.async_get_new_token() 103 | url = self._npm_url + "/api/nginx/streams" 104 | stream_hosts_list = await self.api_wrapper("get", 
url, headers=self._headers) 105 | 106 | self.stream_hosts_data = {} 107 | for stream in stream_hosts_list: 108 | self.stream_hosts_data[str(stream["id"])] = stream 109 | if stream["enabled"] == 1: 110 | self.num_stream_enabled += 1 111 | else: 112 | self.num_stream_disabled += 1 113 | return self.stream_hosts_data 114 | 115 | async def get_dead_hosts(self) -> list(): 116 | """Get a list of stream hosts.""" 117 | self.num_dead_enabled = 0 118 | self.num_dead_disabled = 0 119 | 120 | if self._token is None: 121 | await self.async_get_new_token() 122 | url = self._npm_url + "/api/nginx/dead-hosts" 123 | dead_hosts_list = await self.api_wrapper("get", url, headers=self._headers) 124 | 125 | self.dead_hosts_data = {} 126 | for dead in dead_hosts_list: 127 | self.dead_hosts_data[str(dead["id"])] = dead 128 | if dead["enabled"] == 1: 129 | self.num_dead_enabled += 1 130 | else: 131 | self.num_dead_disabled += 1 132 | return self.dead_hosts_data 133 | 134 | async def get_host(self, host_id: int, host_type: str) -> dict: 135 | """Get a host by id and type. 
136 | Host Type: proxy-hosts, redirection-hosts, streams, dead-hosts""" 137 | if host_type == "proxy-hosts": 138 | return self.proxy_hosts_data[host_id] 139 | elif host_type == "redirection-hosts": 140 | return self.redir_hosts_data[host_id] 141 | elif host_type == "streams": 142 | return self.stream_hosts_data[host_id] 143 | elif host_type == "dead-hosts": 144 | return self.dead_hosts_data[host_id] 145 | else: 146 | return None 147 | 148 | async def async_get_new_token(self) -> None: 149 | """Get a new token.""" 150 | url = self._npm_url + "/api/tokens" 151 | response = await self.api_wrapper( 152 | "token", 153 | url, 154 | data={ 155 | "identity": self._username, 156 | "secret": self._password, 157 | }, 158 | ) 159 | 160 | self._token = response["token"] 161 | self._token_expires = dt.parse_datetime(response["expires"]) 162 | self._headers = { 163 | "Authorization": "Bearer " + self._token, 164 | } 165 | 166 | async def async_check_token_expiration(self) -> None: 167 | """Check if token expired.""" 168 | utcnow = dt.utcnow() 169 | 170 | if utcnow > self._token_expires: 171 | await self.async_get_new_token() 172 | 173 | async def enable_host(self, host_id: str, host_type: str) -> None: 174 | """Enable the passed host 175 | Host Type: proxy-hosts, redirection-hosts, streams, dead-hosts""" 176 | url = self._npm_url + "/api/nginx/" + host_type + "/" + host_id + "/enable" 177 | response = await self.api_wrapper("post", url, headers=self._headers) 178 | 179 | if response is True: 180 | if host_type == "proxy-hosts": 181 | self.proxy_hosts_data[host_id]["enabled"] = 1 182 | if host_type == "redirection-hosts": 183 | self.redir_hosts_data[host_id]["enabled"] = 1 184 | if host_type == "streams": 185 | self.stream_hosts_data[host_id]["enabled"] = 1 186 | if host_type == "dead-hosts": 187 | self.dead_hosts_data[host_id]["enabled"] = 1 188 | elif "error" in response.keys(): 189 | _LOGGER.error( 190 | "Error enabling host type %s host id %s. 
Error message: '%s'", 191 | host_type, 192 | host_id, 193 | response["error"]["message"], 194 | ) 195 | 196 | async def disable_host(self, host_id: str, host_type: str) -> None: 197 | """Disable the passed host. 198 | Host Type: proxy-hosts, redirection-hosts, streams, dead-hosts""" 199 | url = self._npm_url + "/api/nginx/" +host_type+ "/" + host_id + "/disable" 200 | 201 | response = await self.api_wrapper("post", url, headers=self._headers) 202 | if response is True: 203 | if host_type == "proxy-hosts": 204 | self.proxy_hosts_data[host_id]["enabled"] = 0 205 | if host_type == "redirection-hosts": 206 | self.redir_hosts_data[host_id]["enabled"] = 0 207 | if host_type == "streams": 208 | self.stream_hosts_data[host_id]["enabled"] = 0 209 | if host_type == "dead-hosts": 210 | self.dead_hosts_data[host_id]["enabled"] = 0 211 | elif "error" in response.keys(): 212 | _LOGGER.error( 213 | "Error enabling host type %s host id %s. Error message: '%s'", 214 | host_type, 215 | host_id, 216 | response["error"]["message"], 217 | ) 218 | 219 | def is_host_enabled(self, host_id: str, host_type: str) -> bool: 220 | """Return True if the proxy is enabled. 
221 | Host Type: proxy-hosts, redirection-hosts, streams, dead-hosts""" 222 | if host_type == "proxy-hosts": 223 | if self.proxy_hosts_data[host_id]["enabled"] == 1: 224 | return True 225 | else: 226 | return False 227 | elif host_type == "redirection-hosts": 228 | if self.redir_hosts_data[host_id]["enabled"] == 1: 229 | return True 230 | else: 231 | return False 232 | elif host_type == "streams": 233 | if self.stream_hosts_data[host_id]["enabled"] == 1: 234 | return True 235 | else: 236 | return False 237 | elif host_type == "dead-hosts": 238 | if self.dead_hosts_data[host_id]["enabled"] == 1: 239 | return True 240 | else: 241 | return False 242 | else: 243 | return None 244 | @property 245 | def get_num_proxy_enabled(self) -> int: 246 | """Return the num enabled proxy hosts.""" 247 | return self.num_proxy_enabled 248 | 249 | @property 250 | def get_num_proxy_disabled(self) -> int: 251 | """Return the num disabled proxy hosts.""" 252 | return self.num_proxy_disabled 253 | 254 | @property 255 | def get_npm_url(self) -> str: 256 | """Return the npm url.""" 257 | return self._npm_url 258 | 259 | async def api_wrapper( 260 | self, method: str, url: str, data: dict = None, headers: dict = None 261 | ) -> dict: 262 | """Get information from the API.""" 263 | if method != "token": 264 | await self.async_check_token_expiration() 265 | 266 | try: 267 | async with async_timeout.timeout(TIMEOUT): 268 | if method == "get": 269 | response = await self._session.get(url, headers=headers) 270 | return await response.json() 271 | 272 | elif method == "put": 273 | await self._session.put(url, headers=headers, json=data) 274 | 275 | elif method == "patch": 276 | await self._session.patch(url, headers=headers, json=data) 277 | 278 | elif method == "post" or method == "token": 279 | response = await self._session.post(url, headers=headers, json=data) 280 | return await response.json() 281 | 282 | except asyncio.TimeoutError as exception: 283 | _LOGGER.error( 284 | "Timeout error 
fetching information from %s - %s", 285 | url, 286 | exception, 287 | ) 288 | 289 | except (KeyError, TypeError) as exception: 290 | _LOGGER.error( 291 | "Error parsing information from %s - %s", 292 | url, 293 | exception, 294 | ) 295 | except (aiohttp.ClientError, socket.gaierror) as exception: 296 | _LOGGER.error( 297 | "Error fetching information from %s - %s", 298 | url, 299 | exception, 300 | ) 301 | except Exception as exception: # pylint: disable=broad-except 302 | _LOGGER.error("Something really wrong happened! - %s", exception) 303 | -------------------------------------------------------------------------------- /custom_components/npm_switches/config_flow.py: -------------------------------------------------------------------------------- 1 | """Adds config flow for Blueprint.""" 2 | from homeassistant import config_entries 3 | from homeassistant.core import callback 4 | from homeassistant.helpers.aiohttp_client import async_create_clientsession 5 | from homeassistant.util import slugify 6 | import voluptuous as vol 7 | 8 | from .api import NpmSwitchesApiClient 9 | from .const import ( 10 | CONF_NPM_URL, 11 | CONF_PASSWORD, 12 | CONF_USERNAME, 13 | DOMAIN, 14 | PLATFORMS, 15 | CONF_INDLUDE_PROXY, 16 | CONF_INCLUDE_REDIR, 17 | CONF_INCLUDE_STREAMS, 18 | CONF_INCLUDE_DEAD, 19 | CONF_INCLUDE_SENSORS, 20 | DEFAULT_USERNAME, 21 | DEFAULT_PASSWORD, 22 | DEFAULT_NPM_URL, 23 | DEFAULT_INDLUDE_PROXY, 24 | DEFAULT_INCLUDE_REDIR, 25 | DEFAULT_INCLUDE_STREAMS, 26 | DEFAULT_INCLUDE_DEAD, 27 | DEFAULT_INCLUDE_SENSORS, 28 | ) 29 | 30 | 31 | class NPMSwitchesFloHandler(config_entries.ConfigFlow, domain=DOMAIN): 32 | """Config flow for NPM Switches.""" 33 | 34 | VERSION = 1 35 | CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL 36 | 37 | def __init__(self): 38 | """Initialize.""" 39 | self._errors = {} 40 | self.clean_npm_url = None 41 | 42 | async def async_step_user(self, user_input=None): 43 | """Handle a flow initialized by the user.""" 44 | self._errors = {} 
45 | 46 | # Uncomment the next 2 lines if only a single instance of the integration is allowed: 47 | # if self._async_current_entries(): 48 | # return self.async_abort(reason="single_instance_allowed") 49 | 50 | if user_input is not None: 51 | scheme_end = user_input[CONF_NPM_URL].find("://")+3 52 | self.clean_npm_url = user_input[CONF_NPM_URL][scheme_end:] 53 | user_input["clean_npm_url"] = slugify(f"{self.clean_npm_url}") 54 | 55 | # existing_entry = self._async_entry_for_username(user_input[CONF_NPM_URL]) 56 | existing_entry = self._async_entry_for_username(self.clean_npm_url) 57 | # if existing_entry and not self.reauth: 58 | if existing_entry: 59 | return self.async_abort(reason="already_configured") 60 | 61 | valid = await self._test_credentials( 62 | user_input[CONF_USERNAME], 63 | user_input[CONF_PASSWORD], 64 | user_input[CONF_NPM_URL], 65 | ) 66 | if valid: 67 | return self.async_create_entry( 68 | title=self.clean_npm_url, data=user_input 69 | ) 70 | else: 71 | self._errors["base"] = "auth" 72 | 73 | return await self._show_config_form(user_input) 74 | 75 | # user_input = {} 76 | # # Provide defaults for form 77 | # user_input[CONF_USERNAME] = "" 78 | # user_input[CONF_PASSWORD] = "" 79 | # user_input[CONF_NPM_URL] = "http://" 80 | # user_input[CONF_INDLUDE_PROXY] = True 81 | 82 | return await self._show_config_form() 83 | 84 | # @staticmethod 85 | # @callback 86 | # def async_get_options_flow(config_entry): 87 | # return BlueprintOptionsFlowHandler(config_entry) 88 | 89 | async def _show_config_form(self): # pylint: disable=unused-argument 90 | """Show the configuration form to edit location data.""" 91 | return self.async_show_form( 92 | step_id="user", 93 | data_schema=vol.Schema( 94 | { 95 | vol.Required(CONF_USERNAME, default=DEFAULT_USERNAME): str, 96 | vol.Required(CONF_PASSWORD, default=DEFAULT_PASSWORD): str, 97 | vol.Required(CONF_NPM_URL, default=DEFAULT_NPM_URL): str, 98 | vol.Optional(CONF_INCLUDE_SENSORS,default=DEFAULT_INCLUDE_SENSORS): 
bool, 99 | vol.Optional(CONF_INDLUDE_PROXY,default=DEFAULT_INDLUDE_PROXY): bool, 100 | vol.Optional(CONF_INCLUDE_REDIR,default=DEFAULT_INCLUDE_REDIR): bool, 101 | vol.Optional(CONF_INCLUDE_STREAMS,default=DEFAULT_INCLUDE_STREAMS): bool, 102 | vol.Optional(CONF_INCLUDE_DEAD,default=DEFAULT_INCLUDE_DEAD): bool 103 | } 104 | ), 105 | errors=self._errors, 106 | ) 107 | 108 | async def _test_credentials(self, username, password, npm_url): 109 | """Return true if credentials is valid.""" 110 | try: 111 | session = async_create_clientsession(self.hass) 112 | client = NpmSwitchesApiClient(username, password, npm_url, session) 113 | await client.async_get_new_token() 114 | return True 115 | except Exception: # pylint: disable=broad-except 116 | pass 117 | return False 118 | 119 | @callback 120 | def _async_entry_for_username(self, username): 121 | """Find an existing entry for a username.""" 122 | for entry in self._async_current_entries(): 123 | # if entry.data.get(CONF_NPM_URL) == username: 124 | if entry.title == username: 125 | return entry 126 | return None 127 | 128 | 129 | # class BlueprintOptionsFlowHandler(config_entries.OptionsFlow): 130 | # """Blueprint config flow options handler.""" 131 | 132 | # def __init__(self, config_entry): 133 | # """Initialize HACS options flow.""" 134 | # self.config_entry = config_entry 135 | # self.options = dict(config_entry.options) 136 | 137 | # async def async_step_init(self, user_input=None): # pylint: disable=unused-argument 138 | # """Manage the options.""" 139 | # return await self.async_step_user() 140 | 141 | # async def async_step_user(self, user_input=None): 142 | # """Handle a flow initialized by the user.""" 143 | # if user_input is not None: 144 | # self.options.update(user_input) 145 | # return await self._update_options() 146 | 147 | # return self.async_show_form( 148 | # step_id="user", 149 | # data_schema = vol.Schema( 150 | # { 151 | # 
vol.Optional(CONF_INDLUDE_PROXY,default=self.config_entry.options.get(CONF_INDLUDE_PROXY, DEFAULT_INDLUDE_PROXY),): bool, 152 | # vol.Optional( 153 | # CONF_INCLUDE_REDIR, 154 | # default=self.config_entry.options.get( 155 | # CONF_INCLUDE_REDIR, DEFAULT_INCLUDE_REDIR 156 | # ), 157 | # ): bool, 158 | # vol.Optional( 159 | # CONF_INCLUDE_STREAMS, 160 | # default=self.config_entry.options.get( 161 | # CONF_INCLUDE_STREAMS, DEFAULT_INCLUDE_STREAMS 162 | # ), 163 | # ): bool, 164 | # vol.Optional( 165 | # CONF_INCLUDE_DEAD, 166 | # default=self.config_entry.options.get( 167 | # CONF_INCLUDE_DEAD, DEFAULT_INCLUDE_DEAD 168 | # ), 169 | # ): bool, 170 | # vol.Optional( 171 | # CONF_INCLUDE_SENSORS, 172 | # default=self.config_entry.options.get( 173 | # CONF_INCLUDE_SENSORS, DEFAULT_INCLUDE_SENSORS 174 | # ), 175 | # ): bool, 176 | # } 177 | # ) 178 | # ) 179 | 180 | # async def _update_options(self): 181 | # """Update config entry options.""" 182 | # return self.async_create_entry( 183 | # title=self.config_entry.data.get(CONF_USERNAME), data=self.options 184 | # ) 185 | -------------------------------------------------------------------------------- /custom_components/npm_switches/const.py: -------------------------------------------------------------------------------- 1 | """Constants for NPM Switches.""" 2 | # Base component constants 3 | NAME = "NPM Switches" 4 | DOMAIN = "npm_switches" 5 | DOMAIN_DATA = f"{DOMAIN}_data" 6 | VERSION = "1.1.0" 7 | ATTRIBUTION = "Data provided by http://jsonplaceholder.typicode.com/" 8 | ISSUE_URL = "https://github.com/InTheDaylight14/nginx-proxy-manager-switches/issues" 9 | 10 | # Icons 11 | ICON = "mdi:format-quote-close" 12 | 13 | # Device classes 14 | BINARY_SENSOR_DEVICE_CLASS = "connectivity" 15 | 16 | # Platforms 17 | # BINARY_SENSOR = "binary_sensor" 18 | SENSOR = "sensor" 19 | SWITCH = "switch" 20 | PLATFORMS = [SENSOR, SWITCH] 21 | 22 | 23 | # Configuration and options 24 | CONF_ENABLED = "enabled" 25 | CONF_USERNAME = 
"username" 26 | CONF_PASSWORD = "password" 27 | CONF_NPM_URL = "npm_url" 28 | CONF_INDLUDE_PROXY = "include_proxy_hosts" 29 | CONF_INCLUDE_REDIR = "include_redirection_hosts" 30 | CONF_INCLUDE_STREAMS = "include_stream_hosts" 31 | CONF_INCLUDE_DEAD = "include_dead_hosts" 32 | CONF_INCLUDE_SENSORS = "include_enable_disable_count_sensors" 33 | DEFAULT_ENABLED = "" 34 | DEFAULT_USERNAME = "" 35 | DEFAULT_PASSWORD = "" 36 | DEFAULT_NPM_URL = "http://" 37 | DEFAULT_INDLUDE_PROXY = True 38 | DEFAULT_INCLUDE_REDIR = False 39 | DEFAULT_INCLUDE_STREAMS = False 40 | DEFAULT_INCLUDE_DEAD = False 41 | DEFAULT_INCLUDE_SENSORS = True 42 | 43 | # Defaults 44 | DEFAULT_NAME = DOMAIN 45 | 46 | 47 | STARTUP_MESSAGE = f""" 48 | ------------------------------------------------------------------- 49 | {NAME} 50 | Version: {VERSION} 51 | This is a custom integration! 52 | If you have any issues with this you need to open an issue here: 53 | {ISSUE_URL} 54 | ------------------------------------------------------------------- 55 | """ 56 | -------------------------------------------------------------------------------- /custom_components/npm_switches/entity.py: -------------------------------------------------------------------------------- 1 | """NPM Switches Entity class""" 2 | from homeassistant.helpers.update_coordinator import CoordinatorEntity 3 | from homeassistant.helpers.device_registry import DeviceInfo 4 | from homeassistant.util import slugify 5 | 6 | from .const import DOMAIN, NAME, VERSION, ATTRIBUTION 7 | 8 | 9 | class NpmSwitchesEntity(CoordinatorEntity): 10 | """Init NPM user device.""" 11 | 12 | _attr_has_entity_name = True 13 | 14 | def __init__(self, coordinator, config_entry): 15 | super().__init__(coordinator) 16 | self.host = None 17 | self.name = None 18 | self.entity_id = None 19 | self.config_entry = config_entry 20 | self.host_id = None 21 | self.coordinator = coordinator 22 | self._attr_unique_id = None 23 | self._attr_device_info = DeviceInfo( 24 | 
identifiers={(DOMAIN, self.config_entry.entry_id)}, 25 | name=self.config_entry.title, 26 | ) 27 | -------------------------------------------------------------------------------- /custom_components/npm_switches/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "domain": "npm_switches", 3 | "name": "NPM Switches", 4 | "codeowners": [ 5 | "@InTheDaylight14" 6 | ], 7 | "config_flow": true, 8 | "documentation": "https://github.com/InTheDaylight14/nginx-proxy-manager-switches", 9 | "iot_class": "local_polling", 10 | "issue_tracker": "https://github.com/InTheDaylight14/nginx-proxy-manager-switches/issues", 11 | "version": "2.0.2", 12 | "homeassistant": "2024.1.1" 13 | } -------------------------------------------------------------------------------- /custom_components/npm_switches/sensor.py: -------------------------------------------------------------------------------- 1 | """Sensor platform for NPM Switches.""" 2 | from homeassistant.components.sensor import SensorEntity 3 | from homeassistant.config_entries import ConfigEntry 4 | from homeassistant.util import slugify 5 | 6 | from .const import DOMAIN 7 | from .entity import NpmSwitchesEntity 8 | from . 
import NpmSwitchesUpdateCoordinator


async def async_setup_entry(hass, entry, async_add_entities):
    """Setup sensor platform."""
    # Coordinator for this config entry, stored by the integration's __init__.
    coordinator = hass.data[DOMAIN][entry.entry_id]
    entities = []
    # Count sensors are opt-in, and only created for the host types the
    # user chose to include. Each included type gets an enabled-count and a
    # disabled-count sensor.
    if entry.data["include_enable_disable_count_sensors"]:
        if entry.data["include_proxy_hosts"]:
            entities.append(NpmSwitchesProxySensor(coordinator, entry, "enabled"))
            entities.append(NpmSwitchesProxySensor(coordinator, entry, "disabled"))
        if entry.data["include_redirection_hosts"]:
            entities.append(NpmSwitchesRedirSensor(coordinator, entry, "enabled"))
            entities.append(NpmSwitchesRedirSensor(coordinator, entry, "disabled"))
        if entry.data["include_stream_hosts"]:
            entities.append(NpmSwitchesStreamSensor(coordinator, entry, "enabled"))
            entities.append(NpmSwitchesStreamSensor(coordinator, entry, "disabled"))
        if entry.data["include_dead_hosts"]:
            entities.append(NpmSwitchesDeadSensor(coordinator, entry, "enabled"))
            entities.append(NpmSwitchesDeadSensor(coordinator, entry, "disabled"))

    async_add_entities(entities, True)


class NpmSwitchesProxySensor(NpmSwitchesEntity, SensorEntity):
    """NPM Switches Proxy Sensor class."""

    def __init__(
        self,
        coordinator: NpmSwitchesUpdateCoordinator,
        entry: ConfigEntry,
        name: str,
    ) -> None:
        """Initialize a proxy hosts count sensor.

        *name* is either "enabled" or "disabled" and selects which count
        this sensor reports; it also becomes part of the display name.
        """
        super().__init__(coordinator, entry)
        self.host_id = name
        self.sensor_name = self.host_id
        self.name = "Proxy Hosts " + self.sensor_name.capitalize()
        self.entity_id = "sensor."+slugify(f"{entry.title} {self.name}")
        self._attr_unique_id = f"{entry.entry_id} {self.name}"

    @property
    def native_value(self):
        """Return the native value of the sensor."""
        if self.sensor_name == "enabled":
            return self.coordinator.api.num_proxy_enabled
        return self.coordinator.api.num_proxy_disabled

    @property
    def icon(self):
        """Return the icon of the sensor."""
        return "mdi:counter"

class NpmSwitchesRedirSensor(NpmSwitchesEntity, SensorEntity):
    """NPM Switches Redir Sensor class."""

    def __init__(
        self,
        coordinator: NpmSwitchesUpdateCoordinator,
        entry: ConfigEntry,
        name: str,
    ) -> None:
        """Initialize a redirection hosts count sensor.

        *name* is either "enabled" or "disabled" and selects which count
        this sensor reports.
        """
        super().__init__(coordinator, entry)
        self.host_id = name
        self.sensor_name = self.host_id
        self.name = "Redirection Hosts " + self.sensor_name.capitalize()
        self.entity_id = "sensor." + slugify(f"{entry.title} {self.name}")
        self._attr_unique_id = f"{entry.entry_id} {self.name}"

    @property
    def native_value(self):
        """Return the native value of the sensor."""
        if self.sensor_name == "enabled":
            return self.coordinator.api.num_redir_enabled
        return self.coordinator.api.num_redir_disabled

    @property
    def icon(self):
        """Return the icon of the sensor."""
        return "mdi:counter"

class NpmSwitchesStreamSensor(NpmSwitchesEntity, SensorEntity):
    """NPM Switches Stream Sensor class."""

    def __init__(
        self,
        coordinator: NpmSwitchesUpdateCoordinator,
        entry: ConfigEntry,
        name: str,
    ) -> None:
        """Initialize a stream hosts count sensor.

        *name* is either "enabled" or "disabled" and selects which count
        this sensor reports.
        """
        super().__init__(coordinator, entry)
        self.host_id = name
        self.sensor_name = self.host_id
        self.name = "Stream Hosts " + self.sensor_name.capitalize()
        self.entity_id = "sensor."+slugify(f"{entry.title} {self.name}")
        self._attr_unique_id = f"{entry.entry_id} {self.name}"

    @property
    def native_value(self):
        """Return the native value of the sensor."""
        if self.sensor_name == "enabled":
            return self.coordinator.api.num_stream_enabled
        return self.coordinator.api.num_stream_disabled

    @property
    def icon(self):
        """Return the icon of the sensor."""
        return "mdi:counter"

| 119 | class NpmSwitchesDeadSensor(NpmSwitchesEntity, SensorEntity): 120 | """NPM Switches Deam Sensor class.""" 121 | 122 | def __init__( 123 | self, 124 | coordinator: NpmSwitchesUpdateCoordinator, 125 | entry: ConfigEntry, 126 | name: str, 127 | ) -> None: 128 | """Initialize proxy switch entity.""" 129 | super().__init__(coordinator, entry) 130 | self.host_id = name 131 | self.sensor_name = self.host_id 132 | self.name = "404 Hosts " + self.sensor_name.capitalize() 133 | self.entity_id = "sensor."+slugify(f"{entry.title} {self.name}") 134 | self._attr_unique_id = f"{entry.entry_id} {self.name}" 135 | 136 | @property 137 | def native_value(self): 138 | """Return the native value of the sensor.""" 139 | if self.sensor_name == "enabled": 140 | return self.coordinator.api.num_dead_enabled 141 | return self.coordinator.api.num_dead_disabled 142 | 143 | @property 144 | def icon(self): 145 | """Return the icon of the sensor.""" 146 | return "mdi:counter" -------------------------------------------------------------------------------- /custom_components/npm_switches/switch.py: -------------------------------------------------------------------------------- 1 | """Switch platform for npm_switches.""" 2 | import logging 3 | from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription 4 | from homeassistant.util import slugify 5 | 6 | # from homeassistant.core import HomeAssistant 7 | from homeassistant.config_entries import ConfigEntry 8 | 9 | from .const import DOMAIN 10 | from .entity import NpmSwitchesEntity 11 | from . 
import NpmSwitchesUpdateCoordinator 12 | 13 | _LOGGER = logging.getLogger(__name__) 14 | 15 | 16 | async def async_setup_entry(hass, entry, async_add_entities): 17 | """Setup sensor platform.""" 18 | coordinator = hass.data[DOMAIN][entry.entry_id] 19 | api = hass.data[DOMAIN][entry.entry_id].api 20 | proxy_hosts = await api.get_proxy_hosts() 21 | redir_hosts = await api.get_redirection_hosts() 22 | stream_hosts = await api.get_stream_hosts() 23 | dead_hosts = await api.get_dead_hosts() 24 | entities = [] 25 | 26 | if entry.data["include_proxy_hosts"]: 27 | for proxy_host in proxy_hosts.values(): 28 | entities.append(NpmProxyBinarySwitch(coordinator, entry, proxy_host)) 29 | if entry.data["include_redirection_hosts"]: 30 | for redir_host in redir_hosts.values(): 31 | entities.append(NpmRedirBinarySwitch(coordinator, entry, redir_host)) 32 | if entry.data["include_stream_hosts"]: 33 | for stream_host in stream_hosts.values(): 34 | entities.append(NpmStreamBinarySwitch(coordinator, entry, stream_host)) 35 | if entry.data["include_dead_hosts"]: 36 | for dead_host in dead_hosts.values(): 37 | entities.append(NpmDeadBinarySwitch(coordinator, entry, dead_host)) 38 | 39 | async_add_entities(entities, True) 40 | # async_add_devices([NpmProxyBinarySwitch(coordinator, entry, "20")]) 41 | 42 | 43 | class NpmProxyBinarySwitch(NpmSwitchesEntity, SwitchEntity): 44 | """Switches to enable/disable the Proxy Host Type in NPM""" 45 | 46 | def __init__( 47 | self, 48 | coordinator: NpmSwitchesUpdateCoordinator, 49 | entry: ConfigEntry, 50 | host: dict, 51 | ) -> None: 52 | """Initialize proxy switch entity.""" 53 | super().__init__(coordinator, entry) 54 | self.host = host 55 | self.name = "Proxy " + self.host["domain_names"][0].replace(".", " ").capitalize() 56 | self.entity_id = "switch."+slugify(f"{entry.title} {self.name}") 57 | self._attr_unique_id = f"{entry.entry_id} {self.name}" 58 | self.host_id = str(host["id"]) 59 | self.host_type = "proxy-hosts" 60 | 61 | async def 
async_turn_on(self, **kwargs): # pylint: disable=unused-argument 62 | """Turn on the switch.""" 63 | await self.coordinator.api.enable_host(self.host_id, self.host_type) 64 | self.async_write_ha_state() 65 | self.host = await self.coordinator.api.get_host(self.host_id, self.host_type) 66 | 67 | async def async_turn_off(self, **kwargs): # pylint: disable=unused-argument 68 | """Turn off the switch.""" 69 | await self.coordinator.api.disable_host(self.host_id, self.host_type) 70 | self.async_write_ha_state() 71 | self.host = await self.coordinator.api.get_host(self.host_id, self.host_type) 72 | 73 | # @property 74 | # def name(self): 75 | # """Return the name of the switch.""" 76 | # return "NPM " + self.host["domain_names"][0].replace(".", " ").capitalize() 77 | 78 | @property 79 | def icon(self): 80 | """Return the icon of this switch.""" 81 | if self.coordinator.api.is_host_enabled(self.host_id, self.host_type): 82 | return "mdi:check-network" 83 | return "mdi:close-network" 84 | 85 | @property 86 | def is_on(self): 87 | """Return true if the switch is on.""" 88 | return self.coordinator.api.is_host_enabled(self.host_id, self.host_type) 89 | 90 | @property 91 | def extra_state_attributes(self): 92 | """Return device state attributes.""" 93 | return { 94 | "id": self.host["id"], 95 | "domain_names": self.host["domain_names"], 96 | } 97 | 98 | class NpmRedirBinarySwitch(NpmSwitchesEntity, SwitchEntity): 99 | """Switches to enable/disable the Redir Host Type in NPM""" 100 | 101 | def __init__( 102 | self, 103 | coordinator: NpmSwitchesUpdateCoordinator, 104 | entry: ConfigEntry, 105 | host: dict, 106 | ) -> None: 107 | """Initialize redir switch entity.""" 108 | super().__init__(coordinator, entry) 109 | self.host = host 110 | self.name = "Redirect " + self.host["domain_names"][0].replace(".", " ").capitalize() 111 | self.entity_id = "switch."+slugify(f"{entry.title} {self.name}") 112 | self._attr_unique_id = f"{entry.entry_id} {self.name}" 113 | self.host_type = 
"redirection-hosts" 114 | self.host_id = str(host["id"]) 115 | 116 | async def async_turn_on(self, **kwargs): # pylint: disable=unused-argument 117 | """Turn on the switch.""" 118 | await self.coordinator.api.enable_host(self.host_id, self.host_type) 119 | self.async_write_ha_state() 120 | self.host = await self.coordinator.api.get_host(self.host_id, self.host_type) 121 | 122 | async def async_turn_off(self, **kwargs): # pylint: disable=unused-argument 123 | """Turn off the switch.""" 124 | await self.coordinator.api.disable_host(self.host_id, self.host_type) 125 | self.async_write_ha_state() 126 | self.host = await self.coordinator.api.get_host(self.host_id, self.host_type) 127 | 128 | @property 129 | def icon(self): 130 | """Return the icon of this switch.""" 131 | if self.coordinator.api.is_host_enabled(self.host_id, self.host_type): 132 | return "mdi:check-network" 133 | return "mdi:close-network" 134 | 135 | @property 136 | def is_on(self): 137 | """Return true if the switch is on.""" 138 | return self.coordinator.api.is_host_enabled(self.host_id, self.host_type) 139 | 140 | @property 141 | def extra_state_attributes(self): 142 | """Return device state attributes.""" 143 | return { 144 | "id": self.host["id"], 145 | "domain_names": self.host["domain_names"], 146 | # "forward_domain_name": self.host["forward_domain_names"], 147 | } 148 | 149 | class NpmStreamBinarySwitch(NpmSwitchesEntity, SwitchEntity): 150 | """Switches to enable/disable the Redir Host Type in NPM""" 151 | 152 | def __init__( 153 | self, 154 | coordinator: NpmSwitchesUpdateCoordinator, 155 | entry: ConfigEntry, 156 | host: dict, 157 | ) -> None: 158 | """Initialize steam switch entity.""" 159 | super().__init__(coordinator, entry) 160 | self.host = host 161 | self.name = "Stream " + str(self.host["incoming_port"]) 162 | self.entity_id = "switch."+slugify(f"{entry.title} {self.name}") 163 | self._attr_unique_id = f"{entry.entry_id} {self.name}" 164 | self.host_type = "streams" 165 | 
self.host_id = str(host["id"]) 166 | 167 | async def async_turn_on(self, **kwargs): # pylint: disable=unused-argument 168 | """Turn on the switch.""" 169 | await self.coordinator.api.enable_host(self.host_id, self.host_type) 170 | self.async_write_ha_state() 171 | self.host = await self.coordinator.api.get_host(self.host_id, self.host_type) 172 | 173 | async def async_turn_off(self, **kwargs): # pylint: disable=unused-argument 174 | """Turn off the switch.""" 175 | await self.coordinator.api.disable_host(self.host_id, self.host_type) 176 | self.async_write_ha_state() 177 | self.host = await self.coordinator.api.get_host(self.host_id, self.host_type) 178 | 179 | @property 180 | def icon(self): 181 | """Return the icon of this switch.""" 182 | if self.coordinator.api.is_host_enabled(self.host_id, self.host_type): 183 | return "mdi:check-network" 184 | return "mdi:close-network" 185 | 186 | @property 187 | def is_on(self): 188 | """Return true if the switch is on.""" 189 | return self.coordinator.api.is_host_enabled(self.host_id, self.host_type) 190 | 191 | @property 192 | def extra_state_attributes(self): 193 | """Return device state attributes.""" 194 | return { 195 | "id": self.host["id"], 196 | "forwarding_host": self.host["forwarding_host"], 197 | "forwarding_port": self.host["forwarding_port"], 198 | # "forward_domain_name": self.host["forward_domain_names"], 199 | } 200 | 201 | class NpmDeadBinarySwitch(NpmSwitchesEntity, SwitchEntity): 202 | """Switches to enable/disable the Dead Host Type in NPM""" 203 | 204 | def __init__( 205 | self, 206 | coordinator: NpmSwitchesUpdateCoordinator, 207 | entry: ConfigEntry, 208 | host: dict, 209 | ) -> None: 210 | """Initialize redir switch entity.""" 211 | super().__init__(coordinator, entry) 212 | self.host = host 213 | self.name = "404 " + self.host["domain_names"][0].replace(".", " ").capitalize() 214 | self.entity_id = "switch."+slugify(f"{entry.title} {self.name}") 215 | self._attr_unique_id = f"{entry.entry_id} 
{self.name}" 216 | self.host_type = "dead-hosts" 217 | self.host_id = str(host["id"]) 218 | 219 | async def async_turn_on(self, **kwargs): # pylint: disable=unused-argument 220 | """Turn on the switch.""" 221 | await self.coordinator.api.enable_host(self.host_id, self.host_type) 222 | self.async_write_ha_state() 223 | self.host = await self.coordinator.api.get_host(self.host_id, self.host_type) 224 | 225 | async def async_turn_off(self, **kwargs): # pylint: disable=unused-argument 226 | """Turn off the switch.""" 227 | await self.coordinator.api.disable_host(self.host_id, self.host_type) 228 | self.async_write_ha_state() 229 | self.host = await self.coordinator.api.get_host(self.host_id, self.host_type) 230 | 231 | @property 232 | def icon(self): 233 | """Return the icon of this switch.""" 234 | if self.coordinator.api.is_host_enabled(self.host_id, self.host_type): 235 | return "mdi:check-network" 236 | return "mdi:close-network" 237 | 238 | @property 239 | def is_on(self): 240 | """Return true if the switch is on.""" 241 | return self.coordinator.api.is_host_enabled(self.host_id, self.host_type) 242 | 243 | @property 244 | def extra_state_attributes(self): 245 | """Return device state attributes.""" 246 | return { 247 | "id": self.host["id"], 248 | "domain_names": self.host["domain_names"], 249 | # "forward_domain_name": self.host["forward_domain_names"], 250 | } -------------------------------------------------------------------------------- /custom_components/npm_switches/translations/en.json: -------------------------------------------------------------------------------- 1 | { 2 | "config": { 3 | "step": { 4 | "user": { 5 | "title": "NPM Instance", 6 | "description": "If you need help with the configuration have a look here: https://github.com/InTheDaylight14/nginx-proxy-manager-switches", 7 | "data": { 8 | "username": "Username", 9 | "password": "Password", 10 | "npm_url": "NPM URL including UI port number ex. 
http://ip:port", 11 | "include_proxy_hosts": "Include Proxy Hosts", 12 | "include_redirection_hosts": "Include Redirection Hosts", 13 | "include_stream_hosts": "Include Stream Hosts", 14 | "include_dead_hosts": "Include Dead Hosts", 15 | "include_enable_disable_count_sensors": "Include Enabled/Disabled Count Sensors" 16 | } 17 | } 18 | }, 19 | "error": { 20 | "auth": "Username/Password is wrong." 21 | }, 22 | "abort": { 23 | "single_instance_allowed": "Only a single instance is allowed." 24 | } 25 | }, 26 | "options": { 27 | "step": { 28 | "user": { 29 | "data": { 30 | "sensor": "Sensor enabled", 31 | "switch": "Switch enabled" 32 | } 33 | } 34 | } 35 | } 36 | } -------------------------------------------------------------------------------- /custom_components/npm_switches/translations/fr.json: -------------------------------------------------------------------------------- 1 | { 2 | "config": { 3 | "step": { 4 | "user": { 5 | "title": "NPM Exemple", 6 | "description": "Si vous avez besoin d'aide pour la configuration, regardez ici: https://github.com/InTheDaylight14/nginx-proxy-manager-switches", 7 | "data": { 8 | "username": "Identifiant", 9 | "password": "Mot de Passe", 10 | "npm_url": "Nginx Proxy Manager URL ex. http://ip:port", 11 | "include_proxy_hosts": "Inclure les hôtes proxy", 12 | "include_redirection_hosts": "Inclure les hôtes de redirection", 13 | "include_stream_hosts": "Inclure les hôtes de flux", 14 | "include_dead_hosts": "Inclure les hôtes morts", 15 | "include_enable_disable_count_sensors": "Inclure les capteurs de comptage activés/désactivés" 16 | } 17 | } 18 | }, 19 | "error": { 20 | "auth": "Identifiant ou mot de passe erroné." 21 | }, 22 | "abort": { 23 | "single_instance_allowed": "Une seule instance est autorisée." 
24 | } 25 | }, 26 | "options": { 27 | "step": { 28 | "user": { 29 | "data": { 30 | "binary_sensor": "Capteur binaire activé", 31 | "sensor": "Capteur activé", 32 | "switch": "Interrupteur activé" 33 | } 34 | } 35 | } 36 | } 37 | } -------------------------------------------------------------------------------- /custom_components/npm_switches/translations/nb.json: -------------------------------------------------------------------------------- 1 | { 2 | "config": { 3 | "step": { 4 | "user": { 5 | "title": "NPM", 6 | "description": "Hvis du trenger hjep til konfigurasjon ta en titt her: https://github.com/InTheDaylight14/nginx-proxy-manager-switches", 7 | "data": { 8 | "username": "Brukernavn", 9 | "password": "Passord", 10 | "npm_url": "http://ip:port", 11 | "include_proxy_hosts": "Inkluder proxy-verter", 12 | "include_redirection_hosts": "Inkluder omdirigeringsverter", 13 | "include_stream_hosts": "Inkluder strømverter", 14 | "include_dead_hosts": "Inkluder døde verter", 15 | "include_enable_disable_count_sensors": "Inkluder aktiverte/deaktiverte tellesensorer" 16 | } 17 | } 18 | }, 19 | "error": { 20 | "auth": "Brukernavn/Passord er feil." 21 | }, 22 | "abort": { 23 | "single_instance_allowed": "Denne integrasjonen kan kun konfigureres en gang." 
24 | } 25 | }, 26 | "options": { 27 | "step": { 28 | "user": { 29 | "data": { 30 | "binary_sensor": "Binær sensor aktivert", 31 | "sensor": "Sensor aktivert", 32 | "switch": "Bryter aktivert" 33 | } 34 | } 35 | } 36 | } 37 | } -------------------------------------------------------------------------------- /hacs.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "NPM Switches", 3 | "hacs": "1.6.0", 4 | "homeassistant": "2024.1.1" 5 | } -------------------------------------------------------------------------------- /info.md: -------------------------------------------------------------------------------- 1 | [![GitHub Release][releases-shield]][releases] 2 | [![GitHub Activity][commits-shield]][commits] 3 | [![License][license-shield]][license] 4 | 5 | [![hacs][hacsbadge]][hacs] 6 | [![Project Maintenance][maintenance-shield]][user_profile] 7 | 8 | [![Community Forum][forum-shield]][forum] 9 | 10 | 11 | **This component will set up the following platforms.** 12 | 13 | Platform | Description 14 | -- | -- 15 | `sensor` | Show info from API. 16 | `switch` | Switch proxy hosts to `Enabled` or `Disabled`. 17 | 18 | 19 | 20 | {% if not installed %} 21 | ## Installation 22 | 23 | 1. Click install. 24 | 1. In the HA UI go to "Configuration" -> "Integrations" click "+" and search for "NPM Switches". 
25 | 26 | {% endif %} 27 | 28 | _Component built with [integration_blueprint][integration_blueprint]._ 29 | 30 | 31 | 32 | *** 33 | 34 | [integration_blueprint]: https://github.com/custom-components/integration_blueprint 35 | [buymecoffee]: https://www.buymeacoffee.com/ludeeus 36 | [buymecoffeebadge]: https://img.shields.io/badge/buy%20me%20a%20coffee-donate-yellow.svg?style=for-the-badge 37 | [commits-shield]: https://img.shields.io/github/commit-activity/w/InTheDaylight14/nginx-proxy-manager-switches?style=for-the-badge 38 | [commits]: https://github.com/InTheDaylight14/nginx-proxy-manager-switches/commits/master 39 | [hacs]: https://hacs.xyz 40 | [hacsbadge]: https://img.shields.io/badge/HACS-Custom-orange.svg?style=for-the-badge 41 | 42 | [forum-shield]: https://img.shields.io/badge/community-forum-brightgreen.svg?style=for-the-badge 43 | [forum]: https://community.home-assistant.io/ 44 | [license-shield]: https://img.shields.io/github/license/InTheDaylight14/nginx-proxy-manager-switches?style=for-the-badge 45 | [maintenance-shield]: https://img.shields.io/badge/maintainer-@InTheDaylight14-blue.svg?style=for-the-badge 46 | [releases-shield]: https://img.shields.io/github/release/InTheDaylight14/nginx-proxy-manager-switches?style=for-the-badge 47 | [releases]: https://github.com/InTheDaylight14/nginx-proxy-manager-switches/releases 48 | [user_profile]: https://github.com/InTheDaylight14 49 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | colorlog==6.9.0 2 | homeassistant==2024.11.0 3 | pip>=21.3.1 4 | ruff==0.7.2 5 | -------------------------------------------------------------------------------- /requirements_dev.txt: -------------------------------------------------------------------------------- 1 | homeassistant 2 | -------------------------------------------------------------------------------- /requirements_test.txt: 
-------------------------------------------------------------------------------- 1 | pytest-homeassistant-custom-component 2 | homeassistant 3 | pytest 4 | 5 | -------------------------------------------------------------------------------- /scripts/develop: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | cd "$(dirname "$0")/.." 6 | 7 | # Create config dir if not present 8 | if [[ ! -d "${PWD}/config" ]]; then 9 | mkdir -p "${PWD}/config" 10 | hass --config "${PWD}/config" --script ensure_config 11 | fi 12 | 13 | # Set the path to custom_components 14 | ## This let's us have the structure we want /custom_components/integration_blueprint 15 | ## while at the same time have Home Assistant configuration inside /config 16 | ## without resulting to symlinks. 17 | export PYTHONPATH="${PYTHONPATH}:${PWD}/custom_components" 18 | 19 | # Start Home Assistant 20 | hass --config "${PWD}/config" --debug 21 | -------------------------------------------------------------------------------- /scripts/lint: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | cd "$(dirname "$0")/.." 6 | 7 | ruff format . 8 | ruff check . --fix 9 | -------------------------------------------------------------------------------- /scripts/setup: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | cd "$(dirname "$0")/.." 
6 | 7 | python3 -m pip install --requirement requirements.txt 8 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build 3 | doctests = True 4 | # To work with Black 5 | max-line-length = 88 6 | # E501: line too long 7 | # W503: Line break occurred before a binary operator 8 | # E203: Whitespace before ':' 9 | # D202 No blank lines allowed after function docstring 10 | # W504 line break after binary operator 11 | ignore = 12 | E501, 13 | W503, 14 | E203, 15 | D202, 16 | W504 17 | 18 | [isort] 19 | # https://github.com/timothycrosley/isort 20 | # https://github.com/timothycrosley/isort/wiki/isort-Settings 21 | # splits long import on multiple lines indented by 4 spaces 22 | multi_line_output = 3 23 | include_trailing_comma=True 24 | force_grid_wrap=0 25 | use_parentheses=True 26 | line_length=88 27 | indent = " " 28 | # by default isort don't check module indexes 29 | not_skip = __init__.py 30 | # will group `import x` and `from x import` of the same module. 31 | force_sort_within_sections = true 32 | sections = FUTURE,STDLIB,INBETWEENS,THIRDPARTY,FIRSTPARTY,LOCALFOLDER 33 | default_section = THIRDPARTY 34 | known_first_party = custom_components.npm_switches, tests 35 | combine_as_imports = true 36 | -------------------------------------------------------------------------------- /tests/README.md: -------------------------------------------------------------------------------- 1 | # Why? 2 | 3 | While tests aren't required to publish a custom component for Home Assistant, they will generally make development easier because good tests will expose when changes you want to make to the component logic will break expected functionality. 
Home Assistant uses [`pytest`](https://docs.pytest.org/en/latest/) for its tests, and the tests that have been included are modeled after tests that are written for core Home Assistant integrations. These tests pass with 100% coverage (unless something has changed ;) ) and have comments to help you understand the purpose of different parts of the test. 4 | 5 | # Getting Started 6 | 7 | To begin, it is recommended to create a virtual environment to install dependencies: 8 | ```bash 9 | python3 -m venv venv 10 | source venv/bin/activate 11 | ``` 12 | 13 | You can then install the dependencies that will allow you to run tests: 14 | `pip3 install -r requirements_test.txt` 15 | 16 | This will install `homeassistant`, `pytest`, and `pytest-homeassistant-custom-component`, a plugin which allows you to leverage helpers that are available in Home Assistant for core integration tests. 17 | 18 | # Useful commands 19 | 20 | Command | Description 21 | ------- | ----------- 22 | `pytest tests/` | This will run all tests in `tests/` and tell you how many passed/failed 23 | `pytest --durations=10 --cov-report term-missing --cov=custom_components.integration_blueprint tests` | This tells `pytest` that your target module to test is `custom_components.integration_blueprint` so that it can give you a [code coverage](https://en.wikipedia.org/wiki/Code_coverage) summary, including % of code that was executed and the line numbers of missed executions.
24 | `pytest tests/test_init.py -k test_setup_unload_and_reload_entry` | Runs the `test_setup_unload_and_reload_entry` test function located in `tests/test_init.py` 25 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | """Tests for integration_blueprint integration.""" 2 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | """Global fixtures for integration_blueprint integration.""" 2 | # Fixtures allow you to replace functions with a Mock object. You can perform 3 | # many options via the Mock to reflect a particular behavior from the original 4 | # function that you want to see without going through the function's actual logic. 5 | # Fixtures can either be passed into tests as parameters, or if autouse=True, they 6 | # will automatically be used across all tests. 7 | # 8 | # Fixtures that are defined in conftest.py are available across all tests. You can also 9 | # define fixtures within a particular test file to scope them locally. 10 | # 11 | # pytest_homeassistant_custom_component provides some fixtures that are provided by 12 | # Home Assistant core. You can find those fixture definitions here: 13 | # https://github.com/MatthewFlamm/pytest-homeassistant-custom-component/blob/master/pytest_homeassistant_custom_component/common.py 14 | # 15 | # See here for more info: https://docs.pytest.org/en/latest/fixture.html (note that 16 | # pytest includes fixtures OOB which you can use as defined on this page) 17 | from unittest.mock import patch 18 | 19 | from .const import MOCK_PROXY_HOSTS_DICT, MOCK_TOKEN 20 | 21 | import pytest 22 | 23 | pytest_plugins = "pytest_homeassistant_custom_component" 24 | 25 | 26 | # This fixture enables loading custom integrations in all tests. 
27 | # Remove to enable selective use of this fixture 28 | @pytest.fixture(autouse=True) 29 | def auto_enable_custom_integrations(enable_custom_integrations): 30 | yield 31 | 32 | 33 | # This fixture is used to prevent HomeAssistant from attempting to create and dismiss persistent 34 | # notifications. These calls would fail without this fixture since the persistent_notification 35 | # integration is never loaded during a test. 36 | @pytest.fixture(name="skip_notifications", autouse=True) 37 | def skip_notifications_fixture(): 38 | """Skip notification calls.""" 39 | with patch("homeassistant.components.persistent_notification.async_create"), patch( 40 | "homeassistant.components.persistent_notification.async_dismiss" 41 | ): 42 | yield 43 | 44 | 45 | # This fixture, when used, will result in calls to get_proxy_hosts to return None. To have the call 46 | # return a value, we would add the `return_value=` parameter to the patch call. 47 | @pytest.fixture(name="bypass_get_data") 48 | def bypass_get_data_fixture(): 49 | """Skip calls to get data from API.""" 50 | with patch( 51 | "custom_components.npm_switches.NpmSwitchesApiClient.get_proxy_hosts", 52 | return_value=MOCK_PROXY_HOSTS_DICT, 53 | ): 54 | yield 55 | 56 | 57 | @pytest.fixture(name="bypass_get_data_api") 58 | def bypass_get_data_api_fixture(): 59 | """Skip calls to get data from API.""" 60 | with patch( 61 | "custom_components.npm_switches.NpmSwitchesApiClient.get_proxy_hosts", 62 | return_value=MOCK_PROXY_HOSTS_DICT, 63 | ): 64 | yield 65 | 66 | 67 | @pytest.fixture(name="bypass_new_token") 68 | def bypass_get_new_token_fixture(): 69 | """Skip calls to get data from API.""" 70 | with patch( 71 | "custom_components.npm_switches.NpmSwitchesApiClient.async_get_new_token", 72 | return_value=MOCK_TOKEN, 73 | ): 74 | yield 75 | 76 | 77 | # @pytest.fixture(name="bypass_get_data") 78 | # def bypass_get_data_fixture(): 79 | # """Skip calls to get data from API.""" 80 | # with 
patch("custom_components.npm_switches.NpmSwitchesApiClient.get_proxy_hosts"): 81 | # yield 82 | 83 | ##I don't know if we need this long-term??? 84 | # @pytest.fixture(name="bypass_check_token_expiration") 85 | # def bypass_check_token_expiration(): 86 | # """Skip calls to check token expiration.""" 87 | # with patch( 88 | # "custom_components.npm_switches.NpmSwitchesApiClient.async_check_token_expiration" 89 | # ): 90 | # yield 91 | 92 | 93 | # In this fixture, we are forcing calls to async_get_data to raise an Exception. This is useful 94 | # for exception handling. 95 | @pytest.fixture(name="error_on_get_data") 96 | def error_get_data_fixture(): 97 | """Simulate error when retrieving data from API.""" 98 | with patch( 99 | "custom_components.npm_switches.NpmSwitchesApiClient.async_get_data", 100 | side_effect=Exception, 101 | ): 102 | yield 103 | 104 | 105 | @pytest.fixture(name="error_on_get_new_token") 106 | def error_get_new_token_fixture(): 107 | """Simulate error when retrieving data from API.""" 108 | with patch( 109 | "custom_components.npm_switches.NpmSwitchesApiClient.async_get_data", 110 | side_effect=Exception, 111 | ): 112 | yield 113 | -------------------------------------------------------------------------------- /tests/const.py: -------------------------------------------------------------------------------- 1 | """Constants for integration_blueprint tests.""" 2 | from custom_components.npm_switches.const import ( 3 | CONF_PASSWORD, 4 | CONF_USERNAME, 5 | CONF_NPM_URL, 6 | ) 7 | 8 | # Mock config data to be used across multiple tests 9 | MOCK_CONFIG = { 10 | CONF_USERNAME: "test_username", 11 | CONF_PASSWORD: "test_password", 12 | CONF_NPM_URL: "http://test:81", 13 | } 14 | 15 | MOCK_NPM_URL = "http://test:81" 16 | 17 | MOCK_TOKEN = { 18 | "token": "abcd12345", 19 | "expires": "2023-01-25T01:37:00.107Z", 20 | } 21 | 22 | MOCK_PROXY_HOSTS_LIST = [ 23 | { 24 | "id": 33, 25 | "created_on": "2022-11-27T22:46:21.000Z", 26 | "modified_on": 
"2022-12-11T22:48:53.000Z", 27 | "owner_user_id": 1, 28 | "domain_names": ["my.domain.com"], 29 | "forward_host": "192.168.1.1", 30 | "forward_port": 8123, 31 | "access_list_id": 0, 32 | "certificate_id": 35, 33 | "ssl_forced": 0, 34 | "caching_enabled": 0, 35 | "block_exploits": 0, 36 | "advanced_config": "", 37 | "meta": { 38 | "letsencrypt_agree": False, 39 | "dns_challenge": False, 40 | "nginx_online": True, 41 | "nginx_err": None, 42 | }, 43 | "allow_websocket_upgrade": 0, 44 | "http2_support": 0, 45 | "forward_scheme": "https", 46 | "enabled": 1, 47 | "locations": [], 48 | "hsts_enabled": 0, 49 | "hsts_subdomains": 0, 50 | }, 51 | { 52 | "id": 32, 53 | "created_on": "2022-11-17T00:49:25.000Z", 54 | "modified_on": "2023-01-24T00:36:53.000Z", 55 | "owner_user_id": 1, 56 | "domain_names": ["other.domain.com"], 57 | "forward_host": "192.168.1.2", 58 | "forward_port": 8080, 59 | "access_list_id": 0, 60 | "certificate_id": 35, 61 | "ssl_forced": 0, 62 | "caching_enabled": 0, 63 | "block_exploits": 1, 64 | "advanced_config": "", 65 | "meta": { 66 | "letsencrypt_agree": False, 67 | "dns_challenge": False, 68 | "nginx_online": True, 69 | "nginx_err": None, 70 | }, 71 | "allow_websocket_upgrade": 1, 72 | "http2_support": 0, 73 | "forward_scheme": "http", 74 | "enabled": 0, 75 | "locations": [], 76 | "hsts_enabled": 0, 77 | "hsts_subdomains": 0, 78 | }, 79 | ] 80 | 81 | MOCK_PROXY_HOSTS_DICT = { 82 | "33": { 83 | "id": 33, 84 | "created_on": "2022-11-27T22:46:21.000Z", 85 | "modified_on": "2022-12-11T22:48:53.000Z", 86 | "owner_user_id": 1, 87 | "domain_names": ["my.domain.com"], 88 | "forward_host": "192.168.1.1", 89 | "forward_port": 8123, 90 | "access_list_id": 0, 91 | "certificate_id": 35, 92 | "ssl_forced": 0, 93 | "caching_enabled": 0, 94 | "block_exploits": 0, 95 | "advanced_config": "", 96 | "meta": { 97 | "letsencrypt_agree": False, 98 | "dns_challenge": False, 99 | "nginx_online": True, 100 | "nginx_err": None, 101 | }, 102 | "allow_websocket_upgrade": 0, 103 
| "http2_support": 0, 104 | "forward_scheme": "https", 105 | "enabled": 1, 106 | "locations": [], 107 | "hsts_enabled": 0, 108 | "hsts_subdomains": 0, 109 | }, 110 | "32": { 111 | "id": 32, 112 | "created_on": "2022-11-17T00:49:25.000Z", 113 | "modified_on": "2023-01-24T00:36:53.000Z", 114 | "owner_user_id": 1, 115 | "domain_names": ["other.domain.com"], 116 | "forward_host": "192.168.1.2", 117 | "forward_port": 8080, 118 | "access_list_id": 0, 119 | "certificate_id": 35, 120 | "ssl_forced": 0, 121 | "caching_enabled": 0, 122 | "block_exploits": 1, 123 | "advanced_config": "", 124 | "meta": { 125 | "letsencrypt_agree": False, 126 | "dns_challenge": False, 127 | "nginx_online": True, 128 | "nginx_err": None, 129 | }, 130 | "allow_websocket_upgrade": 1, 131 | "http2_support": 0, 132 | "forward_scheme": "http", 133 | "enabled": 0, 134 | "locations": [], 135 | "hsts_enabled": 0, 136 | "hsts_subdomains": 0, 137 | }, 138 | } 139 | -------------------------------------------------------------------------------- /tests/pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | markers = 3 | asyncio: asyncio mark 4 | 5 | -------------------------------------------------------------------------------- /tests/test_api.py: -------------------------------------------------------------------------------- 1 | """Tests for integration_blueprint api.""" 2 | import asyncio 3 | import pytest 4 | 5 | import aiohttp 6 | from homeassistant.helpers.aiohttp_client import async_get_clientsession 7 | from homeassistant.util import dt 8 | from custom_components.npm_switches.api import NpmSwitchesApiClient 9 | 10 | from .const import ( 11 | MOCK_NPM_URL, 12 | MOCK_PROXY_HOSTS_LIST, 13 | MOCK_PROXY_HOSTS_DICT, 14 | MOCK_TOKEN, 15 | ) 16 | 17 | pytestmark = pytest.mark.asyncio 18 | 19 | 20 | async def test_api(hass, aioclient_mock, caplog): 21 | """Test API calls.""" 22 | 23 | # To test the api submodule, we first create an instance of our API 
client 24 | api = NpmSwitchesApiClient( 25 | "test", "test", "http://test:81", async_get_clientsession(hass) 26 | ) 27 | 28 | aioclient_mock.post( 29 | MOCK_NPM_URL + "/api/tokens", 30 | json=MOCK_TOKEN, 31 | ) 32 | 33 | await api.async_get_new_token() 34 | assert api._token == MOCK_TOKEN["token"] 35 | assert api._token_expires == dt.parse_datetime(MOCK_TOKEN["expires"]) 36 | 37 | aioclient_mock.get( 38 | MOCK_NPM_URL + "/api/nginx/proxy-hosts", 39 | json=MOCK_PROXY_HOSTS_LIST, 40 | ) 41 | 42 | # print(await api.get_proxy_hosts()) 43 | assert await api.get_proxy_hosts() == MOCK_PROXY_HOSTS_DICT 44 | 45 | assert api.get_npm_url == MOCK_NPM_URL 46 | 47 | # In order to get 100% coverage, we need to test `api_wrapper` to test the code 48 | # that isn't already called by `async_get_data` and `async_set_title`. Because the 49 | # only logic that lives inside `api_wrapper` that is not being handled by a third 50 | # party library (aiohttp) is the exception handling, we also want to simulate 51 | # raising the exceptions to ensure that the function handles them as expected. 52 | # The caplog fixture allows access to log messages in tests. 
This is particularly 53 | # useful during exception handling testing since often the only action as part of 54 | # exception handling is a logging statement 55 | caplog.clear() 56 | aioclient_mock.put( 57 | "https://jsonplaceholder.typicode.com/posts/1", exc=asyncio.TimeoutError 58 | ) 59 | assert ( 60 | await api.api_wrapper("put", "https://jsonplaceholder.typicode.com/posts/1") 61 | is None 62 | ) 63 | assert ( 64 | len(caplog.record_tuples) == 1 65 | and "Timeout error fetching information from" in caplog.record_tuples[0][2] 66 | ) 67 | 68 | caplog.clear() 69 | aioclient_mock.post( 70 | "https://jsonplaceholder.typicode.com/posts/1", exc=aiohttp.ClientError 71 | ) 72 | assert ( 73 | await api.api_wrapper("post", "https://jsonplaceholder.typicode.com/posts/1") 74 | is None 75 | ) 76 | assert ( 77 | len(caplog.record_tuples) == 1 78 | and "Error fetching information from" in caplog.record_tuples[0][2] 79 | ) 80 | 81 | caplog.clear() 82 | aioclient_mock.post("https://jsonplaceholder.typicode.com/posts/2", exc=Exception) 83 | assert ( 84 | await api.api_wrapper("post", "https://jsonplaceholder.typicode.com/posts/2") 85 | is None 86 | ) 87 | assert ( 88 | len(caplog.record_tuples) == 1 89 | and "Something really wrong happened!" 
in caplog.record_tuples[0][2] 90 | ) 91 | 92 | caplog.clear() 93 | aioclient_mock.post("https://jsonplaceholder.typicode.com/posts/3", exc=TypeError) 94 | assert ( 95 | await api.api_wrapper("post", "https://jsonplaceholder.typicode.com/posts/3") 96 | is None 97 | ) 98 | assert ( 99 | len(caplog.record_tuples) == 1 100 | and "Error parsing information from" in caplog.record_tuples[0][2] 101 | ) 102 | -------------------------------------------------------------------------------- /tests/test_config_flow.py: -------------------------------------------------------------------------------- 1 | """Test integration_blueprint config flow.""" 2 | from unittest.mock import patch 3 | 4 | from homeassistant import config_entries, data_entry_flow 5 | import pytest 6 | from pytest_homeassistant_custom_component.common import MockConfigEntry 7 | 8 | from custom_components.npm_switches.const import ( 9 | DOMAIN, 10 | PLATFORMS, 11 | SENSOR, 12 | SWITCH, 13 | ) 14 | 15 | from .const import MOCK_CONFIG 16 | 17 | pytestmark = pytest.mark.asyncio 18 | 19 | 20 | # This fixture bypasses the actual setup of the integration 21 | # since we only want to test the config flow. We test the 22 | # actual functionality of the integration in other test modules. 23 | @pytest.fixture(autouse=True) 24 | def bypass_setup_fixture(): 25 | """Prevent setup.""" 26 | with patch("custom_components.npm_switches.async_setup", return_value=True,), patch( 27 | "custom_components.npm_switches.async_setup_entry", 28 | return_value=True, 29 | ): 30 | yield 31 | 32 | 33 | # Here we simulate a successful config flow from the backend. 34 | # Note that we use the `bypass_new_token` fixture here because 35 | # we want the config flow validation to succeed during the test.
36 | async def test_successful_config_flow(hass, bypass_new_token): 37 | """Test a successful config flow.""" 38 | # Initialize a config flow 39 | result = await hass.config_entries.flow.async_init( 40 | DOMAIN, context={"source": config_entries.SOURCE_USER} 41 | ) 42 | 43 | # Check that the config flow shows the user form as the first step 44 | assert result["type"] == data_entry_flow.RESULT_TYPE_FORM 45 | assert result["step_id"] == "user" 46 | 47 | # If a user were to enter `test_username` for username and `test_password` 48 | # for password, it would result in this function call 49 | result = await hass.config_entries.flow.async_configure( 50 | result["flow_id"], user_input=MOCK_CONFIG 51 | ) 52 | 53 | # Check that the config flow is complete and a new entry is created with 54 | # the input data 55 | assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY 56 | assert result["title"] == "test_username" 57 | assert result["data"] == MOCK_CONFIG 58 | assert result["result"] 59 | 60 | 61 | # In this case, we want to simulate a failure during the config flow. 62 | # We use the `error_on_get_new_token` mock instead of `bypass_new_token` 63 | # (note the function parameters) to raise an Exception during 64 | # validation of the input config.
65 | async def test_failed_config_flow(hass, error_on_get_new_token): 66 | """Test a failed config flow due to credential validation failure.""" 67 | print("Can I see this?") 68 | assert True 69 | result = await hass.config_entries.flow.async_init( 70 | DOMAIN, context={"source": config_entries.SOURCE_USER} 71 | ) 72 | 73 | assert result["type"] == data_entry_flow.RESULT_TYPE_FORM 74 | assert result["step_id"] == "user" 75 | 76 | result = await hass.config_entries.flow.async_configure( 77 | result["flow_id"], user_input=MOCK_CONFIG 78 | ) 79 | 80 | assert result["type"] == data_entry_flow.RESULT_TYPE_FORM 81 | assert result["errors"] == {"base": "auth"} 82 | 83 | 84 | # Our config flow also has an options flow, so we must test it as well. 85 | async def test_options_flow(hass): 86 | """Test an options flow.""" 87 | # Create a new MockConfigEntry and add to HASS (we're bypassing config 88 | # flow entirely) 89 | entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG, entry_id="test") 90 | entry.add_to_hass(hass) 91 | 92 | # Initialize an options flow 93 | result = await hass.config_entries.options.async_init(entry.entry_id) 94 | 95 | # Verify that the first options step is a user form 96 | assert result["type"] == data_entry_flow.RESULT_TYPE_FORM 97 | assert result["step_id"] == "user" 98 | 99 | # Enter some fake data into the form 100 | result = await hass.config_entries.options.async_configure( 101 | result["flow_id"], 102 | user_input={platform: platform != SENSOR for platform in PLATFORMS}, 103 | ) 104 | 105 | # Verify that the flow finishes 106 | assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY 107 | assert result["title"] == "test_username" 108 | 109 | # Verify that the options were updated 110 | assert entry.options == {SENSOR: False, SWITCH: True} 111 | -------------------------------------------------------------------------------- /tests/test_init.py: -------------------------------------------------------------------------------- 1 | 
"""Test integration_blueprint setup process.""" 2 | from homeassistant.exceptions import ConfigEntryNotReady 3 | import pytest 4 | from pytest_homeassistant_custom_component.common import MockConfigEntry 5 | 6 | from custom_components.npm_switches import ( 7 | NpmSwitchesUpdateCoordinator, 8 | async_reload_entry, 9 | async_setup_entry, 10 | async_unload_entry, 11 | ) 12 | from custom_components.npm_switches.const import DOMAIN 13 | 14 | from .const import MOCK_CONFIG 15 | 16 | pytestmark = pytest.mark.asyncio 17 | 18 | # We can pass fixtures as defined in conftest.py to tell pytest to use the fixture 19 | # for a given test. We can also leverage fixtures and mocks that are available in 20 | # Home Assistant using the pytest_homeassistant_custom_component plugin. 21 | # Assertions allow you to verify that the return value of whatever is on the left 22 | # side of the assertion matches with the right side. 23 | async def test_setup_unload_and_reload_entry(hass, bypass_get_data, bypass_new_token): 24 | """Test entry setup and unload.""" 25 | # Create a mock entry so we don't have to go through config flow 26 | config_entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG, entry_id="test") 27 | 28 | # Set up the entry and assert that the values set during setup are where we expect 29 | # them to be. Because we have patched the NpmSwitchesUpdateCoordinator.async_get_data 30 | # call, no code from custom_components/integration_blueprint/api.py actually runs. 
31 | assert await async_setup_entry(hass, config_entry) 32 | assert DOMAIN in hass.data and config_entry.entry_id in hass.data[DOMAIN] 33 | assert ( 34 | type(hass.data[DOMAIN][config_entry.entry_id]) == NpmSwitchesUpdateCoordinator 35 | ) 36 | 37 | # Reload the entry and assert that the data from above is still there 38 | assert await async_reload_entry(hass, config_entry) is None 39 | assert DOMAIN in hass.data and config_entry.entry_id in hass.data[DOMAIN] 40 | assert ( 41 | type(hass.data[DOMAIN][config_entry.entry_id]) == NpmSwitchesUpdateCoordinator 42 | ) 43 | 44 | # Unload the entry and verify that the data has been removed 45 | assert await async_unload_entry(hass, config_entry) 46 | assert config_entry.entry_id not in hass.data[DOMAIN] 47 | 48 | 49 | async def test_setup_entry_exception(hass, error_on_get_new_token): 50 | """Test ConfigEntryNotReady when API raises an exception during entry setup.""" 51 | config_entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONFIG, entry_id="test") 52 | 53 | # In this case we are testing the condition where async_setup_entry raises 54 | # ConfigEntryNotReady using the `error_on_get_data` fixture which simulates 55 | # an error. 
56 | with pytest.raises(ConfigEntryNotReady): 57 | assert await async_setup_entry(hass, config_entry) 58 | -------------------------------------------------------------------------------- /tests/test_sensor.py: -------------------------------------------------------------------------------- 1 | """Test npm switches sensor.""" 2 | from unittest.mock import call, patch 3 | 4 | from homeassistant.components.switch import SERVICE_TURN_OFF, SERVICE_TURN_ON 5 | from homeassistant.const import ATTR_ENTITY_ID 6 | from homeassistant.core import HomeAssistant 7 | from pytest_homeassistant_custom_component.common import MockConfigEntry 8 | from homeassistant.helpers import entity_registry as er 9 | 10 | from custom_components.npm_switches import async_setup_entry 11 | from custom_components.npm_switches.const import ( 12 | DEFAULT_NAME, 13 | DOMAIN, 14 | SENSOR, 15 | ) 16 | 17 | from .const import ( 18 | MOCK_CONFIG, 19 | MOCK_PROXY_HOSTS_DICT, 20 | MOCK_PROXY_HOSTS_LIST, 21 | MOCK_NPM_URL, 22 | ) 23 | 24 | import pytest 25 | 26 | pytestmark = pytest.mark.asyncio 27 | 28 | 29 | async def test_registry_entries(hass, aioclient_mock, bypass_new_token): 30 | """Tests sensors are registered in the entity registry.""" 31 | entry_id = "test" 32 | config_entry = MockConfigEntry( 33 | domain=DOMAIN, data=MOCK_CONFIG, entry_id=entry_id, options=None 34 | ) 35 | 36 | # Mock the api call to get proxy data, this allows setup to complete successfully. 
37 | aioclient_mock.get( 38 | MOCK_NPM_URL + "/api/nginx/proxy-hosts", 39 | json=MOCK_PROXY_HOSTS_LIST, 40 | ) 41 | 42 | assert await async_setup_entry(hass, config_entry) 43 | await hass.async_block_till_done() 44 | 45 | entity_registry = er.async_get(hass) 46 | 47 | entry = entity_registry.async_get("sensor.npm_enabled_proxy_hosts") 48 | assert entry.unique_id == entry_id + "_npm_enabled_proxy_hosts" 49 | 50 | entry = entity_registry.async_get("sensor.npm_disabled_proxy_hosts") 51 | assert entry.unique_id == entry_id + "_npm_disabled_proxy_hosts" 52 | 53 | 54 | async def test_sensor_states(hass, aioclient_mock, bypass_new_token): 55 | """Test sensor states.""" 56 | # Create a mock entry so we don't have to go through config flow 57 | config_entry = MockConfigEntry( 58 | domain=DOMAIN, data=MOCK_CONFIG, entry_id="test", options=None 59 | ) 60 | 61 | # Mock the api call to get proxy data, this allows setup to complete successfully. 62 | aioclient_mock.get( 63 | MOCK_NPM_URL + "/api/nginx/proxy-hosts", 64 | json=MOCK_PROXY_HOSTS_LIST, 65 | ) 66 | 67 | assert await async_setup_entry(hass, config_entry) 68 | await hass.async_block_till_done() 69 | 70 | # Retrieve state of the enabled sensor 71 | state = hass.states.get("sensor.npm_enabled_proxy_hosts") 72 | proxy_id = str(state.attributes["id"]) 73 | 74 | assert state.state == "1" 75 | 76 | # Retrieve state of the disabled sensor 77 | state = hass.states.get("sensor.npm_disabled_proxy_hosts") 78 | proxy_id = str(state.attributes["id"]) 79 | 80 | assert state.state == "1" 81 | -------------------------------------------------------------------------------- /tests/test_switch.py: -------------------------------------------------------------------------------- 1 | """Test integration_blueprint switch.""" 2 | from unittest.mock import call, patch 3 | 4 | from homeassistant.components.switch import SERVICE_TURN_OFF, SERVICE_TURN_ON 5 | from homeassistant.const import ATTR_ENTITY_ID 6 | from homeassistant.core import
HomeAssistant 7 | from pytest_homeassistant_custom_component.common import MockConfigEntry 8 | from homeassistant.helpers import entity_registry as er 9 | 10 | from custom_components.npm_switches import async_setup_entry 11 | from custom_components.npm_switches.const import ( 12 | DEFAULT_NAME, 13 | DOMAIN, 14 | SWITCH, 15 | ) 16 | 17 | from .const import ( 18 | MOCK_CONFIG, 19 | MOCK_PROXY_HOSTS_DICT, 20 | MOCK_PROXY_HOSTS_LIST, 21 | MOCK_NPM_URL, 22 | ) 23 | 24 | import pytest 25 | 26 | pytestmark = pytest.mark.asyncio 27 | 28 | 29 | async def test_registry_entries(hass, aioclient_mock, bypass_new_token): 30 | """Tests devices are registered in the entity registry.""" 31 | entry_id = "test" 32 | config_entry = MockConfigEntry( 33 | domain=DOMAIN, data=MOCK_CONFIG, entry_id=entry_id, options=None 34 | ) 35 | 36 | # Mock the api call to get proxy data, this allows setup to complete successfully. 37 | aioclient_mock.get( 38 | MOCK_NPM_URL + "/api/nginx/proxy-hosts", 39 | json=MOCK_PROXY_HOSTS_LIST, 40 | ) 41 | 42 | assert await async_setup_entry(hass, config_entry) 43 | await hass.async_block_till_done() 44 | 45 | entity_registry = er.async_get(hass) 46 | 47 | entry = entity_registry.async_get("switch.npm_my_domain_com") 48 | assert entry.unique_id == entry_id + "_npm_my_domain_com" 49 | 50 | entry = entity_registry.async_get("switch.npm_other_domain_com") 51 | assert entry.unique_id == entry_id + "_npm_other_domain_com" 52 | 53 | 54 | async def test_switch_services(hass, aioclient_mock, bypass_new_token): 55 | """Test switch services.""" 56 | # Create a mock entry so we don't have to go through config flow 57 | config_entry = MockConfigEntry( 58 | domain=DOMAIN, data=MOCK_CONFIG, entry_id="test", options=None 59 | ) 60 | 61 | # Mock the api call to get proxy data, this allows setup to complete successfully. 
62 | aioclient_mock.get( 63 | MOCK_NPM_URL + "/api/nginx/proxy-hosts", 64 | json=MOCK_PROXY_HOSTS_LIST, 65 | ) 66 | 67 | assert await async_setup_entry(hass, config_entry) 68 | await hass.async_block_till_done() 69 | 70 | # Retrieve state of switch entity to test 71 | state = hass.states.get("switch.npm_my_domain_com") 72 | proxy_id = str(state.attributes["id"]) 73 | 74 | # Mock enable and disable api calls for this entity, make them return True for a successful api call. 75 | aioclient_mock.post( 76 | MOCK_NPM_URL + "/api/nginx/proxy-hosts/" + proxy_id + "/disable", 77 | json=True, 78 | ) 79 | 80 | aioclient_mock.post( 81 | MOCK_NPM_URL + "/api/nginx/proxy-hosts/" + proxy_id + "/enable", 82 | json=True, 83 | ) 84 | 85 | # Ensure the enable/disable functions are called when turning the switch on/off 86 | with patch( 87 | "custom_components.npm_switches.NpmSwitchesApiClient.enable_proxy" 88 | ) as enable_proxy: 89 | await hass.services.async_call( 90 | SWITCH, 91 | SERVICE_TURN_ON, 92 | service_data={ATTR_ENTITY_ID: "switch.npm_my_domain_com"}, 93 | blocking=True, 94 | ) 95 | assert enable_proxy.called 96 | assert enable_proxy.call_args == call( 97 | str(MOCK_PROXY_HOSTS_DICT[proxy_id]["id"]) 98 | ) 99 | 100 | with patch( 101 | "custom_components.npm_switches.NpmSwitchesApiClient.disable_proxy" 102 | ) as disable_proxy: 103 | await hass.services.async_call( 104 | SWITCH, 105 | SERVICE_TURN_OFF, 106 | service_data={ATTR_ENTITY_ID: "switch.npm_my_domain_com"}, 107 | blocking=True, 108 | ) 109 | assert disable_proxy.called 110 | assert disable_proxy.call_args == call( 111 | str(MOCK_PROXY_HOSTS_DICT[proxy_id]["id"]) 112 | ) 113 | 114 | 115 | async def test_switch_states(hass, aioclient_mock, bypass_new_token): 116 | """Test switch states.""" 117 | # Create a mock entry so we don't have to go through config flow 118 | config_entry = MockConfigEntry( 119 | domain=DOMAIN, data=MOCK_CONFIG, entry_id="test", options=None 120 | ) 121 | 122 | # Mock the api call to get proxy
data, this allows setup to complete successfully. 123 | aioclient_mock.get( 124 | MOCK_NPM_URL + "/api/nginx/proxy-hosts", 125 | json=MOCK_PROXY_HOSTS_LIST, 126 | ) 127 | 128 | assert await async_setup_entry(hass, config_entry) 129 | await hass.async_block_till_done() 130 | 131 | for proxy_host in MOCK_PROXY_HOSTS_LIST: 132 | entity_id = "switch.npm_" + proxy_host["domain_names"][0].replace(".", "_") 133 | state = hass.states.get(entity_id) 134 | 135 | if proxy_host["enabled"] == 1: 136 | expected_state = "on" 137 | else: 138 | expected_state = "off" 139 | 140 | assert state.state == expected_state 141 | assert state.attributes["id"] == proxy_host["id"] 142 | assert state.attributes["domain_names"] == proxy_host["domain_names"] 143 | --------------------------------------------------------------------------------