├── custom_components
├── .gitignore
└── solcast_solar
│ ├── manifest.json
│ ├── system_health.py
│ ├── energy.py
│ ├── diagnostics.py
│ ├── services.yaml
│ ├── icons.json
│ ├── repairs.py
│ ├── select.py
│ ├── util.py
│ └── strings.json
├── tests
├── .gitignore
├── .coveragerc
├── aioresponses
│ ├── __init__.py
│ └── LICENCE
├── simulator
│ ├── __init__.py
│ └── simulate.py
├── conftest.py
├── test_system_health.py
├── test_energy.py
├── test_diagnostics.py
├── test_select.py
├── test_forecast_retry.py
├── test_specific_times.py
├── test_dampen_dst.py
├── test_repairs.py
└── wsgi_sim.py
├── .github
├── SCREENSHOTS
│ ├── conf.png
│ ├── damp.png
│ ├── diag.png
│ ├── azimuth.png
│ ├── dampopt.png
│ ├── install.png
│ ├── sensors.png
│ ├── Download.png
│ ├── reconfig.png
│ ├── SampleSites.png
│ ├── SolarPanels.png
│ ├── SolcastSolar.png
│ ├── azimuth_tilt.png
│ ├── AddIntegration.png
│ ├── ExcludeSites1.png
│ ├── ExcludeSites2.png
│ ├── RestartSubmit.png
│ ├── SolcastPVSolar.png
│ ├── SolcastService.png
│ ├── azimuth_house.png
│ ├── forecast_today.png
│ ├── RestartRequired.png
│ ├── dampening_chart.png
│ ├── solar_production.png
│ ├── solcast_plus_two.png
│ ├── Dampeningcomparison.png
│ ├── automated-dampening.png
│ ├── solcast_minus_one.jpeg
│ ├── SettingsNotification.png
│ ├── Setupanewintegration.png
│ ├── SuccessIssueRepaired.png
│ ├── OpenPageinyourHomeAssistant.png
│ ├── undampened_forecast_today.jpeg
│ └── example_span_offset_modifier.png
├── CODEOWNERS
├── workflows
│ ├── hassfest.yaml
│ ├── validate.yml
│ ├── stale.yml
│ └── release.yml
├── ISSUE_TEMPLATE
│ ├── config.yml
│ ├── feature_request.md
│ └── bug_report.md
└── release-config.yml
├── .gitignore
├── CODEOWNERS
├── hacs.json
├── DEVELOPERS.md
├── quality_checklist.md
├── LICENSE.md
├── ADVOPTIONS.md
└── FAQ.md
/custom_components/.gitignore:
--------------------------------------------------------------------------------
1 | *.sh
2 |
--------------------------------------------------------------------------------
/tests/.gitignore:
--------------------------------------------------------------------------------
1 | cert.pem
2 | key.pem
3 | .coverage
--------------------------------------------------------------------------------
/tests/.coveragerc:
--------------------------------------------------------------------------------
1 | [report]
2 | exclude_lines =
3 | if TYPE_CHECKING:
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/conf.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/conf.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/damp.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/damp.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/diag.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/diag.png
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | .vscode/settings.json
3 | .vscode
4 | .vs
5 | .pytest_cache
6 | Thumbs.db
7 | __pycache__
8 |
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/azimuth.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/azimuth.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/dampopt.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/dampopt.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/install.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/install.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/sensors.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/sensors.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/Download.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/Download.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/reconfig.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/reconfig.png
--------------------------------------------------------------------------------
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | # BJ Did this on a whim
2 | * @BJReplay
3 | # Welcome Steve Saunders from your Tokyo Drift
4 | * @autoSteve
5 |
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/SampleSites.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/SampleSites.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/SolarPanels.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/SolarPanels.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/SolcastSolar.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/SolcastSolar.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/azimuth_tilt.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/azimuth_tilt.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/AddIntegration.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/AddIntegration.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/ExcludeSites1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/ExcludeSites1.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/ExcludeSites2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/ExcludeSites2.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/RestartSubmit.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/RestartSubmit.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/SolcastPVSolar.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/SolcastPVSolar.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/SolcastService.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/SolcastService.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/azimuth_house.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/azimuth_house.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/forecast_today.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/forecast_today.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/RestartRequired.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/RestartRequired.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/dampening_chart.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/dampening_chart.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/solar_production.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/solar_production.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/solcast_plus_two.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/solcast_plus_two.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/Dampeningcomparison.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/Dampeningcomparison.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/automated-dampening.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/automated-dampening.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/solcast_minus_one.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/solcast_minus_one.jpeg
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/SettingsNotification.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/SettingsNotification.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/Setupanewintegration.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/Setupanewintegration.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/SuccessIssueRepaired.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/SuccessIssueRepaired.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/OpenPageinyourHomeAssistant.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/OpenPageinyourHomeAssistant.png
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/undampened_forecast_today.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/undampened_forecast_today.jpeg
--------------------------------------------------------------------------------
/.github/SCREENSHOTS/example_span_offset_modifier.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/BJReplay/ha-solcast-solar/HEAD/.github/SCREENSHOTS/example_span_offset_modifier.png
--------------------------------------------------------------------------------
/CODEOWNERS:
--------------------------------------------------------------------------------
1 | # BJ Did this on a whim
2 | * @BJReplay
3 | # Welcome Steve Saunders from your Tokyo Drift
4 | * @autoSteve
5 | # Not sure if CODEOWNERS goes in / or .github
6 |
--------------------------------------------------------------------------------
/hacs.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "Solcast PV Forecast",
3 | "render_readme": true,
4 | "homeassistant": "2025.3",
5 | "zip_release": true,
6 | "filename": "solcast_solar.zip"
7 | }
--------------------------------------------------------------------------------
/tests/aioresponses/__init__.py:
--------------------------------------------------------------------------------
1 | """Define the aioresponses package."""
2 |
3 | from .core import CallbackResult, aioresponses
4 |
5 | __all__ = [
6 | "CallbackResult",
7 | "aioresponses",
8 | ]
9 |
--------------------------------------------------------------------------------
/tests/simulator/__init__.py:
--------------------------------------------------------------------------------
1 | """Define the simulator package."""
2 |
3 | from .simulate import API_KEY_SITES, GENERATION_FACTOR, SimulatedSolcast
4 |
5 | __all__ = ["API_KEY_SITES", "GENERATION_FACTOR", "SimulatedSolcast"]
6 |
--------------------------------------------------------------------------------
/.github/workflows/hassfest.yaml:
--------------------------------------------------------------------------------
1 | name: Validate with hassfest
2 |
3 | on:
4 | push:
5 | pull_request:
6 | schedule:
7 | - cron: "0 0 * * *"
8 |
9 | permissions:
10 | contents: read
11 |
12 | jobs:
13 | validate:
14 | runs-on: "ubuntu-latest"
15 | steps:
16 | - uses: "actions/checkout@v4"
17 | - uses: home-assistant/actions/hassfest@master
--------------------------------------------------------------------------------
/.github/workflows/validate.yml:
--------------------------------------------------------------------------------
1 | name: Validate
2 |
3 | on:
4 | push:
5 | pull_request:
6 | schedule:
7 | - cron: "0 0 * * *"
8 | workflow_dispatch:
9 |
10 | permissions:
11 | contents: read
12 |
13 | jobs:
14 | validate-hacs:
15 | runs-on: "ubuntu-latest"
16 | steps:
17 | - uses: "actions/checkout@v4"
18 | - name: HACS validation
19 | uses: "hacs/action@main"
20 | with:
21 | category: "integration"
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | blank_issues_enabled: false
2 | contact_links:
3 | - name: I have a question or I would like support from the community
4 | url: https://github.com/BJReplay/ha-solcast-solar/discussions
5 | about: Check the discussions section to see if anyone has asked your question before, and ask the community for help
6 | - name: Feature Request
7 | url: https://github.com/BJReplay/ha-solcast-solar/discussions
8 | about: Please use the discussions section for discussing potential feature requests. A discussion that results in a feature being considered worthwhile may be considered for inclusion.
9 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: (FR)
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 |
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 |
--------------------------------------------------------------------------------
/custom_components/solcast_solar/manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "domain": "solcast_solar",
3 | "name": "Solcast PV Forecast",
4 | "after_dependencies": [
5 | "http"
6 | ],
7 | "codeowners": [
8 | "@BJReplay",
9 | "@autoSteve"
10 | ],
11 | "config_flow": true,
12 | "dependencies": [
13 | "homeassistant",
14 | "recorder",
15 | "select"
16 | ],
17 | "documentation": "https://github.com/BJReplay/ha-solcast-solar",
18 | "integration_type": "service",
19 | "iot_class": "cloud_polling",
20 | "issue_tracker": "https://github.com/BJReplay/ha-solcast-solar/issues",
21 | "requirements": [
22 | "aiohttp>=3.8.5",
23 | "aiofiles>=23.2.0",
24 | "watchdog>=6.0.0"
25 | ],
26 | "version": "4.4.10"
27 | }
--------------------------------------------------------------------------------
/custom_components/solcast_solar/system_health.py:
--------------------------------------------------------------------------------
1 | """Provide info to system health."""
2 |
3 | from __future__ import annotations
4 |
5 | from typing import Any
6 |
7 | from homeassistant.components import system_health
8 | from homeassistant.core import HomeAssistant, callback
9 |
10 | from .const import DEFAULT_SOLCAST_HTTPS_URL
11 |
12 |
13 | @callback
14 | def async_register(hass: HomeAssistant, register: system_health.SystemHealthRegistration) -> None:
15 | """Register system health callbacks."""
16 | register.domain = "Solcast Solar"
17 | register.async_register_info(system_health_info) # pyright: ignore[reportUnknownMemberType]
18 |
19 |
20 | async def system_health_info(hass: HomeAssistant) -> dict[str, Any]:
21 | """Get info for the info page."""
22 |
23 | return {
24 | "can_reach_server": system_health.async_check_can_reach_url(hass, DEFAULT_SOLCAST_HTTPS_URL),
25 | }
26 |
--------------------------------------------------------------------------------
/.github/release-config.yml:
--------------------------------------------------------------------------------
1 | name-template: 'v$NEXT_MINOR_VERSION'
2 | tag-template: 'v$NEXT_MINOR_VERSION'
3 | categories:
4 | - title: '🚀 Features'
5 | labels:
6 | - 'feature'
7 | - 'enhancement'
8 | - title: '🐛 Bug Fixes'
9 | labels:
10 | - 'fix'
11 | - 'bugfix'
12 | - 'bug'
13 | - title: '🧰 Maintenance'
14 | labels:
15 | - 'chore'
16 | - 'documentation'
17 | - 'miscellaneous'
18 | exclude-labels:
19 | - 'dependencies'
20 | change-template: '- $TITLE #$NUMBER (@$AUTHOR)'
21 | change-title-escapes: '\<*_&' # You can add # and @ to disable mentions, and add ` to disable code blocks.
22 | version-resolver:
23 | major:
24 | labels:
25 | - 'major'
26 | minor:
27 | labels:
28 | - 'minor'
29 | patch:
30 | labels:
31 | - 'patch'
32 | default: patch
33 | template: |
34 | $CHANGES
35 |
36 | 
37 |
--------------------------------------------------------------------------------
/.github/workflows/stale.yml:
--------------------------------------------------------------------------------
1 | # This workflow warns and then closes issues and PRs that have had no activity for a specified amount of time.
2 | #
3 | # You can adjust the behavior by modifying this file.
4 | # For more information, see:
5 | # https://github.com/actions/stale
6 | name: Mark stale issues and pull requests
7 |
8 | on:
9 | schedule:
10 | - cron: '30 * * * *'
11 |
12 | jobs:
13 | stale:
14 |
15 | runs-on: ubuntu-latest
16 | permissions:
17 | issues: write
18 |
19 | steps:
20 | - uses: actions/stale@v9
21 | with:
22 | repo-token: ${{ secrets.GITHUB_TOKEN }}
23 | stale-issue-message: 'This issue is stale because it has been open 7 days with no activity. Remove stale label or comment or this will be closed in 2 days.'
24 | stale-issue-label: 'no-issue-activity'
25 | close-issue-message: 'This issue was closed because it has been stalled for over a week with no activity.'
26 | days-before-issue-stale: 7
27 | days-before-issue-close: 2
28 |
29 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: "Release"
2 |
3 | on:
4 | release:
5 | types: [published, edited]
6 |
7 | permissions: {}
8 |
9 | jobs:
10 | release:
11 | name: "Release"
12 | runs-on: "ubuntu-latest"
13 | permissions:
14 | contents: write
15 | steps:
16 | - name: "Checkout the repository"
17 | uses: "actions/checkout@v4"
18 |
19 | - name: "Adjust version number"
20 | shell: "bash"
21 | run: |
22 | yq -i -o json '.version="${{ github.event.release.tag_name }}"' \
23 | "${{ github.workspace }}/custom_components/solcast_solar/manifest.json"
24 |
25 | - name: "ZIP the integration directory"
26 | shell: "bash"
27 | run: |
28 | cd "${{ github.workspace }}/custom_components/solcast_solar"
29 | zip solcast_solar.zip -r ./
30 |
31 | - name: "Upload the ZIP file to the release"
32 | uses: softprops/action-gh-release@v2
33 | with:
34 | files: ${{ github.workspace }}/custom_components/solcast_solar/solcast_solar.zip
35 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | """Test configuration for Solcast Solar integration."""
2 |
3 | from collections.abc import Generator
4 | from datetime import datetime as dt
5 | import logging
6 |
7 | import freezegun
8 | from freezegun.api import FrozenDateTimeFactory
9 | import pytest
10 |
11 | disable_loggers = [
12 | "homeassistant.core",
13 | "homeassistant.components.recorder.core",
14 | "homeassistant.components.recorder.pool",
15 | "homeassistant.components.recorder.pool.MutexPool",
16 | "sqlalchemy.engine.Engine",
17 | "watchdog.observers.inotify_buffer",
18 | "asyncio",
19 | ]
20 |
21 |
22 | def pytest_configure():
23 | """Disable loggers."""
24 |
25 | for logger_name in disable_loggers:
26 | logger = logging.getLogger(logger_name)
27 | logger.disabled = True
28 |
29 |
30 | @pytest.fixture(autouse=True)
31 | def frozen_time() -> Generator[FrozenDateTimeFactory]:
32 | """Freeze test time."""
33 |
34 | with freezegun.freeze_time(f"{dt.now().date()} 12:27:27", tz_offset=-10) as freeze:
35 | yield freeze # type: ignore[misc]
36 |
--------------------------------------------------------------------------------
/tests/aioresponses/LICENCE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2016 pnuckowski
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/custom_components/solcast_solar/energy.py:
--------------------------------------------------------------------------------
1 | """Energy platform."""
2 |
3 | from __future__ import annotations
4 |
5 | import logging
6 | from typing import Any
7 |
8 | from homeassistant.config_entries import ConfigEntry
9 | from homeassistant.core import HomeAssistant
10 |
11 | from . import SolcastUpdateCoordinator
12 | from .const import DOMAIN
13 |
14 | _LOGGER = logging.getLogger(__name__)
15 |
16 |
17 | async def async_get_solar_forecast(hass: HomeAssistant, config_entry_id: str) -> dict[str, Any] | None:
18 | """Get solar forecast for a config entry ID.
19 |
20 | Arguments:
21 | hass (HomeAssistant): The Home Assistant instance.
22 | config_entry_id (str): The integration entry ID.
23 |
24 | Returns:
25 | dict[str, Any] | None: The Energy Dashboard compatible forecast data
26 |
27 | """
28 |
29 | if not hass.data.get(DOMAIN):
30 | _LOGGER.warning("Domain %s is not yet available to provide forecast data", DOMAIN)
31 | return None
32 |
33 | entry: ConfigEntry | None = hass.config_entries.async_get_entry(config_entry_id)
34 | if (
35 | entry is None
36 | or (coordinator := entry.runtime_data.coordinator) is None
37 | or not isinstance(entry.runtime_data.coordinator, SolcastUpdateCoordinator)
38 | ):
39 | return None
40 |
41 | return coordinator.get_energy_tab_data()
42 |
--------------------------------------------------------------------------------
/tests/test_system_health.py:
--------------------------------------------------------------------------------
1 | """Tests for the Solcast Solar diagnostics and system health."""
2 |
3 | import logging
4 | from typing import Any
5 |
6 | from freezegun.api import FrozenDateTimeFactory
7 |
8 | from homeassistant.components.recorder import Recorder
9 | from homeassistant.core import HomeAssistant
10 | from homeassistant.setup import async_setup_component
11 |
12 | from . import DEFAULT_INPUT1, async_cleanup_integration_tests, async_init_integration
13 |
14 | _LOGGER = logging.getLogger(__name__)
15 |
16 | SYSTEM_HEALTH_DOMAIN = "Solcast Solar"
17 |
18 |
19 | async def get_system_health_info(hass: HomeAssistant, domain: str) -> dict[str, Any]:
20 | """Get system health info."""
21 | return await hass.data["system_health"][domain].info_callback(hass)
22 |
23 |
24 | async def test_system_health(
25 | recorder_mock: Recorder,
26 | hass: HomeAssistant,
27 | freezer: FrozenDateTimeFactory,
28 | ) -> None:
29 | """Test system health."""
30 |
31 | await async_init_integration(hass, DEFAULT_INPUT1)
32 |
33 | try:
34 | assert await async_setup_component(hass, "system_health", {})
35 | await hass.async_block_till_done()
36 |
37 | info = await get_system_health_info(hass, SYSTEM_HEALTH_DOMAIN)
38 | assert await info["can_reach_server"] == "ok"
39 |
40 | finally:
41 | assert await async_cleanup_integration_tests(hass)
42 |
--------------------------------------------------------------------------------
/custom_components/solcast_solar/diagnostics.py:
--------------------------------------------------------------------------------
1 | """Support for the Solcast diagnostics."""
2 |
3 | from __future__ import annotations
4 |
5 | from typing import Any, Final
6 |
7 | from homeassistant.config_entries import ConfigEntry
8 | from homeassistant.const import CONF_API_KEY
9 | from homeassistant.core import HomeAssistant
10 |
11 | from .coordinator import SolcastUpdateCoordinator
12 |
13 | TO_REDACT: Final = [
14 | CONF_API_KEY,
15 | ]
16 |
17 |
18 | async def async_get_config_entry_diagnostics(hass: HomeAssistant, entry: ConfigEntry) -> dict[str, Any]:
19 | """Return diagnostics for a config entry.
20 |
21 | Args:
22 | hass (HomeAssistant): The Home Assistant instance.
23 | entry (ConfigEntry): The integration entry instance, provides access to the coordinator.
24 |
25 | Returns:
26 | dict[str, Any]: Diagnostic details to include in a download file.
27 |
28 | """
29 | coordinator: SolcastUpdateCoordinator = entry.runtime_data.coordinator
30 |
31 | def hard_limit_set():
32 | hard_set = False
33 | for hard_limit in coordinator.solcast.hard_limit.split(","):
34 | if hard_limit != "100.0":
35 | hard_set = True
36 | return hard_set
37 |
38 | energy_data = coordinator.solcast.get_energy_data()
39 |
40 | return {
41 | "tz_conversion": coordinator.solcast.options.tz,
42 | "used_api_requests": coordinator.solcast.get_api_used_count(),
43 | "api_request_limit": coordinator.solcast.get_api_limit(),
44 | "rooftop_site_count": len(coordinator.solcast.sites),
45 | "forecast_hard_limit_set": hard_limit_set(),
46 | "data": (coordinator.data, TO_REDACT),
47 | "energy_forecasts_graph": energy_data["wh_hours"] if energy_data is not None else {},
48 | }
49 |
--------------------------------------------------------------------------------
/custom_components/solcast_solar/services.yaml:
--------------------------------------------------------------------------------
1 | # Describes the format for available services for the Solcast integration
2 | update_forecasts:
3 | name: Update
4 | description: Fetches the forecasts from Solcast
5 |
6 | force_update_forecasts:
7 | name: Force Update Forecasts
8 | description: Force fetches the forecasts from Solcast
9 |
10 | force_update_estimates:
11 | name: Force Update Estimates
12 | description: Force fetches estimated actuals from Solcast
13 |
14 | clear_all_solcast_data:
15 | name: Clear saved Solcast site data
16 | description: Deletes the solcast.json file to remove all current solcast site data
17 |
18 | query_forecast_data:
19 | name: Query forecasts
20 | description: List of forecasts between start date/time and end date/time
21 | fields:
22 | start_date_time:
23 | example: "2024-10-06T00:00:00Z"
24 | selector:
25 | datetime:
26 | end_date_time:
27 | example: "2024-10-06T10:00:00Z"
28 | selector:
29 | datetime:
30 | undampened:
31 | example: "false"
32 | site:
33 | example: "1234-5678-9012-3456"
34 |
35 | query_estimate_data:
36 | name: Query estimates
37 | description: List of estimated actuals between start date/time and end date/time
38 | fields:
39 | start_date_time:
40 | example: "2024-10-06T00:00:00Z"
41 | selector:
42 | datetime:
43 | end_date_time:
44 | example: "2024-10-06T10:00:00Z"
45 | selector:
46 | datetime:
47 | undampened:
48 | example: "false"
49 |
50 | set_dampening:
51 | name: Set forecasts dampening
52 | description: Set the hourly or half-hourly forecast dampening factors
53 | fields:
54 | damp_factor:
55 | example: "1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1"
56 | site:
57 | example: "1234-5678-9012-3456"
58 |
59 | get_dampening:
60 | name: Get forecasts dampening
61 | description: Get the forecast dampening factors
62 | fields:
63 | site:
64 | example: "1234-5678-9012-3456"
65 |
66 | set_hard_limit:
67 | name: Set inverter forecast hard limit in kW
68 | description: Prevent forecast values being higher than the inverter can produce
69 | fields:
70 | hard_limit:
71 | example: "6.0"
72 |
73 | remove_hard_limit:
74 | name: Remove inverter forecast hard limit
75 | description: Remove set limit
76 |
--------------------------------------------------------------------------------
/custom_components/solcast_solar/icons.json:
--------------------------------------------------------------------------------
1 | {
2 | "entity": {
3 | "select": {
4 | "estimate_mode": {"default": "mdi:sun-angle"}
5 | },
6 | "sensor": {
7 | "api_counter": {"default": "mdi:web-check"},
8 | "api_limit": {"default": "mdi:web-check"},
9 | "forecast_custom_hours": {"default": "mdi:solar-power"},
10 | "forecast_next_hour": {"default": "mdi:solar-power"},
11 | "forecast_this_hour": {"default": "mdi:solar-power"},
12 | "get_remaining_today": {"default": "mdi:solar-power"},
13 | "hard_limit": {"default": "mdi:speedometer"},
14 | "hard_limit_api": {"default": "mdi:speedometer"},
15 | "lastupdated": {"default": "mdi:clock"},
16 | "peak_w_time_today": {"default": "mdi:clock"},
17 | "peak_w_today": {"default": "mdi:solar-power"},
18 | "peak_w_time_tomorrow": {"default": "mdi:clock"},
19 | "peak_w_tomorrow": {"default": "mdi:solar-power"},
20 | "power_now": {"default": "mdi:flash"},
21 | "power_now_1hr": {"default": "mdi:flash"},
22 | "power_now_30m": {"default": "mdi:flash"},
23 | "site_data": {"default": "mdi:home"},
24 | "total_kwh_forecast_d3": {"default": "mdi:solar-power"},
25 | "total_kwh_forecast_d4": {"default": "mdi:solar-power"},
26 | "total_kwh_forecast_d5": {"default": "mdi:solar-power"},
27 | "total_kwh_forecast_d6": {"default": "mdi:solar-power"},
28 | "total_kwh_forecast_d7": {"default": "mdi:solar-power"},
29 | "total_kwh_forecast_d8": {"default": "mdi:solar-power"},
30 | "total_kwh_forecast_d9": {"default": "mdi:solar-power"},
31 | "total_kwh_forecast_d10": {"default": "mdi:solar-power"},
32 | "total_kwh_forecast_d11": {"default": "mdi:solar-power"},
33 | "total_kwh_forecast_d12": {"default": "mdi:solar-power"},
34 | "total_kwh_forecast_d13": {"default": "mdi:solar-power"},
35 | "total_kwh_forecast_today": {"default": "mdi:solar-power"},
36 | "total_kwh_forecast_tomorrow": {"default": "mdi:solar-power"}
37 | }
38 | },
39 | "services": {
40 | "clear_all_solcast_data": "mdi:database-off",
41 | "force_update_estimates": "mdi:sun-wireless-outline",
42 | "force_update_forecasts": "mdi:sun-wireless-outline",
43 | "get_dampening": "mdi:format-align-middle",
44 | "set_dampening": "mdi:format-align-middle",
45 | "query_estimate_data": "mdi:table-question",
46 | "query_forecast_data": "mdi:table-question",
47 | "update_forecasts": "mdi:sun-wireless-outline"
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/tests/test_energy.py:
--------------------------------------------------------------------------------
1 | """Tests for the Solcast Solar energy dashboard."""
2 |
from datetime import datetime as dt
import logging

import pytest

from homeassistant.components.recorder import Recorder
from homeassistant.components.solcast_solar.const import CONFIG_VERSION, DOMAIN
from homeassistant.components.solcast_solar.coordinator import SolcastUpdateCoordinator
from homeassistant.components.solcast_solar.energy import async_get_solar_forecast
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

from . import DEFAULT_INPUT1, async_cleanup_integration_tests, async_init_integration

from tests.common import MockConfigEntry
17 |
18 | _LOGGER = __import__("logging").getLogger(__name__)
19 |
20 |
async def test_energy_data(
    recorder_mock: Recorder,
    hass: HomeAssistant,
) -> None:
    """Test energy dashboard data structure."""

    # The forecast provider must return None when the domain is not yet available.
    not_available_entry = MockConfigEntry(
        domain=DOMAIN, unique_id="solcast_pv_solar", title="Solcast PV Forecast", data={}, options=DEFAULT_INPUT1, version=CONFIG_VERSION
    )
    assert await async_get_solar_forecast(hass, not_available_entry.entry_id) is None

    entry: ConfigEntry = await async_init_integration(hass, DEFAULT_INPUT1)
    coordinator: SolcastUpdateCoordinator = entry.runtime_data.coordinator

    # The forecast provider must also return None when the coordinator does not exist.
    saved_coordinator = entry.runtime_data.coordinator
    entry.runtime_data.coordinator = None
    assert await async_get_solar_forecast(hass, entry.entry_id) is None
    entry.runtime_data.coordinator = saved_coordinator

    try:
        response = await async_get_solar_forecast(hass, entry.entry_id)

        if response is None:
            pytest.fail("Energy data is None")

        # Test dictionary structure and length
        assert response.get("wh_hours") is not None
        day_start = coordinator.solcast.get_day_start_utc()
        day_start_earliest_whole_day = coordinator.solcast.get_day_start_utc(future=-6)
        today_and_beyond = 0
        earliest_and_beyond = 0
        for timestamp, wh_hour in response["wh_hours"].items():
            # Parse each timestamp once instead of three times per iteration.
            moment = dt.fromisoformat(timestamp)
            assert isinstance(moment, dt)
            assert wh_hour % 1 == 0  # Wh values must be whole numbers
            if moment >= day_start:
                today_and_beyond += 1
            if moment >= day_start_earliest_whole_day:
                earliest_and_beyond += 1

        # Test that at least seven days of thirty time periods from today onwards are present
        assert today_and_beyond >= 30 * 7

        # Test that at least thirteen days of thirty time periods from earliest whole day onwards are present
        assert earliest_and_beyond >= 30 * 13

    finally:
        assert await async_cleanup_integration_tests(hass)
70 |
--------------------------------------------------------------------------------
/tests/test_diagnostics.py:
--------------------------------------------------------------------------------
1 | """Tests for the Solcast Solar diagnostics and system health."""
2 |
3 | from datetime import datetime as dt, timedelta
4 | import logging
5 |
6 | from freezegun.api import FrozenDateTimeFactory
7 |
8 | from homeassistant.components.recorder import Recorder
9 | from homeassistant.components.solcast_solar.const import API_QUOTA, DOMAIN
10 | from homeassistant.components.solcast_solar.coordinator import SolcastUpdateCoordinator
11 | from homeassistant.components.solcast_solar.solcastapi import SolcastApi
12 | from homeassistant.core import HomeAssistant
13 |
14 | from . import (
15 | DEFAULT_INPUT1,
16 | ZONE_RAW,
17 | async_cleanup_integration_tests,
18 | async_init_integration,
19 | )
20 |
21 | from tests.components.diagnostics import (
22 | get_diagnostics_for_config_entry, # pyright:ignore[reportUnknownVariableType]
23 | )
24 | from tests.typing import (
25 | ClientSessionGenerator, # pyright:ignore[reportUnknownVariableType]
26 | )
27 |
28 | _LOGGER = logging.getLogger(__name__)
29 |
30 |
async def test_diagnostics(
    recorder_mock: Recorder,
    hass: HomeAssistant,
    freezer: FrozenDateTimeFactory,
    hass_client: ClientSessionGenerator,  # pyright:ignore[reportUnknownParameterType]
) -> None:
    """Test diagnostics output."""

    try:
        entry = await async_init_integration(hass, DEFAULT_INPUT1)
        # Nudge mock time forward so start-up work scheduled for "now" can run.
        freezer.move_to(dt.now() + timedelta(minutes=1))
        await hass.async_block_till_done()
        coordinator: SolcastUpdateCoordinator = entry.runtime_data.coordinator
        solcast: SolcastApi = coordinator.solcast

        # Verify the key diagnostic values for a freshly initialised two-site configuration.
        diagnostics = await get_diagnostics_for_config_entry(hass, hass_client, entry)
        assert ZONE_RAW in diagnostics["tz_conversion"]["repr"]  # type: ignore[call-overload, index, operator] # pyright: ignore[reportOperatorIssue, reportIndexIssue, reportCallIssue, reportArgumentType, reportOptionalSubscript]
        assert diagnostics["used_api_requests"] == 4
        assert diagnostics["api_request_limit"] == int(DEFAULT_INPUT1[API_QUOTA])
        assert diagnostics["rooftop_site_count"] == 2
        assert diagnostics["forecast_hard_limit_set"] is False
        # Each configured site must appear with a substantial forecast history.
        for site, data in diagnostics["data"][0]["siteinfo"].items():  # type: ignore[call-overload, index, union-attr] # pyright: ignore[reportArgumentType, reportIndexIssue, reportOptionalSubscript, reportUnknownMemberType]
            assert site in ["1111-1111-1111-1111", "2222-2222-2222-2222"]
            assert len(data["forecasts"]) > 300  # type: ignore[arg-type, call-overload, index] # pyright: ignore[reportArgumentType, reportIndexIssue, reportOptionalSubscript, reportUnknownMemberType]
        assert diagnostics["energy_forecasts_graph"][solcast.get_now_utc().replace(hour=2, minute=0, second=0).isoformat()] == 3600.0  # type: ignore[call-overload, index]

        # Setting a hard limit must be reflected when diagnostics are fetched again.
        await hass.services.async_call(DOMAIN, "set_hard_limit", {"hard_limit": "5.0"}, blocking=True)
        await hass.async_block_till_done()  # Because integration reloads
        diagnostics = await get_diagnostics_for_config_entry(hass, hass_client, entry)
        assert diagnostics["forecast_hard_limit_set"] is True

    finally:
        # Always remove test artefacts, even when an assertion above fails.
        assert await async_cleanup_integration_tests(hass)
64 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create an issue when you think you've found a bug in the Solcast integration
4 | title: ''
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 | ### Help us help you.
10 |
11 | #### Do not create issues for questions on how to use or set up this integration.
12 |
Use the discussions for general support. Only create issues to report genuine suspected bugs in the integration.
14 |
15 | #### This issue template requests debug logs. Please provide them.
16 |
17 | Please don't provide just screenshots of logs (they're hard to read), or say "_logs look clean_" or assume they're not needed. This integration works extremely well except when it fails, and when it fails, unless debug logs are provided - as ` ```back-ticked text``` `, or attachments - our chances of working out why it has failed are close to zero.
18 |
19 | Note the instructions for getting debug logs, and follow them.
20 |
21 | If you fail to provide debug logs, you're providing us with permission to ignore you, or, at best, for us to say **_debug logs required_**, and then ignore you until you provide debug logs.
22 |
By being lazy and not bothering to fill in **all** of the data requested for an issue (such as the version of the integration you're running, or the type of Home Assistant installation you're running), you're encouraging us to be lazy and not bother responding.
24 |
25 | #### Fill in **all** of the data requested.
26 |
27 | Feel free to delete everything above this line.
28 |
29 | ---
30 |
31 | ## Describe the bug
32 |
33 | A clear and concise description of what the bug is.
34 |
35 | ## To Reproduce
36 |
37 | Steps to reproduce the behaviour:
38 |
39 | 1. Go to '...'
40 | 2. Click on '....'
41 | 3. Scroll down to '....'
42 | 4. See error
43 |
44 | ## Expected behaviour
45 |
46 | A clear and concise description of what you expected to happen.
47 |
48 | ## Screenshots
49 |
50 | If applicable, add screenshots to help explain your problem.
51 |
52 | ## Logs
53 |
54 | I confirm:
55 |
56 | - [ ] I have attached **debug** logs
57 | - [ ] I have embedded **debug** logs in the issue description (enclosed in tick marks ``` for proper formatting)
58 | - [ ] Confirmed **debug** logs are not required for this issue
59 |
60 | Make sure you include logs from HA listing the output from the Solcast integration showing the error - this is particularly useful in debugging issues and helping to determine whether the issue is with the integration or the Solcast service
61 |
62 | To add detailed debug information, add the following to your configuration.yaml and restart HA:
63 |
64 | ``` yaml
65 | logger:
66 | default: warn
67 | logs:
68 | custom_components.solcast_solar: debug
69 | ```
70 |
71 | To inspect and collect debug logs examine `/config/home-assistant.log` using File Editor or Visual Studio Code Server.
72 |
73 | If you are using docker, it sometimes can be easier to gather logs using `docker compose logs -n 500 -f homeassistant` or similar
74 |
75 | ## Solcast Integration Version
76 |
77 | - Integration Version [e.g. 4.0.29]
78 |
79 | ## Desktop (please complete the following information)
80 |
81 | - OS: [e.g. iOS]
82 | - Browser [e.g. chrome, safari]
83 | - Version [e.g. 22]
84 |
85 | ## Smartphone (please complete the following information)
86 |
87 | - Device: [e.g. iPhone6]
88 | - OS: [e.g. iOS8.1]
89 | - Browser [e.g. stock browser, safari]
90 | - Version [e.g. 22]
91 |
92 | ## Additional context
93 |
94 | Add any other context about the problem here.
95 |
--------------------------------------------------------------------------------
/custom_components/solcast_solar/repairs.py:
--------------------------------------------------------------------------------
1 | """Repairs for the Solcast Solar integration."""
2 |
3 | import logging
4 | from typing import Any
5 |
6 | import voluptuous as vol
7 |
8 | from homeassistant import data_entry_flow
9 | from homeassistant.components.repairs import ConfirmRepairFlow, RepairsFlow
10 | from homeassistant.config_entries import ConfigEntry
11 | from homeassistant.core import HomeAssistant, callback
12 | from homeassistant.helpers import issue_registry as ir
13 | from homeassistant.helpers.selector import (
14 | SelectOptionDict,
15 | SelectSelector, # pyright: ignore[reportUnknownVariableType]
16 | SelectSelectorConfig,
17 | SelectSelectorMode,
18 | )
19 |
20 | from .const import AUTO_UPDATE, DOMAIN, ENTRY_ID, LEARN_MORE
21 |
22 | _LOGGER = logging.getLogger(__name__)
23 |
24 | AUTO_UPDATE_OPTIONS: list[SelectOptionDict] = [
25 | SelectOptionDict(label="sunrise_sunset", value="1"),
26 | SelectOptionDict(label="all_day", value="2"),
27 | ]
28 |
29 |
class SolcastRepair(RepairsFlow):
    """Base handler for an issue fixing flow."""

    entry: ConfigEntry | None

    def __init__(self, *, entry: ConfigEntry | None) -> None:
        """Create flow, remembering the config entry being repaired (if any)."""

        self.entry = entry
        super().__init__()

    @callback
    def _async_get_placeholders(self) -> dict[str, str]:
        """Build description placeholders, adding a learn-more URL when the issue has one."""
        issue = ir.async_get(self.hass).issues.get((DOMAIN, self.issue_id))
        placeholders: dict[str, str] = {}
        if issue is not None and issue.learn_more_url:
            placeholders[LEARN_MORE] = issue.learn_more_url
        return placeholders
50 |
51 |
class RecordsMissingRepairFlow(SolcastRepair):
    """Handler to enable auto-update."""

    async def async_step_init(self, user_input: dict[str, str] | None = None) -> data_entry_flow.FlowResult:
        """Handle the init."""

        return await self.async_step_offer_auto()

    async def async_step_offer_auto(self, user_input: dict[str, str] | None = None) -> data_entry_flow.FlowResult:
        """Handle the offer to enable auto-update."""

        if user_input is None or self.entry is None:
            # Nothing submitted yet (or no entry to update): present the selection form.
            schema = vol.Schema(
                {
                    vol.Required(AUTO_UPDATE, default="1"): SelectSelector(
                        SelectSelectorConfig(options=AUTO_UPDATE_OPTIONS, mode=SelectSelectorMode.DROPDOWN, translation_key="auto_update")
                    ),
                }
            )
            return self.async_show_form(
                step_id="offer_auto",
                data_schema=schema,
                description_placeholders=self._async_get_placeholders(),
            )

        # Merge the selected auto-update mode into the config entry options.
        merged_options: dict[str, Any] = {**self.entry.options, AUTO_UPDATE: int(user_input[AUTO_UPDATE])}
        self.hass.config_entries.async_update_entry(self.entry, options=merged_options)
        return self.async_abort(reason="reconfigured")
81 |
82 |
async def async_create_fix_flow(
    hass: HomeAssistant,
    issue_id: str,
    data: dict[str, Any],
) -> RepairsFlow:
    """Create the repair flow appropriate for an issue.

    Arguments:
        hass (HomeAssistant): The Home Assistant instance.
        issue_id (str): The identifier of the issue being repaired.
        data (dict[str, Any]): Issue data, which may contain the config entry ID.

    Returns:
        RepairsFlow: A dedicated repair flow, or a simple confirmation flow.

    """
    if issue_id == "records_missing_fixable":
        # A simple truthiness test covers both a missing key and an empty string,
        # so the previous explicit `!= ""` comparison was redundant.
        entry = hass.config_entries.async_get_entry(data[ENTRY_ID]) if data.get(ENTRY_ID) else None
        return RecordsMissingRepairFlow(entry=entry)

    return ConfirmRepairFlow()
95 |
--------------------------------------------------------------------------------
/tests/test_select.py:
--------------------------------------------------------------------------------
1 | """Tests for the Solcast Solar select."""
2 |
3 | import asyncio
4 | from datetime import datetime as dt, timedelta
5 | import logging
6 |
7 | from freezegun.api import FrozenDateTimeFactory
8 | import pytest
9 |
10 | from homeassistant.components.recorder import Recorder
11 | from homeassistant.components.select import (
12 | ATTR_OPTION,
13 | DOMAIN as SELECT_DOMAIN,
14 | SERVICE_SELECT_OPTION,
15 | )
16 | from homeassistant.components.solcast_solar.const import DOMAIN
17 | from homeassistant.components.solcast_solar.coordinator import SolcastUpdateCoordinator
18 | from homeassistant.components.solcast_solar.select import PVEstimateMode
19 | from homeassistant.components.solcast_solar.solcastapi import SolcastApi
20 | from homeassistant.const import ATTR_ENTITY_ID
21 | from homeassistant.core import HomeAssistant
22 | from homeassistant.helpers import entity_registry as er
23 |
24 | from . import DEFAULT_INPUT1, async_cleanup_integration_tests, async_init_integration
25 |
26 | _LOGGER = logging.getLogger(__name__)
27 |
28 |
@pytest.mark.parametrize(
    ("entity_key", "resulting_state", "test_entity", "expected_value"),
    [
        (PVEstimateMode.ESTIMATE, "estimate", "forecast_today", "42.552"),
        (PVEstimateMode.ESTIMATE10, "estimate10", "forecast_today", "35.46"),
        (PVEstimateMode.ESTIMATE90, "estimate90", "forecast_today", "47.28"),
    ],
)
async def test_select_change_value(
    recorder_mock: Recorder,
    hass: HomeAssistant,
    freezer: FrozenDateTimeFactory,
    caplog: pytest.LogCaptureFixture,
    entity_registry: er.EntityRegistry,
    entity_key: PVEstimateMode,
    resulting_state: str,
    test_entity: str,
    expected_value: float,
) -> None:
    """Test estimate mode selector."""

    try:
        entry = await async_init_integration(hass, DEFAULT_INPUT1)
        freezer.move_to(dt.now() + timedelta(minutes=1))
        # Tick mock time forward until start-up has settled ("Start is not stale" is logged).
        async with asyncio.timeout(10):
            while "Start is not stale" not in caplog.text:
                freezer.tick()
                await hass.async_block_till_done()
        coordinator: SolcastUpdateCoordinator = entry.runtime_data.coordinator
        solcast: SolcastApi = coordinator.solcast

        # Jump a day ahead (in the integration's time zone) to a mid-interval moment.
        freezer.move_to((dt.now(solcast._tz) + timedelta(hours=24)).replace(minute=27, second=27))  # pyright: ignore[reportPrivateUsage]
        await hass.async_block_till_done()

        assert (
            select_entity_id := entity_registry.async_get_entity_id(
                SELECT_DOMAIN,
                DOMAIN,
                "estimate_mode",
            )
        ) is not None
        # The default estimate mode is plain "estimate".
        assert hass.states.get(select_entity_id).state == "estimate"  # type: ignore[union-attr]

        await hass.services.async_call(
            SELECT_DOMAIN,
            SERVICE_SELECT_OPTION,
            {ATTR_ENTITY_ID: select_entity_id, ATTR_OPTION: resulting_state},
            blocking=True,
        )

        # Force a refresh so the sensors recalculate using the newly selected estimate.
        coordinator._data_updated = True  # pyright:ignore[reportPrivateUsage]
        await coordinator.async_refresh()
        await hass.async_block_till_done()

        assert hass.states.get(select_entity_id).state == resulting_state  # type: ignore[union-attr]
        assert coordinator.solcast.options.key_estimate == resulting_state
        assert hass.states.get(f"sensor.solcast_pv_forecast_{test_entity}").state == expected_value  # type: ignore[union-attr]

        for _ in range(300):  # Extra time needed for refresh
            await hass.async_block_till_done()
            freezer.tick(0.1)

    finally:
        assert await async_cleanup_integration_tests(hass)
93 |
--------------------------------------------------------------------------------
/DEVELOPERS.md:
--------------------------------------------------------------------------------
1 | Hi community!
2 |
3 | A Solcast API simulator and unit/integration tests are available for this custom integration. To set up, add these mounts to your HA dev container, adjusting for your local integration fork.
4 |
5 | As a custom component (no simulator, no tests):
6 |
7 | ```
8 | "mounts": [
9 | "source=${localEnv:HOME}/Documents/GitHub/ha-solcast-solar/custom_components/solcast_solar,target=${containerWorkspaceFolder}/config/custom_components/solcast_solar,type=bind",
10 | "source=${localEnv:HOME}/Documents/GitHub/ha-solcast-solar/tests,target=${containerWorkspaceFolder}/tests/components/solcast_solar,type=bind",
11 | ],
12 | ```
13 |
14 | As a core component (to run tests the integration must be mounted under core components):
15 |
16 | ```
17 | "mounts": [
18 | "source=${localEnv:HOME}/Documents/GitHub/ha-solcast-solar/custom_components/solcast_solar,target=${containerWorkspaceFolder}/homeassistant/components/solcast_solar,type=bind",
19 | "source=${localEnv:HOME}/Documents/GitHub/ha-solcast-solar/tests,target=${containerWorkspaceFolder}/tests/components/solcast_solar,type=bind",
20 | ],
21 | ```
22 |
23 | Before running as core, set up and start the simulator, then add the integration as a custom component in HA and then modify the mount locations. Things are not set up to be able to add the component as core, but an already added component will use core component code on HA start once the mount is modified.
24 |
25 | To get the simulator to work `/etc/hosts` needs to be modified to specify `127.0.0.1 localhost api.solcast.com.au` (use sudo). For a quick start, `cd tests/components/solcast_solar` and execute `python3 -u wsgi_sim.py --limit 5000 --no429`, which gets 5,000 API calls max, and no 'too busy' errors generated on the hour. (`python3 -u wsgi_sim.py --help` for options, or inspect `wsgi_sim.py` for the documentation.) Note that if the integration or simulator has never been started then dependencies will not yet be installed. The simulator will `pip install` missing dependencies and also create a new self-signed certificate. /etc/hosts is inspected for correctness but not modified automatically. To avoid needing `python3 -u` make `wsgi_sim.py` executable.
26 |
27 | Re-building the dev container will require `/etc/hosts` to be modified again for the simulator to work.
28 |
29 | The tests will show up at `tests/components/solcast_solar`. `cd` to there and execute `pytest` for all, or `pytest test_xxxx.py` for just one test module. To inspect logging, `pytest -o log_cli=true --log-cli-level=DEBUG [module.py ...]`. For a test coverage report, `pytest --cov=homeassistant.components.solcast_solar --cov-report term-missing -v`.
30 |
31 | Additional test contributions will be most welcome. In fact, test contributions will be required if your code modifications introduce lines of code that are not properly tested by the current PyTest modules.
32 |
33 | PyTest coverage of all modules is 100%. _Every_ line of code is currently exercised, and it is expected that every circumstance is covered by a test. (This may be accomplished by extending an existing test, or by creating a new one.) This is something that should be aspired to for every pull request to this integration, and if test coverage is completely ignored and your pull request is extensive, then it will likely be rejected, even if it appears to work perfectly. (If your test does not hit PyTest 100% coverage then _someone else_ will need to code the test before the code is released. And they won't like that...)
34 |
Home Assistant development standards to Platinum level are also a thing here, and non-conformance will also result in PR revisions being required, or rejection. A strict type checking standard is maintained. The Home Assistant dev container incorporates much automated checking of code standards to help out, but you will likely need to add PyLance and configure type checking to strict standard. GitHub CoPilot is also pretty neat at calling out inefficient or poorly constructed code, and this can also be used in the dev container.
36 |
37 | Welcome! Let's make this an even better integration!
38 |
--------------------------------------------------------------------------------
/custom_components/solcast_solar/select.py:
--------------------------------------------------------------------------------
1 | """Selector to allow users to select the pv_ data field to use for calculations."""
2 |
3 | from enum import IntEnum
4 | import logging
5 | from typing import Any
6 |
7 | from homeassistant.components.select import SelectEntity, SelectEntityDescription
8 | from homeassistant.config_entries import ConfigEntry
9 | from homeassistant.const import EntityCategory
10 | from homeassistant.core import HomeAssistant
11 | from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
12 | from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
13 |
14 | from .const import DOMAIN, KEY_ESTIMATE, MANUFACTURER
15 | from .coordinator import SolcastUpdateCoordinator
16 |
17 | _LOGGER = logging.getLogger(__name__)
18 |
19 |
class PVEstimateMode(IntEnum):
    """Enumeration of the available pv forecast estimate series."""

    ESTIMATE = 0  # Use default forecasts
    ESTIMATE10 = 1  # Use forecasts 10 - cloudier than expected scenario
    ESTIMATE90 = 2  # Use forecasts 90 - less cloudy than expected scenario
31 |
32 |
# Map each estimate mode to the option string presented by the select entity.
_MODE_TO_OPTION: dict[PVEstimateMode, str] = {
    PVEstimateMode.ESTIMATE: "estimate",
    PVEstimateMode.ESTIMATE10: "estimate10",
    PVEstimateMode.ESTIMATE90: "estimate90",
}

# Description of the estimate mode select entity (a configuration-category entity).
ESTIMATE_MODE = SelectEntityDescription(
    key="estimate_mode",
    icon="mdi:sun-angle",
    entity_category=EntityCategory.CONFIG,
    translation_key="estimate_mode",
)
45 |
46 |
async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up a Solcast select.

    Arguments:
        hass (HomeAssistant): The Home Assistant instance.
        entry (ConfigEntry): The integration entry instance, contains the configuration.
        async_add_entities (AddEntitiesCallback): The Home Assistant callback to add entities.

    """
    coordinator: SolcastUpdateCoordinator = entry.runtime_data.coordinator

    # A single selector chooses which estimate series drives the calculations.
    async_add_entities(
        [
            EstimateModeEntity(
                coordinator,
                ESTIMATE_MODE,
                list(_MODE_TO_OPTION.values()),
                coordinator.solcast.options.key_estimate,
                entry,
            )
        ]
    )
70 |
71 |
class EstimateModeEntity(SelectEntity):
    """Entity representing the solcast estimate field to use for calculations."""

    _attr_should_poll = False
    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: SolcastUpdateCoordinator,
        entity_description: SelectEntityDescription,
        supported_options: list[str],
        current_option: str,
        entry: ConfigEntry,
    ) -> None:
        """Initialise the selector.

        Arguments:
            coordinator (SolcastUpdateCoordinator): The integration coordinator instance.
            entity_description (SensorEntityDescription): The details of the entity.
            supported_options (list[str]): All select options available.
            current_option (str): The currently selected option.
            entry (ConfigEntry): The integration entry instance, contains the configuration.

        """

        self.coordinator = coordinator
        self.entity_description = entity_description
        self._entry = entry

        self._attr_unique_id = entity_description.key
        self._attr_options = supported_options
        self._attr_current_option = current_option
        self._attr_entity_category = EntityCategory.CONFIG
        self._attributes: dict[str, Any] = {}
        self._attr_extra_state_attributes: dict[str, Any] = {}

        # Attach the entity to the integration's service device.
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, entry.entry_id)},
            name="Solcast PV Forecast",
            manufacturer=MANUFACTURER,
            model="Solcast PV Forecast",
            entry_type=DeviceEntryType.SERVICE,
            sw_version=coordinator.version,
            configuration_url="https://toolkit.solcast.com.au/",
        )

    async def async_select_option(self, option: str) -> None:
        """Change the selected option.

        Arguments:
            option (str): The preferred forecast to use. estimate, estimate10 or estimate90

        """
        # Reflect the choice immediately in the entity state.
        self._attr_current_option = option
        self.async_write_ha_state()

        # Persist the choice in the config entry options.
        updated_options = dict(self._entry.options)
        updated_options[KEY_ESTIMATE] = option
        self.coordinator.hass.config_entries.async_update_entry(self._entry, options=updated_options)
131 |
--------------------------------------------------------------------------------
/tests/test_forecast_retry.py:
--------------------------------------------------------------------------------
1 | """Test forecasts update retry mechanism."""
2 |
3 | import asyncio
4 | from datetime import timedelta
5 | import json
6 | import logging
7 | from pathlib import Path
8 | from typing import Any
9 | from unittest import mock
10 |
11 | from freezegun.api import FrozenDateTimeFactory
12 | import pytest
13 |
14 | from homeassistant.components.recorder import Recorder
15 | from homeassistant.components.solcast_solar.const import (
16 | DOMAIN,
17 | SERVICE_FORCE_UPDATE_FORECASTS,
18 | )
19 | from homeassistant.core import HomeAssistant
20 | from homeassistant.setup import async_setup_component
21 |
22 | from . import (
23 | CONFIG_DISCRETE_NAME,
24 | CONFIG_FOLDER_DISCRETE,
25 | DEFAULT_INPUT1,
26 | MOCK_BUSY,
27 | async_cleanup_integration_tests,
28 | async_init_integration,
29 | session_clear,
30 | session_set,
31 | )
32 |
33 |
class AsyncMockDoNothing(mock.MagicMock):
    """Do nothing. Used to replace asyncio sleep."""

    async def __call__(self, *args: Any, **kwargs: Any) -> Any:
        """Do nothing.

        Records the call like a MagicMock and returns its configured result
        (so the annotation is Any, not None).
        """
        return super().__call__(*args, **kwargs)
40 |
41 |
@pytest.fixture(autouse=True)
def frozen_time() -> None:
    """Override the package-level autouse frozen-time fixture.

    This module manages mock time itself, so the shared fixture is neutralised here.
    """
49 |
50 |
51 | _LOGGER = logging.getLogger(__name__)
52 |
53 |
def _occurs_in_log(caplog: pytest.LogCaptureFixture, text: str, occurrences: int) -> None:
    """Assert that `text` occurs in the captured log exactly `occurrences` times.

    Arguments:
        caplog (pytest.LogCaptureFixture): The captured log.
        text (str): The substring to count.
        occurrences (int): The exact number of expected occurrences.

    Raises:
        AssertionError: When the count does not match, with a message stating both counts.

    """
    # sum() over a generator of booleans replaces the manual counting loop.
    occurs = sum(text in entry for entry in caplog.messages)
    assert occurrences == occurs, f"Expected {occurrences} occurrence(s) of {text!r}, found {occurs}"
60 |
61 |
@pytest.mark.asyncio
async def test_forecast_retry(
    recorder_mock: Recorder,
    hass: HomeAssistant,
    freezer: FrozenDateTimeFactory,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test retry mechanism."""

    try:
        freezer.move_to("2025-01-11 00:00:00")  # A pending update will be queued for 00:00:09 UTC

        # Write an advanced configuration naming the automations to be triggered when the
        # API becomes unavailable/available (in a discrete config folder when so configured).
        config_dir = f"{hass.config.config_dir}/{CONFIG_DISCRETE_NAME}" if CONFIG_FOLDER_DISCRETE else hass.config.config_dir
        if CONFIG_FOLDER_DISCRETE:
            Path(config_dir).mkdir(parents=False, exist_ok=True)
        Path(f"{config_dir}/solcast-advanced.json").write_text(
            json.dumps(
                {
                    "trigger_on_api_unavailable": "Automation unavailable",
                    "trigger_on_api_available": "Automation available",
                }
            ),
            encoding="utf-8",
        )

        entry = await async_init_integration(hass, DEFAULT_INPUT1)
        coordinator = entry.runtime_data.coordinator
        solcast = coordinator.solcast

        # Create the two automations whose aliases match the advanced configuration above.
        assert await async_setup_component(
            hass,
            "automation",
            {
                "automation": [
                    {
                        "id": "automation_available",
                        "alias": "Automation available",
                        "trigger": {"platform": "event", "event_type": "test_event"},
                        "action": {"service": "persistent_notification.create"},
                    },
                    {
                        "id": "automation_unavailable",
                        "alias": "Automation unavailable",
                        "trigger": {"platform": "event", "event_type": "test_event"},
                        "action": {"service": "persistent_notification.create"},
                    },
                ]
            },
        )
        await hass.async_block_till_done()

        # Make the simulated API return "busy" responses from now on.
        session_set(MOCK_BUSY)
        caplog.clear()

        # Age the data so an update is due, then tick time (with retry sleeps stubbed out)
        # until all retries are exhausted and the api_unavailable issue is raised.
        solcast._data["last_updated"] -= timedelta(minutes=20)
        with mock.patch("homeassistant.components.solcast_solar.solcastapi.SolcastApi._sleep", new_callable=AsyncMockDoNothing):
            async with asyncio.timeout(10):
                while "Raise issue for api_unavailable" not in caplog.text:
                    freezer.tick(0.1)
                    await hass.async_block_till_done()

        # Every retry attempt must have failed with a 429, and the issue must be raised.
        assert "API was tried 10 times, but all attempts failed" in caplog.text
        _occurs_in_log(caplog, "Call status 429/Try again later", 10)
        assert "No data was returned for forecasts" in caplog.text
        assert "Forecast has not been updated, next auto update at" in caplog.text
        assert "Completed task pending_update_009" in caplog.text
        assert "Raise issue for api_unavailable" in caplog.text
        await solcast.tasks_cancel()
        await coordinator.tasks_cancel()

        # Restore the API, force an update, and verify the issue is removed again.
        session_clear(MOCK_BUSY)
        caplog.clear()
        await hass.services.async_call(DOMAIN, SERVICE_FORCE_UPDATE_FORECASTS, {}, blocking=True)
        async with asyncio.timeout(10):
            while "Remove issue for api_unavailable" not in caplog.text:
                freezer.tick(0.1)
                await hass.async_block_till_done()
        assert "Remove issue for api_unavailable" in caplog.text
        await solcast.tasks_cancel()
        await coordinator.tasks_cancel()

    finally:
        await async_cleanup_integration_tests(hass)
145 |
--------------------------------------------------------------------------------
/quality_checklist.md:
--------------------------------------------------------------------------------
1 | ## Bronze
2 | - [X] `config-flow` - Integration needs to be able to be set up via the UI
3 | - [X] Uses `data_description` to give context to fields
4 | - [X] Uses `ConfigEntry.data` and `ConfigEntry.options` correctly
5 | - [X] `test-before-configure` - Test a connection in the config flow
6 | - [X] `unique-config-entry` - Don't allow the same device or service to be able to be set up twice
7 | - [X] `config-flow-test-coverage` - Full test coverage for the config flow
8 | - [X] `runtime-data` - Use ConfigEntry.runtime_data to store runtime data
9 | - [X] `test-before-setup` - Check during integration initialization if we are able to set it up correctly
10 | - [X] `appropriate-polling` - If it's a polling integration, set an appropriate polling interval
11 | - [X] `entity-unique-id` - Entities have a unique ID
12 | - [X] `has-entity-name` - Entities use has_entity_name = True
13 | - [N/A] `entity-event-setup` - Entities event setup
14 | - [N/A] `dependency-transparency` - Dependency transparency
15 | - [X] `action-setup` - Service actions are registered in async_setup
16 | - [X] `common-modules` - Place common patterns in common modules
17 | - [X] `docs-high-level-description` - The documentation includes a high-level description of the integration brand, product, or service
18 | - [X] `docs-installation-instructions` - The documentation provides step-by-step installation instructions for the integration, including, if needed, prerequisites
19 | - [X] `docs-removal-instructions` - The documentation provides removal instructions
20 | - [X] `docs-actions` - The documentation describes the provided service actions that can be used
21 | - [X] `brands` - Has branding assets available for the integration
22 |
23 | ## Silver
24 | - [X] `config-entry-unloading` - Support config entry unloading
25 | - [N/A] `log-when-unavailable` - If internet/device/service is unavailable, log once when unavailable and once when back connected
26 | - [X] `entity-unavailable` - Mark entity unavailable if appropriate
27 | - [X] `action-exceptions` - Service actions raise exceptions when encountering failures
28 | - [X] `reauthentication-flow` - Reauthentication flow
29 | - [N/A] `parallel-updates` - Set Parallel updates
30 | - [X] `test-coverage` - Above 95% test coverage for all integration modules
31 | - [X] `integration-owner` - Has an integration owner
32 | - [X] `docs-installation-parameters` - The documentation describes all integration installation parameters
33 | - [X] `docs-configuration-parameters` - The documentation describes all integration configuration options
34 |
35 | ## Gold
36 | - [X] `entity-translations` - Entities have translated names
37 | - [X] `entity-device-class` - Entities use device classes where possible
38 | - [X] `devices` - The integration creates devices
39 | - [X] `entity-category` - Entities are assigned an appropriate EntityCategory
40 | - [X] `entity-disabled-by-default` - Integration disables less popular (or noisy) entities
41 | - [N/A] `discovery` - Can be discovered
42 | - [N/A] `stale-devices` - Clean up stale devices
43 | - [X] `diagnostics` - Implements diagnostics
44 | - [X] `exception-translations` - Exception messages are translatable
45 | - [X] `icon-translations` - Icon translations
46 | - [X] `reconfiguration-flow` - Integrations should have a reconfigure flow
47 | - [N/A] `dynamic-devices` - Devices added after integration setup
48 | - [N/A] `discovery-update-info` - Integration uses discovery info to update network information
49 | - [X] `repair-issues` - Repair issues and repair flows are used when user intervention is needed
50 | - [X] `docs-use-cases` - The documentation describes use cases to illustrate how this integration can be used
51 | - [N/A] `docs-supported-devices` - The documentation describes known supported / unsupported devices
52 | - [X] `docs-supported-functions` - The documentation describes the supported functionality, including entities, and platforms
53 | - [X] `docs-data-update` - The documentation describes how data is updated
54 | - [X] `docs-known-limitations` - The documentation describes known limitations of the integration (not to be confused with bugs)
55 | - [X] `docs-troubleshooting` - The documentation provides troubleshooting information
56 | - [X] `docs-examples` - The documentation provides automation examples the user can use.
57 |
58 | ## Platinum
59 | - [N/A] `async-dependency` - Dependency is async
60 | - [N/A] `inject-websession` - The integration dependency supports passing in a websession
61 | - [X] `strict-typing` - Strict typing
62 |
63 | ## Notes on applicability
- `entity-event-setup`: Entity events are not used by the integration, so there is nothing to set up.
65 | - `dependency-transparency`: The integration does not utilise an externally hosted dependency.
66 | - `log-when-unavailable`: It is not desirable to log once when the Solcast REST API is unavailable and then once more when re-connected. Each interaction is atomic, and a connection is not held open.
67 | - `parallel-updates`: Parallel updates of local devices are not applicable given the nature of the integration.
68 | - `discovery`: Local devices are not used, so discovery of any is irrelevant.
69 | - `stale-devices`: A single device is created for the integration instance. There can only be one instance.
70 | - `dynamic-devices`: A single device is created for the integration instance. There can only be one instance.
71 | - `discovery-update-info`: Local devices are not used, so updating network information is irrelevant.
72 | - `docs-supported-devices`: Local devices are not used, and API variability is irrelevant.
73 | - `async-dependency`: The integration does not utilise an externally hosted dependency.
74 | - `inject-websession`: The integration does not utilise an externally hosted dependency.
75 |
--------------------------------------------------------------------------------
/tests/test_specific_times.py:
--------------------------------------------------------------------------------
1 | """Test midnight rollover."""
2 |
3 | from datetime import datetime as dt
4 | import json
5 | import logging
6 | from pathlib import Path
7 |
8 | from freezegun.api import FrozenDateTimeFactory
9 | import pytest
10 |
11 | from homeassistant.components.recorder import Recorder
12 | from homeassistant.components.solcast_solar.const import (
13 | CONFIG_DISCRETE_NAME,
14 | CONFIG_FOLDER_DISCRETE,
15 | DEFAULT_FORECAST_DAYS,
16 | )
17 | from homeassistant.components.solcast_solar.coordinator import SolcastUpdateCoordinator
18 | from homeassistant.core import HomeAssistant
19 |
20 | from . import DEFAULT_INPUT1, async_cleanup_integration_tests, async_init_integration
21 |
22 |
@pytest.fixture(autouse=True)
def frozen_time() -> None:
    """No-op override of the shared autouse ``frozen_time`` fixture.

    Tests in this module drive mock time themselves, so the fixture does nothing.
    """
    return None
30 |
31 |
# Module-level logger for ad-hoc test diagnostics.
_LOGGER = logging.getLogger(__name__)
33 |
34 |
@pytest.mark.asyncio
async def test_midnight(
    recorder_mock: Recorder,
    hass: HomeAssistant,
    freezer: FrozenDateTimeFactory,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test midnight updates.

    Covers the midnight-UTC API usage reset, the auto-update task queued just
    after midnight UTC, and the local-midnight date rollover housekeeping.
    """

    try:
        config_dir = f"{hass.config.config_dir}/{CONFIG_DISCRETE_NAME}" if CONFIG_FOLDER_DISCRETE else hass.config.config_dir
        if CONFIG_FOLDER_DISCRETE:
            Path(config_dir).mkdir(parents=False, exist_ok=True)

        # Test midnight UTC usage reset.
        freezer.move_to("2025-01-10 23:59:59")

        # Per-entity update logging is required by the log assertions below.
        Path(f"{config_dir}/solcast-advanced.json").write_text(json.dumps({"entity_logging": True}), encoding="utf-8")

        entry = await async_init_integration(hass, DEFAULT_INPUT1)
        coordinator: SolcastUpdateCoordinator = entry.runtime_data.coordinator

        assert hass.states.get("sensor.solcast_pv_forecast_api_used").state == "4"  # type: ignore[union-attr]
        assert "Transitioning between summer/standard time" not in caplog.text

        coordinator._intervals = [  # Inject expired interval  # pyright: ignore[reportPrivateUsage]
            dt.fromisoformat("2025-01-10T00:59:30+00:00"),
            *coordinator._intervals,  # Inject expired interval  # pyright: ignore[reportPrivateUsage]
        ]
        caplog.clear()
        coordinator._data_updated = False  # Improve test coverage  # pyright: ignore[reportPrivateUsage]
        await coordinator.async_refresh()
        # Tick across midnight one second at a time until the usage reset and a
        # pending update task are both observed in the log.
        for _ in range(6):
            freezer.tick(1)
            coordinator._data_updated = True  # pyright: ignore[reportPrivateUsage]
            await coordinator.async_refresh()
            await hass.async_block_till_done()
            # Result is used for the next test. An update task must be pending, which should occur at nine minutes past the hour.
            if "API Used to 0" in caplog.text and "Create task pending_update" in caplog.text:  # Relies on SENSOR_UPDATE_LOGGING enabled
                break

        assert "Reset API usage" in caplog.text
        assert hass.states.get("sensor.solcast_pv_forecast_api_used").state == "0"  # type: ignore[union-attr]

        # Test auto-update occurs just after midnight UTC.
        caplog.clear()
        for _ in range(2000):  # Twenty virtual seconds
            freezer.tick(0.01)
            await hass.async_block_till_done()
            if "Completed task pending_update" in caplog.text:
                break
        assert "Completed task pending_update" in caplog.text

        # Test midnight local happenings.
        # NOTE(review): 13:59:59 UTC appears chosen to land just before local
        # midnight in the test time zone — confirm against the fixture zone.
        freezer.move_to(f"{dt.now().date()} 13:59:59")

        caplog.clear()
        for _ in range(600):
            freezer.tick()
            await hass.async_block_till_done()
            if "Updating sensor" in caplog.text:
                break

        assert "Date has changed" in caplog.text
        assert "Forecast data from" in caplog.text
        assert "Sun rise / set today" in caplog.text
        assert "Auto forecast updates for today" in caplog.text
        assert "Updating sensor" in caplog.text

    finally:
        await async_cleanup_integration_tests(hass)
106 |
107 |
@pytest.mark.parametrize(
    "scenario",
    [
        {"timezone": "Australia/Sydney", "start_date": "2025-04-04", "end_date": "2025-10-01"},
        {"timezone": "Europe/Dublin", "start_date": "2025-10-15", "end_date": "2026-03-16"},
    ],
)
async def test_timezone_transition(
    recorder_mock: Recorder,
    hass: HomeAssistant,
    freezer: FrozenDateTimeFactory,
    caplog: pytest.LogCaptureFixture,
    scenario: dict[str, str],
) -> None:
    """Test summer time transitions.

    Each scenario initialises the integration on a date inside daylight saving
    (start_date) and again on a date outside it (end_date), asserting the DST
    transition log message and that the stored forecast span is complete.
    """

    try:
        # Test transition from summer to standard time.
        freezer.move_to(scenario["start_date"] + " 00:00:00")
        entry = await async_init_integration(hass, DEFAULT_INPUT1, timezone=scenario["timezone"])
        coordinator: SolcastUpdateCoordinator = entry.runtime_data.coordinator
        assert coordinator.solcast.dst(dt.now())

        assert (
            f"Transitioning between {'standard/Summer' if scenario['timezone'] == 'Australia/Sydney' else 'standard/Winter'} time"
            in caplog.text
        )
        # Expected final day is day-of-month arithmetic on the start date.
        assert (
            f"Forecast data from {scenario['start_date']} to {scenario['start_date'][:-2]}{int(scenario['start_date'][-2:]) - 2 + DEFAULT_FORECAST_DAYS:02d} contains all intervals"
            in caplog.text
        )

        assert await hass.config_entries.async_unload(entry.entry_id)
        await hass.async_block_till_done()

        caplog.clear()
        await async_cleanup_integration_tests(hass)

        # Test transition from standard to summer time.
        freezer.move_to(scenario["end_date"] + " 00:00:00")
        entry = await async_init_integration(hass, DEFAULT_INPUT1, timezone=scenario["timezone"])
        # Plain assignment: re-annotating `coordinator` here is an error under strict type checking.
        coordinator = entry.runtime_data.coordinator
        assert not coordinator.solcast.dst(dt.now())

        assert (
            f"Transitioning between {'standard/Summer' if scenario['timezone'] == 'Australia/Sydney' else 'standard/Winter'} time"
            in caplog.text
        )
        assert (
            f"Forecast data from {scenario['end_date']} to {scenario['end_date'][:-2]}{int(scenario['end_date'][-2:]) - 1 + DEFAULT_FORECAST_DAYS - 1:02d} contains all intervals"
            in caplog.text
        )

        assert await hass.config_entries.async_unload(entry.entry_id)
        await hass.async_block_till_done()

    finally:
        await async_cleanup_integration_tests(hass)
166 |
--------------------------------------------------------------------------------
/tests/test_dampen_dst.py:
--------------------------------------------------------------------------------
1 | """Tests for the Solcast Solar automated dampening."""
2 |
3 | import asyncio
4 | import copy
5 | from datetime import datetime as dt, timedelta
6 | import json
7 | import logging
8 | from pathlib import Path
9 | from typing import Any
10 | from zoneinfo import ZoneInfo
11 |
12 | from freezegun.api import FrozenDateTimeFactory
13 | import pytest
14 |
15 | from homeassistant.components.recorder import Recorder
16 | from homeassistant.components.solcast_solar.const import (
17 | AUTO_DAMPEN,
18 | AUTO_UPDATE,
19 | CONFIG_DISCRETE_NAME,
20 | CONFIG_FOLDER_DISCRETE,
21 | EXCLUDE_SITES,
22 | GENERATION_ENTITIES,
23 | GET_ACTUALS,
24 | SITE_EXPORT_ENTITY,
25 | SITE_EXPORT_LIMIT,
26 | USE_ACTUALS,
27 | )
28 | from homeassistant.core import HomeAssistant
29 | from homeassistant.helpers import entity_registry as er
30 |
31 | from . import (
32 | DEFAULT_INPUT2,
33 | ZONE_RAW,
34 | ExtraSensors,
35 | async_cleanup_integration_tests,
36 | async_init_integration,
37 | )
38 |
# Time zone of the simulated installation, built from the shared test constant.
ZONE = ZoneInfo(ZONE_RAW)
# Captured once at module import, not per test.
NOW = dt.now(ZONE)

# Module-level logger for ad-hoc test diagnostics.
_LOGGER = logging.getLogger(__name__)
43 |
44 |
@pytest.fixture(autouse=True)
def frozen_time() -> None:
    """No-op override of the shared autouse ``frozen_time`` fixture.

    Tests in this module drive mock time themselves, so the fixture does nothing.
    """
    return None
52 |
53 |
async def midnight_utc(hass: HomeAssistant, freezer: FrozenDateTimeFactory, caplog: pytest.LogCaptureFixture, at: str) -> None:
    """Set the time to midnight UTC.

    Moves mocked time to *at* (an ISO timestamp) and then ticks in 0.1 s steps,
    up to 600 times, until "Updating sensor Third Site" appears in the captured
    log, bounded by an overall asyncio timeout.
    """
    freezer.move_to(at)
    async with asyncio.timeout(600):
        for _ in range(600):
            freezer.tick(0.1)
            await hass.async_block_till_done()
            if "Updating sensor Third Site" in caplog.text:
                break
63 |
64 |
async def five_minute_bump(hass: HomeAssistant, freezer: FrozenDateTimeFactory, caplog: pytest.LogCaptureFixture) -> None:
    """Set the time to the next five-minute point.

    Jumps to the next five-minute boundary, then ticks in 10 ms steps until the
    Dampening sensor logs an update, bounded by an asyncio timeout.
    """
    freezer.move_to(dt.now().replace(minute=dt.now().minute // 5 * 5, second=0, microsecond=0) + timedelta(minutes=5))
    async with asyncio.timeout(300):
        while "Updating sensor Dampening" not in caplog.text:
            freezer.tick(0.01)
            await hass.async_block_till_done()
72 |
73 |
@pytest.mark.parametrize(
    "direction",
    [
        {
            "times": [
                "2025-10-02T18:00:00+00:00",
                "2025-10-03T00:00:00+00:00",
                "2025-10-03T14:00:00+00:00",
                "2025-10-04T00:00:00+00:00",
                "2025-10-04T14:00:00+00:00",
                "2025-10-04T16:00:00+00:00",
            ],
            "from": "09:00",
            "to": "10:00",
            "factor": (-2, 0),
        },
        {
            "times": [
                "2026-04-02T18:00:00+00:00",
                "2026-04-03T00:00:00+00:00",
                "2026-04-03T13:00:00+00:00",
                "2026-04-04T00:00:00+00:00",
                "2026-04-04T13:00:00+00:00",
                "2026-04-04T15:00:00+00:00",
            ],
            "from": "10:00",
            "to": "09:00",
            "factor": (0, -2),
        },
    ],
)
async def test_auto_dampen_dst_transition(
    recorder_mock: Recorder,
    hass: HomeAssistant,
    caplog: pytest.LogCaptureFixture,
    freezer: FrozenDateTimeFactory,
    direction: dict[str, Any],
) -> None:
    """Test automated dampening.

    Runs a DST transition in each direction and checks that the auto-dampening
    factor stays attached to the expected half-hour slot across the change.
    """

    try:
        options = copy.deepcopy(DEFAULT_INPUT2)
        options[AUTO_UPDATE] = 1
        options[GET_ACTUALS] = True
        options[USE_ACTUALS] = 0
        options[AUTO_DAMPEN] = True
        options[EXCLUDE_SITES] = ["3333-3333-3333-3333"]
        options[GENERATION_ENTITIES] = [
            "sensor.solar_export_sensor_1111_1111_1111_1111",
            "sensor.solar_export_sensor_2222_2222_2222_2222",
        ]
        options[SITE_EXPORT_ENTITY] = "sensor.site_export_sensor"
        options[SITE_EXPORT_LIMIT] = 5.0
        expected_value = 0.797

        dampening_entity = "sensor.solcast_pv_forecast_dampening"

        def assert_factor_at(slot_offset: int) -> None:
            """Assert the dampening sensor is on and the slot at 20 + offset carries the expected factor."""
            state = hass.states.get(dampening_entity)
            if state is None:
                pytest.fail("Dampening entity state is None")
            assert state.state == "True"
            factors = state.attributes.get("factors")
            if factors is None:
                pytest.fail("Dampening attribute `factors` is None")
            assert len(factors) == 48
            assert factors[20 + slot_offset]["factor"] == expected_value

        config_dir = f"{hass.config.config_dir}/{CONFIG_DISCRETE_NAME}" if CONFIG_FOLDER_DISCRETE else hass.config.config_dir
        if CONFIG_FOLDER_DISCRETE:
            Path(config_dir).mkdir(parents=False, exist_ok=True)
        Path(f"{config_dir}/solcast-advanced.json").write_text(json.dumps({"entity_logging": True}), encoding="utf-8")

        # Test transition from standard to summer time.
        freezer.move_to(direction["times"][0])

        await async_init_integration(hass, options, timezone="Australia/Sydney", extra_sensors=ExtraSensors.YES_WATT_HOUR)

        # Enable the dampening entity
        er.async_get(hass).async_update_entity(dampening_entity, disabled_by=None)
        async with asyncio.timeout(300):
            while "Reloading configuration entries because disabled_by changed" not in caplog.text:
                freezer.tick(0.01)
                await hass.async_block_till_done()

        await midnight_utc(hass, freezer, caplog, direction["times"][1])

        # Wait for the dampening model to run, then verify the pre-transition slot.
        freezer.move_to(direction["times"][2])
        caplog.clear()
        for _ in range(60000):
            freezer.tick(0.1)
            await hass.async_block_till_done()
            if "Task model_automated_dampening took" in caplog.text:
                break
        assert f"Auto-dampen factor for {direction['from']} is {expected_value}" in caplog.text
        caplog.clear()
        await five_minute_bump(hass, freezer, caplog)
        assert_factor_at(direction["factor"][0])

        await midnight_utc(hass, freezer, caplog, direction["times"][3])

        # After the DST change the factor must appear at the shifted slot.
        freezer.move_to(direction["times"][4])
        caplog.clear()
        for _ in range(60000):
            freezer.tick(0.1)
            await hass.async_block_till_done()
            if "Applying future dampening" in caplog.text:
                break
        assert f"Auto-dampen factor for {direction['to']} is {expected_value}" in caplog.text
        caplog.clear()
        await five_minute_bump(hass, freezer, caplog)
        assert_factor_at(direction["factor"][1])

        # Later the same day the factor must remain on the same slot.
        freezer.move_to(direction["times"][5])
        caplog.clear()
        await hass.async_block_till_done()
        assert_factor_at(direction["factor"][1])

    finally:
        assert await async_cleanup_integration_tests(hass)
206 |
--------------------------------------------------------------------------------
/tests/simulator/simulate.py:
--------------------------------------------------------------------------------
1 | """Simulated data for Solcast Solar integration."""
2 |
3 | import datetime
4 | from datetime import datetime as dt, timedelta
5 | from typing import Any
6 | from zoneinfo import ZoneInfo
7 |
def _site(resource_id: str, name: str, capacity: float, capacity_dc: float, extra_tag: str) -> dict[str, Any]:
    """Build one simulated site record; location, install date and orientation are shared by all sites."""
    return {
        "resource_id": resource_id,
        "name": name,
        "latitude": -11.11111,
        "longitude": 111.1111,
        "install_date": "2024-01-01T00:00:00+00:00",
        "loss_factor": 0.99,
        "capacity": capacity,
        "capacity_dc": capacity_dc,
        "azimuth": 66,
        "tilt": 30,
        "tags": ["tag1", extra_tag],
    }


# Simulated sites per API key; each key also carries a per-key "counter".
API_KEY_SITES: dict[str, Any] = {
    "1": {
        "sites": [
            _site("1111-1111-1111-1111", "First Site", 5.0, 6.2, "tag2"),
            _site("2222-2222-2222-2222", "Second Site", 3.0, 4.2, "tag3"),
        ],
        "counter": 0,
    },
    "10": {
        "sites": [
            _site("1111-1111-1111-1111", "First Site", 5.0, 6.2, "tag2"),
            _site("2222-2222-2222-2222", "Second Site", 3.0, 4.2, "tag3"),
        ],
        "counter": 0,
    },
    "2": {
        "sites": [
            _site("3333-3333-3333-3333", "Third Site", 3.0, 3.5, "tag4"),
        ],
        "counter": 0,
    },
    "3": {
        "sites": [
            _site("4444-4444-4444-4444", "Fourth Site", 4.5, 5.0, "tag5"),
            _site("5555-5555-5555-5555", "Fifth Site", 3.2, 3.7, "tag6"),
            _site("6666-6666-6666-6666", "Sixth Site", 4.2, 4.8, "tag7"),
        ],
        "counter": 0,
    },
    "aaaa-aaaa": {
        "sites": [
            _site("7777-7777-7777-7777", "Seventh Site", 3.0, 3.5, "tag2"),
        ],
        "counter": 0,
    },
    "no_sites": {
        "sites": [],
        "counter": 0,
    },
}
# Scaling factors applied to site capacity for the central, 10th and 90th
# percentile estimates.
FORECAST = 0.9
FORECAST_10 = 0.75
FORECAST_90 = 1.0
# Rising half of the daily generation curve (half-hourly slots, 06:00-12:30 local).
_RAMP: list[float] = [0.01, 0.025, 0.04, 0.075, 0.11, 0.17, 0.26, 0.38, 0.52, 0.65, 0.8, 0.9, 0.97]
# 48 half-hour slots: 12 pre-dawn zeros, symmetric bell with a two-slot midday
# peak of 1, then 8 post-dusk zeros.
GENERATION_FACTOR: list[float] = [0] * 12 + _RAMP + [1, 1] + _RAMP[::-1] + [0] * 8
TIMEZONE = ZoneInfo("Australia/Melbourne")
210 |
211 |
212 | class SimulatedSolcast:
213 | """Simulated Solcast API."""
214 |
215 | modified_actuals: bool = True
216 |
217 | def __init__(self) -> None:
218 | """Initialize the API."""
219 | self.timezone: ZoneInfo = TIMEZONE
220 | self.cached_forecasts: dict[str, Any] = {}
221 |
222 | def raw_get_sites(self, api_key: str) -> dict[str, Any] | None:
223 | """Return sites for an API key."""
224 |
225 | sites = API_KEY_SITES.get(api_key)
226 | meta = {
227 | "page_count": 1,
228 | "current_page": 1,
229 | "total_records": len(API_KEY_SITES.get(api_key, {}).get("sites", [])),
230 | }
231 | return sites | meta if sites is not None else None
232 |
233 | def raw_get_site_estimated_actuals(
234 | self, site_id: str, api_key: str, hours: int, prefix: str = "pv_estimate", period_end: dt | None = None
235 | ) -> dict[str, list[dict[str, Any]]]:
236 | """Return simulated estimated actuals for a site."""
237 |
238 | sites: list[dict[str, Any]] | int | None = API_KEY_SITES.get(api_key, {}).get("sites", [])
239 | site: dict[str, Any] | None = next((s for s in sites if s["resource_id"] == site_id), None) if isinstance(sites, list) else None
240 | if not site:
241 | return {}
242 | period_end = self.get_period(dt.now(datetime.UTC), timedelta(hours=hours) * -1) if period_end is None else period_end
243 |
244 | return {
245 | "estimated_actuals": [
246 | {
247 | "period_end": (period_end + timedelta(minutes=minute * 30)).isoformat(),
248 | "period": "PT30M",
249 | prefix: self.__pv_interval(site["capacity"], FORECAST, period_end, minute, modified=self.modified_actuals),
250 | }
251 | for minute in range((hours + 1) * 2)
252 | ],
253 | }
254 |
255 | def raw_get_site_forecasts(
256 | self, site_id: str, api_key: str, hours: int, prefix: str = "pv_estimate"
257 | ) -> dict[str, list[dict[str, Any]]]:
258 | """Return simulated forecasts for a site."""
259 |
260 | sites: list[dict[str, Any]] | int | None = API_KEY_SITES.get(api_key, {}).get("sites")
261 | site: dict[str, Any] | None = next((s for s in sites if s["resource_id"] == site_id), None) if isinstance(sites, list) else None
262 | if not site:
263 | return {}
264 | period_end = self.get_period(dt.now(datetime.UTC), timedelta(minutes=30))
265 |
266 | lookup = f"{api_key} {site_id} {hours} {period_end}"
267 | if cached := self.cached_forecasts.get(lookup):
268 | return cached
269 |
270 | self.cached_forecasts[lookup] = {
271 | "forecasts": [
272 | {
273 | "period_end": (period_end + timedelta(minutes=minute * 30)).isoformat(),
274 | "period": "PT30M",
275 | prefix: self.__pv_interval(site["capacity"], FORECAST, period_end, minute),
276 | prefix + "10": self.__pv_interval(site["capacity"], FORECAST_10, period_end, minute),
277 | prefix + "90": self.__pv_interval(site["capacity"], FORECAST_90, period_end, minute),
278 | }
279 | for minute in range(hours * 2 + 1) # Solcast usually returns one more forecast, not an even number of intervals
280 | ],
281 | }
282 | return self.cached_forecasts[lookup]
283 |
284 | def set_time_zone(self, timezone: ZoneInfo) -> None:
285 | """Set the time zone."""
286 |
287 | self.timezone = timezone
288 |
289 | def get_period(self, period: dt, delta: timedelta) -> dt:
290 | """Return the start period and factors for the current time."""
291 | return period.replace(minute=(int(period.minute / 30) * 30), second=0, microsecond=0) + delta
292 |
293 | def __pv_interval(self, site_capacity: float, estimate: float, period_end: dt, minute: int, modified: bool = False) -> float:
294 | """Calculate value for a single interval."""
295 | interval = int(
296 | (period_end + timedelta(minutes=minute * 30)).astimezone(self.timezone).hour * 2
297 | + (period_end + timedelta(minutes=minute * 30)).astimezone(self.timezone).minute / 30
298 | )
299 | interval -= (
300 | 2
301 | if ((period_end + timedelta(minutes=minute * 30)).astimezone(self.timezone).dst() == timedelta(hours=1) and interval > 1)
302 | else 0
303 | )
304 |
305 | return round(
306 | site_capacity * estimate * (GENERATION_FACTOR[interval] * 0.4 if modified and interval > 32 else GENERATION_FACTOR[interval]),
307 | 4,
308 | )
309 |
--------------------------------------------------------------------------------
/tests/test_repairs.py:
--------------------------------------------------------------------------------
1 | """Test the Solcast Solar repairs flow."""
2 |
3 | import asyncio
4 | import copy
5 | import datetime
6 | from datetime import datetime as dt, timedelta
7 | import json
8 | import logging
9 | from pathlib import Path
10 | import re
11 | from typing import Any
12 | from zoneinfo import ZoneInfo
13 |
14 | from freezegun.api import FrozenDateTimeFactory
15 | import pytest
16 |
17 | from homeassistant.components.recorder import Recorder
18 | from homeassistant.components.repairs import ConfirmRepairFlow
19 | from homeassistant.components.solcast_solar.const import (
20 | AUTO_UPDATE,
21 | CONFIG_DISCRETE_NAME,
22 | CONFIG_FOLDER_DISCRETE,
23 | DOMAIN,
24 | SERVICE_CLEAR_DATA,
25 | SERVICE_UPDATE,
26 | )
27 | from homeassistant.components.solcast_solar.coordinator import SolcastUpdateCoordinator
28 | from homeassistant.components.solcast_solar.repairs import async_create_fix_flow
29 | from homeassistant.components.solcast_solar.solcastapi import SolcastApi
30 | from homeassistant.config_entries import ConfigEntry
31 | from homeassistant.core import HomeAssistant
32 | from homeassistant.data_entry_flow import FlowResultType
33 | from homeassistant.helpers import issue_registry as ir
34 |
35 | from . import (
36 | DEFAULT_INPUT1,
37 | MOCK_OVER_LIMIT,
38 | ZONE_RAW,
39 | async_cleanup_integration_tests,
40 | async_init_integration,
41 | session_clear,
42 | session_set,
43 | )
44 | from .simulator import API_KEY_SITES
45 |
46 | _LOGGER = logging.getLogger(__name__)
47 |
48 |
async def _reload(hass: HomeAssistant, entry: ConfigEntry) -> tuple[SolcastUpdateCoordinator | None, SolcastApi | None]:
    """Reload the integration.

    Returns the (coordinator, solcast) pair on success, or (None, None) when the
    entry did not load or its runtime data is unavailable.
    """

    _LOGGER.warning("Reloading integration")
    await hass.config_entries.async_reload(entry.entry_id)
    await hass.async_block_till_done()
    if hass.data[DOMAIN].get(entry.entry_id):
        try:
            coordinator = entry.runtime_data.coordinator
        except Exception:  # Narrowed from a bare except; BaseException should propagate
            _LOGGER.error("Failed to load coordinator (or solcast), which may be expected given test conditions")
        else:
            return coordinator, coordinator.solcast
    # Previously the entry-not-loaded path fell through and returned bare None,
    # violating the declared tuple return type; always return a pair.
    return None, None
62 |
63 |
async def test_missing_data_fixable(
    recorder_mock: Recorder,
    hass: HomeAssistant,
    issue_registry: ir.IssueRegistry,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test missing fixable."""

    try:
        options = copy.deepcopy(DEFAULT_INPUT1)
        options[AUTO_UPDATE] = "0"  # Disable auto-update so the repair flow can offer to enable it.
        entry = await async_init_integration(hass, options)
        config_dir = f"{hass.config.config_dir}/{CONFIG_DISCRETE_NAME}" if CONFIG_FOLDER_DISCRETE else hass.config.config_dir

        def remove_future_forecasts() -> None:
            # Truncate both cached forecast files so a reload sees missing future records.
            for file_name in [f"{config_dir}/solcast.json", f"{config_dir}/solcast-undampened.json"]:
                data_file = Path(file_name)
                data = json.loads(data_file.read_text(encoding="utf-8"))
                # Remove forecasts from "now" plus four days onward (original comment said six days,
                # but the code truncates at four).
                for site in data["siteinfo"].values():
                    site["forecasts"] = [
                        f for f in site["forecasts"] if f["period_start"] < (dt.now(datetime.UTC) + timedelta(days=4)).isoformat()
                    ]
                data_file.write_text(json.dumps(data), encoding="utf-8")
                _LOGGER.critical("%s: %s", data_file, len(data["siteinfo"]["1111-1111-1111-1111"]["forecasts"]))

        remove_future_forecasts()
        await _reload(hass, entry)

        # Assert the issue is present, fixable and non-persistent
        assert len(issue_registry.issues) == 1
        issue = list(issue_registry.issues.values())[0]
        assert issue.domain == DOMAIN
        assert issue.issue_id == "records_missing_fixable"
        assert issue.is_fixable is True
        assert issue.is_persistent is False

        # An unknown issue ID must fall back to the generic confirm flow.
        flow = await async_create_fix_flow(hass, "not_handled_issue", {})
        assert type(flow) is ConfirmRepairFlow

        flow = await async_create_fix_flow(hass, issue.issue_id, {"contiguous": 8, "entry_id": entry.entry_id})
        flow.hass = hass
        flow.issue_id = issue.issue_id

        result = await flow.async_step_init()  # type: ignore[attr-defined]
        assert result["type"] == FlowResultType.FORM
        assert result["step_id"] == "offer_auto"

        # Accept the offer to enable auto-update; this reconfigures and reloads the integration.
        result = await flow.async_step_offer_auto({AUTO_UPDATE: "1"})  # type: ignore[attr-defined]
        await hass.async_block_till_done()

        assert "Options updated, action: The integration will reload" in caplog.text
        assert "Auto forecast updates" in caplog.text
        assert result["type"] == FlowResultType.ABORT
        assert result["reason"] == "reconfigured"

    finally:
        await async_cleanup_integration_tests(hass)
122 |
123 |
async def test_missing_data_initial(
    recorder_mock: Recorder,
    hass: HomeAssistant,
    issue_registry: ir.IssueRegistry,
    caplog: pytest.LogCaptureFixture,
    freezer: FrozenDateTimeFactory,
) -> None:
    """Test missing data after history reset."""

    try:

        def assert_issue_present() -> None:
            # Assert the issue is present, unfixable and persistent
            assert len(issue_registry.issues) == 1
            issue = list(issue_registry.issues.values())[0]
            assert issue.domain == DOMAIN
            assert issue.issue_id == "records_missing_initial"
            assert issue.is_fixable is False
            assert issue.is_persistent is True

        def assert_issue_not_present() -> None:
            # Assert the issue is not present
            assert len(issue_registry.issues) == 0

        async def update_forecast() -> None:
            # Trigger a forecast update, then tick frozen time until the update task logs completion.
            await hass.services.async_call(DOMAIN, SERVICE_UPDATE, {}, blocking=True)
            async with asyncio.timeout(100):
                while "Completed task update" not in caplog.text:
                    freezer.tick(0.1)
                    await hass.async_block_till_done()

        options = copy.deepcopy(DEFAULT_INPUT1)
        options[AUTO_UPDATE] = "0"  # Updates happen only via the explicit service call.
        entry = await async_init_integration(hass, options)
        solcast = entry.runtime_data.coordinator.solcast

        caplog.clear()
        # Simulate an over-API-limit response, then clear all stored data.
        session_set(MOCK_OVER_LIMIT)
        await hass.services.async_call(DOMAIN, SERVICE_CLEAR_DATA, {}, blocking=True)
        await hass.async_block_till_done()

        assert_issue_present()

        caplog.clear()
        session_clear(MOCK_OVER_LIMIT)
        await solcast.reset_api_usage(force=True)
        assert "Reset API usage" in caplog.text
        await update_forecast()
        # The issue is expected to remain after a single same-day update.
        assert_issue_present()

        caplog.clear()
        freezer.move_to((dt.now(tz=ZoneInfo(ZONE_RAW))).replace(hour=23, minute=59, second=0, microsecond=0))
        await update_forecast()

        caplog.clear()
        # Roll over to midnight of the next day; the subsequent update should clear the issue.
        freezer.move_to((dt.now(tz=ZoneInfo(ZONE_RAW)) + timedelta(days=1)).replace(hour=0, minute=0, second=0, microsecond=0))
        await hass.async_block_till_done()
        await update_forecast()
        assert_issue_not_present()

    finally:
        await async_cleanup_integration_tests(hass)
186 |
187 |
@pytest.mark.parametrize(
    "scenario",
    [
        {"latitude": -37.8136, "azimuth": +50, "unusual": False},
        {"latitude": -37.8136, "azimuth": -50, "unusual": False},
        {"latitude": -37.8136, "azimuth": +150, "proposal": +30, "unusual": True},
        {"latitude": -37.8136, "azimuth": -150, "proposal": -30, "unusual": True},
        {"latitude": +37.8136, "azimuth": +50, "proposal": +130, "unusual": True},
        {"latitude": +37.8136, "azimuth": -50, "proposal": -130, "unusual": True},
        {"latitude": +37.8136, "azimuth": +150, "unusual": False},
        {"latitude": +37.8136, "azimuth": -150, "unusual": False},
        {"latitude": +37.8136, "azimuth": 90, "unusual": False},
        {"latitude": -37.8136, "azimuth": -90, "unusual": False},
        {"latitude": +37.8136, "azimuth": 180, "unusual": False},
        {"latitude": -37.8136, "azimuth": 0, "unusual": False},
    ],
)
async def test_unusual_azimuth(
    recorder_mock: Recorder,
    hass: HomeAssistant,
    caplog: pytest.LogCaptureFixture,
    issue_registry: ir.IssueRegistry,
    scenario: dict[str, Any],
) -> None:
    """Test unusual azimuth."""

    old_latitude = API_KEY_SITES["1"]["sites"][0]["latitude"]
    old_azimuth = API_KEY_SITES["1"]["sites"][0]["azimuth"]
    API_KEY_SITES["1"]["sites"][0]["latitude"] = scenario["latitude"]
    API_KEY_SITES["1"]["sites"][0]["azimuth"] = scenario["azimuth"]
    entry = await async_init_integration(hass, DEFAULT_INPUT1)

    try:
        if scenario["unusual"]:
            # Assert the issue is present and persistent
            assert len(issue_registry.issues) == 1
            issue = list(issue_registry.issues.values())[0]
            assert f"Raise issue `{issue.issue_id}`" in caplog.text
            assert issue.domain == DOMAIN
            # BUG FIX: the original `assert a == b if cond else "southern"` parsed as
            # `assert (a == b) if cond else "southern"`, so the southern-latitude case
            # asserted a truthy string and always passed. Parenthesise the expected value.
            assert issue.issue_id == ("unusual_azimuth_northern" if scenario["latitude"] > 0 else "unusual_azimuth_southern")
            assert issue.is_fixable is False
            assert issue.is_persistent is True
            assert issue.translation_placeholders is not None
            assert issue.translation_placeholders.get("proposal") == str(scenario["proposal"])
            assert re.search(r"WARNING.+Unusual azimuth", caplog.text) is not None

            if scenario["proposal"] != -130:
                # Fix the issue at Solcast and reload the integration
                API_KEY_SITES["1"]["sites"][0]["latitude"] = old_latitude
                API_KEY_SITES["1"]["sites"][0]["azimuth"] = old_azimuth
                await _reload(hass, entry)
                assert len(issue_registry.issues) == 0
            else:
                assert "Re-serialising sites cache for" in caplog.text
                caplog.clear()
                # Dismiss the issue and reload the integration
                ir.async_ignore_issue(hass, DOMAIN, issue.issue_id, True)
                await _reload(hass, entry)
                assert len(list(issue_registry.issues.values())) == 0
                assert "Remove ignored issue for unusual_azimuth_northern" in caplog.text
                assert f"Raise issue `{issue.issue_id}`" not in caplog.text
                assert len(issue_registry.issues) == 0
                caplog.clear()
                await _reload(hass, entry)
                assert re.search(r"DEBUG.+Unusual azimuth", caplog.text) is not None
        else:
            # Assert the issue is not present
            assert len(issue_registry.issues) == 0

    finally:
        API_KEY_SITES["1"]["sites"][0]["latitude"] = old_latitude
        API_KEY_SITES["1"]["sites"][0]["azimuth"] = old_azimuth
        await async_cleanup_integration_tests(hass)
261 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/custom_components/solcast_solar/util.py:
--------------------------------------------------------------------------------
1 | """Utility."""
2 |
3 | # pylint: disable=consider-using-enumerate
4 |
5 | from collections.abc import Iterator
6 | from dataclasses import dataclass
7 | from datetime import datetime as dt
8 | from enum import Enum
9 | import json
10 | import logging
11 | import math
12 | import re
13 | from typing import TYPE_CHECKING, Any
14 |
15 | from homeassistant.core import HomeAssistant
16 | from homeassistant.exceptions import IntegrationError
17 | from homeassistant.helpers import issue_registry as ir
18 | from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
19 |
20 | from .const import (
21 | DOMAIN,
22 | DT_DATE_ONLY_FORMAT,
23 | ESTIMATE,
24 | ESTIMATE10,
25 | ESTIMATE90,
26 | ISSUE_ADVANCED_DEPRECATED,
27 | ISSUE_ADVANCED_PROBLEM,
28 | LEARN_MORE_ADVANCED,
29 | NEW_OPTION,
30 | OPTION,
31 | PRIOR_CRASH_EXCEPTION,
32 | PRIOR_CRASH_PLACEHOLDERS,
33 | PRIOR_CRASH_TRANSLATION_KEY,
34 | PROBLEMS,
35 | STOPS_WORKING,
36 | )
37 |
38 | if TYPE_CHECKING:
39 | from . import coordinator
40 |
# Status code translation, HTTP and more.
# A HTTP 418 error is included here for fun. This was introduced in RFC2324#section-2.3.2 as an April Fools joke in 1998.
# A HTTP 420 error is a Demolition Man reference previously used by Twitter to indicate rate limiting, seen rarely (and oddly) by this integration.
# 400-599 = HTTP
# 900-999 = Integration-specific situation to be potentially handled with retries.
# Codes absent from this map are returned unchanged by http_status_translate().
STATUS_TRANSLATE: dict[int, str] = {
    200: "Success",
    400: "Bad request",
    401: "Unauthorized",
    403: "Forbidden",
    404: "Not found",
    418: "I'm a teapot",
    420: "Enhance your calm",
    429: "Try again later",
    500: "Internal web server error",
    501: "Not implemented",
    502: "Bad gateway",
    503: "Service unavailable",
    504: "Gateway timeout",
    996: "Connection refused",
    997: "Connect call failed",
    999: "Prior crash",
}
64 |
65 | _LOGGER = logging.getLogger(__name__)
66 |
67 |
@dataclass
class SolcastData:
    """Runtime data definition."""

    # The TYPE_CHECKING branch gives static checkers the concrete coordinator
    # type without importing .coordinator at runtime (guarded at module top).
    if TYPE_CHECKING:
        coordinator: coordinator.SolcastUpdateCoordinator
    else:
        coordinator: DataUpdateCoordinator[None]
76 |
77 |
class SolcastApiStatus(Enum):
    """The state of the Solcast API."""

    OK = 0  # API data loaded and usable.
    DATA_CORRUPT = 1  # Cached data could not be read.
    DATA_INCOMPATIBLE = 2  # Cached data is from an incompatible format/version.
    BUILD_FAILED_FORECASTS = 3  # Building forecast data failed.
    BUILD_FAILED_ACTUALS = 4  # Building estimated-actuals data failed.
    ERROR = 5  # General error.
    UNKNOWN = 99  # Status not yet determined.
88 |
89 |
class DataCallStatus(Enum):
    """The result of a data call."""

    SUCCESS = 0  # Call completed successfully.
    FAIL = 1  # Call failed.
    ABORT = 2  # Call was deliberately abandoned.
96 |
97 |
class SitesStatus(Enum):
    """The state of load sites."""

    OK = 0  # Sites loaded.
    BAD_KEY = 1  # API key rejected.
    ERROR = 2  # General error loading sites.
    NO_SITES = 3  # The account has no sites.
    CACHE_INVALID = 4  # The sites cache could not be used.
    API_BUSY = 5  # The Solcast API reported busy.
    UNKNOWN = 99  # Status not yet determined.
108 |
109 |
class UsageStatus(Enum):
    """The state of API usage."""

    OK = 0  # Usage data loaded.
    ERROR = 1  # Usage data could not be loaded.
    UNKNOWN = 99  # Status not yet determined.
116 |
117 |
class AutoUpdate(int, Enum):
    """The auto-update mode.

    (The original docstring, "The type of history data.", was a copy/paste of
    HistoryType's docstring and did not describe this enum.)
    """

    NONE = 0  # No automatic forecast updates.
    DAYLIGHT = 1  # Update automatically during daylight hours.
    ALL_DAY = 2  # Update automatically around the clock.
124 |
125 |
class HistoryType(int, Enum):
    """The type of history data."""

    FORECASTS = 0  # Forecast history.
    ESTIMATED_ACTUALS = 1  # Estimated actuals history.
    ESTIMATED_ACTUALS_ADJUSTED = 2  # Estimated actuals, adjusted.
132 |
133 |
134 | class DateTimeEncoder(json.JSONEncoder):
135 | """Helper to convert datetime dict values to ISO format."""
136 |
137 | def default(self, o: Any) -> str | Any:
138 | """Convert to ISO format if datetime."""
139 | return o.isoformat() if isinstance(o, dt) else super().default(o)
140 |
141 |
class NoIndentEncoder(json.JSONEncoder):
    """Helper to output semi-indented json."""

    def iterencode(self, o: Any, _one_shot: bool = False):
        """Recursive encoder to indent only top level keys.

        Re-flows the base encoder's output so lines inside JSON arrays are
        compacted (whitespace stripped, no newlines until the closing bracket)
        while everything outside arrays keeps its newline-terminated indenting.
        """
        list_lvl = 0  # Current JSON array nesting depth.
        raw: Iterator[str] = super().iterencode(o, _one_shot=_one_shot)
        output = ""
        # NOTE(review): list(raw)[0] assumes the base iterencode yields a single
        # chunk; any additional chunks would be silently dropped — confirm this
        # holds for the inputs this encoder is used with.
        for s in list(raw)[0].splitlines():
            if "[" in s:
                list_lvl += 1
            elif list_lvl > 0:
                s = s.replace(" ", "").rstrip()  # Compact lines inside an array.
                if "]" in s:
                    list_lvl -= 1
                    s += "\n"  # Restore the newline after the array closes.
            else:
                s += "\n"
            output += s
        yield output
162 |
163 |
class JSONDecoder(json.JSONDecoder):
    """Helper to convert ISO format dict values to datetime."""

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        """Initialise the decoder with a hook that revives datetimes."""
        json.JSONDecoder.__init__(self, object_hook=self.date_hook, *args, **kwargs)  # noqa: B026

    def date_hook(self, o: Any) -> dict[str, Any]:
        """Return converted datetimes.

        Values that parse as ISO-format datetime strings are converted; all
        other values are kept unchanged.
        """
        result: dict[str, Any] = {}
        for key, value in o.items():
            try:
                result[key] = dt.fromisoformat(value)
            except (TypeError, ValueError):
                # Was a bare `except:`; fromisoformat raises TypeError for
                # non-strings and ValueError for non-ISO strings — catch only
                # those so real bugs are not silently swallowed.
                result[key] = value
        return result
180 |
181 |
def http_status_translate(status: int) -> str | Any:
    """Translate HTTP status code to a human-readable translation."""

    translation = STATUS_TRANSLATE.get(status)
    if translation:
        return f"{status}/{translation}"
    return status
186 |
187 |
def api_key_last_six(api_key: str) -> str:
    """Return last six characters of API key."""

    tail_length = 6
    return api_key[-tail_length:]
192 |
193 |
def redact_api_key(api_key: str) -> str:
    """Obfuscate API key.

    Six asterisks followed by the key's last six characters (helper inlined).
    """

    return f"******{api_key[-6:]}"
198 |
199 |
def redact_msg_api_key(msg: str, api_key: str) -> str:
    """Obfuscate API key in messages."""

    redacted = redact_api_key(api_key)
    # The key can appear after any of these known prefixes in log/URL text.
    for prefix in ("key=", "key': '", "sites-", "usage-"):
        msg = msg.replace(prefix + api_key, prefix + redacted)
    return msg
209 |
210 |
def redact_lat_lon_simple(s: str) -> str:
    """Redact latitude and longitude decimal places in a string."""

    decimals = re.compile(r"\.[0-9]+")
    return decimals.sub(".******", s)
215 |
216 |
def redact_lat_lon(s: str) -> str:
    """Redact latitude and longitude in a string.

    Matches the value after `latitude':` / `longitude':` and masks it entirely.
    """

    coordinate_value = re.compile(r"itude\': [0-9\-\.]+")
    return coordinate_value.sub("itude': **.******", s)
221 |
222 |
223 | def forecast_entry_update(forecasts: dict[dt, Any], period_start: dt, pv: float, pv10: float | None = None, pv90: float | None = None):
224 | """Update an individual forecast entry."""
225 |
226 | extant = forecasts.get(period_start)
227 | if extant: # Update existing.
228 | forecasts[period_start][ESTIMATE] = pv
229 | if pv10 is not None:
230 | forecasts[period_start][ESTIMATE10] = pv10
231 | if pv90 is not None:
232 | forecasts[period_start][ESTIMATE90] = pv90
233 | elif pv10 is not None:
234 | forecasts[period_start] = {
235 | "period_start": period_start,
236 | "pv_estimate": pv,
237 | "pv_estimate10": pv10,
238 | "pv_estimate90": pv90,
239 | }
240 | else:
241 | forecasts[period_start] = {
242 | "period_start": period_start,
243 | "pv_estimate": pv,
244 | }
245 |
246 |
def raise_and_record(
    hass: HomeAssistant, exception: type[IntegrationError], translation_key: str, translation_placeholders: dict | None = None
) -> None:
    """Raise and record an exception during initialisation.

    The exception class and translation details are stashed in hass.data
    (PRIOR_CRASH_* keys), presumably so a later start-up can report the
    prior crash.

    Raises:
        IntegrationError: The passed exception type is always raised.

    """
    hass.data[DOMAIN][PRIOR_CRASH_EXCEPTION] = exception
    hass.data[DOMAIN][PRIOR_CRASH_TRANSLATION_KEY] = translation_key
    hass.data[DOMAIN][PRIOR_CRASH_PLACEHOLDERS] = translation_placeholders
    raise exception(translation_domain=DOMAIN, translation_key=translation_key, translation_placeholders=translation_placeholders)
255 |
256 |
async def raise_or_clear_advanced_problems(problems: list[str], hass: HomeAssistant):
    """Raise or clear advanced unknown option issues.

    Args:
        problems: Descriptions of advanced option problems; an empty list clears any existing issue.
        hass: The Home Assistant instance.

    """
    issue_registry = ir.async_get(hass)
    if problems:
        problem_list = "".join([("\n* " + problem) for problem in sorted(problems)])
        issue = issue_registry.async_get_issue(DOMAIN, ISSUE_ADVANCED_PROBLEM)
        if (
            issue is not None
            and issue.translation_placeholders is not None
            and issue.translation_placeholders.get(PROBLEMS) != problem_list
        ):
            # The problem list has changed, so delete then re-create the issue.
            ir.async_delete_issue(hass, DOMAIN, ISSUE_ADVANCED_PROBLEM)
            await hass.async_block_till_done()
        _LOGGER.debug("Raising advanced option problems issue for: %s", ", ".join(problems))
        ir.async_create_issue(
            hass,
            DOMAIN,
            ISSUE_ADVANCED_PROBLEM,
            is_fixable=False,
            is_persistent=True,
            translation_key=ISSUE_ADVANCED_PROBLEM,
            translation_placeholders={
                PROBLEMS: problem_list,
            },
            severity=ir.IssueSeverity.ERROR,
            learn_more_url=LEARN_MORE_ADVANCED,
        )
        # The original re-fetched the issue into an unused local here; that
        # dead store has been removed.
    else:
        # (Removed a redundant second ir.async_get(hass); issue_registry is
        # already bound above.)
        issue = issue_registry.async_get_issue(DOMAIN, ISSUE_ADVANCED_PROBLEM)
        if issue is not None:
            _LOGGER.debug("Removing advanced problems issue")
            ir.async_delete_issue(hass, DOMAIN, ISSUE_ADVANCED_PROBLEM)
291 |
292 |
async def raise_or_clear_advanced_deprecated(
    deprecated_in_use: dict[str, str], hass: HomeAssistant, stops_working: dict[str, dt] | None = None
):
    """Raise or clear advanced deprecated option issues.

    Args:
        deprecated_in_use: Map of deprecated option name to its replacement name; empty clears any existing issue.
        hass: The Home Assistant instance.
        stops_working: Optional map of deprecated option name to the date it stops working.

    """
    if deprecated_in_use:
        ir.async_create_issue(
            hass,
            DOMAIN,
            ISSUE_ADVANCED_DEPRECATED,
            is_fixable=False,
            is_persistent=True,
            translation_key=ISSUE_ADVANCED_DEPRECATED,
            translation_placeholders={
                OPTION: ", ".join(deprecated_in_use.keys()),
                NEW_OPTION: ", ".join(deprecated_in_use.values()),
                # Build " (x stops working after <date>, ...)" for deprecated
                # options that have a known cut-off date; empty string otherwise.
                STOPS_WORKING: (
                    " ("
                    + ", ".join(
                        [
                            f"{option} stops working after {date.strftime(DT_DATE_ONLY_FORMAT)}"
                            for option, date in stops_working.items()
                            if option in deprecated_in_use
                        ]
                    )
                    + ")"
                )
                if stops_working
                else "",
            },
            severity=ir.IssueSeverity.WARNING,
            learn_more_url=LEARN_MORE_ADVANCED,
        )
    else:
        issue_registry = ir.async_get(hass)
        issue = issue_registry.async_get_issue(DOMAIN, ISSUE_ADVANCED_DEPRECATED)
        if issue is not None:
            _LOGGER.debug("Removing advanced deprecation issue")
            ir.async_delete_issue(hass, DOMAIN, ISSUE_ADVANCED_DEPRECATED)
331 |
332 |
333 | def percentile(data: list[Any], _percentile: float) -> float | int:
334 | """Find the given percentile in a sorted list of values."""
335 |
336 | if not data:
337 | return 0.0
338 | k = (len(data) - 1) * (_percentile / 100)
339 | f = math.floor(k)
340 | c = math.ceil(k)
341 | if f == c:
342 | return data[int(k)]
343 | d0 = data[int(f)] * (c - k)
344 | d1 = data[int(c)] * (k - f)
345 | return round(d0 + d1, 4)
346 |
347 |
def interquartile_bounds(sorted_data: list[Any], factor: float = 1.5) -> tuple[float | int, float | int]:
    """Return the lower and upper interquartile bounds of a sorted data set.

    With four or fewer samples the quartiles are not meaningful, so permissive
    bounds of (0.0, inf) are returned.
    """

    if len(sorted_data) <= 4:
        return (0.0, float("inf"))
    q1 = percentile(sorted_data, 25)
    q3 = percentile(sorted_data, 75)
    spread = round(q3 - q1, 5)
    return (round(q1 - factor * spread, 4), round(q3 + factor * spread, 4))
361 |
362 |
def diff(lst: list[Any], non_negative: bool = True) -> list[Any]:
    """Build a numpy-like diff.

    Returns the differences of consecutive elements, optionally clamping
    negative differences to zero.
    """

    deltas: list[int | float] = [after - before for before, after in zip(lst, lst[1:])]
    if non_negative:
        return [max(0, delta) for delta in deltas]
    return deltas
371 |
372 |
def cubic_interp(x0: list[Any], x: list[Any], y: list[Any]) -> list[Any]:
    """Build a cubic spline.

    Solves the spline's symmetric tridiagonal system directly (no numpy),
    then evaluates the resulting piecewise cubic at each requested point.
    The end-condition right-hand sides are zero (b0 = 0 and bi = 0 for the
    last row), i.e. natural-spline style boundaries.

    Arguments:
        x0 (list): List of numbers to interpolate at
        x (list): List of numbers in increasing order
        y (list): List of floats to interpolate

    Returns:
        list: Array of interpolated values.

    """

    def clip(lst: list[Any], min_val: float, max_val: float, in_place: bool = False) -> list[Any]:  # numpy-like clip
        # Constrain every element of lst to [min_val, max_val]; copies unless in_place.
        if not in_place:
            lst = lst[:]
        for i in range(len(lst)):
            if lst[i] < min_val:
                lst[i] = min_val
            elif lst[i] > max_val:
                lst[i] = max_val
        return lst

    def search_sorted(list_to_insert: list[Any], insert_into: list[Any]) -> list[Any]:  # numpy-like search_sorted
        # Linear scan: returns the first index where each value could be inserted.
        def float_search_sorted(float_to_insert: Any, insert_into: list[Any]) -> int:
            for i in range(len(insert_into)):
                if float_to_insert <= insert_into[i]:
                    return i
            return len(insert_into)

        return [float_search_sorted(i, insert_into) for i in list_to_insert]

    def subtract(a: float, b: float) -> float:
        return a - b

    size: int = len(x)
    x_diff: list[Any] = diff(x, non_negative=False)  # Knot spacings h_i
    y_diff: list[Any] = diff(y, non_negative=False)

    # li holds the diagonal and li_1 the off-diagonal of a Cholesky-style
    # factorisation of the tridiagonal system; z accumulates the solution.
    li: list[Any] = [0] * size
    li_1: list[Any] = [0] * (size - 1)
    z: list[Any] = [0] * (size)

    li[0] = math.sqrt(2 * x_diff[0])
    li_1[0] = 0.0
    b0: float = 0.0  # Natural boundary: zero right-hand side at the first knot
    z[0] = b0 / li[0]

    bi: float = 0.0

    # Forward substitution through the interior rows.
    for i in range(1, size - 1, 1):
        li_1[i] = x_diff[i - 1] / li[i - 1]
        li[i] = math.sqrt(2 * (x_diff[i - 1] + x_diff[i]) - li_1[i - 1] * li_1[i - 1])
        bi = 6 * (y_diff[i] / x_diff[i] - y_diff[i - 1] / x_diff[i - 1])
        z[i] = (bi - li_1[i - 1] * z[i - 1]) / li[i]

    # Final row, again with a zero right-hand side (natural boundary).
    i = size - 1
    li_1[i - 1] = x_diff[-1] / li[i - 1]
    li[i] = math.sqrt(2 * x_diff[-1] - li_1[i - 1] * li_1[i - 1])
    bi = 0.0
    z[i] = (bi - li_1[i - 1] * z[i - 1]) / li[i]

    # Back substitution for the curvature terms z.
    # NOTE(review): the multiplier here is li_1[i - 1], mirroring the forward
    # pass's storage convention — verify against a reference spline before
    # changing any indexing in this routine.
    i = size - 1
    z[i] = z[i] / li[i]
    for i in range(size - 2, -1, -1):
        z[i] = (z[i] - li_1[i - 1] * z[i + 1]) / li[i]

    # Locate the knot interval containing each interpolation point; clamp so
    # points outside [x[0], x[-1]] extrapolate from the nearest segment.
    index = search_sorted(x0, x)
    index = clip(index, 1, size - 1)

    # Segment end-point values for each interpolation point.
    xi1: list[Any] = [x[num] for num in index]
    xi0: list[Any] = [x[num - 1] for num in index]
    yi1: list[Any] = [y[num] for num in index]
    yi0: list[Any] = [y[num - 1] for num in index]
    zi1: list[Any] = [z[num] for num in index]
    zi0: list[Any] = [z[num - 1] for num in index]
    hi1 = list(map(subtract, xi1, xi0))  # Segment widths

    # Evaluate the standard piecewise-cubic spline formula, rounded to 4 places.
    f0: list[Any] = [0] * len(hi1)
    for j in range(len(f0)):
        f0[j] = round(
            zi0[j] / (6 * hi1[j]) * (xi1[j] - x0[j]) ** 3
            + zi1[j] / (6 * hi1[j]) * (x0[j] - xi0[j]) ** 3
            + (yi1[j] / hi1[j] - zi1[j] * hi1[j] / 6) * (x0[j] - xi0[j])
            + (yi0[j] / hi1[j] - zi0[j] * hi1[j] / 6) * (xi1[j] - x0[j]),
            4,
        )

    return f0
462 |
--------------------------------------------------------------------------------
/ADVOPTIONS.md:
--------------------------------------------------------------------------------
1 | # Advanced options
2 |
3 | It is possible to alter the behaviour of some integration functions by creating a file called `solcast-advanced.json` in the Home Assistant configuration directory `solcast_solar` subdirectory.
4 |
5 | This file has a JSON structure of a dictionary containing key/value pairs.
6 |
7 | Example:
8 |
9 | ```
10 | {
11 | "option_key_one": value,
12 | "option_key_two": value
13 | }
14 | ```
15 |
16 | Changes to this file will be detected in near-real time, changing code behaviour. The impact of that changed behaviour may only be seen at forecast or estimated actuals update. For other changes, `reload_on_advanced_change` can be set to `true` (see below), so that things like dampening modelling and entity set up can occur on reload.
17 |
18 | The impact of not restarting will vary by advanced option, and you are left to decide when the outcome should occur. This is advanced, and _you_ are expected to be advanced about option application. If you're unsure then just set `reload_on_advanced_change` while testing.
19 |
20 | Support for these advanced options will be limited. (Well, "support" for this integration is limited at the best of times. You expect it, yet we are not obliged to provide it; we endeavour to.)
21 |
22 | Understand the implication of setting any of these options before reporting any problem, and check that set values are sensible, and if you then need to seek help, clearly outline any problem faced in detail in a discussion. Any value set is logged at `DEBUG` level, so please include that detail.
23 |
24 | These options modify otherwise predictable and well-tested behaviour, so you are wandering into poorly tested/test-it-yourself territory, where enabling `DEBUG` logging will likely be essential to see what's going on. That said, some of these options are quite well synthetically tested, but others are not at all. No exceptions should arise.
25 |
26 | Values are validated for sanity individually, and in conjunction with other options set. Do not raise an issue report should validation fail to catch a weird set up. You broke it. You fix your config, or revert to defaults and reload. Please raise a discussion topic describing the experience, as we do want to know about weird combinations. These are advanced options so it is not "Issue" raising stuff, just for discussion. The maintainers watch discussions with the same attention as they do issues, so any "issue" raised in this context will likely be converted to a "discussion" _very_ quickly anyway.
27 |
28 | You are free to raise an issue should a code exception occur after setting an advanced option, and `DEBUG` logging is _mandatory_ in this circumstance. Exceptions should not happen, and there will be no exception to requiring `DEBUG` logs in any raised issue. These will not be converted to discussions.
29 |
30 | ## Contents
31 |
32 | 1. [Automated dampening](#automated-dampening)
33 | 1. [Estimated actuals](#estimated-actuals)
34 | 1. [Forecasts](#forecasts)
35 | 1. [Granular dampening](#granular-dampening)
36 | 1. [General](#general)
37 |
38 | ## Automated dampening
39 |
40 | **Key: "automated_dampening_delta_adjustment_model"**
41 |
42 | Possible values: integer `0`,`1` (default `0`)
43 |
44 | Allows the selection of different calculations to nudge the base dampening factors closer to 1.0 when forecast generation for an interval is below the recent peak.
45 |
46 | Option `0` selects an adjustment based on the logarithmic difference between peak and forecast:
47 |
48 | adjusted_factor = base_factor + ((1 - base_factor) * (ln(interval_peak) - ln(interval_forecast)))
49 |
50 | Option `1` selects an adjustment based on the squared ratio of forecast and peak:
51 |
52 | adjusted_factor = base_factor + ((1 - base_factor) * ((1-(interval_forecast/interval_peak))^2))
53 |
54 | Adjusted dampening factors are constrained to lie within the range 0 to 1. The chart below illustrates the behaviour of the two different adjustment calculations for base factors of 0.5 and 0.9.
55 |
56 |
57 |
58 | As the forecast generation decreases in relation to the recent generation peak, the logarithmic difference calculation tends to give higher adjusted factors than the squared ratio calculation. The logarithmic difference calculation will give an adjusted factor of 1.0 for any forecast below around 36% of the peak, whereas the adjusted factor from the squared ratio calculation only approaches 1.0 as forecast generation approaches 0.
59 |
60 | **Key: "automated_dampening_generation_fetch_delay"**
61 |
62 | Possible values: int `0`..`120` (default `0`)
63 |
64 | A number of minutes to delay beyond midnight before generation history is retrieved.
65 |
66 | This value must be less than the estimated actual fetch delay option.
67 |
68 | If Home Assistant is restarted in the period between midnight and generation being retrieved then retrieval will be rescheduled.
69 |
70 | **Key: "automated_dampening_generation_history_load_days"**
71 |
72 | Possible values: integer `1`..`21` (default `7`)
73 |
74 | By default, the integration assumes that there will not be generation history available beyond seven days. If Home Assistant is configured with `purge_keep_days` of a longer period for `recorder`, then this option may be used to accelerate the time to accuracy for automated dampening results.
75 |
76 | This history load occurs when there is no `solcast-generation.json` present. An integration reload is required after deleting the generation cache file.
77 |
78 | **Key: "automated_dampening_ignore_intervals"**
79 |
80 | Possible values: list of strings as "HH:MM" (default `[]`)
81 |
82 | Certain intervals of the day can be set to be ignored by dampening, at times when there is no possibility of shading.
83 |
84 | A possible use case is to avoid situations where there are many matching estimated actual intervals and a small number of lower older generation intervals that lack an export limiting flag. This can only occur when `automated_dampening_no_limiting_consistency` is set to `true`, and may be seen as days get longer towards Summer.
85 |
86 | Double quotes are valid JSON format (single quotes are not). Times are specified in local time zone, and must match the format "HH:MM" with one or two digit hour and a minute of either "00" or "30", and be unique in the list.
87 |
88 | An example list: `["12:00", "12:30", "13:00", "13:30", "14:00", "14:30", "15:00"]`
89 |
90 | **Key: "automated_dampening_insignificant_factor"**
91 |
92 | Possible values: float `0.0`..`1.0` (default `0.95`)
93 |
94 | Dampening values modelled as higher than a certain threshold are ignored as insignificant.
95 |
96 | **Key: "automated_dampening_insignificant_factor_adjusted"**
97 |
98 | Possible values: float `0.0`..`1.0` (default `0.95`)
99 |
100 | Dampening values adjusted by delta adjustment as higher than a certain threshold are ignored as insignificant.
101 |
102 | **Key: "automated_dampening_minimum_matching_generation"**
103 |
104 | Possible values: integer `1`..`21` (default `2`)
105 |
106 | Dampening modelling will skip intervals where there are a low number of matching generation samples for intervals. This is defaulted at two to get a "peak" generation value, but a value of one is also allowed for experimentation.
107 |
108 | This value must be less than or equal to the minimum matching intervals or the number of past days considered for automated dampening.
109 |
110 | **Key: "automated_dampening_minimum_matching_intervals"**
111 |
112 | Possible values: integer `1`..`21` (default `2`)
113 |
114 | Dampening modelling will skip intervals where there are a low number of matching past intervals. A low number of matches are generally seen at the beginning and end of each day, and these are ignored by default.
115 |
116 | This value must be greater than or equal to the minimum matching generation, and must not be higher than the number of past days considered for automated dampening.
117 |
118 | **Key: "automated_dampening_model"**
119 |
120 | Possible values: integer `0`..`3` (default `0`)
121 |
122 | Selects the algorithm to be used to determine automated dampening factors.
123 |
124 | Option `0` is the default model described in the documentation. This compares the recent peak estimated actual with the recent peak generation for each interval to calculate a dampening factor. In this model it is possible that the peak estimated actual and peak generation occur on different days.
125 |
126 | Options `1` to `3` follow a similar approach to the above, but calculate a list of candidate factors from the paired generation and estimated actual data in each interval, so each candidate factor is calculated from a single day.
127 |
128 | - Option `1` returns the maximum of the candidate factors.
129 | - Option `2` returns the mean of the candidate factors.
130 | - Option `3` returns the minimum of the candidate factors.
131 |
132 | Option `1` can be thought of as a most optimistic scenario which will tend to give a higher forecast than Option `2`, with Option `3` as the most pessimistic scenario giving the lowest forecast.
133 |
134 | **Key: "automated_dampening_model_days"**
135 |
136 | Possible values: integer `2`..`21` (default `14`)
137 |
138 | The maximum number of days of past estimated actuals and generation to use for modelling future dampening.
139 |
140 | **Key: "automated_dampening_no_delta_adjustment"**
141 |
142 | Possible values: boolean `true`/`false` (default `false`)
143 |
144 | If delta logarithmic adjustment of dampening factors is not desired then this option may be set to `true`.
145 |
146 | **Key: "automated_dampening_no_limiting_consistency"**
147 |
148 | Possible values: boolean `true`/`false` (default `false`)
149 |
150 | Default limiting behaviour is that whenever export limiting of generation is seen (either by export limit detection, or manual limiting by using the entity `solcast_suppress_auto_dampening`) then all of the same intervals of generation will be ignored over the period defined by `automated_dampening_model_days`, which is `14` by default.
151 |
152 | Said another way, the default behaviour is that if there is limiting detected for any interval on any day, then that interval will be ignored for every day of the past fourteen days unless this option is enabled.
153 |
154 | Set this option to `true` to prevent this behaviour.
155 |
156 | **Key: "automated_dampening_preserve_unmatched_factors"**
157 |
158 | Possible values: boolean `true`/`false` (default `false`)
159 |
160 | Default behaviour when calculating base dampening factors is that any interval with insufficient matching intervals or generation (see `automated_dampening_minimum_matching_generation` and `automated_dampening_minimum_matching_intervals`) will have a dampening factor of 1.0. This can be an issue if there is a sustained period of poor solar generation (e.g. a sustained cloudy spell) when previously calculated dampening factors for these intervals may be reset to 1.0 leading to a higher generation forecast until sufficient 'good' intervals are recorded and the dampening factors can be recalculated.
161 |
162 | Setting this option to `true` will retain the previously calculated factor for such an interval, with factors reverting to 1.0 when all recent estimated actuals for the interval are 0.
163 |
164 | **Key: "automated_dampening_similar_peak"**
165 |
166 | Possible values: float `0.0`..`1.0` (default `0.9`)
167 |
168 | Estimated actual peaks are compared to find a similar number of "matching" peaks from which to compare maximum generation. By default this is intervals within 90% to 100% of peak.
169 |
170 | This option varies what is considered a similar interval from all modelled days.
171 |
172 | **Key: "automated_dampening_suppression_entity"**
173 |
174 | Possible values: string (default `"solcast_suppress_auto_dampening"`)
175 |
176 | A templated suppression entity in the `sensor`, `binary_sensor` or `switch` platform can be used to intentionally ignore intervals of generation. This option allows the entity name searched for to be changed.
177 |
178 | ## Estimated actuals
179 |
180 | **Key: "estimated_actuals_fetch_delay"**
181 |
182 | Possible values: int `0`..`120` (default `0`)
183 |
184 | A number of minutes to delay beyond midnight before estimated actuals are retrieved (in addition to a randomised up-to fifteen minute delay). This may be of use should the retrieval of estimated actuals often fail just after midnight local time.
185 |
186 | If automated dampening is enabled then modelling of new dampening factors will occur immediately following retrieval.
187 |
188 | If Home Assistant is restarted in the period between midnight and estimated actuals being retrieved then retrieval will be rescheduled.
189 |
190 | **Key: "estimated_actuals_log_ape_percentiles"**
191 |
192 | Possible values: list[int] (default [50])
193 |
194 | By default a 50th percentile Actual Percentage Error (APE) is logged on day change and at startup. Specify alternate or more percentiles to analyse with this option.
195 |
196 | **Key: "estimated_actuals_log_mape_breakdown"**
197 |
198 | Possible values: boolean `true`/`false` (default `false`)
199 |
200 | A Mean Actual Percentage Error (MAPE) value for estimated actuals vs. actual generation is logged on day change and at startup. Enable this option to also log the daily percentage error (APE).
201 |
202 | Both undampened and dampened are logged when automated dampening is enabled.
203 |
204 | Note: Totals for both generation and estimated actuals do not include half-hourly intervals that have experienced site export limiting.
205 |
206 | ## Forecasts
207 |
208 | **Key: "forecast_day_entities"**
209 |
210 | Possible values: integer `8`..`14` (default `8`)
211 |
212 | The number of forecast day entities to create (plus one). By default seven entities are created. Today, tomorrow, day 3, day 4, day 5, day 6 and day 7. This option enables creation of up to a day 13 entity.
213 |
214 | An integration reload is required to vary the number of entities. New entities created will be disabled by default, and if this option is reduced then entities will be cleaned up.
215 |
216 | This value must be less than or equal to the number of forecast future days.
217 |
218 | **Key: "forecast_future_days"**
219 |
220 | Possible values: integer `8`..`14` (default `14`)
221 |
222 | The number of days of forecasts to request from Solcast. Setting this lower than 14 will not remove forecasts already retrieved.
223 |
224 | Consider the setting of `forecast_day_entities` when lowering this option.
225 |
226 | **Key: "forecast_history_max_days"**
227 |
228 | Possible values: integer `22`..`3650` (default `730`)
229 |
230 | The number of days of history to retain for forecasts (and estimated actuals).
231 |
232 | There may be a performance implication when too much history data is retained, depending on the platform used for Home Assistant.
233 |
234 | ## Granular dampening
235 |
236 | **Key: "granular_dampening_delta_adjustment"**
237 |
238 | Possible values: boolean `true`/`false` (default `false`)
239 |
240 | Implements the option "automated_dampening_delta_adjustment_model" for granular dampening, to enable utilising this mechanism for automation-driven or manual dampening.
241 |
242 | Fetching estimated actuals is required.
243 |
244 | The adjustment will only apply to `ALL` factors, and not individual per-site dampening, which is in line with how automated dampening operates.
245 |
246 | ## General
247 |
248 | **Key: "api_raise_issue"**
249 |
250 | Possible values: boolean `true`/`false` (default `true`)
251 |
252 | When repeated `429 / Try again later` errors are received from Solcast the integration will raise an ignorable issue alerting a user to the fact.
253 |
254 | This behaviour may be suppressed by setting this option to `false`.
255 |
256 | **Key: "entity_logging"**
257 |
258 | Possible values: boolean `true`/`false` (default `false`)
259 |
260 | By default the value set in entities is not logged. This option enables that at `DEBUG` level.
261 |
262 | An integration reload is required.
263 |
264 | **Key: "reload_on_advanced_change"**
265 |
266 | Possible values: boolean `true`/`false` (default `false`)
267 |
268 | Setting this option to `true` will cause the integration to reload whenever any advanced option is added or changed.
269 |
270 | **Key: "solcast_url"**
271 |
272 | Possible values: string URL (default `"https://api.solcast.com.au"`)
273 |
274 | Do not set this option unless you are a developer and want to utilise the Solcast API Simulator.
275 |
276 | Do not add a trailing `/`. An integration reload is required.
277 |
278 | **Key: "trigger_on_api_available"**
279 |
280 | Possible values: string (default `""`)
281 |
282 | When API availability has been determined (after a period of unavailability) then trigger the friendly name of an automation.
283 |
284 | **Key: "trigger_on_api_unavailable"**
285 |
286 | Possible values: string (default `""`)
287 |
288 | When API unavailability has been determined then trigger the friendly name of an automation.
289 |
290 | **Key: "user_agent"**
291 |
292 | Possible values: string (default `"default"`)
293 |
294 | The HTTP header User-Agent is set to "ha-solcast-solar-integration/x.x.x" by default. This advanced option allows it to be set to another user agent string.
295 |
--------------------------------------------------------------------------------
/tests/wsgi_sim.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | """Solcast hobbyist API simulator.
3 |
4 | Install:
5 |
6 | * This script runs in a Home Assistant DevContainer
7 | * Script start: `python3 wsgi_sim.py`, or make the file executable and run `./wsgi_sim.py`
8 |
9 | Optional run arguments:
10 |
11 | * --limit LIMIT Set the API call limit available, example --limit 100 (There is no limit... 😉)
12 | * --no429 Do not generate 429 response.
13 | * --bomb429 w-x,y,z The minute(s) of the hour to return API too busy, comma separated, example --bomb429 0-5,15,30-35,45
14 | * --teapot Infrequently generate 418 response.
15 |
16 | Theory of operation:
17 |
18 | * Configure integration to use either API key "1", "2", "3", or any combination of multiple. Any other key will return an error.
19 | * API key 1 has two sites, API key 2 has one site, API key 3 has an impossible (for hobbyists) three sites.
20 | * Forecast for every day is the same blissful-clear-day bell curve.
21 | * As time goes on new forecast hour values are calculated based on the current get forecasts call time of day.
22 | * 429 responses are given when minute=0, unless --no429 is set, or other minutes are specified with --bomb429.
23 | * An occasionally generated "I'm a teapot" status can verify that the integration handles unknown status returns.
24 | * The time zone used should be read from the Home Assistant configuration. If this fails then the zone will be Australia/Melbourne.
25 |
26 | SSL certificate:
27 |
28 | * The integration does not care whether the api.solcast.com.au certificate is valid, so a self-signed certificate is created by this simulator.
29 | * To generate a new self-signed certificate run in this folder: openssl req -x509 -newkey rsa:4096 -nodes -out cert.pem -keyout key.pem -days 3650,
30 | * or simply delete *.pem files and restart the simulator to generate new ones. The DevContainer will already have openssl installed.
31 |
32 | Integration issues raised regarding the simulator will be closed without response.
33 | Raise a pull request instead, suggesting a fix for whatever is wrong, or to add additional functionality.
34 |
35 | Experimental support for advanced_pv_power:
36 |
37 | * Should Solcast deprecate the legacy hobbyist API, then the advanced_pv_power API calls will probably be preferred, just with capabilities limited by Solcast.
38 | * This simulator, and the integration are prepared should this occur.
39 |
40 | """
41 |
42 | import argparse
43 | import copy
44 | import datetime
45 | from datetime import datetime as dt, timedelta
46 | import json
47 | from logging.config import dictConfig
48 | import os
49 | from pathlib import Path
50 | import random
51 | import subprocess
52 | import sys
53 | from typing import Any
54 | from zoneinfo import ZoneInfo
55 |
56 | from simulator import API_KEY_SITES, SimulatedSolcast
57 |
simulate = SimulatedSolcast()  # Single shared simulator instance used by all route handlers
59 |
60 |
def restart():
    """Replace the current process with a fresh invocation of the simulator."""

    interpreter = sys.executable
    os.execl(interpreter, interpreter, *sys.argv)
    sys.exit()  # Unreachable: execl does not return on success
67 |
68 |
# Bootstrap third-party dependencies: if flask or isodate are missing, install
# them with pip and re-exec this script so the fresh imports take effect.
need_restart = False

try:
    from flask import Flask, jsonify, request
    from flask.json.provider import DefaultJSONProvider
except (ModuleNotFoundError, ImportError):
    subprocess.check_call([sys.executable, "-m", "pip", "install", "flask"])
    need_restart = True
try:
    import isodate  # pyright: ignore[reportMissingTypeStubs]
except (ModuleNotFoundError, ImportError):
    subprocess.check_call([sys.executable, "-m", "pip", "install", "isodate"])
    need_restart = True

if need_restart:
    restart()
85 |
# Create a self-signed TLS certificate for api.solcast.com.au if one is not
# already present (the integration does not verify the certificate chain, per
# the module docstring, so self-signed is sufficient).
if not (Path("cert.pem").exists() and Path("key.pem").exists()):
    subprocess.check_call(
        [
            "/usr/bin/openssl",
            "req",
            "-x509",
            "-newkey",
            "rsa:4096",
            "-nodes",
            "-out",
            "cert.pem",
            "-keyout",
            "key.pem",
            "-days",
            "3650",
            "-subj",
            "/C=AU/ST=Victoria/L=Melbourne/O=Solcast/OU=Solcast/CN=api.solcast.com.au",
        ]
    )
105 |
API_LIMIT = 50  # Daily API call allowance per key, compared against each key's "counter"
BOMB_429 = [0]  # Minutes of the hour during which a 429 response is returned (see --bomb429)
BOMB_KEY: list[int] = []  # Minutes of the hour during which API key "1" is swapped for key "4"
ERROR_KEY_REQUIRED = "KeyRequired"
ERROR_INVALID_KEY = "InvalidKey"
ERROR_TOO_MANY_REQUESTS = "TooManyRequests"
ERROR_SITE_NOT_FOUND = "SiteNotFound"
# Maps each simulator error code to the message and HTTP status returned to the caller.
ERROR_MESSAGE: dict[str, Any] = {
    ERROR_KEY_REQUIRED: {"message": "An API key must be specified.", "status": 400},
    ERROR_INVALID_KEY: {"message": "Invalid API key.", "status": 403},
    ERROR_TOO_MANY_REQUESTS: {"message": "You have exceeded your free daily limit.", "status": 429},
    ERROR_SITE_NOT_FOUND: {"message": "The specified site cannot be found.", "status": 404},
}
GENERATE_418 = False  # When True, infrequently return an HTTP 418 response (see --teapot)
GENERATE_429 = True  # When False, never return an HTTP 429 response (see --no429)
121 |
dictConfig(  # Logger configuration: DEBUG-level root logger writing to the Flask WSGI error stream
    {
        "version": 1,
        "formatters": {
            "default": {
                "format": "[%(asctime)s] %(levelname)s in %(module)s: %(message)s",
            }
        },
        "handlers": {
            "wsgi": {"class": "logging.StreamHandler", "stream": "ext://flask.logging.wsgi_errors_stream", "formatter": "default"}
        },
        "root": {"level": "DEBUG", "handlers": ["wsgi"]},
    }
)
136 |
137 |
class DtJSONProvider(DefaultJSONProvider):  # pyright: ignore[reportPossiblyUnboundVariable]
    """Custom JSON provider that serialises datetime objects as ISO-8601 strings."""

    def default(self, o: Any) -> Any:  # pyright: ignore[reportIncompatibleMethodOverride]
        """Serialise o, rendering datetime instances in ISO format."""
        return o.isoformat() if isinstance(o, dt) else super().default(o)
147 |
148 |
app = Flask(__name__)  # pyright: ignore[reportPossiblyUnboundVariable]
app.json = DtJSONProvider(app)  # Emit datetime values as ISO-8601 in all JSON responses
_LOGGER = app.logger
counter_last_reset = dt.now(datetime.UTC).replace(hour=0, minute=0, second=0, microsecond=0)  # Previous UTC midnight
153 |
154 |
def validate_call(api_key: str, site_id: str | None = None, counter: bool = True) -> tuple[int, Any, Any]:
    """Return the state of the API call.

    Arguments:
        api_key (str): The API key presented by the caller.
        site_id (str | None): Optional site resource ID to look up for the key.
        counter (bool): Whether this call counts against the daily API limit.

    Returns:
        tuple: (HTTP status, error payload or ""/None, matched site or None).

    """
    global counter_last_reset  # noqa: PLW0603 pylint: disable=global-statement

    revert_key = True

    # Reset all per-key usage counters at UTC midnight.
    if counter_last_reset.day != dt.now(datetime.UTC).day:
        _LOGGER.info("Resetting API usage counter")
        for v in API_KEY_SITES.values():
            v["counter"] = 0
        counter_last_reset = dt.now(datetime.UTC).replace(hour=0, minute=0, second=0, microsecond=0)

    def error(code: str) -> tuple[int, Any, None]:
        return (
            ERROR_MESSAGE[code]["status"],
            {"response_status": {"error_code": code, "message": ERROR_MESSAGE[code]["message"]}},
            None,
        )

    if not api_key:
        return error(ERROR_KEY_REQUIRED)
    if api_key not in API_KEY_SITES:
        return error(ERROR_INVALID_KEY)
    if GENERATE_429 and dt.now(datetime.UTC).minute in BOMB_429:
        return 429, "", None
    if dt.now(datetime.UTC).minute in BOMB_KEY:
        # Simulate an account key change by temporarily moving key "1" to key "4".
        if API_KEY_SITES.get("1"):
            API_KEY_SITES["4"] = copy.deepcopy(API_KEY_SITES["1"])
            API_KEY_SITES.pop("1")
        revert_key = False
    if counter and API_KEY_SITES.get(api_key, {}).get("counter", 0) >= API_LIMIT:
        return error(ERROR_TOO_MANY_REQUESTS)
    if GENERATE_418 and random.random() < 0.01:
        return 418, "", None  # An unusual status returned for fun, infrequently
    if site_id is not None:
        # Find the site by site_id
        site = next((site for site in API_KEY_SITES.get(api_key, {}).get("sites", {}) if site["resource_id"] == site_id), None)
        if not site:
            if API_KEY_SITES.get(api_key) is None:
                return error(ERROR_INVALID_KEY)
            return error(ERROR_SITE_NOT_FOUND)  # Technically the Solcast API should not return 404 (as documented), but it might
    else:
        site = None
    if counter:
        # Bug fix: the original tested `is None` here, so usage was never counted
        # for valid keys, and incrementing a just-removed ("bombed") key raised
        # KeyError. Count usage only when the key still exists.
        if API_KEY_SITES.get(api_key) is not None:
            API_KEY_SITES[api_key]["counter"] += 1
            _LOGGER.info("API key %s has been used %s times", api_key, API_KEY_SITES[api_key]["counter"])
    if revert_key and API_KEY_SITES.get("4"):
        API_KEY_SITES["1"] = copy.deepcopy(API_KEY_SITES["4"])
        API_KEY_SITES.pop("4")
    return 200, None, site
206 |
207 |
@app.route("/rooftop_sites", methods=["GET"])
def get_sites() -> tuple[Any, int]:
    """Return sites for an API key."""

    api_key = request.args.get("api_key")  # pyright: ignore[reportPossiblyUnboundVariable]
    if api_key is None:
        return "{}", 500

    response_code, issue, _ = validate_call(api_key, counter=False)
    if response_code != 200:
        return jsonify(issue) if issue != "" else "{}", response_code  # pyright: ignore[reportPossiblyUnboundVariable]

    # Renamed from the original local `get_sites`, which shadowed this function.
    sites = simulate.raw_get_sites(api_key)
    if sites is None:
        return "{}", 403
    return jsonify(sites), 200  # pyright: ignore[reportPossiblyUnboundVariable]
224 |
225 |
# Route fix: the path variable <site_id> was missing ("//"), so Flask could
# never bind the view's site_id parameter.
@app.route("/rooftop_sites/<site_id>/estimated_actuals", methods=["GET"])
def get_site_estimated_actuals(site_id: str) -> tuple[Any, int]:
    """Return simulated estimated actuals for a site.

    Arguments:
        site_id (str): The site resource ID from the URL path.

    Returns:
        tuple: JSON response body and HTTP status code.

    """

    api_key = request.args.get("api_key")  # pyright: ignore[reportPossiblyUnboundVariable]
    if api_key is None:
        return "{}", 500

    response_code, issue, _ = validate_call(api_key, site_id)
    if response_code != 200:
        return jsonify(issue) if issue != "" else "", response_code  # pyright: ignore[reportPossiblyUnboundVariable]

    if request.args.get("hours") is None:  # pyright: ignore[reportPossiblyUnboundVariable]
        return "{}", 500
    return jsonify(simulate.raw_get_site_estimated_actuals(site_id, api_key, int(request.args["hours"]))), 200  # pyright: ignore[reportPossiblyUnboundVariable]
241 |
242 |
# Route fix: the path variable <site_id> was missing ("//"), so Flask could
# never bind the view's site_id parameter.
@app.route("/rooftop_sites/<site_id>/forecasts", methods=["GET"])
def get_site_forecasts(site_id: str) -> tuple[Any, int]:
    """Return simulated forecasts for a site.

    Arguments:
        site_id (str): The site resource ID from the URL path.

    Returns:
        tuple: JSON response body and HTTP status code.

    """

    api_key = request.args.get("api_key")  # pyright: ignore[reportPossiblyUnboundVariable]
    if api_key is None:
        return "{}", 500

    response_code, issue, _ = validate_call(api_key, site_id)
    if response_code != 200:
        return jsonify(issue) if issue != "" else "", response_code  # pyright: ignore[reportPossiblyUnboundVariable]
    if request.args.get("hours") is None:  # pyright: ignore[reportPossiblyUnboundVariable]
        return "{}", 500
    return jsonify(simulate.raw_get_site_forecasts(site_id, api_key, int(request.args["hours"]))), 200  # pyright: ignore[reportPossiblyUnboundVariable]
257 |
258 |
@app.route("/data/historic/advanced_pv_power", methods=["GET"])
def get_site_estimated_actuals_advanced() -> tuple[Any, int]:
    """Return simulated advanced pv power history for a site.

    Requires `api_key` and `resource_id` query parameters, a `start`
    timestamp, and either an `end` timestamp or an ISO-8601 `duration`.

    Returns:
        tuple: JSON response body and HTTP status code.

    """

    def missing_parameter():
        _LOGGER.info("Missing parameter")
        return jsonify({"response_status": {"error_code": "MissingParameter", "message": "Missing parameter."}}), 400  # pyright: ignore[reportPossiblyUnboundVariable]

    api_key = request.args.get("api_key")  # pyright: ignore[reportPossiblyUnboundVariable]
    site_id = request.args.get("resource_id")  # pyright: ignore[reportPossiblyUnboundVariable]
    if api_key is None or site_id is None:
        return "{}", 500

    try:
        start = dt.fromisoformat(request.args.get("start"))  # type:ignore[arg-type]
    except:  # noqa: E722
        _LOGGER.info("Missing start parameter %s", request.args.get("start"))  # pyright: ignore[reportPossiblyUnboundVariable]
        return missing_parameter()
    try:
        end = dt.fromisoformat(request.args.get("end"))  # type: ignore[arg-type]
    except:  # noqa: E722
        end = None
    try:
        # A duration, when given, takes precedence over an explicit end.
        duration = isodate.parse_duration(request.args.get("duration"))  # pyright: ignore[reportPossiblyUnboundVariable, reportUnknownMemberType]
        end = start + duration  # pyright: ignore[reportUnknownVariableType]
    except:  # noqa: E722
        duration = None
    if not end and not duration:
        _LOGGER.info("Missing end or duration parameter")
        return missing_parameter()
    # Bug fix: `end` is always set by this point (parsed directly, or derived
    # as start + duration), so compute the span unconditionally. The original
    # only assigned _hours when duration was None, raising NameError for
    # duration-based requests at the return below.
    _hours = int((end - start).total_seconds() / 3600)  # type: ignore[operator]
    period_end = simulate.get_period(start, timedelta(minutes=30))
    response_code, issue, _ = validate_call(api_key, site_id)
    if response_code != 200:
        return jsonify(issue) if issue != "" else "", response_code  # pyright: ignore[reportPossiblyUnboundVariable]

    return jsonify(simulate.raw_get_site_estimated_actuals(site_id, api_key, _hours, key="pv_power_advanced", period_end=period_end)), 200  # pyright:ignore[reportPossiblyUnboundVariable, reportCallIssue]
297 |
298 |
299 | @app.route("/data/forecast/advanced_pv_power", methods=["GET"])
300 | def get_site_forecasts_advanced() -> tuple[Any, int]:
301 | """Return simulated advanced pv power forecasts for a site."""
302 |
303 | api_key = request.args.get("api_key") # pyright: ignore[reportPossiblyUnboundVariable]
304 | site_id = request.args.get("resource_id") # pyright: ignore[reportPossiblyUnboundVariable]
305 | _hours = int(request.args.get("hours")) # type:ignore[arg-type]
306 | period_end = simulate.get_period(dt.now(datetime.UTC), timedelta(minutes=30))
307 | response_code, issue, _ = validate_call(api_key, site_id) # type:ignore[arg-type]
308 | if response_code != 200:
309 | return jsonify(issue) if issue != "" else "", response_code # pyright: ignore[reportPossiblyUnboundVariable]
310 |
311 | return jsonify(simulate.raw_get_site_forecasts(site_id, api_key, _hours, key="pv_power_advanced", period_end=period_end)), 200 # pyright:ignore[reportPossiblyUnboundVariable, reportCallIssue]
312 |
313 |
def get_time_zone() -> None:
    """Attempt to read the time zone from the Home Assistant config.

    Best effort only: the simulator may run outside a Home Assistant tree,
    in which case the configured default time zone is left untouched.
    """
    try:
        with Path.open(Path(Path.cwd(), "../../../.storage/core.config")) as f:
            config = json.loads(f.read())
            simulate.set_time_zone(ZoneInfo(config["data"]["time_zone"]))
            _LOGGER.info("Time zone: %s", config["data"]["time_zone"])
    except Exception:  # noqa: BLE001
        # Deliberately best-effort: missing file, bad JSON or an unknown zone
        # all mean "keep the default". `Exception` (not a bare except) so
        # SystemExit/KeyboardInterrupt still propagate.
        pass
324 |
325 |
326 | if __name__ == "__main__":
327 | random.seed()
328 | _LOGGER.info("Starting Solcast API simulator, will listen on localhost:443")
329 | _LOGGER.info("Originally written by @autoSteve")
330 | _LOGGER.info("Integration issues raised regarding this script will be closed without response because it is a development tool")
331 | get_time_zone()
332 |
333 | parser = argparse.ArgumentParser()
334 | parser.add_argument("--limit", help="Set the API call limit available, example --limit 100", type=int, required=False)
335 | parser.add_argument("--no429", help="Do not generate 429 response", action="store_true", required=False)
336 | parser.add_argument("--teapot", help="Infrequently generate 418 response", action="store_true", required=False)
337 | parser.add_argument(
338 | "--bomb429",
339 | help="The minute(s) of the hour to return API too busy, comma separated, example --bomb429 0-5,15,30,45",
340 | type=str,
341 | required=False,
342 | )
343 | parser.add_argument(
344 | "--bombkey",
345 | help="The minute(s) of the hour to use a different API key, comma separated, example --bombkey 0-5,15,30,45",
346 | type=str,
347 | required=False,
348 | )
349 | parser.add_argument("--debug", help="Set Flask debug mode on", action="store_true", required=False, default=False)
350 | args = parser.parse_args()
351 | if args.limit:
352 | API_LIMIT = args.limit # pyright: ignore[reportConstantRedefinition]
353 | _LOGGER.info("API limit has been set to %s", API_LIMIT)
354 | if args.no429:
355 | GENERATE_429 = False # pyright: ignore[reportConstantRedefinition]
356 | _LOGGER.info("429 responses will not be generated")
357 | if args.bomb429:
358 | if not GENERATE_429:
359 | _LOGGER.error("Cannot specify --bomb429 with --no429")
360 | sys.exit()
361 | BOMB_429 = [ # pyright: ignore[reportConstantRedefinition]
362 | int(x) for x in args.bomb429.split(",") if "-" not in x
363 | ] # Simple minutes of the hour. # pyright: ignore[reportConstantRedefinition]
364 | if "-" in args.bomb429:
365 | for x_to_y in [x for x in args.bomb429.split(",") if "-" in x]: # Minute of the hour ranges.
366 | split = x_to_y.split("-")
367 | if len(split) != 2:
368 | _LOGGER.error("Not two hyphen separated values for --bomb429")
369 | BOMB_429 += list(range(int(split[0]), int(split[1]) + 1)) # pyright: ignore[reportConstantRedefinition]
370 | list.sort(BOMB_429) # pyright:ignore[reportUnknownMemberType]
371 | _LOGGER.info("API too busy responses will be returned at minute(s) %s", BOMB_429)
372 | if args.bombkey:
373 | BOMB_KEY = [ # pyright: ignore[reportConstantRedefinition]
374 | int(x) for x in args.bombkey.split(",") if "-" not in x
375 | ] # Simple minutes of the hour. # pyright: ignore[reportConstantRedefinition]
376 | if "-" in args.bombkey:
377 | for x_to_y in [x for x in args.bombkey.split(",") if "-" in x]: # Minute of the hour ranges.
378 | split = x_to_y.split("-")
379 | if len(split) != 2:
380 | _LOGGER.error("Not two hyphen separated values for --bombkey")
381 | BOMB_KEY += list(range(int(split[0]), int(split[1]) + 1)) # pyright: ignore[reportConstantRedefinition]
382 | list.sort(BOMB_KEY) # pyright:ignore[reportUnknownMemberType]
383 | _LOGGER.info("API key changes will be happen at minute(s) %s", BOMB_KEY)
384 | if args.teapot:
385 | GENERATE_418 = True # pyright: ignore[reportConstantRedefinition]
386 | _LOGGER.info("I'm a teapot response will be sometimes generated")
387 |
388 | if API_LIMIT == 50:
389 | _LOGGER.info("API limit is default %s, usage has been reset", API_LIMIT)
390 |
391 | app.run(debug=args.debug, host="127.0.0.1", port=443, ssl_context=("cert.pem", "key.pem"))
392 |
--------------------------------------------------------------------------------
/FAQ.md:
--------------------------------------------------------------------------------
1 | # Troubleshooting FAQ
2 |
3 | ## Contents
4 | * Q: When should I change my API key, and will this help fix problems?
5 | * Q: I've just got **lots of 429 errors reported**, and I'm not getting forecasts. Should I raise an issue, or continue one of the long running discussions?
6 | * Q: I'm trying to set up (or re-set up) the integration, and **I'm getting 429 errors** and can't get any further. What's happening?
7 | * Solcast's API status page at [https://status.solcast.com/](https://status.solcast.com/) says that the API status is all green, but **I'm getting 429 errors**. What should I do?
8 | * Q: You asked for DEBUG logs to be provided when raising issues. How do I get these?
9 | * Q: The Solcast Toolkit site will not allow me to add a new rooftop site. I get a minus one error. Why?
10 | * Q: I don't understand the README, and it's too long. Can it be simplified?
11 | * Q: Is submitting debug logging going to expose my API key or location to the world?
12 | * Q: I get a timeout connecting to api.solcast.com.au!!! What the heck is happening? (...raises issue...)
13 | * Q: I've just had a shiny new PV string attached to my inverter, and I've gone to Solcast, added the new rooftop site details, but it's not being included in the forecast results. What's up?
14 | * Q: I just restored Home Assistant from backup, and when it started the Solcast integration updated the forecast! Why?
15 | * Q: Does the integration cope with daylight savings time / Summer time transitions?
16 | * Q: Why are certain sensors Watt, while others are Watt-hour or kilo-Watt-hour? Shouldn't these be the same? Why?
17 | * Q: Why have my historical forecasts disappeared from the energy dashboard? I now only see 10/14 days!
18 | * Q: I have a Solcast API limit of 50 calls. Why is the integration now limiting me to 10?
19 | * Q: What polls to Solcast happen, when do they happen, and are they important?
20 | * Q: A follow-up question: If I restart the integration will it use API quota?
21 |
22 | ### Q: When should I change my API key, and will this help fix problems?
23 |
24 | Only change your API key when you think the key has been leaked publicly or somehow compromised.
25 |
26 | Only change your API key when the Solcast service is _**HEALTHY**_ for Hobbyist users. NEVER CHANGE YOUR KEY WHEN `429` errors are occurring, because when you update the integration configuration with the new key it needs to contact the Solcast API to read site details. It will likely get a `429` error and will not be able to fully complete the configuration change.
27 |
28 | Changing your API key will NEVER fix any problem other than resolving a compromise, like if you have posted a screen grab of your integration configuration in a discussion topic. The last six characters of the key only are logged.
29 |
30 | ### Q: I've just got lots of 429 errors reported, and I'm not getting forecasts. Should I raise an issue, or continue one of the long running discussions?
31 |
32 | or
33 |
34 | ### Q: I'm trying to set up (or re-set up) the integration, and I'm getting 429 errors and can't get any further. What's happening?
35 |
36 | or
37 |
38 | ### Solcast's API status page at [https://status.solcast.com/](https://status.solcast.com/) says that the API status is all green, but I'm getting 429 errors. What should I do?
39 |
40 | As the [Solcast API Status](https://status.solcast.com/) page says: **_Don't agree with what's reported here? Contact_ [them] _at [support@solcast.com](mailto:support@solcast.com?subject=Report%20Incident)._** Do note that the status page generally **doesn't report the hobbyist API status** because the information there is for **paying customers**, not you.
41 |
42 | This integration reports 429 errors returned by the Solcast Legacy Rooftop Site API as received. If that's what Solcast is sending us, then that's what we report.
43 |
44 | There is nothing that the integration maintainers can do to fix this.
45 |
46 | The vast majority of times that this has occurred is because someone (not necessarily an integration user, and not necessarily a rooftop hobbyist user) is hammering Solcast servers thousands of times an hour (or minute) with an out-of-control process.
47 |
48 | If the Solcast team aren't yet aware of it (outside of normal Australian business hours) they may not have had a chance to respond and block that process, so as the message on their website says, please, in the first instance, politely (since you're using a free service), ask them if there are any issues, and provide them with as much information as possible.
49 |
50 | For example:
51 |
52 | - Last successful update 1:30pm AEST (GMT+10).
53 | - Three failed attempts since (10 retries per attempt over a fifteen-minute period)
54 | - 429 responses received on each call to https://api.solcast.com.au/rooftop_sites/
55 |
56 | By the way, the 10 retries per attempt over a fifteen-minute period is exactly how the integration works.
57 |
58 | ### Q: You asked for DEBUG logs to be provided when raising issues. How do I get these?
59 |
60 | We can't usually do anything without them, and it's almost always the first question that will be asked: "Could you provide debug logs, please?"
61 |
62 | When set for debug you see all kinds of fascinating stuff about what's going on under the covers. In configuration.yaml:
63 |
64 | ```
65 | logger:
66 | default: warn
67 | logs:
68 | custom_components.solcast_solar: debug
69 | ```
70 |
71 | (Make the default info/warn/whatever, as we don't care. We just want the debug goodness.)
72 |
73 | Reviewing logs is quite simple, and can be done from the UI. Go to Settings | System | Logs, where "condensed" logs are shown by default. Select the three dots at the top right of screen and select "Show full logs".
74 |
75 |
76 |
77 | You can't filter for just the Solcast integration in the UI, so it might be a good idea to download the log and filter it by another method (for example the *nix utility `less` "&/" command, or Notepad++ with the Linefilter2 plugin.)
78 |
79 | ### Q: The Solcast Toolkit site will not allow me to add a new rooftop site. I get a minus one error. Why?
80 |
81 | If you get a notification that your hobbyist account is limited to the creation of -1 Home PV arrays within 1km of each other, then your account needs fixing by Solcast support.
82 |
83 | [
](https://github.com/BJReplay/ha-solcast-solar/blob/main/.github/SCREENSHOTS/solcast_minus_one.jpeg)
84 |
85 | Billy from Solcast advises, _"This is an issue on our backend. For any future issues, if you could please just email through to support@solcast.com we'll fix it up, which will allow you to create the second site next time you log in."_
86 |
87 | What you should see is this:
88 |
89 | [
](https://github.com/BJReplay/ha-solcast-solar/blob/main/.github/SCREENSHOTS/solcast_plus_two.png)
90 |
91 | ### Q: I don't understand the README, and it's too long. Can it be simplified?
92 |
93 | No.
94 |
95 | This is a complex integration, and the documentation is extensive. If you want to use some of its advanced features then you're just going to have to slow down, take a deep breath and read. Do not guess nor assume how something works.
96 |
97 | Every word has been painstakingly reviewed to ensure clarity and flow.
98 |
99 | If the documentation is incorrect or misleading then call it out by contributing a pull request.
100 |
101 | ### Q: Is submitting debug logging going to expose my API key or location to the world?
102 |
103 | No.
104 |
105 | All sensitive information, including API keys, and your home address via latitude/longitude are redacted.
106 |
107 | ### Q: I get a timeout connecting to api.solcast.com.au!!! What the heck is happening? (...raises issue...)
108 |
109 | Honestly, it's not us, but you. Please do not raise an issue.
110 |
111 | This kind of thing can cause you to tear your hair out, but we might be able to put it back. A timeout is a timeout. We got nothing, so we timeout. Period. We got nothing, so we can't go on. But...
112 |
113 | You _have_ to check your networking. It may seem right, but it's not. It may seem solid but...
114 |
115 | Try a `curl` from your Home Assistant server first. (This will not use up API call quota.)
116 |
117 | ```
118 | curl -X GET -H 'content-type: application/json' https://api.solcast.com.au/rooftop_sites?api_key=YOURAPIKEYHERE
119 | ```
120 |
121 | Do you get an instant, and pleasing response of your sites data? (Or even a 429/Solcast too busy) Then, great! Move on. Because more networking bear traps can be laid... It is possible for the command line `curl` to receive an IPv4 address, and not the address that Home Assistant actually gets from making the same DNS query.
122 |
123 | WT?
124 |
125 | Check for IPv6 weirdness. Is IPv6 enabled in Home Assistant? If yes, then triple check that HA can _actually_ talk IPv6 to the Internet... If in doubt, then disable IPv6 in the HA network config. Or triple check your **router** config. This IPv6 stuff is new and scary, but please get your head around it, or disable it if you don't get it.
126 |
127 | If that's not the problem, and `curl` works, then I've got nothing without knowing more. But it's a network issue... Try a more generic networking issue Google search relating to Home Assistant.
128 |
129 | ### Q: I've just had a shiny new PV string attached to my inverter, and I've gone to Solcast, added the new rooftop site details, but it's not being included in the forecast results. What's up?
130 |
131 | There is almost certainly a simple fix.
132 |
133 | The integration loads a list of all rooftop sites from Solcast on startup. It does not attempt to load these until another startup, because that would just add API load to the Solcast service. They don't like that, and tend to respond with `429/Too busy` responses when heaps of calls come in from hobbyists, so we like to keep the number of calls to just the essential, and not do a 'get sites' call at every forecast update fetch.
134 |
135 | Restart the integration, and your new rooftop will almost certainly be found.
136 |
137 | But... Potentially further complicating things for you, it is just possible that a restart will not load the new site details. Should a `429/Too busy` be hit at precisely the same moment that you restart to load the new site then the integration will move on, preferring to load cached sites instead. The logs are your friend here, because a `429` gets logged as a warning to tell you that this has happened.
138 |
139 | Ugh. Be patient, and persistent if needed.
140 |
141 | Restarting near an on-the-fifteen-minute boundary of the hour particularly in the European morning could be to blame. So try, and try again. It will pick up the new site eventually (but only when restarted).
142 |
143 | ### Q: I just restored Home Assistant from backup, and when it started the Solcast integration updated the forecast! Why?
144 |
145 | Auto-update is enabled. The backup that you restored from was prior to the last auto update.
146 |
147 | The integration records the date and time of the latest update attempt in `solcast.json`. On start, it calculates the auto-update intervals, plus the date and time of the most recent auto-update. It then compares the most recent auto-update time with that recorded in `solcast.json`, and if the forecast cache is out of date, or "stale" then it will do a forecast update.
148 |
149 | It has no way of knowing that you have restored from backup.
150 |
151 | This is an unusual situation to arise, so we have no plans to alter the integration behaviour. At worst, there will be one instance of API quota exhaustion on update for the day before the next UTC midnight reset of the used count happens.
152 |
153 | As an aside, the reason that it does this check is to cover a far more likely scenario. Re-starting Home Assistant.
154 |
155 | If HA gets restarted _just before_ a scheduled auto-update is going to happen then that update will be cancelled. If the check for stale forecast data were not done on start then that update would be missed entirely.
156 |
157 | So short-term pain on restore from backup. Long-term gain on having auto-update operate reliably for you.
158 |
159 | Don't like the behaviour? Send feedback in a discussion, and revert to using an HA automation to update the forecast.
160 |
161 | ### Q: Does the integration cope with daylight savings / Summer / Winter time transitions?
162 |
163 | It does.
164 |
165 | If it is logging odd things for you in debug level logs, and you're getting multiple "Sunday" forecasts (if that's your day of transition) then you need to upgrade to at least v4.2.5.
166 |
167 | The transition to daylight time results in Solcast varying the number of half-hourly forecast intervals for the day of transition. When transitioning to daylight time there will be only 46 intervals, and not the usual 48. This is because 2AM will no longer exist for that day. When transitioning from daylight time we get a sleep-in, and there are two 2AMs and a total of 50 intervals.
168 |
169 | The integration was messing up the UTC time of period start and end, and using a fixed number of 48 intervals. Now it does not.
170 |
171 | More recently "Winter time" transition support was added for Ireland (their Summer period is considered "standard" time, and the net time shift is the same, but this gets treated differently by Python code, which the integration is written in).
172 |
173 | ### Q: Why are certain sensors Watt, while others are Watt-hour or kilo-Watt-hour? Shouldn't these be the same? Why?
174 |
175 | The power sensors, with a unit of measurement of Watt represent an instantaneous forecast power at a point in time. Given Solcast forecasts in half-hourly increments these can be thought of as an average power that is expected to be generated for a period (or the value expected half way through each interval).
176 |
177 | All values received from Solcast are instantaneous power, or Watts.
178 |
179 | The values for Watt-hour/kWh are calculated by the integration from the power numbers, and are power over time, or energy. An example of this is expected solar production for the remainder of the day. For these, the period averages are summed, and then divided by two because the unit is for a whole hour, yet intervals are half-hourly. For some of these, like remaining for the day, a portion of the calculated first period is used because some sensors are updated every five minutes.
180 |
181 | @ProphetOfDoom drew up a great annotated representation of an actual forecast chart overlaid with the underlying values that had been received from Solcast.
182 |
183 | 
184 |
185 | ### Q: Why have my historical forecasts disappeared from the energy dashboard? I now only see 10/14 days!
186 |
187 | At some point, your /config/solcast_solar/solcast.json file has gone missing, and was recreated. This contains the history.
188 |
189 | First ask yourself, what use are historic forecasts to me anyway? A dashed line that extends back as far as since this integration was first installed is really only visually pleasing, and not really of value.
190 |
191 | What temperature was forecast on the 3rd of December 2021, and was it right? Who cares? We know the actual answer now. Solcast would care about improving forecast accuracy, but I'm pretty sure they would not use history to do so. They would compare the predictions of present and proposed forecast models against actuals over time.
192 |
193 | The "good" news is that these forecasts will be retained for a couple of years from here on, so your dashed line will get longer, even if it is of almost zero value.
194 |
195 | Or do you really want to fix it?
196 |
197 | I hope you have a backup from the day when the history vanished. You do back up, right?
198 |
199 | The fix involves "merging" the contents of two solcast.json files, which is not as simple as just concatenating the files.
200 |
201 | Inside the json structure there is a `forecasts` key for each rooftop site, which holds an array. What you need to do is get the older values from backup for this array, and _carefully_ (making sure there is the required comma between array elements) insert these forecast elements into the current file. Do this for each rooftop ID, then restart the Solcast integration. (Having duplicates of the timestamped entries won't hurt anything, and they will be cleaned up.) _Please, take a backup of `solcast.json` before attempting this..._
202 |
203 | ### Q: I have a Solcast API limit of 50 calls. Why is the integration now limiting me to 10?
204 |
205 | Solcast removed an API call to get API quota usage, so the answer is because _**you**_ told it to.
206 |
207 | To answer a question with a question, is the API quota set correctly in the integration configuration? If not, then set it to 50 or as appropriate, given you may be using calls for estimated actuals or forced updates as well.
208 |
209 | ### Q: What polls to Solcast happen, when do they happen, and are they important?
210 |
211 | When the integration starts for the very first time, several important things happen, and these involve Solcast API calls that are generally metered. Continued use of API calls also occurs.
212 |
213 | This FAQ post is way longer than it should be for a mere _three_ API calls, but there be nuances depending on circumstance. The TL;DR? Getting sites data does not use API call quota. Getting a forecast, or a set of estimated actuals does.
214 |
215 | 1. The rooftop sites information is gathered for each of the solcast API keys specified.
216 |
217 | This is **super** important information, and the integration _cannot function_ without this. The return includes the rooftop ID(s), which are used in subsequent calls, plus other data that isn't used except for populating sensor attributes, like location, azimuth and panel tilt.
218 |
219 | This call happens when you are first setting up the integration, and also on _every_ re-load. The "first set up" call is used to verify that your API key is good, and also that you've got sites configured. It also occurs on each load just in case you've changed settings at `solcast.com` and then re-load the integration.
220 |
221 | For each re-load if the API call does not work for some reason, then this integration utilises a cache that will recall the data from the last successful call. If the cache doesn't exist yet or has been deleted then the integration won't work, and it will continuously restart until this call succeeds. But do note that if this is your first attempt at setting up the integration and the call fails (i.e. Solcast not available), then you'll be hit with a "Do not pass Go" scenario, and must just follow the instruction: Try again later. You've almost certainly hit a busy API time window. It's not us. It's them... So, try again later. 😅 Five or ten minutes should do.
222 |
223 | This call _will not_ use up any precious API call quota, no matter how many times it is called.
224 |
225 | 2. When "estimated actuals" is not yet available.
226 |
227 | This call happens ideally once, and only for a new install or if the `solcast.json` cache file is deleted (which is an action that can be requested via Developer tools, or by directly deleting the file and re-starting).
228 |
229 | It can also be called should the integration have been sitting disabled/failed for over a week, where past data gaps would be seen. (The estimated actuals are used to fill gaps where possible.)
230 |
231 | This occurs for each rooftop ID, so if you have two Solcast sites defined, then _two_ calls are made.
232 |
233 | This will use up API quota for each site defined, then on top of that usage a forecast update will occur using more quota.
234 |
235 | 3. When a forecast update is requested.
236 |
237 | Auto-update is enabled, or an automation is created by you in Home Assistant to trigger how often solar forecasts are gathered, and when this triggers the service `solcast_solar.update_forecasts` it will update all of the rooftop IDs for all of the accounts.
238 |
239 | This will use up API quota, and if you have two sites configured for an API key then it'll use up two calls for that key.
240 |
241 | Should this call not be successful, then it will be re-tried ten times. (A failure almost always does not use quota. It _may_ if the failure happens due to a bug, but I can't recall that ever happening.) The retry mechanism is designed using a back-off mechanism that will retry at delays of 15, 30, 45, 60 and so on seconds, plus a random number of seconds between zero and fifteen for each retry. You'll see this activity in the log as warnings if it happens.
242 |
243 | If all retries are exhausted, then a 429/Solcast too busy error will be logged.
244 |
245 | But don't panic and raise an issue. It's almost certainly them, not you or us, and the next forecast acquisition will likely be successful.
246 |
247 | 4. When "estimated actuals" are updated just past midnight, or when requested to be updated using an action.
248 |
249 | This will use up API quota for each site defined if the option to get estimated actuals is enabled. Updates occur by default within fifteen minutes of the midnight local time roll-over, or when requested by an action call.
250 |
251 | That's all the API calls there are!
252 |
253 | Sometimes the API gets so swamped with requests that it asks users to retry. This is the well seen HTTP 429 response where quota has not yet been reached. It's not an error as such, but more a "we heard you, but go away we're busy, try later" notification. Paying users generally never hit this. Non-paying hobbyist users hit it fairly often, and fair enough. I think Solcast are super generous to offer such a brilliant (but limited) service for us for free.
254 |
255 | This integration does its level best to cooperate with Solcast, and retry in a sensible manner.
256 |
257 | ### Q: A follow-up question: If I restart the integration will it use API quota?
258 |
259 | The integration has a cache of the last successful forecast call response data and the sites data. The sites are loaded on startup if requesting it from Solcast fails. Then the forecast history loads. This does not use API quota. Forecast is only requested, and API usage incremented when auto-update fetches, or _you_ ask the integration to do so, normally with an automation.
260 |
261 | So the answer is definitely no. But this becomes a definite maybe as of v4.2.5.
262 |
263 | If you have auto-update enabled in v4.2.5+ then some strange things can seem to happen. If you restored Home Assistant from a backup that pre-dated the last auto fetch then the integration will initiate a fetch because of stale data. API call(s) used.
264 |
265 | And if you re-started HA immediately before an auto-update was scheduled to fire then that update will fire on integration start. API call(s) will be used, but they would have been used anyway — previously that scheduled update was simply missed.
266 |
267 | So depending on circumstance, the v4.2.5+ answer is possibly, but probably not.
268 |
269 | And a final "what the???" API use scenario: If the integration had been in a failed state that has caused forecast history to be aged out beyond one week then "estimated actuals" will be retrieved from Solcast to cover the gap. This is done to support integration scenarios that rely on recent history, so API calls will be used to get history, and a fresh forecast. (But this will likely not be an issue as the integration has not been running and using quota for forecast updates...)
270 |
271 | That is _way_ too many words to describe that lot, but I trust it has explained every scenario.
--------------------------------------------------------------------------------
/custom_components/solcast_solar/strings.json:
--------------------------------------------------------------------------------
1 | {
2 | "config": {
3 | "abort": {
4 | "internal_error": "Internal error occurred",
5 | "reauth_successful": "New API key applied successfully",
6 | "reconfigured": "New configuration applied successfully",
7 | "single_instance_allowed": "Only one Solcast instance allowed"
8 | },
9 | "error": {
10 | "api_duplicate": "Duplicate API key specified",
11 | "api_looks_like_site": "API key looks like a site ID",
12 | "limit_not_number": "API limit is not a number",
13 | "limit_one_or_greater": "API limit must be one or greater",
14 | "limit_too_many": "There are more API limit counts entered than keys"
15 | },
16 | "step": {
17 | "reauth_confirm": {
18 | "data": {
19 | "api_key": "API key (comma separate multiple values)"
20 | },
21 | "description": "Solcast Account API key"
22 | },
23 | "reconfigure_confirm": {
24 | "data": {
25 | "api_key": "API key (comma separate multiple values)",
26 | "api_quota": "API limit (optionally comma separate multiple values for each key)",
27 | "auto_update": "Auto-update"
28 | },
29 | "description": "Solcast Account Details\n\n(If not using auto-update then an automation is required - see the integration documentation)"
30 | },
31 | "user": {
32 | "data": {
33 | "api_key": "API key (comma separate multiple values)",
34 | "api_quota": "API limit (optionally comma separate multiple values for each key)",
35 | "auto_update": "Auto-update"
36 | },
37 | "description": "Solcast Account Details\n\n(If not using auto-update then an automation is required - see the integration documentation)"
38 | }
39 | }
40 | },
41 | "entity": {
42 | "select": {
43 | "estimate_mode": {
44 | "name": "Use Forecast Field"
45 | }
46 | },
47 | "sensor": {
48 | "api_counter": {
49 | "name": "API Used"
50 | },
51 | "api_limit": {
52 | "name": "API Limit"
53 | },
54 | "dampen": {
55 | "name": "Dampening"
56 | },
57 | "forecast_custom_hours": {
58 | "name": "Forecast Next X Hours"
59 | },
60 | "forecast_next_hour": {
61 | "name": "Forecast Next Hour"
62 | },
63 | "forecast_remaining_today": {
64 | "name": "Forecast Remaining Today"
65 | },
66 | "forecast_this_hour": {
67 | "name": "Forecast This Hour"
68 | },
69 | "hard_limit": {
70 | "name": "Hard Limit Set"
71 | },
72 | "hard_limit_api": {
73 | "name": "Hard Limit Set {api_key}"
74 | },
75 | "last_updated": {
76 | "name": "API Last Polled"
77 | },
78 | "peak_w_time_today": {
79 | "name": "Peak Time Today"
80 | },
81 | "peak_w_time_tomorrow": {
82 | "name": "Peak Time Tomorrow"
83 | },
84 | "peak_w_today": {
85 | "name": "Peak Forecast Today"
86 | },
87 | "peak_w_tomorrow": {
88 | "name": "Peak Forecast Tomorrow"
89 | },
90 | "power_now": {
91 | "name": "Power Now"
92 | },
93 | "power_now_1hr": {
94 | "name": "Power in 1 Hour"
95 | },
96 | "power_now_30m": {
97 | "name": "Power in 30 Minutes"
98 | },
99 | "total_kwh_forecast_d10": {
100 | "name": "Forecast Day 10"
101 | },
102 | "total_kwh_forecast_d11": {
103 | "name": "Forecast Day 11"
104 | },
105 | "total_kwh_forecast_d12": {
106 | "name": "Forecast Day 12"
107 | },
108 | "total_kwh_forecast_d13": {
109 | "name": "Forecast Day 13"
110 | },
111 | "total_kwh_forecast_d3": {
112 | "name": "Forecast Day 3"
113 | },
114 | "total_kwh_forecast_d4": {
115 | "name": "Forecast Day 4"
116 | },
117 | "total_kwh_forecast_d5": {
118 | "name": "Forecast Day 5"
119 | },
120 | "total_kwh_forecast_d6": {
121 | "name": "Forecast Day 6"
122 | },
123 | "total_kwh_forecast_d7": {
124 | "name": "Forecast Day 7"
125 | },
126 | "total_kwh_forecast_d8": {
127 | "name": "Forecast Day 8"
128 | },
129 | "total_kwh_forecast_d9": {
130 | "name": "Forecast Day 9"
131 | },
132 | "total_kwh_forecast_today": {
133 | "name": "Forecast Today"
134 | },
135 | "total_kwh_forecast_tomorrow": {
136 | "name": "Forecast Tomorrow"
137 | }
138 | }
139 | },
140 | "exceptions": {
141 | "actuals_not_enabled": {
142 | "message": "Estimated actuals acquisition is not enabled."
143 | },
144 | "auto_use_force": {
145 | "message": "Auto-update is enabled, ignoring service event for forecast update, use Force Update instead."
146 | },
147 | "auto_use_normal": {
148 | "message": "Auto-update is not enabled, ignoring service event for force forecast update, use Update instead."
149 | },
150 | "build_failed_actuals": {
151 | "message": "Failed to build estimated actual data (corrupt config/solcast-actuals.json?)"
152 | },
153 | "build_failed_forecasts": {
154 | "message": "Failed to build forecast data (corrupt config/solcast.json?)"
155 | },
156 | "damp_auto_enabled": {
157 | "message": "Automated dampening is enabled, not setting manual dampening factors."
158 | },
159 | "damp_count_not_correct": {
160 | "message": "There are not 24 or 48 comma separated numbers between 0.0 and 1.0."
161 | },
162 | "damp_error_parsing": {
163 | "message": "Error parsing dampening factor comma separated numbers."
164 | },
165 | "damp_no_all_24": {
166 | "message": "Specifying a site of 'all' is not allowed with 24 factors, remove site from the request."
167 | },
168 | "damp_no_factors": {
169 | "message": "No dampening factors, must be a comma separated list of numbers between 0.0 and 1.0."
170 | },
171 | "damp_not_for_site": {
172 | "message": "Site dampening is not set for {site}."
173 | },
174 | "damp_not_site": {
175 | "message": "Not a configured site."
176 | },
177 | "damp_outside_range": {
178 | "message": "Dampening factor value present that is not between 0.0 and 1.0."
179 | },
180 | "damp_use_all": {
181 | "message": "Site dampening is not set for {site}, 'all' is the only available site parameter for the service call."
182 | },
183 | "hard_empty": {
184 | "message": "Empty hard limit value."
185 | },
186 | "hard_not_positive_number": {
187 | "message": "Hard limit value is not a positive number."
188 | },
189 | "hard_too_many": {
190 | "message": "More hard limits are specified than there are API keys."
191 | },
192 | "init_cannot_get_sites": {
193 | "message": "Sites data could not be retrieved."
194 | },
195 | "init_cannot_get_sites_cache_invalid": {
196 | "message": "Sites data could not be retrieved and cache is invalid."
197 | },
198 | "init_corrupt": {
199 |       "message": "The cached data in `{file}` is corrupted, suggest deleting or repairing it."
200 | },
201 | "init_incompatible": {
202 | "message": "The cached data in `{file}` is incompatible, suggest deleting it."
203 | },
204 | "init_key_invalid": {
205 | "message": "API key is invalid."
206 | },
207 | "init_no_sites": {
208 | "message": "No sites found for API key."
209 | },
210 | "init_unknown": {
211 | "message": "Exception loading sites data."
212 | },
213 | "init_usage_corrupt": {
214 | "message": "Usage data is corrupt, check or delete `config/solcast_solar/solcast-usage.json`."
215 | },
216 | "integration_not_loaded": {
217 | "message": "Integration not loaded, not performing action."
218 | },
219 | "integration_prior_crash": {
220 | "message": "Integration failed to load previously, trying again after a delay."
221 | },
222 | "remove_cache_failed": {
223 | "message": "Clear all Solcast data failed."
224 | }
225 | },
226 | "issues": {
227 | "advanced_deprecated": {
228 | "description": "Advanced configuration option(s) `{option}` changed to `{new_option}`. Please update your advanced configuration options to use the new option name.{stops_working}",
229 | "title": "Advanced option deprecated"
230 | },
231 | "advanced_problem": {
232 | "description": "The advanced configuration file contains errors.\n{problems}\n\nPlease fix these, referring to the integration documentation at the 'Learn more' link.",
233 | "title": "Advanced option problem"
234 | },
235 | "api_unavailable": {
236 |       "description": "The Solcast API has reported `429 / Try again later` status repeatedly during an update.\n\n* Do not attempt to fix this situation.\n* Do not change your API key.\n* Avoid restarting the integration.\n* This is not an integration issue and will resolve itself. Change nothing.\n\n(To suppress this issue, create an advanced configuration option `api_raise_issue` and set it to false.)",
237 | "title": "Solcast API temporarily unavailable"
238 | },
239 | "corrupt_file": {
240 |       "description": "The cached data file(s) `{files}` are corrupted. The integration will attempt to recover by deleting the file(s) and re-fetching data from Solcast. Expect increased API usage and potential API failures, and loss of forecast history due to this situation. Possibly recover the files from backup, but still expect API call failures.",
241 | "title": "Corrupted cached data"
242 | },
243 | "records_missing": {
244 | "description": "Future forecast data is missing, which is unexpected because auto-update is enabled. Investigate the cause by examining the Home Assistant log. Read the troubleshooting FAQ before raising an integration issue.",
245 | "title": "Future forecasts missing"
246 | },
247 | "records_missing_fixable": {
248 | "fix_flow": {
249 | "abort": {
250 | "reconfigured": "New configuration applied successfully"
251 | },
252 | "step": {
253 | "offer_auto": {
254 | "data": {
255 | "auto_update": "Auto-update"
256 | },
257 | "description": "A forecast update automation has not been running. Would you like to enable auto-update? (Learn more: {learn_more})",
258 | "title": "Future forecasts missing"
259 | }
260 | }
261 | },
262 | "title": "Future forecasts missing"
263 | },
264 | "records_missing_initial": {
265 |       "description": "Forecast and/or estimated actual data is missing, likely because the initial forecast update did not occur. This is usually because the API call limit has been reached. You _will_ have sensor entity issues, and the Energy dashboard will likely have missing data. This will resolve itself.",
266 | "title": "Forecasts missing"
267 | },
268 | "records_missing_unfixable": {
269 | "description": "Future forecast data is missing, which is unexpected. Check the logs.",
270 | "title": "Future forecasts missing"
271 | },
272 | "unusual_azimuth_northern": {
273 | "description": "The azimuth for site {site} is unusual for the latitude of this northern hemisphere rooftop. Should this be {proposal} instead of {extant}? (Ignore the issue if the site is facing towards North.)",
274 | "title": "Unusual azimuth"
275 | },
276 | "unusual_azimuth_southern": {
277 | "description": "The azimuth for site {site} is unusual for the latitude of this southern hemisphere rooftop. Should this be {proposal} instead of {extant}? (Ignore the issue if the site is facing towards South.)",
278 | "title": "Unusual azimuth"
279 | }
280 | },
281 | "options": {
282 | "abort": {
283 | "internal_error": "Internal error occurred",
284 | "reconfigured": "New configuration applied successfully"
285 | },
286 | "error": {
287 | "actuals_not_enabled": "Estimated actuals acquisition is not enabled",
288 | "actuals_without_get": "Estimated actuals cannot be used without getting them",
289 | "api_duplicate": "Duplicate API key specified",
290 | "api_looks_like_site": "API key looks like a site ID",
291 | "custom_invalid": "Custom sensor not between 1 and 144",
292 | "dampen_without_actuals": "Estimated actuals must be selected to use auto-dampening",
293 | "dampen_without_generation": "At least one PV generation Wh/kWh/MWh sensor must be specified to use auto-dampening",
294 | "export_multiple_entities": "Site export entity must be a single entity, not multiple entities",
295 | "export_no_entity": "Site export entity must be specified if site export limit is set",
296 | "hard_not_number": "Hard limit is not a positive number",
297 | "hard_too_many": "There are more hard limits entered than keys",
298 | "limit_not_number": "API limit is not a number",
299 | "limit_one_or_greater": "API limit must be one or greater",
300 | "limit_too_many": "There are more API limit counts entered than keys"
301 | },
302 | "step": {
303 | "dampen": {
304 | "data": {
305 | "damp00": "00:00",
306 | "damp01": "01:00",
307 | "damp02": "02:00",
308 | "damp03": "03:00",
309 | "damp04": "04:00",
310 | "damp05": "05:00",
311 | "damp06": "06:00",
312 | "damp07": "07:00",
313 | "damp08": "08:00",
314 | "damp09": "09:00",
315 | "damp10": "10:00",
316 | "damp11": "11:00",
317 | "damp12": "12:00",
318 | "damp13": "13:00",
319 | "damp14": "14:00",
320 | "damp15": "15:00",
321 | "damp16": "16:00",
322 | "damp17": "17:00",
323 | "damp18": "18:00",
324 | "damp19": "19:00",
325 | "damp20": "20:00",
326 | "damp21": "21:00",
327 | "damp22": "22:00",
328 | "damp23": "23:00"
329 | },
330 | "description": "Modify the hourly dampening factor"
331 | },
332 | "init": {
333 | "data": {
334 | "api_key": "API key (comma separate multiple values)",
335 | "api_quota": "API limit (optionally comma separate multiple values for each key)",
336 | "attr_brk_detailed": "Enable site breakdown for half-hourly/hourly detail attributes (these will not be saved to database state attributes)",
337 | "attr_brk_estimate": "Enable estimate 50 sensor attributes",
338 | "attr_brk_estimate10": "Enable estimate 10 sensor attributes",
339 | "attr_brk_estimate90": "Enable estimate 90 sensor attributes",
340 | "attr_brk_halfhourly": "Enable forecast half-hourly detail attributes",
341 | "attr_brk_hourly": "Enable forecast hourly detail attributes",
342 | "attr_brk_site": "Enable site breakdown sensor attributes",
343 | "auto_dampen": "Auto-dampen (requires estimated actuals and PV generation entity, see integration documentation)",
344 | "auto_update": "Auto-update (if no auto-update then an automation must be used instead)",
345 | "config_damp": "Modify the hourly dampening factors (after clicking submit)",
346 | "customhoursensor": "Custom next X hours sensor (min=1, max=144)",
347 | "exclude_sites": "Site(s) to exclude from the Energy dashboard and sensor totals",
348 | "generation_entities": "PV generation entity/entities for automated dampening (Wh/kWh/MWh, total increasing)",
349 | "get_actuals": "Estimated actuals updated at midnight (see integration documentation, uses one API call per site per day, so a reduced API limit is needed with this option enabled)",
350 | "hard_limit_api": "Inverter hard limit in kW (see integration documentation, comma separate multiple values for each key)",
351 | "key_estimate": "Preferred forecast likelihood to use for sensors",
352 | "site_damp": "Granular dampening is set: Clear to revert to overall hourly dampening",
353 | "site_export_entity": "Optional site export entity for automated dampening (Wh/kWh/MWh, total increasing)",
354 | "site_export_limit": "Site export limit if site export is specified (kW)",
355 | "use_actuals": "Forecast history to show on the Energy dashboard"
356 | },
357 | "description": "Solcast configuration options"
358 | }
359 | }
360 | },
361 | "selector": {
362 | "auto_update": {
363 | "options": {
364 | "0": "No automatic update of forecasts",
365 | "1": "Automatic update of forecasts from sunrise to sunset",
366 | "2": "Automatic update of forecasts over 24 hours"
367 | }
368 | },
369 | "energy_history": {
370 | "options": {
371 | "0": "Forecasts",
372 | "1": "Estimated actuals",
373 | "2": "Dampened estimated actuals"
374 | }
375 | },
376 | "key_estimate": {
377 | "options": {
378 | "estimate": "Median expected generation (estimate)",
379 | "estimate10": "10th percentile expected generation (lower end of possible outcomes, estimate10)",
380 | "estimate90": "90th percentile expected generation (upper end of possible outcomes, estimate90)"
381 | }
382 | }
383 | },
384 | "services": {
385 | "clear_all_solcast_data": {
386 | "description": "Deletes the solcast.json file to remove all current solcast site data.",
387 | "name": "Clear all saved Solcast data"
388 | },
389 | "force_update_estimates": {
390 | "description": "Force fetches the latest estimated actual data from Solcast.",
391 | "name": "Force Update Estimated Actuals"
392 | },
393 | "force_update_forecasts": {
394 | "description": "Force fetches the latest forecasts data from Solcast.",
395 | "name": "Force Update"
396 | },
397 | "get_dampening": {
398 | "description": "Get the forecast dampening factors.",
399 | "fields": {
400 | "site": {
401 | "description": "Optional site to get dampening. (Note: Will not work if granular dampening is not enabled.)",
402 | "name": "Site ID"
403 | }
404 | },
405 | "name": "Get forecasts dampening"
406 | },
407 | "query_estimate_data": {
408 | "description": "Return a data set for a given query.",
409 | "fields": {
410 | "end_date_time": {
411 | "description": "Query estimated actual data events up to date time.",
412 | "name": "End date time"
413 | },
414 | "start_date_time": {
415 | "description": "Query estimated actual data events from date time.",
416 | "name": "Start date time"
417 | }
418 | },
419 | "name": "Query estimated actual data"
420 | },
421 | "query_forecast_data": {
422 | "description": "Return a data set for a given query.",
423 | "fields": {
424 | "end_date_time": {
425 | "description": "Query forecast data events up to date time.",
426 | "name": "End date time"
427 | },
428 | "site": {
429 | "description": "Optional site to retrieve one site forecast",
430 | "name": "Site ID"
431 | },
432 | "start_date_time": {
433 | "description": "Query forecast data events from date time.",
434 | "name": "Start date time"
435 | },
436 | "undampened": {
437 | "description": "Optional boolean to retrieve un-dampened forecast",
438 | "name": "Undampened"
439 | }
440 | },
441 | "name": "Query forecast data"
442 | },
443 | "remove_hard_limit": {
444 | "description": "Remove set limit.",
445 | "name": "Remove inverter forecast hard limit"
446 | },
447 | "set_dampening": {
448 | "description": "Set forecast dampening factors.",
449 | "fields": {
450 | "damp_factor": {
451 | "description": "String of dampening factor values comma separated (24 or 48 values).",
452 | "name": "Dampening string"
453 | },
454 | "site": {
455 | "description": "Optional site to set per-site dampening. (Note: Omitting site clears per-site dampening unless 48 values.)",
456 | "name": "Site ID"
457 | }
458 | },
459 | "name": "Set forecasts dampening"
460 | },
461 | "set_hard_limit": {
462 | "description": "Prevent forecast values being higher than the inverter can produce.",
463 | "fields": {
464 | "hard_limit": {
465 | "description": "Set the max value in Watts that the inverter can produce.",
466 | "name": "Limit value in Watts"
467 | }
468 | },
469 | "name": "Set inverter forecast hard limit"
470 | },
471 | "update_forecasts": {
472 | "description": "Fetches the latest forecasts data from Solcast.",
473 | "name": "Update"
474 | }
475 | },
476 | "system_health": {
477 | "info": {
478 | "can_reach_server": "Solcast server connection"
479 | }
480 | }
481 | }
--------------------------------------------------------------------------------