├── .gitignore ├── custom_components └── hasl3 │ ├── slapi │ ├── __init__.py │ ├── exceptions.py │ ├── const.py │ └── slapi.py │ ├── rrapi │ ├── __init__.py │ ├── const.py │ ├── exceptions.py │ └── rrapi.py │ ├── haslworker │ ├── exceptions.py │ └── __init__.py │ ├── manifest.json │ ├── system_health.py │ ├── const.py │ ├── services.yaml │ ├── translations │ ├── en.json │ └── sv.json │ ├── binary_sensor.py │ ├── config_schema.py │ ├── config_flow.py │ ├── __init__.py │ └── sensor.py ├── hacs.json ├── .github └── workflows │ ├── hassfest.yaml │ ├── hacs.yaml │ └── codeql-analysis.yml ├── SECURITY.md ├── README.md ├── LICENSE └── CHANGELOG.md /.gitignore: -------------------------------------------------------------------------------- 1 | .vscode/* 2 | __pycache__ -------------------------------------------------------------------------------- /custom_components/hasl3/slapi/__init__.py: -------------------------------------------------------------------------------- 1 | """Python package for accessing SL REST API.""" 2 | from .exceptions import * 3 | from .slapi import * 4 | from .const import * 5 | -------------------------------------------------------------------------------- /custom_components/hasl3/rrapi/__init__.py: -------------------------------------------------------------------------------- 1 | """Python package for accessing RESROBOT REST API.""" 2 | from .exceptions import * 3 | from .rrapi import * 4 | from .const import * 5 | -------------------------------------------------------------------------------- /hacs.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Swedish Public Transport Sensor (HASL)", 3 | "content_in_root": false, 4 | "country": "SE", 5 | "render_readme": false, 6 | "homeassistant": "2021.12.0", 7 | "zip_release": false 8 | } -------------------------------------------------------------------------------- /custom_components/hasl3/haslworker/exceptions.py: 
"""Custom exceptions for the HASL worker."""


class HaslException(Exception):
    """Base exception for all HASL worker errors."""


class HaslExpectedException(HaslException):
    """Raised for anticipated error conditions that callers may handle gracefully."""
To report a security problem, go to our [issues](https://github.com/hasl-platform/integration/issues) and search to make sure no one else has already reported it, then open a [new issue](https://github.com/hasl-platform/integration/issues/new) and tag it with the `Security` tag. We will prioritize reports having this tag.
class RRAPI_Error(Exception):
    """Base class for Resrobot (RRAPI) exceptions.

    Carries a numeric code, a short message and a free-form detail string,
    exposed through read-only properties.
    """
    def __init__(self, code, message, details):
        self._code = code
        self._message = message
        self._details = details

    def __str__(self):
        return "RRAPI_Error {0}: {1}".format(self._code, self._message)

    @property
    def details(self):
        # Free-form detail text (typically the underlying exception message).
        return self._details

    @property
    def message(self):
        # Short human-readable error description.
        return self._message

    @property
    def code(self):
        # Numeric error code (API-reported code or internal 997-999 range).
        return self._code


class RRAPI_API_Error(RRAPI_Error):
    """An API-level exception occurred (error reported by the Resrobot API)."""
    def __str__(self):
        return "RRAPI_API_Error {0}: {1}".format(self._code, self._message)


class RRAPI_HTTP_Error(RRAPI_Error):
    """An HTTP-level exception occurred (transport/connection failure)."""
    def __str__(self):
        return "RRAPI_HTTP_Error {0}: {1}".format(self._code, self._message)
28 | 29 | 30 | class SLAPI_HTTP_Error(SLAPI_Error): 31 | """An HTTP-level exception occurred.""" 32 | def __str__(self): 33 | return "SLAPI_HTTP_Error {0}: {1}".format(self._code, self._message) 34 | -------------------------------------------------------------------------------- /custom_components/hasl3/slapi/const.py: -------------------------------------------------------------------------------- 1 | __version__ = '3.1.3' 2 | 3 | FORDONSPOSITION_URL = 'https://api.sl.se/fordonspositioner/GetData?' \ 4 | 'type={}&pp=false&cacheControl={}' 5 | 6 | # old https://api.sl.se/api2 ceases to function on 2024-03-15 7 | TRAFIKLAB_URL = 'https://journeyplanner.integration.sl.se/v1/' 8 | # Due to technical reasons, this API is being replaced by SLs Deviations API and will completely stop working on 2024-03-31 9 | SI2_URL = TRAFIKLAB_URL + 'deviations.json?key={}&siteid={}&lineNumber={}' 10 | # Due to technical reasons, this API is being replaced by SLs Deviations API and GTFS Service alerts. It will stop working on 2024-03-31 11 | TL2_URL = TRAFIKLAB_URL + 'trafficsituation.json?key={}' 12 | # This API will be shut down at the end of March 2024. It is replaced by SL’s new transport API. 13 | RI4_URL = TRAFIKLAB_URL + 'realtimedeparturesV4.json?key={}&siteid={}' \ 14 | '&timeWindow={}' 15 | PU1_URL = TRAFIKLAB_URL + 'typeahead.json?key={}&searchstring={}' \ 16 | '&stationsonly=False&maxresults=25' 17 | RP3_URL = TRAFIKLAB_URL + 'TravelplannerV3_1/trip.json?key={}&originExtId={}' \ 18 | '&destExtId={}&originCoordLat={}' \ 19 | '&originCoordLong={}&destCoordLat={}' \ 20 | '&destCoordLong={}&Passlist=1' 21 | 22 | USER_AGENT = "hasl-slapi/" + __version__ 23 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 
3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 11 | # 12 | name: "Validate using CodeQL" 13 | 14 | on: 15 | push: 16 | branches: [ dev ] 17 | pull_request: 18 | # The branches below must be a subset of the branches above 19 | branches: [ dev ] 20 | schedule: 21 | - cron: '34 12 * * 3' 22 | 23 | jobs: 24 | analyze: 25 | name: Analyze 26 | runs-on: ubuntu-latest 27 | permissions: 28 | actions: read 29 | contents: read 30 | security-events: write 31 | 32 | strategy: 33 | fail-fast: false 34 | matrix: 35 | language: [ 'python' ] 36 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ] 37 | # Learn more: 38 | # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed 39 | 40 | steps: 41 | - name: Checkout repository 42 | uses: actions/checkout@v4 43 | 44 | # Initializes the CodeQL tools for scanning. 
def get_size(obj, seen=None):
    """Recursively compute the approximate memory footprint of *obj* in bytes.

    Follows dict keys and values, instance ``__dict__`` attributes and generic
    iterables (except str/bytes/bytearray, whose contents are already covered
    by ``sys.getsizeof``).  Each distinct object is counted at most once, which
    also guards against reference cycles.

    :param obj: object to measure.
    :param seen: set of already-visited object ids; internal, callers omit it.
    :return: total size in bytes.
    """
    size = sys.getsizeof(obj)
    if seen is None:
        seen = set()
    obj_id = id(obj)
    if obj_id in seen:
        # Already counted (shared reference or cycle) -- contributes nothing.
        return 0

    seen.add(obj_id)
    if isinstance(obj, dict):
        size += sum(get_size(v, seen) for v in obj.values())
        size += sum(get_size(k, seen) for k in obj.keys())
    elif hasattr(obj, '__dict__'):
        size += get_size(obj.__dict__, seen)
    elif hasattr(obj, '__iter__') and not isinstance(obj, (str, bytes, bytearray)):
        size += sum(get_size(i, seen) for i in obj)
    return size
This is a Home Assistant integration providing sensors for [Stockholms Lokaltrafik (SL)](https://sl.se/) primarily, though it does support [Resrobot](https://resrobot.se/) and
It also provides services for Location ID lookup and Trip Planning.
31 | 32 | ![card](https://user-images.githubusercontent.com/8133650/56198334-0a150f00-603b-11e9-9e93-92be212d7f7b.PNG) 33 | 34 | 35 | -------------------------------------------------------------------------------- /custom_components/hasl3/const.py: -------------------------------------------------------------------------------- 1 | """ SL Platform Constants """ 2 | from homeassistant.const import ( 3 | CONF_NAME, 4 | STATE_ON, 5 | STATE_OFF 6 | ) 7 | 8 | HASL_VERSION = "3.1.3" 9 | SCHEMA_VERSION = "3" 10 | DOMAIN = "hasl3" 11 | NAME = "Swedish Public Transport Sensor (HASL)" 12 | 13 | DEVICE_NAME = "HASL API Communications Device" 14 | DEVICE_MANUFACTURER = "hasl.sorlov.com" 15 | DEVICE_MODEL = "Software device" 16 | DEVICE_GUID = "10ba5386-5fad-49c6-8f03-c7a047cd5aa5-6a618956-520c-41d2-9a10-6d7e7353c7f5" 17 | 18 | SENSOR_RRDEP = 'Resrobot Departures' 19 | SENSOR_RRARR = 'Resrobot Arrivals' 20 | SENSOR_RRROUTE = 'Resrobot Route Sensor' 21 | SENSOR_STANDARD = 'SL Departures' 22 | SENSOR_STATUS = 'SL Traffic Status' 23 | SENSOR_VEHICLE_LOCATION = 'SL Vehicle Locations' 24 | SENSOR_DEVIATION = 'SL Deviations' 25 | SENSOR_ROUTE = 'SL Route Sensor' 26 | 27 | CONF_RI4_KEY = 'ri4key' 28 | CONF_SI2_KEY = 'si2key' 29 | CONF_TL2_KEY = 'tl2key' 30 | CONF_RP3_KEY = 'rp3key' 31 | CONF_RR_KEY = 'rrkey' 32 | 33 | CONF_SITE_ID = 'siteid' 34 | CONF_DEBUG = 'debug' 35 | CONF_FP_PT = 'fppt' 36 | CONF_FP_RB = 'fprb' 37 | CONF_FP_TVB = 'fptvb' 38 | CONF_FP_SB = 'fpsb' 39 | CONF_FP_LB = 'fplb' 40 | CONF_FP_SPVC = 'fpspvc' 41 | CONF_FP_TB1 = 'fptb1' 42 | CONF_FP_TB2 = 'fptb2' 43 | CONF_FP_TB3 = 'fptb3' 44 | CONF_ANALOG_SENSORS = 'analog' 45 | CONF_SENSOR = 'sensor' 46 | CONF_SENSOR_PROPERTY = 'property' 47 | CONF_LINES = 'lines' 48 | CONF_TRIPS = 'trips' 49 | CONF_ENABLED = 'enabled' 50 | CONF_DIRECTION = 'direction' 51 | CONF_DIRECTION_ANY = 0 52 | CONF_DIRECTION_LEFT = 1 53 | CONF_DIRECTION_RIGHT = 2 54 | CONF_TIMEWINDOW = 'timewindow' 55 | CONF_SCAN_INTERVAL = 'scan_interval' 56 | 
CONF_SENSOR_PROPERTY_MIN = 'min' 57 | CONF_SENSOR_PROPERTY_TIME = 'time' 58 | CONF_SENSOR_PROPERTY_DEVIATIONS = 'deviations' 59 | CONF_SENSOR_PROPERTY_UPDATED = 'updated' 60 | CONF_SENSOR_PROPERTY_ORIGIN = 'origin' 61 | CONF_INTEGRATION_TYPE = 'type' 62 | CONF_INTEGRATION_ID = 'id' 63 | CONF_DEVIATION_LINE = 'line' 64 | CONF_DEVIATION_STOP = 'stop' 65 | CONF_DEVIATION_LINES = 'lines' 66 | CONF_DEVIATION_STOPS = 'stops' 67 | CONF_SOURCE = "from" 68 | CONF_DESTINATION = "to" 69 | CONF_SOURCE_ID = "fromid" 70 | CONF_DESTINATION_ID = "toid" 71 | 72 | CONF_METRO = "metro" 73 | CONF_TRAIN = "train" 74 | CONF_LOCAL = "local" 75 | CONF_TRAM = "tram" 76 | CONF_BUS = "bus" 77 | CONF_FERRY = "ferry" 78 | 79 | CONF_DIRECTION_LIST = [CONF_DIRECTION_ANY, CONF_DIRECTION_LEFT, CONF_DIRECTION_RIGHT] 80 | CONF_SENSOR_PROPERTY_LIST = [CONF_SENSOR_PROPERTY_MIN, CONF_SENSOR_PROPERTY_TIME, CONF_SENSOR_PROPERTY_DEVIATIONS, CONF_SENSOR_PROPERTY_UPDATED] 81 | CONF_INTEGRATION_LIST = [SENSOR_RRDEP, SENSOR_RRARR, SENSOR_RRROUTE, SENSOR_STANDARD, SENSOR_STATUS, SENSOR_VEHICLE_LOCATION, SENSOR_DEVIATION, SENSOR_ROUTE] 82 | CONF_TRANSPORT_MODE_LIST = [CONF_METRO, CONF_TRAIN, CONF_LOCAL, CONF_TRAM, CONF_BUS, CONF_FERRY] 83 | CONF_RRARR_PROPERTY_LIST = [CONF_SENSOR_PROPERTY_MIN, CONF_SENSOR_PROPERTY_TIME, CONF_SENSOR_PROPERTY_UPDATED, CONF_SENSOR_PROPERTY_ORIGIN] 84 | CONF_RRDEP_PROPERTY_LIST = [CONF_SENSOR_PROPERTY_MIN, CONF_SENSOR_PROPERTY_TIME, CONF_SENSOR_PROPERTY_UPDATED] 85 | 86 | DEFAULT_DIRECTION = CONF_DIRECTION_ANY 87 | DEFAULT_SENSOR_PROPERTY = CONF_SENSOR_PROPERTY_MIN 88 | DEFAULT_INTEGRATION_TYPE = SENSOR_RRDEP 89 | DEFAULT_SCAN_INTERVAL = 300 90 | DEFAULT_TIMEWINDOW = 6 91 | -------------------------------------------------------------------------------- /custom_components/hasl3/services.yaml: -------------------------------------------------------------------------------- 1 | # Describes the format for available hasl3 services 2 | dump_cache: 3 | description: Dumps all downloaded 
description: Search for a trip between two places using SL origin and destination location IDs. Response will be triggered as event on the bus (topic is hasl3).
48 | fields: 49 | api_key: 50 | name: API Key 51 | advanced: false 52 | required: true 53 | description: The Reseplaneraren 3.1 API key to use for the query 54 | selector: 55 | text: 56 | org: 57 | name: Origin ID 58 | advanced: false 59 | required: true 60 | description: The origin for the trip 61 | example: 4243 62 | selector: 63 | text: 64 | dest: 65 | name: Destination ID 66 | advanced: false 67 | required: true 68 | description: The destination for the trip 69 | example: 2412 70 | selector: 71 | text: 72 | 73 | sl_find_trip_pos: 74 | description: Search for a trip between two placings using longitude and latitude. Response will be triggered as event on the bus (topic is hasl3). 75 | fields: 76 | api_key: 77 | name: API Key 78 | advanced: false 79 | required: true 80 | description: The SL Reseplaneraren 3.1 API key to use for the query 81 | selector: 82 | text: 83 | orig_lat: 84 | name: Origin Latitude 85 | advanced: false 86 | required: true 87 | description: The origin location for the trip 88 | example: '20.20' 89 | selector: 90 | text: 91 | orig_long: 92 | name: Origin Longitude 93 | advanced: false 94 | required: true 95 | description: The origin location for the trip 96 | example: '10.20' 97 | selector: 98 | text: 99 | dest_lat: 100 | name: Destination Latitude 101 | advanced: false 102 | required: true 103 | description: The destination location for the trip 104 | example: '20.20' 105 | selector: 106 | text: 107 | dest_long: 108 | name: Destination Longitude 109 | advanced: false 110 | required: true 111 | description: The destination location for the trip 112 | example: '10.20' 113 | selector: 114 | text: 115 | -------------------------------------------------------------------------------- /custom_components/hasl3/rrapi/rrapi.py: -------------------------------------------------------------------------------- 1 | import json 2 | import httpx 3 | import time 4 | import logging 5 | 6 | from .exceptions import ( 7 | RRAPI_Error, 8 | RRAPI_HTTP_Error, 9 | 
class rrapi(object):
    """Base client for the Resrobot v2.1 REST API.

    Subclasses build endpoint-specific URLs and delegate the actual HTTP
    round trip and error handling to :meth:`_get`.
    """

    def __init__(self, timeout=None):
        # Optional timeout (seconds) forwarded to httpx on every request.
        self._timeout = timeout

    def version(self):
        """Return the rrapi library version string."""
        return __version__

    async def _get(self, url, api):
        """GET *url* and return the decoded JSON payload.

        :param url: fully formatted endpoint URL (including access key).
        :param api: human-readable API name, used only in log/error messages.
        :raises RRAPI_HTTP_Error: on transport-level failures (code 997).
        :raises RRAPI_API_Error: on JSON parse failures (998) or when the
            API response carries an ``errorCode`` field.
        :raises RRAPI_Error: when the response decodes to an empty payload (999).
        """
        try:
            async with httpx.AsyncClient() as client:
                resp = await client.get(url,
                                        headers={"User-agent": USER_AGENT},
                                        follow_redirects=True,
                                        timeout=self._timeout)
        except Exception as e:
            # Wording kept consistent with the slapi client's HTTP error text.
            error = RRAPI_HTTP_Error(997, f"An HTTP error occurred ({api})", str(e))
            logger.debug(e)
            logger.error(error)
            raise error

        try:
            jsonResponse = resp.json()
        except Exception as e:
            error = RRAPI_API_Error(998, f"A parsing error occurred ({api})", str(e))
            # Log at error level like the other failure paths (was debug-only).
            logger.error(error)
            raise error

        if not jsonResponse:
            error = RRAPI_Error(999, "Internal error", f"jsonResponse is empty ({api})")
            logger.error(error)
            raise error

        if 'errorCode' in jsonResponse:
            error = RRAPI_API_Error(jsonResponse['errorCode'], jsonResponse['errorText'], jsonResponse['errorText'])
            logger.error(error)
            raise error

        return jsonResponse
entry["longId"] = place["id"] 80 | entry["name"] = place["name"] 81 | entry["lon"] = place["lon"] 82 | entry["lat"] = place["lat"] 83 | entry["id"] = place["extId"] if 'extId' in place else '' 84 | entry["type"] = place["type"] if 'type' in place else 'STOP' 85 | result.append(entry) 86 | 87 | return result 88 | 89 | 90 | 91 | 92 | class rrapi_rrr(rrapi): 93 | def __init__(self, api_token, timeout=None): 94 | super().__init__(timeout) 95 | self._api_token = api_token 96 | 97 | async def request(self, origin, destination): 98 | logger.debug("Will call RR-RP API") 99 | return await self._get(ROUTE_PLANNER_URL.format(BASE_URL, origin, destination, self._api_token), "Route Planner") 100 | 101 | 102 | class rrapi_rrd(rrapi): 103 | 104 | def __init__(self, api_token, timeout=None): 105 | super().__init__(timeout) 106 | self._api_token = api_token 107 | 108 | async def request(self, id): 109 | logger.debug("Will call RRDB API") 110 | return await self._get(DEPARTURE_BOARD_URL.format(BASE_URL, id, self._api_token),"Departure Board") 111 | 112 | class rrapi_rra(rrapi): 113 | 114 | def __init__(self, api_token, id, timeout=None): 115 | super().__init__(timeout) 116 | self._api_token = api_token 117 | 118 | async def request(self, id): 119 | logger.debug("Will call RRAB API") 120 | return await self._get(ARRIVAL_BOARD_URL.format(BASE_URL, id, self._api_token),"Arrivals Board") 121 | 122 | -------------------------------------------------------------------------------- /custom_components/hasl3/translations/en.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "Swedish Public Transport Sensor (HASL)", 3 | "config": { 4 | "step": { 5 | "user": { 6 | "description": "Enter a friendly name for your integration to get started.", 7 | "data": { 8 | "name": "Integration friendly name", 9 | "sensortype": "Which kind of integration?" 10 | } 11 | }, 12 | "config": { 13 | "description": "Change integration parameters. 
"fplb": "Lidingöbanan",
More info on https://hasl.sorlov.com", 63 | "data": { 64 | "scan_interval": "How many seconds between refresh", 65 | "sensor": "Only update if this binary sensor is True (empty=always update)", 66 | 67 | "ri4key": "Realtidsinformation 4 API Key", 68 | "siteid": "Location ID", 69 | "direction": "Direction", 70 | "property": "Sensor primary property", 71 | "timewindow": "How long into the future (5-60 min)", 72 | 73 | "si2key": "Störningsinformation 2 API Key", 74 | "lines": "Lines (comma separated list)", 75 | "stops": "Location IDs (comma separated list)", 76 | 77 | "tl2key": "Trafikläget 2 API Key", 78 | "analog": "Create sensors instead of binary_sensors (not recommended)", 79 | 80 | "rp3key": "Reseplaneraren 3.1 API Key", 81 | "from": "From as Location ID or Longlat-pair (59.33258,18.0649)", 82 | "to": "Destination as Location ID or Longlat-pair (59.33258,18.0649)", 83 | 84 | "rrkey": "Resrobot 2.1 API Key", 85 | "fromid": "From Location ID", 86 | "toid": "Destination Location ID", 87 | 88 | "fppt": "Commuter Trains", 89 | "fprb": "Roslagsbanan", 90 | "fptvb": "Tvärbanan", 91 | "fplb": "Lidingöbanen", 92 | "fpsb": "Saltsjöbanan", 93 | "fpspvc": "Spårväg City", 94 | "fptb1": "Gröna Linjen", 95 | "fptb2": "Röda linjen", 96 | "fptb3": "Blåa linjen", 97 | 98 | "metro": "Subway", 99 | "train": "Commuter trains", 100 | "local": "Local trains", 101 | "tram": "Trams", 102 | "bus": "Buses", 103 | "ferry": "Ferries" 104 | } 105 | } 106 | } 107 | } 108 | } -------------------------------------------------------------------------------- /custom_components/hasl3/translations/sv.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "Svensk Kollektivtrafikssensor (HASL)", 3 | "config": { 4 | "step": { 5 | "user": { 6 | "description": "Ange ett namn för denna integration", 7 | "data": { 8 | "name": "Namn på integrationen", 9 | "sensortype": "Vilken sorts integration är det?" 
"description": "Anpassa parametrar för din integration. Mer info på https://hasl.sorlov.com",
Mer info på https://hasl.sorlov.com", 63 | "data": { 64 | "scan_interval": "Hur många sekunder mellan uppdateringar?", 65 | "sensor": "Uppdatera bara om denna sensor är True (tom=updaterar alltid)", 66 | 67 | "ri4key": "SL Realtidsinformation 4 API-nyckel", 68 | "siteid": "Plats ID", 69 | "direction": "Riktning", 70 | "property": "Primäregenskap för sensorn", 71 | "timewindow": "För hur lång tid skall vi hämta data (5-60 min)", 72 | 73 | "si2key": "SL Störningsinformation 2 API-nyckel", 74 | "lines": "Linjer (kommaseparerad lista)", 75 | "stops": "Plats IDn (kommaseparerad lista)", 76 | 77 | "tl2key": "SL Trafikläget 2 API-nyckel", 78 | "analog": "Skapa sensorer istället för binära sensorer (rekomenderas ej)", 79 | 80 | "rp3key": "SL Reseplaneraren 3.1 API-nyckel", 81 | "from": "Från med plats id eller som longlat-par (59.33258,18.0649)", 82 | "to": "Till med plats id eller som longlat-par (59.33258,18.0649)", 83 | 84 | "rrkey": "Resrobot 2.1 API-nyckel", 85 | "fromid": "Från med plats id", 86 | "toid": "Till med plats id", 87 | 88 | "fppt": "Pendeltåg", 89 | "fprb": "Roslagsbanan", 90 | "fptvb": "Tvärbanan", 91 | "fplb": "Lidingöbanen", 92 | "fpsb": "Saltsjöbanan", 93 | "fpspvc": "Spårväg City", 94 | "fptb1": "Gröna Linjen", 95 | "fptb2": "Röda linjen", 96 | "fptb3": "Blåa linjen", 97 | 98 | "metro": "Tunnelbana", 99 | "train": "Pendeltåg", 100 | "local": "Lokaltåg", 101 | "tram": "Spårvagn", 102 | "bus": "Bussar", 103 | "ferry": "Färjor" 104 | } 105 | } 106 | } 107 | } 108 | } -------------------------------------------------------------------------------- /custom_components/hasl3/slapi/slapi.py: -------------------------------------------------------------------------------- 1 | import json 2 | import httpx 3 | import time 4 | import logging 5 | 6 | from .exceptions import ( 7 | SLAPI_Error, 8 | SLAPI_HTTP_Error, 9 | SLAPI_API_Error 10 | ) 11 | from .const import ( 12 | __version__, 13 | FORDONSPOSITION_URL, 14 | SI2_URL, 15 | TL2_URL, 16 | RI4_URL, 17 | 
PU1_URL, 18 | RP3_URL, 19 | USER_AGENT 20 | ) 21 | 22 | logger = logging.getLogger("custom_components.hasl3.slapi") 23 | 24 | 25 | class slapi_fp(object): 26 | def __init__(self, timeout=None): 27 | self._timeout = timeout 28 | 29 | def version(self): 30 | return __version__ 31 | 32 | async def request(self, vehicletype): 33 | 34 | logger.debug("Will call FP API") 35 | if vehicletype not in ('PT', 'RB', 'TVB', 'SB', 'LB', 36 | 'SpvC', 'TB1', 'TB2', 'TB3'): 37 | raise SLAPI_Error(-1, "Vehicle type is not valid", 38 | "Must be one of 'PT','RB','TVB','SB'," 39 | "'LB','SpvC','TB1','TB2','TB3'") 40 | 41 | try: 42 | async with httpx.AsyncClient() as client: 43 | request = await client.get(FORDONSPOSITION_URL.format(vehicletype, 44 | time.time()), 45 | headers={"User-agent": USER_AGENT}, 46 | follow_redirects=True, 47 | timeout=self._timeout) 48 | except Exception as e: 49 | error = SLAPI_HTTP_Error(997, "An HTTP error occurred (Vehicle Locations)", str(e)) 50 | logger.debug(e) 51 | logger.error(error) 52 | raise error 53 | 54 | response = json.loads(request.json()) 55 | 56 | result = [] 57 | 58 | for trip in response['Trips']: 59 | result.append(trip) 60 | 61 | logger.debug("Call completed") 62 | return result 63 | 64 | 65 | class slapi(object): 66 | 67 | def __init__(self, timeout=None): 68 | self._timeout = timeout 69 | 70 | def version(self): 71 | return __version__ 72 | 73 | async def _get(self, url, api): 74 | 75 | api_errors = { 76 | 1001: 'No API key supplied in request', 77 | 1002: 'The supplied API key is not valid', 78 | 1003: 'Specified API is not valid', 79 | 1004: 'The API is not available for this key', 80 | 1005: 'Key exists but is not for requested API', 81 | 1006: 'Too many request per minute (quota exceeded for key)', 82 | 1007: 'Too many request per month (quota exceeded for key)', 83 | 4002: 'Date filter is not valid', 84 | 5000: 'Parameter invalid', 85 | } 86 | 87 | try: 88 | async with httpx.AsyncClient() as client: 89 | resp = await 
client.get(url, 90 | headers={"User-agent": USER_AGENT}, 91 | follow_redirects=True, 92 | timeout=self._timeout) 93 | except Exception as e: 94 | error = SLAPI_HTTP_Error(997, f"An HTTP error occurred ({api})", str(e)) 95 | logger.debug(e) 96 | logger.error(error) 97 | raise error 98 | 99 | try: 100 | jsonResponse = resp.json() 101 | except Exception as e: 102 | error = SLAPI_API_Error(998, f"A parsing error occurred ({api})", str(e)) 103 | logger.debug(error) 104 | raise error 105 | 106 | if not jsonResponse: 107 | error = SLAPI_Error(999, "Internal error", f"jsonResponse is empty ({api})") 108 | logger.error(error) 109 | raise error 110 | 111 | if 'StatusCode' in jsonResponse: 112 | 113 | if jsonResponse['StatusCode'] == 0: 114 | logger.debug("Call completed") 115 | return jsonResponse 116 | 117 | apiErrorText = f"{api_errors.get(jsonResponse['StatusCode'])} ({api})" 118 | 119 | if apiErrorText: 120 | error = SLAPI_API_Error(jsonResponse['StatusCode'], 121 | apiErrorText, 122 | jsonResponse['Message']) 123 | logger.error(error) 124 | raise error 125 | else: 126 | error = SLAPI_API_Error(jsonResponse['StatusCode'], 127 | "Unknown API-response code encountered ({api})", 128 | jsonResponse['Message']) 129 | logger.error(error) 130 | raise error 131 | 132 | elif 'Trip' in jsonResponse: 133 | logger.debug("Call completed") 134 | return jsonResponse 135 | 136 | elif 'Sites' in jsonResponse: 137 | logger.debug("Call completed") 138 | return jsonResponse 139 | 140 | else: 141 | error = SLAPI_Error(-100, f"ResponseType is not ({api})") 142 | logger.error(error) 143 | raise error 144 | 145 | 146 | class slapi_pu1(slapi): 147 | def __init__(self, api_token, timeout=None): 148 | super().__init__(timeout) 149 | self._api_token = api_token 150 | 151 | async def request(self, searchstring): 152 | logger.debug("Will call PU1 API") 153 | return await self._get(PU1_URL.format(self._api_token, searchstring),"Location Lookup") 154 | 155 | 156 | class slapi_rp3(slapi): 157 | def 
__init__(self, api_token, timeout=None): 158 | super().__init__(timeout) 159 | self._api_token = api_token 160 | 161 | async def request(self, origin, destination, orgLat, orgLong, destLat, destLong): 162 | logger.debug("Will call RP3 API") 163 | return await self._get(RP3_URL.format(self._api_token, origin, destination, 164 | orgLat, orgLong, destLat, destLong),"Route Planner") 165 | 166 | 167 | class slapi_ri4(slapi): 168 | 169 | def __init__(self, api_token, window, timeout=None): 170 | super().__init__(timeout) 171 | self._api_token = api_token 172 | self._window = window 173 | 174 | async def request(self, siteid): 175 | logger.debug("Will call RI4 API") 176 | return await self._get(RI4_URL.format(self._api_token, 177 | siteid, self._window),"Departure Board") 178 | 179 | 180 | class slapi_si2(slapi): 181 | 182 | def __init__(self, api_token, siteid, timeout=None): 183 | super().__init__(timeout) 184 | self._api_token = api_token 185 | 186 | async def request(self, siteid, lines): 187 | logger.debug("Will call SI2 API") 188 | return await self._get(SI2_URL.format(self._api_token, 189 | siteid, lines),"Deviations") 190 | 191 | 192 | class slapi_tl2(slapi): 193 | def __init__(self, api_token, timeout=None): 194 | super().__init__(timeout) 195 | self._api_token = api_token 196 | 197 | async def request(self): 198 | logger.debug("Will call TL2 API") 199 | return await self._get(TL2_URL.format(self._api_token),"Traffic Status") 200 | -------------------------------------------------------------------------------- /custom_components/hasl3/binary_sensor.py: -------------------------------------------------------------------------------- 1 | """ SL Platform Sensor """ 2 | import logging 3 | 4 | from homeassistant.helpers.entity import Entity 5 | from homeassistant.helpers.device_registry import DeviceEntryType 6 | from homeassistant.util.dt import now 7 | 8 | from .const import ( 9 | DOMAIN, 10 | HASL_VERSION, 11 | DEVICE_NAME, 12 | DEVICE_MANUFACTURER, 13 | 
DEVICE_MODEL, 14 | DEVICE_GUID, 15 | STATE_ON, 16 | CONF_SENSOR, 17 | CONF_ANALOG_SENSORS, 18 | CONF_TL2_KEY, 19 | SENSOR_STATUS, 20 | CONF_INTEGRATION_TYPE, 21 | CONF_INTEGRATION_ID, 22 | CONF_SCAN_INTERVAL, 23 | CONF_TRANSPORT_MODE_LIST 24 | ) 25 | 26 | logger = logging.getLogger(f"custom_components.{DOMAIN}.sensors") 27 | 28 | 29 | async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): 30 | async_add_entities(await setup_hasl_sensor(hass, config)) 31 | 32 | 33 | async def async_setup_entry(hass, config_entry, async_add_devices): 34 | async_add_devices(await setup_hasl_sensor(hass, config_entry)) 35 | 36 | 37 | async def setup_hasl_sensor(hass, config): 38 | logger.debug("[setup_binary_sensor] Entered") 39 | 40 | sensors = [] 41 | 42 | logger.debug("[setup_binary_sensor] Processing sensors") 43 | if config.data[CONF_INTEGRATION_TYPE] == SENSOR_STATUS: 44 | if not CONF_ANALOG_SENSORS in config.data: 45 | if CONF_TL2_KEY in config.data: 46 | await hass.data[DOMAIN]["worker"].assert_tl2(config.data[CONF_TL2_KEY]) 47 | for sensortype in CONF_TRANSPORT_MODE_LIST: 48 | if sensortype in config.data and config.data[sensortype]: 49 | logger.debug("[setup_binary_sensor] Setting up binary problem sensor..") 50 | try: 51 | sensors.append(HASLTrafficProblemSensor(hass, config, sensortype)) 52 | logger.debug("[setup_binary_sensor] Sensor setup completed successfully") 53 | except: 54 | logger.debug("[setup_binary_sensor] Sensor setup failed") 55 | 56 | logger.debug("[setup_binary_sensor] Force processing problem sensors..") 57 | try: 58 | await hass.data[DOMAIN]["worker"].process_tl2() 59 | logger.debug("[setup_binary_sensor] Force processing completed successfully") 60 | except: 61 | logger.debug("[setup_binary_sensor] Force processing failed") 62 | 63 | return sensors 64 | 65 | 66 | class HASLDevice(Entity): 67 | """HASL Device class.""" 68 | @property 69 | def device_info(self): 70 | """Return device information about HASL Device.""" 71 | 
return { 72 | "identifiers": {(DOMAIN, DEVICE_GUID)}, 73 | "name": DEVICE_NAME, 74 | "manufacturer": DEVICE_MANUFACTURER, 75 | "model": DEVICE_MODEL, 76 | "sw_version": HASL_VERSION, 77 | "entry_type": DeviceEntryType.SERVICE 78 | } 79 | 80 | 81 | class HASLTrafficProblemSensor(HASLDevice): 82 | """Class to hold Sensor basic info.""" 83 | 84 | def __init__(self, hass, config, sensortype): 85 | """Initialize the sensor object.""" 86 | self._hass = hass 87 | self._config = config 88 | self._sensortype = sensortype 89 | self._enabled_sensor = config.data[CONF_SENSOR] 90 | self._name = f"SL {self._sensortype.capitalize()} Problem Sensor ({self._config.title})" 91 | self._sensordata = [] 92 | self._scan_interval = self._config.data[CONF_SCAN_INTERVAL] or 300 93 | self._worker = hass.data[DOMAIN]["worker"] 94 | 95 | async def async_update(self): 96 | """Update the sensor.""" 97 | 98 | logger.debug("[async_update] Entered") 99 | logger.debug(f"[async_update] Processing {self._name}") 100 | if self._worker.data.tl2[self._config.data[CONF_TL2_KEY]]["api_lastrun"]: 101 | if self._worker.checksensorstate(self._enabled_sensor, STATE_ON): 102 | if self._sensordata == [] or self._worker.getminutesdiff(now().strftime('%Y-%m-%d %H:%M:%S'), self._worker.data.tl2[self._config.data[CONF_TL2_KEY]]["api_lastrun"]) > self._config.data[CONF_SCAN_INTERVAL]: 103 | try: 104 | await self._worker.process_tl2() 105 | logger.debug("[async_update] Update processed") 106 | except: 107 | logger.debug("[async_update] Error occurred during update") 108 | else: 109 | logger.debug("[async_update] Not due for update, skipping") 110 | 111 | self._sensordata = self._worker.data.tl2[self._config.data[CONF_TL2_KEY]] 112 | logger.debug("[async_update] Completed") 113 | 114 | @property 115 | def name(self): 116 | """Return the name of the sensor.""" 117 | return self._name 118 | 119 | @property 120 | def should_poll(self): 121 | """No polling needed.""" 122 | return True 123 | 124 | @property 125 | def 
unique_id(self): 126 | return f"sl-{self._sensortype}-status-sensor-{self._config.data[CONF_INTEGRATION_ID]}" 127 | 128 | @property 129 | def icon(self): 130 | trafficTypeIcons = { 131 | 'ferry': 'mdi:ferry', 132 | 'bus': 'mdi:bus', 133 | 'tram': 'mdi:tram', 134 | 'train': 'mdi:train', 135 | 'local': 'mdi:train-variant', 136 | 'metro': 'mdi:subway-variant' 137 | } 138 | 139 | return trafficTypeIcons.get(self._sensortype) 140 | 141 | @property 142 | def is_on(self): 143 | """Return the state of the sensor.""" 144 | if self._sensordata == []: 145 | return False 146 | else: 147 | if self._sensordata["data"][self._sensortype]["status"] == "Good": 148 | return False 149 | else: 150 | return True 151 | 152 | @property 153 | def state(self): 154 | """Return the state of the sensor.""" 155 | if self._sensordata == []: 156 | return False 157 | else: 158 | if self._sensordata["data"][self._sensortype]["status"] == "Good": 159 | return False 160 | else: 161 | return True 162 | 163 | @property 164 | def device_class(self): 165 | """Return the class of this device.""" 166 | return "problem" 167 | 168 | @property 169 | def scan_interval(self): 170 | """Return the unique id.""" 171 | return self._scan_interval 172 | 173 | @property 174 | def available(self): 175 | """Return true if value is valid.""" 176 | return self._sensordata != [] 177 | 178 | @property 179 | def extra_state_attributes(self): 180 | """Attributes.""" 181 | val = {} 182 | 183 | if self._sensordata == []: 184 | return val 185 | 186 | if self._sensordata["api_result"] == "Success": 187 | val['api_result'] = "Ok" 188 | else: 189 | val['api_result'] = self._sensordata["api_error"] 190 | 191 | # Set values of the sensor. 
192 | val['scan_interval'] = self._scan_interval 193 | val['refresh_enabled'] = self._worker.checksensorstate(self._enabled_sensor, STATE_ON) 194 | try: 195 | val['attribution'] = self._sensordata["attribution"] 196 | val['status_text'] = self._sensordata["data"][self._sensortype]["status"] 197 | val['status_icon'] = self._sensordata["data"][self._sensortype]["status_icon"] 198 | val['events'] = self._sensordata["data"][self._sensortype]["events"] 199 | val['last_updated'] = self._sensordata["last_updated"] 200 | except: 201 | val['error'] = "NoDataYet" 202 | logger.debug(f"Data was not available for processing when getting attributes for sensor {self._name}") 203 | 204 | return val 205 | -------------------------------------------------------------------------------- /custom_components/hasl3/config_schema.py: -------------------------------------------------------------------------------- 1 | """HASL Configuration Database.""" 2 | import voluptuous as vol 3 | 4 | from .const import ( 5 | CONF_DESTINATION_ID, 6 | CONF_NAME, 7 | CONF_RI4_KEY, 8 | CONF_RRARR_PROPERTY_LIST, 9 | CONF_RRDEP_PROPERTY_LIST, 10 | CONF_SI2_KEY, 11 | CONF_SOURCE_ID, 12 | CONF_TL2_KEY, 13 | CONF_RP3_KEY, 14 | CONF_SITE_ID, 15 | CONF_FP_PT, 16 | CONF_FP_RB, 17 | CONF_FP_TVB, 18 | CONF_FP_SB, 19 | CONF_FP_LB, 20 | CONF_FP_SPVC, 21 | CONF_FP_TB1, 22 | CONF_FP_TB2, 23 | CONF_FP_TB3, 24 | CONF_SENSOR, 25 | CONF_SENSOR_PROPERTY, 26 | CONF_LINES, 27 | CONF_DIRECTION, 28 | CONF_INTEGRATION_TYPE, 29 | CONF_INTEGRATION_LIST, 30 | CONF_SENSOR_PROPERTY_LIST, 31 | CONF_SCAN_INTERVAL, 32 | CONF_TIMEWINDOW, 33 | CONF_ANALOG_SENSORS, 34 | DEFAULT_INTEGRATION_TYPE, 35 | DEFAULT_SENSOR_PROPERTY, 36 | DEFAULT_DIRECTION, 37 | DEFAULT_SCAN_INTERVAL, 38 | DEFAULT_TIMEWINDOW, 39 | CONF_DEVIATION_STOPS, 40 | CONF_DEVIATION_LINES, 41 | CONF_DIRECTION_LIST, 42 | CONF_DESTINATION, 43 | CONF_SOURCE, 44 | CONF_RR_KEY 45 | ) 46 | 47 | 48 | def hasl_base_config_schema(config: dict = {}, config_flow: bool = False) -> dict: 
49 | """Schema configuration dict that is common with all integration types.""" 50 | if not config: 51 | config = { 52 | CONF_NAME: "", 53 | CONF_INTEGRATION_TYPE: DEFAULT_INTEGRATION_TYPE 54 | } 55 | if config_flow: 56 | return { 57 | vol.Required(CONF_NAME, default=config.get(CONF_NAME)): str, 58 | vol.Required(CONF_INTEGRATION_TYPE, default=config.get(CONF_INTEGRATION_TYPE)): vol.In(CONF_INTEGRATION_LIST) 59 | } 60 | return { 61 | vol.Optional(CONF_NAME, default=config.get(CONF_NAME)): str, 62 | vol.Required(CONF_INTEGRATION_TYPE, default=config.get(CONF_INTEGRATION_TYPE)): vol.In(CONF_INTEGRATION_LIST) 63 | } 64 | 65 | 66 | def standard_config_option_schema(options: dict = {}) -> dict: 67 | """Options for departure sensor / standard sensor.""" 68 | if not options: 69 | options = {CONF_SENSOR: "", CONF_RI4_KEY: "", CONF_SITE_ID: "", CONF_SENSOR: "", CONF_LINES: "", CONF_DIRECTION: DEFAULT_DIRECTION, CONF_SENSOR_PROPERTY: DEFAULT_SENSOR_PROPERTY, CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL, CONF_TIMEWINDOW: DEFAULT_TIMEWINDOW} 70 | return { 71 | vol.Required(CONF_RI4_KEY, default=options.get(CONF_RI4_KEY)): str, 72 | vol.Required(CONF_SITE_ID, default=options.get(CONF_SITE_ID)): int, 73 | vol.Required(CONF_SENSOR_PROPERTY, default=options.get(CONF_SENSOR_PROPERTY)): vol.In(CONF_SENSOR_PROPERTY_LIST), 74 | vol.Required(CONF_SCAN_INTERVAL, default=options.get(CONF_SCAN_INTERVAL)): int, 75 | vol.Required(CONF_TIMEWINDOW, default=options.get(CONF_TIMEWINDOW)): int, 76 | vol.Optional(CONF_LINES, default=options.get(CONF_LINES)): str, 77 | vol.Optional(CONF_DIRECTION, default=options.get(CONF_DIRECTION)): vol.In(CONF_DIRECTION_LIST), 78 | vol.Optional(CONF_SENSOR, default=options.get(CONF_SENSOR)): str 79 | } 80 | 81 | 82 | def deviation_config_option_schema(options: dict = {}) -> dict: 83 | """Deviation sensor options.""" 84 | if not options: 85 | options = {CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL, CONF_SENSOR: "", CONF_SI2_KEY: "", CONF_DEVIATION_STOPS: "", 
CONF_DEVIATION_LINES: ""} 86 | return { 87 | vol.Required(CONF_SI2_KEY, default=options.get(CONF_SI2_KEY)): str, 88 | vol.Optional(CONF_DEVIATION_STOPS, default=options.get(CONF_DEVIATION_STOPS)): str, 89 | vol.Optional(CONF_DEVIATION_LINES, default=options.get(CONF_DEVIATION_LINES)): str, 90 | vol.Required(CONF_SCAN_INTERVAL, default=options.get(CONF_SCAN_INTERVAL)): int, 91 | vol.Optional(CONF_SENSOR, default=options.get(CONF_SENSOR)): str 92 | } 93 | 94 | 95 | CONF_METRO = "metro" 96 | CONF_TRAIN = "train" 97 | CONF_LOCAL = "local" 98 | CONF_TRAM = "tram" 99 | CONF_BUS = "bus" 100 | CONF_FERRY = "ferry" 101 | 102 | 103 | def status_config_option_schema(options: dict = {}) -> dict: 104 | """Status sensor options.""" 105 | if not options: 106 | options = {CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL, CONF_SENSOR: "", CONF_TL2_KEY: "", CONF_ANALOG_SENSORS: False, CONF_METRO: False, CONF_TRAIN: False, CONF_LOCAL: False, CONF_TRAM: False, CONF_BUS: False, CONF_FERRY: False} 107 | return { 108 | vol.Required(CONF_TL2_KEY, default=options.get(CONF_TL2_KEY)): str, 109 | vol.Optional(CONF_METRO, default=options.get(CONF_METRO)): bool, 110 | vol.Optional(CONF_TRAIN, default=options.get(CONF_TRAIN)): bool, 111 | vol.Optional(CONF_LOCAL, default=options.get(CONF_LOCAL)): bool, 112 | vol.Optional(CONF_TRAM, default=options.get(CONF_TRAM)): bool, 113 | vol.Optional(CONF_BUS, default=options.get(CONF_BUS)): bool, 114 | vol.Optional(CONF_FERRY, default=options.get(CONF_FERRY)): bool, 115 | vol.Optional(CONF_ANALOG_SENSORS, default=options.get(CONF_ANALOG_SENSORS)): bool, 116 | vol.Required(CONF_SCAN_INTERVAL, default=options.get(CONF_SCAN_INTERVAL)): int, 117 | vol.Optional(CONF_SENSOR, default=options.get(CONF_SENSOR)): str 118 | } 119 | 120 | 121 | def vehiclelocation_config_option_schema(options: dict = {}) -> dict: 122 | """The schema used for train location service""" 123 | if not options: 124 | options = {CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL, CONF_SENSOR: "", 
CONF_FP_PT: False, CONF_FP_RB: False, CONF_FP_TVB: False, CONF_FP_SB: False, CONF_FP_LB: False, CONF_FP_SPVC: False, CONF_FP_TB1: False, CONF_FP_TB2: False, CONF_FP_TB3: False} 125 | return { 126 | vol.Optional(CONF_FP_PT, default=options.get(CONF_FP_PT)): bool, 127 | vol.Optional(CONF_FP_RB, default=options.get(CONF_FP_RB)): bool, 128 | vol.Optional(CONF_FP_TVB, default=options.get(CONF_FP_TVB)): bool, 129 | vol.Optional(CONF_FP_SB, default=options.get(CONF_FP_SB)): bool, 130 | vol.Optional(CONF_FP_LB, default=options.get(CONF_FP_LB)): bool, 131 | vol.Optional(CONF_FP_SPVC, default=options.get(CONF_FP_SPVC)): bool, 132 | vol.Optional(CONF_FP_TB1, default=options.get(CONF_FP_TB1)): bool, 133 | vol.Optional(CONF_FP_TB2, default=options.get(CONF_FP_TB2)): bool, 134 | vol.Optional(CONF_FP_TB3, default=options.get(CONF_FP_TB3)): bool, 135 | vol.Required(CONF_SCAN_INTERVAL, default=options.get(CONF_SCAN_INTERVAL)): int, 136 | vol.Optional(CONF_SENSOR, default=options.get(CONF_SENSOR)): str 137 | } 138 | 139 | 140 | def route_config_option_schema(options: dict = {}) -> dict: 141 | """Deviation sensor options.""" 142 | if not options: 143 | options = {CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL, CONF_SENSOR: "", CONF_RP3_KEY: "", CONF_SOURCE: "", CONF_DESTINATION: ""} 144 | return { 145 | vol.Required(CONF_RP3_KEY, default=options.get(CONF_RP3_KEY)): str, 146 | vol.Required(CONF_SOURCE, default=options.get(CONF_SOURCE)): str, 147 | vol.Required(CONF_DESTINATION, default=options.get(CONF_DESTINATION)): str, 148 | vol.Required(CONF_SCAN_INTERVAL, default=options.get(CONF_SCAN_INTERVAL)): int, 149 | vol.Optional(CONF_SENSOR, default=options.get(CONF_SENSOR)): str 150 | } 151 | 152 | 153 | def rrdep_config_option_schema(options: dict = {}) -> dict: 154 | """Options for resrobot departure sensor.""" 155 | if not options: 156 | options = {CONF_SENSOR: "", CONF_RR_KEY: "", CONF_SITE_ID: "", CONF_SENSOR: "", CONF_LINES: "", CONF_DIRECTION: DEFAULT_DIRECTION, CONF_SENSOR_PROPERTY: 
DEFAULT_SENSOR_PROPERTY, CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL, CONF_TIMEWINDOW: DEFAULT_TIMEWINDOW} 157 | return { 158 | vol.Required(CONF_RR_KEY, default=options.get(CONF_RR_KEY)): str, 159 | vol.Required(CONF_SITE_ID, default=options.get(CONF_SITE_ID)): int, 160 | vol.Required(CONF_SENSOR_PROPERTY, default=options.get(CONF_SENSOR_PROPERTY)): vol.In(CONF_RRDEP_PROPERTY_LIST), 161 | vol.Required(CONF_SCAN_INTERVAL, default=options.get(CONF_SCAN_INTERVAL)): int, 162 | vol.Required(CONF_TIMEWINDOW, default=options.get(CONF_TIMEWINDOW)): int, 163 | vol.Optional(CONF_LINES, default=options.get(CONF_LINES)): str, 164 | vol.Optional(CONF_DIRECTION, default=options.get(CONF_DIRECTION)): vol.In(CONF_DIRECTION_LIST), 165 | vol.Optional(CONF_SENSOR, default=options.get(CONF_SENSOR)): str 166 | } 167 | 168 | def rrarr_config_option_schema(options: dict = {}) -> dict: 169 | """Options for resrobot arrival sensor.""" 170 | if not options: 171 | options = {CONF_SENSOR: "", CONF_RR_KEY: "", CONF_SITE_ID: "", CONF_SENSOR: "", CONF_LINES: "", CONF_DIRECTION: DEFAULT_DIRECTION, CONF_SENSOR_PROPERTY: DEFAULT_SENSOR_PROPERTY, CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL, CONF_TIMEWINDOW: DEFAULT_TIMEWINDOW} 172 | return { 173 | vol.Required(CONF_RR_KEY, default=options.get(CONF_RR_KEY)): str, 174 | vol.Required(CONF_SITE_ID, default=options.get(CONF_SITE_ID)): int, 175 | vol.Required(CONF_SENSOR_PROPERTY, default=options.get(CONF_SENSOR_PROPERTY)): vol.In(CONF_RRARR_PROPERTY_LIST), 176 | vol.Required(CONF_SCAN_INTERVAL, default=options.get(CONF_SCAN_INTERVAL)): int, 177 | vol.Required(CONF_TIMEWINDOW, default=options.get(CONF_TIMEWINDOW)): int, 178 | vol.Optional(CONF_LINES, default=options.get(CONF_LINES)): str, 179 | vol.Optional(CONF_SENSOR, default=options.get(CONF_SENSOR)): str 180 | } 181 | 182 | def rrroute_config_option_schema(options: dict = {}) -> dict: 183 | """Deviation sensor options.""" 184 | if not options: 185 | options = {CONF_SCAN_INTERVAL: DEFAULT_SCAN_INTERVAL, 
CONF_SENSOR: "", CONF_RR_KEY: "", CONF_SOURCE_ID: "", CONF_DESTINATION_ID: ""} 186 | return { 187 | vol.Required(CONF_RR_KEY, default=options.get(CONF_RR_KEY)): str, 188 | vol.Required(CONF_SOURCE_ID, default=options.get(CONF_SOURCE)): str, 189 | vol.Required(CONF_DESTINATION_ID, default=options.get(CONF_DESTINATION)): str, 190 | vol.Required(CONF_SCAN_INTERVAL, default=options.get(CONF_SCAN_INTERVAL)): int, 191 | vol.Optional(CONF_SENSOR, default=options.get(CONF_SENSOR)): str 192 | } -------------------------------------------------------------------------------- /custom_components/hasl3/config_flow.py: -------------------------------------------------------------------------------- 1 | """Config flow for the HASL component.""" 2 | import voluptuous 3 | import logging 4 | import uuid 5 | 6 | from homeassistant import config_entries 7 | from homeassistant.exceptions import HomeAssistantError 8 | from homeassistant.core import callback 9 | 10 | from .const import ( 11 | DOMAIN, 12 | SCHEMA_VERSION, 13 | CONF_NAME, 14 | SENSOR_RRARR, 15 | SENSOR_RRROUTE, 16 | SENSOR_RRDEP, 17 | SENSOR_STANDARD, 18 | SENSOR_STATUS, 19 | SENSOR_VEHICLE_LOCATION, 20 | SENSOR_DEVIATION, 21 | SENSOR_ROUTE, 22 | CONF_INTEGRATION_ID, 23 | CONF_INTEGRATION_TYPE, 24 | CONF_INTEGRATION_LIST, 25 | ) 26 | 27 | from .config_schema import ( 28 | hasl_base_config_schema, 29 | standard_config_option_schema, 30 | status_config_option_schema, 31 | vehiclelocation_config_option_schema, 32 | deviation_config_option_schema, 33 | route_config_option_schema, 34 | rrdep_config_option_schema, 35 | rrarr_config_option_schema, 36 | rrroute_config_option_schema 37 | ) 38 | 39 | logger = logging.getLogger(f"custom_components.{DOMAIN}.config") 40 | 41 | 42 | class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): 43 | """Config flow for HASL.""" 44 | 45 | VERSION = SCHEMA_VERSION 46 | CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL 47 | 48 | # FIXME: DOES NOT ACTUALLY VALIDATE ANYTHING! WE NEED THIS! 
=) 49 | async def validate_input(self, data): 50 | """Validate input in step user""" 51 | 52 | if not data[CONF_INTEGRATION_TYPE] in CONF_INTEGRATION_LIST: 53 | raise InvalidIntegrationType 54 | 55 | return data 56 | 57 | async def validate_config(self, data): 58 | """Validate input in step config""" 59 | 60 | return data 61 | 62 | async def async_step_user(self, user_input): 63 | """Handle the initial step.""" 64 | logger.debug("[setup_integration] Entered") 65 | errors = {} 66 | 67 | if user_input is None: 68 | logger.debug("[async_step_user] No user input so showing creation form") 69 | return self.async_show_form(step_id="user", data_schema=voluptuous.Schema(hasl_base_config_schema(user_input, True))) 70 | 71 | try: 72 | user_input = await self.validate_input(user_input) 73 | except InvalidIntegrationType: 74 | errors["base"] = "invalid_integration_type" 75 | logger.debug("[setup_integration(validate)] Invalid integration type") 76 | return self.async_show_form(step_id="user", data_schema=voluptuous.Schema(hasl_base_config_schema(user_input, True)), errors=errors) 77 | except InvalidIntegrationName: 78 | errors["base"] = "invalid_integration_name" 79 | logger.debug("[setup_integration(validate)] Invalid integration type") 80 | return self.async_show_form(step_id="user", data_schema=voluptuous.Schema(hasl_base_config_schema(user_input, True)), errors=errors) 81 | except Exception: # pylint: disable=broad-except 82 | errors["base"] = "unknown_exception" 83 | logger.debug("[setup_integration(validate)] Unknown exception occurred") 84 | return self.async_show_form(step_id="user", data_schema=voluptuous.Schema(hasl_base_config_schema(user_input, True)), errors=errors) 85 | 86 | id = str(uuid.uuid4()) 87 | await self.async_set_unique_id(id) 88 | user_input[CONF_INTEGRATION_ID] = id 89 | self._userdata = user_input 90 | 91 | if user_input[CONF_INTEGRATION_TYPE] == SENSOR_STANDARD: 92 | schema = standard_config_option_schema() 93 | if user_input[CONF_INTEGRATION_TYPE] 
== SENSOR_STATUS: 94 | schema = status_config_option_schema() 95 | if user_input[CONF_INTEGRATION_TYPE] == SENSOR_VEHICLE_LOCATION: 96 | schema = vehiclelocation_config_option_schema() 97 | if user_input[CONF_INTEGRATION_TYPE] == SENSOR_DEVIATION: 98 | schema = deviation_config_option_schema() 99 | if user_input[CONF_INTEGRATION_TYPE] == SENSOR_ROUTE: 100 | schema = route_config_option_schema() 101 | if user_input[CONF_INTEGRATION_TYPE] == SENSOR_RRDEP: 102 | schema = rrdep_config_option_schema() 103 | if user_input[CONF_INTEGRATION_TYPE] == SENSOR_RRARR: 104 | schema = rrarr_config_option_schema() 105 | if user_input[CONF_INTEGRATION_TYPE] == SENSOR_RRROUTE: 106 | schema = rrroute_config_option_schema() 107 | 108 | return self.async_show_form(step_id="config", data_schema=voluptuous.Schema(schema), errors=errors) 109 | 110 | async def async_step_config(self, user_input): 111 | """Handle a flow initialized by the user.""" 112 | logger.debug("[setup_integration_config] Entered") 113 | errors = {} 114 | 115 | if self._userdata[CONF_INTEGRATION_TYPE] == SENSOR_STANDARD: 116 | schema = standard_config_option_schema(user_input) 117 | if self._userdata[CONF_INTEGRATION_TYPE] == SENSOR_STATUS: 118 | schema = status_config_option_schema(user_input) 119 | if self._userdata[CONF_INTEGRATION_TYPE] == SENSOR_VEHICLE_LOCATION: 120 | schema = vehiclelocation_config_option_schema(user_input) 121 | if self._userdata[CONF_INTEGRATION_TYPE] == SENSOR_DEVIATION: 122 | schema = deviation_config_option_schema(user_input) 123 | if self._userdata[CONF_INTEGRATION_TYPE] == SENSOR_ROUTE: 124 | schema = route_config_option_schema(user_input) 125 | if self._userdata[CONF_INTEGRATION_TYPE] == SENSOR_RRDEP: 126 | schema = rrdep_config_option_schema(user_input) 127 | if self._userdata[CONF_INTEGRATION_TYPE] == SENSOR_RRARR: 128 | schema = rrarr_config_option_schema(user_input) 129 | if self._userdata[CONF_INTEGRATION_TYPE] == SENSOR_RRROUTE: 130 | schema = 
rrroute_config_option_schema(user_input) 131 | 132 | logger.debug(f"[setup_integration_config] Schema is {self._userdata[CONF_INTEGRATION_TYPE]}") 133 | 134 | # FIXME: DOES NOT ACTUALLY VALIDATE ANYTHING! WE NEED THIS! =) 135 | if user_input is not None: 136 | try: 137 | user_input = await self.validate_config(user_input) 138 | except Exception: # pylint: disable=broad-except 139 | errors["base"] = "unknown_exception" 140 | logger.debug("[setup_integration_config(validate)] Unknown exception occurred") 141 | else: 142 | try: 143 | name = self._userdata[CONF_NAME] 144 | del self._userdata[CONF_NAME] 145 | logger.debug(f"[setup_integration_config] Creating entry '{name}' with id {self._userdata[CONF_INTEGRATION_ID]}") 146 | 147 | self._userdata.update(user_input) 148 | 149 | tempresult = self.async_create_entry(title=name, data=self._userdata) 150 | logger.debug("[setup_integration_config] Entry creating succeeded") 151 | return tempresult 152 | except: 153 | logger.error(f"[setup_integration] Entry creation failed for '{name}' with id {self._userdata[CONF_INTEGRATION_ID]}") 154 | return self.async_abort(reason="not_supported") 155 | 156 | logger.debug("[setup_integration_config] Validation errors encountered so showing options form again") 157 | return self.async_show_form(step_id="config", data_schema=voluptuous.Schema(schema), errors=errors) 158 | 159 | logger.debug("[setup_integration_config] No user input so showing options form") 160 | return self.async_show_form(step_id="config", data_schema=voluptuous.Schema(schema)) 161 | 162 | @staticmethod 163 | @callback 164 | def async_get_options_flow(config_entry): 165 | return OptionsFlow(config_entry) 166 | 167 | 168 | class OptionsFlow(config_entries.OptionsFlow): 169 | """HASL config flow options handler.""" 170 | 171 | def __init__(self, config_entry): 172 | """Initialize HASL options flow.""" 173 | self.config_entry = config_entry 174 | 175 | async def async_step_init(self, user_input=None): 176 | """Manage the 
options.""" 177 | return await self.async_step_user(user_input) 178 | 179 | async def validate_input(self, data): 180 | """Validate input in step user""" 181 | # FIXME: DOES NOT ACTUALLY VALIDATE ANYTHING! WE NEED THIS! =) 182 | 183 | return data 184 | 185 | async def async_step_user(self, user_input): 186 | """Handle a flow initialized by the user.""" 187 | logger.debug("[integration_options] Entered") 188 | errors = {} 189 | 190 | if self.config_entry.data[CONF_INTEGRATION_TYPE] == SENSOR_STANDARD: 191 | schema = standard_config_option_schema(self.config_entry.data) 192 | if self.config_entry.data[CONF_INTEGRATION_TYPE] == SENSOR_STATUS: 193 | schema = status_config_option_schema(self.config_entry.data) 194 | if self.config_entry.data[CONF_INTEGRATION_TYPE] == SENSOR_VEHICLE_LOCATION: 195 | schema = vehiclelocation_config_option_schema(self.config_entry.data) 196 | if self.config_entry.data[CONF_INTEGRATION_TYPE] == SENSOR_DEVIATION: 197 | schema = deviation_config_option_schema(self.config_entry.data) 198 | if self.config_entry.data[CONF_INTEGRATION_TYPE] == SENSOR_ROUTE: 199 | schema = route_config_option_schema(self.config_entry.data) 200 | if self.config_entry.data[CONF_INTEGRATION_TYPE] == SENSOR_RRDEP: 201 | schema = rrdep_config_option_schema(self.config_entry.data) 202 | if self.config_entry.data[CONF_INTEGRATION_TYPE] == SENSOR_RRARR: 203 | schema = rrarr_config_option_schema(self.config_entry.data) 204 | if self.config_entry.data[CONF_INTEGRATION_TYPE] == SENSOR_RRROUTE: 205 | schema = rrroute_config_option_schema(self.config_entry.data) 206 | 207 | logger.debug(f"[integration_options] Schema is {self.config_entry.data[CONF_INTEGRATION_TYPE]}") 208 | 209 | # FIXME: DOES NOT ACTUALLY VALIDATE ANYTHING! WE NEED THIS! 
=) 210 | if user_input is not None: 211 | try: 212 | user_input = await self.validate_input(user_input) 213 | except Exception: # pylint: disable=broad-except 214 | errors["base"] = "unknown_exception" 215 | logger.debug("[integration_options(validate)] Unknown exception occurred") 216 | else: 217 | try: 218 | tempresult = self.async_create_entry(title=self.config_entry.title, data=user_input) 219 | logger.debug("[integration_options] Entry update succeeded") 220 | return tempresult 221 | except: 222 | logger.error("[integration_options] Unknown exception occurred") 223 | 224 | logger.debug("[integration_options] Validation errors encountered so showing options form again") 225 | return self.async_show_form(step_id="user", data_schema=voluptuous.Schema(schema), errors=errors) 226 | 227 | logger.debug("[integration_options] No user input so showing options form") 228 | return self.async_show_form(step_id="user", data_schema=voluptuous.Schema(schema)) 229 | 230 | 231 | class InvalidIntegrationType(HomeAssistantError): 232 | """Error to indicate the integration is not of a valid type.""" 233 | 234 | 235 | class InvalidIntegrationName(HomeAssistantError): 236 | """Error to indicate that the name is not a legal name.""" 237 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /custom_components/hasl3/__init__.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import jsonpickle 3 | import time 4 | import asyncio 5 | 6 | from custom_components.hasl3.haslworker import HaslWorker 7 | from homeassistant.config_entries import ConfigEntry 8 | from homeassistant.helpers.device_registry import DeviceEntryType 9 | from homeassistant.core import HomeAssistant, callback 10 | from homeassistant.helpers import device_registry as dr 11 | 12 | from .const import ( 13 | DOMAIN, 14 | HASL_VERSION, 15 | SCHEMA_VERSION, 16 | DEVICE_NAME, 17 | DEVICE_MANUFACTURER, 18 | DEVICE_MODEL, 19 | DEVICE_GUID, 20 | CONF_INTEGRATION_TYPE, 21 | SENSOR_STANDARD, 22 | SENSOR_STATUS, 23 | SENSOR_VEHICLE_LOCATION, 24 | SENSOR_DEVIATION, 25 | SENSOR_ROUTE, 26 | ) 27 | 28 | from custom_components.hasl3.slapi import ( 29 | slapi_rp3, 30 | slapi_pu1, 31 | ) 32 | 33 | from custom_components.hasl3.rrapi import ( 34 | rrapi_sl 35 | ) 36 | 37 | logger = 
logging.getLogger(f"custom_components.{DOMAIN}.core")
serviceLogger = logging.getLogger(f"custom_components.{DOMAIN}.services")


async def async_setup(hass, config):
    """Set up HASL integration"""
    logger.debug("[setup] Entering")

    # WORKER SETUP -- ensure a single shared worker exists. Done before the
    # service handlers (which close over `worker`) can ever be invoked.
    try:
        if DOMAIN not in hass.data:
            hass.data.setdefault(DOMAIN, {})

        if "worker" not in hass.data[DOMAIN]:
            logger.debug("[setup] No worker present")
            worker = HaslWorker()
            worker.hass = hass
            hass.data[DOMAIN] = {
                "worker": worker
            }
            logger.debug("[setup] Worker created")

        # BUGFIX: `worker` was previously only bound when freshly created, so
        # the service handlers raised NameError whenever a worker already
        # existed. Always resolve it from hass.data.
        worker = hass.data[DOMAIN]["worker"]
    except Exception as e:
        logger.error(f"[setup] Could not get worker: {str(e)}")
        return False

    # SERVICE FUNCTIONS
    @callback
    async def dump_cache(service):
        """Dump the worker cache to a timestamped JSON file in the config dir.

        Fires a DOMAIN event with state success/error; always returns True.
        """
        serviceLogger.debug("[dump_cache] Entered")
        timestring = time.strftime("%Y%m%d%H%M%S")
        outputfile = hass.config.path(f"hasl_data_{timestring}.json")

        serviceLogger.debug(f"[dump_cache] Will dump to {outputfile}")

        try:
            # BUGFIX: jsonpickle.dumps(value, 4, unpicklable=False) passed 4
            # positionally as `unpicklable` and then repeated the keyword,
            # raising TypeError. The 4 was clearly intended as indentation.
            # Also use a context manager so the file is closed on error.
            with open(outputfile, "w") as jsonFile:
                jsonFile.write(jsonpickle.dumps(worker.data.dump(), indent=4, unpicklable=False))
            serviceLogger.debug("[dump_cache] Completed")
            hass.bus.fire(DOMAIN, {"source": "dump_cache", "state": "success", "result": outputfile})
            return True
        except Exception as e:
            serviceLogger.debug("[dump_cache] Failed to take a dump")
            hass.bus.fire(DOMAIN, {"source": "dump_cache", "state": "error", "result": f"Exception occurred during execution: {str(e)}"})
            return True

    @callback
    async def get_cache(service):
        """Fire a DOMAIN event carrying a JSON dump of the worker cache."""
        serviceLogger.debug("[get_cache] Entered")

        try:
            # BUGFIX: `jsonpickle.dump` does not exist (AttributeError); the
            # encoder is `jsonpickle.dumps`, with the same argument fix as in
            # dump_cache above.
            dataDump = jsonpickle.dumps(worker.data.dump(), indent=4, unpicklable=False)
            serviceLogger.debug("[get_cache] Completed")
            hass.bus.fire(DOMAIN, {"source": "get_cache", "state": "success", "result": dataDump})
            return True
        except Exception as e:
            serviceLogger.debug("[get_cache] Failed to get dump")
            hass.bus.fire(DOMAIN, {"source": "get_cache", "state": "error", "result": f"Exception occurred during execution: {str(e)}"})
            return True

    @callback
    async def sl_find_location(service):
        """Look up an SL location id via the pu1 API and fire the result."""
        serviceLogger.debug("[sl_find_location] Entered")
        search_string = service.data.get('search_string')
        api_key = service.data.get('api_key')

        serviceLogger.debug(f"[sl_find_location] Looking for '{search_string}' with key {api_key}")

        try:
            pu1api = slapi_pu1(api_key)
            requestResult = await pu1api.request(search_string)
            serviceLogger.debug("[sl_find_location] Completed")
            hass.bus.fire(DOMAIN, {"source": "sl_find_location", "state": "success", "result": requestResult})
            return True
        except Exception as e:
            serviceLogger.debug("[sl_find_location] Lookup failed")
            hass.bus.fire(DOMAIN, {"source": "sl_find_location", "state": "error", "result": f"Exception occurred during execution: {str(e)}"})
            return True

    @callback
    async def rr_find_location(service):
        """Look up a Resrobot location id and fire the result."""
        serviceLogger.debug("[rr_find_location] Entered")
        search_string = service.data.get('search_string')
        api_key = service.data.get('api_key')

        serviceLogger.debug(f"[rr_find_location] Looking for '{search_string}' with key {api_key}")

        try:
            rrapi = rrapi_sl(api_key)
            requestResult = await rrapi.request(search_string)
            serviceLogger.debug("[rr_find_location] Completed")
            hass.bus.fire(DOMAIN, {"source": "rr_find_location", "state": "success", "result": requestResult})
            return True
        except Exception as e:
            serviceLogger.debug("[rr_find_location] Lookup failed")
            hass.bus.fire(DOMAIN, {"source": "rr_find_location", "state": "error", "result": f"Exception occurred during execution: {str(e)}"})
            return True

    @callback
    async def sl_find_trip_id(service):
        """Find a trip between two SL location ids via the rp3 API."""
        serviceLogger.debug("[sl_find_trip_id] Entered")
        origin = service.data.get('org')
        destination = service.data.get('dest')
        api_key = service.data.get('api_key')

        # serviceLogger.debug(f"[sl_Availablefind_trip_id] Finding from '{origin}' to '{destination}' with key {api_key}")

        try:
            rp3api = slapi_rp3(api_key)
            requestResult = await rp3api.request(origin, destination, '', '', '', '')
            serviceLogger.debug("[sl_find_trip_id] Completed")
            hass.bus.fire(DOMAIN, {"source": "sl_find_trip_id", "state": "success", "result": requestResult})
            return True
        except Exception as e:
            serviceLogger.debug("[sl_find_trip_id] Lookup failed")
            hass.bus.fire(DOMAIN, {"source": "sl_find_trip_id", "state": "error", "result": f"Exception occurred during execution: {str(e)}"})
            return True

    @callback
    async def sl_find_trip_pos(service):
        """Find a trip between two lat/long coordinate pairs via the rp3 API."""
        serviceLogger.debug("[sl_find_trip_pos] Entered")
        olat = service.data.get('orig_lat')
        olon = service.data.get('orig_long')
        dlat = service.data.get('dest_lat')
        dlon = service.data.get('dest_long')
        api_key = service.data.get('api_key')

        serviceLogger.debug(f"[sl_find_trip_pos] Finding from '{olat} {olon}' to '{dlat} {dlon}' with key {api_key}")

        try:
            rp3api = slapi_rp3(api_key)
            requestResult = await rp3api.request('', '', olat, olon, dlat, dlon)
            serviceLogger.debug("[sl_find_trip_pos] Completed")
            hass.bus.fire(DOMAIN, {"source": "sl_find_trip_pos", "state": "success", "result": requestResult})
            return True
        except Exception as e:
            serviceLogger.debug("[sl_find_trip_pos] Lookup failed")
            hass.bus.fire(DOMAIN, {"source": "sl_find_trip_pos", "state": "error", "result": f"Exception occurred during execution: {str(e)}"})
            return True

    @callback
    async def eventListener(service):
        """Dispatch DOMAIN bus events (by their 'cmd' field) to the handlers above."""
        serviceLogger.debug("[eventListener] Entered")

        command = service.data.get('cmd')

        # BUGFIX: the handlers are coroutine functions; calling them without
        # `await` only created (and discarded) a coroutine object, so the
        # dispatched command never actually ran.
        if command == "dump_cache":
            await dump_cache(service)
            serviceLogger.debug("[eventListener] Dispatched to dump_cache")
            return True
        if command == "get_cache":
            await get_cache(service)
            serviceLogger.debug("[eventListener] Dispatched to get_cache")
            return True
        if command == "sl_find_location":
            await sl_find_location(service)
            serviceLogger.debug("[eventListener] Dispatched to sl_find_location")
            return True
        if command == "rr_find_location":
            await rr_find_location(service)
            serviceLogger.debug("[eventListener] Dispatched to rr_find_location")
            return True
        if command == "sl_find_trip_pos":
            await sl_find_trip_pos(service)
            serviceLogger.debug("[eventListener] Dispatched to sl_find_trip_pos")
            return True
        if command == "sl_find_trip_id":
            await sl_find_trip_id(service)
            serviceLogger.debug("[eventListener] Dispatched to sl_find_trip_id")
            return True

    logger.debug("[setup] Registering services")
    try:
        hass.services.async_register(DOMAIN, 'dump_cache', dump_cache)
        hass.services.async_register(DOMAIN, 'get_cache', get_cache)
        hass.services.async_register(DOMAIN, 'sl_find_location', sl_find_location)
        hass.services.async_register(DOMAIN, 'rr_find_location', rr_find_location)
        hass.services.async_register(DOMAIN, 'sl_find_trip_pos', sl_find_trip_pos)
        hass.services.async_register(DOMAIN, 'sl_find_trip_id', sl_find_trip_id)
        logger.debug("[setup] Service registration completed")
    except Exception as e:  # was a bare except
        logger.error(f"[setup] Service registration failed: {str(e)}")

    logger.debug("[setup] Registering event listeners")
    try:
        hass.bus.async_listen(DOMAIN, eventListener)
        logger.debug("[setup] Registering event listeners completed")
    except Exception as e:  # was a bare except
        logger.error(f"[setup] Registering event listeners failed: {str(e)}")

    hass.data[DOMAIN]["worker"].status.startup_in_progress = False
    logger.debug("[setup] Completed")
    return True

| 231 | async def async_migrate_entry(hass, config_entry: ConfigEntry): 232 | logger.debug("[migrate_entry] Entered") 233 | 234 | logger.debug("[migrate_entry] Migrating configuration from schema version %s to version %s", config_entry.version, SCHEMA_VERSION) 235 | 236 | data = {**config_entry.data} 237 | 238 | if config_entry.version != "1" and config_entry.version != "2" and config_entry.version != "3": 239 | for option in config_entry.options: 240 | logger.debug(f"[migrate_entry] set {option} = {config_entry.options[option]}") 241 | data[option] = config_entry.options[option] 242 | 243 | if config_entry.version == "2" and SCHEMA_VERSION == "3": 244 | if data[CONF_INTEGRATION_TYPE] == "Departures": 245 | data[CONF_INTEGRATION_TYPE] = SENSOR_STANDARD 246 | logger.debug(f"[migrate_entry] migrate from Departures to {SENSOR_STANDARD}") 247 | if data[CONF_INTEGRATION_TYPE] == "Traffic Status": 248 | data[CONF_INTEGRATION_TYPE] = SENSOR_STATUS 249 | logger.debug(f"[migrate_entry] migrate from Traffic Status to {SENSOR_STATUS}") 250 | if data[CONF_INTEGRATION_TYPE] == "Vehicle Locations": 251 | data[CONF_INTEGRATION_TYPE] = SENSOR_VEHICLE_LOCATION 252 | logger.debug(f"[migrate_entry] migrate from Vehicle Locations to {SENSOR_VEHICLE_LOCATION}") 253 | if data[CONF_INTEGRATION_TYPE] == "Deviations": 254 | data[CONF_INTEGRATION_TYPE] = SENSOR_DEVIATION 255 | logger.debug(f"[migrate_entry] migrate from Deviations to {SENSOR_DEVIATION}") 256 | if data[CONF_INTEGRATION_TYPE] == "Route": 257 | data[CONF_INTEGRATION_TYPE] = SENSOR_ROUTE 258 | logger.debug(f"[migrate_entry] migrate from Route to {SENSOR_ROUTE}") 259 | 260 | try: 261 | hass.config_entries.async_update_entry(config_entry, data=data) 262 | logger.debug("[migrate_entry] Completed") 263 | except Exception as e: 264 | logger.error(f"[migrate_entry] Failed: {str(e)}") 265 | return False 266 | 267 | return True 268 | 269 | 270 | async def reload_entry(hass, entry): 271 | """Reload HASL.""" 272 | 
logger.debug(f"[reload_entry] Entering for {entry.entry_id}") 273 | 274 | try: 275 | await async_unload_entry(hass, entry) 276 | logger.debug("[reload_entry] Unload succeeded") 277 | except: 278 | logger.error("[reload_entry] Unload failed") 279 | 280 | try: 281 | await async_setup_entry(hass, entry) 282 | logger.debug("[reload_entry] Setup succeeded") 283 | except: 284 | logger.error("[reload_entry] Setup failed") 285 | 286 | logger.debug("[reload_entry] Completed") 287 | 288 | 289 | async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry): 290 | """Set up HASL entries""" 291 | 292 | logger.debug(f"[setup_entry] Entering for {entry.entry_id}") 293 | 294 | try: 295 | device_registry = dr.async_get(hass) 296 | device_registry.async_get_or_create( 297 | config_entry_id=entry.entry_id, 298 | identifiers={(DOMAIN, DEVICE_GUID)}, 299 | name=DEVICE_NAME, 300 | model=DEVICE_MODEL, 301 | sw_version=HASL_VERSION, 302 | manufacturer=DEVICE_MANUFACTURER, 303 | entry_type=DeviceEntryType.SERVICE 304 | ) 305 | logger.debug("[setup_entry] Created device") 306 | except Exception as e: 307 | logger.error(f"[setup_entry] Failed to create device: {str(e)}") 308 | return False 309 | 310 | try: 311 | await hass.config_entries.async_forward_entry_setups(entry, ["sensor", "binary_sensor"]) 312 | logger.debug("[setup_entry] Forward entry setup succeeded") 313 | except: 314 | logger.error("[setup_entry] Forward entry setup failed") 315 | return False 316 | 317 | updater = None 318 | try: 319 | updater = entry.add_update_listener(reload_entry) 320 | except: 321 | logger.error("[setup_entry] Update listener setup failed") 322 | return False 323 | 324 | try: 325 | hass.data[DOMAIN]["worker"].instances.add(entry.entry_id, updater) 326 | logger.debug("[setup_entry] Worker registration succeeded") 327 | except Exception as e: 328 | logger.error(f"[setup_entry] Worker registration failed: {str(e)}") 329 | return False 330 | 331 | logger.debug("[setup_entry] Completed") 332 | 333 | 
return True 334 | 335 | 336 | async def async_unload_entry(hass, entry): 337 | """Unload entry.""" 338 | logger.debug("[unload_entry] Entered") 339 | 340 | try: 341 | 342 | hass.async_add_job(hass.config_entries.async_forward_entry_unload(entry, "sensor")) 343 | hass.async_add_job(hass.config_entries.async_forward_entry_unload(entry, "binary_sensor")) 344 | except: 345 | logger.error("[unload_entry] Forward entry unload failed") 346 | return False 347 | 348 | try: 349 | hass.data[DOMAIN]["worker"].instances.remove(entry.entry_id) 350 | logger.debug("[unload_entry] Worker deregistration succeeded") 351 | except: 352 | logger.error("[unload_entry] Worker deregistration failed") 353 | return False 354 | 355 | logger.debug("[unload_entry] Completed") 356 | return True 357 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | All notable changes to this project will be documented in this file. 3 | 4 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), 5 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 6 | 7 | ## [3.1.3] (2024-03-06) 8 | 9 | ### Fixes 10 | Buying time: use the newer API URL until its shutdown at end of 2024-03 @systemcrash 11 | 12 | ## [3.1.2] (2024-03-06) 13 | 14 | ### Fixes 15 | Fixes spelling/grammar fixes thanks @systemcrash 16 | 17 | ## [3.1.1] (2023-09-10) 18 | 19 | ### Fixes 20 | Fixes [hasl-sensor/integration#56](https://github.com/hasl-sensor/integration/issues/56) Setup fails with Home Assistant 2023.6.0, thanks @morlic 21 | 22 | ## [3.1] (2022-09-10) 23 | 24 | This is major release adding Resrobot functionality BUT with some known upgrade issues due to some bad decisions earlier in development. 
During some cases the automatic upgrade of sensors might fail and to restore a broken sensor it must be fully removed and then recreated manually. Sorry. 25 | 26 | ### BREAKING CHANGES 27 | - During some cases the automatic upgrade of sensors might fail, to restore a broken sensor it must be fully removed and then recreated manually. Sorry. 28 | - Service find_location renamed to `sl_find_location` 29 | - Service find_trip_id renamed to `sl_find_trip_id` 30 | - Service find_trip_pos renamed to `sl_find_trip_pos` 31 | 32 | ### ADDED 33 | - API Communication for Resrobot 2.1 34 | - Departure sensor using Resrobot API 35 | - Arrival sensor using Resrobot API 36 | - Route sensor using Resrobot API 37 | - Lookup services for Resrobot locations 38 | 39 | ## [3.1.0b2] (2022-08-18) 40 | 41 | ### Fixes 42 | - Fixes [hasl-sensor/integration#43](https://github.com/hasl-sensor/integration/issues/43) Warning on deprecated 'async_get_registry' being used 43 | - Removed localized titles in en.json and sv.json due to hassfest validation failing 44 | 45 | ## [3.1.0b1] (2022-03-25) 46 | 47 | Project will change name to `Swedish Public Transit Sensor (HASL)` because Resrobot supports the whole country however for Stockholm region it is still recommended to use SL-specific APIs since they contain a lot of extra data. 
### Breaking Changes
- Service find_location renamed to `sl_find_location`
- Service find_trip_id renamed to `sl_find_trip_id`
- Service find_trip_pos renamed to `sl_find_trip_pos`

### Added
- API Communication for Resrobot 2.1
- Departure sensor using Resrobot API
- Arrival sensor using Resrobot API
- Route sensor using Resrobot API
- Lookup service for Resrobot locations

### Changed
- Updated translation for names and descriptions and strings where applicable

## [3.0.6] (2022-03-25)

### Added
- Errors are now logged to error and not just to debug so you know what's wrong when it fails
- If sensor data is empty, force refresh even if interval is not up yet

### Fixes
- Added missing translation from GUI for new wizard
- Fixes [hasl-sensor/integration#38](https://github.com/hasl-sensor/integration/issues/38) broken logging

## [3.0.5] (2022-03-17)

### Fixes
- Fixes [hasl-sensor/integration#36](https://github.com/hasl-sensor/integration/issues/36) introduced in 3.0.3
- Fixes [hasl-sensor/integration#35](https://github.com/hasl-sensor/integration/issues/35) annoying log message due to stupidity

### Changes
- Sensor will now show unavailable until sensor data is initiated
- Configuration now stored in data and not options inside of configuration object
- Implemented separate configuration schema versioning (restarting with schema version 2.
Everything earlier is to be treated as version 1) 84 | - Now configures integration in one step when adding new integration 85 | - Harmonizing version settings between slapi and core as these will never differ anymore 86 | 87 | ## [3.0.4] (2022-03-07) 88 | 89 | ### Fixes 90 | - Fixes [hasl-sensor/integration#33](https://github.com/hasl-sensor/integration/issues/33) due to dependency updates in Home Assistant 91 | 92 | ## [3.0.3] (2022-02-26) 93 | 94 | ### Fixes 95 | - Fixed broken route sensor when prognosis fails (seems to be flaky so it is removed for now) 96 | 97 | ## [3.0.2] (2022-02-17) 98 | 99 | ### Added 100 | - Added first and last leg details as attributes for route sensor 101 | - Added possibility to use longitude,latitude pairs as locations in route sensor 102 | - Starting address/location is detailed in attributes for route sensor 103 | - Swedish translation for GUI 104 | - Added stoplists to results for RP3 for route planner 105 | - Added prognosisType for each leg and stoplist in haslworker for route planner 106 | 107 | ### Fixes 108 | - Fixed broken counter for number of transfers in route sensor 109 | - Fixes [#26](https://github.com/hasl-sensor/integration/issues/26): version number fixed in hacs.json 110 | - Fixes [#29](https://github.com/hasl-sensor/integration/issues/29): filtering in departure sensor updated 111 | - Fixed broken prognosisType for route sensor (thanks @clearminds) 112 | - Fixed broken stoplist for route sensor (thanks @clearminds) 113 | 114 | ### Changes 115 | - Combining location id and longitude,latitude pars as locations in route sensor 116 | - Route sensor total time does include estimated walking time however walking is not shown as first or last leg in route sensor 117 | 118 | ## [3.0.1] (2022-01-05) 119 | 120 | ### Fixes 121 | - [#22](https://github.com/hasl-sensor/integration/issues/22) fixed broken filtering on no lines being filtered 122 | 123 | ## [3.0.0] (2022-01-03) 124 | 125 | Official first release of version 3 
branch. 126 | This also means that version 2 and older is now deprecated. 127 | 128 | ## [3.0.0-beta.4] (2021-12-28) 129 | 130 | ### Fixes 131 | - [#17](https://github.com/hasl-sensor/integration/issues/17) fixed. Logic logically logical now. Thnx @FredHags 132 | - [#18](https://github.com/hasl-sensor/integration/issues/18) fixed. Renamed 133 | - [#13](https://github.com/hasl-sensor/integration/issues/18) Removed strange errors being logged. 134 | - Fixed errors in SLAPI causing requests to fail 135 | - Fixed broken data validation on some sensor data types 136 | 137 | ### Changes 138 | - Raised the required HA version to 2021.12 139 | 140 | ## [3.0.0-beta.3] (2021-06-21) 141 | 142 | ### Changed 143 | - Replaced all bare except: 144 | - [#9](https://github.com/hasl-sensor/integration/issues/9) fixed. Implemented skeleton async_migrate_entry 145 | 146 | ## [3.0.0-beta.2] (2021-06-14) 147 | 148 | ### Fixes 149 | - [#7](https://github.com/hasl-sensor/integration/issues/7) fixed. Too many programming languages I guess... =) 150 | - [#6](https://github.com/hasl-sensor/integration/issues/6) fixed. Fault management implemented. 151 | - Fixed double type datetime.datetime vs datetime in haslworker 152 | 153 | ## [3.0.0-beta.1] (2021-06-11) 154 | 155 | Generally stuff could be really broken right now and I'm working on lots of stuff all over the code. 156 | Forked from 2.2.3 but changes from later versions are implemented as needed. 
157 | 158 | ### Changed (summarized) 159 | - Moving into the new organization on github and renaming to integration 160 | - Changed domain from hasl to hasl3 161 | - Removed dependency on external hasl-communication-library and replaced by internal slapi dependency instead 162 | - Using httpx async in slapi library 163 | - Added GUI configuration for the integration 164 | - Metadata added to the home assistant wheels repo, PR [#48](https://github.com/home-assistant/wheels-custom-integrations/pull/48) and [#57](https://github.com/home-assistant/wheels-custom-integrations/pull/57) 165 | - Icons added to the home assistant brands repo, PR [#1606](https://github.com/home-assistant/brands/pull/1606) and [#1626](https://github.com/home-assistant/brands/pull/1626) 166 | - Changed the unique naming of all entities generated to be truly unique and stay the same over time 167 | - Allow use of multiple API-keys for different sensors using multiple integrations 168 | - Enforce time based caching between all integrations to reduce wear on the API-keys 169 | - Workers to handle updates etc are now run on one minute intervals using call-backs to be friendlier on hass 170 | - Sensors are "just" retrieving data from the workers data instead of directly interfacing the APIs. 171 | - Devices are now created for each integration to be used for future automation etc 172 | - Departures entity is now providing Deviation data only if a Deviation integration is configured with the same stops/lines to decrease complexity but maintaining compatibility both with new and old architecture. 173 | - Deviation sensors are now available as separate entities/sensors if needed and are leveraged by the Departures sensors if they exist. 
174 | - A common in-memory structure for all data is now done using a worker holding data for all instances instead of writing to disk 175 | - Updated hacs.json and info.md to be updated for v3 176 | - Service for dumping the cache to disk have been implemented 177 | - Fixed lots of bugs related to data not being available yet (async issues) 178 | - Generic extensible queuing system built in 179 | - Services for location lookup and trip planning implemented 180 | - Traffic status now is one sensor per traffic type to make it simpler to display status 181 | - All sensortypes works and returns some kind of data if configured with valid data 182 | - Added dependency on jsonpickle as the built-in json serializer kind of sucks 183 | - Added version field to manifest 184 | - Added system health checks 185 | - Services now response on the event bus by hasl3_response 186 | - Services can be called via the event bus on hasl3_execute with argument cmd= and then the rest of the argument as when normally using when calling a service 187 | - Binary sensor logging and fault management implemented 188 | - Slapi and haslworker logging and fault management implemented 189 | - Sensor logging and fault management implemented 190 | - Changed fork from DSorlov to hasl-sensor 191 | 192 | ## [2.2.7] (2021-06-08) 193 | 194 | ### Changed 195 | - Workaround fix for timezone error 196 | 197 | ## [2.2.6] (2021-05-18) 198 | 199 | ### Changed 200 | - Fixed version numbers discrepancies 201 | 202 | ## [2.2.5] (2021-05-10) 203 | 204 | ### Changed 205 | - Updated manifest.json 206 | 207 | ## [2.2.4] (2020-05-23) 208 | 209 | ### Changed 210 | - Documentation updates 211 | 212 | ## [2.2.3] (2020-03-04) 213 | 214 | ### Changed 215 | - Bug fixes 216 | 217 | ## [2.2.2] (2020-03-01) 218 | 219 | ### Changed 220 | - Release with right branch 221 | 222 | ## [2.2.1] (2020-03-01) 223 | 224 | ### Changed 225 | - PR [#44](https://github.com/DSorlov/hasl-platform/pull/44) 
[#49](https://github.com/DSorlov/hasl-platform/pull/49) Fixed bug where no departures were returned if no lines specified [@Ziqqo](https://github.com/Ziqqo) [@lindell](https://github.com/lindell) 226 | - PR [#49](https://github.com/DSorlov/hasl-platform/pull/49) Minutes from expected time [@lindell](https://github.com/lindell) 227 | - Updated HACS configuration 228 | 229 | ## [2.2.0] (2019-07-18) 230 | 231 | ### BREAKING CHANGES 232 | - config entries have been changed to a true string array and should now be specified according to `lines: ['123X','124']` 233 | 234 | ### Changed 235 | - Fix [#36](https://github.com/DSorlov/hasl-platform/issues/36) platform not found 236 | - Fix [#37](https://github.com/DSorlov/hasl-platform/issues/37) lines in 2.1.3 237 | - Moved the cache file from config dir into the .storage folder 238 | - Fixed documentation error (said TI2KEY instead of TL2KEY) 239 | 240 | ### Added 241 | - Added services Platsupplslag and Reseplaneraren as services (preview, no docs yet) 242 | 243 | ## [2.1.3] (2019-07-15) 244 | 245 | ### Changed 246 | - Replaced custom updater with [HACS](https://custom-components.github.io/hacs/) 247 | 248 | ## [2.1.2] (2019-07-15) 249 | 250 | ### Changed 251 | - Replaced custom updater with [HACS](https://custom-components.github.io/hacs/) 252 | - Minor changed in documentation 253 | - Fixed bad custom_updater.json file 254 | 255 | ## [2.1.1] (2019-06-04) 256 | 257 | ### Changed 258 | - Fix [#32](https://github.com/DSorlov/hasl-platform/issues/32) error management fails 259 | - Fix [#27](https://github.com/DSorlov/hasl-platform/issues/27) implemented filtering on "lines" for departure sensors 260 | 261 | ## [2.1.0] (2019-05-21) 262 | 263 | ### BREAKING CHANGES 264 | - `comb` sensor is now `departures`, will remove `comb` in 2.5.0. Please change your config. 265 | - `tl2` sensor is now `status`, will remove `tl2` in 2.5.0. Please change your config. 
266 | 267 | ### Changed 268 | - Fix [#23](https://github.com/DSorlov/ha-sensor-sl/issues/23) timewindow not working 269 | - Fix [#24](https://github.com/DSorlov/ha-sensor-sl/issues/24) default scan_interval documentation bug 270 | - Fix [#25](https://github.com/DSorlov/ha-sensor-sl/issues/25) stupid bug introduced by DSorlov =) 271 | - PEP8 Compliance 272 | - Branched all display cards to new project [hasl-cards](https://github.com/DSorlov/hasl-cards). 273 | - Renamed repository from ha-sensor-sl to hasl-platform to conform to new naming. 274 | - Updated massive amounts of links and documentation 275 | - Many stability improvements and minor bug fixes 276 | 277 | ### Added 278 | - Implemented basic error handling as exceptions are now raised from communications library. 279 | - Implemented new sensor based on real-time train location API (EXPERIMENTAL!) 280 | 281 | ## [2.0.2] (2019-04-30) 282 | 283 | ### Changed 284 | - Fixed [#19](https://github.com/DSorlov/ha-sensor-sl/issues/19) Small changes for custom_updater 285 | 286 | ## [2.0.1] (2019-04-30) 287 | 288 | ### Changed 289 | - Fixed [#18](https://github.com/DSorlov/ha-sensor-sl/issues/18) missing indentation in sensor.py 290 | 291 | ## [2.0.0] (2019-04-30) 292 | 293 | ### BREAKING CHANGES 294 | - Changed structure in configuration to be more standardized, avoid key duplication etc 295 | - Cannot be used pre 0.92 as dependency code has moved (or at least I have not tried it) 296 | - New install location for the auto-updater (changed from folder sl to hasl) 297 | - Rename of sl-card.js to hasl-comb-card.js 298 | 299 | ### Changed 300 | - Changed naming of a few functions to make it more clean 301 | - Fixed issue #16: sync_interval not working 302 | - Fixed issue #11: wrong time shown, thanks to [@isabellaalstrom] for suggesting fix 303 | - Language is now picked from config first, then from browser, and then default sv-SE 304 | - Icon changed to mdi:bus if no deviations, otherwise mdi:bus-alert 305 | - 
Recommended install directory is now 'hasl' instead of 'sl' to align naming 306 | - Fixed the auto-updater URLs (dev branch will be off but who cares, dev should not be used in prod) 307 | - Using HASL 2.0.0 Communications Library with support for Trafikläget2 API from SL 308 | - Implemented a request minimization strategy for API-calls / using caching (haslcache.json) when using multiple sensors for same stops 309 | 310 | ### Added 311 | - Config 'property' in comb sensor to set which property that is reported as default state 312 | - Config 'always_show_time' in hasl-comb-card.js to force also showing time when less than 60 minutes to departure when 'timeleft' is set 313 | - Added __init.py__ and manifest.json to support 0.92+ version of home assistant, thanks to [@dimmanramone] 314 | - Added property deviation_count to comb sensor to show number of deviations in total 315 | - New sensor type TL2 for displaying traffic status messages for the Trafikläget2 API 316 | - Service for force clearing of cache (adds services.json, mostly for troubleshooting) 317 | 318 | ## [1.0.3] (2019-04-16) 319 | 320 | ### Changed 321 | - Missing default value for direction parameter 322 | - Integrated magic to better show time left to departure from fork [lokanx-home-assistant](https://github.com/lokanx-home-assistant/ha-sensor-sl/commit/df7de55f040a7fab5b15be176ec5d61400b1dbba) 323 | - Added support for languages (sv-SE and en-EN) in sl-card.js 324 | 325 | ## [1.0.2] (2019-04-16) 326 | 327 | ### Changed 328 | - Fix for naming of unique_id for certain cases (still not good however) 329 | - sl-card.js enhanced and styled by [@dimmanramone](https://github.com/dimmanramone)! Huge thanks!! 330 | 331 | ## [1.0.1] (2019-04-15) 332 | 333 | ### Changed 334 | - Fixed documentation about interval being changed to scan_interval this to better support the Home Assistant standard. 335 | - Fixed direction parameter that had been hard-coded for some strange reason. Blaming it on someone else. 
=) 336 | 337 | ## [1.0.0] (2019-04-12) 338 | 339 | ### Added 340 | - Added configuration for TimeWindow 341 | - Added friendly_name (and removed name) 342 | - Updated sl-card.js to support time or minutes departures 343 | - Exposed unique_id for each sensor 344 | - Added multiple properties to the sensor output 345 | 346 | ### Changed 347 | - Moved communication to external PyPi library (HASL) 348 | - Changed default repo from dev to hasl 349 | - Cleaned up the code 350 | - Using constant keywords from HomeAssistant 351 | - If update error occurs now deliver '-' as value instead of -1 352 | 353 | ### Removed 354 | - name configuration (replaced by friendly_name) 355 | 356 | ## [0.0.8] (2019-04-11) 357 | 358 | ### Changed 359 | - Moved /sensor/sl.py to /sl/sensor.py 360 | - Fixed bad formatting of custom_updater files 361 | - Fixed custom_updater instructions in readme 362 | - Fixed broken encoding issues for rendering in sl-card.js 363 | 364 | ## [0.0.7] (2018-12-13) 365 | 366 | ### Added 367 | - Rendering of deviations 368 | - Parameters to customize card 369 | 370 | ### Changed 371 | - Rendering strategy 372 | 373 | ## [0.0.6] (2018-11-16) 374 | 375 | ### Added 376 | - Added output of icons 377 | 378 | ### Changed 379 | - Bug fixes in lookup 380 | - Changed rendering in lovelace card 381 | 382 | ## [0.0.5] (2018-11-16) 383 | 384 | ### Added 385 | - Dependency for new https://www.trafiklab.se/api/sl-storningsinformation-2 386 | - Lovelace card 387 | 388 | ### Changed 389 | - Logging strings changed to indicate which API failed 390 | - User Agent String conforms to standard 391 | - Now renders the next hour of departures 392 | 393 | ### Removed 394 | - JavaScript output 395 | 396 | ## [0.0.4] (2018-09-30) 397 | 398 | ### Added 399 | - Use a binary_sensor to enable/disable API-calls 400 | - Log error code and message once in case of error at API call 401 | - Support for custom_updater 402 | 403 | ### Changed 404 | - Log error message instead of just reporting 
failure. 405 | - Changed fork from fredrikbaberg to DSorlov 406 | 407 | ## [0.0.3] (2018-09-30) 408 | 409 | ### Changed 410 | - Only log errors once 411 | 412 | ## [0.0.2] (2018-09-29) 413 | 414 | ### Changed 415 | - Log error code. 416 | - Changed fork from fuffenz to fredrikbaberg 417 | 418 | ## [0.0.1] (2018-05-08) 419 | 420 | ### Initial release 421 | - This is a great day indeed. 422 | 423 | [keep-a-changelog]: http://keepachangelog.com/en/1.0.0/ 424 | [3.1.0b2]: https://github.com/hasl-sensor/integration/compare/3.1.0b1...3.1.0b2 425 | [3.1.0b1]: https://github.com/hasl-sensor/integration/compare/3.0.6...3.1.0b1 426 | [3.0.6]: https://github.com/hasl-sensor/integration/compare/3.0.5...3.0.6 427 | [3.0.5]: https://github.com/hasl-sensor/integration/compare/3.0.4...3.0.5 428 | [3.0.4]: https://github.com/hasl-sensor/integration/compare/3.0.3...3.0.4 429 | [3.0.3]: https://github.com/hasl-sensor/integration/compare/3.0.2...3.0.3 430 | [3.0.2]: https://github.com/hasl-sensor/integration/compare/3.0.1...3.0.2 431 | [3.0.1]: https://github.com/hasl-sensor/integration/compare/3.0.0...3.0.1 432 | [3.0.0]: https://github.com/hasl-sensor/integration/compare/3.0.0-beta.4...3.0.0 433 | [3.0.0-beta.4]: https://github.com/hasl-sensor/integration/compare/3.0.0-beta.3...3.0.0-beta.4 434 | [3.0.0-beta.3]: https://github.com/hasl-sensor/integration/compare/3.0.0-beta.2...3.0.0-beta.3 435 | [3.0.0-beta.2]: https://github.com/hasl-sensor/integration/compare/3.0.0-beta.1...3.0.0-beta.2 436 | [3.0.0-beta.1]: https://github.com/hasl-sensor/integration/compare/3.0.0-beta.1...DSorlov:2.2.7 437 | [2.2.7]: https://github.com/DSorlov/hasl-platform/compare/2.2.6...2.2.7 438 | [2.2.6]: https://github.com/DSorlov/hasl-platform/compare/2.2.5...2.2.6 439 | [2.2.5]: https://github.com/DSorlov/hasl-platform/compare/2.2.4...2.2.5 440 | [2.2.4]: https://github.com/DSorlov/hasl-platform/compare/2.2.3...2.2.4 441 | [2.2.3]: https://github.com/DSorlov/hasl-platform/compare/2.2.3...2.2.2 442 | 
[2.2.2]: https://github.com/DSorlov/hasl-platform/compare/2.2.2...2.2.1 443 | [2.2.1]: https://github.com/DSorlov/hasl-platform/compare/2.2.1...2.2.0 444 | [2.2.0]: https://github.com/DSorlov/hasl-platform/compare/2.2.0...2.1.3 445 | [2.1.3]: https://github.com/DSorlov/hasl-platform/compare/2.1.3...2.1.2 446 | [2.1.2]: https://github.com/DSorlov/hasl-platform/compare/2.1.1...2.1.2 447 | [2.1.1]: https://github.com/DSorlov/hasl-platform/compare/2.1.0...2.1.1 448 | [2.1.0]: https://github.com/DSorlov/hasl-platform/compare/2.0.3...2.1.0 449 | [2.0.3]: https://github.com/DSorlov/hasl-platform/compare/2.0.2...2.0.3 450 | [2.0.2]: https://github.com/DSorlov/hasl-platform/compare/2.0.1...2.0.2 451 | [2.0.1]: https://github.com/DSorlov/hasl-platform/compare/2.0.0...2.0.1 452 | [2.0.0]: https://github.com/DSorlov/hasl-platform/compare/1.0.3...2.0.0 453 | [1.0.3]: https://github.com/DSorlov/hasl-platform/compare/1.0.2...1.0.3 454 | [1.0.2]: https://github.com/DSorlov/hasl-platform/compare/1.0.1...1.0.2 455 | [1.0.1]: https://github.com/DSorlov/hasl-platform/compare/1.0.0...1.0.1 456 | [1.0.0]: https://github.com/DSorlov/hasl-platform/compare/0.0.8...1.0.0 457 | [0.0.8]: https://github.com/DSorlov/hasl-platform/compare/0.0.7...0.0.8 458 | [0.0.7]: https://github.com/DSorlov/hasl-platform/compare/0.0.6...0.0.7 459 | [0.0.6]: https://github.com/DSorlov/hasl-platform/compare/0.0.5...0.0.6 460 | [0.0.5]: https://github.com/DSorlov/hasl-platform/compare/v0.0.4...0.0.5 461 | [0.0.4]: https://github.com/fredrikbaberg/ha-sensor-sl/compare/v0.0.3...DSorlov:v0.0.4 462 | [0.0.3]: https://github.com/fredrikbaberg/ha-sensor-sl/compare/v0.0.2...v0.0.3 463 | [0.0.2]: https://github.com/fredrikbaberg/ha-sensor-sl/compare/fredrikbaberg:v0.0.2...fuffenz:master 464 | [0.0.1]: https://github.com/fuffenz/ha-sensor-sl -------------------------------------------------------------------------------- /custom_components/hasl3/haslworker/__init__.py: 
"""Background worker for the HASL integration.

Holds the shared in-memory cache, registered-instance bookkeeping and the
asynchronous processing routines that refresh data from the SL (slapi) and
Resrobot (rrapi) APIs.
"""
import logging
import jsonpickle
import isodate
import time

from datetime import datetime
from homeassistant.util.dt import now

from custom_components.hasl3.slapi import (
    slapi_fp,
    slapi_tl2,
    slapi_ri4,
    slapi_si2,
    slapi_rp3,
)

from custom_components.hasl3.rrapi import (
    rrapi_rra,
    rrapi_rrd,
    rrapi_rrr
)


logger = logging.getLogger("custom_components.hasl3.worker")


class HASLStatus(object):
    """System status flags for the worker."""
    # True until the first startup cycle has completed.
    startup_in_progress = True
    # True while background update tasks are executing.
    running_background_tasks = False


class HASLData(object):
    """In-memory data cache shared by all integration instances."""
    tl2 = {}        # SL traffic status
    si2 = {}        # SL deviations, keyed "stop_<id>" / "line_<id>"
    ri4 = {}        # SL departures, keyed by stop id (string)
    rp3 = {}        # SL route planner trips, keyed "<source>-<destination>"
    rp3keys = {}    # API keys used for rp3 and the trips they serve
    si2keys = {}    # API keys used for si2 and the stops/lines they serve
    ri4keys = {}    # API keys used for ri4 and the stops they serve
    rrd = {}        # Resrobot departures
    rra = {}        # Resrobot arrivals
    rrr = {}        # Resrobot routes
    rrkeys = {}     # API keys used for the Resrobot APIs
    fp = {}         # SL train positions, keyed by train type

    def dump(self):
        """Return a dict snapshot of the whole cache (for debug dumps)."""
        return {
            'si2keys': self.si2keys,
            'ri4keys': self.ri4keys,
            'rrkeys': self.rrkeys,
            # FIX: rp3keys/rp3 were missing from the dump while every other
            # cache was included; added for consistency (additive change).
            'rp3keys': self.rp3keys,
            'tl2': self.tl2,
            'si2': self.si2,
            'ri4': self.ri4,
            'rp3': self.rp3,
            'fp': self.fp,
            'rrd': self.rrd,
            'rra': self.rra,
            'rrr': self.rrr
        }


class HASLInstances(object):
    """Registry of active integration instances and their update callbacks."""

    instances = {}
    instanceCount = 0

    def add(self, id, updater):
        """Register an instance with its unsubscribe callback."""
        self.instances[id] = {
            'subscriber': updater
        }
        self.instanceCount += 1

    def remove(self, id):
        """Deregister an instance, invoking its stored callback first."""
        try:
            # The stored callable is the unsubscribe function returned at
            # registration time; calling it detaches the listener.
            self.instances[id]['subscriber']()
            self.instanceCount -= 1
            del self.instances[id]
        except Exception as e:
            logger.debug(
                f"Error occurred while deregistering listener {str(e)}")

    def count(self):
        """Return the number of registered instances."""
        return self.instanceCount


class HaslWorker(object):
    """HaslWorker."""

    hass = None
    configuration = None
    status = HASLStatus()
    data = HASLData()
    instances = HASLInstances()

    @staticmethod
    def init(hass, configuration):
        """Return an initialized HaslWorker object."""
        worker = HaslWorker()
        # FIX: the arguments were previously ignored; store them so methods
        # relying on self.hass / self.configuration work as intended.
        worker.hass = hass
        worker.configuration = configuration
        return worker

    def debugdump(self, data):
        """Write *data* as JSON to a timestamped file in the HA config dir."""
        logger.debug("[debug_dump] Entered")

        try:
            timestring = time.strftime("%Y%m%d%H%M%S")
            outputfile = self.hass.config.path(f"hasl_debug_{timestring}.json")
            # Context manager guarantees the file is closed on write errors.
            with open(outputfile, "w") as jsonFile:
                jsonFile.write(jsonpickle.dumps(data, unpicklable=False))
            logger.debug("[debug_dump] Completed")
        except Exception:
            # Best effort only; a failed debug dump must not break the worker.
            logger.debug("[debug_dump] A processing error occurred")

    def getminutesdiff(self, d1, d2):
        """Return the difference d1 - d2 between two timestamp strings.

        NOTE(review): despite the name this returns *seconds*
        (total_seconds()), not minutes; callers appear to depend on that,
        so behavior is intentionally kept as-is — confirm before renaming.
        """
        d1 = datetime.strptime(d1, "%Y-%m-%d %H:%M:%S")
        d2 = datetime.strptime(d2, "%Y-%m-%d %H:%M:%S")
        diff = (d1 - d2).total_seconds()
        logger.debug(f"[get_minutes_diff] diff {diff}, d1 {d1}, d2 {d2}")
        return diff

    def checksensorstate(self, sensor, state, default=True):
        """Return True if *sensor*'s current state equals *state*.

        Returns *default* when no sensor is given or the lookup fails.
        """
        logger.debug("[check_sensor_state] Entered")
        if sensor is not None and sensor != "":
            try:
                sensor_state = self.hass.states.get(sensor)
                # FIX: was `is state` (identity comparison); state values are
                # strings and must be compared with equality.
                if sensor_state.state == state:
                    logger.debug("[check_sensor_state] Completed will return TRUE/ENABLED")
                    return True
                else:
                    logger.debug("[check_sensor_state] Completed will return FALSE/DISABLED")
                    return False
            except Exception:
                logger.debug("[check_sensor_state] An error occurred, default will be returned")
                return default
        else:
            logger.debug("[check_sensor_state] No sensor specified, will return default")
            return default

    async def assert_rp3(self, key, source, destination):
        """Ensure bookkeeping entries exist for a route-planner trip."""
        logger.debug("[assert_rp3] Entered")

        listvalue = f"{source}-{destination}"
        if key not in self.data.rp3keys:
            logger.debug("[assert_rp3] Registered key")
            self.data.rp3keys[key] = {
                "api_key": key,
                "trips": ""
            }
        else:
            logger.debug("[assert_rp3] Key already present")

        currentvalue = self.data.rp3keys[key]['trips']
        if currentvalue == "":
            logger.debug("[assert_rp3] Creating trip key")
            self.data.rp3keys[key]["trips"] = listvalue
        else:
            logger.debug("[assert_rp3] Amending to trip key")
            self.data.rp3keys[key]["trips"] = f"{currentvalue}|{listvalue}"

        if listvalue not in self.data.rp3:
            logger.debug("[assert_rp3] Creating default values")
            self.data.rp3[listvalue] = {
                # NOTE(review): "slapi-si2" looks like a copy-paste from
                # assert_si2 (expected "slapi-rp3"); value kept unchanged in
                # case any consumer depends on it — confirm and fix.
                "api_type": "slapi-si2",
                "api_lastrun": '1970-01-01 01:01:01',
                "api_result": "Pending",
                "trips": []
            }

        logger.debug("[assert_rp3] Completed")
        return

    def parseDepartureTime(self, t):
        """Convert the API's mixed departure-time formats to minutes.

        Accepts 'Nu' (now), '<n> min', or 'HH:MM'; returns minutes from now,
        or None when the value cannot be parsed.
        """
        try:
            if t == 'Nu':
                return 0
            s = t.split()
            if len(s) > 1 and s[1] == 'min':
                return int(s[0])
            s = t.split(':')
            if len(s) > 1:
                rightnow = now()
                # Renamed from `min` to avoid shadowing the builtin.
                minutes = int(s[0]) * 60 + int(s[1]) - (
                    (rightnow.hour * 60) + rightnow.minute)
                if minutes < 0:
                    # Departure is after midnight; wrap around to next day.
                    minutes = minutes + 1440
                return minutes
        except Exception:
            return None
        return None

    async def process_rp3(self):
        """Refresh route-planner data for every registered API key/trip."""
        logger.debug("[process_rp3] Entered")

        for rp3key in list(self.data.rp3keys):
            logger.debug(f"[process_rp3] Processing key {rp3key}")
            rp3data = self.data.rp3keys[rp3key]
            api = slapi_rp3(rp3key)
            # De-duplicate the pipe-separated trip list before processing.
            for tripname in set(rp3data["trips"].split('|')):
                logger.debug(f"[process_rp3] Processing trip {tripname}")
                newdata = self.data.rp3[tripname]
                positions = tripname.split('-')

                try:

                    apidata = {}

                    srcLocID = ''
                    dstLocID = ''
                    srcLocLat = ''
                    srcLocLng = ''
                    dstLocLat = ''
                    dstLocLng = ''

                    # Each position is either a location id or a
                    # "latitude,longitude" pair.
                    if "," in positions[0]:
                        srcLoc = positions[0].split(',')
                        srcLocLat = srcLoc[0]
                        srcLocLng = srcLoc[1]
                    else:
                        srcLocID = positions[0]

                    if "," in positions[1]:
                        dstLoc = positions[1].split(',')
                        dstLocLat = dstLoc[0]
                        dstLocLng = dstLoc[1]
                    else:
                        dstLocID = positions[1]

                    apidata = await api.request(srcLocID, dstLocID, srcLocLat, srcLocLng, dstLocLat, dstLocLng)
                    newdata['trips'] = []

                    # Parse every trip
                    for trip in apidata["Trip"]:
                        newtrip = {
                            'fares': [],
                            'legs': []
                        }

                        # Loop all fares and add
                        for fare in trip['TariffResult']['fareSetItem'][0]['fareItem']:
                            newfare = {}
                            newfare['name'] = fare['name']
                            newfare['desc'] = fare['desc']
                            # API price value is scaled by 100.
                            newfare['price'] = int(fare['price']) / 100
                            newtrip['fares'].append(newfare)

                        # Add legs to trips
                        for leg in trip['LegList']['Leg']:
                            newleg = {}
                            # WALK legs carry no Product information.
                            if leg["type"] == "WALK":
                                newleg['name'] = leg['name']
                                newleg['line'] = 'Walk'
                                newleg['direction'] = 'Walk'
                                newleg['category'] = 'WALK'
                            else:
                                newleg['name'] = leg['Product']['name']
                                newleg['line'] = leg['Product']['line']
                                newleg['direction'] = leg['direction']
                                newleg['category'] = leg['category']
                            newleg['from'] = leg['Origin']['name']
                            newleg['to'] = leg['Destination']['name']
                            newleg['time'] = f"{leg['Origin']['date']} {leg['Origin']['time']}"

                            if leg.get('Stops'):
                                if leg['Stops'].get('Stop', {}):
                                    newleg['stops'] = []
                                    for stop in leg.get('Stops', {}).get('Stop', {}):
                                        newleg['stops'].append(stop)

                            newtrip['legs'].append(newleg)

                        # Make some shortcuts for data
                        newtrip['first_leg'] = newtrip['legs'][0]['name']
                        newtrip['time'] = newtrip['legs'][0]['time']
                        newtrip['price'] = newtrip['fares'][0]['price']
                        newtrip['duration'] = str(isodate.parse_duration(trip['duration']))
                        newtrip['transfers'] = trip['transferCount']

                        newdata['trips'].append(newtrip)

                    # Add shortcuts to info in the first trip if it exists.
                    # First/last non-WALK leg of the first returned trip.
                    firstLegFirstTrip = next((x for x in newdata['trips'][0]['legs'] if x["category"] != "WALK"), [])
                    lastLegLastTrip = next((x for x in reversed(newdata['trips'][0]['legs']) if x["category"] != "WALK"), [])
                    newdata['transfers'] = sum(p["category"] != "WALK" for p in newdata['trips'][0]['legs']) - 1 or 0
                    newdata['price'] = newdata['trips'][0]['price'] or ''
                    newdata['time'] = newdata['trips'][0]['time'] or ''
                    newdata['duration'] = newdata['trips'][0]['duration'] or ''
                    newdata['from'] = newdata['trips'][0]['legs'][0]['from'] or ''
                    newdata['to'] = newdata['trips'][0]['legs'][-1]['to'] or ''
                    newdata['origin'] = {}
                    newdata['origin']['leg'] = firstLegFirstTrip["name"] or ''
                    newdata['origin']['line'] = firstLegFirstTrip["line"] or ''
                    newdata['origin']['direction'] = firstLegFirstTrip["direction"] or ''
                    newdata['origin']['category'] = firstLegFirstTrip["category"] or ''
                    newdata['origin']['time'] = firstLegFirstTrip["time"] or ''
                    newdata['origin']['from'] = firstLegFirstTrip["from"] or ''
                    newdata['origin']['to'] = firstLegFirstTrip["to"] or ''
                    newdata['destination'] = {}
                    newdata['destination']['leg'] = lastLegLastTrip["name"] or ''
                    newdata['destination']['line'] = lastLegLastTrip["line"] or ''
                    newdata['destination']['direction'] = lastLegLastTrip["direction"] or ''
                    newdata['destination']['category'] = lastLegLastTrip["category"] or ''
                    newdata['destination']['time'] = lastLegLastTrip["time"] or ''
                    newdata['destination']['from'] = lastLegLastTrip["from"] or ''
                    newdata['destination']['to'] = lastLegLastTrip["to"] or ''

                    newdata['attribution'] = "Stockholms Lokaltrafik"
                    newdata['last_updated'] = now().strftime('%Y-%m-%d %H:%M:%S')
                    newdata['api_result'] = "Success"
                except Exception as e:
                    logger.debug(f"[process_rp3] Error occurred: {str(e)}")
                    newdata['api_result'] = "Error"
                    newdata['api_error'] = str(e)

                newdata['api_lastrun'] = now().strftime('%Y-%m-%d %H:%M:%S')
                self.data.rp3[tripname] = newdata

                logger.debug(f"[process_rp3] Completed trip {tripname}")

            logger.debug(f"[process_rp3] Completed key {rp3key}")

        logger.debug("[process_rp3] Completed")

    async def assert_fp(self, traintype):
        """Ensure a default data entry exists for *traintype*."""
        logger.debug("[assert_fp] Entered")

        if traintype not in self.data.fp:
            logger.debug(f"[assert_fp] Registering {traintype}")
            self.data.fp[traintype] = {
                "api_type": "slapi-fp1",
                "api_lastrun": '1970-01-01 01:01:01',
                "api_result": "Pending"
            }
        else:
            logger.debug(f"[assert_fp] {traintype} already registered")

        logger.debug("[assert_fp] Completed")
        return

    async def process_fp(self, notarealarg=None):
        """Refresh train-position data for every registered train type."""
        # FIX: log tags previously said "[process_rp3]" (copy-paste error).
        logger.debug("[process_fp] Entered")

        api = slapi_fp()
        for traintype in list(self.data.fp):
            logger.debug(f"[process_fp] Processing {traintype}")

            newdata = self.data.fp[traintype]
            try:
                newdata['data'] = await api.request(traintype)
                newdata['attribution'] = "Stockholms Lokaltrafik"
                newdata['last_updated'] = now().strftime('%Y-%m-%d %H:%M:%S')
                newdata['api_result'] = "Success"
                logger.debug(f"[process_fp] Completed {traintype}")
            except Exception as e:
                newdata['api_result'] = "Error"
                newdata['api_error'] = str(e)
                logger.debug(f"[process_fp] Error occurred for {traintype}: {str(e)}")

            newdata['api_lastrun'] = now().strftime('%Y-%m-%d %H:%M:%S')
            self.data.fp[traintype] = newdata
        logger.debug("[process_fp] Completed")

    async def assert_si2_stop(self, key, stop):
        """Register a deviation subscription for a stop."""
        await self.assert_si2(key, f"stop_{stop}", "stops", stop)

    async def assert_si2_line(self, key, line):
        """Register a deviation subscription for a line."""
        await self.assert_si2(key, f"line_{line}", "lines", line)

    async def assert_si2(self, key, datakey, listkey, listvalue):
        """Ensure bookkeeping entries exist for a deviation subscription.

        *listkey* selects the per-key list ("stops" or "lines") that
        *listvalue* is appended to; *datakey* is the cache entry name.
        """
        logger.debug("[assert_si2] Entered")

        if key not in self.data.si2keys:
            logger.debug("[assert_si2] Registering key")
            self.data.si2keys[key] = {
                "api_key": key,
                "stops": "",
                "lines": ""
            }
        else:
            logger.debug("[assert_si2] Key already present")

        if self.data.si2keys[key][listkey] == "":
            logger.debug("[assert_si2] Creating trip key")
            self.data.si2keys[key][listkey] = listvalue
        else:
            logger.debug("[assert_si2] Appending to trip key")
            self.data.si2keys[key][listkey] = f"{self.data.si2keys[key][listkey]},{listvalue}"

        if datakey not in self.data.si2:
            logger.debug("[assert_si2] Creating default values")
            self.data.si2[datakey] = {
                "api_type": "slapi-si2",
                "api_lastrun": '1970-01-01 01:01:01',
                "api_result": "Pending"
            }

        logger.debug("[assert_si2] Completed")
        return

    @staticmethod
    def _format_si2_deviations(deviationdata):
        """Convert raw SI2 ResponseData entries into sorted deviation dicts."""
        deviations = [{
            'updated': value['Updated'],
            'title': value['Header'],
            'fromDate': value['FromDateTime'],
            'toDate': value['UpToDateTime'],
            'details': value['Details'],
            'sortOrder': value['SortOrder'],
        } for value in deviationdata]
        return sorted(deviations, key=lambda k: k['sortOrder'])

    async def process_si2(self, notarealarg=None):
        """Refresh deviation data for every registered stop and line."""
        logger.debug("[process_si2] Entered")

        for si2key in list(self.data.si2keys):
            logger.debug(f"[process_si2] Processing key {si2key}")
            si2data = self.data.si2keys[si2key]
            api = slapi_si2(si2key, 60)
            # De-duplicate the comma-separated stop list before processing.
            for stop in set(si2data["stops"].split(',')):
                logger.debug(f"[process_si2] Processing stop {stop}")
                newdata = self.data.si2[f"stop_{stop}"]
                # TODO: CHECK FOR FRESHNESS TO NOT KILL OFF THE KEYS

                try:
                    deviationdata = await api.request(stop, '')
                    newdata['data'] = self._format_si2_deviations(
                        deviationdata['ResponseData'])
                    newdata['attribution'] = "Stockholms Lokaltrafik"
                    newdata['last_updated'] = now().strftime('%Y-%m-%d %H:%M:%S')
                    newdata['api_result'] = "Success"
                    logger.debug(f"[process_si2] Processing stop {stop} completed")
                except Exception as e:
                    newdata['api_result'] = "Error"
                    newdata['api_error'] = str(e)
                    logger.debug(f"[process_si2] An error occurred during processing of stop {stop}")

                newdata['api_lastrun'] = now().strftime('%Y-%m-%d %H:%M:%S')
                self.data.si2[f"stop_{stop}"] = newdata
                logger.debug(
                    f"[process_si2] Completed processing of stop {stop}")

            for line in set(si2data["lines"].split(',')):
                logger.debug(f"[process_si2] Processing line {line}")
                newdata = self.data.si2[f"line_{line}"]
                # TODO: CHECK FOR FRESHNESS TO NOT KILL OFF THE KEYS

                try:
                    deviationdata = await api.request('', line)
                    newdata['data'] = self._format_si2_deviations(
                        deviationdata['ResponseData'])
                    newdata['attribution'] = "Stockholms Lokaltrafik"
                    newdata['last_updated'] = now().strftime('%Y-%m-%d %H:%M:%S')
                    newdata['api_result'] = "Success"
                    logger.debug(f"[process_si2] Processing line {line} completed")
                except Exception as e:
                    newdata['api_result'] = "Error"
                    newdata['api_error'] = str(e)
                    logger.debug(f"[process_si2] An error occurred during processing of line {line}")

                newdata['api_lastrun'] = now().strftime('%Y-%m-%d %H:%M:%S')
                self.data.si2[f"line_{line}"] = newdata
                logger.debug(f"[process_si2] Completed processing of line {line}")

            logger.debug(f"[process_si2] Completed processing key {si2key}")

        logger.debug("[process_si2] Completed")
        return

    async def assert_ri4(self, key, stop):
        """Ensure bookkeeping entries exist for a departure stop."""
        logger.debug("[assert_ri4] Entered")
        stopkey = str(stop)

        if key not in self.data.ri4keys:
            logger.debug("[assert_ri4] Registering key and stop")
            self.data.ri4keys[key] = {
                "api_key": key,
                "stops": stopkey
            }
        else:
            logger.debug("[assert_ri4] Adding stop to existing key")
            self.data.ri4keys[key]["stops"] = f"{self.data.ri4keys[key]['stops']},{stopkey}"

        # FIX: the membership test previously used `stop` (possibly an int)
        # while entries are stored under the string key, so default data
        # could be recreated on every call.
        if stopkey not in self.data.ri4:
            logger.debug("[assert_ri4] Creating default data")
            self.data.ri4[stopkey] = {
                "api_type": "slapi-ri4",
                "api_lastrun": '1970-01-01 01:01:01',
                "api_result": "Pending"
            }

        logger.debug("[assert_ri4] Completed")
        return

    async def assert_rrd(self, key, stop):
        """Ensure bookkeeping entries exist for a Resrobot departure stop."""
        logger.debug("[assert_rrd] Entered")
        stopkey = str(stop)

        if key not in self.data.rrkeys:
            logger.debug("[assert_rrd] Registering key")
            self.data.rrkeys[key] = {
                "api_key": key
            }

        if 'deps' not in self.data.rrkeys[key]:
            logger.debug("[assert_rrd] Registering deps key")
            self.data.rrkeys[key]['deps'] = f"{stopkey}"
        else:
            logger.debug("[assert_rrd] Adding stop to existing deps key")
            self.data.rrkeys[key]["deps"] = f"{self.data.rrkeys[key]['deps']},{stopkey}"

        # FIX: membership test previously used `stop` instead of the string
        # key actually used for storage (same defect as assert_ri4).
        if stopkey not in self.data.rrd:
            logger.debug("[assert_rrd] Creating default data")
            self.data.rrd[stopkey] = {
                "api_type": "rrapi-rrd",
                "api_lastrun": '1970-01-01 01:01:01',
                "api_result": "Pending"
            }

        logger.debug("[assert_rrd] Completed")
537 | return 538 | 539 | async def assert_rra(self, key, stop): 540 | logger.debug("[assert_rra] Entered") 541 | stopkey = str(stop) 542 | 543 | if key not in self.data.rrkeys: 544 | logger.debug("[assert_rra] Registering key") 545 | self.data.rrkeys[key] = { 546 | "api_key": key 547 | } 548 | 549 | if 'arrs' not in self.data.rrkeys[key]: 550 | logger.debug("[assert_rra] Registering arrs key") 551 | self.data.rrkeys[key]['arrs'] = f"{stopkey}" 552 | else: 553 | logger.debug("[assert_rra] Adding stop to existing arrs key") 554 | self.data.rrkeys[key]["arrs"] = f"{self.data.rrkeys[key]['arrs']},{stopkey}" 555 | 556 | if stop not in self.data.rra: 557 | logger.debug("[assert_rra] Creating default data") 558 | self.data.rra[stopkey] = { 559 | "api_type": "rrapi-rra", 560 | "api_lastrun": '1970-01-01 01:01:01', 561 | "api_result": "Pending" 562 | } 563 | 564 | logger.debug("[assert_rra] Completed") 565 | return 566 | 567 | async def assert_rrr(self, key, source, destination): 568 | logger.debug("[assert_rrr] Entered") 569 | 570 | listvalue = f"{source}-{destination}" 571 | if key not in self.data.rrkeys: 572 | logger.debug("[assert_rra] Registering key") 573 | self.data.rrkeys[key] = { 574 | "api_key": key 575 | } 576 | 577 | if 'trips' not in self.data.rrkeys[key]: 578 | logger.debug("[assert_rra] Registering trips key") 579 | self.data.rrkeys[key]['trips'] = "" 580 | 581 | currentvalue = self.data.rrkeys[key]['trips'] 582 | if currentvalue == "": 583 | logger.debug("[assert_rrr] Creating trip key") 584 | self.data.rrkeys[key]["trips"] = listvalue 585 | else: 586 | logger.debug("[assert_rrr] Amending to trip key") 587 | self.data.rrkeys[key]["trips"] = f"{currentvalue}|{listvalue}" 588 | 589 | if listvalue not in self.data.rrr: 590 | logger.debug("[assert_rrr] Creating default values") 591 | self.data.rrr[listvalue] = { 592 | "api_type": "rrapi-rrr", 593 | "api_lastrun": '1970-01-01 01:01:01', 594 | "api_result": "Pending", 595 | "trips": [] 596 | } 597 | 598 | 
logger.debug("[assert_rp3] Completed") 599 | return 600 | 601 | async def process_rrd(self, notarealarg=None): 602 | logger.debug("[process_rrd] Entered") 603 | 604 | iconswitcher = { 605 | 'BLT': 'mdi:bus', 606 | 'BXB': 'mdi:bus', 607 | 'ULT': 'mdi:subway-variant', 608 | 'JAX': 'mdi:train', 609 | 'JLT': 'mdi:train', 610 | 'JRE': 'mdi:train', 611 | 'JIC': 'mdi:train', 612 | 'JPT': 'mdi:train', 613 | 'JEX': 'mdi:train', 614 | 'SLT': 'mdi:tram', 615 | 'FLT': 'mdi:ferry', 616 | 'FUT': 'mdi:ferry' 617 | } 618 | 619 | for rrkey in list(self.data.rrkeys): 620 | logger.debug(f"[process_rrd] Processing key {rrkey}") 621 | rrdata = self.data.rrkeys[rrkey] 622 | api = rrapi_rrd(rrkey, 60) 623 | for stop in ','.join(set(rrdata["deps"].split(','))).split(','): 624 | logger.debug(f"[process_rrd] Processing stop {stop}") 625 | newdata = self.data.rrd[stop] 626 | # TODO: CHECK FOR FRESHNESS TO NOT KILL OFF THE KEYS 627 | 628 | try: 629 | departures = [] 630 | departuredata = await api.request(stop) 631 | departuredata = departuredata['Departure'] 632 | 633 | for (idx, value) in enumerate(departuredata): 634 | 635 | adjustedDateTime = now() 636 | adjustedDateTime = adjustedDateTime.replace(tzinfo=None) 637 | if 'rtDate' in value and 'rtTime' in value: 638 | diff = datetime.strptime(f'{value["rtDate"]} {value["rtTime"]}', '%Y-%m-%d %H:%M:%S') - adjustedDateTime 639 | expected = datetime.strptime(f'{value["rtDate"]} {value["rtTime"]}', '%Y-%m-%d %H:%M:%S') 640 | else: 641 | diff = datetime.strptime(f'{value["date"]} {value["time"]}', '%Y-%m-%d %H:%M:%S') - adjustedDateTime 642 | expected = datetime.strptime(f'{value["date"]} {value["time"]}', '%Y-%m-%d %H:%M:%S') 643 | diff = diff.total_seconds() 644 | diff = diff / 60 645 | diff = round(diff) 646 | 647 | departures.append({ 648 | 'line': value["ProductAtStop"]["displayNumber"], 649 | 'direction': value["directionFlag"], 650 | 'departure': datetime.strptime(f'{value["date"]} {value["time"]}', '%Y-%m-%d %H:%M:%S'), 651 | 
'destination': value["direction"], 652 | 'time': diff, 653 | 'operator': value["ProductAtStop"]["operator"], 654 | 'expected': expected, 655 | 'type': value["ProductAtStop"]["catOut"], 656 | 'icon': iconswitcher.get(value["ProductAtStop"]["catOut"],'mdi:train-car'), 657 | }) 658 | 659 | newdata['data'] = sorted(departures, 660 | key=lambda k: k['time']) 661 | newdata['attribution'] = "Samtrafiken Resrobot" 662 | newdata['last_updated'] = now().strftime('%Y-%m-%d %H:%M:%S') 663 | newdata['api_result'] = "Success" 664 | logger.debug(f"[process_rrd] Stop {stop} updated successfully") 665 | 666 | except Exception as e: 667 | newdata['api_result'] = "Error" 668 | newdata['api_error'] = str(e) 669 | logger.debug(f"[process_rrd] Error occurred during update {stop}") 670 | 671 | 672 | newdata['api_lastrun'] = now().strftime('%Y-%m-%d %H:%M:%S') 673 | self.data.rrd[stop] = newdata 674 | logger.debug(f"[process_rrd] Completed stop {stop}") 675 | 676 | logger.debug(f"[process_rrd] Completed key {rrkey}") 677 | 678 | logger.debug("[process_rrd] Completed") 679 | return 680 | 681 | async def process_rra(self, notarealarg=None): 682 | logger.debug("[process_rra] Entered") 683 | 684 | iconswitcher = { 685 | 'BLT': 'mdi:bus', 686 | 'BXB': 'mdi:bus', 687 | 'ULT': 'mdi:subway-variant', 688 | 'JAX': 'mdi:train', 689 | 'JLT': 'mdi:train', 690 | 'JRE': 'mdi:train', 691 | 'JIC': 'mdi:train', 692 | 'JPT': 'mdi:train', 693 | 'JEX': 'mdi:train', 694 | 'SLT': 'mdi:tram', 695 | 'FLT': 'mdi:ferry', 696 | 'FUT': 'mdi:ferry' 697 | } 698 | 699 | for rrkey in list(self.data.rrkeys): 700 | logger.debug(f"[process_rra] Processing key {rrkey}") 701 | rrdata = self.data.rrkeys[rrkey] 702 | api = rrapi_rra(rrkey, 60) 703 | for stop in ','.join(set(rrdata["arrs"].split(','))).split(','): 704 | logger.debug(f"[process_rra] Processing stop {stop}") 705 | newdata = self.data.rra[stop] 706 | # TODO: CHECK FOR FRESHNESS TO NOT KILL OFF THE KEYS 707 | 708 | try: 709 | arrivals = [] 710 | arrivaldata = await 
api.request(stop) 711 | arrivaldata = arrivaldata['Arrival'] 712 | 713 | logger.error(arrivaldata) 714 | 715 | for (idx, value) in enumerate(arrivaldata): 716 | 717 | adjustedDateTime = now() 718 | adjustedDateTime = adjustedDateTime.replace(tzinfo=None) 719 | if 'rtDate' in value and 'rtTime' in value: 720 | diff = datetime.strptime(f'{value["rtDate"]} {value["rtTime"]}', '%Y-%m-%d %H:%M:%S') - adjustedDateTime 721 | expected = datetime.strptime(f'{value["rtDate"]} {value["rtTime"]}', '%Y-%m-%d %H:%M:%S') 722 | else: 723 | diff = datetime.strptime(f'{value["date"]} {value["time"]}', '%Y-%m-%d %H:%M:%S') - adjustedDateTime 724 | expected = datetime.strptime(f'{value["date"]} {value["time"]}', '%Y-%m-%d %H:%M:%S') 725 | diff = diff.total_seconds() 726 | diff = diff / 60 727 | diff = round(diff) 728 | 729 | arrivals.append({ 730 | 'line': value["ProductAtStop"]["displayNumber"], 731 | 'arrival': datetime.strptime(f'{value["date"]} {value["time"]}', '%Y-%m-%d %H:%M:%S'), 732 | 'origin': value["origin"], 733 | 'time': diff, 734 | 'operator': value["ProductAtStop"]["operator"], 735 | 'expected': expected, 736 | 'type': value["ProductAtStop"]["catOut"], 737 | 'icon': iconswitcher.get(value["ProductAtStop"]["catOut"],'mdi:train-car'), 738 | }) 739 | 740 | newdata['data'] = sorted(arrivals, 741 | key=lambda k: k['time']) 742 | newdata['attribution'] = "Samtrafiken Resrobot" 743 | newdata['last_updated'] = now().strftime('%Y-%m-%d %H:%M:%S') 744 | newdata['api_result'] = "Success" 745 | logger.debug(f"[process_rra] Stop {stop} updated successfully") 746 | 747 | except Exception as e: 748 | newdata['api_result'] = "Error" 749 | newdata['api_error'] = str(e) 750 | logger.debug(f"[process_rra] Error occurred during update {stop}") 751 | 752 | 753 | newdata['api_lastrun'] = now().strftime('%Y-%m-%d %H:%M:%S') 754 | self.data.rra[stop] = newdata 755 | logger.debug(f"[process_rra] Completed stop {stop}") 756 | 757 | logger.debug(f"[process_rra] Completed key {rrkey}") 758 | 759 | 
logger.debug("[process_rra] Completed") 760 | return 761 | 762 | async def process_rrr(self): 763 | logger.debug("[process_rrr] Entered") 764 | 765 | for rrkey in list(self.data.rrkeys): 766 | logger.debug(f"[process_rrr] Processing key {rrkey}") 767 | rrdata = self.data.rrkeys[rrkey] 768 | api = rrapi_rrr(rrkey) 769 | for tripname in '|'.join(set(rrdata["trips"].split('|'))).split('|'): 770 | logger.debug(f"[process_rrr] Processing trip {tripname}") 771 | newdata = self.data.rrr[tripname] 772 | positions = tripname.split('-') 773 | 774 | try: 775 | 776 | apidata = {} 777 | srcLocID = positions[0] 778 | dstLocID = positions[1] 779 | 780 | apidata = await api.request(srcLocID, dstLocID) 781 | newdata['trips'] = [] 782 | 783 | #Parse every trip 784 | for trip in apidata["Trip"]: 785 | newtrip = { 786 | 'legs': [] 787 | } 788 | 789 | # Add legs to trips 790 | for leg in trip['LegList']['Leg']: 791 | newleg = {} 792 | # Walking is done by humans. 793 | # And robots. 794 | # Robots are scary. 
795 | newleg['line'] = leg['Product'][0]['line'] if leg["type"] != "WALK" else "Walk" 796 | newleg['direction'] = leg['directionFlag'] if leg["type"] != "WALK" else "Walk" 797 | newleg['category'] = leg['type'] 798 | newleg['name'] = leg['Product'][0]['name'] 799 | newleg['from'] = leg['Origin']['name'] 800 | newleg['to'] = leg['Destination']['name'] 801 | newleg['time'] = f"{leg['Origin']['date']} {leg['Origin']['time']}" 802 | 803 | if leg.get('Stops'): 804 | if leg['Stops'].get('Stop', {}): 805 | newleg['stops'] = [] 806 | for stop in leg.get('Stops', {}).get('Stop', {}): 807 | newleg['stops'].append(stop) 808 | 809 | newtrip['legs'].append(newleg) 810 | 811 | # Make some shortcuts for data 812 | newtrip['first_leg'] = newtrip['legs'][0]['name'] 813 | newtrip['time'] = newtrip['legs'][0]['time'] 814 | newtrip['duration'] = str(isodate.parse_duration(trip['duration'])) 815 | newdata['trips'].append(newtrip) 816 | 817 | # Add shortcuts to info in the first trip if it exists 818 | firstLegFirstTrip = next((x for x in newdata['trips'][0]['legs'] if x["category"] != "WALK"), []) 819 | lastLegLastTrip = next((x for x in reversed(newdata['trips'][0]['legs']) if x["category"] != "WALK"), []) 820 | newdata['transfers'] = sum(p["category"] != "WALK" for p in newdata['trips'][0]['legs']) - 1 or 0 821 | #newdata['price'] = newdata['trips'][0]['price'] or '' 822 | newdata['time'] = newdata['trips'][0]['time'] or '' 823 | newdata['duration'] = newdata['trips'][0]['duration'] or '' 824 | newdata['from'] = newdata['trips'][0]['legs'][0]['from'] or '' 825 | newdata['to'] = newdata['trips'][0]['legs'][len(newdata['trips'][0]['legs']) - 1]['to'] or '' 826 | newdata['origin'] = {} 827 | newdata['origin']['leg'] = firstLegFirstTrip["name"] or '' 828 | newdata['origin']['line'] = firstLegFirstTrip["line"] or '' 829 | newdata['origin']['direction'] = firstLegFirstTrip["direction"] or '' 830 | newdata['origin']['category'] = firstLegFirstTrip["category"] or '' 831 | 
newdata['origin']['time'] = firstLegFirstTrip["time"] or '' 832 | newdata['origin']['from'] = firstLegFirstTrip["from"] or '' 833 | newdata['origin']['to'] = firstLegFirstTrip["to"] or '' 834 | newdata['destination'] = {} 835 | newdata['destination']['leg'] = lastLegLastTrip["name"] or '' 836 | newdata['destination']['line'] = lastLegLastTrip["line"] or '' 837 | newdata['destination']['direction'] = lastLegLastTrip["direction"] or '' 838 | newdata['destination']['category'] = lastLegLastTrip["category"] or '' 839 | newdata['destination']['time'] = lastLegLastTrip["time"] or '' 840 | newdata['destination']['from'] = lastLegLastTrip["from"] or '' 841 | newdata['destination']['to'] = lastLegLastTrip["to"] or '' 842 | 843 | newdata['attribution'] = "Samtrafiken Resrobot" 844 | newdata['last_updated'] = now().strftime('%Y-%m-%d %H:%M:%S') 845 | newdata['api_result'] = "Success" 846 | except Exception as e: 847 | logger.debug(f"[process_rrr] Error occuredA: {str(e)}") 848 | newdata['api_result'] = "Error" 849 | newdata['api_error'] = str(e) 850 | 851 | newdata['api_lastrun'] = now().strftime('%Y-%m-%d %H:%M:%S') 852 | self.data.rrr[tripname] = newdata 853 | 854 | logger.debug(f"[process_rrr] Completed trip {tripname}") 855 | 856 | logger.debug(f"[process_rrr] Completed key {rrkey}") 857 | 858 | logger.debug("[process_rrr] Completed") 859 | 860 | 861 | async def process_ri4(self, notarealarg=None): 862 | logger.debug("[process_ri4] Entered") 863 | 864 | iconswitcher = { 865 | 'Buses': 'mdi:bus', 866 | 'Trams': 'mdi:tram', 867 | 'Ships': 'mdi:ferry', 868 | 'Metros': 'mdi:subway-variant', 869 | 'Trains': 'mdi:train', 870 | } 871 | 872 | for ri4key in list(self.data.ri4keys): 873 | logger.debug(f"[process_ri4] Processing key {ri4key}") 874 | ri4data = self.data.ri4keys[ri4key] 875 | api = slapi_ri4(ri4key, 60) 876 | for stop in ','.join(set(ri4data["stops"].split(','))).split(','): 877 | logger.debug(f"[process_ri4] Processing stop {stop}") 878 | newdata = 
self.data.ri4[stop] 879 | # TODO: CHECK FOR FRESHNESS TO NOT KILL OFF THE KEYS 880 | 881 | try: 882 | departures = [] 883 | departuredata = await api.request(stop) 884 | departuredata = departuredata['ResponseData'] 885 | 886 | for (i, traffictype) in enumerate(['Metros', 887 | 'Buses', 888 | 'Trains', 889 | 'Trams', 890 | 'Ships']): 891 | 892 | for (idx, value) in enumerate( 893 | departuredata[traffictype]): 894 | direction = value['JourneyDirection'] or 0 895 | displaytime = value['DisplayTime'] or '' 896 | destination = value['Destination'] or '' 897 | linenumber = value['LineNumber'] or '' 898 | expected = value['ExpectedDateTime'] or '' 899 | groupofline = value['GroupOfLine'] or '' 900 | icon = iconswitcher.get(traffictype, 901 | 'mdi:train-car') 902 | diff = self.parseDepartureTime(displaytime) 903 | departures.append({ 904 | 'line': linenumber, 905 | 'direction': direction, 906 | 'departure': displaytime, 907 | 'destination': destination, 908 | 'time': diff, 909 | 'expected': datetime.strptime( 910 | expected, '%Y-%m-%dT%H:%M:%S' 911 | ), 912 | 'type': traffictype, 913 | 'groupofline': groupofline, 914 | 'icon': icon, 915 | }) 916 | 917 | newdata['data'] = sorted(departures, 918 | key=lambda k: k['time']) 919 | newdata['attribution'] = "Stockholms Lokaltrafik" 920 | newdata['last_updated'] = now().strftime('%Y-%m-%d %H:%M:%S') 921 | newdata['api_result'] = "Success" 922 | logger.debug(f"[process_ri4] Stop {stop} updated successfully") 923 | except Exception as e: 924 | newdata['api_result'] = "Error" 925 | newdata['api_error'] = str(e) 926 | logger.debug(f"[process_ri4] Error occurred during update {stop}") 927 | 928 | newdata['api_lastrun'] = now().strftime('%Y-%m-%d %H:%M:%S') 929 | self.data.ri4[stop] = newdata 930 | logger.debug(f"[process_ri4] Completed stop {stop}") 931 | 932 | logger.debug(f"[process_ri4] Completed key {ri4key}") 933 | 934 | logger.debug("[process_ri4] Completed") 935 | return 936 | 937 | async def assert_tl2(self, key): 938 | 
logger.debug("[assert_tl2] Entered") 939 | 940 | if key not in self.data.tl2: 941 | logger.debug("[assert_tl2] Registering key") 942 | self.data.tl2[key] = { 943 | "api_type": "slapi-tl2", 944 | "api_lastrun": '1970-01-01 01:01:01', 945 | "api_result": "Pending" 946 | } 947 | else: 948 | logger.debug("[assert_tl2] Key already present") 949 | 950 | logger.debug("[assert_tl2] Completed") 951 | return 952 | 953 | async def process_tl2(self, notarealarg=None): 954 | logger.debug("[process_tl2] Entered") 955 | 956 | for tl2key in list(self.data.tl2): 957 | logger.debug(f"[process_tl2] Processing {tl2key}") 958 | 959 | newdata = self.data.tl2[tl2key] 960 | 961 | statuses = { 962 | 'EventGood': 'Good', 963 | 'EventMinor': 'Minor', 964 | 'EventMajor': 'Closed', 965 | 'EventPlanned': 'Planned', 966 | } 967 | 968 | # Icon table used for HomeAssistant. 969 | statusIcons = { 970 | 'EventGood': 'mdi:check', 971 | 'EventMinor': 'mdi:clock-alert-outline', 972 | 'EventMajor': 'mdi:close', 973 | 'EventPlanned': 'mdi:triangle-outline' 974 | } 975 | 976 | try: 977 | 978 | api = slapi_tl2(tl2key) 979 | apidata = await api.request() 980 | apidata = apidata['ResponseData']['TrafficTypes'] 981 | 982 | responselist = {} 983 | for response in apidata: 984 | statustype = ('ferry' if response['Type'] == 'fer' else response['Type']) 985 | 986 | for event in response['Events']: 987 | event['Status'] = statuses.get(event['StatusIcon']) 988 | event['StatusIcon'] = \ 989 | statusIcons.get(event['StatusIcon']) 990 | 991 | responsedata = { 992 | 'status': statuses.get(response['StatusIcon']), 993 | 'status_icon': statusIcons.get(response['StatusIcon']), 994 | 'events': response['Events'] 995 | } 996 | responselist[statustype] = responsedata 997 | 998 | # Attribution and update sensor data. 
999 | newdata['data'] = responselist 1000 | newdata['attribution'] = "Stockholms Lokaltrafik" 1001 | newdata['last_updated'] = now().strftime('%Y-%m-%d %H:%M:%S') 1002 | newdata['api_result'] = "Success" 1003 | logger.debug(f"[process_tl2] Update of {tl2key} succeeded") 1004 | except Exception as e: 1005 | newdata['api_result'] = "Error" 1006 | newdata['api_error'] = str(e) 1007 | logger.debug(f"[process_tl2] Update of {tl2key} failed") 1008 | 1009 | newdata['api_lastrun'] = now().strftime('%Y-%m-%d %H:%M:%S') 1010 | self.data.tl2[tl2key] = newdata 1011 | logger.debug(f"[process_tl2] Completed {tl2key}") 1012 | 1013 | logger.debug("[process_tl2] Completed") 1014 | return 1015 | -------------------------------------------------------------------------------- /custom_components/hasl3/sensor.py: -------------------------------------------------------------------------------- 1 | """ SL Platform Sensor """ 2 | import logging 3 | import math 4 | import datetime 5 | 6 | from homeassistant.helpers.entity import Entity 7 | from homeassistant.helpers.device_registry import DeviceEntryType 8 | from homeassistant.util.dt import now 9 | 10 | from .const import ( 11 | CONF_DESTINATION_ID, 12 | CONF_RR_KEY, 13 | CONF_SOURCE_ID, 14 | DOMAIN, 15 | HASL_VERSION, 16 | DEVICE_NAME, 17 | DEVICE_MANUFACTURER, 18 | DEVICE_MODEL, 19 | DEVICE_GUID, 20 | SENSOR_RRARR, 21 | SENSOR_RRDEP, 22 | SENSOR_RRROUTE, 23 | SENSOR_STANDARD, 24 | SENSOR_STATUS, 25 | SENSOR_VEHICLE_LOCATION, 26 | SENSOR_DEVIATION, 27 | SENSOR_ROUTE, 28 | CONF_ANALOG_SENSORS, 29 | CONF_FP_PT, 30 | CONF_FP_RB, 31 | CONF_FP_TVB, 32 | CONF_FP_SB, 33 | CONF_FP_LB, 34 | CONF_FP_SPVC, 35 | CONF_FP_TB1, 36 | CONF_FP_TB2, 37 | CONF_TL2_KEY, 38 | CONF_RI4_KEY, 39 | CONF_SI2_KEY, 40 | CONF_RP3_KEY, 41 | CONF_SITE_ID, 42 | CONF_SENSOR, 43 | CONF_LINES, 44 | CONF_INTEGRATION_TYPE, 45 | CONF_INTEGRATION_ID, 46 | CONF_DEVIATION_LINES, 47 | CONF_DEVIATION_STOPS, 48 | CONF_DEVIATION_LINE, 49 | CONF_DEVIATION_STOP, 50 | 
logger = logging.getLogger(f"custom_components.{DOMAIN}.sensors")


async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up HASL sensors from YAML platform configuration."""
    async_add_entities(await setup_hasl_sensor(hass, config))


async def async_setup_entry(hass, config_entry, async_add_devices):
    """Set up HASL sensors from a config entry."""
    async_add_devices(await setup_hasl_sensor(hass, config_entry))


async def setup_hasl_sensor(hass, config):
    """Setup sensor platform.

    Builds the sensor list for the integration type in *config*, registering
    each sensor's data requirements with the shared worker and forcing one
    initial poll per API family. Each family is isolated in its own
    try/except so one failing API cannot abort the whole platform setup.
    """
    logger.debug("[setup_hasl_sensor] Entered")

    sensors = []
    worker = hass.data[DOMAIN]["worker"]

    try:
        logger.debug("[setup_hasl_sensor] Setting up RI4 sensors...")
        if config.data[CONF_INTEGRATION_TYPE] == SENSOR_STANDARD:
            if CONF_RI4_KEY in config.data and CONF_SITE_ID in config.data:
                await worker.assert_ri4(config.data[CONF_RI4_KEY], config.data[CONF_SITE_ID])
                sensors.append(HASLDepartureSensor(hass, config, config.data[CONF_SITE_ID]))
            logger.debug("[setup_hasl_sensor] Force processing RI4 sensors")
            await worker.process_ri4()
        logger.debug("[setup_hasl_sensor] Completed setting up RI4 sensors")
    except Exception as e:
        logger.error(f"[setup_hasl_sensor] Failed to set up RI4 sensors: {str(e)}")

    try:
        logger.debug("[setup_hasl_sensor] Setting up SI2 sensors...")
        if config.data[CONF_INTEGRATION_TYPE] == SENSOR_DEVIATION:
            if CONF_SI2_KEY in config.data:
                for deviationid in set(config.data[CONF_DEVIATION_LINES].split(',')):
                    await worker.assert_si2_line(config.data[CONF_SI2_KEY], deviationid)
                    sensors.append(HASLDeviationSensor(hass, config, CONF_DEVIATION_LINE, deviationid))
                for deviationid in set(config.data[CONF_DEVIATION_STOPS].split(',')):
                    await worker.assert_si2_stop(config.data[CONF_SI2_KEY], deviationid)
                    sensors.append(HASLDeviationSensor(hass, config, CONF_DEVIATION_STOP, deviationid))
            logger.debug("[setup_hasl_sensor] Force processing SI2 sensors")
            await worker.process_si2()
        logger.debug("[setup_hasl_sensor] Completed setting up SI2 sensors")
    except Exception as e:
        logger.error(f"[setup_hasl_sensor] Failed to set up SI2 sensors: {str(e)}")

    try:
        logger.debug("[setup_hasl_sensor] Setting up RP3 sensors...")
        if config.data[CONF_INTEGRATION_TYPE] == SENSOR_ROUTE:
            if CONF_RP3_KEY in config.data:
                await worker.assert_rp3(config.data[CONF_RP3_KEY], config.data[CONF_SOURCE], config.data[CONF_DESTINATION])
                sensors.append(HASLRouteSensor(hass, config, f"{config.data[CONF_SOURCE]}-{config.data[CONF_DESTINATION]}"))
            logger.debug("[setup_hasl_sensor] Force processing RP3 sensors")
            await worker.process_rp3()
        logger.debug("[setup_hasl_sensor] Completed setting up RP3 sensors")
    except Exception as e:
        logger.error(f"[setup_hasl_sensor] Failed to set up RP3 sensors: {str(e)}")

    try:
        logger.debug("[setup_hasl_sensor] Setting up TL2 sensors...")
        if config.data[CONF_INTEGRATION_TYPE] == SENSOR_STATUS:
            if CONF_ANALOG_SENSORS in config.data:
                if CONF_TL2_KEY in config.data:
                    await worker.assert_tl2(config.data[CONF_TL2_KEY])

                    for sensortype in CONF_TRANSPORT_MODE_LIST:
                        if sensortype in config.data and config.data[sensortype]:
                            sensors.append(HASLTrafficStatusSensor(hass, config, sensortype))

            logger.debug("[setup_hasl_sensor] Force processing TL2 sensors")
            await worker.process_tl2()
        logger.debug("[setup_hasl_sensor] Completed setting up TL2 sensors")
    except Exception as e:
        logger.error(f"[setup_hasl_sensor] Failed to set up TL2 sensors: {str(e)}")

    try:
        logger.debug("[setup_hasl_sensor] Setting up FP sensors...")
        if config.data[CONF_INTEGRATION_TYPE] == SENSOR_VEHICLE_LOCATION:
            # (option flag, worker/sensor traffic type) pairs replace the
            # previous copy-pasted if-ladder.
            fp_types = [
                (CONF_FP_PT, 'PT'),
                (CONF_FP_RB, 'RB'),
                (CONF_FP_TVB, 'TVB'),
                (CONF_FP_SB, 'SB'),
                (CONF_FP_LB, 'LB'),
                (CONF_FP_SPVC, 'SpvC'),
                (CONF_FP_TB1, 'TB1'),
                (CONF_FP_TB2, 'TB2'),
                # NOTE(review): the original code gated "TB3" behind the
                # CONF_FP_TB2 option (a second, duplicated TB2 check) —
                # likely a missing CONF_FP_TB3 constant. Behavior preserved;
                # confirm against config_schema before changing.
                (CONF_FP_TB2, 'TB3'),
            ]
            for option, traffictype in fp_types:
                if option in config.data and config.data[option]:
                    await worker.assert_fp(traffictype)
                    sensors.append(HASLVehicleLocationSensor(hass, config, traffictype))
            logger.debug("[setup_hasl_sensor] Force processing FP sensors")
            await worker.process_fp()
        logger.debug("[setup_hasl_sensor] Completed setting up FP sensors")
    except Exception as e:
        logger.error(f"[setup_hasl_sensor] Failed to set up FP sensors: {str(e)}")

    try:
        logger.debug("[setup_hasl_sensor] Setting up RRD sensors...")
        if config.data[CONF_INTEGRATION_TYPE] == SENSOR_RRDEP:
            if CONF_RR_KEY in config.data and CONF_SITE_ID in config.data:
                await worker.assert_rrd(config.data[CONF_RR_KEY], config.data[CONF_SITE_ID])
                sensors.append(HASLRRDepartureSensor(hass, config, config.data[CONF_SITE_ID]))
            logger.debug("[setup_hasl_sensor] Force processing RRD sensors")
            await worker.process_rrd()
        logger.debug("[setup_hasl_sensor] Completed setting up RRD sensors")
    except Exception as e:
        logger.error(f"[setup_hasl_sensor] Failed to set up RRD sensors: {str(e)}")

    try:
        logger.debug("[setup_hasl_sensor] Setting up RRA sensors...")
        if config.data[CONF_INTEGRATION_TYPE] == SENSOR_RRARR:
            if CONF_RR_KEY in config.data and CONF_SITE_ID in config.data:
                await worker.assert_rra(config.data[CONF_RR_KEY], config.data[CONF_SITE_ID])
                sensors.append(HASLRRArrivalSensor(hass, config, config.data[CONF_SITE_ID]))
            logger.debug("[setup_hasl_sensor] Force processing RRA sensors")
            await worker.process_rra()
        logger.debug("[setup_hasl_sensor] Completed setting up RRA sensors")
    except Exception as e:
        logger.error(f"[setup_hasl_sensor] Failed to set up RRA sensors: {str(e)}")

    # BUG FIX: the try/except around the RRR section had been commented out,
    # so any exception here aborted platform setup for every sensor type —
    # inconsistent with all the other sections above.
    try:
        logger.debug("[setup_hasl_sensor] Setting up RRR sensors...")
        if config.data[CONF_INTEGRATION_TYPE] == SENSOR_RRROUTE:
            if CONF_RR_KEY in config.data:
                await worker.assert_rrr(config.data[CONF_RR_KEY], config.data[CONF_SOURCE_ID], config.data[CONF_DESTINATION_ID])
                sensors.append(HASLRRRouteSensor(hass, config, f"{config.data[CONF_SOURCE_ID]}-{config.data[CONF_DESTINATION_ID]}"))
            logger.debug("[setup_hasl_sensor] Force processing RRR sensors")
            await worker.process_rrr()
        logger.debug("[setup_hasl_sensor] Completed setting up RRR sensors")
    except Exception as e:
        logger.error(f"[setup_hasl_sensor] Failed to set up RRR sensors: {str(e)}")

    logger.debug("[setup_hasl_sensor] Completed")
    return sensors
class HASLDevice(Entity):
    """Base entity that ties every HASL sensor to the shared HASL device."""

    @property
    def device_info(self):
        """Return device registry information for the shared HASL device."""
        info = {
            "identifiers": {(DOMAIN, DEVICE_GUID)},
            "name": DEVICE_NAME,
            "manufacturer": DEVICE_MANUFACTURER,
            "model": DEVICE_MODEL,
            "sw_version": HASL_VERSION,
            "entry_type": DeviceEntryType.SERVICE,
        }
        return info
use for this sensor.""" 264 | return f"sl-route-{self._trip}-sensor-{self._config.data[CONF_INTEGRATION_ID]}" 265 | 266 | @property 267 | def name(self): 268 | """Return the name of the sensor.""" 269 | return self._name 270 | 271 | @property 272 | def state(self): 273 | """Return the state of the sensor.""" 274 | if self._sensordata == []: 275 | return 'Unknown' 276 | else: 277 | return len(self._sensordata["trips"]) 278 | 279 | @property 280 | def icon(self): 281 | """Return the icon of the sensor.""" 282 | return "mdi:train" 283 | 284 | @property 285 | def unit_of_measurement(self): 286 | """Return the unit of measurement.""" 287 | return "" 288 | 289 | @property 290 | def scan_interval(self): 291 | """Return the unique id.""" 292 | return self._scan_interval 293 | 294 | @property 295 | def available(self): 296 | """Return true if value is valid.""" 297 | return self._sensordata != [] 298 | 299 | 300 | @property 301 | def extra_state_attributes(self): 302 | 303 | val = {} 304 | 305 | if self._sensordata == []: 306 | return val 307 | 308 | if self._sensordata["api_result"] == "Success": 309 | val['api_result'] = "Success" 310 | else: 311 | val['api_result'] = self._sensordata["api_error"] 312 | 313 | # Set values of the sensor. 
314 | val['scan_interval'] = self._scan_interval 315 | val['refresh_enabled'] = self._worker.checksensorstate(self._enabled_sensor, STATE_ON) 316 | try: 317 | val['attribution'] = self._sensordata["attribution"] 318 | val['trips'] = self._sensordata["trips"] 319 | val['transfers'] = self._sensordata["transfers"] 320 | val['price'] = self._sensordata["price"] 321 | val['time'] = self._sensordata["time"] 322 | val['duration'] = self._sensordata["duration"] 323 | val['to'] = self._sensordata["to"] 324 | val['from'] = self._sensordata["from"] 325 | val['origin'] = {} 326 | val['origin']['leg'] = self._sensordata['origin']["leg"] 327 | val['origin']['line'] = self._sensordata['origin']["line"] 328 | val['origin']['direction'] = self._sensordata['origin']["direction"] 329 | val['origin']['category'] = self._sensordata['origin']["category"] 330 | val['origin']['time'] = self._sensordata['origin']["time"] 331 | val['origin']['from'] = self._sensordata['origin']["from"] 332 | val['origin']['to'] = self._sensordata['origin']["to"] 333 | val['origin']['prognosis'] = self._sensordata['origin']["prognosis"] 334 | val['destination'] = {} 335 | val['destination']['leg'] = self._sensordata['destination']["leg"] 336 | val['destination']['line'] = self._sensordata['destination']["line"] 337 | val['destination']['direction'] = self._sensordata['destination']["direction"] 338 | val['destination']['category'] = self._sensordata['destination']["category"] 339 | val['destination']['time'] = self._sensordata['destination']["time"] 340 | val['destination']['from'] = self._sensordata['destination']["from"] 341 | val['destination']['to'] = self._sensordata['destination']["to"] 342 | val['destination']['prognosis'] = self._sensordata['destination']["prognosis"] 343 | val['last_refresh'] = self._sensordata["last_updated"] 344 | val['trip_count'] = len(self._sensordata["trips"]) 345 | except: 346 | val['error'] = "NoDataYet" 347 | logger.debug(f"Data was not available for processing when 
class HASLRRRouteSensor(HASLDevice):
    """Resrobot (RRR) route sensor.

    State is the number of trips found for the configured journey; the full
    trip details are exposed through extra_state_attributes. Unlike the SL
    route sensor, Resrobot data carries no 'price' field.
    """

    def __init__(self, hass, config, trip):
        """Initialize the sensor. *trip* is the key into the worker's rrr data set."""
        self._hass = hass
        self._config = config
        self._enabled_sensor = config.data[CONF_SENSOR]
        self._trip = trip
        self._name = f"RR {self._trip} Route Sensor ({self._config.title})"
        self._sensordata = []
        self._scan_interval = self._config.data[CONF_SCAN_INTERVAL] or 300
        self._worker = hass.data[DOMAIN]["worker"]

    async def async_update(self):
        """Refresh rrr data via the worker when the refresh interval has elapsed."""
        logger.debug("[async_update] Entered")
        logger.debug(f"[async_update] Processing {self._name}")

        if self._worker.data.rrr[self._trip]["api_lastrun"]:
            if self._worker.checksensorstate(self._enabled_sensor, STATE_ON):
                if self._sensordata == [] or self._worker.getminutesdiff(now().strftime('%Y-%m-%d %H:%M:%S'), self._worker.data.rrr[self._trip]["api_lastrun"]) > self._config.data[CONF_SCAN_INTERVAL]:
                    try:
                        await self._worker.process_rrr()
                        logger.debug("[async_update] Update processed")
                    except Exception:
                        # Best-effort refresh: keep previously fetched data on failure.
                        logger.debug("[async_update] Error occurred during update", exc_info=True)
                else:
                    logger.debug("[async_update] Not due for update, skipping")

        self._sensordata = self._worker.data.rrr[self._trip]
        logger.debug("[async_update] Completed")
        return

    @property
    def unique_id(self):
        """Return a unique ID to use for this sensor."""
        return f"rr-route-{self._trip}-sensor-{self._config.data[CONF_INTEGRATION_ID]}"

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def state(self):
        """Return the number of trips found, or 'Unknown' before the first update."""
        if self._sensordata == []:
            return 'Unknown'
        return len(self._sensordata["trips"])

    @property
    def icon(self):
        """Return the icon of the sensor."""
        return "mdi:train"

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return ""

    @property
    def scan_interval(self):
        """Return the polling interval in seconds."""
        return self._scan_interval

    @property
    def available(self):
        """Return true if value is valid."""
        return self._sensordata != []

    @property
    def extra_state_attributes(self):
        """Return trip details (legs, times, origin/destination) as attributes."""
        val = {}

        if self._sensordata == []:
            return val

        if self._sensordata["api_result"] == "Success":
            val['api_result'] = "Success"
        else:
            val['api_result'] = self._sensordata["api_error"]

        # Set values of the sensor.
        val['scan_interval'] = self._scan_interval
        val['refresh_enabled'] = self._worker.checksensorstate(self._enabled_sensor, STATE_ON)
        try:
            val['attribution'] = self._sensordata["attribution"]
            val['trips'] = self._sensordata["trips"]
            val['transfers'] = self._sensordata["transfers"]
            val['time'] = self._sensordata["time"]
            val['duration'] = self._sensordata["duration"]
            val['to'] = self._sensordata["to"]
            val['from'] = self._sensordata["from"]
            val['origin'] = {}
            val['origin']['leg'] = self._sensordata['origin']["leg"]
            val['origin']['line'] = self._sensordata['origin']["line"]
            val['origin']['direction'] = self._sensordata['origin']["direction"]
            val['origin']['category'] = self._sensordata['origin']["category"]
            val['origin']['time'] = self._sensordata['origin']["time"]
            val['origin']['from'] = self._sensordata['origin']["from"]
            val['origin']['to'] = self._sensordata['origin']["to"]
            val['origin']['prognosis'] = self._sensordata['origin']["prognosis"]
            val['destination'] = {}
            val['destination']['leg'] = self._sensordata['destination']["leg"]
            val['destination']['line'] = self._sensordata['destination']["line"]
            val['destination']['direction'] = self._sensordata['destination']["direction"]
            val['destination']['category'] = self._sensordata['destination']["category"]
            val['destination']['time'] = self._sensordata['destination']["time"]
            val['destination']['from'] = self._sensordata['destination']["from"]
            val['destination']['to'] = self._sensordata['destination']["to"]
            val['destination']['prognosis'] = self._sensordata['destination']["prognosis"]
            val['last_refresh'] = self._sensordata["last_updated"]
            val['trip_count'] = len(self._sensordata["trips"])
        except Exception:
            # Attributes are filled in best-effort; flag missing data instead of raising.
            val['error'] = "NoDataYet"
            logger.debug(f"Data was not available for processing when getting attributes for sensor {self._name}")

        return val
class HASLDepartureSensor(HASLDevice):
    """SL departure (RI4) sensor for a single stop.

    State is, depending on CONF_SENSOR_PROPERTY, the minutes to the next
    departure, its clock time, the number of deviations, or the last-update
    timestamp.
    """

    def __init__(self, hass, config, siteid):
        """Initialize the sensor. *siteid* is the SL stop id."""

        # Maps the configured sensor property to its unit of measurement.
        unit_table = {
            'min': 'min',
            'time': '',
            'deviations': '',
            'updated': '',
        }

        self._hass = hass
        self._config = config
        self._lines = config.data[CONF_LINES]
        self._siteid = str(siteid)
        self._name = f"SL Departure Sensor {self._siteid} ({self._config.title})"
        self._enabled_sensor = config.data[CONF_SENSOR]
        self._sensorproperty = config.data[CONF_SENSOR_PROPERTY]
        self._direction = config.data[CONF_DIRECTION]
        self._timewindow = config.data[CONF_TIMEWINDOW]
        self._nextdeparture_minutes = '0'
        self._nextdeparture_expected = '-'
        self._lastupdate = '-'
        self._unit_of_measure = unit_table.get(self._config.data[CONF_SENSOR_PROPERTY], 'min')
        self._sensordata = None
        self._scan_interval = self._config.data[CONF_SCAN_INTERVAL] or 300
        self._worker = hass.data[DOMAIN]["worker"]

        # Normalize CONF_LINES ('' / comma string / list) into a list.
        if (self._lines == ''):
            self._lines = []
        if (not isinstance(self._lines, list)):
            self._lines = self._lines.split(',')

    async def async_update(self):
        """Refresh ri4 data via the worker when the refresh interval has elapsed."""
        logger.debug("[async_update] Entered")
        logger.debug(f"[async_update] Processing {self._name}")
        if self._worker.data.ri4[self._siteid]["api_lastrun"]:
            if self._worker.checksensorstate(self._enabled_sensor, STATE_ON):
                # `not self._sensordata` also covers the initial None (the old
                # `== []` test never matched it, so the first run was never forced).
                if not self._sensordata or self._worker.getminutesdiff(now().strftime('%Y-%m-%d %H:%M:%S'), self._worker.data.ri4[self._siteid]["api_lastrun"]) > self._config.data[CONF_SCAN_INTERVAL]:
                    try:
                        await self._worker.process_ri4()
                        logger.debug("[async_update] Update processed")
                    except Exception:
                        # Best-effort refresh: keep previously fetched data on failure.
                        logger.debug("[async_update] Error occurred during update", exc_info=True)
                else:
                    logger.debug("[async_update] Not due for update, skipping")

        self._sensordata = self._worker.data.ri4[self._siteid]

        # Merge in the deviations reported for this stop by the si2 data set.
        logger.debug("[async_update] Performing calculations")
        if f"stop_{self._siteid}" in self._worker.data.si2:
            if "data" in self._worker.data.si2[f"stop_{self._siteid}"]:
                self._sensordata["deviations"] = self._worker.data.si2[f"stop_{self._siteid}"]["data"]
            else:
                self._sensordata["deviations"] = []
        else:
            self._sensordata["deviations"] = []

        if "last_updated" in self._sensordata:
            self._last_updated = self._sensordata["last_updated"]
        else:
            self._last_updated = now().strftime('%Y-%m-%d %H:%M:%S')

        logger.debug("[async_update] Completed")
        return

    @property
    def unique_id(self):
        """Return a unique ID to use for this sensor."""
        return f"sl-stop-{self._siteid}-sensor-{self._config.data[CONF_INTEGRATION_ID]}"

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def state(self):
        """Return the state according to the configured sensor property."""
        sensorproperty = self._config.data[CONF_SENSOR_PROPERTY]

        if self._sensordata == []:
            return 'Unknown'

        # Minutes until the next departure.
        if sensorproperty == 'min':
            next_departure = self.nextDeparture()
            if not next_departure:
                return '-'

            delta = next_departure['expected'] - datetime.datetime.now()
            expected_minutes = math.floor(delta.total_seconds() / 60)
            return expected_minutes

        # If the sensor should return the time at which next departure occurs.
        if sensorproperty == 'time':
            next_departure = self.nextDeparture()
            if not next_departure:
                return '-'

            expected = next_departure['expected'].strftime('%H:%M:%S')
            return expected

        # If the sensor should return the number of deviations.
        if sensorproperty == 'deviations':
            return len(self._sensordata["deviations"])

        if sensorproperty == 'updated':
            return self._sensordata["last_updated"]

        # Fail-safe
        return '-'

    def nextDeparture(self):
        """Return the first departure still in the future, or None."""
        if not self._sensordata:
            return None

        # Local name chosen so the file-level now() helper is not shadowed.
        current_time = datetime.datetime.now()
        if "data" in self._sensordata:
            for departure in self._sensordata["data"]:
                if departure['expected'] > current_time:
                    return departure
        return None

    def filter_direction(self, departure):
        """Keep *departure* if it matches the configured direction (0 = any)."""
        if self._direction == 0:
            return True
        return departure["direction"] == self._direction

    def filter_lines(self, departure):
        """Keep *departure* if its line is in the configured line list (empty = any)."""
        if not self._lines or len(self._lines) == 0:
            return True
        return departure["line"] in self._lines

    @property
    def icon(self):
        """Return the icon of the sensor."""
        return "mdi:train"

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return self._unit_of_measure

    @property
    def scan_interval(self):
        """Return the polling interval in seconds."""
        return self._scan_interval

    @property
    def available(self):
        """Return true if value is valid."""
        return not (self._sensordata == [] or self._sensordata is None)

    @property
    def extra_state_attributes(self):
        """Return departures, deviations and next-departure details as attributes."""

        # Initialize the state attributes.
        val = {}

        if self._sensordata == [] or self._sensordata is None:
            return val

        # Format the next expected time.
        next_departure = self.nextDeparture()
        if next_departure:
            expected_time = next_departure['expected']
            delta = expected_time - datetime.datetime.now()
            expected_minutes = math.floor(delta.total_seconds() / 60)
            expected_time = expected_time.strftime('%H:%M:%S')
        else:
            expected_time = '-'
            expected_minutes = '-'

        # Set up the unit of measure.
        if self._unit_of_measure != '':
            val['unit_of_measurement'] = self._unit_of_measure

        if self._sensordata["api_result"] == "Success":
            val['api_result'] = "Ok"
        else:
            val['api_result'] = self._sensordata["api_error"]

        # Set values of the sensor.
        val['scan_interval'] = self._scan_interval
        val['refresh_enabled'] = self._worker.checksensorstate(self._enabled_sensor, STATE_ON)

        if val['api_result'] != "Ok":
            return val

        departures = self._sensordata["data"]
        departures = list(filter(self.filter_direction, departures))
        departures = list(filter(self.filter_lines, departures))

        try:
            val['attribution'] = self._sensordata["attribution"]
            val['departures'] = departures
            val['deviations'] = self._sensordata["deviations"]
            val['last_refresh'] = self._sensordata["last_updated"]
            val['next_departure_minutes'] = expected_minutes
            val['next_departure_time'] = expected_time
            val['deviation_count'] = len(self._sensordata["deviations"])
        except Exception:
            # Attributes are filled in best-effort; flag missing data instead of raising.
            val['error'] = "NoDataYet"
            logger.debug(f"Data was not available for processing when getting attributes for sensor {self._name}")

        return val
class HASLRRDepartureSensor(HASLDevice):
    """Resrobot departure (RRD) sensor for a single stop.

    State is, depending on CONF_SENSOR_PROPERTY, the minutes to the next
    departure, its clock time, or the last-update timestamp.
    """

    def __init__(self, hass, config, siteid):
        """Initialize the sensor. *siteid* is the Resrobot stop id."""

        # Maps the configured sensor property to its unit of measurement.
        unit_table = {
            'min': 'min',
            'time': '',
            'updated': '',
        }

        self._hass = hass
        self._config = config
        self._lines = config.data[CONF_LINES]
        self._siteid = str(siteid)
        self._name = f"RR Departure Sensor {self._siteid} ({self._config.title})"
        self._enabled_sensor = config.data[CONF_SENSOR]
        self._sensorproperty = config.data[CONF_SENSOR_PROPERTY]
        self._direction = config.data[CONF_DIRECTION]
        self._timewindow = config.data[CONF_TIMEWINDOW]
        self._nextdeparture_minutes = '0'
        self._nextdeparture_expected = '-'
        self._lastupdate = '-'
        self._unit_of_measure = unit_table.get(self._config.data[CONF_SENSOR_PROPERTY], 'min')
        self._sensordata = None
        self._scan_interval = self._config.data[CONF_SCAN_INTERVAL] or 300
        self._worker = hass.data[DOMAIN]["worker"]

        # Normalize CONF_LINES ('' / comma string / list) into a list.
        if (self._lines == ''):
            self._lines = []
        if (not isinstance(self._lines, list)):
            self._lines = self._lines.split(',')

    async def async_update(self):
        """Refresh rrd data via the worker when the refresh interval has elapsed."""
        logger.debug("[async_update] Entered")
        logger.debug(f"[async_update] Processing {self._name}")
        if self._worker.data.rrd[self._siteid]["api_lastrun"]:
            if self._worker.checksensorstate(self._enabled_sensor, STATE_ON):
                # `not self._sensordata` also covers the initial None (the old
                # `== []` test never matched it, so the first run was never forced).
                if not self._sensordata or self._worker.getminutesdiff(now().strftime('%Y-%m-%d %H:%M:%S'), self._worker.data.rrd[self._siteid]["api_lastrun"]) > self._config.data[CONF_SCAN_INTERVAL]:
                    try:
                        await self._worker.process_rrd()
                        logger.debug("[async_update] Update processed")
                    except Exception:
                        # Best-effort refresh: keep previously fetched data on failure.
                        logger.debug("[async_update] Error occurred during update", exc_info=True)
                else:
                    logger.debug("[async_update] Not due for update, skipping")

        self._sensordata = self._worker.data.rrd[self._siteid]

        logger.debug("[async_update] Completed")
        return

    @property
    def unique_id(self):
        """Return a unique ID to use for this sensor."""
        return f"rr-departure-{self._siteid}-sensor-{self._config.data[CONF_INTEGRATION_ID]}"

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def state(self):
        """Return the state according to the configured sensor property."""
        sensorproperty = self._config.data[CONF_SENSOR_PROPERTY]

        if self._sensordata == []:
            return 'Unknown'

        # Minutes until the next departure.
        if sensorproperty == 'min':
            next_departure = self.nextDeparture()
            if not next_departure:
                return '-'

            # Worker timestamps are naive, so strip tzinfo before subtracting.
            adjustedDateTime = now()
            adjustedDateTime = adjustedDateTime.replace(tzinfo=None)
            delta = next_departure['expected'] - adjustedDateTime
            expected_minutes = math.floor(delta.total_seconds() / 60)
            return expected_minutes

        # If the sensor should return the time at which next departure occurs.
        if sensorproperty == 'time':
            next_departure = self.nextDeparture()
            if not next_departure:
                return '-'

            expected = next_departure['expected'].strftime('%H:%M:%S')
            return expected

        if sensorproperty == 'updated':
            return self._sensordata["last_updated"]

        # Fail-safe
        return '-'

    def nextDeparture(self):
        """Return the first departure still in the future, or None."""
        if not self._sensordata:
            return None

        # Worker timestamps are naive, so strip tzinfo before comparing.
        adjustedDateTime = now()
        adjustedDateTime = adjustedDateTime.replace(tzinfo=None)
        if "data" in self._sensordata:
            for departure in self._sensordata["data"]:
                if departure['expected'] > adjustedDateTime:
                    return departure
        return None

    def filter_direction(self, departure):
        """Keep *departure* if it matches the configured direction (0 = any)."""
        if self._direction == 0:
            return True
        return departure["direction"] == self._direction

    def filter_lines(self, departure):
        """Keep *departure* if its line is in the configured line list (empty = any)."""
        if not self._lines or len(self._lines) == 0:
            return True
        return departure["line"] in self._lines

    @property
    def icon(self):
        """Return the icon of the sensor."""
        return "mdi:train"

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return self._unit_of_measure

    @property
    def scan_interval(self):
        """Return the polling interval in seconds."""
        return self._scan_interval

    @property
    def available(self):
        """Return true if value is valid."""
        return not (self._sensordata == [] or self._sensordata is None)

    @property
    def extra_state_attributes(self):
        """Return departures and next-departure details as attributes."""

        # Initialize the state attributes.
        val = {}

        if self._sensordata == [] or self._sensordata is None:
            return val

        # Format the next expected time.
        next_departure = self.nextDeparture()
        if next_departure:
            adjustedDateTime = now()
            adjustedDateTime = adjustedDateTime.replace(tzinfo=None)
            expected_time = next_departure['expected']
            delta = expected_time - adjustedDateTime
            expected_minutes = math.floor(delta.total_seconds() / 60)
            expected_time = expected_time.strftime('%H:%M:%S')
        else:
            expected_time = '-'
            expected_minutes = '-'

        # Set up the unit of measure.
        if self._unit_of_measure != '':
            val['unit_of_measurement'] = self._unit_of_measure

        if self._sensordata["api_result"] == "Success":
            val['api_result'] = "Ok"
        else:
            val['api_result'] = self._sensordata["api_error"]

        # Set values of the sensor.
        val['scan_interval'] = self._scan_interval
        val['refresh_enabled'] = self._worker.checksensorstate(self._enabled_sensor, STATE_ON)

        if val['api_result'] != "Ok":
            return val

        departures = self._sensordata["data"]
        departures = list(filter(self.filter_direction, departures))
        departures = list(filter(self.filter_lines, departures))

        try:
            val['attribution'] = self._sensordata["attribution"]
            val['departures'] = departures
            val['last_refresh'] = self._sensordata["last_updated"]
            val['next_departure_minutes'] = expected_minutes
            val['next_departure_time'] = expected_time
        except Exception:
            # Attributes are filled in best-effort; flag missing data instead of raising.
            val['error'] = "NoDataYet"
            logger.debug(f"Data was not available for processing when getting attributes for sensor {self._name}")

        return val
class HASLRRArrivalSensor(HASLDevice):
    """Resrobot arrival (RRA) sensor for a single stop.

    State is, depending on CONF_SENSOR_PROPERTY, the minutes to the next
    arrival, its clock time, its origin, or the last-update timestamp.
    """

    def __init__(self, hass, config, siteid):
        """Initialize the sensor. *siteid* is the Resrobot stop id."""

        # Maps the configured sensor property to its unit of measurement.
        unit_table = {
            'min': 'min',
            'time': '',
            'updated': '',
        }

        self._hass = hass
        self._config = config
        self._lines = config.data[CONF_LINES]
        self._siteid = str(siteid)
        self._name = f"RR Arrival Sensor {self._siteid} ({self._config.title})"
        self._enabled_sensor = config.data[CONF_SENSOR]
        self._sensorproperty = config.data[CONF_SENSOR_PROPERTY]
        self._direction = config.data[CONF_DIRECTION]
        self._timewindow = config.data[CONF_TIMEWINDOW]
        self._nextarrival_minutes = '0'
        self._nextarrival_expected = '-'
        self._lastupdate = '-'
        self._unit_of_measure = unit_table.get(self._config.data[CONF_SENSOR_PROPERTY], 'min')
        self._sensordata = None
        self._scan_interval = self._config.data[CONF_SCAN_INTERVAL] or 300
        self._worker = hass.data[DOMAIN]["worker"]

        # Normalize CONF_LINES ('' / comma string / list) into a list.
        if (self._lines == ''):
            self._lines = []
        if (not isinstance(self._lines, list)):
            self._lines = self._lines.split(',')

    async def async_update(self):
        """Refresh rra data via the worker when the refresh interval has elapsed."""
        logger.debug("[async_update] Entered")
        logger.debug(f"[async_update] Processing {self._name}")
        # Bug fix: this sensor reads the rra data set, but the gate previously
        # consulted rrd's api_lastrun (copy-paste from the departure sensor).
        if self._worker.data.rra[self._siteid]["api_lastrun"]:
            if self._worker.checksensorstate(self._enabled_sensor, STATE_ON):
                # `not self._sensordata` also covers the initial None (the old
                # `== []` test never matched it, so the first run was never forced).
                if not self._sensordata or self._worker.getminutesdiff(now().strftime('%Y-%m-%d %H:%M:%S'), self._worker.data.rra[self._siteid]["api_lastrun"]) > self._config.data[CONF_SCAN_INTERVAL]:
                    try:
                        await self._worker.process_rra()
                        logger.debug("[async_update] Update processed")
                    except Exception:
                        # Best-effort refresh: keep previously fetched data on failure.
                        logger.debug("[async_update] Error occurred during update", exc_info=True)
                else:
                    logger.debug("[async_update] Not due for update, skipping")
        self._sensordata = self._worker.data.rra[self._siteid]

        logger.debug("[async_update] Completed")
        return

    @property
    def unique_id(self):
        """Return a unique ID to use for this sensor."""
        return f"rr-arrival-{self._siteid}-sensor-{self._config.data[CONF_INTEGRATION_ID]}"

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def state(self):
        """Return the state according to the configured sensor property."""
        sensorproperty = self._config.data[CONF_SENSOR_PROPERTY]

        if self._sensordata == []:
            return 'Unknown'

        # Minutes until the next arrival.
        if sensorproperty == 'min':
            next_arrival = self.nextArrival()
            if not next_arrival:
                return '-'

            # Worker timestamps are naive, so strip tzinfo before subtracting.
            adjustedDateTime = now()
            adjustedDateTime = adjustedDateTime.replace(tzinfo=None)
            delta = next_arrival['expected'] - adjustedDateTime
            expected_minutes = math.floor(delta.total_seconds() / 60)
            return expected_minutes

        # If the sensor should return the time at which next arrival occurs.
        if sensorproperty == 'time':
            next_arrival = self.nextArrival()
            if not next_arrival:
                return '-'

            expected = next_arrival['expected'].strftime('%H:%M:%S')
            return expected

        if sensorproperty == 'origin':
            next_arrival = self.nextArrival()
            if not next_arrival:
                return '-'

            origin = next_arrival['origin']
            return origin

        if sensorproperty == 'updated':
            return self._sensordata["last_updated"]

        # Fail-safe
        return '-'

    def nextArrival(self):
        """Return the first arrival still in the future, or None."""
        if not self._sensordata:
            return None

        # Worker timestamps are naive, so strip tzinfo before comparing.
        adjustedDateTime = now()
        adjustedDateTime = adjustedDateTime.replace(tzinfo=None)
        if "data" in self._sensordata:
            for arrival in self._sensordata["data"]:
                if arrival['expected'] > adjustedDateTime:
                    return arrival
        return None

    def filter_lines(self, arrival):
        """Keep *arrival* if its line is in the configured line list (empty = any)."""
        if not self._lines or len(self._lines) == 0:
            return True
        return arrival["line"] in self._lines

    @property
    def icon(self):
        """Return the icon of the sensor."""
        return "mdi:train"

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return self._unit_of_measure

    @property
    def scan_interval(self):
        """Return the polling interval in seconds."""
        return self._scan_interval

    @property
    def available(self):
        """Return true if value is valid."""
        return not (self._sensordata == [] or self._sensordata is None)

    @property
    def extra_state_attributes(self):
        """Return arrivals and next-arrival details as attributes."""

        # Initialize the state attributes.
        val = {}

        if self._sensordata == [] or self._sensordata is None:
            return val

        # Format the next expected time.
        next_arrival = self.nextArrival()
        if next_arrival:
            adjustedDateTime = now()
            adjustedDateTime = adjustedDateTime.replace(tzinfo=None)
            expected_time = next_arrival['expected']
            delta = expected_time - adjustedDateTime
            expected_minutes = math.floor(delta.total_seconds() / 60)
            expected_time = expected_time.strftime('%H:%M:%S')
        else:
            expected_time = '-'
            expected_minutes = '-'

        # Set up the unit of measure.
        if self._unit_of_measure != '':
            val['unit_of_measurement'] = self._unit_of_measure

        if self._sensordata["api_result"] == "Success":
            val['api_result'] = "Ok"
        else:
            val['api_result'] = self._sensordata["api_error"]

        # Set values of the sensor.
        val['scan_interval'] = self._scan_interval
        val['refresh_enabled'] = self._worker.checksensorstate(self._enabled_sensor, STATE_ON)

        if val['api_result'] != "Ok":
            return val

        arrivals = self._sensordata["data"]
        arrivals = list(filter(self.filter_lines, arrivals))

        try:
            val['attribution'] = self._sensordata["attribution"]
            val['arrivals'] = arrivals
            val['last_refresh'] = self._sensordata["last_updated"]
            val['next_arrival_minutes'] = expected_minutes
            val['next_arrival_time'] = expected_time
        except Exception:
            # Attributes are filled in best-effort; flag missing data instead of raising.
            val['error'] = "NoDataYet"
            logger.debug(f"Data was not available for processing when getting attributes for sensor {self._name}")

        return val
class HASLDeviationSensor(HASLDevice):
    """SL deviation (SI2) sensor for one stop or line.

    State is the number of current deviations for the configured
    (deviationtype, deviationkey) pair.
    """

    def __init__(self, hass, config, deviationtype, deviationkey):
        """Initialize the sensor.

        *deviationtype* is 'stop' or 'line'; *deviationkey* is the stop/line id.
        Together they form the key into the worker's si2 data set.
        """
        self._config = config
        self._hass = hass
        self._deviationkey = deviationkey
        self._deviationtype = deviationtype
        self._enabled_sensor = config.data[CONF_SENSOR]
        self._name = f"SL {self._deviationtype.capitalize()} Deviation Sensor {self._deviationkey} ({self._config.title})"
        self._sensordata = []
        self._scan_interval = self._config.data[CONF_SCAN_INTERVAL] or 300
        self._worker = hass.data[DOMAIN]["worker"]

    async def async_update(self):
        """Refresh si2 data via the worker when the refresh interval has elapsed."""
        logger.debug("[async_update] Entered")
        logger.debug(f"[async_update] Processing {self._name}")
        if self._worker.data.si2[f"{self._deviationtype}_{self._deviationkey}"]["api_lastrun"]:
            if self._worker.checksensorstate(self._enabled_sensor, STATE_ON):
                if self._sensordata == [] or self._worker.getminutesdiff(now().strftime('%Y-%m-%d %H:%M:%S'), self._worker.data.si2[f"{self._deviationtype}_{self._deviationkey}"]["api_lastrun"]) > self._config.data[CONF_SCAN_INTERVAL]:
                    try:
                        await self._worker.process_si2()
                        logger.debug("[async_update] Update processed")
                    except Exception:
                        # Best-effort refresh: keep previously fetched data on failure.
                        logger.debug("[async_update] Error occurred during update", exc_info=True)
                else:
                    logger.debug("[async_update] Not due for update, skipping")

        self._sensordata = self._worker.data.si2[f"{self._deviationtype}_{self._deviationkey}"]
        logger.debug("[async_update] Completed")
        return

    @property
    def unique_id(self):
        """Return a unique ID to use for this sensor."""
        return f"sl-deviation-{self._deviationtype}-{self._deviationkey}-sensor-{self._config.data[CONF_INTEGRATION_ID]}"

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def state(self):
        """Return the number of deviations, or 'Unknown' before the first update."""
        if self._sensordata == []:
            return 'Unknown'
        if "data" in self._sensordata:
            return len(self._sensordata["data"])
        return 'Unknown'

    @property
    def icon(self):
        """Return the icon of the sensor."""
        return "mdi:train"

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return ""

    @property
    def scan_interval(self):
        """Return the polling interval in seconds."""
        return self._scan_interval

    @property
    def available(self):
        """Return true if value is valid."""
        return self._sensordata != []

    @property
    def extra_state_attributes(self):
        """Return the list of deviations as attributes."""
        val = {}

        if self._sensordata == []:
            return val

        if self._sensordata["api_result"] == "Success":
            val['api_result'] = "Ok"
        else:
            val['api_result'] = self._sensordata["api_error"]

        # Set values of the sensor.
        val['scan_interval'] = self._scan_interval
        val['refresh_enabled'] = self._worker.checksensorstate(self._enabled_sensor, STATE_ON)
        try:
            val['attribution'] = self._sensordata["attribution"]
            val['deviations'] = self._sensordata["data"]
            val['last_refresh'] = self._sensordata["last_updated"]
            val['deviation_count'] = len(self._sensordata["data"])
        except Exception:
            # Attributes are filled in best-effort; flag missing data instead of raising.
            val['error'] = "NoDataYet"
            logger.debug(f"Data was not available for processing when getting attributes for sensor {self._name}")

        return val
logger.debug("[async_update] Not due for update, skipping") 1232 | 1233 | self._sensordata = self._worker.data.fp[self._vehicletype] 1234 | logger.debug("[async_update] Completed") 1235 | return 1236 | 1237 | @property 1238 | def unique_id(self): 1239 | """Return a unique ID to use for this sensor.""" 1240 | return f"sl-fl-{self._vehicletype}-sensor-{self._config.data[CONF_INTEGRATION_ID]}" 1241 | 1242 | @property 1243 | def name(self): 1244 | """Return the name of the sensor.""" 1245 | return self._name 1246 | 1247 | @property 1248 | def state(self): 1249 | """Return the state of the sensor.""" 1250 | if self._sensordata == []: 1251 | return 'Unknown' 1252 | else: 1253 | if "data" in self._sensordata: 1254 | return len(self._sensordata["data"]) 1255 | else: 1256 | return 'Unknown' 1257 | 1258 | @property 1259 | def icon(self): 1260 | """Return the icon of the sensor.""" 1261 | return "mdi:train" 1262 | 1263 | @property 1264 | def unit_of_measurement(self): 1265 | """Return the unit of measurement.""" 1266 | return "" 1267 | 1268 | @property 1269 | def scan_interval(self): 1270 | """Return the unique id.""" 1271 | return self._scan_interval 1272 | 1273 | @property 1274 | def available(self): 1275 | """Return true if value is valid.""" 1276 | return self._sensordata != [] 1277 | 1278 | @property 1279 | def extra_state_attributes(self): 1280 | 1281 | val = {} 1282 | 1283 | if self._sensordata == []: 1284 | return val 1285 | 1286 | if self._sensordata["api_result"] == "Success": 1287 | val['api_result'] = "Success" 1288 | else: 1289 | val['api_result'] = self._sensordata["api_error"] 1290 | 1291 | # Set values of the sensor. 
1292 | val['scan_interval'] = self._scan_interval 1293 | val['refresh_enabled'] = self._worker.checksensorstate(self._enabled_sensor, STATE_ON) 1294 | try: 1295 | val['attribution'] = self._sensordata["attribution"] 1296 | val['data'] = self._sensordata["data"] 1297 | val['last_refresh'] = self._sensordata["last_updated"] 1298 | val['vehicle_count'] = len(self._sensordata["data"]) 1299 | except: 1300 | val['error'] = "NoDataYet" 1301 | logger.debug(f"Data was not available for processing when getting attributes for sensor {self._name}") 1302 | 1303 | return val 1304 | 1305 | 1306 | class HASLTrafficStatusSensor(HASLDevice): 1307 | """HASL Traffic Status Sensor class.""" 1308 | 1309 | def __init__(self, hass, config, sensortype): 1310 | """Initialize.""" 1311 | self._hass = hass 1312 | self._config = config 1313 | self._sensortype = sensortype 1314 | self._enabled_sensor = config.data[CONF_SENSOR] 1315 | self._name = f"SL {self._sensortype.capitalize()} Status Sensor ({self._config.title})" 1316 | self._sensordata = [] 1317 | self._scan_interval = self._config.data[CONF_SCAN_INTERVAL] or 300 1318 | self._worker = hass.data[DOMAIN]["worker"] 1319 | 1320 | async def async_update(self): 1321 | """Update the sensor.""" 1322 | 1323 | logger.debug("[async_update] Entered") 1324 | logger.debug(f"[async_update] Processing {self._name}") 1325 | if self._worker.data.tl2[self._config.data[CONF_TL2_KEY]]["api_lastrun"]: 1326 | if self._worker.checksensorstate(self._enabled_sensor, STATE_ON): 1327 | if self._sensordata == [] or self._worker.getminutesdiff(now().strftime('%Y-%m-%d %H:%M:%S'), self._worker.data.tl2[self._config.data[CONF_TL2_KEY]]["api_lastrun"]) > self._config.data[CONF_SCAN_INTERVAL]: 1328 | try: 1329 | await self._worker.process_tl2() 1330 | logger.debug("[async_update] Update processed") 1331 | except: 1332 | logger.debug("[async_update] Error occurred during update") 1333 | else: 1334 | logger.debug("[async_update] Not due for update, skipping") 1335 | 1336 | 
self._sensordata = self._worker.data.tl2[self._config.data[CONF_TL2_KEY]] 1337 | logger.debug("[async_update] Completed") 1338 | return 1339 | 1340 | @property 1341 | def unique_id(self): 1342 | """Return a unique ID to use for this sensor.""" 1343 | return f"sl-{self._sensortype}-sensor-{self._config.data[CONF_INTEGRATION_ID]}" 1344 | 1345 | @property 1346 | def name(self): 1347 | """Return the name of the sensor.""" 1348 | return self._name 1349 | 1350 | @property 1351 | def state(self): 1352 | """Return the state of the sensor.""" 1353 | if self._sensordata == []: 1354 | return 'Unknown' 1355 | else: 1356 | return self._sensordata["data"][self._sensortype]["status"] 1357 | 1358 | @property 1359 | def icon(self): 1360 | trafficTypeIcons = { 1361 | 'ferry': 'mdi:ferry', 1362 | 'bus': 'mdi:bus', 1363 | 'tram': 'mdi:tram', 1364 | 'train': 'mdi:train', 1365 | 'local': 'mdi:train-variant', 1366 | 'metro': 'mdi:subway-variant' 1367 | } 1368 | 1369 | return trafficTypeIcons.get(self._sensortype) 1370 | 1371 | @property 1372 | def unit_of_measurement(self): 1373 | """Return the unit of measurement.""" 1374 | return "" 1375 | 1376 | @property 1377 | def scan_interval(self): 1378 | """Return the unique id.""" 1379 | return self._scan_interval 1380 | 1381 | @property 1382 | def available(self): 1383 | """Return true if value is valid.""" 1384 | if not self._sensordata or not 'data' in self._sensordata: 1385 | return False 1386 | else: 1387 | return True 1388 | 1389 | @property 1390 | def extra_state_attributes(self): 1391 | 1392 | val = {} 1393 | 1394 | if self._sensordata == []: 1395 | return val 1396 | 1397 | if self._sensordata["api_result"] == "Success": 1398 | val['api_result'] = "Ok" 1399 | else: 1400 | val['api_result'] = self._sensordata["api_error"] 1401 | 1402 | # Set values of the sensor. 
1403 | val['scan_interval'] = self._scan_interval 1404 | val['refresh_enabled'] = self._worker.checksensorstate(self._enabled_sensor, STATE_ON) 1405 | 1406 | try: 1407 | val['attribution'] = self._sensordata["attribution"] 1408 | val['status_icon'] = self._sensordata["data"][self._sensortype]["status_icon"] 1409 | val['events'] = self._sensordata["data"][self._sensortype]["events"] 1410 | val['last_updated'] = self._sensordata["last_updated"] 1411 | except: 1412 | val['error'] = "NoDataYet" 1413 | logger.debug(f"Data was not available for processing when getting attributes for sensor {self._name}") 1414 | 1415 | return val 1416 | --------------------------------------------------------------------------------