├── .coveragerc
├── .flake8
├── .github
└── workflows
│ ├── hassfest.yaml
│ └── python-app.yml
├── .gitignore
├── NVE-HAN-SPEC.pdf
├── README.md
├── custom_components
├── __init__.py
└── ams
│ ├── __init__.py
│ ├── config_flow.py
│ ├── const.py
│ ├── manifest.json
│ ├── parsers
│ ├── __init__.py
│ ├── aidon.py
│ ├── aidon_se.py
│ ├── kaifa.py
│ ├── kaifa_se.py
│ └── kamstrup.py
│ ├── sensor.py
│ ├── strings.json
│ └── translations
│ ├── en.json
│ ├── nb.json
│ ├── nn.json
│ ├── no.json
│ └── se.json
├── hacs.json
├── logo_images
├── Arrow-002.svg
├── black-man-1.svg
├── black-man-2.svg
├── bus-gateway.svg
├── home-assistant.svg
├── logo.png
├── logo.svg
└── simple_meter_icon.svg
├── pytest.ini
├── requirements.txt
├── requirements_test.txt
└── tests
├── __init__.py
├── common_test_data.py
├── conftest.py
├── parser_test.py
├── test_config_flow.py
├── test_init.py
├── test_parser_aidon.py
├── test_parser_aidon_se.py
├── test_parser_kaifa.py
├── test_parser_kaifa_se.py
└── test_parser_kamstrup.py
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | source = custom_components/ams
3 | omit =
4 | custom_components/ams/tests/*
--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | exclude =
3 | .github
4 | __pycache__
5 | bin
6 | logo_images
7 | .gitignore
8 | README.md
9 | tests
10 | venv
--------------------------------------------------------------------------------
/.github/workflows/hassfest.yaml:
--------------------------------------------------------------------------------
1 | name: Validate with hassfest
2 |
3 | on:
4 | push:
5 | pull_request:
6 | schedule:
7 | - cron: "0 0 * * *"
8 |
9 | jobs:
10 | validate:
11 | runs-on: "ubuntu-latest"
12 | steps:
13 | - uses: "actions/checkout@v2"
14 | - uses: home-assistant/actions/hassfest@master
15 |
--------------------------------------------------------------------------------
/.github/workflows/python-app.yml:
--------------------------------------------------------------------------------
1 | name: Python package
2 |
3 | on: [push]
4 |
5 | jobs:
6 | build:
7 | runs-on: ubuntu-latest
8 | strategy:
9 | max-parallel: 4
10 | matrix:
11 | python-version: ['3.10']
12 |
13 | steps:
14 | - uses: actions/checkout@v1
15 | - name: Set up Python ${{ matrix.python-version }}
16 | uses: actions/setup-python@v2
17 | with:
18 | python-version: ${{ matrix.python-version }}
19 |
20 | - name: Install dependencies
21 | run: |
22 | python -m pip install --upgrade pip
23 | pip install -r requirements_test.txt
24 | pip install -r requirements.txt
25 | - name: Run Tests
26 | run: |
27 | python3 -m pytest --cov=custom_components/ams --cov-config=.coveragerc
28 | - name: Run flake8
29 | run: |
30 | python3 -m flake8
31 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | MANIFEST
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 | .pytest_cache/
49 |
50 | # Translations
51 | *.mo
52 | *.pot
53 |
54 | # Django stuff:
55 | *.log
56 | local_settings.py
57 | db.sqlite3
58 |
59 | # Flask stuff:
60 | instance/
61 | .webassets-cache
62 |
63 | # Scrapy stuff:
64 | .scrapy
65 |
66 | # Sphinx documentation
67 | docs/_build/
68 |
69 | # PyBuilder
70 | target/
71 |
72 | # Jupyter Notebook
73 | .ipynb_checkpoints
74 |
75 | # pyenv
76 | .python-version
77 |
78 | # celery beat schedule file
79 | celerybeat-schedule
80 |
81 | # SageMath parsed files
82 | *.sage.py
83 |
84 | # Environments
85 | .env
86 | .venv
87 | env/
88 | venv/
89 | ENV/
90 | env.bak/
91 | venv.bak/
92 |
93 | # Spyder project settings
94 | .spyderproject
95 | .spyproject
96 |
97 | # Rope project settings
98 | .ropeproject
99 |
100 | # mkdocs documentation
101 | /site
102 |
103 | # mypy
104 | .mypy_cache/
105 |
106 | # Other custom_components
107 | /nordpool
108 | /home-assistant
109 |
--------------------------------------------------------------------------------
/NVE-HAN-SPEC.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/turbokongen/hass-AMS/d399131f19c57f1a6cd53f81604f1a556b9cc83d/NVE-HAN-SPEC.pdf
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # hass-AMS - AMS Reader for Norwegian and Swedish AMS meters
2 |
3 | 
4 |
5 | [](https://github.com/hacs/integration)
6 |
7 |
8 | Custom component reading [AMS](https://no.wikipedia.org/wiki/Smart_str%C3%B8mm%C3%A5ler)
9 | through MBus adapter into [HomeAssistant](https://www.home-assistant.io/).
10 | Supports the new energy dashboard in Home-Assistant.
11 |
12 | ## Electricity meter hardware
13 |
14 | Works with the following Swedish and Norwegian meters:
15 |
16 | ### Kamstrup:
17 |
18 | - 6861111 tested by janna at homeassistant community forum
19 | - 6841121 tested by me
20 | - 6841131
21 | - 6841138A tested by NilsFA at homeassistant community forum
22 | - 6851121
23 | - 6851131
24 |
25 | ### Kaifa:
26 |
27 | Norway:
28 |
29 | - MA304H3E Thanks to @thomasja27 for testing :+1:
30 | - MA105H2E Thanks for confirming: @Futrax
31 |
32 | Sweden:
33 |
34 | - MA304H4 Thanks to @runlar for testing (Swedish version) :+1:
35 | - MA304H4D
36 |
37 | Not tested with, but should work:
38 |
39 | - MA304T4
40 | - MA304T3
41 |
42 | ### Aidon:
43 |
44 | Norway:
45 |
46 | - 6525 Thanks to @razzymoose for testing and providing patch :+1:
47 | - 6515 Thanks to @maxgyver87 for fault finding and testing :+1:
48 | - 6534 Thanks to @mariwing for testing and debugging :+1:
49 | - 6483 Thanks @PerBob81 for confirming :+1:
50 |
51 | Sweden:
52 |
53 | - 6484 Thanks to @bo1jo for testing and debugging :+1:
54 |
55 | Not tested with, but should work:
56 |
57 | Norway:
58 |
59 | - 6540
60 | - 6550
61 |
62 | Sweden:
63 |
64 | - 6479
65 |
66 | If it does not decode your data, please submit a ticket, and I will try to
67 | make a parser for your meter.
68 | If your meter type shows "unknown", please submit a ticket, and I will add
69 | your meter to the module.
70 |
71 | ## Home-assistant installation
72 |
73 | Easiest method is to install via [HACS](https://hacs.xyz/).
74 | Then setup via *Integrations* config.
75 |
76 | *Or*
77 |
78 | 1. Copy `ams` folder into your `custom_components` folder.
79 | 2. Config by YAML setup or config by integrations in Home-assistant
80 |
81 | ### YAML options
82 |
83 | ```yaml
84 | #Serial port example
85 | ams:
86 | protocol: serial # Required. The Protocol type for communications.
87 | serial_port: '/dev/ttyUSB0' # Required. The serial port used to communicate through
88 | baudrate: 2400 # Optional, defaults to '2400'
89 | parity: 'N' # Optional, defaults to 'N'
90 | meter_manufacturer: 'auto' # Optional, defaults to 'auto'
91 | ```
92 | ```yaml
93 | # TCP/IP config example
94 | ams:
95 | protocol: tcp_ip #Required. The protocol type for communications.
96 | tcp_host: 192.168.2.11 # Required. The transmitting host address.
97 | tcp_port: 8900 #Required. The transmitting host port.
98 | meter_manufacturer: 'kamstrup' # Optional, defaults to 'auto'
99 | ```
100 | *All options:*
101 | ```yaml
102 | protocol: Options are 'tcp_ip' or 'serial'. This option is required.
103 | serial_port: Input your serial port to communicate through. Required if 'serial' is selected for 'protocol'.
104 | baudrate: Input a custom baudrate. Default is 2400. This option is optional.
105 | parity: Input a custom parity option. Default is 'N'. See https://github.com/pyserial/pyserial/blob/master/serial/serialutil.py#L79
106 | tcp_host: IP address of the host of meter data. Required if 'tcp_ip' is selected.
107 | tcp_port: Port at host of meter data. Required if 'tcp_ip' is selected.
108 | meter_manufacturer: Set the meter manufacturer if 'auto' fails. This option is optional.
109 | ```
110 |
111 | For `meter_manufacturer` values the options are:
112 |
113 | ```python
114 | 'auto' # This is default if nothing is specified.
115 | 'aidon'
116 | 'aidon_se' # Swedish aidon meter RF2 modules
117 | 'kamstrup'
118 | 'kaifa'
119 | 'kaifa_se' # Swedish kaifa meters
120 | ```
121 |
122 | This will create sensors for each of the available usage data in the meter.
123 | The accumulative sensors will only be fully available after first read, and is transmitted from the meter 5 seconds past the hour.
124 | There seems to be a bug in the current Kamstrup firmware that the hour package is transmitted at xx:xx:55.
125 |
126 | ## MBus interface hardware
127 |
128 | ### *Known working modules*
129 |
130 | - [USB to MBUS slave module MBUS master slave communication debugging bus monitor TSS721](https://www.aliexpress.com/item/32894249052.html)
131 | - [TSS721 Module M-BUS To TTL with RX TX Indicator STM32 Development](https://www.aliexpress.com/item/32751482255.html?spm=2114.10010108.1000014.1.2a3189f8fCOsSM)
132 | - [TTL to MBUS, Serial to MBUS Slave Module, Instead of TSS721A, Signal Isolation](https://www.ebay.com/itm/273122508071?_trkparms=amclksrc%3DITM%26aid%3D1110006%26algo%3DHOMESPLICE.SIM%26ao%3D1%26asc%3D20201210111314%26meid%3D4ae500c42d6c43dba4fbc3c8ed06db95%26pid%3D101195%26rk%3D1%26rkt%3D12%26sd%3D274011702607%26itm%3D273122508071%26pmt%3D1%26noa%3D0%26pg%3D2047675%26algv%3DSimplAMLv9PairwiseWebMskuAspectsV202110NoVariantSeed%26brand%3DUnbranded&_trksid=p2047675.c101195.m1851&amdata=cksum%3A2731225080714ae500c42d6c43dba4fbc3c8ed06db95%7Cenc%3AAQAGAAABEIIp3bTCXFAKw6S9crk9CpCrrtOgadqFiC10NVPbN0936eprYVB4SXfDa52e1mbPLFwdBXChmaGoQQy%252BBr%252F%252B6GwMcxzo7LNbOSruw8JYl4DlyP7oF0HAfDyaq70zO7kJ5TaNT4YP7AkcEgketK%252BSXiNU63J9O%252FNGJW%252FFzn%252B66oRh%252Ff51%252FOd%252BA7ejgOOSfiKyvRVua%252FiFnXrxVBuK6ejVzVdYQ8RkXWB%252BnAu4penvUGurSKh3gR%252B7GDYFpsU4WtTtZ59xj8h1jM8sf1rmoti2QMtmA6IzzxoTGR%252FY%252BwYIH5Ouq6dr2FeuKZP7nES3I5AT%252FKa7fdaUOT27KtkO6Rie2slNPzgZrUAyecfyWTwfQBCc%7Campid%3APL_CLK%7Cclp%3A2047675)
133 |
134 | ### *Known NOT working modules*
135 |
136 | - [USB to MBUS slave module discrete component non TSS721 circuit M-BUS bus data monitor](https://www.aliexpress.com/item/32814808312.html?shortkey=iM7rQb67&addresstype=600)
137 |
138 | ## Technical documentation
139 |
140 | Latest information about OBIS for all the Norwegian meters:
141 |
142 | Latest information about Swedish standard for AMS:
143 |
144 | ## Feedback
145 |
146 | Improvements and suggestions are also welcome.
147 | Keep in mind, I am not an experienced programmer :)
148 | Enjoy
149 |
--------------------------------------------------------------------------------
/custom_components/__init__.py:
--------------------------------------------------------------------------------
1 | """Custom components module."""
2 |
--------------------------------------------------------------------------------
/custom_components/ams/__init__.py:
--------------------------------------------------------------------------------
1 | """AMS hub platform."""
2 | import asyncio
3 | import logging
4 | import threading
5 | from copy import deepcopy
6 |
7 | import homeassistant.helpers.config_validation as cv
8 | import serial
9 | import voluptuous as vol
10 |
11 | from homeassistant.config_entries import ConfigEntry, SOURCE_IMPORT
12 | from homeassistant.core import Config, HomeAssistant
13 | from homeassistant.helpers.dispatcher import async_dispatcher_send
14 | from custom_components.ams.parsers import aidon as Aidon
15 | from custom_components.ams.parsers import field_type
16 | from custom_components.ams.parsers import kaifa as Kaifa
17 | from custom_components.ams.parsers import kaifa_se as Kaifa_se
18 | from custom_components.ams.parsers import kamstrup as Kamstrup
19 | from custom_components.ams.parsers import aidon_se as Aidon_se
20 | from custom_components.ams.const import (
21 | AMS_DEVICES,
22 | AIDON_METER_SEQ,
23 | AIDON_SE_METER_SEQ_1PH,
24 | AIDON_SE_METER_SEQ_3PH,
25 | CONF_BAUDRATE,
26 | CONF_METER_MANUFACTURER,
27 | CONF_OSS_BRIKKEN,
28 | CONF_PARITY,
29 | CONF_PROTOCOL,
30 | CONF_SERIAL_PORT,
31 | CONF_TCP_HOST,
32 | CONF_TCP_PORT,
33 | DEFAULT_BAUDRATE,
34 | DEFAULT_METER_MANUFACTURER,
35 | DEFAULT_OSS_BRIKKEN,
36 | DEFAULT_PARITY,
37 | DEFAULT_SERIAL_PORT,
38 | DEFAULT_TIMEOUT,
39 | DOMAIN,
40 | FRAME_FLAG,
41 | HAN_METER_MANUFACTURER,
42 | HAN_METER_SERIAL,
43 | HAN_METER_TYPE,
44 | KAIFA_SE_METER_SEQ,
45 | KAMSTRUP_METER_SEQ,
46 | NETWORK,
47 | SENSOR_ATTR,
48 | SERIAL,
49 | SIGNAL_NEW_AMS_SENSOR,
50 | SIGNAL_UPDATE_AMS,
51 | )
52 |
_LOGGER = logging.getLogger(__name__)

# Voluptuous fragments for the two supported transports (serial / TCP).
SERIAL_SCHEMA = {vol.Required(CONF_SERIAL_PORT, default=DEFAULT_SERIAL_PORT)}
NETWORK_SCHEMA = {vol.Required(CONF_TCP_HOST), vol.Required(CONF_TCP_PORT)}
PROTOCOL_SCHEMA = {
    vol.Required(SERIAL): SERIAL_SCHEMA,
    vol.Required(NETWORK): NETWORK_SCHEMA,
}
# YAML schema for the ``ams:`` domain; extra keys are allowed so unrelated
# top-level config does not fail validation.
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                vol.Required(CONF_PROTOCOL, default=SERIAL): vol.In(
                    [NETWORK, SERIAL]),
                vol.Optional(CONF_TCP_HOST): str,
                vol.Optional(CONF_TCP_PORT): vol.All(
                    vol.Coerce(int), vol.Range(0, 65535)
                ),
                vol.Optional(CONF_SERIAL_PORT): str,
                vol.Optional(CONF_PARITY, default=DEFAULT_PARITY): cv.string,
                vol.Optional(CONF_BAUDRATE, default=DEFAULT_BAUDRATE): vol.All(
                    vol.Coerce(int), vol.Range(0, 256000)
                ),
                vol.Optional(
                    CONF_METER_MANUFACTURER, default=DEFAULT_METER_MANUFACTURER
                ): cv.string,
                # Accept 0/1 or true/false and normalise to bool.
                vol.Optional(
                    CONF_OSS_BRIKKEN, default=DEFAULT_OSS_BRIKKEN
                ): vol.All(vol.Any(int, bool), vol.Coerce(bool))
            }
        )
    },
    extra=vol.ALLOW_EXTRA,
)
87 |
88 |
def _setup(hass, config):
    """Create the AmsHub singleton and register it in ``hass.data``.

    A no-op when a hub has already been registered for DOMAIN, so YAML
    setup and config-entry setup cannot create two readers.
    """
    if DOMAIN in hass.data:
        return
    hass.data[DOMAIN] = AmsHub(hass, config)
95 |
async def async_setup(hass: HomeAssistant, config: Config):
    """Set up the AMS integration from YAML configuration.

    Returns True in every case; without a YAML section the integration is
    configured purely through config entries.
    """
    conf = config.get(DOMAIN)
    if conf is None:
        _LOGGER.info("No YAML config available, using config_entries")
        return True
    _setup(hass, conf)
    if not hass.config_entries.async_entries(DOMAIN):
        # Import the YAML settings into a config entry exactly once.
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                DOMAIN, context={"source": SOURCE_IMPORT}, data=conf
            )
        )
    return True
109 |
110 |
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Set up AMS from a config entry (UI flow or YAML import)."""
    # The hub owns the connection; the sensor platform exposes its data.
    _setup(hass, entry.data)
    await hass.config_entries.async_forward_entry_setup(entry, "sensor")
    return True
116 |
117 |
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Unload the AMS config entry by tearing down its sensor platform."""
    await hass.config_entries.async_forward_entry_unload(entry, "sensor")
    return True
122 |
123 |
async def async_remove_entry(hass, entry):  # pylint: disable=unused-argument
    """Handle removal of an entry.

    Stops the blocking serial reader in an executor thread so the event
    loop is never blocked by ``Thread.join``.
    """
    await hass.async_add_executor_job(hass.data[DOMAIN].stop_serial_read)
    return True
128 |
129 |
class AmsHub:
    """AmsHub wrapper for all sensors.

    Owns the pyserial connection (a real serial port or a ``socket://``
    URL), runs the background reader thread, parses incoming HDLC frames
    with the manufacturer-specific parser and fans the results out to the
    sensor platform via dispatcher signals.
    """

    def __init__(self, hass, entry):
        """Initialize the AMS hub and start the background reader thread.

        ``entry`` is a mapping (YAML config or ``ConfigEntry.data``)
        holding the CONF_* keys from const.py.
        """
        _LOGGER.debug("config entry = %s", entry)
        self._hass = hass
        self.meter_manufacturer = entry.get(CONF_METER_MANUFACTURER)
        self.sensor_data = {}
        self._attrs = {}
        self._running = True
        # True when reading from an OSS-brikken, whose frames lack the
        # closing FRAME_FLAG (see read_packet).
        self.oss = None
        if entry.get(CONF_PROTOCOL) == SERIAL:
            port = entry.get(CONF_SERIAL_PORT)
            self.oss = entry.get(CONF_OSS_BRIKKEN)

            _LOGGER.debug("Connecting to HAN using serialport %s", port)
            try:
                self._ser = serial.serial_for_url(
                    port,
                    # Fall back to defaults like the network branch does,
                    # instead of passing None when the key is absent.
                    baudrate=entry.get(CONF_BAUDRATE, DEFAULT_BAUDRATE),
                    parity=entry.get(CONF_PARITY, DEFAULT_PARITY),
                    stopbits=serial.STOPBITS_ONE,
                    bytesize=serial.EIGHTBITS,
                    timeout=DEFAULT_TIMEOUT,
                )
            except serial.serialutil.SerialException as ex:
                # Fix: placeholder was "%", which broke the log call and
                # swallowed the exception text.
                _LOGGER.warning("Serial error: %s", ex)
        if entry.get(CONF_PROTOCOL) == NETWORK:
            port = f"socket://{entry.get(CONF_TCP_HOST)}" \
                   f":{entry.get(CONF_TCP_PORT)}"
            _LOGGER.debug("Connecting to HAN using TCP/IP %s", port)
            try:
                self._ser = serial.serial_for_url(
                    port,
                    baudrate=entry.get(CONF_BAUDRATE, DEFAULT_BAUDRATE),
                    parity=entry.get(CONF_PARITY, DEFAULT_PARITY),
                    stopbits=serial.STOPBITS_ONE,
                    bytesize=serial.EIGHTBITS,
                    timeout=DEFAULT_TIMEOUT,
                )
            except serial.serialutil.SerialException as ex:
                _LOGGER.warning("Serial error: %s", ex)
        self.connection = threading.Thread(target=self.connect, daemon=True)
        self.connection.start()
        _LOGGER.debug("Finish init of AMS")

    def stop_serial_read(self):
        """Stop the reader thread and close the serial resource."""
        _LOGGER.debug("stop_serial_read")
        self._running = False
        self.connection.join()
        self._ser.close()

    def read_packet(self):
        """Read raw data for one packet from the serial port.

        Returns the packet as a list of ints. Loops until a complete frame
        arrives or the hub is stopped (then returns None implicitly).
        """
        byte_counter = 0
        bytelist = []
        frame_started = False
        packet_size = -1
        while self._running:
            buf = self._ser.read()

            if buf:
                if buf == FRAME_FLAG and not frame_started:
                    # Purge data until FRAME_FLAG is received.
                    frame_started = True
                    byte_counter = 0
                    bytelist = []
                    _LOGGER.debug("Start of package")
                if frame_started:
                    # Accumulate the frame byte by byte.
                    bytelist.extend(buf)
                    byte_counter = byte_counter + 1
                    if byte_counter == 3:
                        # Frame length lives in the low bits of bytes 2-3;
                        # add 2 for the opening and closing frame flags.
                        packet_size = ((bytelist[1] & 0x0F)
                                       << 8 | bytelist[2]) + 2
                    if byte_counter == packet_size:
                        if buf == FRAME_FLAG:
                            # Valid packet: last byte is FRAME_FLAG.
                            _LOGGER.debug("Package complete")
                            return bytelist
                        if self.oss:
                            # Special for OSS brikken: frames end without
                            # the closing FRAME_FLAG.
                            return bytelist
                        # Not a valid packet. Flush what we have built
                        # so far and wait for the next frame flag.
                        _LOGGER.debug(
                            "Not a valid packet. Start over "
                            "again. byte_counter=%s, "
                            "frame_started=%s, "
                            "packet_size=%s, DUMP: %s",
                            byte_counter,
                            frame_started,
                            packet_size,
                            bytelist,
                        )
                        bytelist = []
                        byte_counter = 0
                        frame_started = False
                        packet_size = -1
            else:
                # Read timed out mid-frame: discard the partial packet.
                if frame_started:
                    _LOGGER.debug(
                        "Timeout waiting for end of packet. Flush "
                        " current packet. byte_counter=%s, "
                        "frame_started=%s, package_size=%s, "
                        "DUMP: %s",
                        byte_counter,
                        frame_started,
                        packet_size,
                        bytelist,
                    )
                    frame_started = False
                    byte_counter = 0
                    bytelist = []

    @property
    def meter_serial(self):
        """The electrical meter's serial number."""
        return self._attrs[HAN_METER_SERIAL]

    @property
    def meter_type(self):
        """The electrical meter's type."""
        return self._attrs[HAN_METER_TYPE]

    def connect(self):  # pylint: disable=too-many-branches
        """Read and parse packets from the port until stopped."""
        parser = None
        # The package used for autodetection is fed straight to the parser
        # afterwards; otherwise users would get unknown state class None
        # for energy sensors at startup.
        detect_pkg = None
        if self.meter_manufacturer == "auto":
            while parser is None and self._running is True:
                _LOGGER.info("Autodetecting meter manufacturer")
                detect_pkg = self.read_packet()
                self.meter_manufacturer = self._find_parser(detect_pkg)
                parser = self.meter_manufacturer

        swedish = None
        if self.meter_manufacturer == "aidon":
            parser = Aidon
        elif self.meter_manufacturer == "aidon_se":
            parser = Aidon_se
        elif self.meter_manufacturer == "kaifa":
            # MA304H4D is a Swedish variant handled by the Kaifa parser
            # with the ``swedish`` flag set.
            parser = Kaifa
            if detect_pkg and field_type(
                    fields=detect_pkg[62:70], enc=chr) == "MA304H4D":
                swedish = True
        elif self.meter_manufacturer == "kaifa_se":
            parser = Kaifa_se
        elif self.meter_manufacturer == "kamstrup":
            parser = Kamstrup

        while self._running:
            try:
                # First pass re-parses the detection package instead of
                # throwing it away.
                if detect_pkg:
                    data = detect_pkg
                else:
                    data = self.read_packet()

                if parser.test_valid_data(data, self.oss):
                    _LOGGER.debug("data read from port=%s", data)
                    if swedish:
                        self.sensor_data, _ = parser.parse_data(
                            self.sensor_data, data, swedish
                        )
                    else:
                        self.sensor_data, _ = parser.parse_data(
                            self.sensor_data, data
                        )

                    self._check_for_new_sensors_and_update(self.sensor_data)
                else:
                    _LOGGER.debug("failed package: %s", data)
                if detect_pkg:
                    detect_pkg = None
            except serial.serialutil.SerialException:
                # Transient port error: keep the reader loop alive.
                pass

    @classmethod
    def _find_parser(cls, pkg):
        """Detect the meter manufacturer from a raw packet.

        Returns the manufacturer key string, or None when nothing matched.
        """

        def _test_meter(test_pkg, meter):
            """Return True if the byte sequence ``meter`` occurs in pkg."""
            match = []
            _LOGGER.debug("Testing for %s", meter)
            if test_pkg is None:
                return None
            for i, _ in enumerate(test_pkg):
                if test_pkg[i] == meter[0] and (
                    test_pkg[i:(i + len(meter))] == meter
                ):
                    match.append(meter)
            return meter in match

        if _test_meter(pkg, AIDON_METER_SEQ):
            _LOGGER.info("Detected Aidon meter")
            return "aidon"
        if _test_meter(pkg, AIDON_SE_METER_SEQ_3PH):
            _LOGGER.info("Detected Swedish Aidon meter")
            return "aidon_se"
        if _test_meter(pkg, AIDON_SE_METER_SEQ_1PH):
            _LOGGER.info("Detected Swedish Aidon meter")
            return "aidon_se"
        if _test_meter(pkg, KAIFA_SE_METER_SEQ):
            if field_type(fields=pkg[62:70], enc=chr) == "MA304H4D":
                _LOGGER.info("Detected Swedish Kaifa meter MA304H4D")
                return "kaifa"
            if _test_meter(pkg, [1, 0, 1, 7, 0, 255]):
                _LOGGER.info("Detected Swedish kaifa meter")
                return "kaifa_se"
            _LOGGER.info("Detected Kaifa meter")
            return "kaifa"
        if _test_meter(pkg, KAMSTRUP_METER_SEQ):
            _LOGGER.info("Detected Kamstrup meter")
            return "kamstrup"

        _LOGGER.warning("No parser detected")
        _LOGGER.debug("Meter detection package dump: %s", pkg)
        return None

    @property
    def data(self):
        """Return sensor data."""
        return self.sensor_data

    def missing_attrs(self, data=None):
        """Check for missing required attrs, caching any found in data.

        Returns True while serial/manufacturer/type are still unknown.
        """
        if data is None:
            data = self.data

        attrs_to_check = [HAN_METER_SERIAL,
                          HAN_METER_MANUFACTURER, HAN_METER_TYPE]
        imp_attrs = [i for i in attrs_to_check if i not in self._attrs]
        if imp_attrs:
            # Work on a copy: the reader thread keeps mutating the live
            # sensor dict while we iterate.
            cp_sensors_data = deepcopy(data)
            for check in imp_attrs:
                for value in cp_sensors_data.values():
                    val = value.get(SENSOR_ATTR, {}).get(check)
                    if val:
                        self._attrs[check] = val
                        break
            del cp_sensors_data
        miss_attrs = [i for i in attrs_to_check if i not in self._attrs]
        _LOGGER.debug("miss_attrs=%s", miss_attrs)
        if miss_attrs:
            _LOGGER.debug("We miss some attributes: %s", miss_attrs)
            return True
        return False

    def _check_for_new_sensors_and_update(self, sensor_data):
        """Compare sensor list and signal creation or state updates."""
        sensors_in_data = set(sensor_data.keys())
        new_devices = sensors_in_data.difference(AMS_DEVICES)

        if new_devices:
            # Check that we have all the info we need before the sensors
            # are created; the most important one is meter_serial as it is
            # used to create the unique_id.
            if self.missing_attrs(sensor_data) is True:
                _LOGGER.debug(
                    "Missing some attributes waiting for new read from the"
                    " serial"
                )
            else:
                _LOGGER.debug("Got %s new devices from the serial",
                              len(new_devices))
                _LOGGER.debug("DUMP %s", sensor_data)
                asyncio.run_coroutine_threadsafe(self._signal_new_sensor(),
                                                 self._hass.loop).result()
        else:
            asyncio.run_coroutine_threadsafe(self._signal_update_sensors(),
                                             self._hass.loop).result()

    async def _signal_new_sensor(self) -> None:
        """Tell the sensor platform to create new entities."""
        async_dispatcher_send(self._hass, SIGNAL_NEW_AMS_SENSOR)

    async def _signal_update_sensors(self) -> None:
        """Tell existing entities to refresh their state."""
        async_dispatcher_send(self._hass, SIGNAL_UPDATE_AMS)
425 |
--------------------------------------------------------------------------------
/custom_components/ams/config_flow.py:
--------------------------------------------------------------------------------
1 | """Adds config flow for hass-AMS."""
2 | import logging
3 | import os
4 |
5 | import serial.tools.list_ports as devices
6 | import voluptuous as vol
7 | from homeassistant import config_entries
8 |
9 | from custom_components.ams.const import ( # pylint: disable=unused-import
10 | CONF_BAUDRATE,
11 | CONF_MANUAL_SERIAL_PORT,
12 | CONF_METER_MANUFACTURER,
13 | CONF_OSS_BRIKKEN,
14 | CONF_PARITY,
15 | CONF_PROTOCOL,
16 | CONF_TCP_HOST,
17 | CONF_TCP_PORT,
18 | CONF_SERIAL_PORT,
19 | DEFAULT_BAUDRATE,
20 | DEFAULT_METER_MANUFACTURER,
21 | DEFAULT_OSS_BRIKKEN,
22 | DEFAULT_PARITY,
23 | DOMAIN,
24 | NETWORK,
25 | MANUFACTURER_OPTIONS,
26 | SERIAL,
27 | )
# Step 1 form: pick how the meter adapter is connected.
DATA_SCHEMA_SELECT_PROTOCOL = vol.Schema(
    {vol.Required("type"): vol.In([SERIAL, CONF_MANUAL_SERIAL_PORT, NETWORK])}
)
# Form for the TCP/IP connection step; dict order defines UI field order.
DATA_SCHEMA_NETWORK_DATA = vol.Schema(
    {
        vol.Required(CONF_TCP_HOST): str,
        vol.Required(CONF_TCP_PORT): vol.All(vol.Coerce(int),
                                             vol.Range(0, 65535)),
        vol.Required(
            CONF_METER_MANUFACTURER,
            default=DEFAULT_METER_MANUFACTURER
        ): vol.In(MANUFACTURER_OPTIONS),
        vol.Optional(
            CONF_PARITY, default=DEFAULT_PARITY
        ): vol.All(str),
        vol.Optional(
            CONF_BAUDRATE, default=DEFAULT_BAUDRATE
        ): vol.All(int),
    }
)
_LOGGER = logging.getLogger(__name__)
49 |
50 |
class AmsFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
    """Config flow for AMS."""

    # Bump when the shape of the stored config-entry data changes.
    VERSION = 1
    CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_PUSH

    def __init__(self):
        """Initialize flow state."""
        # Per-step validation errors shown in the form (currently unused).
        self._errors = {}
        # One of SERIAL, CONF_MANUAL_SERIAL_PORT or NETWORK once chosen.
        self.connection_type = None

    async def async_step_user(self, user_input=None):
        """Handle selection of protocol and route to the matching step."""
        if user_input is not None:
            self.connection_type = user_input["type"]
            if self.connection_type == NETWORK:
                return await self.async_step_network_connection()
            if self.connection_type == SERIAL:
                return await self.async_step_select_serial_connection()
            if self.connection_type == CONF_MANUAL_SERIAL_PORT:
                return await self.async_step_enter_serial_connection()

        return self.async_show_form(
            step_id="user", data_schema=DATA_SCHEMA_SELECT_PROTOCOL,
            errors=self._errors
        )

    async def async_step_enter_serial_connection(self, user_input=None):
        """Handle the manual serialport connection step."""

        if user_input is not None:
            # Tag the entry so __init__ knows which transport to open.
            user_input[CONF_PROTOCOL] = SERIAL
            entry_result = self.async_create_entry(
                title="AMS Reader", data=user_input,
            )
            if entry_result:
                return entry_result

        return self.async_show_form(
            step_id="enter_serial_connection",
            data_schema=vol.Schema(
                {
                    # NOTE(review): default=None on a Required string field
                    # looks suspect - confirm voluptuous/HA accepts it.
                    vol.Required(
                        CONF_SERIAL_PORT, default=None
                    ): vol.All(str),
                    vol.Required(
                        CONF_METER_MANUFACTURER,
                        default=DEFAULT_METER_MANUFACTURER
                    ): vol.In(MANUFACTURER_OPTIONS),
                    vol.Optional(
                        CONF_PARITY, default=DEFAULT_PARITY
                    ): vol.All(str),
                    vol.Optional(
                        CONF_BAUDRATE, default=DEFAULT_BAUDRATE
                    ): vol.All(int),
                    vol.Optional(
                        CONF_OSS_BRIKKEN, default=DEFAULT_OSS_BRIKKEN
                    ): bool,
                }
            ),
            errors=self._errors,
        )

    async def async_step_select_serial_connection(self, user_input=None):
        """Handle the select serialport connection step."""
        # comports() blocks; run it off the event loop.
        portdata = await self.hass.async_add_executor_job(devices.comports)
        _LOGGER.debug(portdata)
        ports = [(comport.device + ": " + comport.description) for
                 comport in portdata]

        if user_input is not None:
            user_input[CONF_PROTOCOL] = SERIAL
            user_selection = user_input[CONF_SERIAL_PORT]
            # Map the "device: description" choice back to the comport.
            port = portdata[ports.index(user_selection)]
            # Prefer the stable /dev/serial/by-id path when available.
            serial_by_id = await self.hass.async_add_executor_job(
                get_serial_by_id, port.device
            )
            user_input[CONF_SERIAL_PORT] = serial_by_id
            entry_result = self.async_create_entry(
                title="AMS Reader", data=user_input,
            )
            if entry_result:
                return entry_result

        _LOGGER.debug(ports)
        return self.async_show_form(
            step_id="select_serial_connection",
            data_schema=vol.Schema(
                {
                    vol.Required(
                        CONF_SERIAL_PORT, default=None
                    ): vol.In(ports),
                    vol.Required(
                        CONF_METER_MANUFACTURER,
                        default=DEFAULT_METER_MANUFACTURER
                    ): vol.In(MANUFACTURER_OPTIONS),
                    vol.Optional(
                        CONF_PARITY, default=DEFAULT_PARITY
                    ): vol.All(str),
                    vol.Optional(
                        CONF_BAUDRATE, default=DEFAULT_BAUDRATE
                    ): vol.All(int),
                    vol.Optional(
                        CONF_OSS_BRIKKEN, default=DEFAULT_OSS_BRIKKEN
                    ): bool,
                }
            ),
            errors=self._errors,
        )

    async def async_step_network_connection(self, user_input=None):
        """Handle the network connection step."""
        # NOTE(review): truthiness check (not ``is not None``) - an empty
        # dict would re-show the form; harmless as the schema has required
        # fields, but inconsistent with the other steps.
        if user_input:
            user_input[CONF_PROTOCOL] = NETWORK
            entry_result = self.async_create_entry(
                title="AMS Reader", data=user_input,
            )
            if entry_result:
                return entry_result

        return self.async_show_form(
            step_id="network_connection",
            data_schema=DATA_SCHEMA_NETWORK_DATA,
            errors={},
        )

    async def async_step_import(self, import_config):
        """Import a config flow from configuration."""
        # Only a single AMS hub is supported; abort if one already exists.
        if self._async_current_entries():
            _LOGGER.warning("Only one configuration of AMS Reader is allowed.")
            return self.async_abort(reason="single_instance_allowed")

        return self.async_create_entry(title="configuration.yaml",
                                       data=import_config)
185 |
186 |
def get_serial_by_id(dev_path):
    """Return a /dev/serial/by-id match for given device if available."""
    by_id_dir = "/dev/serial/by-id"
    if not os.path.isdir(by_id_dir):
        return dev_path

    # Each symlink in by-id points at a real device node; return the
    # stable by-id path whose target is our device.
    for link in os.scandir(by_id_dir):
        if not link.is_symlink():
            continue
        if os.path.realpath(link.path) == dev_path:
            return link.path
    return dev_path
198 |
--------------------------------------------------------------------------------
/custom_components/ams/const.py:
--------------------------------------------------------------------------------
1 | """ Constants for hass-AMS package"""
2 | import serial
3 | from homeassistant.components.sensor import (
4 | SensorDeviceClass,
5 | SensorStateClass
6 | )
7 |
# --- Keys for fields decoded out of a HAN frame by the parsers ---------
HAN_OBIS_CODE = "obis_code"
HAN_PACKET_SIZE = "packet_size"
HAN_METER_MANUFACTURER = "meter_manufacturer"
HAN_METER_LIST_TYPE = "list_type"
HAN_LIST_VER_ID = "obis_list_version"
HAN_METER_SERIAL = "meter_serial"
HAN_METER_TYPE = "meter_type"
HAN_METER_DATETIME = "meter_date_time"
HAN_OBIS_DATETIME = "obis_timedate"
HAN_METER_DAYOFWEEK = "meter_day_of_week"
# Per-sensor keys: instantaneous power (total and per phase L1-L3),
# current, voltage, and cumulative energy registers.
HAN_ACTIVE_POWER_IMPORT = "ams_active_power_import"
HAN_ACTIVE_POWER_EXPORT = "ams_active_power_export"
HAN_REACTIVE_POWER_IMPORT = "ams_reactive_power_import"
HAN_REACTIVE_POWER_EXPORT = "ams_reactive_power_export"
HAN_ACTIVE_POWER_IMPORT_L1 = "ams_active_power_import_l1"
HAN_ACTIVE_POWER_EXPORT_L1 = "ams_active_power_export_l1"
HAN_REACTIVE_POWER_IMPORT_L1 = "ams_reactive_power_import_l1"
HAN_REACTIVE_POWER_EXPORT_L1 = "ams_reactive_power_export_l1"
HAN_ACTIVE_POWER_IMPORT_L2 = "ams_active_power_import_l2"
HAN_ACTIVE_POWER_EXPORT_L2 = "ams_active_power_export_l2"
HAN_REACTIVE_POWER_IMPORT_L2 = "ams_reactive_power_import_l2"
HAN_REACTIVE_POWER_EXPORT_L2 = "ams_reactive_power_export_l2"
HAN_ACTIVE_POWER_IMPORT_L3 = "ams_active_power_import_l3"
HAN_ACTIVE_POWER_EXPORT_L3 = "ams_active_power_export_l3"
HAN_REACTIVE_POWER_IMPORT_L3 = "ams_reactive_power_import_l3"
HAN_REACTIVE_POWER_EXPORT_L3 = "ams_reactive_power_export_l3"
HAN_CURRENT_L1 = "ams_current_l1"
HAN_CURRENT_L2 = "ams_current_l2"
HAN_CURRENT_L3 = "ams_current_l3"
HAN_VOLTAGE_L1 = "ams_voltage_l1"
HAN_VOLTAGE_L2 = "ams_voltage_l2"
HAN_VOLTAGE_L3 = "ams_voltage_l3"
HAN_ACTIVE_ENERGY_IMPORT = "ams_active_energy_import"
HAN_ACTIVE_ENERGY_EXPORT = "ams_active_energy_export"
HAN_REACTIVE_ENERGY_IMPORT = "ams_reactive_energy_import"
HAN_REACTIVE_ENERGY_EXPORT = "ams_reactive_energy_export"

# Keys inside each entry of the sensor-data dicts built by the parsers.
SENSOR_ICON = "icon"
SENSOR_UOM = "unit_of_measurement"
SENSOR_ATTR = "attributes"
SENSOR_STATE = "state"

AMS_ENERGY_METER = "AMS energy meter"
AMS_NEW_SENSORS = "ams_new_sensors"
AMS_SENSORS = "ams_sensors"
# Devices that we have read from the serial connection.
AMS_DEVICES = set()
AMS_SENSOR_CREATED_BUT_NOT_READ = set()

# --- Config entry / options keys ---------------------------------------
CONF_BAUDRATE = "baudrate"
CONF_METER_MANUFACTURER = HAN_METER_MANUFACTURER
CONF_MANUAL_SERIAL_PORT = "manual_serial_port"
CONF_OSS_BRIKKEN = "oss_brikken"
CONF_PARITY = "parity"
CONF_SERIAL_PORT = "serial_port"
CONF_TCP_PORT = "tcp_port"
CONF_TCP_HOST = "tcp_host"
CONF_PROTOCOL = "protocol"
CONF_PROTOCOL_CONFIG = "protocol_config"
CONF_PROTOCOL_TYPE = "type"
ATTR_DEVICE_CLASS = "device_class"
ATTR_LAST_RESET = "last_reset"
ATTR_STATE_CLASS = "state_class"
# The two supported transport protocols.
SERIAL = "serial"
NETWORK = "tcp_ip"

DOMAIN = "ams"

# --- Defaults for the config flow --------------------------------------
DEFAULT_SERIAL_PORT = "/dev/ttyUSB0"
DEFAULT_BAUDRATE = 2400
DEFAULT_METER_MANUFACTURER = "auto"
DEFAULT_OSS_BRIKKEN = False
DEFAULT_PARITY = serial.PARITY_NONE
DEFAULT_TIMEOUT = 0.1

# --- Frame markers and manufacturer signature byte sequences -----------
DATA_FLAG = [230, 231, 0, 15]
FRAME_FLAG = b"\x7e"
DEC_FRAME_FLAG = 126
AIDON_METER_SEQ = [65, 73, 68, 79, 78, 95]
AIDON_SE_METER_SEQ_3PH = [126, 162, 67]
AIDON_SE_METER_SEQ_1PH = [126, 161, 79]

KAIFA_METER_SEQ = [75, 102, 109, 95]
KAIFA_SE_METER_SEQ = [75, 70, 77, 95]
KAMSTRUP_METER_SEQ = [75, 97, 109, 115, 116, 114, 117, 112, 95]
# OBIS list-type discriminators (element counts) read from the frame.
LIST_TYPE_1PH_SE = 15
LIST_TYPE_3PH_SE = 27
LIST_TYPE_MINI = 1
LIST_TYPE_SHORT_1PH = 9
LIST_TYPE_LONG_1PH = 14
LIST_TYPE_SHORT_3PH = 13
LIST_TYPE_LONG_3PH = 18
LIST_TYPE_SHORT_3PH_3W = 12
LIST_TYPE_LONG_3PH_3W = 17


# Maps the meter-type id found in the frame to a human readable model name.
METER_TYPE = {
    # Aidon
    6484: "RF2-system module Integrated HAN",  # Sweden
    6483: "RF2-system module Integrated HAN",  # Norway
    6510: "6510 1-phase Meter",
    6511: "6511 1-phase Meter with CB",
    6515: "6515 1-phase Meter with CB and Earth Fault Current Measurement",
    6520: "6520 3-phase Meter 3 Wire",
    6521: "6521 2-phase Meter 3 Wire with CB",
    6525: (
        "6525 3-phase Meter 3 Wire with CB and Earth Fault Current "
        "Measurement"
    ),
    6530: "6530 3-phase Meter 4 Wire",
    6531: "6531 3-phase Meter 4 Wire with CB",
    6534: "6534 3-phase Meter with CB and Neutral Current Measurement",
    6540: "6540 3-phase CT Meter 3 Wire",
    6550: "6550 3-phase CT Meter 4 Wire",
    6560: "6560 3-phase CT/VT meter 3 Wire",
    # Kaifa
    "MA105H2E": "Domestic 1 Phase 230V/400V meter",
    "MA304H3E": "Domestic/Industrial 3 Phase 230V 3-Wire meter",
    "MA304H4": "Domestic/Industrial 3 Phase 400V 4-Wire meter",
    "MA304T4": "Industrial 3 Phase 230V 3-Wire meter",
    "MA304T3": "Industrial 3 Phase 400V 4-Wire meter",
    "MA304H4D": "Poly Phase 3 Phase 230V/400V 4-Wire meter",
    # Kamstrup
    6861111: "Omnipower 1 Phase Direct meter",
    6841121: "Omnipower 3 Phase 3-Wire Direct meter",
    6841131: "Omnipower 3 Phase 4-Wire Direct meter",
    6851121: "Omnipower 3 Phase CT 3-Wire Direct meter",
    6851131: "Omnipower 3 Phase CT 4-Wire Direct meter",
    6841128: "Omnipower 3 Phase Direct meter",
    6841138: "Omnipower 3 Phase Direct meter",
}
UNKNOWN_METER = "Unknown"

# Cumulative registers only reported by the meter once per hour.
HOURLY_SENSORS = [
    HAN_ACTIVE_ENERGY_IMPORT,
    HAN_ACTIVE_ENERGY_EXPORT,
    HAN_REACTIVE_ENERGY_IMPORT,
    HAN_REACTIVE_ENERGY_EXPORT,
]

ACTIVE_ENERGY_SENSORS = [
    HAN_ACTIVE_ENERGY_IMPORT,
    HAN_ACTIVE_ENERGY_EXPORT,
]

# Default HA statistics attributes for cumulative energy sensors.
ACTIVE_ENERGY_DEFAULT_ATTRS = {
    ATTR_STATE_CLASS: SensorStateClass.TOTAL_INCREASING,
    ATTR_DEVICE_CLASS: SensorDeviceClass.ENERGY,
}

CURRENT_SENSORS = [
    HAN_CURRENT_L1,
    HAN_CURRENT_L2,
    HAN_CURRENT_L3,
]

VOLTAGE_SENSORS = [
    HAN_VOLTAGE_L1,
    HAN_VOLTAGE_L2,
    HAN_VOLTAGE_L3,
]

# NOTE(review): per-phase power sensors (…_L1/_L2/_L3) are not included
# here — presumably intentional, but verify against sensor.py usage.
ALL_SENSORS = [
    HAN_REACTIVE_POWER_EXPORT,
    HAN_VOLTAGE_L3,
    HAN_ACTIVE_POWER_EXPORT,
    HAN_VOLTAGE_L2,
    HAN_REACTIVE_POWER_IMPORT,
    HAN_CURRENT_L1,
    HAN_VOLTAGE_L1,
    HAN_CURRENT_L2,
    HAN_ACTIVE_POWER_IMPORT,
    HAN_CURRENT_L3,
] + HOURLY_SENSORS

# Choices offered in the config flow; "auto" triggers parser detection.
MANUFACTURER_OPTIONS = [
    "auto",
    "aidon",
    "aidon_se",
    "kaifa",
    "kaifa_se",
    "kamstrup",
]

# Dispatcher signals used between the reader and the sensor platform.
SIGNAL_UPDATE_AMS = "ams_update"
SIGNAL_NEW_AMS_SENSOR = "ams_new_sensor"

# COSEM day-of-week byte (1 = Monday) to weekday name.
WEEKDAY_MAPPING = {
    1: "Monday",
    2: "Tuesday",
    3: "Wednesday",
    4: "Thursday",
    5: "Friday",
    6: "Saturday",
    7: "Sunday",
}

# OBIS code(s) for each sensor; a two-element list of lists means the
# value can appear under either code depending on the meter.
SENSOR_OBIS_MAP = {
    HAN_ACTIVE_POWER_IMPORT: [[1, 0, 1, 7, 0, 255], [1, 1, 1, 7, 0, 255]],
    HAN_ACTIVE_POWER_EXPORT: [[1, 0, 2, 7, 0, 255], [1, 1, 2, 7, 0, 255]],
    HAN_REACTIVE_POWER_IMPORT: [[1, 0, 3, 7, 0, 255], [1, 1, 3, 7, 0, 255]],
    HAN_REACTIVE_POWER_EXPORT: [[1, 0, 4, 7, 0, 255], [1, 1, 4, 7, 0, 255]],
    HAN_ACTIVE_POWER_IMPORT_L1: [1, 0, 21, 7, 0, 255],
    HAN_ACTIVE_POWER_EXPORT_L1: [1, 0, 22, 7, 0, 255],
    HAN_REACTIVE_POWER_IMPORT_L1: [1, 0, 23, 7, 0, 255],
    HAN_REACTIVE_POWER_EXPORT_L1: [1, 0, 24, 7, 0, 255],
    HAN_ACTIVE_POWER_IMPORT_L2: [1, 0, 41, 7, 0, 255],
    HAN_ACTIVE_POWER_EXPORT_L2: [1, 0, 42, 7, 0, 255],
    HAN_REACTIVE_POWER_IMPORT_L2: [1, 0, 43, 7, 0, 255],
    HAN_REACTIVE_POWER_EXPORT_L2: [1, 0, 44, 7, 0, 255],
    HAN_ACTIVE_POWER_IMPORT_L3: [1, 0, 61, 7, 0, 255],
    HAN_ACTIVE_POWER_EXPORT_L3: [1, 0, 62, 7, 0, 255],
    HAN_REACTIVE_POWER_IMPORT_L3: [1, 0, 63, 7, 0, 255],
    HAN_REACTIVE_POWER_EXPORT_L3: [1, 0, 64, 7, 0, 255],
    HAN_CURRENT_L1: [[1, 0, 31, 7, 0, 255], [1, 1, 31, 7, 0, 255]],
    HAN_CURRENT_L2: [[1, 0, 51, 7, 0, 255], [1, 1, 51, 7, 0, 255]],
    HAN_CURRENT_L3: [[1, 0, 71, 7, 0, 255], [1, 1, 71, 7, 0, 255]],
    HAN_VOLTAGE_L1: [[1, 0, 32, 7, 0, 255], [1, 1, 32, 7, 0, 255]],
    HAN_VOLTAGE_L2: [[1, 0, 52, 7, 0, 255], [1, 1, 52, 7, 0, 255]],
    HAN_VOLTAGE_L3: [[1, 0, 72, 7, 0, 255], [1, 1, 72, 7, 0, 255]],
    HAN_ACTIVE_ENERGY_IMPORT: [[1, 0, 1, 8, 0, 255], [1, 1, 1, 8, 0, 255]],
    HAN_ACTIVE_ENERGY_EXPORT: [[1, 0, 2, 8, 0, 255], [1, 1, 2, 8, 0, 255]],
    HAN_REACTIVE_ENERGY_IMPORT: [[1, 0, 3, 8, 0, 255], [1, 1, 3, 8, 0, 255]],
    HAN_REACTIVE_ENERGY_EXPORT: [[1, 0, 4, 8, 0, 255], [1, 1, 4, 8, 0, 255]],
}
# OBIS codes for frame metadata shared by all sensors.
SENSOR_COMMON_OBIS_MAP = {
    HAN_LIST_VER_ID: [1, 1, 0, 2, 129, 255],
    HAN_METER_SERIAL: [[0, 0, 96, 1, 0, 255], [1, 1, 0, 0, 5, 255]],
    HAN_METER_TYPE: [[0, 0, 96, 1, 7, 255], [1, 1, 96, 1, 1, 255]],
    HAN_METER_DATETIME: [[0, 0, 1, 0, 0, 255], [0, 1, 1, 0, 0, 255]],
}

SENSOR_UNIT = {
    HAN_ACTIVE_POWER_IMPORT: "W",
    HAN_ACTIVE_POWER_EXPORT: "W",
    HAN_REACTIVE_POWER_IMPORT: "VAr",
    HAN_REACTIVE_POWER_EXPORT: "VAr",
    HAN_ACTIVE_POWER_IMPORT_L1: "W",
    HAN_ACTIVE_POWER_EXPORT_L1: "W",
    HAN_REACTIVE_POWER_IMPORT_L1: "VAr",
    HAN_REACTIVE_POWER_EXPORT_L1: "VAr",
    HAN_ACTIVE_POWER_IMPORT_L2: "W",
    HAN_ACTIVE_POWER_EXPORT_L2: "W",
    HAN_REACTIVE_POWER_IMPORT_L2: "VAr",
    HAN_REACTIVE_POWER_EXPORT_L2: "VAr",
    HAN_ACTIVE_POWER_IMPORT_L3: "W",
    HAN_ACTIVE_POWER_EXPORT_L3: "W",
    HAN_REACTIVE_POWER_IMPORT_L3: "VAr",
    HAN_REACTIVE_POWER_EXPORT_L3: "VAr",
    HAN_CURRENT_L1: "A",
    HAN_CURRENT_L2: "A",
    HAN_CURRENT_L3: "A",
    HAN_VOLTAGE_L1: "V",
    HAN_VOLTAGE_L2: "V",
    HAN_VOLTAGE_L3: "V",
    HAN_ACTIVE_ENERGY_IMPORT: "kWh",
    HAN_ACTIVE_ENERGY_EXPORT: "kWh",
    HAN_REACTIVE_ENERGY_IMPORT: "kVAr",
    HAN_REACTIVE_ENERGY_EXPORT: "kVAr",
}

# Material Design icon names; parsers prepend the "mdi:" prefix.
SENSOR_ICON_MAP = {
    HAN_ACTIVE_POWER_IMPORT: "gauge",
    HAN_ACTIVE_POWER_EXPORT: "gauge",
    HAN_REACTIVE_POWER_IMPORT: "gauge",
    HAN_REACTIVE_POWER_EXPORT: "gauge",
    HAN_ACTIVE_POWER_IMPORT_L1: "gauge",
    HAN_ACTIVE_POWER_EXPORT_L1: "gauge",
    HAN_REACTIVE_POWER_IMPORT_L1: "gauge",
    HAN_REACTIVE_POWER_EXPORT_L1: "gauge",
    HAN_ACTIVE_POWER_IMPORT_L2: "gauge",
    HAN_ACTIVE_POWER_EXPORT_L2: "gauge",
    HAN_REACTIVE_POWER_IMPORT_L2: "gauge",
    HAN_REACTIVE_POWER_EXPORT_L2: "gauge",
    HAN_ACTIVE_POWER_IMPORT_L3: "gauge",
    HAN_ACTIVE_POWER_EXPORT_L3: "gauge",
    HAN_REACTIVE_POWER_IMPORT_L3: "gauge",
    HAN_REACTIVE_POWER_EXPORT_L3: "gauge",
    HAN_CURRENT_L1: "current-ac",
    HAN_CURRENT_L2: "current-ac",
    HAN_CURRENT_L3: "current-ac",
    HAN_VOLTAGE_L1: "flash",
    HAN_VOLTAGE_L2: "flash",
    HAN_VOLTAGE_L3: "flash",
    HAN_ACTIVE_ENERGY_IMPORT: "gauge",
    HAN_ACTIVE_ENERGY_EXPORT: "gauge",
    HAN_REACTIVE_ENERGY_IMPORT: "gauge",
    HAN_REACTIVE_ENERGY_EXPORT: "gauge",
}

--------------------------------------------------------------------------------
/custom_components/ams/manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "domain": "ams",
3 | "name": "AMS Reader",
4 | "codeowners": ["@turbokongen"],
5 | "config_flow": true,
6 | "dependencies": [],
7 | "documentation": "https://github.com/turbokongen/hass-AMS",
8 | "iot_class": "local_push",
9 | "issue_tracker": "https://github.com/turbokongen/hass-AMS/issues",
10 | "requirements": ["pyserial==3.5", "crccheck==1.0"],
11 | "version": "2.0.3"
12 | }
13 |
--------------------------------------------------------------------------------
/custom_components/ams/parsers/__init__.py:
--------------------------------------------------------------------------------
1 | """Base functions to convert data from meter"""
2 | import logging
3 |
4 | _LOGGER = logging.getLogger(__name__)
5 |
6 |
def field_type(default="", fields=None, enc=str, dec=None):
    """Join each value of *fields* through *enc*, optionally post-decode.

    ``default`` is the separator placed between encoded values; ``dec``
    (when given) converts the joined string to its final type.
    """
    joined = default.join([enc(value) for value in fields])
    return dec(joined) if dec else joined
14 |
15 |
def byte_decode(fields=None, count=4):
    """Decode a big-endian unsigned integer from *fields*.

    Uses the first two bytes when ``count`` is 2, otherwise the first
    four bytes.
    """
    width = 2 if count == 2 else 4
    value = 0
    for octet in fields[:width]:
        value = (value << 8) | octet
    return value
26 |
27 |
def signed_decode(fields=None):
    """Decode *fields* (big-endian byte values) as a signed integer.

    BUGFIX: the previous implementation concatenated ``hex(num)[2:]``
    per byte, which drops the leading zero of any byte below 0x10 —
    e.g. ``[0x01, 0x05]`` decoded as 0x15 (21) instead of 0x0105 (261)
    — and then applied a hard-coded 16-bit sign test that misfires once
    digits have been dropped.  ``int.from_bytes`` pads correctly and
    applies two's-complement over the actual field length (identical to
    the old behavior for well-formed 2-byte input, which is what the
    parsers pass).
    """
    return int.from_bytes(bytes(fields), byteorder="big", signed=True)
38 |
--------------------------------------------------------------------------------
/custom_components/ams/parsers/aidon.py:
--------------------------------------------------------------------------------
1 | """
2 | Decode for HAN Aidon.
3 |
4 | This module will decode the incoming message from Mbus serial.
5 | """
6 | import logging
7 | from datetime import datetime
8 | from crccheck.crc import CrcX25
9 | from custom_components.ams.parsers import (byte_decode,
10 | field_type,
11 | signed_decode)
12 | from custom_components.ams.const import (
13 | ACTIVE_ENERGY_SENSORS,
14 | ATTR_DEVICE_CLASS,
15 | ATTR_STATE_CLASS,
16 | DATA_FLAG,
17 | DEC_FRAME_FLAG,
18 | HAN_ACTIVE_POWER_IMPORT,
19 | HAN_LIST_VER_ID,
20 | HAN_METER_DATETIME,
21 | HAN_METER_MANUFACTURER,
22 | HAN_METER_SERIAL,
23 | HAN_METER_TYPE,
24 | HAN_OBIS_CODE,
25 | HAN_OBIS_DATETIME,
26 | HOURLY_SENSORS,
27 | LIST_TYPE_MINI,
28 | METER_TYPE,
29 | SENSOR_ATTR,
30 | SENSOR_COMMON_OBIS_MAP,
31 | SENSOR_ICON,
32 | SENSOR_ICON_MAP,
33 | SENSOR_OBIS_MAP,
34 | SENSOR_STATE,
35 | SENSOR_UNIT,
36 | SENSOR_UOM,
37 | UNKNOWN_METER,
38 | WEEKDAY_MAPPING,
39 | )
40 | from homeassistant.components.sensor import (
41 | SensorDeviceClass,
42 | SensorStateClass
43 | )
44 | _LOGGER = logging.getLogger(__name__)
45 |
46 |
47 | # pylint: disable=too-many-branches,too-many-locals,too-many-nested-blocks
48 | # pylint: disable=too-many-statements
def parse_data(stored, data):
    """Parse an Aidon HAN frame into sensor readings.

    Scans the raw frame (``data``, a sequence of byte values) for known
    OBIS code patterns and decodes the value that follows each match.

    Args:
        stored: Dict of previously built sensor entries; updated in
            place with this frame's readings and returned.
        data: The raw M-bus frame.

    Returns:
        Tuple of (updated ``stored``, ``han_data``) where ``han_data``
        holds every decoded field of this frame keyed by sensor name.
    """
    sensor_data = {}
    han_data = {}
    pkt = data
    # Frame length lives in the low 12 bits of bytes 1-2; +2 covers the
    # two frame-flag bytes.
    read_packet_size = ((data[1] & 0x0F) << 8 | data[2]) + 2
    han_data["packet_size"] = read_packet_size
    list_type = pkt[19]
    han_data["list_type"] = list_type
    _LOGGER.debug("list_type is %s", list_type)
    if list_type == LIST_TYPE_MINI:
        # Mini list carries only active power import; it reuses the
        # attributes captured earlier from a full list.
        if HAN_ACTIVE_POWER_IMPORT not in stored:
            # Wait for long message (10sec) to get full attribute set before
            # publishing mini list data.
            return stored, han_data
        key = HAN_ACTIVE_POWER_IMPORT
        for item in SENSOR_OBIS_MAP[key]:
            for i in range(len(pkt)):
                if pkt[i:i + len(item)] == item:
                    # Double-long-unsigned dict construct
                    if pkt[i + len(item)] == 6:
                        v_start = i + len(item) + 1
                        v_stop = v_start + 4
                        han_data["obis_" + key] = (
                            '.'.join([str(elem) for elem in item])
                        )
                        han_data[key] = (
                            byte_decode(fields=pkt[v_start:v_stop])
                        )
                        sensor_data[key] = {
                            SENSOR_STATE: han_data[key],
                            SENSOR_ATTR: {
                                HAN_METER_MANUFACTURER: stored[key][
                                    SENSOR_ATTR][
                                    HAN_METER_MANUFACTURER],
                                HAN_METER_TYPE: stored[key][
                                    SENSOR_ATTR][
                                    HAN_METER_TYPE],
                                HAN_OBIS_CODE: han_data["obis_" + key],
                                HAN_METER_SERIAL: stored[key][
                                    SENSOR_ATTR][
                                    HAN_METER_SERIAL],
                                SENSOR_UOM:
                                    SENSOR_UNIT.get(key),
                                SENSOR_ICON: (
                                    "mdi:" + SENSOR_ICON_MAP.get(
                                        key)),
                            },

                        }
                        _LOGGER.debug(
                            "%s, OBIS:%s, Index:%s, Type:%s Double OBIS",
                            key, item, (i, i + len(item)),
                            (pkt[(i + len(item))])
                        )
                        _LOGGER.debug(
                            "Value double OBIS type 6: %s, Index:%s",
                            han_data[key], (v_start, v_stop)
                        )
        # NOTE(review): sensor_data built above is not merged into
        # ``stored`` before returning — possibly intentional (callers
        # may only read han_data for mini lists), but confirm.
        return stored, han_data

    # Ensure basic data before parsing package
    for key in SENSOR_COMMON_OBIS_MAP:
        # Keys with two alternative OBIS codes are matched per alternative.
        if len(SENSOR_COMMON_OBIS_MAP[key]) == 2:
            for item in SENSOR_COMMON_OBIS_MAP[key]:
                for i in range(len(pkt)):
                    if pkt[i:i + len(item)] == item:
                        # Date time construct
                        if pkt[i + len(item)] == 9:
                            han_data[HAN_OBIS_DATETIME] = (
                                '.'.join([str(elem) for elem in item])
                            )
                            v_start = i + len(item) + 2
                            meter_date_time_year = (
                                byte_decode(fields=pkt[v_start:(v_start + 2)],
                                            count=2))
                            meter_date_time_month = pkt[v_start + 2]
                            meter_date_time_date = pkt[v_start + 3]
                            meter_date_time_day_of_week = (
                                WEEKDAY_MAPPING.get(pkt[v_start + 4]))
                            meter_date_time_hour = (
                                str(pkt[v_start + 5]).zfill(2)
                            )
                            meter_date_time_minute = (
                                str(pkt[v_start + 6]).zfill(2)
                            )
                            meter_date_time_seconds = (
                                str(pkt[v_start + 7]).zfill(2)
                            )
                            meter_date_time_str = (
                                str(meter_date_time_year)
                                + "-"
                                + str(meter_date_time_month)
                                + "-"
                                + str(meter_date_time_date)
                                + "-"
                                + str(meter_date_time_hour)
                                + "-"
                                + str(meter_date_time_minute)
                                + "-"
                                # NOTE(review): minute appended twice here
                                # (looks like a copy/paste slip) — confirm
                                # the intended timestamp format.
                                + str(meter_date_time_minute)
                                + "-"
                                + str(meter_date_time_seconds)
                            )
                            han_data[
                                HAN_METER_DATETIME] = meter_date_time_str
                            _LOGGER.debug(
                                "%s, OBIS:%s, Index:%s, Type:%s Double OBIS",
                                HAN_METER_DATETIME, item,
                                (i, i + len(item)), (pkt[(i + len(item))]))
                            _LOGGER.debug("%s, %s, %s, %s, %s, %s, %s, %s, "
                                          "%s, %s",
                                          HAN_METER_DATETIME,
                                          item, meter_date_time_year,
                                          meter_date_time_month,
                                          meter_date_time_date,
                                          meter_date_time_day_of_week,
                                          meter_date_time_hour,
                                          meter_date_time_minute,
                                          meter_date_time_seconds,
                                          meter_date_time_str)
                        # Visible string construct
                        elif pkt[i + len(item)] == 10 or\
                                pkt[i + len(item)] == 13:
                            # Type 13 carries one extra header byte.
                            if pkt[i + len(item)] == 13:
                                _offset = 1
                            else:
                                _offset = 0
                            v_start = i + len(item) + 2 + _offset
                            v_length = pkt[v_start - 1]
                            v_stop = v_start + v_length
                            han_data["obis_" + key] = (
                                '.'.join([str(elem) for elem in item])
                            )
                            if key == HAN_METER_TYPE:
                                han_data[key] = (
                                    METER_TYPE.get(field_type(fields=pkt[
                                        v_start:v_stop], enc=chr, dec=int),
                                                   UNKNOWN_METER)
                                )
                            else:
                                han_data[key] = (
                                    field_type(fields=pkt[v_start:v_stop],
                                               enc=chr)
                                )
                            _LOGGER.debug(
                                "%s, OBIS:%s, Index:%s, Type:%s Double OBIS",
                                key, item, (i, i + len(item)),
                                (pkt[(i + len(item))]))
                            _LOGGER.debug(
                                "Value double OBIS type 10/13: %s, Index:%s",
                                han_data[key], (v_start, v_stop))
        # Single-OBIS fallback: runs for every key; the slice compare is
        # harmless (never matches) for the two-alternative keys above.
        for i in range(len(pkt)):
            if (pkt[i:i + len(SENSOR_COMMON_OBIS_MAP[key])] ==
                    SENSOR_COMMON_OBIS_MAP[key]):
                # Visible string construct
                if pkt[i + len(SENSOR_COMMON_OBIS_MAP[key])] == 10 or \
                        pkt[i + len(SENSOR_COMMON_OBIS_MAP[key])] == 13:
                    if pkt[i + len(SENSOR_COMMON_OBIS_MAP[key])] == 13:
                        _offset = 1
                    else:
                        _offset = 0
                    v_start = i + len(
                        SENSOR_COMMON_OBIS_MAP[key]) + 2 + _offset
                    v_length = pkt[v_start - 1]
                    v_stop = v_start + v_length
                    han_data["obis_" + key] = (
                        '.'.join([str(elem) for elem in
                                  SENSOR_COMMON_OBIS_MAP[key]])
                    )
                    han_data[key] = (
                        field_type(fields=pkt[v_start:v_stop], enc=chr)
                    )
                    _LOGGER.debug(
                        "%s, OBIS:%s, Index:%s, Type:%s Single OBIS",
                        key, SENSOR_COMMON_OBIS_MAP[key],
                        (i, i + len(SENSOR_COMMON_OBIS_MAP[key])),
                        (pkt[(i + len(SENSOR_COMMON_OBIS_MAP[key]))]))
                    _LOGGER.debug(
                        "Value Single OBIS type 10/13: %s, Index:%s",
                        han_data[key], (v_start, v_stop))
    # Decode every sensor value present in the frame.
    for key in SENSOR_OBIS_MAP:
        if len(SENSOR_OBIS_MAP[key]) == 2:
            for item in SENSOR_OBIS_MAP[key]:
                for i in range(len(pkt)):
                    if pkt[i:i + len(item)] == item:
                        # Double-long-unsigned dict construct
                        if pkt[i + len(item)] == 6:
                            v_start = i + len(item) + 1
                            v_stop = v_start + 4
                            han_data["obis_" + key] = (
                                '.'.join([str(elem) for elem in item])
                            )
                            measure = (
                                byte_decode(fields=pkt[v_start:v_stop])
                            )
                            if key in HOURLY_SENSORS:
                                # Raw energy register appears scaled by
                                # 100 (→ kWh/kVArh); confirm against the
                                # Aidon OBIS spec.
                                han_data[key] = measure / 100
                            else:
                                han_data[key] = measure
                            _LOGGER.debug(
                                "%s, OBIS:%s, Index:%s, Type:%s Double OBIS",
                                key, item, (i, i + len(item)),
                                (pkt[(i + len(item))])
                            )
                            sensor_data[key] = {
                                SENSOR_STATE: han_data[key],
                                SENSOR_ATTR: {
                                    HAN_METER_MANUFACTURER: han_data[
                                        HAN_LIST_VER_ID],
                                    HAN_METER_TYPE: han_data[
                                        HAN_METER_TYPE],
                                    HAN_OBIS_CODE: han_data[
                                        "obis_" + key],
                                    HAN_METER_SERIAL: han_data[
                                        HAN_METER_SERIAL],
                                    SENSOR_UOM:
                                        SENSOR_UNIT.get(key),
                                    SENSOR_ICON: (
                                        "mdi:" +
                                        SENSOR_ICON_MAP.get(key)),
                                },
                            }
                            if key in HOURLY_SENSORS:
                                sensor_data[key][SENSOR_ATTR][
                                    HAN_METER_DATETIME] = han_data[
                                    HAN_METER_DATETIME]
                                sensor_data[key][SENSOR_ATTR][
                                    ATTR_DEVICE_CLASS] = (
                                    SensorDeviceClass.ENERGY)
                                if key in ACTIVE_ENERGY_SENSORS:
                                    sensor_data[key][SENSOR_ATTR][
                                        ATTR_STATE_CLASS] = (
                                        SensorStateClass.TOTAL_INCREASING)
                            _LOGGER.debug(
                                "Value double OBIS type 6: %s, Index:%s",
                                han_data[key], (v_start, v_stop)
                            )
                        # Long-signed & Long-unsigned dict construct
                        elif (pkt[i + len(item)] == 16 or
                              pkt[i + len(item)] == 18):
                            signed = None
                            if pkt[i + len(item)] == 16:
                                signed = True
                            v_start = i + len(item) + 1
                            v_stop = v_start + 2
                            han_data["obis_" + key] = (
                                '.'.join([str(elem) for elem in item])
                            )
                            # 16-bit values carry one decimal (scale /10).
                            if signed:
                                han_data[key] = (
                                    signed_decode(
                                        fields=pkt[v_start:v_stop]) / 10
                                )
                            else:
                                han_data[key] = (
                                    (byte_decode(fields=pkt[v_start:v_stop],
                                                 count=2) / 10)
                                )
                            sensor_data[key] = {
                                SENSOR_STATE: han_data[key],
                                SENSOR_ATTR: {
                                    HAN_METER_MANUFACTURER: han_data[
                                        HAN_LIST_VER_ID],
                                    HAN_METER_TYPE: han_data[
                                        HAN_METER_TYPE],
                                    HAN_OBIS_CODE: han_data[
                                        "obis_" + key],
                                    HAN_METER_SERIAL: han_data[
                                        HAN_METER_SERIAL],
                                    SENSOR_UOM:
                                        SENSOR_UNIT.get(key),
                                    SENSOR_ICON: (
                                        "mdi:" +
                                        SENSOR_ICON_MAP.get(key)),
                                },

                            }
                            _LOGGER.debug(
                                "%s, OBIS:%s, Index:%s, Type:%s Double OBIS",
                                key, item, (i, i + len(item)),
                                (pkt[(i + len(item))]))
                            _LOGGER.debug(
                                "Value double OBIS type 16/18: %s, Index:%s",
                                han_data[key], (v_start, v_stop))
        # Single-OBIS fallback for keys with one code (per-phase power).
        for i in range(len(pkt)):
            if (pkt[i:i + len(SENSOR_OBIS_MAP[key])] ==
                    SENSOR_OBIS_MAP[key]):
                # Double-long-unsigned construct
                if pkt[i + len(SENSOR_OBIS_MAP[key])] == 6:
                    v_start = i + len(SENSOR_OBIS_MAP[key]) + 1
                    v_stop = v_start + 4
                    han_data["obis_" + key] = (
                        '.'.join([str(elem) for elem in
                                  SENSOR_OBIS_MAP[key]])
                    )
                    han_data[key] = (
                        byte_decode(fields=pkt[v_start:v_stop])
                    )
                    sensor_data[key] = {
                        SENSOR_STATE: han_data[key],
                        SENSOR_ATTR: {
                            HAN_METER_MANUFACTURER: han_data[
                                HAN_LIST_VER_ID],
                            HAN_METER_TYPE: han_data[
                                HAN_METER_TYPE],
                            HAN_OBIS_CODE: han_data["obis_" + key],
                            HAN_METER_SERIAL: han_data[
                                HAN_METER_SERIAL],
                            SENSOR_UOM: SENSOR_UNIT.get(key),
                            SENSOR_ICON: (
                                "mdi:" + SENSOR_ICON_MAP.get(key)),
                        },

                    }
                    _LOGGER.debug(
                        "%s, OBIS:%s, Index:%s, Type:%s Single OBIS", key,
                        SENSOR_OBIS_MAP[key], (i, i + len(
                            SENSOR_OBIS_MAP[key])),
                        (pkt[(i + len(SENSOR_OBIS_MAP[key]))]))
                    _LOGGER.debug(
                        "Value single OBIS type 6: %s Index:%s",
                        han_data[key], (v_start, v_stop))

    stored.update(sensor_data)
    return stored, han_data
375 |
376 |
def test_valid_data(data, oss):
    """Test the incoming data for validity.

    Args:
        data: Raw frame as a sequence of byte values, or None.
        oss: True when reading through an OSS-brikken, which does not
            forward the frame CRC, so that check is skipped.

    Returns:
        True when the frame is a complete, flag- and CRC-correct Aidon
        packet, otherwise False.
    """
    # pylint: disable=too-many-return-statements
    if data is None:
        return False

    if len(data) > 581 or len(data) < 44:
        _LOGGER.debug("Invalid packet size %s", len(data))
        return False

    packet_size = len(data)
    read_packet_size = ((data[1] & 0x0F) << 8 | data[2]) + 2

    if packet_size != read_packet_size:
        _LOGGER.debug(
            "Packet size does not match read packet size: %s : %s",
            packet_size,
            read_packet_size,
        )
        return False

    # BUGFIX: the original `not data[0] == FLAG and data[-1] == FLAG`
    # parsed as `(not data[0] == FLAG) and (data[-1] == FLAG)`, so a
    # frame with a valid start flag but a corrupt end flag was accepted.
    # Both frame flags must be present.
    if not (data[0] == DEC_FRAME_FLAG and data[-1] == DEC_FRAME_FLAG):
        _LOGGER.debug(
            "%s Received %s bytes of %s data",
            datetime.now().isoformat(),
            len(data),
            False,
        )
        return False

    if data[9:13] != DATA_FLAG:
        _LOGGER.debug("Data does not start with %s: %s", DATA_FLAG,
                      data[9:13])
        return False

    # Header CRC (X.25/CCITT) is stored little-endian at bytes 7-8.
    header_checksum = CrcX25.calc(bytes(data[1:7]))
    read_header_checksum = data[8] << 8 | data[7]

    if header_checksum != read_header_checksum:
        _LOGGER.debug("Invalid header CRC check")
        return False

    if not oss:
        # Frame CRC covers everything between the flags except the CRC
        # itself; stored little-endian just before the closing flag.
        frame_checksum = CrcX25.calc(bytes(data[1:-3]))
        read_frame_checksum = data[-2] << 8 | data[-3]

        if frame_checksum != read_frame_checksum:
            _LOGGER.debug("Invalid frame CRC check")
            return False

    return True
--------------------------------------------------------------------------------
/custom_components/ams/parsers/aidon_se.py:
--------------------------------------------------------------------------------
1 | """
2 | Decode for Swedish HAN Aidon.
3 |
4 | This module will decode the incoming message from Mbus serial.
5 | """
6 | import logging
7 |
8 | from datetime import datetime
9 | from crccheck.crc import CrcX25
10 | from custom_components.ams.parsers import byte_decode, signed_decode
11 | from custom_components.ams.const import (
12 | ACTIVE_ENERGY_SENSORS,
13 | ATTR_DEVICE_CLASS,
14 | ATTR_STATE_CLASS,
15 | DATA_FLAG,
16 | DEC_FRAME_FLAG,
17 | HAN_LIST_VER_ID,
18 | HAN_METER_DATETIME,
19 | HAN_METER_LIST_TYPE,
20 | HAN_METER_MANUFACTURER,
21 | HAN_METER_SERIAL,
22 | HAN_METER_TYPE,
23 | HAN_OBIS_CODE,
24 | HAN_OBIS_DATETIME,
25 | HAN_PACKET_SIZE,
26 | HOURLY_SENSORS,
27 | METER_TYPE,
28 | SENSOR_ATTR,
29 | SENSOR_COMMON_OBIS_MAP,
30 | SENSOR_ICON,
31 | SENSOR_ICON_MAP,
32 | SENSOR_OBIS_MAP,
33 | SENSOR_STATE,
34 | SENSOR_UNIT,
35 | SENSOR_UOM,
36 | WEEKDAY_MAPPING,
37 | )
38 | from homeassistant.components.sensor import (
39 | SensorDeviceClass,
40 | SensorStateClass
41 | )
42 | _LOGGER = logging.getLogger(__name__)
43 |
44 |
45 | # pylint: disable=too-many-branches,too-many-locals,too-many-nested-blocks
46 | # pylint: disable=too-many-statements
def parse_data(stored, data):
    """Parse a Swedish Aidon HAN frame into sensor readings.

    Scans the raw frame (``data``, a sequence of byte values) for known
    OBIS code patterns and decodes the value following each match.

    Args:
        stored: Dict of previously built sensor entries; updated in
            place with this frame's readings and returned.
        data: The raw M-bus frame.

    Returns:
        Tuple of (updated ``stored``, ``han_data``) where ``han_data``
        holds every decoded field of this frame keyed by sensor name.
    """
    sensor_data = {}
    han_data = {}
    pkt = data
    # Frame length lives in the low 12 bits of bytes 1-2; +2 covers the
    # two frame-flag bytes.
    read_packet_size = ((data[1] & 0x0F) << 8 | data[2]) + 2
    han_data[HAN_PACKET_SIZE] = read_packet_size
    list_type = pkt[19]
    han_data[HAN_METER_LIST_TYPE] = list_type
    _LOGGER.debug("list_type is %s", list_type)
    # Swedish Aidon package does not contain meter_type or meter_serial
    han_data[HAN_METER_SERIAL] = "00"
    han_data[HAN_METER_TYPE] = METER_TYPE.get(6484)
    # Swedish Aidon package does not contain obis_list_version. It is
    # defined in document: Aidon RJ45 HAN interface funktionsbeskrivning
    # v1.4A 2020.10.06 as AIDON_H0001.
    han_data[HAN_LIST_VER_ID] = "AIDON_H0001"

    # Get the date and time
    for item in SENSOR_COMMON_OBIS_MAP[HAN_METER_DATETIME]:
        for i in range(len(pkt)):
            if pkt[i:i + len(item)] == item:
                # Date time construct
                if pkt[i + len(item)] == 9:
                    han_data[HAN_OBIS_DATETIME] = (
                        '.'.join([str(elem) for elem in item])
                    )
                    v_start = i + len(item) + 2
                    meter_date_time_year = (
                        byte_decode(fields=pkt[v_start:(v_start + 2)],
                                    count=2))
                    meter_date_time_month = pkt[v_start + 2]
                    meter_date_time_date = pkt[v_start + 3]
                    meter_date_time_day_of_week = (
                        WEEKDAY_MAPPING.get(pkt[v_start + 4]))
                    meter_date_time_hour = str(pkt[v_start + 5]).zfill(2)
                    meter_date_time_minute = str(pkt[v_start + 6]).zfill(2)
                    meter_date_time_seconds = str(pkt[v_start + 7]).zfill(2)
                    meter_date_time_str = (
                        str(meter_date_time_year)
                        + "-"
                        + str(meter_date_time_month)
                        + "-"
                        + str(meter_date_time_date)
                        + "-"
                        + str(meter_date_time_hour)
                        + "-"
                        + str(meter_date_time_minute)
                        + "-"
                        # NOTE(review): minute appended twice here (same
                        # slip as in aidon.py) — confirm the intended
                        # timestamp format.
                        + str(meter_date_time_minute)
                        + "-"
                        + str(meter_date_time_seconds)
                    )
                    han_data[HAN_METER_DATETIME] = meter_date_time_str
                    _LOGGER.debug("%s, OBIS:%s, Index:%s, Type:%s Double OBIS",
                                  HAN_METER_DATETIME, item,
                                  (i, i + len(item)), (pkt[(i + len(item))]))
                    _LOGGER.debug("%s, %s, %s, %s, %s, %s, %s, %s, "
                                  "%s, %s",
                                  HAN_METER_DATETIME,
                                  item, meter_date_time_year,
                                  meter_date_time_month,
                                  meter_date_time_date,
                                  meter_date_time_day_of_week,
                                  meter_date_time_hour,
                                  meter_date_time_minute,
                                  meter_date_time_seconds,
                                  meter_date_time_str)

    # Decode every sensor value present in the frame.
    for key in SENSOR_OBIS_MAP:
        # Keys with two alternative OBIS codes are matched per alternative.
        if len(SENSOR_OBIS_MAP[key]) == 2:
            for item in SENSOR_OBIS_MAP[key]:
                for i in range(len(pkt)):
                    if pkt[i:i + len(item)] == item:
                        # Double-long-unsigned dict construct
                        if pkt[i + len(item)] == 6:
                            v_start = i + len(item) + 1
                            v_stop = v_start + 4
                            han_data["obis_" + key] = (
                                '.'.join([str(elem) for elem in item])
                            )
                            measure = byte_decode(fields=pkt[v_start:v_stop])
                            if key in HOURLY_SENSORS:
                                # Raw register appears to be Wh (→ kWh);
                                # confirm against the Aidon SE spec.
                                han_data[key] = measure / 1000
                            else:
                                han_data[key] = measure
                            sensor_data[key] = {
                                SENSOR_STATE: han_data[key],
                                SENSOR_ATTR: {
                                    HAN_METER_MANUFACTURER: han_data[
                                        HAN_LIST_VER_ID],
                                    HAN_METER_TYPE: han_data[
                                        HAN_METER_TYPE],
                                    HAN_OBIS_CODE: han_data[
                                        "obis_" + key],
                                    HAN_METER_SERIAL: han_data[
                                        HAN_METER_SERIAL],
                                    SENSOR_UOM:
                                        SENSOR_UNIT.get(key),
                                    SENSOR_ICON: (
                                        "mdi:" +
                                        SENSOR_ICON_MAP.get(key)),
                                },
                            }
                            if key in HOURLY_SENSORS:
                                sensor_data[key][SENSOR_ATTR][
                                    HAN_METER_DATETIME] = han_data[
                                    HAN_METER_DATETIME]
                                sensor_data[key][SENSOR_ATTR][
                                    ATTR_DEVICE_CLASS] = (
                                    SensorDeviceClass.ENERGY)
                                if key in ACTIVE_ENERGY_SENSORS:
                                    sensor_data[key][SENSOR_ATTR][
                                        ATTR_STATE_CLASS] = (
                                        SensorStateClass.TOTAL_INCREASING)
                            _LOGGER.debug(
                                "%s, OBIS:%s, Index:%s, Type:%s Double OBIS",
                                key, item, (i, i + len(item)),
                                (pkt[(i + len(item))])
                            )
                            _LOGGER.debug(
                                "Value double OBIS type 6: %s, Index:%s",
                                han_data[key], (v_start, v_stop)
                            )
                        # Long-signed & Long-unsigned dict construct
                        elif (pkt[i + len(item)] == 16 or
                              pkt[i + len(item)] == 18):
                            signed = None
                            if pkt[i + len(item)] == 16:
                                signed = True
                            v_start = i + len(item) + 1
                            v_stop = v_start + 2
                            han_data["obis_" + key] = (
                                '.'.join([str(elem) for elem in item])
                            )
                            # 16-bit values carry one decimal (scale /10).
                            if signed:
                                han_data[key] = (
                                    signed_decode(
                                        fields=pkt[v_start:v_stop]) / 10
                                )
                            else:
                                han_data[key] = (
                                    (byte_decode(fields=pkt[v_start:v_stop],
                                                 count=2) / 10)
                                )
                            sensor_data[key] = {
                                SENSOR_STATE: han_data[key],
                                SENSOR_ATTR: {
                                    HAN_METER_MANUFACTURER: han_data[
                                        HAN_LIST_VER_ID],
                                    HAN_METER_TYPE: han_data[
                                        HAN_METER_TYPE],
                                    HAN_OBIS_CODE: han_data[
                                        "obis_" + key],
                                    HAN_METER_SERIAL: han_data[
                                        HAN_METER_SERIAL],
                                    SENSOR_UOM:
                                        SENSOR_UNIT.get(key),
                                    SENSOR_ICON: (
                                        "mdi:" +
                                        SENSOR_ICON_MAP.get(key)),
                                },

                            }
                            _LOGGER.debug(
                                "%s, OBIS:%s, Index:%s, Type:%s Double OBIS",
                                key, item, (i, i + len(item)),
                                (pkt[(i + len(item))]))
                            _LOGGER.debug(
                                "Value double OBIS type 16/18: %s, Index:%s",
                                han_data[key], (v_start, v_stop))
                        # Visible string construct
                        elif pkt[i + len(item)] == 10:
                            v_start = i + len(item) + 2
                            v_length = pkt[v_start - 1]
                            v_stop = v_start + v_length
                            # NOTE(review): this branch only logs and
                            # ``han_data[key]`` may not exist yet here —
                            # a KeyError is possible if no earlier match
                            # set it; confirm whether this branch ever
                            # fires on SE frames.
                            _LOGGER.debug(
                                "%s, OBIS:%s, Index:%s, Type:%s Double OBIS",
                                key, item, (i, i + len(item)),
                                (pkt[(i + len(item))]))
                            _LOGGER.debug(
                                "Value double OBIS type 10: %s, Index:%s",
                                han_data[key], (v_start, v_stop))

        # Single-OBIS fallback for keys with one code (per-phase power).
        for i in range(len(pkt)):
            if (pkt[i:i + len(SENSOR_OBIS_MAP[key])] ==
                    SENSOR_OBIS_MAP[key]):
                # Double-long-unsigned construct
                if pkt[i + len(SENSOR_OBIS_MAP[key])] == 6:
                    v_start = i + len(SENSOR_OBIS_MAP[key]) + 1
                    v_stop = v_start + 4
                    han_data["obis_" + key] = (
                        '.'.join([str(elem) for elem in
                                  SENSOR_OBIS_MAP[key]])
                    )
                    han_data[key] = byte_decode(fields=pkt[v_start:v_stop])
                    sensor_data[key] = {
                        SENSOR_STATE: han_data[key],
                        SENSOR_ATTR: {
                            HAN_METER_MANUFACTURER: han_data[
                                HAN_LIST_VER_ID],
                            HAN_METER_TYPE: han_data[
                                HAN_METER_TYPE],
                            HAN_OBIS_CODE: han_data["obis_" + key],
                            HAN_METER_SERIAL: han_data[
                                HAN_METER_SERIAL],
                            SENSOR_UOM: SENSOR_UNIT.get(key),
                            SENSOR_ICON: (
                                "mdi:" + SENSOR_ICON_MAP.get(key)),
                        },

                    }
                    _LOGGER.debug(
                        "%s, OBIS:%s, Index:%s, Type:%s Single OBIS", key,
                        SENSOR_OBIS_MAP[key], (i, i + len(
                            SENSOR_OBIS_MAP[key])),
                        (pkt[(i + len(SENSOR_OBIS_MAP[key]))]))
                    _LOGGER.debug(
                        "Value single OBIS type 6: %s Index:%s",
                        han_data[key], (v_start, v_stop))

    stored.update(sensor_data)
    return stored, han_data
270 |
271 |
def test_valid_data(data, oss):
    """Validate an incoming HAN packet (size, frame flags and CRCs).

    Args:
        data: Raw packet bytes (indexable sequence of ints), or None.
        oss: Unused here; kept for parser-interface compatibility.

    Returns:
        True when the packet passes all structural checks, else False.
    """
    # pylint: disable=too-many-return-statements
    if data is None:
        _LOGGER.debug("Packet is None!")
        return False

    if len(data) > 581 or len(data) < 44:
        _LOGGER.debug("Invalid packet size %s", len(data))
        return False

    packet_size = len(data)
    # Declared length: low 12 bits of bytes 1-2, plus the two flag bytes.
    read_packet_size = ((data[1] & 0x0F) << 8 | data[2]) + 2

    if packet_size != read_packet_size:
        _LOGGER.debug(
            "Packet size does not match read packet size: %s : %s",
            packet_size,
            read_packet_size,
        )
        return False

    # Both the first AND the last byte must be the frame flag.
    # (Previously written without parentheses, which parsed as
    # "(not start_ok) and end_ok" and let a corrupt end flag through;
    # the end flag is outside the frame CRC, so nothing else caught it.)
    if not (data[0] == DEC_FRAME_FLAG and data[-1] == DEC_FRAME_FLAG):
        _LOGGER.debug(
            "%s Received %s bytes of %s data",
            datetime.now().isoformat(),
            len(data),
            False,
        )
        return False

    if data[9:13] != DATA_FLAG:
        _LOGGER.debug("Data does not start with %s: %s", DATA_FLAG,
                      data[9:13])
        return False

    # Header CRC: little-endian at bytes 7-8, computed over bytes 1-6.
    header_checksum = CrcX25.calc(bytes(data[1:7]))
    read_header_checksum = data[8] << 8 | data[7]

    if header_checksum != read_header_checksum:
        _LOGGER.debug("Invalid header CRC check")
        return False

    # Frame CRC: little-endian just before the end flag, computed over
    # everything between the flags except the CRC itself.
    frame_checksum = CrcX25.calc(bytes(data[1:-3]))
    read_frame_checksum = data[-2] << 8 | data[-3]

    if frame_checksum != read_frame_checksum:
        _LOGGER.debug("Invalid frame CRC check")
        return False

    return True
323 |
--------------------------------------------------------------------------------
/custom_components/ams/parsers/kaifa_se.py:
--------------------------------------------------------------------------------
1 | """
2 | Decode for Swedish HAN Kaifa.
3 | This module will decode the incoming message from Mbus serial.
4 | """
5 | import logging
6 |
7 | from datetime import datetime
8 | from crccheck.crc import CrcX25
9 | from custom_components.ams.parsers import byte_decode, field_type
10 | from custom_components.ams.const import (
11 | ACTIVE_ENERGY_SENSORS,
12 | ATTR_DEVICE_CLASS,
13 | ATTR_STATE_CLASS,
14 | CURRENT_SENSORS,
15 | DATA_FLAG,
16 | DEC_FRAME_FLAG,
17 | HAN_LIST_VER_ID,
18 | HAN_METER_DATETIME,
19 | HAN_METER_LIST_TYPE,
20 | HAN_METER_MANUFACTURER,
21 | HAN_METER_SERIAL,
22 | HAN_METER_TYPE,
23 | HAN_OBIS_CODE,
24 | HAN_OBIS_DATETIME,
25 | HAN_PACKET_SIZE,
26 | VOLTAGE_SENSORS,
27 | HOURLY_SENSORS,
28 | METER_TYPE,
29 | SENSOR_ATTR,
30 | SENSOR_COMMON_OBIS_MAP,
31 | SENSOR_ICON,
32 | SENSOR_ICON_MAP,
33 | SENSOR_OBIS_MAP,
34 | SENSOR_STATE,
35 | SENSOR_UNIT,
36 | SENSOR_UOM,
37 | UNKNOWN_METER,
38 | WEEKDAY_MAPPING,
39 | )
40 | from homeassistant.components.sensor import (
41 | SensorDeviceClass,
42 | SensorStateClass
43 | )
44 | _LOGGER = logging.getLogger(__name__)
45 |
46 |
47 | # pylint: disable=too-many-branches,too-many-locals,too-many-nested-blocks
48 | # pylint: disable=too-many-statements
def parse_data(stored, data):
    """Parse an incoming Swedish Kaifa HAN/Mbus packet into dicts.

    Args:
        stored: Accumulated sensor dict from earlier packets; updated in
            place with this packet's readings and returned.
        data: Raw packet bytes (indexable sequence of ints).

    Returns:
        Tuple (stored, han_data): ``stored`` with this packet's sensors
        merged in, and ``han_data`` with decoded values/metadata for
        this packet only.
    """
    sensor_data = {}
    han_data = {}
    pkt = data
    # Declared length: low 12 bits of bytes 1-2, plus the two flag bytes.
    read_packet_size = ((data[1] & 0x0F) << 8 | data[2]) + 2
    han_data[HAN_PACKET_SIZE] = read_packet_size
    list_type = pkt[19]
    han_data[HAN_METER_LIST_TYPE] = list_type
    _LOGGER.debug("list_type is %s", list_type)
    # Fixed-position metadata fields. (A redundant placeholder serial of
    # "00" used to be set here; it was unconditionally overwritten below,
    # so it has been removed.)
    han_data[HAN_METER_TYPE] = (
        METER_TYPE.get(field_type(fields=pkt[73:80], enc=chr), UNKNOWN_METER)
    )
    han_data[HAN_METER_SERIAL] = field_type(fields=pkt[47:63], enc=chr)
    han_data[HAN_LIST_VER_ID] = field_type(fields=pkt[30:37], enc=chr)

    # Get the date and time
    for item in SENSOR_COMMON_OBIS_MAP[HAN_METER_DATETIME]:
        for i in range(len(pkt)):
            if pkt[i:i + len(item)] == item:
                # Date time construct (type tag 9)
                if pkt[i + len(item)] == 9:
                    han_data[HAN_OBIS_DATETIME] = (
                        '.'.join([str(elem) for elem in item])
                    )
                    v_start = i + len(item) + 2
                    meter_date_time_year = (
                        byte_decode(fields=pkt[v_start:(v_start + 2)],
                                    count=2))
                    meter_date_time_month = pkt[v_start + 2]
                    meter_date_time_date = pkt[v_start + 3]
                    meter_date_time_day_of_week = (
                        WEEKDAY_MAPPING.get(pkt[v_start + 4]))
                    meter_date_time_hour = str(pkt[v_start + 5]).zfill(2)
                    meter_date_time_minute = str(pkt[v_start + 6]).zfill(2)
                    meter_date_time_seconds = str(pkt[v_start + 7]).zfill(2)
                    # NOTE(review): the minute field is joined twice and
                    # day-of-week is never included. Kept as-is because
                    # downstream consumers/tests may rely on this exact
                    # string layout — confirm before changing.
                    meter_date_time_str = (
                        str(meter_date_time_year)
                        + "-"
                        + str(meter_date_time_month)
                        + "-"
                        + str(meter_date_time_date)
                        + "-"
                        + str(meter_date_time_hour)
                        + "-"
                        + str(meter_date_time_minute)
                        + "-"
                        + str(meter_date_time_minute)
                        + "-"
                        + str(meter_date_time_seconds)
                    )
                    han_data[HAN_METER_DATETIME] = meter_date_time_str
                    _LOGGER.debug("%s, OBIS:%s, Index:%s, Type:%s Double OBIS",
                                  HAN_METER_DATETIME, item,
                                  (i, i + len(item)), (pkt[(i + len(item))]))
                    _LOGGER.debug("%s, %s, %s, %s, %s, %s, %s, %s, "
                                  "%s, %s",
                                  HAN_METER_DATETIME,
                                  item, meter_date_time_year,
                                  meter_date_time_month,
                                  meter_date_time_date,
                                  meter_date_time_day_of_week,
                                  meter_date_time_hour,
                                  meter_date_time_minute,
                                  meter_date_time_seconds,
                                  meter_date_time_str)

    for key in SENSOR_OBIS_MAP:
        # Keys mapped to two candidate OBIS codes are searched per item.
        if len(SENSOR_OBIS_MAP[key]) == 2:
            for item in SENSOR_OBIS_MAP[key]:
                for i in range(len(pkt)):
                    if pkt[i:i + len(item)] == item:
                        # Double-long-unsigned dict construct (type 6)
                        if pkt[i + len(item)] == 6:
                            v_start = i + len(item) + 1
                            v_stop = v_start + 4
                            han_data["obis_" + key] = (
                                '.'.join([str(elem) for elem in item])
                            )
                            measure = byte_decode(fields=pkt[v_start:v_stop])
                            # Scale the raw value to the sensor's unit.
                            if key in HOURLY_SENSORS:
                                han_data[key] = measure / 1000
                            elif key in CURRENT_SENSORS:
                                han_data[key] = measure / 1000
                            elif key in VOLTAGE_SENSORS:
                                han_data[key] = measure / 10
                            else:
                                han_data[key] = measure
                            sensor_data[key] = {
                                SENSOR_STATE: han_data[key],
                                SENSOR_ATTR: {
                                    HAN_METER_MANUFACTURER: han_data[
                                        HAN_LIST_VER_ID],
                                    HAN_METER_TYPE: han_data[
                                        HAN_METER_TYPE],
                                    HAN_OBIS_CODE: han_data[
                                        "obis_" + key],
                                    HAN_METER_SERIAL: han_data[
                                        HAN_METER_SERIAL],
                                    SENSOR_UOM:
                                        SENSOR_UNIT.get(key),
                                    SENSOR_ICON: (
                                        "mdi:" +
                                        SENSOR_ICON_MAP.get(key)),
                                },
                            }
                            # Hourly energy sensors carry the meter's own
                            # timestamp and HA energy metadata.
                            if key in HOURLY_SENSORS:
                                sensor_data[key][SENSOR_ATTR][
                                    HAN_METER_DATETIME] = han_data[
                                    HAN_METER_DATETIME]
                                sensor_data[key][SENSOR_ATTR][
                                    ATTR_DEVICE_CLASS] = (
                                    SensorDeviceClass.ENERGY)
                                if key in ACTIVE_ENERGY_SENSORS:
                                    sensor_data[key][SENSOR_ATTR][
                                        ATTR_STATE_CLASS] = (
                                        SensorStateClass.TOTAL_INCREASING)
                            _LOGGER.debug(
                                "%s, OBIS:%s, Index:%s, Type:%s Double OBIS",
                                key, item, (i, i + len(item)),
                                (pkt[(i + len(item))])
                            )
                            _LOGGER.debug(
                                "Value double OBIS type 6: %s, Index:%s",
                                han_data[key], (v_start, v_stop)
                            )
                        # Long-signed & Long-unsigned dict construct
                        # NOTE(review): unlike the sibling parser, type 16
                        # (long-signed) is decoded unsigned here — confirm
                        # against real meter data.
                        elif (pkt[i + len(item)] == 16 or
                                pkt[i + len(item)] == 18):
                            v_start = i + len(item) + 1
                            v_stop = v_start + 2
                            han_data["obis_" + key] = (
                                '.'.join([str(elem) for elem in item])
                            )
                            han_data[key] = (
                                (byte_decode(fields=pkt[v_start:v_stop],
                                             count=2) / 10)
                            )
                            sensor_data[key] = {
                                SENSOR_STATE: han_data[key],
                                SENSOR_ATTR: {
                                    HAN_METER_MANUFACTURER: han_data[
                                        HAN_LIST_VER_ID],
                                    HAN_METER_TYPE: han_data[
                                        HAN_METER_TYPE],
                                    HAN_OBIS_CODE: han_data[
                                        "obis_" + key],
                                    HAN_METER_SERIAL: han_data[
                                        HAN_METER_SERIAL],
                                    SENSOR_UOM:
                                        SENSOR_UNIT.get(key),
                                    SENSOR_ICON: (
                                        "mdi:" +
                                        SENSOR_ICON_MAP.get(key)),
                                },
                            }
                            _LOGGER.debug(
                                "%s, OBIS:%s, Index:%s, Type:%s Double OBIS",
                                key, item, (i, i + len(item)),
                                (pkt[(i + len(item))]))
                            _LOGGER.debug(
                                "Value double OBIS type 16/18: %s, Index:%s",
                                han_data[key], (v_start, v_stop))
                        # Visible string construct (type 10): logged only,
                        # no value is stored for these keys.
                        elif pkt[i + len(item)] == 10:
                            v_start = i + len(item) + 2
                            v_length = pkt[v_start - 1]
                            v_stop = v_start + v_length
                            _LOGGER.debug(
                                "%s, OBIS:%s, Index:%s, Type:%s Double OBIS",
                                key, item, (i, i + len(item)),
                                (pkt[(i + len(item))]))
                            _LOGGER.debug(
                                "Value double OBIS type 10: %s, Index:%s",
                                han_data[key], (v_start, v_stop))

        # Single-OBIS keys: match the whole mapped code directly.
        for i in range(len(pkt)):
            if (pkt[i:i + len(SENSOR_OBIS_MAP[key])] ==
                    SENSOR_OBIS_MAP[key]):
                # Double-long-unsigned construct
                if pkt[i + len(SENSOR_OBIS_MAP[key])] == 6:
                    v_start = i + len(SENSOR_OBIS_MAP[key]) + 1
                    v_stop = v_start + 4
                    han_data["obis_" + key] = (
                        '.'.join([str(elem) for elem in
                                  SENSOR_OBIS_MAP[key]])
                    )
                    han_data[key] = byte_decode(fields=pkt[v_start:v_stop])
                    sensor_data[key] = {
                        SENSOR_STATE: han_data[key],
                        SENSOR_ATTR: {
                            HAN_METER_MANUFACTURER: han_data[
                                HAN_LIST_VER_ID],
                            HAN_METER_TYPE: han_data[
                                HAN_METER_TYPE],
                            HAN_OBIS_CODE: han_data["obis_" + key],
                            HAN_METER_SERIAL: han_data[
                                HAN_METER_SERIAL],
                            SENSOR_UOM: SENSOR_UNIT.get(key),
                            SENSOR_ICON: (
                                "mdi:" + SENSOR_ICON_MAP.get(key)),
                        },
                    }
                    _LOGGER.debug(
                        "%s, OBIS:%s, Index:%s, Type:%s Single OBIS", key,
                        SENSOR_OBIS_MAP[key], (i, i + len(
                            SENSOR_OBIS_MAP[key])),
                        (pkt[(i + len(SENSOR_OBIS_MAP[key]))]))
                    _LOGGER.debug(
                        "Value single OBIS type 6: %s Index:%s",
                        han_data[key], (v_start, v_stop))

    stored.update(sensor_data)
    return stored, han_data
266 |
267 |
def test_valid_data(data, oss):
    """Validate an incoming HAN packet (size, frame flags and CRCs).

    Args:
        data: Raw packet bytes (indexable sequence of ints), or None.
        oss: Unused here; kept for parser-interface compatibility.

    Returns:
        True when the packet passes all structural checks, else False.
    """
    # pylint: disable=too-many-return-statements
    if data is None:
        # Log for parity with the sibling parsers.
        _LOGGER.debug("Packet is None!")
        return False

    if len(data) > 581 or len(data) < 44:
        _LOGGER.debug("Invalid packet size %s", len(data))
        return False

    packet_size = len(data)
    # Declared length: low 12 bits of bytes 1-2, plus the two flag bytes.
    read_packet_size = ((data[1] & 0x0F) << 8 | data[2]) + 2

    if packet_size != read_packet_size:
        _LOGGER.debug(
            "Packet size does not match read packet size: %s : %s",
            packet_size,
            read_packet_size,
        )
        return False

    # Both the first AND the last byte must be the frame flag.
    # (Previously written without parentheses, which parsed as
    # "(not start_ok) and end_ok" and let a corrupt end flag through;
    # the end flag is outside the frame CRC, so nothing else caught it.)
    if not (data[0] == DEC_FRAME_FLAG and data[-1] == DEC_FRAME_FLAG):
        _LOGGER.debug(
            "%s Received %s bytes of %s data",
            datetime.now().isoformat(),
            len(data),
            False,
        )
        return False

    if data[9:13] != DATA_FLAG:
        _LOGGER.debug("Data does not start with %s: %s", DATA_FLAG,
                      data[9:13])
        return False

    # Header CRC: little-endian at bytes 7-8, computed over bytes 1-6.
    header_checksum = CrcX25.calc(bytes(data[1:7]))
    read_header_checksum = data[8] << 8 | data[7]

    if header_checksum != read_header_checksum:
        _LOGGER.debug("Invalid header CRC check")
        return False

    # Frame CRC: little-endian just before the end flag, computed over
    # everything between the flags except the CRC itself.
    frame_checksum = CrcX25.calc(bytes(data[1:-3]))
    read_frame_checksum = data[-2] << 8 | data[-3]

    if frame_checksum != read_frame_checksum:
        _LOGGER.debug("Invalid frame CRC check")
        return False

    return True
318 |
--------------------------------------------------------------------------------
/custom_components/ams/parsers/kamstrup.py:
--------------------------------------------------------------------------------
1 | """
2 | Decode for HAN Kamstrup.
3 |
4 | This module will decode the incoming message from Mbus serial.
5 | """
6 | import logging
7 | from datetime import datetime
8 | from crccheck.crc import CrcX25
9 | from custom_components.ams.const import (
10 | ACTIVE_ENERGY_SENSORS,
11 | ATTR_DEVICE_CLASS,
12 | ATTR_STATE_CLASS,
13 | DATA_FLAG,
14 | DEC_FRAME_FLAG,
15 | HAN_ACTIVE_ENERGY_EXPORT,
16 | HAN_ACTIVE_ENERGY_IMPORT,
17 | HAN_CURRENT_L1,
18 | HAN_CURRENT_L2,
19 | HAN_CURRENT_L3,
20 | HAN_LIST_VER_ID,
21 | HAN_METER_DATETIME,
22 | HAN_METER_LIST_TYPE,
23 | HAN_METER_MANUFACTURER,
24 | HAN_METER_SERIAL,
25 | HAN_METER_TYPE,
26 | HAN_OBIS_CODE,
27 | HAN_OBIS_DATETIME,
28 | HAN_PACKET_SIZE,
29 | HAN_REACTIVE_ENERGY_EXPORT,
30 | HAN_REACTIVE_ENERGY_IMPORT,
31 | HOURLY_SENSORS,
32 | METER_TYPE,
33 | SENSOR_ATTR,
34 | SENSOR_COMMON_OBIS_MAP,
35 | SENSOR_ICON,
36 | SENSOR_ICON_MAP,
37 | SENSOR_OBIS_MAP,
38 | SENSOR_STATE,
39 | SENSOR_UNIT,
40 | SENSOR_UOM,
41 | UNKNOWN_METER,
42 | WEEKDAY_MAPPING,
43 | )
44 | from custom_components.ams.parsers import byte_decode, field_type
45 | from homeassistant.components.sensor import (
46 | SensorDeviceClass,
47 | SensorStateClass
48 | )
49 | _LOGGER = logging.getLogger(__name__)
50 |
51 | LIST_TYPE_SHORT_1PH = 17
52 | LIST_TYPE_LONG_1PH = 27
53 | LIST_TYPE_SHORT_3PH = 25
54 | LIST_TYPE_LONG_3PH = 35
55 |
56 |
57 | # pylint: disable=too-many-branches, too-many-locals, too-many-statements
58 | # pylint: disable=too-many-nested-blocks
def parse_data(stored, data):
    """Parse an incoming Kamstrup HAN/Mbus packet into dicts.

    Args:
        stored: Accumulated sensor dict from earlier packets; updated in
            place with this packet's readings and returned.
        data: Raw packet bytes (indexable sequence of ints).

    Returns:
        Tuple (stored, han_data): ``stored`` with this packet's sensors
        merged in, and ``han_data`` with decoded values/metadata for
        this packet only.
    """
    sensor_data = {}
    han_data = {}
    pkt = data
    # Declared length: low 12 bits of bytes 1-2, plus the two flag bytes.
    read_packet_size = ((data[1] & 0x0F) << 8 | data[2]) + 2
    han_data[HAN_PACKET_SIZE] = read_packet_size
    list_type = pkt[30]
    han_data[HAN_METER_LIST_TYPE] = list_type
    _LOGGER.debug("list_type is %s", list_type)

    # Ensure basic data before parsing package
    # Kamstrup does not include OBIS in their package for the list version
    han_data[HAN_LIST_VER_ID] = field_type(fields=pkt[33:47], enc=chr)
    for key in SENSOR_COMMON_OBIS_MAP:
        # Keys mapped to two candidate OBIS codes are searched per item.
        if len(SENSOR_COMMON_OBIS_MAP[key]) == 2:
            for item in SENSOR_COMMON_OBIS_MAP[key]:
                for i in range(len(pkt)):
                    if pkt[i:i + len(item)] == item:
                        # Date time construct (type tag 9)
                        if pkt[i + len(item)] == 9:
                            han_data[HAN_OBIS_DATETIME] = (
                                '.'.join([str(elem) for elem in item])
                            )
                            v_start = i + len(item) + 2
                            meter_date_time_year = (
                                byte_decode(fields=pkt[v_start:(v_start + 2)],
                                            count=2))
                            meter_date_time_month = pkt[v_start + 2]
                            meter_date_time_date = pkt[v_start + 3]
                            meter_date_time_day_of_week = (
                                WEEKDAY_MAPPING.get(pkt[v_start + 4]))
                            meter_date_time_hour = (
                                str(pkt[v_start + 5]).zfill(2)
                            )
                            meter_date_time_minute = (
                                str(pkt[v_start + 6]).zfill(2)
                            )
                            meter_date_time_seconds = (
                                str(pkt[v_start + 7]).zfill(2)
                            )
                            # NOTE(review): the minute field is joined twice
                            # and day-of-week is never included. Kept as-is
                            # because downstream consumers/tests may rely on
                            # this exact string layout — confirm first.
                            meter_date_time_str = (
                                str(meter_date_time_year)
                                + "-"
                                + str(meter_date_time_month)
                                + "-"
                                + str(meter_date_time_date)
                                + "-"
                                + str(meter_date_time_hour)
                                + "-"
                                + str(meter_date_time_minute)
                                + "-"
                                + str(meter_date_time_minute)
                                + "-"
                                + str(meter_date_time_seconds)
                            )
                            han_data[
                                HAN_METER_DATETIME] = meter_date_time_str
                            _LOGGER.debug(
                                "%s, OBIS:%s, Index:%s, Type:%s Double OBIS",
                                HAN_METER_DATETIME, item,
                                (i, i + len(item)), (pkt[(i + len(item))]))
                            _LOGGER.debug("%s, %s, %s, %s, %s, %s, %s, %s, "
                                          "%s, %s",
                                          HAN_METER_DATETIME,
                                          item, meter_date_time_year,
                                          meter_date_time_month,
                                          meter_date_time_date,
                                          meter_date_time_day_of_week,
                                          meter_date_time_hour,
                                          meter_date_time_minute,
                                          meter_date_time_seconds,
                                          meter_date_time_str)
                        # Visible string construct (type 10)
                        elif pkt[i + len(item)] == 10:
                            v_start = i + len(item) + 2
                            v_length = pkt[v_start - 1]
                            v_stop = v_start + v_length
                            han_data["obis_" + key] = (
                                '.'.join([str(elem) for elem in item])
                            )
                            if key == HAN_METER_TYPE:
                                # Meter type arrives as digit characters
                                # that map to a METER_TYPE table key.
                                han_data[key] = (
                                    METER_TYPE.get(field_type(fields=pkt[
                                        v_start:v_start + 7], enc=chr,
                                        dec=int),
                                        UNKNOWN_METER)
                                )

                            else:
                                han_data[key] = (
                                    field_type(fields=pkt[v_start:v_stop],
                                               enc=chr)
                                )
                            _LOGGER.debug(
                                "%s, OBIS:%s, Index:%s, Type:%s Double OBIS",
                                key, item, (i, i + len(item)),
                                (pkt[(i + len(item))]))
                            _LOGGER.debug(
                                "Value double OBIS type 10: %s, Index:%s",
                                han_data[key], (v_start, v_stop))
        # Single-OBIS keys: match the whole mapped code directly.
        for i in range(len(pkt)):
            if (pkt[i:i + len(SENSOR_COMMON_OBIS_MAP[key])] ==
                    SENSOR_COMMON_OBIS_MAP[key]):
                # Visible string construct
                if pkt[i + len(SENSOR_COMMON_OBIS_MAP[key])] == 10:
                    # (A leftover debug print() to stdout was removed here;
                    # the _LOGGER.debug calls below cover the same info.)
                    v_start = i + len(SENSOR_COMMON_OBIS_MAP[key]) + 2
                    v_length = pkt[v_start - 1]
                    v_stop = v_start + v_length
                    han_data["obis_" + key] = (
                        '.'.join([str(elem) for elem in
                                  SENSOR_COMMON_OBIS_MAP[key]])
                    )
                    han_data[key] = (
                        field_type(fields=pkt[v_start:v_stop], enc=chr)
                    )
                    _LOGGER.debug(
                        "%s, OBIS:%s, Index:%s, Type:%s Single OBIS",
                        key, SENSOR_COMMON_OBIS_MAP[key],
                        (i, i + len(SENSOR_COMMON_OBIS_MAP[key])),
                        (pkt[(i + len(SENSOR_COMMON_OBIS_MAP[key]))]))
                    _LOGGER.debug(
                        "Value Single OBIS type 10: %s, Index:%s",
                        han_data[key], (v_start, v_stop))
    for key in SENSOR_OBIS_MAP:
        if len(SENSOR_OBIS_MAP[key]) == 2:
            for item in SENSOR_OBIS_MAP[key]:
                for i in range(len(pkt)):
                    if pkt[i:i + len(item)] == item:
                        # Double-long-unsigned dict construct (type 6)
                        if pkt[i + len(item)] == 6:
                            v_start = i + len(item) + 1
                            v_stop = v_start + 4
                            han_data["obis_" + key] = (
                                '.'.join([str(elem) for elem in item])
                            )
                            # Current and energy values are scaled by 100.
                            if (key in (HAN_CURRENT_L1,
                                        HAN_CURRENT_L2,
                                        HAN_CURRENT_L3,
                                        HAN_ACTIVE_ENERGY_IMPORT,
                                        HAN_ACTIVE_ENERGY_EXPORT,
                                        HAN_REACTIVE_ENERGY_IMPORT,
                                        HAN_REACTIVE_ENERGY_EXPORT)):
                                han_data[key] = (
                                    byte_decode(
                                        fields=pkt[v_start:v_stop]) / 100
                                )
                            else:
                                han_data[key] = (
                                    byte_decode(fields=pkt[v_start:v_stop])
                                )
                            sensor_data[key] = {
                                SENSOR_STATE: han_data[key],
                                SENSOR_ATTR: {
                                    HAN_METER_MANUFACTURER: han_data[
                                        HAN_LIST_VER_ID],
                                    HAN_METER_TYPE: han_data[
                                        HAN_METER_TYPE],
                                    HAN_OBIS_CODE: han_data[
                                        "obis_" + key],
                                    HAN_METER_SERIAL: han_data[
                                        HAN_METER_SERIAL],
                                    SENSOR_UOM:
                                        SENSOR_UNIT.get(key),
                                    SENSOR_ICON: (
                                        "mdi:" +
                                        SENSOR_ICON_MAP.get(key)),
                                },
                            }
                            # Hourly energy sensors carry the meter's own
                            # timestamp and HA energy metadata.
                            if key in HOURLY_SENSORS:
                                sensor_data[key][SENSOR_ATTR][
                                    HAN_METER_DATETIME] = han_data[
                                    HAN_METER_DATETIME]
                                sensor_data[key][SENSOR_ATTR][
                                    ATTR_DEVICE_CLASS] = (
                                    SensorDeviceClass.ENERGY)
                                if key in ACTIVE_ENERGY_SENSORS:
                                    sensor_data[key][SENSOR_ATTR][
                                        ATTR_STATE_CLASS] = (
                                        SensorStateClass.TOTAL_INCREASING)
                            _LOGGER.debug(
                                "%s, OBIS:%s, Index:%s, Type:%s Double OBIS",
                                key, item, (i, i + len(item)),
                                (pkt[(i + len(item))])
                            )
                            _LOGGER.debug(
                                "Value double OBIS type 6: %s, Index:%s",
                                han_data[key], (v_start, v_stop)
                            )
                        # Long-signed & Long-unsigned dict construct
                        elif (pkt[i + len(item)] == 16 or
                                pkt[i + len(item)] == 18):
                            v_start = i + len(item) + 1
                            v_stop = v_start + 2
                            han_data["obis_" + key] = (
                                '.'.join([str(elem) for elem in item])
                            )
                            han_data[key] = (
                                (byte_decode(fields=pkt[v_start:v_stop],
                                             count=2))
                            )
                            sensor_data[key] = {
                                SENSOR_STATE: han_data[key],
                                SENSOR_ATTR: {
                                    HAN_METER_MANUFACTURER: han_data[
                                        HAN_LIST_VER_ID],
                                    HAN_METER_TYPE: han_data[
                                        HAN_METER_TYPE],
                                    HAN_OBIS_CODE: han_data[
                                        "obis_" + key],
                                    HAN_METER_SERIAL: han_data[
                                        HAN_METER_SERIAL],
                                    SENSOR_UOM:
                                        SENSOR_UNIT.get(key),
                                    SENSOR_ICON: (
                                        "mdi:" +
                                        SENSOR_ICON_MAP.get(key)),
                                },
                            }
                            _LOGGER.debug(
                                "%s, OBIS:%s, Index:%s, Type:%s Double OBIS",
                                key, item, (i, i + len(item)),
                                (pkt[(i + len(item))]))
                            _LOGGER.debug(
                                "Value double OBIS type 16/18: %s, Index:%s",
                                han_data[key], (v_start, v_stop))
    stored.update(sensor_data)
    return stored, han_data
289 |
290 |
def test_valid_data(data, oss):
    """Validate an incoming Kamstrup packet (size, flags and CRCs).

    Args:
        data: Raw packet bytes (indexable sequence of ints), or None.
        oss: Unused here; kept for parser-interface compatibility.

    Returns:
        True when the packet passes all structural checks, else False.
    """
    # pylint: disable=too-many-return-statements
    if data is None:
        # Log for parity with the sibling parsers.
        _LOGGER.debug("Packet is None!")
        return False

    if len(data) > 302 or len(data) < 180:
        _LOGGER.debug("Invalid packet size %s", len(data))
        return False

    packet_size = len(data)
    # Declared length: low 12 bits of bytes 1-2, plus the two flag bytes.
    read_packet_size = ((data[1] & 0x0F) << 8 | data[2]) + 2

    if packet_size != read_packet_size:
        _LOGGER.debug(
            "Packet size does not match read packet size: %s : %s",
            packet_size,
            read_packet_size,
        )
        return False

    # Both the first AND the last byte must be the frame flag.
    # (Previously written without parentheses, which parsed as
    # "(not start_ok) and end_ok" and let a corrupt end flag through;
    # the end flag is outside the frame CRC, so nothing else caught it.)
    if not (data[0] == DEC_FRAME_FLAG and data[-1] == DEC_FRAME_FLAG):
        _LOGGER.debug(
            "%s Received %s bytes of %s data",
            datetime.now().isoformat(),
            len(data),
            False,
        )
        return False

    if data[8:12] != DATA_FLAG:
        _LOGGER.debug("Data does not start with %s: %s", DATA_FLAG,
                      data[8:12])
        return False

    # Header CRC: little-endian at bytes 6-7, computed over bytes 1-5.
    header_checksum = CrcX25.calc(bytes(data[1:6]))
    read_header_checksum = data[7] << 8 | data[6]

    if header_checksum != read_header_checksum:
        _LOGGER.debug("Invalid header CRC check")
        return False

    # Frame CRC: little-endian just before the end flag, computed over
    # everything between the flags except the CRC itself.
    frame_checksum = CrcX25.calc(bytes(data[1:-3]))
    read_frame_checksum = data[-2] << 8 | data[-3]

    if frame_checksum != read_frame_checksum:
        _LOGGER.debug("Invalid frame CRC check")
        return False

    return True
341 |
--------------------------------------------------------------------------------
/custom_components/ams/sensor.py:
--------------------------------------------------------------------------------
1 | """Support for reading data from a serial port."""
2 | import logging
3 | from datetime import timedelta
4 |
5 | from homeassistant.const import STATE_UNKNOWN
6 | from homeassistant.core import callback
7 | from homeassistant.helpers.dispatcher import async_dispatcher_connect
8 | from homeassistant.helpers.restore_state import RestoreEntity
9 | from homeassistant.util import dt as dt_utils
10 |
11 | from custom_components.ams.const import (
12 | ACTIVE_ENERGY_DEFAULT_ATTRS,
13 | ACTIVE_ENERGY_SENSORS,
14 | AMS_DEVICES,
15 | AMS_ENERGY_METER,
16 | AMS_SENSOR_CREATED_BUT_NOT_READ,
17 | DOMAIN,
18 | HOURLY_SENSORS,
19 | SIGNAL_NEW_AMS_SENSOR,
20 | SIGNAL_UPDATE_AMS
21 | )
22 |
23 | _LOGGER = logging.getLogger(__name__)
24 |
25 |
async def async_setup_entry(hass, config_entry, async_add_devices):
    # pylint: disable=unused-argument
    """Set up the AMS sensor platform from a config entry."""

    @callback
    def async_add_sensor():
        """Create entities for sensor names not yet known to HA."""
        readings = hass.data[DOMAIN].sensor_data
        new_entities = []

        for name in readings:
            # Skip names we already created; only new ones become entities.
            if name in AMS_DEVICES:
                continue
            AMS_DEVICES.add(name)
            if name in AMS_SENSOR_CREATED_BUT_NOT_READ:
                # The hourly sensors are added manually at the start.
                continue
            entry = readings.get(name, {})
            new_entities.append(
                AmsSensor(
                    hass,
                    {
                        "name": name,
                        "state": entry.get("state"),
                        "attributes": entry.get("attributes"),
                    },
                )
            )

        # Hourly sensors are created up front (even before any reading)
        # so their previous state can be restored after a restart.
        for hourly in HOURLY_SENSORS:
            if hourly in readings or hourly in AMS_SENSOR_CREATED_BUT_NOT_READ:
                continue
            AMS_SENSOR_CREATED_BUT_NOT_READ.add(hourly)
            _LOGGER.debug(
                "Hourly sensor %s added so we can attempt to restore"
                " state", hourly
            )
            entry = readings.get(hourly, {})
            if hourly in ACTIVE_ENERGY_SENSORS:
                # Active-energy sensors get default attributes when none
                # have been read yet.
                attributes = entry.get(
                    "attributes", ACTIVE_ENERGY_DEFAULT_ATTRS)
            else:
                attributes = entry.get("attributes")
            new_entities.append(
                AmsSensor(
                    hass,
                    {
                        "name": hourly,
                        "state": entry.get("state"),
                        "attributes": attributes,
                    },
                )
            )

        if new_entities:
            _LOGGER.debug("Trying to add %s sensors: %s",
                          len(new_entities), new_entities)
            async_add_devices(new_entities)

    async_dispatcher_connect(hass, SIGNAL_NEW_AMS_SENSOR, async_add_sensor)

    return True
83 |
84 |
async def async_remove_entry(hass, entry):
    """Remove config entry from Homeassistant."""
    _LOGGER.debug("async_remove_entry AMS")
    try:
        await hass.config_entries.async_forward_entry_unload(entry, "sensor")
    except ValueError:
        # Platform was not loaded; nothing to unload.
        return
    _LOGGER.info("Successfully removed sensor from the AMS Reader"
                 " integration")
94 |
95 |
class AmsSensor(RestoreEntity):
    """Representation of a AMS sensor.

    Reads its state/attributes from the shared hub object stored in
    ``hass.data[DOMAIN]`` and refreshes on the SIGNAL_UPDATE_AMS
    dispatcher signal. Hourly sensors restore their previous state on
    startup when it is less than an hour old.
    """

    def __init__(self, hass, sensor_states):
        """Initialize the Serial sensor.

        sensor_states is a dict with "name", "state" and "attributes"
        keys, as built by async_add_sensor in this module.
        """
        # Shared hub instance holding the latest parsed sensor_data.
        self.ams = hass.data[DOMAIN]
        self._hass = hass
        self._name = sensor_states.get("name")
        self._meter_id = self.ams.meter_serial
        self._state = None
        self._attributes = {}
        # Pull the current values immediately, if any exist yet.
        self._update_properties()
        _LOGGER.debug("Init %s DUMP sensor_states %s", self._name,
                      sensor_states)

    def _update_properties(self):
        """Update all portions of sensor.

        Silently keeps the old values when this sensor's name is not
        (yet) present in the hub's sensor_data.
        """
        try:
            self._state = self.ams.sensor_data[self._name].get("state")
            self._attributes = self.ams.sensor_data[self._name].get(
                "attributes")
            self._meter_id = self.ams.meter_serial
            _LOGGER.debug("Updating sensor %s", self._name)
        except KeyError:
            # No reading for this sensor yet; keep current values.
            pass

    @property
    def unique_id(self) -> str:
        """Return the unique id of the sensor (name + meter serial)."""
        return f"{self._name}_{self._meter_id}"

    @property
    def name(self) -> str:
        """Return the name of the sensor (same as unique_id)."""
        return self.unique_id

    @property
    def should_poll(self) -> bool:
        """No polling needed; updates arrive via dispatcher signal."""
        return False

    @property
    def extra_state_attributes(self):
        """Return the attributes of the entity (if any JSON present)."""
        return self._attributes

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state

    @property
    def device_info(self) -> dict:
        """Return the device info, grouping all sensors under one meter."""
        return {
            "name": AMS_ENERGY_METER,
            "identifiers": {(DOMAIN, self._meter_id)},
            "manufacturer": self.ams.meter_manufacturer,
            "model": self.ams.meter_type,
        }

    async def async_added_to_hass(self):
        """Register callbacks and restoring states to hourly sensors.

        Only hourly sensors are restored, and only when the stored state
        is both known and less than 60 minutes old.
        """
        await super().async_added_to_hass()
        async_dispatcher_connect(self._hass, SIGNAL_UPDATE_AMS,
                                 self._update_callback)
        old_state = await self.async_get_last_state()

        if old_state is not None and self._name and self._name in (
                HOURLY_SENSORS):
            # Restore only if the saved state is fresh enough to trust.
            if dt_utils.utcnow() - old_state.last_changed < timedelta(
                    minutes=60):
                if old_state.state == STATE_UNKNOWN:
                    _LOGGER.debug(
                        "%s state is unknown, this typically happens if "
                        "ha never never got the real state of %s and the "
                        "users restart ha",
                        self._name,
                        self._name,
                    )
                else:
                    _LOGGER.debug(
                        "The state for %s was set less then a hour ago,"
                        " so its still correct. Restoring state to %s with"
                        " attrs %s",
                        self._name,
                        old_state.state,
                        old_state.attributes,
                    )
                    self._state = old_state.state
                    self._attributes = old_state.attributes
                    self.async_write_ha_state()
            else:
                # I'll rather have unknown then wrong values.
                _LOGGER.debug(
                    "The old state %s was set more then 60 minutes ago %s,"
                    " ignoring it.",
                    old_state.state,
                    old_state.last_changed,
                )
        else:
            _LOGGER.debug("Skipping restore state for %s", self._name)

    @callback
    def _update_callback(self):
        """Refresh state from the hub when the update signal fires."""
        if self._name in AMS_DEVICES:
            self._update_properties()
            self.async_write_ha_state()
205 |
--------------------------------------------------------------------------------
/custom_components/ams/strings.json:
--------------------------------------------------------------------------------
1 | {
2 | "config": {
3 | "title": "AMS Reader",
4 | "step": {
5 | "user": {
6 | "description": "Setup AMS Reader sensors",
7 | "data": {
8 | "serial_port": "Serial Port",
9 | "parity": "Parity",
10 | "meter_manufacturer": "Meter manufacturer"
11 | }
12 | }
13 | },
14 | "error": {
15 | "name_exists": "Name already exists"
16 | }
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/custom_components/ams/translations/en.json:
--------------------------------------------------------------------------------
1 | {
2 | "config": {
3 | "abort": {
4 | "single_instance_allowed": "Only one instance of AMS Reader is allowed."
5 | },
6 | "title": "AMS Reader",
7 | "step": {
8 | "user": {
9 | "description": "Choose protocol for communications",
10 | "data": {
11 | "type": "Protocol type"
12 | }
13 | },
14 | "network_connection": {
15 | "description": "Setup options for TCP/IP connection",
16 | "data": {
17 | "tcp_host": "IP address",
18 | "tcp_port": "Port",
19 | "parity": "Parity",
20 | "baudrate": "Baud rate",
21 | "meter_manufacturer": "Meter manufacturer"
22 | }
23 | },
24 | "select_serial_connection": {
25 | "description": "Setup options for serial communication",
26 | "data": {
27 | "serial_port": "Serial Port",
28 | "parity": "Parity",
29 | "meter_manufacturer": "Meter manufacturer",
30 | "baudrate": "Baudrate",
31 | "oss_brikken": "OSS brikken"
32 | }
33 | },
34 | "enter_serial_connection": {
35 | "description": "Setup options for serial communication",
36 | "data": {
37 | "serial_port": "Serial Port",
38 | "parity": "Parity",
39 | "meter_manufacturer": "Meter manufacturer",
40 | "baudrate": "Baudrate",
41 | "oss_brikken": "OSS brikken"
42 | }
43 | }
44 | },
45 | "error": {
46 | "name_exists": "Name already exists"
47 | }
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/custom_components/ams/translations/nb.json:
--------------------------------------------------------------------------------
1 | {
2 | "config": {
3 | "abort": {
4 | "single_instance_allowed": "Kun en oppføring av hass-AMS er tillatt"
5 | },
6 | "step": {
7 | "user": {
8 | "description": "Velg kommunikasjonsprotokoll for inndata",
9 | "data":{
10 | "type": "Protokolltype"
11 | }
12 | },
13 | "network_connection": {
14 | "description": "Velg data for TCP/IP tilkobling",
15 | "data": {
16 | "tcp_host": "IP adresse",
17 | "tcp_port": "Port",
18 | "parity": "Paritet",
19 | "baudrate": "Baudrate",
20 | "meter_manufacturer": "Måler produsent"
21 | }
22 | },
23 | "select_serial_connection": {
24 | "description": "Velg portdata for serietilkobling",
25 | "data": {
26 | "serial_port": "Serieport",
27 | "parity": "Paritet",
28 | "meter_manufacturer": "Måler produsent",
29 | "baudrate": "Baudrate",
30 | "oss_brikken": "OSS brikken"
31 | }
32 | },
33 | "enter_serial_connection": {
34 | "description": "Velg portdata for serietilkobling",
35 | "data": {
36 | "serial_port": "Serieport",
37 | "parity": "Paritet",
38 | "meter_manufacturer": "Måler produsent",
39 | "baudrate": "Baudrate",
40 | "oss_brikken": "OSS brikken"
41 | }
42 | }
43 | },
44 | "error": {
45 | "name_exists": "Navn eksisterer"
46 | }
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/custom_components/ams/translations/nn.json:
--------------------------------------------------------------------------------
1 | {
2 | "config": {
3 | "abort": {
4 | "single_instance_allowed": "Kun ein oppføring av AMS Lesar er tillate"
5 | },
6 | "title": "AMS Lesar",
7 | "step": {
8 | "user": {
9 | "description": "Vel kommunikasjonsprotokoll for inndata",
10 | "data": {
11 | "type": "Protokolltype"
12 | }
13 | },
14 | "network_connection": {
15 | "title": "AMS Lesar",
16 | "description": "Vel data for TCP/IP tilkobling",
17 | "data": {
18 | "tcp_host": "IP adresse",
19 | "tcp_port": "Port",
20 | "parity": "Paritet",
21 | "baudrate": "Baudrate",
22 | "meter_manufacturer": "Målar produsent"
23 | }
24 | },
25 | "select_serial_connection": {
26 | "title": "AMS Lesar",
27 | "description": "Vel portdata for serietilkobling",
28 | "data": {
29 | "serial_port": "Serieport",
30 | "parity": "Paritet",
31 | "meter_manufacturer": "Målar produsent",
32 | "baudrate": "Baudrate",
33 | "oss_brikken": "OSS brikken"
34 | }
35 | },
36 | "enter_serial_connection": {
37 | "description": "Vel portdata for serietilkobling",
38 | "data": {
39 | "serial_port": "Serieport",
40 | "parity": "Paritet",
41 | "meter_manufacturer": "Målar produsent",
42 | "baudrate": "Baudrate",
43 | "oss_brikken": "OSS brikken"
44 | }
45 | }
46 | },
47 | "error": {
48 | "name_exists": "Namn eksisterer"
49 | }
50 | }
51 | }
52 |
--------------------------------------------------------------------------------
/custom_components/ams/translations/no.json:
--------------------------------------------------------------------------------
1 | {
2 | "config": {
3 | "abort": {
4 | "single_instance_allowed": "Kun en oppføring av AMS Leser er tillatt"
5 | },
6 | "title": "AMS Leser",
7 | "step": {
8 | "user": {
9 | "description": "Velg kommunikasjonsprotokoll for inndata",
10 | "data":{
11 | "type": "Protokolltype"
12 | }
13 | },
14 | "network_connection": {
15 | "description": "Velg data for TCP/IP tilkobling",
16 | "data": {
17 | "tcp_host": "IP adresse",
18 | "tcp_port": "Port",
19 | "parity": "Paritet",
20 | "baudrate": "Baudrate",
21 | "meter_manufacturer": "Måler produsent"
22 | }
23 | },
24 | "select_serial_connection": {
25 | "description": "Velg portdata for serietilkobling",
26 | "data": {
27 | "serial_port": "Serieport",
28 | "parity": "Paritet",
29 | "meter_manufacturer": "Måler produsent",
30 | "baudrate": "Baudrate",
31 | "oss_brikken": "OSS brikken"
32 | }
33 | },
34 | "enter_serial_connection": {
35 | "description": "Velg portdata for serietilkobling",
36 | "data": {
37 | "serial_port": "Serieport",
38 | "parity": "Paritet",
39 | "meter_manufacturer": "Måler produsent",
40 | "baudrate": "Baudrate",
41 | "oss_brikken": "OSS brikken"
42 | }
43 | }
44 | },
45 | "error": {
46 | "name_exists": "Navn eksisterer"
47 | }
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/custom_components/ams/translations/se.json:
--------------------------------------------------------------------------------
1 | {
2 | "config": {
3 | "abort": {
4 | "single_instance_allowed": "Endast en instans av AMS läsare är tillåten"
5 | },
6 | "title": "AMS Reader",
7 | "step": {
8 | "user": {
9 | "description": "Välj kommunikationsprotokoll för indata",
10 | "data":{
11 | "type": "Protokolltyp"
12 | }
13 | },
14 | "network_connection": {
15 | "description": "Välj data för TCP/IP-anslutning",
16 | "data": {
17 | "tcp_host": "IP adress",
18 | "tcp_port": "Port",
19 | "parity": "Paritet",
20 | "baudrate": "Baudrate",
21 | "meter_manufacturer": "Mätartillverkare"
22 | }
23 | },
24 | "select_serial_connection": {
25 | "description": "Välj data för seriell anslutning",
26 | "data": {
27 | "serial_port": "Serieport",
28 | "parity": "Paritet",
29 | "meter_manufacturer": "Mätartillverkare",
30 | "baudrate": "Baudrate",
31 | "oss_brikken": "OSS brikken"
32 | }
33 | },
34 | "enter_serial_connection": {
35 | "description": "Välj data för seriell anslutning",
36 | "data": {
37 | "serial_port": "Serieport",
38 | "parity": "Paritet",
39 | "meter_manufacturer": "Mätartillverkare",
40 | "baudrate": "Baudrate",
41 | "oss_brikken": "OSS brikken"
42 | }
43 | }
44 | },
45 | "error": {
46 | "name_exists": "Namn finns"
47 | }
48 | }
49 | }
--------------------------------------------------------------------------------
/hacs.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "hass-AMS",
3 | "country": ["NO", "SE"],
4 | "domains": ["sensor"],
5 | "render_readme": true
6 | }
7 |
--------------------------------------------------------------------------------
/logo_images/Arrow-002.svg:
--------------------------------------------------------------------------------
1 |
2 |
3 |
179 |
--------------------------------------------------------------------------------
/logo_images/home-assistant.svg:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/logo_images/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/turbokongen/hass-AMS/d399131f19c57f1a6cd53f81604f1a556b9cc83d/logo_images/logo.png
--------------------------------------------------------------------------------
/logo_images/simple_meter_icon.svg:
--------------------------------------------------------------------------------
1 |
2 |
198 |
--------------------------------------------------------------------------------
/pytest.ini:
--------------------------------------------------------------------------------
1 | [pytest]
2 | addopts = --ignore=tests/parser_test.py --capture=sys
3 | #log_format = %(asctime)s %(levelname)s %(message)s
4 | #log_date_format = %Y-%m-%d %H:%M:%S
5 | #log_cli_level = DEBUG
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | pyserial==3.5
2 | crccheck==1.0
--------------------------------------------------------------------------------
/requirements_test.txt:
--------------------------------------------------------------------------------
1 | # linters such as flake8 and pylint should be pinned, as new releases
2 | # make new things fail. Manually update these pins when pulling in a
3 | # new version
4 |
5 | pytest
6 | pytest-homeassistant-custom-component
7 | pytest-cov
8 | flake8
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | """Test for AMS reader."""
2 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | """Global fixtures for the AMS integration."""
2 | # Fixtures allow you to replace functions with a Mock object. You can perform
3 | # many options via the Mock to reflect a particular behavior from the original
4 | # function that you want to see without going through the function's actual logic.
5 | # Fixtures can either be passed into tests as parameters, or if autouse=True, they
6 | # will automatically be used across all tests.
7 | #
8 | # Fixtures that are defined in conftest.py are available across all tests. You can also
9 | # define fixtures within a particular test file to scope them locally.
10 | #
11 | # pytest_homeassistant_custom_component provides some fixtures that are provided by
12 | # Home Assistant core. You can find those fixture definitions here:
13 | # https://github.com/MatthewFlamm/pytest-homeassistant-custom-component/blob/master/pytest_homeassistant_custom_component/common.py
14 | #
15 | # See here for more info: https://docs.pytest.org/en/latest/fixture.html (note that
16 | # pytest includes fixtures OOB which you can use as defined on this page)
17 |
18 | import pytest
19 | from homeassistant.util import dt as dt_util, location
20 | pytest_plugins = "pytest_homeassistant_custom_component"
21 |
22 |
23 | # This fixture enables loading custom integrations in all tests.
24 | # Remove to enable selective use of this fixture
@pytest.fixture(autouse=True)
def auto_enable_custom_integrations(enable_custom_integrations):
    """Enable loading of custom integrations in every test (autouse)."""
    yield
28 |
--------------------------------------------------------------------------------
/tests/parser_test.py:
--------------------------------------------------------------------------------
1 | """ Test module for manual input of int package from debug messages
2 | Only to be used in debug console of pyCharm."""
3 | import logging
4 | import pprint
5 | import sys, binascii
6 | from homeassistant.core import HomeAssistant
7 | import homeassistant.helpers.config_validation as cv
8 | import voluptuous as vol
9 | from custom_components.ams.const import *
10 | import custom_components.ams
11 | from custom_components.ams.parsers import aidon
12 | from custom_components.ams.parsers import aidon_se
13 | from custom_components.ams.parsers import kaifa
14 | from custom_components.ams.parsers import kaifa_se
15 | from custom_components.ams.parsers import kamstrup
16 | from custom_components.ams import AmsHub
17 | from custom_components.ams.const import DOMAIN
18 |
# Parser module under test; swap for kaifa, kamstrup, etc. to test other meters.
METERTYPE = aidon
# Truthy to parse with the Swedish variant (passed to parse_data).
SWEDISH = None
# True to parse packages captured via the OSS brikken (passed as oss=).
OSS = True
22 | #PACKAGE = [126, 160, 155, 1, 0, 1, 16, 86, 27, 230, 231, 0, 15, 64, 0, 0, 0, 9, 12, 7, 230, 9, 18, 7, 14, 56, 0, 255, 128, 0, 0, 2, 18, 9, 7, 75, 70, 77, 95, 48, 48, 49, 9, 16, 55, 51, 52, 48, 49, 53, 55, 48, 49, 49, 50, 55, 52, 53, 51, 50, 9, 8, 77, 65, 51, 48, 52, 72, 52, 68, 6, 0, 0, 6, 73, 6, 0, 0, 0, 0, 6, 0, 0, 0, 0, 6, 0, 0, 1, 206, 6, 0, 0, 2, 175, 6, 0, 0, 17, 27, 6, 0, 0, 9, 236, 6, 0, 0, 8, 241, 6, 0, 0, 9, 2, 6, 0, 0, 8, 252, 9, 12, 7, 230, 9, 18, 7, 14, 56, 0, 255, 128, 0, 0, 6, 8, 166, 101, 178, 6, 0, 0, 0, 0, 6, 2, 217, 43, 105, 6, 0, 34, 14, 104, 126, 48, 126]
23 | # Kaifa Hourly NO
24 | #PACKAGE = [126, 160, 154, 1, 2, 1, 16, 170, 165, 230, 231, 0, 15, 64, 0, 0, 0, 9, 12, 7, 230, 10, 30, 7, 12, 0, 10, 255, 128, 0, 0, 2, 18, 9, 7, 75, 70, 77, 95, 48, 48, 49, 9, 16, 54, 57, 55, 48, 54, 51, 49, 52, 48, 50, 48, 53, 53, 51, 56, 53, 9, 7, 77, 65, 51, 48, 52, 72, 52, 6, 0, 0, 9, 208, 6, 0, 0, 0, 0, 6, 0, 0, 0, 0, 6, 0, 0, 0, 233, 6, 0, 0, 5, 210, 6, 0, 0, 30, 53, 6, 0, 0, 6, 238, 6, 0, 0, 9, 93, 6, 0, 0, 9, 86, 6, 0, 0, 9, 99, 9, 12, 7, 230, 10, 30, 7, 12, 0, 10, 255, 128, 0, 0, 6, 5, 251, 233, 26, 6, 0, 0, 0, 0, 6, 0, 3, 78, 140, 6, 0, 106, 255, 81, 204, 137, 126]
25 | # MA304H4(SE)
26 | # PACKAGE = [126, 161, 29, 1, 0, 1, 16, 176, 174, 230, 231, 0, 15, 64, 0, 0, 0, 0, 2, 36, 9, 6, 1, 0, 0, 2, 129, 255, 9, 7, 75, 70, 77, 95, 48, 48, 49, 9, 6, 0, 0, 96, 1, 0, 255, 9, 16, 55, 51, 52, 48, 49, 53, 55, 48, 51, 48, 53, 52, 56, 51, 48, 48, 9, 6, 0, 0, 96, 1, 7, 255, 9, 7, 77, 65, 51, 48, 52, 72, 52, 9, 6, 1, 0, 1, 7, 0, 255, 6, 0, 0, 1, 41, 9, 6, 1, 0, 2, 7, 0, 255, 6, 0, 0, 0, 0, 9, 6, 1, 0, 3, 7, 0, 255, 6, 0, 0, 0, 0, 9, 6, 1, 0, 4, 7, 0, 255, 6, 0, 0, 0, 107, 9, 6, 1, 0, 31, 7, 0, 255, 6, 0, 0, 2, 104, 9, 6, 1, 0, 51, 7, 0, 255, 6, 0, 0, 2, 17, 9, 6, 1, 0, 71, 7, 0, 255, 6, 0, 0, 2, 46, 9, 6, 1, 0, 32, 7, 0, 255, 6, 0, 0, 9, 50, 9, 6, 1, 0, 52, 7, 0, 255, 6, 0, 0, 9, 65, 9, 6, 1, 0, 72, 7, 0, 255, 6, 0, 0, 9, 48, 9, 6, 0, 0, 1, 0, 0, 255, 9, 12, 7, 230, 10, 15, 6, 15, 8, 15, 255, 255, 196, 0, 9, 6, 1, 0, 1, 8, 0, 255, 6, 0, 148, 130, 99, 9, 6, 1, 0, 2, 8, 0, 255, 6, 0, 0, 0, 0, 9, 6, 1, 0, 3, 8, 0, 255, 6, 0, 1, 47, 198, 9, 6, 1, 0, 4, 8, 0, 255, 6, 0, 19, 107, 43, 188, 84, 126]
27 | # MA304H4D
28 | # PACKAGE = [126, 160, 155, 1, 0, 1, 16, 86, 27, 230, 231, 0, 15, 64, 0, 0, 0, 9, 12, 7, 230, 9, 18, 7, 14, 56, 15, 255, 128, 0, 0, 2, 18, 9, 7, 75, 70, 77, 95, 48, 48, 49, 9, 16, 55, 51, 52, 48, 49, 53, 55, 48, 49, 49, 50, 55, 52, 53, 51, 50, 9, 8, 77, 65, 51, 48, 52, 72, 52, 68, 6, 0, 0, 6, 54, 6, 0, 0, 0, 0, 6, 0, 0, 0, 0, 6, 0, 0, 1, 208, 6, 0, 0, 2, 212, 6, 0, 0, 16, 217, 6, 0, 0, 9, 187, 6, 0, 0, 8, 235, 6, 0, 0, 9, 2, 6, 0, 0, 8, 251, 9, 12, 7, 230, 9, 18, 7, 14, 56, 15, 255, 128, 0, 0, 6, 8, 166, 101, 185, 6, 0, 0, 0, 0, 6, 2, 217, 43, 105, 6, 0, 34, 14, 106, 197, 201, 126]
29 | # PACKAGE = [126, 162, 67, 65, 8, 131, 19, 133, 235, 230, 231, 0, 15, 64, 0, 0, 0, 0, 1, 27, 2, 2, 9, 6, 0, 0, 1, 0, 0, 255, 9, 12, 7, 230, 10, 16, 0, 16, 14, 10, 255, 128, 0, 255, 2, 3, 9, 6, 1, 0, 1, 7, 0, 255, 6, 0, 0, 2, 248, 2, 2, 15, 0, 22, 27, 2, 3, 9, 6, 1, 0, 2, 7, 0, 255, 6, 0, 0, 0, 0, 2, 2, 15, 0, 22, 27, 2, 3, 9, 6, 1, 0, 3, 7, 0, 255, 6, 0, 0, 0, 0, 2, 2, 15, 0, 22, 29, 2, 3, 9, 6, 1, 0, 4, 7, 0, 255, 6, 0, 0, 4, 16, 2, 2, 15, 0, 22, 29, 2, 3, 9, 6, 1, 0, 31, 7, 0, 255, 16, 255, 246, 2, 2, 15, 255, 22, 33, 2, 3, 9, 6, 1, 0, 51, 7, 0, 255, 16, 0, 23, 2, 2, 15, 255, 22, 33, 2, 3, 9, 6, 1, 0, 71, 7, 0, 255, 16, 0, 24, 2, 2, 15, 255, 22, 33, 2, 3, 9, 6, 1, 0, 32, 7, 0, 255, 18, 9, 44, 2, 2, 15, 255, 22, 35, 2, 3, 9, 6, 1, 0, 52, 7, 0, 255, 18, 9, 57, 2, 2, 15, 255, 22, 35, 2, 3, 9, 6, 1, 0, 72, 7, 0, 255, 18, 9, 74, 2, 2, 15, 255, 22, 35, 2, 3, 9, 6, 1, 0, 21, 7, 0, 255, 6, 0, 0, 0, 0, 2, 2, 15, 0, 22, 27, 2, 3, 9, 6, 1, 0, 22, 7, 0, 255, 6, 0, 0, 0, 39, 2, 2, 15, 0, 22, 27, 2, 3, 9, 6, 1, 0, 23, 7, 0, 255, 6, 0, 0, 0, 0, 2, 2, 15, 0, 22, 29, 2, 3, 9, 6, 1, 0, 24, 7, 0, 255, 6, 0, 0, 0, 242, 2, 2, 15, 0, 22, 29, 2, 3, 9, 6, 1, 0, 41, 7, 0, 255, 6, 0, 0, 1, 123, 2, 2, 15, 0, 22, 27, 2, 3, 9, 6, 1, 0, 42, 7, 0, 255, 6, 0, 0, 0, 0, 2, 2, 15, 0, 22, 27, 2, 3, 9, 6, 1, 0, 43, 7, 0, 255, 6, 0, 0, 0, 0, 2, 2, 15, 0, 22, 29, 2, 3, 9, 6, 1, 0, 44, 7, 0, 255, 6, 0, 0, 1, 132, 2, 2, 15, 0, 22, 29, 2, 3, 9, 6, 1, 0, 61, 7, 0, 255, 6, 0, 0, 1, 165, 2, 2, 15, 0, 22, 27, 2, 3, 9, 6, 1, 0, 62, 7, 0, 255, 6, 0, 0, 0, 0, 2, 2, 15, 0, 22, 27, 2, 3, 9, 6, 1, 0, 63, 7, 0, 255, 6, 0, 0, 0, 0, 2, 2, 15, 0, 22, 29, 2, 3, 9, 6, 1, 0, 64, 7, 0, 255, 6, 0, 0, 1, 147, 2, 2, 15, 0, 22, 29, 2, 3, 9, 6, 1, 0, 1, 8, 0, 255, 6, 2, 71, 16, 87, 2, 2, 15, 0, 22, 30, 2, 3, 9, 6, 1, 0, 2, 8, 0, 255, 6, 0, 151, 1, 103, 2, 2, 15, 0, 22, 30, 2, 3, 9, 6, 1, 0, 3, 8, 0, 255, 6, 0, 1, 85, 202, 2, 2, 15, 0, 22, 32, 2, 3, 9, 6, 1, 0, 4, 8, 0, 255, 6, 0, 143, 201, 175, 2, 2, 15, 0, 22, 32, 106, 
221, 126]
30 | # MA304H4 (NO)Long
31 | # PACKAGE = [126, 160, 120, 1, 2, 1, 16, 196, 152, 230, 231, 0, 15, 64, 0, 0, 0, 9, 12, 7, 230, 10, 29, 6, 19, 59, 0, 255, 128, 0, 0, 2, 13, 9, 7, 75, 70, 77, 95, 48, 48, 49, 9, 16, 54, 57, 55, 48, 54, 51, 49, 52, 48, 50, 48, 53, 53, 51, 56, 53, 9, 7, 77, 65, 51, 48, 52, 72, 52, 6, 0, 0, 5, 138, 6, 0, 0, 0, 0, 6, 0, 0, 0, 0, 6, 0, 0, 1, 119, 6, 0, 0, 6, 147, 6, 0, 0, 6, 215, 6, 0, 0, 12, 32, 6, 0, 0, 9, 104, 6, 0, 0, 9, 100, 6, 0, 0, 9, 55, 184, 196, 126]
32 | # MA304H4 (NO)Short
33 | # PACKAGE = [126, 160, 39, 1, 2, 1, 16, 90, 135, 230, 231, 0, 15, 64, 0, 0, 0, 9, 12, 7, 230, 10, 29, 6, 19, 58, 56, 255, 128, 0, 0, 2, 1, 6, 0, 0, 5, 135, 240, 224, 126]
34 | # MA304H3E (NO)Long
35 | # PACKAGE = [126, 160, 121, 1, 2, 1, 16, 128, 147, 230, 231, 0, 15, 64, 0, 0, 0, 9, 12, 7, 230, 11, 7, 1, 9, 44, 40, 255, 128, 0, 0, 2, 13, 9, 7, 75, 70, 77, 95, 48, 48, 49, 9, 16, 54, 57, 55, 48, 54, 51, 49, 52, 48, 52, 49, 50, 57, 57, 53, 52, 9, 8, 77, 65, 51, 48, 52, 72, 51, 69, 6, 0, 0, 2, 34, 6, 0, 0, 0, 0, 6, 0, 0, 0, 0, 6, 0, 0, 0, 81, 6, 0, 0, 3, 13, 6, 0, 0, 7, 37, 6, 0, 0, 8, 14, 6, 0, 0, 9, 16, 6, 0, 0, 0, 0, 6, 0, 0, 9, 44, 111, 222, 126]
36 | # MA304H3E (NO)Mini
37 | # PACKAGE = [126, 160, 39, 1, 2, 1, 16, 90, 135, 230, 231, 0, 15, 64, 0, 0, 0, 9, 12, 7, 230, 11, 7, 1, 9, 44, 38, 255, 128, 0, 0, 2, 1, 6, 0, 0, 2, 37, 242, 181, 126]
38 | # Aidon_se
39 | #PACKAGE = [126, 162, 67, 65, 8, 131, 19, 133, 235, 230, 231, 0, 15, 64, 0, 0, 0, 0, 1, 27, 2, 2, 9, 6, 0, 0, 1, 0, 0, 255, 9, 12, 7, 230, 10, 16, 0, 16, 14, 10, 255, 128, 0, 255, 2, 3, 9, 6, 1, 0, 1, 7, 0, 255, 6, 0, 0, 2, 248, 2, 2, 15, 0, 22, 27, 2, 3, 9, 6, 1, 0, 2, 7, 0, 255, 6, 0, 0, 0, 0, 2, 2, 15, 0, 22, 27, 2, 3, 9, 6, 1, 0, 3, 7, 0, 255, 6, 0, 0, 0, 0, 2, 2, 15, 0, 22, 29, 2, 3, 9, 6, 1, 0, 4, 7, 0, 255, 6, 0, 0, 4, 16, 2, 2, 15, 0, 22, 29, 2, 3, 9, 6, 1, 0, 31, 7, 0, 255, 16, 255, 246, 2, 2, 15, 255, 22, 33, 2, 3, 9, 6, 1, 0, 51, 7, 0, 255, 16, 0, 23, 2, 2, 15, 255, 22, 33, 2, 3, 9, 6, 1, 0, 71, 7, 0, 255, 16, 0, 24, 2, 2, 15, 255, 22, 33, 2, 3, 9, 6, 1, 0, 32, 7, 0, 255, 18, 9, 44, 2, 2, 15, 255, 22, 35, 2, 3, 9, 6, 1, 0, 52, 7, 0, 255, 18, 9, 57, 2, 2, 15, 255, 22, 35, 2, 3, 9, 6, 1, 0, 72, 7, 0, 255, 18, 9, 74, 2, 2, 15, 255, 22, 35, 2, 3, 9, 6, 1, 0, 21, 7, 0, 255, 6, 0, 0, 0, 0, 2, 2, 15, 0, 22, 27, 2, 3, 9, 6, 1, 0, 22, 7, 0, 255, 6, 0, 0, 0, 39, 2, 2, 15, 0, 22, 27, 2, 3, 9, 6, 1, 0, 23, 7, 0, 255, 6, 0, 0, 0, 0, 2, 2, 15, 0, 22, 29, 2, 3, 9, 6, 1, 0, 24, 7, 0, 255, 6, 0, 0, 0, 242, 2, 2, 15, 0, 22, 29, 2, 3, 9, 6, 1, 0, 41, 7, 0, 255, 6, 0, 0, 1, 123, 2, 2, 15, 0, 22, 27, 2, 3, 9, 6, 1, 0, 42, 7, 0, 255, 6, 0, 0, 0, 0, 2, 2, 15, 0, 22, 27, 2, 3, 9, 6, 1, 0, 43, 7, 0, 255, 6, 0, 0, 0, 0, 2, 2, 15, 0, 22, 29, 2, 3, 9, 6, 1, 0, 44, 7, 0, 255, 6, 0, 0, 1, 132, 2, 2, 15, 0, 22, 29, 2, 3, 9, 6, 1, 0, 61, 7, 0, 255, 6, 0, 0, 1, 165, 2, 2, 15, 0, 22, 27, 2, 3, 9, 6, 1, 0, 62, 7, 0, 255, 6, 0, 0, 0, 0, 2, 2, 15, 0, 22, 27, 2, 3, 9, 6, 1, 0, 63, 7, 0, 255, 6, 0, 0, 0, 0, 2, 2, 15, 0, 22, 29, 2, 3, 9, 6, 1, 0, 64, 7, 0, 255, 6, 0, 0, 1, 147, 2, 2, 15, 0, 22, 29, 2, 3, 9, 6, 1, 0, 1, 8, 0, 255, 6, 2, 71, 16, 87, 2, 2, 15, 0, 22, 30, 2, 3, 9, 6, 1, 0, 2, 8, 0, 255, 6, 0, 151, 1, 103, 2, 2, 15, 0, 22, 30, 2, 3, 9, 6, 1, 0, 3, 8, 0, 255, 6, 0, 1, 85, 202, 2, 2, 15, 0, 22, 32, 2, 3, 9, 6, 1, 0, 4, 8, 0, 255, 6, 0, 143, 201, 175, 2, 2, 15, 0, 22, 32, 106, 
221, 126]
40 | # Kaifa MA304H3E(NO)Short
41 | #PACKAGE = [126, 160, 121, 1, 2, 1, 16, 128, 147, 230, 231, 0, 15, 64, 0, 0, 0, 9, 12, 7, 228, 2, 17, 1, 7, 37, 10, 255, 128, 0, 0, 2, 13, 9, 7, 75, 70, 77, 95, 48, 48, 49, 9, 16, 54, 57, 55, 48, 54, 51, 49, 52, 48, 53, 56, 48, 56, 52, 54, 57, 9, 8, 77, 65, 51, 48, 52, 72, 51, 69, 6, 0, 0, 1, 105, 6, 0, 0, 0, 0, 6, 0, 0, 0, 0, 6, 0, 0, 0, 67, 6, 0, 0, 7, 59, 6, 0, 0, 3, 141, 6, 0, 0, 3, 228, 6, 0, 0, 8, 226, 6, 0, 0, 0, 0, 6, 0, 0, 8, 251, 230, 110, 126]
42 | # Aidon mini
43 | #PACKAGE = [126, 160, 42, 65, 8, 131, 19, 4, 19, 230, 231, 0, 15, 64, 0, 0, 0, 0, 1, 1, 2, 3, 9, 6, 1, 0, 1, 7, 0, 255, 6, 0, 0, 2, 221, 2, 2, 15, 0, 22, 27, 92, 246, 126]
44 | # Aidon short
45 | #PACKAGE = [126, 161, 30, 65, 8, 131, 19, 238, 238, 230, 231, 0, 15, 64, 0, 0, 0, 0, 1, 13, 2, 2, 9, 6, 1, 1, 0, 2, 129, 255, 10, 11, 65, 73, 68, 79, 78, 95, 86, 48, 48, 48, 49, 2, 2, 9, 6, 0, 0, 96, 1, 0, 255, 10, 16, 55, 51, 53, 57, 57, 57, 50, 57, 50, 49, 50, 56, 56, 49, 56, 49, 2, 2, 9, 6, 0, 0, 96, 1, 7, 255, 10, 4, 54, 53, 51, 52, 2, 3, 9, 6, 1, 0, 1, 7, 0, 255, 6, 0, 0, 2, 222, 2, 2, 15, 0, 22, 27, 2, 3, 9, 6, 1, 0, 2, 7, 0, 255, 6, 0, 0, 0, 0, 2, 2, 15, 0, 22, 27, 2, 3, 9, 6, 1, 0, 3, 7, 0, 255, 6, 0, 0, 0, 237, 2, 2, 15, 0, 22, 29, 2, 3, 9, 6, 1, 0, 4, 7, 0, 255, 6, 0, 0, 0, 0, 2, 2, 15, 0, 22, 29, 2, 3, 9, 6, 1, 0, 31, 7, 0, 255, 16, 0, 18, 2, 2, 15, 255, 22, 33, 2, 3, 9, 6, 1, 0, 51, 7, 0, 255, 16, 0, 6, 2, 2, 15, 255, 22, 33, 2, 3, 9, 6, 1, 0, 71, 7, 0, 255, 16, 0, 10, 2, 2, 15, 255, 22, 33, 2, 3, 9, 6, 1, 0, 32, 7, 0, 255, 18, 9, 39, 2, 2, 15, 255, 22, 35, 2, 3, 9, 6, 1, 0, 52, 7, 0, 255, 18, 9, 48, 2, 2, 15, 255, 22, 35, 2, 3, 9, 6, 1, 0, 72, 7, 0, 255, 18, 9, 50, 2, 2, 15, 255, 22, 35, 22, 108, 126]
46 | #PACKAGE = [126, 160, 135, 1, 2, 1, 16, 158, 109, 230, 231, 0, 15, 64, 0, 0, 0, 9, 12, 7, 230, 5, 5, 4, 21, 0, 10, 255, 128, 0, 0, 2, 14, 9, 7, 75, 70, 77, 95, 48, 48, 49, 9, 16, 54, 57, 55, 48, 54, 51, 49, 52, 48, 56, 50, 50, 53, 57, 50, 56, 9, 8, 77, 65, 49, 48, 53, 72, 50, 69, 6, 0, 0, 6, 119, 6, 0, 0, 0, 0, 6, 0, 0, 0, 0, 6, 0, 0, 0, 12, 6, 0, 0, 27, 40, 6, 0, 0, 9, 120, 9, 12, 7, 230, 5, 5, 4, 21, 0, 10, 255, 128, 0, 0, 6, 1, 134, 127, 141, 6, 0, 0, 0, 0, 6, 0, 63, 93, 46, 6, 0, 6, 95, 183, 223, 214, 126]
47 | #OSS brikken test
48 | #PACKAGE = [126, 161, 11, 65, 8, 131, 19, 250, 124, 230, 231, 0, 15, 64, 0, 0, 0, 0, 1, 12, 2, 2, 9, 6, 1, 1, 0, 2, 129, 255, 13, 10, 11, 65, 73, 68, 79, 78, 95, 86, 48, 48, 48, 49, 2, 2, 9, 6, 0, 0, 96, 1, 0, 255, 13, 10, 16, 55, 51, 53, 57, 57, 57, 50, 56, 57, 56, 48, 51, 51, 55, 55, 55, 2, 2, 9, 6, 0, 0, 96, 1, 7, 255, 13, 10, 4, 54, 53, 50, 53, 2, 3, 9, 6, 1, 0, 1, 7, 0, 255, 6, 0, 0, 9, 191, 2, 2, 15, 0, 22, 27, 2, 3, 9, 6, 1, 0, 2, 7, 0, 255, 6, 0, 0, 0, 0, 2, 2, 15, 0, 22, 27, 2, 3, 9, 6, 1, 0, 3, 7, 0, 255, 6, 0, 0, 0, 36, 2, 2, 15, 0, 22, 29, 2, 3, 9, 6, 1, 0, 4, 7, 0, 255, 6, 0, 0, 0, 0, 2, 2, 15, 0, 22, 29, 2, 3, 9, 6, 1, 0, 31, 7, 0, 255, 16, 0, 19, 2, 2, 15, 255, 22, 33, 2, 3, 9, 6, 1, 0, 71, 7, 0, 255, 16, 0, 87, 2, 2, 15, 255, 22, 33, 2, 3, 9, 6, 1, 0, 32, 7, 0, 255, 18, 9, 61, 2, 2, 15, 255, 22, 35, 2, 3, 9, 6, 1, 0, 52, 7, 0, 255, 18, 9, 91, 2, 2, 15, 255, 22, 35, 2, 3, 9, 6, 1, 0, 72, 7, 0, 255, 18, 9, 65, 2, 2, 15, 255, 22, 35]
49 | PACKAGE = [126, 161, 119, 65, 8, 131, 19, 57, 30, 230, 231, 0, 15, 64, 0, 0, 0, 0, 1, 17, 2, 2, 9, 6, 1, 1, 0, 2, 129, 255, 13, 10, 11, 65, 73, 68, 79, 78, 95, 86, 48, 48, 48, 49, 2, 2, 9, 6, 0, 0, 96, 1, 0, 255, 13, 10, 16, 55, 51, 53, 57, 57, 57, 50, 56, 57, 56, 48, 51, 51, 55, 55, 55, 2, 2, 9, 6, 0, 0, 96, 1, 7, 255, 13, 10, 4, 54, 53, 50, 53, 2, 3, 9, 6, 1, 0, 1, 7, 0, 255, 6, 0, 0, 13, 176, 2, 2, 15, 0, 22, 27, 2, 3, 9, 6, 1, 0, 2, 7, 0, 255, 6, 0, 0, 0, 0, 2, 2, 15, 0, 22, 27, 2, 3, 9, 6, 1, 0, 3, 7, 0, 255, 6, 0, 0, 0, 148, 2, 2, 15, 0, 22, 29, 2, 3, 9, 6, 1, 0, 4, 7, 0, 255, 6, 0, 0, 0, 0, 2, 2, 15, 0, 22, 29, 2, 3, 9, 6, 1, 0, 31, 7, 0, 255, 16, 0, 55, 2, 2, 15, 255, 22, 33, 2, 3, 9, 6, 1, 0, 71, 7, 0, 255, 16, 0, 118, 2, 2, 15, 255, 22, 33, 2, 3, 9, 6, 1, 0, 32, 7, 0, 255, 18, 9, 80, 2, 2, 15, 255, 22, 35, 2, 3, 9, 6, 1, 0, 52, 7, 0, 255, 18, 9, 85, 2, 2, 15, 255, 22, 35, 2, 3, 9, 6, 1, 0, 72, 7, 0, 255, 18, 9, 76, 2, 2, 15, 255, 22, 35, 2, 2, 9, 6, 0, 0, 1, 0, 0, 255, 9, 12, 7, 231, 1, 9, 1, 13, 10, 0, 0, 255, 128, 0, 255, 2, 3, 9, 6, 1, 0, 1, 8, 0, 255, 6, 0, 250, 41, 117, 2, 2, 15, 1, 22, 30, 2, 3, 9, 6, 1, 0, 2, 8, 0, 255, 6, 0, 0, 0, 0, 2, 2, 15, 1, 22, 30, 2, 3, 9, 6, 1, 0, 3, 8, 0, 255, 6, 0, 47, 232, 36, 2, 2, 15, 1, 22, 32, 2, 3, 9, 6, 1, 0, 4, 8, 0, 255, 6, 0, 0, 2, 189, 2, 2, 15, 1, 22, 32, 218, 220]
# Two-character, zero-padded hex form of each byte in the package.
PKG = [hex(item)[2:].zfill(2) for item in PACKAGE]
PKG_STRING = ' '.join(map(str, PKG))
PACKAGE_STRING = ' '.join(map(str, PACKAGE))
# Frame length: low 4 bits of byte 1 plus byte 2, plus the two 0x7E
# flag bytes framing the HDLC packet.
n = range(((PACKAGE[1] & 0x0F) << 8 | PACKAGE[2]) + 2)
print(n)
# Both index->byte maps are kept because either name may be inspected
# from the debug console (they are intentionally identical).
NUMBERED_PACKAGE = {i: PACKAGE[i] for i in n}
NUMBERED_PKG = {i: PACKAGE[i] for i in n}

print(PACKAGE)
print(PKG)
print(PKG_STRING)
print(PACKAGE_STRING)
print(NUMBERED_PKG)
71 |
# Route all DEBUG logging to stdout so parser debug output is visible
# in the debug console.
root = logging.getLogger()
root.setLevel(logging.DEBUG)
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.DEBUG)
root.addHandler(handler)
sensor_data = {}
print("Testing for parser.......................")
parser = AmsHub._find_parser(PACKAGE)
print("Running test_valid_data..................")
if OSS is True:
    print("Parsing with OSS")
    meter_validData = METERTYPE.test_valid_data(PACKAGE, oss=OSS)
else:
    print("Parsing Normal")
    meter_validData = METERTYPE.test_valid_data(PACKAGE)
if meter_validData:
    print("--------------Valid data test passed----------------")
else:
    print("-----------------not valid data-------------")
print("Running parse_data.......................")
if SWEDISH:
    print("Parsing swedish")
    meter_data, _ = METERTYPE.parse_data(sensor_data, PACKAGE, SWEDISH)
elif OSS is True:
    # BUG FIX: this branch previously printed "Parsing swedish" and was a
    # separate `if`, whose `else` could silently overwrite a Swedish parse.
    print("Parsing with OSS")
    meter_data, _ = METERTYPE.parse_data(sensor_data, PACKAGE, oss=OSS)
else:
    print("Parsing Normal")
    meter_data, _ = METERTYPE.parse_data(sensor_data, PACKAGE)
print("Checking for missing attributes")
print(type(meter_data))
config = {
    "protocol": "serial",
    "serial_port": "/dev/serial/by-id/usb-Prolific_Technology_Inc._USB"
                   "-Serial_Controller-if00-port0",
    "meter_manufacturer": "auto",
    "parity": "N",
    "baudrate": 2400
}

# NOTE(review): the HomeAssistant *class* (not an instance) is passed
# around below; that only works in the pyCharm debug-console context
# this script targets.
hub = AmsHub(HomeAssistant, config)
hass = HomeAssistant
hass.data = {}
hass.data[DOMAIN] = hub
AmsHub._check_for_new_sensors_and_update(hub, meter_data)
pprint.pprint(meter_data)
119 |
def decodeGPS(encodedData):
    """Decode a packed 48-bit GPS value into ``(lat, lon, color)``.

    Bit layout of the big-endian integer formed from *encodedData*:
      bit 47      latitude sign flag
      bits 46-27  latitude, 20 bits
      bit 26      longitude sign flag
      bits 25-6   longitude, 20 bits
      bits 4-0    color code
    """
    latMultiplier = (float(2 ** 20) / 90)
    lonMultiplier = (float(2 ** 20) / 180)
    # Convert the byte string once instead of re-hexlifying it for
    # every extracted field (the original did this five times).
    raw = int.from_bytes(encodedData, "big")
    latNeg = bool(raw & 0x800000000000)
    lonNeg = bool(raw & 0x000004000000)

    latEncoded = (raw & 0x7FFFF8000000) >> 27
    lonEncoded = (raw & 0x000003FFFFC0) >> 6
    color = raw & 0x00000000001F

    if latNeg:
        lat = (-1 * (latEncoded / latMultiplier))
    else:
        lat = (90 - (latEncoded / latMultiplier))

    if lonNeg:
        lon = ((lonEncoded / lonMultiplier) - 180)
    else:
        lon = (lonEncoded / lonMultiplier)

    return lat, lon, color
141 |
# Reuse the space-separated hex dump of the package as decoder input
# (bytes.fromhex ignores whitespace between byte pairs).
gpsData = PKG_STRING
try:
    gpsBytes = bytes.fromhex(gpsData)
except ValueError:
    print("\nEnter a valid HEX string eg. 50C4EDDA2B5F\n")
    sys.exit()

# NOTE(review): presumably 0xFE as the first byte flags a non-GPS WAN
# address format -- confirm against the device documentation.
if gpsBytes[0] == 0xFE:
    print('Device not using GPS coordinates for WAN address.')
    sys.exit()
GPS_Lat, GPS_Lon, Color = decodeGPS(gpsBytes)
print("\n", str(round(GPS_Lat, 6)) + "," + str(round(GPS_Lon, 6)), "\n")
--------------------------------------------------------------------------------
/tests/test_config_flow.py:
--------------------------------------------------------------------------------
1 | """Tests for config_flow."""
2 | import asyncio
3 | import pytest
4 | import logging
5 | import sys
6 | import serial.tools.list_ports
7 | import serial.tools.list_ports_common
8 | from custom_components.ams.const import DOMAIN
9 | from homeassistant import config_entries, data_entry_flow
10 | from unittest.mock import patch
11 | sys.path.append('../')
12 |
13 |
@pytest.fixture(autouse=True)
def auto_enable_custom_integrations(enable_custom_integrations):
    """Enable loading of custom integrations in every test.

    NOTE(review): duplicates the autouse fixture already defined in
    conftest.py; kept for identical behavior.
    """
    yield
17 |
# Complete serial configuration as the config flow would store it.
# NOTE(review): not referenced by the tests visible in this chunk --
# confirm it is still used before removing.
MOCK_SERIAL_CONFIG = {
    'protocol': 'serial',
    'serial_port': '/dev/serial/by-id/usb-Prolific_Technology_Inc._USB-Serial_Controller-if00-port0',
    'meter_manufacturer': 'auto',
    'parity': 'N',
    'baudrate': 2400
}
# User selections for the initial "user" step of the config flow.
FIXTURE_USER_INPUT_MANUAL_SERIAL = {'type': 'manual_serial_port'}
FIXTURE_USER_INPUT_SERIAL = {'type': 'serial'}
FIXTURE_USER_INPUT_NETWORK = {'type': 'tcp_ip'}

_LOGGER = logging.getLogger(__name__)
30 |
31 |
32 | # This fixture bypasses the actual setup of the integration
33 | # since we only want to test the config flow. We test the
34 | # actual functionality of the integration in other test modules.
@pytest.fixture(autouse=True)
def bypass_setup_fixture():
    """Prevent setup."""
    # Stub out the integration's setup entry points so the config flow
    # can be exercised without starting the actual integration.
    setup_patcher = patch("custom_components.ams.async_setup", return_value=True,)
    setup_entry_patcher = patch(
        "custom_components.ams.async_setup_entry",
        return_value=True,
    )
    with setup_patcher, setup_entry_patcher:
        yield
43 |
@pytest.fixture()
def testing_event_loop(request):
    """Create an instance of the default event loop for each test case."""
    policy = asyncio.get_event_loop_policy()
    event_loop = policy.new_event_loop()
    yield event_loop
    event_loop.close()
50 |
def com_port():
    """Mock of a serial port."""
    mock_port = serial.tools.list_ports_common.ListPortInfo("/dev/testUSB1")
    mock_port.device = "/dev/testUSB1"
    mock_port.description = "Mocked serial port"
    mock_port.manufacturer = "Mock Inc."
    mock_port.serial_number = "1234"
    return mock_port
60 |
61 |
62 | # Here we simulate a successful config flow from the backend.
63 | # Note that we use the `bypass_get_data` fixture here because
64 | # we want the config flow validation to succeed during the test.
async def start_options_flow(hass, entry):
    """Start the options flow with the entry under test.

    Adds *entry* to hass, fully sets it up, and returns the result of
    initializing its options flow.
    """
    entry.add_to_hass(hass)

    await hass.config_entries.async_setup(entry.entry_id)
    await hass.async_block_till_done()

    return await hass.config_entries.options.async_init(entry.entry_id)
73 |
74 |
async def test_select_serial_config_flow(hass):
    """Test a successful select serial port select config flow."""
    # Initialize the first config flow, user step. comports() is patched
    # so the flow sees exactly one mocked serial port.
    with patch("serial.tools.list_ports.comports", return_value=[com_port()]):
        result = await hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_USER}
        )

    # Check that the config flow shows the user form as the first step
    assert result["type"] == 'form'
    assert result["step_id"] == "user"

    # If a user were to select 'serial_port' it would result in this function call
    with patch("serial.tools.list_ports.comports", return_value=[com_port()]):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], user_input=FIXTURE_USER_INPUT_SERIAL
        )

    # Check that the config flow step is complete and the next schema is loaded with
    # the correct input data
    _LOGGER.debug(result)
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "select_serial_connection"

    # The port is selected as "<device>: <description>"; the created
    # entry below is expected to keep only the device path.
    fixture_serial_input = {
        'serial_port': "/dev/testUSB1: Mocked serial port",
        'meter_manufacturer': 'auto',
        'parity': 'N',
        'baudrate': 2400}
    with patch(
        "custom_components.ams.async_setup_entry",
        return_value=True,
    ), patch(
        "serial.tools.list_ports.comports",
        return_value=[com_port()],
    ):
        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], user_input=fixture_serial_input)

    assert result["type"] == "create_entry"
    assert result["title"] == "AMS Reader"
    assert result["data"] == {
        'serial_port': '/dev/testUSB1',
        'meter_manufacturer': 'auto',
        'parity': 'N',
        'baudrate': 2400,
        'protocol': 'serial',
        'oss_brikken': False,
    }
124 |
125 |
async def test_enter_serial_config_flow(hass):
    """Test a successful serial port select config flow."""
    # Initialize the first config flow, user step.
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    # Check that the config flow shows the user form as the first step.
    # Use the data_entry_flow constant (as below) instead of the bare 'form'
    # literal for consistency within this module.
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "user"

    # If a user were to select 'serial_port' it would result in this function call
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], user_input=FIXTURE_USER_INPUT_MANUAL_SERIAL
    )

    # Check that the config flow step is complete and the next schema is loaded with
    # the correct input data
    _LOGGER.debug(result)
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "enter_serial_connection"

    fixture_serial_input = {
        'serial_port': '/dev/testUSB1',
        'meter_manufacturer': 'auto',
        'parity': 'N',
        'baudrate': 2400}
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], user_input=fixture_serial_input)

    # The created entry keeps the manually entered port and adds the serial
    # defaults ('protocol', 'oss_brikken').
    assert result["type"] == "create_entry"
    assert result["title"] == "AMS Reader"
    assert result["data"] == {
        'serial_port': '/dev/testUSB1',
        'meter_manufacturer': 'auto',
        'parity': 'N',
        'baudrate': 2400,
        'protocol': 'serial',
        'oss_brikken': False,
    }
166 |
async def test_serial_network_config_flow(hass):
    """Test a successful serial network config flow."""
    # Initialize the first config flow, user step.
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )

    # Check that the config flow shows the user form as the first step.
    # Use the data_entry_flow constant (as below) instead of the bare 'form'
    # literal for consistency within this module.
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "user"

    # If a user were to select 'serial_port' it would result in this function call
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], user_input=FIXTURE_USER_INPUT_NETWORK
    )

    # Check that the config flow step is complete and the next schema is loaded with
    # the correct input data
    _LOGGER.debug(result)
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "network_connection"

    fixture_network_config = {
        'tcp_host': '10.0.0.99',
        'tcp_port': 12345,
        'meter_manufacturer': 'auto',
        'parity': 'N',
        'baudrate': 2400}
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], user_input=fixture_network_config)

    # NOTE(review): unlike the serial flows, the tcp_ip entry carries no
    # 'oss_brikken' key — confirm against the config flow implementation.
    assert result["type"] == "create_entry"
    assert result["title"] == "AMS Reader"
    assert result["data"] == {
        'tcp_host': '10.0.0.99',
        'tcp_port': 12345,
        'meter_manufacturer': 'auto',
        'parity': 'N',
        'baudrate': 2400,
        'protocol': 'tcp_ip'
    }
208 |
--------------------------------------------------------------------------------
/tests/test_init.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from custom_components.ams import AmsHub
3 | from .common_test_data import TestData
4 |
5 | sys.path.append('../')
6 |
7 |
def test_find_parser_kamstrup():
    """Auto-detection should pick the Kamstrup parser."""
    assert AmsHub._find_parser(TestData.KAMSTRUP) == "kamstrup"


def test_find_parser_aidon_short():
    """Auto-detection should pick the Aidon parser for a short package."""
    assert AmsHub._find_parser(TestData.AIDON_SHORT) == "aidon"


def test_find_parser_aidon_se_3ph():
    """Auto-detection should pick the Swedish Aidon parser."""
    assert AmsHub._find_parser(TestData.AIDON_SE_3PH) == "aidon_se"


def test_find_parser_kaifa_long():
    """Auto-detection should pick the Kaifa parser (Kaifa MA304H4D data)."""
    assert AmsHub._find_parser(TestData.KAIFA_MA304H4D_LONG) == "kaifa"


def test_find_parser_kaifa_hourly():
    """Auto-detection should pick the Kaifa parser for an hourly package."""
    assert AmsHub._find_parser(TestData.KAIFA_HOURLY) == "kaifa"


def test_find_parser_kaifa_se():
    """Auto-detection should pick the Swedish Kaifa parser."""
    assert AmsHub._find_parser(TestData.KAIFA_MA304H4_SE) == "kaifa_se"
38 |
def test_find_no_parser():
    """_find_parser returns None for data no parser recognises."""
    pkg = [1, 2, 3, 4, 5]
    parser_detected = AmsHub._find_parser(pkg)
    # Use identity comparison for None (PEP 8 / flake8 E711), not `== None`.
    assert parser_detected is None
--------------------------------------------------------------------------------
/tests/test_parser_aidon.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from custom_components.ams.parsers import aidon
3 | from .common_test_data import TestData
4 |
5 | sys.path.append('../')
6 |
7 |
def test_aidon_hourly():
    """Validate and parse an Aidon hourly package, spot-checking sensors."""
    # A missing package must never validate.
    assert not aidon.test_valid_data(None, oss=TestData.OSS_FALSE), "Package test for None failed"

    packet = TestData.AIDON_HOURLY
    assert aidon.test_valid_data(packet, oss=TestData.OSS_FALSE), "Data validity test failed"

    parsed, _ = aidon.parse_data({}, packet)

    # Spot-check a few parsed values and their units.
    power = parsed['ams_active_power_import']
    assert power['state'] == 1769, "Parsed ams_active_power_import is not correct"
    assert power['attributes']['unit_of_measurement'] == "W", "Missing attribute"
    assert parsed['ams_active_energy_import']['state'] == 94064.59, "Parsed ams_active_energy_import is not correct"

    # Every expected sensor must be present with manufacturer and unit set.
    expected_sensors = (
        'ams_active_power_import', 'ams_active_power_export', 'ams_reactive_power_import',
        'ams_reactive_power_export', 'ams_current_l1', 'ams_current_l2', 'ams_current_l3',
        'ams_voltage_l1', 'ams_voltage_l2', 'ams_voltage_l3', 'ams_active_energy_import',
        'ams_reactive_energy_import', 'ams_active_energy_export', 'ams_reactive_energy_export',
    )
    for sensor in expected_sensors:
        assert sensor in parsed, "Key missing in parsed data"
        attrs = parsed[sensor]['attributes']
        assert attrs['meter_manufacturer'] == "AIDON_V0001", "Missing attribute"
        assert 'unit_of_measurement' in attrs, "Missing attribute"
32 |
33 |
def test_aidon_short():
    """Validate and parse a short Aidon package."""
    packet = TestData.AIDON_SHORT
    assert aidon.test_valid_data(packet, oss=TestData.OSS_FALSE), "Data validity test failed"

    parsed, _ = aidon.parse_data({}, packet)

    # Spot-check one parsed value and its unit.
    power = parsed['ams_active_power_import']
    assert power['state'] == 6942, "Parsed ams_active_power_import is not correct"
    assert power['attributes']['unit_of_measurement'] == "W", "Missing attribute"

    # Every expected sensor must be present with manufacturer and unit set.
    expected_sensors = (
        'ams_active_power_import', 'ams_active_power_export', 'ams_reactive_power_import',
        'ams_reactive_power_export', 'ams_current_l1', 'ams_current_l2', 'ams_current_l3',
        'ams_voltage_l1', 'ams_voltage_l2', 'ams_voltage_l3',
    )
    for sensor in expected_sensors:
        assert sensor in parsed, "Key missing in parsed data"
        attrs = parsed[sensor]['attributes']
        assert attrs['meter_manufacturer'] == "AIDON_V0001", "Missing attribute"
        assert 'unit_of_measurement' in attrs, "Missing attribute"
53 |
54 |
def test_aidon_mini():
    """Parse an Aidon mini package with and without previously stored state."""
    packet = TestData.AIDON_MINI
    assert aidon.test_valid_data(packet, oss=TestData.OSS_FALSE), "Data validity test failed"

    # Parsing into an empty store leaves the store empty for this package.
    parsed, _ = aidon.parse_data({}, packet)
    assert parsed == {}

    # Parsing into a pre-populated store keeps the stored sensor intact.
    stored_state = {
        'ams_active_power_import': {
            'state': 734,
            'attributes': {
                'meter_manufacturer': 'AIDON_V0001',
                'meter_type': '6534 3-phase Meter with CB and Neutral Current Measurement',
                'obis_code': '1.0.1.7.0.255',
                'meter_serial': '7359992921288181',
                'unit_of_measurement': 'W',
                'icon': 'mdi:gauge',
            },
        },
    }
    parsed, _ = aidon.parse_data(stored_state, packet)
    power = parsed['ams_active_power_import']
    assert power['state'] == 734, "Parsed ams_active_power_import is not correct"
    assert power['attributes']['unit_of_measurement'] == "W", "Missing attribute"
79 |
80 |
def test_aidon_invalid_packet_size():
    """A package whose length is outside the valid range must not validate."""
    assert not aidon.test_valid_data(
        TestData.AIDON_HOURLY_INVALID_PKG_SIZE, oss=TestData.OSS_FALSE
    ), "Data validity test failed on incorrect pkg range size"


def test_aidon_invalid_read_packet_size():
    """A mismatch between read and decoded size must not validate."""
    assert not aidon.test_valid_data(
        TestData.AIDON_HOURLY_WRONG_SIZE, oss=TestData.OSS_FALSE
    ), "Data validity test failed on mismatch between read and decoded pkg size"


def test_aidon_invalid_frame_flag():
    """A package with a bad frame flag must not validate."""
    assert not aidon.test_valid_data(
        TestData.AIDON_HOURLY_INVALID_FRAME_FLAG, oss=TestData.OSS_FALSE
    ), "Data validity test failed on incorrect frame flag"


def test_aidon_invalid_data_flag():
    """A package with a bad data flag must not validate."""
    assert not aidon.test_valid_data(
        TestData.AIDON_HOURLY_INVALID_DATA_FLAG, oss=TestData.OSS_FALSE
    ), "Data validity test failed on incorrect data flag"


def test_aidon_invalid_frame_crc():
    """A package with a bad frame CRC must not validate."""
    assert not aidon.test_valid_data(
        TestData.AIDON_HOURLY_INCORRECT_PKG_CRC, oss=TestData.OSS_FALSE
    ), "Data validity test failed on frame crc"
113 |
114 |
def test_aidon_invalid_header_crc():
    """A package with a corrupted header CRC must be rejected."""

    parser = aidon
    pkg = TestData.AIDON_HOURLY_INCORRECT_HEADER_CRC
    # NOTE(review): this is the only invalid-data case in this module run with
    # OSS_TRUE (all siblings use OSS_FALSE) — confirm whether that is intentional.
    assert not parser.test_valid_data(pkg, oss=TestData.OSS_TRUE), "Data validity test failed on header crc"
120 |
121 |
def test_aidon_with_oss_brikken():
    """A 10-second package captured through the OSS brikken must validate."""
    assert aidon.test_valid_data(
        TestData.AIDON_OSS_10SEC, oss=TestData.OSS_TRUE
    ), "Data validity test failed on data from OSS brikken"


def test_aidon_with_oss_brikken_hourly():
    """Validate and parse an hourly package from the OSS brikken."""
    packet = TestData.AIDON_OSS_HOURLY
    assert aidon.test_valid_data(packet, oss=TestData.OSS_TRUE), "Data validity test failed on data from OSS brikken"

    parsed, _ = aidon.parse_data({}, packet)

    # Spot-check one parsed value and its unit.
    power = parsed['ams_active_power_import']
    assert power['state'] == 2526, "Parsed ams_active_power_import is not correct"
    assert power['attributes']['unit_of_measurement'] == "W", "Missing attribute"

    # 'ams_current_l2' is not checked for this package.
    expected_sensors = (
        'ams_active_power_import', 'ams_active_power_export', 'ams_reactive_power_import',
        'ams_reactive_power_export', 'ams_current_l1', 'ams_current_l3',
        'ams_voltage_l1', 'ams_voltage_l2', 'ams_voltage_l3',
    )
    for sensor in expected_sensors:
        assert sensor in parsed, "Key missing in parsed data"
        attrs = parsed[sensor]['attributes']
        assert attrs['meter_manufacturer'] == "AIDON_V0001", "Missing attribute"
        assert 'unit_of_measurement' in attrs, "Missing attribute"
148 |
--------------------------------------------------------------------------------
/tests/test_parser_aidon_se.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from custom_components.ams.parsers import aidon_se
3 | from .common_test_data import TestData
4 |
5 | sys.path.append('../')
6 |
7 |
def test_aidon_se():
    """Validate and parse a Swedish Aidon package.

    Swedish AMS data pushes all sensors at each transmit; only one type of
    package is pushed.
    """
    # A missing package must never validate.
    assert not aidon_se.test_valid_data(None, oss=TestData.OSS_FALSE), "Package test for None failed"

    packet = TestData.AIDON_SE_3PH
    assert aidon_se.test_valid_data(packet, oss=TestData.OSS_FALSE), "Data validity test failed"

    parsed, _ = aidon_se.parse_data({}, packet)

    # Spot-check one parsed value and its unit.
    power = parsed['ams_active_power_import']
    assert power['state'] == 760, "Parsed ams_active_power_import is not correct"
    assert power['attributes']['unit_of_measurement'] == "W", "Missing attribute"

    # Every expected sensor must be present with manufacturer and unit set.
    expected_sensors = (
        'ams_active_power_import', 'ams_active_power_export', 'ams_reactive_power_import',
        'ams_reactive_power_export', 'ams_current_l1', 'ams_current_l2', 'ams_current_l3',
        'ams_voltage_l1', 'ams_voltage_l2', 'ams_voltage_l3',
    )
    for sensor in expected_sensors:
        assert sensor in parsed, "Key missing in parsed data"
        attrs = parsed[sensor]['attributes']
        assert attrs['meter_manufacturer'] == "AIDON_H0001", "Missing attribute"
        assert 'unit_of_measurement' in attrs, "Missing attribute"
30 |
31 |
def test_aidon_se_invalid_packet_size():
    """A package whose length is outside the valid range must not validate."""
    assert not aidon_se.test_valid_data(
        TestData.AIDON_SE_3PH_INVALID_PKG_SIZE, oss=TestData.OSS_FALSE
    ), "Data validity test failed on incorrect pkg range size"


def test_aidon_se_invalid_read_packet_size():
    """A mismatch between read and decoded size must not validate."""
    assert not aidon_se.test_valid_data(
        TestData.AIDON_SE_3PH_WRONG_SIZE, oss=TestData.OSS_FALSE
    ), "Data validity test failed on mismatch between read and decoded pkg size"


def test_aidon_se_invalid_frame_flag():
    """A package with a bad frame flag must not validate."""
    assert not aidon_se.test_valid_data(
        TestData.AIDON_SE_3PH_INVALID_FRAME_FLAG, oss=TestData.OSS_FALSE
    ), "Data validity test failed on incorrect frame flag"


def test_aidon_se_invalid_data_flag():
    """A package with a bad data flag must not validate."""
    assert not aidon_se.test_valid_data(
        TestData.AIDON_SE_3PH_INVALID_DATA_FLAG, oss=TestData.OSS_FALSE
    ), "Data validity test failed on incorrect data flag"


def test_aidon_se_invalid_frame_crc():
    """A package with a bad frame CRC must not validate."""
    assert not aidon_se.test_valid_data(
        TestData.AIDON_SE_3PH_INCORRECT_PKG_CRC, oss=TestData.OSS_FALSE
    ), "Data validity test failed on frame crc"


def test_aidon_se_invalid_header_crc():
    """A package with a bad header CRC must not validate."""
    assert not aidon_se.test_valid_data(
        TestData.AIDON_SE_3PH_INCORRECT_HEADER_CRC, oss=TestData.OSS_FALSE
    ), "Data validity test failed on header crc"
69 |
--------------------------------------------------------------------------------
/tests/test_parser_kaifa.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from custom_components.ams.parsers import kaifa
3 | from .common_test_data import TestData
4 | sys.path.append('../')
5 |
6 |
def _assert_three_phase_sensors(parsed):
    """Shared check: all three-phase sensors present with Kfm_001 attributes."""
    for sensor in (
        'ams_active_power_import', 'ams_active_power_export', 'ams_reactive_power_import',
        'ams_reactive_power_export', 'ams_current_l1', 'ams_current_l2', 'ams_current_l3',
        'ams_voltage_l1', 'ams_voltage_l2', 'ams_voltage_l3',
    ):
        assert sensor in parsed, "Key missing in parsed data"
        attrs = parsed[sensor]['attributes']
        assert attrs['meter_manufacturer'] == "Kfm_001", "Missing attribute"
        assert 'unit_of_measurement' in attrs, "Missing attribute"


def test_kaifa_MA304H4D_short():
    """Validate and parse a short package from a Kaifa MA304H4D meter."""
    packet = TestData.KAIFA_MA304H4D_SHORT
    assert kaifa.test_valid_data(packet, oss=TestData.OSS_FALSE), "Data validity test failed"

    parsed, _ = kaifa.parse_data({}, packet, swedish=False)
    power = parsed['ams_active_power_import']
    assert power['state'] == 1590, "Parsed ams_active_power_import is not correct"
    assert power['attributes']['unit_of_measurement'] == "W", "Missing attribute"


def test_kaifa_MA304H4D_long():
    """Validate and parse a long package from a Kaifa MA304H4D meter."""
    packet = TestData.KAIFA_MA304H4D_LONG
    assert kaifa.test_valid_data(packet, oss=TestData.OSS_FALSE), "Data validity test failed"

    parsed, _ = kaifa.parse_data({}, packet, swedish=False)

    power = parsed['ams_active_power_import']
    assert power['state'] == 1590, "Parsed ams_active_power_import is not correct"
    assert power['attributes']['unit_of_measurement'] == "W", "Missing attribute"

    _assert_three_phase_sensors(parsed)


def test_kaifa_MA304H4_short():
    """Validate and parse a short package from a Kaifa MA304H4 meter."""
    packet = TestData.KAIFA_MA304H4_SHORT
    assert kaifa.test_valid_data(packet, oss=TestData.OSS_FALSE), "Data validity test failed"

    parsed, _ = kaifa.parse_data({}, packet, swedish=False)
    power = parsed['ams_active_power_import']
    assert power['state'] == 1415, "Parsed ams_active_power_import is not correct"
    assert power['attributes']['unit_of_measurement'] == "W", "Missing attribute"


def test_kaifa_MA304H4_long():
    """Validate and parse a long package from a Kaifa MA304H4 meter."""
    packet = TestData.KAIFA_MA304H4_LONG
    assert kaifa.test_valid_data(packet, oss=TestData.OSS_FALSE), "Data validity test failed"

    parsed, _ = kaifa.parse_data({}, packet, swedish=False)

    power = parsed['ams_active_power_import']
    assert power['state'] == 1418, "Parsed ams_active_power_import is not correct"
    assert power['attributes']['unit_of_measurement'] == "W", "Missing attribute"

    _assert_three_phase_sensors(parsed)


def test_kaifa_MA304H3E_short():
    """Validate and parse a short package from a Kaifa MA304H3E meter."""
    packet = TestData.KAIFA_MA304H3E_SHORT
    assert kaifa.test_valid_data(packet, oss=TestData.OSS_FALSE), "Data validity test failed"

    parsed, _ = kaifa.parse_data({}, packet, swedish=False)
    power = parsed['ams_active_power_import']
    assert power['state'] == 549, "Parsed ams_active_power_import is not correct"
    assert power['attributes']['unit_of_measurement'] == "W", "Missing attribute"


def test_kaifa_MA304H3E_long():
    """Validate and parse a long package from a Kaifa MA304H3E meter."""
    packet = TestData.KAIFA_MA304H3E_LONG
    assert kaifa.test_valid_data(packet, oss=TestData.OSS_FALSE), "Data validity test failed"

    parsed, _ = kaifa.parse_data({}, packet, swedish=False)

    power = parsed['ams_active_power_import']
    assert power['state'] == 546, "Parsed ams_active_power_import is not correct"
    assert power['attributes']['unit_of_measurement'] == "W", "Missing attribute"

    _assert_three_phase_sensors(parsed)
109 |
110 |
def test_kaifa_hourly():
    """Validate and parse a Kaifa hourly package (includes energy sensors)."""
    packet = TestData.KAIFA_HOURLY
    assert kaifa.test_valid_data(packet, oss=TestData.OSS_FALSE), "Data validity test failed"

    parsed, _ = kaifa.parse_data({}, packet, swedish=False)

    power = parsed['ams_active_power_import']
    assert power['state'] == 119, "Parsed ams_active_power_import is not correct"
    assert power['attributes']['unit_of_measurement'] == "W", "Missing attribute"
    assert parsed['ams_active_energy_import']['state'] == 10494.991, \
        "Parsed ams_active_energy_import is not correct"

    expected_sensors = (
        'ams_active_power_import', 'ams_active_power_export', 'ams_reactive_power_import',
        'ams_reactive_power_export', 'ams_current_l1', 'ams_current_l2', 'ams_current_l3',
        'ams_voltage_l1', 'ams_voltage_l2', 'ams_voltage_l3', 'ams_active_energy_import',
        'ams_reactive_energy_import', 'ams_active_energy_export', 'ams_reactive_energy_export',
    )
    for sensor in expected_sensors:
        assert sensor in parsed, "Key missing in parsed data"
        attrs = parsed[sensor]['attributes']
        assert attrs['meter_manufacturer'] == "Kfm_001", "Missing attribute"
        assert 'unit_of_measurement' in attrs, "Missing attribute"


def test_kaifa_1phase():
    """Validate and parse a single-phase Kaifa short package."""
    packet = TestData.KAIFA_1PH_SHORT
    assert kaifa.test_valid_data(packet, oss=TestData.OSS_FALSE), "Data validity test failed"

    parsed, _ = kaifa.parse_data({}, packet, swedish=False)

    power = parsed['ams_active_power_import']
    assert power['state'] == 932, "Parsed ams_active_power_import is not correct"
    assert power['attributes']['unit_of_measurement'] == "W", "Missing attribute"

    # Single-phase package: only L1 current/voltage are expected.
    expected_sensors = (
        'ams_active_power_import', 'ams_active_power_export', 'ams_reactive_power_import',
        'ams_reactive_power_export', 'ams_current_l1', 'ams_voltage_l1',
    )
    for sensor in expected_sensors:
        assert sensor in parsed, "Key missing in parsed data"
        attrs = parsed[sensor]['attributes']
        assert attrs['meter_manufacturer'] == "Kfm_001", "Missing attribute"
        assert 'unit_of_measurement' in attrs, "Missing attribute"


def test_kaifa_1phase_hourly():
    """Validate and parse a single-phase Kaifa hourly package."""
    # A missing package must never validate.
    assert not kaifa.test_valid_data(None, oss=TestData.OSS_FALSE), "Package test for None failed"
    packet = TestData.KAIFA_1PH_HOURLY
    assert kaifa.test_valid_data(packet, oss=TestData.OSS_FALSE), "Data validity test failed"

    parsed, _ = kaifa.parse_data({}, packet, swedish=False)

    power = parsed['ams_active_power_import']
    assert power['state'] == 1655, "Parsed ams_active_power_import is not correct"
    assert power['attributes']['unit_of_measurement'] == "W", "Missing attribute"
    assert parsed['ams_active_energy_import']['state'] == 25591.693, \
        "Parsed ams_active_energy_import is not correct"

    expected_sensors = (
        'ams_active_power_import', 'ams_active_power_export', 'ams_reactive_power_import',
        'ams_reactive_power_export', 'ams_current_l1', 'ams_voltage_l1', 'ams_active_energy_import',
        'ams_reactive_energy_import', 'ams_active_energy_export', 'ams_reactive_energy_export',
    )
    for sensor in expected_sensors:
        assert sensor in parsed, "Key missing in parsed data"
        attrs = parsed[sensor]['attributes']
        assert attrs['meter_manufacturer'] == "Kfm_001", "Missing attribute"
        assert 'unit_of_measurement' in attrs, "Missing attribute"
180 |
181 |
def test_kaifa_H4PSE():
    """Parse a long Kaifa package through the swedish=True branch.

    NOTE(review): this feeds KAIFA_MA304H4D_LONG (Norwegian H4D data) through
    the Swedish code path — confirm that is the intended fixture.
    """
    # A missing package must never validate.
    assert not kaifa.test_valid_data(None, oss=TestData.OSS_FALSE), "Package test for None failed"
    packet = TestData.KAIFA_MA304H4D_LONG
    assert kaifa.test_valid_data(packet, oss=TestData.OSS_FALSE), "Data validity test failed"

    parsed, _ = kaifa.parse_data({}, packet, swedish=True)

    power = parsed['ams_active_power_import']
    assert power['state'] == 1590, "Parsed ams_active_power_import is not correct"
    assert power['attributes']['unit_of_measurement'] == "W", "Missing attribute"
    assert parsed['ams_active_energy_import']['state'] == 145122.745, \
        "Parsed ams_active_energy_import is not correct"

    expected_sensors = (
        'ams_active_power_import', 'ams_active_power_export', 'ams_reactive_power_import',
        'ams_reactive_power_export', 'ams_current_l1', 'ams_voltage_l1', 'ams_active_energy_import',
        'ams_reactive_energy_import', 'ams_active_energy_export', 'ams_reactive_energy_export',
    )
    for sensor in expected_sensors:
        assert sensor in parsed, "Key missing in parsed data"
        attrs = parsed[sensor]['attributes']
        assert attrs['meter_manufacturer'] == "Kfm_001", "Missing attribute"
        assert 'unit_of_measurement' in attrs, "Missing attribute"
205 |
206 |
def test_kaifa_invalid_packet_size():
    """A package whose length is outside the valid range must not validate."""
    assert not kaifa.test_valid_data(
        TestData.KAIFA_INVALID_PKG_SIZE, oss=TestData.OSS_FALSE
    ), "Data validity test failed on incorrect pkg range size"


def test_kaifa_invalid_read_packet_size():
    """A mismatch between read and decoded size must not validate."""
    assert not kaifa.test_valid_data(
        TestData.KAIFA_WRONG_SIZE, oss=TestData.OSS_FALSE
    ), "Data validity test failed on mismatch between read and decoded pkg size"


def test_kaifa_invalid_frame_flag():
    """A package with a bad frame flag must not validate."""
    assert not kaifa.test_valid_data(
        TestData.KAIFA_INVALID_FRAME_FLAG, oss=TestData.OSS_FALSE
    ), "Data validity test failed on incorrect frame flag"


def test_kaifa_invalid_data_flag():
    """A package with a bad data flag must not validate."""
    assert not kaifa.test_valid_data(
        TestData.KAIFA_INVALID_DATA_FLAG, oss=TestData.OSS_FALSE
    ), "Data validity test failed on incorrect data flag"


def test_kaifa_invalid_frame_crc():
    """A package with a bad frame CRC must not validate."""
    assert not kaifa.test_valid_data(
        TestData.KAIFA_INCORRECT_PKG_CRC, oss=TestData.OSS_FALSE
    ), "Data validity test failed on frame crc"


def test_kaifa_invalid_header_crc():
    """A package with a bad header CRC must not validate."""
    assert not kaifa.test_valid_data(
        TestData.KAIFA_INCORRECT_HEADER_CRC, oss=TestData.OSS_FALSE
    ), "Data validity test failed on header crc"
246 |
--------------------------------------------------------------------------------
/tests/test_parser_kaifa_se.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from custom_components.ams.parsers import kaifa_se
3 | from .common_test_data import TestData
4 | sys.path.append('../')
5 |
6 |
def test_kaifa_MA304H4_se():
    """Validate and parse a Swedish Kaifa MA304H4 package."""
    # A missing package must never validate.
    assert not kaifa_se.test_valid_data(None, oss=TestData.OSS_FALSE), "Package test for None failed"

    packet = TestData.KAIFA_MA304H4_SE
    assert kaifa_se.test_valid_data(packet, oss=TestData.OSS_FALSE), "Data validity test failed"

    parsed, _ = kaifa_se.parse_data({}, packet)

    # Spot-check one parsed value and its unit.
    power = parsed['ams_active_power_import']
    assert power['state'] == 297, "Parsed ams_active_power_import is not correct"
    assert power['attributes']['unit_of_measurement'] == "W", "Missing attribute"

    # Every expected sensor must be present with manufacturer and unit set.
    expected_sensors = (
        'ams_active_power_import', 'ams_active_power_export', 'ams_reactive_power_import',
        'ams_reactive_power_export', 'ams_current_l1', 'ams_current_l2', 'ams_current_l3',
        'ams_voltage_l1', 'ams_voltage_l2', 'ams_voltage_l3', 'ams_active_energy_import',
        'ams_active_energy_export', 'ams_reactive_energy_import', 'ams_reactive_energy_export',
    )
    for sensor in expected_sensors:
        assert sensor in parsed, "Key missing in parsed data"
        attrs = parsed[sensor]['attributes']
        assert attrs['meter_manufacturer'] == "KFM_001", "Missing attribute"
        assert 'unit_of_measurement' in attrs, "Missing attribute"
30 |
31 |
def test_kaifa_se_invalid_packet_size():
    """A package whose length is outside the valid range must not validate."""
    assert not kaifa_se.test_valid_data(
        TestData.KAIFA_SE_INVALID_PKG_SIZE, oss=TestData.OSS_FALSE
    ), "Data validity test failed on incorrect pkg range size"


def test_kaifa_se_invalid_read_packet_size():
    """A mismatch between read and decoded size must not validate."""
    assert not kaifa_se.test_valid_data(
        TestData.KAIFA_SE_WRONG_SIZE, oss=TestData.OSS_FALSE
    ), "Data validity test failed on mismatch between read and decoded pkg size"


def test_kaifa_se_invalid_frame_flag():
    """A package with a bad frame flag must not validate."""
    assert not kaifa_se.test_valid_data(
        TestData.KAIFA_SE_INVALID_FRAME_FLAG, oss=TestData.OSS_FALSE
    ), "Data validity test failed on incorrect frame flag"


def test_kaifa_se_invalid_data_flag():
    """A package with a bad data flag must not validate."""
    assert not kaifa_se.test_valid_data(
        TestData.KAIFA_SE_INVALID_DATA_FLAG, oss=TestData.OSS_FALSE
    ), "Data validity test failed on incorrect data flag"


def test_kaifa_se_invalid_frame_crc():
    """A package with a bad frame CRC must not validate."""
    assert not kaifa_se.test_valid_data(
        TestData.KAIFA_SE_INCORRECT_PKG_CRC, oss=TestData.OSS_FALSE
    ), "Data validity test failed on frame crc"


def test_kaifa_se_invalid_header_crc():
    """A package with a bad header CRC must not validate."""
    assert not kaifa_se.test_valid_data(
        TestData.KAIFA_SE_INCORRECT_HEADER_CRC, oss=TestData.OSS_FALSE
    ), "Data validity test failed on header crc"
71 |
--------------------------------------------------------------------------------
/tests/test_parser_kamstrup.py:
--------------------------------------------------------------------------------
1 | import sys
2 | from custom_components.ams.parsers import kamstrup
3 | from .common_test_data import TestData
4 |
5 | sys.path.append('../')
6 |
7 |
def test_kamstrup():
    """Validate and parse a Kamstrup package."""
    # A missing package must never validate.
    assert not kamstrup.test_valid_data(None, oss=TestData.OSS_FALSE), "Package test for None failed"

    packet = TestData.KAMSTRUP
    assert kamstrup.test_valid_data(packet, oss=TestData.OSS_FALSE), "Data validity test failed"

    parsed, _ = kamstrup.parse_data({}, packet)

    # Spot-check one parsed value and its unit.
    power = parsed['ams_active_power_import']
    assert power['state'] == 1202, "Parsed ams_active_power_import is not correct"
    assert power['attributes']['unit_of_measurement'] == "W", "Missing attribute"

    # Every expected sensor must be present with manufacturer and unit set.
    expected_sensors = (
        'ams_active_power_import', 'ams_active_power_export', 'ams_reactive_power_import',
        'ams_reactive_power_export', 'ams_current_l1', 'ams_current_l2', 'ams_current_l3',
        'ams_voltage_l1', 'ams_voltage_l2', 'ams_voltage_l3',
    )
    for sensor in expected_sensors:
        assert sensor in parsed, "Key missing in parsed data"
        attrs = parsed[sensor]['attributes']
        assert attrs['meter_manufacturer'] == "Kamstrup_V0001", "Missing attribute"
        assert 'unit_of_measurement' in attrs, "Missing attribute"
30 |
31 |
def test_kamstrup_hourly():
    """Validate and parse a Kamstrup hourly package (includes energy sensors)."""
    packet = TestData.KAMSTRUP_HOURLY
    assert kamstrup.test_valid_data(packet, oss=TestData.OSS_FALSE), "Data validity test failed on hourly"

    parsed, _ = kamstrup.parse_data({}, packet)

    # Spot-check a few parsed values and their units.
    power = parsed['ams_active_power_import']
    assert power['state'] == 2690, "Parsed ams_active_power_import is not correct"
    assert power['attributes']['unit_of_measurement'] == "W", "Missing attribute"
    assert parsed['ams_active_energy_import']['state'] == 155232.51, \
        "Parsed ams_active_energy_import is not correct"

    # Every expected sensor must be present with manufacturer and unit set.
    expected_sensors = (
        'ams_active_power_import', 'ams_active_power_export', 'ams_reactive_power_import',
        'ams_reactive_power_export', 'ams_current_l1', 'ams_current_l2', 'ams_current_l3', 'ams_voltage_l1',
        'ams_voltage_l2', 'ams_voltage_l3', 'ams_active_energy_import', 'ams_reactive_energy_import',
        'ams_active_energy_export', 'ams_reactive_energy_export',
    )
    for sensor in expected_sensors:
        assert sensor in parsed, "Key missing in parsed data"
        attrs = parsed[sensor]['attributes']
        assert attrs['meter_manufacturer'] == "Kamstrup_V0001", "Missing attribute"
        assert 'unit_of_measurement' in attrs, "Missing attribute"
54 |
55 |
def test_kamstrup_invalid_packet_size():
    """A package whose length is outside the valid range must not validate."""
    assert not kamstrup.test_valid_data(
        TestData.KAMSTRUP_INVALID_PKG_SIZE, oss=TestData.OSS_FALSE
    ), "Data validity test failed on incorrect pkg range size"


def test_kamstrup_invalid_read_packet_size():
    """A mismatch between read and decoded size must not validate."""
    assert not kamstrup.test_valid_data(
        TestData.KAMSTRUP_WRONG_SIZE, oss=TestData.OSS_FALSE
    ), "Data validity test failed on mismatch between read and decoded pkg size"


def test_kamstrup_invalid_frame_flag():
    """A package with a bad frame flag must not validate."""
    assert not kamstrup.test_valid_data(
        TestData.KAMSTRUP_INVALID_FRAME_FLAG, oss=TestData.OSS_FALSE
    ), "Data validity test failed on incorrect frame flag"


def test_kamstrup_invalid_data_flag():
    """A package with a bad data flag must not validate."""
    assert not kamstrup.test_valid_data(
        TestData.KAMSTRUP_INVALID_DATA_FLAG, oss=TestData.OSS_FALSE
    ), "Data validity test failed on incorrect data flag"


def test_kamstrup_invalid_frame_crc():
    """A package with a bad frame CRC must not validate."""
    assert not kamstrup.test_valid_data(
        TestData.KAMSTRUP_INCORRECT_PKG_CRC, oss=TestData.OSS_FALSE
    ), "Data validity test failed on frame crc"


def test_kamstrup_invalid_header_crc():
    """A package with a bad header CRC must not validate."""
    assert not kamstrup.test_valid_data(
        TestData.KAMSTRUP_INCORRECT_HEADER_CRC, oss=TestData.OSS_FALSE
    ), "Data validity test failed on header crc"
95 |
--------------------------------------------------------------------------------